2020-08-27 19:13:08 -04:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
"""
|
|
|
|
OnionShare | https://onionshare.org/
|
|
|
|
|
2021-02-22 16:35:14 -05:00
|
|
|
Copyright (C) 2014-2021 Micah Lee, et al. <micah@micahflee.com>
|
2020-08-27 19:13:08 -04:00
|
|
|
|
|
|
|
This program is free software: you can redistribute it and/or modify
|
|
|
|
it under the terms of the GNU General Public License as published by
|
|
|
|
the Free Software Foundation, either version 3 of the License, or
|
|
|
|
(at your option) any later version.
|
|
|
|
|
|
|
|
This program is distributed in the hope that it will be useful,
|
|
|
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
GNU General Public License for more details.
|
|
|
|
|
|
|
|
You should have received a copy of the GNU General Public License
|
|
|
|
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
"""
|
|
|
|
|
2021-03-30 10:56:16 -04:00
|
|
|
import binascii
|
|
|
|
import hashlib
|
2018-09-21 02:43:04 -04:00
|
|
|
import os
|
2018-09-21 02:58:27 -04:00
|
|
|
import sys
|
2018-09-21 02:43:04 -04:00
|
|
|
import tempfile
|
|
|
|
import zipfile
|
2018-09-21 02:58:27 -04:00
|
|
|
import mimetypes
|
2021-03-30 10:56:16 -04:00
|
|
|
from datetime import datetime
|
|
|
|
from flask import Response, request, render_template, make_response, abort
|
2021-01-31 06:38:47 -05:00
|
|
|
from unidecode import unidecode
|
2021-03-30 10:56:16 -04:00
|
|
|
from werkzeug.http import parse_date, http_date
|
2021-01-31 06:38:47 -05:00
|
|
|
from werkzeug.urls import url_quote
|
2018-09-21 02:58:27 -04:00
|
|
|
|
2019-09-01 18:05:53 -04:00
|
|
|
from .send_base_mode import SendBaseModeWeb
|
2018-09-21 02:58:27 -04:00
|
|
|
|
|
|
|
|
2021-03-30 10:56:16 -04:00
|
|
|
def make_etag(data):
    """
    Compute a strong ETag value for the contents of a file-like object.

    Reads *data* in fixed-size chunks (so large files never need to fit in
    memory), hashes them with SHA-256, and returns the hex digest wrapped
    in the quoted form that an HTTP ETag header expects.
    """
    digest = hashlib.sha256()

    # iter() with a b"" sentinel keeps calling read() until EOF.
    for chunk in iter(lambda: data.read(4096), b""):
        digest.update(chunk)

    hex_digest = binascii.hexlify(digest.digest()).decode("utf-8")
    return f'"sha256:{hex_digest}"'
|
|
|
|
|
|
|
|
|
|
|
|
def parse_range_header(range_header: str, target_size: int) -> list:
    """
    Parse an HTTP "Range" header into a sorted, merged list of byte ranges.

    range_header: raw header value (e.g. "bytes=0-99,200-"), or None if the
        client sent no Range header.
    target_size: total size in bytes of the resource being served.

    Returns a list of non-overlapping (start, end) tuples with inclusive,
    zero-based offsets, sorted by start. With no header, the whole resource
    [(0, target_size - 1)] is returned. Malformed or unsatisfiable specs
    abort the request with HTTP 416 (Range Not Satisfiable).
    """
    # Offset of the last valid byte in the resource.
    end_index = target_size - 1
    if range_header is None:
        # No Range header: the whole resource is one range.
        return [(0, end_index)]

    # Only the "bytes" unit is supported.
    bytes_ = "bytes="
    if not range_header.startswith(bytes_):
        abort(416)

    ranges = []
    for range_ in range_header[len(bytes_) :].split(","):
        split = range_.split("-")
        if len(split) == 1:
            # A bare number with no dash, e.g. "bytes=100": treat it as
            # "from that offset to the end of the resource".
            # NOTE(review): start > end_index is not rejected here, so an
            # out-of-bounds offset yields an inverted tuple — confirm
            # downstream callers tolerate that.
            try:
                start = int(split[0])
                end = end_index
            except ValueError:
                abort(416)
        elif len(split) == 2:
            start, end = split[0], split[1]
            if not start:
                # parse ranges of the form "bytes=-100" (i.e., last 100 bytes)
                end = end_index
                try:
                    start = end - int(split[1]) + 1
                except ValueError:
                    abort(416)
            else:
                # parse ranges of the form "bytes=100-200"
                try:
                    start = int(start)
                    if not end:
                        # "bytes=100-": read to the end; target_size is one
                        # past end_index but is clamped below.
                        end = target_size
                    else:
                        end = int(end)
                except ValueError:
                    abort(416)

            # An inverted range can never be satisfied.
            if end < start:
                abort(416)

            # Clamp ranges that run past the end of the resource.
            end = min(end, end_index)
        else:
            # More than one "-" inside a single range spec is malformed.
            abort(416)

        ranges.append((start, end))

    # merge the ranges
    merged = []
    ranges = sorted(ranges, key=lambda x: x[0])
    for range_ in ranges:
        # initial case
        if not merged:
            merged.append(range_)
        else:
            # merge ranges that are adjacent or overlapping
            if range_[0] <= merged[-1][1] + 1:
                merged[-1] = (merged[-1][0], max(range_[1], merged[-1][1]))
            else:
                merged.append(range_)

    return merged
|
|
|
|
|
|
|
|
|
2019-09-01 18:05:53 -04:00
|
|
|
class ShareModeWeb(SendBaseModeWeb):
|
2018-09-21 02:58:27 -04:00
|
|
|
"""
|
2018-09-21 14:14:32 -04:00
|
|
|
All of the web logic for share mode
|
2018-09-21 02:58:27 -04:00
|
|
|
"""
|
2019-10-13 00:01:25 -04:00
|
|
|
|
2019-09-01 23:36:30 -04:00
|
|
|
def init(self):
|
2019-10-13 00:01:25 -04:00
|
|
|
self.common.log("ShareModeWeb", "init")
|
2019-09-02 22:45:14 -04:00
|
|
|
|
2019-09-02 00:22:59 -04:00
|
|
|
# Allow downloading individual files if "Stop sharing after files have been sent" is unchecked
|
2019-11-02 17:35:51 -04:00
|
|
|
self.download_individual_files = not self.web.settings.get(
|
|
|
|
"share", "autostop_sharing"
|
2019-10-13 00:01:25 -04:00
|
|
|
)
|
2019-09-01 23:36:30 -04:00
|
|
|
|
2021-03-30 10:56:16 -04:00
|
|
|
self.download_etag = None
|
|
|
|
self.gzip_etag = None
|
|
|
|
self.last_modified = datetime.utcnow()
|
|
|
|
|
2018-09-21 14:14:32 -04:00
|
|
|
    def define_routes(self):
        """
        The web app routes for sharing files
        """

        @self.web.app.route("/", defaults={"path": ""})
        @self.web.app.route("/<path:path>")
        def index(path):
            """
            Render the template for the onionshare landing page.
            """
            self.web.add_request(self.web.REQUEST_LOAD, request.path)

            # Deny new downloads if "Stop sharing after files have been sent" is checked and there is
            # currently a download
            deny_download = (
                self.web.settings.get("share", "autostop_sharing")
                and self.download_in_progress
            )
            if deny_download:
                r = make_response(render_template("denied.html"))
                return self.web.add_security_headers(r)

            # If download is allowed to continue, serve download page
            # The advertised size depends on whether the response will be
            # gzip-compressed for this client.
            if self.should_use_gzip():
                self.filesize = self.gzip_filesize
            else:
                self.filesize = self.download_filesize

            return self.render_logic(path)

        @self.web.app.route("/download")
        def download():
            """
            Download the zip file.
            """
            # Deny new downloads if "Stop After First Download" is checked and there is
            # currently a download
            deny_download = (
                self.web.settings.get("share", "autostop_sharing")
                and self.download_in_progress
            )
            if deny_download:
                r = make_response(render_template("denied.html"))
                return self.web.add_security_headers(r)

            # Prepare some variables to use inside generate() function below
            # which is outside of the request context
            shutdown_func = request.environ.get("werkzeug.server.shutdown")
            request_path = request.path

            # If this is a zipped file, then serve as-is. If it's not zipped, then,
            # if the http client supports gzip compression, gzip the file first
            # and serve that
            use_gzip = self.should_use_gzip()
            if use_gzip:
                file_to_download = self.gzip_filename
                self.filesize = self.gzip_filesize
                etag = self.gzip_etag
            else:
                file_to_download = self.download_filename
                self.filesize = self.download_filesize
                etag = self.download_etag

            # for range requests
            range_, status_code = self.get_range_and_status_code(
                self.filesize, etag, self.last_modified
            )

            # Tell GUI the download started
            history_id = self.cur_history_id
            self.cur_history_id += 1
            self.web.add_request(
                self.web.REQUEST_STARTED,
                request_path,
                {"id": history_id, "use_gzip": use_gzip},
            )

            basename = os.path.basename(self.download_filename)

            if status_code == 304:
                # Not Modified: no body at all, headers only.
                r = Response()
            else:
                # Stream the requested byte range from disk via generate().
                r = Response(
                    self.generate(
                        shutdown_func,
                        range_,
                        file_to_download,
                        request_path,
                        history_id,
                        self.filesize,
                    )
                )

            if use_gzip:
                r.headers.set("Content-Encoding", "gzip")

            # range_ bounds are inclusive, hence the +1.
            r.headers.set("Content-Length", range_[1] - range_[0] + 1)
            # Both an ASCII fallback and an RFC 5987 UTF-8 filename are sent
            # so non-ASCII names survive in all clients.
            filename_dict = {
                "filename": unidecode(basename),
                "filename*": "UTF-8''%s" % url_quote(basename),
            }
            r.headers.set("Content-Disposition", "attachment", **filename_dict)
            r = self.web.add_security_headers(r)
            # guess content type
            (content_type, _) = mimetypes.guess_type(basename, strict=False)
            if content_type is not None:
                r.headers.set("Content-Type", content_type)
            r.headers.set("Accept-Ranges", "bytes")
            r.headers.set("ETag", etag)
            r.headers.set("Last-Modified", http_date(self.last_modified))
            # we need to set this for range requests
            r.headers.set("Vary", "Accept-Encoding")

            if status_code == 206:
                # Partial Content: tell the client which slice it is getting.
                r.headers.set(
                    "Content-Range",
                    "bytes {}-{}/{}".format(range_[0], range_[1], self.filesize),
                )

            r.status_code = status_code

            return r
|
|
|
|
|
2021-03-30 10:56:16 -04:00
|
|
|
    @classmethod
    def get_range_and_status_code(cls, dl_size, etag, last_modified):
        """
        Work out which byte range to serve and which HTTP status to use,
        based on the current request's conditional/Range headers.

        dl_size: total size in bytes of the file being served.
        etag: the current ETag of that file.
        last_modified: datetime the share content was created.

        Returns (range_, status_code) where range_ is an inclusive
        (start, end) tuple. status_code is 200 (full body), 206 (partial
        content), or 304 (not modified). Aborts with 416 for multipart
        ranges and 412 for failed preconditions.
        """
        use_default_range = True
        status_code = 200
        range_header = request.headers.get("Range")

        # range requests are only allowed for get
        if request.method == "GET":
            ranges = parse_range_header(range_header, dl_size)
            # Anything other than exactly "the whole file" becomes a 206.
            if not (
                len(ranges) == 1 and ranges[0][0] == 0 and ranges[0][1] == dl_size - 1
            ):
                use_default_range = False
                status_code = 206

            if range_header:
                # If-Range: only honor the range when the validator still
                # matches; otherwise fall back to sending the full file.
                if_range = request.headers.get("If-Range")
                if if_range and if_range != etag:
                    use_default_range = True
                    status_code = 200

        if use_default_range:
            ranges = [(0, dl_size - 1)]

        if len(ranges) > 1:
            abort(416)  # We don't support multipart range requests yet
        range_ = ranges[0]

        # NOTE(review): this reads a request header named "ETag" (normally a
        # response header; clients usually send If-Match) — confirm intent.
        etag_header = request.headers.get("ETag")
        if etag_header is not None and etag_header != etag:
            abort(412)

        if_unmod = request.headers.get("If-Unmodified-Since")
        if if_unmod:
            if_date = parse_date(if_unmod)
            if if_date and if_date > last_modified:
                # Resource is older than the client's precondition allows.
                abort(412)
            elif range_header is None:
                # Unchanged and no range requested: reply 304 with no body.
                status_code = 304

        return range_, status_code
|
|
|
|
|
2021-04-29 20:13:05 -04:00
|
|
|
    def generate(
        self, shutdown_func, range_, file_to_download, path, history_id, filesize
    ):
        """
        Generator that streams the requested byte range of the download.

        shutdown_func: werkzeug server-shutdown callable (or None), used to
            stop the server after a completed auto-stop download.
        range_: inclusive (start, end) byte range to serve.
        file_to_download: filesystem path of the file being streamed.
        path: request path, used for GUI progress/cancel events.
        history_id: id tying GUI events to this download.
        filesize: total size used for percentage reporting.

        Yields chunks of the file; as side effects it updates download
        state on self/self.web and may stop the web server entirely.
        """
        # The user hasn't canceled the download
        self.client_cancel = False

        # Starting a new download
        if self.web.settings.get("share", "autostop_sharing"):
            self.download_in_progress = True

        start, end = range_

        chunk_size = 102400  # 100kb

        fp = open(file_to_download, "rb")
        fp.seek(start)
        self.web.done = False
        canceled = False
        bytes_left = end - start + 1
        while not self.web.done:
            # The user has canceled the download, so stop serving the file
            if not self.web.stop_q.empty():
                self.web.add_request(
                    self.web.REQUEST_CANCELED, path, {"id": history_id}
                )
                break

            # Never read past the end of the requested range.
            read_size = min(chunk_size, bytes_left)
            chunk = fp.read(read_size)
            if chunk == b"":
                self.web.done = True
            else:
                try:
                    yield chunk

                    # tell GUI the progress
                    downloaded_bytes = fp.tell()
                    percent = (1.0 * downloaded_bytes / filesize) * 100
                    bytes_left -= read_size

                    # only output to stdout if running onionshare in CLI mode, or if using Linux (#203, #304)
                    if (
                        not self.web.is_gui
                        or self.common.platform == "Linux"
                        or self.common.platform == "BSD"
                    ):
                        sys.stdout.write(
                            "\r{0:s}, {1:.2f}% ".format(
                                self.common.human_readable_filesize(downloaded_bytes),
                                percent,
                            )
                        )
                        sys.stdout.flush()

                    self.web.add_request(
                        self.web.REQUEST_PROGRESS,
                        path,
                        {
                            "id": history_id,
                            "bytes": downloaded_bytes,
                            "total_bytes": filesize,
                        },
                    )
                    self.web.done = False
                except Exception:
                    # looks like the download was canceled
                    # (a failed yield means the client went away mid-stream)
                    self.web.done = True
                    canceled = True

                    # tell the GUI the download has canceled
                    self.web.add_request(
                        self.web.REQUEST_CANCELED, path, {"id": history_id}
                    )

        fp.close()

        if self.common.platform != "Darwin":
            sys.stdout.write("\n")

        # Download is finished
        if self.web.settings.get("share", "autostop_sharing"):
            self.download_in_progress = False

        # Close the server, if necessary
        if self.web.settings.get("share", "autostop_sharing") and not canceled:
            print("Stopped because transfer is complete")
            self.web.running = False
            try:
                if shutdown_func is None:
                    raise RuntimeError("Not running with the Werkzeug Server")
                shutdown_func()
            except Exception:
                pass
|
|
|
|
|
2019-10-13 00:01:25 -04:00
|
|
|
def directory_listing_template(
|
|
|
|
self, path, files, dirs, breadcrumbs, breadcrumbs_leaf
|
|
|
|
):
|
|
|
|
return make_response(
|
|
|
|
render_template(
|
|
|
|
"send.html",
|
|
|
|
file_info=self.file_info,
|
|
|
|
files=files,
|
|
|
|
dirs=dirs,
|
|
|
|
breadcrumbs=breadcrumbs,
|
|
|
|
breadcrumbs_leaf=breadcrumbs_leaf,
|
|
|
|
filename=os.path.basename(self.download_filename),
|
|
|
|
filesize=self.filesize,
|
|
|
|
filesize_human=self.common.human_readable_filesize(
|
|
|
|
self.download_filesize
|
|
|
|
),
|
|
|
|
is_zipped=self.is_zipped,
|
|
|
|
static_url_path=self.web.static_url_path,
|
|
|
|
download_individual_files=self.download_individual_files,
|
2021-04-12 18:15:51 -04:00
|
|
|
title=self.web.settings.get("general", "title"),
|
2019-10-13 00:01:25 -04:00
|
|
|
)
|
|
|
|
)
|
2019-09-01 18:44:44 -04:00
|
|
|
|
|
|
|
def set_file_info_custom(self, filenames, processed_size_callback):
|
|
|
|
self.common.log("ShareModeWeb", "set_file_info_custom")
|
|
|
|
self.web.cancel_compression = False
|
|
|
|
self.build_zipfile_list(filenames, processed_size_callback)
|
|
|
|
|
2019-10-13 00:01:25 -04:00
|
|
|
def render_logic(self, path=""):
|
2019-09-01 18:44:44 -04:00
|
|
|
if path in self.files:
|
|
|
|
filesystem_path = self.files[path]
|
|
|
|
|
|
|
|
# If it's a directory
|
|
|
|
if os.path.isdir(filesystem_path):
|
|
|
|
# Render directory listing
|
|
|
|
filenames = []
|
|
|
|
for filename in os.listdir(filesystem_path):
|
|
|
|
if os.path.isdir(os.path.join(filesystem_path, filename)):
|
2019-10-13 00:01:25 -04:00
|
|
|
filenames.append(filename + "/")
|
2019-09-01 18:44:44 -04:00
|
|
|
else:
|
|
|
|
filenames.append(filename)
|
|
|
|
filenames.sort()
|
2019-09-01 19:02:10 -04:00
|
|
|
return self.directory_listing(filenames, path, filesystem_path)
|
2019-09-01 18:44:44 -04:00
|
|
|
|
|
|
|
# If it's a file
|
|
|
|
elif os.path.isfile(filesystem_path):
|
2019-09-01 23:36:30 -04:00
|
|
|
if self.download_individual_files:
|
2019-09-02 22:45:14 -04:00
|
|
|
return self.stream_individual_file(filesystem_path)
|
2019-09-01 23:36:30 -04:00
|
|
|
else:
|
2019-09-09 02:35:05 -04:00
|
|
|
history_id = self.cur_history_id
|
|
|
|
self.cur_history_id += 1
|
|
|
|
return self.web.error404(history_id)
|
2019-09-01 18:44:44 -04:00
|
|
|
|
|
|
|
# If it's not a directory or file, throw a 404
|
|
|
|
else:
|
2019-09-09 02:35:05 -04:00
|
|
|
history_id = self.cur_history_id
|
|
|
|
self.cur_history_id += 1
|
|
|
|
return self.web.error404(history_id)
|
2019-09-01 18:44:44 -04:00
|
|
|
else:
|
|
|
|
# Special case loading /
|
|
|
|
|
2019-10-13 00:01:25 -04:00
|
|
|
if path == "":
|
2019-09-01 18:44:44 -04:00
|
|
|
# Root directory listing
|
|
|
|
filenames = list(self.root_files)
|
|
|
|
filenames.sort()
|
|
|
|
return self.directory_listing(filenames, path)
|
|
|
|
|
|
|
|
else:
|
|
|
|
# If the path isn't found, throw a 404
|
2019-09-09 02:35:05 -04:00
|
|
|
history_id = self.cur_history_id
|
|
|
|
self.cur_history_id += 1
|
|
|
|
return self.web.error404(history_id)
|
2019-09-01 18:44:44 -04:00
|
|
|
|
2019-06-13 06:33:34 -04:00
|
|
|
def build_zipfile_list(self, filenames, processed_size_callback=None):
|
2019-06-14 12:21:12 -04:00
|
|
|
self.common.log("ShareModeWeb", "build_zipfile_list")
|
2018-09-21 14:14:32 -04:00
|
|
|
for filename in filenames:
|
|
|
|
info = {
|
2019-10-13 00:01:25 -04:00
|
|
|
"filename": filename,
|
|
|
|
"basename": os.path.basename(filename.rstrip("/")),
|
2018-09-21 14:14:32 -04:00
|
|
|
}
|
|
|
|
if os.path.isfile(filename):
|
2019-10-13 00:01:25 -04:00
|
|
|
info["size"] = os.path.getsize(filename)
|
|
|
|
info["size_human"] = self.common.human_readable_filesize(info["size"])
|
|
|
|
self.file_info["files"].append(info)
|
2018-09-21 14:14:32 -04:00
|
|
|
if os.path.isdir(filename):
|
2019-10-13 00:01:25 -04:00
|
|
|
info["size"] = self.common.dir_size(filename)
|
|
|
|
info["size_human"] = self.common.human_readable_filesize(info["size"])
|
|
|
|
self.file_info["dirs"].append(info)
|
2020-12-21 23:42:29 -05:00
|
|
|
self.file_info["files"].sort(key=lambda k: k["basename"])
|
|
|
|
self.file_info["dirs"].sort(key=lambda k: k["basename"])
|
2018-09-21 14:14:32 -04:00
|
|
|
|
|
|
|
# Check if there's only 1 file and no folders
|
2019-10-13 00:01:25 -04:00
|
|
|
if len(self.file_info["files"]) == 1 and len(self.file_info["dirs"]) == 0:
|
|
|
|
self.download_filename = self.file_info["files"][0]["filename"]
|
|
|
|
self.download_filesize = self.file_info["files"][0]["size"]
|
2021-04-29 20:13:05 -04:00
|
|
|
with open(self.download_filename, "rb") as f:
|
2021-03-30 10:56:16 -04:00
|
|
|
self.download_etag = make_etag(f)
|
2018-09-21 15:29:23 -04:00
|
|
|
|
|
|
|
# Compress the file with gzip now, so we don't have to do it on each request
|
2019-10-13 00:01:25 -04:00
|
|
|
self.gzip_filename = tempfile.mkstemp("wb+")[1]
|
|
|
|
self._gzip_compress(
|
|
|
|
self.download_filename, self.gzip_filename, 6, processed_size_callback
|
|
|
|
)
|
2018-09-21 19:22:37 -04:00
|
|
|
self.gzip_filesize = os.path.getsize(self.gzip_filename)
|
2021-04-29 20:13:05 -04:00
|
|
|
with open(self.gzip_filename, "rb") as f:
|
2021-03-30 10:56:16 -04:00
|
|
|
self.gzip_etag = make_etag(f)
|
2018-09-21 15:29:23 -04:00
|
|
|
|
|
|
|
# Make sure the gzip file gets cleaned up when onionshare stops
|
2021-05-04 02:21:42 -04:00
|
|
|
self.web.cleanup_filenames.append(self.gzip_filename)
|
2018-09-21 15:29:23 -04:00
|
|
|
|
|
|
|
self.is_zipped = False
|
|
|
|
|
2018-09-21 14:14:32 -04:00
|
|
|
else:
|
|
|
|
# Zip up the files and folders
|
2019-10-13 00:01:25 -04:00
|
|
|
self.zip_writer = ZipWriter(
|
|
|
|
self.common, processed_size_callback=processed_size_callback
|
|
|
|
)
|
2018-09-21 14:36:19 -04:00
|
|
|
self.download_filename = self.zip_writer.zip_filename
|
2019-10-13 00:01:25 -04:00
|
|
|
for info in self.file_info["files"]:
|
|
|
|
self.zip_writer.add_file(info["filename"])
|
2018-09-21 14:14:32 -04:00
|
|
|
# Canceling early?
|
|
|
|
if self.web.cancel_compression:
|
2018-09-21 14:36:19 -04:00
|
|
|
self.zip_writer.close()
|
2018-09-21 14:14:32 -04:00
|
|
|
return False
|
|
|
|
|
2019-10-13 00:01:25 -04:00
|
|
|
for info in self.file_info["dirs"]:
|
|
|
|
if not self.zip_writer.add_dir(info["filename"]):
|
2018-09-21 14:14:32 -04:00
|
|
|
return False
|
|
|
|
|
2018-09-21 14:36:19 -04:00
|
|
|
self.zip_writer.close()
|
|
|
|
self.download_filesize = os.path.getsize(self.download_filename)
|
2021-04-29 20:13:05 -04:00
|
|
|
with open(self.download_filename, "rb") as f:
|
2021-03-30 10:56:16 -04:00
|
|
|
self.download_etag = make_etag(f)
|
2018-09-21 15:29:23 -04:00
|
|
|
|
|
|
|
# Make sure the zip file gets cleaned up when onionshare stops
|
2021-05-04 02:21:42 -04:00
|
|
|
self.web.cleanup_filenames.append(self.zip_writer.zip_filename)
|
2021-05-04 02:42:23 -04:00
|
|
|
self.web.cleanup_filenames.append(self.zip_writer.zip_temp_dir)
|
2018-09-21 15:29:23 -04:00
|
|
|
|
2018-09-21 14:36:19 -04:00
|
|
|
self.is_zipped = True
|
2018-09-21 14:14:32 -04:00
|
|
|
|
|
|
|
return True
|
2018-09-21 02:43:04 -04:00
|
|
|
|
|
|
|
|
|
|
|
class ZipWriter(object):
    """
    Compress files and directories into a single zip archive.

    If no zip_filename is passed in, a fresh temporary directory is created
    and the archive gets a randomized onionshare_*.zip name inside it. An
    optional processed_size_callback is invoked with the running total of
    raw bytes added so far.
    """

    def __init__(self, common, zip_filename=None, processed_size_callback=None):
        self.common = common
        self.cancel_compression = False

        if zip_filename:
            self.zip_filename = zip_filename
        else:
            # No name supplied: invent one inside a private temp directory.
            self.zip_temp_dir = tempfile.mkdtemp()
            random_part = self.common.random_string(4, 6)
            self.zip_filename = f"{self.zip_temp_dir}/onionshare_{random_part}.zip"

        self.z = zipfile.ZipFile(self.zip_filename, "w", allowZip64=True)

        # Default the progress callback to a no-op so callers may omit it.
        if processed_size_callback is None:
            processed_size_callback = lambda _: None
        self.processed_size_callback = processed_size_callback

        self._size = 0
        self.processed_size_callback(self._size)

    def add_file(self, filename):
        """
        Add a single file to the archive under its basename, then report
        the new running total to the progress callback.
        """
        archive_name = os.path.basename(filename)
        self.z.write(filename, archive_name, zipfile.ZIP_DEFLATED)
        self._size += os.path.getsize(filename)
        self.processed_size_callback(self._size)

    def add_dir(self, filename):
        """
        Recursively add a directory tree to the archive.

        Archive paths are made relative to the directory's parent, and
        symlinks are skipped. Returns False if compression was canceled
        mid-walk, True otherwise.
        """
        prefix = os.path.dirname(filename.rstrip("/")) + "/"
        for dirpath, _subdirs, names in os.walk(filename):
            for name in names:
                # Bail out as soon as a cancellation is requested.
                if self.cancel_compression:
                    return False

                source = os.path.join(dirpath, name)
                if os.path.islink(source):
                    continue
                self.z.write(source, source[len(prefix) :], zipfile.ZIP_DEFLATED)
                self._size += os.path.getsize(source)
                self.processed_size_callback(self._size)

        return True

    def close(self):
        """
        Finalize and close the underlying zip archive.
        """
        self.z.close()
|