Diffstat (limited to 'onionshare/web/share_mode.py')
-rw-r--r--  onionshare/web/share_mode.py  324
1 file changed, 167 insertions(+), 157 deletions(-)
diff --git a/onionshare/web/share_mode.py b/onionshare/web/share_mode.py
index 560a8ba4..21dea639 100644
--- a/onionshare/web/share_mode.py
+++ b/onionshare/web/share_mode.py
@@ -3,65 +3,46 @@ import sys
import tempfile
import zipfile
import mimetypes
-import gzip
from flask import Response, request, render_template, make_response
+from .send_base_mode import SendBaseModeWeb
from .. import strings
-class ShareModeWeb(object):
+class ShareModeWeb(SendBaseModeWeb):
"""
All of the web logic for share mode
"""
- def __init__(self, common, web):
- self.common = common
- self.common.log('ShareModeWeb', '__init__')
-
- self.web = web
-
- # Information about the file to be shared
- self.file_info = []
- self.is_zipped = False
- self.download_filename = None
- self.download_filesize = None
- self.gzip_filename = None
- self.gzip_filesize = None
- self.zip_writer = None
-
- self.download_count = 0
- # If "Stop After First Download" is checked (stay_open == False), only allow
- # one download at a time.
- self.download_in_progress = False
+ def init(self):
+ self.common.log("ShareModeWeb", "init")
- self.define_routes()
+ # Allow downloading individual files if "Stop sharing after files have been sent" is unchecked
+ self.download_individual_files = not self.common.settings.get(
+ "close_after_first_download"
+ )
def define_routes(self):
"""
The web app routes for sharing files
"""
- @self.web.app.route("/<slug_candidate>")
- def index(slug_candidate):
- self.web.check_slug_candidate(slug_candidate)
- return index_logic()
-
- @self.web.app.route("/")
- def index_public():
- if not self.common.settings.get('public_mode'):
- return self.web.error404()
- return index_logic()
-
- def index_logic(slug_candidate=''):
+
+ @self.web.app.route("/", defaults={"path": ""})
+ @self.web.app.route("/<path:path>")
+ def index(path):
"""
Render the template for the onionshare landing page.
"""
self.web.add_request(self.web.REQUEST_LOAD, request.path)
- # Deny new downloads if "Stop After First Download" is checked and there is
+ # Deny new downloads if "Stop sharing after files have been sent" is checked and there is
# currently a download
deny_download = not self.web.stay_open and self.download_in_progress
if deny_download:
- r = make_response(render_template('denied.html'))
+            r = make_response(
+                render_template(
+                    "denied.html", static_url_path=self.web.static_url_path
+                )
+            )
return self.web.add_security_headers(r)
# If download is allowed to continue, serve download page
@@ -70,38 +51,10 @@ class ShareModeWeb(object):
else:
self.filesize = self.download_filesize
- if self.web.slug:
- r = make_response(render_template(
- 'send.html',
- slug=self.web.slug,
- file_info=self.file_info,
- filename=os.path.basename(self.download_filename),
- filesize=self.filesize,
- filesize_human=self.common.human_readable_filesize(self.download_filesize),
- is_zipped=self.is_zipped))
- else:
- # If download is allowed to continue, serve download page
- r = make_response(render_template(
- 'send.html',
- file_info=self.file_info,
- filename=os.path.basename(self.download_filename),
- filesize=self.filesize,
- filesize_human=self.common.human_readable_filesize(self.download_filesize),
- is_zipped=self.is_zipped))
- return self.web.add_security_headers(r)
-
- @self.web.app.route("/<slug_candidate>/download")
- def download(slug_candidate):
- self.web.check_slug_candidate(slug_candidate)
- return download_logic()
+ return self.render_logic(path)
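
[Editor's note: the new index route above relies on Flask's catch-all <path:path> converter so a single handler serves both the landing page and every nested path. A minimal, self-contained sketch of just that routing pattern; the app and the echoed response are illustrative stand-ins, not OnionShare code.]

    import flask

    app = flask.Flask(__name__)

    @app.route("/", defaults={"path": ""})
    @app.route("/<path:path>")
    def index(path):
        # path is "" for the root URL, otherwise the requested sub-path,
        # slashes included (e.g. "photos/cat.jpg")
        return "requested path: {!r}".format(path)

    if __name__ == "__main__":
        app.run()  # then e.g. curl http://127.0.0.1:5000/photos/cat.jpg
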
@self.web.app.route("/download")
- def download_public():
- if not self.common.settings.get('public_mode'):
- return self.web.error404()
- return download_logic()
-
- def download_logic(slug_candidate=''):
+ def download():
"""
Download the zip file.
"""
@@ -109,16 +62,16 @@ class ShareModeWeb(object):
# currently a download
deny_download = not self.web.stay_open and self.download_in_progress
if deny_download:
- r = make_response(render_template('denied.html'))
+ r = make_response(
+ render_template(
+ "denied.html", static_url_path=self.web.static_url_path
+ )
+ )
return self.web.add_security_headers(r)
- # Each download has a unique id
- download_id = self.download_count
- self.download_count += 1
-
# Prepare some variables to use inside generate() function below
# which is outside of the request context
- shutdown_func = request.environ.get('werkzeug.server.shutdown')
+ shutdown_func = request.environ.get("werkzeug.server.shutdown")
path = request.path
# If this is a zipped file, then serve as-is. If it's not zipped, then,
@@ -133,10 +86,11 @@ class ShareModeWeb(object):
self.filesize = self.download_filesize
# Tell GUI the download started
- self.web.add_request(self.web.REQUEST_STARTED, path, {
- 'id': download_id,
- 'use_gzip': use_gzip
- })
+ history_id = self.cur_history_id
+ self.cur_history_id += 1
+ self.web.add_request(
+ self.web.REQUEST_STARTED, path, {"id": history_id, "use_gzip": use_gzip}
+ )
basename = os.path.basename(self.download_filename)
@@ -147,19 +101,19 @@ class ShareModeWeb(object):
chunk_size = 102400 # 100kb
- fp = open(file_to_download, 'rb')
+ fp = open(file_to_download, "rb")
self.web.done = False
canceled = False
while not self.web.done:
# The user has canceled the download, so stop serving the file
if not self.web.stop_q.empty():
- self.web.add_request(self.web.REQUEST_CANCELED, path, {
- 'id': download_id
- })
+ self.web.add_request(
+ self.web.REQUEST_CANCELED, path, {"id": history_id}
+ )
break
chunk = fp.read(chunk_size)
- if chunk == b'':
+ if chunk == b"":
self.web.done = True
else:
try:
@@ -170,15 +124,26 @@ class ShareModeWeb(object):
percent = (1.0 * downloaded_bytes / self.filesize) * 100
# only output to stdout if running onionshare in CLI mode, or if using Linux (#203, #304)
- if not self.web.is_gui or self.common.platform == 'Linux' or self.common.platform == 'BSD':
+ if (
+ not self.web.is_gui
+ or self.common.platform == "Linux"
+ or self.common.platform == "BSD"
+ ):
sys.stdout.write(
- "\r{0:s}, {1:.2f}% ".format(self.common.human_readable_filesize(downloaded_bytes), percent))
+ "\r{0:s}, {1:.2f}% ".format(
+ self.common.human_readable_filesize(
+ downloaded_bytes
+ ),
+ percent,
+ )
+ )
sys.stdout.flush()
- self.web.add_request(self.web.REQUEST_PROGRESS, path, {
- 'id': download_id,
- 'bytes': downloaded_bytes
- })
+ self.web.add_request(
+ self.web.REQUEST_PROGRESS,
+ path,
+ {"id": history_id, "bytes": downloaded_bytes},
+ )
self.web.done = False
except:
# looks like the download was canceled
@@ -186,13 +151,13 @@ class ShareModeWeb(object):
canceled = True
# tell the GUI the download has canceled
- self.web.add_request(self.web.REQUEST_CANCELED, path, {
- 'id': download_id
- })
+ self.web.add_request(
+ self.web.REQUEST_CANCELED, path, {"id": history_id}
+ )
fp.close()
- if self.common.platform != 'Darwin':
+ if self.common.platform != "Darwin":
sys.stdout.write("\n")
# Download is finished
@@ -205,60 +170,127 @@ class ShareModeWeb(object):
self.web.running = False
try:
if shutdown_func is None:
- raise RuntimeError('Not running with the Werkzeug Server')
+ raise RuntimeError("Not running with the Werkzeug Server")
shutdown_func()
except:
pass
r = Response(generate())
if use_gzip:
- r.headers.set('Content-Encoding', 'gzip')
- r.headers.set('Content-Length', self.filesize)
- r.headers.set('Content-Disposition', 'attachment', filename=basename)
+ r.headers.set("Content-Encoding", "gzip")
+ r.headers.set("Content-Length", self.filesize)
+ r.headers.set("Content-Disposition", "attachment", filename=basename)
r = self.web.add_security_headers(r)
# guess content type
(content_type, _) = mimetypes.guess_type(basename, strict=False)
if content_type is not None:
- r.headers.set('Content-Type', content_type)
+ r.headers.set("Content-Type", content_type)
return r
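
[Editor's note: the download route above streams the archive through a generator instead of reading it into memory, which is why Content-Length is set explicitly and progress events can be emitted per chunk. A stripped-down sketch of the same streaming pattern without the OnionShare bookkeeping; the served path is a placeholder.]

    import os
    from flask import Flask, Response

    app = Flask(__name__)
    FILE_TO_SERVE = "/tmp/example.zip"  # placeholder; substitute a real file

    @app.route("/download")
    def download():
        filesize = os.path.getsize(FILE_TO_SERVE)
        basename = os.path.basename(FILE_TO_SERVE)

        def generate(chunk_size=102400):  # 100 KiB, matching the route above
            with open(FILE_TO_SERVE, "rb") as fp:
                while True:
                    chunk = fp.read(chunk_size)
                    if chunk == b"":
                        break
                    yield chunk  # a progress callback would go here

        r = Response(generate())
        r.headers.set("Content-Length", filesize)
        r.headers.set("Content-Disposition", "attachment", filename=basename)
        return r
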
- def set_file_info(self, filenames, processed_size_callback=None):
- """
- Using the list of filenames being shared, fill in details that the web
- page will need to display. This includes zipping up the file in order to
- get the zip file's name and size.
- """
- self.common.log("ShareModeWeb", "set_file_info")
+ def directory_listing_template(
+ self, path, files, dirs, breadcrumbs, breadcrumbs_leaf
+ ):
+ return make_response(
+ render_template(
+ "send.html",
+ file_info=self.file_info,
+ files=files,
+ dirs=dirs,
+ breadcrumbs=breadcrumbs,
+ breadcrumbs_leaf=breadcrumbs_leaf,
+ filename=os.path.basename(self.download_filename),
+ filesize=self.filesize,
+ filesize_human=self.common.human_readable_filesize(
+ self.download_filesize
+ ),
+ is_zipped=self.is_zipped,
+ static_url_path=self.web.static_url_path,
+ download_individual_files=self.download_individual_files,
+ )
+ )
+
+ def set_file_info_custom(self, filenames, processed_size_callback):
+ self.common.log("ShareModeWeb", "set_file_info_custom")
self.web.cancel_compression = False
+ self.build_zipfile_list(filenames, processed_size_callback)
+
+ def render_logic(self, path=""):
+ if path in self.files:
+ filesystem_path = self.files[path]
+
+ # If it's a directory
+ if os.path.isdir(filesystem_path):
+ # Render directory listing
+ filenames = []
+ for filename in os.listdir(filesystem_path):
+ if os.path.isdir(os.path.join(filesystem_path, filename)):
+ filenames.append(filename + "/")
+ else:
+ filenames.append(filename)
+ filenames.sort()
+ return self.directory_listing(filenames, path, filesystem_path)
+
+ # If it's a file
+ elif os.path.isfile(filesystem_path):
+ if self.download_individual_files:
+ return self.stream_individual_file(filesystem_path)
+ else:
+ history_id = self.cur_history_id
+ self.cur_history_id += 1
+ return self.web.error404(history_id)
+
+ # If it's not a directory or file, throw a 404
+ else:
+ history_id = self.cur_history_id
+ self.cur_history_id += 1
+ return self.web.error404(history_id)
+ else:
+ # Special case loading /
+
+ if path == "":
+ # Root directory listing
+ filenames = list(self.root_files)
+ filenames.sort()
+ return self.directory_listing(filenames, path)
- self.cleanup_filenames = []
+ else:
+ # If the path isn't found, throw a 404
+ history_id = self.cur_history_id
+ self.cur_history_id += 1
+ return self.web.error404(history_id)
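
[Editor's note: render_logic resolves the request path against self.files, a mapping from URL paths to filesystem paths maintained by the SendBaseModeWeb base class, which is not shown in this diff. A rough sketch of that lookup with a hand-built mapping standing in for the real one; the root-listing special case is omitted.]

    import os

    # Hypothetical stand-in for the self.files mapping built by the base class
    files = {
        "photos": "/home/user/photos",
        "photos/cat.jpg": "/home/user/photos/cat.jpg",
    }

    def resolve(path):
        filesystem_path = files.get(path)
        if filesystem_path is None:
            return ("404", None)
        if os.path.isdir(filesystem_path):
            # Directory: sorted listing, sub-directories suffixed with "/"
            names = sorted(
                name + "/" if os.path.isdir(os.path.join(filesystem_path, name)) else name
                for name in os.listdir(filesystem_path)
            )
            return ("listing", names)
        if os.path.isfile(filesystem_path):
            return ("file", filesystem_path)
        return ("404", None)
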
- # build file info list
- self.file_info = {'files': [], 'dirs': []}
+ def build_zipfile_list(self, filenames, processed_size_callback=None):
+ self.common.log("ShareModeWeb", "build_zipfile_list")
for filename in filenames:
info = {
- 'filename': filename,
- 'basename': os.path.basename(filename.rstrip('/'))
+ "filename": filename,
+ "basename": os.path.basename(filename.rstrip("/")),
}
if os.path.isfile(filename):
- info['size'] = os.path.getsize(filename)
- info['size_human'] = self.common.human_readable_filesize(info['size'])
- self.file_info['files'].append(info)
+ info["size"] = os.path.getsize(filename)
+ info["size_human"] = self.common.human_readable_filesize(info["size"])
+ self.file_info["files"].append(info)
if os.path.isdir(filename):
- info['size'] = self.common.dir_size(filename)
- info['size_human'] = self.common.human_readable_filesize(info['size'])
- self.file_info['dirs'].append(info)
- self.file_info['files'] = sorted(self.file_info['files'], key=lambda k: k['basename'])
- self.file_info['dirs'] = sorted(self.file_info['dirs'], key=lambda k: k['basename'])
+ info["size"] = self.common.dir_size(filename)
+ info["size_human"] = self.common.human_readable_filesize(info["size"])
+ self.file_info["dirs"].append(info)
+ self.file_info["files"] = sorted(
+ self.file_info["files"], key=lambda k: k["basename"]
+ )
+ self.file_info["dirs"] = sorted(
+ self.file_info["dirs"], key=lambda k: k["basename"]
+ )
# Check if there's only 1 file and no folders
- if len(self.file_info['files']) == 1 and len(self.file_info['dirs']) == 0:
- self.download_filename = self.file_info['files'][0]['filename']
- self.download_filesize = self.file_info['files'][0]['size']
+ if len(self.file_info["files"]) == 1 and len(self.file_info["dirs"]) == 0:
+ self.download_filename = self.file_info["files"][0]["filename"]
+ self.download_filesize = self.file_info["files"][0]["size"]
# Compress the file with gzip now, so we don't have to do it on each request
- self.gzip_filename = tempfile.mkstemp('wb+')[1]
- self._gzip_compress(self.download_filename, self.gzip_filename, 6, processed_size_callback)
+ self.gzip_filename = tempfile.mkstemp("wb+")[1]
+ self._gzip_compress(
+ self.download_filename, self.gzip_filename, 6, processed_size_callback
+ )
self.gzip_filesize = os.path.getsize(self.gzip_filename)
# Make sure the gzip file gets cleaned up when onionshare stops
@@ -268,17 +300,19 @@ class ShareModeWeb(object):
else:
# Zip up the files and folders
- self.zip_writer = ZipWriter(self.common, processed_size_callback=processed_size_callback)
+ self.zip_writer = ZipWriter(
+ self.common, processed_size_callback=processed_size_callback
+ )
self.download_filename = self.zip_writer.zip_filename
- for info in self.file_info['files']:
- self.zip_writer.add_file(info['filename'])
+ for info in self.file_info["files"]:
+ self.zip_writer.add_file(info["filename"])
# Canceling early?
if self.web.cancel_compression:
self.zip_writer.close()
return False
- for info in self.file_info['dirs']:
- if not self.zip_writer.add_dir(info['filename']):
+ for info in self.file_info["dirs"]:
+ if not self.zip_writer.add_dir(info["filename"]):
return False
self.zip_writer.close()
@@ -291,33 +325,6 @@ class ShareModeWeb(object):
return True
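
[Editor's note: in the single-file case above, the file is gzip-compressed once at share time into a temporary file, and each download then only needs to check whether the client accepts gzip. The should_use_gzip and _gzip_compress helpers removed just below presumably live in shared code after this refactor; here is a minimal sketch of both pieces, assuming an active Flask request context for the header check.]

    import gzip
    import shutil
    import tempfile
    from flask import request

    def gzip_compress_to_tempfile(input_filename, level=6):
        # Compress once, up front, so every download can reuse the result
        gzip_filename = tempfile.mkstemp()[1]
        with open(input_filename, "rb") as src:
            with gzip.open(gzip_filename, "wb", level) as dst:
                shutil.copyfileobj(src, dst, length=1 << 16)  # 64 KiB blocks
        return gzip_filename

    def client_accepts_gzip():
        # Same Accept-Encoding check as the removed should_use_gzip()
        return "gzip" in request.headers.get("Accept-Encoding", "").lower()
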
- def should_use_gzip(self):
- """
- Should we use gzip for this browser?
- """
- return (not self.is_zipped) and ('gzip' in request.headers.get('Accept-Encoding', '').lower())
-
- def _gzip_compress(self, input_filename, output_filename, level, processed_size_callback=None):
- """
- Compress a file with gzip, without loading the whole thing into memory
- Thanks: https://stackoverflow.com/questions/27035296/python-how-to-gzip-a-large-text-file-without-memoryerror
- """
- bytes_processed = 0
- blocksize = 1 << 16 # 64kB
- with open(input_filename, 'rb') as input_file:
- output_file = gzip.open(output_filename, 'wb', level)
- while True:
- if processed_size_callback is not None:
- processed_size_callback(bytes_processed)
-
- block = input_file.read(blocksize)
- if len(block) == 0:
- break
- output_file.write(block)
- bytes_processed += blocksize
-
- output_file.close()
-
class ZipWriter(object):
"""
@@ -325,6 +332,7 @@ class ZipWriter(object):
with. If a zip_filename is not passed in, it will use the default onionshare
filename.
"""
+
def __init__(self, common, zip_filename=None, processed_size_callback=None):
self.common = common
self.cancel_compression = False
@@ -332,9 +340,11 @@ class ZipWriter(object):
if zip_filename:
self.zip_filename = zip_filename
else:
- self.zip_filename = '{0:s}/onionshare_{1:s}.zip'.format(tempfile.mkdtemp(), self.common.random_string(4, 6))
+ self.zip_filename = "{0:s}/onionshare_{1:s}.zip".format(
+ tempfile.mkdtemp(), self.common.random_string(4, 6)
+ )
- self.z = zipfile.ZipFile(self.zip_filename, 'w', allowZip64=True)
+ self.z = zipfile.ZipFile(self.zip_filename, "w", allowZip64=True)
self.processed_size_callback = processed_size_callback
if self.processed_size_callback is None:
self.processed_size_callback = lambda _: None
@@ -353,7 +363,7 @@ class ZipWriter(object):
"""
Add a directory, and all of its children, to the zip archive.
"""
- dir_to_strip = os.path.dirname(filename.rstrip('/'))+'/'
+ dir_to_strip = os.path.dirname(filename.rstrip("/")) + "/"
for dirpath, dirnames, filenames in os.walk(filename):
for f in filenames:
# Canceling early?
@@ -362,7 +372,7 @@ class ZipWriter(object):
full_filename = os.path.join(dirpath, f)
if not os.path.islink(full_filename):
- arc_filename = full_filename[len(dir_to_strip):]
+ arc_filename = full_filename[len(dir_to_strip) :]
self.z.write(full_filename, arc_filename, zipfile.ZIP_DEFLATED)
self._size += os.path.getsize(full_filename)
self.processed_size_callback(self._size)
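
[Editor's note: for reference, a hedged usage sketch of the ZipWriter class above. It assumes `common` is OnionShare's Common instance (anything exposing the random_string() helper used in __init__), and that add_file()/add_dir()/close() behave as shown in this diff; the progress callback just prints the running byte count.]

    import os

    def zip_share(common, paths):
        # Illustrative only, not part of the commit above
        writer = ZipWriter(
            common, processed_size_callback=lambda n: print(n, "bytes zipped")
        )
        for path in paths:
            if os.path.isdir(path):
                writer.add_dir(path)
            else:
                writer.add_file(path)
        writer.close()
        return writer.zip_filename
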