Compare commits


No commits in common. "develop" and "deluge-2.1.1.dev0" have entirely different histories.

584 changed files with 334502 additions and 359558 deletions


@ -19,37 +19,37 @@ on:
jobs: jobs:
windows_package: windows_package:
runs-on: windows-2022 runs-on: windows-2019
if: (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'package')) if: (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'package'))
strategy: strategy:
matrix: matrix:
arch: [x64, x86] arch: [x64, x86]
python: ["3.9"] python: ["3.9"]
libtorrent: [2.0.7, 1.2.19] libtorrent: [2.0.6, 1.2.15]
steps: steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v4 - uses: actions/checkout@v3
with: with:
fetch-depth: 0 fetch-depth: 0
# Checkout Deluge source to subdir to enable packaging any tag/commit # Checkout Deluge source to subdir to enable packaging any tag/commit
- name: Checkout Deluge source - name: Checkout Deluge source
uses: actions/checkout@v4 uses: actions/checkout@v3
with: with:
ref: ${{ github.event.inputs.ref }} ref: ${{ github.event.inputs.ref }}
fetch-depth: 0 fetch-depth: 0
path: deluge_src path: deluge_src
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v5 uses: actions/setup-python@v4
with: with:
python-version: ${{ matrix.python}} python-version: ${{ matrix.python}}
architecture: ${{ matrix.arch }} architecture: ${{ matrix.arch }}
cache: pip cache: pip
- name: Prepare pip - name: Prepare pip
run: python -m pip install wheel setuptools==68.* run: python -m pip install wheel
- name: Install GTK - name: Install GTK
run: | run: |
@ -62,14 +62,11 @@ jobs:
python -m pip install --no-index --find-links="C:\GTK\release\python" pycairo PyGObject python -m pip install --no-index --find-links="C:\GTK\release\python" pycairo PyGObject
- name: Install Python dependencies - name: Install Python dependencies
# Pillow no longer provides 32-bit wheels for Windows
# so specify only-binary to install old version.
run: > run: >
python -m pip install python -m pip install
--only-binary=pillow twisted[tls]==22.4.0
twisted[tls]==22.8.0
libtorrent==${{ matrix.libtorrent }} libtorrent==${{ matrix.libtorrent }}
pyinstaller pyinstaller==4.10
pygame pygame
-r requirements.txt -r requirements.txt
@ -84,13 +81,12 @@ jobs:
run: | run: |
pyinstaller --clean delugewin.spec --distpath freeze pyinstaller --clean delugewin.spec --distpath freeze
- name: Verify Deluge exes - name: Fix OpenSSL for libtorrent x64
working-directory: packaging/win/freeze/Deluge/ if: ${{ matrix.arch == 'x64' }}
working-directory: packaging/win/freeze/Deluge
run: | run: |
deluge-debug.exe -v cp libssl-1_1.dll libssl-1_1-x64.dll
deluged-debug.exe -v cp libcrypto-1_1.dll libcrypto-1_1-x64.dll
deluge-web-debug.exe -v
deluge-console -v
- name: Make Deluge Installer - name: Make Deluge Installer
working-directory: ./packaging/win working-directory: ./packaging/win
@ -98,7 +94,7 @@ jobs:
python setup_nsis.py python setup_nsis.py
makensis /Darch=${{ matrix.arch }} deluge-win-installer.nsi makensis /Darch=${{ matrix.arch }} deluge-win-installer.nsi
- uses: actions/upload-artifact@v4 - uses: actions/upload-artifact@v2
with: with:
name: deluge-py${{ matrix.python }}-lt${{ matrix.libtorrent }}-${{ matrix.arch }} name: deluge-py${{ matrix.python }}-lt${{ matrix.libtorrent }}-${{ matrix.arch }}
path: packaging/win/*.exe path: packaging/win/*.exe


@ -6,25 +6,22 @@ on:
# Allows you to run this workflow manually from the Actions tab # Allows you to run this workflow manually from the Actions tab
workflow_dispatch: workflow_dispatch:
inputs:
core-dump:
description: "Set to 1 to enable retrieving core dump from crashes"
default: "0"
jobs: jobs:
test-linux: test-linux:
runs-on: ubuntu-22.04 runs-on: ubuntu-20.04
strategy: strategy:
matrix: matrix:
python-version: ["3.7", "3.10"] python-version: ["3.7", "3.10"]
steps: steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v4 - uses: actions/checkout@v2
with: with:
fetch-depth: 0 fetch-depth: 0
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v5 uses: actions/setup-python@v2
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
cache: "pip" cache: "pip"
@ -36,8 +33,8 @@ jobs:
- name: Install dependencies - name: Install dependencies
run: | run: |
pip install --upgrade pip wheel setuptools pip install --upgrade pip wheel
pip install -r requirements-ci.txt pip install -r requirements.txt -r requirements-tests.txt
pip install -e . pip install -e .
- name: Install security dependencies - name: Install security dependencies
@ -49,21 +46,18 @@ jobs:
TESTSSL_VER: 3.0.6 TESTSSL_VER: 3.0.6
TESTSSL_URL: https://codeload.github.com/drwetter/testssl.sh/tar.gz/refs/tags/v TESTSSL_URL: https://codeload.github.com/drwetter/testssl.sh/tar.gz/refs/tags/v
- name: Setup core dump catch and store - name: Setup core dump directory
if: github.event.inputs.core-dump == '1'
run: | run: |
sudo mkdir /cores/ && sudo chmod 777 /cores/ sudo mkdir /cores/ && sudo chmod 777 /cores/
echo "/cores/%E.%p" | sudo tee /proc/sys/kernel/core_pattern echo "/cores/%E.%p" | sudo tee /proc/sys/kernel/core_pattern
ulimit -c unlimited
sudo apt install glibc-tools
echo "DEBUG_PREFIX=catchsegv python -X dev -m" >> $GITHUB_ENV
- name: Test with pytest - name: Test with pytest
run: | run: |
ulimit -c unlimited # Enable core dumps to be captured
python -c 'from deluge._libtorrent import lt; print(lt.__version__)'; python -c 'from deluge._libtorrent import lt; print(lt.__version__)';
$DEBUG_PREFIX pytest -v -m "not (todo or gtkui)" deluge catchsegv python -X dev -m pytest -v -m "not (todo or gtkui)" deluge
- uses: actions/upload-artifact@v4 - uses: actions/upload-artifact@v2
# capture all crashes as build artifacts # capture all crashes as build artifacts
if: failure() if: failure()
with: with:
@ -71,19 +65,19 @@ jobs:
path: /cores path: /cores
test-windows: test-windows:
runs-on: windows-2022 runs-on: windows-latest
strategy: strategy:
matrix: matrix:
python-version: ["3.7", "3.10"] python-version: ["3.7", "3.10"]
steps: steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v4 - uses: actions/checkout@v2
with: with:
fetch-depth: 0 fetch-depth: 0
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v5 uses: actions/setup-python@v2
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
cache: "pip" cache: "pip"
@ -91,8 +85,8 @@ jobs:
- name: Install dependencies - name: Install dependencies
run: | run: |
pip install --upgrade pip wheel setuptools pip install --upgrade pip wheel
pip install -r requirements-ci.txt pip install -r requirements.txt -r requirements-tests.txt
pip install -e . pip install -e .
- name: Test with pytest - name: Test with pytest


@ -15,23 +15,30 @@ jobs:
steps: steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v4 - uses: actions/checkout@v2
with: with:
fetch-depth: 0 fetch-depth: 0
- uses: actions/setup-python@v2
- uses: actions/setup-python@v5
with: with:
python-version: "3.10" python-version: "3.8"
cache: "pip" - name: Cache pip
cache-dependency-path: "requirements*.txt" uses: actions/cache@v2
with:
# This path is specific to Ubuntu
path: ~/.cache/pip
# Look to see if there is a cache hit for the corresponding requirements file
key: ${{ runner.os }}-pip-${{ hashFiles('requirements*.txt') }}
restore-keys: |
${{ runner.os }}-pip-
${{ runner.os }}-
- name: Install dependencies - name: Install dependencies
run: | run: |
pip install --upgrade pip wheel pip install --upgrade pip wheel
pip install tox pip install tox
sudo apt-get install enchant-2 sudo apt-get install enchant
- name: Build docs with tox - name: Test with tox
env: env:
TOX_ENV: docs TOX_ENV: docs
run: | run: |


@ -11,7 +11,7 @@ jobs:
lint: lint:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v2
- uses: actions/setup-python@v5 - uses: actions/setup-python@v2
- name: Run pre-commit linting - name: Run pre-commit linting
uses: pre-commit/action@v3.0.1 uses: pre-commit/action@v2.0.2

.gitignore

@ -12,14 +12,14 @@ __pycache__/
*.tar.* *.tar.*
.tox/ .tox/
deluge/i18n/*/ deluge/i18n/*/
deluge.pot
deluge/ui/web/js/*.js deluge/ui/web/js/*.js
deluge/ui/web/js/extjs/ext-extensions*.js deluge/ui/web/js/extjs/ext-extensions*.js
*.desktop *.desktop
*.metainfo.xml *.appdata.xml
.build_data* .build_data*
osx/app osx/app
RELEASE-VERSION RELEASE-VERSION
.venv* .venv*
# used by setuptools to cache downloaded eggs # used by setuptools to cache downloaded eggs
/.eggs /.eggs
_pytest_temp/


@ -6,25 +6,35 @@ exclude: >
deluge/tests/data/.*svg| deluge/tests/data/.*svg|
)$ )$
repos: repos:
- repo: https://github.com/astral-sh/ruff-pre-commit - repo: https://github.com/psf/black
# Ruff version. rev: 22.3.0
rev: v0.6.4
hooks: hooks:
- id: ruff - id: black
name: Chk Ruff name: Fmt Black
args: [--fix]
- id: ruff-format
name: Fmt Ruff
- repo: https://github.com/pre-commit/mirrors-prettier - repo: https://github.com/pre-commit/mirrors-prettier
rev: v2.7.1 rev: v2.5.1
hooks: hooks:
- id: prettier - id: prettier
name: Fmt Prettier name: Fmt Prettier
# Workaround to list modified files only. # Workaround to list modified files only.
args: [--list-different] args: [--list-different]
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pycqa/isort
rev: v4.4.0 rev: 5.10.1
hooks: hooks:
- id: isort
name: Fmt isort
- repo: https://github.com/pycqa/flake8
rev: 4.0.1
hooks:
- id: flake8
name: Chk Flake8
additional_dependencies:
- pep8-naming==0.12.1
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.1.0
hooks:
- id: double-quote-string-fixer
name: Fix Double-quotes
- id: end-of-file-fixer - id: end-of-file-fixer
name: Fix End-of-files name: Fix End-of-files
exclude_types: [javascript, css] exclude_types: [javascript, css]
@ -34,8 +44,8 @@ repos:
- id: trailing-whitespace - id: trailing-whitespace
name: Fix Trailing whitespace name: Fix Trailing whitespace
- repo: https://github.com/asottile/pyupgrade - repo: https://github.com/asottile/pyupgrade
rev: v3.3.1 rev: v2.31.0
hooks: hooks:
- id: pyupgrade - id: pyupgrade
args: [--py37-plus] args: [--py36-plus]
stages: [manual] stages: [manual]


@ -5,14 +5,6 @@
# Required # Required
version: 2 version: 2
build:
os: ubuntu-22.04
tools:
python: "3.10"
jobs:
post_checkout:
- git fetch --unshallow || true
# Build documentation in the docs/ directory with Sphinx # Build documentation in the docs/ directory with Sphinx
sphinx: sphinx:
configuration: docs/source/conf.py configuration: docs/source/conf.py
@ -22,8 +14,9 @@ formats: all
# Optionally set the version of Python and requirements required to build your docs # Optionally set the version of Python and requirements required to build your docs
python: python:
version: 3.7
install: install:
- requirements: requirements.txt - requirements: requirements.txt
- requirements: docs/requirements.txt - requirements: docs/requirements.txt
- method: pip - method: setuptools
path: . path: .


@ -1,64 +1,6 @@
# Changelog # Changelog
## 2.1.x (TBA) ## unreleased
### Breaking changes
- Removed Python 3.6 support (Python >= 3.7)
### Core
- Fix GHSL-2024-189 - insecure HTTP for new version check.
- Fix alert handler segfault.
- Add support for creating v2 torrents.
### GTK UI
- Fix changing torrent ownership.
- Fix upper limit of upload/download in Add Torrent dialog.
- Fix #3339 - Resizing window crashes with Piecesbar or Stats plugin.
- Fix #3350 - Unable to use quick search.
- Fix #3598 - Missing AppIndicator option in Preferences.
- Set Appindicator as default for tray icon on Linux.
- Add feature to switch between dark/light themes.
### Web UI
- Fix GHSL-2024-191 - potential flag endpoint path traversal.
- Fix GHSL-2024-188 - js script dir traversal vulnerability.
- Fix GHSL-2024-190 - insecure tracker icon endpoint.
- Fix unable to stop daemon in connection manager.
- Fix responsiveness to avoid "Connection lost".
- Add support for network interface name as well as IP address.
- Add ability to change UI theme.
### Console UI
- Fix 'rm' and 'move' commands hanging when done.
- Fix #3538 - Unable to add host in connection manager.
- Disable interactive-mode on Windows.
### UI library
- Fix tracker icon display by converting to png format.
- Fix splitting trackers by newline
- Add clickable URLs for torrent comment and tracker status.
### Label
- Fix torrent deletion not removed from config.
- Fix label display name in submenu.
### AutoAdd
- Fix #3515 - Torrent file decoding errors disabled watch folder.
## 2.1.1 (2022-07-10)
### Core
- Fix missing trackers added via magnet
- Fix handling magnets with tracker tiers
## 2.1.0 (2022-06-28) ## 2.1.0 (2022-06-28)


@ -50,7 +50,7 @@ All modules will require the [common](#common) section dependencies.
- [PyGObject] - [PyGObject]
- [Pycairo] - [Pycairo]
- [librsvg] _>= 2_ - [librsvg] _>= 2_
- [ayatanaappindicator3] w/GIR - Optional: Ubuntu system tray icon. - [libappindicator3] w/GIR - Optional: Ubuntu system tray icon.
### MacOS ### MacOS
@ -95,6 +95,6 @@ All modules will require the [common](#common) section dependencies.
[mako]: https://www.makotemplates.org/ [mako]: https://www.makotemplates.org/
[pygame]: https://www.pygame.org/ [pygame]: https://www.pygame.org/
[libnotify]: https://developer.gnome.org/libnotify/ [libnotify]: https://developer.gnome.org/libnotify/
[ayatanaappindicator3]: https://lazka.github.io/pgi-docs/AyatanaAppIndicator3-0.1/index.html [python-appindicator]: https://packages.ubuntu.com/xenial/python-appindicator
[librsvg]: https://wiki.gnome.org/action/show/Projects/LibRsvg [librsvg]: https://wiki.gnome.org/action/show/Projects/LibRsvg
[ifaddr]: https://pypi.org/project/ifaddr/ [ifaddr]: https://pypi.org/project/ifaddr/


@ -1,6 +0,0 @@
from twisted.web.http import Request
__request__: Request
def _(string: str) -> str: ...
def _n(string: str) -> str: ...


@ -14,7 +14,6 @@ Example:
>>> from deluge._libtorrent import lt >>> from deluge._libtorrent import lt
""" """
from deluge.common import VersionSplit, get_version from deluge.common import VersionSplit, get_version
from deluge.error import LibtorrentImportError from deluge.error import LibtorrentImportError


@ -85,6 +85,7 @@ def bdecode(x):
class Bencached: class Bencached:
__slots__ = ['bencoded'] __slots__ = ['bencoded']
def __init__(self, s): def __init__(self, s):


@ -7,7 +7,6 @@
# #
"""Common functions for various parts of Deluge to use.""" """Common functions for various parts of Deluge to use."""
import base64 import base64
import binascii import binascii
import functools import functools
@ -24,21 +23,15 @@ import tarfile
import time import time
from contextlib import closing from contextlib import closing
from datetime import datetime from datetime import datetime
from importlib import resources
from io import BytesIO from io import BytesIO
from pathlib import Path
from urllib.parse import unquote_plus, urljoin from urllib.parse import unquote_plus, urljoin
from urllib.request import pathname2url from urllib.request import pathname2url
import pkg_resources
from deluge.decorators import deprecated from deluge.decorators import deprecated
from deluge.error import InvalidPathError from deluge.error import InvalidPathError
try:
from importlib.metadata import distribution
except ImportError:
from pkg_resources import get_distribution as distribution
try: try:
import chardet import chardet
except ImportError: except ImportError:
@ -97,7 +90,7 @@ def get_version():
Returns: Returns:
str: The version of Deluge. str: The version of Deluge.
""" """
return distribution('Deluge').version return pkg_resources.get_distribution('Deluge').version
def get_default_config_dir(filename=None): def get_default_config_dir(filename=None):
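For reference, a minimal sketch (not part of the diff) of the importlib.metadata lookup that the develop side of get_version() relies on, with the pkg_resources fallback kept for Python 3.7; the printed value is only an example:

try:
    from importlib.metadata import distribution
except ImportError:
    # Python 3.7 fallback, mirroring the try/except added on the develop side
    from pkg_resources import get_distribution as distribution

print(distribution('Deluge').version)  # e.g. '2.1.1.dev0'; depends on the installed package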
@ -297,22 +290,20 @@ def get_pixmap(fname):
return resource_filename('deluge', os.path.join('ui', 'data', 'pixmaps', fname)) return resource_filename('deluge', os.path.join('ui', 'data', 'pixmaps', fname))
def resource_filename(module: str, path: str) -> str: def resource_filename(module, path):
"""Get filesystem path for a non-python resource. """Get filesystem path for a resource.
Abstracts getting module resource files. Originally created to This function contains a work-around for pkg_resources.resource_filename
workaround pkg_resources.resource_filename limitations with not returning the correct path with multiple packages installed.
multiple Deluge packages installed.
So if there's a second deluge package, installed globally and another in
develop mode somewhere else, while pkg_resources.get_distribution('Deluge')
returns the proper deluge instance, pkg_resources.resource_filename
does not, it returns the first found on the python path, which is wrong.
""" """
path = Path(path) return pkg_resources.get_distribution('Deluge').get_resource_filename(
pkg_resources._manager, os.path.join(*(module.split('.') + [path]))
try: )
with resources.as_file(resources.files(module) / path) as resource_file:
return str(resource_file)
except AttributeError:
# Python <= 3.8
with resources.path(module, path.parts[0]) as resource_file:
return str(resource_file.joinpath(*path.parts[1:]))
def open_file(path, timestamp=None): def open_file(path, timestamp=None):
@ -424,31 +415,25 @@ def translate_size_units():
def fsize(fsize_b, precision=1, shortform=False): def fsize(fsize_b, precision=1, shortform=False):
"""Formats the bytes value into a string with KiB, MiB, GiB or TiB units. """Formats the bytes value into a string with KiB, MiB or GiB units.
Args: Args:
fsize_b (int): The filesize in bytes. fsize_b (int): The filesize in bytes.
precision (int): The output float precision, 1 by default. precision (int): The filesize float precision.
shortform (bool): The output short|long form, False (long form) by default.
Returns: Returns:
str: A formatted string in KiB, MiB, GiB or TiB units. str: A formatted string in KiB, MiB or GiB units.
Examples: Examples:
>>> fsize(112245) >>> fsize(112245)
'109.6 KiB' '109.6 KiB'
>>> fsize(112245, precision=0) >>> fsize(112245, precision=0)
'110 KiB' '110 KiB'
>>> fsize(112245, shortform=True)
'109.6 K'
Note: Note:
This function has been refactored for performance with the This function has been refactored for performance with the
fsize units being translated outside the function. fsize units being translated outside the function.
Notice that short forms K|M|G|T are synonymous here with
KiB|MiB|GiB|TiB. They are powers of 1024, not 1000.
""" """
if fsize_b >= 1024**4: if fsize_b >= 1024**4:
@ -484,7 +469,7 @@ def fpcnt(dec, precision=2):
Args: Args:
dec (float): The ratio in the range [0.0, 1.0]. dec (float): The ratio in the range [0.0, 1.0].
precision (int): The output float precision, 2 by default. precision (int): The percentage float precision.
Returns: Returns:
str: A formatted string representing a percentage. str: A formatted string representing a percentage.
@ -508,8 +493,6 @@ def fspeed(bps, precision=1, shortform=False):
Args: Args:
bps (int): The speed in bytes per second. bps (int): The speed in bytes per second.
precision (int): The output float precision, 1 by default.
shortform (bool): The output short|long form, False (long form) by default.
Returns: Returns:
str: A formatted string representing transfer speed. str: A formatted string representing transfer speed.
@ -518,10 +501,6 @@ def fspeed(bps, precision=1, shortform=False):
>>> fspeed(43134) >>> fspeed(43134)
'42.1 KiB/s' '42.1 KiB/s'
Note:
Notice that short forms K|M|G|T are synonymous here with
KiB|MiB|GiB|TiB. They are powers of 1024, not 1000.
""" """
if bps < 1024**2: if bps < 1024**2:
@ -558,7 +537,7 @@ def fpeer(num_peers, total_peers):
total_peers (int): The total number of peers. total_peers (int): The total number of peers.
Returns: Returns:
str: A formatted string 'num_peers (total_peers)' or if total_peers < 0, just 'num_peers'. str: A formatted string 'num_peers (total_peers)' or total_peers < 0, just 'num_peers'.
Examples: Examples:
>>> fpeer(10, 20) >>> fpeer(10, 20)
@ -607,16 +586,16 @@ def ftime(secs):
time_str = f'{secs // 604800}w {secs // 86400 % 7}d' time_str = f'{secs // 604800}w {secs // 86400 % 7}d'
else: else:
time_str = f'{secs // 31449600}y {secs // 604800 % 52}w' time_str = f'{secs // 31449600}y {secs // 604800 % 52}w'
return time_str return time_str
def fdate(seconds, date_only=False, precision_secs=False): def fdate(seconds, date_only=False, precision_secs=False):
"""Formats a date time string in the locale's date representation based on the system's timezone. """Formats a date time string in the locale's date representation based on the systems timezone.
Args: Args:
seconds (float): Time in seconds since the Epoch. seconds (float): Time in seconds since the Epoch.
date_only (bool): Whether to include only the date, False by default. precision_secs (bool): Include seconds in time format.
precision_secs (bool): Include seconds in time format, False by default.
Returns: Returns:
str: A string in the locale's datetime representation or "" if seconds < 0 str: A string in the locale's datetime representation or "" if seconds < 0
@ -641,14 +620,10 @@ def tokenize(text):
Returns: Returns:
list: A list of strings and/or numbers. list: A list of strings and/or numbers.
Note: This function is used to implement robust tokenization of user input
This function is used to implement robust tokenization of user input It automatically coerces integer and floating point numbers, ignores
It automatically coerces integer and floating point numbers, ignores whitespace and knows how to separate numbers from strings even without
whitespace and knows how to separate numbers from strings even without whitespace.
whitespace.
Possible optimization: move the 2 regexes outside of function.
""" """
tokenized_input = [] tokenized_input = []
for token in re.split(r'(\d+(?:\.\d+)?)', text): for token in re.split(r'(\d+(?:\.\d+)?)', text):
@ -669,16 +644,12 @@ size_units = [
{'prefix': 'GiB', 'divider': 1024**3}, {'prefix': 'GiB', 'divider': 1024**3},
{'prefix': 'TiB', 'divider': 1024**4}, {'prefix': 'TiB', 'divider': 1024**4},
{'prefix': 'PiB', 'divider': 1024**5}, {'prefix': 'PiB', 'divider': 1024**5},
{'prefix': 'k', 'divider': 1000**1},
{'prefix': 'm', 'divider': 1000**2},
{'prefix': 'g', 'divider': 1000**3},
{'prefix': 't', 'divider': 1000**4},
{'prefix': 'p', 'divider': 1000**5},
{'prefix': 'KB', 'divider': 1000**1}, {'prefix': 'KB', 'divider': 1000**1},
{'prefix': 'MB', 'divider': 1000**2}, {'prefix': 'MB', 'divider': 1000**2},
{'prefix': 'GB', 'divider': 1000**3}, {'prefix': 'GB', 'divider': 1000**3},
{'prefix': 'TB', 'divider': 1000**4}, {'prefix': 'TB', 'divider': 1000**4},
{'prefix': 'PB', 'divider': 1000**5}, {'prefix': 'PB', 'divider': 1000**5},
{'prefix': 'm', 'divider': 1000**2},
] ]
@ -721,16 +692,6 @@ def parse_human_size(size):
raise InvalidSize(msg % (size, tokens)) raise InvalidSize(msg % (size, tokens))
def anchorify_urls(text: str) -> str:
"""
Wrap all occurrences of text URLs with HTML
"""
url_pattern = r'((htt)|(ft)|(ud))ps?://\S+'
html_href_pattern = r'<a href="\g<0>">\g<0></a>'
return re.sub(url_pattern, html_href_pattern, text)
def is_url(url): def is_url(url):
""" """
A simple test to check if the URL is valid A simple test to check if the URL is valid
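As an illustration only (not from the diff), the anchorify_urls() helper added on the develop side wraps plain-text URLs in HTML anchors; the sample text below is invented:

import re

def anchorify_urls(text: str) -> str:
    # Same patterns as the develop-side helper shown above
    url_pattern = r'((htt)|(ft)|(ud))ps?://\S+'
    html_href_pattern = r'<a href="\g<0>">\g<0></a>'
    return re.sub(url_pattern, html_href_pattern, text)

print(anchorify_urls('Tracker status: see https://deluge-torrent.org for details'))
# -> Tracker status: see <a href="https://deluge-torrent.org">https://deluge-torrent.org</a> for details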
@ -773,8 +734,6 @@ MAGNET_SCHEME = 'magnet:?'
XT_BTIH_PARAM = 'xt=urn:btih:' XT_BTIH_PARAM = 'xt=urn:btih:'
DN_PARAM = 'dn=' DN_PARAM = 'dn='
TR_PARAM = 'tr=' TR_PARAM = 'tr='
TR_TIER_PARAM = 'tr.'
TR_TIER_REGEX = re.compile(r'^tr.(\d+)=(\S+)')
def is_magnet(uri): def is_magnet(uri):
@ -817,6 +776,8 @@ def get_magnet_info(uri):
""" """
tr0_param = 'tr.'
tr0_param_regex = re.compile(r'^tr.(\d+)=(\S+)')
if not uri.startswith(MAGNET_SCHEME): if not uri.startswith(MAGNET_SCHEME):
return {} return {}
@ -844,14 +805,12 @@ def get_magnet_info(uri):
tracker = unquote_plus(param[len(TR_PARAM) :]) tracker = unquote_plus(param[len(TR_PARAM) :])
trackers[tracker] = tier trackers[tracker] = tier
tier += 1 tier += 1
elif param.startswith(TR_TIER_PARAM): elif param.startswith(tr0_param):
tracker_match = re.match(TR_TIER_REGEX, param) try:
if not tracker_match: tier, tracker = re.match(tr0_param_regex, param).groups()
continue trackers[tracker] = tier
except AttributeError:
tier, tracker = tracker_match.groups() pass
tracker = unquote_plus(tracker)
trackers[tracker] = int(tier)
if info_hash: if info_hash:
if not name: if not name:
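A hedged usage sketch of the tiered-tracker parsing above: with the develop-side TR_TIER_REGEX, 'tr.N=' parameters become {tracker: tier} entries with integer tiers. The magnet URI below is invented:

from deluge.common import get_magnet_info

uri = (
    'magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567'
    '&dn=example'
    '&tr.0=http%3A%2F%2Ftracker-a.example%2Fannounce'
    '&tr.1=http%3A%2F%2Ftracker-b.example%2Fannounce'
)
info = get_magnet_info(uri)
# info['trackers'] would be roughly:
# {'http://tracker-a.example/announce': 0, 'http://tracker-b.example/announce': 1}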
@ -872,7 +831,7 @@ def create_magnet_uri(infohash, name=None, trackers=None):
Args: Args:
infohash (str): The info-hash of the torrent. infohash (str): The info-hash of the torrent.
name (str, optional): The name of the torrent. name (str, optional): The name of the torrent.
trackers (list or dict, optional): A list of trackers or a dict or some {tracker: tier} pairs. trackers (list or dict, optional): A list of trackers or dict or {tracker: tier} pairs.
Returns: Returns:
str: A magnet URI string. str: A magnet URI string.
@ -914,7 +873,7 @@ def get_path_size(path):
return os.path.getsize(path) return os.path.getsize(path)
dir_size = 0 dir_size = 0
for p, dummy_dirs, files in os.walk(path): for (p, dummy_dirs, files) in os.walk(path):
for _file in files: for _file in files:
filename = os.path.join(p, _file) filename = os.path.join(p, _file)
dir_size += os.path.getsize(filename) dir_size += os.path.getsize(filename)


@ -59,16 +59,11 @@ class Component:
Deluge core. Deluge core.
**update()** - This method is called every 1 second by default while the **update()** - This method is called every 1 second by default while the
Component is in a *Started* state. The interval can be Componented is in a *Started* state. The interval can be
specified during instantiation. The update() timer can be specified during instantiation. The update() timer can be
paused by instructing the :class:`ComponentRegistry` to pause paused by instructing the :class:`ComponentRegistry` to pause
this Component. this Component.
**pause()** - This method is called when the component is being paused.
**resume()** - This method is called when the component resumes from a Paused
state.
**shutdown()** - This method is called when the client is exiting. If the **shutdown()** - This method is called when the client is exiting. If the
Component is in a "Started" state when this is called, a Component is in a "Started" state when this is called, a
call to stop() will be issued prior to shutdown(). call to stop() will be issued prior to shutdown().
@ -85,10 +80,10 @@ class Component:
**Stopped** - The Component has either been stopped or has yet to be started. **Stopped** - The Component has either been stopped or has yet to be started.
**Stopping** - The Component has had its stop method called, but it hasn't **Stopping** - The Component has had it's stop method called, but it hasn't
fully stopped yet. fully stopped yet.
**Paused** - The Component has had its update timer stopped, but will **Paused** - The Component has had it's update timer stopped, but will
still be considered in a Started state. still be considered in a Started state.
""" """
@ -116,8 +111,9 @@ class Component:
_ComponentRegistry.deregister(self) _ComponentRegistry.deregister(self)
def _component_start_timer(self): def _component_start_timer(self):
self._component_timer = LoopingCall(self.update) if hasattr(self, 'update'):
self._component_timer.start(self._component_interval) self._component_timer = LoopingCall(self.update)
self._component_timer.start(self._component_interval)
def _component_start(self): def _component_start(self):
def on_start(result): def on_start(result):
@ -133,10 +129,13 @@ class Component:
return fail(result) return fail(result)
if self._component_state == 'Stopped': if self._component_state == 'Stopped':
self._component_state = 'Starting' if hasattr(self, 'start'):
d = deferLater(reactor, 0, self.start) self._component_state = 'Starting'
d.addCallbacks(on_start, on_start_fail) d = deferLater(reactor, 0, self.start)
self._component_starting_deferred = d d.addCallbacks(on_start, on_start_fail)
self._component_starting_deferred = d
else:
d = maybeDeferred(on_start, None)
elif self._component_state == 'Starting': elif self._component_state == 'Starting':
return self._component_starting_deferred return self._component_starting_deferred
elif self._component_state == 'Started': elif self._component_state == 'Started':
@ -166,11 +165,14 @@ class Component:
return result return result
if self._component_state != 'Stopped' and self._component_state != 'Stopping': if self._component_state != 'Stopped' and self._component_state != 'Stopping':
self._component_state = 'Stopping' if hasattr(self, 'stop'):
d = maybeDeferred(self.stop) self._component_state = 'Stopping'
d.addCallback(on_stop) d = maybeDeferred(self.stop)
d.addErrback(on_stop_fail) d.addCallback(on_stop)
self._component_stopping_deferred = d d.addErrback(on_stop_fail)
self._component_stopping_deferred = d
else:
d = maybeDeferred(on_stop, None)
if self._component_state == 'Stopping': if self._component_state == 'Stopping':
return self._component_stopping_deferred return self._component_stopping_deferred
@ -180,12 +182,13 @@ class Component:
def _component_pause(self): def _component_pause(self):
def on_pause(result): def on_pause(result):
self._component_state = 'Paused' self._component_state = 'Paused'
if self._component_timer and self._component_timer.running:
self._component_timer.stop()
if self._component_state == 'Started': if self._component_state == 'Started':
d = maybeDeferred(self.pause) if self._component_timer and self._component_timer.running:
d.addCallback(on_pause) d = maybeDeferred(self._component_timer.stop)
d.addCallback(on_pause)
else:
d = succeed(None)
elif self._component_state == 'Paused': elif self._component_state == 'Paused':
d = succeed(None) d = succeed(None)
else: else:
@ -202,10 +205,9 @@ class Component:
def _component_resume(self): def _component_resume(self):
def on_resume(result): def on_resume(result):
self._component_state = 'Started' self._component_state = 'Started'
self._component_start_timer()
if self._component_state == 'Paused': if self._component_state == 'Paused':
d = maybeDeferred(self.resume) d = maybeDeferred(self._component_start_timer)
d.addCallback(on_resume) d.addCallback(on_resume)
else: else:
d = fail( d = fail(
@ -220,7 +222,9 @@ class Component:
def _component_shutdown(self): def _component_shutdown(self):
def on_stop(result): def on_stop(result):
return maybeDeferred(self.shutdown) if hasattr(self, 'shutdown'):
return maybeDeferred(self.shutdown)
return succeed(None)
d = self._component_stop() d = self._component_stop()
d.addCallback(on_stop) d.addCallback(on_stop)
@ -241,12 +245,6 @@ class Component:
def shutdown(self): def shutdown(self):
pass pass
def pause(self):
pass
def resume(self):
pass
class ComponentRegistry: class ComponentRegistry:
"""The ComponentRegistry holds a list of currently registered :class:`Component` objects. """The ComponentRegistry holds a list of currently registered :class:`Component` objects.


@ -38,7 +38,6 @@ this can only be done for the 'config file version' and not for the 'format'
version as this will be done internally. version as this will be done internally.
""" """
import json import json
import logging import logging
import os import os


@ -3,7 +3,7 @@
# the additional special exception to link portions of this program with the OpenSSL library. # the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details. # See LICENSE for more details.
# #
import asyncio
import tempfile import tempfile
import warnings import warnings
from unittest.mock import Mock, patch from unittest.mock import Mock, patch
@ -12,7 +12,7 @@ import pytest
import pytest_twisted import pytest_twisted
from twisted.internet import reactor from twisted.internet import reactor
from twisted.internet.defer import Deferred, maybeDeferred from twisted.internet.defer import Deferred, maybeDeferred
from twisted.internet.error import CannotListenError, ProcessTerminated from twisted.internet.error import CannotListenError
from twisted.python.failure import Failure from twisted.python.failure import Failure
import deluge.component as _component import deluge.component as _component
@ -42,18 +42,15 @@ def mock_callback():
The returned Mock instance will have a `deferred` attribute which will complete when the callback has been called. The returned Mock instance will have a `deferred` attribute which will complete when the callback has been called.
""" """
def reset(timeout=0.5, *args, **kwargs): def reset():
if mock.called: if mock.called:
original_reset_mock(*args, **kwargs) original_reset_mock()
if mock.deferred: deferred = Deferred()
mock.deferred.cancel() deferred.addTimeout(0.5, reactor)
deferred = Deferred(canceller=lambda x: deferred.callback(None))
deferred.addTimeout(timeout, reactor)
mock.side_effect = lambda *args, **kw: deferred.callback((args, kw)) mock.side_effect = lambda *args, **kw: deferred.callback((args, kw))
mock.deferred = deferred mock.deferred = deferred
mock = Mock() mock = Mock()
mock.__qualname__ = 'mock'
original_reset_mock = mock.reset_mock original_reset_mock = mock.reset_mock
mock.reset_mock = reset mock.reset_mock = reset
mock.reset_mock() mock.reset_mock()
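A hypothetical usage of the mock_callback fixture above: the mock exposes a deferred attribute that fires with the call's (args, kwargs) once the callback is invoked, or times out after 0.5 s by default. The test body is invented:

async def test_mock_callback_fires(mock_callback):
    # Simulate the code under test invoking the callback
    mock_callback('value', key=1)
    args, kwargs = await mock_callback.deferred
    assert args == ('value',)
    assert kwargs == {'key': 1}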
@ -62,9 +59,8 @@ def mock_callback():
@pytest.fixture @pytest.fixture
def config_dir(tmp_path): def config_dir(tmp_path):
config_dir = tmp_path / 'config' deluge.configmanager.set_config_dir(tmp_path)
deluge.configmanager.set_config_dir(config_dir) yield tmp_path
yield config_dir
@pytest_twisted.async_yield_fixture() @pytest_twisted.async_yield_fixture()
@ -88,10 +84,9 @@ async def client(request, config_dir, monkeypatch, listen_port):
@pytest_twisted.async_yield_fixture @pytest_twisted.async_yield_fixture
async def daemon(request, config_dir, tmp_path): async def daemon(request, config_dir):
listen_port = DEFAULT_LISTEN_PORT listen_port = DEFAULT_LISTEN_PORT
logfile = tmp_path / 'daemon.log' logfile = f'daemon_{request.node.name}.log'
if hasattr(request.cls, 'daemon_custom_script'): if hasattr(request.cls, 'daemon_custom_script'):
custom_script = request.cls.daemon_custom_script custom_script = request.cls.daemon_custom_script
else: else:
@ -121,10 +116,7 @@ async def daemon(request, config_dir, tmp_path):
raise exception_error raise exception_error
daemon.listen_port = listen_port daemon.listen_port = listen_port
yield daemon yield daemon
try: await daemon.kill()
await daemon.kill()
except ProcessTerminated:
pass
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
@ -145,7 +137,7 @@ def common_fixture(config_dir, request, monkeypatch, listen_port):
@pytest_twisted.async_yield_fixture(scope='function') @pytest_twisted.async_yield_fixture(scope='function')
async def component(): async def component(request):
"""Verify component registry is clean, and clean up after test.""" """Verify component registry is clean, and clean up after test."""
if len(_component._ComponentRegistry.components) != 0: if len(_component._ComponentRegistry.components) != 0:
warnings.warn( warnings.warn(
@ -198,18 +190,3 @@ def mock_mkstemp(tmp_path):
tmp_file = tempfile.mkstemp(dir=tmp_path) tmp_file = tempfile.mkstemp(dir=tmp_path)
with patch('tempfile.mkstemp', return_value=tmp_file): with patch('tempfile.mkstemp', return_value=tmp_file):
yield tmp_file yield tmp_file
def pytest_collection_modifyitems(session, config, items) -> None:
"""
Automatically runs async tests with pytest_twisted.ensureDeferred
"""
function_items = (item for item in items if isinstance(item, pytest.Function))
for function_item in function_items:
function = function_item.obj
if hasattr(function, '__func__'):
# methods need to be unwrapped.
function = function.__func__
if asyncio.iscoroutinefunction(function):
# This is how pytest_twisted marks ensureDeferred tests
setattr(function, '_pytest_twisted_mark', 'async_test')


@ -14,16 +14,10 @@ This should typically only be used by the Core. Plugins should utilize the
`:mod:EventManager` for similar functionality. `:mod:EventManager` for similar functionality.
""" """
import contextlib
import logging import logging
import threading from types import SimpleNamespace
import time
from collections import defaultdict
from functools import partial
from typing import Any, Callable
from twisted.internet import reactor, task, threads from twisted.internet import reactor
import deluge.component as component import deluge.component as component
from deluge._libtorrent import lt from deluge._libtorrent import lt
@ -37,7 +31,7 @@ class AlertManager(component.Component):
def __init__(self): def __init__(self):
log.debug('AlertManager init...') log.debug('AlertManager init...')
component.Component.__init__(self, 'AlertManager') component.Component.__init__(self, 'AlertManager', interval=0.3)
self.session = component.get('Core').session self.session = component.get('Core').session
# Increase the alert queue size so that alerts don't get lost. # Increase the alert queue size so that alerts don't get lost.
@ -58,88 +52,48 @@ class AlertManager(component.Component):
self.session.apply_settings({'alert_mask': alert_mask}) self.session.apply_settings({'alert_mask': alert_mask})
# handlers is a dictionary of lists {"alert_type": [handler1,h2,..]} # handlers is a dictionary of lists {"alert_type": [handler1,h2,..]}
self.handlers = defaultdict(list) self.handlers = {}
self.handlers_timeout_secs = 2
self.delayed_calls = [] self.delayed_calls = []
self._event = threading.Event()
def update(self): def update(self):
pass self.delayed_calls = [dc for dc in self.delayed_calls if dc.active()]
def start(self):
thread = threading.Thread(
target=self.wait_for_alert_in_thread, name='alert-poller', daemon=True
)
thread.start()
self._event.set()
def stop(self):
self.cancel_delayed_calls()
def pause(self):
self._event.clear()
def resume(self):
self._event.set()
def wait_for_alert_in_thread(self):
while self._component_state not in ('Stopping', 'Stopped'):
if self.check_delayed_calls():
time.sleep(0.05)
continue
if self.session.wait_for_alert(1000) is None:
continue
if self._event.wait():
threads.blockingCallFromThread(reactor, self.maybe_handle_alerts)
def on_delayed_call_timeout(self, result, timeout, **kwargs):
log.warning('Alert handler was timed-out before being called %s', kwargs)
def cancel_delayed_calls(self):
"""Cancel all delayed handlers."""
for delayed_call in self.delayed_calls:
delayed_call.cancel()
self.delayed_calls = []
def check_delayed_calls(self) -> bool:
"""Returns True if any handler calls are delayed."""
self.delayed_calls = [dc for dc in self.delayed_calls if not dc.called]
return len(self.delayed_calls) > 0
def maybe_handle_alerts(self) -> None:
if self._component_state != 'Started':
return
self.handle_alerts() self.handle_alerts()
def register_handler(self, alert_type: str, handler: Callable[[Any], None]) -> None: def stop(self):
for delayed_call in self.delayed_calls:
if delayed_call.active():
delayed_call.cancel()
self.delayed_calls = []
def register_handler(self, alert_type, handler):
""" """
Registers a function that will be called when 'alert_type' is pop'd Registers a function that will be called when 'alert_type' is pop'd
in handle_alerts. The handler function should look like: handler(alert) in handle_alerts. The handler function should look like: handler(alert)
Where 'alert' is the actual alert object from libtorrent. Where 'alert' is the actual alert object from libtorrent.
Args: :param alert_type: str, this is string representation of the alert name
alert_type: String representation of the libtorrent alert name. :param handler: func(alert), the function to be called when the alert is raised
Can be supplied with or without `_alert` suffix.
handler: Callback function when the alert is raised.
""" """
if alert_type and alert_type.endswith('_alert'): if alert_type not in self.handlers:
alert_type = alert_type[: -len('_alert')] # There is no entry for this alert type yet, so lets make it with an
# empty list.
self.handlers[alert_type] = []
# Append the handler to the list in the handlers dictionary
self.handlers[alert_type].append(handler) self.handlers[alert_type].append(handler)
log.debug('Registered handler for alert %s', alert_type) log.debug('Registered handler for alert %s', alert_type)
def deregister_handler(self, handler: Callable[[Any], None]): def deregister_handler(self, handler):
""" """
De-registers the `handler` function from all alert types. De-registers the `:param:handler` function from all alert types.
Args: :param handler: func, the handler function to deregister
handler: The handler function to deregister.
""" """
for alert_type_handlers in self.handlers.values(): # Iterate through all handlers and remove 'handler' where found
with contextlib.suppress(ValueError): for (dummy_key, value) in self.handlers.items():
alert_type_handlers.remove(handler) if handler in value:
# Handler is in this alert type list
value.remove(handler)
def handle_alerts(self): def handle_alerts(self):
""" """
@ -158,32 +112,26 @@ class AlertManager(component.Component):
num_alerts, num_alerts,
) )
# Loop through all alerts in the queue
for alert in alerts: for alert in alerts:
alert_type = alert.what() alert_type = type(alert).__name__
# Display the alert message # Display the alert message
if log.isEnabledFor(logging.DEBUG): if log.isEnabledFor(logging.DEBUG):
log.debug('%s: %s', alert_type, decode_bytes(alert.message())) log.debug('%s: %s', alert_type, decode_bytes(alert.message()))
if alert_type not in self.handlers:
continue
# Call any handlers for this alert type # Call any handlers for this alert type
for handler in self.handlers[alert_type]: if alert_type in self.handlers:
if log.isEnabledFor(logging.DEBUG): for handler in self.handlers[alert_type]:
log.debug('Handling alert: %s', alert_type) if log.isEnabledFor(logging.DEBUG):
d = task.deferLater(reactor, 0, handler, alert) log.debug('Handling alert: %s', alert_type)
on_handler_timeout = partial( # Copy alert attributes
self.on_delayed_call_timeout, alert_copy = SimpleNamespace(
handler=handler.__qualname__, **{
alert_type=alert_type, attr: getattr(alert, attr)
) for attr in dir(alert)
d.addTimeout( if not attr.startswith('__')
self.handlers_timeout_secs, }
reactor, )
onTimeoutCancel=on_handler_timeout, self.delayed_calls.append(reactor.callLater(0, handler, alert_copy))
)
self.delayed_calls.append(d)
def set_alert_queue_size(self, queue_size): def set_alert_queue_size(self, queue_size):
"""Sets the maximum size of the libtorrent alert queue""" """Sets the maximum size of the libtorrent alert queue"""


@ -12,16 +12,17 @@ import logging
import os import os
import shutil import shutil
import tempfile import tempfile
import threading
from base64 import b64decode, b64encode from base64 import b64decode, b64encode
from typing import Any, Dict, List, Optional, Tuple, Union from typing import Any, Dict, List, Optional, Tuple, Union
from urllib.request import URLError, urlopen from urllib.request import URLError, urlopen
from twisted.internet import defer, reactor, task, threads from twisted.internet import defer, reactor, task
from twisted.web.client import Agent, readBody from twisted.web.client import Agent, readBody
import deluge.common import deluge.common
import deluge.component as component import deluge.component as component
from deluge import metafile, path_chooser_common from deluge import path_chooser_common
from deluge._libtorrent import LT_VERSION, lt from deluge._libtorrent import LT_VERSION, lt
from deluge.configmanager import ConfigManager, get_config_dir from deluge.configmanager import ConfigManager, get_config_dir
from deluge.core.alertmanager import AlertManager from deluge.core.alertmanager import AlertManager
@ -198,7 +199,7 @@ class Core(component.Component):
self.session_status_timer_interval = 0.5 self.session_status_timer_interval = 0.5
self.session_status_timer = task.LoopingCall(self.session.post_session_stats) self.session_status_timer = task.LoopingCall(self.session.post_session_stats)
self.alertmanager.register_handler( self.alertmanager.register_handler(
'session_stats', self._on_alert_session_stats 'session_stats_alert', self._on_alert_session_stats
) )
self.session_rates_timer_interval = 2 self.session_rates_timer_interval = 2
self.session_rates_timer = task.LoopingCall(self._update_session_rates) self.session_rates_timer = task.LoopingCall(self._update_session_rates)
@ -373,9 +374,8 @@ class Core(component.Component):
def get_new_release(self): def get_new_release(self):
log.debug('get_new_release') log.debug('get_new_release')
try: try:
# Use HTTPS URL to avoid potential spoofing of release page.
self.new_release = ( self.new_release = (
urlopen('https://ftp.osuosl.org/pub/deluge/version-2.0') urlopen('http://download.deluge-torrent.org/version-2.0')
.read() .read()
.decode() .decode()
.strip() .strip()
@ -992,33 +992,31 @@ class Core(component.Component):
path, path,
tracker, tracker,
piece_length, piece_length,
comment=None, comment,
target=None, target,
webseeds=None, webseeds,
private=False, private,
created_by=None, created_by,
trackers=None, trackers,
add_to_session=False, add_to_session,
torrent_format=metafile.TorrentFormat.V1,
): ):
if isinstance(torrent_format, str):
torrent_format = metafile.TorrentFormat(torrent_format)
log.debug('creating torrent..') log.debug('creating torrent..')
return threads.deferToThread( threading.Thread(
self._create_torrent_thread, target=self._create_torrent_thread,
path, args=(
tracker, path,
piece_length, tracker,
comment=comment, piece_length,
target=target, comment,
webseeds=webseeds, target,
private=private, webseeds,
created_by=created_by, private,
trackers=trackers, created_by,
add_to_session=add_to_session, trackers,
torrent_format=torrent_format, add_to_session,
) ),
).start()
def _create_torrent_thread( def _create_torrent_thread(
self, self,
@ -1032,41 +1030,27 @@ class Core(component.Component):
created_by, created_by,
trackers, trackers,
add_to_session, add_to_session,
torrent_format,
): ):
from deluge import metafile from deluge import metafile
filecontent = metafile.make_meta_file_content( metafile.make_meta_file(
path, path,
tracker, tracker,
piece_length, piece_length,
comment=comment, comment=comment,
target=target,
webseeds=webseeds, webseeds=webseeds,
private=private, private=private,
created_by=created_by, created_by=created_by,
trackers=trackers, trackers=trackers,
torrent_format=torrent_format,
) )
write_file = False
if target or not add_to_session:
write_file = True
if not target:
target = metafile.default_meta_file_path(path)
filename = os.path.split(target)[-1]
if write_file:
with open(target, 'wb') as _file:
_file.write(filecontent)
filedump = b64encode(filecontent)
log.debug('torrent created!') log.debug('torrent created!')
if add_to_session: if add_to_session:
options = {} options = {}
options['download_location'] = os.path.split(path)[0] options['download_location'] = os.path.split(path)[0]
self.add_torrent_file(filename, filedump, options) with open(target, 'rb') as _file:
return filename, filedump filedump = b64encode(_file.read())
self.add_torrent_file(os.path.split(target)[1], filedump, options)
@export @export
def upload_plugin(self, filename: str, filedump: Union[str, bytes]) -> None: def upload_plugin(self, filename: str, filedump: Union[str, bytes]) -> None:


@ -7,7 +7,6 @@
# #
"""The Deluge daemon""" """The Deluge daemon"""
import logging import logging
import os import os
import socket import socket


@ -8,7 +8,6 @@
"""PluginManager for Core""" """PluginManager for Core"""
import logging import logging
from twisted.internet import defer from twisted.internet import defer


@ -200,10 +200,7 @@ class PreferencesManager(component.Component):
def __set_listen_on(self): def __set_listen_on(self):
"""Set the ports and interface address to listen for incoming connections on.""" """Set the ports and interface address to listen for incoming connections on."""
if self.config['random_port']: if self.config['random_port']:
if ( if not self.config['listen_random_port']:
not self.config['listen_reuse_port']
or not self.config['listen_random_port']
):
self.config['listen_random_port'] = random.randrange(49152, 65525) self.config['listen_random_port'] = random.randrange(49152, 65525)
listen_ports = [ listen_ports = [
self.config['listen_random_port'] self.config['listen_random_port']


@ -7,7 +7,6 @@
# #
"""RPCServer Module""" """RPCServer Module"""
import logging import logging
import os import os
import sys import sys
@ -28,7 +27,6 @@ from deluge.core.authmanager import (
) )
from deluge.crypto_utils import check_ssl_keys, get_context_factory from deluge.crypto_utils import check_ssl_keys, get_context_factory
from deluge.error import ( from deluge.error import (
BadLoginError,
DelugeError, DelugeError,
IncompatibleClient, IncompatibleClient,
NotAuthorizedError, NotAuthorizedError,
@ -48,11 +46,13 @@ TCallable = TypeVar('TCallable', bound=Callable)
@overload @overload
def export(func: TCallable) -> TCallable: ... def export(func: TCallable) -> TCallable:
...
@overload @overload
def export(auth_level: int) -> Callable[[TCallable], TCallable]: ... def export(auth_level: int) -> Callable[[TCallable], TCallable]:
...
def export(auth_level=AUTH_LEVEL_DEFAULT): def export(auth_level=AUTH_LEVEL_DEFAULT):
@ -274,22 +274,14 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
raise IncompatibleClient(deluge.common.get_version()) raise IncompatibleClient(deluge.common.get_version())
ret = component.get('AuthManager').authorize(*args, **kwargs) ret = component.get('AuthManager').authorize(*args, **kwargs)
if ret: if ret:
self.factory.authorized_sessions[self.transport.sessionno] = ( self.factory.authorized_sessions[
self.AuthLevel(ret, args[0]) self.transport.sessionno
) ] = self.AuthLevel(ret, args[0])
self.factory.session_protocols[self.transport.sessionno] = self self.factory.session_protocols[self.transport.sessionno] = self
except Exception as ex: except Exception as ex:
send_error() send_error()
if not isinstance(ex, _ClientSideRecreateError): if not isinstance(ex, _ClientSideRecreateError):
log.exception(ex) log.exception(ex)
if isinstance(ex, BadLoginError):
peer = self.transport.getPeer()
log.error(
'Deluge client authentication error made from: %s:%s (%s)',
peer.host,
peer.port,
str(ex),
)
else: else:
self.sendData((RPC_RESPONSE, request_id, (ret))) self.sendData((RPC_RESPONSE, request_id, (ret)))
if not ret: if not ret:
@ -553,8 +545,8 @@ class RPCServer(component.Component):
:type event: :class:`deluge.event.DelugeEvent` :type event: :class:`deluge.event.DelugeEvent`
""" """
log.debug('intevents: %s', self.factory.interested_events) log.debug('intevents: %s', self.factory.interested_events)
# Use copy of `interested_events` since it can mutate while iterating. # Find sessions interested in this event
for session_id, interest in self.factory.interested_events.copy().items(): for session_id, interest in self.factory.interested_events.items():
if event.name in interest: if event.name in interest:
log.debug('Emit Event: %s %s', event.name, event.args) log.debug('Emit Event: %s %s', event.name, event.args)
# This session is interested so send a RPC_EVENT # This session is interested so send a RPC_EVENT


@ -1138,8 +1138,9 @@ class Torrent:
'download_location': lambda: self.options['download_location'], 'download_location': lambda: self.options['download_location'],
'seeds_peers_ratio': lambda: -1.0 'seeds_peers_ratio': lambda: -1.0
if self.status.num_incomplete == 0 if self.status.num_incomplete == 0
# Use -1.0 to signify infinity else ( # Use -1.0 to signify infinity
else (self.status.num_complete / self.status.num_incomplete), self.status.num_complete / self.status.num_incomplete
),
'seed_rank': lambda: self.status.seed_rank, 'seed_rank': lambda: self.status.seed_rank,
'state': lambda: self.state, 'state': lambda: self.state,
'stop_at_ratio': lambda: self.options['stop_at_ratio'], 'stop_at_ratio': lambda: self.options['stop_at_ratio'],
@ -1543,18 +1544,20 @@ class Torrent:
self.status.pieces, self.handle.piece_availability() self.status.pieces, self.handle.piece_availability()
): ):
if piece: if piece:
# Completed. pieces.append(3) # Completed.
pieces.append(3)
elif avail_piece: elif avail_piece:
# Available, just not downloaded nor being downloaded. pieces.append(
pieces.append(1) 1
) # Available, just not downloaded nor being downloaded.
else: else:
# Missing, no known peer with piece, or not asked for yet. pieces.append(
pieces.append(0) 0
) # Missing, no known peer with piece, or not asked for yet.
for peer_info in self.handle.get_peer_info(): for peer_info in self.handle.get_peer_info():
if peer_info.downloading_piece_index >= 0: if peer_info.downloading_piece_index >= 0:
# Being downloaded from peer. pieces[
pieces[peer_info.downloading_piece_index] = 2 peer_info.downloading_piece_index
] = 2 # Being downloaded from peer.
return pieces return pieces


@ -7,7 +7,6 @@
# #
"""TorrentManager handles Torrent objects""" """TorrentManager handles Torrent objects"""
import datetime import datetime
import logging import logging
import operator import operator
@ -51,10 +50,10 @@ from deluge.event import (
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
LT_DEFAULT_ADD_TORRENT_FLAGS = ( LT_DEFAULT_ADD_TORRENT_FLAGS = (
lt.torrent_flags.paused lt.add_torrent_params_flags_t.flag_paused
| lt.torrent_flags.auto_managed | lt.add_torrent_params_flags_t.flag_auto_managed
| lt.torrent_flags.update_subscribe | lt.add_torrent_params_flags_t.flag_update_subscribe
| lt.torrent_flags.apply_ip_filter | lt.add_torrent_params_flags_t.flag_apply_ip_filter
) )
@ -203,32 +202,34 @@ class TorrentManager(component.Component):
# Register alert functions # Register alert functions
alert_handles = [ alert_handles = [
'external_ip', 'external_ip_alert',
'performance', 'performance_alert',
'add_torrent', 'add_torrent_alert',
'metadata_received', 'metadata_received_alert',
'torrent_finished', 'torrent_finished_alert',
'torrent_paused', 'torrent_paused_alert',
'torrent_checked', 'torrent_checked_alert',
'torrent_resumed', 'torrent_resumed_alert',
'tracker_reply', 'tracker_reply_alert',
'tracker_announce', 'tracker_announce_alert',
'tracker_warning', 'tracker_warning_alert',
'tracker_error', 'tracker_error_alert',
'file_renamed', 'file_renamed_alert',
'file_error', 'file_error_alert',
'file_completed', 'file_completed_alert',
'storage_moved', 'storage_moved_alert',
'storage_moved_failed', 'storage_moved_failed_alert',
'state_update', 'state_update_alert',
'state_changed', 'state_changed_alert',
'save_resume_data', 'save_resume_data_alert',
'save_resume_data_failed', 'save_resume_data_failed_alert',
'fastresume_rejected', 'fastresume_rejected_alert',
] ]
for alert_handle in alert_handles: for alert_handle in alert_handles:
on_alert_func = getattr(self, ''.join(['on_alert_', alert_handle])) on_alert_func = getattr(
self, ''.join(['on_alert_', alert_handle.replace('_alert', '')])
)
self.alerts.register_handler(alert_handle, on_alert_func) self.alerts.register_handler(alert_handle, on_alert_func)
# Define timers # Define timers
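A short sketch of the registration convention after this change: the develop side drops the '_alert' suffix from the handle names, and register_handler() strips the suffix if it is still supplied. The handler body and component lookup are illustrative:

import deluge.component as component

def on_torrent_finished(alert):
    # 'alert' is the raw libtorrent alert object on the develop side
    print(alert.message())

alerts = component.get('AlertManager')
# Both spellings end up in the same handler list; develop normalizes away '_alert'
alerts.register_handler('torrent_finished', on_torrent_finished)
alerts.register_handler('torrent_finished_alert', on_torrent_finished)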
@ -291,8 +292,8 @@ class TorrentManager(component.Component):
if torrent.options['remove_at_ratio']: if torrent.options['remove_at_ratio']:
self.remove(torrent_id) self.remove(torrent_id)
break break
if not torrent.status.paused:
torrent.pause() torrent.pause()
def __getitem__(self, torrent_id): def __getitem__(self, torrent_id):
"""Return the Torrent with torrent_id. """Return the Torrent with torrent_id.
@ -368,11 +369,11 @@ class TorrentManager(component.Component):
add_torrent_params.flags = ( add_torrent_params.flags = (
( (
LT_DEFAULT_ADD_TORRENT_FLAGS LT_DEFAULT_ADD_TORRENT_FLAGS
| lt.torrent_flags.duplicate_is_error | lt.add_torrent_params_flags_t.flag_duplicate_is_error
| lt.torrent_flags.upload_mode | lt.add_torrent_params_flags_t.flag_upload_mode
) )
^ lt.torrent_flags.auto_managed ^ lt.add_torrent_params_flags_t.flag_auto_managed
^ lt.torrent_flags.paused ^ lt.add_torrent_params_flags_t.flag_paused
) )
torrent_handle = self.session.add_torrent(add_torrent_params) torrent_handle = self.session.add_torrent(add_torrent_params)
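The XOR above clears bits that LT_DEFAULT_ADD_TORRENT_FLAGS sets, so the handle is added unpaused, not auto-managed, and in upload-only mode. A worked example of the bit arithmetic, using the develop spelling of the flags:

import libtorrent as lt

LT_DEFAULT_ADD_TORRENT_FLAGS = (
    lt.torrent_flags.paused
    | lt.torrent_flags.auto_managed
    | lt.torrent_flags.update_subscribe
    | lt.torrent_flags.apply_ip_filter
)

flags = (
    LT_DEFAULT_ADD_TORRENT_FLAGS
    | lt.torrent_flags.duplicate_is_error
    | lt.torrent_flags.upload_mode
) ^ lt.torrent_flags.auto_managed ^ lt.torrent_flags.paused

assert not flags & lt.torrent_flags.paused        # cleared by the XOR
assert not flags & lt.torrent_flags.auto_managed  # cleared by the XOR
assert flags & lt.torrent_flags.upload_mode       # still set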
@@ -435,8 +436,8 @@ class TorrentManager(component.Component):
magnet_info = get_magnet_info(magnet) magnet_info = get_magnet_info(magnet)
if magnet_info: if magnet_info:
add_torrent_params['name'] = magnet_info['name'] add_torrent_params['name'] = magnet_info['name']
add_torrent_params['trackers'] = list(magnet_info['trackers'])
torrent_id = magnet_info['info_hash'] torrent_id = magnet_info['info_hash']
# Workaround lt 1.2 bug for magnet resume data with no metadata
add_torrent_params['info_hash'] = bytes(bytearray.fromhex(torrent_id)) add_torrent_params['info_hash'] = bytes(bytearray.fromhex(torrent_id))
else: else:
raise AddTorrentError( raise AddTorrentError(
@@ -480,12 +481,16 @@ class TorrentManager(component.Component):
# Set flags: enable duplicate_is_error & override_resume_data, disable auto_managed. # Set flags: enable duplicate_is_error & override_resume_data, disable auto_managed.
add_torrent_params['flags'] = ( add_torrent_params['flags'] = (
LT_DEFAULT_ADD_TORRENT_FLAGS | lt.torrent_flags.duplicate_is_error LT_DEFAULT_ADD_TORRENT_FLAGS | lt.add_torrent_params_flags_t.flag_duplicate_is_error | lt.add_torrent_params_flags_t.flag_override_resume_data
) ^ lt.torrent_flags.auto_managed ) ^ lt.add_torrent_params_flags_t.flag_auto_managed
if options['seed_mode']: if options['seed_mode']:
add_torrent_params['flags'] |= lt.torrent_flags.seed_mode add_torrent_params['flags'] |= lt.add_torrent_params_flags_t.flag_seed_mode
if options['super_seeding']: if options['super_seeding']:
add_torrent_params['flags'] |= lt.torrent_flags.super_seeding add_torrent_params['flags'] |= lt.add_torrent_params_flags_t.flag_super_seeding
return torrent_id, add_torrent_params return torrent_id, add_torrent_params
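For magnets without metadata, the code above stores the raw 20-byte info-hash and the flag set in a plain dict before it is handed to libtorrent. A condensed, hypothetical illustration of that shape (hash, name, and path are made up):

import libtorrent as lt

torrent_id = 'c12fe1c06bba254a9dc9f519b335aa7c1367a88a'  # example hex info-hash
add_torrent_params = {
    'name': 'example-magnet',
    'save_path': '/tmp/downloads',
    # lt 1.2 workaround: pass the info-hash as raw bytes when there is no metadata.
    'info_hash': bytes(bytearray.fromhex(torrent_id)),
    'flags': (
        lt.torrent_flags.update_subscribe
        | lt.torrent_flags.apply_ip_filter
        | lt.torrent_flags.duplicate_is_error
    ),
}
session = lt.session()
handle = session.add_torrent(add_torrent_params)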

View file

@@ -166,8 +166,7 @@ def deprecated(func):
class CoroutineDeferred(defer.Deferred): class CoroutineDeferred(defer.Deferred):
"""Wraps a coroutine in a Deferred. """Wraps a coroutine in a Deferred.
It will dynamically pass through the underlying coroutine without wrapping where appropriate. It will dynamically pass through the underlying coroutine without wrapping where appropriate."""
"""
def __init__(self, coro: Coroutine): def __init__(self, coro: Coroutine):
# Delay this import to make sure a reactor was installed first # Delay this import to make sure a reactor was installed first
@@ -196,33 +195,17 @@ class CoroutineDeferred(defer.Deferred):
d = defer.ensureDeferred(self.coro) d = defer.ensureDeferred(self.coro)
d.chainDeferred(self) d.chainDeferred(self)
def _callback_activate(self): def addCallbacks(self, *args, **kwargs): # noqa: N802
"""Verify awaited status before calling activate."""
assert not self.awaited, 'Cannot add callbacks to an already awaited coroutine.' assert not self.awaited, 'Cannot add callbacks to an already awaited coroutine.'
self.activate() self.activate()
def addCallback(self, *args, **kwargs): # noqa: N802
self._callback_activate()
return super().addCallback(*args, **kwargs)
def addCallbacks(self, *args, **kwargs): # noqa: N802
self._callback_activate()
return super().addCallbacks(*args, **kwargs) return super().addCallbacks(*args, **kwargs)
def addErrback(self, *args, **kwargs): # noqa: N802
self._callback_activate()
return super().addErrback(*args, **kwargs)
def addBoth(self, *args, **kwargs): # noqa: N802
self._callback_activate()
return super().addBoth(*args, **kwargs)
_RetT = TypeVar('_RetT') _RetT = TypeVar('_RetT')
def maybe_coroutine( def maybe_coroutine(
f: Callable[..., Coroutine[Any, Any, _RetT]], f: Callable[..., Coroutine[Any, Any, _RetT]]
) -> 'Callable[..., defer.Deferred[_RetT]]': ) -> 'Callable[..., defer.Deferred[_RetT]]':
"""Wraps a coroutine function to make it usable as a normal function that returns a Deferred.""" """Wraps a coroutine function to make it usable as a normal function that returns a Deferred."""

View file

@@ -13,7 +13,6 @@ This module describes the types of events that can be generated by the daemon
and subsequently emitted to the clients. and subsequently emitted to the clients.
""" """
known_events = {} known_events = {}

View file

@@ -6,7 +6,7 @@
# See LICENSE for more details. # See LICENSE for more details.
# #
import email.message import cgi
import logging import logging
import os.path import os.path
import zlib import zlib
@@ -21,6 +21,8 @@ from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent from twisted.web.iweb import IAgent
from zope.interface import implementer from zope.interface import implementer
from deluge.common import get_version
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@@ -131,10 +133,9 @@ class HTTPDownloaderAgent:
content_disp = headers.getRawHeaders(b'content-disposition')[0].decode( content_disp = headers.getRawHeaders(b'content-disposition')[0].decode(
'utf-8' 'utf-8'
) )
message = email.message.EmailMessage() content_disp_params = cgi.parse_header(content_disp)[1]
message['content-disposition'] = content_disp if 'filename' in content_disp_params:
new_file_name = message.get_filename() new_file_name = content_disp_params['filename']
if new_file_name:
new_file_name = sanitise_filename(new_file_name) new_file_name = sanitise_filename(new_file_name)
new_file_name = os.path.join( new_file_name = os.path.join(
os.path.split(self.filename)[0], new_file_name os.path.split(self.filename)[0], new_file_name
@@ -151,10 +152,7 @@ class HTTPDownloaderAgent:
self.filename = new_file_name self.filename = new_file_name
cont_type_header = headers.getRawHeaders(b'content-type')[0].decode() cont_type_header = headers.getRawHeaders(b'content-type')[0].decode()
message = email.message.EmailMessage() cont_type, params = cgi.parse_header(cont_type_header)
message['content-type'] = cont_type_header
cont_type = message.get_content_type()
params = message['content-type'].params
# Only re-encode text content types. # Only re-encode text content types.
encoding = None encoding = None
if cont_type.startswith('text/'): if cont_type.startswith('text/'):
@@ -181,7 +179,8 @@ class HTTPDownloaderAgent:
headers = Headers() headers = Headers()
if not headers.hasHeader(b'User-Agent'): if not headers.hasHeader(b'User-Agent'):
user_agent = 'Deluge' version = get_version()
user_agent = 'Deluge/%s (https://deluge-torrent.org)' % version
headers.addRawHeader('User-Agent', user_agent) headers.addRawHeader('User-Agent', user_agent)
d = self.agent.request( d = self.agent.request(
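The header-parsing hunks above replace cgi.parse_header (the cgi module is deprecated and removed in Python 3.13) with email.message.EmailMessage for extracting the suggested filename and charset from response headers. A standalone sketch with made-up header values:

import email.message

# Content-Disposition -> suggested filename
message = email.message.EmailMessage()
message['content-disposition'] = 'attachment; filename="ubuntu-22.04.iso"'
print(message.get_filename())                 # ubuntu-22.04.iso

# Content-Type -> media type and parameters
message = email.message.EmailMessage()
message['content-type'] = 'text/html; charset=UTF-8'
print(message.get_content_type())             # text/html
print(dict(message['content-type'].params))   # {'charset': 'UTF-8'}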

Remaining file diffs (73 files) suppressed because they are too large.

Some files were not shown because too many files have changed in this diff.