Mirror of https://git.deluge-torrent.org/deluge (synced 2025-08-04 07:28:39 +00:00)

Compare commits: develop ... deluge-2.0.0b1
No commits in common. "develop" and "deluge-2.0.0b1" have entirely different histories.
1088 changed files with 309,189 additions and 515,147 deletions.

.gitattributes (vendored) | 2

@@ -2,5 +2,3 @@
.gitmodules export-ignore
.gitignore export-ignore
*.py diff=python
ext-all.js diff=minjs
*.state -merge -text

.github/workflows/cd.yml (vendored) | 104

@@ -1,104 +0,0 @@
name: Package

on:
  push:
    tags:
      - "deluge-*"
      - "!deluge*-dev*"
    branches:
      - develop
  pull_request:
    types: [labeled, opened, synchronize, reopened]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
    inputs:
      ref:
        description: "Enter a tag or commit to package"
        default: ""

jobs:
  windows_package:
    runs-on: windows-2022
    if: (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'package'))
    strategy:
      matrix:
        arch: [x64, x86]
        python: ["3.9"]
        libtorrent: [2.0.7, 1.2.19]

    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Checkout Deluge source to subdir to enable packaging any tag/commit
      - name: Checkout Deluge source
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.inputs.ref }}
          fetch-depth: 0
          path: deluge_src

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python}}
          architecture: ${{ matrix.arch }}
          cache: pip

      - name: Prepare pip
        run: python -m pip install wheel setuptools==68.*

      - name: Install GTK
        run: |
          $WebClient = New-Object System.Net.WebClient
          $WebClient.DownloadFile("https://github.com/deluge-torrent/gvsbuild-release/releases/download/latest/gvsbuild-py${{ matrix.python }}-vs16-${{ matrix.arch }}.zip","C:\GTK.zip")
          7z x C:\GTK.zip -oc:\GTK
          echo "C:\GTK\release\lib" | Out-File -FilePath $env:GITHUB_PATH -Append
          echo "C:\GTK\release\bin" | Out-File -FilePath $env:GITHUB_PATH -Append
          echo "C:\GTK\release" | Out-File -FilePath $env:GITHUB_PATH -Append
          python -m pip install --no-index --find-links="C:\GTK\release\python" pycairo PyGObject

      - name: Install Python dependencies
        # Pillow no longer provides 32-bit wheels for Windows
        # so specify only-binary to install old version.
        run: >
          python -m pip install
          --only-binary=pillow
          twisted[tls]==22.8.0
          libtorrent==${{ matrix.libtorrent }}
          pyinstaller
          pygame
          -r requirements.txt

      - name: Install Deluge
        working-directory: deluge_src
        run: |
          python -m pip install .
          python setup.py install_scripts

      - name: Freeze Deluge
        working-directory: packaging/win
        run: |
          pyinstaller --clean delugewin.spec --distpath freeze

      - name: Verify Deluge exes
        working-directory: packaging/win/freeze/Deluge/
        run: |
          deluge-debug.exe -v
          deluged-debug.exe -v
          deluge-web-debug.exe -v
          deluge-console -v

      - name: Make Deluge Installer
        working-directory: ./packaging/win
        run: |
          python setup_nsis.py
          makensis /Darch=${{ matrix.arch }} deluge-win-installer.nsi

      - uses: actions/upload-artifact@v4
        with:
          name: deluge-py${{ matrix.python }}-lt${{ matrix.libtorrent }}-${{ matrix.arch }}
          path: packaging/win/*.exe

.github/workflows/ci.yml (vendored) | 101

@@ -1,101 +0,0 @@
name: CI

on:
  push:
  pull_request:

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
    inputs:
      core-dump:
        description: "Set to 1 to enable retrieving core dump from crashes"
        default: "0"
jobs:
  test-linux:
    runs-on: ubuntu-22.04
    strategy:
      matrix:
        python-version: ["3.7", "3.10"]

    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: "pip"
          cache-dependency-path: "requirements*.txt"

      - name: Sets env var for security
        if: (github.event_name == 'pull_request' && contains(github.event.pull_request.body, 'security_test')) || (github.event_name == 'push' && contains(github.event.head_commit.message, 'security_test'))
        run: echo "SECURITY_TESTS=True" >> $GITHUB_ENV

      - name: Install dependencies
        run: |
          pip install --upgrade pip wheel setuptools
          pip install -r requirements-ci.txt
          pip install -e .

      - name: Install security dependencies
        if: contains(env.SECURITY_TESTS, 'True')
        run: |
          wget -O- $TESTSSL_URL$TESTSSL_VER | tar xz
          mv -t deluge/tests/data testssl.sh-$TESTSSL_VER/testssl.sh testssl.sh-$TESTSSL_VER/etc/;
        env:
          TESTSSL_VER: 3.0.6
          TESTSSL_URL: https://codeload.github.com/drwetter/testssl.sh/tar.gz/refs/tags/v

      - name: Setup core dump catch and store
        if: github.event.inputs.core-dump == '1'
        run: |
          sudo mkdir /cores/ && sudo chmod 777 /cores/
          echo "/cores/%E.%p" | sudo tee /proc/sys/kernel/core_pattern
          ulimit -c unlimited
          sudo apt install glibc-tools
          echo "DEBUG_PREFIX=catchsegv python -X dev -m" >> $GITHUB_ENV

      - name: Test with pytest
        run: |
          python -c 'from deluge._libtorrent import lt; print(lt.__version__)';
          $DEBUG_PREFIX pytest -v -m "not (todo or gtkui)" deluge

      - uses: actions/upload-artifact@v4
        # capture all crashes as build artifacts
        if: failure()
        with:
          name: crashes
          path: /cores

  test-windows:
    runs-on: windows-2022
    strategy:
      matrix:
        python-version: ["3.7", "3.10"]

    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: "pip"
          cache-dependency-path: "requirements*.txt"

      - name: Install dependencies
        run: |
          pip install --upgrade pip wheel setuptools
          pip install -r requirements-ci.txt
          pip install -e .

      - name: Test with pytest
        run: |
          python -c 'import libtorrent as lt; print(lt.__version__)';
          pytest -v -m "not (todo or gtkui or security)" deluge

.github/workflows/docs.yml (vendored) | 38

@@ -1,38 +0,0 @@
name: Docs

# Controls when the action will run.
on:
  # Triggers the workflow on push or pull request events but only for the main branch
  push:
  pull_request:

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/setup-python@v5
        with:
          python-version: "3.10"
          cache: "pip"
          cache-dependency-path: "requirements*.txt"

      - name: Install dependencies
        run: |
          pip install --upgrade pip wheel
          pip install tox
          sudo apt-get install enchant-2

      - name: Build docs with tox
        env:
          TOX_ENV: docs
        run: |
          tox -e $TOX_ENV

.github/workflows/lint.yml (vendored) | 17

@@ -1,17 +0,0 @@
name: Linting

on:
  push:
  pull_request:

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
      - name: Run pre-commit linting
        uses: pre-commit/action@v3.0.1

.gitignore (vendored) | 18

@@ -2,24 +2,18 @@
build
.cache
dist
docs/source/modules/deluge*.rst
*.egg-info/
*.dist-info/
docs/source/modules
*egg-info
*.egg
*.log
__pycache__/
*.py[cod]
*.pyc
*.tar.*
_trial_temp
.tox/
deluge/i18n/*/
deluge/ui/web/js/*.js
deluge/ui/web/js/extjs/ext-extensions*.js
deluge.pot
*.desktop
*.metainfo.xml
*.appdata.xml
.build_data*
osx/app
RELEASE-VERSION
.venv*
# used by setuptools to cache downloaded eggs
/.eggs
_pytest_temp/

.pre-commit-config.yaml

@@ -1,41 +0,0 @@
default_language_version:
  python: python3
exclude: >
  (?x)^(
      deluge/ui/web/docs/template/.*|
      deluge/tests/data/.*svg|
  )$
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    # Ruff version.
    rev: v0.6.4
    hooks:
      - id: ruff
        name: Chk Ruff
        args: [--fix]
      - id: ruff-format
        name: Fmt Ruff
  - repo: https://github.com/pre-commit/mirrors-prettier
    rev: v2.7.1
    hooks:
      - id: prettier
        name: Fmt Prettier
        # Workaround to list modified files only.
        args: [--list-different]
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.4.0
    hooks:
      - id: end-of-file-fixer
        name: Fix End-of-files
        exclude_types: [javascript, css]
      - id: mixed-line-ending
        name: Fix Line endings
        args: [--fix=auto]
      - id: trailing-whitespace
        name: Fix Trailing whitespace
  - repo: https://github.com/asottile/pyupgrade
    rev: v3.3.1
    hooks:
      - id: pyupgrade
        args: [--py37-plus]
        stages: [manual]

.prettierignore

@@ -1,6 +0,0 @@
deluge/ui/web/css/ext-*.css
deluge/ui/web/js/extjs/ext-*.js
deluge/ui/web/docs/
deluge/ui/web/themes/images/
*.py*
*.html

.prettierrc.yaml

@@ -1,13 +0,0 @@
trailingComma: "es5"
tabWidth: 4
singleQuote: true
overrides:
  - files:
      - "*.yaml"
      - ".*.yaml"
      - "*.yml"
      - ".*.yml"
      - "*.md"
    options:
      tabWidth: 2
      singleQuote: false

.pylintrc

@@ -69,7 +69,7 @@ confidence=
# Arranged by category and use symbolic names instead of ids.
disable=
    # Convention
    missing-docstring, invalid-name, bad-continuation,
    missing-docstring, invalid-name,
    # Error
    no-member, no-name-in-module,
    # Information

@@ -289,7 +289,7 @@ callbacks=cb_,_cb

# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=
redefining-builtins-modules=six.moves,future.builtins,future_builtins


[TYPECHECK]

@@ -359,6 +359,11 @@ known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant

# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no


[DESIGN]

.readthedocs.yml

@@ -1,29 +0,0 @@
# .readthedocs.yml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

build:
  os: ubuntu-22.04
  tools:
    python: "3.10"
  jobs:
    post_checkout:
      - git fetch --unshallow || true

# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: docs/source/conf.py

# Optionally build your docs in additional formats such as PDF and ePub
formats: all

# Optionally set the version of Python and requirements required to build your docs
python:
  install:
    - requirements: requirements.txt
    - requirements: docs/requirements.txt
    - method: pip
      path: .

.travis.yml (normal file) | 51

@@ -0,0 +1,51 @@
dist: trusty
sudo: required
group: deprecated-2017Q2

language: python

python:
  - "2.7"

cache: pip

before_install:
  - lsb_release -a
  - sudo add-apt-repository ppa:deluge-team/develop -y
  - sudo apt-get update

# command to install dependencies
install:
  - bash -c "echo $APTPACKAGES"
  - sudo apt-get install $APTPACKAGES
  - pip install "tox==2.1.1"

env:
  global:
    - APTPACKAGES="python-libtorrent"
    - APTPACKAGES_GTKUI="python-gobject python-glade2"
    - DISPLAY=:99.0
  matrix:
    - TOX_ENV=pydef
    - TOX_ENV=flake8
    # - TOX_ENV=flake8-complexity
    - TOX_ENV=docs
    # - TOX_ENV=todo
    - TOX_ENV=trial APTPACKAGES="$APTPACKAGES $APTPACKAGES_GTKUI"
    - TOX_ENV=pygtkui APTPACKAGES="$APTPACKAGES $APTPACKAGES_GTKUI"
    # - TOX_ENV=testcoverage APTPACKAGES="$APTPACKAGES $APTPACKAGES_GTKUI"
    - TOX_ENV=plugins

virtualenv:
  system_site_packages: true

# We use xvfb for the GTKUI tests
before_script:
  - export PYTHONPATH=$PYTHONPATH:$PWD
  - python -c "import libtorrent as lt; print lt.__version__"
  - "/sbin/start-stop-daemon --start --quiet --pidfile /tmp/custom_xvfb_99.pid --make-pidfile --background --exec /usr/bin/Xvfb -- :99 -ac -screen 0 1280x1024x16"
  - echo '2.0.0.dev0' > RELEASE-VERSION

script:
  - bash -c "echo $DISPLAY"
  - tox -e $TOX_ENV

AUTHORS | 17

@@ -39,9 +39,14 @@ Images Authors:
* files: deluge/ui/data/pixmaps/*.svg, *.png
    deluge/ui/web/icons/active.png, alert.png, all.png, checking.png, dht.png,
    downloading.png, inactive.png, queued.png, seeding.png, traffic.png
    deluge/ui/web/images/deluge*.png
  exceptions: deluge/ui/data/pixmaps/deluge.svg and derivatives
  copyright: Andrew Resch
  license: GPLv3

* files: deluge/ui/data/pixmaps/deluge.svg and derivatives
    deluge/ui/web/icons/apple-pre-*.png, deluge*.png
  copyright: Calum Lind
    deluge/ui/web/images/deluge*.png
  copyright: Andrew Wedderburn
  license: GPLv3

* files: deluge/plugins/blocklist/blocklist/data/*.png

@@ -50,9 +55,11 @@ Images Authors:
  license: GPLv2
  url: http://ftp.acc.umu.se/pub/GNOME/sources/gnome-icon-theme

* files: deluge/ui/data/pixmaps/magnet*.svg, *.png
  copyright: Matias Wilkman
  license:
* files: deluge/ui/data/pixmaps/magnet.png
  copyright: Woothemes
  license: Freeware
  icon pack: WP Woothemes Ultimate
  url: http://www.woothemes.com/

* files: deluge/ui/data/pixmaps/flags/*.png
  copyright: Mark James <mjames@gmail.com>

CHANGELOG.md | 312

@@ -1,312 +0,0 @@
# Changelog

## 2.1.x (TBA)

### Breaking changes

- Removed Python 3.6 support (Python >= 3.7)

### Core

- Fix GHSL-2024-189 - insecure HTTP for new version check.
- Fix alert handler segfault.
- Add support for creating v2 torrents.

### GTK UI

- Fix changing torrent ownership.
- Fix upper limit of upload/download in Add Torrent dialog.
- Fix #3339 - Resizing window crashes with Piecesbar or Stats plugin.
- Fix #3350 - Unable to use quick search.
- Fix #3598 - Missing AppIndicator option in Preferences.
- Set Appindicator as default for tray icon on Linux.
- Add feature to switch between dark/light themes.

### Web UI

- Fix GHSL-2024-191 - potential flag endpoint path traversal.
- Fix GHSL-2024-188 - js script dir traversal vulnerability.
- Fix GHSL-2024-190 - insecure tracker icon endpoint.
- Fix unable to stop daemon in connection manager.
- Fix responsiveness to avoid "Connection lost".
- Add support for network interface name as well as IP address.
- Add ability to change UI theme.

### Console UI

- Fix 'rm' and 'move' commands hanging when done.
- Fix #3538 - Unable to add host in connection manager.
- Disable interactive-mode on Windows.

### UI library

- Fix tracker icon display by converting to png format.
- Fix splitting trackers by newline
- Add clickable URLs for torrent comment and tracker status.

### Label

- Fix torrent deletion not removed from config.
- Fix label display name in submenu.

### AutoAdd

- Fix #3515 - Torrent file decoding errors disabled watch folder.

## 2.1.1 (2022-07-10)

### Core

- Fix missing trackers added via magnet
- Fix handling magnets with tracker tiers

## 2.1.0 (2022-06-28)

### Breaking changes

- Python 2 support removed (Python >= 3.6)
- libtorrent minimum requirement increased (>= 1.2).

### Core

- Add support for SVG tracker icons.
- Fix tracker icon error handling.
- Fix cleaning-up tracker icon temp files.
- Fix Plugin manager to handle new metadata 2.1.
- Hide passwords in config logs.
- Fix cleaning-up temp files in add_torrent_url.
- Fix KeyError in sessionproxy after torrent delete.
- Remove libtorrent deprecated functions.
- Fix file_completed_alert handling.
- Add plugin keys to get_torrents_status.
- Add support for pygeoip dependency.
- Fix crash logging to Windows protected folder.
- Add is_interface and is_interface_name to validate network interfaces.
- Fix is_url and is_infohash error with None value.
- Fix load_libintl error.
- Add support for IPv6 in host lists.
- Add systemd user services.
- Fix refresh and expire the torrent status cache.
- Fix crash when logging errors initializing gettext.

### Web UI

- Fix ETA column sorting in correct order (#3413).
- Fix defining foreground and background colors.
- Accept charset in content-type for json messages.
- Fix 'Complete Seen' and 'Completed' sorting.
- Fix encoding HTML entities for torrent attributes to prevent XSS.

### Gtk UI

- Fix download location textbox width.
- Fix obscured port number in Connection Manager.
- Increase connection manager default height.
- Fix bug with setting move completed in Options tab.
- Fix adding daemon accounts.
- Add workaround for crash on Windows with ico or gif icons.
- Hide account password length in log.
- Added a torrent menu option for magnet copy.
- Fix unable to prefetch magnet in thinclient mode.
- Use GtkSpinner when testing open port.
- Update About Dialog year.
- Fix Edit Torrents dialogs close issues.
- Fix ETA being copied to neighboring empty cells.
- Disable GTK CSD by default on Windows.

### Console UI

- Fix curses.init_pair raise ValueError on Py3.10.
- Swap j and k key's behavior to fit vim mode.
- Fix torrent details status error.
- Fix incorrect test for when a host is online.
- Add the torrent label to info command.

### AutoAdd

- Fix handling torrent decode errors.
- Fix error dialog not being shown on error.

### Blocklist

- Add frequency unit to interval label.

### Notifications

- Fix UnicodeEncodeError upon non-ascii torrent name.

## 2.0.5 (2021-12-15)

### WebUI

- Fix js minifying error resulting in WebUI blank screen.
- Silence erronous missing translations warning.

## 2.0.4 (2021-12-12)

### Packaging

- Fix python optional setup.py requirements

### Gtk UI

- Add detection of torrent URL on GTK UI focus
- Fix piecesbar crashing when enabled
- Remove num_blocks_cache_hits in stats
- Fix unhandled error with empty clipboard
- Add torrentdetails tabs position menu (#3441)
- Hide pygame community banner in console
- Fix cmp function for None types (#3309)
- Fix loading config with double-quotes in string
- Fix Status tab download speed and uploaded

### Web UI

- Handle torrent add failures
- Add menu option to copy magnet URI
- Fix md5sums in torrent files breaking file listing (#3388)
- Add country flag alt/title for accessibility

### Console UI

- Fix allowing use of windows-curses on Windows
- Fix hostlist status lookup errors
- Fix AttributeError setting config values
- Fix setting 'Skip' priority

### Core

- Add workaround libtorrent 2.0 file_progress error
- Fix allow enabling any plugin Python version
- Export torrent get_magnet_uri method
- Fix loading magnet with resume_data and no metadata (#3478)
- Fix httpdownloader reencoding torrent file downloads (#3440)
- Fix lt listen_interfaces not comma-separated (#3337)
- Fix unable to remove magnet with delete_copies enabled (#3325)
- Fix Python 3.8 compatibility
- Fix loading config with double-quotes in string
- Fix pickle loading non-ascii state error (#3298)
- Fix creation of pidfile via command option
- Fix for peer.client UnicodeDecodeError
- Fix show_file unhandled dbus error

### Documentation

- Add How-to guides about services.

### Stats plugin

- Fix constant session status key warnings
- Fix cairo error

### Notifications plugin

- Fix email KeyError with status name
- Fix unhandled TypeErrors on Python 3

### Autoadd plugin

- Fix magnet missing applied labels

### Execute plugin

- Fix failing to run on Windows (#3439)

## 2.0.3 (2019-06-12)

### Gtk UI

- Fix errors running on Wayland (#3265).
- Fix Peers Tab tooltip and context menu errors (#3266).

### Web UI

- Fix TypeError in Peers Tab setting country flag.
- Fix reverse proxy header TypeError (#3260).
- Fix request.base 'idna' codec error (#3261).
- Fix unable to change password (#3262).

### Extractor plugin

- Fix potential error starting plugin.

### Documentation

- Fix macOS install typo.
- Fix Windows install instructions.

## 2.0.2 (2019-06-08)

### Packaging

- Add systemd deluged and deluge-web service files to package tarball (#2034)

### Core

- Fix Python 2 compatibility issue with SimpleNamespace.

## 2.0.1 (2019-06-07)

### Packaging

- Fix `setup.py` build error without git installed.

## 2.0.0 (2019-06-06)

### Codebase

- Ported to Python 3

### Core

- Improved Logging
- Removed the AutoAdd feature on the core. It's now handled with the AutoAdd
  plugin, which is also shipped with Deluge, and it does a better job and
  now, it even supports multiple users perfectly.
- Authentication/Permission exceptions are now sent to clients and recreated
  there to allow acting upon them.
- Updated SSL/TLS Protocol parameters for better security.
- Make the distinction between adding to the session new unmanaged torrents
  and torrents loaded from state. This will break backwards compatibility.
- Pass a copy of an event instead of passing the event arguments to the
  event handlers. This will break backwards compatibility.
- Allow changing ownership of torrents.
- File modifications on the auth file are now detected and when they happen,
  the file is reloaded. Upon finding an old auth file with an old format, an
  upgrade to the new format is made, file saved, and reloaded.
- Authentication no longer requires a username/password. If one or both of
  these is missing, an authentication error will be sent to the client
  which should then ask the username/password to the user.
- Implemented sequential downloads.
- Provide information about a torrent's pieces states
- Add Option To Specify Outgoing Connection Interface.
- Fix potential for host_id collision when creating hostlist entries.

### Gtk UI

- Ported to GTK3 (3rd-party plugins will need updated).
- Allow changing ownership of torrents.
- Host entries in the Connection Manager UI are now editable.
- Implemented sequential downloads UI handling.
- Add optional pieces bar instead of a regular progress bar in torrent status tab.
- Make torrent opening compatible with all Unicode paths.
- Fix magnet association button on Windows.
- Add keyboard shortcuts for changing queue position:
  - Up: `Ctrl+Alt+Up`
  - Down: `Ctrl+Alt+Down`
  - Top: `Ctrl+Alt+Shift+Up`
  - Bottom: `Ctrl+Alt+Shift+Down`

### Web UI

- Server (deluge-web) now daemonizes by default, use '-d' or '--do-not-daemonize' to disable.
- Fixed the '--base' option to work for regular use, not just with reverse proxies.

### Blocklist Plugin

- Implemented whitelist support to both core and GTK UI.
- Implemented IP filter cleaning before each update. Restarting the deluge
  daemon is no longer needed.
- If "check_after_days" is 0(zero), the timer is not started anymore. It
  would keep updating one call after the other. If the value changed, the
  timer is now stopped and restarted using the new value.

ChangeLog (normal file) | 50

@@ -0,0 +1,50 @@
=== Deluge 2.0 (In Development) ===

* Improved Logging
* Removed the AutoAdd feature on the core. It's now handled with the AutoAdd
  plugin, which is also shipped with Deluge, and it does a better job and
  now, it even supports multiple users perfectly.
* Authentication/Permission exceptions are now sent to clients and recreated
  there to allow acting upon them.
* Enforced the use of the "deluge.plugins" namespace to reduce package
  names clashing beetween regular packages and deluge plugins.

==== Core ====
* Make the distinction between adding to the session new unmanaged torrents
  and torrents loaded from state. This will break backwards compatability.
* Pass a copy of an event instead of passing the event arguments to the
  event handlers. This will break backwards compatability.
* Allow changing ownership of torrents.
* File modifications on the auth file are now detected and when they happen,
  the file is reloaded. Upon finding an old auth file with an old format, an
  upgrade to the new format is made, file saved, and reloaded.
* Authentication no longer requires a username/password. If one or both of
  these is missing, an authentication error will be sent to the client
  which sould then ask the username/password to the user.
* Implemented sequential downloads.
* Provide information about a torrent's pieces states

==== GtkUI ====
* Allow changing ownership of torrents.
* Host entries in the Connection Manager UI are now editable.
* Implemented sequential downloads UI handling.
* Add optional pieces bar instead of a regular progress bar in torrent status tab.
* Make torrent opening compatible with all unicode paths.
* Fix magnet association button on Windows.
* Add keyboard shortcuts for changing queue position:
  - Up: Ctrl+Alt+Up
  - Down: Ctrl+Alt+Down
  - Top: Ctrl+Alt+Shift+Up
  - Bottom: Ctrl+Alt+Shift+Down

==== WebUI ====
* Server (deluge-web) now daemonizes by default, use '-d' or '--do-not-daemonize' to disable.
* Fixed the '--base' option to work for regular use, not just with reverse proxies.

==== Blocklist Plugin ====
* Implemented whitelist support to both core and GTK UI.
* Implemented ip filter cleaning before each update. Restarting the deluge
  daemon is no longer needed.
* If "check_after_days" is 0(zero), the timer is not started anymore. It
  would keep updating one call after the other. If the value changed, the
  timer is now stopped and restarted using the new value.

DEPENDS (normal file) | 29

@@ -0,0 +1,29 @@
=== Core ===
* libtorrent (rasterbar) >= 1.1.1
* python >= 2.7.7
* setuptools
* twisted >= 11.1
* pyopenssl
* pyxdg
* chardet
* gettext
* python-geoip (optional)
* geoip-database (optional)
* setproctitle (optional)
* pillow (optional)
* py2-ipaddress (optional, required for Windows IPv6)
* rencode >= 1.0.2 (optional), python port bundled.


=== Gtk UI ===
* pygtk >= 2.16
* librsvg
* xdg-utils
* intltool
* python-notify (optional)
* pygame (optional)
* python-appindicator (optional)

=== Web UI ===
* mako
* slimit (optional), minifies JS files.

DEPENDS.md | 100

@@ -1,100 +0,0 @@
# Deluge dependencies

The following are required to install and run Deluge. They are separated into
sections to distinguish the precise requirements for each module.

All modules will require the [common](#common) section dependencies.

## Prerequisite

- [Python] _>= 3.6_

## Build

- [setuptools]
- [intltool] - Optional: Desktop file translation for \*nix.
- [closure-compiler] - Minify javascript (alternative is [rjsmin])

## Common

- [Twisted] _>= 17.1_ - Use `TLS` extras for `service_identity` and `idna`.
- [OpenSSL] _>= 1.0.1_
- [pyOpenSSL]
- [rencode] _>= 1.0.2_ - Encoding library.
- [PyXDG] - Access freedesktop.org standards for \*nix.
- [xdg-utils] - Provides xdg-open for \*nix.
- [zope.interface]
- [chardet] - Optional: Encoding detection.
- [setproctitle] - Optional: Renaming processes.
- [Pillow] - Optional: Support for resizing tracker icons.
- [dbus-python] - Optional: Show item location in filemanager.
- [ifaddr] - Optional: Verify network interfaces.

### Linux and BSD

- [distro] - Optional: OS platform information.

### Windows OS

- [pywin32]
- [certifi]

## Core (deluged daemon)

- [libtorrent] _>= 1.2.0_
- [GeoIP] or [pygeoip] - Optional: IP address country lookup. (_Debian: `python-geoip`_)

## GTK UI

- [GTK+] >= 3.10
- [PyGObject]
- [Pycairo]
- [librsvg] _>= 2_
- [ayatanaappindicator3] w/GIR - Optional: Ubuntu system tray icon.

### MacOS

- [GtkOSXApplication]

## Web UI

- [mako]

## Plugins

### Notifications

- [pygame] - Optional: Play sounds
- [libnotify] w/GIR - Optional: Desktop popups.

[python]: https://www.python.org/
[setuptools]: https://setuptools.readthedocs.io/en/latest/
[intltool]: https://freedesktop.org/wiki/Software/intltool/
[closure-compiler]: https://developers.google.com/closure/compiler/
[rjsmin]: https://pypi.org/project/rjsmin/
[openssl]: https://www.openssl.org/
[pyopenssl]: https://pyopenssl.org
[twisted]: https://twistedmatrix.com
[pillow]: https://pypi.org/project/Pillow/
[libtorrent]: https://libtorrent.org/
[zope.interface]: https://pypi.org/project/zope.interface/
[distro]: https://github.com/nir0s/distro
[pywin32]: https://github.com/mhammond/pywin32
[certifi]: https://pypi.org/project/certifi/
[dbus-python]: https://pypi.org/project/dbus-python/
[setproctitle]: https://pypi.org/project/setproctitle/
[gtkosxapplication]: https://github.com/jralls/gtk-mac-integration
[chardet]: https://chardet.github.io/
[rencode]: https://github.com/aresch/rencode
[pyxdg]: https://www.freedesktop.org/wiki/Software/pyxdg/
[xdg-utils]: https://www.freedesktop.org/wiki/Software/xdg-utils/
[gtk+]: https://www.gtk.org/
[pycairo]: https://cairographics.org/pycairo/
[pygobject]: https://pygobject.readthedocs.io/en/latest/
[geoip]: https://pypi.org/project/GeoIP/
[mako]: https://www.makotemplates.org/
[pygame]: https://www.pygame.org/
[libnotify]: https://developer.gnome.org/libnotify/
[ayatanaappindicator3]: https://lazka.github.io/pgi-docs/AyatanaAppIndicator3-0.1/index.html
[librsvg]: https://wiki.gnome.org/action/show/Projects/LibRsvg
[ifaddr]: https://pypi.org/project/ifaddr/

MANIFEST.in | 20

@@ -1,29 +1,23 @@
include *.md
include AUTHORS
include LICENSE
include RELEASE-VERSION
include msgfmt.py
include minify_web_js.py
include version.py
include gen_web_gettext.py
include AUTHORS ChangeLog DEPENDS LICENSE RELEASE-VERSION README.rst
include msgfmt.py minify_web_js.py version.py
exclude setup.cfg

graft docs/man
graft packaging/systemd

include deluge/i18n/*.po
recursive-exclude deluge/i18n *.mo
recursive-exclude deluge/i18n LC_MESSAGES *.mo

graft deluge/plugins
recursive-exclude deluge/plugins create_dev_link.sh *.pyc *.egg
prune deluge/plugins/*/build
prune deluge/plugins/*/*.egg-info

graft deluge/tests/
recursive-exclude deluge/tests *.pyc
graft deluge/tests/data
graft deluge/tests/twisted

graft deluge/ui/data
recursive-exclude deluge/ui/data *.desktop *.xml
graft deluge/ui/gtk3/glade
graft deluge/ui/gtkui/glade

include deluge/ui/web/index.html
include deluge/ui/web/css/*.css

README.md | 71

@@ -1,71 +0,0 @@
# Deluge BitTorrent Client

[![build-status]][github-ci] [![docs-status]][rtd-deluge]

Deluge is a BitTorrent client that utilizes a daemon/client model.
It has various user interfaces available such as the GTK-UI, Web-UI and
Console-UI. It uses [libtorrent][lt] at its core to handle the BitTorrent
protocol.

## Install

From [PyPi](https://pypi.org/project/deluge):

    pip install deluge

with all optional dependencies:

    pip install deluge[all]

From source code:

    pip install .

with all optional dependencies:

    pip install .[all]

See [DEPENDS](DEPENDS.md) and [Installing/Source] for dependency details.

## Usage

The various user-interfaces and Deluge daemon can be started with the following commands.

Use the `--help` option for further command options.

### Gtk UI

`deluge` or `deluge-gtk`

### Console UI

`deluge-console`

### Web UI

`deluge-web`

Open http://localhost:8112 with default password `deluge`.

### Daemon

`deluged`

See the [Thinclient guide] to connect to the daemon from another computer.

## Contact

- [Homepage](https://deluge-torrent.org)
- [User guide][user guide]
- [Forum](https://forum.deluge-torrent.org)
- [IRC Libera.Chat #deluge](irc://irc.libera.chat/deluge)
- [Discord](https://discord.gg/nwaHSE6tqn)

[user guide]: https://dev.deluge-torrent.org/wiki/UserGuide
[thinclient guide]: https://dev.deluge-torrent.org/wiki/UserGuide/ThinClient
[installing/source]: https://dev.deluge-torrent.org/wiki/Installing/Source
[build-status]: https://github.com/deluge-torrent/deluge/actions/workflows/ci.yml/badge.svg?branch=develop "CI"
[github-ci]: https://github.com/deluge-torrent/deluge/actions/workflows/ci.yml
[docs-status]: https://readthedocs.org/projects/deluge/badge/?version=latest
[rtd-deluge]: https://deluge.readthedocs.io/en/latest/?badge=latest "Documentation Status"
[lt]: https://libtorrent.org

README.rst (normal file) | 68

@@ -0,0 +1,68 @@
=========================
Deluge BitTorrent Client
=========================

|build-status| |docs|

Homepage: http://deluge-torrent.org

Authors:
    Andrew Resch
    Damien Churchill

For contributors and past developers see:
    AUTHORS

==========================
Installation Instructions:
==========================

For detailed instructions see: http://dev.deluge-torrent.org/wiki/Installing/Source

Ensure build dependencies are installed, see DEPENDS for a full listing.

Build and install by running::

    $ python setup.py build
    $ sudo python setup.py install

================
Contact/Support:
================

:Forum: http://forum.deluge-torrent.org
:IRC Channel: #deluge on irc.freenode.net

===
FAQ
===

For the full FAQ see: http://dev.deluge-torrent.org/wiki/Faq

How to start the various user-interfaces:
    Gtk::

        deluge or deluge-gtk

    Console::

        deluge-console

    Web::

        deluge-web
    Go to http://localhost:8112/ default-password = "deluge"

How do I start the daemon?:
    deluged

I can't connect to the daemon from another machine:
    See: http://dev.deluge-torrent.org/wiki/UserGuide/ThinClient


.. |build-status| image:: https://travis-ci.org/deluge-torrent/deluge.svg
    :target: https://travis-ci.org/deluge-torrent/deluge

.. |docs| image:: https://readthedocs.org/projects/deluge/badge/?version=develop
    :target: https://readthedocs.org/projects/deluge/?badge=develop
    :alt: Documentation Status

(unnamed Python stub file)

@@ -1,6 +0,0 @@
from twisted.web.http import Request

__request__: Request

def _(string: str) -> str: ...
def _n(string: str) -> str: ...

deluge/__init__.py

@@ -1 +1,7 @@
"""Deluge"""
from __future__ import unicode_literals

# this is a namespace package
import pkg_resources

pkg_resources.declare_namespace(__name__)

deluge/__rpcapi.py (normal file) | 33

@@ -0,0 +1,33 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Damien Churchill <damoxc@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#

from __future__ import unicode_literals

from deluge.core.core import Core
from deluge.core.daemon import Daemon


class RpcApi(object):
    pass


def scan_for_methods(obj):
    methods = {
        '__doc__': 'Methods available in %s' % obj.__name__.lower()
    }
    for d in dir(obj):
        if not hasattr(getattr(obj, d), '_rpcserver_export'):
            continue
        methods[d] = getattr(obj, d)
    cobj = type(obj.__name__.lower(), (object,), methods)
    setattr(RpcApi, obj.__name__.lower(), cobj)


scan_for_methods(Core)
scan_for_methods(Daemon)

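The scan_for_methods() helper above only collects callables carrying an `_rpcserver_export` attribute (Deluge's `export` decorator in deluge.core.rpcserver sets this marker). A minimal sketch of that attribute-based scan, using made-up names (`noop_export`, `FakeCore`) rather than real Deluge classes:

    # Hypothetical illustration of attribute-based export scanning; the names
    # below are invented for this sketch and are not part of Deluge itself.
    def noop_export(func):
        func._rpcserver_export = True  # same marker scan_for_methods() checks
        return func


    class FakeCore:
        @noop_export
        def get_session_status(self):
            return {}

        def _internal_helper(self):  # no marker, so it is skipped
            pass


    exported = [
        name
        for name in dir(FakeCore)
        if hasattr(getattr(FakeCore, name), '_rpcserver_export')
    ]
    print(exported)  # ['get_session_status']
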
deluge/_libtorrent.py

@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
#

@@ -14,23 +15,16 @@ Example:
    >>> from deluge._libtorrent import lt

"""
from __future__ import unicode_literals

from deluge.common import VersionSplit, get_version
from deluge.error import LibtorrentImportError

try:
    import deluge.libtorrent as lt
except ImportError:
    try:
        import libtorrent as lt
    except ImportError as ex:
        raise LibtorrentImportError('No libtorrent library found: %s' % (ex))

REQUIRED_VERSION = '1.1.2.0'

REQUIRED_VERSION = '1.2.0.0'
LT_VERSION = lt.__version__

if VersionSplit(LT_VERSION) < VersionSplit(REQUIRED_VERSION):
    raise LibtorrentImportError(
        f'Deluge {get_version()} requires libtorrent >= {REQUIRED_VERSION}'
    )
if VersionSplit(lt.__version__) < VersionSplit(REQUIRED_VERSION):
    raise ImportError('Deluge %s requires libtorrent >= %s' % (get_version(), REQUIRED_VERSION))

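As the module docstring above indicates, the rest of the codebase imports libtorrent through this wrapper so the import fallback and version check run in one place. A minimal usage sketch, mirroring the check the CI workflow runs (the printed version is just an example and depends on the installed libtorrent):

    # Import libtorrent via Deluge's wrapper; this raises LibtorrentImportError
    # if the library is missing or older than REQUIRED_VERSION.
    from deluge._libtorrent import lt

    print(lt.__version__)  # e.g. '2.0.7.0'
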
deluge/bencode.py

@@ -9,7 +9,11 @@
# License.

# Written by Petru Paler
# Updated by Calum Lind to support Python 3.
# Updated by Calum Lind to support both Python 2 and Python 3.

from sys import version_info

PY2 = version_info.major == 2


class BTFailure(Exception):

@@ -77,14 +81,15 @@ decode_func[b'9'] = decode_string

def bdecode(x):
    try:
        r, __ = decode_func[x[0:1]](x, 0)
    except (LookupError, TypeError, ValueError):
        r, l = decode_func[x[0:1]](x, 0)
    except (IndexError, KeyError, ValueError):
        raise BTFailure('Not a valid bencoded string')
    else:
        return r


class Bencached:
class Bencached(object):

    __slots__ = ['bencoded']

    def __init__(self, s):

@@ -104,7 +109,7 @@ def encode_bool(x, r):


def encode_string(x, r):
    encode_bytes(x.encode('utf8'), r)
    encode_string(x.encode('utf8'), r)


def encode_bytes(x, r):

@@ -121,10 +126,6 @@ def encode_list(x, r):
def encode_dict(x, r):
    r.append(DICT_DELIM)
    for k, v in sorted(x.items()):
        try:
            k = k.encode('utf8')
        except AttributeError:
            pass
        r.extend((str(len(k)).encode('utf8'), BYTE_SEP, k))
        encode_func[type(v)](v, r)
    r.append(END_DELIM)

@@ -139,6 +140,10 @@ encode_func[dict] = encode_dict
encode_func[bool] = encode_bool
encode_func[str] = encode_string
encode_func[bytes] = encode_bytes
if PY2:
    encode_func[long] = encode_int
    encode_func[str] = encode_bytes
    encode_func[unicode] = encode_string


def bencode(x):

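For context, the module being changed here is Deluge's plain bencoding implementation. A small round-trip sketch against the Python 3 side of the diff (the sample keys and values are arbitrary; note that decoded strings come back as bytes):

    from deluge.bencode import bdecode, bencode

    raw = bencode({'announce': 'http://tracker.example/announce', 'length': 42})
    # bencode() returns bytes, e.g. b'd8:announce31:http://...6:lengthi42ee'

    decoded = bdecode(raw)
    # On Python 3 keys and string values decode back as bytes:
    # {b'announce': b'http://tracker.example/announce', b'length': 42}
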
deluge/common.py | 682 (file diff suppressed because it is too large)

deluge/component.py

@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2010 Andrew Resch <andrewresch@gmail.com>
#

@@ -6,6 +7,8 @@
# See LICENSE for more details.
#

from __future__ import unicode_literals

import logging
import traceback
from collections import defaultdict

@@ -14,6 +17,8 @@ from twisted.internet import reactor
from twisted.internet.defer import DeferredList, fail, maybeDeferred, succeed
from twisted.internet.task import LoopingCall, deferLater

from deluge.common import PY2

log = logging.getLogger(__name__)


@@ -22,14 +27,15 @@ class ComponentAlreadyRegistered(Exception):


class ComponentException(Exception):

    def __init__(self, message, tb):
        super().__init__(message)
        super(ComponentException, self).__init__(message)
        self.message = message
        self.tb = tb

    def __str__(self):
        s = super().__str__()
        return '{}\n{}'.format(s, ''.join(self.tb))
        s = super(ComponentException, self).__str__()
        return '%s\n%s' % (s, ''.join(self.tb))

    def __eq__(self, other):
        if isinstance(other, self.__class__):

@@ -41,7 +47,7 @@ class ComponentException(Exception):
        return not self.__eq__(other)


class Component:
class Component(object):
    """Component objects are singletons managed by the :class:`ComponentRegistry`.

    When a new Component object is instantiated, it will be automatically

@@ -59,16 +65,11 @@ class Component:
        Deluge core.

    **update()** - This method is called every 1 second by default while the
                   Component is in a *Started* state. The interval can be
                   Componented is in a *Started* state. The interval can be
                   specified during instantiation. The update() timer can be
                   paused by instructing the :class:`ComponentRegistry` to pause
                   this Component.

    **pause()** - This method is called when the component is being paused.

    **resume()** - This method is called when the component resumes from a Paused
                   state.

    **shutdown()** - This method is called when the client is exiting. If the
                     Component is in a "Started" state when this is called, a
                     call to stop() will be issued prior to shutdown().

@@ -85,14 +86,13 @@ class Component:

    **Stopped** - The Component has either been stopped or has yet to be started.

    **Stopping** - The Component has had its stop method called, but it hasn't
    **Stopping** - The Component has had it's stop method called, but it hasn't
                   fully stopped yet.

    **Paused** - The Component has had its update timer stopped, but will
    **Paused** - The Component has had it's update timer stopped, but will
                 still be considered in a Started state.

    """

    def __init__(self, name, interval=1, depend=None):
        """Initialize component.

@@ -116,6 +116,7 @@
        _ComponentRegistry.deregister(self)

    def _component_start_timer(self):
        if hasattr(self, 'update'):
            self._component_timer = LoopingCall(self.update)
            self._component_timer.start(self._component_interval)

@@ -133,23 +134,22 @@ class Component:
            return fail(result)

        if self._component_state == 'Stopped':
            if hasattr(self, 'start'):
                self._component_state = 'Starting'
                d = deferLater(reactor, 0, self.start)
                d.addCallbacks(on_start, on_start_fail)
                self._component_starting_deferred = d
            else:
                d = maybeDeferred(on_start, None)
        elif self._component_state == 'Starting':
            return self._component_starting_deferred
        elif self._component_state == 'Started':
            d = succeed(True)
        else:
            d = fail(
                ComponentException(
                    'Trying to start component "%s" but it is '
                    'not in a stopped state. Current state: %s'
                    % (self._component_name, self._component_state),
                    traceback.format_stack(limit=4),
                )
            )
            d = fail(ComponentException('Trying to start component "%s" but it is '
                                        'not in a stopped state. Current state: %s' %
                                        (self._component_name, self._component_state),
                                        traceback.format_stack(limit=4)))
        return d

    def _component_stop(self):

@@ -166,11 +166,14 @@ class Component:
            return result

        if self._component_state != 'Stopped' and self._component_state != 'Stopping':
            if hasattr(self, 'stop'):
                self._component_state = 'Stopping'
                d = maybeDeferred(self.stop)
                d.addCallback(on_stop)
                d.addErrback(on_stop_fail)
                self._component_stopping_deferred = d
            else:
                d = maybeDeferred(on_stop, None)

        if self._component_state == 'Stopping':
            return self._component_stopping_deferred

@@ -180,47 +183,41 @@ class Component:
    def _component_pause(self):
        def on_pause(result):
            self._component_state = 'Paused'
            if self._component_timer and self._component_timer.running:
                self._component_timer.stop()

        if self._component_state == 'Started':
            d = maybeDeferred(self.pause)
            if self._component_timer and self._component_timer.running:
                d = maybeDeferred(self._component_timer.stop)
                d.addCallback(on_pause)
            else:
                d = succeed(None)
        elif self._component_state == 'Paused':
            d = succeed(None)
        else:
            d = fail(
                ComponentException(
                    'Trying to pause component "%s" but it is '
                    'not in a started state. Current state: %s'
                    % (self._component_name, self._component_state),
                    traceback.format_stack(limit=4),
                )
            )
            d = fail(ComponentException('Trying to pause component "%s" but it is '
                                        'not in a started state. Current state: %s' %
                                        (self._component_name, self._component_state),
                                        traceback.format_stack(limit=4)))
        return d

    def _component_resume(self):
        def on_resume(result):
            self._component_state = 'Started'
            self._component_start_timer()

        if self._component_state == 'Paused':
            d = maybeDeferred(self.resume)
            d = maybeDeferred(self._component_start_timer)
            d.addCallback(on_resume)
        else:
            d = fail(
                ComponentException(
                    'Trying to resume component "%s" but it is '
                    'not in a paused state. Current state: %s'
                    % (self._component_name, self._component_state),
                    traceback.format_stack(limit=4),
                )
            )
            d = fail(ComponentException('Trying to resume component "%s" but it is '
                                        'not in a paused state. Current state: %s' %
                                        (self._component_name, self._component_state),
                                        traceback.format_stack(limit=4)))
        return d

    def _component_shutdown(self):
        def on_stop(result):
            if hasattr(self, 'shutdown'):
                return maybeDeferred(self.shutdown)
            return succeed(None)

        d = self._component_stop()
        d.addCallback(on_stop)

@@ -241,19 +238,12 @@ class Component:
    def shutdown(self):
        pass

    def pause(self):
        pass

    def resume(self):
        pass


class ComponentRegistry:
class ComponentRegistry(object):
    """The ComponentRegistry holds a list of currently registered :class:`Component` objects.

    It is used to manage the Components by starting, stopping, pausing and shutting them down.
    """

    def __init__(self):
        self.components = {}
        # Stores all of the components that are dependent on a particular component

@@ -274,9 +264,7 @@ class ComponentRegistry:
        """
        name = obj._component_name
        if name in self.components:
            raise ComponentAlreadyRegistered(
                'Component already registered with name %s' % name
            )
            raise ComponentAlreadyRegistered('Component already registered with name %s' % name)

        self.components[obj._component_name] = obj
        if obj._component_depend:

@@ -291,8 +279,7 @@ class ComponentRegistry:
            obj (Component): a component object to deregister

        Returns:
            Deferred: a deferred object that will fire once the Component has been
                successfully deregistered
            Deferred: a deferred object that will fire once the Component has been sucessfully deregistered

        """
        if obj in self.components.values():

@@ -302,7 +289,6 @@ class ComponentRegistry:
        def on_stop(result, name):
            # Component may have been removed, so pop to ensure it doesn't fail
            self.components.pop(name, None)

            return d.addCallback(on_stop, obj._component_name)
        else:
            return succeed(None)

@@ -323,7 +309,7 @@ class ComponentRegistry:
        # Start all the components if names is empty
        if not names:
            names = list(self.components)
        elif isinstance(names, str):
        elif isinstance(names, str if not PY2 else basestring):
            names = [names]

        def on_depends_started(result, name):

@@ -357,7 +343,7 @@ class ComponentRegistry:
        """
        if not names:
            names = list(self.components)
        elif isinstance(names, str):
        elif isinstance(names, str if not PY2 else basestring):
            names = [names]

        def on_dependents_stopped(result, name):

@@ -372,9 +358,7 @@ class ComponentRegistry:
        if name in self.components:
            if name in self.dependents:
                # If other components depend on this component, stop them first
                d = self.stop(self.dependents[name]).addCallback(
                    on_dependents_stopped, name
                )
                d = self.stop(self.dependents[name]).addCallback(on_dependents_stopped, name)
                deferreds.append(d)
                stopped_in_deferred.update(self.dependents[name])
            else:

@@ -397,7 +381,7 @@ class ComponentRegistry:
        """
        if not names:
            names = list(self.components)
        elif isinstance(names, str):
        elif isinstance(names, str if not PY2 else basestring):
            names = [names]

        deferreds = []

@@ -423,7 +407,7 @@ class ComponentRegistry:
        """
        if not names:
            names = list(self.components)
        elif isinstance(names, str):
        elif isinstance(names, str if not PY2 else basestring):
            names = [names]

        deferreds = []

@@ -444,11 +428,8 @@ class ComponentRegistry:
        Deferred: Fired once all Components have been successfully shut down.

        """

        def on_stopped(result):
            return DeferredList(
                [comp._component_shutdown() for comp in list(self.components.values())]
            )
            return DeferredList([comp._component_shutdown() for comp in self.components.values()])

        return self.stop(list(self.components)).addCallback(on_stopped)

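The lifecycle described in the Component docstring above (registration on instantiation, start/update/stop driven by the registry, everything returning Twisted deferreds) can be seen end to end in a small sketch. This is an illustrative subclass written for this note, not code from the repository; it assumes the develop-era API shown in the diff and the module-level start()/stop() helpers that deluge.component exposes:

    from twisted.internet import reactor

    import deluge.component as component


    class Monitor(component.Component):
        """Toy component: update() fires roughly once a second while Started."""

        def __init__(self):
            super().__init__('Monitor', interval=1)
            self.ticks = 0

        def start(self):
            print('Monitor starting')

        def update(self):
            self.ticks += 1
            print('tick', self.ticks)

        def stop(self):
            print('Monitor stopped after', self.ticks, 'ticks')


    Monitor()              # instantiation registers the singleton with the registry
    component.start()      # returns a Deferred; moves components to Started
    reactor.callLater(3.5, component.stop)   # stop() also returns a Deferred
    reactor.callLater(4.0, reactor.stop)
    reactor.run()
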
246
deluge/config.py
246
deluge/config.py
|
@ -1,3 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2008 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -38,67 +39,71 @@ this can only be done for the 'config file version' and not for the 'format'
|
|||
version as this will be done internally.
|
||||
|
||||
"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import cPickle as pickle
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import pickle
|
||||
import shutil
|
||||
from codecs import getwriter
|
||||
from tempfile import NamedTemporaryFile
|
||||
from io import open
|
||||
|
||||
from deluge.common import JSON_FORMAT, get_default_config_dir
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
callLater = None # Necessary for the config tests
|
||||
|
||||
|
||||
def find_json_objects(text, decoder=json.JSONDecoder()):
|
||||
"""Find json objects in text.
|
||||
def prop(func):
|
||||
"""Function decorator for defining property attributes
|
||||
|
||||
The decorated function is expected to return a dictionary
|
||||
containing one or more of the following pairs:
|
||||
|
||||
fget - function for getting attribute value
|
||||
fset - function for setting attribute value
|
||||
fdel - function for deleting attribute
|
||||
|
||||
This can be conveniently constructed by the locals() builtin
|
||||
function; see:
|
||||
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/205183
|
||||
"""
|
||||
return property(doc=func.__doc__, **func())
|
||||
|
||||
|
||||
def find_json_objects(s):
|
||||
"""Find json objects in a string.
|
||||
|
||||
Args:
|
||||
text (str): The text to find json objects within.
|
||||
s (str): the string to find json objects in
|
||||
|
||||
Returns:
|
||||
list: A list of tuples containing start and end locations of json
|
||||
objects in the text. e.g. [(start, end), ...]
|
||||
|
||||
objects in string `s`. e.g. [(start, end), ...]
|
||||
|
||||
"""
|
||||
objects = []
|
||||
offset = 0
|
||||
while True:
|
||||
try:
|
||||
start = text.index('{', offset)
|
||||
except ValueError:
|
||||
break
|
||||
opens = 0
|
||||
start = s.find('{')
|
||||
offset = start
|
||||
|
||||
try:
|
||||
__, index = decoder.raw_decode(text[start:])
|
||||
except json.decoder.JSONDecodeError:
|
||||
offset = start + 1
|
||||
else:
|
||||
offset = start + index
|
||||
objects.append((start, offset))
|
||||
if start < 0:
|
||||
return []
|
||||
|
||||
for index, c in enumerate(s[offset:]):
|
||||
if c == '{':
|
||||
opens += 1
|
||||
elif c == '}':
|
||||
opens -= 1
|
||||
if opens == 0:
|
||||
objects.append((start, index + offset + 1))
|
||||
start = index + offset + 1
|
||||
|
||||
return objects
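The new scanner above leans on json.JSONDecoder.raw_decode instead of counting braces by hand. A rough standalone sketch of the same technique (illustrative only, not Deluge code):

    import json

    def scan_json_objects(text, decoder=json.JSONDecoder()):
        # Return (start, end) spans of the JSON objects found in text.
        objects = []
        offset = 0
        while True:
            try:
                start = text.index('{', offset)
            except ValueError:
                break
            try:
                _, end = decoder.raw_decode(text[start:])
            except json.JSONDecodeError:
                offset = start + 1  # not a valid object here, keep scanning
            else:
                offset = start + end
                objects.append((start, offset))
        return objects

    # Two concatenated objects, as written by Config.save():
    print(scan_json_objects('{"format": 1}{"file": 2}'))  # [(0, 13), (13, 24)]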
|
||||
|
||||
|
||||
def cast_to_existing_type(value, old_value):
|
||||
"""Attempt to convert new value type to match old value type"""
|
||||
types_match = isinstance(old_value, (type(None), type(value)))
|
||||
if value is not None and not types_match:
|
||||
old_type = type(old_value)
|
||||
# Skip convert to bytes since requires knowledge of encoding and value should
|
||||
# be unicode anyway.
|
||||
if old_type is bytes:
|
||||
return value
|
||||
|
||||
return old_type(value)
|
||||
|
||||
return value
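For illustration only, the helper above coerces a new value to the stored value's type, and skips conversion when the new value is None or the stored value is bytes:

    cast_to_existing_type('58846', 58845)   # -> 58846 (int, to match the old int value)
    cast_to_existing_type(1.5, 2)           # -> 1 (converted with int())
    cast_to_existing_type(None, 'label')    # -> None (None is always accepted)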
|
||||
|
||||
|
||||
class Config:
|
||||
class Config(object):
|
||||
"""This class is used to access/create/modify config files.
|
||||
|
||||
Args:
|
||||
|
@ -108,26 +113,18 @@ class Config:
|
|||
file_version (int): The file format for the default config values when creating
|
||||
a fresh config. This value should be increased whenever a new migration function is
|
||||
setup to convert old config files. (default: 1)
|
||||
log_mask_funcs (dict): A dict of key:function, used to mask sensitive
|
||||
key values (e.g. passwords) when logging is enabled.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
filename,
|
||||
defaults=None,
|
||||
config_dir=None,
|
||||
file_version=1,
|
||||
log_mask_funcs=None,
|
||||
):
|
||||
def __init__(self, filename, defaults=None, config_dir=None, file_version=1):
|
||||
self.__config = {}
|
||||
self.__set_functions = {}
|
||||
self.__change_callbacks = []
|
||||
self.__log_mask_funcs = log_mask_funcs if log_mask_funcs else {}
|
||||
|
||||
# These hold the version numbers and they will be set when loaded
|
||||
self.__version = {'format': 1, 'file': file_version}
|
||||
self.__version = {
|
||||
'format': 1,
|
||||
'file': file_version
|
||||
}
|
||||
|
||||
# This will get set with a reactor.callLater whenever a config option
|
||||
# is set.
|
||||
|
@ -135,7 +132,7 @@ class Config:
|
|||
|
||||
if defaults:
|
||||
for key, value in defaults.items():
|
||||
self.set_item(key, value, default=True)
|
||||
self.set_item(key, value)
|
||||
|
||||
# Load the config from file in the config_dir
|
||||
if config_dir:
|
||||
|
@ -145,12 +142,6 @@ class Config:
|
|||
|
||||
self.load()
|
||||
|
||||
def callLater(self, period, func, *args, **kwargs): # noqa: N802 ignore camelCase
|
||||
"""Wrapper around reactor.callLater for test purpose."""
|
||||
from twisted.internet import reactor
|
||||
|
||||
return reactor.callLater(period, func, *args, **kwargs)
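One motivation for this wrapper is that tests can stub it out instead of touching the global reactor. A hedged sketch, assuming pytest's monkeypatch fixture and an importable deluge.config:

    from deluge.config import Config

    def test_set_item_saves_immediately(tmp_path, monkeypatch):
        conf = Config('test.conf', defaults={'port': 58846}, config_dir=str(tmp_path))
        # Run "delayed" calls synchronously so the test needs no running reactor.
        monkeypatch.setattr(conf, 'callLater', lambda delay, func, *a, **kw: func(*a, **kw))
        conf['port'] = 58847  # change callbacks and the save timer fire right away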
|
||||
|
||||
def __contains__(self, item):
|
||||
return item in self.__config
|
||||
|
||||
|
@ -159,7 +150,7 @@ class Config:
|
|||
|
||||
return self.set_item(key, value)
|
||||
|
||||
def set_item(self, key, value, default=False):
|
||||
def set_item(self, key, value):
|
||||
"""Sets item 'key' to 'value' in the config dictionary.
|
||||
|
||||
Does not allow changing the item's type unless it is None.
|
||||
|
@ -171,8 +162,6 @@ class Config:
|
|||
key (str): Item to change to change.
|
||||
value (any): The value to change item to, must be same type as what is
|
||||
currently in the config.
|
||||
default (optional, bool): When setting a default value skip func or save
|
||||
callbacks.
|
||||
|
||||
Raises:
|
||||
ValueError: Raised when the type of value is not the same as what is
|
||||
|
@ -185,51 +174,51 @@ class Config:
|
|||
5
|
||||
|
||||
"""
|
||||
if isinstance(value, bytes):
|
||||
value = value.decode()
|
||||
if key not in self.__config:
|
||||
self.__config[key] = value
|
||||
log.debug('Setting key "%s" to: %s (of type: %s)', key, value, type(value))
|
||||
return
|
||||
|
||||
if key in self.__config:
|
||||
try:
|
||||
value = cast_to_existing_type(value, self.__config[key])
|
||||
except ValueError:
|
||||
log.warning('Value Type "%s" invalid for key: %s', type(value), key)
|
||||
raise
|
||||
else:
|
||||
if self.__config[key] == value:
|
||||
return
|
||||
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
if key in self.__log_mask_funcs:
|
||||
value = self.__log_mask_funcs[key](value)
|
||||
log.debug(
|
||||
'Setting key "%s" to: %s (of type: %s)',
|
||||
key,
|
||||
value,
|
||||
type(value),
|
||||
)
|
||||
# Do not allow the type to change unless it is None
|
||||
if value is not None and not isinstance(
|
||||
self.__config[key], type(None)) and not isinstance(self.__config[key], type(value)):
|
||||
try:
|
||||
oldtype = type(self.__config[key])
|
||||
value = oldtype(value)
|
||||
except ValueError:
|
||||
log.warning('Value Type "%s" invalid for key: %s', type(value), key)
|
||||
raise
|
||||
|
||||
if isinstance(value, bytes):
|
||||
value.decode('utf8')
|
||||
|
||||
log.debug('Setting key "%s" to: %s (of type: %s)', key, value, type(value))
|
||||
self.__config[key] = value
|
||||
|
||||
# Skip save or func callbacks if setting default value for keys
|
||||
if default:
|
||||
return
|
||||
|
||||
global callLater
|
||||
if callLater is None:
|
||||
# Must import here and not at the top or it will throw ReactorAlreadyInstalledError
|
||||
from twisted.internet.reactor import callLater # pylint: disable=redefined-outer-name
|
||||
# Run the set_function for this key if any
|
||||
for func in self.__set_functions.get(key, []):
|
||||
self.callLater(0, func, key, value)
|
||||
|
||||
try:
|
||||
|
||||
for func in self.__set_functions[key]:
|
||||
callLater(0, func, key, value)
|
||||
except KeyError:
|
||||
pass
|
||||
try:
|
||||
def do_change_callbacks(key, value):
|
||||
for func in self.__change_callbacks:
|
||||
func(key, value)
|
||||
|
||||
self.callLater(0, do_change_callbacks, key, value)
|
||||
callLater(0, do_change_callbacks, key, value)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# We set the save_timer for 5 seconds if not already set
|
||||
if not self._save_timer or not self._save_timer.active():
|
||||
self._save_timer = self.callLater(5, self.save)
|
||||
self._save_timer = callLater(5, self.save)
|
||||
|
||||
def __getitem__(self, key):
|
||||
"""See get_item """
|
||||
|
@ -305,9 +294,14 @@ class Config:
|
|||
|
||||
del self.__config[key]
|
||||
|
||||
global callLater
|
||||
if callLater is None:
|
||||
# Must import here and not at the top or it will throw ReactorAlreadyInstalledError
|
||||
from twisted.internet.reactor import callLater # pylint: disable=redefined-outer-name
|
||||
|
||||
# We set the save_timer for 5 seconds if not already set
|
||||
if not self._save_timer or not self._save_timer.active():
|
||||
self._save_timer = self.callLater(5, self.save)
|
||||
self._save_timer = callLater(5, self.save)
|
||||
|
||||
def register_change_callback(self, callback):
|
||||
"""Registers a callback function for any changed value.
|
||||
|
@ -353,6 +347,7 @@ class Config:
|
|||
# Run the function now if apply_now is set
|
||||
if apply_now:
|
||||
function(key, self.__config[key])
|
||||
return
|
||||
|
||||
def apply_all(self):
|
||||
"""Calls all set functions.
|
||||
|
@ -395,9 +390,9 @@ class Config:
|
|||
filename = self.__config_file
|
||||
|
||||
try:
|
||||
with open(filename, encoding='utf8') as _file:
|
||||
with open(filename, 'r', encoding='utf8') as _file:
|
||||
data = _file.read()
|
||||
except OSError as ex:
|
||||
except IOError as ex:
|
||||
log.warning('Unable to open config file %s: %s', filename, ex)
|
||||
return
|
||||
|
||||
|
@ -427,25 +422,8 @@ class Config:
|
|||
log.exception(ex)
|
||||
log.warning('Unable to load config file: %s', filename)
|
||||
|
||||
if not log.isEnabledFor(logging.DEBUG):
|
||||
return
|
||||
|
||||
config = self.__config
|
||||
if self.__log_mask_funcs:
|
||||
config = {
|
||||
key: self.__log_mask_funcs[key](config[key])
|
||||
if key in self.__log_mask_funcs
|
||||
else config[key]
|
||||
for key in config
|
||||
}
|
||||
|
||||
log.debug(
|
||||
'Config %s version: %s.%s loaded: %s',
|
||||
filename,
|
||||
self.__version['format'],
|
||||
self.__version['file'],
|
||||
config,
|
||||
)
|
||||
log.debug('Config %s version: %s.%s loaded: %s', filename,
|
||||
self.__version['format'], self.__version['file'], self.__config)
|
||||
|
||||
def save(self, filename=None):
|
||||
"""Save configuration to disk.
|
||||
|
@ -462,7 +440,7 @@ class Config:
|
|||
# Check to see if the current config differs from the one on disk
|
||||
# We will only write a new config file if there is a difference
|
||||
try:
|
||||
with open(filename, encoding='utf8') as _file:
|
||||
with open(filename, 'r', encoding='utf8') as _file:
|
||||
data = _file.read()
|
||||
objects = find_json_objects(data)
|
||||
start, end = objects[0]
|
||||
|
@ -474,40 +452,34 @@ class Config:
|
|||
if self._save_timer and self._save_timer.active():
|
||||
self._save_timer.cancel()
|
||||
return True
|
||||
except (OSError, IndexError) as ex:
|
||||
except (IOError, IndexError) as ex:
|
||||
log.warning('Unable to open config file: %s because: %s', filename, ex)
|
||||
|
||||
# Save the new config and make sure it's written to disk
|
||||
try:
|
||||
with NamedTemporaryFile(
|
||||
prefix=os.path.basename(filename) + '.', delete=False
|
||||
) as _file:
|
||||
filename_tmp = _file.name
|
||||
log.debug('Saving new config file %s', filename_tmp)
|
||||
log.debug('Saving new config file %s', filename + '.new')
|
||||
with open(filename + '.new', 'wb') as _file:
|
||||
json.dump(self.__version, getwriter('utf8')(_file), **JSON_FORMAT)
|
||||
json.dump(self.__config, getwriter('utf8')(_file), **JSON_FORMAT)
|
||||
_file.flush()
|
||||
os.fsync(_file.fileno())
|
||||
except OSError as ex:
|
||||
except IOError as ex:
|
||||
log.error('Error writing new config file: %s', ex)
|
||||
return False
|
||||
|
||||
# Resolve symlinked config files before backing up and saving.
|
||||
filename = os.path.realpath(filename)
|
||||
|
||||
# Make a backup of the old config
|
||||
try:
|
||||
log.debug('Backing up old config file to %s.bak', filename)
|
||||
shutil.move(filename, filename + '.bak')
|
||||
except OSError as ex:
|
||||
except IOError as ex:
|
||||
log.warning('Unable to backup old config: %s', ex)
|
||||
|
||||
# The new config file has been written successfully, so let's move it over
|
||||
# the existing one.
|
||||
try:
|
||||
log.debug('Moving new config file %s to %s', filename_tmp, filename)
|
||||
shutil.move(filename_tmp, filename)
|
||||
except OSError as ex:
|
||||
log.debug('Moving new config file %s to %s..', filename + '.new', filename)
|
||||
shutil.move(filename + '.new', filename)
|
||||
except IOError as ex:
|
||||
log.error('Error moving new config file: %s', ex)
|
||||
return False
|
||||
else:
|
||||
|
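The reworked save() above follows a write-to-temp, fsync, back up, then move sequence. A condensed standalone sketch of that pattern (illustrative only; the text mode and dir= placement are assumptions, the real code streams JSON through codecs.getwriter):

    import os
    import shutil
    from tempfile import NamedTemporaryFile

    def atomic_write(path, data):
        # 1. Write the new content to a temporary file and force it to disk.
        with NamedTemporaryFile('w', prefix=os.path.basename(path) + '.',
                                dir=os.path.dirname(path) or '.', delete=False) as tmp:
            tmp.write(data)
            tmp.flush()
            os.fsync(tmp.fileno())
            tmp_name = tmp.name
        # 2. Keep the previous file as a .bak, if there was one.
        if os.path.isfile(path):
            shutil.move(path, path + '.bak')
        # 3. Move the fully written temp file into place.
        shutil.move(tmp_name, path)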
@ -533,23 +505,16 @@ class Config:
|
|||
raise ValueError('output_version needs to be greater than input_range')
|
||||
|
||||
if self.__version['file'] not in input_range:
|
||||
log.debug(
|
||||
'File version %s is not in input_range %s, ignoring converter function..',
|
||||
self.__version['file'],
|
||||
input_range,
|
||||
)
|
||||
log.debug('File version %s is not in input_range %s, ignoring converter function..',
|
||||
self.__version['file'], input_range)
|
||||
return
|
||||
|
||||
try:
|
||||
self.__config = func(self.__config)
|
||||
except Exception as ex:
|
||||
log.exception(ex)
|
||||
log.error(
|
||||
'There was an exception try to convert config file %s %s to %s',
|
||||
self.__config_file,
|
||||
self.__version['file'],
|
||||
output_version,
|
||||
)
|
||||
log.error('There was an exception try to convert config file %s %s to %s',
|
||||
self.__config_file, self.__version['file'], output_version)
|
||||
raise ex
|
||||
else:
|
||||
self.__version['file'] = output_version
|
||||
|
@ -559,11 +524,12 @@ class Config:
|
|||
def config_file(self):
|
||||
return self.__config_file
|
||||
|
||||
@property
|
||||
def config(self):
|
||||
@prop
|
||||
def config(): # pylint: disable=no-method-argument
|
||||
"""The config dictionary"""
|
||||
def fget(self):
|
||||
return self.__config
|
||||
|
||||
@config.deleter
|
||||
def config(self):
|
||||
def fdel(self):
|
||||
return self.save()
|
||||
return locals()
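The locals()-based prop decorator is replaced by the plain built-in property protocol; a minimal standalone sketch of the new form (not Deluge code):

    class Example:
        def __init__(self):
            self._data = {}

        @property
        def config(self):
            """The config dictionary"""
            return self._data

        @config.deleter
        def config(self):
            # In Config, 'del conf.config' triggers a save(); here it just clears.
            self._data.clear()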
|
||||
@ -1,3 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -6,6 +7,8 @@
|
|||
# See LICENSE for more details.
|
||||
#
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
|
@ -16,7 +19,7 @@ from deluge.config import Config
|
|||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class _ConfigManager:
|
||||
class _ConfigManager(object):
|
||||
def __init__(self):
|
||||
log.debug('ConfigManager started..')
|
||||
self.config_files = {}
|
||||
|
@ -91,12 +94,9 @@ class _ConfigManager:
|
|||
log.debug('Getting config: %s', config_file)
|
||||
# Create the config object if not already created
|
||||
if config_file not in self.config_files:
|
||||
self.config_files[config_file] = Config(
|
||||
config_file,
|
||||
defaults,
|
||||
self.config_files[config_file] = Config(config_file, defaults,
|
||||
config_dir=self.config_directory,
|
||||
file_version=file_version,
|
||||
)
|
||||
file_version=file_version)
|
||||
|
||||
return self.config_files[config_file]
|
||||
|
||||
|
@ -106,9 +106,7 @@ _configmanager = _ConfigManager()
|
|||
|
||||
|
||||
def ConfigManager(config, defaults=None, file_version=1): # NOQA: N802
|
||||
return _configmanager.get_config(
|
||||
config, defaults=defaults, file_version=file_version
|
||||
)
|
||||
return _configmanager.get_config(config, defaults=defaults, file_version=file_version)
|
||||
|
||||
|
||||
def set_config_dir(directory):
|
||||
@ -1,215 +0,0 @@
|
|||
#
|
||||
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
|
||||
# the additional special exception to link portions of this program with the OpenSSL library.
|
||||
# See LICENSE for more details.
|
||||
#
|
||||
import asyncio
|
||||
import tempfile
|
||||
import warnings
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
import pytest_twisted
|
||||
from twisted.internet import reactor
|
||||
from twisted.internet.defer import Deferred, maybeDeferred
|
||||
from twisted.internet.error import CannotListenError, ProcessTerminated
|
||||
from twisted.python.failure import Failure
|
||||
|
||||
import deluge.component as _component
|
||||
import deluge.configmanager
|
||||
from deluge.common import get_localhost_auth
|
||||
from deluge.tests import common
|
||||
from deluge.ui.client import client as _client
|
||||
|
||||
DEFAULT_LISTEN_PORT = 58900
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def listen_port(request):
|
||||
if request and 'daemon' in request.fixturenames:
|
||||
try:
|
||||
return request.getfixturevalue('daemon').listen_port
|
||||
except Exception:
|
||||
pass
|
||||
return DEFAULT_LISTEN_PORT
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_callback():
|
||||
"""Returns a `Mock` object which can be registered as a callback to test against.
|
||||
|
||||
If callback was not called within `timeout` seconds, it will raise a TimeoutError.
|
||||
The returned Mock instance will have a `deferred` attribute which will complete when the callback has been called.
|
||||
"""
|
||||
|
||||
def reset(timeout=0.5, *args, **kwargs):
|
||||
if mock.called:
|
||||
original_reset_mock(*args, **kwargs)
|
||||
if mock.deferred:
|
||||
mock.deferred.cancel()
|
||||
deferred = Deferred(canceller=lambda x: deferred.callback(None))
|
||||
deferred.addTimeout(timeout, reactor)
|
||||
mock.side_effect = lambda *args, **kw: deferred.callback((args, kw))
|
||||
mock.deferred = deferred
|
||||
|
||||
mock = Mock()
|
||||
mock.__qualname__ = 'mock'
|
||||
original_reset_mock = mock.reset_mock
|
||||
mock.reset_mock = reset
|
||||
mock.reset_mock()
|
||||
return mock
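A hedged example of how a test might consume this fixture; the function under test and its on_done argument are hypothetical:

    import pytest_twisted

    @pytest_twisted.ensureDeferred
    async def test_callback_fires(mock_callback):
        start_background_job(on_done=mock_callback)  # hypothetical code under test
        await mock_callback.deferred                 # times out after 0.5s if never called
        mock_callback.assert_called_once()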
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def config_dir(tmp_path):
|
||||
config_dir = tmp_path / 'config'
|
||||
deluge.configmanager.set_config_dir(config_dir)
|
||||
yield config_dir
|
||||
|
||||
|
||||
@pytest_twisted.async_yield_fixture()
|
||||
async def client(request, config_dir, monkeypatch, listen_port):
|
||||
# monkeypatch.setattr(
|
||||
# _client, 'connect', functools.partial(_client.connect, port=listen_port)
|
||||
# )
|
||||
try:
|
||||
username, password = get_localhost_auth()
|
||||
except Exception:
|
||||
username, password = '', ''
|
||||
await _client.connect(
|
||||
'localhost',
|
||||
port=listen_port,
|
||||
username=username,
|
||||
password=password,
|
||||
)
|
||||
yield _client
|
||||
if _client.connected():
|
||||
await _client.disconnect()
|
||||
|
||||
|
||||
@pytest_twisted.async_yield_fixture
|
||||
async def daemon(request, config_dir, tmp_path):
|
||||
listen_port = DEFAULT_LISTEN_PORT
|
||||
logfile = tmp_path / 'daemon.log'
|
||||
|
||||
if hasattr(request.cls, 'daemon_custom_script'):
|
||||
custom_script = request.cls.daemon_custom_script
|
||||
else:
|
||||
custom_script = ''
|
||||
|
||||
for dummy in range(10):
|
||||
try:
|
||||
d, daemon = common.start_core(
|
||||
listen_port=listen_port,
|
||||
logfile=logfile,
|
||||
timeout=5,
|
||||
timeout_msg='Timeout!',
|
||||
custom_script=custom_script,
|
||||
print_stdout=True,
|
||||
print_stderr=True,
|
||||
config_directory=config_dir,
|
||||
)
|
||||
await d
|
||||
except CannotListenError as ex:
|
||||
exception_error = ex
|
||||
listen_port += 1
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
else:
|
||||
break
|
||||
else:
|
||||
raise exception_error
|
||||
daemon.listen_port = listen_port
|
||||
yield daemon
|
||||
try:
|
||||
await daemon.kill()
|
||||
except ProcessTerminated:
|
||||
pass
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def common_fixture(config_dir, request, monkeypatch, listen_port):
|
||||
"""Adds some instance attributes to test classes for backwards compatibility with old testing."""
|
||||
|
||||
def fail(self, reason):
|
||||
if isinstance(reason, Failure):
|
||||
reason = reason.value
|
||||
return pytest.fail(str(reason))
|
||||
|
||||
if request.instance:
|
||||
request.instance.patch = monkeypatch.setattr
|
||||
request.instance.config_dir = config_dir
|
||||
request.instance.listen_port = listen_port
|
||||
request.instance.id = lambda: request.node.name
|
||||
request.cls.fail = fail
|
||||
|
||||
|
||||
@pytest_twisted.async_yield_fixture(scope='function')
|
||||
async def component():
|
||||
"""Verify component registry is clean, and clean up after test."""
|
||||
if len(_component._ComponentRegistry.components) != 0:
|
||||
warnings.warn(
|
||||
'The component._ComponentRegistry.components is not empty on test setup.\n'
|
||||
'This is probably caused by another test that did not clean up after finishing!: %s'
|
||||
% _component._ComponentRegistry.components
|
||||
)
|
||||
|
||||
yield _component
|
||||
|
||||
await _component.shutdown()
|
||||
_component._ComponentRegistry.components.clear()
|
||||
_component._ComponentRegistry.dependents.clear()
|
||||
|
||||
|
||||
@pytest_twisted.async_yield_fixture(scope='function')
|
||||
async def base_fixture(common_fixture, component, request):
|
||||
"""This fixture is autoused on all tests that subclass BaseTestCase"""
|
||||
self = request.instance
|
||||
|
||||
if hasattr(self, 'set_up'):
|
||||
try:
|
||||
await maybeDeferred(self.set_up)
|
||||
except Exception as exc:
|
||||
warnings.warn('Error caught in test setup!\n%s' % exc)
|
||||
pytest.fail('Error caught in test setup!\n%s' % exc)
|
||||
|
||||
yield
|
||||
|
||||
if hasattr(self, 'tear_down'):
|
||||
try:
|
||||
await maybeDeferred(self.tear_down)
|
||||
except Exception as exc:
|
||||
pytest.fail('Error caught in test teardown!\n%s' % exc)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('base_fixture')
|
||||
class BaseTestCase:
|
||||
"""This is the base class that should be used for all test classes
|
||||
that create classes that inherit from deluge.component.Component. It
|
||||
ensures that the component registry has been cleaned up when tests
|
||||
have finished.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_mkstemp(tmp_path):
|
||||
"""Return known tempfile location to verify file deleted"""
|
||||
tmp_file = tempfile.mkstemp(dir=tmp_path)
|
||||
with patch('tempfile.mkstemp', return_value=tmp_file):
|
||||
yield tmp_file
|
||||
|
||||
|
||||
def pytest_collection_modifyitems(session, config, items) -> None:
|
||||
"""
|
||||
Automatically runs async tests with pytest_twisted.ensureDeferred
|
||||
"""
|
||||
function_items = (item for item in items if isinstance(item, pytest.Function))
|
||||
for function_item in function_items:
|
||||
function = function_item.obj
|
||||
if hasattr(function, '__func__'):
|
||||
# methods need to be unwrapped.
|
||||
function = function.__func__
|
||||
if asyncio.iscoroutinefunction(function):
|
||||
# This is how pytest_twisted marks ensureDeferred tests
|
||||
setattr(function, '_pytest_twisted_mark', 'async_test')
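With this hook in conftest.py, a coroutine test runs under Twisted without any explicit decorator; a sketch relying on the daemon and client fixtures defined earlier:

    async def test_client_connects(daemon, client):
        # client is already connected to the spawned daemon by the fixture.
        assert client.connected()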
@ -1,3 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -14,16 +15,11 @@ This should typically only be used by the Core. Plugins should utilize the
|
|||
`:mod:EventManager` for similar functionality.
|
||||
|
||||
"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import contextlib
|
||||
import logging
|
||||
import threading
|
||||
import time
|
||||
from collections import defaultdict
|
||||
from functools import partial
|
||||
from typing import Any, Callable
|
||||
|
||||
from twisted.internet import reactor, task, threads
|
||||
from twisted.internet import reactor
|
||||
|
||||
import deluge.component as component
|
||||
from deluge._libtorrent import lt
|
||||
|
@ -34,112 +30,68 @@ log = logging.getLogger(__name__)
|
|||
|
||||
class AlertManager(component.Component):
|
||||
"""AlertManager fetches and processes libtorrent alerts"""
|
||||
|
||||
def __init__(self):
|
||||
log.debug('AlertManager init...')
|
||||
component.Component.__init__(self, 'AlertManager')
|
||||
component.Component.__init__(self, 'AlertManager', interval=0.3)
|
||||
self.session = component.get('Core').session
|
||||
|
||||
# Increase the alert queue size so that alerts don't get lost.
|
||||
self.alert_queue_size = 10000
|
||||
self.set_alert_queue_size(self.alert_queue_size)
|
||||
|
||||
alert_mask = (
|
||||
lt.alert.category_t.error_notification
|
||||
| lt.alert.category_t.port_mapping_notification
|
||||
| lt.alert.category_t.storage_notification
|
||||
| lt.alert.category_t.tracker_notification
|
||||
| lt.alert.category_t.status_notification
|
||||
| lt.alert.category_t.ip_block_notification
|
||||
| lt.alert.category_t.performance_warning
|
||||
| lt.alert.category_t.file_progress_notification
|
||||
)
|
||||
alert_mask = (lt.alert.category_t.error_notification |
|
||||
lt.alert.category_t.port_mapping_notification |
|
||||
lt.alert.category_t.storage_notification |
|
||||
lt.alert.category_t.tracker_notification |
|
||||
lt.alert.category_t.status_notification |
|
||||
lt.alert.category_t.ip_block_notification |
|
||||
lt.alert.category_t.performance_warning)
|
||||
|
||||
self.session.apply_settings({'alert_mask': alert_mask})
|
||||
|
||||
# handlers is a dictionary of lists {"alert_type": [handler1,h2,..]}
|
||||
self.handlers = defaultdict(list)
|
||||
self.handlers_timeout_secs = 2
|
||||
self.handlers = {}
|
||||
self.delayed_calls = []
|
||||
self._event = threading.Event()
|
||||
|
||||
def update(self):
|
||||
pass
|
||||
|
||||
def start(self):
|
||||
thread = threading.Thread(
|
||||
target=self.wait_for_alert_in_thread, name='alert-poller', daemon=True
|
||||
)
|
||||
thread.start()
|
||||
self._event.set()
|
||||
self.delayed_calls = [dc for dc in self.delayed_calls if dc.active()]
|
||||
self.handle_alerts()
|
||||
|
||||
def stop(self):
|
||||
self.cancel_delayed_calls()
|
||||
|
||||
def pause(self):
|
||||
self._event.clear()
|
||||
|
||||
def resume(self):
|
||||
self._event.set()
|
||||
|
||||
def wait_for_alert_in_thread(self):
|
||||
while self._component_state not in ('Stopping', 'Stopped'):
|
||||
if self.check_delayed_calls():
|
||||
time.sleep(0.05)
|
||||
continue
|
||||
|
||||
if self.session.wait_for_alert(1000) is None:
|
||||
continue
|
||||
if self._event.wait():
|
||||
threads.blockingCallFromThread(reactor, self.maybe_handle_alerts)
|
||||
|
||||
def on_delayed_call_timeout(self, result, timeout, **kwargs):
|
||||
log.warning('Alert handler was timed-out before being called %s', kwargs)
|
||||
|
||||
def cancel_delayed_calls(self):
|
||||
"""Cancel all delayed handlers."""
|
||||
for delayed_call in self.delayed_calls:
|
||||
if delayed_call.active():
|
||||
delayed_call.cancel()
|
||||
self.delayed_calls = []
|
||||
|
||||
def check_delayed_calls(self) -> bool:
|
||||
"""Returns True if any handler calls are delayed."""
|
||||
self.delayed_calls = [dc for dc in self.delayed_calls if not dc.called]
|
||||
return len(self.delayed_calls) > 0
|
||||
|
||||
def maybe_handle_alerts(self) -> None:
|
||||
if self._component_state != 'Started':
|
||||
return
|
||||
|
||||
self.handle_alerts()
|
||||
|
||||
def register_handler(self, alert_type: str, handler: Callable[[Any], None]) -> None:
|
||||
def register_handler(self, alert_type, handler):
|
||||
"""
|
||||
Registers a function that will be called when 'alert_type' is pop'd
|
||||
in handle_alerts. The handler function should look like: handler(alert)
|
||||
Where 'alert' is the actual alert object from libtorrent.
|
||||
|
||||
Args:
|
||||
alert_type: String representation of the libtorrent alert name.
|
||||
Can be supplied with or without `_alert` suffix.
|
||||
handler: Callback function when the alert is raised.
|
||||
:param alert_type: str, this is string representation of the alert name
|
||||
:param handler: func(alert), the function to be called when the alert is raised
|
||||
"""
|
||||
if alert_type and alert_type.endswith('_alert'):
|
||||
alert_type = alert_type[: -len('_alert')]
|
||||
if alert_type not in self.handlers:
|
||||
# There is no entry for this alert type yet, so lets make it with an
|
||||
# empty list.
|
||||
self.handlers[alert_type] = []
|
||||
|
||||
# Append the handler to the list in the handlers dictionary
|
||||
self.handlers[alert_type].append(handler)
|
||||
log.debug('Registered handler for alert %s', alert_type)
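For illustration, registering a handler from a core component or plugin might look like this (torrent_finished and its torrent_name attribute come from libtorrent):

    import deluge.component as component

    def on_torrent_finished(alert):
        # 'alert' is the libtorrent alert object delivered by handle_alerts().
        print('Torrent finished:', alert.torrent_name)

    alert_manager = component.get('AlertManager')
    # The '_alert' suffix is now optional:
    alert_manager.register_handler('torrent_finished', on_torrent_finished)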
|
||||
|
||||
def deregister_handler(self, handler: Callable[[Any], None]):
|
||||
def deregister_handler(self, handler):
|
||||
"""
|
||||
De-registers the `handler` function from all alert types.
|
||||
De-registers the `:param:handler` function from all alert types.
|
||||
|
||||
Args:
|
||||
handler: The handler function to deregister.
|
||||
:param handler: func, the handler function to deregister
|
||||
"""
|
||||
for alert_type_handlers in self.handlers.values():
|
||||
with contextlib.suppress(ValueError):
|
||||
alert_type_handlers.remove(handler)
|
||||
# Iterate through all handlers and remove 'handler' where found
|
||||
for (dummy_key, value) in self.handlers.items():
|
||||
if handler in value:
|
||||
# Handler is in this alert type list
|
||||
value.remove(handler)
|
||||
|
||||
def handle_alerts(self):
|
||||
"""
|
||||
|
@ -153,42 +105,23 @@ class AlertManager(component.Component):
|
|||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug('Alerts queued: %s', num_alerts)
|
||||
if num_alerts > 0.9 * self.alert_queue_size:
|
||||
log.warning(
|
||||
'Warning total alerts queued, %s, passes 90%% of queue size.',
|
||||
num_alerts,
|
||||
)
|
||||
log.warning('Warning total alerts queued, %s, passes 90%% of queue size.', num_alerts)
|
||||
|
||||
# Loop through all alerts in the queue
|
||||
for alert in alerts:
|
||||
alert_type = alert.what()
|
||||
|
||||
alert_type = type(alert).__name__
|
||||
# Display the alert message
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug('%s: %s', alert_type, decode_bytes(alert.message()))
|
||||
|
||||
if alert_type not in self.handlers:
|
||||
continue
|
||||
|
||||
# Call any handlers for this alert type
|
||||
if alert_type in self.handlers:
|
||||
for handler in self.handlers[alert_type]:
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug('Handling alert: %s', alert_type)
|
||||
d = task.deferLater(reactor, 0, handler, alert)
|
||||
on_handler_timeout = partial(
|
||||
self.on_delayed_call_timeout,
|
||||
handler=handler.__qualname__,
|
||||
alert_type=alert_type,
|
||||
)
|
||||
d.addTimeout(
|
||||
self.handlers_timeout_secs,
|
||||
reactor,
|
||||
onTimeoutCancel=on_handler_timeout,
|
||||
)
|
||||
self.delayed_calls.append(d)
|
||||
self.delayed_calls.append(reactor.callLater(0, handler, alert))
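The new dispatch above defers each handler to the next reactor iteration and cancels it if it runs too long. A standalone sketch of that Twisted pattern (timeout value and names are placeholders):

    from functools import partial

    from twisted.internet import reactor, task

    def dispatch_with_timeout(handler, alert, timeout_secs=2):
        def on_timeout(result, timeout, handler_name=None):
            print('handler %s timed out after %ss' % (handler_name, timeout))

        d = task.deferLater(reactor, 0, handler, alert)
        d.addTimeout(timeout_secs, reactor,
                     onTimeoutCancel=partial(on_timeout, handler_name=handler.__qualname__))
        return d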
|
||||
|
||||
def set_alert_queue_size(self, queue_size):
|
||||
"""Sets the maximum size of the libtorrent alert queue"""
|
||||
log.info('Alert Queue Size set to %s', queue_size)
|
||||
self.alert_queue_size = queue_size
|
||||
component.get('Core').apply_session_setting(
|
||||
'alert_queue_size', self.alert_queue_size
|
||||
)
|
||||
component.get('Core').apply_session_setting('alert_queue_size', self.alert_queue_size)
|
||||
@ -1,3 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
|
||||
# Copyright (C) 2011 Pedro Algarvio <pedro@algarvio.me>
|
||||
|
@ -7,20 +8,17 @@
|
|||
# See LICENSE for more details.
|
||||
#
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
from io import open
|
||||
|
||||
import deluge.component as component
|
||||
import deluge.configmanager as configmanager
|
||||
from deluge.common import (
|
||||
AUTH_LEVEL_ADMIN,
|
||||
AUTH_LEVEL_DEFAULT,
|
||||
AUTH_LEVEL_NONE,
|
||||
AUTH_LEVEL_NORMAL,
|
||||
AUTH_LEVEL_READONLY,
|
||||
create_localclient_account,
|
||||
)
|
||||
from deluge.common import (AUTH_LEVEL_ADMIN, AUTH_LEVEL_DEFAULT, AUTH_LEVEL_NONE, AUTH_LEVEL_NORMAL,
|
||||
AUTH_LEVEL_READONLY, create_localclient_account)
|
||||
from deluge.error import AuthenticationRequired, AuthManagerError, BadLoginError
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
@ -28,14 +26,13 @@ log = logging.getLogger(__name__)
|
|||
AUTH_LEVELS_MAPPING = {
|
||||
'NONE': AUTH_LEVEL_NONE,
|
||||
'READONLY': AUTH_LEVEL_READONLY,
|
||||
'DEFAULT': AUTH_LEVEL_DEFAULT,
|
||||
'NORMAL': AUTH_LEVEL_NORMAL,
|
||||
'ADMIN': AUTH_LEVEL_ADMIN,
|
||||
}
|
||||
'DEFAULT': AUTH_LEVEL_NORMAL,
|
||||
'NORMAL': AUTH_LEVEL_DEFAULT,
|
||||
'ADMIN': AUTH_LEVEL_ADMIN}
|
||||
AUTH_LEVELS_MAPPING_REVERSE = {v: k for k, v in AUTH_LEVELS_MAPPING.items()}
|
||||
|
||||
|
||||
class Account:
|
||||
class Account(object):
|
||||
__slots__ = ('username', 'password', 'authlevel')
|
||||
|
||||
def __init__(self, username, password, authlevel):
|
||||
|
@ -48,14 +45,12 @@ class Account:
|
|||
'username': self.username,
|
||||
'password': self.password,
|
||||
'authlevel': AUTH_LEVELS_MAPPING_REVERSE[self.authlevel],
|
||||
'authlevel_int': self.authlevel,
|
||||
'authlevel_int': self.authlevel
|
||||
}
|
||||
|
||||
def __repr__(self):
|
||||
return '<Account username="{username}" authlevel={authlevel}>'.format(
|
||||
username=self.username,
|
||||
authlevel=self.authlevel,
|
||||
)
|
||||
return ('<Account username="%(username)s" authlevel=%(authlevel)s>' %
|
||||
{'username': self.username, 'authlevel': self.authlevel})
|
||||
|
||||
|
||||
class AuthManager(component.Component):
|
||||
|
@ -97,7 +92,7 @@ class AuthManager(component.Component):
|
|||
int: The auth level for this user.
|
||||
|
||||
Raises:
|
||||
AuthenticationRequired: If additional details are required to authenticate.
|
||||
AuthenticationRequired: If aditional details are required to authenticate.
|
||||
BadLoginError: If the username does not exist or password does not match.
|
||||
|
||||
"""
|
||||
|
@ -134,9 +129,8 @@ class AuthManager(component.Component):
|
|||
if authlevel not in AUTH_LEVELS_MAPPING:
|
||||
raise AuthManagerError('Invalid auth level: %s' % authlevel)
|
||||
try:
|
||||
self.__auth[username] = Account(
|
||||
username, password, AUTH_LEVELS_MAPPING[authlevel]
|
||||
)
|
||||
self.__auth[username] = Account(username, password,
|
||||
AUTH_LEVELS_MAPPING[authlevel])
|
||||
self.write_auth_file()
|
||||
return True
|
||||
except Exception as ex:
|
||||
|
@ -180,21 +174,18 @@ class AuthManager(component.Component):
|
|||
if os.path.isfile(filepath):
|
||||
log.debug('Creating backup of %s at: %s', filename, filepath_bak)
|
||||
shutil.copy2(filepath, filepath_bak)
|
||||
except OSError as ex:
|
||||
except IOError as ex:
|
||||
log.error('Unable to backup %s to %s: %s', filepath, filepath_bak, ex)
|
||||
else:
|
||||
log.info('Saving the %s at: %s', filename, filepath)
|
||||
try:
|
||||
with open(filepath_tmp, 'w', encoding='utf8') as _file:
|
||||
for account in self.__auth.values():
|
||||
_file.write(
|
||||
'%(username)s:%(password)s:%(authlevel_int)s\n'
|
||||
% account.data()
|
||||
)
|
||||
_file.write('%(username)s:%(password)s:%(authlevel_int)s\n' % account.data())
|
||||
_file.flush()
|
||||
os.fsync(_file.fileno())
|
||||
shutil.move(filepath_tmp, filepath)
|
||||
except OSError as ex:
|
||||
except IOError as ex:
|
||||
log.error('Unable to save %s: %s', filename, ex)
|
||||
if os.path.isfile(filepath_bak):
|
||||
log.info('Restoring backup of %s from: %s', filename, filepath_bak)
|
||||
|
@ -223,9 +214,9 @@ class AuthManager(component.Component):
|
|||
for _filepath in (auth_file, auth_file_bak):
|
||||
log.info('Opening %s for load: %s', filename, _filepath)
|
||||
try:
|
||||
with open(_filepath, encoding='utf8') as _file:
|
||||
with open(_filepath, 'r', encoding='utf8') as _file:
|
||||
file_data = _file.readlines()
|
||||
except OSError as ex:
|
||||
except IOError as ex:
|
||||
log.warning('Unable to load %s: %s', _filepath, ex)
|
||||
file_data = []
|
||||
else:
|
||||
|
@ -241,12 +232,8 @@ class AuthManager(component.Component):
|
|||
lsplit = line.split(':')
|
||||
if len(lsplit) == 2:
|
||||
username, password = lsplit
|
||||
log.warning(
|
||||
'Your auth entry for %s contains no auth level, '
|
||||
'using AUTH_LEVEL_DEFAULT(%s)..',
|
||||
username,
|
||||
AUTH_LEVEL_DEFAULT,
|
||||
)
|
||||
log.warning('Your auth entry for %s contains no auth level, '
|
||||
'using AUTH_LEVEL_DEFAULT(%s)..', username, AUTH_LEVEL_DEFAULT)
|
||||
if username == 'localclient':
|
||||
authlevel = AUTH_LEVEL_ADMIN
|
||||
else:
|
||||
|
@ -267,10 +254,7 @@ class AuthManager(component.Component):
|
|||
try:
|
||||
authlevel = AUTH_LEVELS_MAPPING[authlevel]
|
||||
except KeyError:
|
||||
log.error(
|
||||
'Your auth file is malformed: %r is not a valid auth level',
|
||||
authlevel,
|
||||
)
|
||||
log.error('Your auth file is malformed: %r is not a valid auth level', authlevel)
|
||||
continue
|
||||
|
||||
self.__auth[username] = Account(username, password, authlevel)
|
||||
|
|
File diff suppressed because it is too large
|
@ -1,3 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -7,6 +8,7 @@
|
|||
#
|
||||
|
||||
"""The Deluge daemon"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
@ -42,8 +44,8 @@ def is_daemon_running(pid_file):
|
|||
|
||||
try:
|
||||
with open(pid_file) as _file:
|
||||
pid, port = (int(x) for x in _file.readline().strip().split(';'))
|
||||
except (OSError, ValueError):
|
||||
pid, port = [int(x) for x in _file.readline().strip().split(';')]
|
||||
except (EnvironmentError, ValueError):
|
||||
return False
|
||||
|
||||
if is_process_running(pid):
|
||||
|
@ -51,7 +53,7 @@ def is_daemon_running(pid_file):
|
|||
_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
try:
|
||||
_socket.connect(('127.0.0.1', port))
|
||||
except OSError:
|
||||
except socket.error:
|
||||
# Can't connect, so pid is not a deluged process.
|
||||
return False
|
||||
else:
|
||||
|
@ -60,62 +62,43 @@ def is_daemon_running(pid_file):
|
|||
return True
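For reference, the pid file parsed here is written by Daemon.start() as a single '<pid>;<port>' line, so a minimal read of it (illustrative only) looks like:

    from deluge.configmanager import get_config_dir

    with open(get_config_dir('deluged.pid')) as _file:
        pid, port = (int(x) for x in _file.readline().strip().split(';'))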
|
||||
|
||||
|
||||
class Daemon:
|
||||
class Daemon(object):
|
||||
"""The Deluge Daemon class"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
listen_interface=None,
|
||||
outgoing_interface=None,
|
||||
interface=None,
|
||||
port=None,
|
||||
standalone=False,
|
||||
read_only_config_keys=None,
|
||||
):
|
||||
def __init__(self, listen_interface=None, interface=None, port=None, standalone=False,
|
||||
read_only_config_keys=None):
|
||||
"""
|
||||
Args:
|
||||
listen_interface (str, optional): The IP address to listen to
|
||||
BitTorrent connections on.
|
||||
outgoing_interface (str, optional): The network interface name or
|
||||
IP address to open outgoing BitTorrent connections on.
|
||||
interface (str, optional): The IP address the daemon will
|
||||
listen for UI connections on.
|
||||
port (int, optional): The port the daemon will listen for UI
|
||||
connections on.
|
||||
standalone (bool, optional): If True the client is in Standalone
|
||||
mode otherwise, if False, start the daemon as separate process.
|
||||
read_only_config_keys (list of str, optional): A list of config
|
||||
keys that will not be altered by core.set_config() RPC method.
|
||||
listen_interface (str, optional): The IP address to listen to bittorrent connections on.
|
||||
interface (str, optional): The IP address the daemon will listen for UI connections on.
|
||||
port (int, optional): The port the daemon will listen for UI connections on.
|
||||
standalone (bool, optional): If True the client is in Standalone mode otherwise, if
|
||||
False, start the daemon as separate process.
|
||||
read_only_config_keys (list of str, optional): A list of config keys that will not be
|
||||
altered by core.set_config() RPC method.
|
||||
"""
|
||||
self.standalone = standalone
|
||||
self.pid_file = get_config_dir('deluged.pid')
|
||||
log.info('Deluge daemon %s', get_version())
|
||||
if is_daemon_running(self.pid_file):
|
||||
raise DaemonRunningError(
|
||||
'Deluge daemon already running with this config directory!'
|
||||
)
|
||||
raise DaemonRunningError('Deluge daemon already running with this config directory!')
|
||||
|
||||
# Twisted catches signals to terminate, so just have it call the shutdown method.
|
||||
reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)
|
||||
|
||||
# Catch some Windows specific signals
|
||||
if windows_check():
|
||||
|
||||
def win_handler(ctrl_type):
|
||||
"""Handle the Windows shutdown or close events."""
|
||||
log.debug('windows handler ctrl_type: %s', ctrl_type)
|
||||
if ctrl_type == CTRL_CLOSE_EVENT or ctrl_type == CTRL_SHUTDOWN_EVENT:
|
||||
self._shutdown()
|
||||
return 1
|
||||
|
||||
SetConsoleCtrlHandler(win_handler)
|
||||
|
||||
# Start the core as a thread and join it until it's done
|
||||
self.core = Core(
|
||||
listen_interface=listen_interface,
|
||||
outgoing_interface=outgoing_interface,
|
||||
read_only_config_keys=read_only_config_keys,
|
||||
)
|
||||
self.core = Core(listen_interface=listen_interface,
|
||||
read_only_config_keys=read_only_config_keys)
|
||||
|
||||
if port is None:
|
||||
port = self.core.config['daemon_port']
|
||||
|
@ -129,16 +112,10 @@ class Daemon:
|
|||
port=port,
|
||||
allow_remote=self.core.config['allow_remote'],
|
||||
listen=not standalone,
|
||||
interface=interface,
|
||||
interface=interface
|
||||
)
|
||||
|
||||
log.debug(
|
||||
'Listening to UI on: %s:%s and bittorrent on: %s Making connections out on: %s',
|
||||
interface,
|
||||
port,
|
||||
listen_interface,
|
||||
outgoing_interface,
|
||||
)
|
||||
log.debug('Listening to UI on: %s:%s and bittorrent on: %s', interface, port, listen_interface)
|
||||
|
||||
def start(self):
|
||||
# Register the daemon and the core RPCs
|
||||
|
@ -154,7 +131,7 @@ class Daemon:
|
|||
pid = os.getpid()
|
||||
log.debug('Storing pid %s & port %s in: %s', pid, self.port, self.pid_file)
|
||||
with open(self.pid_file, 'w') as _file:
|
||||
_file.write(f'{pid};{self.port}\n')
|
||||
_file.write('%s;%s\n' % (pid, self.port))
|
||||
|
||||
component.start()
|
||||
|
||||
|
@ -180,11 +157,6 @@ class Daemon:
|
|||
"""Returns a list of the exported methods."""
|
||||
return self.rpcserver.get_method_list()
|
||||
|
||||
@export()
|
||||
def get_version(self):
|
||||
"""Returns the daemon version"""
|
||||
return get_version()
|
||||
|
||||
@export(1)
|
||||
def authorized_call(self, rpc):
|
||||
"""Determines if session auth_level is authorized to call RPC.
|
||||
|
@ -198,7 +170,4 @@ class Daemon:
|
|||
if rpc not in self.get_method_list():
|
||||
return False
|
||||
|
||||
return (
|
||||
self.rpcserver.get_session_auth_level()
|
||||
>= self.rpcserver.get_rpc_auth_level(rpc)
|
||||
)
|
||||
return self.rpcserver.get_session_auth_level() >= self.rpcserver.get_rpc_auth_level(rpc)
|
||||
@ -1,3 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com>
|
||||
# Copyright (C) 2010 Pedro Algarvio <pedro@algarvio.me>
|
||||
|
@ -6,61 +7,30 @@
|
|||
# the additional special exception to link portions of this program with the OpenSSL library.
|
||||
# See LICENSE for more details.
|
||||
#
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
from logging import DEBUG, FileHandler, getLogger
|
||||
|
||||
from twisted.internet.error import CannotListenError
|
||||
|
||||
from deluge.argparserbase import ArgParserBase
|
||||
from deluge.common import run_profiled
|
||||
from deluge.configmanager import get_config_dir
|
||||
from deluge.i18n import setup_mock_translation
|
||||
from deluge.ui.baseargparser import BaseArgParser
|
||||
from deluge.ui.translations_util import set_dummy_trans
|
||||
|
||||
|
||||
def add_daemon_options(parser):
|
||||
group = parser.add_argument_group(_('Daemon Options'))
|
||||
group.add_argument(
|
||||
'-u',
|
||||
'--ui-interface',
|
||||
metavar='<ip-addr>',
|
||||
action='store',
|
||||
help=_('IP address to listen for UI connections'),
|
||||
)
|
||||
group.add_argument(
|
||||
'-p',
|
||||
'--port',
|
||||
metavar='<port>',
|
||||
action='store',
|
||||
type=int,
|
||||
help=_('Port to listen for UI connections on'),
|
||||
)
|
||||
group.add_argument(
|
||||
'-i',
|
||||
'--interface',
|
||||
metavar='<ip-addr>',
|
||||
dest='listen_interface',
|
||||
action='store',
|
||||
help=_('IP address to listen for BitTorrent connections'),
|
||||
)
|
||||
group.add_argument(
|
||||
'-o',
|
||||
'--outgoing-interface',
|
||||
metavar='<interface>',
|
||||
dest='outgoing_interface',
|
||||
action='store',
|
||||
help=_(
|
||||
'The network interface name or IP address for outgoing BitTorrent connections.'
|
||||
),
|
||||
)
|
||||
group.add_argument(
|
||||
'--read-only-config-keys',
|
||||
metavar='<comma-separated-keys>',
|
||||
action='store',
|
||||
help=_('Config keys to be unmodified by `set_config` RPC'),
|
||||
type=str,
|
||||
default='',
|
||||
)
|
||||
group.add_argument('-u', '--ui-interface', metavar='<ip-addr>', action='store',
|
||||
help=_('IP address to listen for UI connections'))
|
||||
group.add_argument('-p', '--port', metavar='<port>', action='store', type=int,
|
||||
help=_('Port to listen for UI connections on'))
|
||||
group.add_argument('-i', '--interface', metavar='<ip-addr>', dest='listen_interface', action='store',
|
||||
help=_('IP address to listen for BitTorrent connections'))
|
||||
group.add_argument('--read-only-config-keys', metavar='<comma-separated-keys>', action='store',
|
||||
help=_('Config keys to be unmodified by `set_config` RPC'), type=str, default='')
|
||||
parser.add_process_arg_group()
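With the new -o/--outgoing-interface option added above, a typical invocation (example values only) becomes:

    deluged -u 127.0.0.1 -p 58846 -i 0.0.0.0 -o eth0

where -i sets the address BitTorrent listens on and -o names the interface or IP used for outgoing peer connections.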
|
||||
|
||||
|
||||
|
@ -75,23 +45,20 @@ def start_daemon(skip_start=False):
|
|||
deluge.core.daemon.Daemon: A new daemon object
|
||||
|
||||
"""
|
||||
setup_mock_translation()
|
||||
set_dummy_trans(warn_msg=True)
|
||||
|
||||
# Setup the argument parser
|
||||
parser = ArgParserBase()
|
||||
parser = BaseArgParser()
|
||||
add_daemon_options(parser)
|
||||
|
||||
options = parser.parse_args()
|
||||
|
||||
# Check for any daemons running with this same config
|
||||
from deluge.core.daemon import is_daemon_running
|
||||
|
||||
pid_file = get_config_dir('deluged.pid')
|
||||
if is_daemon_running(pid_file):
|
||||
print(
|
||||
'Cannot run multiple daemons with same config directory.\n'
|
||||
'If you believe this is an error, force starting by deleting: %s' % pid_file
|
||||
)
|
||||
print('Cannot run multiple daemons with same config directory.\n'
|
||||
'If you believe this is an error, force starting by deleting: %s' % pid_file)
|
||||
sys.exit(1)
|
||||
|
||||
log = getLogger(__name__)
|
||||
|
@ -105,25 +72,18 @@ def start_daemon(skip_start=False):
|
|||
def run_daemon(options):
|
||||
try:
|
||||
from deluge.core.daemon import Daemon
|
||||
|
||||
daemon = Daemon(
|
||||
listen_interface=options.listen_interface,
|
||||
outgoing_interface=options.outgoing_interface,
|
||||
daemon = Daemon(listen_interface=options.listen_interface,
|
||||
interface=options.ui_interface,
|
||||
port=options.port,
|
||||
read_only_config_keys=options.read_only_config_keys.split(','),
|
||||
)
|
||||
read_only_config_keys=options.read_only_config_keys.split(','))
|
||||
if skip_start:
|
||||
return daemon
|
||||
else:
|
||||
daemon.start()
|
||||
except CannotListenError as ex:
|
||||
log.error(
|
||||
'Cannot start deluged, listen port in use.\n'
|
||||
log.error('Cannot start deluged, listen port in use.\n'
|
||||
' Check for other running daemons or services using this port: %s:%s',
|
||||
ex.interface,
|
||||
ex.port,
|
||||
)
|
||||
ex.interface, ex.port)
|
||||
sys.exit(1)
|
||||
except Exception as ex:
|
||||
log.error('Unable to start deluged: %s', ex)
|
||||
|
@ -135,6 +95,4 @@ def start_daemon(skip_start=False):
|
|||
if options.pidfile:
|
||||
os.remove(options.pidfile)
|
||||
|
||||
return run_profiled(
|
||||
run_daemon, options, output_file=options.profile, do_profile=options.profile
|
||||
)
|
||||
return run_profiled(run_daemon, options, output_file=options.profile, do_profile=options.profile)
|
||||
|
@ -1,3 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -6,6 +7,8 @@
|
|||
# See LICENSE for more details.
|
||||
#
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
import deluge.component as component
|
||||
|
@ -33,12 +36,7 @@ class EventManager(component.Component):
|
|||
try:
|
||||
handler(*event.args)
|
||||
except Exception as ex:
|
||||
log.error(
|
||||
'Event handler %s failed in %s with exception %s',
|
||||
event.name,
|
||||
handler,
|
||||
ex,
|
||||
)
|
||||
log.error('Event handler %s failed in %s with exception %s', event.name, handler, ex)
|
||||
|
||||
def register_event_handler(self, event, handler):
|
||||
"""
|
||||
@ -1,3 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2008 Martijn Voncken <mvoncken@gmail.com>
|
||||
#
|
||||
|
@ -6,10 +7,12 @@
|
|||
# See LICENSE for more details.
|
||||
#
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
import deluge.component as component
|
||||
from deluge.common import TORRENT_STATE
|
||||
from deluge.common import PY2, TORRENT_STATE
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -95,8 +98,9 @@ def tracker_error_filter(torrent_ids, values):
|
|||
|
||||
|
||||
class FilterManager(component.Component):
|
||||
"""FilterManager"""
|
||||
"""FilterManager
|
||||
|
||||
"""
|
||||
def __init__(self, core):
|
||||
component.Component.__init__(self, 'FilterManager')
|
||||
log.debug('FilterManager init..')
|
||||
|
@ -111,14 +115,12 @@ class FilterManager(component.Component):
|
|||
|
||||
def _init_tracker_tree():
|
||||
return {'Error': 0}
|
||||
|
||||
self.register_tree_field('tracker_host', _init_tracker_tree)
|
||||
|
||||
self.register_filter('tracker_host', tracker_error_filter)
|
||||
|
||||
def _init_users_tree():
|
||||
return {'': 0}
|
||||
|
||||
self.register_tree_field('owner', _init_users_tree)
|
||||
|
||||
def filter_torrent_ids(self, filter_dict):
|
||||
|
@ -131,7 +133,7 @@ class FilterManager(component.Component):
|
|||
|
||||
# Sanitize input: filter-value must be a list of strings
|
||||
for key, value in filter_dict.items():
|
||||
if isinstance(value, str):
|
||||
if isinstance(value, str if not PY2 else basestring):
|
||||
filter_dict[key] = [value]
|
||||
|
||||
# Optimized filter for id
|
||||
|
@ -160,25 +162,19 @@ class FilterManager(component.Component):
|
|||
return torrent_ids
|
||||
|
||||
# Registered filters
|
||||
for field, values in list(filter_dict.items()):
|
||||
for field, values in filter_dict.items():
|
||||
if field in self.registered_filters:
|
||||
# Filters out doubles
|
||||
torrent_ids = list(
|
||||
set(self.registered_filters[field](torrent_ids, values))
|
||||
)
|
||||
torrent_ids = list(set(self.registered_filters[field](torrent_ids, values)))
|
||||
del filter_dict[field]
|
||||
|
||||
if not filter_dict:
|
||||
return torrent_ids
|
||||
|
||||
torrent_keys, plugin_keys = self.torrents.separate_keys(
|
||||
list(filter_dict), torrent_ids
|
||||
)
|
||||
torrent_keys, plugin_keys = self.torrents.separate_keys(list(filter_dict), torrent_ids)
|
||||
# Leftover filter arguments, default filter on status fields.
|
||||
for torrent_id in list(torrent_ids):
|
||||
status = self.core.create_torrent_status(
|
||||
torrent_id, torrent_keys, plugin_keys
|
||||
)
|
||||
status = self.core.create_torrent_status(torrent_id, torrent_keys, plugin_keys)
|
||||
for field, values in filter_dict.items():
|
||||
if field in status and status[field] in values:
|
||||
continue
|
||||
|
@ -198,21 +194,17 @@ class FilterManager(component.Component):
|
|||
tree_keys.remove(cat)
|
||||
|
||||
torrent_keys, plugin_keys = self.torrents.separate_keys(tree_keys, torrent_ids)
|
||||
items = {field: self.tree_fields[field]() for field in tree_keys}
|
||||
items = dict((field, self.tree_fields[field]()) for field in tree_keys)
|
||||
|
||||
for torrent_id in list(torrent_ids):
|
||||
status = self.core.create_torrent_status(
|
||||
torrent_id, torrent_keys, plugin_keys
|
||||
) # status={key:value}
|
||||
status = self.core.create_torrent_status(torrent_id, torrent_keys, plugin_keys) # status={key:value}
|
||||
for field in tree_keys:
|
||||
value = status[field]
|
||||
items[field][value] = items[field].get(value, 0) + 1
|
||||
|
||||
if 'tracker_host' in items:
|
||||
items['tracker_host']['All'] = len(torrent_ids)
|
||||
items['tracker_host']['Error'] = len(
|
||||
tracker_error_filter(torrent_ids, ('Error',))
|
||||
)
|
||||
items['tracker_host']['Error'] = len(tracker_error_filter(torrent_ids, ('Error',)))
|
||||
|
||||
if not show_zero_hits:
|
||||
for cat in ['state', 'owner', 'tracker_host']:
|
||||
|
@ -223,7 +215,7 @@ class FilterManager(component.Component):
|
|||
sorted_items = {field: sorted(items[field].items()) for field in tree_keys}
|
||||
|
||||
if 'state' in tree_keys:
|
||||
sorted_items['state'].sort(key=self._sort_state_item)
|
||||
sorted_items['state'].sort(self._sort_state_items)
|
||||
|
||||
return sorted_items
|
||||
|
||||
|
@ -232,9 +224,7 @@ class FilterManager(component.Component):
|
|||
init_state['All'] = len(self.torrents.get_torrent_list())
|
||||
for state in TORRENT_STATE:
|
||||
init_state[state] = 0
|
||||
init_state['Active'] = len(
|
||||
self.filter_state_active(self.torrents.get_torrent_list())
|
||||
)
|
||||
init_state['Active'] = len(self.filter_state_active(self.torrents.get_torrent_list()))
|
||||
return init_state
|
||||
|
||||
def register_filter(self, filter_id, filter_func, filter_value=None):
|
||||
|
@ -252,9 +242,7 @@ class FilterManager(component.Component):
|
|||
|
||||
def filter_state_active(self, torrent_ids):
|
||||
for torrent_id in list(torrent_ids):
|
||||
status = self.torrents[torrent_id].get_status(
|
||||
['download_payload_rate', 'upload_payload_rate']
|
||||
)
|
||||
status = self.torrents[torrent_id].get_status(['download_payload_rate', 'upload_payload_rate'])
|
||||
if status['download_payload_rate'] or status['upload_payload_rate']:
|
||||
pass
|
||||
else:
|
||||
|
@ -263,12 +251,18 @@ class FilterManager(component.Component):
|
|||
|
||||
def _hide_state_items(self, state_items):
|
||||
"""For hide(show)-zero hits"""
|
||||
for value, count in list(state_items.items()):
|
||||
for (value, count) in state_items.items():
|
||||
if value != 'All' and count == 0:
|
||||
del state_items[value]
|
||||
|
||||
def _sort_state_item(self, item):
|
||||
try:
|
||||
return STATE_SORT.index(item[0])
|
||||
except ValueError:
|
||||
return 99
|
||||
def _sort_state_items(self, x, y):
|
||||
if x[0] in STATE_SORT:
|
||||
ix = STATE_SORT.index(x[0])
|
||||
else:
|
||||
ix = 99
|
||||
if y[0] in STATE_SORT:
|
||||
iy = STATE_SORT.index(y[0])
|
||||
else:
|
||||
iy = 99
|
||||
|
||||
return ix - iy
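The old cmp-style comparator is replaced by a key function, since Python 3's list.sort() no longer accepts a comparison function. A standalone sketch with a stand-in STATE_SORT list:

    STATE_SORT = ['All', 'Active', 'Downloading', 'Seeding', 'Paused']  # stand-in order

    def sort_key(item):
        try:
            return STATE_SORT.index(item[0])
        except ValueError:
            return 99  # unknown states sort last

    rows = [('Paused', 2), ('All', 10), ('Seeding', 3)]
    rows.sort(key=sort_key)   # [('All', 10), ('Seeding', 3), ('Paused', 2)]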
|
||||
@ -1,3 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -8,6 +9,7 @@
|
|||
|
||||
|
||||
"""PluginManager for Core"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
|
@ -31,8 +33,7 @@ class PluginManager(deluge.pluginmanagerbase.PluginManagerBase, component.Compon
|
|||
|
||||
# Call the PluginManagerBase constructor
|
||||
deluge.pluginmanagerbase.PluginManagerBase.__init__(
|
||||
self, 'core.conf', 'deluge.plugin.core'
|
||||
)
|
||||
self, 'core.conf', 'deluge.plugin.core')
|
||||
|
||||
def start(self):
|
||||
# Enable plugins that are enabled in the config
|
||||
|
@ -75,7 +76,6 @@ class PluginManager(deluge.pluginmanagerbase.PluginManagerBase, component.Compon
|
|||
if name not in self.plugins:
|
||||
component.get('EventManager').emit(PluginDisabledEvent(name))
|
||||
return result
|
||||
|
||||
d.addBoth(on_disable_plugin)
|
||||
return d
|
||||
|
||||
|
|
|
@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2010 Andrew Resch <andrewresch@gmail.com>
#
@@ -7,13 +8,13 @@
#

from __future__ import unicode_literals

import logging
import os
import platform
import random
import threading
from urllib.parse import quote_plus
from urllib.request import urlopen

from twisted.internet.task import LoopingCall

@@ -23,14 +24,17 @@ import deluge.configmanager
from deluge._libtorrent import lt
from deluge.event import ConfigValueChangedEvent

try:
    import GeoIP
except ImportError:
    GeoIP = None

try:
    from GeoIP import GeoIP
    from urllib.parse import quote_plus
    from urllib.request import urlopen
except ImportError:
    try:
        from pygeoip import GeoIP
    except ImportError:
        pass
    from urllib import quote_plus
    from urllib2 import urlopen

log = logging.getLogger(__name__)

@@ -43,7 +47,6 @@ DEFAULT_PREFS = {
    'download_location': deluge.common.get_default_download_dir(),
    'listen_ports': [6881, 6891],
    'listen_interface': '',
    'outgoing_interface': '',
    'random_port': True,
    'listen_random_port': None,
    'listen_use_sys_port': False,
@@ -68,11 +71,8 @@ DEFAULT_PREFS = {
    'max_upload_speed': -1.0,
    'max_download_speed': -1.0,
    'max_upload_slots_global': 4,
    'max_half_open_connections': (
        lambda: deluge.common.windows_check()
        and (lambda: deluge.common.vista_check() and 4 or 8)()
        or 50
    )(),
    'max_half_open_connections': (lambda: deluge.common.windows_check() and
                                  (lambda: deluge.common.vista_check() and 4 or 8)() or 50)(),
    'max_connections_per_second': 20,
    'ignore_limits_on_local_network': True,
    'max_connections_per_torrent': -1,
@@ -122,7 +122,7 @@ DEFAULT_PREFS = {
    'cache_expiry': 60,
    'auto_manage_prefer_seeds': False,
    'shared': False,
    'super_seeding': False,
    'super_seeding': False
}


@@ -131,9 +131,7 @@ class PreferencesManager(component.Component):
        component.Component.__init__(self, 'PreferencesManager')
        self.config = deluge.configmanager.ConfigManager('core.conf', DEFAULT_PREFS)
        if 'proxies' in self.config:
            log.warning(
                'Updating config file for proxy, using "peer" values to fill new "proxy" setting'
            )
            log.warning('Updating config file for proxy, using "peer" values to fill new "proxy" setting')
            self.config['proxy'].update(self.config['proxies']['peer'])
            log.warning('New proxy config is: %s', self.config['proxy'])
            del self.config['proxies']
@@ -189,50 +187,28 @@ class PreferencesManager(component.Component):
    def _on_set_listen_interface(self, key, value):
        self.__set_listen_on()

    def _on_set_outgoing_interface(self, key, value):
        """Set interface name or IP address for outgoing BitTorrent connections."""
        value = value.strip() if value else ''
        self.core.apply_session_settings({'outgoing_interfaces': value})

    def _on_set_random_port(self, key, value):
        self.__set_listen_on()

    def __set_listen_on(self):
        """ Set the ports and interface address to listen for incoming connections on."""
        if self.config['random_port']:
            if (
                not self.config['listen_reuse_port']
                or not self.config['listen_random_port']
            ):
            if not self.config['listen_random_port']:
                self.config['listen_random_port'] = random.randrange(49152, 65525)
            listen_ports = [
                self.config['listen_random_port']
            ] * 2  # use single port range
            listen_ports = [self.config['listen_random_port']] * 2  # use single port range
        else:
            self.config['listen_random_port'] = None
            listen_ports = self.config['listen_ports']

        if self.config['listen_interface']:
            interface = self.config['listen_interface'].strip()
        else:
            interface = '0.0.0.0'
        interface = str(self.config['listen_interface'].strip())
        interface = interface if interface else '0.0.0.0'

        log.debug(
            'Listen Interface: %s, Ports: %s with use_sys_port: %s',
            interface,
            listen_ports,
            self.config['listen_use_sys_port'],
        )
        interfaces = [
            f'{interface}:{port}'
            for port in range(listen_ports[0], listen_ports[1] + 1)
        ]
        log.debug('Listen Interface: %s, Ports: %s with use_sys_port: %s',
                  interface, listen_ports, self.config['listen_use_sys_port'])
        interfaces = ['%s:%s' % (interface, port) for port in range(listen_ports[0], listen_ports[1]+1)]
        self.core.apply_session_settings(
            {
                'listen_system_port_fallback': self.config['listen_use_sys_port'],
                'listen_interfaces': ','.join(interfaces),
            }
        )
            {'listen_system_port_fallback': self.config['listen_use_sys_port'],
             'listen_interfaces': ''.join(interfaces)})

    def _on_set_outgoing_ports(self, key, value):
        self.__set_outgoing_ports()
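# Minimal sketch of how the listen string above is assembled (values assumed):
# each port in the configured range is paired with the interface address and
# the results are joined with ',' for libtorrent's 'listen_interfaces' setting.
interface = '0.0.0.0'          # assumed example address
listen_ports = [6881, 6891]    # assumed example range
interfaces = [f'{interface}:{port}' for port in range(listen_ports[0], listen_ports[1] + 1)]
assert interfaces[0] == '0.0.0.0:6881' and interfaces[-1] == '0.0.0.0:6891'
print(','.join(interfaces))  # '0.0.0.0:6881,0.0.0.0:6882,...,0.0.0.0:6891'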
@@ -241,22 +217,14 @@ class PreferencesManager(component.Component):
        self.__set_outgoing_ports()

    def __set_outgoing_ports(self):
        port = (
            0
            if self.config['random_outgoing_ports']
            else self.config['outgoing_ports'][0]
        )
        port = 0 if self.config['random_outgoing_ports'] else self.config['outgoing_ports'][0]
        if port:
            num_ports = (
                self.config['outgoing_ports'][1] - self.config['outgoing_ports'][0]
            )
            num_ports = self.config['outgoing_ports'][1] - self.config['outgoing_ports'][0]
            num_ports = num_ports if num_ports > 1 else 5
        else:
            num_ports = 0
        log.debug('Outgoing port set to %s with range: %s', port, num_ports)
        self.core.apply_session_settings(
            {'outgoing_port': port, 'num_outgoing_ports': num_ports}
        )
        self.core.apply_session_settings({'outgoing_port': port, 'num_outgoing_ports': num_ports})

    def _on_set_peer_tos(self, key, value):
        try:
@@ -265,21 +233,8 @@ class PreferencesManager(component.Component):
            log.error('Invalid tos byte: %s', ex)

    def _on_set_dht(self, key, value):
        lt_bootstraps = self.core.session.get_settings()['dht_bootstrap_nodes']
        # Update list of lt bootstraps, using set to remove duplicates.
        dht_bootstraps = set(
            lt_bootstraps.split(',')
            + [
                'router.bittorrent.com:6881',
                'router.utorrent.com:6881',
                'router.bitcomet.com:6881',
                'dht.transmissionbt.com:6881',
                'dht.aelitis.com:6881',
            ]
        )
        self.core.apply_session_settings(
            {'dht_bootstrap_nodes': ','.join(dht_bootstraps), 'enable_dht': value}
        )
        dht_bootstraps = 'router.bittorrent.com:6881,router.utorrent.com:6881,router.bitcomet.com:6881'
        self.core.apply_session_settings({'dht_bootstrap_nodes': dht_bootstraps, 'enable_dht': value})

    def _on_set_upnp(self, key, value):
        self.core.apply_session_setting('enable_upnp', value)
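# Minimal sketch of the bootstrap-node merge shown above (session value assumed):
# existing nodes from the session are combined with the defaults via a set to
# drop duplicates, then joined back into libtorrent's comma-separated format.
lt_bootstraps = 'dht.transmissionbt.com:6881,router.bittorrent.com:6881'  # assumed
dht_bootstraps = set(
    lt_bootstraps.split(',')
    + [
        'router.bittorrent.com:6881',
        'router.utorrent.com:6881',
        'router.bitcomet.com:6881',
        'dht.transmissionbt.com:6881',
        'dht.aelitis.com:6881',
    ]
)
print(','.join(sorted(dht_bootstraps)))  # five unique host:port entries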
@@ -305,21 +260,12 @@ class PreferencesManager(component.Component):

    def _on_set_encryption(self, key, value):
        # Convert Deluge enc_level values to libtorrent enc_level values.
        pe_enc_level = {
            0: lt.enc_level.plaintext,
            1: lt.enc_level.rc4,
            2: lt.enc_level.both,
        }
        pe_enc_level = {0: lt.enc_level.plaintext, 1: lt.enc_level.rc4, 2: lt.enc_level.both}
        self.core.apply_session_settings(
            {
                'out_enc_policy': lt.enc_policy(self.config['enc_out_policy']),
            {'out_enc_policy': lt.enc_policy(self.config['enc_out_policy']),
             'in_enc_policy': lt.enc_policy(self.config['enc_in_policy']),
                'allowed_enc_level': lt.enc_level(
                    pe_enc_level[self.config['enc_level']]
                ),
                'prefer_rc4': True,
            }
        )
             'allowed_enc_level': lt.enc_level(pe_enc_level[self.config['enc_level']]),
             'prefer_rc4': True})

    def _on_set_max_connections_global(self, key, value):
        self.core.apply_session_setting('connections_limit', value)
@@ -381,29 +327,20 @@ class PreferencesManager(component.Component):

    def run(self):
        import time

        now = time.time()
        # check if we've done this within the last week or never
        if (now - self.config['info_sent']) >= (60 * 60 * 24 * 7):
            try:
                url = (
                    'http://deluge-torrent.org/stats_get.php?processor='
                    + platform.machine()
                    + '&python='
                    + platform.python_version()
                    + '&deluge='
                    + deluge.common.get_version()
                    + '&os='
                    + platform.system()
                    + '&plugins='
                    + quote_plus(':'.join(self.config['enabled_plugins']))
                )
                url = 'http://deluge-torrent.org/stats_get.php?processor=' + \
                    platform.machine() + '&python=' + platform.python_version() \
                    + '&deluge=' + deluge.common.get_version() \
                    + '&os=' + platform.system() \
                    + '&plugins=' + quote_plus(':'.join(self.config['enabled_plugins']))
                urlopen(url)
            except OSError as ex:
            except IOError as ex:
                log.debug('Network error while trying to send info: %s', ex)
            else:
                self.config['info_sent'] = now

        if value:
            SendInfoThread(self.config).start()

@@ -415,8 +352,7 @@ class PreferencesManager(component.Component):
                self.new_release_timer.stop()
            # Set a timer to check for a new release every 3 days
            self.new_release_timer = LoopingCall(
                self._on_set_new_release_check, 'new_release_check', True
            )
                self._on_set_new_release_check, 'new_release_check', True)
            self.new_release_timer.start(72 * 60 * 60, False)
        else:
            if self.new_release_timer and self.new_release_timer.running:
@@ -425,34 +361,31 @@ class PreferencesManager(component.Component):
    def _on_set_proxy(self, key, value):
        # Initialise with type none and blank hostnames.
        proxy_settings = {
            'proxy_type': lt.proxy_type_t.none,
            'proxy_type': lt.proxy_type.none,
            'i2p_hostname': '',
            'proxy_hostname': '',
            'proxy_hostnames': value['proxy_hostnames'],
            'proxy_peer_connections': value['proxy_peer_connections'],
            'proxy_tracker_connections': value['proxy_tracker_connections'],
            'force_proxy': value['force_proxy'],
            'anonymous_mode': value['anonymous_mode'],
            'anonymous_mode': value['anonymous_mode']
        }

        if value['type'] == lt.proxy_type_t.i2p_proxy:
            proxy_settings.update(
                {
                    'proxy_type': lt.proxy_type_t.i2p_proxy,
        if value['type'] == lt.proxy_type.i2p_proxy:
            proxy_settings.update({
                'proxy_type': lt.proxy_type.i2p_proxy,
                'i2p_hostname': value['hostname'],
                'i2p_port': value['port'],
                }
            )
        elif value['type'] != lt.proxy_type_t.none:
            proxy_settings.update(
                {
            })
        elif value['type'] != lt.proxy_type.none:
            proxy_settings.update({
                'proxy_type': value['type'],
                'proxy_hostname': value['hostname'],
                'proxy_port': value['port'],
                'proxy_username': value['username'],
                'proxy_password': value['password'],
                }
            )

            })

        self.core.apply_session_settings(proxy_settings)

@@ -463,9 +396,9 @@ class PreferencesManager(component.Component):
        # Load the GeoIP DB for country look-ups if available
        if os.path.exists(geoipdb_path):
            try:
                self.core.geoip_instance = GeoIP(geoipdb_path, 0)
            except Exception as ex:
                log.warning('GeoIP Unavailable: %s', ex)
                self.core.geoip_instance = GeoIP.open(geoipdb_path, GeoIP.GEOIP_STANDARD)
            except AttributeError:
                log.warning('GeoIP Unavailable')
        else:
            log.warning('Unable to find GeoIP database file: %s', geoipdb_path)

@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008,2009 Andrew Resch <andrewresch@gmail.com>
#
@@ -7,34 +8,24 @@
#

"""RPCServer Module"""
from __future__ import unicode_literals

import logging
import os
import stat
import sys
import traceback
from collections import namedtuple
from types import FunctionType
from typing import Callable, TypeVar, overload

from OpenSSL import SSL, crypto
from twisted.internet import defer, reactor
from twisted.internet.protocol import Factory, connectionDone

import deluge.component as component
import deluge.configmanager
from deluge.core.authmanager import (
    AUTH_LEVEL_ADMIN,
    AUTH_LEVEL_DEFAULT,
    AUTH_LEVEL_NONE,
)
from deluge.crypto_utils import check_ssl_keys, get_context_factory
from deluge.error import (
    BadLoginError,
    DelugeError,
    IncompatibleClient,
    NotAuthorizedError,
    WrappedException,
    _ClientSideRecreateError,
)
from deluge.core.authmanager import AUTH_LEVEL_ADMIN, AUTH_LEVEL_DEFAULT, AUTH_LEVEL_NONE
from deluge.error import DelugeError, IncompatibleClient, NotAuthorizedError, WrappedException, _ClientSideRecreateError
from deluge.event import ClientDisconnectedEvent
from deluge.transfer import DelugeTransferProtocol

@@ -44,16 +35,6 @@ RPC_EVENT = 3

log = logging.getLogger(__name__)

TCallable = TypeVar('TCallable', bound=Callable)


@overload
def export(func: TCallable) -> TCallable: ...


@overload
def export(auth_level: int) -> Callable[[TCallable], TCallable]: ...


def export(auth_level=AUTH_LEVEL_DEFAULT):
    """
@@ -66,23 +47,13 @@ def export(auth_level=AUTH_LEVEL_DEFAULT):
    :type auth_level: int

    """

    def wrap(func, *args, **kwargs):
        func._rpcserver_export = True
        func._rpcserver_auth_level = auth_level

        rpc_text = '**RPC exported method** (*Auth level: %s*)' % auth_level

        # Append the RPC text while ensuring correct docstring formatting.
        if func.__doc__:
            if func.__doc__.endswith(' '):
                indent = func.__doc__.split('\n')[-1]
                func.__doc__ += f'\n{indent}'
            else:
                func.__doc__ += '\n\n'
            func.__doc__ += rpc_text
        else:
            func.__doc__ = rpc_text
        doc = func.__doc__
        func.__doc__ = '**RPC Exported Function** (*Auth Level: %s*)\n\n' % auth_level
        if doc:
            func.__doc__ += doc

        return func

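# Illustrative sketch of how the export decorator above is typically applied,
# both bare and with an explicit auth level (the overloads shown support both
# forms). The class and method names here are assumptions, not part of the diff.
class ExampleComponent:
    @export
    def get_something(self):
        """Available to any authenticated client (AUTH_LEVEL_DEFAULT)."""
        return 42

    @export(AUTH_LEVEL_ADMIN)
    def shutdown_something(self):
        """Only callable by sessions holding the admin auth level."""
        return True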
@@ -120,9 +91,25 @@ def format_request(call):
    return s


class ServerContextFactory(object):
    def getContext(self):  # NOQA: N802
        """
        Create an SSL context.

        This loads the servers cert/private key SSL files for use with the
        SSL transport.
        """
        ssl_dir = deluge.configmanager.get_config_dir('ssl')
        ctx = SSL.Context(SSL.SSLv23_METHOD)
        ctx.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
        ctx.use_certificate_file(os.path.join(ssl_dir, 'daemon.cert'))
        ctx.use_privatekey_file(os.path.join(ssl_dir, 'daemon.pkey'))
        return ctx


class DelugeRPCProtocol(DelugeTransferProtocol):
    def __init__(self):
        super().__init__()
        super(DelugeRPCProtocol, self).__init__()
        # namedtuple subclass with auth_level, username for the connected session.
        self.AuthLevel = namedtuple('SessionAuthlevel', 'auth_level, username')

@@ -147,10 +134,8 @@ class DelugeRPCProtocol(DelugeTransferProtocol):

        for call in request:
            if len(call) != 4:
                log.debug(
                    'Received invalid rpc request: number of items ' 'in request is %s',
                    len(call),
                )
                log.debug('Received invalid rpc request: number of items '
                          'in request is %s', len(call))
                continue
            # log.debug('RPCRequest: %s', format_request(call))
            reactor.callLater(0, self.dispatch, *call)
@@ -167,7 +152,7 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
        try:
            self.transfer_message(data)
        except Exception as ex:
            log.warning('Error occurred when sending message: %s.', ex)
            log.warn('Error occurred when sending message: %s.', ex)
            log.exception(ex)
            raise

@@ -176,11 +161,11 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
        This method is called when a new client connects.
        """
        peer = self.transport.getPeer()
        log.info('Deluge Client connection made from: %s:%s', peer.host, peer.port)
        log.info('Deluge Client connection made from: %s:%s',
                 peer.host, peer.port)
        # Set the initial auth level of this session to AUTH_LEVEL_NONE
        self.factory.authorized_sessions[self.transport.sessionno] = self.AuthLevel(
            AUTH_LEVEL_NONE, ''
        )
        self.factory.authorized_sessions[
            self.transport.sessionno] = self.AuthLevel(AUTH_LEVEL_NONE, '')

    def connectionLost(self, reason=connectionDone):  # NOQA: N802
        """
@@ -199,9 +184,7 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
            del self.factory.interested_events[self.transport.sessionno]

        if self.factory.state == 'running':
            component.get('EventManager').emit(
                ClientDisconnectedEvent(self.factory.session_id)
            )
            component.get('EventManager').emit(ClientDisconnectedEvent(self.factory.session_id))
        log.info('Deluge client disconnected: %s', reason.value)

    def valid_session(self):
@@ -223,42 +206,32 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
        :type kwargs: dict

        """

        def send_error():
            """
            Sends an error response with the contents of the exception that was raised.
            """
            exc_type, exc_value, dummy_exc_trace = sys.exc_info()
            exceptionType, exceptionValue, dummy_exceptionTraceback = sys.exc_info()
            formated_tb = traceback.format_exc()
            try:
                self.sendData(
                    (
                self.sendData((
                        RPC_ERROR,
                        request_id,
                        exc_type.__name__,
                        exc_value._args,
                        exc_value._kwargs,
                        formated_tb,
                    )
                )
                    exceptionType.__name__,
                    exceptionValue._args,
                    exceptionValue._kwargs,
                    formated_tb
                ))
            except AttributeError:
                # This is not a deluge exception (object has no attribute '_args), let's wrap it
                log.warning(
                    'An exception occurred while sending RPC_ERROR to '
                log.warning('An exception occurred while sending RPC_ERROR to '
                            'client. Wrapping it and resending. Error to '
                            'send(causing exception goes next):\n%s',
                    formated_tb,
                )
                            'send(causing exception goes next):\n%s', formated_tb)
                try:
                    raise WrappedException(
                        str(exc_value), exc_type.__name__, formated_tb
                    )
                    raise WrappedException(str(exceptionValue), exceptionType.__name__, formated_tb)
                except WrappedException:
                    send_error()
            except Exception as ex:
                log.error(
                    'An exception occurred while sending RPC_ERROR to client: %s', ex
                )
                log.error('An exception occurred while sending RPC_ERROR to client: %s', ex)

        if method == 'daemon.info':
            # This is a special case and used in the initial connection process
@@ -274,22 +247,13 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
                    raise IncompatibleClient(deluge.common.get_version())
                ret = component.get('AuthManager').authorize(*args, **kwargs)
                if ret:
                    self.factory.authorized_sessions[self.transport.sessionno] = (
                        self.AuthLevel(ret, args[0])
                    )
                    self.factory.authorized_sessions[
                        self.transport.sessionno] = self.AuthLevel(ret, args[0])
                    self.factory.session_protocols[self.transport.sessionno] = self
            except Exception as ex:
                send_error()
                if not isinstance(ex, _ClientSideRecreateError):
                    log.exception(ex)
                if isinstance(ex, BadLoginError):
                    peer = self.transport.getPeer()
                    log.error(
                        'Deluge client authentication error made from: %s:%s (%s)',
                        peer.host,
                        peer.port,
                        str(ex),
                    )
            else:
                self.sendData((RPC_RESPONSE, request_id, (ret)))
                if not ret:
@@ -326,15 +290,11 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
        log.debug('RPC dispatch %s', method)
        try:
            method_auth_requirement = self.factory.methods[method]._rpcserver_auth_level
            auth_level = self.factory.authorized_sessions[
                self.transport.sessionno
            ].auth_level
            auth_level = self.factory.authorized_sessions[self.transport.sessionno].auth_level
            if auth_level < method_auth_requirement:
                # This session is not allowed to call this method
                log.debug(
                    'Session %s is attempting an unauthorized method call!',
                    self.transport.sessionno,
                )
                log.debug('Session %s is attempting an unauthorized method call!',
                          self.transport.sessionno)
                raise NotAuthorizedError(auth_level, method_auth_requirement)
            # Set the session_id in the factory so that methods can know
            # which session is calling it.
@@ -350,7 +310,6 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
            # Check if the return value is a deferred, since we'll need to
            # wait for it to fire before sending the RPC_RESPONSE
            if isinstance(ret, defer.Deferred):

                def on_success(result):
                    try:
                        self.sendData((RPC_RESPONSE, request_id, result))
@@ -420,13 +379,8 @@ class RPCServer(component.Component):
        # Check for SSL keys and generate some if needed
        check_ssl_keys()

        cert = os.path.join(deluge.configmanager.get_config_dir('ssl'), 'daemon.cert')
        pkey = os.path.join(deluge.configmanager.get_config_dir('ssl'), 'daemon.pkey')

        try:
            reactor.listenSSL(
                port, self.factory, get_context_factory(cert, pkey), interface=hostname
            )
            reactor.listenSSL(port, self.factory, ServerContextFactory(), interface=hostname)
        except Exception as ex:
            log.debug('Daemon already running or port not available.: %s', ex)
            raise
@@ -553,8 +507,8 @@ class RPCServer(component.Component):
        :type event: :class:`deluge.event.DelugeEvent`
        """
        log.debug('intevents: %s', self.factory.interested_events)
        # Use copy of `interested_events` since it can mutate while iterating.
        for session_id, interest in self.factory.interested_events.copy().items():
        # Find sessions interested in this event
        for session_id, interest in self.factory.interested_events.items():
            if event.name in interest:
                log.debug('Emit Event: %s %s', event.name, event.args)
                # This session is interested so send a RPC_EVENT
@@ -572,35 +526,73 @@ class RPCServer(component.Component):
        :type event: :class:`deluge.event.DelugeEvent`
        """
        if not self.is_session_valid(session_id):
            log.debug(
                'Session ID %s is not valid. Not sending event "%s".',
                session_id,
                event.name,
            )
            log.debug('Session ID %s is not valid. Not sending event "%s".', session_id, event.name)
            return
        if session_id not in self.factory.interested_events:
            log.debug(
                'Session ID %s is not interested in any events. Not sending event "%s".',
                session_id,
                event.name,
            )
            log.debug('Session ID %s is not interested in any events. Not sending event "%s".',
                      session_id, event.name)
            return
        if event.name not in self.factory.interested_events[session_id]:
            log.debug(
                'Session ID %s is not interested in event "%s". Not sending it.',
                session_id,
                event.name,
            )
            log.debug('Session ID %s is not interested in event "%s". Not sending it.', session_id, event.name)
            return
        log.debug(
            'Sending event "%s" with args "%s" to session id "%s".',
            event.name,
            event.args,
            session_id,
        )
        self.factory.session_protocols[session_id].sendData(
            (RPC_EVENT, event.name, event.args)
        )
        log.debug('Sending event "%s" with args "%s" to session id "%s".',
                  event.name, event.args, session_id)
        self.factory.session_protocols[session_id].sendData((RPC_EVENT, event.name, event.args))

    def stop(self):
        self.factory.state = 'stopping'


def check_ssl_keys():
    """
    Check for SSL cert/key and create them if necessary
    """
    ssl_dir = deluge.configmanager.get_config_dir('ssl')
    if not os.path.exists(ssl_dir):
        # The ssl folder doesn't exist so we need to create it
        os.makedirs(ssl_dir)
        generate_ssl_keys()
    else:
        for f in ('daemon.pkey', 'daemon.cert'):
            if not os.path.exists(os.path.join(ssl_dir, f)):
                generate_ssl_keys()
                break


def generate_ssl_keys():
    """
    This method generates a new SSL key/cert.
    """
    from deluge.common import PY2
    digest = 'sha256' if not PY2 else b'sha256'

    # Generate key pair
    pkey = crypto.PKey()
    pkey.generate_key(crypto.TYPE_RSA, 2048)

    # Generate cert request
    req = crypto.X509Req()
    subj = req.get_subject()
    setattr(subj, 'CN', 'Deluge Daemon')
    req.set_pubkey(pkey)
    req.sign(pkey, digest)

    # Generate certificate
    cert = crypto.X509()
    cert.set_serial_number(0)
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(60 * 60 * 24 * 365 * 3)  # Three Years
    cert.set_issuer(req.get_subject())
    cert.set_subject(req.get_subject())
    cert.set_pubkey(req.get_pubkey())
    cert.sign(pkey, digest)

    # Write out files
    ssl_dir = deluge.configmanager.get_config_dir('ssl')
    with open(os.path.join(ssl_dir, 'daemon.pkey'), 'wb') as _file:
        _file.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
    with open(os.path.join(ssl_dir, 'daemon.cert'), 'wb') as _file:
        _file.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
    # Make the files only readable by this user
    for f in ('daemon.pkey', 'daemon.cert'):
        os.chmod(os.path.join(ssl_dir, f), stat.S_IREAD | stat.S_IWRITE)
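# Sketch of the three message shapes this protocol exchanges, taken from the
# sendData() calls above. The numeric constants and concrete payload values
# below are assumptions for illustration only.
RPC_RESPONSE, RPC_ERROR, RPC_EVENT = 1, 2, 3  # values assumed
response = (RPC_RESPONSE, 42, {'result': 'ok'})
error = (RPC_ERROR, 42, 'NotAuthorizedError', (1, 5), {}, 'Traceback (most recent call last): ...')
event = (RPC_EVENT, 'TorrentStateChangedEvent', ['<torrent_id>', 'Paused'])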
@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
#
@@ -13,12 +14,11 @@ Attributes:

"""

from __future__ import division, unicode_literals

import logging
import os
import socket
import time
from typing import Optional
from urllib.parse import urlparse

from twisted.internet.defer import Deferred, DeferredList

@@ -28,11 +28,19 @@ from deluge.common import decode_bytes
from deluge.configmanager import ConfigManager, get_config_dir
from deluge.core.authmanager import AUTH_LEVEL_ADMIN
from deluge.decorators import deprecated
from deluge.event import (
    TorrentFolderRenamedEvent,
    TorrentStateChangedEvent,
    TorrentTrackerStatusEvent,
)
from deluge.event import TorrentFolderRenamedEvent, TorrentStateChangedEvent, TorrentTrackerStatusEvent

try:
    from urllib.parse import urlparse
except ImportError:
    # PY2 fallback
    from urlparse import urlparse  # pylint: disable=ungrouped-imports

try:
    from future_builtins import zip
except ImportError:
    # Ignore on Py3.
    pass

log = logging.getLogger(__name__)

@@ -44,7 +52,7 @@ LT_TORRENT_STATE_MAP = {
    'finished': 'Seeding',
    'seeding': 'Seeding',
    'allocating': 'Allocating',
    'checking_resume_data': 'Checking',
    'checking_resume_data': 'Checking'
}


@@ -57,7 +65,6 @@ def sanitize_filepath(filepath, folder=False):
    Args:
        folder (bool): A trailing slash is appended to the returned filepath.
    """

    def clean_filename(filename):
        """Strips whitespace and discards dotted filenames"""
        filename = filename.strip()
@@ -82,7 +89,7 @@ def convert_lt_files(files):
    """Indexes and decodes files from libtorrent get_files().

    Args:
        files (file_storage): The libtorrent torrent files.
        files (list): The libtorrent torrent files.

    Returns:
        list of dict: The files.
@@ -97,20 +104,18 @@ def convert_lt_files(files):
        }
    """
    filelist = []
    for index in range(files.num_files()):
    for index, _file in enumerate(files):
        try:
            file_path = files.file_path(index).decode('utf8')
            file_path = _file.path.decode('utf8')
        except AttributeError:
            file_path = files.file_path(index)
            file_path = _file.path

        filelist.append(
            {
        filelist.append({
                'index': index,
                'path': file_path.replace('\\', '/'),
                'size': files.file_size(index),
                'offset': files.file_offset(index),
            }
        )
            'size': _file.size,
            'offset': _file.offset
        })

    return filelist
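# Minimal usage sketch for convert_lt_files() above, assuming a libtorrent
# handle is at hand (the handle variable is an assumption, not part of the diff):
ti = handle.torrent_file()          # develop-branch API for the torrent_info
files = convert_lt_files(ti.files())
for f in files:
    print(f['index'], f['path'], f['size'], f['offset'])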
@@ -123,7 +128,7 @@ class TorrentOptions(dict):
        auto_managed (bool): Set torrent to auto managed mode, i.e. will be started or queued automatically.
        download_location (str): The path for the torrent data to be stored while downloading.
        file_priorities (list of int): The priority for files in torrent, range is [0..7] however
            only [0, 1, 4, 7] are normally used and correspond to [Skip, Low, Normal, High]
            only [0, 1, 5, 7] are normally used and correspond to [Do Not Download, Normal, High, Highest]
        mapped_files (dict): A mapping of the renamed filenames in 'index:filename' pairs.
        max_connections (int): Sets maximum number of connections this torrent will open.
            This must be at least 2. The default is unlimited (-1).
@@ -147,9 +152,8 @@ class TorrentOptions(dict):
        stop_ratio (float): The seeding ratio to stop (or remove) the torrent at.
        super_seeding (bool): Enable super seeding/initial seeding.
    """

    def __init__(self):
        super().__init__()
        super(TorrentOptions, self).__init__()
        config = ConfigManager('core.conf').config
        options_conf_map = {
            'add_paused': 'add_paused',
@@ -168,7 +172,7 @@ class TorrentOptions(dict):
            'shared': 'shared',
            'stop_at_ratio': 'stop_seed_at_ratio',
            'stop_ratio': 'stop_seed_ratio',
            'super_seeding': 'super_seeding',
            'super_seeding': 'super_seeding'
        }
        for opt_k, conf_k in options_conf_map.items():
            self[opt_k] = config[conf_k]
@@ -179,14 +183,14 @@ class TorrentOptions(dict):
            self['seed_mode'] = False


class TorrentError:
class TorrentError(object):
    def __init__(self, error_message, was_paused=False, restart_to_resume=False):
        self.error_message = error_message
        self.was_paused = was_paused
        self.restart_to_resume = restart_to_resume


class Torrent:
class Torrent(object):
    """Torrent holds information about torrents added to the libtorrent session.

    Args:
@@ -194,12 +198,12 @@ class Torrent:
        options (dict): The torrent options.
        state (TorrentState): The torrent state.
        filename (str): The filename of the torrent file.
        magnet (str): The magnet URI.
        magnet (str): The magnet uri.

    Attributes:
        torrent_id (str): The torrent_id for this torrent
        handle: Holds the libtorrent torrent handle
        magnet (str): The magnet URI used to add this torrent (if available).
        magnet (str): The magnet uri used to add this torrent (if available).
        status: Holds status info so that we don"t need to keep getting it from libtorrent.
        torrent_info: store the torrent info.
        has_metadata (bool): True if the metadata for the torrent is available, False otherwise.
@@ -223,7 +227,6 @@ class Torrent:
            we can re-pause it after its done if necessary
        forced_error (TorrentError): Keep track if we have forced this torrent to be in Error state.
    """

    def __init__(self, handle, options, state=None, filename=None, magnet=None):
        self.torrent_id = str(handle.info_hash())
        if log.isEnabledFor(logging.DEBUG):
@@ -234,12 +237,12 @@ class Torrent:
        self.rpcserver = component.get('RPCServer')

        self.handle = handle
        self.handle.resolve_countries(True)

        self.magnet = magnet
        self._status: Optional['lt.torrent_status'] = None
        self._status_last_update: float = 0.0
        self.status = self.handle.status()

        self.torrent_info = self.handle.torrent_file()
        self.torrent_info = self.handle.get_torrent_info()
        self.has_metadata = self.status.has_metadata

        self.options = TorrentOptions()
@@ -255,9 +258,6 @@ class Torrent:
        self.is_finished = False
        self.filename = filename

        if not self.filename:
            self.filename = ''

        self.forced_error = None
        self.statusmsg = None
        self.state = None
@@ -270,6 +270,7 @@ class Torrent:
        self.prev_status = {}
        self.waiting_on_folder_rename = []

        self.update_status(self.handle.status())
        self._create_status_funcs()
        self.set_options(self.options)
        self.update_state()
@@ -277,18 +278,6 @@ class Torrent:
        if log.isEnabledFor(logging.DEBUG):
            log.debug('Torrent object created.')

    def _set_handle_flags(self, flag: lt.torrent_flags, set_flag: bool):
        """set or unset a flag to the lt handle

        Args:
            flag (lt.torrent_flags): the flag to set/unset
            set_flag (bool): True for setting the flag, False for unsetting it
        """
        if set_flag:
            self.handle.set_flags(flag)
        else:
            self.handle.unset_flags(flag)

    def on_metadata_received(self):
        """Process the metadata received alert for this torrent"""
        self.has_metadata = True
@@ -307,9 +296,7 @@ class Torrent:

        # Skip set_prioritize_first_last if set_file_priorities is in options as it also calls the method.
        if 'file_priorities' in options and 'prioritize_first_last_pieces' in options:
            self.options['prioritize_first_last_pieces'] = options.pop(
                'prioritize_first_last_pieces'
            )
            self.options['prioritize_first_last_pieces'] = options.pop('prioritize_first_last_pieces')

        for key, value in options.items():
            if key in self.options:
@@ -373,7 +360,7 @@ class Torrent:
        """Sets maximum download speed for this torrent.

        Args:
            m_down_speed (float): Maximum download speed in KiB/s.
            m_up_speed (float): Maximum download speed in KiB/s.
        """
        self.options['max_download_speed'] = m_down_speed
        if m_down_speed < 0:
@@ -405,7 +392,7 @@ class Torrent:
            return

        # A list of priorities for each piece in the torrent
        priorities = self.handle.get_piece_priorities()
        priorities = self.handle.piece_priorities()

        def get_file_piece(idx, byte_offset):
            return self.torrent_info.map_file(idx, byte_offset, 0).piece
@@ -421,27 +408,20 @@ class Torrent:

        # Set the pieces in first and last ranges to priority 7
        # if they are not marked as do not download
        priorities[first_start:first_end] = [
            p and 7 for p in priorities[first_start:first_end]
        ]
        priorities[last_start:last_end] = [
            p and 7 for p in priorities[last_start:last_end]
        ]
        priorities[first_start:first_end] = [p and 7 for p in priorities[first_start:first_end]]
        priorities[last_start:last_end] = [p and 7 for p in priorities[last_start:last_end]]

        # Setting the priorites for all the pieces of this torrent
        self.handle.prioritize_pieces(priorities)

    def set_sequential_download(self, sequential):
    def set_sequential_download(self, set_sequencial):
        """Sets whether to download the pieces of the torrent in order.

        Args:
            sequential (bool): Enable sequential downloading.
            set_sequencial (bool): Enable sequencial downloading.
        """
        self.options['sequential_download'] = sequential
        self._set_handle_flags(
            flag=lt.torrent_flags.sequential_download,
            set_flag=sequential,
        )
        self.options['sequential_download'] = set_sequencial
        self.handle.set_sequential_download(set_sequencial)

    def set_auto_managed(self, auto_managed):
        """Set auto managed mode, i.e. will be started or queued automatically.
@@ -451,10 +431,7 @@ class Torrent:
        """
        self.options['auto_managed'] = auto_managed
        if not (self.status.paused and not self.status.auto_managed):
            self._set_handle_flags(
                flag=lt.torrent_flags.auto_managed,
                set_flag=auto_managed,
            )
            self.handle.auto_managed(auto_managed)
            self.update_state()

    def set_super_seeding(self, super_seeding):
@@ -463,11 +440,11 @@ class Torrent:
        Args:
            super_seeding (bool): Enable super seeding.
        """
        if self.status.is_seeding:
            self.options['super_seeding'] = super_seeding
            self._set_handle_flags(
                flag=lt.torrent_flags.super_seeding,
                set_flag=super_seeding,
            )
            self.handle.super_seeding(super_seeding)
        else:
            self.options['super_seeding'] = False

    def set_stop_ratio(self, stop_ratio):
        """The seeding ratio to stop (or remove) the torrent at.
@@ -516,35 +493,32 @@ class Torrent:
        Args:
            file_priorities (list of int): List of file priorities.
        """
        if not self.has_metadata:
            return

        if log.isEnabledFor(logging.DEBUG):
            log.debug(
                'Setting %s file priorities to: %s', self.torrent_id, file_priorities
            )
            log.debug('Setting %s file priorities to: %s', self.torrent_id, file_priorities)

        if file_priorities and len(file_priorities) == len(self.get_files()):
        if (self.handle.has_metadata() and file_priorities and
                len(file_priorities) == len(self.get_files())):
            self.handle.prioritize_files(file_priorities)
        else:
            log.debug('Unable to set new file priorities.')
            file_priorities = self.handle.get_file_priorities()
            file_priorities = self.handle.file_priorities()

        if 0 in self.options['file_priorities']:
            # Previously marked a file 'skip' so check for any 0's now >0.
            # Previously marked a file 'Do Not Download' so check if changed any 0's to >0.
            for index, priority in enumerate(self.options['file_priorities']):
                if priority == 0 and file_priorities[index] > 0:
                    # Changed priority from skip to download so update state.
                    # Changed 'Do Not Download' to a download priority so update state.
                    self.is_finished = False
                    self.update_state()
                    break

        # Store the priorities.
        self.options['file_priorities'] = file_priorities
        # Ensure stored options are in sync in case file_priorities were faulty (old state?).
        self.options['file_priorities'] = self.handle.file_priorities()

        # Set the first/last priorities if needed.
        if self.options['prioritize_first_last_pieces']:
            self.set_prioritize_first_last_pieces(True)
        self.set_prioritize_first_last_pieces(self.options['prioritize_first_last_pieces'])

    @deprecated
    def set_save_path(self, download_location):
@@ -578,7 +552,7 @@ class Torrent:
            trackers (list of dicts): A list of trackers.
        """
        if trackers is None:
            self.trackers = list(self.handle.trackers())
            self.trackers = [tracker for tracker in self.handle.trackers()]
            self.tracker_host = None
            return

@@ -620,16 +594,11 @@ class Torrent:

        if self.tracker_status != status:
            self.tracker_status = status
            component.get('EventManager').emit(
                TorrentTrackerStatusEvent(self.torrent_id, self.tracker_status)
            )
            component.get('EventManager').emit(TorrentTrackerStatusEvent(self.torrent_id, self.tracker_status))

    def merge_trackers(self, torrent_info):
        """Merges new trackers in torrent_info into torrent"""
        log.info(
            'Adding any new trackers to torrent (%s) already in session...',
            self.torrent_id,
        )
        log.info('Adding any new trackers to torrent (%s) already in session...', self.torrent_id)
        if not torrent_info:
            return
        # Don't merge trackers if either torrent has private flag set.
@@ -643,7 +612,7 @@ class Torrent:

    def update_state(self):
        """Updates the state, based on libtorrent's torrent state"""
        status = self.get_lt_status()
        status = self.handle.status()
        session_paused = component.get('Core').session.is_paused()
        old_state = self.state
        self.set_status_message()
@@ -655,10 +624,7 @@ class Torrent:
        elif status_error:
            self.state = 'Error'
            # auto-manage status will be reverted upon resuming.
            self._set_handle_flags(
                flag=lt.torrent_flags.auto_managed,
                set_flag=False,
            )
            self.handle.auto_managed(False)
            self.set_status_message(decode_bytes(status_error))
        elif status.moving_storage:
            self.state = 'Moving'
@@ -670,23 +636,13 @@ class Torrent:
            self.state = LT_TORRENT_STATE_MAP.get(str(status.state), str(status.state))

        if self.state != old_state:
            component.get('EventManager').emit(
                TorrentStateChangedEvent(self.torrent_id, self.state)
            )
            component.get('EventManager').emit(TorrentStateChangedEvent(self.torrent_id, self.state))

        if log.isEnabledFor(logging.DEBUG):
            log.debug(
                'State from lt was: %s | Session is paused: %s\nTorrent state set from "%s" to "%s" (%s)',
                'error' if status_error else status.state,
                session_paused,
                old_state,
                self.state,
                self.torrent_id,
            )
            log.debug('State from lt was: %s | Session is paused: %s\nTorrent state set from "%s" to "%s" (%s)',
                      'error' if status_error else status.state, session_paused, old_state, self.state, self.torrent_id)
            if self.forced_error:
                log.debug(
                    'Torrent Error state message: %s', self.forced_error.error_message
                )
                log.debug('Torrent Error state message: %s', self.forced_error.error_message)

    def set_status_message(self, message=None):
        """Sets the torrent status message.
@@ -711,11 +667,8 @@ class Torrent:
            restart_to_resume (bool, optional): Prevent resuming clearing the error, only restarting
                session can resume.
        """
        status = self.get_lt_status()
        self._set_handle_flags(
            flag=lt.torrent_flags.auto_managed,
            set_flag=False,
        )
        status = self.handle.status()
        self.handle.auto_managed(False)
        self.forced_error = TorrentError(message, status.paused, restart_to_resume)
        if not status.paused:
            self.handle.pause()
@@ -729,10 +682,7 @@ class Torrent:
            log.error('Restart deluge to clear this torrent error')

        if not self.forced_error.was_paused and self.options['auto_managed']:
            self._set_handle_flags(
                flag=lt.torrent_flags.auto_managed,
                set_flag=True,
            )
            self.handle.auto_managed(True)
        self.forced_error = None
        self.set_status_message('OK')
        if update_state:
@@ -747,23 +697,16 @@ class Torrent:
        """
        status = self.status
        eta = 0
        if (
            self.is_finished
            and self.options['stop_at_ratio']
            and status.upload_payload_rate
        ):
        if self.is_finished and self.options['stop_at_ratio'] and status.upload_payload_rate:
            # We're a seed, so calculate the time to the 'stop_share_ratio'
            eta = (
                int(status.all_time_download * self.options['stop_ratio'])
                - status.all_time_upload
            ) // status.upload_payload_rate
            eta = ((status.all_time_download * self.options['stop_ratio']) -
                   status.all_time_upload) // status.upload_payload_rate
        elif status.download_payload_rate:
            left = status.total_wanted - status.total_wanted_done
            if left > 0:
                eta = left // status.download_payload_rate

        # Limit to 1 year, avoid excessive values and prevent GTK int overflow.
        return eta if eta < 31557600 else -1
        return eta

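# Worked example of the seeding-ETA branch above, with assumed numbers:
# stop_ratio 2.0, 500 MiB downloaded, 700 MiB uploaded, uploading at 1 MiB/s.
all_time_download = 500 * 1024 * 1024
all_time_upload = 700 * 1024 * 1024
stop_ratio = 2.0
upload_payload_rate = 1024 * 1024
eta = (int(all_time_download * stop_ratio) - all_time_upload) // upload_payload_rate
print(eta)  # 300 seconds until the 2.0 share ratio is reached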
    def get_ratio(self):
        """Get the ratio of upload/download for this torrent.
@@ -831,37 +774,27 @@ class Torrent:
            if peer.flags & peer.connecting or peer.flags & peer.handshake:
                continue

            try:
                client = decode_bytes(peer.client)
            except UnicodeDecodeError:
                # libtorrent on Py3 can raise UnicodeDecodeError for peer_info.client
                client = 'unknown'

            try:
                country = component.get('Core').geoip_instance.country_code_by_addr(
                    peer.ip[0]
                )
                country = component.get('Core').geoip_instance.country_code_by_addr(peer.ip[0])
            except AttributeError:
                country = ''
            else:
                try:
                    country = ''.join(
                        [char if char.isalpha() else ' ' for char in country]
                    )
                    country = ''.join([char if char.isalpha() else ' ' for char in country])
                except TypeError:
                    country = ''

            ret.append(
                {
            ret.append({
                    'client': client,
                    'country': country,
                    'down_speed': peer.payload_down_speed,
                    'ip': f'{peer.ip[0]}:{peer.ip[1]}',
                'ip': '%s:%s' % (peer.ip[0], peer.ip[1]),
                    'progress': peer.progress,
                    'seed': peer.flags & peer.seed,
                    'up_speed': peer.payload_up_speed,
                }
            )
            })

        return ret

@@ -875,7 +808,7 @@ class Torrent:

    def get_file_priorities(self):
        """Return the file priorities"""
        if not self.handle.status().has_metadata:
        if not self.handle.has_metadata():
            return []

        if not self.options['file_priorities']:
@@ -892,19 +825,8 @@ class Torrent:
        """
        if not self.has_metadata:
            return []

        try:
            files_progresses = zip(
                self.handle.file_progress(), self.torrent_info.files()
            )
        except Exception:
            # Handle libtorrent >=2.0.0,<=2.0.4 file_progress error
            files_progresses = zip(iter(lambda: 0, 1), self.torrent_info.files())

        return [
            progress / _file.size if _file.size else 0.0
            for progress, _file in files_progresses
        ]
        return [progress / _file.size if _file.size else 0.0 for progress, _file in
                zip(self.handle.file_progress(), self.torrent_info.files())]

    def get_tracker_host(self):
        """Get the hostname of the currently connected tracker.
@@ -924,11 +846,11 @@ class Torrent:
        if tracker:
            url = urlparse(tracker.replace('udp://', 'http://'))
            if hasattr(url, 'hostname'):
                host = url.hostname or 'DHT'
                host = (url.hostname or 'DHT')
                # Check if hostname is an IP address and just return it if that's the case
                try:
                    socket.inet_aton(host)
                except OSError:
                except socket.error:
                    pass
                else:
                    # This is an IP address because an exception wasn't raised
@@ -945,7 +867,7 @@ class Torrent:
        return ''

    def get_magnet_uri(self):
        """Returns a magnet URI for this torrent"""
        """Returns a magnet uri for this torrent"""
        return lt.make_magnet_uri(self.handle)

    def get_name(self):
@@ -959,18 +881,14 @@ class Torrent:
            str: the name of the torrent.

        """
        if self.options['name']:
            return self.options['name']

        if self.has_metadata:
            # Use the top-level folder as torrent name.
            filename = decode_bytes(self.torrent_info.files().file_path(0))
            name = filename.replace('\\', '/', 1).split('/', 1)[0]
        if not self.options['name']:
            handle_name = self.handle.name()
            if handle_name:
                name = decode_bytes(handle_name)
        else:
            name = decode_bytes(self.handle.status().name)

        if not name:
            name = self.torrent_id
        else:
            name = self.options['name']

        return name

@@ -1019,14 +937,12 @@ class Torrent:
            call to get_status based on the session_id
            update (bool): If True the status will be updated from libtorrent
                if False, the cached values will be returned
            all_keys (bool): If True return all keys while ignoring the keys param
                if False, return only the requested keys

        Returns:
            dict: a dictionary of the status keys and their values
        """
        if update:
            self.get_lt_status()
            self.update_status(self.handle.status())

        if all_keys:
            keys = list(self.status_funcs)
@@ -1056,35 +972,13 @@ class Torrent:

        return status_dict

    def get_lt_status(self) -> 'lt.torrent_status':
        """Get the torrent status fresh, not from cache.

        This should be used when a guaranteed fresh status is needed rather than
        `torrent.handle.status()` because it will update the cache as well.
        """
        self.status = self.handle.status()
        return self.status

    @property
    def status(self) -> 'lt.torrent_status':
        """Cached copy of the libtorrent status for this torrent.

        If it has not been updated within the last five seconds, it will be
        automatically refreshed.
        """
        if self._status_last_update < (time.time() - 5):
            self.status = self.handle.status()
        return self._status

    @status.setter
    def status(self, status: 'lt.torrent_status') -> None:
    def update_status(self, status):
        """Updates the cached status.

        Args:
            status: a libtorrent torrent status
            status (libtorrent.torrent_status): a libtorrent torrent status
        """
        self._status = status
        self._status_last_update = time.time()
        self.status = status

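# Sketch of how the cached status property above behaves (the torrent variable
# is an assumption): reads within roughly five seconds of the last update reuse
# the cached libtorrent status, while get_lt_status() always refreshes it.
status = torrent.status          # cached if updated within the last 5 seconds
fresh = torrent.get_lt_status()  # forces handle.status() and updates the cache
print(fresh.paused, status.paused)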
    def _create_status_funcs(self):
        """Creates the functions for getting torrent status"""
@@ -1093,9 +987,7 @@ class Torrent:
            'seeding_time': lambda: self.status.seeding_time,
            'finished_time': lambda: self.status.finished_time,
            'all_time_download': lambda: self.status.all_time_download,
            'storage_mode': lambda: self.status.storage_mode.name.split('_')[
                2
            ],  # sparse or allocate
            'storage_mode': lambda: self.status.storage_mode.name.split('_')[2],  # sparse or allocate
            'distributed_copies': lambda: max(0.0, self.status.distributed_copies),
            'download_payload_rate': lambda: self.status.download_payload_rate,
            'file_priorities': self.get_file_priorities,
@@ -1108,12 +1000,8 @@ class Torrent:
            'max_upload_slots': lambda: self.options['max_upload_slots'],
            'max_upload_speed': lambda: self.options['max_upload_speed'],
            'message': lambda: self.statusmsg,
            'move_on_completed_path': lambda: self.options[
                'move_completed_path'
            ],  # Deprecated: move_completed_path
            'move_on_completed': lambda: self.options[
                'move_completed'
            ],  # Deprecated: Use move_completed
            'move_on_completed_path': lambda: self.options['move_completed_path'],  # Deprecated: move_completed_path
            'move_on_completed': lambda: self.options['move_completed'],  # Deprecated: Use move_completed
            'move_completed_path': lambda: self.options['move_completed_path'],
            'move_completed': lambda: self.options['move_completed'],
            'next_announce': lambda: self.status.next_announce.seconds,
@@ -1121,25 +1009,17 @@ class Torrent:
            'num_seeds': lambda: self.status.num_seeds,
            'owner': lambda: self.options['owner'],
            'paused': lambda: self.status.paused,
            'prioritize_first_last': lambda: self.options[
                'prioritize_first_last_pieces'
            ],
            'prioritize_first_last': lambda: self.options['prioritize_first_last_pieces'],
            # Deprecated: Use prioritize_first_last_pieces
            'prioritize_first_last_pieces': lambda: self.options[
                'prioritize_first_last_pieces'
            ],
            'prioritize_first_last_pieces': lambda: self.options['prioritize_first_last_pieces'],
            'sequential_download': lambda: self.options['sequential_download'],
            'progress': self.get_progress,
            'shared': lambda: self.options['shared'],
            'remove_at_ratio': lambda: self.options['remove_at_ratio'],
            'save_path': lambda: self.options[
                'download_location'
            ],  # Deprecated: Use download_location
            'save_path': lambda: self.options['download_location'],  # Deprecated: Use download_location
            'download_location': lambda: self.options['download_location'],
            'seeds_peers_ratio': lambda: -1.0
            if self.status.num_incomplete == 0
            # Use -1.0 to signify infinity
            else (self.status.num_complete / self.status.num_incomplete),
            'seeds_peers_ratio': lambda: -1.0 if self.status.num_incomplete == 0 else (  # Use -1.0 to signify infinity
                self.status.num_complete / self.status.num_incomplete),
            'seed_rank': lambda: self.status.seed_rank,
            'state': lambda: self.state,
            'stop_at_ratio': lambda: self.options['stop_at_ratio'],
@@ -1152,32 +1032,19 @@ class Torrent:
            'total_seeds': lambda: self.status.num_complete,
            'total_uploaded': lambda: self.status.all_time_upload,
            'total_wanted': lambda: self.status.total_wanted,
            'total_remaining': lambda: self.status.total_wanted
            - self.status.total_wanted_done,
            'total_remaining': lambda: self.status.total_wanted - self.status.total_wanted_done,
            'tracker': lambda: self.status.current_tracker,
            'tracker_host': self.get_tracker_host,
            'trackers': lambda: self.trackers,
            'tracker_status': lambda: self.tracker_status,
            'upload_payload_rate': lambda: self.status.upload_payload_rate,
            'comment': lambda: decode_bytes(self.torrent_info.comment())
            if self.has_metadata
            else '',
            'creator': lambda: decode_bytes(self.torrent_info.creator())
            if self.has_metadata
            else '',
            'num_files': lambda: self.torrent_info.num_files()
            if self.has_metadata
            else 0,
            'num_pieces': lambda: self.torrent_info.num_pieces()
            if self.has_metadata
            else 0,
            'piece_length': lambda: self.torrent_info.piece_length()
            if self.has_metadata
            else 0,
            'comment': lambda: decode_bytes(self.torrent_info.comment()) if self.has_metadata else '',
            'creator': lambda: decode_bytes(self.torrent_info.creator()) if self.has_metadata else '',
            'num_files': lambda: self.torrent_info.num_files() if self.has_metadata else 0,
            'num_pieces': lambda: self.torrent_info.num_pieces() if self.has_metadata else 0,
            'piece_length': lambda: self.torrent_info.piece_length() if self.has_metadata else 0,
            'private': lambda: self.torrent_info.priv() if self.has_metadata else False,
            'total_size': lambda: self.torrent_info.total_size()
            if self.has_metadata
            else 0,
            'total_size': lambda: self.torrent_info.total_size() if self.has_metadata else 0,
            'eta': self.get_eta,
            'file_progress': self.get_file_progress,
            'files': self.get_files,
@@ -1194,7 +1061,7 @@ class Torrent:
            'super_seeding': lambda: self.status.super_seeding,
            'time_since_download': lambda: self.status.time_since_download,
            'time_since_upload': lambda: self.status.time_since_upload,
            'time_since_transfer': self.get_time_since_transfer,
            'time_since_transfer': self.get_time_since_transfer
        }

    def pause(self):
@@ -1205,48 +1072,37 @@ class Torrent:

"""
# Turn off auto-management so the torrent will not be unpaused by lt queueing
self._set_handle_flags(
flag=lt.torrent_flags.auto_managed,
set_flag=False,
)
self.handle.auto_managed(False)
if self.state == 'Error':
log.debug('Unable to pause torrent while in Error state')
return False
elif self.status.paused:
# This torrent was probably paused due to being auto managed by lt
# Since we turned auto_managed off, we should update the state which should
# show it as 'Paused'. We need to emit a torrent_paused signal because
# the torrent_paused alert from libtorrent will not be generated.
self.update_state()
component.get('EventManager').emit(
TorrentStateChangedEvent(self.torrent_id, 'Paused')
)
component.get('EventManager').emit(TorrentStateChangedEvent(self.torrent_id, 'Paused'))
else:
try:
self.handle.pause()
except RuntimeError as ex:
log.debug('Unable to pause torrent: %s', ex)
return False
return True

def resume(self):
"""Resumes this torrent."""
if self.status.paused and self.status.auto_managed:
log.debug('Resume not possible for auto-managed torrent!')
elif self.forced_error and self.forced_error.was_paused:
log.debug(
'Resume skipped for forced_error torrent as it was originally paused.'
)
elif (
self.status.is_finished
and self.options['stop_at_ratio']
and self.get_ratio() >= self.options['stop_ratio']
):
log.debug('Resume skipped for forced_error torrent as it was originally paused.')
elif (self.status.is_finished and self.options['stop_at_ratio'] and
self.get_ratio() >= self.options['stop_ratio']):
log.debug('Resume skipped for torrent as it has reached "stop_seed_ratio".')
else:
# Check if torrent was originally being auto-managed.
if self.options['auto_managed']:
self._set_handle_flags(
flag=lt.torrent_flags.auto_managed,
set_flag=True,
)
self.handle.auto_managed(True)
try:
self.handle.resume()
except RuntimeError as ex:
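For reference, the guards in resume() above reduce to a small predicate. A hedged restatement with plain arguments in place of the libtorrent status and Deluge options objects (the forced_error case is left out):

def resume_is_skipped(paused, auto_managed, is_finished, stop_at_ratio, ratio, stop_ratio):
    """Mirror the early-exit conditions of Torrent.resume() shown above."""
    if paused and auto_managed:
        return True  # libtorrent queueing controls this torrent, not us
    if is_finished and stop_at_ratio and ratio >= stop_ratio:
        return True  # already past the configured stop_seed_ratio
    return False

# A finished torrent at ratio 2.1 with stop_ratio 2.0 stays paused:
print(resume_is_skipped(False, False, True, True, 2.1, 2.0))  # True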
@@ -1269,8 +1125,8 @@ class Torrent:
bool: True is successful, otherwise False
"""
try:
self.handle.connect_peer((peer_ip, int(peer_port)), 0)
except (RuntimeError, ValueError) as ex:
self.handle.connect_peer((peer_ip, peer_port), 0)
except RuntimeError as ex:
log.debug('Unable to connect to peer: %s', ex)
return False
return True
@@ -1291,13 +1147,9 @@ class Torrent:
try:
os.makedirs(dest)
except OSError as ex:
log.error(
'Could not move storage for torrent %s since %s does '
log.error('Could not move storage for torrent %s since %s does '
'not exist and could not create the directory: %s',
self.torrent_id,
dest,
ex,
)
self.torrent_id, dest, ex)
return False

try:
@@ -1330,9 +1182,8 @@ class Torrent:
flags = lt.save_resume_flags_t.flush_disk_cache if flush_disk_cache else 0
# Don't generate fastresume data if torrent is in a Deluge Error state.
if self.forced_error:
component.get('TorrentManager').waiting_on_resume_data[
self.torrent_id
].errback(UserWarning('Skipped creating resume_data while in Error state'))
component.get('TorrentManager').waiting_on_resume_data[self.torrent_id].errback(
UserWarning('Skipped creating resume_data while in Error state'))
else:
self.handle.save_resume_data(flags)
@@ -1350,15 +1201,16 @@ class Torrent:
try:
with open(filepath, 'wb') as save_file:
save_file.write(filedump)
except OSError as ex:
except IOError as ex:
log.error('Unable to save torrent file to: %s', ex)

filepath = os.path.join(get_config_dir(), 'state', self.torrent_id + '.torrent')

# Regenerate the file priorities
self.set_file_priorities([])
if filedump is None:
lt_ct = lt.create_torrent(self.torrent_info)
filedump = lt.bencode(lt_ct.generate())

metadata = lt.bdecode(self.torrent_info.metadata())
torrent_file = {b'info': metadata}
filedump = lt.bencode(torrent_file)
write_file(filepath, filedump)

# If the user has requested a copy of the torrent be saved elsewhere we need to do that.
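Both branches above end with bencoded torrent metadata being written to the state directory. A hedged sketch of the create_torrent path as a standalone helper; it only runs where the libtorrent Python bindings are installed, and the file paths are placeholders:

import libtorrent as lt

def dump_torrent(torrent_info, dest_path):
    """Re-serialise an existing torrent_info to a .torrent file, as in the hunk above."""
    creator = lt.create_torrent(torrent_info)
    filedump = lt.bencode(creator.generate())
    with open(dest_path, 'wb') as out:
        out.write(filedump)

# Example usage (assumes 'example.torrent' exists):
# dump_torrent(lt.torrent_info('example.torrent'), 'example-copy.torrent')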
@@ -1370,13 +1222,9 @@ class Torrent:

def delete_torrentfile(self, delete_copies=False):
"""Deletes the .torrent file in the state directory in config"""
torrent_files = [
os.path.join(get_config_dir(), 'state', self.torrent_id + '.torrent')
]
if delete_copies and self.filename:
torrent_files.append(
os.path.join(self.config['torrentfiles_location'], self.filename)
)
torrent_files = [os.path.join(get_config_dir(), 'state', self.torrent_id + '.torrent')]
if delete_copies:
torrent_files.append(os.path.join(self.config['torrentfiles_location'], self.filename))

for torrent_file in torrent_files:
log.debug('Deleting torrent file: %s', torrent_file)
@@ -1436,7 +1284,7 @@ class Torrent:
# lt needs utf8 byte-string. Otherwise if wstrings enabled, unicode string.
try:
self.handle.rename_file(index, filename.encode('utf8'))
except (UnicodeDecodeError, TypeError):
except TypeError:
self.handle.rename_file(index, filename)

def rename_folder(self, folder, new_folder):
@@ -1445,7 +1293,7 @@ class Torrent:
This basically does a file rename on all of the folders children.

Args:
folder (str): The original folder name
folder (str): The orignal folder name
new_folder (str): The new folder name

Returns:
@@ -1472,19 +1320,15 @@ class Torrent:
new_path = _file['path'].replace(folder, new_folder, 1)
try:
self.handle.rename_file(_file['index'], new_path.encode('utf8'))
except (UnicodeDecodeError, TypeError):
except TypeError:
self.handle.rename_file(_file['index'], new_path)

def on_folder_rename_complete(dummy_result, torrent, folder, new_folder):
"""Folder rename complete"""
component.get('EventManager').emit(
TorrentFolderRenamedEvent(torrent.torrent_id, folder, new_folder)
)
component.get('EventManager').emit(TorrentFolderRenamedEvent(torrent.torrent_id, folder, new_folder))
# Empty folders are removed after libtorrent folder renames
self.remove_empty_folders(folder)
torrent.waiting_on_folder_rename = [
_dir for _dir in torrent.waiting_on_folder_rename if _dir
]
torrent.waiting_on_folder_rename = [_dir for _dir in torrent.waiting_on_folder_rename if _dir]
component.get('TorrentManager').save_resume_data((self.torrent_id,))

d = DeferredList(list(wait_on_folder.values()))
@@ -1501,9 +1345,7 @@ class Torrent:
"""
# Removes leading slashes that can cause join to ignore download_location
download_location = self.options['download_location']
folder_full_path = os.path.normpath(
os.path.join(download_location, folder.lstrip('\\/'))
)
folder_full_path = os.path.normpath(os.path.join(download_location, folder.lstrip('\\/')))

try:
if not os.listdir(folder_full_path):
@@ -1514,9 +1356,7 @@ class Torrent:
for name in dirs:
try:
os.removedirs(os.path.join(root, name))
log.debug(
'Removed Empty Folder %s', os.path.join(root, name)
)
log.debug('Removed Empty Folder %s', os.path.join(root, name))
except OSError as ex:
log.debug(ex)
@@ -1539,22 +1379,16 @@ class Torrent:
pieces = None
else:
pieces = []
for piece, avail_piece in zip(
self.status.pieces, self.handle.piece_availability()
):
for piece, avail_piece in zip(self.status.pieces, self.handle.piece_availability()):
if piece:
# Completed.
pieces.append(3)
pieces.append(3) # Completed.
elif avail_piece:
# Available, just not downloaded nor being downloaded.
pieces.append(1)
pieces.append(1) # Available, just not downloaded nor being downloaded.
else:
# Missing, no known peer with piece, or not asked for yet.
pieces.append(0)
pieces.append(0) # Missing, no known peer with piece, or not asked for yet.

for peer_info in self.handle.get_peer_info():
if peer_info.downloading_piece_index >= 0:
# Being downloaded from peer.
pieces[peer_info.downloading_piece_index] = 2
pieces[peer_info.downloading_piece_index] = 2 # Being downloaded from peer.

return pieces
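The loop above encodes each piece as 0 (missing), 1 (available from peers), 2 (being downloaded) or 3 (completed). The same encoding as a small pure function, with plain lists standing in for the libtorrent status and peer info objects:

def encode_piece_states(completed, available, downloading_indexes):
    """completed/available are per-piece booleans; downloading_indexes come from peer info."""
    pieces = []
    for done, avail in zip(completed, available):
        if done:
            pieces.append(3)  # Completed.
        elif avail:
            pieces.append(1)  # Available, just not downloaded nor being downloaded.
        else:
            pieces.append(0)  # Missing, no known peer with piece.
    for index in downloading_indexes:
        if index >= 0:
            pieces[index] = 2  # Being downloaded from a peer.
    return pieces

print(encode_piece_states([True, False, False], [True, True, False], [1]))  # [3, 2, 0]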
File diff suppressed because it is too large
@@ -1,136 +0,0 @@
#
# Copyright (C) 2007,2008 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#

import os
import stat

from OpenSSL import crypto
from OpenSSL.crypto import FILETYPE_PEM
from twisted.internet.ssl import (
AcceptableCiphers,
Certificate,
CertificateOptions,
KeyPair,
TLSVersion,
)

import deluge.configmanager

# A TLS ciphers list.
# Sources for more information on TLS ciphers:
# - https://wiki.mozilla.org/Security/Server_Side_TLS
# - https://www.ssllabs.com/projects/best-practices/index.html
# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
#
# This list was inspired by the `urllib3` library
# - https://github.com/urllib3/urllib3/blob/master/urllib3/util/ssl_.py#L79
#
# The general intent is:
# - prefer cipher suites that offer perfect forward secrecy (ECDHE),
# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common,
# - disable NULL authentication, MD5 MACs and DSS for security reasons.
TLS_CIPHERS = ':'.join(
[
'ECDH+AESGCM',
'ECDH+CHACHA20',
'AES256-GCM-SHA384',
'AES128-GCM-SHA256',
'!DSS' '!aNULL',
'!eNULL',
'!MD5',
]
)

# This value tells OpenSSL to disable all SSL/TLS renegotiation.
SSL_OP_NO_RENEGOTIATION = 0x40000000


def get_context_factory(cert_path, pkey_path):
"""OpenSSL context factory.

Generates an OpenSSL context factory using Twisted's CertificateOptions class.
This will keep a server cipher order.

Args:
cert_path (string): The path to the certificate file
pkey_path (string): The path to the private key file

Returns:
twisted.internet.ssl.CertificateOptions: An OpenSSL context factory
"""

with open(cert_path) as cert:
certificate = Certificate.loadPEM(cert.read()).original
with open(pkey_path) as pkey:
private_key = KeyPair.load(pkey.read(), FILETYPE_PEM).original
ciphers = AcceptableCiphers.fromOpenSSLCipherString(TLS_CIPHERS)
cert_options = CertificateOptions(
privateKey=private_key,
certificate=certificate,
raiseMinimumTo=TLSVersion.TLSv1_2,
acceptableCiphers=ciphers,
)
ctx = cert_options.getContext()
ctx.use_certificate_chain_file(cert_path)
ctx.set_options(SSL_OP_NO_RENEGOTIATION)

return cert_options
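A hedged usage sketch for the factory above: the returned CertificateOptions can be handed straight to Twisted's listenSSL. The helper name, port and served content below are illustrative assumptions, not part of Deluge.

from twisted.internet import reactor
from twisted.web.server import Site
from twisted.web.static import Data

def serve_over_tls(cert_path, pkey_path, port=8443):
    """Hypothetical helper: serve a trivial page using get_context_factory() above."""
    ctx_factory = get_context_factory(cert_path, pkey_path)
    reactor.listenSSL(port, Site(Data(b'ok', 'text/plain')), ctx_factory)
    reactor.run()

# serve_over_tls('/path/to/daemon.cert', '/path/to/daemon.pkey')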


def check_ssl_keys():
"""
Check for SSL cert/key and create them if necessary
"""
ssl_dir = deluge.configmanager.get_config_dir('ssl')
if not os.path.exists(ssl_dir):
# The ssl folder doesn't exist so we need to create it
os.makedirs(ssl_dir)
generate_ssl_keys()
else:
for f in ('daemon.pkey', 'daemon.cert'):
if not os.path.exists(os.path.join(ssl_dir, f)):
generate_ssl_keys()
break


def generate_ssl_keys():
"""
This method generates a new SSL key/cert.
"""
digest = 'sha256'

# Generate key pair
pkey = crypto.PKey()
pkey.generate_key(crypto.TYPE_RSA, 2048)

# Generate cert request
req = crypto.X509Req()
subj = req.get_subject()
setattr(subj, 'CN', 'Deluge Daemon')
req.set_pubkey(pkey)
req.sign(pkey, digest)

# Generate certificate
cert = crypto.X509()
cert.set_serial_number(0)
cert.gmtime_adj_notBefore(0)
cert.gmtime_adj_notAfter(60 * 60 * 24 * 365 * 3) # Three Years
cert.set_issuer(req.get_subject())
cert.set_subject(req.get_subject())
cert.set_pubkey(req.get_pubkey())
cert.sign(pkey, digest)

# Write out files
ssl_dir = deluge.configmanager.get_config_dir('ssl')
with open(os.path.join(ssl_dir, 'daemon.pkey'), 'wb') as _file:
_file.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
with open(os.path.join(ssl_dir, 'daemon.cert'), 'wb') as _file:
_file.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
# Make the files only readable by this user
for f in ('daemon.pkey', 'daemon.cert'):
os.chmod(os.path.join(ssl_dir, f), stat.S_IREAD | stat.S_IWRITE)
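A hedged sketch tying the helpers in this file together: make sure the daemon key pair exists, then build a TLS context from it. The wrapper name is hypothetical; the ssl config-dir layout is the one used above, and the module context is assumed.

import os

def daemon_context_factory():
    """Hypothetical convenience wrapper around check_ssl_keys()/get_context_factory()."""
    check_ssl_keys()  # generates daemon.pkey/daemon.cert on first run
    ssl_dir = deluge.configmanager.get_config_dir('ssl')
    return get_context_factory(
        os.path.join(ssl_dir, 'daemon.cert'),
        os.path.join(ssl_dir, 'daemon.pkey'),
    )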
@ -1,3 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2010 John Garland <johnnybg+deluge@gmail.com>
|
||||
#
|
||||
|
@ -6,13 +7,12 @@
|
|||
# See LICENSE for more details.
|
||||
#
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import inspect
|
||||
import re
|
||||
import warnings
|
||||
from functools import wraps
|
||||
from typing import Any, Callable, Coroutine, TypeVar
|
||||
|
||||
from twisted.internet import defer
|
||||
|
||||
|
||||
def proxy(proxy_func):
|
||||
|
@ -23,14 +23,11 @@ def proxy(proxy_func):
|
|||
:param proxy_func: the proxy function
|
||||
:type proxy_func: function
|
||||
"""
|
||||
|
||||
def decorator(func):
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
return proxy_func(func, *args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
|
@ -56,11 +53,10 @@ def overrides(*args):
|
|||
if inspect.isfunction(args[0]):
|
||||
return _overrides(stack, args[0])
|
||||
else:
|
||||
# One or more classes are specified, so return a function that will be
|
||||
# One or more classes are specifed, so return a function that will be
|
||||
# called with the real function as argument
|
||||
def ret_func(func, **kwargs):
|
||||
return _overrides(stack, func, explicit_base_classes=args)
|
||||
|
||||
return ret_func
|
||||
|
||||
|
||||
|
@ -79,10 +75,7 @@ def _overrides(stack, method, explicit_base_classes=None):
|
|||
check_classes = base_classes
|
||||
|
||||
if not base_classes:
|
||||
raise ValueError(
|
||||
'overrides decorator: unable to determine base class of class "%s"'
|
||||
% class_name
|
||||
)
|
||||
raise ValueError('overrides decorator: unable to determine base class of class "%s"' % class_name)
|
||||
|
||||
def get_class(cls_name):
|
||||
if '.' not in cls_name:
|
||||
|
@ -98,138 +91,47 @@ def _overrides(stack, method, explicit_base_classes=None):
|
|||
|
||||
if explicit_base_classes:
|
||||
# One or more base classes are explicitly given, check only those classes
|
||||
override_classes = re.search(r'\s*@overrides\((.+)\)\s*', stack[1][4][0]).group(
|
||||
1
|
||||
)
|
||||
override_classes = re.search(r'\s*@overrides\((.+)\)\s*', stack[1][4][0]).group(1)
|
||||
override_classes = [c.strip() for c in override_classes.split(',')]
|
||||
check_classes = override_classes
|
||||
|
||||
for c in base_classes + check_classes:
|
||||
classes[c] = get_class(c)
|
||||
|
||||
# Verify that the explicit override class is one of base classes
|
||||
# Verify that the excplicit override class is one of base classes
|
||||
if explicit_base_classes:
|
||||
from itertools import product
|
||||
|
||||
for bc, cc in product(base_classes, check_classes):
|
||||
if issubclass(classes[bc], classes[cc]):
|
||||
break
|
||||
else:
|
||||
raise Exception(
|
||||
'Excplicit override class "%s" is not a super class of: %s'
|
||||
% (explicit_base_classes, class_name)
|
||||
)
|
||||
raise Exception('Excplicit override class "%s" is not a super class of: %s'
|
||||
% (explicit_base_classes, class_name))
|
||||
if not all(hasattr(classes[cls], method.__name__) for cls in check_classes):
|
||||
for cls in check_classes:
|
||||
if not hasattr(classes[cls], method.__name__):
|
||||
raise Exception(
|
||||
'Function override "%s" not found in superclass: %s\n%s'
|
||||
% (
|
||||
method.__name__,
|
||||
cls,
|
||||
f'File: {stack[1][1]}:{stack[1][2]}',
|
||||
)
|
||||
)
|
||||
raise Exception('Function override "%s" not found in superclass: %s\n%s'
|
||||
% (method.__name__, cls, 'File: %s:%s' % (stack[1][1], stack[1][2])))
|
||||
|
||||
if not any(hasattr(classes[cls], method.__name__) for cls in check_classes):
|
||||
raise Exception(
|
||||
'Function override "%s" not found in any superclass: %s\n%s'
|
||||
% (
|
||||
method.__name__,
|
||||
check_classes,
|
||||
f'File: {stack[1][1]}:{stack[1][2]}',
|
||||
)
|
||||
)
|
||||
raise Exception('Function override "%s" not found in any superclass: %s\n%s'
|
||||
% (method.__name__, check_classes, 'File: %s:%s' % (stack[1][1], stack[1][2])))
|
||||
return method
|
||||
|
||||
|
||||
def deprecated(func):
|
||||
"""This is a decorator which can be used to mark function as deprecated.
|
||||
|
||||
It will result in a warning being emitted when the function is used.
|
||||
It will result in a warning being emmitted when the function is used.
|
||||
|
||||
"""
|
||||
|
||||
@wraps(func)
|
||||
def depr_func(*args, **kwargs):
|
||||
warnings.simplefilter('always', DeprecationWarning) # Turn off filter
|
||||
warnings.warn(
|
||||
f'Call to deprecated function {func.__name__}.',
|
||||
category=DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
warnings.warn('Call to deprecated function {}.'.format(func.__name__),
|
||||
category=DeprecationWarning, stacklevel=2)
|
||||
warnings.simplefilter('default', DeprecationWarning) # Reset filter
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return depr_func
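Usage of the decorator above, for reference; the decorated function is only an example:

@deprecated
def old_helper(value):
    """Example only: calling this emits a DeprecationWarning at the call site."""
    return value

old_helper('abc123')  # warns: Call to deprecated function old_helper.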


class CoroutineDeferred(defer.Deferred):
"""Wraps a coroutine in a Deferred.
It will dynamically pass through the underlying coroutine without wrapping where apporpriate.
"""

def __init__(self, coro: Coroutine):
# Delay this import to make sure a reactor was installed first
from twisted.internet import reactor

super().__init__()
self.coro = coro
self.awaited = None
self.activate_deferred = reactor.callLater(0, self.activate)

def __await__(self):
if self.awaited in [None, True]:
self.awaited = True
return self.coro.__await__()
# Already in deferred mode
return super().__await__()

def activate(self):
"""If the result wasn't awaited before the next context switch, we turn it into a deferred."""
if self.awaited is None:
self.awaited = False
try:
d = defer.Deferred.fromCoroutine(self.coro)
except AttributeError:
# Fallback for Twisted <= 21.2 without fromCoroutine
d = defer.ensureDeferred(self.coro)
d.chainDeferred(self)

def _callback_activate(self):
"""Verify awaited status before calling activate."""
assert not self.awaited, 'Cannot add callbacks to an already awaited coroutine.'
self.activate()

def addCallback(self, *args, **kwargs): # noqa: N802
self._callback_activate()
return super().addCallback(*args, **kwargs)

def addCallbacks(self, *args, **kwargs): # noqa: N802
self._callback_activate()
return super().addCallbacks(*args, **kwargs)

def addErrback(self, *args, **kwargs): # noqa: N802
self._callback_activate()
return super().addErrback(*args, **kwargs)

def addBoth(self, *args, **kwargs): # noqa: N802
self._callback_activate()
return super().addBoth(*args, **kwargs)


_RetT = TypeVar('_RetT')


def maybe_coroutine(
f: Callable[..., Coroutine[Any, Any, _RetT]],
) -> 'Callable[..., defer.Deferred[_RetT]]':
"""Wraps a coroutine function to make it usable as a normal function that returns a Deferred."""

@wraps(f)
def wrapper(*args, **kwargs):
# Uncomment for quick testing to make sure CoroutineDeferred magic isn't at fault
# return defer.ensureDeferred(f(*args, **kwargs))
return CoroutineDeferred(f(*args, **kwargs))

return wrapper
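A usage sketch for maybe_coroutine/CoroutineDeferred above: the decorated coroutine can either be awaited from other async code or treated as a classic Deferred, which is exactly the dual behaviour the class implements. The coroutine below is illustrative.

from twisted.internet import reactor, task

@maybe_coroutine
async def fetch_answer(delay=0.1):
    # Pretend to do asynchronous work, then return a value.
    await task.deferLater(reactor, delay, lambda: None)
    return 42

# Deferred style:
#     fetch_answer().addCallback(print)
# Coroutine style, from other async code:
#     result = await fetch_answer()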
@ -1,3 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2008 Andrew Resch <andrewresch@gmail.com>
|
||||
# Copyright (C) 2011 Pedro Algarvio <pedro@algarvio.me>
|
||||
|
@ -8,15 +9,19 @@
|
|||
#
|
||||
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
|
||||
class DelugeError(Exception):
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
inst = super().__new__(cls, *args, **kwargs)
|
||||
inst = super(DelugeError, cls).__new__(cls, *args, **kwargs)
|
||||
inst._args = args
|
||||
inst._kwargs = kwargs
|
||||
return inst
|
||||
|
||||
def __init__(self, message=None):
|
||||
super().__init__(message)
|
||||
super(DelugeError, self).__init__(message)
|
||||
self.message = message
|
||||
|
||||
def __str__(self):
|
||||
|
@ -40,13 +45,14 @@ class InvalidPathError(DelugeError):
|
|||
|
||||
|
||||
class WrappedException(DelugeError):
|
||||
|
||||
def __init__(self, message, exception_type, traceback):
|
||||
super().__init__(message)
|
||||
super(WrappedException, self).__init__(message)
|
||||
self.type = exception_type
|
||||
self.traceback = traceback
|
||||
|
||||
def __str__(self):
|
||||
return f'{self.message}\n{self.traceback}'
|
||||
return '%s\n%s' % (self.message, self.traceback)
|
||||
|
||||
|
||||
class _ClientSideRecreateError(DelugeError):
|
||||
|
@ -54,29 +60,29 @@ class _ClientSideRecreateError(DelugeError):
|
|||
|
||||
|
||||
class IncompatibleClient(_ClientSideRecreateError):
|
||||
|
||||
def __init__(self, daemon_version):
|
||||
self.daemon_version = daemon_version
|
||||
msg = (
|
||||
'Your deluge client is not compatible with the daemon. '
|
||||
'Please upgrade your client to %(daemon_version)s'
|
||||
) % {'daemon_version': self.daemon_version}
|
||||
super().__init__(message=msg)
|
||||
msg = 'Your deluge client is not compatible with the daemon. '\
|
||||
'Please upgrade your client to %(daemon_version)s' % \
|
||||
dict(daemon_version=self.daemon_version)
|
||||
super(IncompatibleClient, self).__init__(message=msg)
|
||||
|
||||
|
||||
class NotAuthorizedError(_ClientSideRecreateError):
|
||||
|
||||
def __init__(self, current_level, required_level):
|
||||
msg = ('Auth level too low: %(current_level)s < %(required_level)s') % {
|
||||
'current_level': current_level,
|
||||
'required_level': required_level,
|
||||
}
|
||||
super().__init__(message=msg)
|
||||
msg = 'Auth level too low: %(current_level)s < %(required_level)s' % \
|
||||
dict(current_level=current_level, required_level=required_level)
|
||||
super(NotAuthorizedError, self).__init__(message=msg)
|
||||
self.current_level = current_level
|
||||
self.required_level = required_level
|
||||
|
||||
|
||||
class _UsernameBasedPasstroughError(_ClientSideRecreateError):
|
||||
|
||||
def __init__(self, message, username):
|
||||
super().__init__(message)
|
||||
super(_UsernameBasedPasstroughError, self).__init__(message)
|
||||
self.username = username
|
||||
|
||||
|
||||
|
@ -90,7 +96,3 @@ class AuthenticationRequired(_UsernameBasedPasstroughError):
|
|||
|
||||
class AuthManagerError(_UsernameBasedPasstroughError):
|
||||
pass
|
||||
|
||||
|
||||
class LibtorrentImportError(ImportError):
|
||||
pass
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -13,6 +14,7 @@ This module describes the types of events that can be generated by the daemon
|
|||
and subsequently emitted to the clients.
|
||||
|
||||
"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
known_events = {}
|
||||
|
||||
|
@ -21,14 +23,13 @@ class DelugeEventMetaClass(type):
|
|||
"""
|
||||
This metaclass simply keeps a list of all events classes created.
|
||||
"""
|
||||
|
||||
def __init__(cls, name, bases, dct): # pylint: disable=bad-mcs-method-argument
|
||||
super().__init__(name, bases, dct)
|
||||
def __init__(self, name, bases, dct): # pylint: disable=bad-mcs-method-argument
|
||||
super(DelugeEventMetaClass, self).__init__(name, bases, dct)
|
||||
if name != 'DelugeEvent':
|
||||
known_events[name] = cls
|
||||
known_events[name] = self
|
||||
|
||||
|
||||
class DelugeEvent(metaclass=DelugeEventMetaClass):
|
||||
class DelugeEvent(object):
|
||||
"""
|
||||
The base class for all events.
|
||||
|
||||
|
@ -38,6 +39,7 @@ class DelugeEvent(metaclass=DelugeEventMetaClass):
|
|||
:type args: list
|
||||
|
||||
"""
|
||||
__metaclass__ = DelugeEventMetaClass
|
||||
|
||||
def _get_name(self):
|
||||
return self.__class__.__name__
|
||||
|
@ -55,7 +57,6 @@ class TorrentAddedEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when a new torrent is successfully added to the session.
|
||||
"""
|
||||
|
||||
def __init__(self, torrent_id, from_state):
|
||||
"""
|
||||
:param torrent_id: the torrent_id of the torrent that was added
|
||||
|
@ -70,7 +71,6 @@ class TorrentRemovedEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when a torrent has been removed from the session.
|
||||
"""
|
||||
|
||||
def __init__(self, torrent_id):
|
||||
"""
|
||||
:param torrent_id: the torrent_id
|
||||
|
@ -83,7 +83,6 @@ class PreTorrentRemovedEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when a torrent is about to be removed from the session.
|
||||
"""
|
||||
|
||||
def __init__(self, torrent_id):
|
||||
"""
|
||||
:param torrent_id: the torrent_id
|
||||
|
@ -96,7 +95,6 @@ class TorrentStateChangedEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when a torrent changes state.
|
||||
"""
|
||||
|
||||
def __init__(self, torrent_id, state):
|
||||
"""
|
||||
:param torrent_id: the torrent_id
|
||||
|
@ -111,7 +109,6 @@ class TorrentTrackerStatusEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when a torrents tracker status changes.
|
||||
"""
|
||||
|
||||
def __init__(self, torrent_id, status):
|
||||
"""
|
||||
Args:
|
||||
|
@ -125,7 +122,6 @@ class TorrentQueueChangedEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when the queue order has changed.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
|
@ -133,7 +129,6 @@ class TorrentFolderRenamedEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when a folder within a torrent has been renamed.
|
||||
"""
|
||||
|
||||
def __init__(self, torrent_id, old, new):
|
||||
"""
|
||||
:param torrent_id: the torrent_id
|
||||
|
@ -150,7 +145,6 @@ class TorrentFileRenamedEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when a file within a torrent has been renamed.
|
||||
"""
|
||||
|
||||
def __init__(self, torrent_id, index, name):
|
||||
"""
|
||||
:param torrent_id: the torrent_id
|
||||
|
@ -167,7 +161,6 @@ class TorrentFinishedEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when a torrent finishes downloading.
|
||||
"""
|
||||
|
||||
def __init__(self, torrent_id):
|
||||
"""
|
||||
:param torrent_id: the torrent_id
|
||||
|
@ -180,7 +173,6 @@ class TorrentResumedEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when a torrent resumes from a paused state.
|
||||
"""
|
||||
|
||||
def __init__(self, torrent_id):
|
||||
"""
|
||||
:param torrent_id: the torrent_id
|
||||
|
@ -193,7 +185,6 @@ class TorrentFileCompletedEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when a file completes.
|
||||
"""
|
||||
|
||||
def __init__(self, torrent_id, index):
|
||||
"""
|
||||
:param torrent_id: the torrent_id
|
||||
|
@ -208,7 +199,6 @@ class TorrentStorageMovedEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when the storage location for a torrent has been moved.
|
||||
"""
|
||||
|
||||
def __init__(self, torrent_id, path):
|
||||
"""
|
||||
:param torrent_id: the torrent_id
|
||||
|
@ -223,7 +213,6 @@ class CreateTorrentProgressEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when creating a torrent file remotely.
|
||||
"""
|
||||
|
||||
def __init__(self, piece_count, num_pieces):
|
||||
self._args = [piece_count, num_pieces]
|
||||
|
||||
|
@ -232,7 +221,6 @@ class NewVersionAvailableEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when a more recent version of Deluge is available.
|
||||
"""
|
||||
|
||||
def __init__(self, new_release):
|
||||
"""
|
||||
:param new_release: the new version that is available
|
||||
|
@ -246,7 +234,6 @@ class SessionStartedEvent(DelugeEvent):
|
|||
Emitted when a session has started. This typically only happens once when
|
||||
the daemon is initially started.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
|
@ -254,7 +241,6 @@ class SessionPausedEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when the session has been paused.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
|
@ -262,7 +248,6 @@ class SessionResumedEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when the session has been resumed.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
|
@ -270,7 +255,6 @@ class ConfigValueChangedEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when a config value changes in the Core.
|
||||
"""
|
||||
|
||||
def __init__(self, key, value):
|
||||
"""
|
||||
:param key: the key that changed
|
||||
|
@ -284,7 +268,6 @@ class PluginEnabledEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when a plugin is enabled in the Core.
|
||||
"""
|
||||
|
||||
def __init__(self, plugin_name):
|
||||
self._args = [plugin_name]
|
||||
|
||||
|
@ -293,7 +276,6 @@ class PluginDisabledEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when a plugin is disabled in the Core.
|
||||
"""
|
||||
|
||||
def __init__(self, plugin_name):
|
||||
self._args = [plugin_name]
|
||||
|
||||
|
@ -302,7 +284,6 @@ class ClientDisconnectedEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when a client disconnects.
|
||||
"""
|
||||
|
||||
def __init__(self, session_id):
|
||||
self._args = [session_id]
|
||||
|
||||
|
@ -311,7 +292,6 @@ class ExternalIPEvent(DelugeEvent):
|
|||
"""
|
||||
Emitted when the external ip address is received from libtorrent.
|
||||
"""
|
||||
|
||||
def __init__(self, external_ip):
|
||||
"""
|
||||
Args:
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -6,200 +7,129 @@
|
|||
# See LICENSE for more details.
|
||||
#
|
||||
|
||||
import email.message
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
import os.path
|
||||
import zlib
|
||||
|
||||
from twisted.internet import reactor
|
||||
from twisted.internet.defer import Deferred
|
||||
from twisted.python.failure import Failure
|
||||
from twisted.web import client, http
|
||||
from twisted.web._newclient import HTTPClientParser
|
||||
from twisted.web.error import Error, PageRedirect
|
||||
from twisted.web.http_headers import Headers
|
||||
from twisted.web.iweb import IAgent
|
||||
from zope.interface import implementer
|
||||
from twisted.web.error import PageRedirect
|
||||
|
||||
from deluge.common import get_version, utf8_encode_structure
|
||||
|
||||
try:
|
||||
from urllib.parse import urljoin
|
||||
except ImportError:
|
||||
# PY2 fallback
|
||||
from urlparse import urljoin # pylint: disable=ungrouped-imports
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CompressionDecoder(client.GzipDecoder):
|
||||
"""A compression decoder for gzip, x-gzip and deflate."""
|
||||
|
||||
def deliverBody(self, protocol): # NOQA: N802
|
||||
self.original.deliverBody(CompressionDecoderProtocol(protocol, self.original))
|
||||
|
||||
|
||||
class CompressionDecoderProtocol(client._GzipProtocol):
|
||||
"""A compression decoder protocol for CompressionDecoder."""
|
||||
|
||||
def __init__(self, protocol, response):
|
||||
super().__init__(protocol, response)
|
||||
self._zlibDecompress = zlib.decompressobj(32 + zlib.MAX_WBITS)
|
||||
|
||||
|
||||
class BodyHandler(HTTPClientParser):
|
||||
"""An HTTP parser that saves the response to a file."""
|
||||
|
||||
def __init__(self, request, finished, length, agent, encoding=None):
|
||||
"""BodyHandler init.
|
||||
|
||||
Args:
|
||||
request (t.w.i.IClientRequest): The parser request.
|
||||
finished (Deferred): A Deferred to handle the finished response.
|
||||
length (int): The length of the response.
|
||||
agent (t.w.i.IAgent): The agent from which the request was sent.
|
||||
class HTTPDownloader(client.HTTPDownloader):
|
||||
"""
|
||||
super().__init__(request, finished)
|
||||
self.agent = agent
|
||||
self.finished = finished
|
||||
self.total_length = length
|
||||
self.current_length = 0
|
||||
self.data = b''
|
||||
self.encoding = encoding
|
||||
|
||||
def dataReceived(self, data): # NOQA: N802
|
||||
self.current_length += len(data)
|
||||
self.data += data
|
||||
if self.agent.part_callback:
|
||||
self.agent.part_callback(data, self.current_length, self.total_length)
|
||||
|
||||
def connectionLost(self, reason): # NOQA: N802
|
||||
if self.encoding:
|
||||
self.data = self.data.decode(self.encoding).encode('utf8')
|
||||
with open(self.agent.filename, 'wb') as _file:
|
||||
_file.write(self.data)
|
||||
self.finished.callback(self.agent.filename)
|
||||
self.state = 'DONE'
|
||||
HTTPClientParser.connectionLost(self, reason)
|
||||
|
||||
|
||||
@implementer(IAgent)
|
||||
class HTTPDownloaderAgent:
|
||||
"""A File Downloader Agent."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
agent,
|
||||
filename,
|
||||
part_callback=None,
|
||||
force_filename=False,
|
||||
allow_compression=True,
|
||||
handle_redirect=True,
|
||||
):
|
||||
"""HTTPDownloaderAgent init.
|
||||
|
||||
Args:
|
||||
agent (t.w.c.Agent): The agent which will send the requests.
|
||||
filename (str): The filename to save the file as.
|
||||
force_filename (bool): Forces use of the supplied filename,
|
||||
regardless of header content.
|
||||
part_callback (func): A function to be called when a part of data
|
||||
is received, it's signature should be:
|
||||
func(data, current_length, total_length)
|
||||
Factory class for downloading files and keeping track of progress.
|
||||
"""
|
||||
def __init__(self, url, filename, part_callback=None, headers=None,
|
||||
force_filename=False, allow_compression=True):
|
||||
"""
|
||||
:param url: the url to download from
|
||||
:type url: string
|
||||
:param filename: the filename to save the file as
|
||||
:type filename: string
|
||||
:param force_filename: forces use of the supplied filename, regardless of header content
|
||||
:type force_filename: bool
|
||||
:param part_callback: a function to be called when a part of data
|
||||
is received, it's signature should be: func(data, current_length, total_length)
|
||||
:type part_callback: function
|
||||
:param headers: any optional headers to send
|
||||
:type headers: dictionary
|
||||
"""
|
||||
|
||||
self.handle_redirect = handle_redirect
|
||||
self.agent = agent
|
||||
self.filename = filename
|
||||
self.part_callback = part_callback
|
||||
self.current_length = 0
|
||||
self.total_length = 0
|
||||
self.decoder = None
|
||||
self.value = filename
|
||||
self.force_filename = force_filename
|
||||
self.allow_compression = allow_compression
|
||||
self.decoder = None
|
||||
self.code = None
|
||||
agent = b'Deluge/%s (http://deluge-torrent.org)' % get_version().encode('utf8')
|
||||
|
||||
def request_callback(self, response):
|
||||
finished = Deferred()
|
||||
client.HTTPDownloader.__init__(self, url, filename, headers=headers, agent=agent)
|
||||
|
||||
if not self.handle_redirect and response.code in (
|
||||
http.MOVED_PERMANENTLY,
|
||||
http.FOUND,
|
||||
http.SEE_OTHER,
|
||||
http.TEMPORARY_REDIRECT,
|
||||
):
|
||||
location = response.headers.getRawHeaders(b'location')[0]
|
||||
error = PageRedirect(response.code, location=location)
|
||||
finished.errback(Failure(error))
|
||||
elif response.code >= 400:
|
||||
error = Error(response.code)
|
||||
finished.errback(Failure(error))
|
||||
def gotStatus(self, version, status, message): # NOQA: N802
|
||||
self.code = int(status)
|
||||
client.HTTPDownloader.gotStatus(self, version, status, message)
|
||||
|
||||
def gotHeaders(self, headers): # NOQA: N802
|
||||
if self.code == http.OK:
|
||||
if 'content-length' in headers:
|
||||
self.total_length = int(headers['content-length'][0])
|
||||
else:
|
||||
headers = response.headers
|
||||
body_length = int(headers.getRawHeaders(b'content-length', default=[0])[0])
|
||||
self.total_length = 0
|
||||
|
||||
if headers.hasHeader(b'content-disposition') and not self.force_filename:
|
||||
content_disp = headers.getRawHeaders(b'content-disposition')[0].decode(
|
||||
'utf-8'
|
||||
)
|
||||
message = email.message.EmailMessage()
|
||||
message['content-disposition'] = content_disp
|
||||
new_file_name = message.get_filename()
|
||||
if new_file_name:
|
||||
if self.allow_compression and 'content-encoding' in headers and \
|
||||
headers['content-encoding'][0] in ('gzip', 'x-gzip', 'deflate'):
|
||||
# Adding 32 to the wbits enables gzip & zlib decoding (with automatic header detection)
|
||||
# Adding 16 just enables gzip decoding (no zlib)
|
||||
self.decoder = zlib.decompressobj(zlib.MAX_WBITS + 32)
|
||||
|
||||
if 'content-disposition' in headers and not self.force_filename:
|
||||
new_file_name = str(headers['content-disposition'][0]).split(';')[1].split('=')[1]
|
||||
new_file_name = sanitise_filename(new_file_name)
|
||||
new_file_name = os.path.join(
|
||||
os.path.split(self.filename)[0], new_file_name
|
||||
)
|
||||
new_file_name = os.path.join(os.path.split(self.value)[0], new_file_name)
|
||||
|
||||
count = 1
|
||||
fileroot = os.path.splitext(new_file_name)[0]
|
||||
fileext = os.path.splitext(new_file_name)[1]
|
||||
while os.path.isfile(new_file_name):
|
||||
# Increment filename if already exists
|
||||
new_file_name = f'{fileroot}-{count}{fileext}'
|
||||
new_file_name = '%s-%s%s' % (fileroot, count, fileext)
|
||||
count += 1
|
||||
|
||||
self.filename = new_file_name
|
||||
self.fileName = new_file_name
|
||||
self.value = new_file_name
|
||||
|
||||
cont_type_header = headers.getRawHeaders(b'content-type')[0].decode()
|
||||
message = email.message.EmailMessage()
|
||||
message['content-type'] = cont_type_header
|
||||
cont_type = message.get_content_type()
|
||||
params = message['content-type'].params
|
||||
# Only re-ecode text content types.
|
||||
encoding = None
|
||||
if cont_type.startswith('text/'):
|
||||
encoding = params.get('charset', None)
|
||||
response.deliverBody(
|
||||
BodyHandler(response.request, finished, body_length, self, encoding)
|
||||
)
|
||||
elif self.code in (http.MOVED_PERMANENTLY, http.FOUND, http.SEE_OTHER, http.TEMPORARY_REDIRECT):
|
||||
location = headers['location'][0]
|
||||
error = PageRedirect(self.code, location=location)
|
||||
self.noPage(Failure(error))
|
||||
|
||||
return finished
|
||||
return client.HTTPDownloader.gotHeaders(self, headers)
|
||||
|
||||
def request(self, method, uri, headers=None, body_producer=None):
|
||||
"""Issue a new request to the wrapped agent.
|
||||
def pagePart(self, data): # NOQA: N802
|
||||
if self.code == http.OK:
|
||||
self.current_length += len(data)
|
||||
if self.decoder:
|
||||
data = self.decoder.decompress(data)
|
||||
if self.part_callback:
|
||||
self.part_callback(data, self.current_length, self.total_length)
|
||||
|
||||
Args:
|
||||
method (bytes): The HTTP method to use.
|
||||
uri (bytes): The url to download from.
|
||||
headers (t.w.h.Headers, optional): Any extra headers to send.
|
||||
body_producer (t.w.i.IBodyProducer, optional): Request body data.
|
||||
return client.HTTPDownloader.pagePart(self, data)
|
||||
|
||||
Returns:
|
||||
Deferred: The filename of the of the downloaded file.
|
||||
"""
|
||||
if headers is None:
|
||||
headers = Headers()
|
||||
def pageEnd(self): # NOQA: N802
|
||||
if self.decoder:
|
||||
data = self.decoder.flush()
|
||||
self.current_length -= len(data)
|
||||
self.decoder = None
|
||||
self.pagePart(data)
|
||||
|
||||
if not headers.hasHeader(b'User-Agent'):
|
||||
user_agent = 'Deluge'
|
||||
headers.addRawHeader('User-Agent', user_agent)
|
||||
|
||||
d = self.agent.request(
|
||||
method=method, uri=uri, headers=headers, bodyProducer=body_producer
|
||||
)
|
||||
d.addCallback(self.request_callback)
|
||||
return d
|
||||
return client.HTTPDownloader.pageEnd(self)
|
||||
|
||||
|
||||
def sanitise_filename(filename):
|
||||
"""Sanitises a filename to use as a download destination file.
|
||||
|
||||
"""
|
||||
Sanitises a filename to use as a download destination file.
|
||||
Logs any filenames that could be considered malicious.
|
||||
|
||||
filename (str): The filename to sanitise.
|
||||
|
||||
Returns:
|
||||
str: The sanitised filename.
|
||||
:param filename: the filename to sanitise
|
||||
:type filename: string
|
||||
:returns: the sanitised filename
|
||||
:rtype: string
|
||||
"""
|
||||
|
||||
# Remove any quotes
|
||||
|
@ -207,128 +137,136 @@ def sanitise_filename(filename):
|
|||
|
||||
if os.path.basename(filename) != filename:
|
||||
# Dodgy server, log it
|
||||
log.warning(
|
||||
'Potentially malicious server: trying to write to file: %s', filename
|
||||
)
|
||||
log.warning('Potentially malicious server: trying to write to file: %s', filename)
|
||||
# Only use the basename
|
||||
filename = os.path.basename(filename)
|
||||
|
||||
filename = filename.strip()
|
||||
if filename.startswith('.') or ';' in filename or '|' in filename:
|
||||
# Dodgy server, log it
|
||||
log.warning(
|
||||
'Potentially malicious server: trying to write to file: %s', filename
|
||||
)
|
||||
log.warning('Potentially malicious server: trying to write to file: %s', filename)
|
||||
|
||||
return filename
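For reference, the sanitising above reduces any path-like input to a bare file name and only logs (without altering) other suspicious patterns; illustrative inputs, assuming the module context:

# sanitise_filename('../../etc/passwd')    -> 'passwd'  (basename only; warning logged)
# sanitise_filename(' deluge-2.torrent ')  -> 'deluge-2.torrent'  (whitespace stripped)
# sanitise_filename('.hidden.torrent')     -> '.hidden.torrent'  (kept, but logged as suspicious)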
|
||||
|
||||
|
||||
def _download_file(
|
||||
url,
|
||||
filename,
|
||||
callback=None,
|
||||
headers=None,
|
||||
force_filename=False,
|
||||
allow_compression=True,
|
||||
handle_redirects=True,
|
||||
):
|
||||
"""Downloads a file from a specific URL and returns a Deferred.
|
||||
|
||||
A callback function can be specified to be called as parts are received.
|
||||
def _download_file(url, filename, callback=None, headers=None, force_filename=False, allow_compression=True):
|
||||
"""
|
||||
Downloads a file from a specific URL and returns a Deferred. A callback
|
||||
function can be specified to be called as parts are received.
|
||||
|
||||
Args:
|
||||
url (str): The url to download from.
|
||||
filename (str): The filename to save the file as.
|
||||
callback (func): A function to be called when partial data is received,
|
||||
url (str): The url to download from
|
||||
filename (str): The filename to save the file as
|
||||
callback (func): A function to be called when a part of data is received,
|
||||
it's signature should be: func(data, current_length, total_length)
|
||||
headers (dict): Any optional headers to send.
|
||||
force_filename (bool): Force using the filename specified rather than
|
||||
one the server may suggest.
|
||||
allow_compression (bool): Allows gzip & deflate decoding.
|
||||
headers (dict): Any optional headers to send
|
||||
force_filename (bool): force us to use the filename specified rather than
|
||||
one the server may suggest
|
||||
allow_compression (bool): Allows gzip & deflate decoding
|
||||
|
||||
Returns:
|
||||
Deferred: The filename of the downloaded file.
|
||||
Deferred: the filename of the downloaded file
|
||||
|
||||
Raises:
|
||||
t.w.e.PageRedirect
|
||||
t.w.e.Error: for all other HTTP response errors
|
||||
"""
|
||||
|
||||
agent = client.Agent(reactor)
|
||||
"""
|
||||
|
||||
if allow_compression:
|
||||
enc_accepted = ['gzip', 'x-gzip', 'deflate']
|
||||
decoders = [(enc.encode(), CompressionDecoder) for enc in enc_accepted]
|
||||
agent = client.ContentDecoderAgent(agent, decoders)
|
||||
if handle_redirects:
|
||||
agent = client.RedirectAgent(agent)
|
||||
if not headers:
|
||||
headers = {}
|
||||
headers['accept-encoding'] = 'deflate, gzip, x-gzip'
|
||||
|
||||
agent = HTTPDownloaderAgent(
|
||||
agent, filename, callback, force_filename, allow_compression, handle_redirects
|
||||
)
|
||||
url = url.encode('utf8')
|
||||
filename = filename.encode('utf8')
|
||||
headers = utf8_encode_structure(headers) if headers else headers
|
||||
factory = HTTPDownloader(url, filename, callback, headers, force_filename, allow_compression)
|
||||
|
||||
# The Headers init expects dict values to be a list.
|
||||
if headers:
|
||||
for name, value in list(headers.items()):
|
||||
if not isinstance(value, list):
|
||||
headers[name] = [value]
|
||||
# In Twisted 13.1.0 _parse() function replaced by _URI class.
|
||||
# In Twisted 15.0.0 _URI class renamed to URI.
|
||||
if hasattr(client, '_parse'):
|
||||
scheme, host, port, dummy_path = client._parse(url)
|
||||
else:
|
||||
try:
|
||||
from twisted.web.client import _URI as URI
|
||||
except ImportError:
|
||||
from twisted.web.client import URI
|
||||
finally:
|
||||
uri = URI.fromBytes(url)
|
||||
scheme = uri.scheme
|
||||
host = uri.host
|
||||
port = uri.port
|
||||
|
||||
return agent.request(b'GET', url.encode(), Headers(headers))
|
||||
if scheme == 'https':
|
||||
from twisted.internet import ssl
|
||||
# ClientTLSOptions in Twisted >= 14, see ticket #2765 for details on this addition.
|
||||
try:
|
||||
from twisted.internet._sslverify import ClientTLSOptions
|
||||
except ImportError:
|
||||
ctx_factory = ssl.ClientContextFactory()
|
||||
else:
|
||||
class TLSSNIContextFactory(ssl.ClientContextFactory): # pylint: disable=no-init
|
||||
"""
|
||||
A custom context factory to add a server name for TLS connections.
|
||||
"""
|
||||
def getContext(self): # NOQA: N802
|
||||
ctx = ssl.ClientContextFactory.getContext(self)
|
||||
ClientTLSOptions(host, ctx)
|
||||
return ctx
|
||||
ctx_factory = TLSSNIContextFactory()
|
||||
|
||||
reactor.connectSSL(host, port, factory, ctx_factory)
|
||||
else:
|
||||
reactor.connectTCP(host, port, factory)
|
||||
|
||||
return factory.deferred
|
||||
|
||||
|
||||
def download_file(
|
||||
url,
|
||||
filename,
|
||||
callback=None,
|
||||
headers=None,
|
||||
force_filename=False,
|
||||
allow_compression=True,
|
||||
handle_redirects=True,
|
||||
):
|
||||
"""Downloads a file from a specific URL and returns a Deferred.
|
||||
|
||||
A callback function can be specified to be called as parts are received.
|
||||
def download_file(url, filename, callback=None, headers=None, force_filename=False,
|
||||
allow_compression=True, handle_redirects=True):
|
||||
"""
|
||||
Downloads a file from a specific URL and returns a Deferred. A callback
|
||||
function can be specified to be called as parts are received.
|
||||
|
||||
Args:
|
||||
url (str): The url to download from.
|
||||
filename (str): The filename to save the file as.
|
||||
callback (func): A function to be called when partial data is received,
|
||||
it's signature should be: func(data, current_length, total_length).
|
||||
headers (dict): Any optional headers to send.
|
||||
force_filename (bool): Force the filename specified rather than one the
|
||||
server may suggest.
|
||||
allow_compression (bool): Allows gzip & deflate decoding.
|
||||
handle_redirects (bool): HTTP redirects handled automatically or not.
|
||||
url (str): The url to download from
|
||||
filename (str): The filename to save the file as
|
||||
callback (func): A function to be called when a part of data is received,
|
||||
it's signature should be: func(data, current_length, total_length)
|
||||
headers (dict): Any optional headers to send
|
||||
force_filename (bool): force us to use the filename specified rather than
|
||||
one the server may suggest
|
||||
allow_compression (bool): Allows gzip & deflate decoding
|
||||
handle_redirects (bool): If HTTP redirects should be handled automatically
|
||||
|
||||
Returns:
|
||||
Deferred: The filename of the downloaded file.
|
||||
Deferred: the filename of the downloaded file
|
||||
|
||||
Raises:
|
||||
t.w.e.PageRedirect: If handle_redirects is False.
|
||||
t.w.e.Error: For all other HTTP response errors.
|
||||
"""
|
||||
t.w.e.PageRedirect: Unless handle_redirects=True
|
||||
t.w.e.Error: for all other HTTP response errors
|
||||
|
||||
"""
|
||||
def on_download_success(result):
|
||||
log.debug('Download success!')
|
||||
return result
|
||||
|
||||
def on_download_fail(failure):
|
||||
log.warning(
|
||||
'Error occurred downloading file from "%s": %s',
|
||||
url,
|
||||
failure.getErrorMessage(),
|
||||
)
|
||||
if failure.check(PageRedirect) and handle_redirects:
|
||||
new_url = urljoin(url, failure.getErrorMessage().split(' to ')[1])
|
||||
result = _download_file(new_url, filename, callback=callback, headers=headers,
|
||||
force_filename=force_filename,
|
||||
allow_compression=allow_compression)
|
||||
result.addCallbacks(on_download_success, on_download_fail)
|
||||
else:
|
||||
# Log the failure and pass to the caller
|
||||
log.warning('Error occurred downloading file from "%s": %s',
|
||||
url, failure.getErrorMessage())
|
||||
result = failure
|
||||
return result
|
||||
|
||||
d = _download_file(
|
||||
url,
|
||||
filename,
|
||||
callback=callback,
|
||||
headers=headers,
|
||||
force_filename=force_filename,
|
||||
allow_compression=allow_compression,
|
||||
handle_redirects=handle_redirects,
|
||||
)
|
||||
d = _download_file(url, filename, callback=callback, headers=headers,
|
||||
force_filename=force_filename, allow_compression=allow_compression)
|
||||
d.addCallbacks(on_download_success, on_download_fail)
|
||||
return d
|
||||
|
|
|
@ -1,15 +0,0 @@
|
|||
from .util import (
I18N_DOMAIN,
get_languages,
set_language,
setup_mock_translation,
setup_translation,
)

__all__ = [
'I18N_DOMAIN',
'set_language',
'get_languages',
'setup_translation',
'setup_mock_translation',
]
File diffs suppressed because they are too large:
6431  deluge/i18n/ab.po
6234  deluge/i18n/af.po
9295  deluge/i18n/ar.po
9633  deluge/i18n/ast.po
9278  deluge/i18n/be.po
9714  deluge/i18n/bg.po
8267  deluge/i18n/bn.po
8308  deluge/i18n/bs.po
9461  deluge/i18n/ca.po
9620  deluge/i18n/cs.po
8336  deluge/i18n/cy.po
9830  deluge/i18n/da.po
10065  deluge/i18n/de.po
9864  deluge/i18n/el.po
10268  deluge/i18n/en_AU.po
10298  deluge/i18n/en_CA.po
10423  deluge/i18n/en_GB.po
8311  deluge/i18n/eo.po
9551  deluge/i18n/es.po
9770  deluge/i18n/et.po
7864  deluge/i18n/eu.po
8440  deluge/i18n/fa.po
9593  deluge/i18n/fi.po
6217  deluge/i18n/fo.po
10533  deluge/i18n/fr.po
9198  deluge/i18n/fy.po
6214  deluge/i18n/ga.po
9248  deluge/i18n/gl.po
9585  deluge/i18n/he.po
9312  deluge/i18n/hi.po
9239  deluge/i18n/hr.po
9743  deluge/i18n/hu.po
8549  deluge/i18n/id.po
9613  deluge/i18n/is.po
9625  deluge/i18n/it.po
8252  deluge/i18n/iu.po
9461  deluge/i18n/ja.po
8867  deluge/i18n/ka.po
9831  deluge/i18n/kk.po
6222  deluge/i18n/km.po
8365  deluge/i18n/kn.po
9411  deluge/i18n/ko.po
8300  deluge/i18n/ku.po
6214  deluge/i18n/ky.po
8277  deluge/i18n/la.po
6214  deluge/i18n/lb.po
10252  deluge/i18n/lt.po
9799  deluge/i18n/lv.po
8917  deluge/i18n/mk.po
6214  deluge/i18n/ml.po
6214  deluge/i18n/mo.po
Some files were not shown because too many files have changed in this diff.