Compare commits

..

No commits in common. "develop" and "deluge-2.0.1" have entirely different histories.

916 changed files with 267999 additions and 460764 deletions

1
.gitattributes vendored
View file

@ -3,4 +3,3 @@
.gitignore export-ignore .gitignore export-ignore
*.py diff=python *.py diff=python
ext-all.js diff=minjs ext-all.js diff=minjs
*.state -merge -text

View file

@ -1,104 +0,0 @@
# Windows packaging workflow: builds a PyInstaller freeze + NSIS installer
# for each (arch, python, libtorrent) matrix combination.
# NOTE(review): leading indentation was stripped in extraction; structure
# reconstructed per the GitHub Actions workflow schema.
name: Package
on:
  push:
    tags:
      - "deluge-*"
      - "!deluge*-dev*"
    branches:
      - develop
  pull_request:
    # Only PRs labeled 'package' actually run the job (see `if:` below).
    types: [labeled, opened, synchronize, reopened]
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
    inputs:
      ref:
        description: "Enter a tag or commit to package"
        default: ""
jobs:
  windows_package:
    runs-on: windows-2022
    if: (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'package'))
    strategy:
      matrix:
        arch: [x64, x86]
        # Versions quoted so YAML never retypes them (best practice for
        # version matrices); values are unchanged.
        python: ["3.9"]
        libtorrent: ["2.0.7", "1.2.19"]
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      # Checkout Deluge source to subdir to enable packaging any tag/commit
      - name: Checkout Deluge source
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.inputs.ref }}
          fetch-depth: 0
          path: deluge_src
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          # Fixed: was `${{ matrix.python}}` — missing space before `}}`,
          # inconsistent with every other expression in this file.
          python-version: ${{ matrix.python }}
          architecture: ${{ matrix.arch }}
          cache: pip
      - name: Prepare pip
        run: python -m pip install wheel setuptools==68.*
      - name: Install GTK
        run: |
          $WebClient = New-Object System.Net.WebClient
          $WebClient.DownloadFile("https://github.com/deluge-torrent/gvsbuild-release/releases/download/latest/gvsbuild-py${{ matrix.python }}-vs16-${{ matrix.arch }}.zip","C:\GTK.zip")
          7z x C:\GTK.zip -oc:\GTK
          echo "C:\GTK\release\lib" | Out-File -FilePath $env:GITHUB_PATH -Append
          echo "C:\GTK\release\bin" | Out-File -FilePath $env:GITHUB_PATH -Append
          echo "C:\GTK\release" | Out-File -FilePath $env:GITHUB_PATH -Append
          python -m pip install --no-index --find-links="C:\GTK\release\python" pycairo PyGObject
      - name: Install Python dependencies
        # Pillow no longer provides 32-bit wheels for Windows
        # so specify only-binary to install old version.
        run: >
          python -m pip install
          --only-binary=pillow
          twisted[tls]==22.8.0
          libtorrent==${{ matrix.libtorrent }}
          pyinstaller
          pygame
          -r requirements.txt
      - name: Install Deluge
        working-directory: deluge_src
        run: |
          python -m pip install .
          python setup.py install_scripts
      - name: Freeze Deluge
        working-directory: packaging/win
        run: |
          pyinstaller --clean delugewin.spec --distpath freeze
      # Smoke-test each frozen executable by asking for its version.
      # NOTE(review): `deluge-console` has no .exe suffix unlike siblings —
      # PowerShell resolves it either way, but confirm this is intentional.
      - name: Verify Deluge exes
        working-directory: packaging/win/freeze/Deluge/
        run: |
          deluge-debug.exe -v
          deluged-debug.exe -v
          deluge-web-debug.exe -v
          deluge-console -v
      - name: Make Deluge Installer
        working-directory: ./packaging/win
        run: |
          python setup_nsis.py
          makensis /Darch=${{ matrix.arch }} deluge-win-installer.nsi
      - uses: actions/upload-artifact@v4
        with:
          name: deluge-py${{ matrix.python }}-lt${{ matrix.libtorrent }}-${{ matrix.arch }}
          path: packaging/win/*.exe

View file

@ -1,101 +0,0 @@
# CI workflow: runs the pytest suite on Linux and Windows across supported
# Python versions, with optional TLS-security tests and core-dump capture.
# NOTE(review): leading indentation was stripped in extraction; structure
# reconstructed per the GitHub Actions workflow schema.
name: CI
on:
  push:
  pull_request:
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
    inputs:
      core-dump:
        description: "Set to 1 to enable retrieving core dump from crashes"
        default: "0"
jobs:
  test-linux:
    runs-on: ubuntu-22.04
    strategy:
      matrix:
        python-version: ["3.7", "3.10"]
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: "pip"
          cache-dependency-path: "requirements*.txt"
      # Opt-in to the slow TLS security tests by putting 'security_test' in
      # the PR body or the push commit message.
      - name: Sets env var for security
        if: (github.event_name == 'pull_request' && contains(github.event.pull_request.body, 'security_test')) || (github.event_name == 'push' && contains(github.event.head_commit.message, 'security_test'))
        run: echo "SECURITY_TESTS=True" >> $GITHUB_ENV
      - name: Install dependencies
        run: |
          pip install --upgrade pip wheel setuptools
          pip install -r requirements-ci.txt
          pip install -e .
      # Fetch testssl.sh and drop its script + etc/ into the test data dir.
      - name: Install security dependencies
        if: contains(env.SECURITY_TESTS, 'True')
        run: |
          wget -O- $TESTSSL_URL$TESTSSL_VER | tar xz
          mv -t deluge/tests/data testssl.sh-$TESTSSL_VER/testssl.sh testssl.sh-$TESTSSL_VER/etc/;
        env:
          # Quoted so YAML keeps the version as a string.
          TESTSSL_VER: "3.0.6"
          TESTSSL_URL: https://codeload.github.com/drwetter/testssl.sh/tar.gz/refs/tags/v
      - name: Setup core dump catch and store
        if: github.event.inputs.core-dump == '1'
        run: |
          sudo mkdir /cores/ && sudo chmod 777 /cores/
          echo "/cores/%E.%p" | sudo tee /proc/sys/kernel/core_pattern
          ulimit -c unlimited
          sudo apt install glibc-tools
          echo "DEBUG_PREFIX=catchsegv python -X dev -m" >> $GITHUB_ENV
      - name: Test with pytest
        run: |
          python -c 'from deluge._libtorrent import lt; print(lt.__version__)';
          $DEBUG_PREFIX pytest -v -m "not (todo or gtkui)" deluge
      - uses: actions/upload-artifact@v4
        # capture all crashes as build artifacts
        if: failure()
        with:
          name: crashes
          path: /cores
  test-windows:
    runs-on: windows-2022
    strategy:
      matrix:
        python-version: ["3.7", "3.10"]
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: "pip"
          cache-dependency-path: "requirements*.txt"
      - name: Install dependencies
        run: |
          pip install --upgrade pip wheel setuptools
          pip install -r requirements-ci.txt
          pip install -e .
      # Security tests are Linux-only, so they are excluded here.
      - name: Test with pytest
        run: |
          python -c 'import libtorrent as lt; print(lt.__version__)';
          pytest -v -m "not (todo or gtkui or security)" deluge

View file

@ -1,38 +0,0 @@
# Docs workflow: builds the Sphinx documentation via the `docs` tox env.
# NOTE(review): leading indentation was stripped in extraction; structure
# reconstructed per the GitHub Actions workflow schema.
name: Docs
# Controls when the action will run.
on:
  # Triggers the workflow on push or pull request events but only for the main branch
  push:
  pull_request:
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: actions/setup-python@v5
        with:
          python-version: "3.10"
          cache: "pip"
          cache-dependency-path: "requirements*.txt"
      - name: Install dependencies
        run: |
          pip install --upgrade pip wheel
          pip install tox
          sudo apt-get install enchant-2
      - name: Build docs with tox
        env:
          TOX_ENV: docs
        run: |
          tox -e $TOX_ENV

View file

@ -1,17 +0,0 @@
# Linting workflow: runs all configured pre-commit hooks on every push/PR.
# NOTE(review): leading indentation was stripped in extraction; structure
# reconstructed per the GitHub Actions workflow schema.
name: Linting
on:
  push:
  pull_request:
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      # No python-version pinned: the runner's default Python is used.
      - uses: actions/setup-python@v5
      - name: Run pre-commit linting
        uses: pre-commit/action@v3.0.1

7
.gitignore vendored
View file

@ -10,16 +10,15 @@ docs/source/modules/deluge*.rst
__pycache__/ __pycache__/
*.py[cod] *.py[cod]
*.tar.* *.tar.*
_trial_temp
.tox/ .tox/
deluge/i18n/*/ deluge/i18n/*/
deluge.pot
deluge/ui/web/js/*.js deluge/ui/web/js/*.js
deluge/ui/web/js/extjs/ext-extensions*.js deluge/ui/web/js/extjs/ext-extensions*.js
*.desktop *.desktop
*.metainfo.xml *.appdata.xml
.build_data* .build_data*
osx/app osx/app
RELEASE-VERSION RELEASE-VERSION
.venv* .venv*
# used by setuptools to cache downloaded eggs
/.eggs
_pytest_temp/

View file

@ -1,30 +1,36 @@
default_language_version: default_language:
python: python3 python: python3
exclude: > exclude: >
(?x)^( (?x)^(
deluge/ui/web/docs/template/.*| deluge/ui/web/docs/template/.*|
deluge/tests/data/.*svg|
)$ )$
repos: repos:
- repo: https://github.com/astral-sh/ruff-pre-commit - repo: https://github.com/ambv/black
# Ruff version. rev: 19.3b0
rev: v0.6.4
hooks: hooks:
- id: ruff - id: black
name: Chk Ruff name: Fmt Black
args: [--fix] language_version: python3.6
- id: ruff-format - repo: https://github.com/prettier/prettier
name: Fmt Ruff rev: 1.17.0
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v2.7.1
hooks: hooks:
- id: prettier - id: prettier
name: Fmt Prettier name: Fmt Prettier
# Workaround to list modified files only. # Workaround to list modified files only.
args: [--list-different] args: [--list-different]
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://gitlab.com/pycqa/flake8
rev: v4.4.0 rev: 3.7.7
hooks: hooks:
- id: flake8
name: Chk Flake8
additional_dependencies:
- flake8-isort==2.7
- pep8-naming==0.8.2
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.2.1
hooks:
- id: double-quote-string-fixer
name: Fix Double-quotes
- id: end-of-file-fixer - id: end-of-file-fixer
name: Fix End-of-files name: Fix End-of-files
exclude_types: [javascript, css] exclude_types: [javascript, css]
@ -33,9 +39,3 @@ repos:
args: [--fix=auto] args: [--fix=auto]
- id: trailing-whitespace - id: trailing-whitespace
name: Fix Trailing whitespace name: Fix Trailing whitespace
- repo: https://github.com/asottile/pyupgrade
rev: v3.3.1
hooks:
- id: pyupgrade
args: [--py37-plus]
stages: [manual]

View file

@ -289,7 +289,7 @@ callbacks=cb_,_cb
# List of qualified module names which can have objects that can redefine # List of qualified module names which can have objects that can redefine
# builtins. # builtins.
redefining-builtins-modules= redefining-builtins-modules=six.moves,future.builtins,future_builtins
[TYPECHECK] [TYPECHECK]
@ -359,6 +359,11 @@ known-standard-library=
# Force import order to recognize a module as part of a third party library. # Force import order to recognize a module as part of a third party library.
known-third-party=enchant known-third-party=enchant
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
[DESIGN] [DESIGN]

View file

@ -5,14 +5,6 @@
# Required # Required
version: 2 version: 2
build:
os: ubuntu-22.04
tools:
python: "3.10"
jobs:
post_checkout:
- git fetch --unshallow || true
# Build documentation in the docs/ directory with Sphinx # Build documentation in the docs/ directory with Sphinx
sphinx: sphinx:
configuration: docs/source/conf.py configuration: docs/source/conf.py
@ -22,8 +14,9 @@ formats: all
# Optionally set the version of Python and requirements required to build your docs # Optionally set the version of Python and requirements required to build your docs
python: python:
version: 3.7
install: install:
- requirements: requirements.txt - requirements: requirements.txt
- requirements: docs/requirements.txt - requirements: docs/requirements.txt
- method: pip - method: setuptools
path: . path: .

79
.travis.yml Normal file
View file

@ -0,0 +1,79 @@
# Travis CI configuration: runs the tox test matrix (unit, security, lint,
# docs, gtkui, plugins) on Xenial with system-site-packages libtorrent.
# NOTE(review): leading indentation was stripped in extraction; structure
# reconstructed per the Travis CI build-config schema. The top-level
# placement of the second `addons:` block is inferred — confirm against
# the original file.
dist: xenial
sudo: required
language: python
python:
  # Travis Xenial Python to support system_site_packages
  # (quoted so YAML keeps it a string, not the float 3.5)
  - "3.5"
cache: pip
virtualenv:
  system_site_packages: true
env:
  global:
    # Virtual display for GTK tests (Xvfb started in before_script).
    - DISPLAY=:99.0
git:
  # Set greater depth to get version from tags.
  depth: 1000
matrix:
  include:
    - name: Unit tests
      env: TOX_ENV=py3
    - name: Unit tests (libtorrent 1.2)
      env: TOX_ENV=py3
      addons:
        apt:
          sources: [sourceline: "ppa:libtorrent.org/1.2-daily"]
          packages: [python3-libtorrent, python3-venv]
    # Security job only runs when the commit message opts in.
    - if: commit_message =~ SECURITY_TEST
      env: TOX_ENV=security
    - name: Code linting
      env: TOX_ENV=lint
    - name: Docs build
      env: TOX_ENV=docs
    - name: GTK unit tests
      env: TOX_ENV=gtkui
    - name: Plugins unit tests
      env: TOX_ENV=plugins
addons:
  apt:
    sources:
      - sourceline: "ppa:libtorrent.org/rc-1.1-daily"
      - deadsnakes
    packages:
      - python3-libtorrent
      # Install py36 specifically for pre-commit to run black formatter.
      - python3.6
      # Intall python3-venv to provide ensurepip module for tox.
      - python3-venv
# Install dependencies
install:
  - pip install tox tox-venv
  # GTKUI tests
  - "if [ $TOX_ENV == 'gtkui' ]; then
    sudo apt install python3-gi python3-gi-cairo gir1.2-gtk-3.0;
    fi"
  # Security tests
  - "if [ $TOX_ENV == 'security' ]; then
    testssl_url=https://github.com/drwetter/testssl.sh/archive/v2.9.5-5.tar.gz;
    wget -O- $testssl_url | tar xz
    && mv -t deluge/tests/data testssl.sh-2.9.5-5/testssl.sh testssl.sh-2.9.5-5/etc/;
    fi"
before_script:
  - export PYTHONPATH=$PYTHONPATH:$PWD
  # Verify libtorrent installed and version
  - python -c "import libtorrent as lt; print(lt.__version__)"
  # Start xvfb for the GTKUI tests
  - "if [ $TOX_ENV == 'gtkui' ]; then
    /sbin/start-stop-daemon --start --quiet --background \
    --make-pidfile --pidfile /tmp/custom_xvfb_99.pid \
    --exec /usr/bin/Xvfb -- :99 -ac -screen 0 1280x1024x16;
    fi"
script:
  - tox -e $TOX_ENV

17
AUTHORS
View file

@ -39,9 +39,14 @@ Images Authors:
* files: deluge/ui/data/pixmaps/*.svg, *.png * files: deluge/ui/data/pixmaps/*.svg, *.png
deluge/ui/web/icons/active.png, alert.png, all.png, checking.png, dht.png, deluge/ui/web/icons/active.png, alert.png, all.png, checking.png, dht.png,
downloading.png, inactive.png, queued.png, seeding.png, traffic.png downloading.png, inactive.png, queued.png, seeding.png, traffic.png
deluge/ui/web/images/deluge*.png exceptions: deluge/ui/data/pixmaps/deluge.svg and derivatives
copyright: Andrew Resch
license: GPLv3
* files: deluge/ui/data/pixmaps/deluge.svg and derivatives
deluge/ui/web/icons/apple-pre-*.png, deluge*.png deluge/ui/web/icons/apple-pre-*.png, deluge*.png
copyright: Calum Lind deluge/ui/web/images/deluge*.png
copyright: Andrew Wedderburn
license: GPLv3 license: GPLv3
* files: deluge/plugins/blocklist/blocklist/data/*.png * files: deluge/plugins/blocklist/blocklist/data/*.png
@ -50,9 +55,11 @@ Images Authors:
license: GPLv2 license: GPLv2
url: http://ftp.acc.umu.se/pub/GNOME/sources/gnome-icon-theme url: http://ftp.acc.umu.se/pub/GNOME/sources/gnome-icon-theme
* files: deluge/ui/data/pixmaps/magnet*.svg, *.png * files: deluge/ui/data/pixmaps/magnet.png
copyright: Matias Wilkman copyright: Woothemes
license: license: Freeware
icon pack: WP Woothemes Ultimate
url: http://www.woothemes.com/
* files: deluge/ui/data/pixmaps/flags/*.png * files: deluge/ui/data/pixmaps/flags/*.png
copyright: Mark James <mjames@gmail.com> copyright: Mark James <mjames@gmail.com>

View file

@ -1,255 +1,8 @@
# Changelog # Changelog
## 2.1.x (TBA)
### Breaking changes
- Removed Python 3.6 support (Python >= 3.7)
### Core
- Fix GHSL-2024-189 - insecure HTTP for new version check.
- Fix alert handler segfault.
- Add support for creating v2 torrents.
### GTK UI
- Fix changing torrent ownership.
- Fix upper limit of upload/download in Add Torrent dialog.
- Fix #3339 - Resizing window crashes with Piecesbar or Stats plugin.
- Fix #3350 - Unable to use quick search.
- Fix #3598 - Missing AppIndicator option in Preferences.
- Set Appindicator as default for tray icon on Linux.
- Add feature to switch between dark/light themes.
### Web UI
- Fix GHSL-2024-191 - potential flag endpoint path traversal.
- Fix GHSL-2024-188 - js script dir traversal vulnerability.
- Fix GHSL-2024-190 - insecure tracker icon endpoint.
- Fix unable to stop daemon in connection manager.
- Fix responsiveness to avoid "Connection lost".
- Add support for network interface name as well as IP address.
- Add ability to change UI theme.
### Console UI
- Fix 'rm' and 'move' commands hanging when done.
- Fix #3538 - Unable to add host in connection manager.
- Disable interactive-mode on Windows.
### UI library
- Fix tracker icon display by converting to png format.
- Fix splitting trackers by newline
- Add clickable URLs for torrent comment and tracker status.
### Label
- Fix torrent deletion not removed from config.
- Fix label display name in submenu.
### AutoAdd
- Fix #3515 - Torrent file decoding errors disabled watch folder.
## 2.1.1 (2022-07-10)
### Core
- Fix missing trackers added via magnet
- Fix handling magnets with tracker tiers
## 2.1.0 (2022-06-28)
### Breaking changes
- Python 2 support removed (Python >= 3.6)
- libtorrent minimum requirement increased (>= 1.2).
### Core
- Add support for SVG tracker icons.
- Fix tracker icon error handling.
- Fix cleaning-up tracker icon temp files.
- Fix Plugin manager to handle new metadata 2.1.
- Hide passwords in config logs.
- Fix cleaning-up temp files in add_torrent_url.
- Fix KeyError in sessionproxy after torrent delete.
- Remove libtorrent deprecated functions.
- Fix file_completed_alert handling.
- Add plugin keys to get_torrents_status.
- Add support for pygeoip dependency.
- Fix crash logging to Windows protected folder.
- Add is_interface and is_interface_name to validate network interfaces.
- Fix is_url and is_infohash error with None value.
- Fix load_libintl error.
- Add support for IPv6 in host lists.
- Add systemd user services.
- Fix refresh and expire the torrent status cache.
- Fix crash when logging errors initializing gettext.
### Web UI
- Fix ETA column sorting in correct order (#3413).
- Fix defining foreground and background colors.
- Accept charset in content-type for json messages.
- Fix 'Complete Seen' and 'Completed' sorting.
- Fix encoding HTML entities for torrent attributes to prevent XSS.
### Gtk UI
- Fix download location textbox width.
- Fix obscured port number in Connection Manager.
- Increase connection manager default height.
- Fix bug with setting move completed in Options tab.
- Fix adding daemon accounts.
- Add workaround for crash on Windows with ico or gif icons.
- Hide account password length in log.
- Added a torrent menu option for magnet copy.
- Fix unable to prefetch magnet in thinclient mode.
- Use GtkSpinner when testing open port.
- Update About Dialog year.
- Fix Edit Torrents dialogs close issues.
- Fix ETA being copied to neighboring empty cells.
- Disable GTK CSD by default on Windows.
### Console UI
- Fix curses.init_pair raise ValueError on Py3.10.
- Swap j and k key's behavior to fit vim mode.
- Fix torrent details status error.
- Fix incorrect test for when a host is online.
- Add the torrent label to info command.
### AutoAdd
- Fix handling torrent decode errors.
- Fix error dialog not being shown on error.
### Blocklist
- Add frequency unit to interval label.
### Notifications
- Fix UnicodeEncodeError upon non-ascii torrent name.
## 2.0.5 (2021-12-15)
### WebUI
- Fix js minifying error resulting in WebUI blank screen.
- Silence erronous missing translations warning.
## 2.0.4 (2021-12-12)
### Packaging
- Fix python optional setup.py requirements
### Gtk UI
- Add detection of torrent URL on GTK UI focus
- Fix piecesbar crashing when enabled
- Remove num_blocks_cache_hits in stats
- Fix unhandled error with empty clipboard
- Add torrentdetails tabs position menu (#3441)
- Hide pygame community banner in console
- Fix cmp function for None types (#3309)
- Fix loading config with double-quotes in string
- Fix Status tab download speed and uploaded
### Web UI
- Handle torrent add failures
- Add menu option to copy magnet URI
- Fix md5sums in torrent files breaking file listing (#3388)
- Add country flag alt/title for accessibility
### Console UI
- Fix allowing use of windows-curses on Windows
- Fix hostlist status lookup errors
- Fix AttributeError setting config values
- Fix setting 'Skip' priority
### Core
- Add workaround libtorrent 2.0 file_progress error
- Fix allow enabling any plugin Python version
- Export torrent get_magnet_uri method
- Fix loading magnet with resume_data and no metadata (#3478)
- Fix httpdownloader reencoding torrent file downloads (#3440)
- Fix lt listen_interfaces not comma-separated (#3337)
- Fix unable to remove magnet with delete_copies enabled (#3325)
- Fix Python 3.8 compatibility
- Fix loading config with double-quotes in string
- Fix pickle loading non-ascii state error (#3298)
- Fix creation of pidfile via command option
- Fix for peer.client UnicodeDecodeError
- Fix show_file unhandled dbus error
### Documentation
- Add How-to guides about services.
### Stats plugin
- Fix constant session status key warnings
- Fix cairo error
### Notifications plugin
- Fix email KeyError with status name
- Fix unhandled TypeErrors on Python 3
### Autoadd plugin
- Fix magnet missing applied labels
### Execute plugin
- Fix failing to run on Windows (#3439)
## 2.0.3 (2019-06-12)
### Gtk UI
- Fix errors running on Wayland (#3265).
- Fix Peers Tab tooltip and context menu errors (#3266).
### Web UI
- Fix TypeError in Peers Tab setting country flag.
- Fix reverse proxy header TypeError (#3260).
- Fix request.base 'idna' codec error (#3261).
- Fix unable to change password (#3262).
### Extractor plugin
- Fix potential error starting plugin.
### Documentation
- Fix macOS install typo.
- Fix Windows install instructions.
## 2.0.2 (2019-06-08)
### Packaging
- Add systemd deluged and deluge-web service files to package tarball (#2034)
### Core
- Fix Python 2 compatibility issue with SimpleNamespace.
## 2.0.1 (2019-06-07) ## 2.0.1 (2019-06-07)
### Packaging - Fix setup.py build error without git installed.
- Fix `setup.py` build error without git installed.
## 2.0.0 (2019-06-06) ## 2.0.0 (2019-06-06)
@ -267,37 +20,37 @@
there to allow acting upon them. there to allow acting upon them.
- Updated SSL/TLS Protocol parameters for better security. - Updated SSL/TLS Protocol parameters for better security.
- Make the distinction between adding to the session new unmanaged torrents - Make the distinction between adding to the session new unmanaged torrents
and torrents loaded from state. This will break backwards compatibility. and torrents loaded from state. This will break backwards compatability.
- Pass a copy of an event instead of passing the event arguments to the - Pass a copy of an event instead of passing the event arguments to the
event handlers. This will break backwards compatibility. event handlers. This will break backwards compatability.
- Allow changing ownership of torrents. - Allow changing ownership of torrents.
- File modifications on the auth file are now detected and when they happen, - File modifications on the auth file are now detected and when they happen,
the file is reloaded. Upon finding an old auth file with an old format, an the file is reloaded. Upon finding an old auth file with an old format, an
upgrade to the new format is made, file saved, and reloaded. upgrade to the new format is made, file saved, and reloaded.
- Authentication no longer requires a username/password. If one or both of - Authentication no longer requires a username/password. If one or both of
these is missing, an authentication error will be sent to the client these is missing, an authentication error will be sent to the client
which should then ask the username/password to the user. which sould then ask the username/password to the user.
- Implemented sequential downloads. - Implemented sequential downloads.
- Provide information about a torrent's pieces states - Provide information about a torrent's pieces states
- Add Option To Specify Outgoing Connection Interface. - Add Option To Specify Outgoing Connection Interface.
- Fix potential for host_id collision when creating hostlist entries. - Fix potential for host_id collision when creating hostlist entries.
### Gtk UI ### GtkUI
- Ported to GTK3 (3rd-party plugins will need updated). - Ported to GTK3 (3rd-party plugins will need updated).
- Allow changing ownership of torrents. - Allow changing ownership of torrents.
- Host entries in the Connection Manager UI are now editable. - Host entries in the Connection Manager UI are now editable.
- Implemented sequential downloads UI handling. - Implemented sequential downloads UI handling.
- Add optional pieces bar instead of a regular progress bar in torrent status tab. - Add optional pieces bar instead of a regular progress bar in torrent status tab.
- Make torrent opening compatible with all Unicode paths. - Make torrent opening compatible with all unicode paths.
- Fix magnet association button on Windows. - Fix magnet association button on Windows.
- Add keyboard shortcuts for changing queue position: - Add keyboard shortcuts for changing queue position:
- Up: `Ctrl+Alt+Up` - Up: Ctrl+Alt+Up
- Down: `Ctrl+Alt+Down` - Down: Ctrl+Alt+Down
- Top: `Ctrl+Alt+Shift+Up` - Top: Ctrl+Alt+Shift+Up
- Bottom: `Ctrl+Alt+Shift+Down` - Bottom: Ctrl+Alt+Shift+Down
### Web UI ### WebUI
- Server (deluge-web) now daemonizes by default, use '-d' or '--do-not-daemonize' to disable. - Server (deluge-web) now daemonizes by default, use '-d' or '--do-not-daemonize' to disable.
- Fixed the '--base' option to work for regular use, not just with reverse proxies. - Fixed the '--base' option to work for regular use, not just with reverse proxies.
@ -305,7 +58,7 @@
### Blocklist Plugin ### Blocklist Plugin
- Implemented whitelist support to both core and GTK UI. - Implemented whitelist support to both core and GTK UI.
- Implemented IP filter cleaning before each update. Restarting the deluge - Implemented ip filter cleaning before each update. Restarting the deluge
daemon is no longer needed. daemon is no longer needed.
- If "check_after_days" is 0(zero), the timer is not started anymore. It - If "check_after_days" is 0(zero), the timer is not started anymore. It
would keep updating one call after the other. If the value changed, the would keep updating one call after the other. If the value changed, the

View file

@ -7,13 +7,13 @@ All modules will require the [common](#common) section dependencies.
## Prerequisite ## Prerequisite
- [Python] _>= 3.6_ - [Python] _>= 3.5_
## Build ## Build
- [setuptools] - [setuptools]
- [intltool] - Optional: Desktop file translation for \*nix. - [intltool] - Optional: Desktop file translation for \*nix.
- [closure-compiler] - Minify javascript (alternative is [rjsmin]) - [closure-compiler] - Minify javascript (alternative is [slimit])
## Common ## Common
@ -23,26 +23,26 @@ All modules will require the [common](#common) section dependencies.
- [rencode] _>= 1.0.2_ - Encoding library. - [rencode] _>= 1.0.2_ - Encoding library.
- [PyXDG] - Access freedesktop.org standards for \*nix. - [PyXDG] - Access freedesktop.org standards for \*nix.
- [xdg-utils] - Provides xdg-open for \*nix. - [xdg-utils] - Provides xdg-open for \*nix.
- [six]
- [zope.interface] - [zope.interface]
- [chardet] - Optional: Encoding detection. - [chardet] - Optional: Encoding detection.
- [setproctitle] - Optional: Renaming processes. - [setproctitle] - Optional: Renaming processes.
- [Pillow] - Optional: Support for resizing tracker icons. - [Pillow] - Optional: Support for resizing tracker icons.
- [dbus-python] - Optional: Show item location in filemanager. - [dbus-python] - Optional: Show item location in filemanager.
- [ifaddr] - Optional: Verify network interfaces.
### Linux and BSD #### Linux and BSD
- [distro] - Optional: OS platform information. - [distro] - Optional: OS platform information.
### Windows OS #### Windows OS
- [pywin32] - [pywin32]
- [certifi] - [certifi]
## Core (deluged daemon) ## Core (deluged daemon)
- [libtorrent] _>= 1.2.0_ - [libtorrent] _>= 1.1.1_
- [GeoIP] or [pygeoip] - Optional: IP address country lookup. (_Debian: `python-geoip`_) - [GeoIP] - Optional: IP address location lookup. (_Debian: `python-geoip`_)
## GTK UI ## GTK UI
@ -50,9 +50,9 @@ All modules will require the [common](#common) section dependencies.
- [PyGObject] - [PyGObject]
- [Pycairo] - [Pycairo]
- [librsvg] _>= 2_ - [librsvg] _>= 2_
- [ayatanaappindicator3] w/GIR - Optional: Ubuntu system tray icon. - [libappindicator3] w/GIR - Optional: Ubuntu system tray icon.
### MacOS #### MacOS
- [GtkOSXApplication] - [GtkOSXApplication]
@ -71,7 +71,7 @@ All modules will require the [common](#common) section dependencies.
[setuptools]: https://setuptools.readthedocs.io/en/latest/ [setuptools]: https://setuptools.readthedocs.io/en/latest/
[intltool]: https://freedesktop.org/wiki/Software/intltool/ [intltool]: https://freedesktop.org/wiki/Software/intltool/
[closure-compiler]: https://developers.google.com/closure/compiler/ [closure-compiler]: https://developers.google.com/closure/compiler/
[rjsmin]: https://pypi.org/project/rjsmin/ [slimit]: https://slimit.readthedocs.io/en/latest/
[openssl]: https://www.openssl.org/ [openssl]: https://www.openssl.org/
[pyopenssl]: https://pyopenssl.org [pyopenssl]: https://pyopenssl.org
[twisted]: https://twistedmatrix.com [twisted]: https://twistedmatrix.com
@ -81,12 +81,14 @@ All modules will require the [common](#common) section dependencies.
[distro]: https://github.com/nir0s/distro [distro]: https://github.com/nir0s/distro
[pywin32]: https://github.com/mhammond/pywin32 [pywin32]: https://github.com/mhammond/pywin32
[certifi]: https://pypi.org/project/certifi/ [certifi]: https://pypi.org/project/certifi/
[py2-ipaddress]: https://pypi.org/project/py2-ipaddress/
[dbus-python]: https://pypi.org/project/dbus-python/ [dbus-python]: https://pypi.org/project/dbus-python/
[setproctitle]: https://pypi.org/project/setproctitle/ [setproctitle]: https://pypi.org/project/setproctitle/
[gtkosxapplication]: https://github.com/jralls/gtk-mac-integration [gtkosxapplication]: https://github.com/jralls/gtk-mac-integration
[chardet]: https://chardet.github.io/ [chardet]: https://chardet.github.io/
[rencode]: https://github.com/aresch/rencode [rencode]: https://github.com/aresch/rencode
[pyxdg]: https://www.freedesktop.org/wiki/Software/pyxdg/ [pyxdg]: https://www.freedesktop.org/wiki/Software/pyxdg/
[six]: https://pythonhosted.org/six/
[xdg-utils]: https://www.freedesktop.org/wiki/Software/xdg-utils/ [xdg-utils]: https://www.freedesktop.org/wiki/Software/xdg-utils/
[gtk+]: https://www.gtk.org/ [gtk+]: https://www.gtk.org/
[pycairo]: https://cairographics.org/pycairo/ [pycairo]: https://cairographics.org/pycairo/
@ -95,6 +97,5 @@ All modules will require the [common](#common) section dependencies.
[mako]: https://www.makotemplates.org/ [mako]: https://www.makotemplates.org/
[pygame]: https://www.pygame.org/ [pygame]: https://www.pygame.org/
[libnotify]: https://developer.gnome.org/libnotify/ [libnotify]: https://developer.gnome.org/libnotify/
[ayatanaappindicator3]: https://lazka.github.io/pgi-docs/AyatanaAppIndicator3-0.1/index.html [python-appindicator]: https://packages.ubuntu.com/xenial/python-appindicator
[librsvg]: https://wiki.gnome.org/action/show/Projects/LibRsvg [librsvg]: https://wiki.gnome.org/action/show/Projects/LibRsvg
[ifaddr]: https://pypi.org/project/ifaddr/

View file

@ -8,7 +8,6 @@ include version.py
include gen_web_gettext.py include gen_web_gettext.py
graft docs/man graft docs/man
graft packaging/systemd
include deluge/i18n/*.po include deluge/i18n/*.po
recursive-exclude deluge/i18n *.mo recursive-exclude deluge/i18n *.mo
@ -23,7 +22,7 @@ recursive-exclude deluge/tests *.pyc
graft deluge/ui/data graft deluge/ui/data
recursive-exclude deluge/ui/data *.desktop *.xml recursive-exclude deluge/ui/data *.desktop *.xml
graft deluge/ui/gtk3/glade graft deluge/ui/gtkui/glade
include deluge/ui/web/index.html include deluge/ui/web/index.html
include deluge/ui/web/css/*.css include deluge/ui/web/css/*.css

View file

@ -1,10 +1,10 @@
# Deluge BitTorrent Client # Deluge BitTorrent Client
[![build-status]][github-ci] [![docs-status]][rtd-deluge] [![build-status]][travis-deluge] [![docs-status]][rtd-deluge]
Deluge is a BitTorrent client that utilizes a daemon/client model. Deluge is a BitTorrent client that utilizes a daemon/client model.
It has various user interfaces available such as the GTK-UI, Web-UI and It has various user interfaces available such as the GTK-UI, Web-UI and
Console-UI. It uses [libtorrent][lt] at its core to handle the BitTorrent a Console-UI. It uses [libtorrent][lt] at it's core to handle the BitTorrent
protocol. protocol.
## Install ## Install
@ -13,17 +13,10 @@ From [PyPi](https://pypi.org/project/deluge):
pip install deluge pip install deluge
with all optional dependencies:
pip install deluge[all]
From source code: From source code:
pip install . python setup.py build
python setup.py install
with all optional dependencies:
pip install .[all]
See [DEPENDS](DEPENDS.md) and [Installing/Source] for dependency details. See [DEPENDS](DEPENDS.md) and [Installing/Source] for dependency details.
@ -58,14 +51,13 @@ See the [Thinclient guide] to connect to the daemon from another computer.
- [Homepage](https://deluge-torrent.org) - [Homepage](https://deluge-torrent.org)
- [User guide][user guide] - [User guide][user guide]
- [Forum](https://forum.deluge-torrent.org) - [Forum](https://forum.deluge-torrent.org)
- [IRC Libera.Chat #deluge](irc://irc.libera.chat/deluge) - [IRC Freenode #deluge](irc://irc.freenode.net/deluge)
- [Discord](https://discord.gg/nwaHSE6tqn)
[user guide]: https://dev.deluge-torrent.org/wiki/UserGuide [user guide]: https://dev.deluge-torrent.org/wiki/UserGuide
[thinclient guide]: https://dev.deluge-torrent.org/wiki/UserGuide/ThinClient [thinclient guide]: https://dev.deluge-torrent.org/wiki/UserGuide/ThinClient
[installing/source]: https://dev.deluge-torrent.org/wiki/Installing/Source [installing/source]: https://dev.deluge-torrent.org/wiki/Installing/Source
[build-status]: https://github.com/deluge-torrent/deluge/actions/workflows/ci.yml/badge.svg?branch=develop "CI" [build-status]: https://travis-ci.org/deluge-torrent/deluge.svg "Travis Status"
[github-ci]: https://github.com/deluge-torrent/deluge/actions/workflows/ci.yml [travis-deluge]: https://travis-ci.org/deluge-torrent/deluge
[docs-status]: https://readthedocs.org/projects/deluge/badge/?version=latest [docs-status]: https://readthedocs.org/projects/deluge/badge/?version=develop
[rtd-deluge]: https://deluge.readthedocs.io/en/latest/?badge=latest "Documentation Status" [rtd-deluge]: https://deluge.readthedocs.io/en/develop/?badge=develop "Documentation Status"
[lt]: https://libtorrent.org [lt]: https://libtorrent.org

View file

@ -1,6 +0,0 @@
from twisted.web.http import Request
__request__: Request
def _(string: str) -> str: ...
def _n(string: str) -> str: ...

53
appveyor.yml Normal file
View file

@ -0,0 +1,53 @@
environment:
PYTHON_VERSION: 3.6
PYTHON_ARCH: 64
PYTHON: "C:\\Python36-x64"
APPVEYOR_SAVE_CACHE_ON_ERROR: true
matrix:
- TOXENV: py36
pull_requests:
do_not_increment_build_number: true
install:
# If there is a newer build queued for same PR, cancel this one. Credit: JuliaLang devs
- ps:
if ($env:APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER -ne ((Invoke-RestMethod `
https://ci.appveyor.com/api/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/history?recordsNumber=50).builds | `
Where-Object pullRequestId -eq $env:APPVEYOR_PULL_REQUEST_NUMBER)[0].buildNumber) { `
throw "There are newer queued builds for this pull request, failing early." }
- "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
- "python -VV"
- if defined TOXENV (
python -m pip install tox tox_venv
) else (
python -m pip install -rrequirements.txt pygame bbfreeze pefile
)
- "SET PATH=C:\\OpenSSL-v11-Win64\\bin;%PATH%"
- openssl version -v
- python -m pip install deluge-libtorrent
- 'python -c "import libtorrent; print(libtorrent.__version__)"'
cache:
- '%LOCALAPPDATA%\pip\cache'
build: false
test_script:
- if defined TOXENV tox
# Commented out as require GTK3 to create package.
# after_test:
# - if not defined TOXENV python setup.py build && python setup.py install
# - cd %APPVEYOR_BUILD_FOLDER%\\packaging\\win32
# - if not defined TOXENV deluge-bbfreeze.py debug
# - "SET PATH=C:\\Program Files (x86)\\NSIS;%PATH%"
# - if not defined TOXENV makensis deluge-win32-installer.nsi
# - if not defined TOXENV 7z a deluge-win32.zip build-win32 "-x!*.exe"
# artifacts:
# - path: packaging\win32\deluge-win32.zip
# - path: packaging\win32\build-win32\*.exe
#on_success:
#

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
# #
@ -14,23 +15,19 @@ Example:
>>> from deluge._libtorrent import lt >>> from deluge._libtorrent import lt
""" """
from __future__ import unicode_literals
from deluge.common import VersionSplit, get_version from deluge.common import VersionSplit, get_version
from deluge.error import LibtorrentImportError
try: try:
import deluge.libtorrent as lt import deluge.libtorrent as lt
except ImportError: except ImportError:
try: import libtorrent as lt
import libtorrent as lt
except ImportError as ex:
raise LibtorrentImportError('No libtorrent library found: %s' % (ex))
REQUIRED_VERSION = '1.1.2.0'
REQUIRED_VERSION = '1.2.0.0'
LT_VERSION = lt.__version__ LT_VERSION = lt.__version__
if VersionSplit(LT_VERSION) < VersionSplit(REQUIRED_VERSION): if VersionSplit(LT_VERSION) < VersionSplit(REQUIRED_VERSION):
raise LibtorrentImportError( raise ImportError(
f'Deluge {get_version()} requires libtorrent >= {REQUIRED_VERSION}' 'Deluge %s requires libtorrent >= %s' % (get_version(), REQUIRED_VERSION)
) )

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com>
# #
@ -6,6 +7,8 @@
# See LICENSE for more details. # See LICENSE for more details.
# #
from __future__ import unicode_literals
import argparse import argparse
import logging import logging
import os import os
@ -92,7 +95,7 @@ def _get_version_detail():
except ImportError: except ImportError:
pass pass
version_str += 'Python: %s\n' % platform.python_version() version_str += 'Python: %s\n' % platform.python_version()
version_str += f'OS: {platform.system()} {common.get_os_version()}\n' version_str += 'OS: %s %s\n' % (platform.system(), common.get_os_version())
return version_str return version_str
@ -106,8 +109,8 @@ class DelugeTextHelpFormatter(argparse.RawDescriptionHelpFormatter):
line instead. This way list formatting is not mangled by textwrap.wrap. line instead. This way list formatting is not mangled by textwrap.wrap.
""" """
wrapped_lines = [] wrapped_lines = []
for line in text.splitlines(): for l in text.splitlines():
wrapped_lines.extend(textwrap.wrap(line, width, subsequent_indent=' ')) wrapped_lines.extend(textwrap.wrap(l, width, subsequent_indent=' '))
return wrapped_lines return wrapped_lines
def _format_action_invocation(self, action): def _format_action_invocation(self, action):
@ -119,7 +122,7 @@ class DelugeTextHelpFormatter(argparse.RawDescriptionHelpFormatter):
""" """
if not action.option_strings: if not action.option_strings:
(metavar,) = self._metavar_formatter(action, action.dest)(1) metavar, = self._metavar_formatter(action, action.dest)(1)
return metavar return metavar
else: else:
parts = [] parts = []
@ -134,7 +137,7 @@ class DelugeTextHelpFormatter(argparse.RawDescriptionHelpFormatter):
default = action.dest.upper() default = action.dest.upper()
args_string = self._format_args(action, default) args_string = self._format_args(action, default)
opt = ', '.join(action.option_strings) opt = ', '.join(action.option_strings)
parts.append(f'{opt} {args_string}') parts.append('%s %s' % (opt, args_string))
return ', '.join(parts) return ', '.join(parts)
@ -162,7 +165,7 @@ class ArgParserBase(argparse.ArgumentParser):
self.log_stream = kwargs['log_stream'] self.log_stream = kwargs['log_stream']
del kwargs['log_stream'] del kwargs['log_stream']
super().__init__(*args, **kwargs) super(ArgParserBase, self).__init__(*args, **kwargs)
self.common_setup = False self.common_setup = False
self.process_arg_group = False self.process_arg_group = False
@ -199,7 +202,7 @@ class ArgParserBase(argparse.ArgumentParser):
self.group.add_argument( self.group.add_argument(
'-L', '-L',
'--loglevel', '--loglevel',
choices=[level for k in deluge.log.levels for level in (k, k.upper())], choices=[l for k in deluge.log.levels for l in (k, k.upper())],
help=_('Set the log level (none, error, warning, info, debug)'), help=_('Set the log level (none, error, warning, info, debug)'),
metavar='<level>', metavar='<level>',
) )
@ -243,7 +246,7 @@ class ArgParserBase(argparse.ArgumentParser):
argparse.Namespace: The parsed arguments. argparse.Namespace: The parsed arguments.
""" """
options = super().parse_args(args=args) options = super(ArgParserBase, self).parse_args(args=args)
return self._handle_ui_options(options) return self._handle_ui_options(options)
def parse_known_ui_args(self, args, withhold=None): def parse_known_ui_args(self, args, withhold=None):
@ -259,9 +262,9 @@ class ArgParserBase(argparse.ArgumentParser):
""" """
if withhold: if withhold:
args = [a for a in args if a not in withhold] args = [a for a in args if a not in withhold]
options, remaining = super().parse_known_args(args=args) options, remaining = super(ArgParserBase, self).parse_known_args(args=args)
options.remaining = remaining options.remaining = remaining
# Handle common and process group options # Hanlde common and process group options
return self._handle_ui_options(options) return self._handle_ui_options(options)
def _handle_ui_options(self, options): def _handle_ui_options(self, options):
@ -322,22 +325,22 @@ class ArgParserBase(argparse.ArgumentParser):
# Write pid file before chuid # Write pid file before chuid
if options.pidfile: if options.pidfile:
with open(options.pidfile, 'w') as _file: with open(options.pidfile, 'wb') as _file:
_file.write('%d\n' % os.getpid()) _file.write('%d\n' % os.getpid())
if not common.windows_check(): if not common.windows_check():
if options.group:
if not options.group.isdigit():
import grp
options.group = grp.getgrnam(options.group)[2]
os.setgid(options.group)
if options.user: if options.user:
if not options.user.isdigit(): if not options.user.isdigit():
import pwd import pwd
options.user = pwd.getpwnam(options.user)[2] options.user = pwd.getpwnam(options.user)[2]
os.setuid(options.user) os.setuid(options.user)
if options.group:
if not options.group.isdigit():
import grp
options.group = grp.getgrnam(options.group)[2]
os.setuid(options.group)
return options return options

View file

@ -9,7 +9,13 @@
# License. # License.
# Written by Petru Paler # Written by Petru Paler
# Updated by Calum Lind to support Python 3. # Updated by Calum Lind to support both Python 2 and Python 3.
from __future__ import unicode_literals
from sys import version_info
PY2 = version_info.major == 2
class BTFailure(Exception): class BTFailure(Exception):
@ -84,7 +90,8 @@ def bdecode(x):
return r return r
class Bencached: class Bencached(object):
__slots__ = ['bencoded'] __slots__ = ['bencoded']
def __init__(self, s): def __init__(self, s):
@ -139,6 +146,10 @@ encode_func[dict] = encode_dict
encode_func[bool] = encode_bool encode_func[bool] = encode_bool
encode_func[str] = encode_string encode_func[str] = encode_string
encode_func[bytes] = encode_bytes encode_func[bytes] = encode_bytes
if PY2:
encode_func[long] = encode_int # noqa: F821
encode_func[str] = encode_bytes
encode_func[unicode] = encode_string # noqa: F821
def bencode(x): def bencode(x):

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2007,2008 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2007,2008 Andrew Resch <andrewresch@gmail.com>
# #
@ -7,43 +8,44 @@
# #
"""Common functions for various parts of Deluge to use.""" """Common functions for various parts of Deluge to use."""
from __future__ import division, print_function, unicode_literals
import base64 import base64
import binascii import binascii
import functools import functools
import glob import glob
import locale
import logging import logging
import numbers import numbers
import os import os
import platform import platform
import re import re
import socket
import subprocess import subprocess
import sys import sys
import tarfile import tarfile
import time import time
from contextlib import closing from contextlib import closing
from datetime import datetime from datetime import datetime
from importlib import resources from io import BytesIO, open
from io import BytesIO
from pathlib import Path import pkg_resources
from urllib.parse import unquote_plus, urljoin
from urllib.request import pathname2url
from deluge.decorators import deprecated from deluge.decorators import deprecated
from deluge.error import InvalidPathError from deluge.error import InvalidPathError
try:
from importlib.metadata import distribution
except ImportError:
from pkg_resources import get_distribution as distribution
try: try:
import chardet import chardet
except ImportError: except ImportError:
chardet = None chardet = None
try:
from urllib.parse import unquote_plus, urljoin
from urllib.request import pathname2url
except ImportError:
# PY2 fallback
from urlparse import urljoin # pylint: disable=ungrouped-imports
from urllib import pathname2url, unquote_plus # pylint: disable=ungrouped-imports
# Windows workaround for HTTPS requests requiring certificate authority bundle. # Windows workaround for HTTPS requests requiring certificate authority bundle.
# see: https://twistedmatrix.com/trac/ticket/9209 # see: https://twistedmatrix.com/trac/ticket/9209
if platform.system() in ('Windows', 'Microsoft'): if platform.system() in ('Windows', 'Microsoft'):
@ -51,11 +53,6 @@ if platform.system() in ('Windows', 'Microsoft'):
os.environ['SSL_CERT_FILE'] = where() os.environ['SSL_CERT_FILE'] = where()
try:
import ifaddr
except ImportError:
ifaddr = None
if platform.system() not in ('Windows', 'Microsoft', 'Darwin'): if platform.system() not in ('Windows', 'Microsoft', 'Darwin'):
# gi makes dbus available on Window but don't import it as unused. # gi makes dbus available on Window but don't import it as unused.
@ -84,11 +81,7 @@ TORRENT_STATE = [
# The output formatting for json.dump # The output formatting for json.dump
JSON_FORMAT = {'indent': 4, 'sort_keys': True, 'ensure_ascii': False} JSON_FORMAT = {'indent': 4, 'sort_keys': True, 'ensure_ascii': False}
DBUS_FM_ID = 'org.freedesktop.FileManager1' PY2 = sys.version_info.major == 2
DBUS_FM_PATH = '/org/freedesktop/FileManager1'
# Retained for plugin backward compatibility
PY2 = False
def get_version(): def get_version():
@ -97,7 +90,7 @@ def get_version():
Returns: Returns:
str: The version of Deluge. str: The version of Deluge.
""" """
return distribution('Deluge').version return pkg_resources.get_distribution('Deluge').version
def get_default_config_dir(filename=None): def get_default_config_dir(filename=None):
@ -115,8 +108,10 @@ def get_default_config_dir(filename=None):
def save_config_path(resource): def save_config_path(resource):
app_data_path = os.environ.get('APPDATA') app_data_path = os.environ.get('APPDATA')
if not app_data_path: if not app_data_path:
import winreg try:
import winreg
except ImportError:
import _winreg as winreg # For Python 2.
hkey = winreg.OpenKey( hkey = winreg.OpenKey(
winreg.HKEY_CURRENT_USER, winreg.HKEY_CURRENT_USER,
'Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders', 'Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders',
@ -149,14 +144,14 @@ def get_default_download_dir():
try: try:
user_dirs_path = os.path.join(xdg_config_home, 'user-dirs.dirs') user_dirs_path = os.path.join(xdg_config_home, 'user-dirs.dirs')
with open(user_dirs_path, encoding='utf8') as _file: with open(user_dirs_path, 'r', encoding='utf8') as _file:
for line in _file: for line in _file:
if not line.startswith('#') and line.startswith('XDG_DOWNLOAD_DIR'): if not line.startswith('#') and line.startswith('XDG_DOWNLOAD_DIR'):
download_dir = os.path.expandvars( download_dir = os.path.expandvars(
line.partition('=')[2].rstrip().strip('"') line.partition('=')[2].rstrip().strip('"')
) )
break break
except OSError: except IOError:
pass pass
if not download_dir: if not download_dir:
@ -180,8 +175,8 @@ def archive_files(arc_name, filepaths, message=None, rotate=10):
from deluge.configmanager import get_config_dir from deluge.configmanager import get_config_dir
# Set archive compression to lzma # Set archive compression to lzma with bz2 fallback.
arc_comp = 'xz' arc_comp = 'xz' if not PY2 else 'bz2'
archive_dir = os.path.join(get_config_dir(), 'archive') archive_dir = os.path.join(get_config_dir(), 'archive')
timestamp = datetime.now().replace(microsecond=0).isoformat().replace(':', '-') timestamp = datetime.now().replace(microsecond=0).isoformat().replace(':', '-')
@ -277,7 +272,7 @@ def get_os_version():
os_version = list(platform.mac_ver()) os_version = list(platform.mac_ver())
os_version[1] = '' # versioninfo always empty. os_version[1] = '' # versioninfo always empty.
elif distro: elif distro:
os_version = (distro.name(), distro.version(), distro.codename()) os_version = distro.linux_distribution()
else: else:
os_version = (platform.release(),) os_version = (platform.release(),)
@ -297,22 +292,20 @@ def get_pixmap(fname):
return resource_filename('deluge', os.path.join('ui', 'data', 'pixmaps', fname)) return resource_filename('deluge', os.path.join('ui', 'data', 'pixmaps', fname))
def resource_filename(module: str, path: str) -> str: def resource_filename(module, path):
"""Get filesystem path for a non-python resource. """Get filesystem path for a resource.
Abstracts getting module resource files. Originally created to This function contains a work-around for pkg_resources.resource_filename
workaround pkg_resources.resource_filename limitations with not returning the correct path with multiple packages installed.
multiple Deluge packages installed.
So if there's a second deluge package, installed globally and another in
develop mode somewhere else, while pkg_resources.get_distribution('Deluge')
returns the proper deluge instance, pkg_resources.resource_filename
does not, it returns the first found on the python path, which is wrong.
""" """
path = Path(path) return pkg_resources.get_distribution('Deluge').get_resource_filename(
pkg_resources._manager, os.path.join(*(module.split('.') + [path]))
try: )
with resources.as_file(resources.files(module) / path) as resource_file:
return str(resource_file)
except AttributeError:
# Python <= 3.8
with resources.path(module, path.parts[0]) as resource_file:
return str(resource_file.joinpath(*path.parts[1:]))
def open_file(path, timestamp=None): def open_file(path, timestamp=None):
@ -362,30 +355,27 @@ def show_file(path, timestamp=None):
timestamp, timestamp,
timestamp, timestamp,
) )
if dbus: if dbus:
bus = dbus.SessionBus() bus = dbus.SessionBus()
try: filemanager1 = bus.get_object(
filemanager1 = bus.get_object(DBUS_FM_ID, DBUS_FM_PATH) 'org.freedesktop.FileManager1', '/org/freedesktop/FileManager1'
except dbus.exceptions.DBusException as ex: )
log.debug('Unable to get dbus file manager: %s', ex) paths = [urljoin('file:', pathname2url(path))]
# Fallback to xdg-open filemanager1.ShowItems(
else: paths, startup_id, dbus_interface='org.freedesktop.FileManager1'
paths = [urljoin('file:', pathname2url(path))] )
filemanager1.ShowItems(paths, startup_id, dbus_interface=DBUS_FM_ID) else:
return env = os.environ.copy()
env['DESKTOP_STARTUP_ID'] = startup_id.replace('dbus', 'xdg-open')
env = os.environ.copy() # No option in xdg to highlight a file so just open parent folder.
env['DESKTOP_STARTUP_ID'] = startup_id.replace('dbus', 'xdg-open') subprocess.Popen(['xdg-open', os.path.dirname(path.rstrip('/'))], env=env)
# No option in xdg to highlight a file so just open parent folder.
subprocess.Popen(['xdg-open', os.path.dirname(path.rstrip('/'))], env=env)
def open_url_in_browser(url): def open_url_in_browser(url):
""" """
Opens a URL in the desktop's default browser Opens a url in the desktop's default browser
:param url: the URL to open :param url: the url to open
:type url: string :type url: string
""" """
@ -424,49 +414,43 @@ def translate_size_units():
def fsize(fsize_b, precision=1, shortform=False): def fsize(fsize_b, precision=1, shortform=False):
"""Formats the bytes value into a string with KiB, MiB, GiB or TiB units. """Formats the bytes value into a string with KiB, MiB or GiB units.
Args: Args:
fsize_b (int): The filesize in bytes. fsize_b (int): The filesize in bytes.
precision (int): The output float precision, 1 by default. precision (int): The filesize float precision.
shortform (bool): The output short|long form, False (long form) by default.
Returns: Returns:
str: A formatted string in KiB, MiB, GiB or TiB units. str: A formatted string in KiB, MiB or GiB units.
Examples: Examples:
>>> fsize(112245) >>> fsize(112245)
'109.6 KiB' '109.6 KiB'
>>> fsize(112245, precision=0) >>> fsize(112245, precision=0)
'110 KiB' '110 KiB'
>>> fsize(112245, shortform=True)
'109.6 K'
Note: Note:
This function has been refactored for performance with the This function has been refactored for perfomance with the
fsize units being translated outside the function. fsize units being translated outside the function.
Notice that short forms K|M|G|T are synonymous here with
KiB|MiB|GiB|TiB. They are powers of 1024, not 1000.
""" """
if fsize_b >= 1024**4: if fsize_b >= 1024 ** 4:
return '%.*f %s' % ( return '%.*f %s' % (
precision, precision,
fsize_b / 1024**4, fsize_b / 1024 ** 4,
tib_txt_short if shortform else tib_txt, tib_txt_short if shortform else tib_txt,
) )
elif fsize_b >= 1024**3: elif fsize_b >= 1024 ** 3:
return '%.*f %s' % ( return '%.*f %s' % (
precision, precision,
fsize_b / 1024**3, fsize_b / 1024 ** 3,
gib_txt_short if shortform else gib_txt, gib_txt_short if shortform else gib_txt,
) )
elif fsize_b >= 1024**2: elif fsize_b >= 1024 ** 2:
return '%.*f %s' % ( return '%.*f %s' % (
precision, precision,
fsize_b / 1024**2, fsize_b / 1024 ** 2,
mib_txt_short if shortform else mib_txt, mib_txt_short if shortform else mib_txt,
) )
elif fsize_b >= 1024: elif fsize_b >= 1024:
@ -484,7 +468,7 @@ def fpcnt(dec, precision=2):
Args: Args:
dec (float): The ratio in the range [0.0, 1.0]. dec (float): The ratio in the range [0.0, 1.0].
precision (int): The output float precision, 2 by default. precision (int): The percentage float precision.
Returns: Returns:
str: A formatted string representing a percentage. str: A formatted string representing a percentage.
@ -508,8 +492,6 @@ def fspeed(bps, precision=1, shortform=False):
Args: Args:
bps (int): The speed in bytes per second. bps (int): The speed in bytes per second.
precision (int): The output float precision, 1 by default.
shortform (bool): The output short|long form, False (long form) by default.
Returns: Returns:
str: A formatted string representing transfer speed. str: A formatted string representing transfer speed.
@ -518,34 +500,30 @@ def fspeed(bps, precision=1, shortform=False):
>>> fspeed(43134) >>> fspeed(43134)
'42.1 KiB/s' '42.1 KiB/s'
Note:
Notice that short forms K|M|G|T are synonymous here with
KiB|MiB|GiB|TiB. They are powers of 1024, not 1000.
""" """
if bps < 1024**2: if bps < 1024 ** 2:
return '%.*f %s' % ( return '%.*f %s' % (
precision, precision,
bps / 1024, bps / 1024,
_('K/s') if shortform else _('KiB/s'), _('K/s') if shortform else _('KiB/s'),
) )
elif bps < 1024**3: elif bps < 1024 ** 3:
return '%.*f %s' % ( return '%.*f %s' % (
precision, precision,
bps / 1024**2, bps / 1024 ** 2,
_('M/s') if shortform else _('MiB/s'), _('M/s') if shortform else _('MiB/s'),
) )
elif bps < 1024**4: elif bps < 1024 ** 4:
return '%.*f %s' % ( return '%.*f %s' % (
precision, precision,
bps / 1024**3, bps / 1024 ** 3,
_('G/s') if shortform else _('GiB/s'), _('G/s') if shortform else _('GiB/s'),
) )
else: else:
return '%.*f %s' % ( return '%.*f %s' % (
precision, precision,
bps / 1024**4, bps / 1024 ** 4,
_('T/s') if shortform else _('TiB/s'), _('T/s') if shortform else _('TiB/s'),
) )
@ -558,7 +536,7 @@ def fpeer(num_peers, total_peers):
total_peers (int): The total number of peers. total_peers (int): The total number of peers.
Returns: Returns:
str: A formatted string 'num_peers (total_peers)' or if total_peers < 0, just 'num_peers'. str: A formatted string 'num_peers (total_peers)' or total_peers < 0, just 'num_peers'.
Examples: Examples:
>>> fpeer(10, 20) >>> fpeer(10, 20)
@ -568,9 +546,9 @@ def fpeer(num_peers, total_peers):
""" """
if total_peers > -1: if total_peers > -1:
return f'{num_peers:d} ({total_peers:d})' return '{:d} ({:d})'.format(num_peers, total_peers)
else: else:
return f'{num_peers:d}' return '{:d}'.format(num_peers)
def ftime(secs): def ftime(secs):
@ -587,7 +565,7 @@ def ftime(secs):
'6h 23m' '6h 23m'
Note: Note:
This function has been refactored for performance. This function has been refactored for perfomance.
""" """
@ -596,27 +574,27 @@ def ftime(secs):
if secs <= 0: if secs <= 0:
time_str = '' time_str = ''
elif secs < 60: elif secs < 60:
time_str = f'{secs}s' time_str = '{}s'.format(secs)
elif secs < 3600: elif secs < 3600:
time_str = f'{secs // 60}m {secs % 60}s' time_str = '{}m {}s'.format(secs // 60, secs % 60)
elif secs < 86400: elif secs < 86400:
time_str = f'{secs // 3600}h {secs // 60 % 60}m' time_str = '{}h {}m'.format(secs // 3600, secs // 60 % 60)
elif secs < 604800: elif secs < 604800:
time_str = f'{secs // 86400}d {secs // 3600 % 24}h' time_str = '{}d {}h'.format(secs // 86400, secs // 3600 % 24)
elif secs < 31449600: elif secs < 31449600:
time_str = f'{secs // 604800}w {secs // 86400 % 7}d' time_str = '{}w {}d'.format(secs // 604800, secs // 86400 % 7)
else: else:
time_str = f'{secs // 31449600}y {secs // 604800 % 52}w' time_str = '{}y {}w'.format(secs // 31449600, secs // 604800 % 52)
return time_str return time_str
def fdate(seconds, date_only=False, precision_secs=False): def fdate(seconds, date_only=False, precision_secs=False):
"""Formats a date time string in the locale's date representation based on the system's timezone. """Formats a date time string in the locale's date representation based on the systems timezone.
Args: Args:
seconds (float): Time in seconds since the Epoch. seconds (float): Time in seconds since the Epoch.
date_only (bool): Whether to include only the date, False by default. precision_secs (bool): Include seconds in time format.
precision_secs (bool): Include seconds in time format, False by default.
Returns: Returns:
str: A string in the locale's datetime representation or "" if seconds < 0 str: A string in the locale's datetime representation or "" if seconds < 0
@ -641,14 +619,10 @@ def tokenize(text):
Returns: Returns:
list: A list of strings and/or numbers. list: A list of strings and/or numbers.
Note: This function is used to implement robust tokenization of user input
This function is used to implement robust tokenization of user input It automatically coerces integer and floating point numbers, ignores
It automatically coerces integer and floating point numbers, ignores whitespace and knows how to separate numbers from strings even without
whitespace and knows how to separate numbers from strings even without whitespace.
whitespace.
Possible optimization: move the 2 regexes outside of function.
""" """
tokenized_input = [] tokenized_input = []
for token in re.split(r'(\d+(?:\.\d+)?)', text): for token in re.split(r'(\d+(?:\.\d+)?)', text):
@ -664,21 +638,17 @@ def tokenize(text):
size_units = [ size_units = [
{'prefix': 'b', 'divider': 1, 'singular': 'byte', 'plural': 'bytes'}, {'prefix': 'b', 'divider': 1, 'singular': 'byte', 'plural': 'bytes'},
{'prefix': 'KiB', 'divider': 1024**1}, {'prefix': 'KiB', 'divider': 1024 ** 1},
{'prefix': 'MiB', 'divider': 1024**2}, {'prefix': 'MiB', 'divider': 1024 ** 2},
{'prefix': 'GiB', 'divider': 1024**3}, {'prefix': 'GiB', 'divider': 1024 ** 3},
{'prefix': 'TiB', 'divider': 1024**4}, {'prefix': 'TiB', 'divider': 1024 ** 4},
{'prefix': 'PiB', 'divider': 1024**5}, {'prefix': 'PiB', 'divider': 1024 ** 5},
{'prefix': 'k', 'divider': 1000**1}, {'prefix': 'KB', 'divider': 1000 ** 1},
{'prefix': 'm', 'divider': 1000**2}, {'prefix': 'MB', 'divider': 1000 ** 2},
{'prefix': 'g', 'divider': 1000**3}, {'prefix': 'GB', 'divider': 1000 ** 3},
{'prefix': 't', 'divider': 1000**4}, {'prefix': 'TB', 'divider': 1000 ** 4},
{'prefix': 'p', 'divider': 1000**5}, {'prefix': 'PB', 'divider': 1000 ** 5},
{'prefix': 'KB', 'divider': 1000**1}, {'prefix': 'm', 'divider': 1000 ** 2},
{'prefix': 'MB', 'divider': 1000**2},
{'prefix': 'GB', 'divider': 1000**3},
{'prefix': 'TB', 'divider': 1000**4},
{'prefix': 'PB', 'divider': 1000**5},
] ]
@ -721,21 +691,11 @@ def parse_human_size(size):
raise InvalidSize(msg % (size, tokens)) raise InvalidSize(msg % (size, tokens))
def anchorify_urls(text: str) -> str:
"""
Wrap all occurrences of text URLs with HTML
"""
url_pattern = r'((htt)|(ft)|(ud))ps?://\S+'
html_href_pattern = r'<a href="\g<0>">\g<0></a>'
return re.sub(url_pattern, html_href_pattern, text)
def is_url(url): def is_url(url):
""" """
A simple test to check if the URL is valid A simple test to check if the URL is valid
:param url: the URL to test :param url: the url to test
:type url: string :type url: string
:returns: True or False :returns: True or False
:rtype: bool :rtype: bool
@ -746,9 +706,6 @@ def is_url(url):
True True
""" """
if not url:
return False
return url.partition('://')[0] in ('http', 'https', 'ftp', 'udp') return url.partition('://')[0] in ('http', 'https', 'ftp', 'udp')
@ -763,9 +720,6 @@ def is_infohash(infohash):
bool: True if valid infohash, False otherwise. bool: True if valid infohash, False otherwise.
""" """
if not infohash:
return False
return len(infohash) == 40 and infohash.isalnum() return len(infohash) == 40 and infohash.isalnum()
@ -773,15 +727,13 @@ MAGNET_SCHEME = 'magnet:?'
XT_BTIH_PARAM = 'xt=urn:btih:' XT_BTIH_PARAM = 'xt=urn:btih:'
DN_PARAM = 'dn=' DN_PARAM = 'dn='
TR_PARAM = 'tr=' TR_PARAM = 'tr='
TR_TIER_PARAM = 'tr.'
TR_TIER_REGEX = re.compile(r'^tr.(\d+)=(\S+)')
def is_magnet(uri): def is_magnet(uri):
""" """
A check to determine if a URI is a valid bittorrent magnet URI A check to determine if a uri is a valid bittorrent magnet uri
:param uri: the URI to check :param uri: the uri to check
:type uri: string :type uri: string
:returns: True or False :returns: True or False
:rtype: bool :rtype: bool
@ -817,6 +769,8 @@ def get_magnet_info(uri):
""" """
tr0_param = 'tr.'
tr0_param_regex = re.compile(r'^tr.(\d+)=(\S+)')
if not uri.startswith(MAGNET_SCHEME): if not uri.startswith(MAGNET_SCHEME):
return {} return {}
@ -844,14 +798,12 @@ def get_magnet_info(uri):
tracker = unquote_plus(param[len(TR_PARAM) :]) tracker = unquote_plus(param[len(TR_PARAM) :])
trackers[tracker] = tier trackers[tracker] = tier
tier += 1 tier += 1
elif param.startswith(TR_TIER_PARAM): elif param.startswith(tr0_param):
tracker_match = re.match(TR_TIER_REGEX, param) try:
if not tracker_match: tier, tracker = re.match(tr0_param_regex, param).groups()
continue trackers[tracker] = tier
except AttributeError:
tier, tracker = tracker_match.groups() pass
tracker = unquote_plus(tracker)
trackers[tracker] = int(tier)
if info_hash: if info_hash:
if not name: if not name:
@ -867,15 +819,15 @@ def get_magnet_info(uri):
def create_magnet_uri(infohash, name=None, trackers=None): def create_magnet_uri(infohash, name=None, trackers=None):
"""Creates a magnet URI """Creates a magnet uri
Args: Args:
infohash (str): The info-hash of the torrent. infohash (str): The info-hash of the torrent.
name (str, optional): The name of the torrent. name (str, optional): The name of the torrent.
trackers (list or dict, optional): A list of trackers or a dict or some {tracker: tier} pairs. trackers (list or dict, optional): A list of trackers or dict or {tracker: tier} pairs.
Returns: Returns:
str: A magnet URI string. str: A magnet uri string.
""" """
try: try:
@ -914,7 +866,7 @@ def get_path_size(path):
return os.path.getsize(path) return os.path.getsize(path)
dir_size = 0 dir_size = 0
for p, dummy_dirs, files in os.walk(path): for (p, dummy_dirs, files) in os.walk(path):
for _file in files: for _file in files:
filename = os.path.join(p, _file) filename = os.path.join(p, _file)
dir_size += os.path.getsize(filename) dir_size += os.path.getsize(filename)
@ -946,29 +898,6 @@ def free_space(path):
return disk_data.f_bavail * block_size return disk_data.f_bavail * block_size
def is_interface(interface):
"""Check if interface is a valid IP or network adapter.
Args:
interface (str): The IP or interface name to test.
Returns:
bool: Whether interface is valid is not.
Examples:
Windows:
>>> is_interface('{7A30AE62-23ZA-3744-Z844-A5B042524871}')
>>> is_interface('127.0.0.1')
True
Linux:
>>> is_interface('lo')
>>> is_interface('127.0.0.1')
True
"""
return is_ip(interface) or is_interface_name(interface)
def is_ip(ip): def is_ip(ip):
"""A test to see if 'ip' is a valid IPv4 or IPv6 address. """A test to see if 'ip' is a valid IPv4 or IPv6 address.
@ -1004,12 +933,15 @@ def is_ipv4(ip):
""" """
import socket
try: try:
socket.inet_pton(socket.AF_INET, ip) if windows_check():
except OSError: return socket.inet_aton(ip)
else:
return socket.inet_pton(socket.AF_INET, ip)
except socket.error:
return False return False
else:
return True
def is_ipv6(ip): def is_ipv6(ip):
@ -1028,51 +960,23 @@ def is_ipv6(ip):
""" """
try: try:
socket.inet_pton(socket.AF_INET6, ip) import ipaddress
except OSError: except ImportError:
return False import socket
try:
return socket.inet_pton(socket.AF_INET6, ip)
except (socket.error, AttributeError):
if windows_check():
log.warning('Unable to verify IPv6 Address on Windows.')
return True
else: else:
return True
def is_interface_name(name):
"""Returns True if an interface name exists.
Args:
name (str): The Interface to test. eg. eth0 linux. GUID on Windows.
Returns:
bool: Whether name is valid or not.
Examples:
>>> is_interface_name("eth0")
True
>>> is_interface_name("{7A30AE62-23ZA-3744-Z844-A5B042524871}")
True
"""
if not windows_check():
try: try:
socket.if_nametoindex(name) return ipaddress.IPv6Address(decode_bytes(ip))
except OSError: except ipaddress.AddressValueError:
pass pass
else:
return True
if ifaddr: return False
try:
adapters = ifaddr.get_adapters()
except OSError:
return True
else:
return any([name == a.name for a in adapters])
if windows_check():
regex = '^{[0-9A-Z]{8}-([0-9A-Z]{4}-){3}[0-9A-Z]{12}}$'
return bool(re.search(regex, str(name)))
return True
def decode_bytes(byte_str, encoding='utf8'): def decode_bytes(byte_str, encoding='utf8'):
@ -1103,9 +1007,9 @@ def decode_bytes(byte_str, encoding='utf8'):
if encoding.lower() not in ['utf8', 'utf-8']: if encoding.lower() not in ['utf8', 'utf-8']:
encodings.insert(0, lambda: (encoding, 'strict')) encodings.insert(0, lambda: (encoding, 'strict'))
for enc in encodings: for l in encodings:
try: try:
return byte_str.decode(*enc()) return byte_str.decode(*l())
except UnicodeDecodeError: except UnicodeDecodeError:
pass pass
return '' return ''
@ -1150,7 +1054,7 @@ def utf8_encode_structure(data):
@functools.total_ordering @functools.total_ordering
class VersionSplit: class VersionSplit(object):
""" """
Used for comparing version numbers. Used for comparing version numbers.
@ -1234,7 +1138,6 @@ AUTH_LEVEL_DEFAULT = AUTH_LEVEL_NORMAL
def create_auth_file(): def create_auth_file():
import stat import stat
import deluge.configmanager import deluge.configmanager
auth_file = deluge.configmanager.get_config_dir('auth') auth_file = deluge.configmanager.get_config_dir('auth')
@ -1250,7 +1153,6 @@ def create_auth_file():
def create_localclient_account(append=False): def create_localclient_account(append=False):
import random import random
from hashlib import sha1 as sha from hashlib import sha1 as sha
import deluge.configmanager import deluge.configmanager
auth_file = deluge.configmanager.get_config_dir('auth') auth_file = deluge.configmanager.get_config_dir('auth')
@ -1273,7 +1175,7 @@ def create_localclient_account(append=False):
def get_localhost_auth(): def get_localhost_auth():
"""Grabs the localclient auth line from the 'auth' file and creates a localhost URI. """Grabs the localclient auth line from the 'auth' file and creates a localhost uri.
Returns: Returns:
tuple: With the username and password to login as. tuple: With the username and password to login as.
@ -1329,10 +1231,15 @@ def set_env_variable(name, value):
http://sourceforge.net/p/gramps/code/HEAD/tree/branches/maintenance/gramps32/src/TransUtils.py http://sourceforge.net/p/gramps/code/HEAD/tree/branches/maintenance/gramps32/src/TransUtils.py
""" """
# Update Python's copy of the environment variables # Update Python's copy of the environment variables
os.environ[name] = value try:
os.environ[name] = value
except UnicodeEncodeError:
# Python 2
os.environ[name] = value.encode('utf8')
if windows_check(): if windows_check():
from ctypes import cdll, windll from ctypes import windll
from ctypes import cdll
# Update the copy maintained by Windows (so SysInternals Process Explorer sees it) # Update the copy maintained by Windows (so SysInternals Process Explorer sees it)
result = windll.kernel32.SetEnvironmentVariableW(name, value) result = windll.kernel32.SetEnvironmentVariableW(name, value)
@ -1348,13 +1255,56 @@ def set_env_variable(name, value):
) )
# Update the copy maintained by msvcrt (used by gtk+ runtime) # Update the copy maintained by msvcrt (used by gtk+ runtime)
result = cdll.msvcrt._wputenv(f'{name}={value}') result = cdll.msvcrt._wputenv('%s=%s' % (name, value))
if result != 0: if result != 0:
log.info("Failed to set Env Var '%s' (msvcrt._putenv)", name) log.info("Failed to set Env Var '%s' (msvcrt._putenv)", name)
else: else:
log.debug("Set Env Var '%s' to '%s' (msvcrt._putenv)", name, value) log.debug("Set Env Var '%s' to '%s' (msvcrt._putenv)", name, value)
def unicode_argv():
""" Gets sys.argv as list of unicode objects on any platform."""
if windows_check():
# Versions 2.x of Python don't support Unicode in sys.argv on
# Windows, with the underlying Windows API instead replacing multi-byte
# characters with '?'.
from ctypes import POINTER, byref, cdll, c_int, windll
from ctypes.wintypes import LPCWSTR, LPWSTR
get_cmd_linew = cdll.kernel32.GetCommandLineW
get_cmd_linew.argtypes = []
get_cmd_linew.restype = LPCWSTR
cmdline_to_argvw = windll.shell32.CommandLineToArgvW
cmdline_to_argvw.argtypes = [LPCWSTR, POINTER(c_int)]
cmdline_to_argvw.restype = POINTER(LPWSTR)
cmd = get_cmd_linew()
argc = c_int(0)
argv = cmdline_to_argvw(cmd, byref(argc))
if argc.value > 0:
# Remove Python executable and commands if present
start = argc.value - len(sys.argv)
return [argv[i] for i in range(start, argc.value)]
else:
# On other platforms, we have to find the likely encoding of the args and decode
# First check if sys.stdout or stdin have encoding set
encoding = getattr(sys.stdout, 'encoding') or getattr(sys.stdin, 'encoding')
# If that fails, check what the locale is set to
encoding = encoding or locale.getpreferredencoding()
# As a last resort, just default to utf-8
encoding = encoding or 'utf-8'
arg_list = []
for arg in sys.argv:
try:
arg_list.append(arg.decode(encoding))
except AttributeError:
arg_list.append(arg)
return arg_list
def run_profiled(func, *args, **kwargs): def run_profiled(func, *args, **kwargs):
""" """
Profile a function with cProfile Profile a function with cProfile

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2007-2010 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2007-2010 Andrew Resch <andrewresch@gmail.com>
# #
@ -6,10 +7,13 @@
# See LICENSE for more details. # See LICENSE for more details.
# #
from __future__ import unicode_literals
import logging import logging
import traceback import traceback
from collections import defaultdict from collections import defaultdict
from six import string_types
from twisted.internet import reactor from twisted.internet import reactor
from twisted.internet.defer import DeferredList, fail, maybeDeferred, succeed from twisted.internet.defer import DeferredList, fail, maybeDeferred, succeed
from twisted.internet.task import LoopingCall, deferLater from twisted.internet.task import LoopingCall, deferLater
@ -23,13 +27,13 @@ class ComponentAlreadyRegistered(Exception):
class ComponentException(Exception): class ComponentException(Exception):
def __init__(self, message, tb): def __init__(self, message, tb):
super().__init__(message) super(ComponentException, self).__init__(message)
self.message = message self.message = message
self.tb = tb self.tb = tb
def __str__(self): def __str__(self):
s = super().__str__() s = super(ComponentException, self).__str__()
return '{}\n{}'.format(s, ''.join(self.tb)) return '%s\n%s' % (s, ''.join(self.tb))
def __eq__(self, other): def __eq__(self, other):
if isinstance(other, self.__class__): if isinstance(other, self.__class__):
@ -41,7 +45,7 @@ class ComponentException(Exception):
return not self.__eq__(other) return not self.__eq__(other)
class Component: class Component(object):
"""Component objects are singletons managed by the :class:`ComponentRegistry`. """Component objects are singletons managed by the :class:`ComponentRegistry`.
When a new Component object is instantiated, it will be automatically When a new Component object is instantiated, it will be automatically
@ -59,16 +63,11 @@ class Component:
Deluge core. Deluge core.
**update()** - This method is called every 1 second by default while the **update()** - This method is called every 1 second by default while the
Component is in a *Started* state. The interval can be Componented is in a *Started* state. The interval can be
specified during instantiation. The update() timer can be specified during instantiation. The update() timer can be
paused by instructing the :class:`ComponentRegistry` to pause paused by instructing the :class:`ComponentRegistry` to pause
this Component. this Component.
**pause()** - This method is called when the component is being paused.
**resume()** - This method is called when the component resumes from a Paused
state.
**shutdown()** - This method is called when the client is exiting. If the **shutdown()** - This method is called when the client is exiting. If the
Component is in a "Started" state when this is called, a Component is in a "Started" state when this is called, a
call to stop() will be issued prior to shutdown(). call to stop() will be issued prior to shutdown().
@ -85,10 +84,10 @@ class Component:
**Stopped** - The Component has either been stopped or has yet to be started. **Stopped** - The Component has either been stopped or has yet to be started.
**Stopping** - The Component has had its stop method called, but it hasn't **Stopping** - The Component has had it's stop method called, but it hasn't
fully stopped yet. fully stopped yet.
**Paused** - The Component has had its update timer stopped, but will **Paused** - The Component has had it's update timer stopped, but will
still be considered in a Started state. still be considered in a Started state.
""" """
@ -116,8 +115,9 @@ class Component:
_ComponentRegistry.deregister(self) _ComponentRegistry.deregister(self)
def _component_start_timer(self): def _component_start_timer(self):
self._component_timer = LoopingCall(self.update) if hasattr(self, 'update'):
self._component_timer.start(self._component_interval) self._component_timer = LoopingCall(self.update)
self._component_timer.start(self._component_interval)
def _component_start(self): def _component_start(self):
def on_start(result): def on_start(result):
@ -133,10 +133,13 @@ class Component:
return fail(result) return fail(result)
if self._component_state == 'Stopped': if self._component_state == 'Stopped':
self._component_state = 'Starting' if hasattr(self, 'start'):
d = deferLater(reactor, 0, self.start) self._component_state = 'Starting'
d.addCallbacks(on_start, on_start_fail) d = deferLater(reactor, 0, self.start)
self._component_starting_deferred = d d.addCallbacks(on_start, on_start_fail)
self._component_starting_deferred = d
else:
d = maybeDeferred(on_start, None)
elif self._component_state == 'Starting': elif self._component_state == 'Starting':
return self._component_starting_deferred return self._component_starting_deferred
elif self._component_state == 'Started': elif self._component_state == 'Started':
@ -166,11 +169,14 @@ class Component:
return result return result
if self._component_state != 'Stopped' and self._component_state != 'Stopping': if self._component_state != 'Stopped' and self._component_state != 'Stopping':
self._component_state = 'Stopping' if hasattr(self, 'stop'):
d = maybeDeferred(self.stop) self._component_state = 'Stopping'
d.addCallback(on_stop) d = maybeDeferred(self.stop)
d.addErrback(on_stop_fail) d.addCallback(on_stop)
self._component_stopping_deferred = d d.addErrback(on_stop_fail)
self._component_stopping_deferred = d
else:
d = maybeDeferred(on_stop, None)
if self._component_state == 'Stopping': if self._component_state == 'Stopping':
return self._component_stopping_deferred return self._component_stopping_deferred
@ -180,12 +186,13 @@ class Component:
def _component_pause(self): def _component_pause(self):
def on_pause(result): def on_pause(result):
self._component_state = 'Paused' self._component_state = 'Paused'
if self._component_timer and self._component_timer.running:
self._component_timer.stop()
if self._component_state == 'Started': if self._component_state == 'Started':
d = maybeDeferred(self.pause) if self._component_timer and self._component_timer.running:
d.addCallback(on_pause) d = maybeDeferred(self._component_timer.stop)
d.addCallback(on_pause)
else:
d = succeed(None)
elif self._component_state == 'Paused': elif self._component_state == 'Paused':
d = succeed(None) d = succeed(None)
else: else:
@ -202,10 +209,9 @@ class Component:
def _component_resume(self): def _component_resume(self):
def on_resume(result): def on_resume(result):
self._component_state = 'Started' self._component_state = 'Started'
self._component_start_timer()
if self._component_state == 'Paused': if self._component_state == 'Paused':
d = maybeDeferred(self.resume) d = maybeDeferred(self._component_start_timer)
d.addCallback(on_resume) d.addCallback(on_resume)
else: else:
d = fail( d = fail(
@ -220,7 +226,9 @@ class Component:
def _component_shutdown(self): def _component_shutdown(self):
def on_stop(result): def on_stop(result):
return maybeDeferred(self.shutdown) if hasattr(self, 'shutdown'):
return maybeDeferred(self.shutdown)
return succeed(None)
d = self._component_stop() d = self._component_stop()
d.addCallback(on_stop) d.addCallback(on_stop)
@ -241,14 +249,8 @@ class Component:
def shutdown(self): def shutdown(self):
pass pass
def pause(self):
pass
def resume(self): class ComponentRegistry(object):
pass
class ComponentRegistry:
"""The ComponentRegistry holds a list of currently registered :class:`Component` objects. """The ComponentRegistry holds a list of currently registered :class:`Component` objects.
It is used to manage the Components by starting, stopping, pausing and shutting them down. It is used to manage the Components by starting, stopping, pausing and shutting them down.
@ -291,8 +293,7 @@ class ComponentRegistry:
obj (Component): a component object to deregister obj (Component): a component object to deregister
Returns: Returns:
Deferred: a deferred object that will fire once the Component has been Deferred: a deferred object that will fire once the Component has been sucessfully deregistered
successfully deregistered
""" """
if obj in self.components.values(): if obj in self.components.values():
@ -323,7 +324,7 @@ class ComponentRegistry:
# Start all the components if names is empty # Start all the components if names is empty
if not names: if not names:
names = list(self.components) names = list(self.components)
elif isinstance(names, str): elif isinstance(names, string_types):
names = [names] names = [names]
def on_depends_started(result, name): def on_depends_started(result, name):
@ -357,7 +358,7 @@ class ComponentRegistry:
""" """
if not names: if not names:
names = list(self.components) names = list(self.components)
elif isinstance(names, str): elif isinstance(names, string_types):
names = [names] names = [names]
def on_dependents_stopped(result, name): def on_dependents_stopped(result, name):
@ -397,7 +398,7 @@ class ComponentRegistry:
""" """
if not names: if not names:
names = list(self.components) names = list(self.components)
elif isinstance(names, str): elif isinstance(names, string_types):
names = [names] names = [names]
deferreds = [] deferreds = []
@ -423,7 +424,7 @@ class ComponentRegistry:
""" """
if not names: if not names:
names = list(self.components) names = list(self.components)
elif isinstance(names, str): elif isinstance(names, string_types):
names = [names] names = [names]
deferreds = [] deferreds = []
@ -447,7 +448,7 @@ class ComponentRegistry:
def on_stopped(result): def on_stopped(result):
return DeferredList( return DeferredList(
[comp._component_shutdown() for comp in list(self.components.values())] [comp._component_shutdown() for comp in self.components.values()]
) )
return self.stop(list(self.components)).addCallback(on_stopped) return self.stop(list(self.components)).addCallback(on_stopped)

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2008 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2008 Andrew Resch <andrewresch@gmail.com>
# #
@ -38,67 +39,78 @@ this can only be done for the 'config file version' and not for the 'format'
version as this will be done internally. version as this will be done internally.
""" """
from __future__ import unicode_literals
import json import json
import logging import logging
import os import os
import pickle
import shutil import shutil
from codecs import getwriter from codecs import getwriter
from io import open
from tempfile import NamedTemporaryFile from tempfile import NamedTemporaryFile
import six.moves.cPickle as pickle # noqa: N813
from deluge.common import JSON_FORMAT, get_default_config_dir from deluge.common import JSON_FORMAT, get_default_config_dir
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
callLater = None # noqa: N816 Necessary for the config tests
def find_json_objects(text, decoder=json.JSONDecoder()): def prop(func):
"""Find json objects in text. """Function decorator for defining property attributes
The decorated function is expected to return a dictionary
containing one or more of the following pairs:
fget - function for getting attribute value
fset - function for setting attribute value
fdel - function for deleting attribute
This can be conveniently constructed by the locals() builtin
function; see:
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/205183
"""
return property(doc=func.__doc__, **func())
def find_json_objects(s):
"""Find json objects in a string.
Args: Args:
text (str): The text to find json objects within. s (str): the string to find json objects in
Returns: Returns:
list: A list of tuples containing start and end locations of json list: A list of tuples containing start and end locations of json
objects in the text. e.g. [(start, end), ...] objects in string `s`. e.g. [(start, end), ...]
""" """
objects = [] objects = []
offset = 0 opens = 0
while True: start = s.find('{')
try: offset = start
start = text.index('{', offset)
except ValueError:
break
try: if start < 0:
__, index = decoder.raw_decode(text[start:]) return []
except json.decoder.JSONDecodeError:
offset = start + 1 quoted = False
else: for index, c in enumerate(s[offset:]):
offset = start + index if c == '"':
objects.append((start, offset)) quoted = not quoted
elif quoted:
continue
elif c == '{':
opens += 1
elif c == '}':
opens -= 1
if opens == 0:
objects.append((start, index + offset + 1))
start = index + offset + 1
return objects return objects
def cast_to_existing_type(value, old_value): class Config(object):
"""Attempt to convert new value type to match old value type"""
types_match = isinstance(old_value, (type(None), type(value)))
if value is not None and not types_match:
old_type = type(old_value)
# Skip convert to bytes since requires knowledge of encoding and value should
# be unicode anyway.
if old_type is bytes:
return value
return old_type(value)
return value
class Config:
"""This class is used to access/create/modify config files. """This class is used to access/create/modify config files.
Args: Args:
@ -108,23 +120,13 @@ class Config:
file_version (int): The file format for the default config values when creating file_version (int): The file format for the default config values when creating
a fresh config. This value should be increased whenever a new migration function is a fresh config. This value should be increased whenever a new migration function is
setup to convert old config files. (default: 1) setup to convert old config files. (default: 1)
log_mask_funcs (dict): A dict of key:function, used to mask sensitive
key values (e.g. passwords) when logging is enabled.
""" """
def __init__( def __init__(self, filename, defaults=None, config_dir=None, file_version=1):
self,
filename,
defaults=None,
config_dir=None,
file_version=1,
log_mask_funcs=None,
):
self.__config = {} self.__config = {}
self.__set_functions = {} self.__set_functions = {}
self.__change_callbacks = [] self.__change_callbacks = []
self.__log_mask_funcs = log_mask_funcs if log_mask_funcs else {}
# These hold the version numbers and they will be set when loaded # These hold the version numbers and they will be set when loaded
self.__version = {'format': 1, 'file': file_version} self.__version = {'format': 1, 'file': file_version}
@ -135,7 +137,7 @@ class Config:
if defaults: if defaults:
for key, value in defaults.items(): for key, value in defaults.items():
self.set_item(key, value, default=True) self.set_item(key, value)
# Load the config from file in the config_dir # Load the config from file in the config_dir
if config_dir: if config_dir:
@ -145,12 +147,6 @@ class Config:
self.load() self.load()
def callLater(self, period, func, *args, **kwargs): # noqa: N802 ignore camelCase
"""Wrapper around reactor.callLater for test purpose."""
from twisted.internet import reactor
return reactor.callLater(period, func, *args, **kwargs)
def __contains__(self, item): def __contains__(self, item):
return item in self.__config return item in self.__config
@ -159,7 +155,7 @@ class Config:
return self.set_item(key, value) return self.set_item(key, value)
def set_item(self, key, value, default=False): def set_item(self, key, value):
"""Sets item 'key' to 'value' in the config dictionary. """Sets item 'key' to 'value' in the config dictionary.
Does not allow changing the item's type unless it is None. Does not allow changing the item's type unless it is None.
@ -171,8 +167,6 @@ class Config:
key (str): Item to change to change. key (str): Item to change to change.
value (any): The value to change item to, must be same type as what is value (any): The value to change item to, must be same type as what is
currently in the config. currently in the config.
default (optional, bool): When setting a default value skip func or save
callbacks.
Raises: Raises:
ValueError: Raised when the type of value is not the same as what is ValueError: Raised when the type of value is not the same as what is
@ -185,54 +179,61 @@ class Config:
5 5
""" """
if isinstance(value, bytes): if key not in self.__config:
value = value.decode() self.__config[key] = value
log.debug('Setting key "%s" to: %s (of type: %s)', key, value, type(value))
return
if key in self.__config: if self.__config[key] == value:
return
# Change the value type if it is not None and does not match.
type_match = isinstance(self.__config[key], (type(None), type(value)))
if value is not None and not type_match:
try: try:
value = cast_to_existing_type(value, self.__config[key]) oldtype = type(self.__config[key])
# Don't convert to bytes as requires encoding and value will
# be decoded anyway.
if oldtype is not bytes:
value = oldtype(value)
except ValueError: except ValueError:
log.warning('Value Type "%s" invalid for key: %s', type(value), key) log.warning('Value Type "%s" invalid for key: %s', type(value), key)
raise raise
else:
if self.__config[key] == value:
return
if log.isEnabledFor(logging.DEBUG): if isinstance(value, bytes):
if key in self.__log_mask_funcs: value = value.decode('utf8')
value = self.__log_mask_funcs[key](value)
log.debug( log.debug('Setting key "%s" to: %s (of type: %s)', key, value, type(value))
'Setting key "%s" to: %s (of type: %s)',
key,
value,
type(value),
)
self.__config[key] = value self.__config[key] = value
# Skip save or func callbacks if setting default value for keys global callLater
if default: if callLater is None:
return # Must import here and not at the top or it will throw ReactorAlreadyInstalledError
from twisted.internet.reactor import (
callLater,
) # pylint: disable=redefined-outer-name
# Run the set_function for this key if any # Run the set_function for this key if any
for func in self.__set_functions.get(key, []): try:
self.callLater(0, func, key, value) for func in self.__set_functions[key]:
callLater(0, func, key, value)
except KeyError:
pass
try: try:
def do_change_callbacks(key, value): def do_change_callbacks(key, value):
for func in self.__change_callbacks: for func in self.__change_callbacks:
func(key, value) func(key, value)
self.callLater(0, do_change_callbacks, key, value) callLater(0, do_change_callbacks, key, value)
except Exception: except Exception:
pass pass
# We set the save_timer for 5 seconds if not already set # We set the save_timer for 5 seconds if not already set
if not self._save_timer or not self._save_timer.active(): if not self._save_timer or not self._save_timer.active():
self._save_timer = self.callLater(5, self.save) self._save_timer = callLater(5, self.save)
def __getitem__(self, key): def __getitem__(self, key):
"""See get_item""" """See get_item """
return self.get_item(key) return self.get_item(key)
def get_item(self, key): def get_item(self, key):
@ -305,9 +306,16 @@ class Config:
del self.__config[key] del self.__config[key]
global callLater
if callLater is None:
# Must import here and not at the top or it will throw ReactorAlreadyInstalledError
from twisted.internet.reactor import (
callLater,
) # pylint: disable=redefined-outer-name
# We set the save_timer for 5 seconds if not already set # We set the save_timer for 5 seconds if not already set
if not self._save_timer or not self._save_timer.active(): if not self._save_timer or not self._save_timer.active():
self._save_timer = self.callLater(5, self.save) self._save_timer = callLater(5, self.save)
def register_change_callback(self, callback): def register_change_callback(self, callback):
"""Registers a callback function for any changed value. """Registers a callback function for any changed value.
@ -353,6 +361,7 @@ class Config:
# Run the function now if apply_now is set # Run the function now if apply_now is set
if apply_now: if apply_now:
function(key, self.__config[key]) function(key, self.__config[key])
return
def apply_all(self): def apply_all(self):
"""Calls all set functions. """Calls all set functions.
@ -395,9 +404,9 @@ class Config:
filename = self.__config_file filename = self.__config_file
try: try:
with open(filename, encoding='utf8') as _file: with open(filename, 'r', encoding='utf8') as _file:
data = _file.read() data = _file.read()
except OSError as ex: except IOError as ex:
log.warning('Unable to open config file %s: %s', filename, ex) log.warning('Unable to open config file %s: %s', filename, ex)
return return
@ -427,24 +436,12 @@ class Config:
log.exception(ex) log.exception(ex)
log.warning('Unable to load config file: %s', filename) log.warning('Unable to load config file: %s', filename)
if not log.isEnabledFor(logging.DEBUG):
return
config = self.__config
if self.__log_mask_funcs:
config = {
key: self.__log_mask_funcs[key](config[key])
if key in self.__log_mask_funcs
else config[key]
for key in config
}
log.debug( log.debug(
'Config %s version: %s.%s loaded: %s', 'Config %s version: %s.%s loaded: %s',
filename, filename,
self.__version['format'], self.__version['format'],
self.__version['file'], self.__version['file'],
config, self.__config,
) )
def save(self, filename=None): def save(self, filename=None):
@ -462,7 +459,7 @@ class Config:
# Check to see if the current config differs from the one on disk # Check to see if the current config differs from the one on disk
# We will only write a new config file if there is a difference # We will only write a new config file if there is a difference
try: try:
with open(filename, encoding='utf8') as _file: with open(filename, 'r', encoding='utf8') as _file:
data = _file.read() data = _file.read()
objects = find_json_objects(data) objects = find_json_objects(data)
start, end = objects[0] start, end = objects[0]
@ -474,7 +471,7 @@ class Config:
if self._save_timer and self._save_timer.active(): if self._save_timer and self._save_timer.active():
self._save_timer.cancel() self._save_timer.cancel()
return True return True
except (OSError, IndexError) as ex: except (IOError, IndexError) as ex:
log.warning('Unable to open config file: %s because: %s', filename, ex) log.warning('Unable to open config file: %s because: %s', filename, ex)
# Save the new config and make sure it's written to disk # Save the new config and make sure it's written to disk
@ -488,7 +485,7 @@ class Config:
json.dump(self.__config, getwriter('utf8')(_file), **JSON_FORMAT) json.dump(self.__config, getwriter('utf8')(_file), **JSON_FORMAT)
_file.flush() _file.flush()
os.fsync(_file.fileno()) os.fsync(_file.fileno())
except OSError as ex: except IOError as ex:
log.error('Error writing new config file: %s', ex) log.error('Error writing new config file: %s', ex)
return False return False
@ -499,7 +496,7 @@ class Config:
try: try:
log.debug('Backing up old config file to %s.bak', filename) log.debug('Backing up old config file to %s.bak', filename)
shutil.move(filename, filename + '.bak') shutil.move(filename, filename + '.bak')
except OSError as ex: except IOError as ex:
log.warning('Unable to backup old config: %s', ex) log.warning('Unable to backup old config: %s', ex)
# The new config file has been written successfully, so let's move it over # The new config file has been written successfully, so let's move it over
@ -507,7 +504,7 @@ class Config:
try: try:
log.debug('Moving new config file %s to %s', filename_tmp, filename) log.debug('Moving new config file %s to %s', filename_tmp, filename)
shutil.move(filename_tmp, filename) shutil.move(filename_tmp, filename)
except OSError as ex: except IOError as ex:
log.error('Error moving new config file: %s', ex) log.error('Error moving new config file: %s', ex)
return False return False
else: else:
@ -559,11 +556,14 @@ class Config:
def config_file(self): def config_file(self):
return self.__config_file return self.__config_file
@property @prop
def config(self): def config(): # pylint: disable=no-method-argument
"""The config dictionary""" """The config dictionary"""
return self.__config
@config.deleter def fget(self):
def config(self): return self.__config
return self.save()
def fdel(self):
return self.save()
return locals()

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com>
# #
@ -6,6 +7,8 @@
# See LICENSE for more details. # See LICENSE for more details.
# #
from __future__ import unicode_literals
import logging import logging
import os import os
@ -16,7 +19,7 @@ from deluge.config import Config
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
class _ConfigManager: class _ConfigManager(object):
def __init__(self): def __init__(self):
log.debug('ConfigManager started..') log.debug('ConfigManager started..')
self.config_files = {} self.config_files = {}

View file

@ -1,215 +0,0 @@
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
import asyncio
import tempfile
import warnings
from unittest.mock import Mock, patch
import pytest
import pytest_twisted
from twisted.internet import reactor
from twisted.internet.defer import Deferred, maybeDeferred
from twisted.internet.error import CannotListenError, ProcessTerminated
from twisted.python.failure import Failure
import deluge.component as _component
import deluge.configmanager
from deluge.common import get_localhost_auth
from deluge.tests import common
from deluge.ui.client import client as _client
DEFAULT_LISTEN_PORT = 58900
@pytest.fixture
def listen_port(request):
if request and 'daemon' in request.fixturenames:
try:
return request.getfixturevalue('daemon').listen_port
except Exception:
pass
return DEFAULT_LISTEN_PORT
@pytest.fixture
def mock_callback():
"""Returns a `Mock` object which can be registered as a callback to test against.
If callback was not called within `timeout` seconds, it will raise a TimeoutError.
The returned Mock instance will have a `deferred` attribute which will complete when the callback has been called.
"""
def reset(timeout=0.5, *args, **kwargs):
if mock.called:
original_reset_mock(*args, **kwargs)
if mock.deferred:
mock.deferred.cancel()
deferred = Deferred(canceller=lambda x: deferred.callback(None))
deferred.addTimeout(timeout, reactor)
mock.side_effect = lambda *args, **kw: deferred.callback((args, kw))
mock.deferred = deferred
mock = Mock()
mock.__qualname__ = 'mock'
original_reset_mock = mock.reset_mock
mock.reset_mock = reset
mock.reset_mock()
return mock
@pytest.fixture
def config_dir(tmp_path):
config_dir = tmp_path / 'config'
deluge.configmanager.set_config_dir(config_dir)
yield config_dir
@pytest_twisted.async_yield_fixture()
async def client(request, config_dir, monkeypatch, listen_port):
# monkeypatch.setattr(
# _client, 'connect', functools.partial(_client.connect, port=listen_port)
# )
try:
username, password = get_localhost_auth()
except Exception:
username, password = '', ''
await _client.connect(
'localhost',
port=listen_port,
username=username,
password=password,
)
yield _client
if _client.connected():
await _client.disconnect()
@pytest_twisted.async_yield_fixture
async def daemon(request, config_dir, tmp_path):
listen_port = DEFAULT_LISTEN_PORT
logfile = tmp_path / 'daemon.log'
if hasattr(request.cls, 'daemon_custom_script'):
custom_script = request.cls.daemon_custom_script
else:
custom_script = ''
for dummy in range(10):
try:
d, daemon = common.start_core(
listen_port=listen_port,
logfile=logfile,
timeout=5,
timeout_msg='Timeout!',
custom_script=custom_script,
print_stdout=True,
print_stderr=True,
config_directory=config_dir,
)
await d
except CannotListenError as ex:
exception_error = ex
listen_port += 1
except (KeyboardInterrupt, SystemExit):
raise
else:
break
else:
raise exception_error
daemon.listen_port = listen_port
yield daemon
try:
await daemon.kill()
except ProcessTerminated:
pass
@pytest.fixture(autouse=True)
def common_fixture(config_dir, request, monkeypatch, listen_port):
"""Adds some instance attributes to test classes for backwards compatibility with old testing."""
def fail(self, reason):
if isinstance(reason, Failure):
reason = reason.value
return pytest.fail(str(reason))
if request.instance:
request.instance.patch = monkeypatch.setattr
request.instance.config_dir = config_dir
request.instance.listen_port = listen_port
request.instance.id = lambda: request.node.name
request.cls.fail = fail
@pytest_twisted.async_yield_fixture(scope='function')
async def component():
"""Verify component registry is clean, and clean up after test."""
if len(_component._ComponentRegistry.components) != 0:
warnings.warn(
'The component._ComponentRegistry.components is not empty on test setup.\n'
'This is probably caused by another test that did not clean up after finishing!: %s'
% _component._ComponentRegistry.components
)
yield _component
await _component.shutdown()
_component._ComponentRegistry.components.clear()
_component._ComponentRegistry.dependents.clear()
@pytest_twisted.async_yield_fixture(scope='function')
async def base_fixture(common_fixture, component, request):
"""This fixture is autoused on all tests that subclass BaseTestCase"""
self = request.instance
if hasattr(self, 'set_up'):
try:
await maybeDeferred(self.set_up)
except Exception as exc:
warnings.warn('Error caught in test setup!\n%s' % exc)
pytest.fail('Error caught in test setup!\n%s' % exc)
yield
if hasattr(self, 'tear_down'):
try:
await maybeDeferred(self.tear_down)
except Exception as exc:
pytest.fail('Error caught in test teardown!\n%s' % exc)
@pytest.mark.usefixtures('base_fixture')
class BaseTestCase:
"""This is the base class that should be used for all test classes
that create classes that inherit from deluge.component.Component. It
ensures that the component registry has been cleaned up when tests
have finished.
"""
@pytest.fixture
def mock_mkstemp(tmp_path):
"""Return known tempfile location to verify file deleted"""
tmp_file = tempfile.mkstemp(dir=tmp_path)
with patch('tempfile.mkstemp', return_value=tmp_file):
yield tmp_file
def pytest_collection_modifyitems(session, config, items) -> None:
"""
Automatically runs async tests with pytest_twisted.ensureDeferred
"""
function_items = (item for item in items if isinstance(item, pytest.Function))
for function_item in function_items:
function = function_item.obj
if hasattr(function, '__func__'):
# methods need to be unwrapped.
function = function.__func__
if asyncio.iscoroutinefunction(function):
# This is how pytest_twisted marks ensureDeferred tests
setattr(function, '_pytest_twisted_mark', 'async_test')

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
# #
@ -14,16 +15,12 @@ This should typically only be used by the Core. Plugins should utilize the
`:mod:EventManager` for similar functionality. `:mod:EventManager` for similar functionality.
""" """
from __future__ import unicode_literals
import contextlib
import logging import logging
import threading import types
import time
from collections import defaultdict
from functools import partial
from typing import Any, Callable
from twisted.internet import reactor, task, threads from twisted.internet import reactor
import deluge.component as component import deluge.component as component
from deluge._libtorrent import lt from deluge._libtorrent import lt
@ -37,7 +34,7 @@ class AlertManager(component.Component):
def __init__(self): def __init__(self):
log.debug('AlertManager init...') log.debug('AlertManager init...')
component.Component.__init__(self, 'AlertManager') component.Component.__init__(self, 'AlertManager', interval=0.3)
self.session = component.get('Core').session self.session = component.get('Core').session
# Increase the alert queue size so that alerts don't get lost. # Increase the alert queue size so that alerts don't get lost.
@ -52,94 +49,53 @@ class AlertManager(component.Component):
| lt.alert.category_t.status_notification | lt.alert.category_t.status_notification
| lt.alert.category_t.ip_block_notification | lt.alert.category_t.ip_block_notification
| lt.alert.category_t.performance_warning | lt.alert.category_t.performance_warning
| lt.alert.category_t.file_progress_notification
) )
self.session.apply_settings({'alert_mask': alert_mask}) self.session.apply_settings({'alert_mask': alert_mask})
# handlers is a dictionary of lists {"alert_type": [handler1,h2,..]} # handlers is a dictionary of lists {"alert_type": [handler1,h2,..]}
self.handlers = defaultdict(list) self.handlers = {}
self.handlers_timeout_secs = 2
self.delayed_calls = [] self.delayed_calls = []
self._event = threading.Event()
def update(self): def update(self):
pass self.delayed_calls = [dc for dc in self.delayed_calls if dc.active()]
def start(self):
thread = threading.Thread(
target=self.wait_for_alert_in_thread, name='alert-poller', daemon=True
)
thread.start()
self._event.set()
def stop(self):
self.cancel_delayed_calls()
def pause(self):
self._event.clear()
def resume(self):
self._event.set()
def wait_for_alert_in_thread(self):
while self._component_state not in ('Stopping', 'Stopped'):
if self.check_delayed_calls():
time.sleep(0.05)
continue
if self.session.wait_for_alert(1000) is None:
continue
if self._event.wait():
threads.blockingCallFromThread(reactor, self.maybe_handle_alerts)
def on_delayed_call_timeout(self, result, timeout, **kwargs):
log.warning('Alert handler was timed-out before being called %s', kwargs)
def cancel_delayed_calls(self):
"""Cancel all delayed handlers."""
for delayed_call in self.delayed_calls:
delayed_call.cancel()
self.delayed_calls = []
def check_delayed_calls(self) -> bool:
"""Returns True if any handler calls are delayed."""
self.delayed_calls = [dc for dc in self.delayed_calls if not dc.called]
return len(self.delayed_calls) > 0
def maybe_handle_alerts(self) -> None:
if self._component_state != 'Started':
return
self.handle_alerts() self.handle_alerts()
def register_handler(self, alert_type: str, handler: Callable[[Any], None]) -> None: def stop(self):
for delayed_call in self.delayed_calls:
if delayed_call.active():
delayed_call.cancel()
self.delayed_calls = []
def register_handler(self, alert_type, handler):
""" """
Registers a function that will be called when 'alert_type' is pop'd Registers a function that will be called when 'alert_type' is pop'd
in handle_alerts. The handler function should look like: handler(alert) in handle_alerts. The handler function should look like: handler(alert)
Where 'alert' is the actual alert object from libtorrent. Where 'alert' is the actual alert object from libtorrent.
Args: :param alert_type: str, this is string representation of the alert name
alert_type: String representation of the libtorrent alert name. :param handler: func(alert), the function to be called when the alert is raised
Can be supplied with or without `_alert` suffix.
handler: Callback function when the alert is raised.
""" """
if alert_type and alert_type.endswith('_alert'): if alert_type not in self.handlers:
alert_type = alert_type[: -len('_alert')] # There is no entry for this alert type yet, so lets make it with an
# empty list.
self.handlers[alert_type] = []
# Append the handler to the list in the handlers dictionary
self.handlers[alert_type].append(handler) self.handlers[alert_type].append(handler)
log.debug('Registered handler for alert %s', alert_type) log.debug('Registered handler for alert %s', alert_type)
def deregister_handler(self, handler: Callable[[Any], None]): def deregister_handler(self, handler):
""" """
De-registers the `handler` function from all alert types. De-registers the `:param:handler` function from all alert types.
Args: :param handler: func, the handler function to deregister
handler: The handler function to deregister.
""" """
for alert_type_handlers in self.handlers.values(): # Iterate through all handlers and remove 'handler' where found
with contextlib.suppress(ValueError): for (dummy_key, value) in self.handlers.items():
alert_type_handlers.remove(handler) if handler in value:
# Handler is in this alert type list
value.remove(handler)
def handle_alerts(self): def handle_alerts(self):
""" """
@ -158,32 +114,26 @@ class AlertManager(component.Component):
num_alerts, num_alerts,
) )
# Loop through all alerts in the queue
for alert in alerts: for alert in alerts:
alert_type = alert.what() alert_type = type(alert).__name__
# Display the alert message # Display the alert message
if log.isEnabledFor(logging.DEBUG): if log.isEnabledFor(logging.DEBUG):
log.debug('%s: %s', alert_type, decode_bytes(alert.message())) log.debug('%s: %s', alert_type, decode_bytes(alert.message()))
if alert_type not in self.handlers:
continue
# Call any handlers for this alert type # Call any handlers for this alert type
for handler in self.handlers[alert_type]: if alert_type in self.handlers:
if log.isEnabledFor(logging.DEBUG): for handler in self.handlers[alert_type]:
log.debug('Handling alert: %s', alert_type) if log.isEnabledFor(logging.DEBUG):
d = task.deferLater(reactor, 0, handler, alert) log.debug('Handling alert: %s', alert_type)
on_handler_timeout = partial( # Copy alert attributes
self.on_delayed_call_timeout, alert_copy = types.SimpleNamespace(
handler=handler.__qualname__, **{
alert_type=alert_type, attr: getattr(alert, attr)
) for attr in dir(alert)
d.addTimeout( if not attr.startswith('__')
self.handlers_timeout_secs, }
reactor, )
onTimeoutCancel=on_handler_timeout, self.delayed_calls.append(reactor.callLater(0, handler, alert_copy))
)
self.delayed_calls.append(d)
def set_alert_queue_size(self, queue_size): def set_alert_queue_size(self, queue_size):
"""Sets the maximum size of the libtorrent alert queue""" """Sets the maximum size of the libtorrent alert queue"""

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
# Copyright (C) 2011 Pedro Algarvio <pedro@algarvio.me> # Copyright (C) 2011 Pedro Algarvio <pedro@algarvio.me>
@ -7,9 +8,12 @@
# See LICENSE for more details. # See LICENSE for more details.
# #
from __future__ import unicode_literals
import logging import logging
import os import os
import shutil import shutil
from io import open
import deluge.component as component import deluge.component as component
import deluge.configmanager as configmanager import deluge.configmanager as configmanager
@ -28,14 +32,14 @@ log = logging.getLogger(__name__)
AUTH_LEVELS_MAPPING = { AUTH_LEVELS_MAPPING = {
'NONE': AUTH_LEVEL_NONE, 'NONE': AUTH_LEVEL_NONE,
'READONLY': AUTH_LEVEL_READONLY, 'READONLY': AUTH_LEVEL_READONLY,
'DEFAULT': AUTH_LEVEL_DEFAULT, 'DEFAULT': AUTH_LEVEL_NORMAL,
'NORMAL': AUTH_LEVEL_NORMAL, 'NORMAL': AUTH_LEVEL_DEFAULT,
'ADMIN': AUTH_LEVEL_ADMIN, 'ADMIN': AUTH_LEVEL_ADMIN,
} }
AUTH_LEVELS_MAPPING_REVERSE = {v: k for k, v in AUTH_LEVELS_MAPPING.items()} AUTH_LEVELS_MAPPING_REVERSE = {v: k for k, v in AUTH_LEVELS_MAPPING.items()}
class Account: class Account(object):
__slots__ = ('username', 'password', 'authlevel') __slots__ = ('username', 'password', 'authlevel')
def __init__(self, username, password, authlevel): def __init__(self, username, password, authlevel):
@ -52,10 +56,10 @@ class Account:
} }
def __repr__(self): def __repr__(self):
return '<Account username="{username}" authlevel={authlevel}>'.format( return '<Account username="%(username)s" authlevel=%(authlevel)s>' % {
username=self.username, 'username': self.username,
authlevel=self.authlevel, 'authlevel': self.authlevel,
) }
class AuthManager(component.Component): class AuthManager(component.Component):
@ -97,7 +101,7 @@ class AuthManager(component.Component):
int: The auth level for this user. int: The auth level for this user.
Raises: Raises:
AuthenticationRequired: If additional details are required to authenticate. AuthenticationRequired: If aditional details are required to authenticate.
BadLoginError: If the username does not exist or password does not match. BadLoginError: If the username does not exist or password does not match.
""" """
@ -180,7 +184,7 @@ class AuthManager(component.Component):
if os.path.isfile(filepath): if os.path.isfile(filepath):
log.debug('Creating backup of %s at: %s', filename, filepath_bak) log.debug('Creating backup of %s at: %s', filename, filepath_bak)
shutil.copy2(filepath, filepath_bak) shutil.copy2(filepath, filepath_bak)
except OSError as ex: except IOError as ex:
log.error('Unable to backup %s to %s: %s', filepath, filepath_bak, ex) log.error('Unable to backup %s to %s: %s', filepath, filepath_bak, ex)
else: else:
log.info('Saving the %s at: %s', filename, filepath) log.info('Saving the %s at: %s', filename, filepath)
@ -194,7 +198,7 @@ class AuthManager(component.Component):
_file.flush() _file.flush()
os.fsync(_file.fileno()) os.fsync(_file.fileno())
shutil.move(filepath_tmp, filepath) shutil.move(filepath_tmp, filepath)
except OSError as ex: except IOError as ex:
log.error('Unable to save %s: %s', filename, ex) log.error('Unable to save %s: %s', filename, ex)
if os.path.isfile(filepath_bak): if os.path.isfile(filepath_bak):
log.info('Restoring backup of %s from: %s', filename, filepath_bak) log.info('Restoring backup of %s from: %s', filename, filepath_bak)
@ -223,9 +227,9 @@ class AuthManager(component.Component):
for _filepath in (auth_file, auth_file_bak): for _filepath in (auth_file, auth_file_bak):
log.info('Opening %s for load: %s', filename, _filepath) log.info('Opening %s for load: %s', filename, _filepath)
try: try:
with open(_filepath, encoding='utf8') as _file: with open(_filepath, 'r', encoding='utf8') as _file:
file_data = _file.readlines() file_data = _file.readlines()
except OSError as ex: except IOError as ex:
log.warning('Unable to load %s: %s', _filepath, ex) log.warning('Unable to load %s: %s', _filepath, ex)
file_data = [] file_data = []
else: else:

File diff suppressed because it is too large Load diff

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
# #
@ -7,6 +8,7 @@
# #
"""The Deluge daemon""" """The Deluge daemon"""
from __future__ import unicode_literals
import logging import logging
import os import os
@ -42,8 +44,8 @@ def is_daemon_running(pid_file):
try: try:
with open(pid_file) as _file: with open(pid_file) as _file:
pid, port = (int(x) for x in _file.readline().strip().split(';')) pid, port = [int(x) for x in _file.readline().strip().split(';')]
except (OSError, ValueError): except (EnvironmentError, ValueError):
return False return False
if is_process_running(pid): if is_process_running(pid):
@ -51,7 +53,7 @@ def is_daemon_running(pid_file):
_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) _socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try: try:
_socket.connect(('127.0.0.1', port)) _socket.connect(('127.0.0.1', port))
except OSError: except socket.error:
# Can't connect, so pid is not a deluged process. # Can't connect, so pid is not a deluged process.
return False return False
else: else:
@ -60,7 +62,7 @@ def is_daemon_running(pid_file):
return True return True
class Daemon: class Daemon(object):
"""The Deluge Daemon class""" """The Deluge Daemon class"""
def __init__( def __init__(
@ -154,7 +156,7 @@ class Daemon:
pid = os.getpid() pid = os.getpid()
log.debug('Storing pid %s & port %s in: %s', pid, self.port, self.pid_file) log.debug('Storing pid %s & port %s in: %s', pid, self.port, self.pid_file)
with open(self.pid_file, 'w') as _file: with open(self.pid_file, 'w') as _file:
_file.write(f'{pid};{self.port}\n') _file.write('%s;%s\n' % (pid, self.port))
component.start() component.start()
@ -198,7 +200,6 @@ class Daemon:
if rpc not in self.get_method_list(): if rpc not in self.get_method_list():
return False return False
return ( return self.rpcserver.get_session_auth_level() >= self.rpcserver.get_rpc_auth_level(
self.rpcserver.get_session_auth_level() rpc
>= self.rpcserver.get_rpc_auth_level(rpc)
) )

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com>
# Copyright (C) 2010 Pedro Algarvio <pedro@algarvio.me> # Copyright (C) 2010 Pedro Algarvio <pedro@algarvio.me>
@ -6,6 +7,8 @@
# the additional special exception to link portions of this program with the OpenSSL library. # the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details. # See LICENSE for more details.
# #
from __future__ import print_function, unicode_literals
import os import os
import sys import sys
from logging import DEBUG, FileHandler, getLogger from logging import DEBUG, FileHandler, getLogger

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
# #
@ -6,6 +7,8 @@
# See LICENSE for more details. # See LICENSE for more details.
# #
from __future__ import unicode_literals
import logging import logging
import deluge.component as component import deluge.component as component

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2008 Martijn Voncken <mvoncken@gmail.com> # Copyright (C) 2008 Martijn Voncken <mvoncken@gmail.com>
# #
@ -6,8 +7,12 @@
# See LICENSE for more details. # See LICENSE for more details.
# #
from __future__ import unicode_literals
import logging import logging
from six import string_types
import deluge.component as component import deluge.component as component
from deluge.common import TORRENT_STATE from deluge.common import TORRENT_STATE
@ -95,7 +100,9 @@ def tracker_error_filter(torrent_ids, values):
class FilterManager(component.Component): class FilterManager(component.Component):
"""FilterManager""" """FilterManager
"""
def __init__(self, core): def __init__(self, core):
component.Component.__init__(self, 'FilterManager') component.Component.__init__(self, 'FilterManager')
@ -131,7 +138,7 @@ class FilterManager(component.Component):
# Sanitize input: filter-value must be a list of strings # Sanitize input: filter-value must be a list of strings
for key, value in filter_dict.items(): for key, value in filter_dict.items():
if isinstance(value, str): if isinstance(value, string_types):
filter_dict[key] = [value] filter_dict[key] = [value]
# Optimized filter for id # Optimized filter for id

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com>
# #
@ -8,6 +9,7 @@
"""PluginManager for Core""" """PluginManager for Core"""
from __future__ import unicode_literals
import logging import logging

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2008-2010 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2008-2010 Andrew Resch <andrewresch@gmail.com>
# #
@ -7,13 +8,13 @@
# #
from __future__ import unicode_literals
import logging import logging
import os import os
import platform import platform
import random import random
import threading import threading
from urllib.parse import quote_plus
from urllib.request import urlopen
from twisted.internet.task import LoopingCall from twisted.internet.task import LoopingCall
@ -23,14 +24,17 @@ import deluge.configmanager
from deluge._libtorrent import lt from deluge._libtorrent import lt
from deluge.event import ConfigValueChangedEvent from deluge.event import ConfigValueChangedEvent
GeoIP = None
try: try:
from GeoIP import GeoIP import GeoIP
except ImportError: except ImportError:
try: GeoIP = None
from pygeoip import GeoIP
except ImportError: try:
pass from urllib.parse import quote_plus
from urllib.request import urlopen
except ImportError:
from urllib import quote_plus
from urllib2 import urlopen
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -198,12 +202,9 @@ class PreferencesManager(component.Component):
self.__set_listen_on() self.__set_listen_on()
def __set_listen_on(self): def __set_listen_on(self):
"""Set the ports and interface address to listen for incoming connections on.""" """ Set the ports and interface address to listen for incoming connections on."""
if self.config['random_port']: if self.config['random_port']:
if ( if not self.config['listen_random_port']:
not self.config['listen_reuse_port']
or not self.config['listen_random_port']
):
self.config['listen_random_port'] = random.randrange(49152, 65525) self.config['listen_random_port'] = random.randrange(49152, 65525)
listen_ports = [ listen_ports = [
self.config['listen_random_port'] self.config['listen_random_port']
@ -224,13 +225,13 @@ class PreferencesManager(component.Component):
self.config['listen_use_sys_port'], self.config['listen_use_sys_port'],
) )
interfaces = [ interfaces = [
f'{interface}:{port}' '%s:%s' % (interface, port)
for port in range(listen_ports[0], listen_ports[1] + 1) for port in range(listen_ports[0], listen_ports[1] + 1)
] ]
self.core.apply_session_settings( self.core.apply_session_settings(
{ {
'listen_system_port_fallback': self.config['listen_use_sys_port'], 'listen_system_port_fallback': self.config['listen_use_sys_port'],
'listen_interfaces': ','.join(interfaces), 'listen_interfaces': ''.join(interfaces),
} }
) )
@ -399,7 +400,7 @@ class PreferencesManager(component.Component):
+ quote_plus(':'.join(self.config['enabled_plugins'])) + quote_plus(':'.join(self.config['enabled_plugins']))
) )
urlopen(url) urlopen(url)
except OSError as ex: except IOError as ex:
log.debug('Network error while trying to send info: %s', ex) log.debug('Network error while trying to send info: %s', ex)
else: else:
self.config['info_sent'] = now self.config['info_sent'] = now
@ -463,9 +464,11 @@ class PreferencesManager(component.Component):
# Load the GeoIP DB for country look-ups if available # Load the GeoIP DB for country look-ups if available
if os.path.exists(geoipdb_path): if os.path.exists(geoipdb_path):
try: try:
self.core.geoip_instance = GeoIP(geoipdb_path, 0) self.core.geoip_instance = GeoIP.open(
except Exception as ex: geoipdb_path, GeoIP.GEOIP_STANDARD
log.warning('GeoIP Unavailable: %s', ex) )
except AttributeError:
log.warning('GeoIP Unavailable')
else: else:
log.warning('Unable to find GeoIP database file: %s', geoipdb_path) log.warning('Unable to find GeoIP database file: %s', geoipdb_path)

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2008,2009 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2008,2009 Andrew Resch <andrewresch@gmail.com>
# #
@ -7,15 +8,17 @@
# #
"""RPCServer Module""" """RPCServer Module"""
from __future__ import unicode_literals
import logging import logging
import os import os
import stat
import sys import sys
import traceback import traceback
from collections import namedtuple from collections import namedtuple
from types import FunctionType from types import FunctionType
from typing import Callable, TypeVar, overload
from OpenSSL import crypto
from twisted.internet import defer, reactor from twisted.internet import defer, reactor
from twisted.internet.protocol import Factory, connectionDone from twisted.internet.protocol import Factory, connectionDone
@ -26,9 +29,8 @@ from deluge.core.authmanager import (
AUTH_LEVEL_DEFAULT, AUTH_LEVEL_DEFAULT,
AUTH_LEVEL_NONE, AUTH_LEVEL_NONE,
) )
from deluge.crypto_utils import check_ssl_keys, get_context_factory from deluge.crypto_utils import get_context_factory
from deluge.error import ( from deluge.error import (
BadLoginError,
DelugeError, DelugeError,
IncompatibleClient, IncompatibleClient,
NotAuthorizedError, NotAuthorizedError,
@ -44,16 +46,6 @@ RPC_EVENT = 3
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
TCallable = TypeVar('TCallable', bound=Callable)
@overload
def export(func: TCallable) -> TCallable: ...
@overload
def export(auth_level: int) -> Callable[[TCallable], TCallable]: ...
def export(auth_level=AUTH_LEVEL_DEFAULT): def export(auth_level=AUTH_LEVEL_DEFAULT):
""" """
@ -77,7 +69,7 @@ def export(auth_level=AUTH_LEVEL_DEFAULT):
if func.__doc__: if func.__doc__:
if func.__doc__.endswith(' '): if func.__doc__.endswith(' '):
indent = func.__doc__.split('\n')[-1] indent = func.__doc__.split('\n')[-1]
func.__doc__ += f'\n{indent}' func.__doc__ += '\n{}'.format(indent)
else: else:
func.__doc__ += '\n\n' func.__doc__ += '\n\n'
func.__doc__ += rpc_text func.__doc__ += rpc_text
@ -122,7 +114,7 @@ def format_request(call):
class DelugeRPCProtocol(DelugeTransferProtocol): class DelugeRPCProtocol(DelugeTransferProtocol):
def __init__(self): def __init__(self):
super().__init__() super(DelugeRPCProtocol, self).__init__()
# namedtuple subclass with auth_level, username for the connected session. # namedtuple subclass with auth_level, username for the connected session.
self.AuthLevel = namedtuple('SessionAuthlevel', 'auth_level, username') self.AuthLevel = namedtuple('SessionAuthlevel', 'auth_level, username')
@ -274,22 +266,14 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
raise IncompatibleClient(deluge.common.get_version()) raise IncompatibleClient(deluge.common.get_version())
ret = component.get('AuthManager').authorize(*args, **kwargs) ret = component.get('AuthManager').authorize(*args, **kwargs)
if ret: if ret:
self.factory.authorized_sessions[self.transport.sessionno] = ( self.factory.authorized_sessions[
self.AuthLevel(ret, args[0]) self.transport.sessionno
) ] = self.AuthLevel(ret, args[0])
self.factory.session_protocols[self.transport.sessionno] = self self.factory.session_protocols[self.transport.sessionno] = self
except Exception as ex: except Exception as ex:
send_error() send_error()
if not isinstance(ex, _ClientSideRecreateError): if not isinstance(ex, _ClientSideRecreateError):
log.exception(ex) log.exception(ex)
if isinstance(ex, BadLoginError):
peer = self.transport.getPeer()
log.error(
'Deluge client authentication error made from: %s:%s (%s)',
peer.host,
peer.port,
str(ex),
)
else: else:
self.sendData((RPC_RESPONSE, request_id, (ret))) self.sendData((RPC_RESPONSE, request_id, (ret)))
if not ret: if not ret:
@ -553,8 +537,8 @@ class RPCServer(component.Component):
:type event: :class:`deluge.event.DelugeEvent` :type event: :class:`deluge.event.DelugeEvent`
""" """
log.debug('intevents: %s', self.factory.interested_events) log.debug('intevents: %s', self.factory.interested_events)
# Use copy of `interested_events` since it can mutate while iterating. # Find sessions interested in this event
for session_id, interest in self.factory.interested_events.copy().items(): for session_id, interest in self.factory.interested_events.items():
if event.name in interest: if event.name in interest:
log.debug('Emit Event: %s %s', event.name, event.args) log.debug('Emit Event: %s %s', event.name, event.args)
# This session is interested so send a RPC_EVENT # This session is interested so send a RPC_EVENT
@ -604,3 +588,59 @@ class RPCServer(component.Component):
def stop(self): def stop(self):
self.factory.state = 'stopping' self.factory.state = 'stopping'
def check_ssl_keys():
"""
Check for SSL cert/key and create them if necessary
"""
ssl_dir = deluge.configmanager.get_config_dir('ssl')
if not os.path.exists(ssl_dir):
# The ssl folder doesn't exist so we need to create it
os.makedirs(ssl_dir)
generate_ssl_keys()
else:
for f in ('daemon.pkey', 'daemon.cert'):
if not os.path.exists(os.path.join(ssl_dir, f)):
generate_ssl_keys()
break
def generate_ssl_keys():
"""
This method generates a new SSL key/cert.
"""
from deluge.common import PY2
digest = 'sha256' if not PY2 else b'sha256'
# Generate key pair
pkey = crypto.PKey()
pkey.generate_key(crypto.TYPE_RSA, 2048)
# Generate cert request
req = crypto.X509Req()
subj = req.get_subject()
setattr(subj, 'CN', 'Deluge Daemon')
req.set_pubkey(pkey)
req.sign(pkey, digest)
# Generate certificate
cert = crypto.X509()
cert.set_serial_number(0)
cert.gmtime_adj_notBefore(0)
cert.gmtime_adj_notAfter(60 * 60 * 24 * 365 * 3) # Three Years
cert.set_issuer(req.get_subject())
cert.set_subject(req.get_subject())
cert.set_pubkey(req.get_pubkey())
cert.sign(pkey, digest)
# Write out files
ssl_dir = deluge.configmanager.get_config_dir('ssl')
with open(os.path.join(ssl_dir, 'daemon.pkey'), 'wb') as _file:
_file.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
with open(os.path.join(ssl_dir, 'daemon.cert'), 'wb') as _file:
_file.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
# Make the files only readable by this user
for f in ('daemon.pkey', 'daemon.cert'):
os.chmod(os.path.join(ssl_dir, f), stat.S_IREAD | stat.S_IWRITE)

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
# #
@ -13,12 +14,11 @@ Attributes:
""" """
from __future__ import division, unicode_literals
import logging import logging
import os import os
import socket import socket
import time
from typing import Optional
from urllib.parse import urlparse
from twisted.internet.defer import Deferred, DeferredList from twisted.internet.defer import Deferred, DeferredList
@ -34,6 +34,18 @@ from deluge.event import (
TorrentTrackerStatusEvent, TorrentTrackerStatusEvent,
) )
try:
from urllib.parse import urlparse
except ImportError:
# PY2 fallback
from urlparse import urlparse # pylint: disable=ungrouped-imports
try:
from future_builtins import zip
except ImportError:
# Ignore on Py3.
pass
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
LT_TORRENT_STATE_MAP = { LT_TORRENT_STATE_MAP = {
@ -82,7 +94,7 @@ def convert_lt_files(files):
"""Indexes and decodes files from libtorrent get_files(). """Indexes and decodes files from libtorrent get_files().
Args: Args:
files (file_storage): The libtorrent torrent files. files (list): The libtorrent torrent files.
Returns: Returns:
list of dict: The files. list of dict: The files.
@ -97,18 +109,18 @@ def convert_lt_files(files):
} }
""" """
filelist = [] filelist = []
for index in range(files.num_files()): for index, _file in enumerate(files):
try: try:
file_path = files.file_path(index).decode('utf8') file_path = _file.path.decode('utf8')
except AttributeError: except AttributeError:
file_path = files.file_path(index) file_path = _file.path
filelist.append( filelist.append(
{ {
'index': index, 'index': index,
'path': file_path.replace('\\', '/'), 'path': file_path.replace('\\', '/'),
'size': files.file_size(index), 'size': _file.size,
'offset': files.file_offset(index), 'offset': _file.offset,
} }
) )
@ -149,7 +161,7 @@ class TorrentOptions(dict):
""" """
def __init__(self): def __init__(self):
super().__init__() super(TorrentOptions, self).__init__()
config = ConfigManager('core.conf').config config = ConfigManager('core.conf').config
options_conf_map = { options_conf_map = {
'add_paused': 'add_paused', 'add_paused': 'add_paused',
@ -179,14 +191,14 @@ class TorrentOptions(dict):
self['seed_mode'] = False self['seed_mode'] = False
class TorrentError: class TorrentError(object):
def __init__(self, error_message, was_paused=False, restart_to_resume=False): def __init__(self, error_message, was_paused=False, restart_to_resume=False):
self.error_message = error_message self.error_message = error_message
self.was_paused = was_paused self.was_paused = was_paused
self.restart_to_resume = restart_to_resume self.restart_to_resume = restart_to_resume
class Torrent: class Torrent(object):
"""Torrent holds information about torrents added to the libtorrent session. """Torrent holds information about torrents added to the libtorrent session.
Args: Args:
@ -194,12 +206,12 @@ class Torrent:
options (dict): The torrent options. options (dict): The torrent options.
state (TorrentState): The torrent state. state (TorrentState): The torrent state.
filename (str): The filename of the torrent file. filename (str): The filename of the torrent file.
magnet (str): The magnet URI. magnet (str): The magnet uri.
Attributes: Attributes:
torrent_id (str): The torrent_id for this torrent torrent_id (str): The torrent_id for this torrent
handle: Holds the libtorrent torrent handle handle: Holds the libtorrent torrent handle
magnet (str): The magnet URI used to add this torrent (if available). magnet (str): The magnet uri used to add this torrent (if available).
status: Holds status info so that we don"t need to keep getting it from libtorrent. status: Holds status info so that we don"t need to keep getting it from libtorrent.
torrent_info: store the torrent info. torrent_info: store the torrent info.
has_metadata (bool): True if the metadata for the torrent is available, False otherwise. has_metadata (bool): True if the metadata for the torrent is available, False otherwise.
@ -236,10 +248,9 @@ class Torrent:
self.handle = handle self.handle = handle
self.magnet = magnet self.magnet = magnet
self._status: Optional['lt.torrent_status'] = None self.status = self.handle.status()
self._status_last_update: float = 0.0
self.torrent_info = self.handle.torrent_file() self.torrent_info = self.handle.get_torrent_info()
self.has_metadata = self.status.has_metadata self.has_metadata = self.status.has_metadata
self.options = TorrentOptions() self.options = TorrentOptions()
@ -255,9 +266,6 @@ class Torrent:
self.is_finished = False self.is_finished = False
self.filename = filename self.filename = filename
if not self.filename:
self.filename = ''
self.forced_error = None self.forced_error = None
self.statusmsg = None self.statusmsg = None
self.state = None self.state = None
@ -270,6 +278,7 @@ class Torrent:
self.prev_status = {} self.prev_status = {}
self.waiting_on_folder_rename = [] self.waiting_on_folder_rename = []
self.update_status(self.handle.status())
self._create_status_funcs() self._create_status_funcs()
self.set_options(self.options) self.set_options(self.options)
self.update_state() self.update_state()
@ -277,18 +286,6 @@ class Torrent:
if log.isEnabledFor(logging.DEBUG): if log.isEnabledFor(logging.DEBUG):
log.debug('Torrent object created.') log.debug('Torrent object created.')
def _set_handle_flags(self, flag: lt.torrent_flags, set_flag: bool):
"""set or unset a flag to the lt handle
Args:
flag (lt.torrent_flags): the flag to set/unset
set_flag (bool): True for setting the flag, False for unsetting it
"""
if set_flag:
self.handle.set_flags(flag)
else:
self.handle.unset_flags(flag)
def on_metadata_received(self): def on_metadata_received(self):
"""Process the metadata received alert for this torrent""" """Process the metadata received alert for this torrent"""
self.has_metadata = True self.has_metadata = True
@ -373,7 +370,7 @@ class Torrent:
"""Sets maximum download speed for this torrent. """Sets maximum download speed for this torrent.
Args: Args:
m_down_speed (float): Maximum download speed in KiB/s. m_up_speed (float): Maximum download speed in KiB/s.
""" """
self.options['max_download_speed'] = m_down_speed self.options['max_download_speed'] = m_down_speed
if m_down_speed < 0: if m_down_speed < 0:
@ -405,7 +402,7 @@ class Torrent:
return return
# A list of priorities for each piece in the torrent # A list of priorities for each piece in the torrent
priorities = self.handle.get_piece_priorities() priorities = self.handle.piece_priorities()
def get_file_piece(idx, byte_offset): def get_file_piece(idx, byte_offset):
return self.torrent_info.map_file(idx, byte_offset, 0).piece return self.torrent_info.map_file(idx, byte_offset, 0).piece
@ -431,17 +428,14 @@ class Torrent:
# Setting the priorites for all the pieces of this torrent # Setting the priorites for all the pieces of this torrent
self.handle.prioritize_pieces(priorities) self.handle.prioritize_pieces(priorities)
def set_sequential_download(self, sequential): def set_sequential_download(self, set_sequencial):
"""Sets whether to download the pieces of the torrent in order. """Sets whether to download the pieces of the torrent in order.
Args: Args:
sequential (bool): Enable sequential downloading. set_sequencial (bool): Enable sequencial downloading.
""" """
self.options['sequential_download'] = sequential self.options['sequential_download'] = set_sequencial
self._set_handle_flags( self.handle.set_sequential_download(set_sequencial)
flag=lt.torrent_flags.sequential_download,
set_flag=sequential,
)
def set_auto_managed(self, auto_managed): def set_auto_managed(self, auto_managed):
"""Set auto managed mode, i.e. will be started or queued automatically. """Set auto managed mode, i.e. will be started or queued automatically.
@ -451,10 +445,7 @@ class Torrent:
""" """
self.options['auto_managed'] = auto_managed self.options['auto_managed'] = auto_managed
if not (self.status.paused and not self.status.auto_managed): if not (self.status.paused and not self.status.auto_managed):
self._set_handle_flags( self.handle.auto_managed(auto_managed)
flag=lt.torrent_flags.auto_managed,
set_flag=auto_managed,
)
self.update_state() self.update_state()
def set_super_seeding(self, super_seeding): def set_super_seeding(self, super_seeding):
@ -464,10 +455,7 @@ class Torrent:
super_seeding (bool): Enable super seeding. super_seeding (bool): Enable super seeding.
""" """
self.options['super_seeding'] = super_seeding self.options['super_seeding'] = super_seeding
self._set_handle_flags( self.handle.super_seeding(super_seeding)
flag=lt.torrent_flags.super_seeding,
set_flag=super_seeding,
)
def set_stop_ratio(self, stop_ratio): def set_stop_ratio(self, stop_ratio):
"""The seeding ratio to stop (or remove) the torrent at. """The seeding ratio to stop (or remove) the torrent at.
@ -528,7 +516,7 @@ class Torrent:
self.handle.prioritize_files(file_priorities) self.handle.prioritize_files(file_priorities)
else: else:
log.debug('Unable to set new file priorities.') log.debug('Unable to set new file priorities.')
file_priorities = self.handle.get_file_priorities() file_priorities = self.handle.file_priorities()
if 0 in self.options['file_priorities']: if 0 in self.options['file_priorities']:
# Previously marked a file 'skip' so check for any 0's now >0. # Previously marked a file 'skip' so check for any 0's now >0.
@ -578,7 +566,7 @@ class Torrent:
trackers (list of dicts): A list of trackers. trackers (list of dicts): A list of trackers.
""" """
if trackers is None: if trackers is None:
self.trackers = list(self.handle.trackers()) self.trackers = [tracker for tracker in self.handle.trackers()]
self.tracker_host = None self.tracker_host = None
return return
@ -643,7 +631,7 @@ class Torrent:
def update_state(self): def update_state(self):
"""Updates the state, based on libtorrent's torrent state""" """Updates the state, based on libtorrent's torrent state"""
status = self.get_lt_status() status = self.handle.status()
session_paused = component.get('Core').session.is_paused() session_paused = component.get('Core').session.is_paused()
old_state = self.state old_state = self.state
self.set_status_message() self.set_status_message()
@ -655,10 +643,7 @@ class Torrent:
elif status_error: elif status_error:
self.state = 'Error' self.state = 'Error'
# auto-manage status will be reverted upon resuming. # auto-manage status will be reverted upon resuming.
self._set_handle_flags( self.handle.auto_managed(False)
flag=lt.torrent_flags.auto_managed,
set_flag=False,
)
self.set_status_message(decode_bytes(status_error)) self.set_status_message(decode_bytes(status_error))
elif status.moving_storage: elif status.moving_storage:
self.state = 'Moving' self.state = 'Moving'
@ -711,11 +696,8 @@ class Torrent:
restart_to_resume (bool, optional): Prevent resuming clearing the error, only restarting restart_to_resume (bool, optional): Prevent resuming clearing the error, only restarting
session can resume. session can resume.
""" """
status = self.get_lt_status() status = self.handle.status()
self._set_handle_flags( self.handle.auto_managed(False)
flag=lt.torrent_flags.auto_managed,
set_flag=False,
)
self.forced_error = TorrentError(message, status.paused, restart_to_resume) self.forced_error = TorrentError(message, status.paused, restart_to_resume)
if not status.paused: if not status.paused:
self.handle.pause() self.handle.pause()
@ -729,10 +711,7 @@ class Torrent:
log.error('Restart deluge to clear this torrent error') log.error('Restart deluge to clear this torrent error')
if not self.forced_error.was_paused and self.options['auto_managed']: if not self.forced_error.was_paused and self.options['auto_managed']:
self._set_handle_flags( self.handle.auto_managed(True)
flag=lt.torrent_flags.auto_managed,
set_flag=True,
)
self.forced_error = None self.forced_error = None
self.set_status_message('OK') self.set_status_message('OK')
if update_state: if update_state:
@ -831,11 +810,7 @@ class Torrent:
if peer.flags & peer.connecting or peer.flags & peer.handshake: if peer.flags & peer.connecting or peer.flags & peer.handshake:
continue continue
try: client = decode_bytes(peer.client)
client = decode_bytes(peer.client)
except UnicodeDecodeError:
# libtorrent on Py3 can raise UnicodeDecodeError for peer_info.client
client = 'unknown'
try: try:
country = component.get('Core').geoip_instance.country_code_by_addr( country = component.get('Core').geoip_instance.country_code_by_addr(
@ -856,7 +831,7 @@ class Torrent:
'client': client, 'client': client,
'country': country, 'country': country,
'down_speed': peer.payload_down_speed, 'down_speed': peer.payload_down_speed,
'ip': f'{peer.ip[0]}:{peer.ip[1]}', 'ip': '%s:%s' % (peer.ip[0], peer.ip[1]),
'progress': peer.progress, 'progress': peer.progress,
'seed': peer.flags & peer.seed, 'seed': peer.flags & peer.seed,
'up_speed': peer.payload_up_speed, 'up_speed': peer.payload_up_speed,
@ -875,7 +850,7 @@ class Torrent:
def get_file_priorities(self): def get_file_priorities(self):
"""Return the file priorities""" """Return the file priorities"""
if not self.handle.status().has_metadata: if not self.handle.has_metadata():
return [] return []
if not self.options['file_priorities']: if not self.options['file_priorities']:
@ -892,18 +867,11 @@ class Torrent:
""" """
if not self.has_metadata: if not self.has_metadata:
return [] return []
try:
files_progresses = zip(
self.handle.file_progress(), self.torrent_info.files()
)
except Exception:
# Handle libtorrent >=2.0.0,<=2.0.4 file_progress error
files_progresses = zip(iter(lambda: 0, 1), self.torrent_info.files())
return [ return [
progress / _file.size if _file.size else 0.0 progress / _file.size if _file.size else 0.0
for progress, _file in files_progresses for progress, _file in zip(
self.handle.file_progress(), self.torrent_info.files()
)
] ]
def get_tracker_host(self): def get_tracker_host(self):
@ -928,7 +896,7 @@ class Torrent:
# Check if hostname is an IP address and just return it if that's the case # Check if hostname is an IP address and just return it if that's the case
try: try:
socket.inet_aton(host) socket.inet_aton(host)
except OSError: except socket.error:
pass pass
else: else:
# This is an IP address because an exception wasn't raised # This is an IP address because an exception wasn't raised
@ -945,7 +913,7 @@ class Torrent:
return '' return ''
def get_magnet_uri(self): def get_magnet_uri(self):
"""Returns a magnet URI for this torrent""" """Returns a magnet uri for this torrent"""
return lt.make_magnet_uri(self.handle) return lt.make_magnet_uri(self.handle)
def get_name(self): def get_name(self):
@ -964,10 +932,10 @@ class Torrent:
if self.has_metadata: if self.has_metadata:
# Use the top-level folder as torrent name. # Use the top-level folder as torrent name.
filename = decode_bytes(self.torrent_info.files().file_path(0)) filename = decode_bytes(self.torrent_info.file_at(0).path)
name = filename.replace('\\', '/', 1).split('/', 1)[0] name = filename.replace('\\', '/', 1).split('/', 1)[0]
else: else:
name = decode_bytes(self.handle.status().name) name = decode_bytes(self.handle.name())
if not name: if not name:
name = self.torrent_id name = self.torrent_id
@ -1019,14 +987,12 @@ class Torrent:
call to get_status based on the session_id call to get_status based on the session_id
update (bool): If True the status will be updated from libtorrent update (bool): If True the status will be updated from libtorrent
if False, the cached values will be returned if False, the cached values will be returned
all_keys (bool): If True return all keys while ignoring the keys param
if False, return only the requested keys
Returns: Returns:
dict: a dictionary of the status keys and their values dict: a dictionary of the status keys and their values
""" """
if update: if update:
self.get_lt_status() self.update_status(self.handle.status())
if all_keys: if all_keys:
keys = list(self.status_funcs) keys = list(self.status_funcs)
@ -1056,35 +1022,13 @@ class Torrent:
return status_dict return status_dict
def get_lt_status(self) -> 'lt.torrent_status': def update_status(self, status):
"""Get the torrent status fresh, not from cache.
This should be used when a guaranteed fresh status is needed rather than
`torrent.handle.status()` because it will update the cache as well.
"""
self.status = self.handle.status()
return self.status
@property
def status(self) -> 'lt.torrent_status':
"""Cached copy of the libtorrent status for this torrent.
If it has not been updated within the last five seconds, it will be
automatically refreshed.
"""
if self._status_last_update < (time.time() - 5):
self.status = self.handle.status()
return self._status
@status.setter
def status(self, status: 'lt.torrent_status') -> None:
"""Updates the cached status. """Updates the cached status.
Args: Args:
status: a libtorrent torrent status status (libtorrent.torrent_status): a libtorrent torrent status
""" """
self._status = status self.status = status
self._status_last_update = time.time()
def _create_status_funcs(self): def _create_status_funcs(self):
"""Creates the functions for getting torrent status""" """Creates the functions for getting torrent status"""
@ -1138,8 +1082,9 @@ class Torrent:
'download_location': lambda: self.options['download_location'], 'download_location': lambda: self.options['download_location'],
'seeds_peers_ratio': lambda: -1.0 'seeds_peers_ratio': lambda: -1.0
if self.status.num_incomplete == 0 if self.status.num_incomplete == 0
# Use -1.0 to signify infinity else ( # Use -1.0 to signify infinity
else (self.status.num_complete / self.status.num_incomplete), self.status.num_complete / self.status.num_incomplete
),
'seed_rank': lambda: self.status.seed_rank, 'seed_rank': lambda: self.status.seed_rank,
'state': lambda: self.state, 'state': lambda: self.state,
'stop_at_ratio': lambda: self.options['stop_at_ratio'], 'stop_at_ratio': lambda: self.options['stop_at_ratio'],
@ -1205,10 +1150,7 @@ class Torrent:
""" """
# Turn off auto-management so the torrent will not be unpaused by lt queueing # Turn off auto-management so the torrent will not be unpaused by lt queueing
self._set_handle_flags( self.handle.auto_managed(False)
flag=lt.torrent_flags.auto_managed,
set_flag=False,
)
if self.state == 'Error': if self.state == 'Error':
log.debug('Unable to pause torrent while in Error state') log.debug('Unable to pause torrent while in Error state')
elif self.status.paused: elif self.status.paused:
@ -1243,10 +1185,7 @@ class Torrent:
else: else:
# Check if torrent was originally being auto-managed. # Check if torrent was originally being auto-managed.
if self.options['auto_managed']: if self.options['auto_managed']:
self._set_handle_flags( self.handle.auto_managed(True)
flag=lt.torrent_flags.auto_managed,
set_flag=True,
)
try: try:
self.handle.resume() self.handle.resume()
except RuntimeError as ex: except RuntimeError as ex:
@ -1269,8 +1208,8 @@ class Torrent:
bool: True is successful, otherwise False bool: True is successful, otherwise False
""" """
try: try:
self.handle.connect_peer((peer_ip, int(peer_port)), 0) self.handle.connect_peer((peer_ip, peer_port), 0)
except (RuntimeError, ValueError) as ex: except RuntimeError as ex:
log.debug('Unable to connect to peer: %s', ex) log.debug('Unable to connect to peer: %s', ex)
return False return False
return True return True
@ -1350,7 +1289,7 @@ class Torrent:
try: try:
with open(filepath, 'wb') as save_file: with open(filepath, 'wb') as save_file:
save_file.write(filedump) save_file.write(filedump)
except OSError as ex: except IOError as ex:
log.error('Unable to save torrent file to: %s', ex) log.error('Unable to save torrent file to: %s', ex)
filepath = os.path.join(get_config_dir(), 'state', self.torrent_id + '.torrent') filepath = os.path.join(get_config_dir(), 'state', self.torrent_id + '.torrent')
@ -1373,7 +1312,7 @@ class Torrent:
torrent_files = [ torrent_files = [
os.path.join(get_config_dir(), 'state', self.torrent_id + '.torrent') os.path.join(get_config_dir(), 'state', self.torrent_id + '.torrent')
] ]
if delete_copies and self.filename: if delete_copies:
torrent_files.append( torrent_files.append(
os.path.join(self.config['torrentfiles_location'], self.filename) os.path.join(self.config['torrentfiles_location'], self.filename)
) )
@ -1397,8 +1336,8 @@ class Torrent:
def scrape_tracker(self): def scrape_tracker(self):
"""Scrape the tracker """Scrape the tracker
A scrape request queries the tracker for statistics such as total A scrape request queries the tracker for statistics such as total
number of incomplete peers, complete peers, number of downloads etc. number of incomplete peers, complete peers, number of downloads etc.
""" """
try: try:
self.handle.scrape_tracker() self.handle.scrape_tracker()
@ -1445,7 +1384,7 @@ class Torrent:
This basically does a file rename on all of the folders children. This basically does a file rename on all of the folders children.
Args: Args:
folder (str): The original folder name folder (str): The orignal folder name
new_folder (str): The new folder name new_folder (str): The new folder name
Returns: Returns:
@ -1543,18 +1482,20 @@ class Torrent:
self.status.pieces, self.handle.piece_availability() self.status.pieces, self.handle.piece_availability()
): ):
if piece: if piece:
# Completed. pieces.append(3) # Completed.
pieces.append(3)
elif avail_piece: elif avail_piece:
# Available, just not downloaded nor being downloaded. pieces.append(
pieces.append(1) 1
) # Available, just not downloaded nor being downloaded.
else: else:
# Missing, no known peer with piece, or not asked for yet. pieces.append(
pieces.append(0) 0
) # Missing, no known peer with piece, or not asked for yet.
for peer_info in self.handle.get_peer_info(): for peer_info in self.handle.get_peer_info():
if peer_info.downloading_piece_index >= 0: if peer_info.downloading_piece_index >= 0:
# Being downloaded from peer. pieces[
pieces[peer_info.downloading_piece_index] = 2 peer_info.downloading_piece_index
] = 2 # Being downloaded from peer.
return pieces return pieces

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
# #
@ -7,34 +8,27 @@
# #
"""TorrentManager handles Torrent objects""" """TorrentManager handles Torrent objects"""
from __future__ import unicode_literals
import datetime import datetime
import logging import logging
import operator import operator
import os import os
import pickle
import time import time
from base64 import b64encode from collections import namedtuple
from tempfile import gettempdir from tempfile import gettempdir
from typing import Dict, List, NamedTuple, Tuple
from twisted.internet import defer, reactor, threads import six.moves.cPickle as pickle # noqa: N813
from twisted.internet import defer, error, reactor, threads
from twisted.internet.defer import Deferred, DeferredList from twisted.internet.defer import Deferred, DeferredList
from twisted.internet.task import LoopingCall from twisted.internet.task import LoopingCall
import deluge.component as component import deluge.component as component
from deluge._libtorrent import LT_VERSION, lt from deluge._libtorrent import lt
from deluge.common import ( from deluge.common import archive_files, decode_bytes, get_magnet_info, is_magnet
VersionSplit,
archive_files,
decode_bytes,
get_magnet_info,
is_magnet,
)
from deluge.configmanager import ConfigManager, get_config_dir from deluge.configmanager import ConfigManager, get_config_dir
from deluge.core.authmanager import AUTH_LEVEL_ADMIN from deluge.core.authmanager import AUTH_LEVEL_ADMIN
from deluge.core.torrent import Torrent, TorrentOptions, sanitize_filepath from deluge.core.torrent import Torrent, TorrentOptions, sanitize_filepath
from deluge.decorators import maybe_coroutine
from deluge.error import AddTorrentError, InvalidTorrentError from deluge.error import AddTorrentError, InvalidTorrentError
from deluge.event import ( from deluge.event import (
ExternalIPEvent, ExternalIPEvent,
@ -51,18 +45,13 @@ from deluge.event import (
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
LT_DEFAULT_ADD_TORRENT_FLAGS = ( LT_DEFAULT_ADD_TORRENT_FLAGS = (
lt.torrent_flags.paused lt.add_torrent_params_flags_t.flag_paused
| lt.torrent_flags.auto_managed | lt.add_torrent_params_flags_t.flag_auto_managed
| lt.torrent_flags.update_subscribe | lt.add_torrent_params_flags_t.flag_update_subscribe
| lt.torrent_flags.apply_ip_filter | lt.add_torrent_params_flags_t.flag_apply_ip_filter
) )
class PrefetchQueueItem(NamedTuple):
alert_deferred: Deferred
result_queue: List[Deferred]
class TorrentState: # pylint: disable=old-style-class class TorrentState: # pylint: disable=old-style-class
"""Create a torrent state. """Create a torrent state.
@ -100,7 +89,7 @@ class TorrentState: # pylint: disable=old-style-class
super_seeding=False, super_seeding=False,
name=None, name=None,
): ):
# Build the class attribute list from args # Build the class atrribute list from args
for key, value in locals().items(): for key, value in locals().items():
if key == 'self': if key == 'self':
continue continue
@ -140,8 +129,7 @@ class TorrentManager(component.Component):
""" """
# This is used in the test to mock out timeouts callLater = reactor.callLater # noqa: N815
clock = reactor
def __init__(self): def __init__(self):
component.Component.__init__( component.Component.__init__(
@ -170,7 +158,7 @@ class TorrentManager(component.Component):
self.is_saving_state = False self.is_saving_state = False
self.save_resume_data_file_lock = defer.DeferredLock() self.save_resume_data_file_lock = defer.DeferredLock()
self.torrents_loading = {} self.torrents_loading = {}
self.prefetching_metadata: Dict[str, PrefetchQueueItem] = {} self.prefetching_metadata = {}
# This is a map of torrent_ids to Deferreds used to track needed resume data. # This is a map of torrent_ids to Deferreds used to track needed resume data.
# The Deferreds will be completed when resume data has been saved. # The Deferreds will be completed when resume data has been saved.
@ -203,32 +191,34 @@ class TorrentManager(component.Component):
# Register alert functions # Register alert functions
alert_handles = [ alert_handles = [
'external_ip', 'external_ip_alert',
'performance', 'performance_alert',
'add_torrent', 'add_torrent_alert',
'metadata_received', 'metadata_received_alert',
'torrent_finished', 'torrent_finished_alert',
'torrent_paused', 'torrent_paused_alert',
'torrent_checked', 'torrent_checked_alert',
'torrent_resumed', 'torrent_resumed_alert',
'tracker_reply', 'tracker_reply_alert',
'tracker_announce', 'tracker_announce_alert',
'tracker_warning', 'tracker_warning_alert',
'tracker_error', 'tracker_error_alert',
'file_renamed', 'file_renamed_alert',
'file_error', 'file_error_alert',
'file_completed', 'file_completed_alert',
'storage_moved', 'storage_moved_alert',
'storage_moved_failed', 'storage_moved_failed_alert',
'state_update', 'state_update_alert',
'state_changed', 'state_changed_alert',
'save_resume_data', 'save_resume_data_alert',
'save_resume_data_failed', 'save_resume_data_failed_alert',
'fastresume_rejected', 'fastresume_rejected_alert',
] ]
for alert_handle in alert_handles: for alert_handle in alert_handles:
on_alert_func = getattr(self, ''.join(['on_alert_', alert_handle])) on_alert_func = getattr(
self, ''.join(['on_alert_', alert_handle.replace('_alert', '')])
)
self.alerts.register_handler(alert_handle, on_alert_func) self.alerts.register_handler(alert_handle, on_alert_func)
# Define timers # Define timers
@ -253,8 +243,8 @@ class TorrentManager(component.Component):
self.save_resume_data_timer.start(190, False) self.save_resume_data_timer.start(190, False)
self.prev_status_cleanup_loop.start(10) self.prev_status_cleanup_loop.start(10)
@maybe_coroutine @defer.inlineCallbacks
async def stop(self): def stop(self):
# Stop timers # Stop timers
if self.save_state_timer.running: if self.save_state_timer.running:
self.save_state_timer.stop() self.save_state_timer.stop()
@ -266,11 +256,11 @@ class TorrentManager(component.Component):
self.prev_status_cleanup_loop.stop() self.prev_status_cleanup_loop.stop()
# Save state on shutdown # Save state on shutdown
await self.save_state() yield self.save_state()
self.session.pause() self.session.pause()
result = await self.save_resume_data(flush_disk_cache=True) result = yield self.save_resume_data(flush_disk_cache=True)
# Remove the temp_file to signify successfully saved state # Remove the temp_file to signify successfully saved state
if result and os.path.isfile(self.temp_file): if result and os.path.isfile(self.temp_file):
os.remove(self.temp_file) os.remove(self.temp_file)
@ -284,6 +274,11 @@ class TorrentManager(component.Component):
'Paused', 'Paused',
'Queued', 'Queued',
): ):
# If the global setting is set, but the per-torrent isn't...
# Just skip to the next torrent.
# This is so that a user can turn-off the stop at ratio option on a per-torrent basis
if not torrent.options['stop_at_ratio']:
continue
if ( if (
torrent.get_ratio() >= torrent.options['stop_ratio'] torrent.get_ratio() >= torrent.options['stop_ratio']
and torrent.is_finished and torrent.is_finished
@ -291,8 +286,8 @@ class TorrentManager(component.Component):
if torrent.options['remove_at_ratio']: if torrent.options['remove_at_ratio']:
self.remove(torrent_id) self.remove(torrent_id)
break break
if not torrent.handle.status().paused:
torrent.pause() torrent.pause()
def __getitem__(self, torrent_id): def __getitem__(self, torrent_id):
"""Return the Torrent with torrent_id. """Return the Torrent with torrent_id.
@ -344,64 +339,66 @@ class TorrentManager(component.Component):
else: else:
return torrent_info return torrent_info
@maybe_coroutine def prefetch_metadata(self, magnet, timeout):
async def prefetch_metadata(self, magnet: str, timeout: int) -> Tuple[str, bytes]: """Download the metadata for a magnet uri.
"""Download the metadata for a magnet URI.
Args: Args:
magnet: A magnet URI to download the metadata for. magnet (str): A magnet uri to download the metadata for.
timeout: Number of seconds to wait before canceling. timeout (int): Number of seconds to wait before cancelling.
Returns: Returns:
A tuple of (torrent_id, metadata) Deferred: A tuple of (torrent_id (str), metadata (dict))
""" """
torrent_id = get_magnet_info(magnet)['info_hash'] torrent_id = get_magnet_info(magnet)['info_hash']
if torrent_id in self.prefetching_metadata: if torrent_id in self.prefetching_metadata:
d = Deferred() return self.prefetching_metadata[torrent_id].defer
self.prefetching_metadata[torrent_id].result_queue.append(d)
return await d
add_torrent_params = lt.parse_magnet_uri(magnet) add_torrent_params = {}
add_torrent_params.save_path = gettempdir() add_torrent_params['save_path'] = gettempdir()
add_torrent_params.flags = ( add_torrent_params['url'] = magnet.strip().encode('utf8')
add_torrent_params['flags'] = (
( (
LT_DEFAULT_ADD_TORRENT_FLAGS LT_DEFAULT_ADD_TORRENT_FLAGS
| lt.torrent_flags.duplicate_is_error | lt.add_torrent_params_flags_t.flag_duplicate_is_error
| lt.torrent_flags.upload_mode | lt.add_torrent_params_flags_t.flag_upload_mode
) )
^ lt.torrent_flags.auto_managed ^ lt.add_torrent_params_flags_t.flag_auto_managed
^ lt.torrent_flags.paused ^ lt.add_torrent_params_flags_t.flag_paused
) )
torrent_handle = self.session.add_torrent(add_torrent_params) torrent_handle = self.session.add_torrent(add_torrent_params)
d = Deferred() d = Deferred()
# Cancel the defer if timeout reached. # Cancel the defer if timeout reached.
d.addTimeout(timeout, self.clock) defer_timeout = self.callLater(timeout, d.cancel)
self.prefetching_metadata[torrent_id] = PrefetchQueueItem(d, []) d.addBoth(self.on_prefetch_metadata, torrent_id, defer_timeout)
Prefetch = namedtuple('Prefetch', 'defer handle')
self.prefetching_metadata[torrent_id] = Prefetch(defer=d, handle=torrent_handle)
return d
def on_prefetch_metadata(self, torrent_info, torrent_id, defer_timeout):
# Cancel reactor.callLater.
try: try:
torrent_info = await d defer_timeout.cancel()
except (defer.TimeoutError, defer.CancelledError): except error.AlreadyCalled:
log.debug(f'Prefetching metadata for {torrent_id} timed out or cancelled.') pass
metadata = b''
else:
log.debug('prefetch metadata received')
if VersionSplit(LT_VERSION) < VersionSplit('2.0.0.0'):
metadata = torrent_info.metadata()
else:
metadata = torrent_info.info_section()
log.debug('remove prefetch magnet from session') log.debug('remove prefetch magnet from session')
result_queue = self.prefetching_metadata.pop(torrent_id).result_queue try:
self.session.remove_torrent(torrent_handle, 1) torrent_handle = self.prefetching_metadata.pop(torrent_id).handle
result = torrent_id, b64encode(metadata) except KeyError:
pass
else:
self.session.remove_torrent(torrent_handle, 1)
for d in result_queue: metadata = None
d.callback(result) if isinstance(torrent_info, lt.torrent_info):
return result log.debug('prefetch metadata received')
metadata = lt.bdecode(torrent_info.metadata())
return torrent_id, metadata
def _build_torrent_options(self, options): def _build_torrent_options(self, options):
"""Load default options and update if needed.""" """Load default options and update if needed."""
@ -434,10 +431,9 @@ class TorrentManager(component.Component):
elif magnet: elif magnet:
magnet_info = get_magnet_info(magnet) magnet_info = get_magnet_info(magnet)
if magnet_info: if magnet_info:
add_torrent_params['url'] = magnet.strip().encode('utf8')
add_torrent_params['name'] = magnet_info['name'] add_torrent_params['name'] = magnet_info['name']
add_torrent_params['trackers'] = list(magnet_info['trackers'])
torrent_id = magnet_info['info_hash'] torrent_id = magnet_info['info_hash']
add_torrent_params['info_hash'] = bytes(bytearray.fromhex(torrent_id))
else: else:
raise AddTorrentError( raise AddTorrentError(
'Unable to add magnet, invalid magnet info: %s' % magnet 'Unable to add magnet, invalid magnet info: %s' % magnet
@ -452,7 +448,7 @@ class TorrentManager(component.Component):
raise AddTorrentError('Torrent already being added (%s).' % torrent_id) raise AddTorrentError('Torrent already being added (%s).' % torrent_id)
elif torrent_id in self.prefetching_metadata: elif torrent_id in self.prefetching_metadata:
# Cancel and remove metadata fetching torrent. # Cancel and remove metadata fetching torrent.
self.prefetching_metadata[torrent_id].alert_deferred.cancel() self.prefetching_metadata[torrent_id].defer.cancel()
# Check for renamed files and if so, rename them in the torrent_info before adding. # Check for renamed files and if so, rename them in the torrent_info before adding.
if options['mapped_files'] and torrent_info: if options['mapped_files'] and torrent_info:
@ -480,12 +476,16 @@ class TorrentManager(component.Component):
# Set flags: enable duplicate_is_error & override_resume_data, disable auto_managed. # Set flags: enable duplicate_is_error & override_resume_data, disable auto_managed.
add_torrent_params['flags'] = ( add_torrent_params['flags'] = (
LT_DEFAULT_ADD_TORRENT_FLAGS | lt.torrent_flags.duplicate_is_error LT_DEFAULT_ADD_TORRENT_FLAGS
) ^ lt.torrent_flags.auto_managed | lt.add_torrent_params_flags_t.flag_duplicate_is_error
| lt.add_torrent_params_flags_t.flag_override_resume_data
) ^ lt.add_torrent_params_flags_t.flag_auto_managed
if options['seed_mode']: if options['seed_mode']:
add_torrent_params['flags'] |= lt.torrent_flags.seed_mode add_torrent_params['flags'] |= lt.add_torrent_params_flags_t.flag_seed_mode
if options['super_seeding']: if options['super_seeding']:
add_torrent_params['flags'] |= lt.torrent_flags.super_seeding add_torrent_params[
'flags'
] |= lt.add_torrent_params_flags_t.flag_super_seeding
return torrent_id, add_torrent_params return torrent_id, add_torrent_params
@ -509,7 +509,7 @@ class TorrentManager(component.Component):
save_state (bool, optional): If True save the session state after adding torrent, defaults to True. save_state (bool, optional): If True save the session state after adding torrent, defaults to True.
filedump (str, optional): bencoded filedump of a torrent file. filedump (str, optional): bencoded filedump of a torrent file.
filename (str, optional): The filename of the torrent file. filename (str, optional): The filename of the torrent file.
magnet (str, optional): The magnet URI. magnet (str, optional): The magnet uri.
resume_data (lt.entry, optional): libtorrent fast resume data. resume_data (lt.entry, optional): libtorrent fast resume data.
Returns: Returns:
@ -574,7 +574,7 @@ class TorrentManager(component.Component):
save_state (bool, optional): If True save the session state after adding torrent, defaults to True. save_state (bool, optional): If True save the session state after adding torrent, defaults to True.
filedump (str, optional): bencoded filedump of a torrent file. filedump (str, optional): bencoded filedump of a torrent file.
filename (str, optional): The filename of the torrent file. filename (str, optional): The filename of the torrent file.
magnet (str, optional): The magnet URI. magnet (str, optional): The magnet uri.
resume_data (lt.entry, optional): libtorrent fast resume data. resume_data (lt.entry, optional): libtorrent fast resume data.
Returns: Returns:
@ -642,7 +642,7 @@ class TorrentManager(component.Component):
# Resume AlertManager if paused for adding torrent to libtorrent. # Resume AlertManager if paused for adding torrent to libtorrent.
component.resume('AlertManager') component.resume('AlertManager')
# Store the original resume_data, in case of errors. # Store the orignal resume_data, in case of errors.
if resume_data: if resume_data:
self.resume_data[torrent.torrent_id] = resume_data self.resume_data[torrent.torrent_id] = resume_data
@ -809,9 +809,9 @@ class TorrentManager(component.Component):
try: try:
with open(filepath, 'rb') as _file: with open(filepath, 'rb') as _file:
state = pickle.load(_file, encoding='utf8') state = pickle.load(_file)
except (OSError, EOFError, pickle.UnpicklingError) as ex: except (IOError, EOFError, pickle.UnpicklingError) as ex:
message = f'Unable to load {filepath}: {ex}' message = 'Unable to load {}: {}'.format(filepath, ex)
log.error(message) log.error(message)
if not filepath.endswith('.bak'): if not filepath.endswith('.bak'):
self.archive_state(message) self.archive_state(message)
@ -1022,7 +1022,7 @@ class TorrentManager(component.Component):
) )
def on_torrent_resume_save(dummy_result, torrent_id): def on_torrent_resume_save(dummy_result, torrent_id):
"""Received torrent resume_data alert so remove from waiting list""" """Recieved torrent resume_data alert so remove from waiting list"""
self.waiting_on_resume_data.pop(torrent_id, None) self.waiting_on_resume_data.pop(torrent_id, None)
deferreds = [] deferreds = []
@ -1067,7 +1067,7 @@ class TorrentManager(component.Component):
try: try:
with open(_filepath, 'rb') as _file: with open(_filepath, 'rb') as _file:
resume_data = lt.bdecode(_file.read()) resume_data = lt.bdecode(_file.read())
except (OSError, EOFError, RuntimeError) as ex: except (IOError, EOFError, RuntimeError) as ex:
if self.torrents: if self.torrents:
log.warning('Unable to load %s: %s', _filepath, ex) log.warning('Unable to load %s: %s', _filepath, ex)
resume_data = None resume_data = None
@ -1240,7 +1240,7 @@ class TorrentManager(component.Component):
def on_alert_add_torrent(self, alert): def on_alert_add_torrent(self, alert):
"""Alert handler for libtorrent add_torrent_alert""" """Alert handler for libtorrent add_torrent_alert"""
if not alert.handle.is_valid(): if not alert.handle.is_valid():
log.warning('Torrent handle is invalid: %s', alert.error.message()) log.warning('Torrent handle is invalid!')
return return
try: try:
@ -1351,8 +1351,10 @@ class TorrentManager(component.Component):
torrent.set_tracker_status('Announce OK') torrent.set_tracker_status('Announce OK')
# Check for peer information from the tracker, if none then send a scrape request. # Check for peer information from the tracker, if none then send a scrape request.
torrent.get_lt_status() if (
if torrent.status.num_complete == -1 or torrent.status.num_incomplete == -1: alert.handle.status().num_complete == -1
or alert.handle.status().num_incomplete == -1
):
torrent.scrape_tracker() torrent.scrape_tracker()
def on_alert_tracker_announce(self, alert): def on_alert_tracker_announce(self, alert):
@ -1387,18 +1389,7 @@ class TorrentManager(component.Component):
log.debug( log.debug(
'Tracker Error Alert: %s [%s]', decode_bytes(alert.message()), error_message 'Tracker Error Alert: %s [%s]', decode_bytes(alert.message()), error_message
) )
# libtorrent 1.2 added endpoint struct to each tracker. to prevent false updates torrent.set_tracker_status('Error: ' + error_message)
# we will need to verify that at least one endpoint to the errored tracker is working
for tracker in torrent.handle.trackers():
if tracker['url'] == alert.url:
if any(
endpoint['last_error']['value'] == 0
for endpoint in tracker['endpoints']
):
torrent.set_tracker_status('Announce OK')
else:
torrent.set_tracker_status('Error: ' + error_message)
break
def on_alert_storage_moved(self, alert): def on_alert_storage_moved(self, alert):
"""Alert handler for libtorrent storage_moved_alert""" """Alert handler for libtorrent storage_moved_alert"""
@ -1472,9 +1463,7 @@ class TorrentManager(component.Component):
return return
if torrent_id in self.torrents: if torrent_id in self.torrents:
# libtorrent add_torrent expects bencoded resume_data. # libtorrent add_torrent expects bencoded resume_data.
self.resume_data[torrent_id] = lt.bencode( self.resume_data[torrent_id] = lt.bencode(alert.resume_data)
lt.write_resume_data(alert.params)
)
if torrent_id in self.waiting_on_resume_data: if torrent_id in self.waiting_on_resume_data:
self.waiting_on_resume_data[torrent_id].callback(None) self.waiting_on_resume_data[torrent_id].callback(None)
@ -1556,7 +1545,7 @@ class TorrentManager(component.Component):
# Try callback to prefetch_metadata method. # Try callback to prefetch_metadata method.
try: try:
d = self.prefetching_metadata[torrent_id].alert_deferred d = self.prefetching_metadata[torrent_id].defer
except KeyError: except KeyError:
pass pass
else: else:
@ -1602,14 +1591,23 @@ class TorrentManager(component.Component):
except RuntimeError: except RuntimeError:
continue continue
if torrent_id in self.torrents: if torrent_id in self.torrents:
self.torrents[torrent_id].status = t_status self.torrents[torrent_id].update_status(t_status)
self.handle_torrents_status_callback(self.torrents_status_requests.pop()) self.handle_torrents_status_callback(self.torrents_status_requests.pop())
def on_alert_external_ip(self, alert): def on_alert_external_ip(self, alert):
"""Alert handler for libtorrent external_ip_alert""" """Alert handler for libtorrent external_ip_alert
log.info('on_alert_external_ip: %s', alert.external_address)
component.get('EventManager').emit(ExternalIPEvent(alert.external_address)) Note:
The alert.message IPv4 address format is:
'external IP received: 0.0.0.0'
and IPv6 address format is:
'external IP received: 0:0:0:0:0:0:0:0'
"""
external_ip = decode_bytes(alert.message()).split(' ')[-1]
log.info('on_alert_external_ip: %s', external_ip)
component.get('EventManager').emit(ExternalIPEvent(external_ip))
def on_alert_performance(self, alert): def on_alert_performance(self, alert):
"""Alert handler for libtorrent performance_alert""" """Alert handler for libtorrent performance_alert"""

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2007,2008 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2007,2008 Andrew Resch <andrewresch@gmail.com>
# #
@ -6,10 +7,8 @@
# See LICENSE for more details. # See LICENSE for more details.
# #
import os from __future__ import division, print_function, unicode_literals
import stat
from OpenSSL import crypto
from OpenSSL.crypto import FILETYPE_PEM from OpenSSL.crypto import FILETYPE_PEM
from twisted.internet.ssl import ( from twisted.internet.ssl import (
AcceptableCiphers, AcceptableCiphers,
@ -19,8 +18,6 @@ from twisted.internet.ssl import (
TLSVersion, TLSVersion,
) )
import deluge.configmanager
# A TLS ciphers list. # A TLS ciphers list.
# Sources for more information on TLS ciphers: # Sources for more information on TLS ciphers:
# - https://wiki.mozilla.org/Security/Server_Side_TLS # - https://wiki.mozilla.org/Security/Server_Side_TLS
@ -80,57 +77,3 @@ def get_context_factory(cert_path, pkey_path):
ctx.set_options(SSL_OP_NO_RENEGOTIATION) ctx.set_options(SSL_OP_NO_RENEGOTIATION)
return cert_options return cert_options
def check_ssl_keys():
"""
Check for SSL cert/key and create them if necessary
"""
ssl_dir = deluge.configmanager.get_config_dir('ssl')
if not os.path.exists(ssl_dir):
# The ssl folder doesn't exist so we need to create it
os.makedirs(ssl_dir)
generate_ssl_keys()
else:
for f in ('daemon.pkey', 'daemon.cert'):
if not os.path.exists(os.path.join(ssl_dir, f)):
generate_ssl_keys()
break
def generate_ssl_keys():
"""
This method generates a new SSL key/cert.
"""
digest = 'sha256'
# Generate key pair
pkey = crypto.PKey()
pkey.generate_key(crypto.TYPE_RSA, 2048)
# Generate cert request
req = crypto.X509Req()
subj = req.get_subject()
setattr(subj, 'CN', 'Deluge Daemon')
req.set_pubkey(pkey)
req.sign(pkey, digest)
# Generate certificate
cert = crypto.X509()
cert.set_serial_number(0)
cert.gmtime_adj_notBefore(0)
cert.gmtime_adj_notAfter(60 * 60 * 24 * 365 * 3) # Three Years
cert.set_issuer(req.get_subject())
cert.set_subject(req.get_subject())
cert.set_pubkey(req.get_pubkey())
cert.sign(pkey, digest)
# Write out files
ssl_dir = deluge.configmanager.get_config_dir('ssl')
with open(os.path.join(ssl_dir, 'daemon.pkey'), 'wb') as _file:
_file.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
with open(os.path.join(ssl_dir, 'daemon.cert'), 'wb') as _file:
_file.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
# Make the files only readable by this user
for f in ('daemon.pkey', 'daemon.cert'):
os.chmod(os.path.join(ssl_dir, f), stat.S_IREAD | stat.S_IWRITE)

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2010 John Garland <johnnybg+deluge@gmail.com> # Copyright (C) 2010 John Garland <johnnybg+deluge@gmail.com>
# #
@ -6,13 +7,12 @@
# See LICENSE for more details. # See LICENSE for more details.
# #
from __future__ import unicode_literals
import inspect import inspect
import re import re
import warnings import warnings
from functools import wraps from functools import wraps
from typing import Any, Callable, Coroutine, TypeVar
from twisted.internet import defer
def proxy(proxy_func): def proxy(proxy_func):
@ -56,7 +56,7 @@ def overrides(*args):
if inspect.isfunction(args[0]): if inspect.isfunction(args[0]):
return _overrides(stack, args[0]) return _overrides(stack, args[0])
else: else:
# One or more classes are specified, so return a function that will be # One or more classes are specifed, so return a function that will be
# called with the real function as argument # called with the real function as argument
def ret_func(func, **kwargs): def ret_func(func, **kwargs):
return _overrides(stack, func, explicit_base_classes=args) return _overrides(stack, func, explicit_base_classes=args)
@ -107,7 +107,7 @@ def _overrides(stack, method, explicit_base_classes=None):
for c in base_classes + check_classes: for c in base_classes + check_classes:
classes[c] = get_class(c) classes[c] = get_class(c)
# Verify that the explicit override class is one of base classes # Verify that the excplicit override class is one of base classes
if explicit_base_classes: if explicit_base_classes:
from itertools import product from itertools import product
@ -127,7 +127,7 @@ def _overrides(stack, method, explicit_base_classes=None):
% ( % (
method.__name__, method.__name__,
cls, cls,
f'File: {stack[1][1]}:{stack[1][2]}', 'File: %s:%s' % (stack[1][1], stack[1][2]),
) )
) )
@ -137,7 +137,7 @@ def _overrides(stack, method, explicit_base_classes=None):
% ( % (
method.__name__, method.__name__,
check_classes, check_classes,
f'File: {stack[1][1]}:{stack[1][2]}', 'File: %s:%s' % (stack[1][1], stack[1][2]),
) )
) )
return method return method
@ -146,7 +146,7 @@ def _overrides(stack, method, explicit_base_classes=None):
def deprecated(func): def deprecated(func):
"""This is a decorator which can be used to mark function as deprecated. """This is a decorator which can be used to mark function as deprecated.
It will result in a warning being emitted when the function is used. It will result in a warning being emmitted when the function is used.
""" """
@ -154,7 +154,7 @@ def deprecated(func):
def depr_func(*args, **kwargs): def depr_func(*args, **kwargs):
warnings.simplefilter('always', DeprecationWarning) # Turn off filter warnings.simplefilter('always', DeprecationWarning) # Turn off filter
warnings.warn( warnings.warn(
f'Call to deprecated function {func.__name__}.', 'Call to deprecated function {}.'.format(func.__name__),
category=DeprecationWarning, category=DeprecationWarning,
stacklevel=2, stacklevel=2,
) )
@ -162,74 +162,3 @@ def deprecated(func):
return func(*args, **kwargs) return func(*args, **kwargs)
return depr_func return depr_func
class CoroutineDeferred(defer.Deferred):
"""Wraps a coroutine in a Deferred.
It will dynamically pass through the underlying coroutine without wrapping where apporpriate.
"""
def __init__(self, coro: Coroutine):
# Delay this import to make sure a reactor was installed first
from twisted.internet import reactor
super().__init__()
self.coro = coro
self.awaited = None
self.activate_deferred = reactor.callLater(0, self.activate)
def __await__(self):
if self.awaited in [None, True]:
self.awaited = True
return self.coro.__await__()
# Already in deferred mode
return super().__await__()
def activate(self):
"""If the result wasn't awaited before the next context switch, we turn it into a deferred."""
if self.awaited is None:
self.awaited = False
try:
d = defer.Deferred.fromCoroutine(self.coro)
except AttributeError:
# Fallback for Twisted <= 21.2 without fromCoroutine
d = defer.ensureDeferred(self.coro)
d.chainDeferred(self)
def _callback_activate(self):
"""Verify awaited status before calling activate."""
assert not self.awaited, 'Cannot add callbacks to an already awaited coroutine.'
self.activate()
def addCallback(self, *args, **kwargs): # noqa: N802
self._callback_activate()
return super().addCallback(*args, **kwargs)
def addCallbacks(self, *args, **kwargs): # noqa: N802
self._callback_activate()
return super().addCallbacks(*args, **kwargs)
def addErrback(self, *args, **kwargs): # noqa: N802
self._callback_activate()
return super().addErrback(*args, **kwargs)
def addBoth(self, *args, **kwargs): # noqa: N802
self._callback_activate()
return super().addBoth(*args, **kwargs)
_RetT = TypeVar('_RetT')
def maybe_coroutine(
f: Callable[..., Coroutine[Any, Any, _RetT]],
) -> 'Callable[..., defer.Deferred[_RetT]]':
"""Wraps a coroutine function to make it usable as a normal function that returns a Deferred."""
@wraps(f)
def wrapper(*args, **kwargs):
# Uncomment for quick testing to make sure CoroutineDeferred magic isn't at fault
# return defer.ensureDeferred(f(*args, **kwargs))
return CoroutineDeferred(f(*args, **kwargs))
return wrapper

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2008 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2008 Andrew Resch <andrewresch@gmail.com>
# Copyright (C) 2011 Pedro Algarvio <pedro@algarvio.me> # Copyright (C) 2011 Pedro Algarvio <pedro@algarvio.me>
@ -8,15 +9,18 @@
# #
from __future__ import unicode_literals
class DelugeError(Exception): class DelugeError(Exception):
def __new__(cls, *args, **kwargs): def __new__(cls, *args, **kwargs):
inst = super().__new__(cls, *args, **kwargs) inst = super(DelugeError, cls).__new__(cls, *args, **kwargs)
inst._args = args inst._args = args
inst._kwargs = kwargs inst._kwargs = kwargs
return inst return inst
def __init__(self, message=None): def __init__(self, message=None):
super().__init__(message) super(DelugeError, self).__init__(message)
self.message = message self.message = message
def __str__(self): def __str__(self):
@ -41,12 +45,12 @@ class InvalidPathError(DelugeError):
class WrappedException(DelugeError): class WrappedException(DelugeError):
def __init__(self, message, exception_type, traceback): def __init__(self, message, exception_type, traceback):
super().__init__(message) super(WrappedException, self).__init__(message)
self.type = exception_type self.type = exception_type
self.traceback = traceback self.traceback = traceback
def __str__(self): def __str__(self):
return f'{self.message}\n{self.traceback}' return '%s\n%s' % (self.message, self.traceback)
class _ClientSideRecreateError(DelugeError): class _ClientSideRecreateError(DelugeError):
@ -60,7 +64,7 @@ class IncompatibleClient(_ClientSideRecreateError):
'Your deluge client is not compatible with the daemon. ' 'Your deluge client is not compatible with the daemon. '
'Please upgrade your client to %(daemon_version)s' 'Please upgrade your client to %(daemon_version)s'
) % {'daemon_version': self.daemon_version} ) % {'daemon_version': self.daemon_version}
super().__init__(message=msg) super(IncompatibleClient, self).__init__(message=msg)
class NotAuthorizedError(_ClientSideRecreateError): class NotAuthorizedError(_ClientSideRecreateError):
@ -69,14 +73,14 @@ class NotAuthorizedError(_ClientSideRecreateError):
'current_level': current_level, 'current_level': current_level,
'required_level': required_level, 'required_level': required_level,
} }
super().__init__(message=msg) super(NotAuthorizedError, self).__init__(message=msg)
self.current_level = current_level self.current_level = current_level
self.required_level = required_level self.required_level = required_level
class _UsernameBasedPasstroughError(_ClientSideRecreateError): class _UsernameBasedPasstroughError(_ClientSideRecreateError):
def __init__(self, message, username): def __init__(self, message, username):
super().__init__(message) super(_UsernameBasedPasstroughError, self).__init__(message)
self.username = username self.username = username
@ -90,7 +94,3 @@ class AuthenticationRequired(_UsernameBasedPasstroughError):
class AuthManagerError(_UsernameBasedPasstroughError): class AuthManagerError(_UsernameBasedPasstroughError):
pass pass
class LibtorrentImportError(ImportError):
pass

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
# #
@ -13,6 +14,9 @@ This module describes the types of events that can be generated by the daemon
and subsequently emitted to the clients. and subsequently emitted to the clients.
""" """
from __future__ import unicode_literals
import six
known_events = {} known_events = {}
@ -23,12 +27,12 @@ class DelugeEventMetaClass(type):
""" """
def __init__(cls, name, bases, dct): # pylint: disable=bad-mcs-method-argument def __init__(cls, name, bases, dct): # pylint: disable=bad-mcs-method-argument
super().__init__(name, bases, dct) super(DelugeEventMetaClass, cls).__init__(name, bases, dct)
if name != 'DelugeEvent': if name != 'DelugeEvent':
known_events[name] = cls known_events[name] = cls
class DelugeEvent(metaclass=DelugeEventMetaClass): class DelugeEvent(six.with_metaclass(DelugeEventMetaClass, object)):
""" """
The base class for all events. The base class for all events.

View file

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# #
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com> # Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
# #
@ -6,7 +7,9 @@
# See LICENSE for more details. # See LICENSE for more details.
# #
import email.message from __future__ import unicode_literals
import cgi
import logging import logging
import os.path import os.path
import zlib import zlib
@ -16,11 +19,13 @@ from twisted.internet.defer import Deferred
from twisted.python.failure import Failure from twisted.python.failure import Failure
from twisted.web import client, http from twisted.web import client, http
from twisted.web._newclient import HTTPClientParser from twisted.web._newclient import HTTPClientParser
from twisted.web.error import Error, PageRedirect from twisted.web.error import PageRedirect
from twisted.web.http_headers import Headers from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent from twisted.web.iweb import IAgent
from zope.interface import implementer from zope.interface import implementer
from deluge.common import get_version
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -35,11 +40,11 @@ class CompressionDecoderProtocol(client._GzipProtocol):
"""A compression decoder protocol for CompressionDecoder.""" """A compression decoder protocol for CompressionDecoder."""
def __init__(self, protocol, response): def __init__(self, protocol, response):
super().__init__(protocol, response) super(CompressionDecoderProtocol, self).__init__(protocol, response)
self._zlibDecompress = zlib.decompressobj(32 + zlib.MAX_WBITS) self._zlibDecompress = zlib.decompressobj(32 + zlib.MAX_WBITS)
class BodyHandler(HTTPClientParser): class BodyHandler(HTTPClientParser, object):
"""An HTTP parser that saves the response to a file.""" """An HTTP parser that saves the response to a file."""
def __init__(self, request, finished, length, agent, encoding=None): def __init__(self, request, finished, length, agent, encoding=None):
@ -51,7 +56,7 @@ class BodyHandler(HTTPClientParser):
length (int): The length of the response. length (int): The length of the response.
agent (t.w.i.IAgent): The agent from which the request was sent. agent (t.w.i.IAgent): The agent from which the request was sent.
""" """
super().__init__(request, finished) super(BodyHandler, self).__init__(request, finished)
self.agent = agent self.agent = agent
self.finished = finished self.finished = finished
self.total_length = length self.total_length = length
@ -71,12 +76,12 @@ class BodyHandler(HTTPClientParser):
with open(self.agent.filename, 'wb') as _file: with open(self.agent.filename, 'wb') as _file:
_file.write(self.data) _file.write(self.data)
self.finished.callback(self.agent.filename) self.finished.callback(self.agent.filename)
self.state = 'DONE' self.state = u'DONE'
HTTPClientParser.connectionLost(self, reason) HTTPClientParser.connectionLost(self, reason)
@implementer(IAgent) @implementer(IAgent)
class HTTPDownloaderAgent: class HTTPDownloaderAgent(object):
"""A File Downloader Agent.""" """A File Downloader Agent."""
def __init__( def __init__(
@ -120,9 +125,6 @@ class HTTPDownloaderAgent:
location = response.headers.getRawHeaders(b'location')[0] location = response.headers.getRawHeaders(b'location')[0]
error = PageRedirect(response.code, location=location) error = PageRedirect(response.code, location=location)
finished.errback(Failure(error)) finished.errback(Failure(error))
elif response.code >= 400:
error = Error(response.code)
finished.errback(Failure(error))
else: else:
headers = response.headers headers = response.headers
body_length = int(headers.getRawHeaders(b'content-length', default=[0])[0]) body_length = int(headers.getRawHeaders(b'content-length', default=[0])[0])
@ -131,10 +133,9 @@ class HTTPDownloaderAgent:
content_disp = headers.getRawHeaders(b'content-disposition')[0].decode( content_disp = headers.getRawHeaders(b'content-disposition')[0].decode(
'utf-8' 'utf-8'
) )
message = email.message.EmailMessage() content_disp_params = cgi.parse_header(content_disp)[1]
message['content-disposition'] = content_disp if 'filename' in content_disp_params:
new_file_name = message.get_filename() new_file_name = content_disp_params['filename']
if new_file_name:
new_file_name = sanitise_filename(new_file_name) new_file_name = sanitise_filename(new_file_name)
new_file_name = os.path.join( new_file_name = os.path.join(
os.path.split(self.filename)[0], new_file_name os.path.split(self.filename)[0], new_file_name
@ -145,20 +146,14 @@ class HTTPDownloaderAgent:
fileext = os.path.splitext(new_file_name)[1] fileext = os.path.splitext(new_file_name)[1]
while os.path.isfile(new_file_name): while os.path.isfile(new_file_name):
# Increment filename if already exists # Increment filename if already exists
new_file_name = f'{fileroot}-{count}{fileext}' new_file_name = '%s-%s%s' % (fileroot, count, fileext)
count += 1 count += 1
self.filename = new_file_name self.filename = new_file_name
cont_type_header = headers.getRawHeaders(b'content-type')[0].decode() cont_type = headers.getRawHeaders(b'content-type')[0].decode()
message = email.message.EmailMessage() params = cgi.parse_header(cont_type)[1]
message['content-type'] = cont_type_header encoding = params.get('charset', None)
cont_type = message.get_content_type()
params = message['content-type'].params
# Only re-ecode text content types.
encoding = None
if cont_type.startswith('text/'):
encoding = params.get('charset', None)
response.deliverBody( response.deliverBody(
BodyHandler(response.request, finished, body_length, self, encoding) BodyHandler(response.request, finished, body_length, self, encoding)
) )
@ -181,7 +176,8 @@ class HTTPDownloaderAgent:
headers = Headers() headers = Headers()
if not headers.hasHeader(b'User-Agent'): if not headers.hasHeader(b'User-Agent'):
user_agent = 'Deluge' version = get_version()
user_agent = 'Deluge/%s (https://deluge-torrent.org)' % version
headers.addRawHeader('User-Agent', user_agent) headers.addRawHeader('User-Agent', user_agent)
d = self.agent.request( d = self.agent.request(

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -1,7 +1,10 @@
# -*- coding: utf-8 -*-
# #
# This file is public domain. # This file is public domain.
# #
from __future__ import unicode_literals
# Language code for this installation. All choices can be found here: # Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html # http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us' LANGUAGE_CODE = 'en-us'

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

Some files were not shown because too many files have changed in this diff Show more