Mirror of https://git.deluge-torrent.org/deluge (synced 2025-08-03 15:08:40 +00:00)

Compare commits: deluge-2.1...develop (141 commits)
Commit SHA1s:

6ec1479cdb 66d275734b 99f2f1209c 3a806973ea 22e9adbc31 c5ce83eb2b
a49b436ff2 a83f56a8a5 757a782351 ba7c489118 0b5addf58e 98d01fbe35
ee33c0c5bb 0e197ee07e e83f6b84fb 0878616b2e 7c5b7b44a3 7071da85c3
cb182daaaf 8df36c454b 40d4f7efef d064ad06c5 0d72195281 2247668571
e7d08d7645 90c5e75373 c88f750108 491458c4ad 5d96cfc72f 3bceb4bfc1
9d802b2a91 8867da94f8 e1fa8d18ec d5af32802f d1d72b1be8 776efe4faa
f101f0afdd d98d15422a d9e3facbe8 6ba23a8013 af70ff1fdc 18fa028d2d
322faa7a54 785ad00d2b 1e5f248fb8 80985c02da 7660e2e5ca 7f3f7f69ee
5dd7aa5321 ee97864086 848d668af9 d9ef65d745 7f70d6c6ff b7450b5082
7046824115 fa8d19335e 0c1a02dcb5 810751d72a 7199805c89 29cf72577f
42accef295 54d6f50231 b5f8c5af2d c7dc60571e 1989d0de73 1751d62df9
4088e13905 b63699c6de 8dba0efa85 b2005ecd78 39b99182ba 66eaea0059
5aa4d07816 f3d7b1ffe8 d8f9fe4acf f43b605b80 1dbb18b80a 21470799d0
e24081a17e 6c9b058d81 18dca70084 ed1366d5ce 7082d9cec4 015b0660be
a459e78268 8001110625 d8b586e6ba 905a7dc3bc 89b79e4b7f e70e43e631
b24a5d2465 701f68d70b de570ae536 40a66278a3 366cded7be dbedf7f639
81116a63ca a83ac65ab6 d2a56ce15e 71b634e968 39bd97f03e 196086c1fb
527cfa586c 25a2b113e2 c38b4c72d0 0745c0eff8 e90f6c7eef 7b1a0ef89c
75b27485e1 a64cdfaf78 4b6ac1f4c4 683a4f906e e70a983a55 9ce8afe507
f67fb4d520 d00068423f 7336877928 543fce4f29 38feea0fa4 7af584d649
1ba7beb7bc f4f4accd34 ae22a52f2f 22f74b60ce 253eb2240b 6c924e6128
930cf87103 45c9f3b90a 13f81efe98 98c5830013 8332d1aa39 6f7445be18
fb30478123 5d7b416373 4de754328f c4b9cc7292 fa750c9fd0 2a945de069
d0acd3e06e 3565a9a817 b3d1fd79a8
584 changed files with 360855 additions and 335815 deletions
.github/workflows/cd.yml (vendored): 32 changed lines
@@ -19,37 +19,37 @@ on:
 jobs:
   windows_package:
-    runs-on: windows-2019
+    runs-on: windows-2022
     if: (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'package'))
     strategy:
       matrix:
         arch: [x64, x86]
         python: ["3.9"]
-        libtorrent: [2.0.6, 1.2.15]
+        libtorrent: [2.0.7, 1.2.19]

     steps:
       # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0

       # Checkout Deluge source to subdir to enable packaging any tag/commit
       - name: Checkout Deluge source
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
           ref: ${{ github.event.inputs.ref }}
           fetch-depth: 0
           path: deluge_src

       - name: Set up Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python}}
           architecture: ${{ matrix.arch }}
           cache: pip

       - name: Prepare pip
-        run: python -m pip install wheel
+        run: python -m pip install wheel setuptools==68.*

       - name: Install GTK
         run: |
@@ -62,11 +62,14 @@ jobs:
           python -m pip install --no-index --find-links="C:\GTK\release\python" pycairo PyGObject

       - name: Install Python dependencies
+        # Pillow no longer provides 32-bit wheels for Windows
+        # so specify only-binary to install old version.
         run: >
           python -m pip install
-          twisted[tls]==22.4.0
+          --only-binary=pillow
+          twisted[tls]==22.8.0
           libtorrent==${{ matrix.libtorrent }}
-          pyinstaller==4.10
+          pyinstaller
           pygame
           -r requirements.txt

@@ -81,12 +84,13 @@ jobs:
         run: |
           pyinstaller --clean delugewin.spec --distpath freeze

-      - name: Fix OpenSSL for libtorrent x64
-        if: ${{ matrix.arch == 'x64' }}
-        working-directory: packaging/win/freeze/Deluge
+      - name: Verify Deluge exes
+        working-directory: packaging/win/freeze/Deluge/
         run: |
-          cp libssl-1_1.dll libssl-1_1-x64.dll
-          cp libcrypto-1_1.dll libcrypto-1_1-x64.dll
+          deluge-debug.exe -v
+          deluged-debug.exe -v
+          deluge-web-debug.exe -v
+          deluge-console -v

       - name: Make Deluge Installer
         working-directory: ./packaging/win
@@ -94,7 +98,7 @@ jobs:
           python setup_nsis.py
           makensis /Darch=${{ matrix.arch }} deluge-win-installer.nsi

-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v4
         with:
           name: deluge-py${{ matrix.python }}-lt${{ matrix.libtorrent }}-${{ matrix.arch }}
           path: packaging/win/*.exe
.github/workflows/ci.yml (vendored): 36 changed lines
@@ -6,22 +6,25 @@ on:

   # Allows you to run this workflow manually from the Actions tab
   workflow_dispatch:
+    inputs:
+      core-dump:
+        description: "Set to 1 to enable retrieving core dump from crashes"
+        default: "0"

 jobs:
   test-linux:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     strategy:
       matrix:
         python-version: ["3.7", "3.10"]

     steps:
       # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0

       - name: Set up Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
           cache: "pip"
@@ -33,8 +36,8 @@ jobs:

       - name: Install dependencies
         run: |
-          pip install --upgrade pip wheel
-          pip install -r requirements.txt -r requirements-tests.txt
+          pip install --upgrade pip wheel setuptools
+          pip install -r requirements-ci.txt
           pip install -e .

       - name: Install security dependencies
@@ -46,18 +49,21 @@ jobs:
           TESTSSL_VER: 3.0.6
           TESTSSL_URL: https://codeload.github.com/drwetter/testssl.sh/tar.gz/refs/tags/v

-      - name: Setup core dump directory
+      - name: Setup core dump catch and store
+        if: github.event.inputs.core-dump == '1'
         run: |
           sudo mkdir /cores/ && sudo chmod 777 /cores/
           echo "/cores/%E.%p" | sudo tee /proc/sys/kernel/core_pattern
+          ulimit -c unlimited
+          sudo apt install glibc-tools
+          echo "DEBUG_PREFIX=catchsegv python -X dev -m" >> $GITHUB_ENV

       - name: Test with pytest
         run: |
-          ulimit -c unlimited # Enable core dumps to be captured
           python -c 'from deluge._libtorrent import lt; print(lt.__version__)';
-          catchsegv python -X dev -m pytest -v -m "not (todo or gtkui)" deluge
+          $DEBUG_PREFIX pytest -v -m "not (todo or gtkui)" deluge

-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v4
         # capture all crashes as build artifacts
         if: failure()
         with:
@@ -65,19 +71,19 @@ jobs:
           path: /cores

   test-windows:
-    runs-on: windows-2019
+    runs-on: windows-2022
     strategy:
       matrix:
         python-version: ["3.7", "3.10"]

     steps:
       # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0

       - name: Set up Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
           cache: "pip"
@@ -85,8 +91,8 @@ jobs:

       - name: Install dependencies
         run: |
-          pip install --upgrade pip wheel
-          pip install -r requirements.txt -r requirements-tests.txt
+          pip install --upgrade pip wheel setuptools
+          pip install -r requirements-ci.txt
           pip install -e .

       - name: Test with pytest
.github/workflows/docs.yml (vendored): 23 changed lines
@@ -15,30 +15,23 @@ jobs:

     steps:
       # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@v2
+
+      - uses: actions/setup-python@v5
         with:
-          python-version: "3.8"
-      - name: Cache pip
-        uses: actions/cache@v2
-        with:
-          # This path is specific to Ubuntu
-          path: ~/.cache/pip
-          # Look to see if there is a cache hit for the corresponding requirements file
-          key: ${{ runner.os }}-pip-${{ hashFiles('requirements*.txt') }}
-          restore-keys: |
-            ${{ runner.os }}-pip-
-            ${{ runner.os }}-
+          python-version: "3.10"
+          cache: "pip"
+          cache-dependency-path: "requirements*.txt"

       - name: Install dependencies
         run: |
           pip install --upgrade pip wheel
           pip install tox
-          sudo apt-get install enchant
+          sudo apt-get install enchant-2

-      - name: Test with tox
+      - name: Build docs with tox
         env:
           TOX_ENV: docs
         run: |
.github/workflows/lint.yml (vendored): 6 changed lines
@@ -11,7 +11,7 @@ jobs:
   lint:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
       - name: Run pre-commit linting
-        uses: pre-commit/action@v2.0.2
+        uses: pre-commit/action@v3.0.1
.gitignore (vendored): 4 changed lines
@@ -12,14 +12,14 @@ __pycache__/
 *.tar.*
 .tox/
 deluge/i18n/*/
-deluge.pot
 deluge/ui/web/js/*.js
 deluge/ui/web/js/extjs/ext-extensions*.js
 *.desktop
-*.appdata.xml
+*.metainfo.xml
 .build_data*
 osx/app
 RELEASE-VERSION
 .venv*
 # used by setuptools to cache downloaded eggs
 /.eggs
+_pytest_temp/
@@ -6,35 +6,25 @@ exclude: >
     deluge/tests/data/.*svg|
   )$
 repos:
-  - repo: https://github.com/psf/black
-    rev: 22.3.0
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    # Ruff version.
+    rev: v0.6.4
     hooks:
-      - id: black
-        name: Fmt Black
+      - id: ruff
+        name: Chk Ruff
+        args: [--fix]
+      - id: ruff-format
+        name: Fmt Ruff
   - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: v2.5.1
+    rev: v2.7.1
     hooks:
       - id: prettier
         name: Fmt Prettier
         # Workaround to list modified files only.
         args: [--list-different]
-  - repo: https://github.com/pycqa/isort
-    rev: 5.10.1
-    hooks:
-      - id: isort
-        name: Fmt isort
-  - repo: https://github.com/pycqa/flake8
-    rev: 4.0.1
-    hooks:
-      - id: flake8
-        name: Chk Flake8
-        additional_dependencies:
-          - pep8-naming==0.12.1
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.1.0
+    rev: v4.4.0
     hooks:
-      - id: double-quote-string-fixer
-        name: Fix Double-quotes
       - id: end-of-file-fixer
         name: Fix End-of-files
         exclude_types: [javascript, css]
@@ -44,8 +34,8 @@ repos:
       - id: trailing-whitespace
         name: Fix Trailing whitespace
   - repo: https://github.com/asottile/pyupgrade
-    rev: v2.31.0
+    rev: v3.3.1
     hooks:
       - id: pyupgrade
-        args: [--py36-plus]
+        args: [--py37-plus]
         stages: [manual]
@@ -5,6 +5,14 @@
 # Required
 version: 2

+build:
+  os: ubuntu-22.04
+  tools:
+    python: "3.10"
+  jobs:
+    post_checkout:
+      - git fetch --unshallow || true
+
 # Build documentation in the docs/ directory with Sphinx
 sphinx:
   configuration: docs/source/conf.py
@@ -14,9 +22,8 @@ formats: all

 # Optionally set the version of Python and requirements required to build your docs
 python:
-  version: 3.7
   install:
     - requirements: requirements.txt
     - requirements: docs/requirements.txt
-    - method: setuptools
+    - method: pip
       path: .
CHANGELOG.md: 53 changed lines
@@ -1,5 +1,58 @@
 # Changelog

+## 2.1.x (TBA)
+
+### Breaking changes
+
+- Removed Python 3.6 support (Python >= 3.7)
+
+### Core
+
+- Fix GHSL-2024-189 - insecure HTTP for new version check.
+- Fix alert handler segfault.
+- Add support for creating v2 torrents.
+
+### GTK UI
+
+- Fix changing torrent ownership.
+- Fix upper limit of upload/download in Add Torrent dialog.
+- Fix #3339 - Resizing window crashes with Piecesbar or Stats plugin.
+- Fix #3350 - Unable to use quick search.
+- Fix #3598 - Missing AppIndicator option in Preferences.
+- Set Appindicator as default for tray icon on Linux.
+- Add feature to switch between dark/light themes.
+
+### Web UI
+
+- Fix GHSL-2024-191 - potential flag endpoint path traversal.
+- Fix GHSL-2024-188 - js script dir traversal vulnerability.
+- Fix GHSL-2024-190 - insecure tracker icon endpoint.
+- Fix unable to stop daemon in connection manager.
+- Fix responsiveness to avoid "Connection lost".
+- Add support for network interface name as well as IP address.
+- Add ability to change UI theme.
+
+### Console UI
+
+- Fix 'rm' and 'move' commands hanging when done.
+- Fix #3538 - Unable to add host in connection manager.
+- Disable interactive-mode on Windows.
+
+### UI library
+
+- Fix tracker icon display by converting to png format.
+- Fix splitting trackers by newline
+- Add clickable URLs for torrent comment and tracker status.
+
+### Label
+
+- Fix torrent deletion not removed from config.
+- Fix label display name in submenu.
+
+### AutoAdd
+
+- Fix #3515 - Torrent file decoding errors disabled watch folder.
+
 ## 2.1.1 (2022-07-10)

 ### Core
@@ -50,7 +50,7 @@ All modules will require the [common](#common) section dependencies.
 - [PyGObject]
 - [Pycairo]
 - [librsvg] _>= 2_
-- [libappindicator3] w/GIR - Optional: Ubuntu system tray icon.
+- [ayatanaappindicator3] w/GIR - Optional: Ubuntu system tray icon.

 ### MacOS

@@ -95,6 +95,6 @@ All modules will require the [common](#common) section dependencies.
 [mako]: https://www.makotemplates.org/
 [pygame]: https://www.pygame.org/
 [libnotify]: https://developer.gnome.org/libnotify/
-[python-appindicator]: https://packages.ubuntu.com/xenial/python-appindicator
+[ayatanaappindicator3]: https://lazka.github.io/pgi-docs/AyatanaAppIndicator3-0.1/index.html
 [librsvg]: https://wiki.gnome.org/action/show/Projects/LibRsvg
 [ifaddr]: https://pypi.org/project/ifaddr/
__builtins__.pyi (new file): 6 lines
@@ -0,0 +1,6 @@
+from twisted.web.http import Request
+
+__request__: Request
+
+def _(string: str) -> str: ...
+def _n(string: str) -> str: ...
@@ -14,6 +14,7 @@ Example:
     >>> from deluge._libtorrent import lt

 """
+

 from deluge.common import VersionSplit, get_version
 from deluge.error import LibtorrentImportError
@@ -85,7 +85,6 @@ def bdecode(x):


 class Bencached:
-
     __slots__ = ['bencoded']

     def __init__(self, s):
@@ -7,6 +7,7 @@
 #

 """Common functions for various parts of Deluge to use."""
+
 import base64
 import binascii
 import functools
@@ -23,15 +24,21 @@ import tarfile
 import time
 from contextlib import closing
 from datetime import datetime
+from importlib import resources
 from io import BytesIO
+from pathlib import Path
 from urllib.parse import unquote_plus, urljoin
 from urllib.request import pathname2url

-import pkg_resources
-
 from deluge.decorators import deprecated
 from deluge.error import InvalidPathError

+try:
+    from importlib.metadata import distribution
+except ImportError:
+    from pkg_resources import get_distribution as distribution
+
+
 try:
     import chardet
 except ImportError:
@@ -90,7 +97,7 @@ def get_version():
     Returns:
         str: The version of Deluge.
     """
-    return pkg_resources.get_distribution('Deluge').version
+    return distribution('Deluge').version


 def get_default_config_dir(filename=None):
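As a quick illustration of the import fallback above, the same version lookup works for any installed distribution; a minimal sketch, where 'pip' is purely a stand-in name and not part of Deluge:

    # Sketch of the importlib.metadata fallback adopted above.
    try:
        from importlib.metadata import distribution
    except ImportError:  # Python < 3.8
        from pkg_resources import get_distribution as distribution

    # 'pip' is an arbitrary example; Deluge itself calls distribution('Deluge').
    print(distribution('pip').version)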
@@ -290,20 +297,22 @@ def get_pixmap(fname):
     return resource_filename('deluge', os.path.join('ui', 'data', 'pixmaps', fname))


-def resource_filename(module, path):
-    """Get filesystem path for a resource.
+def resource_filename(module: str, path: str) -> str:
+    """Get filesystem path for a non-python resource.

-    This function contains a work-around for pkg_resources.resource_filename
-    not returning the correct path with multiple packages installed.
-
-    So if there's a second deluge package, installed globally and another in
-    develop mode somewhere else, while pkg_resources.get_distribution('Deluge')
-    returns the proper deluge instance, pkg_resources.resource_filename
-    does not, it returns the first found on the python path, which is wrong.
+    Abstracts getting module resource files. Originally created to
+    workaround pkg_resources.resource_filename limitations with
+    multiple Deluge packages installed.
     """
-    return pkg_resources.get_distribution('Deluge').get_resource_filename(
-        pkg_resources._manager, os.path.join(*(module.split('.') + [path]))
-    )
+    path = Path(path)
+
+    try:
+        with resources.as_file(resources.files(module) / path) as resource_file:
+            return str(resource_file)
+    except AttributeError:
+        # Python <= 3.8
+        with resources.path(module, path.parts[0]) as resource_file:
+            return str(resource_file.joinpath(*path.parts[1:]))


 def open_file(path, timestamp=None):
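For context, a minimal sketch of the importlib.resources pattern adopted above (Python >= 3.9); the helper name resource_path and its arguments are illustrative, not part of Deluge:

    from importlib import resources
    from pathlib import Path

    def resource_path(module: str, relative: str) -> str:
        # as_file() guarantees an on-disk path only inside the 'with' block;
        # for zip installs it may be a temporary extraction.
        with resources.as_file(resources.files(module) / Path(relative)) as resource_file:
            return str(resource_file)

Deluge's own wrapper additionally keeps the pre-3.9 resources.path() branch, as the hunk above shows.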
@@ -415,25 +424,31 @@ def translate_size_units():


 def fsize(fsize_b, precision=1, shortform=False):
-    """Formats the bytes value into a string with KiB, MiB or GiB units.
+    """Formats the bytes value into a string with KiB, MiB, GiB or TiB units.

     Args:
         fsize_b (int): The filesize in bytes.
-        precision (int): The filesize float precision.
+        precision (int): The output float precision, 1 by default.
+        shortform (bool): The output short|long form, False (long form) by default.

     Returns:
-        str: A formatted string in KiB, MiB or GiB units.
+        str: A formatted string in KiB, MiB, GiB or TiB units.

     Examples:
         >>> fsize(112245)
         '109.6 KiB'
         >>> fsize(112245, precision=0)
         '110 KiB'
+        >>> fsize(112245, shortform=True)
+        '109.6 K'

     Note:
         This function has been refactored for performance with the
         fsize units being translated outside the function.

+        Notice that short forms K|M|G|T are synonymous here with
+        KiB|MiB|GiB|TiB. They are powers of 1024, not 1000.
+
     """

     if fsize_b >= 1024**4:
@@ -469,7 +484,7 @@ def fpcnt(dec, precision=2):

     Args:
         dec (float): The ratio in the range [0.0, 1.0].
-        precision (int): The percentage float precision.
+        precision (int): The output float precision, 2 by default.

     Returns:
         str: A formatted string representing a percentage.
@@ -493,6 +508,8 @@ def fspeed(bps, precision=1, shortform=False):

     Args:
         bps (int): The speed in bytes per second.
+        precision (int): The output float precision, 1 by default.
+        shortform (bool): The output short|long form, False (long form) by default.

     Returns:
         str: A formatted string representing transfer speed.
@@ -501,6 +518,10 @@
         >>> fspeed(43134)
         '42.1 KiB/s'

+    Note:
+        Notice that short forms K|M|G|T are synonymous here with
+        KiB|MiB|GiB|TiB. They are powers of 1024, not 1000.
+
     """

     if bps < 1024**2:
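A small worked example of the 1024-versus-1000 distinction spelled out in the docstrings above, reusing the docstring's own fsize(112245) figure:

    fsize_b = 112245
    print(f'{fsize_b / 1024:.1f} KiB')  # 109.6 KiB, matching fsize(112245)
    print(f'{fsize_b / 1000:.1f} kB')   # 112.2 kB, the decimal prefix fsize does not use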
@@ -537,7 +558,7 @@ def fpeer(num_peers, total_peers):
         total_peers (int): The total number of peers.

     Returns:
-        str: A formatted string 'num_peers (total_peers)' or total_peers < 0, just 'num_peers'.
+        str: A formatted string 'num_peers (total_peers)' or if total_peers < 0, just 'num_peers'.

     Examples:
         >>> fpeer(10, 20)
@@ -586,16 +607,16 @@ def ftime(secs):
         time_str = f'{secs // 604800}w {secs // 86400 % 7}d'
     else:
         time_str = f'{secs // 31449600}y {secs // 604800 % 52}w'

     return time_str


 def fdate(seconds, date_only=False, precision_secs=False):
-    """Formats a date time string in the locale's date representation based on the systems timezone.
+    """Formats a date time string in the locale's date representation based on the system's timezone.

     Args:
         seconds (float): Time in seconds since the Epoch.
-        precision_secs (bool): Include seconds in time format.
+        date_only (bool): Whether to include only the date, False by default.
+        precision_secs (bool): Include seconds in time format, False by default.

     Returns:
         str: A string in the locale's datetime representation or "" if seconds < 0
@@ -620,10 +641,14 @@ def tokenize(text):
     Returns:
         list: A list of strings and/or numbers.

-    This function is used to implement robust tokenization of user input
-    It automatically coerces integer and floating point numbers, ignores
-    whitespace and knows how to separate numbers from strings even without
-    whitespace.
+    Note:
+        This function is used to implement robust tokenization of user input
+        It automatically coerces integer and floating point numbers, ignores
+        whitespace and knows how to separate numbers from strings even without
+        whitespace.
+
+        Possible optimization: move the 2 regexes outside of function.
+
     """
     tokenized_input = []
     for token in re.split(r'(\d+(?:\.\d+)?)', text):
@@ -644,12 +669,16 @@ size_units = [
     {'prefix': 'GiB', 'divider': 1024**3},
     {'prefix': 'TiB', 'divider': 1024**4},
     {'prefix': 'PiB', 'divider': 1024**5},
+    {'prefix': 'k', 'divider': 1000**1},
+    {'prefix': 'm', 'divider': 1000**2},
+    {'prefix': 'g', 'divider': 1000**3},
+    {'prefix': 't', 'divider': 1000**4},
+    {'prefix': 'p', 'divider': 1000**5},
     {'prefix': 'KB', 'divider': 1000**1},
     {'prefix': 'MB', 'divider': 1000**2},
     {'prefix': 'GB', 'divider': 1000**3},
     {'prefix': 'TB', 'divider': 1000**4},
     {'prefix': 'PB', 'divider': 1000**5},
-    {'prefix': 'm', 'divider': 1000**2},
 ]


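To show what the new single-letter entries add, here is a minimal lookup against a slice of the table above; to_bytes() is illustrative only, the real parsing lives in parse_human_size():

    size_units = [
        {'prefix': 'KiB', 'divider': 1024**1},
        {'prefix': 'k', 'divider': 1000**1},
        {'prefix': 'KB', 'divider': 1000**1},
    ]

    def to_bytes(value: float, prefix: str) -> int:
        # Pick the divider whose prefix matches and scale the value to bytes.
        divider = next(u['divider'] for u in size_units if u['prefix'] == prefix)
        return int(value * divider)

    print(to_bytes(1, 'KiB'))  # 1024
    print(to_bytes(1, 'k'))    # 1000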
@@ -692,6 +721,16 @@ def parse_human_size(size):
         raise InvalidSize(msg % (size, tokens))


+def anchorify_urls(text: str) -> str:
+    """
+    Wrap all occurrences of text URLs with HTML
+    """
+    url_pattern = r'((htt)|(ft)|(ud))ps?://\S+'
+    html_href_pattern = r'<a href="\g<0>">\g<0></a>'
+
+    return re.sub(url_pattern, html_href_pattern, text)
+
+
 def is_url(url):
     """
     A simple test to check if the URL is valid
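The substitution added in anchorify_urls() can be exercised directly with the two patterns shown above; the sample string is made up:

    import re

    url_pattern = r'((htt)|(ft)|(ud))ps?://\S+'
    html_href_pattern = r'<a href="\g<0>">\g<0></a>'

    text = 'Tracker status: see https://example.com/announce for details'
    print(re.sub(url_pattern, html_href_pattern, text))
    # Tracker status: see <a href="https://example.com/announce">https://example.com/announce</a> for details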
@@ -833,7 +872,7 @@ def create_magnet_uri(infohash, name=None, trackers=None):
     Args:
         infohash (str): The info-hash of the torrent.
         name (str, optional): The name of the torrent.
-        trackers (list or dict, optional): A list of trackers or dict or {tracker: tier} pairs.
+        trackers (list or dict, optional): A list of trackers or a dict or some {tracker: tier} pairs.

     Returns:
         str: A magnet URI string.
@@ -875,7 +914,7 @@ def get_path_size(path):
         return os.path.getsize(path)

     dir_size = 0
-    for (p, dummy_dirs, files) in os.walk(path):
+    for p, dummy_dirs, files in os.walk(path):
         for _file in files:
             filename = os.path.join(p, _file)
             dir_size += os.path.getsize(filename)
@@ -59,11 +59,16 @@ class Component:
                    Deluge core.

     **update()** - This method is called every 1 second by default while the
-                   Componented is in a *Started* state. The interval can be
+                   Component is in a *Started* state. The interval can be
                    specified during instantiation. The update() timer can be
                    paused by instructing the :class:`ComponentRegistry` to pause
                    this Component.

+    **pause()** - This method is called when the component is being paused.
+
+    **resume()** - This method is called when the component resumes from a Paused
+                   state.
+
     **shutdown()** - This method is called when the client is exiting. If the
                      Component is in a "Started" state when this is called, a
                      call to stop() will be issued prior to shutdown().
@@ -80,10 +85,10 @@ class Component:

     **Stopped** - The Component has either been stopped or has yet to be started.

-    **Stopping** - The Component has had it's stop method called, but it hasn't
+    **Stopping** - The Component has had its stop method called, but it hasn't
                    fully stopped yet.

-    **Paused** - The Component has had it's update timer stopped, but will
+    **Paused** - The Component has had its update timer stopped, but will
                  still be considered in a Started state.

     """
|
||||||
_ComponentRegistry.deregister(self)
|
_ComponentRegistry.deregister(self)
|
||||||
|
|
||||||
def _component_start_timer(self):
|
def _component_start_timer(self):
|
||||||
if hasattr(self, 'update'):
|
self._component_timer = LoopingCall(self.update)
|
||||||
self._component_timer = LoopingCall(self.update)
|
self._component_timer.start(self._component_interval)
|
||||||
self._component_timer.start(self._component_interval)
|
|
||||||
|
|
||||||
def _component_start(self):
|
def _component_start(self):
|
||||||
def on_start(result):
|
def on_start(result):
|
||||||
|
@ -129,13 +133,10 @@ class Component:
|
||||||
return fail(result)
|
return fail(result)
|
||||||
|
|
||||||
if self._component_state == 'Stopped':
|
if self._component_state == 'Stopped':
|
||||||
if hasattr(self, 'start'):
|
self._component_state = 'Starting'
|
||||||
self._component_state = 'Starting'
|
d = deferLater(reactor, 0, self.start)
|
||||||
d = deferLater(reactor, 0, self.start)
|
d.addCallbacks(on_start, on_start_fail)
|
||||||
d.addCallbacks(on_start, on_start_fail)
|
self._component_starting_deferred = d
|
||||||
self._component_starting_deferred = d
|
|
||||||
else:
|
|
||||||
d = maybeDeferred(on_start, None)
|
|
||||||
elif self._component_state == 'Starting':
|
elif self._component_state == 'Starting':
|
||||||
return self._component_starting_deferred
|
return self._component_starting_deferred
|
||||||
elif self._component_state == 'Started':
|
elif self._component_state == 'Started':
|
||||||
|
@ -165,14 +166,11 @@ class Component:
|
||||||
return result
|
return result
|
||||||
|
|
||||||
if self._component_state != 'Stopped' and self._component_state != 'Stopping':
|
if self._component_state != 'Stopped' and self._component_state != 'Stopping':
|
||||||
if hasattr(self, 'stop'):
|
self._component_state = 'Stopping'
|
||||||
self._component_state = 'Stopping'
|
d = maybeDeferred(self.stop)
|
||||||
d = maybeDeferred(self.stop)
|
d.addCallback(on_stop)
|
||||||
d.addCallback(on_stop)
|
d.addErrback(on_stop_fail)
|
||||||
d.addErrback(on_stop_fail)
|
self._component_stopping_deferred = d
|
||||||
self._component_stopping_deferred = d
|
|
||||||
else:
|
|
||||||
d = maybeDeferred(on_stop, None)
|
|
||||||
|
|
||||||
if self._component_state == 'Stopping':
|
if self._component_state == 'Stopping':
|
||||||
return self._component_stopping_deferred
|
return self._component_stopping_deferred
|
||||||
|
@ -182,13 +180,12 @@ class Component:
|
||||||
def _component_pause(self):
|
def _component_pause(self):
|
||||||
def on_pause(result):
|
def on_pause(result):
|
||||||
self._component_state = 'Paused'
|
self._component_state = 'Paused'
|
||||||
|
if self._component_timer and self._component_timer.running:
|
||||||
|
self._component_timer.stop()
|
||||||
|
|
||||||
if self._component_state == 'Started':
|
if self._component_state == 'Started':
|
||||||
if self._component_timer and self._component_timer.running:
|
d = maybeDeferred(self.pause)
|
||||||
d = maybeDeferred(self._component_timer.stop)
|
d.addCallback(on_pause)
|
||||||
d.addCallback(on_pause)
|
|
||||||
else:
|
|
||||||
d = succeed(None)
|
|
||||||
elif self._component_state == 'Paused':
|
elif self._component_state == 'Paused':
|
||||||
d = succeed(None)
|
d = succeed(None)
|
||||||
else:
|
else:
|
||||||
|
@ -205,9 +202,10 @@ class Component:
|
||||||
def _component_resume(self):
|
def _component_resume(self):
|
||||||
def on_resume(result):
|
def on_resume(result):
|
||||||
self._component_state = 'Started'
|
self._component_state = 'Started'
|
||||||
|
self._component_start_timer()
|
||||||
|
|
||||||
if self._component_state == 'Paused':
|
if self._component_state == 'Paused':
|
||||||
d = maybeDeferred(self._component_start_timer)
|
d = maybeDeferred(self.resume)
|
||||||
d.addCallback(on_resume)
|
d.addCallback(on_resume)
|
||||||
else:
|
else:
|
||||||
d = fail(
|
d = fail(
|
||||||
|
@ -222,9 +220,7 @@ class Component:
|
||||||
|
|
||||||
def _component_shutdown(self):
|
def _component_shutdown(self):
|
||||||
def on_stop(result):
|
def on_stop(result):
|
||||||
if hasattr(self, 'shutdown'):
|
return maybeDeferred(self.shutdown)
|
||||||
return maybeDeferred(self.shutdown)
|
|
||||||
return succeed(None)
|
|
||||||
|
|
||||||
d = self._component_stop()
|
d = self._component_stop()
|
||||||
d.addCallback(on_stop)
|
d.addCallback(on_stop)
|
||||||
|
@ -245,6 +241,12 @@ class Component:
|
||||||
def shutdown(self):
|
def shutdown(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
def pause(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def resume(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class ComponentRegistry:
|
class ComponentRegistry:
|
||||||
"""The ComponentRegistry holds a list of currently registered :class:`Component` objects.
|
"""The ComponentRegistry holds a list of currently registered :class:`Component` objects.
|
||||||
|
|
|
@@ -38,6 +38,7 @@ this can only be done for the 'config file version' and not for the 'format'
 version as this will be done internally.

 """
+
 import json
 import logging
 import os
@@ -3,7 +3,7 @@
 # the additional special exception to link portions of this program with the OpenSSL library.
 # See LICENSE for more details.
 #
+import asyncio
 import tempfile
 import warnings
 from unittest.mock import Mock, patch
@@ -12,7 +12,7 @@ import pytest
 import pytest_twisted
 from twisted.internet import reactor
 from twisted.internet.defer import Deferred, maybeDeferred
-from twisted.internet.error import CannotListenError
+from twisted.internet.error import CannotListenError, ProcessTerminated
 from twisted.python.failure import Failure

 import deluge.component as _component
@@ -42,15 +42,18 @@ def mock_callback():
     The returned Mock instance will have a `deferred` attribute which will complete when the callback has been called.
     """

-    def reset():
+    def reset(timeout=0.5, *args, **kwargs):
         if mock.called:
-            original_reset_mock()
-        deferred = Deferred()
-        deferred.addTimeout(0.5, reactor)
+            original_reset_mock(*args, **kwargs)
+        if mock.deferred:
+            mock.deferred.cancel()
+        deferred = Deferred(canceller=lambda x: deferred.callback(None))
+        deferred.addTimeout(timeout, reactor)
         mock.side_effect = lambda *args, **kw: deferred.callback((args, kw))
         mock.deferred = deferred

     mock = Mock()
+    mock.__qualname__ = 'mock'
     original_reset_mock = mock.reset_mock
     mock.reset_mock = reset
     mock.reset_mock()
@@ -59,8 +62,9 @@ def mock_callback():

 @pytest.fixture
 def config_dir(tmp_path):
-    deluge.configmanager.set_config_dir(tmp_path)
-    yield tmp_path
+    config_dir = tmp_path / 'config'
+    deluge.configmanager.set_config_dir(config_dir)
+    yield config_dir


 @pytest_twisted.async_yield_fixture()
@@ -84,9 +88,10 @@ async def client(request, config_dir, monkeypatch, listen_port):


 @pytest_twisted.async_yield_fixture
-async def daemon(request, config_dir):
+async def daemon(request, config_dir, tmp_path):
     listen_port = DEFAULT_LISTEN_PORT
-    logfile = f'daemon_{request.node.name}.log'
+    logfile = tmp_path / 'daemon.log'
+
     if hasattr(request.cls, 'daemon_custom_script'):
         custom_script = request.cls.daemon_custom_script
     else:
@@ -116,7 +121,10 @@
         raise exception_error
     daemon.listen_port = listen_port
     yield daemon
-    await daemon.kill()
+    try:
+        await daemon.kill()
+    except ProcessTerminated:
+        pass


 @pytest.fixture(autouse=True)
@@ -137,7 +145,7 @@ def common_fixture(config_dir, request, monkeypatch, listen_port):


 @pytest_twisted.async_yield_fixture(scope='function')
-async def component(request):
+async def component():
     """Verify component registry is clean, and clean up after test."""
     if len(_component._ComponentRegistry.components) != 0:
         warnings.warn(
@@ -190,3 +198,18 @@ def mock_mkstemp(tmp_path):
     tmp_file = tempfile.mkstemp(dir=tmp_path)
     with patch('tempfile.mkstemp', return_value=tmp_file):
         yield tmp_file
+
+
+def pytest_collection_modifyitems(session, config, items) -> None:
+    """
+    Automatically runs async tests with pytest_twisted.ensureDeferred
+    """
+    function_items = (item for item in items if isinstance(item, pytest.Function))
+    for function_item in function_items:
+        function = function_item.obj
+        if hasattr(function, '__func__'):
+            # methods need to be unwrapped.
+            function = function.__func__
+        if asyncio.iscoroutinefunction(function):
+            # This is how pytest_twisted marks ensureDeferred tests
+            setattr(function, '_pytest_twisted_mark', 'async_test')
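With the collection hook above, a plain coroutine test is handed to pytest_twisted automatically, so the explicit decorator shown here becomes optional; the test body is illustrative only:

    import pytest_twisted

    @pytest_twisted.ensureDeferred  # no longer required once the hook marks coroutines
    async def test_daemon_listens(daemon):
        assert daemon.listen_port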
@@ -14,10 +14,16 @@ This should typically only be used by the Core. Plugins should utilize the
 `:mod:EventManager` for similar functionality.

 """
-import logging
-from types import SimpleNamespace

-from twisted.internet import reactor
+import contextlib
+import logging
+import threading
+import time
+from collections import defaultdict
+from functools import partial
+from typing import Any, Callable
+
+from twisted.internet import reactor, task, threads

 import deluge.component as component
 from deluge._libtorrent import lt
@@ -31,7 +37,7 @@ class AlertManager(component.Component):

     def __init__(self):
         log.debug('AlertManager init...')
-        component.Component.__init__(self, 'AlertManager', interval=0.3)
+        component.Component.__init__(self, 'AlertManager')
         self.session = component.get('Core').session

         # Increase the alert queue size so that alerts don't get lost.
@@ -52,48 +58,88 @@ class AlertManager(component.Component):
         self.session.apply_settings({'alert_mask': alert_mask})

         # handlers is a dictionary of lists {"alert_type": [handler1,h2,..]}
-        self.handlers = {}
+        self.handlers = defaultdict(list)
+        self.handlers_timeout_secs = 2
         self.delayed_calls = []
+        self._event = threading.Event()

     def update(self):
-        self.delayed_calls = [dc for dc in self.delayed_calls if dc.active()]
-        self.handle_alerts()
+        pass
+
+    def start(self):
+        thread = threading.Thread(
+            target=self.wait_for_alert_in_thread, name='alert-poller', daemon=True
+        )
+        thread.start()
+        self._event.set()

     def stop(self):
+        self.cancel_delayed_calls()
+
+    def pause(self):
+        self._event.clear()
+
+    def resume(self):
+        self._event.set()
+
+    def wait_for_alert_in_thread(self):
+        while self._component_state not in ('Stopping', 'Stopped'):
+            if self.check_delayed_calls():
+                time.sleep(0.05)
+                continue
+
+            if self.session.wait_for_alert(1000) is None:
+                continue
+            if self._event.wait():
+                threads.blockingCallFromThread(reactor, self.maybe_handle_alerts)
+
+    def on_delayed_call_timeout(self, result, timeout, **kwargs):
+        log.warning('Alert handler was timed-out before being called %s', kwargs)
+
+    def cancel_delayed_calls(self):
+        """Cancel all delayed handlers."""
         for delayed_call in self.delayed_calls:
-            if delayed_call.active():
-                delayed_call.cancel()
+            delayed_call.cancel()
         self.delayed_calls = []

-    def register_handler(self, alert_type, handler):
+    def check_delayed_calls(self) -> bool:
+        """Returns True if any handler calls are delayed."""
+        self.delayed_calls = [dc for dc in self.delayed_calls if not dc.called]
+        return len(self.delayed_calls) > 0
+
+    def maybe_handle_alerts(self) -> None:
+        if self._component_state != 'Started':
+            return
+
+        self.handle_alerts()
+
+    def register_handler(self, alert_type: str, handler: Callable[[Any], None]) -> None:
         """
         Registers a function that will be called when 'alert_type' is pop'd
         in handle_alerts. The handler function should look like: handler(alert)
         Where 'alert' is the actual alert object from libtorrent.

-        :param alert_type: str, this is string representation of the alert name
-        :param handler: func(alert), the function to be called when the alert is raised
+        Args:
+            alert_type: String representation of the libtorrent alert name.
+                Can be supplied with or without `_alert` suffix.
+            handler: Callback function when the alert is raised.
         """
-        if alert_type not in self.handlers:
-            # There is no entry for this alert type yet, so lets make it with an
-            # empty list.
-            self.handlers[alert_type] = []
-
-        # Append the handler to the list in the handlers dictionary
+        if alert_type and alert_type.endswith('_alert'):
+            alert_type = alert_type[: -len('_alert')]
+
         self.handlers[alert_type].append(handler)
         log.debug('Registered handler for alert %s', alert_type)

-    def deregister_handler(self, handler):
+    def deregister_handler(self, handler: Callable[[Any], None]):
         """
-        De-registers the `:param:handler` function from all alert types.
+        De-registers the `handler` function from all alert types.

-        :param handler: func, the handler function to deregister
+        Args:
+            handler: The handler function to deregister.
         """
-        # Iterate through all handlers and remove 'handler' where found
-        for (dummy_key, value) in self.handlers.items():
-            if handler in value:
-                # Handler is in this alert type list
-                value.remove(handler)
+        for alert_type_handlers in self.handlers.values():
+            with contextlib.suppress(ValueError):
+                alert_type_handlers.remove(handler)

     def handle_alerts(self):
         """
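Given the suffix handling added to register_handler() above, both spellings of a libtorrent alert name now land under the same key; here alertmanager stands for the AlertManager component instance and the handler body is illustrative:

    def on_torrent_finished(alert):
        ...  # react to the alert

    # Either spelling registers under the 'torrent_finished' key:
    alertmanager.register_handler('torrent_finished_alert', on_torrent_finished)
    # alertmanager.register_handler('torrent_finished', on_torrent_finished)  # equivalent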
@@ -112,26 +158,32 @@ class AlertManager(component.Component):
                 num_alerts,
             )

-        # Loop through all alerts in the queue
         for alert in alerts:
-            alert_type = type(alert).__name__
+            alert_type = alert.what()

             # Display the alert message
             if log.isEnabledFor(logging.DEBUG):
                 log.debug('%s: %s', alert_type, decode_bytes(alert.message()))

+            if alert_type not in self.handlers:
+                continue
+
             # Call any handlers for this alert type
-            if alert_type in self.handlers:
-                for handler in self.handlers[alert_type]:
-                    if log.isEnabledFor(logging.DEBUG):
-                        log.debug('Handling alert: %s', alert_type)
-                    # Copy alert attributes
-                    alert_copy = SimpleNamespace(
-                        **{
-                            attr: getattr(alert, attr)
-                            for attr in dir(alert)
-                            if not attr.startswith('__')
-                        }
-                    )
-                    self.delayed_calls.append(reactor.callLater(0, handler, alert_copy))
+            for handler in self.handlers[alert_type]:
+                if log.isEnabledFor(logging.DEBUG):
+                    log.debug('Handling alert: %s', alert_type)
+                d = task.deferLater(reactor, 0, handler, alert)
+                on_handler_timeout = partial(
+                    self.on_delayed_call_timeout,
+                    handler=handler.__qualname__,
+                    alert_type=alert_type,
+                )
+                d.addTimeout(
+                    self.handlers_timeout_secs,
+                    reactor,
+                    onTimeoutCancel=on_handler_timeout,
+                )
+                self.delayed_calls.append(d)

     def set_alert_queue_size(self, queue_size):
         """Sets the maximum size of the libtorrent alert queue"""
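The dispatch pattern above, reduced to a standalone sketch: run a handler via the reactor and cancel it with a warning if it has not completed within a timeout. The function names and the print() stand-in are illustrative:

    from functools import partial

    from twisted.internet import reactor, task

    def dispatch(handler, alert, timeout_secs=2):
        # Schedule the handler on the reactor and bound its runtime.
        d = task.deferLater(reactor, 0, handler, alert)
        on_timeout = partial(print, 'alert handler timed out:')  # stand-in for log.warning
        d.addTimeout(timeout_secs, reactor, onTimeoutCancel=on_timeout)
        return d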
@@ -12,17 +12,16 @@ import logging
 import os
 import shutil
 import tempfile
-import threading
 from base64 import b64decode, b64encode
 from typing import Any, Dict, List, Optional, Tuple, Union
 from urllib.request import URLError, urlopen

-from twisted.internet import defer, reactor, task
+from twisted.internet import defer, reactor, task, threads
 from twisted.web.client import Agent, readBody

 import deluge.common
 import deluge.component as component
-from deluge import path_chooser_common
+from deluge import metafile, path_chooser_common
 from deluge._libtorrent import LT_VERSION, lt
 from deluge.configmanager import ConfigManager, get_config_dir
 from deluge.core.alertmanager import AlertManager
@@ -199,7 +198,7 @@ class Core(component.Component):
         self.session_status_timer_interval = 0.5
         self.session_status_timer = task.LoopingCall(self.session.post_session_stats)
         self.alertmanager.register_handler(
-            'session_stats_alert', self._on_alert_session_stats
+            'session_stats', self._on_alert_session_stats
         )
         self.session_rates_timer_interval = 2
         self.session_rates_timer = task.LoopingCall(self._update_session_rates)
@@ -374,8 +373,9 @@ class Core(component.Component):
     def get_new_release(self):
         log.debug('get_new_release')
         try:
+            # Use HTTPS URL to avoid potential spoofing of release page.
             self.new_release = (
-                urlopen('http://download.deluge-torrent.org/version-2.0')
+                urlopen('https://ftp.osuosl.org/pub/deluge/version-2.0')
                 .read()
                 .decode()
                 .strip()
@@ -992,31 +992,33 @@ class Core(component.Component):
         path,
         tracker,
         piece_length,
-        comment,
-        target,
-        webseeds,
-        private,
-        created_by,
-        trackers,
-        add_to_session,
+        comment=None,
+        target=None,
+        webseeds=None,
+        private=False,
+        created_by=None,
+        trackers=None,
+        add_to_session=False,
+        torrent_format=metafile.TorrentFormat.V1,
     ):
+        if isinstance(torrent_format, str):
+            torrent_format = metafile.TorrentFormat(torrent_format)
+
         log.debug('creating torrent..')
-        threading.Thread(
-            target=self._create_torrent_thread,
-            args=(
-                path,
-                tracker,
-                piece_length,
-                comment,
-                target,
-                webseeds,
-                private,
-                created_by,
-                trackers,
-                add_to_session,
-            ),
-        ).start()
+        return threads.deferToThread(
+            self._create_torrent_thread,
+            path,
+            tracker,
+            piece_length,
+            comment=comment,
+            target=target,
+            webseeds=webseeds,
+            private=private,
+            created_by=created_by,
+            trackers=trackers,
+            add_to_session=add_to_session,
+            torrent_format=torrent_format,
+        )

     def _create_torrent_thread(
         self,
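Switching create_torrent from a bare threading.Thread to twisted.internet.threads.deferToThread means the caller now receives a Deferred that fires with the worker's return value, or its exception, instead of a fire-and-forget thread. A minimal sketch of the difference, using an invented hashing job rather than Deluge's torrent builder:

    import hashlib

    from twisted.internet import reactor, threads

    def build_blob(data: bytes) -> str:
        # Runs in a reactor thread-pool thread; blocking work is fine here.
        return hashlib.sha1(data).hexdigest()

    def main():
        d = threads.deferToThread(build_blob, b'payload')
        d.addCallback(lambda digest: print('worker finished:', digest))
        d.addErrback(lambda failure: print('worker failed:', failure))
        d.addBoth(lambda _: reactor.stop())

    reactor.callWhenRunning(main)
    reactor.run()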
@@ -1030,27 +1032,41 @@ class Core(component.Component):
         created_by,
         trackers,
         add_to_session,
+        torrent_format,
     ):
         from deluge import metafile

-        metafile.make_meta_file(
+        filecontent = metafile.make_meta_file_content(
             path,
             tracker,
             piece_length,
             comment=comment,
-            target=target,
             webseeds=webseeds,
             private=private,
             created_by=created_by,
             trackers=trackers,
+            torrent_format=torrent_format,
         )

+        write_file = False
+        if target or not add_to_session:
+            write_file = True
+
+        if not target:
+            target = metafile.default_meta_file_path(path)
+        filename = os.path.split(target)[-1]
+
+        if write_file:
+            with open(target, 'wb') as _file:
+                _file.write(filecontent)
+
+        filedump = b64encode(filecontent)
         log.debug('torrent created!')
         if add_to_session:
             options = {}
             options['download_location'] = os.path.split(path)[0]
-            with open(target, 'rb') as _file:
-                filedump = b64encode(_file.read())
-            self.add_torrent_file(os.path.split(target)[1], filedump, options)
+            self.add_torrent_file(filename, filedump, options)
+        return filename, filedump

     @export
     def upload_plugin(self, filename: str, filedump: Union[str, bytes]) -> None:
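With _create_torrent_thread now returning a (filename, filedump) pair and base64-encoding the in-memory metafile instead of re-reading it from disk, the result can be handed back over RPC as well as added to the session. A hedged sketch of the enum coercion and base64 steps the new code relies on; TorrentFormat below is a stand-in for deluge.metafile.TorrentFormat and its member values are assumptions:

    import enum
    from base64 import b64decode, b64encode

    class TorrentFormat(enum.Enum):  # stand-in; values assumed for illustration
        V1 = 'v1'
        V2 = 'v2'
        HYBRID = 'hybrid'

    def normalise_format(torrent_format):
        # Mirrors the str -> enum coercion at the top of create_torrent().
        if isinstance(torrent_format, str):
            torrent_format = TorrentFormat(torrent_format)
        return torrent_format

    filecontent = b'd8:announce3:urle'   # pretend bencoded metafile content
    filedump = b64encode(filecontent)    # safe to ship over the RPC layer
    assert b64decode(filedump) == filecontent
    print(normalise_format('v2'))        # TorrentFormat.V2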
@@ -7,6 +7,7 @@
 #

 """The Deluge daemon"""

 import logging
 import os
 import socket
@@ -8,6 +8,7 @@

 """PluginManager for Core"""

 import logging

 from twisted.internet import defer
@@ -200,7 +200,10 @@ class PreferencesManager(component.Component):
     def __set_listen_on(self):
         """Set the ports and interface address to listen for incoming connections on."""
         if self.config['random_port']:
-            if not self.config['listen_random_port']:
+            if (
+                not self.config['listen_reuse_port']
+                or not self.config['listen_random_port']
+            ):
                 self.config['listen_random_port'] = random.randrange(49152, 65525)
             listen_ports = [
                 self.config['listen_random_port']
@@ -7,6 +7,7 @@
 #

 """RPCServer Module"""

 import logging
 import os
 import sys
@@ -27,6 +28,7 @@ from deluge.core.authmanager import (
 )
 from deluge.crypto_utils import check_ssl_keys, get_context_factory
 from deluge.error import (
+    BadLoginError,
     DelugeError,
     IncompatibleClient,
     NotAuthorizedError,
@@ -46,13 +48,11 @@ TCallable = TypeVar('TCallable', bound=Callable)


 @overload
-def export(func: TCallable) -> TCallable:
-    ...
+def export(func: TCallable) -> TCallable: ...


 @overload
-def export(auth_level: int) -> Callable[[TCallable], TCallable]:
-    ...
+def export(auth_level: int) -> Callable[[TCallable], TCallable]: ...


 def export(auth_level=AUTH_LEVEL_DEFAULT):
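The rpcserver hunk above collapses the typing.overload stubs onto one line with a trailing ellipsis, the style newer formatters prefer for bodyless declarations. A small sketch of the same overload pattern on an invented decorator; only the overload layout mirrors the diff, the body is illustrative:

    from typing import Callable, TypeVar, overload

    TCallable = TypeVar('TCallable', bound=Callable)

    @overload
    def export(func: TCallable) -> TCallable: ...

    @overload
    def export(auth_level: int) -> Callable[[TCallable], TCallable]: ...

    def export(auth_level=5):
        # Invented implementation: supports bare @export or @export(10).
        if callable(auth_level):
            auth_level._rpc_exported = True
            return auth_level

        def wrap(func):
            func._rpc_exported = True
            return func

        return wrap

    @export
    def ping() -> str:
        return 'pong'

    print(ping._rpc_exported)  # True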
@@ -274,14 +274,22 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
                     raise IncompatibleClient(deluge.common.get_version())
                 ret = component.get('AuthManager').authorize(*args, **kwargs)
                 if ret:
-                    self.factory.authorized_sessions[
-                        self.transport.sessionno
-                    ] = self.AuthLevel(ret, args[0])
+                    self.factory.authorized_sessions[self.transport.sessionno] = (
+                        self.AuthLevel(ret, args[0])
+                    )
                     self.factory.session_protocols[self.transport.sessionno] = self
             except Exception as ex:
                 send_error()
                 if not isinstance(ex, _ClientSideRecreateError):
                     log.exception(ex)
+                if isinstance(ex, BadLoginError):
+                    peer = self.transport.getPeer()
+                    log.error(
+                        'Deluge client authentication error made from: %s:%s (%s)',
+                        peer.host,
+                        peer.port,
+                        str(ex),
+                    )
             else:
                 self.sendData((RPC_RESPONSE, request_id, (ret)))
                 if not ret:
@@ -545,8 +553,8 @@ class RPCServer(component.Component):
         :type event: :class:`deluge.event.DelugeEvent`
         """
         log.debug('intevents: %s', self.factory.interested_events)
-        # Find sessions interested in this event
-        for session_id, interest in self.factory.interested_events.items():
+        # Use copy of `interested_events` since it can mutate while iterating.
+        for session_id, interest in self.factory.interested_events.copy().items():
             if event.name in interest:
                 log.debug('Emit Event: %s %s', event.name, event.args)
                 # This session is interested so send a RPC_EVENT
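The .copy() added to interested_events guards against the dictionary being resized by another callback while the emit loop is still iterating, which CPython reports as a RuntimeError. A compact sketch of the failure and the fix; the session table here is invented:

    interested = {1: {'TorrentAddedEvent'}, 2: {'TorrentRemovedEvent'}}

    def emit_unsafe(event_name):
        for session_id, names in interested.items():
            if event_name in names:
                # Simulate a handler that disconnects a session mid-emit.
                del interested[2]

    def emit_safe(event_name):
        for session_id, names in interested.copy().items():
            if event_name in names and 2 in interested:
                del interested[2]

    try:
        emit_unsafe('TorrentAddedEvent')
    except RuntimeError as err:
        print('unsafe loop:', err)  # dictionary changed size during iteration

    interested[2] = {'TorrentRemovedEvent'}
    emit_safe('TorrentAddedEvent')
    print('safe loop left:', interested)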
@@ -1138,9 +1138,8 @@ class Torrent:
             'download_location': lambda: self.options['download_location'],
             'seeds_peers_ratio': lambda: -1.0
             if self.status.num_incomplete == 0
-            else (  # Use -1.0 to signify infinity
-                self.status.num_complete / self.status.num_incomplete
-            ),
+            # Use -1.0 to signify infinity
+            else (self.status.num_complete / self.status.num_incomplete),
             'seed_rank': lambda: self.status.seed_rank,
             'state': lambda: self.state,
             'stop_at_ratio': lambda: self.options['stop_at_ratio'],
@@ -1544,20 +1543,18 @@ class Torrent:
             self.status.pieces, self.handle.piece_availability()
         ):
             if piece:
-                pieces.append(3)  # Completed.
+                # Completed.
+                pieces.append(3)
             elif avail_piece:
-                pieces.append(
-                    1
-                )  # Available, just not downloaded nor being downloaded.
+                # Available, just not downloaded nor being downloaded.
+                pieces.append(1)
             else:
-                pieces.append(
-                    0
-                )  # Missing, no known peer with piece, or not asked for yet.
+                # Missing, no known peer with piece, or not asked for yet.
+                pieces.append(0)

         for peer_info in self.handle.get_peer_info():
             if peer_info.downloading_piece_index >= 0:
-                pieces[
-                    peer_info.downloading_piece_index
-                ] = 2  # Being downloaded from peer.
+                # Being downloaded from peer.
+                pieces[peer_info.downloading_piece_index] = 2

         return pieces
@@ -7,6 +7,7 @@
 #

 """TorrentManager handles Torrent objects"""

 import datetime
 import logging
 import operator
@@ -50,10 +51,10 @@ from deluge.event import (
 log = logging.getLogger(__name__)

 LT_DEFAULT_ADD_TORRENT_FLAGS = (
-    lt.add_torrent_params_flags_t.flag_paused
-    | lt.add_torrent_params_flags_t.flag_auto_managed
-    | lt.add_torrent_params_flags_t.flag_update_subscribe
-    | lt.add_torrent_params_flags_t.flag_apply_ip_filter
+    lt.torrent_flags.paused
+    | lt.torrent_flags.auto_managed
+    | lt.torrent_flags.update_subscribe
+    | lt.torrent_flags.apply_ip_filter
 )

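These torrentmanager hunks move from the long-deprecated lt.add_torrent_params_flags_t names to the lt.torrent_flags namespace exposed by libtorrent 1.2 and later. A hedged sketch of composing the same default flag set; it requires the libtorrent Python bindings, and the printed value depends on the installed version:

    import libtorrent as lt

    DEFAULT_FLAGS = (
        lt.torrent_flags.paused
        | lt.torrent_flags.auto_managed
        | lt.torrent_flags.update_subscribe
        | lt.torrent_flags.apply_ip_filter
    )

    params = lt.add_torrent_params()
    params.flags = DEFAULT_FLAGS
    print(DEFAULT_FLAGS)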
@@ -202,34 +203,32 @@ class TorrentManager(component.Component):

         # Register alert functions
         alert_handles = [
-            'external_ip_alert',
-            'performance_alert',
-            'add_torrent_alert',
-            'metadata_received_alert',
-            'torrent_finished_alert',
-            'torrent_paused_alert',
-            'torrent_checked_alert',
-            'torrent_resumed_alert',
-            'tracker_reply_alert',
-            'tracker_announce_alert',
-            'tracker_warning_alert',
-            'tracker_error_alert',
-            'file_renamed_alert',
-            'file_error_alert',
-            'file_completed_alert',
-            'storage_moved_alert',
-            'storage_moved_failed_alert',
-            'state_update_alert',
-            'state_changed_alert',
-            'save_resume_data_alert',
-            'save_resume_data_failed_alert',
-            'fastresume_rejected_alert',
+            'external_ip',
+            'performance',
+            'add_torrent',
+            'metadata_received',
+            'torrent_finished',
+            'torrent_paused',
+            'torrent_checked',
+            'torrent_resumed',
+            'tracker_reply',
+            'tracker_announce',
+            'tracker_warning',
+            'tracker_error',
+            'file_renamed',
+            'file_error',
+            'file_completed',
+            'storage_moved',
+            'storage_moved_failed',
+            'state_update',
+            'state_changed',
+            'save_resume_data',
+            'save_resume_data_failed',
+            'fastresume_rejected',
         ]

         for alert_handle in alert_handles:
-            on_alert_func = getattr(
-                self, ''.join(['on_alert_', alert_handle.replace('_alert', '')])
-            )
+            on_alert_func = getattr(self, ''.join(['on_alert_', alert_handle]))
             self.alerts.register_handler(alert_handle, on_alert_func)

         # Define timers
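Dropping the '_alert' suffix from alert_handles matches the switch in AlertManager to keying handlers on alert.what(), which libtorrent reports without the suffix (a torrent_finished_alert answers 'torrent_finished'). A hedged sketch of a handler table keyed that way; FakeAlert stands in for a real libtorrent alert object:

    handlers = {}

    def register_handler(alert_type_name, handler):
        # Same shape as AlertManager.register_handler: one list of callbacks
        # per suffix-less alert name.
        handlers.setdefault(alert_type_name, []).append(handler)

    def dispatch(alert):
        # The lookup key no longer carries an '_alert' suffix.
        for handler in handlers.get(alert.what(), []):
            handler(alert)

    class FakeAlert:  # stand-in for lt.torrent_finished_alert
        def what(self):
            return 'torrent_finished'

    register_handler('torrent_finished', lambda a: print('finished:', a.what()))
    dispatch(FakeAlert())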
@@ -292,8 +291,8 @@ class TorrentManager(component.Component):
                 if torrent.options['remove_at_ratio']:
                     self.remove(torrent_id)
                     break
-                if not torrent.status.paused:
-                    torrent.pause()
+                torrent.pause()

     def __getitem__(self, torrent_id):
         """Return the Torrent with torrent_id.
@@ -369,11 +368,11 @@ class TorrentManager(component.Component):
         add_torrent_params.flags = (
             (
                 LT_DEFAULT_ADD_TORRENT_FLAGS
-                | lt.add_torrent_params_flags_t.flag_duplicate_is_error
-                | lt.add_torrent_params_flags_t.flag_upload_mode
+                | lt.torrent_flags.duplicate_is_error
+                | lt.torrent_flags.upload_mode
             )
-            ^ lt.add_torrent_params_flags_t.flag_auto_managed
-            ^ lt.add_torrent_params_flags_t.flag_paused
+            ^ lt.torrent_flags.auto_managed
+            ^ lt.torrent_flags.paused
         )

         torrent_handle = self.session.add_torrent(add_torrent_params)
@@ -481,16 +480,12 @@ class TorrentManager(component.Component):

         # Set flags: enable duplicate_is_error & override_resume_data, disable auto_managed.
         add_torrent_params['flags'] = (
-            LT_DEFAULT_ADD_TORRENT_FLAGS
-            | lt.add_torrent_params_flags_t.flag_duplicate_is_error
-            | lt.add_torrent_params_flags_t.flag_override_resume_data
-        ) ^ lt.add_torrent_params_flags_t.flag_auto_managed
+            LT_DEFAULT_ADD_TORRENT_FLAGS | lt.torrent_flags.duplicate_is_error
+        ) ^ lt.torrent_flags.auto_managed
         if options['seed_mode']:
-            add_torrent_params['flags'] |= lt.add_torrent_params_flags_t.flag_seed_mode
+            add_torrent_params['flags'] |= lt.torrent_flags.seed_mode
         if options['super_seeding']:
-            add_torrent_params[
-                'flags'
-            ] |= lt.add_torrent_params_flags_t.flag_super_seeding
+            add_torrent_params['flags'] |= lt.torrent_flags.super_seeding

         return torrent_id, add_torrent_params

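The flag expressions above first OR extra bits onto LT_DEFAULT_ADD_TORRENT_FLAGS and then XOR away bits known to be set in the defaults (paused, auto_managed), which clears them. A tiny sketch of that bit arithmetic with made-up flag values:

    PAUSED = 0b0001
    AUTO_MANAGED = 0b0010
    UPLOAD_MODE = 0b0100
    DUPLICATE_IS_ERROR = 0b1000

    DEFAULTS = PAUSED | AUTO_MANAGED  # 0b0011

    flags = (DEFAULTS | DUPLICATE_IS_ERROR | UPLOAD_MODE) ^ AUTO_MANAGED ^ PAUSED
    # XOR only works as "clear" because both bits are guaranteed set in DEFAULTS.
    assert flags == DUPLICATE_IS_ERROR | UPLOAD_MODE
    print(bin(flags))  # 0b1100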
@@ -166,7 +166,8 @@ def deprecated(func):

 class CoroutineDeferred(defer.Deferred):
     """Wraps a coroutine in a Deferred.
-    It will dynamically pass through the underlying coroutine without wrapping where apporpriate."""
+    It will dynamically pass through the underlying coroutine without wrapping where apporpriate.
+    """

     def __init__(self, coro: Coroutine):
         # Delay this import to make sure a reactor was installed first
@@ -195,17 +196,33 @@ class CoroutineDeferred(defer.Deferred):
             d = defer.ensureDeferred(self.coro)
             d.chainDeferred(self)

-    def addCallbacks(self, *args, **kwargs):  # noqa: N802
+    def _callback_activate(self):
+        """Verify awaited status before calling activate."""
         assert not self.awaited, 'Cannot add callbacks to an already awaited coroutine.'
         self.activate()
+
+    def addCallback(self, *args, **kwargs):  # noqa: N802
+        self._callback_activate()
+        return super().addCallback(*args, **kwargs)
+
+    def addCallbacks(self, *args, **kwargs):  # noqa: N802
+        self._callback_activate()
         return super().addCallbacks(*args, **kwargs)

+    def addErrback(self, *args, **kwargs):  # noqa: N802
+        self._callback_activate()
+        return super().addErrback(*args, **kwargs)
+
+    def addBoth(self, *args, **kwargs):  # noqa: N802
+        self._callback_activate()
+        return super().addBoth(*args, **kwargs)


 _RetT = TypeVar('_RetT')


 def maybe_coroutine(
-    f: Callable[..., Coroutine[Any, Any, _RetT]]
+    f: Callable[..., Coroutine[Any, Any, _RetT]],
 ) -> 'Callable[..., defer.Deferred[_RetT]]':
     """Wraps a coroutine function to make it usable as a normal function that returns a Deferred."""
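Routing addCallback, addCallbacks, addErrback and addBoth through one _callback_activate() helper means every way of attaching a callback first asserts the coroutine has not already been awaited and then activates it. A reduced sketch of that "activate on first callback" idea, independent of Twisted's real Deferred; LazyTask and its methods are invented for illustration:

    class LazyTask:
        """Toy stand-in for CoroutineDeferred's activation guard."""

        def __init__(self, work):
            self.work = work
            self.awaited = False
            self.activated = False

        def _callback_activate(self):
            assert not self.awaited, 'Cannot add callbacks to an already awaited coroutine.'
            if not self.activated:
                self.activated = True
                self.result = self.work()

        def add_callback(self, fn):
            self._callback_activate()
            return fn(self.result)

        def add_both(self, fn):
            # Every attachment path funnels through the same guard.
            return self.add_callback(fn)

    task_ = LazyTask(lambda: 42)
    print(task_.add_both(lambda value: value + 1))  # 43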
@@ -13,6 +13,7 @@ This module describes the types of events that can be generated by the daemon
 and subsequently emitted to the clients.

 """

 known_events = {}

@@ -6,7 +6,7 @@
 # See LICENSE for more details.
 #

-import cgi
+import email.message
 import logging
 import os.path
 import zlib
@@ -21,8 +21,6 @@ from twisted.web.http_headers import Headers
 from twisted.web.iweb import IAgent
 from zope.interface import implementer

-from deluge.common import get_version
-
 log = logging.getLogger(__name__)

@@ -133,9 +131,10 @@ class HTTPDownloaderAgent:
             content_disp = headers.getRawHeaders(b'content-disposition')[0].decode(
                 'utf-8'
             )
-            content_disp_params = cgi.parse_header(content_disp)[1]
-            if 'filename' in content_disp_params:
-                new_file_name = content_disp_params['filename']
+            message = email.message.EmailMessage()
+            message['content-disposition'] = content_disp
+            new_file_name = message.get_filename()
+            if new_file_name:
                 new_file_name = sanitise_filename(new_file_name)
                 new_file_name = os.path.join(
                     os.path.split(self.filename)[0], new_file_name
@@ -152,7 +151,10 @@ class HTTPDownloaderAgent:
             self.filename = new_file_name

         cont_type_header = headers.getRawHeaders(b'content-type')[0].decode()
-        cont_type, params = cgi.parse_header(cont_type_header)
+        message = email.message.EmailMessage()
+        message['content-type'] = cont_type_header
+        cont_type = message.get_content_type()
+        params = message['content-type'].params
         # Only re-ecode text content types.
         encoding = None
         if cont_type.startswith('text/'):
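Both httpdownloader hunks replace cgi.parse_header, which disappears with the cgi module in Python 3.13, with email.message.EmailMessage, which can parse the same Content-Disposition and Content-Type headers. A small sketch of the calls the new code depends on, with sample header values invented for illustration:

    import email.message

    disp = email.message.EmailMessage()
    disp['content-disposition'] = 'attachment; filename="ubuntu-22.04.torrent"'
    print(disp.get_filename())                 # ubuntu-22.04.torrent

    ctype = email.message.EmailMessage()
    ctype['content-type'] = 'text/html; charset=ISO-8859-1'
    print(ctype.get_content_type())            # text/html
    print(dict(ctype['content-type'].params))  # {'charset': 'ISO-8859-1'}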
@@ -179,8 +181,7 @@ class HTTPDownloaderAgent:
         headers = Headers()

         if not headers.hasHeader(b'User-Agent'):
-            version = get_version()
-            user_agent = 'Deluge/%s (https://deluge-torrent.org)' % version
+            user_agent = 'Deluge'
             headers.addRawHeader('User-Agent', user_agent)

         d = self.agent.request(
deluge/i18n/: translation catalogs were regenerated in this comparison; each per-file diff is suppressed because it is too large. New files: ab.po, deluge.pot, mo.po. Updated files: af.po, ar.po, ast.po, be.po, bg.po, bn.po, bs.po, ca.po, cs.po, cy.po, da.po, de.po, el.po, en_AU.po, en_CA.po, en_GB.po, eo.po, es.po, et.po, eu.po, fa.po, fi.po, fo.po, fr.po, fy.po, ga.po, gl.po, he.po, hi.po, hr.po, hu.po, id.po, is.po, it.po, iu.po, ja.po, ka.po, kk.po, km.po, kn.po, ko.po, ku.po, ky.po, la.po, lb.po, lt.po, lv.po, mk.po, ml.po, ms.po, nap.po, nb.po, nds.po, nl.po, nn.po, oc.po, pl.po, pms.po, pt.po, pt_BR.po, ro.po, ru.po, si.po, sk.po, sl.po, sr.po, sv.po, ta.po, te.po, th.po. Some files were not shown because too many files have changed in this diff.