Initial commit

Your Name 2025-09-19 11:54:55 +08:00
commit d4676bf51a
6860 changed files with 1556312 additions and 0 deletions

.auto-changelog Normal file

@@ -0,0 +1,5 @@
{
"template": "./changelog-dev.hbs",
"ignoreCommitPattern": "^(Release|no log:|Merge.remote-tracking).*",
"sortCommits": "date-desc"
}

.gitattributes vendored Normal file

@@ -0,0 +1,3 @@
static/jquery/* linguist-vendored
static/semantic/* linguist-vendored
*.tpl linguist-language=Python

.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file

@@ -0,0 +1,30 @@
---
name: "\U0001F41B Bug report"
about: Create a report to help us improve
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Software (please complete the following information):**
- Bazarr: [e.g. v 0.6.1]
- Radarr: [e.g. v 0.2.0.0001]
- Sonarr: [e.g. v 2.0.0.0001]
- OS: [e.g. Windows 10]
**Additional context**
Add any other context about the problem here.

.github/ISSUE_TEMPLATE/config.yml vendored Normal file

@@ -0,0 +1,11 @@
blank_issues_enabled: false
contact_links:
- name: 📗 Wiki
url: https://github.com/morpheus65535/bazarr/wiki
about: The Bazarr wiki should help guide you through installation and setup as well as help resolve common problems and answer frequently asked questions.
- name: 🚀 Feature suggestions
url: https://bazarr.featureupvote.com/
about: Share your suggestions or ideas to make Bazarr better!
- name: 🌐 Discord Support
url: https://discord.gg/MH2e2eb
about: Ask questions and talk about Bazarr

.github/dependabot.yml vendored Normal file

@@ -0,0 +1,31 @@
version: 2
updates:
- package-ecosystem: 'npm'
directory: '/frontend'
schedule:
interval: 'weekly'
commit-message:
prefix: "[bot]"
open-pull-requests-limit: 1
target-branch: "development"
groups:
fortawesome:
patterns:
- "@fortawesome*"
mantine:
patterns:
- "@mantine*"
react:
patterns:
- "react"
- "react-dom"
- "@types/react"
- "@types/react-dom"
- package-ecosystem: 'github-actions'
directory: '/'
schedule:
interval: 'weekly'
commit-message:
prefix: "[workflow]"
open-pull-requests-limit: 1
target-branch: "development"

.github/files_to_copy vendored Normal file

@@ -0,0 +1,8 @@
bazarr
custom_libs
frontend/build
libs
bazarr.py
requirements.txt
postgres-requirements.txt
migrations

.github/scripts/build_test.sh vendored Executable file

@@ -0,0 +1,22 @@
#!/bin/bash
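# ROOT_DIRECTORY must point at the repo root; the test workflow exports it before calling this script.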
python3 "${ROOT_DIRECTORY}"/bazarr.py --no-update &
PID=$!
sleep 30
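# kill -s 0 sends no signal; it only checks that the process is still alive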
if kill -s 0 $PID
then
echo "Bazarr is still running. We'll test if UI is working..."
else
exit 1
fi
exitcode=0
curl -fsSL --retry-all-errors --retry 60 --retry-max-time 120 --max-time 10 "http://127.0.0.1:6767" --output /dev/null || exitcode=$?
[[ ${exitcode} == 0 ]] && echo "UI is responsive, good news!" || echo "Oops, UI isn't reachable, bad news..."
echo "Let's stop Bazarr before we exit..."
pkill -INT -P $PID
exit ${exitcode}

.github/scripts/create_asset.sh vendored Executable file

@@ -0,0 +1,28 @@
#! /bin/bash
# This script is used in release-it as hook
# Change how this script is triggered by editing .release-it.json
# NOTE: Please make sure the working directory is the root of the repo
# NOTE: This script only works on Linux
set -e
# Get version from tag
git describe --abbrev=0 > VERSION
# Copy files based on files_to_copy
to_dist=__builds__/bazarr
mkdir -p $to_dist
file_list=$(cat .github/files_to_copy)
for f in $file_list
do
echo "**** copying $f to release ****"
cp -r --parents "$f" $to_dist
done
# COPY VERSION file
cp VERSION $to_dist
pushd __builds__/bazarr
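# -b "$(mktemp -d)" keeps zip's temporary work files in a scratch directory outside the archive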
zip -r ../bazarr.zip . -b "$(mktemp -d)"
popd
rm -rf $to_dist

.github/scripts/create_changelog.sh vendored Executable file

@@ -0,0 +1,14 @@
#! /bin/bash
# This script is used in release-it as changelog
# export RELEASE_MASTER=1 to release master changelog
set -e
# Latest stable tag (vX.Y.Z), used for master releases
master_version=$(git describe --tags --abbrev=0 --match "v[0-9].[0-9].[0-9]")
# Latest tag of any kind, including beta pre-releases
latest_version=$(git describe --tags --abbrev=0)
if [[ $RELEASE_MASTER -eq 1 ]]; then
auto-changelog --stdout -t changelog-master.hbs --starting-version "$master_version" --commit-limit 3
else
auto-changelog --stdout --starting-version "$latest_version" --unreleased-only --commit-limit false
fi

.github/scripts/pre_check.sh vendored Executable file

@@ -0,0 +1,18 @@
#! /bin/bash
# This script is used in release-it as hook
# Change how this script is triggered by editing .release-it.json
# NOTE: Please make sure the working directory is the root of the repo
set -e
file_list=$(cat .github/files_to_copy)
for f in $file_list
do
echo "**** checking $f ****"
if [ ! -f "$f" ] && [ ! -d "$f" ]; then
echo "**** $f doesn't exist, skipping release ****"
exit 1
fi
done
echo "**** pre-check is finished ****"

.github/workflows/ci.yml vendored Normal file

@@ -0,0 +1,105 @@
name: CI
on:
push:
branches: [development]
paths:
- frontend/**
- bazarr/**
- libs/**
- migrations/**
- bazarr.py
- requirements.txt
- dev-requirements.txt
- .github/workflows/ci.yml
pull_request:
branches: [development]
env:
UI_DIRECTORY: ./frontend
UI_ARTIFACT_NAME: ui
jobs:
Frontend:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Cache node_modules
uses: actions/cache@v4
with:
path: "${{ env.UI_DIRECTORY }}/node_modules"
key: ${{ runner.os }}-modules-${{ hashFiles('**/package-lock.json') }}
restore-keys: ${{ runner.os }}-modules-
- name: Setup NodeJS
uses: actions/setup-node@v4
with:
node-version-file: "${{ env.UI_DIRECTORY }}/.nvmrc"
- name: Install dependencies
run: npm install
working-directory: ${{ env.UI_DIRECTORY }}
- name: Check Types
run: npm run check:ts
working-directory: ${{ env.UI_DIRECTORY }}
- name: Check Styles
run: npm run check
working-directory: ${{ env.UI_DIRECTORY }}
- name: Check Format
run: npm run check:fmt
working-directory: ${{ env.UI_DIRECTORY }}
- name: Unit Test
run: npm test
working-directory: ${{ env.UI_DIRECTORY }}
- name: Build
run: npm run build:ci
working-directory: ${{ env.UI_DIRECTORY }}
- uses: actions/upload-artifact@v4
with:
name: ${{ env.UI_ARTIFACT_NAME }}
path: "${{ env.UI_DIRECTORY }}/build"
Backend:
runs-on: ubuntu-latest
needs: Frontend
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Set up Python 3.8
uses: actions/setup-python@v5
with:
python-version: "3.8"
- name: Install UI
uses: actions/download-artifact@v4
with:
name: ${{ env.UI_ARTIFACT_NAME }}
path: "${{ env.UI_DIRECTORY }}/build"
- name: Install Python dependencies
run: pip install -r requirements.txt
- name: Unit Tests
run: |
python3 bazarr.py --no-update &
PID=$!
sleep 15
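# signal 0 doesn't kill anything; it just checks that the PID is still alive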
if kill -s 0 $PID
then
echo "**** Bazarr launch successful ****"
kill $PID
exit 0
else
echo "**** Bazarr launch failed ****"
exit 1
fi

.github/workflows/release_beta_to_dev.yaml vendored Normal file

@@ -0,0 +1,74 @@
name: release_beta_to_dev
on: workflow_dispatch
jobs:
Release:
runs-on: ubuntu-latest
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
STATUS_WORKFLOW_NAME: "ci.yml"
UI_DIRECTORY: ./frontend
ASSET_DIRECTORY: ./__builds__
FETCH_DEPTH: 15 # Should be enough
steps:
- name: Validate branch
if: ${{ github.ref != 'refs/heads/development' }}
run: |
echo This action can only be run on development branch, not ${{ github.ref }}
exit 1
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: ${{ env.FETCH_DEPTH }}
ref: development
- name: Setup Git
run: |
git config --global user.name "github-actions" &&
git fetch --depth ${{ env.FETCH_DEPTH }} --tags
- name: Cache node_modules
uses: actions/cache@v4
with:
path: "${{ env.UI_DIRECTORY }}/node_modules"
key: ${{ runner.os }}-modules-${{ hashFiles('**/package-lock.json') }}
restore-keys: ${{ runner.os }}-modules-
- name: Setup NodeJS
uses: actions/setup-node@v4
with:
node-version-file: "${{ env.UI_DIRECTORY }}/.nvmrc"
- name: Install Global Tools
run: npm install -g release-it auto-changelog
- name: Install UI Dependencies
run: npm install
working-directory: ${{ env.UI_DIRECTORY }}
- name: Build UI
run: npm run build
working-directory: ${{ env.UI_DIRECTORY }}
- name: Validate CI
id: check-ci
uses: LASER-Yi/workflow-status@v0.1.0
with:
token: ${{ secrets.GITHUB_TOKEN }}
workflow: ci.yml
event: push
branch: development
- name: Create Release (Conditional)
if: ${{ steps.check-ci.outputs.conclusion == 'success' }}
run: |
git config user.name "${{github.actor}}"
git config user.email "${{github.actor}}@users.noreply.github.com"
revision_count=$(git rev-list --invert-grep --regexp-ignore-case --extended-regexp --grep="^(Release|no log:|Merge.remote-tracking).*" $(git describe --tags --abbrev=0)..HEAD --count)
if [[ $revision_count != 0 ]]; then
echo "**** Found $revision_count changes! Releasing... ****"
release-it --ci --increment prerelease --preRelease=beta
else
echo "**** Cannot find changes! Skipping... ****"
fi

.github/workflows/release_dev_to_master.yaml vendored Normal file

@@ -0,0 +1,75 @@
name: release_dev_to_master
on:
workflow_dispatch:
inputs:
increment:
description: "Increment Type (major, minor, patch)"
required: true
default: "patch"
jobs:
Release:
runs-on: ubuntu-latest
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
UI_DIRECTORY: ./frontend
ASSET_DIRECTORY: ./__builds__
steps:
- name: Validate branch
if: ${{ github.ref != 'refs/heads/development' }}
run: |
echo This action can only be run on development branch, not ${{ github.ref }}
exit 1
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: development
- name: Setup Git
run: git config --global user.name "github-actions"
- name: Cache node_modules
uses: actions/cache@v4
with:
path: "${{ env.UI_DIRECTORY }}/node_modules"
key: ${{ runner.os }}-modules-${{ hashFiles('**/package-lock.json') }}
restore-keys: ${{ runner.os }}-modules-
- name: Setup NodeJS
uses: actions/setup-node@v4
with:
node-version-file: "${{ env.UI_DIRECTORY }}/.nvmrc"
- name: Install Global Tools
run: npm install -g release-it auto-changelog
- name: Install UI Dependencies
run: npm install
working-directory: ${{ env.UI_DIRECTORY }}
- name: Build UI
run: npm run build
working-directory: ${{ env.UI_DIRECTORY }}
- name: Create Release
run: |
git config user.name "${{github.actor}}"
git config user.email "${{github.actor}}@users.noreply.github.com"
export RELEASE_MASTER=1
release-it --ci --increment ${{ github.event.inputs.increment }}
Merge:
needs: Release
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Merge development -> master
uses: devmasx/merge-branch@1.4.0
with:
type: now
from_branch: development
target_branch: master
github_token: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/schedule.yaml vendored Normal file

@@ -0,0 +1,17 @@
name: Schedule Trigger
on:
schedule:
- cron: '0 6 * * *'
workflow_dispatch:
jobs:
Release-Nightly:
runs-on: ubuntu-latest
steps:
- name: Execute
uses: benc-uk/workflow-dispatch@v1.2.4
with:
workflow: "release_beta_to_dev"
token: ${{ secrets.WF_GITHUB_TOKEN }}
ref: "refs/heads/development"

.github/workflows/test_bazarr_execution.yml vendored Normal file

@@ -0,0 +1,49 @@
name: test_bazarr_execution
on: workflow_dispatch
jobs:
Test:
runs-on: ubuntu-latest
env:
ROOT_DIRECTORY: .
SCRIPTS_DIRECTORY: .github/scripts
UI_DIRECTORY: ./frontend
FETCH_DEPTH: 15 # Should be enough
steps:
- name: Validate branch
if: ${{ github.ref != 'refs/heads/development' }}
run: |
echo This action can only be run on development branch, not ${{ github.ref }}
exit 1
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: ${{ env.FETCH_DEPTH }}
ref: development
- name: Setup NodeJS
uses: actions/setup-node@v4
with:
node-version-file: "${{ env.UI_DIRECTORY }}/.nvmrc"
- name: Install UI Dependencies
run: npm install
working-directory: ${{ env.UI_DIRECTORY }}
- name: Build UI
run: npm run build
working-directory: ${{ env.UI_DIRECTORY }}
- name: Set up Python 3.8
uses: actions/setup-python@v5
with:
python-version: "3.8"
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install -r '${{ env.ROOT_DIRECTORY }}/requirements.txt'
- name: Test Bazarr execution
run: |
bash '${{ env.SCRIPTS_DIRECTORY }}/build_test.sh'

.gitignore vendored Normal file

@@ -0,0 +1,26 @@
*.pyc
cachefile.dbm
bazarr.pid
/venv
/data
/bin
# IDE
.idea
.vscode
# LSP
pyrightconfig.json
# Middleware
VERSION
# CI
/__builds__
/package.json
/package-lock.json
# Allow
!*.dll

.release-it.json Normal file

@@ -0,0 +1,23 @@
{
"git": {
"requireCleanWorkingDir": false,
"changelog": ".github/scripts/create_changelog.sh",
"tagName": "v${version}"
},
"github": {
"release": true,
"releaseName": "v${version}",
"assets": ["__builds__/*.zip"]
},
"npm": {
"publish": false,
"ignoreVersion": true
},
"hooks": {
"before:init": [
"chmod +x .github/scripts/*",
".github/scripts/pre_check.sh"
],
"after:git:release": ".github/scripts/create_asset.sh"
}
}

CONTRIBUTING.md Normal file

@@ -0,0 +1,45 @@
# How to Contribute
## Tools required
- Python 3.8.x to 3.11.x (3.10.x is highly recommended; 3.12 or greater is proscribed).
- PyCharm or Visual Studio Code are recommended, but if you're happy with Vim, enjoy it!
- Git.
- UI testing must be done using the latest version of Chrome.
## Warning
Since Git is used in the development process, you should disable Bazarr's automatic update in the UI, or your changes may get overwritten. Alternatively, you can disable the update module entirely by running Bazarr with the `--no-update` command-line argument.
## Branching
### Basic rules
- `master` contains only stable releases (merged from `development`) and is intended for end-users.
- `development` is the target for testing (around 10% of users) and is not intended for end-users looking for stability.
- `feature` is a temporary feature branch based on `development`.
### Conditions
- `master` is not merged back to `development`.
- `development` is not re-based on `master`.
- all `feature` branches are branched from `development` only.
- Bugfixes created specifically for a feature branch are done there (because they are specific, they're not cherry-picked to `development`).
- We will not release a patch (1.0.x) if a newer minor (1.x.0) has already been released. We only go forward.
## Typical contribution workflow
### Community devs
- Fork the repository or pull the latest changes if you already have forked it.
- Checkout `development` branch.
- Make the desired changes.
- Submit a PR to Bazarr `development` branch.
- Once reviewed, your PR will be merged using Squash and Merge with a meaningful commit message matching our standards.
### Official devs team
- All commits must have a meaningful commit message (e.g., Fixed issue with this, Improved process abc, Added input field to UI).
- Fixes can be made directly on the `development` branch, but keep in mind that a beta pre-release will be created every day a new push is made.
- Features must be developed in a dedicated feature branch and merged back to the `development` branch via a PR.
- Once reviewed, your PR will be merged by morpheus65535 using Squash and Merge with a meaningful message.

LICENSE Normal file

@@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.

README.md Normal file

@@ -0,0 +1,108 @@
# bazarr
Bazarr is a companion application to Sonarr and Radarr. It manages and downloads subtitles based on your requirements. You define your preferences by TV show or movie, and Bazarr takes care of everything for you.
Be aware that Bazarr doesn't scan your disk to detect series and movies; it only manages the series and movies that are indexed in Sonarr and Radarr.
Thanks to the folks at OpenSubtitles for their logo, which was an inspiration for ours.
## Support on PayPal
At the request of some, here is a way to demonstrate your appreciation for the efforts made in the development of Bazarr:
[![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=XHHRWXT9YB7WE&source=url)
# Status
[![GitHub issues](https://img.shields.io/github/issues/morpheus65535/bazarr.svg?style=flat-square)](https://github.com/morpheus65535/bazarr/issues)
[![GitHub stars](https://img.shields.io/github/stars/morpheus65535/bazarr.svg?style=flat-square)](https://github.com/morpheus65535/bazarr/stargazers)
[![Docker Pulls](https://img.shields.io/docker/pulls/linuxserver/bazarr.svg?style=flat-square)](https://hub.docker.com/r/linuxserver/bazarr/)
[![Docker Pulls](https://img.shields.io/docker/pulls/hotio/bazarr.svg?style=flat-square)](https://hub.docker.com/r/hotio/bazarr/)
[![Discord](https://img.shields.io/badge/discord-chat-MH2e2eb.svg?style=flat-square)](https://discord.gg/MH2e2eb)
# Support
For installation and configuration instructions, see the [wiki](https://wiki.bazarr.media).
You can reach us for support on [Discord](https://discord.gg/MH2e2eb).
If you find a bug, please open an issue on [GitHub](https://github.com/morpheus65535/bazarr/issues).
# Feature Requests
If you need something that is not already part of Bazarr, feel free to create a feature request on [Feature Upvote](http://features.bazarr.media).
## Major Features Include:
- Support for major platforms: Windows, Linux, macOS, Raspberry Pi, etc.
- Automatically add new series and episodes from Sonarr
- Automatically add new movies from Radarr
- Per-series or per-movie configuration of subtitle languages
- Scan your existing library for internal and external subtitles and download any that are missing
- Keep a history of what was downloaded, from where and when
- Manual search so you can download subtitles on demand
- Upgrade previously downloaded subtitles when a better one is found
- Ability to delete external subtitles from disk
- Currently supports 184 subtitle languages, with support for forced/foreign subtitles (depending on the provider)
- And a beautiful UI based on Sonarr
## Supported subtitle providers:
- Addic7ed
- AnimeKalesi
- Animetosho (requires AniDb HTTP API client described [here](https://wiki.anidb.net/HTTP_API_Definition))
- Assrt
- AvistaZ, CinemaZ (Get session cookies using method described [here](https://github.com/morpheus65535/bazarr/pull/2375#issuecomment-2057010996))
- BetaSeries
- BSplayer
- Embedded Subtitles
- Gestdown.info
- GreekSubs
- GreekSubtitles
- HDBits.org
- Hosszupuska
- Karagarga.in
- Ktuvit (Get `hashed_password` using method described [here](https://github.com/XBMCil/service.subtitles.ktuvit))
- LegendasDivx
- Legendas.net
- Napiprojekt
- Napisy24
- Nekur
- OpenSubtitles.com
- OpenSubtitles.org (VIP users only)
- Podnapisi
- RegieLive
- Sous-Titres.eu
- Subdivx
- subf2m.co
- Subs.sab.bz
- Subs4Free
- Subs4Series
- Subscene
- Subscenter
- Subsunacs.net
- SubSynchro
- Subtitrari-noi.ro
- subtitri.id.lv
- Subtitulamos.tv
- Supersubtitles
- Titlovi
- Titrari.ro
- Titulky.com
- Turkcealtyazi.org
- TuSubtitulo
- TVSubtitles
- Whisper (requires [ahmetoner/whisper-asr-webservice](https://github.com/ahmetoner/whisper-asr-webservice))
- Wizdom
- XSubs
- Yavka.net
- YIFY Subtitles
- Zimuku
## Screenshot
![Bazarr](/screenshot/bazarr-screenshot.png?raw=true "Bazarr")
### License
- [GNU GPL v3](http://www.gnu.org/licenses/gpl.html)
- Copyright 2010-2024

bazarr.py Normal file

@@ -0,0 +1,152 @@
# coding=utf-8
import os
import platform
import signal
import subprocess
import sys
import time
from bazarr.app.get_args import args
from bazarr.literals import EXIT_PYTHON_UPGRADE_NEEDED, EXIT_NORMAL, FILE_RESTART, FILE_STOP, ENV_RESTARTFILE, ENV_STOPFILE, EXIT_INTERRUPT
def exit_program(status_code):
print(f'Bazarr exited with status code {status_code}.')
raise SystemExit(status_code)
def check_python_version():
python_version = platform.python_version_tuple()
minimum_py3_tuple = (3, 8, 0)
minimum_py3_str = ".".join(str(i) for i in minimum_py3_tuple)
if int(python_version[0]) < minimum_py3_tuple[0]:
print("Python " + minimum_py3_str + " or greater required. "
"Current version is " + platform.python_version() + ". Please upgrade Python.")
exit_program(EXIT_PYTHON_UPGRADE_NEEDED)
elif int(python_version[0]) == 3 and int(python_version[1]) > 12:
print("Python version greater than 3.12.x is unsupported. Current version is " + platform.python_version() +
". Keep in mind that even if it works, you're on your own.")
elif (int(python_version[0]) == minimum_py3_tuple[0] and int(python_version[1]) < minimum_py3_tuple[1]) or \
(int(python_version[0]) != minimum_py3_tuple[0]):
print("Python " + minimum_py3_str + " or greater required. "
"Current version is " + platform.python_version() + ". Please upgrade Python.")
exit_program(EXIT_PYTHON_UPGRADE_NEEDED)
def get_python_path():
if sys.platform == "darwin":
# Do not run Python from within macOS framework bundle.
python_bundle_path = os.path.join(sys.base_exec_prefix, "Resources", "Python.app", "Contents", "MacOS", "Python")
if os.path.exists(python_bundle_path):
import tempfile
python_path = os.path.join(tempfile.mkdtemp(), "python")
os.symlink(python_bundle_path, python_path)
return python_path
return sys.executable
check_python_version()
dir_name = os.path.dirname(__file__)
def start_bazarr():
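# Relaunch bazarr/main.py with the same interpreter and forward all command-line arguments.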
script = [get_python_path(), "-u", os.path.normcase(os.path.join(dir_name, 'bazarr', 'main.py'))] + sys.argv[1:]
ep = subprocess.Popen(script, stdout=None, stderr=None, stdin=subprocess.DEVNULL, env=os.environ)
print(f"Bazarr starting child process with PID {ep.pid}...")
return ep
def terminate_child():
print(f"Terminating child process with PID {child_process.pid}")
child_process.terminate()
def get_stop_status_code(input_file):
try:
with open(input_file, 'r') as file:
# read status code from file, if it exists
line = file.readline()
try:
status_code = int(line)
except (ValueError, TypeError):
status_code = EXIT_NORMAL
except Exception:
status_code = EXIT_NORMAL
return status_code
def check_status():
global child_process
if os.path.exists(stop_file):
status_code = get_stop_status_code(stop_file)
try:
print("Deleting stop file...")
os.remove(stop_file)
except Exception:
print('Unable to delete stop file.')
finally:
terminate_child()
exit_program(status_code)
if os.path.exists(restart_file):
try:
print("Deleting restart file...")
os.remove(restart_file)
except Exception:
print('Unable to delete restart file.')
finally:
terminate_child()
print("Bazarr is restarting...")
child_process = start_bazarr()
def interrupt_handler(signum, frame):
# catch and ignore keyboard interrupt Ctrl-C
# the child process Server object will catch SIGINT and perform an orderly shutdown
global interrupted
if not interrupted:
# ignore user hammering Ctrl-C; we heard you the first time!
interrupted = True
print('Handling keyboard interrupt...')
else:
print("Stop doing that! I heard you the first time!")
if __name__ == '__main__':
interrupted = False
signal.signal(signal.SIGINT, interrupt_handler)
restart_file = os.path.join(args.config_dir, FILE_RESTART)
stop_file = os.path.join(args.config_dir, FILE_STOP)
os.environ[ENV_STOPFILE] = stop_file
os.environ[ENV_RESTARTFILE] = restart_file
# Cleanup leftover files
try:
os.remove(restart_file)
except FileNotFoundError:
pass
try:
os.remove(stop_file)
except FileNotFoundError:
pass
# Initial start of main bazarr process
child_process = start_bazarr()
# Keep the script running forever until stop is requested through term, special files or keyboard interrupt
while True:
check_status()
try:
time.sleep(5)
except (KeyboardInterrupt, SystemExit, ChildProcessError):
# this code should never be reached, if signal handling is working properly
print('Bazarr exited main script file via keyboard interrupt.')
exit_program(EXIT_INTERRUPT)

1
bazarr/__init__.py Normal file
View File

@ -0,0 +1 @@
# coding=utf-8

51
bazarr/api/__init__.py Normal file
View File

@ -0,0 +1,51 @@
# coding=utf-8
from flask import Blueprint, url_for
from flask_restx import Api, apidoc
from .badges import api_ns_list_badges
from .episodes import api_ns_list_episodes
from .files import api_ns_list_files
from .history import api_ns_list_history
from .movies import api_ns_list_movies
from .providers import api_ns_list_providers
from .series import api_ns_list_series
from .subtitles import api_ns_list_subtitles
from .system import api_ns_list_system
from .webhooks import api_ns_list_webhooks
from .swaggerui import swaggerui_api_params
api_ns_list = [
api_ns_list_badges,
api_ns_list_episodes,
api_ns_list_files,
api_ns_list_history,
api_ns_list_movies,
api_ns_list_providers,
api_ns_list_series,
api_ns_list_subtitles,
api_ns_list_system,
api_ns_list_webhooks,
]
authorizations = {
'apikey': {
'type': 'apiKey',
'in': 'header',
'name': 'X-API-KEY'
}
}
api_bp = Blueprint('api', __name__, url_prefix='/api')
@apidoc.apidoc.add_app_template_global
def swagger_static(filename):
return url_for('ui.swaggerui_static', filename=filename)
api = Api(api_bp, authorizations=authorizations, security='apikey', validate=True, **swaggerui_api_params)
for api_ns in api_ns_list:
for item in api_ns:
api.add_namespace(item, "/")
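Every namespace registered above sits behind the apikey security scheme declared in authorizations, so clients must send the key in an X-API-KEY header on each request. A minimal sketch with the requests library, assuming Bazarr listens on its default http://localhost:6767 and the key is exported as BAZARR_API_KEY (both assumptions):

# coding=utf-8
# Sketch only: a session pre-loaded with X-API-KEY for the blueprint mounted at /api.
import os
import requests

BASE_URL = 'http://localhost:6767/api'   # assumed bind address and port
session = requests.Session()
session.headers['X-API-KEY'] = os.environ['BAZARR_API_KEY']  # hypothetical env var

# any namespace route can now be called relative to BASE_URL, e.g. a route
# assumed to exist in the system namespace imported above:
resp = session.get(f'{BASE_URL}/system/status', timeout=10)
resp.raise_for_status()   # 401 Not Authenticated without a valid key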

View File

@ -0,0 +1,7 @@
# coding=utf-8
from .badges import api_ns_badges
api_ns_list_badges = [
api_ns_badges
]

View File

@ -0,0 +1,76 @@
# coding=utf-8
import operator
import ast
from functools import reduce
from flask_restx import Resource, Namespace, fields, marshal
from app.database import get_exclusion_clause, TableEpisodes, TableShows, TableMovies, database, select
from app.get_providers import get_throttled_providers
from app.signalr_client import sonarr_signalr_client, radarr_signalr_client
from app.announcements import get_all_announcements
from utilities.health import get_health_issues
from ..utils import authenticate
api_ns_badges = Namespace('Badges', description='Get badges count to update the UI (episodes and movies wanted '
                                                'subtitles, providers with issues, health issues and announcements).')
@api_ns_badges.route('badges')
class Badges(Resource):
get_model = api_ns_badges.model('BadgesGet', {
'episodes': fields.Integer(),
'movies': fields.Integer(),
'providers': fields.Integer(),
'status': fields.Integer(),
'sonarr_signalr': fields.String(),
'radarr_signalr': fields.String(),
'announcements': fields.Integer(),
})
@authenticate
@api_ns_badges.response(401, 'Not Authenticated')
@api_ns_badges.doc(parser=None)
def get(self):
"""Get badges count to update the UI"""
episodes_conditions = [(TableEpisodes.missing_subtitles.is_not(None)),
(TableEpisodes.missing_subtitles != '[]')]
episodes_conditions += get_exclusion_clause('series')
missing_episodes = database.execute(
select(TableEpisodes.missing_subtitles)
.select_from(TableEpisodes)
.join(TableShows)
.where(reduce(operator.and_, episodes_conditions))) \
.all()
missing_episodes_count = 0
for episode in missing_episodes:
missing_episodes_count += len(ast.literal_eval(episode.missing_subtitles))
movies_conditions = [(TableMovies.missing_subtitles.is_not(None)),
(TableMovies.missing_subtitles != '[]')]
movies_conditions += get_exclusion_clause('movie')
missing_movies = database.execute(
select(TableMovies.missing_subtitles)
.select_from(TableMovies)
.where(reduce(operator.and_, movies_conditions))) \
.all()
missing_movies_count = 0
for movie in missing_movies:
missing_movies_count += len(ast.literal_eval(movie.missing_subtitles))
throttled_providers = len(get_throttled_providers())
health_issues = len(get_health_issues())
result = {
"episodes": missing_episodes_count,
"movies": missing_movies_count,
"providers": throttled_providers,
"status": health_issues,
'sonarr_signalr': "LIVE" if sonarr_signalr_client.connected else "",
'radarr_signalr': "LIVE" if radarr_signalr_client.connected else "",
'announcements': len(get_all_announcements()),
}
return marshal(result, self.get_model)
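Since the handler only runs counting queries, this endpoint is cheap enough to poll. A sketch of reading it from a script, under the same base-URL and API-key assumptions as the earlier example:

# coding=utf-8
# Sketch only: fetch GET /api/badges and print the counters defined in BadgesGet.
import os
import requests

BASE_URL = 'http://localhost:6767/api'                  # assumed
HEADERS = {'X-API-KEY': os.environ['BAZARR_API_KEY']}   # hypothetical env var

badges = requests.get(f'{BASE_URL}/badges', headers=HEADERS, timeout=10).json()
# episodes/movies count individual missing languages, not media items,
# because each missing_subtitles list is flattened with ast.literal_eval()
print(f"wanted episodes: {badges['episodes']}, wanted movies: {badges['movies']}")
print(f"throttled providers: {badges['providers']}, health issues: {badges['status']}")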

View File

@ -0,0 +1,16 @@
# coding=utf-8
from .episodes import api_ns_episodes
from .episodes_subtitles import api_ns_episodes_subtitles
from .history import api_ns_episodes_history
from .wanted import api_ns_episodes_wanted
from .blacklist import api_ns_episodes_blacklist
api_ns_list_episodes = [
api_ns_episodes,
api_ns_episodes_blacklist,
api_ns_episodes_history,
api_ns_episodes_subtitles,
api_ns_episodes_wanted,
]

View File

@ -0,0 +1,148 @@
# coding=utf-8
import pretty
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from app.database import TableEpisodes, TableShows, TableBlacklist, database, select
from subtitles.tools.delete import delete_subtitles
from sonarr.blacklist import blacklist_log, blacklist_delete_all, blacklist_delete
from utilities.path_mappings import path_mappings
from subtitles.mass_download import episode_download_subtitles
from app.event_handler import event_stream
from api.swaggerui import subtitles_language_model
from ..utils import authenticate, postprocess
api_ns_episodes_blacklist = Namespace('Episodes Blacklist', description='List, add or remove subtitles to or from '
'episodes blacklist')
@api_ns_episodes_blacklist.route('episodes/blacklist')
class EpisodesBlacklist(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('start', type=int, required=False, default=0, help='Paging start integer')
get_request_parser.add_argument('length', type=int, required=False, default=-1, help='Paging length integer')
get_language_model = api_ns_episodes_blacklist.model('subtitles_language_model', subtitles_language_model)
get_response_model = api_ns_episodes_blacklist.model('EpisodeBlacklistGetResponse', {
'seriesTitle': fields.String(),
'episode_number': fields.String(),
'episodeTitle': fields.String(),
'sonarrSeriesId': fields.Integer(),
'provider': fields.String(),
'subs_id': fields.String(),
'language': fields.Nested(get_language_model),
'timestamp': fields.String(),
'parsed_timestamp': fields.String(),
})
@authenticate
@api_ns_episodes_blacklist.response(401, 'Not Authenticated')
@api_ns_episodes_blacklist.doc(parser=get_request_parser)
def get(self):
"""List blacklisted episodes subtitles"""
args = self.get_request_parser.parse_args()
start = args.get('start')
length = args.get('length')
stmt = select(TableShows.title.label('seriesTitle'),
TableEpisodes.season.concat('x').concat(TableEpisodes.episode).label('episode_number'),
TableEpisodes.title.label('episodeTitle'),
TableEpisodes.sonarrSeriesId,
TableBlacklist.provider,
TableBlacklist.subs_id,
TableBlacklist.language,
TableBlacklist.timestamp) \
.select_from(TableBlacklist) \
.join(TableShows, onclause=TableBlacklist.sonarr_series_id == TableShows.sonarrSeriesId) \
.join(TableEpisodes, onclause=TableBlacklist.sonarr_episode_id == TableEpisodes.sonarrEpisodeId) \
.order_by(TableBlacklist.timestamp.desc())
if length > 0:
stmt = stmt.limit(length).offset(start)
return marshal([postprocess({
'seriesTitle': x.seriesTitle,
'episode_number': x.episode_number,
'episodeTitle': x.episodeTitle,
'sonarrSeriesId': x.sonarrSeriesId,
'provider': x.provider,
'subs_id': x.subs_id,
'language': x.language,
'timestamp': pretty.date(x.timestamp),
'parsed_timestamp': x.timestamp.strftime('%x %X')
}) for x in database.execute(stmt).all()], self.get_response_model, envelope='data')
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('seriesid', type=int, required=True, help='Series ID')
post_request_parser.add_argument('episodeid', type=int, required=True, help='Episode ID')
post_request_parser.add_argument('provider', type=str, required=True, help='Provider name')
post_request_parser.add_argument('subs_id', type=str, required=True, help='Subtitles ID')
post_request_parser.add_argument('language', type=str, required=True, help='Subtitles language')
post_request_parser.add_argument('subtitles_path', type=str, required=True, help='Subtitles file path')
@authenticate
@api_ns_episodes_blacklist.doc(parser=post_request_parser)
@api_ns_episodes_blacklist.response(200, 'Success')
@api_ns_episodes_blacklist.response(401, 'Not Authenticated')
@api_ns_episodes_blacklist.response(404, 'Episode not found')
@api_ns_episodes_blacklist.response(500, 'Subtitles file not found or permission issue.')
def post(self):
"""Add an episodes subtitles to blacklist"""
args = self.post_request_parser.parse_args()
sonarr_series_id = args.get('seriesid')
sonarr_episode_id = args.get('episodeid')
provider = args.get('provider')
subs_id = args.get('subs_id')
language = args.get('language')
episodeInfo = database.execute(
select(TableEpisodes.path)
.where(TableEpisodes.sonarrEpisodeId == sonarr_episode_id)) \
.first()
if not episodeInfo:
return 'Episode not found', 404
media_path = episodeInfo.path
subtitles_path = args.get('subtitles_path')
blacklist_log(sonarr_series_id=sonarr_series_id,
sonarr_episode_id=sonarr_episode_id,
provider=provider,
subs_id=subs_id,
language=language)
if delete_subtitles(media_type='series',
language=language,
forced=False,
hi=False,
media_path=path_mappings.path_replace(media_path),
subtitles_path=subtitles_path,
sonarr_series_id=sonarr_series_id,
sonarr_episode_id=sonarr_episode_id):
episode_download_subtitles(sonarr_episode_id)
event_stream(type='episode-history')
return '', 200
else:
return 'Subtitles file not found or permission issue.', 500
delete_request_parser = reqparse.RequestParser()
delete_request_parser.add_argument('all', type=str, required=False, help='Empty episodes subtitles blacklist')
delete_request_parser.add_argument('provider', type=str, required=False, help='Provider name')
delete_request_parser.add_argument('subs_id', type=str, required=False, help='Subtitles ID')
@authenticate
@api_ns_episodes_blacklist.doc(parser=delete_request_parser)
@api_ns_episodes_blacklist.response(204, 'Success')
@api_ns_episodes_blacklist.response(401, 'Not Authenticated')
def delete(self):
"""Delete an episodes subtitles from blacklist"""
args = self.delete_request_parser.parse_args()
if args.get("all") == "true":
blacklist_delete_all()
else:
provider = args.get('provider')
subs_id = args.get('subs_id')
blacklist_delete(provider=provider, subs_id=subs_id)
return '', 204
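A successful POST here cascades: the release is logged to the blacklist, the subtitle file is deleted, and episode_download_subtitles() immediately hunts for a replacement. A sketch of the call, where every ID and path below is a placeholder:

# coding=utf-8
# Sketch only: blacklist one downloaded subtitle for an episode.
import os
import requests

BASE_URL = 'http://localhost:6767/api'                  # assumed
HEADERS = {'X-API-KEY': os.environ['BAZARR_API_KEY']}   # hypothetical env var

resp = requests.post(f'{BASE_URL}/episodes/blacklist', headers=HEADERS, data={
    'seriesid': 1,                     # placeholder Sonarr series ID
    'episodeid': 42,                   # placeholder Sonarr episode ID
    'provider': 'opensubtitles',       # provider that served the bad subtitle
    'subs_id': 'abc123',               # placeholder provider-side subtitle ID
    'language': 'en',
    'subtitles_path': '/tv/Show/Season 1/S01E01.en.srt',  # placeholder path
})
print(resp.status_code)   # 200 on success; 404/500 per the documented responses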

View File

@ -0,0 +1,90 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from app.database import TableEpisodes, database, select
from api.swaggerui import subtitles_model, subtitles_language_model, audio_language_model
from ..utils import authenticate, postprocess
api_ns_episodes = Namespace('Episodes', description='List episodes metadata for specific series or episodes.')
@api_ns_episodes.route('episodes')
class Episodes(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('seriesid[]', type=int, action='append', required=False, default=[],
help='Series IDs to list episodes for')
get_request_parser.add_argument('episodeid[]', type=int, action='append', required=False, default=[],
                                help='Episode IDs to list')
get_subtitles_model = api_ns_episodes.model('subtitles_model', subtitles_model)
get_subtitles_language_model = api_ns_episodes.model('subtitles_language_model', subtitles_language_model)
get_audio_language_model = api_ns_episodes.model('audio_language_model', audio_language_model)
get_response_model = api_ns_episodes.model('EpisodeGetResponse', {
'audio_language': fields.Nested(get_audio_language_model),
'episode': fields.Integer(),
'missing_subtitles': fields.Nested(get_subtitles_language_model),
'monitored': fields.Boolean(),
'path': fields.String(),
'season': fields.Integer(),
'sonarrEpisodeId': fields.Integer(),
'sonarrSeriesId': fields.Integer(),
'subtitles': fields.Nested(get_subtitles_model),
'title': fields.String(),
'sceneName': fields.String(),
})
@authenticate
@api_ns_episodes.doc(parser=get_request_parser)
@api_ns_episodes.response(200, 'Success')
@api_ns_episodes.response(401, 'Not Authenticated')
@api_ns_episodes.response(404, 'Series or Episode ID not provided')
def get(self):
"""List episodes metadata for specific series or episodes"""
args = self.get_request_parser.parse_args()
seriesId = args.get('seriesid[]')
episodeId = args.get('episodeid[]')
stmt = select(
TableEpisodes.audio_language,
TableEpisodes.episode,
TableEpisodes.missing_subtitles,
TableEpisodes.monitored,
TableEpisodes.path,
TableEpisodes.season,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.sonarrSeriesId,
TableEpisodes.subtitles,
TableEpisodes.title,
TableEpisodes.sceneName,
)
if len(episodeId) > 0:
stmt_query = database.execute(
stmt
.where(TableEpisodes.sonarrEpisodeId.in_(episodeId)))\
.all()
elif len(seriesId) > 0:
stmt_query = database.execute(
stmt
.where(TableEpisodes.sonarrSeriesId.in_(seriesId))
.order_by(TableEpisodes.season.desc(), TableEpisodes.episode.desc()))\
.all()
else:
return "Series or Episode ID not provided", 404
return marshal([postprocess({
'audio_language': x.audio_language,
'episode': x.episode,
'missing_subtitles': x.missing_subtitles,
'monitored': x.monitored,
'path': x.path,
'season': x.season,
'sonarrEpisodeId': x.sonarrEpisodeId,
'sonarrSeriesId': x.sonarrSeriesId,
'subtitles': x.subtitles,
'title': x.title,
'sceneName': x.sceneName,
}) for x in stmt_query], self.get_response_model, envelope='data')
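Note the bracketed argument names: action='append' makes flask-restx collect every repeated seriesid[] or episodeid[] query parameter into a list. A sketch of listing episodes for two series, same assumptions as above:

# coding=utf-8
# Sketch only: list episode metadata for two series in one request.
import os
import requests

BASE_URL = 'http://localhost:6767/api'                  # assumed
HEADERS = {'X-API-KEY': os.environ['BAZARR_API_KEY']}   # hypothetical env var

# requests encodes a list value as repeated parameters: seriesid[]=1&seriesid[]=2
resp = requests.get(f'{BASE_URL}/episodes', headers=HEADERS,
                    params={'seriesid[]': [1, 2]}, timeout=10)
for episode in resp.json()['data']:
    print(episode['sonarrEpisodeId'], episode['title'], episode['missing_subtitles'])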

View File

@ -0,0 +1,226 @@
# coding=utf-8
import os
import logging
from flask_restx import Resource, Namespace, reqparse
from subliminal_patch.core import SUBTITLE_EXTENSIONS
from werkzeug.datastructures import FileStorage
from app.database import TableShows, TableEpisodes, get_audio_profile_languages, get_profile_id, database, select
from utilities.path_mappings import path_mappings
from subtitles.upload import manual_upload_subtitle
from subtitles.download import generate_subtitles
from subtitles.tools.delete import delete_subtitles
from sonarr.history import history_log
from app.notifier import send_notifications
from subtitles.indexer.series import store_subtitles
from app.event_handler import event_stream, show_message
from app.config import settings
from ..utils import authenticate
api_ns_episodes_subtitles = Namespace('Episodes Subtitles', description='Download, upload or delete episodes subtitles')
@api_ns_episodes_subtitles.route('episodes/subtitles')
class EpisodesSubtitles(Resource):
patch_request_parser = reqparse.RequestParser()
patch_request_parser.add_argument('seriesid', type=int, required=True, help='Series ID')
patch_request_parser.add_argument('episodeid', type=int, required=True, help='Episode ID')
patch_request_parser.add_argument('language', type=str, required=True, help='Language code2')
patch_request_parser.add_argument('forced', type=str, required=True, help='Forced true/false as string')
patch_request_parser.add_argument('hi', type=str, required=True, help='HI true/false as string')
@authenticate
@api_ns_episodes_subtitles.doc(parser=patch_request_parser)
@api_ns_episodes_subtitles.response(204, 'Success')
@api_ns_episodes_subtitles.response(401, 'Not Authenticated')
@api_ns_episodes_subtitles.response(404, 'Episode not found')
@api_ns_episodes_subtitles.response(409, 'Unable to save subtitles file. Permission or path mapping issue?')
@api_ns_episodes_subtitles.response(500, 'Custom error messages')
def patch(self):
"""Download an episode subtitles"""
args = self.patch_request_parser.parse_args()
sonarrSeriesId = args.get('seriesid')
sonarrEpisodeId = args.get('episodeid')
episodeInfo = database.execute(
select(TableEpisodes.path,
TableEpisodes.sceneName,
TableEpisodes.audio_language,
TableShows.title)
.select_from(TableEpisodes)
.join(TableShows)
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)) \
.first()
if not episodeInfo:
return 'Episode not found', 404
episodePath = path_mappings.path_replace(episodeInfo.path)
if not os.path.exists(episodePath):
return 'Episode file not found. Path mapping issue?', 500
sceneName = episodeInfo.sceneName or "None"
title = episodeInfo.title
language = args.get('language')
hi = args.get('hi').capitalize()
forced = args.get('forced').capitalize()
if hi == 'True':
language_str = f'{language}:hi'
elif forced == 'True':
language_str = f'{language}:forced'
else:
language_str = language
audio_language_list = get_audio_profile_languages(episodeInfo.audio_language)
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = None
try:
result = list(generate_subtitles(episodePath, [(language, hi, forced)], audio_language, sceneName,
title, 'series', profile_id=get_profile_id(episode_id=sonarrEpisodeId)))
if isinstance(result, list) and len(result):
result = result[0]
if isinstance(result, tuple) and len(result):
result = result[0]
history_log(1, sonarrSeriesId, sonarrEpisodeId, result)
send_notifications(sonarrSeriesId, sonarrEpisodeId, result.message)
store_subtitles(result.path, episodePath)
else:
event_stream(type='episode', payload=sonarrEpisodeId)
show_message(f'No {language_str.upper()} subtitles found')
return '', 204
except OSError:
return 'Unable to save subtitles file. Permission or path mapping issue?', 409
else:
return '', 204
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('seriesid', type=int, required=True, help='Series ID')
post_request_parser.add_argument('episodeid', type=int, required=True, help='Episode ID')
post_request_parser.add_argument('language', type=str, required=True, help='Language code2')
post_request_parser.add_argument('forced', type=str, required=True, help='Forced true/false as string')
post_request_parser.add_argument('hi', type=str, required=True, help='HI true/false as string')
post_request_parser.add_argument('file', type=FileStorage, location='files', required=True,
help='Subtitles file as file upload object')
@authenticate
@api_ns_episodes_subtitles.doc(parser=post_request_parser)
@api_ns_episodes_subtitles.response(204, 'Success')
@api_ns_episodes_subtitles.response(401, 'Not Authenticated')
@api_ns_episodes_subtitles.response(404, 'Episode not found')
@api_ns_episodes_subtitles.response(409, 'Unable to save subtitles file. Permission or path mapping issue?')
@api_ns_episodes_subtitles.response(500, 'Episode file not found. Path mapping issue?')
def post(self):
"""Upload an episode subtitles"""
args = self.post_request_parser.parse_args()
sonarrSeriesId = args.get('seriesid')
sonarrEpisodeId = args.get('episodeid')
episodeInfo = database.execute(
select(TableEpisodes.path,
TableEpisodes.audio_language)
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)) \
.first()
if not episodeInfo:
return 'Episode not found', 404
episodePath = path_mappings.path_replace(episodeInfo.path)
if not os.path.exists(episodePath):
return 'Episode file not found. Path mapping issue?', 500
audio_language = get_audio_profile_languages(episodeInfo.audio_language)
if len(audio_language) and isinstance(audio_language[0], dict):
audio_language = audio_language[0]
else:
audio_language = {'name': '', 'code2': '', 'code3': ''}
language = args.get('language')
forced = True if args.get('forced') == 'true' else False
hi = True if args.get('hi') == 'true' else False
subFile = args.get('file')
_, ext = os.path.splitext(subFile.filename)
if not isinstance(ext, str) or ext.lower() not in SUBTITLE_EXTENSIONS:
raise ValueError('A subtitle of an invalid format was uploaded.')
try:
result = manual_upload_subtitle(path=episodePath,
language=language,
forced=forced,
hi=hi,
media_type='series',
subtitle=subFile,
audio_language=audio_language)
if not result:
logging.debug(f"BAZARR unable to process subtitles for this episode: {episodePath}")
else:
if isinstance(result, tuple) and len(result):
result = result[0]
provider = "manual"
score = 360
history_log(4, sonarrSeriesId, sonarrEpisodeId, result, fake_provider=provider, fake_score=score)
if not settings.general.dont_notify_manual_actions:
send_notifications(sonarrSeriesId, sonarrEpisodeId, result.message)
store_subtitles(result.path, episodePath)
except OSError:
return 'Unable to save subtitles file. Permission or path mapping issue?', 409
else:
return '', 204
delete_request_parser = reqparse.RequestParser()
delete_request_parser.add_argument('seriesid', type=int, required=True, help='Series ID')
delete_request_parser.add_argument('episodeid', type=int, required=True, help='Episode ID')
delete_request_parser.add_argument('language', type=str, required=True, help='Language code2')
delete_request_parser.add_argument('forced', type=str, required=True, help='Forced true/false as string')
delete_request_parser.add_argument('hi', type=str, required=True, help='HI true/false as string')
delete_request_parser.add_argument('path', type=str, required=True, help='Path of the subtitles file')
@authenticate
@api_ns_episodes_subtitles.doc(parser=delete_request_parser)
@api_ns_episodes_subtitles.response(204, 'Success')
@api_ns_episodes_subtitles.response(401, 'Not Authenticated')
@api_ns_episodes_subtitles.response(404, 'Episode not found')
@api_ns_episodes_subtitles.response(500, 'Subtitles file not found or permission issue.')
def delete(self):
"""Delete an episode subtitles"""
args = self.delete_request_parser.parse_args()
sonarrSeriesId = args.get('seriesid')
sonarrEpisodeId = args.get('episodeid')
episodeInfo = database.execute(
select(TableEpisodes.path)
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)) \
.first()
if not episodeInfo:
return 'Episode not found', 404
episodePath = path_mappings.path_replace(episodeInfo.path)
language = args.get('language')
forced = args.get('forced')
hi = args.get('hi')
subtitlesPath = args.get('path')
subtitlesPath = path_mappings.path_replace_reverse(subtitlesPath)
if delete_subtitles(media_type='series',
language=language,
forced=forced,
hi=hi,
media_path=episodePath,
subtitles_path=subtitlesPath,
sonarr_series_id=sonarrSeriesId,
sonarr_episode_id=sonarrEpisodeId):
return '', 204
else:
return 'Subtitles file not found or permission issue.', 500
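All three verbs above expect forced and hi as the strings 'true'/'false' rather than booleans (the handlers compare them as text). A sketch of triggering a search-and-download for one episode:

# coding=utf-8
# Sketch only: search for and download an English subtitle for one episode.
import os
import requests

BASE_URL = 'http://localhost:6767/api'                  # assumed
HEADERS = {'X-API-KEY': os.environ['BAZARR_API_KEY']}   # hypothetical env var

resp = requests.patch(f'{BASE_URL}/episodes/subtitles', headers=HEADERS, params={
    'seriesid': 1,       # placeholder Sonarr series ID
    'episodeid': 42,     # placeholder Sonarr episode ID
    'language': 'en',    # two-letter code, as the parser help says
    'forced': 'false',   # strings, not booleans
    'hi': 'false',
})
print(resp.status_code)  # 204 even when nothing was found; a message event is emitted instead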

View File

@ -0,0 +1,181 @@
# coding=utf-8
import operator
import ast
from functools import reduce
from api.swaggerui import subtitles_language_model
from app.database import TableEpisodes, TableShows, TableHistory, TableBlacklist, database, select, func
from subtitles.upgrade import get_upgradable_episode_subtitles, _language_still_desired
import pretty
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from ..utils import authenticate, postprocess
api_ns_episodes_history = Namespace('Episodes History', description='List episodes history events')
@api_ns_episodes_history.route('episodes/history')
class EpisodesHistory(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('start', type=int, required=False, default=0, help='Paging start integer')
get_request_parser.add_argument('length', type=int, required=False, default=-1, help='Paging length integer')
get_request_parser.add_argument('episodeid', type=int, required=False, help='Episode ID')
get_language_model = api_ns_episodes_history.model('subtitles_language_model', subtitles_language_model)
data_model = api_ns_episodes_history.model('history_episodes_data_model', {
'seriesTitle': fields.String(),
'monitored': fields.Boolean(),
'episode_number': fields.String(),
'episodeTitle': fields.String(),
'timestamp': fields.String(),
'subs_id': fields.String(),
'description': fields.String(),
'sonarrSeriesId': fields.Integer(),
'language': fields.Nested(get_language_model),
'score': fields.String(),
'tags': fields.List(fields.String),
'action': fields.Integer(),
'subtitles_path': fields.String(),
'sonarrEpisodeId': fields.Integer(),
'provider': fields.String(),
'upgradable': fields.Boolean(),
'parsed_timestamp': fields.String(),
'blacklisted': fields.Boolean(),
'matches': fields.List(fields.String),
'dont_matches': fields.List(fields.String),
})
get_response_model = api_ns_episodes_history.model('EpisodeHistoryGetResponse', {
'data': fields.Nested(data_model),
'total': fields.Integer(),
})
@authenticate
@api_ns_episodes_history.response(401, 'Not Authenticated')
@api_ns_episodes_history.doc(parser=get_request_parser)
def get(self):
"""List episodes history events"""
args = self.get_request_parser.parse_args()
start = args.get('start')
length = args.get('length')
episodeid = args.get('episodeid')
upgradable_episodes_not_perfect = get_upgradable_episode_subtitles()
blacklisted_subtitles = select(TableBlacklist.provider,
TableBlacklist.subs_id) \
.subquery()
query_conditions = [(TableEpisodes.title.is_not(None))]
if episodeid:
query_conditions.append((TableEpisodes.sonarrEpisodeId == episodeid))
stmt = select(TableHistory.id,
TableShows.title.label('seriesTitle'),
TableEpisodes.monitored,
TableEpisodes.season.concat('x').concat(TableEpisodes.episode).label('episode_number'),
TableEpisodes.title.label('episodeTitle'),
TableHistory.timestamp,
TableHistory.subs_id,
TableHistory.description,
TableHistory.sonarrSeriesId,
TableEpisodes.path,
TableHistory.language,
TableHistory.score,
TableShows.tags,
TableHistory.action,
TableHistory.video_path,
TableHistory.subtitles_path,
TableHistory.sonarrEpisodeId,
TableHistory.provider,
TableShows.seriesType,
TableShows.profileId,
TableHistory.matched,
TableHistory.not_matched,
TableEpisodes.subtitles.label('external_subtitles'),
blacklisted_subtitles.c.subs_id.label('blacklisted')) \
.select_from(TableHistory) \
.join(TableShows, onclause=TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId) \
.join(TableEpisodes, onclause=TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId) \
.join(blacklisted_subtitles, onclause=TableHistory.subs_id == blacklisted_subtitles.c.subs_id,
isouter=True) \
.where(reduce(operator.and_, query_conditions)) \
.order_by(TableHistory.timestamp.desc())
if length > 0:
stmt = stmt.limit(length).offset(start)
episode_history = [{
'id': x.id,
'seriesTitle': x.seriesTitle,
'monitored': x.monitored,
'episode_number': x.episode_number,
'episodeTitle': x.episodeTitle,
'timestamp': x.timestamp,
'subs_id': x.subs_id,
'description': x.description,
'sonarrSeriesId': x.sonarrSeriesId,
'path': x.path,
'language': x.language,
'profileId': x.profileId,
'score': x.score,
'tags': x.tags,
'action': x.action,
'video_path': x.video_path,
'subtitles_path': x.subtitles_path,
'sonarrEpisodeId': x.sonarrEpisodeId,
'provider': x.provider,
'matches': x.matched,
'dont_matches': x.not_matched,
'external_subtitles': [y[1] for y in ast.literal_eval(x.external_subtitles) if y[1]],
'blacklisted': bool(x.blacklisted),
} for x in database.execute(stmt).all()]
for item in episode_history:
# is this language still desired or should we simply skip this subtitles from upgrade logic?
still_desired = _language_still_desired(item['language'], item['profileId'])
item.update(postprocess(item))
# Mark upgradable and get original_id
item.update({'original_id': upgradable_episodes_not_perfect.get(item['id'])})
item.update({'upgradable': bool(item['original_id'])})
# Mark not upgradable if video/subtitles file doesn't exist anymore or if language isn't desired anymore
if item['upgradable']:
if (item['subtitles_path'] not in item['external_subtitles'] or item['video_path'] != item['path'] or
not still_desired):
item.update({"upgradable": False})
del item['path']
del item['video_path']
del item['external_subtitles']
del item['profileId']
if item['score']:
item['score'] = f"{round((int(item['score']) * 100 / 360), 2)}%"
# Make timestamp pretty
if item['timestamp']:
item["parsed_timestamp"] = item['timestamp'].strftime('%x %X')
item['timestamp'] = pretty.date(item["timestamp"])
# Parse matches and dont_matches
if item['matches']:
item.update({'matches': ast.literal_eval(item['matches'])})
else:
item.update({'matches': []})
if item['dont_matches']:
item.update({'dont_matches': ast.literal_eval(item['dont_matches'])})
else:
item.update({'dont_matches': []})
count = database.execute(
select(func.count())
.select_from(TableHistory)
.join(TableEpisodes)
.where(TableEpisodes.title.is_not(None))) \
.scalar()
return marshal({'data': episode_history, 'total': count}, self.get_response_model)
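start/length implement plain offset paging, and the default length of -1 skips the limit entirely and returns the full history. A sketch fetching the 25 most recent events:

# coding=utf-8
# Sketch only: page through episode history, newest first.
import os
import requests

BASE_URL = 'http://localhost:6767/api'                  # assumed
HEADERS = {'X-API-KEY': os.environ['BAZARR_API_KEY']}   # hypothetical env var

page = requests.get(f'{BASE_URL}/episodes/history', headers=HEADERS,
                    params={'start': 0, 'length': 25}, timeout=10).json()
print(f"showing {len(page['data'])} of {page['total']} events")
for event in page['data']:
    print(event['parsed_timestamp'], event['seriesTitle'], event['episode_number'])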

View File

@ -0,0 +1,99 @@
# coding=utf-8
import operator
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from functools import reduce
from app.database import get_exclusion_clause, TableEpisodes, TableShows, database, select, func
from api.swaggerui import subtitles_language_model
from ..utils import authenticate, postprocess
api_ns_episodes_wanted = Namespace('Episodes Wanted', description='List episodes wanted subtitles')
@api_ns_episodes_wanted.route('episodes/wanted')
class EpisodesWanted(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('start', type=int, required=False, default=0, help='Paging start integer')
get_request_parser.add_argument('length', type=int, required=False, default=-1, help='Paging length integer')
get_request_parser.add_argument('episodeid[]', type=int, action='append', required=False, default=[],
                                help='Episode IDs to list')
get_subtitles_language_model = api_ns_episodes_wanted.model('subtitles_language_model', subtitles_language_model)
data_model = api_ns_episodes_wanted.model('wanted_episodes_data_model', {
'seriesTitle': fields.String(),
'episode_number': fields.String(),
'episodeTitle': fields.String(),
'missing_subtitles': fields.Nested(get_subtitles_language_model),
'sonarrSeriesId': fields.Integer(),
'sonarrEpisodeId': fields.Integer(),
'sceneName': fields.String(),
'tags': fields.List(fields.String),
'seriesType': fields.String(),
})
get_response_model = api_ns_episodes_wanted.model('EpisodeWantedGetResponse', {
'data': fields.Nested(data_model),
'total': fields.Integer(),
})
@authenticate
@api_ns_episodes_wanted.response(401, 'Not Authenticated')
@api_ns_episodes_wanted.doc(parser=get_request_parser)
def get(self):
"""List episodes wanted subtitles"""
args = self.get_request_parser.parse_args()
episodeid = args.get('episodeid[]')
wanted_conditions = [(TableEpisodes.missing_subtitles.is_not(None)),
(TableEpisodes.missing_subtitles != '[]')]
if len(episodeid) > 0:
            wanted_conditions.append((TableEpisodes.sonarrEpisodeId.in_(episodeid)))
start = 0
length = 0
else:
start = args.get('start')
length = args.get('length')
wanted_conditions += get_exclusion_clause('series')
wanted_condition = reduce(operator.and_, wanted_conditions)
stmt = select(TableShows.title.label('seriesTitle'),
TableEpisodes.season.concat('x').concat(TableEpisodes.episode).label('episode_number'),
TableEpisodes.title.label('episodeTitle'),
TableEpisodes.missing_subtitles,
TableEpisodes.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.sceneName,
TableShows.tags,
TableShows.seriesType) \
.select_from(TableEpisodes) \
.join(TableShows) \
.where(wanted_condition)
if length > 0:
stmt = stmt.order_by(TableEpisodes.sonarrEpisodeId.desc()).limit(length).offset(start)
results = [postprocess({
'seriesTitle': x.seriesTitle,
'episode_number': x.episode_number,
'episodeTitle': x.episodeTitle,
'missing_subtitles': x.missing_subtitles,
'sonarrSeriesId': x.sonarrSeriesId,
'sonarrEpisodeId': x.sonarrEpisodeId,
'sceneName': x.sceneName,
'tags': x.tags,
'seriesType': x.seriesType,
}) for x in database.execute(stmt).all()]
count = database.execute(
select(func.count())
.select_from(TableEpisodes)
.join(TableShows)
.where(wanted_condition)) \
.scalar()
return marshal({'data': results, 'total': count}, self.get_response_model)

View File

@ -0,0 +1,11 @@
# coding=utf-8
from .files import api_ns_files
from .files_sonarr import api_ns_files_sonarr
from .files_radarr import api_ns_files_radarr
api_ns_list_files = [
api_ns_files,
api_ns_files_radarr,
api_ns_files_sonarr,
]

39
bazarr/api/files/files.py Normal file
View File

@ -0,0 +1,39 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from utilities.filesystem import browse_bazarr_filesystem
from ..utils import authenticate
api_ns_files = Namespace('Files Browser for Bazarr', description='Browse content of file system as seen by Bazarr')
@api_ns_files.route('files')
class BrowseBazarrFS(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('path', type=str, default='', help='Path to browse')
get_response_model = api_ns_files.model('BazarrFileBrowserGetResponse', {
'name': fields.String(),
'children': fields.Boolean(),
'path': fields.String(),
})
@authenticate
@api_ns_files.response(401, 'Not Authenticated')
@api_ns_files.doc(parser=get_request_parser)
def get(self):
"""List Bazarr file system content"""
args = self.get_request_parser.parse_args()
path = args.get('path')
data = []
try:
result = browse_bazarr_filesystem(path)
if result is None:
raise ValueError
except Exception:
return []
for item in result['directories']:
data.append({'name': item['name'], 'children': True, 'path': item['path']})
return marshal(data, self.get_response_model)
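Each call returns a single directory level; children flags whether a node can be expanded, and the UI descends by feeding an entry's path back into the same endpoint. A sketch:

# coding=utf-8
# Sketch only: browse one level of the filesystem as Bazarr itself sees it.
import os
import requests

BASE_URL = 'http://localhost:6767/api'                  # assumed
HEADERS = {'X-API-KEY': os.environ['BAZARR_API_KEY']}   # hypothetical env var

# an empty path lists the filesystem roots; pass a returned 'path' to descend
for node in requests.get(f'{BASE_URL}/files', headers=HEADERS,
                         params={'path': ''}, timeout=10).json():
    print(node['name'], '->', node['path'])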

View File

@ -0,0 +1,40 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from radarr.filesystem import browse_radarr_filesystem
from ..utils import authenticate
api_ns_files_radarr = Namespace('Files Browser for Radarr', description='Browse content of file system as seen by '
'Radarr')
@api_ns_files_radarr.route('files/radarr')
class BrowseRadarrFS(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('path', type=str, default='', help='Path to browse')
get_response_model = api_ns_files_radarr.model('RadarrFileBrowserGetResponse', {
'name': fields.String(),
'children': fields.Boolean(),
'path': fields.String(),
})
@authenticate
@api_ns_files_radarr.response(401, 'Not Authenticated')
@api_ns_files_radarr.doc(parser=get_request_parser)
def get(self):
"""List Radarr file system content"""
args = self.get_request_parser.parse_args()
path = args.get('path')
data = []
try:
result = browse_radarr_filesystem(path)
if result is None:
raise ValueError
except Exception:
return []
for item in result['directories']:
data.append({'name': item['name'], 'children': True, 'path': item['path']})
return marshal(data, self.get_response_model)

View File

@ -0,0 +1,40 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from sonarr.filesystem import browse_sonarr_filesystem
from ..utils import authenticate
api_ns_files_sonarr = Namespace('Files Browser for Sonarr', description='Browse content of file system as seen by '
'Sonarr')
@api_ns_files_sonarr.route('files/sonarr')
class BrowseSonarrFS(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('path', type=str, default='', help='Path to browse')
get_response_model = api_ns_files_sonarr.model('SonarrFileBrowserGetResponse', {
'name': fields.String(),
'children': fields.Boolean(),
'path': fields.String(),
})
@authenticate
@api_ns_files_sonarr.response(401, 'Not Authenticated')
@api_ns_files_sonarr.doc(parser=get_request_parser)
def get(self):
"""List Sonarr file system content"""
args = self.get_request_parser.parse_args()
path = args.get('path')
data = []
try:
result = browse_sonarr_filesystem(path)
if result is None:
raise ValueError
except Exception:
return []
for item in result['directories']:
data.append({'name': item['name'], 'children': True, 'path': item['path']})
return marshal(data, self.get_response_model)

View File

@ -0,0 +1,8 @@
# coding=utf-8
from .stats import api_ns_history_stats
api_ns_list_history = [
api_ns_history_stats
]

123
bazarr/api/history/stats.py Normal file
View File

@ -0,0 +1,123 @@
# coding=utf-8
import datetime
import operator
import itertools
from dateutil import rrule
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from functools import reduce
from app.database import TableHistory, TableHistoryMovie, database, select
from ..utils import authenticate
api_ns_history_stats = Namespace('History Statistics', description='Get history statistics')
@api_ns_history_stats.route('history/stats')
class HistoryStats(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('timeFrame', type=str, default='month',
help='Timeframe to get stats for. Must be in ["week", "month", "trimester", '
'"year"]')
get_request_parser.add_argument('action', type=str, default='All', help='Action type to filter for.')
get_request_parser.add_argument('provider', type=str, default='All', help='Provider name to filter for.')
get_request_parser.add_argument('language', type=str, default='All', help='Language name to filter for')
series_data_model = api_ns_history_stats.model('history_series_stats_data_model', {
'date': fields.String(),
'count': fields.Integer(),
})
movies_data_model = api_ns_history_stats.model('history_movies_stats_data_model', {
'date': fields.String(),
'count': fields.Integer(),
})
get_response_model = api_ns_history_stats.model('HistoryStatsGetResponse', {
'series': fields.Nested(series_data_model),
'movies': fields.Nested(movies_data_model),
})
@authenticate
@api_ns_history_stats.response(401, 'Not Authenticated')
@api_ns_history_stats.doc(parser=get_request_parser)
def get(self):
"""Get history statistics"""
args = self.get_request_parser.parse_args()
timeframe = args.get('timeFrame')
action = args.get('action')
provider = args.get('provider')
language = args.get('language')
        # timeframe must be in ['week', 'month', 'trimester', 'year']
        if timeframe == 'year':
            delay = 364 * 24 * 60 * 60
        elif timeframe == 'trimester':
            delay = 90 * 24 * 60 * 60
        elif timeframe == 'week':
            delay = 6 * 24 * 60 * 60
        else:
            # 'month' is the parser default and doubles as a fallback so delay is always defined
            delay = 30 * 24 * 60 * 60
now = datetime.datetime.now()
past = now - datetime.timedelta(seconds=delay)
history_where_clauses = [(TableHistory.timestamp.between(past, now))]
history_where_clauses_movie = [(TableHistoryMovie.timestamp.between(past, now))]
if action != 'All':
history_where_clauses.append((TableHistory.action == action))
history_where_clauses_movie.append((TableHistoryMovie.action == action))
else:
history_where_clauses.append((TableHistory.action.in_([1, 2, 3])))
history_where_clauses_movie.append((TableHistoryMovie.action.in_([1, 2, 3])))
if provider != 'All':
history_where_clauses.append((TableHistory.provider == provider))
history_where_clauses_movie.append((TableHistoryMovie.provider == provider))
if language != 'All':
history_where_clauses.append((TableHistory.language == language))
history_where_clauses_movie.append((TableHistoryMovie.language == language))
history_where_clause = reduce(operator.and_, history_where_clauses)
history_where_clause_movie = reduce(operator.and_, history_where_clauses_movie)
data_series = [{
'timestamp': x.timestamp,
'id': x.id,
} for x in database.execute(
select(TableHistory.timestamp, TableHistory.id)
.where(history_where_clause))
.all()]
data_series = [{'date': date[0], 'count': sum(1 for item in date[1])} for date in
itertools.groupby(list(data_series),
key=lambda x: x['timestamp'].strftime(
'%Y-%m-%d'))]
data_movies = [{
'timestamp': x.timestamp,
'id': x.id,
} for x in database.execute(
select(TableHistoryMovie.timestamp, TableHistoryMovie.id)
.where(history_where_clause_movie))
.all()]
data_movies = [{'date': date[0], 'count': sum(1 for item in date[1])} for date in
itertools.groupby(list(data_movies),
key=lambda x: x['timestamp'].strftime(
'%Y-%m-%d'))]
for dt in rrule.rrule(rrule.DAILY,
dtstart=datetime.datetime.now() - datetime.timedelta(seconds=delay),
until=datetime.datetime.now()):
if not any(d['date'] == dt.strftime('%Y-%m-%d') for d in data_series):
data_series.append({'date': dt.strftime('%Y-%m-%d'), 'count': 0})
if not any(d['date'] == dt.strftime('%Y-%m-%d') for d in data_movies):
data_movies.append({'date': dt.strftime('%Y-%m-%d'), 'count': 0})
sorted_data_series = sorted(data_series, key=lambda i: i['date'])
sorted_data_movies = sorted(data_movies, key=lambda i: i['date'])
return marshal({'series': sorted_data_series, 'movies': sorted_data_movies}, self.get_response_model)
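Because the rrule loop zero-fills missing days, the series and movies arrays always cover the whole window, which keeps chart rendering trivial. A sketch pulling a week of counts:

# coding=utf-8
# Sketch only: fetch one week of download statistics.
import os
import requests

BASE_URL = 'http://localhost:6767/api'                  # assumed
HEADERS = {'X-API-KEY': os.environ['BAZARR_API_KEY']}   # hypothetical env var

stats = requests.get(f'{BASE_URL}/history/stats', headers=HEADERS,
                     params={'timeFrame': 'week'}, timeout=10).json()
for point in stats['series']:     # one zero-filled entry per day in the window
    print(point['date'], point['count'])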

View File

@ -0,0 +1,16 @@
# coding=utf-8
from .movies import api_ns_movies
from .movies_subtitles import api_ns_movies_subtitles
from .history import api_ns_movies_history
from .wanted import api_ns_movies_wanted
from .blacklist import api_ns_movies_blacklist
api_ns_list_movies = [
api_ns_movies,
api_ns_movies_blacklist,
api_ns_movies_history,
api_ns_movies_subtitles,
api_ns_movies_wanted,
]

View File

@ -0,0 +1,141 @@
# coding=utf-8
import pretty
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from app.database import TableMovies, TableBlacklistMovie, database, select
from subtitles.tools.delete import delete_subtitles
from radarr.blacklist import blacklist_log_movie, blacklist_delete_all_movie, blacklist_delete_movie
from utilities.path_mappings import path_mappings
from subtitles.mass_download import movies_download_subtitles
from app.event_handler import event_stream
from api.swaggerui import subtitles_language_model
from ..utils import authenticate, postprocess
api_ns_movies_blacklist = Namespace('Movies Blacklist', description='List, add or remove subtitles to or from '
'movies blacklist')
@api_ns_movies_blacklist.route('movies/blacklist')
class MoviesBlacklist(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('start', type=int, required=False, default=0, help='Paging start integer')
get_request_parser.add_argument('length', type=int, required=False, default=-1, help='Paging length integer')
get_language_model = api_ns_movies_blacklist.model('subtitles_language_model', subtitles_language_model)
get_response_model = api_ns_movies_blacklist.model('MovieBlacklistGetResponse', {
'title': fields.String(),
'radarrId': fields.Integer(),
'provider': fields.String(),
'subs_id': fields.String(),
'language': fields.Nested(get_language_model),
'timestamp': fields.String(),
'parsed_timestamp': fields.String(),
})
@authenticate
@api_ns_movies_blacklist.response(401, 'Not Authenticated')
@api_ns_movies_blacklist.doc(parser=get_request_parser)
def get(self):
"""List blacklisted movies subtitles"""
args = self.get_request_parser.parse_args()
start = args.get('start')
length = args.get('length')
        # build the statement first so paging can apply before execution (as the
        # episodes endpoint does); calling .limit() on an executed result would fail
        stmt = select(TableMovies.title,
                      TableMovies.radarrId,
                      TableBlacklistMovie.provider,
                      TableBlacklistMovie.subs_id,
                      TableBlacklistMovie.language,
                      TableBlacklistMovie.timestamp) \
            .select_from(TableBlacklistMovie) \
            .join(TableMovies) \
            .order_by(TableBlacklistMovie.timestamp.desc())
        if length > 0:
            stmt = stmt.limit(length).offset(start)
        return marshal([postprocess({
            'title': x.title,
            'radarrId': x.radarrId,
            'provider': x.provider,
            'subs_id': x.subs_id,
            'language': x.language,
            'timestamp': pretty.date(x.timestamp),
            'parsed_timestamp': x.timestamp.strftime('%x %X'),
        }) for x in database.execute(stmt).all()], self.get_response_model, envelope='data')
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('radarrid', type=int, required=True, help='Radarr ID')
post_request_parser.add_argument('provider', type=str, required=True, help='Provider name')
post_request_parser.add_argument('subs_id', type=str, required=True, help='Subtitles ID')
post_request_parser.add_argument('language', type=str, required=True, help='Subtitles language')
post_request_parser.add_argument('subtitles_path', type=str, required=True, help='Subtitles file path')
@authenticate
@api_ns_movies_blacklist.doc(parser=post_request_parser)
@api_ns_movies_blacklist.response(200, 'Success')
@api_ns_movies_blacklist.response(401, 'Not Authenticated')
@api_ns_movies_blacklist.response(404, 'Movie not found')
@api_ns_movies_blacklist.response(500, 'Subtitles file not found or permission issue.')
def post(self):
"""Add a movies subtitles to blacklist"""
args = self.post_request_parser.parse_args()
radarr_id = args.get('radarrid')
provider = args.get('provider')
subs_id = args.get('subs_id')
language = args.get('language')
# TODO
forced = False
hi = False
data = database.execute(
select(TableMovies.path)
.where(TableMovies.radarrId == radarr_id))\
.first()
if not data:
return 'Movie not found', 404
media_path = data.path
subtitles_path = args.get('subtitles_path')
blacklist_log_movie(radarr_id=radarr_id,
provider=provider,
subs_id=subs_id,
language=language)
if delete_subtitles(media_type='movie',
language=language,
forced=forced,
hi=hi,
media_path=path_mappings.path_replace_movie(media_path),
subtitles_path=subtitles_path,
radarr_id=radarr_id):
movies_download_subtitles(radarr_id)
event_stream(type='movie-history')
return '', 200
else:
return 'Subtitles file not found or permission issue.', 500
delete_request_parser = reqparse.RequestParser()
delete_request_parser.add_argument('all', type=str, required=False, help='Empty movies subtitles blacklist')
delete_request_parser.add_argument('provider', type=str, required=False, help='Provider name')
delete_request_parser.add_argument('subs_id', type=str, required=False, help='Subtitles ID')
@authenticate
@api_ns_movies_blacklist.doc(parser=delete_request_parser)
@api_ns_movies_blacklist.response(204, 'Success')
@api_ns_movies_blacklist.response(401, 'Not Authenticated')
def delete(self):
"""Delete a movies subtitles from blacklist"""
args = self.delete_request_parser.parse_args()
if args.get("all") == "true":
blacklist_delete_all_movie()
else:
provider = args.get('provider')
subs_id = args.get('subs_id')
blacklist_delete_movie(provider=provider, subs_id=subs_id)
        return '', 204

View File

@ -0,0 +1,172 @@
# coding=utf-8
import operator
import pretty
import ast
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from functools import reduce
from app.database import TableMovies, TableHistoryMovie, TableBlacklistMovie, database, select, func
from subtitles.upgrade import get_upgradable_movies_subtitles, _language_still_desired
from api.swaggerui import subtitles_language_model
from api.utils import authenticate, postprocess
api_ns_movies_history = Namespace('Movies History', description='List movies history events')
@api_ns_movies_history.route('movies/history')
class MoviesHistory(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('start', type=int, required=False, default=0, help='Paging start integer')
get_request_parser.add_argument('length', type=int, required=False, default=-1, help='Paging length integer')
get_request_parser.add_argument('radarrid', type=int, required=False, help='Movie ID')
get_language_model = api_ns_movies_history.model('subtitles_language_model', subtitles_language_model)
data_model = api_ns_movies_history.model('history_movies_data_model', {
'action': fields.Integer(),
'title': fields.String(),
'timestamp': fields.String(),
'description': fields.String(),
'radarrId': fields.Integer(),
'monitored': fields.Boolean(),
'path': fields.String(),
'language': fields.Nested(get_language_model),
'tags': fields.List(fields.String),
'score': fields.String(),
'subs_id': fields.String(),
'provider': fields.String(),
'subtitles_path': fields.String(),
'upgradable': fields.Boolean(),
'parsed_timestamp': fields.String(),
'blacklisted': fields.Boolean(),
'matches': fields.List(fields.String),
'dont_matches': fields.List(fields.String),
})
get_response_model = api_ns_movies_history.model('MovieHistoryGetResponse', {
'data': fields.Nested(data_model),
'total': fields.Integer(),
})
@authenticate
@api_ns_movies_history.response(401, 'Not Authenticated')
@api_ns_movies_history.doc(parser=get_request_parser)
def get(self):
"""List movies history events"""
args = self.get_request_parser.parse_args()
start = args.get('start')
length = args.get('length')
radarrid = args.get('radarrid')
upgradable_movies_not_perfect = get_upgradable_movies_subtitles()
blacklisted_subtitles = select(TableBlacklistMovie.provider,
TableBlacklistMovie.subs_id) \
.subquery()
query_conditions = [(TableMovies.title.is_not(None))]
if radarrid:
query_conditions.append((TableMovies.radarrId == radarrid))
stmt = select(TableHistoryMovie.id,
TableHistoryMovie.action,
TableMovies.title,
TableHistoryMovie.timestamp,
TableHistoryMovie.description,
TableHistoryMovie.radarrId,
TableMovies.monitored,
TableMovies.path,
TableHistoryMovie.language,
TableMovies.tags,
TableHistoryMovie.score,
TableHistoryMovie.subs_id,
TableHistoryMovie.provider,
TableHistoryMovie.subtitles_path,
TableHistoryMovie.video_path,
TableHistoryMovie.matched,
TableHistoryMovie.not_matched,
TableMovies.profileId,
TableMovies.subtitles.label('external_subtitles'),
blacklisted_subtitles.c.subs_id.label('blacklisted')) \
.select_from(TableHistoryMovie) \
.join(TableMovies) \
.join(blacklisted_subtitles, onclause=TableHistoryMovie.subs_id == blacklisted_subtitles.c.subs_id,
isouter=True) \
.where(reduce(operator.and_, query_conditions)) \
.order_by(TableHistoryMovie.timestamp.desc())
if length > 0:
stmt = stmt.limit(length).offset(start)
movie_history = [{
'id': x.id,
'action': x.action,
'title': x.title,
'timestamp': x.timestamp,
'description': x.description,
'radarrId': x.radarrId,
'monitored': x.monitored,
'path': x.path,
'language': x.language,
'profileId': x.profileId,
'tags': x.tags,
'score': x.score,
'subs_id': x.subs_id,
'provider': x.provider,
'subtitles_path': x.subtitles_path,
'video_path': x.video_path,
'matches': x.matched,
'dont_matches': x.not_matched,
'external_subtitles': [y[1] for y in ast.literal_eval(x.external_subtitles) if y[1]],
'blacklisted': bool(x.blacklisted),
} for x in database.execute(stmt).all()]
for item in movie_history:
# is this language still desired or should we simply skip this subtitles from upgrade logic?
still_desired = _language_still_desired(item['language'], item['profileId'])
item.update(postprocess(item))
# Mark upgradable and get original_id
item.update({'original_id': upgradable_movies_not_perfect.get(item['id'])})
item.update({'upgradable': bool(item['original_id'])})
# Mark not upgradable if video/subtitles file doesn't exist anymore or if language isn't desired anymore
if item['upgradable']:
if (item['subtitles_path'] not in item['external_subtitles'] or item['video_path'] != item['path'] or
not still_desired):
item.update({"upgradable": False})
del item['path']
del item['video_path']
del item['external_subtitles']
del item['profileId']
if item['score']:
item['score'] = f"{round((int(item['score']) * 100 / 120), 2)}%"
# Make timestamp pretty
if item['timestamp']:
item["parsed_timestamp"] = item['timestamp'].strftime('%x %X')
item['timestamp'] = pretty.date(item["timestamp"])
# Parse matches and dont_matches
if item['matches']:
item.update({'matches': ast.literal_eval(item['matches'])})
else:
item.update({'matches': []})
if item['dont_matches']:
item.update({'dont_matches': ast.literal_eval(item['dont_matches'])})
else:
item.update({'dont_matches': []})
count = database.execute(
select(func.count())
.select_from(TableHistoryMovie)
.join(TableMovies)
.where(TableMovies.title.is_not(None))) \
.scalar()
return marshal({'data': movie_history, 'total': count}, self.get_response_model)

188
bazarr/api/movies/movies.py Normal file
View File

@ -0,0 +1,188 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from app.database import TableMovies, database, update, select, func
from subtitles.indexer.movies import list_missing_subtitles_movies, movies_scan_subtitles
from app.event_handler import event_stream
from subtitles.wanted import wanted_search_missing_subtitles_movies
from subtitles.mass_download import movies_download_subtitles
from api.swaggerui import subtitles_model, subtitles_language_model, audio_language_model
from api.utils import authenticate, None_Keys, postprocess
api_ns_movies = Namespace('Movies', description='List movies metadata, update movie languages profile or run actions '
'for specific movies.')
@api_ns_movies.route('movies')
class Movies(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('start', type=int, required=False, default=0, help='Paging start integer')
get_request_parser.add_argument('length', type=int, required=False, default=-1, help='Paging length integer')
get_request_parser.add_argument('radarrid[]', type=int, action='append', required=False, default=[],
help='Movies IDs to get metadata for')
get_subtitles_model = api_ns_movies.model('subtitles_model', subtitles_model)
get_subtitles_language_model = api_ns_movies.model('subtitles_language_model', subtitles_language_model)
get_audio_language_model = api_ns_movies.model('audio_language_model', audio_language_model)
data_model = api_ns_movies.model('movies_data_model', {
'alternativeTitles': fields.List(fields.String),
'audio_language': fields.Nested(get_audio_language_model),
'fanart': fields.String(),
'imdbId': fields.String(),
'missing_subtitles': fields.Nested(get_subtitles_language_model),
'monitored': fields.Boolean(),
'overview': fields.String(),
'path': fields.String(),
'poster': fields.String(),
'profileId': fields.Integer(),
'radarrId': fields.Integer(),
'sceneName': fields.String(),
'subtitles': fields.Nested(get_subtitles_model),
'tags': fields.List(fields.String),
'title': fields.String(),
'year': fields.String(),
})
get_response_model = api_ns_movies.model('MoviesGetResponse', {
'data': fields.Nested(data_model),
'total': fields.Integer(),
})
@authenticate
@api_ns_movies.doc(parser=get_request_parser)
@api_ns_movies.response(200, 'Success')
@api_ns_movies.response(401, 'Not Authenticated')
def get(self):
"""List movies metadata for specific movies"""
args = self.get_request_parser.parse_args()
start = args.get('start')
length = args.get('length')
radarrId = args.get('radarrid[]')
stmt = select(TableMovies.alternativeTitles,
TableMovies.audio_language,
TableMovies.fanart,
TableMovies.imdbId,
TableMovies.missing_subtitles,
TableMovies.monitored,
TableMovies.overview,
TableMovies.path,
TableMovies.poster,
TableMovies.profileId,
TableMovies.radarrId,
TableMovies.sceneName,
TableMovies.subtitles,
TableMovies.tags,
TableMovies.title,
TableMovies.year,
)\
.order_by(TableMovies.sortTitle)
if len(radarrId) != 0:
stmt = stmt.where(TableMovies.radarrId.in_(radarrId))
if length > 0:
stmt = stmt.limit(length).offset(start)
results = [postprocess({
'alternativeTitles': x.alternativeTitles,
'audio_language': x.audio_language,
'fanart': x.fanart,
'imdbId': x.imdbId,
'missing_subtitles': x.missing_subtitles,
'monitored': x.monitored,
'overview': x.overview,
'path': x.path,
'poster': x.poster,
'profileId': x.profileId,
'radarrId': x.radarrId,
'sceneName': x.sceneName,
'subtitles': x.subtitles,
'tags': x.tags,
'title': x.title,
'year': x.year,
}) for x in database.execute(stmt).all()]
count = database.execute(
select(func.count())
.select_from(TableMovies)) \
.scalar()
return marshal({'data': results, 'total': count}, self.get_response_model)
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('radarrid', type=int, action='append', required=False, default=[],
help='Radarr movie(s) ID')
post_request_parser.add_argument('profileid', type=str, action='append', required=False, default=[],
help='Languages profile(s) ID or "none"')
@authenticate
@api_ns_movies.doc(parser=post_request_parser)
@api_ns_movies.response(204, 'Success')
@api_ns_movies.response(401, 'Not Authenticated')
@api_ns_movies.response(404, 'Languages profile not found')
def post(self):
"""Update specific movies languages profile"""
args = self.post_request_parser.parse_args()
radarrIdList = args.get('radarrid')
profileIdList = args.get('profileid')
for idx in range(len(radarrIdList)):
radarrId = radarrIdList[idx]
profileId = profileIdList[idx]
if profileId in None_Keys:
profileId = None
else:
try:
profileId = int(profileId)
except Exception:
return 'Languages profile not found', 404
database.execute(
update(TableMovies)
.values(profileId=profileId)
.where(TableMovies.radarrId == radarrId))
list_missing_subtitles_movies(no=radarrId, send_event=False)
event_stream(type='movie', payload=radarrId)
event_stream(type='movie-wanted', payload=radarrId)
event_stream(type='badges')
return '', 204
patch_request_parser = reqparse.RequestParser()
patch_request_parser.add_argument('radarrid', type=int, required=False, help='Radarr movie ID')
patch_request_parser.add_argument('action', type=str, required=False, help='Action to perform from ["scan-disk", '
'"search-missing", "search-wanted"]')
@authenticate
@api_ns_movies.doc(parser=patch_request_parser)
@api_ns_movies.response(204, 'Success')
@api_ns_movies.response(400, 'Unknown action')
@api_ns_movies.response(401, 'Not Authenticated')
@api_ns_movies.response(500, 'Movie file not found. Path mapping issue?')
def patch(self):
"""Run actions on specific movies"""
args = self.patch_request_parser.parse_args()
radarrid = args.get('radarrid')
action = args.get('action')
if action == "scan-disk":
movies_scan_subtitles(radarrid)
return '', 204
elif action == "search-missing":
try:
movies_download_subtitles(radarrid)
except OSError:
return 'Movie file not found. Path mapping issue?', 500
else:
return '', 204
elif action == "search-wanted":
wanted_search_missing_subtitles_movies()
return '', 204
return 'Unknown action', 400
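To make the request and response shapes above concrete, here is a minimal client sketch using the requests library. The base URL, the /api prefix and the X-API-KEY header are assumptions about a typical deployment rather than anything this file defines; adjust them to your setup.

# Hypothetical usage sketch for the movies endpoints above (assumed URL and auth header)
import requests

BASE = "http://localhost:6767/api"
HEADERS = {"X-API-KEY": "your-api-key"}

# GET: list the first 25 movies, or only specific ones via radarrid[]
r = requests.get(f"{BASE}/movies", headers=HEADERS,
                 params={"start": 0, "length": 25, "radarrid[]": [1, 2]})
print(r.json()["total"])

# POST: assign languages profile 1 to movie 1
requests.post(f"{BASE}/movies", headers=HEADERS, data={"radarrid": 1, "profileid": "1"})

# PATCH: trigger a disk scan for movie 1
requests.patch(f"{BASE}/movies", headers=HEADERS, data={"radarrid": 1, "action": "scan-disk"})

Note that the POST handler walks radarrid and profileid in parallel, so the two repeated fields must be sent with matching lengths.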


@ -0,0 +1,220 @@
# coding=utf-8
import os
import logging
from flask_restx import Resource, Namespace, reqparse
from subliminal_patch.core import SUBTITLE_EXTENSIONS
from werkzeug.datastructures import FileStorage
from app.database import TableMovies, get_audio_profile_languages, get_profile_id, database, select
from utilities.path_mappings import path_mappings
from subtitles.upload import manual_upload_subtitle
from subtitles.download import generate_subtitles
from subtitles.tools.delete import delete_subtitles
from radarr.history import history_log_movie
from app.notifier import send_notifications_movie
from subtitles.indexer.movies import store_subtitles_movie
from app.event_handler import event_stream, show_message
from app.config import settings
from ..utils import authenticate
api_ns_movies_subtitles = Namespace('Movies Subtitles', description='Download, upload or delete movies subtitles')
@api_ns_movies_subtitles.route('movies/subtitles')
class MoviesSubtitles(Resource):
patch_request_parser = reqparse.RequestParser()
patch_request_parser.add_argument('radarrid', type=int, required=True, help='Movie ID')
patch_request_parser.add_argument('language', type=str, required=True, help='Language code2')
patch_request_parser.add_argument('forced', type=str, required=True, help='Forced true/false as string')
patch_request_parser.add_argument('hi', type=str, required=True, help='HI true/false as string')
@authenticate
@api_ns_movies_subtitles.doc(parser=patch_request_parser)
@api_ns_movies_subtitles.response(204, 'Success')
@api_ns_movies_subtitles.response(401, 'Not Authenticated')
@api_ns_movies_subtitles.response(404, 'Movie not found')
@api_ns_movies_subtitles.response(409, 'Unable to save subtitles file. Permission or path mapping issue?')
@api_ns_movies_subtitles.response(500, 'Custom error messages')
def patch(self):
"""Download a movie subtitles"""
args = self.patch_request_parser.parse_args()
radarrId = args.get('radarrid')
movieInfo = database.execute(
select(
TableMovies.title,
TableMovies.path,
TableMovies.sceneName,
TableMovies.audio_language)
.where(TableMovies.radarrId == radarrId)) \
.first()
if not movieInfo:
return 'Movie not found', 404
moviePath = path_mappings.path_replace_movie(movieInfo.path)
if not os.path.exists(moviePath):
return 'Movie file not found. Path mapping issue?', 500
sceneName = movieInfo.sceneName or 'None'
title = movieInfo.title
language = args.get('language')
hi = args.get('hi').capitalize()
forced = args.get('forced').capitalize()
if hi == 'True':
language_str = f'{language}:hi'
elif forced == 'True':
language_str = f'{language}:forced'
else:
language_str = language
audio_language_list = get_audio_profile_languages(movieInfo.audio_language)
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = None
try:
result = list(generate_subtitles(moviePath, [(language, hi, forced)], audio_language,
sceneName, title, 'movie', profile_id=get_profile_id(movie_id=radarrId)))
if isinstance(result, list) and len(result):
result = result[0]
if isinstance(result, tuple) and len(result):
result = result[0]
history_log_movie(1, radarrId, result)
store_subtitles_movie(result.path, moviePath)
else:
event_stream(type='movie', payload=radarrId)
show_message(f'No {language_str.upper()} subtitles found')
return '', 204
except OSError:
return 'Unable to save subtitles file. Permission or path mapping issue?', 409
else:
return '', 204
# POST: Upload Subtitles
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('radarrid', type=int, required=True, help='Movie ID')
post_request_parser.add_argument('language', type=str, required=True, help='Language code2')
post_request_parser.add_argument('forced', type=str, required=True, help='Forced true/false as string')
post_request_parser.add_argument('hi', type=str, required=True, help='HI true/false as string')
post_request_parser.add_argument('file', type=FileStorage, location='files', required=True,
help='Subtitles file as file upload object')
@authenticate
@api_ns_movies_subtitles.doc(parser=post_request_parser)
@api_ns_movies_subtitles.response(204, 'Success')
@api_ns_movies_subtitles.response(401, 'Not Authenticated')
@api_ns_movies_subtitles.response(404, 'Movie not found')
@api_ns_movies_subtitles.response(409, 'Unable to save subtitles file. Permission or path mapping issue?')
@api_ns_movies_subtitles.response(500, 'Movie file not found. Path mapping issue?')
def post(self):
"""Upload a movie subtitles"""
# TODO: Support Multiply Upload
args = self.post_request_parser.parse_args()
radarrId = args.get('radarrid')
movieInfo = database.execute(
select(TableMovies.path, TableMovies.audio_language)
.where(TableMovies.radarrId == radarrId)) \
.first()
if not movieInfo:
return 'Movie not found', 404
moviePath = path_mappings.path_replace_movie(movieInfo.path)
if not os.path.exists(moviePath):
return 'Movie file not found. Path mapping issue?', 500
audio_language = get_audio_profile_languages(movieInfo.audio_language)
if len(audio_language) and isinstance(audio_language[0], dict):
audio_language = audio_language[0]
else:
audio_language = {'name': '', 'code2': '', 'code3': ''}
language = args.get('language')
forced = args.get('forced') == 'true'
hi = args.get('hi') == 'true'
subFile = args.get('file')
_, ext = os.path.splitext(subFile.filename)
if not isinstance(ext, str) or ext.lower() not in SUBTITLE_EXTENSIONS:
raise ValueError('A subtitle of an invalid format was uploaded.')
try:
result = manual_upload_subtitle(path=moviePath,
language=language,
forced=forced,
hi=hi,
media_type='movie',
subtitle=subFile,
audio_language=audio_language)
if not result:
logging.debug(f"BAZARR unable to process subtitles for this movie: {moviePath}")
else:
if isinstance(result, tuple) and len(result):
result = result[0]
provider = "manual"
score = 120
history_log_movie(4, radarrId, result, fake_provider=provider, fake_score=score)
if not settings.general.dont_notify_manual_actions:
send_notifications_movie(radarrId, result.message)
store_subtitles_movie(result.path, moviePath)
except OSError:
return 'Unable to save subtitles file. Permission or path mapping issue?', 409
else:
return '', 204
# DELETE: Delete Subtitles
delete_request_parser = reqparse.RequestParser()
delete_request_parser.add_argument('radarrid', type=int, required=True, help='Movie ID')
delete_request_parser.add_argument('language', type=str, required=True, help='Language code2')
delete_request_parser.add_argument('forced', type=str, required=True, help='Forced true/false as string')
delete_request_parser.add_argument('hi', type=str, required=True, help='HI true/false as string')
delete_request_parser.add_argument('path', type=str, required=True, help='Path of the subtitles file')
@authenticate
@api_ns_movies_subtitles.doc(parser=delete_request_parser)
@api_ns_movies_subtitles.response(204, 'Success')
@api_ns_movies_subtitles.response(401, 'Not Authenticated')
@api_ns_movies_subtitles.response(404, 'Movie not found')
@api_ns_movies_subtitles.response(500, 'Subtitles file not found or permission issue.')
def delete(self):
"""Delete a movie subtitles"""
args = self.delete_request_parser.parse_args()
radarrId = args.get('radarrid')
movieInfo = database.execute(
select(TableMovies.path)
.where(TableMovies.radarrId == radarrId)) \
.first()
if not movieInfo:
return 'Movie not found', 404
moviePath = path_mappings.path_replace_movie(movieInfo.path)
language = args.get('language')
forced = args.get('forced')
hi = args.get('hi')
subtitlesPath = args.get('path')
subtitlesPath = path_mappings.path_replace_reverse_movie(subtitlesPath)
if delete_subtitles(media_type='movie',
language=language,
forced=forced,
hi=hi,
media_path=moviePath,
subtitles_path=subtitlesPath,
radarr_id=radarrId):
return '', 204
else:
return 'Subtitles file not found or permission issue.', 500
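A sketch of driving the download and upload endpoints above; the string booleans and the multipart field name come straight from the parsers, while the base URL and X-API-KEY header are the same deployment assumptions as in the earlier sketch.

# Hypothetical usage sketch for /api/movies/subtitles (assumed URL and auth header)
import requests

BASE = "http://localhost:6767/api"
HEADERS = {"X-API-KEY": "your-api-key"}
flags = {"radarrid": 1, "language": "en", "forced": "false", "hi": "false"}

# PATCH: search providers and download subtitles for movie 1
requests.patch(f"{BASE}/movies/subtitles", headers=HEADERS, data=flags)

# POST: upload a local subtitles file instead (multipart form)
with open("movie.en.srt", "rb") as f:
    requests.post(f"{BASE}/movies/subtitles", headers=HEADERS, data=flags,
                  files={"file": ("movie.en.srt", f)})

DELETE additionally requires the subtitles path as Bazarr stores it, since the handler reverse-maps it before deleting.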


@ -0,0 +1,84 @@
# coding=utf-8
import operator
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from functools import reduce
from app.database import get_exclusion_clause, TableMovies, database, select, func
from api.swaggerui import subtitles_language_model
from api.utils import authenticate, postprocess
api_ns_movies_wanted = Namespace('Movies Wanted', description='List movies wanted subtitles')
@api_ns_movies_wanted.route('movies/wanted')
class MoviesWanted(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('start', type=int, required=False, default=0, help='Paging start integer')
get_request_parser.add_argument('length', type=int, required=False, default=-1, help='Paging length integer')
get_request_parser.add_argument('radarrid[]', type=int, action='append', required=False, default=[],
help='Movies ID to list')
get_subtitles_language_model = api_ns_movies_wanted.model('subtitles_language_model', subtitles_language_model)
data_model = api_ns_movies_wanted.model('wanted_movies_data_model', {
'title': fields.String(),
'missing_subtitles': fields.Nested(get_subtitles_language_model),
'radarrId': fields.Integer(),
'sceneName': fields.String(),
'tags': fields.List(fields.String),
})
get_response_model = api_ns_movies_wanted.model('MovieWantedGetResponse', {
'data': fields.Nested(data_model),
'total': fields.Integer(),
})
@authenticate
@api_ns_movies_wanted.response(401, 'Not Authenticated')
@api_ns_movies_wanted.doc(parser=get_request_parser)
def get(self):
"""List movies wanted subtitles"""
args = self.get_request_parser.parse_args()
radarrid = args.get("radarrid[]")
wanted_conditions = [(TableMovies.missing_subtitles.is_not(None)),
(TableMovies.missing_subtitles != '[]')]
if len(radarrid) > 0:
wanted_conditions.append((TableMovies.radarrId.in_(radarrid)))
start = 0
length = 0
else:
start = args.get('start')
length = args.get('length')
wanted_conditions += get_exclusion_clause('movie')
wanted_condition = reduce(operator.and_, wanted_conditions)
stmt = select(TableMovies.title,
TableMovies.missing_subtitles,
TableMovies.radarrId,
TableMovies.sceneName,
TableMovies.tags) \
.where(wanted_condition)
if length > 0:
stmt = stmt.order_by(TableMovies.radarrId.desc()).limit(length).offset(start)
results = [postprocess({
'title': x.title,
'missing_subtitles': x.missing_subtitles,
'radarrId': x.radarrId,
'sceneName': x.sceneName,
'tags': x.tags,
}) for x in database.execute(stmt).all()]
count = database.execute(
select(func.count())
.select_from(TableMovies)
.where(wanted_condition)) \
.scalar()
return marshal({'data': results, 'total': count}, self.get_response_model)
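For completeness, a request sketch under the same assumptions; note from the handler above that passing radarrid[] switches off paging.

# Hypothetical usage sketch for GET /api/movies/wanted (assumed URL and auth header)
import requests

r = requests.get("http://localhost:6767/api/movies/wanted",
                 headers={"X-API-KEY": "your-api-key"},
                 params={"start": 0, "length": 10})
for movie in r.json()["data"]:
    print(movie["title"], movie["missing_subtitles"])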


@ -0,0 +1,12 @@
# coding=utf-8
from .providers import api_ns_providers
from .providers_episodes import api_ns_providers_episodes
from .providers_movies import api_ns_providers_movies
api_ns_list_providers = [
api_ns_providers,
api_ns_providers_episodes,
api_ns_providers_movies,
]


@ -0,0 +1,82 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from operator import itemgetter
from app.database import TableHistory, TableHistoryMovie, database, select
from app.get_providers import list_throttled_providers, reset_throttled_providers
from ..utils import authenticate, False_Keys
api_ns_providers = Namespace('Providers', description='Get and reset providers status')
@api_ns_providers.route('providers')
class Providers(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('history', type=str, required=False, help='Set to "true" to list providers seen in history')
get_response_model = api_ns_providers.model('ProvidersGetResponse', {
'name': fields.String(),
'status': fields.String(),
'retry': fields.String(),
})
@authenticate
@api_ns_providers.response(200, 'Success')
@api_ns_providers.response(401, 'Not Authenticated')
@api_ns_providers.doc(parser=get_request_parser)
def get(self):
"""Get providers status"""
args = self.get_request_parser.parse_args()
history = args.get('history')
if history and history not in False_Keys:
providers = database.execute(
select(TableHistory.provider)
.where(TableHistory.provider.is_not(None), TableHistory.provider != "manual")
.distinct())\
.all()
providers += database.execute(
select(TableHistoryMovie.provider)
.where(TableHistoryMovie.provider.is_not(None), TableHistoryMovie.provider != "manual")
.distinct())\
.all()
providers_list = [x.provider for x in providers]
providers_dicts = []
for provider in providers_list:
if provider not in [x['name'] for x in providers_dicts]:
providers_dicts.append({
'name': provider,
'status': 'History',
'retry': '-'
})
else:
throttled_providers = list_throttled_providers()
providers_dicts = list()
for provider in throttled_providers:
providers_dicts.append({
"name": provider[0],
"status": provider[1] if provider[1] is not None else "Good",
"retry": provider[2] if provider[2] != "now" else "-"
})
return marshal(sorted(providers_dicts, key=itemgetter('name')), self.get_response_model, envelope='data')
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('action', type=str, required=True, help='Action to perform from ["reset"]')
@authenticate
@api_ns_providers.doc(parser=post_request_parser)
@api_ns_providers.response(204, 'Success')
@api_ns_providers.response(401, 'Not Authenticated')
@api_ns_providers.response(400, 'Unknown action')
def post(self):
"""Reset providers status"""
args = self.post_request_parser.parse_args()
action = args.get('action')
if action == 'reset':
reset_throttled_providers()
return '', 204
return 'Unknown action', 400
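A sketch of both calls under the same assumptions: without the history flag the GET returns the current throttling snapshot, with it the providers seen in history.

# Hypothetical usage sketch for /api/providers (assumed URL and auth header)
import requests

BASE = "http://localhost:6767/api"
HEADERS = {"X-API-KEY": "your-api-key"}

for p in requests.get(f"{BASE}/providers", headers=HEADERS).json()["data"]:
    print(p["name"], p["status"], p["retry"])

# clear throttling on all providers
requests.post(f"{BASE}/providers", headers=HEADERS, data={"action": "reset"})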


@ -0,0 +1,159 @@
# coding=utf-8
import os
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from app.database import TableEpisodes, TableShows, get_audio_profile_languages, get_profile_id, database, select
from utilities.path_mappings import path_mappings
from app.get_providers import get_providers
from subtitles.manual import manual_search, manual_download_subtitle
from sonarr.history import history_log
from app.config import settings
from app.notifier import send_notifications
from subtitles.indexer.series import store_subtitles, list_missing_subtitles
from subtitles.processing import ProcessSubtitlesResult
from ..utils import authenticate
api_ns_providers_episodes = Namespace('Providers Episodes', description='List and download episodes subtitles manually')
@api_ns_providers_episodes.route('providers/episodes')
class ProviderEpisodes(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('episodeid', type=int, required=True, help='Episode ID')
get_response_model = api_ns_providers_episodes.model('ProviderEpisodesGetResponse', {
'dont_matches': fields.List(fields.String),
'forced': fields.String(),
'hearing_impaired': fields.String(),
'language': fields.String(),
'matches': fields.List(fields.String),
'original_format': fields.String(),
'orig_score': fields.Integer(),
'provider': fields.String(),
'release_info': fields.List(fields.String),
'score': fields.Integer(),
'score_without_hash': fields.Integer(),
'subtitle': fields.String(),
'uploader': fields.String(),
'url': fields.String(),
})
@authenticate
@api_ns_providers_episodes.response(401, 'Not Authenticated')
@api_ns_providers_episodes.response(404, 'Episode not found')
@api_ns_providers_episodes.response(500, 'Custom error messages')
@api_ns_providers_episodes.doc(parser=get_request_parser)
def get(self):
"""Search manually for an episode subtitles"""
args = self.get_request_parser.parse_args()
sonarrEpisodeId = args.get('episodeid')
stmt = select(TableEpisodes.path,
TableEpisodes.sceneName,
TableShows.title,
TableShows.profileId,
TableEpisodes.subtitles,
TableEpisodes.missing_subtitles) \
.select_from(TableEpisodes) \
.join(TableShows) \
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)
episodeInfo = database.execute(stmt).first()
if not episodeInfo:
return 'Episode not found', 404
elif episodeInfo.subtitles is None:
# subtitles indexing for this episode is incomplete, we'll do it again
store_subtitles(episodeInfo.path, path_mappings.path_replace(episodeInfo.path))
episodeInfo = database.execute(stmt).first()
elif episodeInfo.missing_subtitles is None:
# missing subtitles calculation for this episode is incomplete, we'll do it again
list_missing_subtitles(epno=sonarrEpisodeId)
episodeInfo = database.execute(stmt).first()
title = episodeInfo.title
episodePath = path_mappings.path_replace(episodeInfo.path)
if not os.path.exists(episodePath):
return 'Episode file not found. Path mapping issue?', 500
sceneName = episodeInfo.sceneName or "None"
profileId = episodeInfo.profileId
providers_list = get_providers()
data = manual_search(episodePath, profileId, providers_list, sceneName, title, 'series')
if isinstance(data, str):
return data, 500
return marshal(data, self.get_response_model, envelope='data')
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('seriesid', type=int, required=True, help='Series ID')
post_request_parser.add_argument('episodeid', type=int, required=True, help='Episode ID')
post_request_parser.add_argument('hi', type=str, required=True, help='HI subtitles from ["True", "False"]')
post_request_parser.add_argument('forced', type=str, required=True, help='Forced subtitles from ["True", "False"]')
post_request_parser.add_argument('original_format', type=str, required=True,
help='Use original subtitles format from ["True", "False"]')
post_request_parser.add_argument('provider', type=str, required=True, help='Provider name')
post_request_parser.add_argument('subtitle', type=str, required=True, help='Pickled subtitles as returned by GET')
@authenticate
@api_ns_providers_episodes.doc(parser=post_request_parser)
@api_ns_providers_episodes.response(204, 'Success')
@api_ns_providers_episodes.response(401, 'Not Authenticated')
@api_ns_providers_episodes.response(404, 'Episode not found')
@api_ns_providers_episodes.response(500, 'Custom error messages')
def post(self):
"""Manually download an episode subtitles"""
args = self.post_request_parser.parse_args()
sonarrSeriesId = args.get('seriesid')
sonarrEpisodeId = args.get('episodeid')
episodeInfo = database.execute(
select(
TableEpisodes.audio_language,
TableEpisodes.path,
TableEpisodes.sceneName,
TableShows.title)
.select_from(TableEpisodes)
.join(TableShows)
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)) \
.first()
if not episodeInfo:
return 'Episode not found', 404
title = episodeInfo.title
episodePath = path_mappings.path_replace(episodeInfo.path)
sceneName = episodeInfo.sceneName or "None"
hi = args.get('hi').capitalize()
forced = args.get('forced').capitalize()
use_original_format = args.get('original_format').capitalize()
selected_provider = args.get('provider')
subtitle = args.get('subtitle')
audio_language_list = get_audio_profile_languages(episodeInfo.audio_language)
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = 'None'
try:
result = manual_download_subtitle(episodePath, audio_language, hi, forced, subtitle, selected_provider,
sceneName, title, 'series', use_original_format,
profile_id=get_profile_id(episode_id=sonarrEpisodeId))
except OSError:
return 'Unable to save subtitles file', 500
else:
if isinstance(result, tuple) and len(result):
result = result[0]
if isinstance(result, ProcessSubtitlesResult):
history_log(2, sonarrSeriesId, sonarrEpisodeId, result)
if not settings.general.dont_notify_manual_actions:
send_notifications(sonarrSeriesId, sonarrEpisodeId, result.message)
store_subtitles(result.path, episodePath)
elif isinstance(result, str):
return result, 500
else:
return '', 204
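The two methods above are meant to be chained: GET returns candidates whose subtitle field is an opaque pickled payload, and POST sends that payload back to download the chosen one. A sketch under the usual URL and auth-header assumptions:

# Hypothetical usage sketch for /api/providers/episodes (assumed URL and auth header)
import requests

BASE = "http://localhost:6767/api"
HEADERS = {"X-API-KEY": "your-api-key"}

candidates = requests.get(f"{BASE}/providers/episodes", headers=HEADERS,
                          params={"episodeid": 123}).json()["data"]
best = candidates[0]  # pick whichever candidate suits you
requests.post(f"{BASE}/providers/episodes", headers=HEADERS,
              data={"seriesid": 45, "episodeid": 123,
                    "hi": "False", "forced": "False", "original_format": "False",
                    "provider": best["provider"], "subtitle": best["subtitle"]})

The movies variant in the next file works the same way through providers/movies, keyed on radarrid instead of seriesid and episodeid.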


@ -0,0 +1,153 @@
# coding=utf-8
import os
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from app.database import TableMovies, get_audio_profile_languages, get_profile_id, database, select
from utilities.path_mappings import path_mappings
from app.get_providers import get_providers
from subtitles.manual import manual_search, manual_download_subtitle
from radarr.history import history_log_movie
from app.config import settings
from app.notifier import send_notifications_movie
from subtitles.indexer.movies import store_subtitles_movie, list_missing_subtitles_movies
from subtitles.processing import ProcessSubtitlesResult
from ..utils import authenticate
api_ns_providers_movies = Namespace('Providers Movies', description='List and download movies subtitles manually')
@api_ns_providers_movies.route('providers/movies')
class ProviderMovies(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('radarrid', type=int, required=True, help='Movie ID')
get_response_model = api_ns_providers_movies.model('ProviderMoviesGetResponse', {
'dont_matches': fields.List(fields.String),
'forced': fields.String(),
'hearing_impaired': fields.String(),
'language': fields.String(),
'matches': fields.List(fields.String),
'original_format': fields.String(),
'orig_score': fields.Integer(),
'provider': fields.String(),
'release_info': fields.List(fields.String),
'score': fields.Integer(),
'score_without_hash': fields.Integer(),
'subtitle': fields.String(),
'uploader': fields.String(),
'url': fields.String(),
})
@authenticate
@api_ns_providers_movies.response(401, 'Not Authenticated')
@api_ns_providers_movies.response(404, 'Movie not found')
@api_ns_providers_movies.response(500, 'Custom error messages')
@api_ns_providers_movies.doc(parser=get_request_parser)
def get(self):
"""Search manually for a movie subtitles"""
args = self.get_request_parser.parse_args()
radarrId = args.get('radarrid')
stmt = select(TableMovies.title,
TableMovies.path,
TableMovies.sceneName,
TableMovies.profileId,
TableMovies.subtitles,
TableMovies.missing_subtitles) \
.where(TableMovies.radarrId == radarrId)
movieInfo = database.execute(stmt).first()
if not movieInfo:
return 'Movie not found', 404
elif movieInfo.subtitles is None:
# subtitles indexing for this movie is incomplete, we'll do it again
store_subtitles_movie(movieInfo.path, path_mappings.path_replace_movie(movieInfo.path))
movieInfo = database.execute(stmt).first()
elif movieInfo.missing_subtitles is None:
# missing subtitles calculation for this movie is incomplete, we'll do it again
list_missing_subtitles_movies(no=radarrId)
movieInfo = database.execute(stmt).first()
title = movieInfo.title
moviePath = path_mappings.path_replace_movie(movieInfo.path)
if not os.path.exists(moviePath):
return 'Movie file not found. Path mapping issue?', 500
sceneName = movieInfo.sceneName or "None"
profileId = movieInfo.profileId
providers_list = get_providers()
data = manual_search(moviePath, profileId, providers_list, sceneName, title, 'movie')
if isinstance(data, str):
return data, 500
return marshal(data, self.get_response_model, envelope='data')
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('radarrid', type=int, required=True, help='Movie ID')
post_request_parser.add_argument('hi', type=str, required=True, help='HI subtitles from ["True", "False"]')
post_request_parser.add_argument('forced', type=str, required=True, help='Forced subtitles from ["True", "False"]')
post_request_parser.add_argument('original_format', type=str, required=True,
help='Use original subtitles format from ["True", "False"]')
post_request_parser.add_argument('provider', type=str, required=True, help='Provider name')
post_request_parser.add_argument('subtitle', type=str, required=True, help='Pickled subtitles as returned by GET')
@authenticate
@api_ns_providers_movies.doc(parser=post_request_parser)
@api_ns_providers_movies.response(204, 'Success')
@api_ns_providers_movies.response(401, 'Not Authenticated')
@api_ns_providers_movies.response(404, 'Movie not found')
@api_ns_providers_movies.response(500, 'Custom error messages')
def post(self):
"""Manually download a movie subtitles"""
args = self.post_request_parser.parse_args()
radarrId = args.get('radarrid')
movieInfo = database.execute(
select(TableMovies.title,
TableMovies.path,
TableMovies.sceneName,
TableMovies.audio_language)
.where(TableMovies.radarrId == radarrId)) \
.first()
if not movieInfo:
return 'Movie not found', 404
title = movieInfo.title
moviePath = path_mappings.path_replace_movie(movieInfo.path)
sceneName = movieInfo.sceneName or "None"
hi = args.get('hi').capitalize()
forced = args.get('forced').capitalize()
use_original_format = args.get('original_format').capitalize()
selected_provider = args.get('provider')
subtitle = args.get('subtitle')
audio_language_list = get_audio_profile_languages(movieInfo.audio_language)
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = 'None'
try:
result = manual_download_subtitle(moviePath, audio_language, hi, forced, subtitle, selected_provider,
sceneName, title, 'movie', use_original_format,
profile_id=get_profile_id(movie_id=radarrId))
except OSError:
return 'Unable to save subtitles file', 500
else:
if isinstance(result, tuple) and len(result):
result = result[0]
if isinstance(result, ProcessSubtitlesResult):
history_log_movie(2, radarrId, result)
if not settings.general.dont_notify_manual_actions:
send_notifications_movie(radarrId, result.message)
store_subtitles_movie(result.path, moviePath)
elif isinstance(result, str):
return result, 500
else:
return '', 204


@ -0,0 +1,8 @@
# coding=utf-8
from .series import api_ns_series
api_ns_list_series = [
api_ns_series,
]

228
bazarr/api/series/series.py Normal file

@ -0,0 +1,228 @@
# coding=utf-8
import operator
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from functools import reduce
from app.database import get_exclusion_clause, TableEpisodes, TableShows, database, select, update, func
from subtitles.indexer.series import list_missing_subtitles, series_scan_subtitles
from subtitles.mass_download import series_download_subtitles
from subtitles.wanted import wanted_search_missing_subtitles_series
from app.event_handler import event_stream
from api.swaggerui import subtitles_model, subtitles_language_model, audio_language_model
from api.utils import authenticate, None_Keys, postprocess
api_ns_series = Namespace('Series', description='List series metadata, update series languages profile or run actions '
'for specific series.')
@api_ns_series.route('series')
class Series(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('start', type=int, required=False, default=0, help='Paging start integer')
get_request_parser.add_argument('length', type=int, required=False, default=-1, help='Paging length integer')
get_request_parser.add_argument('seriesid[]', type=int, action='append', required=False, default=[],
help='Series IDs to get metadata for')
get_subtitles_model = api_ns_series.model('subtitles_model', subtitles_model)
get_subtitles_language_model = api_ns_series.model('subtitles_language_model', subtitles_language_model)
get_audio_language_model = api_ns_series.model('audio_language_model', audio_language_model)
data_model = api_ns_series.model('series_data_model', {
'alternativeTitles': fields.List(fields.String),
'audio_language': fields.Nested(get_audio_language_model),
'episodeFileCount': fields.Integer(default=0),
'ended': fields.Boolean(),
'episodeMissingCount': fields.Integer(default=0),
'fanart': fields.String(),
'imdbId': fields.String(),
'lastAired': fields.String(),
'monitored': fields.Boolean(),
'overview': fields.String(),
'path': fields.String(),
'poster': fields.String(),
'profileId': fields.Integer(),
'seriesType': fields.String(),
'sonarrSeriesId': fields.Integer(),
'tags': fields.List(fields.String),
'title': fields.String(),
'tvdbId': fields.Integer(),
'year': fields.String(),
})
get_response_model = api_ns_series.model('SeriesGetResponse', {
'data': fields.Nested(data_model),
'total': fields.Integer(),
})
@authenticate
@api_ns_series.doc(parser=get_request_parser)
@api_ns_series.response(200, 'Success')
@api_ns_series.response(401, 'Not Authenticated')
def get(self):
"""List series metadata for specific series"""
args = self.get_request_parser.parse_args()
start = args.get('start')
length = args.get('length')
seriesId = args.get('seriesid[]')
episodeFileCount = select(TableShows.sonarrSeriesId,
func.count(TableEpisodes.sonarrSeriesId).label('episodeFileCount')) \
.select_from(TableEpisodes) \
.join(TableShows) \
.group_by(TableShows.sonarrSeriesId)\
.subquery()
episodes_missing_conditions = [(TableEpisodes.missing_subtitles.is_not(None)),
(TableEpisodes.missing_subtitles != '[]')]
episodes_missing_conditions += get_exclusion_clause('series')
episodeMissingCount = select(TableShows.sonarrSeriesId,
func.count(TableEpisodes.sonarrSeriesId).label('episodeMissingCount')) \
.select_from(TableEpisodes) \
.join(TableShows) \
.where(reduce(operator.and_, episodes_missing_conditions)) \
.group_by(TableShows.sonarrSeriesId)\
.subquery()
stmt = select(TableShows.tvdbId,
TableShows.alternativeTitles,
TableShows.audio_language,
TableShows.fanart,
TableShows.imdbId,
TableShows.monitored,
TableShows.overview,
TableShows.path,
TableShows.poster,
TableShows.profileId,
TableShows.seriesType,
TableShows.sonarrSeriesId,
TableShows.tags,
TableShows.title,
TableShows.year,
TableShows.ended,
TableShows.lastAired,
episodeFileCount.c.episodeFileCount,
episodeMissingCount.c.episodeMissingCount) \
.select_from(TableShows) \
.join(episodeFileCount, TableShows.sonarrSeriesId == episodeFileCount.c.sonarrSeriesId, isouter=True) \
.join(episodeMissingCount, TableShows.sonarrSeriesId == episodeMissingCount.c.sonarrSeriesId, isouter=True)\
.order_by(TableShows.sortTitle)
if len(seriesId) != 0:
stmt = stmt.where(TableShows.sonarrSeriesId.in_(seriesId))
elif length > 0:
stmt = stmt.limit(length).offset(start)
results = [postprocess({
'tvdbId': x.tvdbId,
'alternativeTitles': x.alternativeTitles,
'audio_language': x.audio_language,
'fanart': x.fanart,
'imdbId': x.imdbId,
'monitored': x.monitored,
'overview': x.overview,
'path': x.path,
'poster': x.poster,
'profileId': x.profileId,
'seriesType': x.seriesType,
'sonarrSeriesId': x.sonarrSeriesId,
'tags': x.tags,
'title': x.title,
'year': x.year,
'ended': x.ended,
'lastAired': x.lastAired,
'episodeFileCount': x.episodeFileCount,
'episodeMissingCount': x.episodeMissingCount,
}) for x in database.execute(stmt).all()]
count = database.execute(
select(func.count())
.select_from(TableShows)) \
.scalar()
return marshal({'data': results, 'total': count}, self.get_response_model)
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('seriesid', type=int, action='append', required=False, default=[],
help='Sonarr series ID')
post_request_parser.add_argument('profileid', type=str, action='append', required=False, default=[],
help='Languages profile(s) ID or "none"')
@authenticate
@api_ns_series.doc(parser=post_request_parser)
@api_ns_series.response(204, 'Success')
@api_ns_series.response(401, 'Not Authenticated')
@api_ns_series.response(404, 'Languages profile not found')
def post(self):
"""Update specific series languages profile"""
args = self.post_request_parser.parse_args()
seriesIdList = args.get('seriesid')
profileIdList = args.get('profileid')
for idx in range(len(seriesIdList)):
seriesId = seriesIdList[idx]
profileId = profileIdList[idx]
if profileId in None_Keys:
profileId = None
else:
try:
profileId = int(profileId)
except Exception:
return 'Languages profile not found', 404
database.execute(
update(TableShows)
.values(profileId=profileId)
.where(TableShows.sonarrSeriesId == seriesId))
list_missing_subtitles(no=seriesId, send_event=False)
event_stream(type='series', payload=seriesId)
episode_id_list = database.execute(
select(TableEpisodes.sonarrEpisodeId)
.where(TableEpisodes.sonarrSeriesId == seriesId))\
.all()
for item in episode_id_list:
event_stream(type='episode-wanted', payload=item.sonarrEpisodeId)
event_stream(type='badges')
return '', 204
patch_request_parser = reqparse.RequestParser()
patch_request_parser.add_argument('seriesid', type=int, required=False, help='Sonarr series ID')
patch_request_parser.add_argument('action', type=str, required=False, help='Action to perform from ["scan-disk", '
'"search-missing", "search-wanted"]')
@authenticate
@api_ns_series.doc(parser=patch_request_parser)
@api_ns_series.response(204, 'Success')
@api_ns_series.response(400, 'Unknown action')
@api_ns_series.response(401, 'Not Authenticated')
@api_ns_series.response(500, 'Series directory not found. Path mapping issue?')
def patch(self):
"""Run actions on specific series"""
args = self.patch_request_parser.parse_args()
seriesid = args.get('seriesid')
action = args.get('action')
if action == "scan-disk":
series_scan_subtitles(seriesid)
return '', 204
elif action == "search-missing":
try:
series_download_subtitles(seriesid)
except OSError:
return 'Series directory not found. Path mapping issue?', 500
else:
return '', 204
elif action == "search-wanted":
wanted_search_missing_subtitles_series()
return '', 204
return 'Unknown action', 400
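Usage mirrors the movies namespace; here is a sketch of the POST, which pairs seriesid and profileid entries by index (same URL and auth-header assumptions as before).

# Hypothetical usage sketch for POST /api/series (assumed URL and auth header)
import requests

requests.post("http://localhost:6767/api/series",
              headers={"X-API-KEY": "your-api-key"},
              # assign profile 1 to series 10 and clear the profile of series 11
              data=[("seriesid", 10), ("seriesid", 11),
                    ("profileid", "1"), ("profileid", "none")])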


@ -0,0 +1,10 @@
# coding=utf-8
from .subtitles import api_ns_subtitles
from .subtitles_info import api_ns_subtitles_info
api_ns_list_subtitles = [
api_ns_subtitles,
api_ns_subtitles_info,
]


@ -0,0 +1,227 @@
# coding=utf-8
import os
import sys
import gc
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from app.database import TableEpisodes, TableMovies, database, select
from languages.get_languages import alpha3_from_alpha2
from utilities.path_mappings import path_mappings
from utilities.video_analyzer import subtitles_sync_references
from subtitles.tools.subsyncer import SubSyncer
from subtitles.tools.translate import translate_subtitles_file
from subtitles.tools.mods import subtitles_apply_mods
from subtitles.indexer.series import store_subtitles
from subtitles.indexer.movies import store_subtitles_movie
from app.config import settings, empty_values
from app.event_handler import event_stream
from ..utils import authenticate
api_ns_subtitles = Namespace('Subtitles', description='Apply mods/tools on external subtitles')
@api_ns_subtitles.route('subtitles')
class Subtitles(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('subtitlesPath', type=str, required=True, help='External subtitles file path')
get_request_parser.add_argument('sonarrEpisodeId', type=int, required=False, help='Sonarr Episode ID')
get_request_parser.add_argument('radarrMovieId', type=int, required=False, help='Radarr Movie ID')
audio_tracks_data_model = api_ns_subtitles.model('audio_tracks_data_model', {
'stream': fields.String(),
'name': fields.String(),
'language': fields.String(),
})
embedded_subtitles_data_model = api_ns_subtitles.model('embedded_subtitles_data_model', {
'stream': fields.String(),
'name': fields.String(),
'language': fields.String(),
'forced': fields.Boolean(),
'hearing_impaired': fields.Boolean(),
})
external_subtitles_data_model = api_ns_subtitles.model('external_subtitles_data_model', {
'name': fields.String(),
'path': fields.String(),
'language': fields.String(),
'forced': fields.Boolean(),
'hearing_impaired': fields.Boolean(),
})
get_response_model = api_ns_subtitles.model('SubtitlesGetResponse', {
'audio_tracks': fields.Nested(audio_tracks_data_model),
'embedded_subtitles_tracks': fields.Nested(embedded_subtitles_data_model),
'external_subtitles_tracks': fields.Nested(external_subtitles_data_model),
})
@authenticate
@api_ns_subtitles.response(200, 'Success')
@api_ns_subtitles.response(401, 'Not Authenticated')
@api_ns_subtitles.doc(parser=get_request_parser)
def get(self):
"""Return available audio and embedded subtitles tracks with external subtitles. Used for manual subsync
modal"""
args = self.get_request_parser.parse_args()
subtitlesPath = args.get('subtitlesPath')
episodeId = args.get('sonarrEpisodeId', None)
movieId = args.get('radarrMovieId', None)
result = subtitles_sync_references(subtitles_path=subtitlesPath, sonarr_episode_id=episodeId,
radarr_movie_id=movieId)
return marshal(result, self.get_response_model, envelope='data')
patch_request_parser = reqparse.RequestParser()
patch_request_parser.add_argument('action', type=str, required=True,
help='Action from ["sync", "translate" or mods name]')
patch_request_parser.add_argument('language', type=str, required=True, help='Language code2')
patch_request_parser.add_argument('path', type=str, required=True, help='Subtitles file path')
patch_request_parser.add_argument('type', type=str, required=True, help='Media type from ["episode", "movie"]')
patch_request_parser.add_argument('id', type=int, required=True, help='Media ID (episodeId, radarrId)')
patch_request_parser.add_argument('forced', type=str, required=False,
help='Forced subtitles from ["True", "False"]')
patch_request_parser.add_argument('hi', type=str, required=False, help='HI subtitles from ["True", "False"]')
patch_request_parser.add_argument('original_format', type=str, required=False,
help='Use original subtitles format from ["True", "False"]')
patch_request_parser.add_argument('reference', type=str, required=False,
help='Reference to use for sync from video file track number (a:0) or some '
'subtitles file path')
patch_request_parser.add_argument('max_offset_seconds', type=str, required=False,
help='Maximum offset seconds to allow')
patch_request_parser.add_argument('no_fix_framerate', type=str, required=False,
help='Don\'t try to fix framerate from ["True", "False"]')
patch_request_parser.add_argument('gss', type=str, required=False,
help='Use Golden-Section Search from ["True", "False"]')
@authenticate
@api_ns_subtitles.doc(parser=patch_request_parser)
@api_ns_subtitles.response(204, 'Success')
@api_ns_subtitles.response(401, 'Not Authenticated')
@api_ns_subtitles.response(404, 'Episode/movie not found')
@api_ns_subtitles.response(409, 'Unable to edit subtitles file. Check logs.')
@api_ns_subtitles.response(500, 'Subtitles file not found. Path mapping issue?')
def patch(self):
"""Apply mods/tools on external subtitles"""
args = self.patch_request_parser.parse_args()
action = args.get('action')
language = args.get('language')
subtitles_path = args.get('path')
media_type = args.get('type')
id = args.get('id')
forced = args.get('forced') == 'True'
hi = args.get('hi') == 'True'
if not os.path.exists(subtitles_path):
return 'Subtitles file not found. Path mapping issue?', 500
if media_type == 'episode':
metadata = database.execute(
select(TableEpisodes.path, TableEpisodes.sonarrSeriesId)
.where(TableEpisodes.sonarrEpisodeId == id)) \
.first()
if not metadata:
return 'Episode not found', 404
video_path = path_mappings.path_replace(metadata.path)
else:
metadata = database.execute(
select(TableMovies.path)
.where(TableMovies.radarrId == id))\
.first()
if not metadata:
return 'Movie not found', 404
video_path = path_mappings.path_replace_movie(metadata.path)
if action == 'sync':
sync_kwargs = {
'video_path': video_path,
'srt_path': subtitles_path,
'srt_lang': language,
'hi': hi,
'forced': forced,
'reference': args.get('reference') if args.get('reference') not in empty_values else video_path,
'max_offset_seconds': args.get('max_offset_seconds') if args.get('max_offset_seconds') not in
empty_values else str(settings.subsync.max_offset_seconds),
'no_fix_framerate': args.get('no_fix_framerate') == 'True',
'gss': args.get('gss') == 'True',
}
subsync = SubSyncer()
try:
if media_type == 'episode':
sync_kwargs['sonarr_series_id'] = metadata.sonarrSeriesId
sync_kwargs['sonarr_episode_id'] = id
else:
sync_kwargs['radarr_id'] = id
subsync.sync(**sync_kwargs)
except OSError:
return 'Unable to edit subtitles file. Check logs.', 409
finally:
del subsync
gc.collect()
elif action == 'translate':
from_language = subtitles_lang_from_filename(subtitles_path)
dest_language = language
try:
translate_subtitles_file(video_path=video_path, source_srt_file=subtitles_path,
from_lang=from_language, to_lang=dest_language, forced=forced, hi=hi,
media_type="series" if media_type == "episode" else "movies",
sonarr_series_id=metadata.sonarrSeriesId if media_type == "episode" else None,
sonarr_episode_id=id,
radarr_id=id)
except OSError:
return 'Unable to edit subtitles file. Check logs.', 409
else:
use_original_format = args.get('original_format') == 'true'  # note: lowercase 'true', unlike the 'True' flags above
try:
subtitles_apply_mods(language=language, subtitle_path=subtitles_path, mods=[action],
use_original_format=use_original_format, video_path=video_path)
except OSError:
return 'Unable to edit subtitles file. Check logs.', 409
# apply chmod if required
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
'win') and settings.general.chmod_enabled else None
if chmod:
os.chmod(subtitles_path, chmod)
if media_type == 'episode':
store_subtitles(path_mappings.path_replace_reverse(video_path), video_path)
event_stream(type='series', payload=metadata.sonarrSeriesId)
event_stream(type='episode', payload=id)
else:
store_subtitles_movie(path_mappings.path_replace_reverse_movie(video_path), video_path)
event_stream(type='movie', payload=id)
return '', 204
def subtitles_lang_from_filename(path):
# isolate the last two dot-separated tokens before the extension, e.g. "movie.en.hi.srt" -> ["movie", "en", "hi"]
split_extensionless_path = os.path.splitext(path.lower())[0].rsplit(".", 2)
if len(split_extensionless_path) < 2:
# no language suffix at all
return None
elif len(split_extensionless_path) == 2:
return_lang = split_extensionless_path[-1]
else:
first_ext = split_extensionless_path[-1]
second_ext = split_extensionless_path[-2]
if first_ext in ['hi', 'sdh', 'cc']:
# hearing-impaired markers come last; the language code sits one token earlier
if alpha3_from_alpha2(second_ext):
return_lang = second_ext
else:
# no valid language code before the marker; return the marker itself unchanged
return first_ext
else:
return_lang = first_ext
return return_lang.replace('_', '-')
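Tying the pieces together: a sync request needs only the action, the media reference and the file, while translate infers its source language through subtitles_lang_from_filename above (e.g. "movie.en.hi.srt" yields "en"). A request sketch under the usual URL and auth-header assumptions:

# Hypothetical usage sketch for PATCH /api/subtitles (assumed URL and auth header)
import requests

requests.patch("http://localhost:6767/api/subtitles",
               headers={"X-API-KEY": "your-api-key"},
               data={"action": "sync", "language": "en",
                     "path": "/media/movies/Movie (2020)/movie.en.srt",
                     "type": "movie", "id": 1,
                     "reference": "a:0",  # sync against the first audio track
                     "gss": "True"})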


@ -0,0 +1,62 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from subliminal_patch.core import guessit
from ..utils import authenticate
api_ns_subtitles_info = Namespace('Subtitles Info', description='Guess season number, episode number or language from '
'uploaded subtitles filename')
@api_ns_subtitles_info.route('subtitles/info')
class SubtitleNameInfo(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('filenames[]', type=str, required=True, action='append',
help='Subtitles filenames')
get_response_model = api_ns_subtitles_info.model('SubtitlesInfoGetResponse', {
'filename': fields.String(),
'subtitle_language': fields.String(),
'season': fields.Integer(),
'episode': fields.Integer(),
})
@authenticate
@api_ns_subtitles_info.response(200, 'Success')
@api_ns_subtitles_info.response(401, 'Not Authenticated')
@api_ns_subtitles_info.doc(parser=get_request_parser)
def get(self):
"""Guessit over subtitles filename"""
args = self.get_request_parser.parse_args()
names = args.get('filenames[]')
results = []
for name in names:
opts = dict()
opts['type'] = 'episode'
guessit_result = guessit(name, options=opts)
result = {}
result['filename'] = name
if 'subtitle_language' in guessit_result:
result['subtitle_language'] = str(guessit_result['subtitle_language'])
result['episode'] = 0
if 'episode' in guessit_result:
if isinstance(guessit_result['episode'], list):
# for multiple episodes file, choose the first episode number
if len(guessit_result['episode']):
# make sure that guessit returned a list of more than 0 items
result['episode'] = guessit_result['episode'][0]
elif isinstance(guessit_result['episode'], int):
# if single episode
result['episode'] = guessit_result['episode']
if 'season' in guessit_result:
result['season'] = guessit_result['season']
else:
result['season'] = 0
results.append(result)
return marshal(results, self.get_response_model, envelope='data')
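A request sketch under the same assumptions; several filenames[] entries can be guessed in one call.

# Hypothetical usage sketch for GET /api/subtitles/info (assumed URL and auth header)
import requests

r = requests.get("http://localhost:6767/api/subtitles/info",
                 headers={"X-API-KEY": "your-api-key"},
                 params={"filenames[]": ["Show.S02E05.en.srt", "Show.S02E06.en.srt"]})
for item in r.json()["data"]:
    print(item["filename"], item["season"], item["episode"])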

34
bazarr/api/swaggerui.py Normal file

@ -0,0 +1,34 @@
# coding=utf-8
import os
from flask_restx import fields
swaggerui_api_params = {"version": os.environ["BAZARR_VERSION"],
"description": "API docs for Bazarr",
"title": "Bazarr",
}
subtitles_model = {
"name": fields.String(),
"code2": fields.String(),
"code3": fields.String(),
"path": fields.String(),
"forced": fields.Boolean(),
"hi": fields.Boolean(),
"file_size": fields.Integer()
}
subtitles_language_model = {
"name": fields.String(),
"code2": fields.String(),
"code3": fields.String(),
"forced": fields.Boolean(),
"hi": fields.Boolean()
}
audio_language_model = {
"name": fields.String(),
"code2": fields.String(),
"code3": fields.String()
}


@ -0,0 +1,33 @@
# coding=utf-8
from .system import api_ns_system
from .searches import api_ns_system_searches
from .account import api_ns_system_account
from .announcements import api_ns_system_announcements
from .backups import api_ns_system_backups
from .tasks import api_ns_system_tasks
from .logs import api_ns_system_logs
from .status import api_ns_system_status
from .health import api_ns_system_health
from .releases import api_ns_system_releases
from .settings import api_ns_system_settings
from .languages import api_ns_system_languages
from .languages_profiles import api_ns_system_languages_profiles
from .notifications import api_ns_system_notifications
api_ns_list_system = [
api_ns_system,
api_ns_system_account,
api_ns_system_announcements,
api_ns_system_backups,
api_ns_system_health,
api_ns_system_languages,
api_ns_system_languages_profiles,
api_ns_system_logs,
api_ns_system_notifications,
api_ns_system_releases,
api_ns_system_searches,
api_ns_system_settings,
api_ns_system_status,
api_ns_system_tasks,
]


@ -0,0 +1,52 @@
# coding=utf-8
import gc
from flask import session, request
from flask_restx import Resource, Namespace, reqparse
from app.config import settings
from utilities.helper import check_credentials
api_ns_system_account = Namespace('System Account', description='Login or logout from Bazarr UI')
@api_ns_system_account.hide
@api_ns_system_account.route('system/account')
class SystemAccount(Resource):
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('action', type=str, required=True, help='Action from ["login", "logout"]')
post_request_parser.add_argument('username', type=str, required=False, help='Bazarr username')
post_request_parser.add_argument('password', type=str, required=False, help='Bazarr password')
@api_ns_system_account.doc(parser=post_request_parser)
@api_ns_system_account.response(204, 'Success')
@api_ns_system_account.response(400, 'Unknown action')
@api_ns_system_account.response(403, 'Authentication failed')
@api_ns_system_account.response(406, 'Browser must be closed to invalidate basic authentication')
@api_ns_system_account.response(500, 'Unknown authentication type defined in config')
def post(self):
"""Login or logout from Bazarr UI when using form login"""
args = self.post_request_parser.parse_args()
if settings.auth.type != 'form':
return 'Unknown authentication type defined in config', 500
action = args.get('action')
if action == 'login':
username = args.get('username')
password = args.get('password')
if check_credentials(username, password, request):
session['logged_in'] = True
return '', 204
else:
session['logged_in'] = False
return 'Authentication failed', 403
elif action == 'logout':
if settings.auth.type == 'basic':
return 'Browser must be closed to invalidate basic authentication', 406
else:
session.clear()
gc.collect()
return '', 204
return 'Unknown action', 400
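Because form login stores its result in the server-side session, a client must keep the session cookie across calls; a sketch using requests.Session, with placeholder credentials and the usual base-URL assumption (this endpoint takes no API key and only works when the auth type is form):

# Hypothetical usage sketch for POST /api/system/account (assumed URL; form auth enabled)
import requests

s = requests.Session()  # keeps the session cookie issued on login
r = s.post("http://localhost:6767/api/system/account",
           data={"action": "login", "username": "admin", "password": "secret"})
print(r.status_code)  # 204 on success, 403 on bad credentials

s.post("http://localhost:6767/api/system/account", data={"action": "logout"})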


@ -0,0 +1,35 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse
from app.announcements import get_all_announcements, mark_announcement_as_dismissed
from ..utils import authenticate
api_ns_system_announcements = Namespace('System Announcements', description='List announcements relative to Bazarr')
@api_ns_system_announcements.route('system/announcements')
class SystemAnnouncements(Resource):
@authenticate
@api_ns_system_announcements.doc(parser=None)
@api_ns_system_announcements.response(200, 'Success')
@api_ns_system_announcements.response(401, 'Not Authenticated')
def get(self):
"""List announcements relative to Bazarr"""
return {'data': get_all_announcements()}
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('hash', type=str, required=True, help='Hash of the announcement to dismiss')
@authenticate
@api_ns_system_announcements.doc(parser=post_request_parser)
@api_ns_system_announcements.response(204, 'Success')
@api_ns_system_announcements.response(401, 'Not Authenticated')
def post(self):
"""Mark announcement as dismissed"""
args = self.post_request_parser.parse_args()
hashed_announcement = args.get('hash')
mark_announcement_as_dismissed(hashed_announcement=hashed_announcement)
return '', 204


@ -0,0 +1,73 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from utilities.backup import get_backup_files, prepare_restore, delete_backup_file, backup_to_zip
from ..utils import authenticate
api_ns_system_backups = Namespace('System Backups', description='List, create, restore or delete backups')
@api_ns_system_backups.route('system/backups')
class SystemBackups(Resource):
get_response_model = api_ns_system_backups.model('SystemBackupsGetResponse', {
'date': fields.String(),
'filename': fields.String(),
'size': fields.String(),
'type': fields.String(),
})
@authenticate
@api_ns_system_backups.doc(parser=None)
@api_ns_system_backups.response(200, 'Success')
@api_ns_system_backups.response(401, 'Not Authenticated')
def get(self):
"""List backup files"""
backups = get_backup_files(fullpath=False)
return marshal(backups, self.get_response_model, envelope='data')
@authenticate
@api_ns_system_backups.doc(parser=None)
@api_ns_system_backups.response(204, 'Success')
@api_ns_system_backups.response(401, 'Not Authenticated')
def post(self):
"""Create a new backup"""
backup_to_zip()
return '', 204
patch_request_parser = reqparse.RequestParser()
patch_request_parser.add_argument('filename', type=str, required=True, help='Filename of the backup to restore')
@authenticate
@api_ns_system_backups.doc(parser=patch_request_parser)
@api_ns_system_backups.response(204, 'Success')
@api_ns_system_backups.response(400, 'Filename not provided')
@api_ns_system_backups.response(401, 'Not Authenticated')
def patch(self):
"""Restore a backup file"""
args = self.patch_request_parser.parse_args()
filename = args.get('filename')
if filename:
restored = prepare_restore(filename)
if restored:
return '', 204
return 'Filename not provided', 400
delete_request_parser = reqparse.RequestParser()
delete_request_parser.add_argument('filename', type=str, required=True, help='Filename of the backup to delete')
@authenticate
@api_ns_system_backups.doc(parser=delete_request_parser)
@api_ns_system_backups.response(204, 'Success')
@api_ns_system_backups.response(400, 'Filename not provided')
@api_ns_system_backups.response(401, 'Not Authenticated')
def delete(self):
"""Delete a backup file"""
args = self.delete_request_parser.parse_args()
filename = args.get('filename')
if filename:
deleted = delete_backup_file(filename)
if deleted:
return '', 204
return 'Filename not provided', 400
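A sketch of the full cycle, create, list, then restore by filename, under the usual URL and auth-header assumptions; filenames come from the GET response.

# Hypothetical usage sketch for /api/system/backups (assumed URL and auth header)
import requests

BASE = "http://localhost:6767/api"
HEADERS = {"X-API-KEY": "your-api-key"}

requests.post(f"{BASE}/system/backups", headers=HEADERS)  # create a new backup
backups = requests.get(f"{BASE}/system/backups", headers=HEADERS).json()["data"]
chosen = backups[0]["filename"]  # pick whichever backup you want to restore
requests.patch(f"{BASE}/system/backups", headers=HEADERS, data={"filename": chosen})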


@ -0,0 +1,20 @@
# coding=utf-8
from flask_restx import Resource, Namespace
from utilities.health import get_health_issues
from ..utils import authenticate
api_ns_system_health = Namespace('System Health', description='List health issues')
@api_ns_system_health.route('system/health')
class SystemHealth(Resource):
@authenticate
@api_ns_system_health.doc(parser=None)
@api_ns_system_health.response(200, 'Success')
@api_ns_system_health.response(401, 'Not Authenticated')
def get(self):
"""List health issues"""
return {'data': get_health_issues()}


@ -0,0 +1,72 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse
from operator import itemgetter
from app.database import TableHistory, TableHistoryMovie, TableSettingsLanguages, database, select
from languages.get_languages import alpha2_from_alpha3, language_from_alpha2, alpha3_from_alpha2
from ..utils import authenticate, False_Keys
api_ns_system_languages = Namespace('System Languages', description='Get languages list')
@api_ns_system_languages.route('system/languages')
class Languages(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('history', type=str, required=False, help='Set to "true" to list languages seen in history')
@authenticate
@api_ns_system_languages.doc(parser=get_request_parser)
@api_ns_system_languages.response(200, 'Success')
@api_ns_system_languages.response(401, 'Not Authenticated')
def get(self):
"""List languages for history filter or for language filter menu"""
args = self.get_request_parser.parse_args()
history = args.get('history')
if history and history not in False_Keys:
languages = database.execute(
select(TableHistory.language)
.where(TableHistory.language.is_not(None)))\
.all()
languages += database.execute(
select(TableHistoryMovie.language)
.where(TableHistoryMovie.language.is_not(None)))\
.all()
languages_list = [lang.language.split(':')[0] for lang in languages]
languages_dicts = []
for language in languages_list:
code2 = None
if len(language) == 2:
code2 = language
elif len(language) == 3:
code2 = alpha2_from_alpha3(language)
else:
continue
if not any(x['code2'] == code2 for x in languages_dicts):
try:
languages_dicts.append({
'code2': code2,
'code3': alpha3_from_alpha2(code2),
'name': language_from_alpha2(code2),
# Compatibility: Use false temporarily
'enabled': False
})
except Exception:
continue
else:
languages_dicts = [{
'name': x.name,
'code2': x.code2,
'code3': x.code3,
'enabled': x.enabled == 1
} for x in database.execute(
select(TableSettingsLanguages.name,
TableSettingsLanguages.code2,
TableSettingsLanguages.code3,
TableSettingsLanguages.enabled)
.order_by(TableSettingsLanguages.name))
.all()]
return sorted(languages_dicts, key=itemgetter('name'))
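A sketch of both modes under the usual assumptions: with history set, the endpoint aggregates languages from history rows; without it, it returns the configured languages table as a plain list.

# Hypothetical usage sketch for GET /api/system/languages (assumed URL and auth header)
import requests

BASE = "http://localhost:6767/api"
HEADERS = {"X-API-KEY": "your-api-key"}

configured = requests.get(f"{BASE}/system/languages", headers=HEADERS).json()
enabled = [lang["name"] for lang in configured if lang["enabled"]]
seen_in_history = requests.get(f"{BASE}/system/languages", headers=HEADERS,
                               params={"history": "true"}).json()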


@ -0,0 +1,20 @@
# coding=utf-8
from flask_restx import Resource, Namespace
from app.database import get_profiles_list
from ..utils import authenticate
api_ns_system_languages_profiles = Namespace('System Languages Profiles', description='List languages profiles')
@api_ns_system_languages_profiles.route('system/languages/profiles')
class LanguagesProfiles(Resource):
@authenticate
@api_ns_system_languages_profiles.doc(parser=None)
@api_ns_system_languages_profiles.response(200, 'Success')
@api_ns_system_languages_profiles.response(401, 'Not Authenticated')
def get(self):
"""List languages profiles"""
return get_profiles_list()

112
bazarr/api/system/logs.py Normal file

@ -0,0 +1,112 @@
# coding=utf-8
import io
import re
from flask_restx import Resource, Namespace, fields, marshal
from app.config import settings
from app.logger import empty_log
from utilities.central import get_log_file_path
from ..utils import authenticate
api_ns_system_logs = Namespace('System Logs', description='List log file entries or empty log file')
@api_ns_system_logs.route('system/logs')
class SystemLogs(Resource):
get_response_model = api_ns_system_logs.model('SystemLogsGetResponse', {
'timestamp': fields.String(),
'type': fields.String(),
'message': fields.String(),
'exception': fields.String(),
})
@authenticate
@api_ns_system_logs.doc(parser=None)
@api_ns_system_logs.response(200, 'Success')
@api_ns_system_logs.response(401, 'Not Authenticated')
def get(self):
"""List log entries"""
logs = []
include = str(settings.log.include_filter)
exclude = str(settings.log.exclude_filter)
ignore_case = settings.log.ignore_case
regex = settings.log.use_regex
if regex:
# pre-compile regular expressions for better performance
if ignore_case:
flags = re.IGNORECASE
else:
flags = 0
if len(include) > 0:
try:
include_compiled = re.compile(include, flags)
except Exception:
include_compiled = None
if len(exclude) > 0:
try:
exclude_compiled = re.compile(exclude, flags)
except Exception:
exclude_compiled = None
elif ignore_case:
include = include.casefold()
exclude = exclude.casefold()
with io.open(get_log_file_path(), encoding='UTF-8') as file:
raw_lines = file.read()
lines = raw_lines.split('|\n')
for line in lines:
if line == '':
continue
if ignore_case and not regex:
compare_line = line.casefold()
else:
compare_line = line
if len(include) > 0:
if regex:
if include_compiled is None:
# if invalid re, keep the line
keep = True
else:
keep = include_compiled.search(compare_line)
else:
keep = include in compare_line
if not keep:
continue
if len(exclude) > 0:
if regex:
if exclude_compiled is None:
# if invalid re, keep the line
skip = False
else:
skip = exclude_compiled.search(compare_line)
else:
skip = exclude in compare_line
if skip:
continue
raw_message = line.split('|')
raw_message_len = len(raw_message)
if raw_message_len > 3:
log = dict()
log["timestamp"] = raw_message[0]
log["type"] = raw_message[1].rstrip()
log["message"] = raw_message[3]
if raw_message_len > 4 and raw_message[4] != '\n':
log['exception'] = raw_message[4].strip('\'').replace(' ', '\u2003\u2003')
else:
log['exception'] = None
logs.append(log)
logs.reverse()
return marshal(logs, self.get_response_model, envelope='data')
@authenticate
@api_ns_system_logs.doc(parser=None)
@api_ns_system_logs.response(204, 'Success')
@api_ns_system_logs.response(401, 'Not Authenticated')
def delete(self):
"""Force log rotation and create a new log file"""
empty_log()
return '', 204
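
The parser above implies a pipe-delimited log layout. A small standalone sketch, with an illustrative line rather than one from a real log file:

```python
# Field positions assumed from the split('|') logic in SystemLogs.get().
sample = "19/09/2025 11:54:55|INFO    |root|Bazarr is started.||"

fields = sample.split('|')
entry = {
    "timestamp": fields[0],          # raw_message[0]
    "type": fields[1].rstrip(),      # raw_message[1], padding stripped
    # fields[2] (the logger name) is parsed but not exposed by the endpoint
    "message": fields[3],            # raw_message[3]
    "exception": fields[4] or None,  # raw_message[4], when present
}
print(entry)
```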

View File

@ -0,0 +1,38 @@
# coding=utf-8
import apprise
from flask_restx import Resource, Namespace, reqparse
from ..utils import authenticate
api_ns_system_notifications = Namespace('System Notifications', description='Send a test message through a notifications provider')
@api_ns_system_notifications.hide
@api_ns_system_notifications.route('system/notifications')
class Notifications(Resource):
patch_request_parser = reqparse.RequestParser()
patch_request_parser.add_argument('url', type=str, required=True, help='Notifications provider URL')
@authenticate
@api_ns_system_notifications.doc(parser=patch_request_parser)
@api_ns_system_notifications.response(204, 'Success')
@api_ns_system_notifications.response(401, 'Not Authenticated')
def patch(self):
"""Test a notifications provider URL"""
args = self.patch_request_parser.parse_args()
url = args.get("url")
asset = apprise.AppriseAsset(async_mode=False)
apobj = apprise.Apprise(asset=asset)
apobj.add(url)
apobj.notify(
title='Bazarr test notification',
body='Test notification'
)
return '', 204
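
A hypothetical client call for this test endpoint; the Apprise URL below uses the real `discord://` scheme but made-up credentials:

```python
import requests

r = requests.patch("http://127.0.0.1:6767/api/system/notifications",   # assumed address
                   data={"url": "discord://webhook_id/webhook_token"},  # illustrative Apprise URL
                   headers={"X-API-KEY": "your-api-key"})               # assumed key
r.raise_for_status()  # 204 on success
```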

View File

@ -0,0 +1,63 @@
# coding=utf-8
import io
import json
import os
import logging
from flask_restx import Resource, Namespace, fields, marshal
from app.config import settings
from app.get_args import args
from ..utils import authenticate
api_ns_system_releases = Namespace('System Releases', description='List Bazarr releases from Github')
@api_ns_system_releases.route('system/releases')
class SystemReleases(Resource):
get_response_model = api_ns_system_releases.model('SystemReleasesGetResponse', {
'body': fields.List(fields.String),
'name': fields.String(),
'date': fields.String(),
'prerelease': fields.Boolean(),
'current': fields.Boolean(),
})
@authenticate
@api_ns_system_releases.doc(parser=None)
@api_ns_system_releases.response(200, 'Success')
@api_ns_system_releases.response(401, 'Not Authenticated')
def get(self):
"""Get Bazarr releases"""
filtered_releases = []
try:
with io.open(os.path.join(args.config_dir, 'config', 'releases.txt'), 'r', encoding='UTF-8') as f:
releases = json.loads(f.read())
for release in releases:
if settings.general.branch == 'master' and not release['prerelease']:
filtered_releases.append(release)
elif settings.general.branch != 'master' and any(not x['prerelease'] for x in filtered_releases):
# development branch: stop once the most recent stable release has been collected
continue
elif settings.general.branch != 'master':
filtered_releases.append(release)
if settings.general.branch == 'master':
filtered_releases = filtered_releases[:5]
current_version = os.environ["BAZARR_VERSION"]
for i, release in enumerate(filtered_releases):
body = release['body'].replace('- ', '').split('\n')[1:]
filtered_releases[i] = {"body": body,
"name": release['name'],
"date": release['date'][:10],
"prerelease": release['prerelease'],
"current": release['name'].lstrip('v') == current_version}
except Exception:
logging.exception(
f'BAZARR cannot parse releases caching file: '
f'{os.path.join(args.config_dir, "config", "releases.txt")}')
return marshal(filtered_releases, self.get_response_model, envelope='data')

View File

@ -0,0 +1,73 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse
from unidecode import unidecode
from app.config import base_url, settings
from app.database import TableShows, TableMovies, database, select
from ..utils import authenticate
import textdistance
api_ns_system_searches = Namespace('System Searches', description='Search for series or movies by name')
@api_ns_system_searches.route('system/searches')
class Searches(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('query', type=str, required=True, help='Series or movie name to search for')
@authenticate
@api_ns_system_searches.doc(parser=get_request_parser)
@api_ns_system_searches.response(200, 'Success')
@api_ns_system_searches.response(401, 'Not Authenticated')
def get(self):
"""List results from query"""
args = self.get_request_parser.parse_args()
query = unidecode(args.get('query')).lower()
search_list = []
if query:
if settings.general.use_sonarr:
# Get matching series
search_list += database.execute(
select(TableShows.title,
TableShows.sonarrSeriesId,
TableShows.poster,
TableShows.year)
.order_by(TableShows.title)) \
.all()
if settings.general.use_radarr:
# Get matching movies
search_list += database.execute(
select(TableMovies.title,
TableMovies.radarrId,
TableMovies.poster,
TableMovies.year)
.order_by(TableMovies.title)) \
.all()
results = []
for x in search_list:
if query in unidecode(x.title).lower():
result = {
'title': x.title,
'year': x.year,
}
if hasattr(x, 'sonarrSeriesId'):
result['sonarrSeriesId'] = x.sonarrSeriesId
result['poster'] = f"{base_url}/images/series{x.poster}" if x.poster else None
else:
result['radarrId'] = x.radarrId
result['poster'] = f"{base_url}/images/movies{x.poster}" if x.poster else None
results.append(result)
# sort results by how closely they match the query
results = sorted(results, key=lambda x: textdistance.hamming.distance(query, x['title']))
return results
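
An illustration of the ranking step in isolation: plain Hamming distance between the (lowercased, unidecoded) query and each raw title, shortest first. Note the titles themselves are not normalized, so case differences count. Titles below are made up.

```python
import textdistance

titles = ["Dune", "June", "Dune: Part Two"]
query = "dune"
print(sorted(titles, key=lambda t: textdistance.hamming.distance(query, t)))
# ['Dune', 'June', 'Dune: Part Two'] -- one mismatched position each for the
# first two, while the longer title also pays for every extra character.
```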

View File

@ -0,0 +1,128 @@
# coding=utf-8
import json
from flask import request, jsonify
from flask_restx import Resource, Namespace
from dynaconf.validator import ValidationError
from api.utils import None_Keys
from app.database import TableLanguagesProfiles, TableSettingsLanguages, TableSettingsNotifier, \
update_profile_id_list, database, insert, update, delete, select
from app.event_handler import event_stream
from app.config import settings, save_settings, get_settings
from app.scheduler import scheduler
from subtitles.indexer.series import list_missing_subtitles
from subtitles.indexer.movies import list_missing_subtitles_movies
from ..utils import authenticate
api_ns_system_settings = Namespace('systemSettings', description='System settings API endpoint')
@api_ns_system_settings.hide
@api_ns_system_settings.route('system/settings')
class SystemSettings(Resource):
@authenticate
def get(self):
data = get_settings()
data['notifications'] = dict()
data['notifications']['providers'] = [{
'name': x.name,
'enabled': x.enabled == 1,
'url': x.url
} for x in database.execute(
select(TableSettingsNotifier.name,
TableSettingsNotifier.enabled,
TableSettingsNotifier.url)
.order_by(TableSettingsNotifier.name))
.all()]
return jsonify(data)
@authenticate
def post(self):
enabled_languages = request.form.getlist('languages-enabled')
if len(enabled_languages) != 0:
database.execute(
update(TableSettingsLanguages)
.values(enabled=0))
for code in enabled_languages:
database.execute(
update(TableSettingsLanguages)
.values(enabled=1)
.where(TableSettingsLanguages.code2 == code))
event_stream("languages")
languages_profiles = request.form.get('languages-profiles')
if languages_profiles:
existing_ids = database.execute(
select(TableLanguagesProfiles.profileId))\
.all()
existing = [x.profileId for x in existing_ids]
for item in json.loads(languages_profiles):
if item['profileId'] in existing:
# Update existing profiles
database.execute(
update(TableLanguagesProfiles)
.values(
name=item['name'],
cutoff=item['cutoff'] if item['cutoff'] not in None_Keys else None,
items=json.dumps(item['items']),
mustContain=str(item['mustContain']),
mustNotContain=str(item['mustNotContain']),
originalFormat=int(item['originalFormat']) if item['originalFormat'] not in None_Keys else
None,
tag=item['tag'] if 'tag' in item else None,
)
.where(TableLanguagesProfiles.profileId == item['profileId']))
existing.remove(item['profileId'])
else:
# Add new profiles
database.execute(
insert(TableLanguagesProfiles)
.values(
profileId=item['profileId'],
name=item['name'],
cutoff=item['cutoff'] if item['cutoff'] not in None_Keys else None,
items=json.dumps(item['items']),
mustContain=str(item['mustContain']),
mustNotContain=str(item['mustNotContain']),
originalFormat=int(item['originalFormat']) if item['originalFormat'] not in None_Keys else
None,
tag=item['tag'] if 'tag' in item else None,
))
for profileId in existing:
# Remove deleted profiles
database.execute(
delete(TableLanguagesProfiles)
.where(TableLanguagesProfiles.profileId == profileId))
# invalidate cache
update_profile_id_list.invalidate()
event_stream("languages")
if settings.general.use_sonarr:
scheduler.add_job(list_missing_subtitles, kwargs={'send_event': True})
if settings.general.use_radarr:
scheduler.add_job(list_missing_subtitles_movies, kwargs={'send_event': True})
# Update Notification
notifications = request.form.getlist('notifications-providers')
for item in notifications:
item = json.loads(item)
database.execute(
update(TableSettingsNotifier).values(
enabled=int(item['enabled'] is True),
url=item['url'])
.where(TableSettingsNotifier.name == item['name']))
try:
save_settings(zip(request.form.keys(), request.form.listvalues()))
except ValidationError as e:
event_stream("settings")
return e.message, 406
else:
event_stream("settings")
return '', 204
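
A sketch of the form layout this handler expects from a client: repeated keys for lists and JSON strings for structured values. The notifier name must already exist in `TableSettingsNotifier`; all values here are illustrative.

```python
import json
import requests

payload = [
    ("languages-enabled", "en"),   # repeated key -> request.form.getlist()
    ("languages-enabled", "fr"),
    ("notifications-providers", json.dumps(
        {"name": "Discord", "enabled": True, "url": "discord://id/token"})),
]
r = requests.post("http://127.0.0.1:6767/api/system/settings",  # assumed address
                  data=payload, headers={"X-API-KEY": "your-api-key"})
assert r.status_code == 204
```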

View File

@ -0,0 +1,66 @@
# coding=utf-8
import os
import platform
import logging
from flask_restx import Resource, Namespace
from tzlocal import get_localzone_name
from alembic.migration import MigrationContext
from radarr.info import get_radarr_info
from sonarr.info import get_sonarr_info
from app.get_args import args
from app.database import engine, database, select
from init import startTime
from ..utils import authenticate
api_ns_system_status = Namespace('System Status', description='List environment information and versions')
@api_ns_system_status.route('system/status')
class SystemStatus(Resource):
@authenticate
@api_ns_system_status.response(200, "Success")
@api_ns_system_status.response(401, 'Not Authenticated')
def get(self):
"""Return environment information and versions"""
package_version = ''
if 'BAZARR_PACKAGE_VERSION' in os.environ:
package_version = os.environ['BAZARR_PACKAGE_VERSION']
if 'BAZARR_PACKAGE_AUTHOR' in os.environ and os.environ['BAZARR_PACKAGE_AUTHOR'] != '':
package_version = f'{package_version} by {os.environ["BAZARR_PACKAGE_AUTHOR"]}'
try:
timezone = get_localzone_name() or "Undefined"
except Exception:
timezone = "Exception while getting time zone name."
logging.exception("BAZARR is unable to get configured time zone name.")
try:
database_version = ".".join([str(x) for x in engine.dialect.server_version_info])
except Exception:
database_version = ""
try:
database_migration = MigrationContext.configure(engine.connect()).get_current_revision()
except Exception:
database_migration = "unknown"
system_status = {}
system_status.update({'bazarr_version': os.environ["BAZARR_VERSION"]})
system_status.update({'package_version': package_version})
system_status.update({'sonarr_version': get_sonarr_info.version()})
system_status.update({'radarr_version': get_radarr_info.version()})
system_status.update({'operating_system': platform.platform()})
system_status.update({'python_version': platform.python_version()})
system_status.update({'database_engine': f'{engine.dialect.name.capitalize()} {database_version}'})
system_status.update({'database_migration': database_migration})
system_status.update({'bazarr_directory': os.path.dirname(os.path.dirname(os.path.dirname(
os.path.dirname(__file__))))})
system_status.update({'bazarr_config_directory': args.config_dir})
system_status.update({'start_time': startTime})
system_status.update({'timezone': timezone})
return {'data': system_status}

View File

@ -0,0 +1,30 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse
from ..utils import authenticate
api_ns_system = Namespace('System', description='Shutdown or restart Bazarr')
@api_ns_system.hide
@api_ns_system.route('system')
class System(Resource):
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('action', type=str, required=True,
help='Action to perform from ["shutdown", "restart"]')
@authenticate
@api_ns_system.doc(parser=post_request_parser)
@api_ns_system.response(204, 'Success')
@api_ns_system.response(401, 'Not Authenticated')
def post(self):
"""Shutdown or restart Bazarr"""
args = self.post_request_parser.parse_args()
from app.server import webserver
action = args.get('action')
if action == "shutdown":
webserver.shutdown()
elif action == "restart":
webserver.restart()
return '', 204
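
A hypothetical call to this hidden endpoint (address and key are assumptions for illustration):

```python
import requests

# Ask Bazarr to restart itself.
requests.post("http://127.0.0.1:6767/api/system",
              data={"action": "restart"},
              headers={"X-API-KEY": "your-api-key"})
```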

View File

@ -0,0 +1,59 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from app.scheduler import scheduler
from ..utils import authenticate
api_ns_system_tasks = Namespace('System Tasks', description='List or execute tasks')
@api_ns_system_tasks.route('system/tasks')
class SystemTasks(Resource):
get_response_model = api_ns_system_tasks.model('SystemTasksGetResponse', {
'interval': fields.String(),
'job_id': fields.String(),
'job_running': fields.Boolean(),
'name': fields.String(),
'next_run_in': fields.String(),
'next_run_time': fields.String(),
})
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('taskid', type=str, required=False, help="Task id of a single task to list; omit to list all tasks")
@authenticate
@api_ns_system_tasks.doc(parser=None)
@api_ns_system_tasks.response(200, 'Success')
@api_ns_system_tasks.response(401, 'Not Authenticated')
def get(self):
"""List tasks"""
args = self.get_request_parser.parse_args()
taskid = args.get('taskid')
task_list = scheduler.get_task_list()
if taskid:
for item in task_list:
if item['job_id'] == taskid:
task_list = [item]
break
return marshal(task_list, self.get_response_model, envelope='data')
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('taskid', type=str, required=True, help='Task id of the task to run')
@authenticate
@api_ns_system_tasks.doc(parser=post_request_parser)
@api_ns_system_tasks.response(204, 'Success')
@api_ns_system_tasks.response(401, 'Not Authenticated')
def post(self):
"""Run task"""
args = self.post_request_parser.parse_args()
taskid = args.get('taskid')
scheduler.execute_job_now(taskid)
return '', 204

161
bazarr/api/utils.py Normal file
View File

@ -0,0 +1,161 @@
# coding=utf-8
import ast
from functools import wraps
from flask import request, abort
from operator import itemgetter
from app.config import settings, base_url
from languages.get_languages import language_from_alpha2, alpha3_from_alpha2
from app.database import get_audio_profile_languages, get_desired_languages
from utilities.path_mappings import path_mappings
None_Keys = ['null', 'undefined', '', None]
False_Keys = ['False', 'false', '0']
def authenticate(actual_method):
@wraps(actual_method)
def wrapper(*args, **kwargs):
apikey_settings = settings.auth.apikey
apikey_get = request.args.get('apikey')
apikey_post = request.form.get('apikey')
apikey_header = None
if 'X-API-KEY' in request.headers:
apikey_header = request.headers['X-API-KEY']
if apikey_settings in [apikey_get, apikey_post, apikey_header]:
return actual_method(*args, **kwargs)
return abort(401)
return wrapper
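
The decorator accepts the API key in any of three places; the calls below are equivalent (host and key are assumptions for illustration):

```python
import requests

BASE, KEY = "http://127.0.0.1:6767/api", "your-api-key"  # assumed values

requests.get(f"{BASE}/system/status", headers={"X-API-KEY": KEY})  # header
requests.get(f"{BASE}/system/status", params={"apikey": KEY})      # query string
# POST/PATCH endpoints also accept the key as an 'apikey' form field.
```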
def postprocess(item):
# Remove ffprobe_cache
if item.get('radarrId'):
path_replace = path_mappings.path_replace_movie
else:
path_replace = path_mappings.path_replace
if item.get('ffprobe_cache'):
del item['ffprobe_cache']
# Parse audio language
if item.get('audio_language'):
item['audio_language'] = get_audio_profile_languages(item['audio_language'])
# Make sure profileId is a valid None value
if item.get('profileId') in None_Keys:
item['profileId'] = None
# Parse alternate titles
if item.get('alternativeTitles'):
item['alternativeTitles'] = ast.literal_eval(item['alternativeTitles'])
else:
item['alternativeTitles'] = []
# Parse subtitles
if item.get('subtitles'):
item['subtitles'] = ast.literal_eval(item['subtitles'])
for i, subs in enumerate(item['subtitles']):
language = subs[0].split(':')
file_size = subs[2] if len(subs) > 2 else 0
item['subtitles'][i] = {"path": path_replace(subs[1]),
"name": language_from_alpha2(language[0]),
"code2": language[0],
"code3": alpha3_from_alpha2(language[0]),
"forced": False,
"hi": False,
"file_size": file_size}
if len(language) > 1:
item['subtitles'][i].update(
{
"forced": language[1].lower() == 'forced',
"hi": language[1].lower() == 'hi',
}
)
if settings.general.embedded_subs_show_desired and item.get('profileId'):
desired_lang_list = get_desired_languages(item['profileId'])
item['subtitles'] = [x for x in item['subtitles'] if x['code2'] in desired_lang_list or x['path']]
item['subtitles'] = sorted(item['subtitles'], key=itemgetter('name', 'forced'))
else:
item['subtitles'] = []
# Parse missing subtitles
if item.get('missing_subtitles'):
item['missing_subtitles'] = ast.literal_eval(item['missing_subtitles'])
for i, subs in enumerate(item['missing_subtitles']):
language = subs.split(':')
item['missing_subtitles'][i] = {"name": language_from_alpha2(language[0]),
"code2": language[0],
"code3": alpha3_from_alpha2(language[0]),
"forced": False,
"hi": False}
if len(language) > 1:
item['missing_subtitles'][i].update(
{
"forced": language[1] == 'forced',
"hi": language[1] == 'hi',
}
)
else:
item['missing_subtitles'] = []
# Parse tags
if item.get('tags') is not None:
item['tags'] = ast.literal_eval(item.get('tags', '[]'))
else:
item['tags'] = []
if item.get('monitored'):
item['monitored'] = item.get('monitored') == 'True'
else:
item['monitored'] = False
if item.get('hearing_impaired'):
item['hearing_impaired'] = item.get('hearing_impaired') == 'True'
else:
item['hearing_impaired'] = False
if item.get('language'):
if item['language'] == 'None':
item['language'] = None
if item['language'] is not None:
splitted_language = item['language'].split(':')
item['language'] = {
"name": language_from_alpha2(splitted_language[0]),
"code2": splitted_language[0],
"code3": alpha3_from_alpha2(splitted_language[0]),
"forced": bool(item['language'].endswith(':forced')),
"hi": bool(item['language'].endswith(':hi')),
}
if item.get('path'):
item['path'] = path_replace(item['path'])
if item.get('video_path'):
# Provide mapped video path for history
item['video_path'] = path_replace(item['video_path'])
if item.get('subtitles_path'):
# Provide mapped subtitles path
item['subtitles_path'] = path_replace(item['subtitles_path'])
if item.get('external_subtitles'):
# Provide mapped external subtitles paths for history
if isinstance(item['external_subtitles'], str):
item['external_subtitles'] = ast.literal_eval(item['external_subtitles'])
for i, subs in enumerate(item['external_subtitles']):
item['external_subtitles'][i] = path_replace(subs)
# map poster and fanart to server proxy
if item.get('poster') is not None:
poster = item['poster']
item['poster'] = f"{base_url}/images/{'movies' if item.get('radarrId') else 'series'}{poster}" if poster else None
if item.get('fanart') is not None:
fanart = item['fanart']
item['fanart'] = f"{base_url}/images/{'movies' if item.get('radarrId') else 'series'}{fanart}" if fanart else None
return item
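
A sketch of the serialized column formats `postprocess()` unpacks; the sample values are illustrative, not real database rows:

```python
import ast

# subtitles column: list of [language[:modifier], path, file_size] entries
subtitles = "[['fr:forced', '/media/Movie (2024)/Movie.fr.forced.srt', 53248]]"
for lang_spec, path, size in ast.literal_eval(subtitles):
    code, _, modifier = lang_spec.partition(':')
    print(code, modifier == 'forced', modifier == 'hi', path, size)

# missing_subtitles column: list of 'code2' or 'code2:modifier' strings
print(ast.literal_eval("['en', 'es:hi']"))
```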

View File

@ -0,0 +1,12 @@
# coding=utf-8
from .plex import api_ns_webhooks_plex
from .sonarr import api_ns_webhooks_sonarr
from .radarr import api_ns_webhooks_radarr
api_ns_list_webhooks = [
api_ns_webhooks_plex,
api_ns_webhooks_radarr,
api_ns_webhooks_sonarr,
]

102
bazarr/api/webhooks/plex.py Normal file
View File

@ -0,0 +1,102 @@
# coding=utf-8
import json
import requests
import os
import logging
from flask_restx import Resource, Namespace, reqparse
from bs4 import BeautifulSoup as bso
from app.database import TableEpisodes, TableShows, TableMovies, database, select
from subtitles.mass_download import episode_download_subtitles, movies_download_subtitles
from ..utils import authenticate
api_ns_webhooks_plex = Namespace('Webhooks Plex', description='Webhooks endpoint that can be configured in Plex to '
'trigger a subtitles search when playback starts.')
@api_ns_webhooks_plex.route('webhooks/plex')
class WebHooksPlex(Resource):
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('payload', type=str, required=True, help='Webhook payload')
@authenticate
@api_ns_webhooks_plex.doc(parser=post_request_parser)
@api_ns_webhooks_plex.response(200, 'Success')
@api_ns_webhooks_plex.response(204, 'Unhandled event')
@api_ns_webhooks_plex.response(400, 'No GUID found')
@api_ns_webhooks_plex.response(401, 'Not Authenticated')
@api_ns_webhooks_plex.response(404, 'IMDB series/movie ID not found')
def post(self):
"""Trigger subtitles search on play media event in Plex"""
args = self.post_request_parser.parse_args()
json_webhook = args.get('payload')
parsed_json_webhook = json.loads(json_webhook)
if 'Guid' not in parsed_json_webhook['Metadata']:
logging.debug('No GUID provided in Plex json payload. Probably a pre-roll video.')
return "No GUID found in JSON request body", 200
event = parsed_json_webhook['event']
if event not in ['media.play']:
return 'Unhandled event', 204
media_type = parsed_json_webhook['Metadata']['type']
if media_type == 'episode':
season = parsed_json_webhook['Metadata']['parentIndex']
episode = parsed_json_webhook['Metadata']['index']
else:
season = episode = None
ids = []
for item in parsed_json_webhook['Metadata']['Guid']:
splitted_id = item['id'].split('://')
if len(splitted_id) == 2:
ids.append({splitted_id[0]: splitted_id[1]})
if not ids:
return 'No GUID found', 400
if media_type == 'episode':
try:
episode_imdb_id = [x['imdb'] for x in ids if 'imdb' in x][0]
r = requests.get(f'https://imdb.com/title/{episode_imdb_id}',
headers={"User-Agent": os.environ["SZ_USER_AGENT"]})
soup = bso(r.content, "html.parser")
script_tag = soup.find(id='__NEXT_DATA__')
script_tag_json = script_tag.string
show_metadata_dict = json.loads(script_tag_json)
series_imdb_id = show_metadata_dict['props']['pageProps']['aboveTheFoldData']['series']['series']['id']
except Exception:
logging.debug('BAZARR is unable to get series IMDB id.')
return 'IMDB series ID not found', 404
else:
sonarrEpisodeId = database.execute(
select(TableEpisodes.sonarrEpisodeId)
.select_from(TableEpisodes)
.join(TableShows)
.where(TableShows.imdbId == series_imdb_id,
TableEpisodes.season == season,
TableEpisodes.episode == episode)) \
.first()
if sonarrEpisodeId:
episode_download_subtitles(no=sonarrEpisodeId.sonarrEpisodeId, send_progress=True)
else:
try:
movie_imdb_id = [x['imdb'] for x in ids if 'imdb' in x][0]
except Exception:
logging.debug('BAZARR is unable to get movie IMDB id.')
return 'IMDB movie ID not found', 404
else:
radarrId = database.execute(
select(TableMovies.radarrId)
.where(TableMovies.imdbId == movie_imdb_id)) \
.first()
if radarrId:
movies_download_subtitles(no=radarrId.radarrId)
return '', 200
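
The shape of the Plex payload this handler consumes, reconstructed from the parsing above (field values are made up):

```python
payload = {
    "event": "media.play",
    "Metadata": {
        "type": "episode",
        "parentIndex": 1,   # season number
        "index": 3,         # episode number
        "Guid": [
            {"id": "imdb://tt0903747"},
            {"id": "tvdb://81189"},
        ],
    },
}
# Each Guid id is split on '://' into {'imdb': 'tt0903747'}, {'tvdb': '81189'};
# only the 'imdb' entry is used to resolve the series or movie.
```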

View File

@ -0,0 +1,40 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse
from app.database import TableMovies, database, select
from subtitles.mass_download import movies_download_subtitles
from subtitles.indexer.movies import store_subtitles_movie
from utilities.path_mappings import path_mappings
from ..utils import authenticate
api_ns_webhooks_radarr = Namespace('Webhooks Radarr', description='Webhooks to trigger subtitles search based on '
'Radarr movie file ID')
@api_ns_webhooks_radarr.route('webhooks/radarr')
class WebHooksRadarr(Resource):
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('radarr_moviefile_id', type=int, required=True, help='Movie file ID')
@authenticate
@api_ns_webhooks_radarr.doc(parser=post_request_parser)
@api_ns_webhooks_radarr.response(200, 'Success')
@api_ns_webhooks_radarr.response(401, 'Not Authenticated')
def post(self):
"""Search for missing subtitles for a specific movie file id"""
args = self.post_request_parser.parse_args()
movie_file_id = args.get('radarr_moviefile_id')
radarrMovieId = database.execute(
select(TableMovies.radarrId, TableMovies.path)
.where(TableMovies.movie_file_id == movie_file_id)) \
.first()
if radarrMovieId:
store_subtitles_movie(radarrMovieId.path, path_mappings.path_replace_movie(radarrMovieId.path))
movies_download_subtitles(no=radarrMovieId.radarrId)
return '', 200

View File

@ -0,0 +1,42 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse
from app.database import TableEpisodes, TableShows, database, select
from subtitles.mass_download import episode_download_subtitles
from subtitles.indexer.series import store_subtitles
from utilities.path_mappings import path_mappings
from ..utils import authenticate
api_ns_webhooks_sonarr = Namespace('Webhooks Sonarr', description='Webhooks to trigger subtitles search based on '
'Sonarr episode file ID')
@api_ns_webhooks_sonarr.route('webhooks/sonarr')
class WebHooksSonarr(Resource):
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('sonarr_episodefile_id', type=int, required=True, help='Episode file ID')
@authenticate
@api_ns_webhooks_sonarr.doc(parser=post_request_parser)
@api_ns_webhooks_sonarr.response(200, 'Success')
@api_ns_webhooks_sonarr.response(401, 'Not Authenticated')
def post(self):
"""Search for missing subtitles for a specific episode file id"""
args = self.post_request_parser.parse_args()
episode_file_id = args.get('sonarr_episodefile_id')
sonarrEpisodeId = database.execute(
select(TableEpisodes.sonarrEpisodeId, TableEpisodes.path)
.select_from(TableEpisodes)
.join(TableShows)
.where(TableEpisodes.episode_file_id == episode_file_id)) \
.first()
if sonarrEpisodeId:
store_subtitles(sonarrEpisodeId.path, path_mappings.path_replace(sonarrEpisodeId.path))
episode_download_subtitles(no=sonarrEpisodeId.sonarrEpisodeId, send_progress=True)
return '', 200

1
bazarr/app/__init__.py Normal file
View File

@ -0,0 +1 @@
# coding=utf-8

150
bazarr/app/announcements.py Normal file
View File

@ -0,0 +1,150 @@
# coding=utf-8
import os
import hashlib
import requests
import logging
import json
import pretty
from datetime import datetime
from operator import itemgetter
from app.get_providers import get_enabled_providers
from app.database import TableAnnouncements, database, insert, select
from app.config import settings
from app.get_args import args
from sonarr.info import get_sonarr_info
from radarr.info import get_radarr_info
from app.check_update import deprecated_python_version
# Announcements as received by the browser must be in the form of a list of dicts converted to JSON
# [
# {
# 'text': 'some text',
# 'link': 'http://to.somewhere.net',
# 'hash': '',
# 'dismissible': True,
# 'timestamp': 1676236978,
# 'enabled': True,
# },
# ]
def parse_announcement_dict(announcement_dict):
announcement_dict['timestamp'] = pretty.date(announcement_dict['timestamp'])
announcement_dict['link'] = announcement_dict.get('link', '')
announcement_dict['dismissible'] = announcement_dict.get('dismissible', True)
announcement_dict['enabled'] = announcement_dict.get('enabled', True)
announcement_dict['hash'] = hashlib.sha256(announcement_dict['text'].encode('UTF8')).hexdigest()
return announcement_dict
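
An example of what `parse_announcement_dict()` fills in for a raw announcement (the rendered timestamp depends on the current time):

```python
raw = {'text': 'some text', 'timestamp': 1676236978}
parsed = parse_announcement_dict(raw)
# parsed['hash']        -> sha256 of the text, used for dismissal tracking
# parsed['link']        -> ''   (defaulted)
# parsed['dismissible'] -> True (defaulted)
# parsed['enabled']     -> True (defaulted)
```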
def get_announcements_to_file():
try:
r = requests.get("https://raw.githubusercontent.com/morpheus65535/bazarr-binaries/master/announcements.json",
timeout=10)
except requests.exceptions.HTTPError:
logging.exception("Error trying to get announcements from Github. Http error.")
except requests.exceptions.ConnectionError:
logging.exception("Error trying to get announcements from Github. Connection Error.")
except requests.exceptions.Timeout:
logging.exception("Error trying to get announcements from Github. Timeout Error.")
except requests.exceptions.RequestException:
logging.exception("Error trying to get announcements from Github.")
else:
with open(os.path.join(args.config_dir, 'config', 'announcements.json'), 'wb') as f:
f.write(r.content)
def get_online_announcements():
try:
with open(os.path.join(args.config_dir, 'config', 'announcements.json'), 'r') as f:
data = json.load(f)
except (OSError, json.JSONDecodeError):
return []
else:
for announcement in data['data']:
if 'enabled' not in announcement:
announcement['enabled'] = True
if 'dismissible' not in announcement:
announcement['dismissible'] = True
return data['data']
def get_local_announcements():
announcements = []
# opensubtitles.org end-of-life
enabled_providers = get_enabled_providers()
if enabled_providers and 'opensubtitles' in enabled_providers and not settings.opensubtitles.vip:
announcements.append({
'text': 'Opensubtitles.org is deprecated for non-VIP users, migrate to Opensubtitles.com ASAP and disable '
'this provider to remove this announcement.',
'link': 'https://wiki.bazarr.media/Troubleshooting/OpenSubtitles-migration/',
'dismissible': False,
'timestamp': 1676236978,
})
# deprecated Sonarr and Radarr versions
if get_sonarr_info.is_deprecated():
announcements.append({
'text': f'Sonarr {get_sonarr_info.version()} is deprecated and unsupported. You should consider upgrading '
f'as Bazarr will eventually drop support for deprecated Sonarr versions.',
'link': 'https://forums.sonarr.tv/t/v3-is-now-officially-stable-v2-is-eol/27858',
'dismissible': False,
'timestamp': 1679606061,
})
if get_radarr_info.is_deprecated():
announcements.append({
'text': f'Radarr {get_radarr_info.version()} is deprecated and unsupported. You should consider upgrading '
f'as Bazarr will eventually drop support for deprecated Radarr versions.',
'link': 'https://discord.com/channels/264387956343570434/264388019585286144/1051567458697363547',
'dismissible': False,
'timestamp': 1679606309,
})
# deprecated Python versions
if deprecated_python_version():
announcements.append({
'text': 'Starting with Bazarr 1.4, support for Python 3.7 will get dropped. Upgrade your current version of'
' Python ASAP to get further updates.',
'dismissible': False,
'timestamp': 1691162383,
})
for announcement in announcements:
if 'enabled' not in announcement:
announcement['enabled'] = True
if 'dismissible' not in announcement:
announcement['dismissible'] = True
return announcements
def get_all_announcements():
# get announcements that haven't been dismissed yet
announcements = [parse_announcement_dict(x) for x in get_online_announcements() + get_local_announcements() if
x['enabled'] and (not x['dismissible'] or not
database.execute(
select(TableAnnouncements)
.where(TableAnnouncements.hash ==
hashlib.sha256(x['text'].encode('UTF8')).hexdigest()))
.first())]
return sorted(announcements, key=itemgetter('timestamp'), reverse=True)
def mark_announcement_as_dismissed(hashed_announcement):
text = [x['text'] for x in get_all_announcements() if x['hash'] == hashed_announcement]
if text:
database.execute(
insert(TableAnnouncements)
.values(hash=hashed_announcement,
timestamp=datetime.now(),
text=text[0])
.on_conflict_do_nothing())

76
bazarr/app/app.py Normal file
View File

@ -0,0 +1,76 @@
# coding=utf-8
from flask import Flask, redirect, Request
from flask_compress import Compress
from flask_cors import CORS
from flask_socketio import SocketIO
from .database import database
from .get_args import args
from .config import settings, base_url
socketio = SocketIO()
class CustomRequest(Request):
def __init__(self, *args, **kwargs):
super(CustomRequest, self).__init__(*args, **kwargs)
# required to increase form-data size before returning a 413
self.max_form_parts = 10000
def create_app():
# Flask Setup
app = Flask(__name__)
app.request_class = CustomRequest
app.config['COMPRESS_ALGORITHM'] = 'gzip'
Compress(app)
app.wsgi_app = ReverseProxied(app.wsgi_app)
app.config["SECRET_KEY"] = settings.general.flask_secret_key
app.config['JSONIFY_PRETTYPRINT_REGULAR'] = True
app.config['JSON_AS_ASCII'] = False
app.config['RESTX_MASK_SWAGGER'] = False
if settings.cors.enabled:
CORS(app)
if args.dev:
app.config["DEBUG"] = True
else:
app.config["DEBUG"] = False
from engineio.async_drivers import threading # noqa W0611 # required to prevent an import exception in engineio
socketio.init_app(app, path=f'{base_url.rstrip("/")}/api/socket.io', cors_allowed_origins='*',
async_mode='threading', allow_upgrades=False, transports='polling', engineio_logger=False)
@app.errorhandler(404)
def page_not_found(_):
return redirect(base_url, code=302)
# This hook ensures that a connection is opened to handle any queries
# generated by the request.
@app.before_request
def _db_connect():
database.begin()
# This hook ensures that the connection is closed when we've finished
# processing the request.
@app.teardown_request
def _db_close(exc):
database.close()
return app
class ReverseProxied(object):
def __init__(self, app):
self.app = app
def __call__(self, environ, start_response):
scheme = environ.get('HTTP_X_FORWARDED_PROTO')
if scheme:
environ['wsgi.url_scheme'] = scheme
return self.app(environ, start_response)
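
A minimal illustration of what `ReverseProxied` does for a request forwarded by a TLS-terminating proxy (the environ values are illustrative):

```python
def fake_app(environ, start_response):
    # echoes the scheme the wrapped app would see
    return [environ['wsgi.url_scheme'].encode()]

wrapped = ReverseProxied(fake_app)
environ = {'wsgi.url_scheme': 'http', 'HTTP_X_FORWARDED_PROTO': 'https'}
assert wrapped(environ, lambda *a: None) == [b'https']
```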

281
bazarr/app/check_update.py Normal file
View File

@ -0,0 +1,281 @@
# coding=utf-8
import os
import re
import logging
import json
import requests
import semver
import sys
from shutil import rmtree
from zipfile import ZipFile
from .get_args import args
from .config import settings
def deprecated_python_version():
# return True if Python version is deprecated
return sys.version_info.major == 2 or (sys.version_info.major == 3 and sys.version_info.minor < 8)
def check_releases():
releases = []
url_releases = 'https://api.github.com/repos/morpheus65535/Bazarr/releases?per_page=100'
try:
logging.debug(f'BAZARR getting releases from Github: {url_releases}')
r = requests.get(url_releases, allow_redirects=True, timeout=15)
r.raise_for_status()
except requests.exceptions.HTTPError:
logging.exception("Error trying to get releases from Github. Http error.")
except requests.exceptions.ConnectionError:
logging.exception("Error trying to get releases from Github. Connection Error.")
except requests.exceptions.Timeout:
logging.exception("Error trying to get releases from Github. Timeout Error.")
except requests.exceptions.RequestException:
logging.exception("Error trying to get releases from Github.")
else:
for release in r.json():
download_link = None
for asset in release['assets']:
if asset['name'] == 'bazarr.zip':
download_link = asset['browser_download_url']
if not download_link:
continue
releases.append({'name': release['name'],
'body': release['body'],
'date': release['published_at'],
'prerelease': release['prerelease'],
'download_link': download_link})
with open(os.path.join(args.config_dir, 'config', 'releases.txt'), 'w') as f:
json.dump(releases, f)
logging.debug(f'BAZARR saved {len(releases)} releases to releases.txt')
def check_if_new_update():
if settings.general.branch == 'master':
use_prerelease = False
elif settings.general.branch == 'development':
use_prerelease = True
else:
logging.error(f'BAZARR unknown branch provided to updater: {settings.general.branch}')
return
logging.debug(f'BAZARR updater is using {settings.general.branch} branch')
check_releases()
with open(os.path.join(args.config_dir, 'config', 'releases.txt'), 'r') as f:
data = json.load(f)
if not args.no_update:
release = None
if use_prerelease:
if deprecated_python_version():
# keep the release dict itself and default to None so an empty list doesn't raise StopIteration
release = next((item for item in data if
semver.VersionInfo.parse('1.3.1') > semver.VersionInfo.parse(item['name'].lstrip('v'))), None)
else:
release = next((item for item in data), None)
else:
if deprecated_python_version():
release = next((item for item in data if
not item['prerelease'] and semver.VersionInfo.parse('1.3.1') > semver.VersionInfo.parse(
item['name'].lstrip('v'))), None)
else:
release = next((item for item in data if not item["prerelease"]), None)
if release and 'name' in release:
logging.debug(f'BAZARR last release available is {release["name"]}')
if deprecated_python_version():
logging.warning('BAZARR is using a deprecated Python version, you must update Python to get latest '
'version available.')
current_version = None
try:
current_version = semver.VersionInfo.parse(os.environ["BAZARR_VERSION"])
semver.VersionInfo.parse(release['name'].lstrip('v'))
except ValueError:
new_version = True
else:
new_version = True if semver.compare(release['name'].lstrip('v'), os.environ["BAZARR_VERSION"]) > 0 \
else False
# skip update process if latest release is v0.9.1.1 which is the latest pre-semver compatible release
if new_version and release['name'] != 'v0.9.1.1':
logging.debug(f'BAZARR newer release available and will be downloaded: {release["name"]}')
download_release(url=release['download_link'])
# rolling back from nightly to stable release
elif current_version:
if current_version.prerelease and not use_prerelease:
logging.debug(f'BAZARR previous stable version will be downloaded: {release["name"]}')
download_release(url=release['download_link'])
else:
logging.debug('BAZARR no newer release has been found')
else:
logging.debug('BAZARR no release found')
else:
logging.debug('BAZARR --no-update has been used as an argument')
def download_release(url):
r = None
update_dir = os.path.join(args.config_dir, 'update')
try:
os.makedirs(update_dir, exist_ok=True)
except Exception:
logging.debug(f'BAZARR unable to create update directory {update_dir}')
else:
logging.debug(f'BAZARR downloading release from Github: {url}')
r = requests.get(url, allow_redirects=True)
if r:
try:
with open(os.path.join(update_dir, 'bazarr.zip'), 'wb') as f:
f.write(r.content)
except Exception:
logging.exception('BAZARR unable to download new release and save it to disk')
else:
apply_update()
def apply_update():
is_updated = False
update_dir = os.path.join(args.config_dir, 'update')
bazarr_zip = os.path.join(update_dir, 'bazarr.zip')
bazarr_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
build_dir = os.path.join(bazarr_dir, 'frontend', 'build')
if os.path.isdir(update_dir):
if os.path.isfile(bazarr_zip):
logging.debug(f'BAZARR is trying to unzip this release to {bazarr_dir}: {bazarr_zip}')
try:
with ZipFile(bazarr_zip, 'r') as archive:
zip_root_directory = ''
if len({item.split('/')[0] for item in archive.namelist()}) == 1:
zip_root_directory = archive.namelist()[0]
if os.path.isdir(build_dir):
try:
rmtree(build_dir, ignore_errors=True)
except Exception:
logging.exception(
'BAZARR was unable to delete the previous build directory during upgrade process.')
for file in archive.namelist():
if file.startswith(zip_root_directory) and file != zip_root_directory:
file_path = os.path.join(bazarr_dir, file[len(zip_root_directory):])
parent_dir = os.path.dirname(file_path)
os.makedirs(parent_dir, exist_ok=True)
if not os.path.isdir(file_path):
if os.path.exists(file_path):
# remove the file first to handle case-insensitive file systems
os.remove(file_path)
with open(file_path, 'wb+') as f:
f.write(archive.read(file))
except Exception:
logging.exception('BAZARR unable to unzip release')
else:
is_updated = True
try:
logging.debug('BAZARR successfully unzipped new release and will now try to delete the leftover '
'files.')
update_cleaner(zipfile=bazarr_zip, bazarr_dir=bazarr_dir, config_dir=args.config_dir)
except Exception:
logging.exception('BAZARR unable to cleanup leftover files after upgrade.')
else:
logging.debug('BAZARR successfully deleted leftover files.')
finally:
logging.debug('BAZARR now deleting release archive')
os.remove(bazarr_zip)
else:
return
if is_updated:
logging.debug('BAZARR new release has been installed, restarting now')
from .server import webserver
webserver.restart()
def update_cleaner(zipfile, bazarr_dir, config_dir):
with ZipFile(zipfile, 'r') as archive:
file_in_zip = archive.namelist()
logging.debug(f'BAZARR zip file contains {len(file_in_zip)} directories and files')
separator = os.path.sep
if os.path.sep == '\\':
logging.debug('BAZARR upgrade leftover cleaner is running on Windows. We\'ll fix the zip file separator '
'accordingly.')
for i, item in enumerate(file_in_zip):
file_in_zip[i] = item.replace('/', '\\')
separator += os.path.sep
else:
logging.debug('BAZARR upgrade leftover cleaner is running on something other than Windows. The zip file '
'separators are fine.')
dir_to_ignore = [f'^.{separator}',
f'^bin{separator}',
f'^venv{separator}',
f'^WinPython{separator}',
f'{separator}__pycache__{separator}$']
if os.path.abspath(bazarr_dir).lower() == os.path.abspath(config_dir).lower():
# for users who installed Bazarr inside the config directory (ie: `%programdata%\Bazarr` on windows)
dir_to_ignore.append(f'^backup{separator}')
dir_to_ignore.append(f'^cache{separator}')
dir_to_ignore.append(f'^config{separator}')
dir_to_ignore.append(f'^db{separator}')
dir_to_ignore.append(f'^log{separator}')
dir_to_ignore.append(f'^restore{separator}')
dir_to_ignore.append(f'^update{separator}')
elif os.path.abspath(bazarr_dir).lower() in os.path.abspath(config_dir).lower():
# when config directory is a child of Bazarr installation directory
dir_to_ignore.append(f'^{os.path.relpath(config_dir, bazarr_dir)}{separator}')
dir_to_ignore_regex_string = '(?:%s)' % '|'.join(dir_to_ignore)  # no space in '%s': it would leak into the first alternative
logging.debug(f'BAZARR upgrade leftover cleaner will ignore directories matching this '
f'regex: {dir_to_ignore_regex_string}')
dir_to_ignore_regex = re.compile(dir_to_ignore_regex_string)
file_to_ignore = ['nssm.exe', '7za.exe', 'unins000.exe', 'unins000.dat']
# prevent deletion of leftover Apprise.py/pyi files after 1.8.0 version that caused issue on case-insensitive
# filesystem. This could be removed in a couple of major versions.
file_to_ignore += ['Apprise.py', 'Apprise.pyi', 'apprise.py', 'apprise.pyi']
logging.debug(f'BAZARR upgrade leftover cleaner will ignore those files: {", ".join(file_to_ignore)}')
extension_to_ignore = ['.pyc']
logging.debug(
f'BAZARR upgrade leftover cleaner will ignore files with those extensions: {", ".join(extension_to_ignore)}')
file_on_disk = []
folder_list = []
for foldername, subfolders, filenames in os.walk(bazarr_dir):
relative_foldername = os.path.relpath(foldername, bazarr_dir) + os.path.sep
if not dir_to_ignore_regex.findall(relative_foldername):
if relative_foldername not in folder_list:
folder_list.append(relative_foldername)
for file in filenames:
if file in file_to_ignore:
continue
elif os.path.splitext(file)[1] in extension_to_ignore:
continue
elif foldername == bazarr_dir:
file_on_disk.append(file)
else:
current_dir = relative_foldername
filepath = os.path.join(current_dir, file)
if not dir_to_ignore_regex.findall(filepath):
file_on_disk.append(filepath)
logging.debug(f'BAZARR directory contains {len(file_on_disk)} files')
logging.debug(f'BAZARR directory contains {len(folder_list)} directories')
file_on_disk += folder_list
logging.debug(f'BAZARR directory contains {len(file_on_disk)} directories and files')
file_to_remove = list(set(file_on_disk) - set(file_in_zip))
logging.debug(f'BAZARR will delete {len(file_to_remove)} directories and files')
logging.debug(f'BAZARR will delete this: {", ".join(file_to_remove)}')
for file in file_to_remove:
filepath = os.path.join(bazarr_dir, file)
try:
if os.path.isdir(filepath):
rmtree(filepath, ignore_errors=True)
else:
os.remove(filepath)
except Exception:
logging.debug(f'BAZARR upgrade leftover cleaner cannot delete {filepath}')

926
bazarr/app/config.py Normal file
View File

@ -0,0 +1,926 @@
# coding=utf-8
import hashlib
import os
import ast
import logging
import re
from urllib.parse import quote_plus
from utilities.binaries import BinaryNotFound, get_binary
from literals import EXIT_VALIDATION_ERROR
from utilities.central import stop_bazarr
from subliminal.cache import region
from dynaconf import Dynaconf, Validator as OriginalValidator
from dynaconf.loaders.yaml_loader import write
from dynaconf.validator import ValidationError
from dynaconf.utils.functional import empty
from ipaddress import ip_address
from binascii import hexlify
from types import MappingProxyType
from .get_args import args
NoneType = type(None)
def base_url_slash_cleaner(uri):
while "//" in uri:
uri = uri.replace("//", "/")
return uri
def validate_ip_address(ip_string):
if ip_string == '*':
return True
try:
ip_address(ip_string)
return True
except ValueError:
return False
def validate_tags(tags):
if not tags:
return True
return all(re.match(r'^[a-z0-9_-]+$', item) for item in tags)
ONE_HUNDRED_YEARS_IN_MINUTES = 52560000
ONE_HUNDRED_YEARS_IN_HOURS = 876000
class Validator(OriginalValidator):
# Give the ability to personalize messages sent by the original dynaconf Validator class.
default_messages = MappingProxyType(
{
"must_exist_true": "{name} is required",
"must_exist_false": "{name} cannot exists",
"condition": "{name} invalid for {function}({value})",
"operations": "{name} must {operation} {op_value} but it is {value}",
"combined": "combined validators failed {errors}",
}
)
def check_parser_binary(value):
try:
get_binary(value)
except BinaryNotFound:
raise ValidationError(f"Executable '{value}' not found in search path. Please install before making this selection.")
return True
validators = [
# general section
Validator('general.flask_secret_key', must_exist=True, default=hexlify(os.urandom(16)).decode(),
is_type_of=str),
Validator('general.ip', must_exist=True, default='*', is_type_of=str, condition=validate_ip_address),
Validator('general.port', must_exist=True, default=6767, is_type_of=int, gte=1, lte=65535),
Validator('general.base_url', must_exist=True, default='', is_type_of=str),
Validator('general.path_mappings', must_exist=True, default=[], is_type_of=list),
Validator('general.debug', must_exist=True, default=False, is_type_of=bool),
Validator('general.branch', must_exist=True, default='master', is_type_of=str,
is_in=['master', 'development']),
Validator('general.auto_update', must_exist=True, default=True, is_type_of=bool),
Validator('general.single_language', must_exist=True, default=False, is_type_of=bool),
Validator('general.minimum_score', must_exist=True, default=90, is_type_of=int, gte=0, lte=100),
Validator('general.use_scenename', must_exist=True, default=True, is_type_of=bool),
Validator('general.use_postprocessing', must_exist=True, default=False, is_type_of=bool),
Validator('general.postprocessing_cmd', must_exist=True, default='', is_type_of=str),
Validator('general.postprocessing_threshold', must_exist=True, default=90, is_type_of=int, gte=0, lte=100),
Validator('general.use_postprocessing_threshold', must_exist=True, default=False, is_type_of=bool),
Validator('general.postprocessing_threshold_movie', must_exist=True, default=70, is_type_of=int, gte=0,
lte=100),
Validator('general.use_postprocessing_threshold_movie', must_exist=True, default=False, is_type_of=bool),
Validator('general.use_sonarr', must_exist=True, default=False, is_type_of=bool),
Validator('general.use_radarr', must_exist=True, default=False, is_type_of=bool),
Validator('general.use_plex', must_exist=True, default=False, is_type_of=bool),
Validator('general.path_mappings_movie', must_exist=True, default=[], is_type_of=list),
Validator('general.serie_tag_enabled', must_exist=True, default=False, is_type_of=bool),
Validator('general.movie_tag_enabled', must_exist=True, default=False, is_type_of=bool),
Validator('general.remove_profile_tags', must_exist=True, default=[], is_type_of=list, condition=validate_tags),
Validator('general.serie_default_enabled', must_exist=True, default=False, is_type_of=bool),
Validator('general.serie_default_profile', must_exist=True, default='', is_type_of=(int, str)),
Validator('general.movie_default_enabled', must_exist=True, default=False, is_type_of=bool),
Validator('general.movie_default_profile', must_exist=True, default='', is_type_of=(int, str)),
Validator('general.page_size', must_exist=True, default=25, is_type_of=int,
is_in=[25, 50, 100, 250, 500, 1000]),
Validator('general.theme', must_exist=True, default='auto', is_type_of=str,
is_in=['auto', 'light', 'dark']),
Validator('general.minimum_score_movie', must_exist=True, default=70, is_type_of=int, gte=0, lte=100),
Validator('general.use_embedded_subs', must_exist=True, default=True, is_type_of=bool),
Validator('general.embedded_subs_show_desired', must_exist=True, default=True, is_type_of=bool),
Validator('general.utf8_encode', must_exist=True, default=True, is_type_of=bool),
Validator('general.ignore_pgs_subs', must_exist=True, default=False, is_type_of=bool),
Validator('general.ignore_vobsub_subs', must_exist=True, default=False, is_type_of=bool),
Validator('general.ignore_ass_subs', must_exist=True, default=False, is_type_of=bool),
Validator('general.adaptive_searching', must_exist=True, default=True, is_type_of=bool),
Validator('general.adaptive_searching_delay', must_exist=True, default='3w', is_type_of=str,
is_in=['1w', '2w', '3w', '4w']),
Validator('general.adaptive_searching_delta', must_exist=True, default='1w', is_type_of=str,
is_in=['3d', '1w', '2w', '3w', '4w']),
Validator('general.enabled_providers', must_exist=True, default=[], is_type_of=list),
Validator('general.enabled_integrations', must_exist=True, default=[], is_type_of=list),
Validator('general.multithreading', must_exist=True, default=True, is_type_of=bool),
Validator('general.chmod_enabled', must_exist=True, default=False, is_type_of=bool),
Validator('general.chmod', must_exist=True, default='0640', is_type_of=str),
Validator('general.subfolder', must_exist=True, default='current', is_type_of=str),
Validator('general.subfolder_custom', must_exist=True, default='', is_type_of=str),
Validator('general.upgrade_subs', must_exist=True, default=True, is_type_of=bool),
Validator('general.upgrade_frequency', must_exist=True, default=12, is_type_of=int,
is_in=[6, 12, 24, 168, ONE_HUNDRED_YEARS_IN_HOURS]),
Validator('general.days_to_upgrade_subs', must_exist=True, default=7, is_type_of=int, gte=0, lte=30),
Validator('general.upgrade_manual', must_exist=True, default=True, is_type_of=bool),
Validator('general.anti_captcha_provider', must_exist=True, default=None, is_type_of=(NoneType, str),
is_in=[None, 'anti-captcha', 'death-by-captcha']),
Validator('general.wanted_search_frequency', must_exist=True, default=6, is_type_of=int,
is_in=[6, 12, 24, 168, ONE_HUNDRED_YEARS_IN_HOURS]),
Validator('general.wanted_search_frequency_movie', must_exist=True, default=6, is_type_of=int,
is_in=[6, 12, 24, 168, ONE_HUNDRED_YEARS_IN_HOURS]),
Validator('general.subzero_mods', must_exist=True, default='', is_type_of=str),
Validator('general.dont_notify_manual_actions', must_exist=True, default=False, is_type_of=bool),
Validator('general.hi_extension', must_exist=True, default='hi', is_type_of=str, is_in=['hi', 'cc', 'sdh']),
Validator('general.embedded_subtitles_parser', must_exist=True, default='ffprobe', is_type_of=str,
is_in=['ffprobe', 'mediainfo'], condition=check_parser_binary),
Validator('general.default_und_audio_lang', must_exist=True, default='', is_type_of=str),
Validator('general.default_und_embedded_subtitles_lang', must_exist=True, default='', is_type_of=str),
Validator('general.parse_embedded_audio_track', must_exist=True, default=False, is_type_of=bool),
Validator('general.skip_hashing', must_exist=True, default=False, is_type_of=bool),
Validator('general.language_equals', must_exist=True, default=[], is_type_of=list),
# log section
Validator('log.include_filter', must_exist=True, default='', is_type_of=str, cast=str),
Validator('log.exclude_filter', must_exist=True, default='', is_type_of=str, cast=str),
Validator('log.ignore_case', must_exist=True, default=False, is_type_of=bool),
Validator('log.use_regex', must_exist=True, default=False, is_type_of=bool),
# auth section
Validator('auth.apikey', must_exist=True, default=hexlify(os.urandom(16)).decode(), is_type_of=str),
Validator('auth.type', must_exist=True, default=None, is_type_of=(NoneType, str),
is_in=[None, 'basic', 'form']),
Validator('auth.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('auth.password', must_exist=True, default='', is_type_of=str, cast=str),
# cors section
Validator('cors.enabled', must_exist=True, default=False, is_type_of=bool),
# backup section
Validator('backup.folder', must_exist=True, default=os.path.join(args.config_dir, 'backup'),
is_type_of=str),
Validator('backup.retention', must_exist=True, default=31, is_type_of=int, gte=0),
Validator('backup.frequency', must_exist=True, default='Weekly', is_type_of=str,
is_in=['Manually', 'Daily', 'Weekly']),
Validator('backup.day', must_exist=True, default=6, is_type_of=int, gte=0, lte=6),
Validator('backup.hour', must_exist=True, default=3, is_type_of=int, gte=0, lte=23),
# sonarr section
Validator('sonarr.ip', must_exist=True, default='127.0.0.1', is_type_of=str),
Validator('sonarr.port', must_exist=True, default=8989, is_type_of=int, gte=1, lte=65535),
Validator('sonarr.base_url', must_exist=True, default='/', is_type_of=str),
Validator('sonarr.ssl', must_exist=True, default=False, is_type_of=bool),
Validator('sonarr.http_timeout', must_exist=True, default=60, is_type_of=int,
is_in=[60, 120, 180, 240, 300, 600]),
Validator('sonarr.apikey', must_exist=True, default='', is_type_of=str),
Validator('sonarr.full_update', must_exist=True, default='Daily', is_type_of=str,
is_in=['Manually', 'Daily', 'Weekly']),
Validator('sonarr.full_update_day', must_exist=True, default=6, is_type_of=int, gte=0, lte=6),
Validator('sonarr.full_update_hour', must_exist=True, default=4, is_type_of=int, gte=0, lte=23),
Validator('sonarr.only_monitored', must_exist=True, default=False, is_type_of=bool),
Validator('sonarr.series_sync', must_exist=True, default=60, is_type_of=int,
is_in=[15, 60, 180, 360, 720, 1440, 10080, ONE_HUNDRED_YEARS_IN_MINUTES]),
Validator('sonarr.excluded_tags', must_exist=True, default=[], is_type_of=list, condition=validate_tags),
Validator('sonarr.excluded_series_types', must_exist=True, default=[], is_type_of=list),
Validator('sonarr.use_ffprobe_cache', must_exist=True, default=True, is_type_of=bool),
Validator('sonarr.exclude_season_zero', must_exist=True, default=False, is_type_of=bool),
Validator('sonarr.defer_search_signalr', must_exist=True, default=False, is_type_of=bool),
Validator('sonarr.sync_only_monitored_series', must_exist=True, default=False, is_type_of=bool),
Validator('sonarr.sync_only_monitored_episodes', must_exist=True, default=False, is_type_of=bool),
# radarr section
Validator('radarr.ip', must_exist=True, default='127.0.0.1', is_type_of=str),
Validator('radarr.port', must_exist=True, default=7878, is_type_of=int, gte=1, lte=65535),
Validator('radarr.base_url', must_exist=True, default='/', is_type_of=str),
Validator('radarr.ssl', must_exist=True, default=False, is_type_of=bool),
Validator('radarr.http_timeout', must_exist=True, default=60, is_type_of=int,
is_in=[60, 120, 180, 240, 300, 600]),
Validator('radarr.apikey', must_exist=True, default='', is_type_of=str),
Validator('radarr.full_update', must_exist=True, default='Daily', is_type_of=str,
is_in=['Manually', 'Daily', 'Weekly']),
Validator('radarr.full_update_day', must_exist=True, default=6, is_type_of=int, gte=0, lte=6),
Validator('radarr.full_update_hour', must_exist=True, default=4, is_type_of=int, gte=0, lte=23),
Validator('radarr.only_monitored', must_exist=True, default=False, is_type_of=bool),
Validator('radarr.movies_sync', must_exist=True, default=60, is_type_of=int,
is_in=[15, 60, 180, 360, 720, 1440, 10080, ONE_HUNDRED_YEARS_IN_MINUTES]),
Validator('radarr.excluded_tags', must_exist=True, default=[], is_type_of=list, condition=validate_tags),
Validator('radarr.use_ffprobe_cache', must_exist=True, default=True, is_type_of=bool),
Validator('radarr.defer_search_signalr', must_exist=True, default=False, is_type_of=bool),
Validator('radarr.sync_only_monitored_movies', must_exist=True, default=False, is_type_of=bool),
# plex section
Validator('plex.ip', must_exist=True, default='127.0.0.1', is_type_of=str),
Validator('plex.port', must_exist=True, default=32400, is_type_of=int, gte=1, lte=65535),
Validator('plex.ssl', must_exist=True, default=False, is_type_of=bool),
Validator('plex.apikey', must_exist=True, default='', is_type_of=str),
Validator('plex.movie_library', must_exist=True, default='', is_type_of=str),
Validator('plex.series_library', must_exist=True, default='', is_type_of=str),
Validator('plex.set_movie_added', must_exist=True, default=False, is_type_of=bool),
Validator('plex.set_episode_added', must_exist=True, default=False, is_type_of=bool),
Validator('plex.update_movie_library', must_exist=True, default=False, is_type_of=bool),
Validator('plex.update_series_library', must_exist=True, default=False, is_type_of=bool),
# proxy section
Validator('proxy.type', must_exist=True, default=None, is_type_of=(NoneType, str),
is_in=[None, 'socks5', 'socks5h', 'http']),
Validator('proxy.url', must_exist=True, default='', is_type_of=str),
Validator('proxy.port', must_exist=True, default='', is_type_of=(str, int)),
Validator('proxy.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('proxy.password', must_exist=True, default='', is_type_of=str, cast=str),
Validator('proxy.exclude', must_exist=True, default=["localhost", "127.0.0.1"], is_type_of=list),
# opensubtitles.org section
Validator('opensubtitles.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('opensubtitles.password', must_exist=True, default='', is_type_of=str, cast=str),
Validator('opensubtitles.use_tag_search', must_exist=True, default=False, is_type_of=bool),
Validator('opensubtitles.vip', must_exist=True, default=False, is_type_of=bool),
Validator('opensubtitles.ssl', must_exist=True, default=False, is_type_of=bool),
Validator('opensubtitles.timeout', must_exist=True, default=15, is_type_of=int, gte=1),
Validator('opensubtitles.skip_wrong_fps', must_exist=True, default=False, is_type_of=bool),
# opensubtitles.com section
Validator('opensubtitlescom.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('opensubtitlescom.password', must_exist=True, default='', is_type_of=str, cast=str),
Validator('opensubtitlescom.use_hash', must_exist=True, default=True, is_type_of=bool),
Validator('opensubtitlescom.include_ai_translated', must_exist=True, default=False, is_type_of=bool),
# napiprojekt section
Validator('napiprojekt.only_authors', must_exist=True, default=False, is_type_of=bool),
Validator('napiprojekt.only_real_names', must_exist=True, default=False, is_type_of=bool),
# addic7ed section
Validator('addic7ed.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('addic7ed.password', must_exist=True, default='', is_type_of=str, cast=str),
Validator('addic7ed.cookies', must_exist=True, default='', is_type_of=str),
Validator('addic7ed.user_agent', must_exist=True, default='', is_type_of=str),
Validator('addic7ed.vip', must_exist=True, default=False, is_type_of=bool),
# animetosho section
Validator('animetosho.search_threshold', must_exist=True, default=6, is_type_of=int, gte=1, lte=15),
Validator('animetosho.anidb_api_client', must_exist=True, default='', is_type_of=str, cast=str),
Validator('animetosho.anidb_api_client_ver', must_exist=True, default=1, is_type_of=int, gte=1, lte=9),
# avistaz section
Validator('avistaz.cookies', must_exist=True, default='', is_type_of=str),
Validator('avistaz.user_agent', must_exist=True, default='', is_type_of=str),
# cinemaz section
Validator('cinemaz.cookies', must_exist=True, default='', is_type_of=str),
Validator('cinemaz.user_agent', must_exist=True, default='', is_type_of=str),
# podnapisi section
Validator('podnapisi.verify_ssl', must_exist=True, default=True, is_type_of=bool),
# subf2m section
Validator('subf2m.verify_ssl', must_exist=True, default=True, is_type_of=bool),
Validator('subf2m.user_agent', must_exist=True, default='', is_type_of=str),
# hdbits section
Validator('hdbits.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('hdbits.passkey', must_exist=True, default='', is_type_of=str, cast=str),
# whisperai section
Validator('whisperai.endpoint', must_exist=True, default='http://127.0.0.1:9000', is_type_of=str),
Validator('whisperai.response', must_exist=True, default=5, is_type_of=int, gte=1),
Validator('whisperai.timeout', must_exist=True, default=3600, is_type_of=int, gte=1),
Validator('whisperai.pass_video_name', must_exist=True, default=False, is_type_of=bool),
Validator('whisperai.loglevel', must_exist=True, default='INFO', is_type_of=str,
is_in=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']),
# legendasdivx section
Validator('legendasdivx.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('legendasdivx.password', must_exist=True, default='', is_type_of=str, cast=str),
Validator('legendasdivx.skip_wrong_fps', must_exist=True, default=False, is_type_of=bool),
# legendasnet section
Validator('legendasnet.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('legendasnet.password', must_exist=True, default='', is_type_of=str, cast=str),
# ktuvit section
Validator('ktuvit.email', must_exist=True, default='', is_type_of=str),
Validator('ktuvit.hashed_password', must_exist=True, default='', is_type_of=str, cast=str),
# xsubs section
Validator('xsubs.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('xsubs.password', must_exist=True, default='', is_type_of=str, cast=str),
# assrt section
Validator('assrt.token', must_exist=True, default='', is_type_of=str, cast=str),
# anticaptcha section
Validator('anticaptcha.anti_captcha_key', must_exist=True, default='', is_type_of=str),
# deathbycaptcha section
Validator('deathbycaptcha.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('deathbycaptcha.password', must_exist=True, default='', is_type_of=str, cast=str),
# napisy24 section
Validator('napisy24.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('napisy24.password', must_exist=True, default='', is_type_of=str, cast=str),
# betaseries section
Validator('betaseries.token', must_exist=True, default='', is_type_of=str, cast=str),
# analytics section
Validator('analytics.enabled', must_exist=True, default=True, is_type_of=bool),
# jimaku section
Validator('jimaku.api_key', must_exist=True, default='', is_type_of=str),
Validator('jimaku.enable_name_search_fallback', must_exist=True, default=True, is_type_of=bool),
Validator('jimaku.enable_archives_download', must_exist=True, default=False, is_type_of=bool),
Validator('jimaku.enable_ai_subs', must_exist=True, default=False, is_type_of=bool),
# titlovi section
Validator('titlovi.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('titlovi.password', must_exist=True, default='', is_type_of=str, cast=str),
# titulky section
Validator('titulky.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('titulky.password', must_exist=True, default='', is_type_of=str, cast=str),
Validator('titulky.approved_only', must_exist=True, default=False, is_type_of=bool),
Validator('titulky.skip_wrong_fps', must_exist=True, default=False, is_type_of=bool),
# embeddedsubtitles section
Validator('embeddedsubtitles.included_codecs', must_exist=True, default=[], is_type_of=list),
Validator('embeddedsubtitles.hi_fallback', must_exist=True, default=False, is_type_of=bool),
Validator('embeddedsubtitles.timeout', must_exist=True, default=600, is_type_of=int, gte=1),
Validator('embeddedsubtitles.unknown_as_fallback', must_exist=True, default=False, is_type_of=bool),
Validator('embeddedsubtitles.fallback_lang', must_exist=True, default='en', is_type_of=str, cast=str),
# karagarga section
Validator('karagarga.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('karagarga.password', must_exist=True, default='', is_type_of=str, cast=str),
Validator('karagarga.f_username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('karagarga.f_password', must_exist=True, default='', is_type_of=str, cast=str),
# subdl section
Validator('subdl.api_key', must_exist=True, default='', is_type_of=str, cast=str),
# turkcealtyaziorg section
Validator('turkcealtyaziorg.cookies', must_exist=True, default='', is_type_of=str),
Validator('turkcealtyaziorg.user_agent', must_exist=True, default='', is_type_of=str),
# subsync section
Validator('subsync.use_subsync', must_exist=True, default=False, is_type_of=bool),
Validator('subsync.use_subsync_threshold', must_exist=True, default=False, is_type_of=bool),
Validator('subsync.subsync_threshold', must_exist=True, default=90, is_type_of=int, gte=0, lte=100),
Validator('subsync.use_subsync_movie_threshold', must_exist=True, default=False, is_type_of=bool),
Validator('subsync.subsync_movie_threshold', must_exist=True, default=70, is_type_of=int, gte=0, lte=100),
Validator('subsync.debug', must_exist=True, default=False, is_type_of=bool),
Validator('subsync.force_audio', must_exist=True, default=False, is_type_of=bool),
Validator('subsync.checker', must_exist=True, default={}, is_type_of=dict),
Validator('subsync.checker.blacklisted_providers', must_exist=True, default=[], is_type_of=list),
Validator('subsync.checker.blacklisted_languages', must_exist=True, default=[], is_type_of=list),
Validator('subsync.no_fix_framerate', must_exist=True, default=True, is_type_of=bool),
Validator('subsync.gss', must_exist=True, default=True, is_type_of=bool),
Validator('subsync.max_offset_seconds', must_exist=True, default=60, is_type_of=int,
is_in=[60, 120, 300, 600]),
# series_scores section
Validator('series_scores.hash', must_exist=True, default=359, is_type_of=int),
Validator('series_scores.series', must_exist=True, default=180, is_type_of=int),
Validator('series_scores.year', must_exist=True, default=90, is_type_of=int),
Validator('series_scores.season', must_exist=True, default=30, is_type_of=int),
Validator('series_scores.episode', must_exist=True, default=30, is_type_of=int),
Validator('series_scores.release_group', must_exist=True, default=14, is_type_of=int),
Validator('series_scores.source', must_exist=True, default=7, is_type_of=int),
Validator('series_scores.audio_codec', must_exist=True, default=3, is_type_of=int),
Validator('series_scores.resolution', must_exist=True, default=2, is_type_of=int),
Validator('series_scores.video_codec', must_exist=True, default=2, is_type_of=int),
Validator('series_scores.streaming_service', must_exist=True, default=1, is_type_of=int),
Validator('series_scores.hearing_impaired', must_exist=True, default=1, is_type_of=int),
# movie_scores section
Validator('movie_scores.hash', must_exist=True, default=119, is_type_of=int),
Validator('movie_scores.title', must_exist=True, default=60, is_type_of=int),
Validator('movie_scores.year', must_exist=True, default=30, is_type_of=int),
Validator('movie_scores.release_group', must_exist=True, default=13, is_type_of=int),
Validator('movie_scores.source', must_exist=True, default=7, is_type_of=int),
Validator('movie_scores.audio_codec', must_exist=True, default=3, is_type_of=int),
Validator('movie_scores.resolution', must_exist=True, default=2, is_type_of=int),
Validator('movie_scores.video_codec', must_exist=True, default=2, is_type_of=int),
Validator('movie_scores.streaming_service', must_exist=True, default=1, is_type_of=int),
Validator('movie_scores.edition', must_exist=True, default=1, is_type_of=int),
Validator('movie_scores.hearing_impaired', must_exist=True, default=1, is_type_of=int),
# postgresql section
Validator('postgresql.enabled', must_exist=True, default=False, is_type_of=bool),
Validator('postgresql.host', must_exist=True, default='localhost', is_type_of=str),
Validator('postgresql.port', must_exist=True, default=5432, is_type_of=int, gte=1, lte=65535),
Validator('postgresql.database', must_exist=True, default='', is_type_of=str),
Validator('postgresql.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('postgresql.password', must_exist=True, default='', is_type_of=str, cast=str),
# anidb section
Validator('anidb.api_client', must_exist=True, default='', is_type_of=str),
Validator('anidb.api_client_ver', must_exist=True, default=1, is_type_of=int),
]
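# Illustrative sketch of how the validators above behave (not shipped code):
# a Validator both type-checks a key and back-fills its default, e.g.
#
#     from dynaconf import Dynaconf, Validator
#     s = Dynaconf()
#     s.validators.register(Validator('sonarr.port', must_exist=True,
#                                     default=8989, is_type_of=int, gte=1, lte=65535))
#     s.validators.validate_all()
#     assert s.sonarr.port == 8989  # default applied to the missing key
#
# whereas an out-of-range value (say sonarr.port = 0) raises
# dynaconf.ValidationError, which the retry loop further down catches so it can
# restore the default instead of crashing.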
def convert_ini_to_yaml(config_file):
import configparser
import yaml
config_object = configparser.RawConfigParser()
with open(config_file, "r") as file:  # ensure the handle is closed once parsed
config_object.read_file(file)
output_dict = dict()
sections = config_object.sections()
for section in sections:
items = config_object.items(section)
output_dict[section] = dict()
for item in items:
try:
output_dict[section].update({item[0]: ast.literal_eval(item[1])})
except (ValueError, TypeError, SyntaxError, MemoryError, RecursionError):
output_dict[section].update({item[0]: item[1]})
with open(os.path.join(os.path.dirname(config_file), 'config.yaml'), 'w') as file:
yaml.dump(output_dict, file)
os.replace(config_file, f'{config_file}.old')
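# Illustrative example of the conversion (assumed input): an INI fragment like
#
#     [sonarr]
#     port = 8989
#     ssl = False
#
# is written out as the YAML mapping
#
#     sonarr:
#       port: 8989
#       ssl: false
#
# because ast.literal_eval() promotes "8989" to int and "False" to bool before
# yaml.dump() serializes the dict; strings it cannot parse are kept verbatim.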
config_yaml_file = os.path.join(args.config_dir, 'config', 'config.yaml')
config_ini_file = os.path.join(args.config_dir, 'config', 'config.ini')
if os.path.exists(config_ini_file) and not os.path.exists(config_yaml_file):
convert_ini_to_yaml(config_ini_file)
elif not os.path.exists(config_yaml_file):
if not os.path.isdir(os.path.dirname(config_yaml_file)):
os.makedirs(os.path.dirname(config_yaml_file))
open(config_yaml_file, mode='w').close()
settings = Dynaconf(
settings_file=config_yaml_file,
core_loaders=['YAML'],
apply_default_on_none=True,
)
settings.validators.register(*validators)
failed_validator = True
while failed_validator:
try:
settings.validators.validate_all()
failed_validator = False
except ValidationError as e:
current_validator_details = e.details[0][0]
if hasattr(current_validator_details, 'default') and current_validator_details.default is not empty:
settings[current_validator_details.names[0]] = current_validator_details.default
else:
logging.critical(f"Value for {current_validator_details.names[0]} doesn't pass validation and there's no "
f"default value. This issue must be reported to and fixed by the development team. "
f"Bazarr won't work until it's been fixed.")
stop_bazarr(EXIT_VALIDATION_ERROR)
def write_config():
write(settings_path=config_yaml_file,
settings_data={k.lower(): v for k, v in settings.as_dict().items()},
merge=False)
base_url = settings.general.base_url.rstrip('/')
ignore_keys = ['flask_secret_key']
array_keys = ['excluded_tags',
'exclude',
'included_codecs',
'subzero_mods',
'excluded_series_types',
'enabled_providers',
'enabled_integrations',
'path_mappings',
'path_mappings_movie',
'remove_profile_tags',
'language_equals',
'blacklisted_languages',
'blacklisted_providers']
empty_values = ['', 'None', 'null', 'undefined', None, []]
str_keys = ['chmod', 'log_include_filter', 'log_exclude_filter', 'password', 'f_password', 'hashed_password']
# Increase Sonarr and Radarr sync interval since we now use SignalR feed to update in real time
if settings.sonarr.series_sync < 15:
settings.sonarr.series_sync = 60
if settings.radarr.movies_sync < 15:
settings.radarr.movies_sync = 60
# Make sure to get rid of double slashes in base_url
settings.general.base_url = base_url_slash_cleaner(uri=settings.general.base_url)
settings.sonarr.base_url = base_url_slash_cleaner(uri=settings.sonarr.base_url)
settings.radarr.base_url = base_url_slash_cleaner(uri=settings.radarr.base_url)
# increase delay between searches to reduce impact on providers
if settings.general.wanted_search_frequency == 3:
settings.general.wanted_search_frequency = 6
if settings.general.wanted_search_frequency_movie == 3:
settings.general.wanted_search_frequency_movie = 6
# backward compatibility for the embeddedsubtitles provider
if hasattr(settings.embeddedsubtitles, 'unknown_as_english'):
if settings.embeddedsubtitles.unknown_as_english:
settings.embeddedsubtitles.unknown_as_fallback = True
settings.embeddedsubtitles.fallback_lang = 'en'
del settings.embeddedsubtitles.unknown_as_english
# save updated settings to file
write_config()
def get_settings():
# return {k.lower(): v for k, v in settings.as_dict().items()}
settings_to_return = {}
for k, v in settings.as_dict().items():
if isinstance(v, dict):
k = k.lower()
settings_to_return[k] = dict()
for subk, subv in v.items():
if subk.lower() in ignore_keys:
continue
if subv in empty_values and subk.lower() in array_keys:
settings_to_return[k].update({subk: []})
elif subk == 'subzero_mods':
settings_to_return[k].update({subk: get_array_from(subv)})
else:
settings_to_return[k].update({subk: subv})
return settings_to_return
def validate_log_regex():
# handle bug in dynaconf that changes strings to numbers, so change them back to str
if not isinstance(settings.log.include_filter, str):
settings.log.include_filter = str(settings.log.include_filter)
if not isinstance(settings.log.exclude_filter, str):
settings.log.exclude_filter = str(settings.log.exclude_filter)
if settings.log.use_regex:
# compile any regular expressions specified to see if they are valid
# if invalid, tell the user which one
try:
re.compile(settings.log.include_filter)
except Exception:
raise ValidationError(f"Include filter: invalid regular expression: {settings.log.include_filter}")
try:
re.compile(settings.log.exclude_filter)
except Exception:
raise ValidationError(f"Exclude filter: invalid regular expression: {settings.log.exclude_filter}")
def save_settings(settings_items):
configure_debug = False
configure_captcha = False
update_schedule = False
sonarr_changed = False
radarr_changed = False
update_path_map = False
configure_proxy = False
exclusion_updated = False
sonarr_exclusion_updated = False
radarr_exclusion_updated = False
use_embedded_subs_changed = False
undefined_audio_track_default_changed = False
undefined_subtitles_track_default_changed = False
audio_tracks_parsing_changed = False
reset_providers = False
# Subzero Mods
update_subzero = False
subzero_mods = get_array_from(settings.general.subzero_mods)
if len(subzero_mods) == 1 and subzero_mods[0] == '':
subzero_mods = []
for key, value in settings_items:
settings_keys = key.split('-')
# Make sure that text based form values aren't passed as a list
if isinstance(value, list) and len(value) == 1 and settings_keys[-1] not in array_keys:
value = value[0]
if value in empty_values and value != '':
value = None
# try to cast string as integer
if isinstance(value, str) and settings_keys[-1] not in str_keys:
try:
value = int(value)
except ValueError:
pass
# Make sure empty language lists are stored correctly
if settings_keys[-1] in array_keys and value[0] in empty_values:
value = []
# Handle path mappings settings since they are arrays nested inside an array
if settings_keys[-1] in ['path_mappings', 'path_mappings_movie']:
value = [x.split(',') for x in value if isinstance(x, str)]
if value == 'true':
value = True
elif value == 'false':
value = False
if key in ['settings-general-use_embedded_subs', 'settings-general-ignore_pgs_subs',
'settings-general-ignore_vobsub_subs', 'settings-general-ignore_ass_subs']:
use_embedded_subs_changed = True
if key == 'settings-general-default_und_audio_lang':
undefined_audio_track_default_changed = True
if key == 'settings-general-parse_embedded_audio_track':
audio_tracks_parsing_changed = True
if key == 'settings-general-default_und_embedded_subtitles_lang':
undefined_subtitles_track_default_changed = True
if key in ['settings-general-base_url', 'settings-sonarr-base_url', 'settings-radarr-base_url']:
value = base_url_slash_cleaner(value)
if key == 'settings-auth-password':
if value != settings.auth.password and value is not None:
value = hashlib.md5(f"{value}".encode('utf-8')).hexdigest()
if key == 'settings-general-debug':
configure_debug = True
if key == 'settings-general-hi_extension':
os.environ["SZ_HI_EXTENSION"] = str(value)
if key in ['settings-general-anti_captcha_provider', 'settings-anticaptcha-anti_captcha_key',
'settings-deathbycaptcha-username', 'settings-deathbycaptcha-password']:
configure_captcha = True
if key in ['update_schedule', 'settings-general-use_sonarr', 'settings-general-use_radarr',
'settings-general-auto_update', 'settings-general-upgrade_subs',
'settings-sonarr-series_sync', 'settings-radarr-movies_sync',
'settings-sonarr-full_update', 'settings-sonarr-full_update_day', 'settings-sonarr-full_update_hour',
'settings-radarr-full_update', 'settings-radarr-full_update_day', 'settings-radarr-full_update_hour',
'settings-general-wanted_search_frequency', 'settings-general-wanted_search_frequency_movie',
'settings-general-upgrade_frequency', 'settings-backup-frequency', 'settings-backup-day',
'settings-backup-hour']:
update_schedule = True
if key in ['settings-general-use_sonarr', 'settings-sonarr-ip', 'settings-sonarr-port',
'settings-sonarr-base_url', 'settings-sonarr-ssl', 'settings-sonarr-apikey']:
sonarr_changed = True
if key in ['settings-general-use_radarr', 'settings-radarr-ip', 'settings-radarr-port',
'settings-radarr-base_url', 'settings-radarr-ssl', 'settings-radarr-apikey']:
radarr_changed = True
if key in ['settings-general-path_mappings', 'settings-general-path_mappings_movie']:
update_path_map = True
if key in ['settings-proxy-type', 'settings-proxy-url', 'settings-proxy-port', 'settings-proxy-username',
'settings-proxy-password']:
configure_proxy = True
if key in ['settings-sonarr-excluded_tags', 'settings-sonarr-only_monitored',
'settings-sonarr-excluded_series_types', 'settings-sonarr-exclude_season_zero',
'settings-radarr-excluded_tags', 'settings-radarr-only_monitored']:
exclusion_updated = True
if key in ['settings-sonarr-excluded_tags', 'settings-sonarr-only_monitored',
'settings-sonarr-excluded_series_types', 'settings-sonarr-exclude_season_zero']:
sonarr_exclusion_updated = True
if key in ['settings-radarr-excluded_tags', 'settings-radarr-only_monitored']:
radarr_exclusion_updated = True
if key == 'settings-addic7ed-username':
if value != settings.addic7ed.username:  # compare the incoming value, not the key
reset_providers = True
region.delete('addic7ed_data')
elif key == 'settings-addic7ed-password':
if value != settings.addic7ed.password:
reset_providers = True
region.delete('addic7ed_data')
if key == 'settings-legendasdivx-username':
if value != settings.legendasdivx.username:
reset_providers = True
region.delete('legendasdivx_cookies2')
elif key == 'settings-legendasdivx-password':
if value != settings.legendasdivx.password:
reset_providers = True
region.delete('legendasdivx_cookies2')
if key == 'settings-opensubtitles-username':
if value != settings.opensubtitles.username:
reset_providers = True
region.delete('os_token')
elif key == 'settings-opensubtitles-password':
if value != settings.opensubtitles.password:
reset_providers = True
region.delete('os_token')
if key == 'settings-opensubtitlescom-username':
if value != settings.opensubtitlescom.username:
reset_providers = True
region.delete('oscom_token')
elif key == 'settings-opensubtitlescom-password':
if value != settings.opensubtitlescom.password:
reset_providers = True
region.delete('oscom_token')
if key == 'settings-titlovi-username':
if value != settings.titlovi.username:
reset_providers = True
region.delete('titlovi_token')
elif key == 'settings-titlovi-password':
if value != settings.titlovi.password:
reset_providers = True
region.delete('titlovi_token')
if reset_providers:
from .get_providers import reset_throttled_providers
reset_throttled_providers(only_auth_or_conf_error=True)
if settings_keys[0] == 'settings':
if len(settings_keys) == 3:
settings[settings_keys[1]][settings_keys[2]] = value
elif len(settings_keys) == 4:
settings[settings_keys[1]][settings_keys[2]][settings_keys[3]] = value
if settings_keys[0] == 'subzero':
mod = settings_keys[1]
if mod in subzero_mods and not value:
subzero_mods.remove(mod)
elif value:
subzero_mods.append(mod)
# Handle color
if mod == 'color':
previous = None
for exist_mod in subzero_mods:
if exist_mod.startswith('color'):
previous = exist_mod
break
if previous is not None:
subzero_mods.remove(previous)
if value not in empty_values:
subzero_mods.append(value)
update_subzero = True
if use_embedded_subs_changed or undefined_audio_track_default_changed:
from .scheduler import scheduler
from subtitles.indexer.series import list_missing_subtitles
from subtitles.indexer.movies import list_missing_subtitles_movies
if settings.general.use_sonarr:
scheduler.add_job(list_missing_subtitles, kwargs={'send_event': True})
if settings.general.use_radarr:
scheduler.add_job(list_missing_subtitles_movies, kwargs={'send_event': True})
if undefined_subtitles_track_default_changed:
from .scheduler import scheduler
from subtitles.indexer.series import series_full_scan_subtitles
from subtitles.indexer.movies import movies_full_scan_subtitles
if settings.general.use_sonarr:
scheduler.add_job(series_full_scan_subtitles, kwargs={'use_cache': True})
if settings.general.use_radarr:
scheduler.add_job(movies_full_scan_subtitles, kwargs={'use_cache': True})
if audio_tracks_parsing_changed:
from .scheduler import scheduler
if settings.general.use_sonarr:
from sonarr.sync.series import update_series
scheduler.add_job(update_series, kwargs={'send_event': True}, max_instances=1)
if settings.general.use_radarr:
from radarr.sync.movies import update_movies
scheduler.add_job(update_movies, kwargs={'send_event': True}, max_instances=1)
if update_subzero:
settings.general.subzero_mods = ','.join(subzero_mods)
try:
settings.validators.validate()
validate_log_regex()
except ValidationError:
settings.reload()
raise
else:
write_config()
# Reconfigure Bazarr to reflect changes
if configure_debug:
from .logger import configure_logging
configure_logging(settings.general.debug or args.debug)
if configure_captcha:
configure_captcha_func()
if update_schedule:
from .scheduler import scheduler
from .event_handler import event_stream
scheduler.update_configurable_tasks()
event_stream(type='task')
if sonarr_changed:
from .signalr_client import sonarr_signalr_client
try:
sonarr_signalr_client.restart()
except Exception:
pass
if radarr_changed:
from .signalr_client import radarr_signalr_client
try:
radarr_signalr_client.restart()
except Exception:
pass
if update_path_map:
from utilities.path_mappings import path_mappings
path_mappings.update()
if configure_proxy:
configure_proxy_func()
if exclusion_updated:
from .event_handler import event_stream
event_stream(type='badges')
if sonarr_exclusion_updated:
event_stream(type='reset-episode-wanted')
if radarr_exclusion_updated:
event_stream(type='reset-movie-wanted')
def get_array_from(property):
if property:
if '[' in property:
return ast.literal_eval(property)
elif ',' in property:
return property.split(',')
else:
return [property]
else:
return []
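# Illustrative behaviour of get_array_from() (assumed inputs):
#   get_array_from("['a', 'b']")  -> ['a', 'b']   # Python literal list
#   get_array_from("a,b")         -> ['a', 'b']   # comma-separated string
#   get_array_from("a")           -> ['a']        # single value wrapped in a list
#   get_array_from("")            -> []           # falsy input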
def configure_captcha_func():
# set anti-captcha provider and key
if settings.general.anti_captcha_provider == 'anti-captcha' and settings.anticaptcha.anti_captcha_key != "":
os.environ["ANTICAPTCHA_CLASS"] = 'AntiCaptchaProxyLess'
os.environ["ANTICAPTCHA_ACCOUNT_KEY"] = str(settings.anticaptcha.anti_captcha_key)
elif settings.general.anti_captcha_provider == 'death-by-captcha' and settings.deathbycaptcha.username != "" and \
settings.deathbycaptcha.password != "":
os.environ["ANTICAPTCHA_CLASS"] = 'DeathByCaptchaProxyLess'
os.environ["ANTICAPTCHA_ACCOUNT_KEY"] = str(':'.join(
{settings.deathbycaptcha.username, settings.deathbycaptcha.password}))
else:
os.environ["ANTICAPTCHA_CLASS"] = ''
def configure_proxy_func():
if settings.proxy.type:
if settings.proxy.username != '' and settings.proxy.password != '':
proxy = (f'{settings.proxy.type}://{quote_plus(settings.proxy.username)}:'
f'{quote_plus(settings.proxy.password)}@{settings.proxy.url}:{settings.proxy.port}')
else:
proxy = f'{settings.proxy.type}://{settings.proxy.url}:{settings.proxy.port}'
os.environ['HTTP_PROXY'] = str(proxy)
os.environ['HTTPS_PROXY'] = str(proxy)
exclude = ','.join(settings.proxy.exclude)
os.environ['NO_PROXY'] = exclude
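# Illustrative result (assumed settings): with type='http', url='10.0.0.2',
# port=3128, username='user' and password='p@ss', the exported proxy string is
#   http://user:p%40ss@10.0.0.2:3128
# since quote_plus() percent-encodes reserved characters in the credentials,
# and NO_PROXY is set from the comma-joined exclude list.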
def get_scores():
settings = get_settings()
return {"movie": settings["movie_scores"], "episode": settings["series_scores"]}
def sync_checker(subtitle):
" This function can be extended with settings. It only takes a Subtitle argument"
logging.debug("Checker data [%s] for %s", settings.subsync.checker, subtitle)
bl_providers = settings.subsync.checker.blacklisted_providers
# TODO
# bl_languages = settings.subsync.checker.blacklisted_languages
verdicts = set()
# You can add more inner checkers. The following is a very basic one for
# providers, but you can write your own functions to handle more complex
# checks; the subtitle data is available to compare against.
verdicts.add(subtitle.provider_name not in bl_providers)
met = False not in verdicts
if met is True:
logging.debug("BAZARR Sync checker passed.")
return True
else:
logging.debug("BAZARR Sync checker not passed. Won't sync.")
return False
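# Illustrative extension (sketch only, not shipped behaviour): once the
# blacklisted_languages TODO above is wired in, a second inner checker could
# veto by language the same way, e.g.
#   verdicts.add(str(subtitle.language) not in bl_languages)
# keeping the "all verdicts must be True" contract intact.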

567
bazarr/app/database.py Normal file

@ -0,0 +1,567 @@
# -*- coding: utf-8 -*-
import ast
import atexit
import json
import logging
import os
import flask_migrate
import signal
from dogpile.cache import make_region
from datetime import datetime
from sqlalchemy import create_engine, inspect, DateTime, ForeignKey, Integer, LargeBinary, Text, func, text, BigInteger
# imported here so that other modules can import these indirectly later
from sqlalchemy import update, delete, select, func # noqa W0611
from sqlalchemy.orm import scoped_session, sessionmaker, mapped_column, close_all_sessions
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import NullPool
from flask_sqlalchemy import SQLAlchemy
from .config import settings
from .get_args import args
logger = logging.getLogger(__name__)
POSTGRES_ENABLED_ENV = os.getenv("POSTGRES_ENABLED")
if POSTGRES_ENABLED_ENV:
postgresql = POSTGRES_ENABLED_ENV.lower() == 'true'
else:
postgresql = settings.postgresql.enabled
region = make_region().configure('dogpile.cache.memory')
migrations_directory = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'migrations')
if postgresql:
# insert is different between database types
from sqlalchemy.dialects.postgresql import insert # noqa E402
from sqlalchemy.engine import URL # noqa E402
postgres_database = os.getenv("POSTGRES_DATABASE", settings.postgresql.database)
postgres_username = os.getenv("POSTGRES_USERNAME", settings.postgresql.username)
postgres_password = os.getenv("POSTGRES_PASSWORD", settings.postgresql.password)
postgres_host = os.getenv("POSTGRES_HOST", settings.postgresql.host)
postgres_port = os.getenv("POSTGRES_PORT", settings.postgresql.port)
logger.debug(f"Connecting to PostgreSQL database: {postgres_host}:{postgres_port}/{postgres_database}")
url = URL.create(
drivername="postgresql",
username=postgres_username,
password=postgres_password,
host=postgres_host,
port=postgres_port,
database=postgres_database
)
engine = create_engine(url, poolclass=NullPool, isolation_level="AUTOCOMMIT")
else:
# insert is different between database types
from sqlalchemy.dialects.sqlite import insert # noqa E402
url = f'sqlite:///{os.path.join(args.config_dir, "db", "bazarr.db")}'
logger.debug(f"Connecting to SQLite database: {url}")
engine = create_engine(url, poolclass=NullPool, isolation_level="AUTOCOMMIT")
from sqlalchemy.engine import Engine
from sqlalchemy import event
@event.listens_for(Engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
cursor.close()
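# Illustrative effect (assumed schema below): with PRAGMA foreign_keys=ON,
# deleting a table_shows row also removes its table_episodes rows through the
# ondelete='CASCADE' foreign keys; without this per-connection pragma, SQLite
# silently ignores those constraints.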
session_factory = sessionmaker(bind=engine)
database = scoped_session(session_factory)
def close_database():
close_all_sessions()
engine.dispose()
@atexit.register
def _stop_worker_threads():
database.remove()
signal.signal(signal.SIGTERM, lambda signal_no, frame: close_database())
Base = declarative_base()
metadata = Base.metadata
class System(Base):
__tablename__ = 'system'
id = mapped_column(Integer, primary_key=True)
configured = mapped_column(Text)
updated = mapped_column(Text)
class TableAnnouncements(Base):
__tablename__ = 'table_announcements'
id = mapped_column(Integer, primary_key=True)
timestamp = mapped_column(DateTime, nullable=False, default=datetime.now)
hash = mapped_column(Text)
text = mapped_column(Text)
class TableBlacklist(Base):
__tablename__ = 'table_blacklist'
id = mapped_column(Integer, primary_key=True)
language = mapped_column(Text)
provider = mapped_column(Text)
sonarr_episode_id = mapped_column(Integer, ForeignKey('table_episodes.sonarrEpisodeId', ondelete='CASCADE'))
sonarr_series_id = mapped_column(Integer, ForeignKey('table_shows.sonarrSeriesId', ondelete='CASCADE'))
subs_id = mapped_column(Text)
timestamp = mapped_column(DateTime, default=datetime.now)
class TableBlacklistMovie(Base):
__tablename__ = 'table_blacklist_movie'
id = mapped_column(Integer, primary_key=True)
language = mapped_column(Text)
provider = mapped_column(Text)
radarr_id = mapped_column(Integer, ForeignKey('table_movies.radarrId', ondelete='CASCADE'))
subs_id = mapped_column(Text)
timestamp = mapped_column(DateTime, default=datetime.now)
class TableEpisodes(Base):
__tablename__ = 'table_episodes'
audio_codec = mapped_column(Text)
audio_language = mapped_column(Text)
created_at_timestamp = mapped_column(DateTime)
episode = mapped_column(Integer, nullable=False)
episode_file_id = mapped_column(Integer)
failedAttempts = mapped_column(Text)
ffprobe_cache = mapped_column(LargeBinary)
file_size = mapped_column(BigInteger)
format = mapped_column(Text)
missing_subtitles = mapped_column(Text)
monitored = mapped_column(Text)
path = mapped_column(Text, nullable=False)
resolution = mapped_column(Text)
sceneName = mapped_column(Text)
season = mapped_column(Integer, nullable=False)
sonarrEpisodeId = mapped_column(Integer, primary_key=True)
sonarrSeriesId = mapped_column(Integer, ForeignKey('table_shows.sonarrSeriesId', ondelete='CASCADE'))
subtitles = mapped_column(Text)
title = mapped_column(Text, nullable=False)
updated_at_timestamp = mapped_column(DateTime)
video_codec = mapped_column(Text)
class TableHistory(Base):
__tablename__ = 'table_history'
id = mapped_column(Integer, primary_key=True)
action = mapped_column(Integer, nullable=False)
description = mapped_column(Text, nullable=False)
language = mapped_column(Text)
provider = mapped_column(Text)
score = mapped_column(Integer)
sonarrEpisodeId = mapped_column(Integer, ForeignKey('table_episodes.sonarrEpisodeId', ondelete='CASCADE'))
sonarrSeriesId = mapped_column(Integer, ForeignKey('table_shows.sonarrSeriesId', ondelete='CASCADE'))
subs_id = mapped_column(Text)
subtitles_path = mapped_column(Text)
timestamp = mapped_column(DateTime, nullable=False, default=datetime.now)
video_path = mapped_column(Text)
matched = mapped_column(Text)
not_matched = mapped_column(Text)
upgradedFromId = mapped_column(Integer, ForeignKey('table_history.id'))
class TableHistoryMovie(Base):
__tablename__ = 'table_history_movie'
id = mapped_column(Integer, primary_key=True)
action = mapped_column(Integer, nullable=False)
description = mapped_column(Text, nullable=False)
language = mapped_column(Text)
provider = mapped_column(Text)
radarrId = mapped_column(Integer, ForeignKey('table_movies.radarrId', ondelete='CASCADE'))
score = mapped_column(Integer)
subs_id = mapped_column(Text)
subtitles_path = mapped_column(Text)
timestamp = mapped_column(DateTime, nullable=False, default=datetime.now)
video_path = mapped_column(Text)
matched = mapped_column(Text)
not_matched = mapped_column(Text)
upgradedFromId = mapped_column(Integer, ForeignKey('table_history_movie.id'))
class TableLanguagesProfiles(Base):
__tablename__ = 'table_languages_profiles'
profileId = mapped_column(Integer, primary_key=True)
cutoff = mapped_column(Integer)
originalFormat = mapped_column(Integer)
items = mapped_column(Text, nullable=False)
name = mapped_column(Text, nullable=False)
mustContain = mapped_column(Text)
mustNotContain = mapped_column(Text)
tag = mapped_column(Text)
class TableMovies(Base):
__tablename__ = 'table_movies'
alternativeTitles = mapped_column(Text)
audio_codec = mapped_column(Text)
audio_language = mapped_column(Text)
created_at_timestamp = mapped_column(DateTime)
failedAttempts = mapped_column(Text)
fanart = mapped_column(Text)
ffprobe_cache = mapped_column(LargeBinary)
file_size = mapped_column(BigInteger)
format = mapped_column(Text)
imdbId = mapped_column(Text)
missing_subtitles = mapped_column(Text)
monitored = mapped_column(Text)
movie_file_id = mapped_column(Integer)
overview = mapped_column(Text)
path = mapped_column(Text, nullable=False, unique=True)
poster = mapped_column(Text)
profileId = mapped_column(Integer, ForeignKey('table_languages_profiles.profileId', ondelete='SET NULL'))
radarrId = mapped_column(Integer, primary_key=True)
resolution = mapped_column(Text)
sceneName = mapped_column(Text)
sortTitle = mapped_column(Text)
subtitles = mapped_column(Text)
tags = mapped_column(Text)
title = mapped_column(Text, nullable=False)
tmdbId = mapped_column(Text, nullable=False, unique=True)
updated_at_timestamp = mapped_column(DateTime)
video_codec = mapped_column(Text)
year = mapped_column(Text)
class TableMoviesRootfolder(Base):
__tablename__ = 'table_movies_rootfolder'
accessible = mapped_column(Integer)
error = mapped_column(Text)
id = mapped_column(Integer, primary_key=True)
path = mapped_column(Text)
class TableSettingsLanguages(Base):
__tablename__ = 'table_settings_languages'
code3 = mapped_column(Text, primary_key=True)
code2 = mapped_column(Text)
code3b = mapped_column(Text)
enabled = mapped_column(Integer)
name = mapped_column(Text, nullable=False)
class TableSettingsNotifier(Base):
__tablename__ = 'table_settings_notifier'
name = mapped_column(Text, primary_key=True)
enabled = mapped_column(Integer)
url = mapped_column(Text)
class TableShows(Base):
__tablename__ = 'table_shows'
tvdbId = mapped_column(Integer)
alternativeTitles = mapped_column(Text)
audio_language = mapped_column(Text)
created_at_timestamp = mapped_column(DateTime)
ended = mapped_column(Text)
fanart = mapped_column(Text)
imdbId = mapped_column(Text)
lastAired = mapped_column(Text)
monitored = mapped_column(Text)
overview = mapped_column(Text)
path = mapped_column(Text, nullable=False, unique=True)
poster = mapped_column(Text)
profileId = mapped_column(Integer, ForeignKey('table_languages_profiles.profileId', ondelete='SET NULL'))
seriesType = mapped_column(Text)
sonarrSeriesId = mapped_column(Integer, primary_key=True)
sortTitle = mapped_column(Text)
tags = mapped_column(Text)
title = mapped_column(Text, nullable=False)
updated_at_timestamp = mapped_column(DateTime)
year = mapped_column(Text)
class TableShowsRootfolder(Base):
__tablename__ = 'table_shows_rootfolder'
accessible = mapped_column(Integer)
error = mapped_column(Text)
id = mapped_column(Integer, primary_key=True)
path = mapped_column(Text)
def init_db():
database.begin()
# Create tables if they don't exist.
metadata.create_all(engine)
def create_db_revision(app):
logging.info("Creating a new database revision for future migration")
app.config["SQLALCHEMY_DATABASE_URI"] = url
db = SQLAlchemy(app, metadata=metadata)
with app.app_context():
flask_migrate.Migrate(app, db, render_as_batch=True)
flask_migrate.migrate(directory=migrations_directory)
db.engine.dispose()
def migrate_db(app):
logging.debug("Upgrading database schema")
app.config["SQLALCHEMY_DATABASE_URI"] = url
db = SQLAlchemy(app, metadata=metadata)
insp = inspect(engine)
alembic_temp_tables_list = [x for x in insp.get_table_names() if x.startswith('_alembic_tmp_')]
for table in alembic_temp_tables_list:
database.execute(text(f"DROP TABLE IF EXISTS {table}"))
with app.app_context():
flask_migrate.Migrate(app, db, render_as_batch=True)
flask_migrate.upgrade(directory=migrations_directory)
db.engine.dispose()
# add the single system table row if it doesn't already exist
if not database.execute(
select(System)) \
.first():
database.execute(
insert(System)
.values(configured='0', updated='0'))
def get_exclusion_clause(exclusion_type):
where_clause = []
if exclusion_type == 'series':
tagsList = settings.sonarr.excluded_tags
for tag in tagsList:
where_clause.append(~(TableShows.tags.contains(f"\'{tag}\'")))
else:
tagsList = settings.radarr.excluded_tags
for tag in tagsList:
where_clause.append(~(TableMovies.tags.contains(f"\'{tag}\'")))
if exclusion_type == 'series':
monitoredOnly = settings.sonarr.only_monitored
if monitoredOnly:
where_clause.append((TableEpisodes.monitored == 'True')) # noqa E712
where_clause.append((TableShows.monitored == 'True')) # noqa E712
else:
monitoredOnly = settings.radarr.only_monitored
if monitoredOnly:
where_clause.append((TableMovies.monitored == 'True')) # noqa E712
if exclusion_type == 'series':
typesList = settings.sonarr.excluded_series_types
for item in typesList:
where_clause.append((TableShows.seriesType != item))
exclude_season_zero = settings.sonarr.exclude_season_zero
if exclude_season_zero:
where_clause.append((TableEpisodes.season != 0))
return where_clause
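# Illustrative usage (sketch): the returned clauses are meant to be splatted
# into a where(), e.g.
#   stmt = select(TableEpisodes).join(TableShows).where(*get_exclusion_clause('series'))
# so every configured exclusion is ANDed onto the query.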
@region.cache_on_arguments()
def update_profile_id_list():
return [{
'profileId': x.profileId,
'name': x.name,
'cutoff': x.cutoff,
'items': json.loads(x.items),
'mustContain': ast.literal_eval(x.mustContain) if x.mustContain else [],
'mustNotContain': ast.literal_eval(x.mustNotContain) if x.mustNotContain else [],
'originalFormat': x.originalFormat,
'tag': x.tag,
} for x in database.execute(
select(TableLanguagesProfiles.profileId,
TableLanguagesProfiles.name,
TableLanguagesProfiles.cutoff,
TableLanguagesProfiles.items,
TableLanguagesProfiles.mustContain,
TableLanguagesProfiles.mustNotContain,
TableLanguagesProfiles.originalFormat,
TableLanguagesProfiles.tag))
.all()
]
def get_profiles_list(profile_id=None):
profile_id_list = update_profile_id_list()
if profile_id and profile_id != 'null':
for profile in profile_id_list:
if profile['profileId'] == profile_id:
return profile
else:
return profile_id_list
def get_desired_languages(profile_id):
for profile in update_profile_id_list():
if profile['profileId'] == profile_id:
return [x['language'] for x in profile['items']]
def get_profile_id_name(profile_id):
for profile in update_profile_id_list():
if profile['profileId'] == profile_id:
return profile['name']
def get_profile_cutoff(profile_id):
cutoff_language = None
profile_id_list = update_profile_id_list()
if profile_id and profile_id != 'null':
cutoff_language = []
for profile in profile_id_list:
profileId, name, cutoff, items, mustContain, mustNotContain, originalFormat, tag = profile.values()
if cutoff:
if profileId == int(profile_id):
for item in items:
if item['id'] == cutoff:
return [item]
elif cutoff == 65535:
cutoff_language.append(item)
if not len(cutoff_language):
cutoff_language = None
return cutoff_language
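# Illustrative behaviour (assumed profile): with items
#   [{'id': 1, 'language': 'en', ...}, {'id': 2, 'language': 'fr', ...}]
# and cutoff=2, get_profile_cutoff(profile_id) returns just the 'fr' item,
# while the sentinel cutoff value 65535 collects every item in the profile.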
def get_audio_profile_languages(audio_languages_list_str):
from languages.get_languages import alpha2_from_language, alpha3_from_language, language_from_alpha2
audio_languages = []
und_default_language = language_from_alpha2(settings.general.default_und_audio_lang)
try:
audio_languages_list = ast.literal_eval(audio_languages_list_str or '[]')
except ValueError:
pass
else:
for language in audio_languages_list:
if language:
audio_languages.append(
{"name": language,
"code2": alpha2_from_language(language) or None,
"code3": alpha3_from_language(language) or None}
)
else:
if und_default_language:
logging.debug(f"Undefined language audio track treated as {und_default_language}")
audio_languages.append(
{"name": und_default_language,
"code2": alpha2_from_language(und_default_language) or None,
"code3": alpha3_from_language(und_default_language) or None}
)
return audio_languages
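# Illustrative example (assumed input, default_und_audio_lang unset):
#   get_audio_profile_languages("['French']")
# -> [{'name': 'French', 'code2': 'fr', 'code3': 'fra'}]
# while a malformed string falls through the ValueError handler and yields [].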
def get_profile_id(series_id=None, episode_id=None, movie_id=None):
if series_id:
data = database.execute(
select(TableShows.profileId)
.where(TableShows.sonarrSeriesId == series_id))\
.first()
if data:
return data.profileId
elif episode_id:
data = database.execute(
select(TableShows.profileId)
.select_from(TableShows)
.join(TableEpisodes)
.where(TableEpisodes.sonarrEpisodeId == episode_id)) \
.first()
if data:
return data.profileId
elif movie_id:
data = database.execute(
select(TableMovies.profileId)
.where(TableMovies.radarrId == movie_id))\
.first()
if data:
return data.profileId
return None
def convert_list_to_clause(arr: list):
if isinstance(arr, list):
return f"({','.join(str(x) for x in arr)})"
else:
return ""
def upgrade_languages_profile_hi_values():
for languages_profile in (database.execute(
select(
TableLanguagesProfiles.profileId,
TableLanguagesProfiles.name,
TableLanguagesProfiles.cutoff,
TableLanguagesProfiles.items,
TableLanguagesProfiles.mustContain,
TableLanguagesProfiles.mustNotContain,
TableLanguagesProfiles.originalFormat,
TableLanguagesProfiles.tag)
))\
.all():
items = json.loads(languages_profile.items)
for language in items:
if language['hi'] == "only":
language['hi'] = "True"
elif language['hi'] in ["also", "never"]:
language['hi'] = "False"
database.execute(
update(TableLanguagesProfiles)
.values({"items": json.dumps(items)})
.where(TableLanguagesProfiles.profileId == languages_profile.profileId)
)
def fix_languages_profiles_with_duplicate_ids():
languages_profiles = database.execute(
select(TableLanguagesProfiles.profileId, TableLanguagesProfiles.items, TableLanguagesProfiles.cutoff)).all()
for languages_profile in languages_profiles:
if languages_profile.cutoff:
# ignore profiles that have a cutoff set
continue
languages_profile_ids = []
languages_profile_has_duplicate = False
languages_profile_items = json.loads(languages_profile.items)
for items in languages_profile_items:
if items['id'] in languages_profile_ids:
languages_profile_has_duplicate = True
break
else:
languages_profile_ids.append(items['id'])
if languages_profile_has_duplicate:
item_id = 0
for items in languages_profile_items:
item_id += 1
items['id'] = item_id
database.execute(
update(TableLanguagesProfiles)
.values({"items": json.dumps(languages_profile_items)})
.where(TableLanguagesProfiles.profileId == languages_profile.profileId)
)

31
bazarr/app/event_handler.py Normal file

@ -0,0 +1,31 @@
# coding=utf-8
from .app import socketio
def event_stream(type, action="update", payload=None):
"""
:param type: The type of element.
:type type: str
:param action: The action to apply to the element, either "update" or "delete".
:type action: str
:param payload: The payload to send, can be anything
"""
try:
payload = int(payload)
except (ValueError, TypeError):
pass
socketio.emit("data", {"type": type, "action": action, "payload": payload})
def show_message(msg):
event_stream(type="message", payload=msg)
def show_progress(id, header, name, value, count):
event_stream(type="progress", payload={"id": id, "header": header, "name": name, "value": value, "count": count})
def hide_progress(id):
event_stream(type="progress", action="delete", payload=id)

42
bazarr/app/get_args.py Normal file

@ -0,0 +1,42 @@
# coding=utf-8
import os
import argparse
from distutils.util import strtobool
no_update = os.environ.get("NO_UPDATE", "false").strip() == "true"
no_cli = os.environ.get("NO_CLI", "false").strip() == "true"
parser = argparse.ArgumentParser()
parser.register('type', bool, strtobool)
config_dir = os.path.realpath(os.path.join(os.path.dirname(os.path.dirname(__file__)), '..', 'data'))
parser.add_argument('-c', '--config', default=config_dir, type=str, metavar="DIR",
dest="config_dir", help="Directory containing the configuration (default: %s)" % config_dir)
parser.add_argument('-p', '--port', type=int, metavar="PORT", dest="port",
help="Port number (default: 6767)")
if not no_update:
parser.add_argument('--no-update', default=False, type=bool, const=True, metavar="BOOL", nargs="?",
help="Disable update functionality (default: False)")
parser.add_argument('--debug', default=False, type=bool, const=True, metavar="BOOL", nargs="?",
help="Enable console debugging (default: False)")
parser.add_argument('--release-update', default=False, type=bool, const=True, metavar="BOOL", nargs="?",
help="Enable file based updater (default: False)")
parser.add_argument('--dev', default=False, type=bool, const=True, metavar="BOOL", nargs="?",
help="Enable developer mode (default: False)")
parser.add_argument('--no-tasks', default=False, type=bool, const=True, metavar="BOOL", nargs="?",
help="Disable all tasks (default: False)")
parser.add_argument('--no-signalr', default=False, type=bool, const=True, metavar="BOOL", nargs="?",
help="Disable SignalR connections to Sonarr and/or Radarr (default: False)")
parser.add_argument('--create-db-revision', default=False, type=bool, const=True, metavar="BOOL", nargs="?",
help="Create a new database revision that will be used to migrate database")
if not no_cli:
args = parser.parse_args()
if no_update:
args.no_update = True
else:
args = parser.parse_args(["-c", config_dir, "--no-update"])

540
bazarr/app/get_providers.py Normal file

@ -0,0 +1,540 @@
# coding=utf-8
import os
import datetime
import pytz
import logging
import subliminal_patch
import pretty
import time
import socket
import requests
import traceback
import re
from requests import ConnectionError
from subzero.language import Language
from subliminal_patch.exceptions import TooManyRequests, APIThrottled, ParseResponseError, IPAddressBlocked, \
MustGetBlacklisted, SearchLimitReached, ProviderError
from subliminal.providers.opensubtitles import DownloadLimitReached, PaymentRequired, Unauthorized
from subliminal.exceptions import DownloadLimitExceeded, ServiceUnavailable, AuthenticationError, ConfigurationError
from subliminal import region as subliminal_cache_region
from subliminal_patch.extensions import provider_registry
from app.get_args import args
from app.config import settings
from languages.get_languages import CustomLanguage
from app.event_handler import event_stream
from utilities.binaries import get_binary
from radarr.blacklist import blacklist_log_movie
from sonarr.blacklist import blacklist_log
from utilities.analytics import event_tracker
_TRACEBACK_RE = re.compile(r'File "(.*?providers[\\/].*?)", line (\d+)')
def time_until_midnight(timezone):
# type: (datetime.datetime) -> datetime.timedelta
"""
Get timedelta until midnight.
"""
now_in_tz = datetime.datetime.now(tz=timezone)
midnight = now_in_tz.replace(hour=0, minute=0, second=0, microsecond=0) + \
datetime.timedelta(days=1)
return midnight - now_in_tz
# Titulky resets its download limits at the start of a new day in its own timezone (Europe/Prague).
# The result needs converting to an offset-naive dt.
def titulky_limit_reset_timedelta():
return time_until_midnight(timezone=pytz.timezone('Europe/Prague'))
# LegendasDivx resets its search limit at approximately midnight, Lisbon time, every day. We wait one more hour just
# to be sure.
def legendasdivx_limit_reset_timedelta():
return time_until_midnight(timezone=pytz.timezone('Europe/Lisbon')) + datetime.timedelta(minutes=60)
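# Illustrative arithmetic (times assumed): at 22:30 in Lisbon,
# time_until_midnight() returns timedelta(hours=1, minutes=30), so the
# LegendasDivx helper above yields timedelta(hours=2, minutes=30) once the
# extra 60-minute safety margin is added.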
VALID_THROTTLE_EXCEPTIONS = (TooManyRequests, DownloadLimitExceeded, ServiceUnavailable, APIThrottled,
ParseResponseError, IPAddressBlocked)
VALID_COUNT_EXCEPTIONS = ('TooManyRequests', 'ServiceUnavailable', 'APIThrottled', requests.exceptions.Timeout,
requests.exceptions.ConnectTimeout, requests.exceptions.ReadTimeout, socket.timeout)
def provider_throttle_map():
return {
"default": {
TooManyRequests: (datetime.timedelta(hours=1), "1 hour"),
DownloadLimitExceeded: (datetime.timedelta(hours=3), "3 hours"),
ServiceUnavailable: (datetime.timedelta(minutes=20), "20 minutes"),
APIThrottled: (datetime.timedelta(minutes=10), "10 minutes"),
ParseResponseError: (datetime.timedelta(hours=6), "6 hours"),
requests.exceptions.Timeout: (datetime.timedelta(hours=1), "1 hour"),
socket.timeout: (datetime.timedelta(hours=1), "1 hour"),
requests.exceptions.ConnectTimeout: (datetime.timedelta(hours=1), "1 hour"),
requests.exceptions.ReadTimeout: (datetime.timedelta(hours=1), "1 hour"),
ConfigurationError: (datetime.timedelta(hours=12), "12 hours"),
PermissionError: (datetime.timedelta(hours=12), "12 hours"),
requests.exceptions.ProxyError: (datetime.timedelta(hours=1), "1 hour"),
AuthenticationError: (datetime.timedelta(hours=12), "12 hours"),
},
"opensubtitles": {
TooManyRequests: (datetime.timedelta(hours=3), "3 hours"),
DownloadLimitExceeded: (datetime.timedelta(hours=6), "6 hours"),
DownloadLimitReached: (datetime.timedelta(hours=6), "6 hours"),
PaymentRequired: (datetime.timedelta(hours=12), "12 hours"),
Unauthorized: (datetime.timedelta(hours=12), "12 hours"),
APIThrottled: (datetime.timedelta(seconds=15), "15 seconds"),
ServiceUnavailable: (datetime.timedelta(hours=1), "1 hour"),
},
"opensubtitlescom": {
TooManyRequests: (datetime.timedelta(minutes=1), "1 minute"),
DownloadLimitExceeded: (datetime.timedelta(hours=6), "6 hours"),
},
"addic7ed": {
DownloadLimitExceeded: (datetime.timedelta(hours=3), "3 hours"),
TooManyRequests: (datetime.timedelta(minutes=5), "5 minutes"),
IPAddressBlocked: (datetime.timedelta(hours=1), "1 hour"),
},
"titlovi": {
TooManyRequests: (datetime.timedelta(minutes=5), "5 minutes"),
},
"titrari": {
TooManyRequests: (datetime.timedelta(minutes=10), "10 minutes"),
},
"titulky": {
DownloadLimitExceeded: (
titulky_limit_reset_timedelta(),
f"{titulky_limit_reset_timedelta().seconds // 3600 + 1} hours")
},
"legendasdivx": {
TooManyRequests: (datetime.timedelta(hours=3), "3 hours"),
DownloadLimitExceeded: (
legendasdivx_limit_reset_timedelta(),
f"{legendasdivx_limit_reset_timedelta().seconds // 3600 + 1} hours"),
IPAddressBlocked: (
legendasdivx_limit_reset_timedelta(),
f"{legendasdivx_limit_reset_timedelta().seconds // 3600 + 1} hours"),
SearchLimitReached: (
legendasdivx_limit_reset_timedelta(),
f"{legendasdivx_limit_reset_timedelta().seconds // 3600 + 1} hours"),
},
"whisperai": {
ConnectionError: (datetime.timedelta(hours=24), "24 hours"),
},
"regielive": {
APIThrottled: (datetime.timedelta(hours=1), "1 hour"),
TooManyRequests: (datetime.timedelta(minutes=5), "5 minutes"),
ProviderError: (datetime.timedelta(minutes=10), "10 minutes"),
},
}
PROVIDERS_FORCED_OFF = ["addic7ed", "tvsubtitles", "legendasdivx", "napiprojekt", "shooter",
"hosszupuska", "supersubtitles", "titlovi", "assrt"]
throttle_count = {}
def provider_pool():
if settings.general.multithreading:
return subliminal_patch.core.SZAsyncProviderPool
return subliminal_patch.core.SZProviderPool
def _lang_from_str(content: str):
" Formats: es-MX en@hi es-MX@forced "
extra_info = content.split("@")
if len(extra_info) > 1:
kwargs = {extra_info[-1]: True}
else:
kwargs = {}
content = extra_info[0]
try:
code, country = content.split("-")
except ValueError:
lang = CustomLanguage.from_value(content)
if lang is not None:
lang = lang.subzero_language()
return lang.rebuild(lang, **kwargs)
code, country = content, None
return subliminal_patch.core.Language(code, country, **kwargs)
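# Illustrative parses (assumed inputs, per the docstring above):
#   _lang_from_str("es-MX")         -> Spanish with country MX
#   _lang_from_str("en@hi")         -> English with hi=True
#   _lang_from_str("es-MX@forced")  -> Spanish (MX) with forced=True
# i.e. anything after '@' becomes a boolean keyword on the resulting Language.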
def get_language_equals(settings_=None):
settings_ = settings_ or settings
equals = settings_.general.language_equals
if not equals:
return []
items = []
for equal in equals:
try:
from_, to_ = equal.split(":")
from_, to_ = _lang_from_str(from_), _lang_from_str(to_)
except Exception as error:
logging.info("Invalid equal value: '%s' [%s]", equal, error)
else:
items.append((from_, to_))
return items
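# Illustrative setting (assumed): language_equals = ["es-MX:es"] would yield
# [(Language es-MX, Language es)], i.e. Mexican Spanish results are treated as
# plain Spanish when matching; malformed entries are logged and skipped.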
def get_providers():
providers_list = []
existing_providers = provider_registry.names()
providers = [x for x in settings.general.enabled_providers if x in existing_providers]
for provider in providers:
reason, until, throttle_desc = tp.get(provider, (None, None, None))
providers_list.append(provider)
if reason:
now = datetime.datetime.now()
if now < until:
logging.debug("Not using %s until %s, because of: %s", provider,
until.strftime("%y/%m/%d %H:%M"), reason)
providers_list.remove(provider)
else:
logging.info("Using %s again after %s, (disabled because: %s)", provider, throttle_desc, reason)
del tp[provider]
set_throttled_providers(str(tp))
# if forced-only is enabled:  # fixme: prepared for a future forced-only implementation to remove providers that don't support forced-only subtitles
# for provider in providers_list:
# if provider in PROVIDERS_FORCED_OFF:
# providers_list.remove(provider)
if not providers_list:
providers_list = None
return providers_list
def get_enabled_providers():
# return enabled providers, including those that may be throttled
if isinstance(settings.general.enabled_providers, list):
return settings.general.enabled_providers
else:
return []
_FFPROBE_BINARY = get_binary("ffprobe")
_FFMPEG_BINARY = get_binary("ffmpeg")
def get_providers_auth():
return {
'addic7ed': {
'username': settings.addic7ed.username,
'password': settings.addic7ed.password,
'cookies': settings.addic7ed.cookies,
'user_agent': settings.addic7ed.user_agent,
'is_vip': settings.addic7ed.vip,
},
'avistaz': {
'cookies': settings.avistaz.cookies,
'user_agent': settings.avistaz.user_agent,
},
'cinemaz': {
'cookies': settings.cinemaz.cookies,
'user_agent': settings.cinemaz.user_agent,
},
'opensubtitles': {
'username': settings.opensubtitles.username,
'password': settings.opensubtitles.password,
'use_tag_search': settings.opensubtitles.use_tag_search,
'only_foreign': False, # fixme
'also_foreign': False, # fixme
'is_vip': settings.opensubtitles.vip,
'use_ssl': settings.opensubtitles.ssl,
'timeout': int(settings.opensubtitles.timeout) or 15,
'skip_wrong_fps': settings.opensubtitles.skip_wrong_fps,
},
'opensubtitlescom': {'username': settings.opensubtitlescom.username,
'password': settings.opensubtitlescom.password,
'use_hash': settings.opensubtitlescom.use_hash,
'include_ai_translated': settings.opensubtitlescom.include_ai_translated,
'api_key': 's38zmzVlW7IlYruWi7mHwDYl2SfMQoC1'
},
'napiprojekt': {'only_authors': settings.napiprojekt.only_authors,
'only_real_names': settings.napiprojekt.only_real_names},
'podnapisi': {
'only_foreign': False, # fixme
'also_foreign': False, # fixme
'verify_ssl': settings.podnapisi.verify_ssl
},
'legendasdivx': {
'username': settings.legendasdivx.username,
'password': settings.legendasdivx.password,
'skip_wrong_fps': settings.legendasdivx.skip_wrong_fps,
},
'legendasnet': {
'username': settings.legendasnet.username,
'password': settings.legendasnet.password,
},
'xsubs': {
'username': settings.xsubs.username,
'password': settings.xsubs.password,
},
'assrt': {
'token': settings.assrt.token,
},
'napisy24': {
'username': settings.napisy24.username,
'password': settings.napisy24.password,
},
'betaseries': {'token': settings.betaseries.token},
'titulky': {
'username': settings.titulky.username,
'password': settings.titulky.password,
'approved_only': settings.titulky.approved_only,
'skip_wrong_fps': settings.titulky.skip_wrong_fps,
},
'titlovi': {
'username': settings.titlovi.username,
'password': settings.titlovi.password,
},
'jimaku': {
'api_key': settings.jimaku.api_key,
'enable_name_search_fallback': settings.jimaku.enable_name_search_fallback,
'enable_archives_download': settings.jimaku.enable_archives_download,
'enable_ai_subs': settings.jimaku.enable_ai_subs,
},
'ktuvit': {
'email': settings.ktuvit.email,
'hashed_password': settings.ktuvit.hashed_password,
},
'embeddedsubtitles': {
'included_codecs': settings.embeddedsubtitles.included_codecs,
'hi_fallback': settings.embeddedsubtitles.hi_fallback,
'cache_dir': os.path.join(args.config_dir, "cache"),
'ffprobe_path': _FFPROBE_BINARY,
'ffmpeg_path': _FFMPEG_BINARY,
'timeout': settings.embeddedsubtitles.timeout,
'unknown_as_fallback': settings.embeddedsubtitles.unknown_as_fallback,
'fallback_lang': settings.embeddedsubtitles.fallback_lang,
},
'karagarga': {
'username': settings.karagarga.username,
'password': settings.karagarga.password,
'f_username': settings.karagarga.f_username,
'f_password': settings.karagarga.f_password,
},
'hdbits': {
'username': settings.hdbits.username,
'passkey': settings.hdbits.passkey,
},
'subf2m': {
'verify_ssl': settings.subf2m.verify_ssl,
'user_agent': settings.subf2m.user_agent,
},
'whisperai': {
'endpoint': settings.whisperai.endpoint,
'response': settings.whisperai.response,
'timeout': settings.whisperai.timeout,
'ffmpeg_path': _FFMPEG_BINARY,
'loglevel': settings.whisperai.loglevel,
'pass_video_name': settings.whisperai.pass_video_name,
},
"animetosho": {
'search_threshold': settings.animetosho.search_threshold,
},
"subdl": {
'api_key': settings.subdl.api_key,
},
'turkcealtyaziorg': {
'cookies': settings.turkcealtyaziorg.cookies,
'user_agent': settings.turkcealtyaziorg.user_agent,
}
}
def _handle_mgb(name, exception, ids, language):
if language.forced:
language_str = f'{language.basename}:forced'
elif language.hi:
language_str = f'{language.basename}:hi'
else:
language_str = language.basename
if ids:
if exception.media_type == "series":
if 'sonarrSeriesId' in ids and 'sonarrEpisodeId' in ids:
    blacklist_log(ids['sonarrSeriesId'], ids['sonarrEpisodeId'], name, exception.id, language_str)
else:
blacklist_log_movie(ids['radarrId'], name, exception.id, language_str)
def provider_throttle(name, exception, ids=None, language=None):
if isinstance(exception, MustGetBlacklisted) and isinstance(ids, dict) and isinstance(language, Language):
return _handle_mgb(name, exception, ids, language)
cls = getattr(exception, "__class__")
cls_name = getattr(cls, "__name__")
if cls not in VALID_THROTTLE_EXCEPTIONS:
for valid_cls in VALID_THROTTLE_EXCEPTIONS:
if isinstance(cls, valid_cls):
cls = valid_cls
throttle_data = provider_throttle_map().get(name, provider_throttle_map()["default"]).get(cls, None) or \
provider_throttle_map()["default"].get(cls, None)
if throttle_data:
throttle_delta, throttle_description = throttle_data
else:
throttle_delta, throttle_description = datetime.timedelta(minutes=10), "10 minutes"
throttle_until = datetime.datetime.now() + throttle_delta
if cls_name not in VALID_COUNT_EXCEPTIONS or throttled_count(name):
if cls_name == 'ValueError' and isinstance(exception.args, tuple) and len(exception.args) and exception.args[
0].startswith('unsupported pickle protocol'):
for fn in subliminal_cache_region.backend.all_filenames:
try:
os.remove(fn)
except (IOError, OSError):
logging.debug("Couldn't remove cache file: %s", os.path.basename(fn))
else:
tp[name] = (cls_name, throttle_until, throttle_description)
set_throttled_providers(str(tp))
trac_info = _get_traceback_info(exception)
logging.info("Throttling %s for %s, until %s, because of: %s. Exception info: %r", name,
throttle_description, throttle_until.strftime("%y/%m/%d %H:%M"), cls_name, trac_info)
event_tracker.track_throttling(provider=name, exception_name=cls_name, exception_info=trac_info)
update_throttled_provider()
def _get_traceback_info(exc: Exception):
traceback_str = " ".join(traceback.format_exception(type(exc), exc, exc.__traceback__))
clean_msg = str(exc).replace("\n", " ").strip()
line_info = _TRACEBACK_RE.findall(traceback_str)
# Value info max chars len is 100
if not line_info:
return clean_msg[:100]
line_info = line_info[-1]
file_, line = line_info
extra = f"' ~ {os.path.basename(file_)}@{line}"[:90]
message = f"'{clean_msg}"[:100 - len(extra)]
return message + extra
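# Example output (sketch, hypothetical values): for an exception raised at line
# 42 of provider.py, _get_traceback_info() returns something like:
#   "'connection reset by peer' ~ provider.py@42"
# with the message truncated so the whole string stays within 100 characters.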
def throttled_count(name):
    global throttle_count
    if name in throttle_count and 'count' in throttle_count[name]:
        throttle_count[name]['count'] += 1
    else:
        throttle_count[name] = {"count": 1, "time": (datetime.datetime.now() + datetime.timedelta(seconds=120))}
    if throttle_count[name]['count'] >= 5:
        return True
    if throttle_count[name]['time'] <= datetime.datetime.now():
        throttle_count[name] = {"count": 1, "time": (datetime.datetime.now() + datetime.timedelta(seconds=120))}
    logging.info("Provider %s throttle count %s of 5, waiting 5 seconds and trying again", name,
                 throttle_count[name]['count'])
    time.sleep(5)
    return False
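# In short: throttled_count() returns False (after sleeping 5 seconds) for the
# first four countable exceptions of a provider within its 120-second window,
# and True on the fifth, at which point provider_throttle() actually throttles.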
def update_throttled_provider():
existing_providers = provider_registry.names()
providers_list = [x for x in settings.general.enabled_providers if x in existing_providers]
for provider in list(tp):
if provider not in providers_list:
del tp[provider]
set_throttled_providers(str(tp))
reason, until, throttle_desc = tp.get(provider, (None, None, None))
if reason:
    now = datetime.datetime.now()
    if now >= until:
        logging.info("Using %s again after %s, (disabled because: %s)", provider, throttle_desc, reason)
        del tp[provider]
        set_throttled_providers(str(tp))
event_stream(type='badges')
def list_throttled_providers():
update_throttled_provider()
throttled_providers = []
existing_providers = provider_registry.names()
providers = [x for x in settings.general.enabled_providers if x in existing_providers]
for provider in providers:
reason, until, throttle_desc = tp.get(provider, (None, None, None))
throttled_providers.append([provider, reason, pretty.date(until)])
return throttled_providers
def reset_throttled_providers(only_auth_or_conf_error=False):
for provider in list(tp):
if only_auth_or_conf_error and tp[provider][0] not in ['AuthenticationError', 'ConfigurationError',
'PaymentRequired']:
continue
del tp[provider]
set_throttled_providers(str(tp))
update_throttled_provider()
if only_auth_or_conf_error:
logging.info('BAZARR throttled providers have been reset (only AuthenticationError, ConfigurationError and '
'PaymentRequired).')
else:
logging.info('BAZARR throttled providers have been reset.')
def get_throttled_providers():
providers = {}
try:
if os.path.exists(os.path.join(args.config_dir, 'config', 'throttled_providers.dat')):
with open(os.path.normpath(os.path.join(args.config_dir, 'config', 'throttled_providers.dat')), 'r') as \
handle:
providers = eval(handle.read())
except Exception:
# set empty content in throttled_providers.dat
logging.error("Invalid content in throttled_providers.dat. Resetting")
set_throttled_providers(str(providers))
finally:
return providers
def set_throttled_providers(data):
with open(os.path.normpath(os.path.join(args.config_dir, 'config', 'throttled_providers.dat')), 'w+') as handle:
handle.write(data)
tp = get_throttled_providers()
if not isinstance(tp, dict):
raise ValueError('tp should be a dict')

26
bazarr/app/libs.py Normal file

@@ -0,0 +1,26 @@
# coding=utf-8
import os
import sys
from shutil import rmtree
def clean_libs():
libs_dir = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'libs')
# Delete old, almost-empty module directories that are only compatible with Python 2.7.x
# and cause "bad magic number" errors if they are present under Python 3.x.
module_list = ['enum', 'concurrent']
for module in module_list:
module_path = os.path.join(libs_dir, module)
rmtree(module_path, ignore_errors=True)
def set_libs():
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), '../custom_libs/'))
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), '../libs/'))
clean_libs()
set_libs()

236
bazarr/app/logger.py Normal file

@@ -0,0 +1,236 @@
# coding=utf-8
import os
import sys
import logging
import re
import platform
import warnings
from logging.handlers import TimedRotatingFileHandler
from utilities.central import get_log_file_path
from pytz_deprecation_shim import PytzUsageWarning
from .config import settings
logger = logging.getLogger()
class FileHandlerFormatter(logging.Formatter):
"""Formatter that removes apikey from logs."""
APIKEY_RE = re.compile(r'apikey(?:=|%3D)([a-zA-Z0-9]+)')
IPv4_RE = re.compile(r'\b(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9]'
r'[0-9]|[1-9]?[0-9])\b')
def formatException(self, exc_info):
"""
Format an exception so that it prints on a single line.
"""
result = super(FileHandlerFormatter, self).formatException(exc_info)
return repr(result) # or format into one line however you want to
def formatApikey(self, s):
return re.sub(self.APIKEY_RE, 'apikey=(removed)', s)
def formatIPv4(self, s):
return re.sub(self.IPv4_RE, '***.***.***.***', s)
def format(self, record):
s = super(FileHandlerFormatter, self).format(record)
if record.exc_text:
s = s.replace('\n', '') + '|'
s = self.formatApikey(s)
s = self.formatIPv4(s)
return s
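# Example of the redaction above (sketch, made-up log line):
#   "GET /api/series?apikey=abc123 from 192.168.1.10"
# is written to the log file as:
#   "GET /api/series?apikey=(removed) from ***.***.***.***"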
class NoExceptionFormatter(logging.Formatter):
def format(self, record):
record.exc_text = '' # ensure formatException gets called
return super(NoExceptionFormatter, self).format(record)
def formatException(self, record):
return ''
class UnwantedWaitressMessageFilter(logging.Filter):
def filter(self, record):
if settings.general.debug or "BAZARR" in record.msg:
# no filtering in debug mode or if originating from us
return True
if record.levelno < logging.ERROR:
return False
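# unwantedMessages is a flat list of pairs: each even index holds a log message,
# the following odd index holds the exception strings for which that message is
# unwanted (an empty list matches any exception).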
unwantedMessages = [
"Exception while serving /api/socket.io/",
['Session is disconnected', 'Session not found'],
"Exception while serving /api/socket.io/",
["'Session is disconnected'", "'Session not found'"],
"Exception while serving /api/socket.io/",
['"Session is disconnected"', '"Session not found"'],
"Exception when servicing %r",
[],
]
wanted = True
listLength = len(unwantedMessages)
for i in range(0, listLength, 2):
if record.msg == unwantedMessages[i]:
exceptionTuple = record.exc_info
if exceptionTuple is not None:
if len(unwantedMessages[i+1]) == 0 or str(exceptionTuple[1]) in unwantedMessages[i+1]:
wanted = False
break
return wanted
def configure_logging(debug=False):
warnings.simplefilter('ignore', category=ResourceWarning)
warnings.simplefilter('ignore', category=PytzUsageWarning)
# warnings.simplefilter('ignore', category=SAWarning)
if debug:
log_level = logging.DEBUG
else:
log_level = logging.INFO
logger.handlers = []
logger.setLevel(log_level)
# Console logging
ch = logging.StreamHandler()
cf = (debug and logging.Formatter or NoExceptionFormatter)(
'%(asctime)-15s - %(name)-32s (%(thread)x) : %(levelname)s (%(module)s:%(lineno)d) - %(message)s')
ch.setFormatter(cf)
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)
# File Logging
global fh
if sys.version_info >= (3, 9):
fh = PatchedTimedRotatingFileHandler(get_log_file_path(), when="midnight",
interval=1, backupCount=7, delay=True, encoding='utf-8')
else:
fh = TimedRotatingFileHandler(get_log_file_path(), when="midnight", interval=1,
backupCount=7, delay=True, encoding='utf-8')
f = FileHandlerFormatter('%(asctime)s|%(levelname)-8s|%(name)-32s|%(message)s|',
'%Y-%m-%d %H:%M:%S')
fh.setFormatter(f)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
if debug:
logging.getLogger("alembic.runtime.migration").setLevel(logging.DEBUG)
logging.getLogger("apscheduler").setLevel(logging.DEBUG)
logging.getLogger("subliminal").setLevel(logging.DEBUG)
logging.getLogger("subliminal_patch").setLevel(logging.DEBUG)
logging.getLogger("subzero").setLevel(logging.DEBUG)
logging.getLogger("git").setLevel(logging.DEBUG)
logging.getLogger("apprise").setLevel(logging.DEBUG)
logging.getLogger("engineio.server").setLevel(logging.DEBUG)
logging.getLogger("socketio.server").setLevel(logging.DEBUG)
logging.getLogger("ffsubsync.subtitle_parser").setLevel(logging.DEBUG)
logging.getLogger("ffsubsync.speech_transformers").setLevel(logging.DEBUG)
logging.getLogger("ffsubsync.ffsubsync").setLevel(logging.DEBUG)
logging.getLogger("ffsubsync.aligners").setLevel(logging.DEBUG)
logging.getLogger("srt").setLevel(logging.DEBUG)
logging.debug('Bazarr version: %s', os.environ["BAZARR_VERSION"])
logging.debug('Bazarr branch: %s', settings.general.branch)
logging.debug('Operating system: %s', platform.platform())
logging.debug('Python version: %s', platform.python_version())
else:
logging.getLogger("alembic.runtime.migration").setLevel(logging.CRITICAL)
logging.getLogger("apscheduler").setLevel(logging.WARNING)
logging.getLogger("apprise").setLevel(logging.WARNING)
logging.getLogger("subliminal").setLevel(logging.CRITICAL)
logging.getLogger("subliminal_patch").setLevel(logging.CRITICAL)
logging.getLogger("subzero").setLevel(logging.ERROR)
logging.getLogger("engineio.server").setLevel(logging.ERROR)
logging.getLogger("socketio.server").setLevel(logging.ERROR)
logging.getLogger("ffsubsync.subtitle_parser").setLevel(logging.ERROR)
logging.getLogger("ffsubsync.speech_transformers").setLevel(logging.ERROR)
logging.getLogger("ffsubsync.ffsubsync").setLevel(logging.ERROR)
logging.getLogger("ffsubsync.aligners").setLevel(logging.ERROR)
logging.getLogger("srt").setLevel(logging.ERROR)
logging.getLogger("SignalRCoreClient").setLevel(logging.CRITICAL)
logging.getLogger("websocket").setLevel(logging.CRITICAL)
logging.getLogger("ga4mp.ga4mp").setLevel(logging.ERROR)
logging.getLogger("waitress").setLevel(logging.INFO)
logging.getLogger("waitress").addFilter(UnwantedWaitressMessageFilter())
logging.getLogger("knowit").setLevel(logging.CRITICAL)
logging.getLogger("enzyme").setLevel(logging.CRITICAL)
logging.getLogger("guessit").setLevel(logging.WARNING)
logging.getLogger("rebulk").setLevel(logging.WARNING)
logging.getLogger("stevedore.extension").setLevel(logging.CRITICAL)
def empty_file(filename):
# Open the log file in write mode to clear its contents
with open(filename, 'w'):
pass # Just opening and closing the file will clear it
def empty_log():
fh.doRollover()
empty_file(get_log_file_path())
logging.info('BAZARR Log file emptied')
class PatchedTimedRotatingFileHandler(TimedRotatingFileHandler):
# This subclassed version of logging.TimedRotatingFileHandler is required to fix a bug in earlier versions of
# Python 3.9, 3.10 and 3.11 where log rotation didn't work as expected and didn't delete backup log files.
def __init__(self, filename, when='h', interval=1, backupCount=0, encoding=None, delay=False, utc=False,
atTime=None, errors=None):
super(PatchedTimedRotatingFileHandler, self).__init__(filename, when, interval, backupCount, encoding, delay, utc,
atTime, errors)
def getFilesToDelete(self):
"""
Determine the files to delete when rolling over.
More specific than the earlier method, which just used glob.glob().
"""
dirName, baseName = os.path.split(self.baseFilename)
fileNames = os.listdir(dirName)
result = []
# See bpo-44753: Don't use the extension when computing the prefix.
n, e = os.path.splitext(baseName)
prefix = f'{n}.'
plen = len(prefix)
for fileName in fileNames:
if self.namer is None:
# Our files will always start with baseName
if not fileName.startswith(baseName):
continue
else:
# Our files could be just about anything after custom naming, but
# likely candidates are of the form
# foo.log.DATETIME_SUFFIX or foo.DATETIME_SUFFIX.log
if (not fileName.startswith(baseName) and fileName.endswith(e) and
len(fileName) > (plen + 1) and not fileName[plen+1].isdigit()):
continue
if fileName[:plen] == prefix:
suffix = fileName[plen:]
# See bpo-45628: The date/time suffix could be anywhere in the
# filename
parts = suffix.split('.')
for part in parts:
if self.extMatch.match(part):
result.append(os.path.join(dirName, fileName))
break
if len(result) < self.backupCount:
result = []
else:
result.sort()
result = result[:len(result) - self.backupCount]
return result
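# Example (sketch): with baseFilename "bazarr.log" and backupCount=7, rotated
# files such as "bazarr.log.2024-01-01" match the "bazarr." prefix and the
# date/time suffix regex, and only the oldest files beyond the newest 7 are
# returned for deletion.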

115
bazarr/app/notifier.py Normal file

@@ -0,0 +1,115 @@
# coding=utf-8
from apprise import Apprise, AppriseAsset
import logging
from .database import TableSettingsNotifier, TableEpisodes, TableShows, TableMovies, database, insert, delete, select
def update_notifier():
# define apprise object
a = Apprise()
# Retrieve all the details
results = a.details()
notifiers_added = []
notifiers_kept = []
notifiers_in_db = [row.name for row in
database.execute(
select(TableSettingsNotifier.name))
.all()]
for x in results['schemas']:
if x['service_name'] not in notifiers_in_db:
notifiers_added.append({'name': str(x['service_name']), 'enabled': 0})
logging.debug(f'Adding new notifier agent: {x["service_name"]}')
else:
notifiers_kept.append(x['service_name'])
notifiers_to_delete = [item for item in notifiers_in_db if item not in notifiers_kept]
for item in notifiers_to_delete:
database.execute(
delete(TableSettingsNotifier)
.where(TableSettingsNotifier.name == item))
database.execute(
insert(TableSettingsNotifier)
.values(notifiers_added))
def get_notifier_providers():
return database.execute(
select(TableSettingsNotifier.name, TableSettingsNotifier.url)
.where(TableSettingsNotifier.enabled == 1))\
.all()
def send_notifications(sonarr_series_id, sonarr_episode_id, message):
providers = get_notifier_providers()
if not len(providers):
return
series = database.execute(
select(TableShows.title, TableShows.year)
.where(TableShows.sonarrSeriesId == sonarr_series_id))\
.first()
if not series:
return
series_title = series.title
series_year = series.year
if series_year not in [None, '', '0']:
series_year = f' ({series_year})'
else:
series_year = ''
episode = database.execute(
select(TableEpisodes.title, TableEpisodes.season, TableEpisodes.episode)
.where(TableEpisodes.sonarrEpisodeId == sonarr_episode_id))\
.first()
if not episode:
return
asset = AppriseAsset(async_mode=False)
apobj = Apprise(asset=asset)
for provider in providers:
if provider.url is not None:
apobj.add(provider.url)
apobj.notify(
title='Bazarr notification',
body=f"{series_title}{series_year} - S{episode.season:02d}E{episode.episode:02d} - {episode.title} : {message}",
)
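# Example body produced above (sketch, hypothetical values): for a series
# "Show Title" (2020), season 1 episode 2 named "Pilot", the notification reads:
#   "Show Title (2020) - S01E02 - Pilot : <message>"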
def send_notifications_movie(radarr_id, message):
providers = get_notifier_providers()
if not len(providers):
return
movie = database.execute(
select(TableMovies.title, TableMovies.year)
.where(TableMovies.radarrId == radarr_id))\
.first()
if not movie:
return
movie_title = movie.title
movie_year = movie.year
if movie_year not in [None, '', '0']:
movie_year = f' ({movie_year})'
else:
movie_year = ''
asset = AppriseAsset(async_mode=False)
apobj = Apprise(asset=asset)
for provider in providers:
if provider.url is not None:
apobj.add(provider.url)
apobj.notify(
title='Bazarr notification',
body=f"{movie_title}{movie_year} : {message}",
)

348
bazarr/app/scheduler.py Normal file

@@ -0,0 +1,348 @@
# coding=utf-8
import os
import pretty
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.interval import IntervalTrigger
from apscheduler.triggers.cron import CronTrigger
from apscheduler.events import EVENT_JOB_SUBMITTED, EVENT_JOB_EXECUTED, EVENT_JOB_ERROR
from datetime import datetime, timedelta
from calendar import day_name
from random import randrange
from tzlocal import get_localzone
try:
import zoneinfo # pragma: no cover
except ImportError:
from backports import zoneinfo # pragma: no cover
from dateutil import tz
import logging
from app.announcements import get_announcements_to_file
from sonarr.sync.series import update_series
from sonarr.sync.episodes import update_all_episodes
from radarr.sync.movies import update_movies, update_all_movies
from subtitles.wanted import wanted_search_missing_subtitles_series, wanted_search_missing_subtitles_movies
from subtitles.upgrade import upgrade_subtitles
from utilities.cache import cache_maintenance
from utilities.health import check_health
from utilities.backup import backup_to_zip
from .config import settings
from .get_args import args
from .event_handler import event_stream
if not args.no_update:
from .check_update import check_if_new_update, check_releases
else:
from .check_update import check_releases
from dateutil.relativedelta import relativedelta
NO_INTERVAL = "None"
NEVER_DATE = "Never"
ONE_YEAR_IN_SECONDS = 60 * 60 * 24 * 365
def a_long_time_from_now(job):
# job isn't scheduled at all
if job.next_run_time is None:
return True
# currently defined as more than a year from now
delta = job.next_run_time - datetime.now(job.next_run_time.tzinfo)
return delta.total_seconds() > ONE_YEAR_IN_SECONDS
def in_a_century():
century = datetime.now() + relativedelta(years=100)
return century.year
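# Note: scheduling a cron job with year=in_a_century() is how "Manually"/"Never"
# tasks are modelled below: the job still exists (so it can be run on demand),
# but its next run is ~100 years away, which a_long_time_from_now() reports as
# never.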
class Scheduler:
def __init__(self):
self.__running_tasks = []
# delete empty TZ environment variable to prevent UserWarning
if os.environ.get("TZ") == "":
del os.environ["TZ"]
try:
self.timezone = get_localzone()
except zoneinfo.ZoneInfoNotFoundError:
logging.error("BAZARR cannot use the specified timezone and will use UTC instead.")
self.timezone = tz.gettz("UTC")
else:
logging.info(f"Scheduler will use this timezone: {self.timezone}")
self.aps_scheduler = BackgroundScheduler({'apscheduler.timezone': self.timezone})
# task listener
def task_listener_add(event):
if event.job_id not in self.__running_tasks:
self.__running_tasks.append(event.job_id)
event_stream(type='task')
def task_listener_remove(event):
if event.job_id in self.__running_tasks:
self.__running_tasks.remove(event.job_id)
event_stream(type='task')
self.aps_scheduler.add_listener(task_listener_add, EVENT_JOB_SUBMITTED)
self.aps_scheduler.add_listener(task_listener_remove, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)
# configure all tasks
self.__cache_cleanup_task()
self.__check_health_task()
self.update_configurable_tasks()
self.aps_scheduler.start()
def update_configurable_tasks(self):
self.__sonarr_update_task()
self.__radarr_update_task()
self.__sonarr_full_update_task()
self.__radarr_full_update_task()
self.__update_bazarr_task()
self.__search_wanted_subtitles_task()
self.__upgrade_subtitles_task()
self.__randomize_interval_task()
self.__automatic_backup()
if args.no_tasks:
self.__no_task()
def add_job(self, job, name=None, max_instances=1, coalesce=True, args=None, kwargs=None):
self.aps_scheduler.add_job(
job, 'date', run_date=datetime.now(), name=name, id=name, max_instances=max_instances,
coalesce=coalesce, args=args, kwargs=kwargs)
def execute_job_now(self, taskid):
self.aps_scheduler.modify_job(taskid, next_run_time=datetime.now())
def get_running_tasks(self):
return self.__running_tasks
def get_task_list(self):
def get_time_from_interval(td_object):
seconds = int(td_object.total_seconds())
periods = [
('year', 60 * 60 * 24 * 365),
('month', 60 * 60 * 24 * 30),
('day', 60 * 60 * 24),
('hour', 60 * 60),
('minute', 60),
('second', 1)
]
if seconds > ONE_YEAR_IN_SECONDS:
# intervals longer than a year are reported as NO_INTERVAL ("None")
return NO_INTERVAL
strings = []
for period_name, period_seconds in periods:
if seconds > period_seconds:
period_value, seconds = divmod(seconds, period_seconds)
has_s = 's' if period_value > 1 else ''
strings.append("%s %s%s" % (period_value, period_name, has_s))
return ", ".join(strings)
def get_time_from_cron(cron):
day = str(cron[4])
hour = str(cron[5])
if day == "*":
text = "every day"
else:
text = f"every {day_name[int(day)]}"
if hour != "*":
text += f" at {hour}:00"
return text
task_list = []
for job in self.aps_scheduler.get_jobs():
next_run = NEVER_DATE
if job.next_run_time:
if a_long_time_from_now(job):
# Never for IntervalTrigger jobs
next_run = NEVER_DATE
else:
next_run = pretty.date(job.next_run_time.replace(tzinfo=None))
if isinstance(job.trigger, CronTrigger):
if a_long_time_from_now(job):
# Never for CronTrigger jobs
next_run = NEVER_DATE
else:
if job.next_run_time:
next_run = pretty.date(job.next_run_time.replace(tzinfo=None))
if job.id in self.__running_tasks:
running = True
else:
running = False
if isinstance(job.trigger, IntervalTrigger):
interval = get_time_from_interval(job.trigger.__getstate__()['interval'])
if interval != NO_INTERVAL:
interval = f"every {interval}"
# else:
# interval = "100 Year Interval"
task_list.append({'name': job.name, 'interval': interval, 'next_run_in': next_run,
'next_run_time': next_run, 'job_id': job.id, 'job_running': running})
elif isinstance(job.trigger, CronTrigger):
if a_long_time_from_now(job):
interval = NO_INTERVAL
else:
interval = get_time_from_cron(job.trigger.fields)
task_list.append({'name': job.name, 'interval': interval,
'next_run_in': next_run, 'next_run_time': next_run, 'job_id': job.id,
'job_running': running})
return task_list
def __sonarr_update_task(self):
if settings.general.use_sonarr:
self.aps_scheduler.add_job(
update_series, 'interval', minutes=int(settings.sonarr.series_sync), max_instances=1,
coalesce=True, misfire_grace_time=15, id='update_series', name='Sync with Sonarr',
replace_existing=True)
def __radarr_update_task(self):
if settings.general.use_radarr:
self.aps_scheduler.add_job(
update_movies, 'interval', minutes=int(settings.radarr.movies_sync), max_instances=1,
coalesce=True, misfire_grace_time=15, id='update_movies', name='Sync with Radarr',
replace_existing=True)
def __cache_cleanup_task(self):
self.aps_scheduler.add_job(cache_maintenance, 'interval', hours=24, max_instances=1, coalesce=True,
misfire_grace_time=15, id='cache_cleanup', name='Cache Maintenance')
def __check_health_task(self):
self.aps_scheduler.add_job(check_health, 'interval', hours=6, max_instances=1, coalesce=True,
misfire_grace_time=15, id='check_health', name='Check Health')
def __automatic_backup(self):
backup = settings.backup.frequency
if backup == "Daily":
trigger = {'hour': settings.backup.hour}
elif backup == "Weekly":
trigger = {'day_of_week': settings.backup.day, 'hour': settings.backup.hour}
else:
trigger = {'year': in_a_century()}
self.aps_scheduler.add_job(backup_to_zip, 'cron', **trigger,
max_instances=1, coalesce=True, misfire_grace_time=15, id='backup',
name='Backup Database and Configuration File', replace_existing=True)
def __sonarr_full_update_task(self):
if settings.general.use_sonarr:
full_update = settings.sonarr.full_update
if full_update == "Daily":
self.aps_scheduler.add_job(
update_all_episodes, 'cron', hour=settings.sonarr.full_update_hour, max_instances=1,
coalesce=True, misfire_grace_time=15, id='update_all_episodes',
name='Index All Episode Subtitles from Disk', replace_existing=True)
elif full_update == "Weekly":
self.aps_scheduler.add_job(
update_all_episodes, 'cron', day_of_week=settings.sonarr.full_update_day,
hour=settings.sonarr.full_update_hour, max_instances=1, coalesce=True, misfire_grace_time=15,
id='update_all_episodes', name='Index All Episode Subtitles from Disk', replace_existing=True)
elif full_update == "Manually":
self.aps_scheduler.add_job(
update_all_episodes, 'cron', year=in_a_century(), max_instances=1, coalesce=True,
misfire_grace_time=15, id='update_all_episodes', name='Index All Episode Subtitles from Disk',
replace_existing=True)
def __radarr_full_update_task(self):
if settings.general.use_radarr:
full_update = settings.radarr.full_update
if full_update == "Daily":
self.aps_scheduler.add_job(
update_all_movies, 'cron', hour=settings.radarr.full_update_hour, max_instances=1,
coalesce=True, misfire_grace_time=15,
id='update_all_movies', name='Index All Movie Subtitles from Disk', replace_existing=True)
elif full_update == "Weekly":
self.aps_scheduler.add_job(
update_all_movies,
'cron', day_of_week=settings.radarr.full_update_day, hour=settings.radarr.full_update_hour,
max_instances=1, coalesce=True, misfire_grace_time=15, id='update_all_movies',
name='Index All Movie Subtitles from Disk', replace_existing=True)
elif full_update == "Manually":
self.aps_scheduler.add_job(
update_all_movies, 'cron', year=in_a_century(), max_instances=1, coalesce=True,
misfire_grace_time=15, id='update_all_movies', name='Index All Movie Subtitles from Disk',
replace_existing=True)
def __update_bazarr_task(self):
if not args.no_update and os.environ["BAZARR_VERSION"] != '':
task_name = 'Update Bazarr'
if settings.general.auto_update:
self.aps_scheduler.add_job(
check_if_new_update, 'interval', hours=6, max_instances=1, coalesce=True,
misfire_grace_time=15, id='update_bazarr', name=task_name, replace_existing=True)
else:
self.aps_scheduler.add_job(
check_if_new_update, 'cron', year=in_a_century(), hour=4, id='update_bazarr', name=task_name,
replace_existing=True)
self.aps_scheduler.add_job(
check_releases, 'interval', hours=3, max_instances=1, coalesce=True, misfire_grace_time=15,
id='update_release', name='Update Release Info', replace_existing=True)
else:
self.aps_scheduler.add_job(
check_releases, 'interval', hours=3, max_instances=1, coalesce=True, misfire_grace_time=15,
id='update_release', name='Update Release Info', replace_existing=True)
self.aps_scheduler.add_job(
get_announcements_to_file, 'interval', hours=6, max_instances=1, coalesce=True, misfire_grace_time=15,
id='update_announcements', name='Update Announcements File', replace_existing=True)
def __search_wanted_subtitles_task(self):
if settings.general.use_sonarr:
self.aps_scheduler.add_job(
wanted_search_missing_subtitles_series, 'interval', hours=int(settings.general.wanted_search_frequency),
max_instances=1, coalesce=True, misfire_grace_time=15, id='wanted_search_missing_subtitles_series',
replace_existing=True, name='Search for Missing Series Subtitles')
if settings.general.use_radarr:
self.aps_scheduler.add_job(
wanted_search_missing_subtitles_movies, 'interval',
hours=int(settings.general.wanted_search_frequency_movie), max_instances=1, coalesce=True,
misfire_grace_time=15, id='wanted_search_missing_subtitles_movies',
name='Search for Missing Movies Subtitles', replace_existing=True)
def __upgrade_subtitles_task(self):
if settings.general.use_sonarr or settings.general.use_radarr:
self.aps_scheduler.add_job(
upgrade_subtitles, 'interval', hours=int(settings.general.upgrade_frequency), max_instances=1,
coalesce=True, misfire_grace_time=15, id='upgrade_subtitles',
name='Upgrade Previously Downloaded Subtitles', replace_existing=True)
def __randomize_interval_task(self):
for job in self.aps_scheduler.get_jobs():
if isinstance(job.trigger, IntervalTrigger):
# do not randomize the Never jobs
if job.trigger.interval.total_seconds() > ONE_YEAR_IN_SECONDS:
continue
self.aps_scheduler.modify_job(job.id,
next_run_time=datetime.now(tz=self.timezone) +
timedelta(seconds=randrange(
int(job.trigger.interval.total_seconds() * 0.75),
int(job.trigger.interval.total_seconds()))))
def __no_task(self):
for job in self.aps_scheduler.get_jobs():
self.aps_scheduler.modify_job(job.id, next_run_time=None)
scheduler = Scheduler()
# Force the execution of the sync process with Sonarr and Radarr after migration to v0.9.1
if 'BAZARR_AUDIO_PROFILES_MIGRATION' in os.environ:
if settings.general.use_sonarr:
scheduler.aps_scheduler.modify_job('update_series', next_run_time=datetime.now())
scheduler.aps_scheduler.modify_job('sync_episodes', next_run_time=datetime.now())
if settings.general.use_radarr:
scheduler.aps_scheduler.modify_job('update_movies', next_run_time=datetime.now())
del os.environ['BAZARR_AUDIO_PROFILES_MIGRATION']

108
bazarr/app/server.py Normal file

@@ -0,0 +1,108 @@
# coding=utf-8
import signal
import warnings
import logging
import errno
from literals import EXIT_INTERRUPT, EXIT_NORMAL, EXIT_PORT_ALREADY_IN_USE_ERROR
from utilities.central import restart_bazarr, stop_bazarr
from waitress.server import create_server
from time import sleep
from api import api_bp
from .ui import ui_bp
from .get_args import args
from .config import settings, base_url
from .database import close_database
from .app import create_app
app = create_app()
app.register_blueprint(api_bp, url_prefix=base_url.rstrip('/') + '/api')
app.register_blueprint(ui_bp, url_prefix=base_url.rstrip('/'))
class Server:
def __init__(self):
# Mute DeprecationWarning
warnings.simplefilter("ignore", DeprecationWarning)
# Mute Insecure HTTPS requests made to Sonarr and Radarr
warnings.filterwarnings('ignore', message='Unverified HTTPS request')
# Mute Python3 BrokenPipeError
warnings.simplefilter("ignore", BrokenPipeError)
self.server = None
self.connected = False
self.address = str(settings.general.ip)
self.port = int(args.port) if args.port else int(settings.general.port)
self.interrupted = False
while not self.connected:
sleep(0.1)
self.configure_server()
def configure_server(self):
try:
self.server = create_server(app,
host=self.address,
port=self.port,
threads=100)
self.connected = True
except OSError as error:
if error.errno == errno.EADDRNOTAVAIL:
logging.exception("BAZARR cannot bind to specified IP, trying with 0.0.0.0")
self.address = '0.0.0.0'
self.connected = False
super(Server, self).__init__()
elif error.errno == errno.EADDRINUSE:
if self.port != 6767:  # compare as int; self.port was cast with int() above
    logging.exception("BAZARR cannot bind to specified TCP port, trying with default (6767)")
    self.port = 6767
self.connected = False
super(Server, self).__init__()
else:
logging.exception("BAZARR cannot bind to default TCP port (6767) because it's already in use, "
"exiting...")
self.shutdown(EXIT_PORT_ALREADY_IN_USE_ERROR)
elif error.errno in [errno.ENOLINK, errno.EAFNOSUPPORT]:
logging.exception("BAZARR cannot bind to IPv6 (*), trying with 0.0.0.0")
self.address = '0.0.0.0'
self.connected = False
super(Server, self).__init__()
else:
logging.exception("BAZARR cannot start because of unhandled exception.")
self.shutdown()
def interrupt_handler(self, signum, frame):
# print('Server signal interrupt handler called with signal', signum)
if not self.interrupted:
# ignore user hammering Ctrl-C; we heard you the first time!
self.interrupted = True
self.shutdown(EXIT_INTERRUPT)
def start(self):
self.server.print_listen("BAZARR is started and waiting for requests on: http://{}:{}")
signal.signal(signal.SIGINT, self.interrupt_handler)
try:
self.server.run()
except (KeyboardInterrupt, SystemExit):
self.shutdown()
except Exception:
pass
def close_all(self):
print("Closing database...")
close_database()
print("Closing webserver...")
self.server.close()
def shutdown(self, status=EXIT_NORMAL):
self.close_all()
stop_bazarr(status, False)
def restart(self):
self.close_all()
restart_bazarr()
webserver = Server()


@@ -0,0 +1,361 @@
# coding=utf-8
import logging
import json
import time
import threading
from requests import Session
from signalr import Connection
from requests.exceptions import ConnectionError
from signalrcore.hub_connection_builder import HubConnectionBuilder
from collections import deque
from time import sleep
from constants import HEADERS
from app.event_handler import event_stream
from sonarr.sync.episodes import sync_episodes, sync_one_episode
from sonarr.sync.series import update_series, update_one_series
from radarr.sync.movies import update_movies, update_one_movie
from sonarr.info import get_sonarr_info, url_sonarr
from radarr.info import url_radarr
from .database import TableShows, TableMovies, database, select
from .config import settings
from .scheduler import scheduler
from .get_args import args
sonarr_queue = deque()
radarr_queue = deque()
last_event_data = None
class SonarrSignalrClientLegacy:
def __init__(self):
super(SonarrSignalrClientLegacy, self).__init__()
self.apikey_sonarr = None
self.session = Session()
self.session.timeout = 60
self.session.verify = False
self.session.headers = HEADERS
self.connection = None
self.connected = False
def start(self):
if get_sonarr_info.is_legacy():
logging.warning(
    f'BAZARR can only sync from Sonarr v3 SignalR feed to get real-time updates. You should consider '
    f'upgrading your version ({get_sonarr_info.version()}).')
else:
self.connected = False
event_stream(type='badges')
logging.info('BAZARR trying to connect to Sonarr SignalR feed...')
self.configure()
while not self.connection.started:
try:
self.connection.start()
except ConnectionError:
time.sleep(5)
except json.decoder.JSONDecodeError:
logging.error("BAZARR cannot parse JSON returned by SignalR feed. This is caused by a permissions "
"issue when Sonarr try to access its /config/.config directory."
"Typically permissions are too permissive - only the user and group Sonarr runs as "
"should have Read/Write permissions (e.g. files 664 / folders 775). You should fix "
"permissions on that directory and restart Sonarr. Also, if you're a Docker image "
"user, you should make sure you properly defined PUID/PGID environment variables. "
"Otherwise, please contact Sonarr support.")
self.stop()
break
else:
self.connected = True
event_stream(type='badges')
logging.info('BAZARR SignalR client for Sonarr is connected and waiting for events.')
if not args.dev:
scheduler.add_job(update_series, kwargs={'send_event': True}, max_instances=1)
def stop(self, log=True):
try:
self.connection.close()
except Exception:
self.connection.started = False
if log:
logging.info('BAZARR SignalR client for Sonarr is now disconnected.')
def restart(self):
if self.connection:
if self.connection.started:
self.stop(log=False)
if settings.general.use_sonarr:
self.start()
def exception_handler(self):
sonarr_queue.clear()
self.connected = False
event_stream(type='badges')
logging.error('BAZARR connection to Sonarr SignalR feed has been lost.')
self.restart()
def configure(self):
self.apikey_sonarr = settings.sonarr.apikey
self.connection = Connection(f"{url_sonarr()}/signalr", self.session)
self.connection.qs = {'apikey': self.apikey_sonarr}
sonarr_hub = self.connection.register_hub('') # Sonarr doesn't use named hub
sonarr_method = ['series', 'episode']
for item in sonarr_method:
sonarr_hub.client.on(item, feed_queue)
self.connection.exception += self.exception_handler
class SonarrSignalrClient:
def __init__(self):
super(SonarrSignalrClient, self).__init__()
self.apikey_sonarr = None
self.connection = None
self.connected = False
def start(self):
self.configure()
logging.info('BAZARR trying to connect to Sonarr SignalR feed...')
while self.connection.transport.state.value not in [0, 1, 2]:
try:
self.connection.start()
except ConnectionError:
time.sleep(5)
def stop(self):
logging.info('BAZARR SignalR client for Sonarr is now disconnected.')
self.connection.stop()
def restart(self):
if self.connection:
if self.connection.transport.state.value in [0, 1, 2]:
self.stop()
if settings.general.use_sonarr:
self.start()
def exception_handler(self):
sonarr_queue.clear()
self.connected = False
event_stream(type='badges')
logging.error("BAZARR connection to Sonarr SignalR feed has failed. We'll try to reconnect.")
self.restart()
def on_connect_handler(self):
self.connected = True
event_stream(type='badges')
logging.info('BAZARR SignalR client for Sonarr is connected and waiting for events.')
if not args.dev:
scheduler.add_job(update_series, kwargs={'send_event': True}, max_instances=1)
def on_reconnect_handler(self):
self.connected = False
event_stream(type='badges')
logging.error('BAZARR SignalR client for Sonarr connection has been lost. Trying to reconnect...')
def configure(self):
self.apikey_sonarr = settings.sonarr.apikey
self.connection = HubConnectionBuilder() \
.with_url(f"{url_sonarr()}/signalr/messages?access_token={self.apikey_sonarr}",
options={
"verify_ssl": False,
"headers": HEADERS
}) \
.with_automatic_reconnect({
"type": "raw",
"keep_alive_interval": 5,
"reconnect_interval": 180,
"max_attempts": None
}).build()
self.connection.on_open(self.on_connect_handler)
self.connection.on_reconnect(self.on_reconnect_handler)
self.connection.on_close(lambda: logging.debug('BAZARR SignalR client for Sonarr is disconnected.'))
self.connection.on_error(self.exception_handler)
self.connection.on("receiveMessage", feed_queue)
class RadarrSignalrClient:
def __init__(self):
super(RadarrSignalrClient, self).__init__()
self.apikey_radarr = None
self.connection = None
self.connected = False
def start(self):
self.configure()
logging.info('BAZARR trying to connect to Radarr SignalR feed...')
while self.connection.transport.state.value not in [0, 1, 2]:
try:
self.connection.start()
except ConnectionError:
time.sleep(5)
def stop(self):
logging.info('BAZARR SignalR client for Radarr is now disconnected.')
self.connection.stop()
def restart(self):
if self.connection:
if self.connection.transport.state.value in [0, 1, 2]:
self.stop()
if settings.general.use_radarr:
self.start()
def exception_handler(self):
radarr_queue.clear()
self.connected = False
event_stream(type='badges')
logging.error("BAZARR connection to Radarr SignalR feed has failed. We'll try to reconnect.")
self.restart()
def on_connect_handler(self):
self.connected = True
event_stream(type='badges')
logging.info('BAZARR SignalR client for Radarr is connected and waiting for events.')
if not args.dev:
scheduler.add_job(update_movies, kwargs={'send_event': True}, max_instances=1)
def on_reconnect_handler(self):
self.connected = False
event_stream(type='badges')
logging.error('BAZARR SignalR client for Radarr connection has been lost. Trying to reconnect...')
def configure(self):
self.apikey_radarr = settings.radarr.apikey
self.connection = HubConnectionBuilder() \
.with_url(f"{url_radarr()}/signalr/messages?access_token={self.apikey_radarr}",
options={
"verify_ssl": False,
"headers": HEADERS
}) \
.with_automatic_reconnect({
"type": "raw",
"keep_alive_interval": 5,
"reconnect_interval": 180,
"max_attempts": None
}).build()
self.connection.on_open(self.on_connect_handler)
self.connection.on_reconnect(self.on_reconnect_handler)
self.connection.on_close(lambda: logging.debug('BAZARR SignalR client for Radarr is disconnected.'))
self.connection.on_error(self.exception_handler)
self.connection.on("receiveMessage", feed_queue)
def dispatcher(data):
try:
series_title = series_year = episode_title = season_number = episode_number = movie_title = movie_year = None
try:
episodesChanged = False
topic = data['name']
media_id = data['body']['resource']['id']
action = data['body']['action']
if topic == 'series':
if 'episodesChanged' in data['body']['resource']:
episodesChanged = data['body']['resource']['episodesChanged']
series_title = data['body']['resource']['title']
series_year = data['body']['resource']['year']
elif topic == 'episode':
if 'series' in data['body']['resource']:
series_title = data['body']['resource']['series']['title']
series_year = data['body']['resource']['series']['year']
else:
series_metadata = database.execute(
select(TableShows.title, TableShows.year)
.where(TableShows.sonarrSeriesId == data['body']['resource']['seriesId']))\
.first()
if series_metadata:
series_title = series_metadata.title
series_year = series_metadata.year
episode_title = data['body']['resource']['title']
season_number = data['body']['resource']['seasonNumber']
episode_number = data['body']['resource']['episodeNumber']
elif topic == 'movie':
if action == 'deleted':
existing_movie_details = database.execute(
select(TableMovies.title, TableMovies.year)
.where(TableMovies.radarrId == media_id)) \
.first()
if existing_movie_details:
movie_title = existing_movie_details.title
movie_year = existing_movie_details.year
else:
return
else:
movie_title = data['body']['resource']['title']
movie_year = data['body']['resource']['year']
except KeyError:
return
if topic == 'series':
logging.debug(f'Event received from Sonarr for series: {series_title} ({series_year})')
update_one_series(series_id=media_id, action=action)
if episodesChanged:
# this will happen if a season monitored status is changed.
sync_episodes(series_id=media_id, send_event=True)
elif topic == 'episode':
logging.debug(f'Event received from Sonarr for episode: {series_title} ({series_year}) - '
f'S{season_number:0>2}E{episode_number:0>2} - {episode_title}')
sync_one_episode(episode_id=media_id, defer_search=settings.sonarr.defer_search_signalr)
elif topic == 'movie':
logging.debug(f'Event received from Radarr for movie: {movie_title} ({movie_year})')
update_one_movie(movie_id=media_id, action=action,
defer_search=settings.radarr.defer_search_signalr)
except Exception as e:
logging.debug(f'BAZARR an exception occurred while parsing SignalR feed: {repr(e)}')
finally:
event_stream(type='badges')
return
def feed_queue(data):
# check if event is duplicate from the previous one
global last_event_data
if data == last_event_data:
return
else:
last_event_data = data
# some Sonarr versions send the event as a single-item list of dicts; normalize it to a dict
if isinstance(data, list) and len(data):
    data = data[0]
# if data is a dict and contains an event for a series, episode or movie, add it to the matching queue
if isinstance(data, dict) and 'name' in data:
if data['name'] in ['series', 'episode']:
sonarr_queue.append(data)
elif data['name'] == 'movie':
radarr_queue.append(data)
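# Assumed shape of a queued SignalR event, inferred from the key accesses in
# dispatcher() above (not documented here by Sonarr/Radarr):
#   {"name": "episode",
#    "body": {"action": "updated",
#             "resource": {"id": 123, "title": "...", "seasonNumber": 1,
#                          "episodeNumber": 2}}}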
def consume_queue(queue):
# get events data from queue one at a time and dispatch it
while True:
try:
data = queue.popleft()
except IndexError:
pass
except (KeyboardInterrupt, SystemExit):
break
else:
dispatcher(data)
sleep(0.1)
# start both queue consuming threads
sonarr_queue_thread = threading.Thread(target=consume_queue, args=(sonarr_queue,))
sonarr_queue_thread.daemon = True
sonarr_queue_thread.start()
radarr_queue_thread = threading.Thread(target=consume_queue, args=(radarr_queue,))
radarr_queue_thread.daemon = True
radarr_queue_thread.start()
# instantiate proper SignalR client
sonarr_signalr_client = SonarrSignalrClientLegacy() if get_sonarr_info.version().startswith(('0.', '2.', '3.')) else \
SonarrSignalrClient()
radarr_signalr_client = RadarrSignalrClient()

201
bazarr/app/ui.py Normal file

@@ -0,0 +1,201 @@
# coding=utf-8
import os
import requests
import mimetypes
from flask import (request, abort, render_template, Response, session, send_file, stream_with_context, Blueprint,
redirect)
from functools import wraps
from urllib.parse import unquote
from constants import HEADERS
from literals import FILE_LOG
from sonarr.info import url_api_sonarr
from radarr.info import url_api_radarr
from utilities.helper import check_credentials
from utilities.central import get_log_file_path
from .config import settings, base_url
from .database import System
from .get_args import args
frontend_build_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'frontend', 'build')
ui_bp = Blueprint('ui', __name__,
template_folder=frontend_build_path,
static_folder=os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'frontend',
'build', 'assets'),
static_url_path='/assets')
if os.path.exists(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'frontend', 'build',
'images')):
static_directory = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'frontend', 'build',
'images')
else:
static_directory = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'frontend', 'public',
'images')
static_bp = Blueprint('images', __name__, static_folder=static_directory, static_url_path='/images')
ui_bp.register_blueprint(static_bp)
mimetypes.add_type('application/javascript', '.js')
mimetypes.add_type('text/css', '.css')
mimetypes.add_type('font/woff2', '.woff2')
mimetypes.add_type('image/svg+xml', '.svg')
mimetypes.add_type('image/png', '.png')
mimetypes.add_type('image/x-icon', '.ico')
mimetypes.add_type('application/manifest+json', '.webmanifest')
pwa_assets = ['registerSW.js', 'manifest.webmanifest', 'sw.js']
def check_login(actual_method):
@wraps(actual_method)
def wrapper(*args, **kwargs):
if settings.auth.type == 'basic':
auth = request.authorization
if not (auth and
check_credentials(request.authorization.username, request.authorization.password, request)):
return ('Unauthorized', 401, {
'WWW-Authenticate': 'Basic realm="Login Required"'
})
elif settings.auth.type == 'form':
    if 'logged_in' not in session:
        return abort(401, 'Unauthorized')  # flask.abort takes the description positionally, not as message=
return actual_method(*args, **kwargs)  # propagate the wrapped view's response
return wrapper
@ui_bp.route('/', defaults={'path': ''})
@ui_bp.route('/<path:path>')
def catch_all(path):
if path.startswith('login') and settings.auth.type not in ['basic', 'form']:
# login page has been accessed when no authentication is enabled
return redirect(base_url or "/", code=302)
# PWA Assets are returned from frontend root folder
if path in pwa_assets or path.startswith('workbox-'):
return send_file(os.path.join(frontend_build_path, path))
auth = True
if settings.auth.type == 'basic':
auth = request.authorization
if not (auth and check_credentials(request.authorization.username, request.authorization.password, request,
log_success=False)):
return ('Unauthorized', 401, {
'WWW-Authenticate': 'Basic realm="Login Required"'
})
elif settings.auth.type == 'form':
if 'logged_in' not in session or not session['logged_in']:
auth = False
try:
updated = System.get().updated
except Exception:
updated = '0'
inject = dict()
if not path.startswith('api/'):
inject["baseUrl"] = base_url
inject["canUpdate"] = not args.no_update
inject["hasUpdate"] = updated != '0'
if auth:
inject["apiKey"] = settings.auth.apikey
template_url = base_url
if not template_url.endswith("/"):
template_url += "/"
return render_template("index.html", BAZARR_SERVER_INJECT=inject, baseUrl=template_url)
@check_login
@ui_bp.route('/' + FILE_LOG)
def download_log():
return send_file(get_log_file_path(), max_age=0, as_attachment=True)
@check_login
@ui_bp.route('/images/series/<path:url>', methods=['GET'])
def series_images(url):
url = url.strip("/")
apikey = settings.sonarr.apikey
baseUrl = settings.sonarr.base_url
url_image = f'{url_api_sonarr()}{url.lstrip(baseUrl)}?apikey={apikey}'.replace('poster-250', 'poster-500')
try:
req = requests.get(url_image, stream=True, timeout=15, verify=False, headers=HEADERS)
except Exception:
return '', 404
else:
return Response(stream_with_context(req.iter_content(2048)), content_type=req.headers['content-type'])
@check_login
@ui_bp.route('/images/movies/<path:url>', methods=['GET'])
def movies_images(url):
apikey = settings.radarr.apikey
baseUrl = settings.radarr.base_url
if baseUrl and url.startswith(baseUrl):
    url = url[len(baseUrl):]  # remove the URL base prefix; str.lstrip() would strip characters, not a prefix
url_image = f'{url_api_radarr()}{url}?apikey={apikey}'
try:
req = requests.get(url_image, stream=True, timeout=15, verify=False, headers=HEADERS)
except Exception:
return '', 404
else:
return Response(stream_with_context(req.iter_content(2048)), content_type=req.headers['content-type'])
@check_login
@ui_bp.route('/system/backup/download/<path:filename>', methods=['GET'])
def backup_download(filename):
fullpath = os.path.normpath(os.path.join(settings.backup.folder, filename))
if not fullpath.startswith(settings.backup.folder):
return '', 404
else:
return send_file(fullpath, max_age=0, as_attachment=True)
@ui_bp.route('/api/swaggerui/static/<path:filename>', methods=['GET'])
def swaggerui_static(filename):
basepath = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'libs', 'flask_restx',
'static')
fullpath = os.path.realpath(os.path.join(basepath, filename))
if not basepath == os.path.commonpath((basepath, fullpath)):
return '', 404
else:
return send_file(fullpath)
def configured():
System.update({System.configured: '1'}).execute()
@check_login
@ui_bp.route('/test', methods=['GET'])
@ui_bp.route('/test/<protocol>/<path:url>', methods=['GET'])
def proxy(protocol, url):
if protocol.lower() not in ['http', 'https']:
return dict(status=False, error='Unsupported protocol', code=0)
url = f'{protocol}://{unquote(url)}'
params = request.args
try:
result = requests.get(url, params, allow_redirects=False, verify=False, timeout=5, headers=HEADERS)
except Exception as e:
return dict(status=False, error=repr(e))
else:
if result.status_code == 200:
try:
version = result.json()['version']
return dict(status=True, version=version, code=result.status_code)
except Exception:
return dict(status=False, error='Error Occurred. Check your settings.', code=result.status_code)
elif result.status_code == 401:
return dict(status=False, error='Access Denied. Check API key.', code=result.status_code)
elif result.status_code == 404:
return dict(status=False, error='Cannot get version. Maybe unsupported legacy API call?',
code=result.status_code)
elif 300 <= result.status_code <= 399:
return dict(status=False, error='Wrong URL Base.', code=result.status_code)
else:
return dict(status=False, error=result.raise_for_status(), code=result.status_code)

12
bazarr/constants.py Normal file

@@ -0,0 +1,12 @@
# coding=utf-8
import os
# set Bazarr user-agent used to make requests
HEADERS = {"User-Agent": os.environ["SZ_USER_AGENT"]}
# minimum file size (in bytes) for Bazarr to consider a file a video: 20 KiB
MINIMUM_VIDEO_SIZE = 20480
# maximum size for a subtitles file: 1 MiB
MAXIMUM_SUBTITLE_SIZE = 1 * 1024 * 1024

4
bazarr/get_args.py Normal file

@@ -0,0 +1,4 @@
# coding=utf-8
# This is required to prevent the daemon (bazarr.py) from raising an ImportError after upgrading from 1.0.4
from .app.get_args import args # noqa: W0611

221
bazarr/init.py Normal file

@@ -0,0 +1,221 @@
# coding=utf-8
import os
import sys
import subprocess
import subliminal
import datetime
import time
import rarfile
from dogpile.cache.region import register_backend as register_cache_backend
from app.config import settings, configure_captcha_func, write_config
from app.get_args import args
from app.logger import configure_logging
from utilities.binaries import get_binary, BinaryNotFound
from utilities.path_mappings import path_mappings
from utilities.backup import restore_from_backup
from app.database import init_db
from literals import (EXIT_CONFIG_CREATE_ERROR, ENV_BAZARR_ROOT_DIR, DIR_BACKUP, DIR_CACHE, DIR_CONFIG, DIR_DB, DIR_LOG,
DIR_RESTORE, EXIT_REQUIREMENTS_ERROR)
from utilities.central import make_bazarr_dir, restart_bazarr, stop_bazarr
# set start time global variable as epoch
global startTime
startTime = time.time()
# restore backup if required
restore_from_backup()
# set subliminal_patch user agent
os.environ["SZ_USER_AGENT"] = f"Bazarr/{os.environ['BAZARR_VERSION']}"
# Check if args.config_dir exist
if not os.path.exists(args.config_dir):
# Create config_dir directory tree
try:
os.mkdir(os.path.join(args.config_dir))
except OSError:
print("BAZARR The configuration directory doesn't exist and Bazarr cannot create it (permission issue?).")
stop_bazarr(EXIT_CONFIG_CREATE_ERROR)
os.environ[ENV_BAZARR_ROOT_DIR] = os.path.join(args.config_dir)
make_bazarr_dir(DIR_BACKUP)
make_bazarr_dir(DIR_CACHE)
make_bazarr_dir(DIR_CONFIG)
make_bazarr_dir(DIR_DB)
make_bazarr_dir(DIR_LOG)
make_bazarr_dir(DIR_RESTORE)
# set subliminal_patch hearing-impaired extension to use when naming subtitles
os.environ["SZ_HI_EXTENSION"] = settings.general.hi_extension
# set anti-captcha provider and key
configure_captcha_func()
# import Google Analytics module to make sure logging is properly configured afterwards
from ga4mp import GtagMP # noqa E402
# configure logging
configure_logging(settings.general.debug or args.debug)
import logging # noqa E402
def is_virtualenv():
# return True if Bazarr has been started from within a virtualenv or venv
base_prefix = getattr(sys, "base_prefix", None)
# real_prefix is only set inside a legacy virtualenv; otherwise fall back to sys.prefix
real_prefix = getattr(sys, "real_prefix", None) or sys.prefix
return base_prefix != real_prefix
# deploy requirements.txt
if not args.no_update:
    try:
        if os.name == 'nt':
            import win32api, win32con  # noqa E401
        import lxml, numpy, webrtcvad, setuptools, PIL  # noqa E401
    except ImportError:
        try:
            import pip  # noqa W0611
        except ImportError:
            logging.info('BAZARR unable to install requirements (pip not installed).')
        else:
            if os.path.expanduser("~") == '/':
                logging.info('BAZARR unable to install requirements (user without home directory).')
            else:
                logging.info('BAZARR installing requirements...')
                try:
                    pip_command = [sys.executable, '-m', 'pip', 'install', '-qq', '--disable-pip-version-check',
                                   '-r', os.path.join(os.path.dirname(os.path.dirname(__file__)), 'requirements.txt')]
                    if not is_virtualenv():
                        # --user only makes sense when not running inside a venv
                        pip_command.insert(4, '--user')
                    subprocess.check_output(pip_command, stderr=subprocess.STDOUT)
                except subprocess.CalledProcessError as e:
                    logging.exception(f'BAZARR requirements.txt installation result: {e.stdout}')
                    os._exit(EXIT_REQUIREMENTS_ERROR)
                else:
                    logging.info('BAZARR requirements installed.')
                    restart_bazarr()
# strip any trailing slash from base_url (the default '/' becomes '')
settings.general.base_url = settings.general.base_url.rstrip('/')
write_config()
# migrate enabled_providers from comma separated string to list
# e.g. "opensubtitles,podnapisi" becomes "['opensubtitles', 'podnapisi']"
if isinstance(settings.general.enabled_providers, str) and not settings.general.enabled_providers.startswith('['):
    settings.general.enabled_providers = str(settings.general.enabled_providers.split(","))
    write_config()
# Read package_info (if exists) to override some settings by package maintainers
# This file can also provide some info about the package version and author
package_info_file = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'package_info')
if os.path.isfile(package_info_file):
    try:
        splitted_lines = []
        package_info = {}
        with open(package_info_file) as file:
            lines = file.readlines()
            for line in lines:
                splitted_lines += line.split(r'\n')
            for line in splitted_lines:
                splitted_line = line.split('=', 1)
                if len(splitted_line) == 2:
                    package_info[splitted_line[0].lower()] = splitted_line[1].replace('\n', '')
                else:
                    continue
        # package author can force a branch to follow
        if 'branch' in package_info:
            settings.general.branch = package_info['branch']
        # package author can disable update
        if package_info.get('updatemethod', '') == 'External':
            os.environ['BAZARR_UPDATE_ALLOWED'] = '0'
            os.environ['BAZARR_UPDATE_MESSAGE'] = package_info.get('updatemethodmessage', '')
        # package author can provide version and contact info
        os.environ['BAZARR_PACKAGE_VERSION'] = package_info.get('packageversion', '')
        os.environ['BAZARR_PACKAGE_AUTHOR'] = package_info.get('packageauthor', '')
    except Exception:
        pass
    else:
        write_config()
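# For illustration only (not executed): a hypothetical package_info file of the
# shape parsed above might contain, e.g.:
#     branch=development
#     updatemethod=External
#     updatemethodmessage=Please use your package manager to update Bazarr
#     packageversion=1.4.3-1
#     packageauthor=ExamplePackager
# Note the parser also splits on literal "\n" sequences, so a maintainer may pack
# all key=value pairs onto a single physical line.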
# Configure dogpile file caching for Subliminal request
register_cache_backend("subzero.cache.file", "subzero.cache_backends.file", "SZFileBackend")
subliminal.region.configure('subzero.cache.file', expiration_time=datetime.timedelta(days=30),
                            arguments={'appname': "sz_cache", 'app_cache_dir': args.config_dir},
                            replace_existing_backend=True)
subliminal.region.backend.sync()
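# Once configured, the region behaves like any dogpile.cache region backed by
# SZFileBackend; a hypothetical round-trip (illustrative only, arbitrary key/value):
#     subliminal.region.set('example-key', 'example-value')
#     subliminal.region.get('example-key')  # -> 'example-value'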
if not os.path.exists(os.path.join(args.config_dir, 'config', 'releases.txt')):
    from app.check_update import check_releases
    check_releases()
    logging.debug("BAZARR Created releases file")

if not os.path.exists(os.path.join(args.config_dir, 'config', 'announcements.txt')):
    from app.announcements import get_announcements_to_file
    get_announcements_to_file()
    logging.debug("BAZARR Created announcements file")
# Move GA visitor from config to dedicated file
if 'visitor' in settings.analytics:
    with open(os.path.normpath(os.path.join(args.config_dir, 'config', 'analytics.dat')), 'w+') as handle:
        handle.write(settings.analytics.visitor)
    settings['analytics'].pop('visitor', None)
# Clean unused settings from config
settings['general'].pop('throtteled_providers', None)
settings['general'].pop('update_restart', None)
write_config()
# Remove deprecated providers from enabled providers in config
from subliminal_patch.extensions import provider_registry # noqa E401
existing_providers = provider_registry.names()
enabled_providers = settings.general.enabled_providers
settings.general.enabled_providers = [x for x in enabled_providers if x in existing_providers]
write_config()
def init_binaries():
    try:
        exe = get_binary("unar")
        rarfile.UNAR_TOOL = exe
        rarfile.UNRAR_TOOL = None
        rarfile.SEVENZIP_TOOL = None
        rarfile.tool_setup(unrar=False, unar=True, bsdtar=False, sevenzip=False, force=True)
    except (BinaryNotFound, rarfile.RarCannotExec):
        try:
            exe = get_binary("unrar")
            rarfile.UNRAR_TOOL = exe
            rarfile.UNAR_TOOL = None
            rarfile.SEVENZIP_TOOL = None
            rarfile.tool_setup(unrar=True, unar=False, bsdtar=False, sevenzip=False, force=True)
        except (BinaryNotFound, rarfile.RarCannotExec):
            try:
                exe = get_binary("7z")
                rarfile.UNRAR_TOOL = None
                rarfile.UNAR_TOOL = None
                rarfile.SEVENZIP_TOOL = exe
                rarfile.tool_setup(unrar=False, unar=False, bsdtar=False, sevenzip=True, force=True)
            except (BinaryNotFound, rarfile.RarCannotExec):
                logging.exception("BAZARR requires a rar archive extraction utility (unrar, unar or 7zip) and none can be found.")
                raise BinaryNotFound
            else:
                logging.debug("Using 7zip from: %s", exe)
                return exe
        else:
            logging.debug("Using UnRAR from: %s", exe)
            return exe
    else:
        logging.debug("Using unar from: %s", exe)
        return exe
init_db()
init_binaries()
path_mappings.update()

1
bazarr/languages/__init__.py Normal file

@@ -0,0 +1 @@
# coding=utf-8

238
bazarr/languages/custom_lang.py Normal file

@@ -0,0 +1,238 @@
# -*- coding: utf-8 -*-
import logging
import os
from subzero.language import Language
from app.database import database, insert, update
from sqlalchemy.exc import IntegrityError
logger = logging.getLogger(__name__)
class CustomLanguage:
    """Base class for custom languages."""

    alpha2 = "pb"
    alpha3 = "pob"
    language = "pt-BR"
    official_alpha2 = "pt"
    official_alpha3 = "por"
    name = "Portuguese (Brazil)"
    iso = "BR"
    _scripts = []
    _possible_matches = ("pt-br", "pob", "pb", "brazilian", "brasil", "brazil")
    _extensions = (".pt-br", ".pob", ".pb")
    _extensions_forced = (".pt-br.forced", ".pob.forced", ".pb.forced")
    _extensions_hi = (".pt-br.hi", ".pob.hi", ".pb.hi",
                      ".pt-br.cc", ".pob.cc", ".pb.cc",
                      ".pt-br.sdh", ".pob.sdh", ".pb.sdh")
    def subzero_language(self):
        return Language(self.official_alpha3, self.iso)

    @classmethod
    def from_value(cls, value, attr="alpha3"):
        """Return a custom language subclass by value and attribute
        if found, otherwise return None.

        :param value: value to compare against the given attribute
        :param attr: attribute name to match on (defaults to "alpha3")
        """
        for sub in cls.__subclasses__():
            if getattr(sub, attr) == str(value):
                return sub()

        return None

    @classmethod
    def register(cls, table):
        """Register the custom language subclasses in the database."""
        for sub in cls.__subclasses__():
            try:
                database.execute(
                    insert(table)
                    .values(code3=sub.alpha3,
                            code2=sub.alpha2,
                            name=sub.name,
                            enabled=0))
            except IntegrityError:
                database.execute(
                    update(table)
                    .values(code2=sub.alpha2,
                            name=sub.name)
                    .where(table.code3 == sub.alpha3))

    @classmethod
    def found_external(cls, subtitle, subtitle_path):
        for sub in cls.__subclasses__():
            code = sub.get_alpha_type(subtitle, subtitle_path)
            if code:
                return code

        return None

    @classmethod
    def get_alpha_type(cls, subtitle: str, subtitle_path=None):
        # splitext()[0] keeps the filename stem, e.g. "movie.pt-br"
        extension = str(os.path.splitext(subtitle)[0]).lower()
        to_return = None

        if extension.endswith(cls._extensions):
            to_return = cls.alpha2

        if extension.endswith(cls._extensions_forced):
            to_return = f"{cls.alpha2}:forced"

        if extension.endswith(cls._extensions_hi):
            to_return = f"{cls.alpha2}:HI"

        if to_return is not None:
            logging.debug("BAZARR external subtitles detected: %s", to_return)

        return to_return

    def ffprobe_found(self, detected_language: dict) -> bool:
        name = detected_language.get("name", "").lower()
        if not name:
            return False

        return any(ext in name for ext in self._possible_matches)

    def language_found(self, language: Language):
        if str(language.country) == self.iso:
            return True

        if language.script and str(language.script) in self._scripts:
            return True

        return False
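# Illustrative only (not part of this module): how from_value() resolves the
# subclasses defined below, assuming the values declared in this file.
#   CustomLanguage.from_value("pob")                     # -> BrazilianPortuguese()
#   CustomLanguage.from_value("zh-TW", attr="language")  # -> ChineseTraditional()
#   CustomLanguage.from_value("xx")                      # -> None (no subclass matches)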
class BrazilianPortuguese(CustomLanguage):
    # Same attributes as the base class
    pass
class Portuguese(CustomLanguage):
    alpha2 = "pt"
    alpha3 = "por"
    language = "pt-PT"
    official_alpha2 = "pt"
    official_alpha3 = "por"
    name = "Portuguese"
    iso = "PT"
    _scripts = []
    _possible_matches = ("pt-pt", "por", "pt")
    _extensions = (".pt-pt", ".por", ".pt")
    _extensions_forced = (".pt-pt.forced", ".por.forced", ".pt.forced")
    _extensions_hi = (".pt-pt.hi", ".por.hi", ".pt.hi",
                      ".pt-pt.cc", ".por.cc", ".pt.cc",
                      ".pt-pt.sdh", ".por.sdh", ".pt.sdh")

    def subzero_language(self):
        return Language(self.official_alpha3)

    def language_found(self, language: Language):
        return str(language.alpha3) == self.alpha3
class ChineseTraditional(CustomLanguage):
    alpha2 = "zt"
    alpha3 = "zht"
    language = "zh-TW"
    official_alpha2 = "zh"
    official_alpha3 = "zho"
    name = "Chinese Traditional"
    iso = "TW"
    # _scripts = (Script("Hant"),)
    # We'll use literals for now
    _scripts = ("Hant",)
    _extensions = (
        ".cht", ".tc", ".zh-tw", ".zht", ".zh-hant", ".zhhant", ".zh_hant", ".hant", ".big5", ".traditional",
    )
    _extensions_forced = (
        ".cht.forced", ".tc.forced", ".zht.forced", "hant.forced", ".big5.forced", "繁體中文.forced", "雙語.forced",
        ".zh-tw.forced",
    )
    _extensions_hi = (
        ".cht.hi", ".tc.hi", ".zht.hi", "hant.hi", ".big5.hi", "繁體中文.hi", "雙語.hi", ".zh-tw.hi",
        ".cht.cc", ".tc.cc", ".zht.cc", "hant.cc", ".big5.cc", "繁體中文.cc", "雙語.cc", ".zh-tw.cc",
        ".cht.sdh", ".tc.sdh", ".zht.sdh", "hant.sdh", ".big5.sdh", "繁體中文.sdh", "雙語.sdh", ".zh-tw.sdh",
    )
    _extensions_fuzzy = ("", "雙語")
    _extensions_disamb_fuzzy = ("", "双语")
    _extensions_disamb = (
        ".chs", ".sc", ".zhs", ".zh-hans", ".hans", ".zh_hans", ".zhhans", ".gb", ".simplified",
    )
    _extensions_disamb_forced = (
        ".chs.forced", ".sc.forced", ".zhs.forced", "hans.forced", ".gb.forced", "简体中文.forced", "双语.forced",
    )
    _extensions_disamb_hi = (
        ".chs.hi", ".sc.hi", ".zhs.hi", "hans.hi", ".gb.hi", "简体中文.hi", "双语.hi",
        ".chs.cc", ".sc.cc", ".zhs.cc", "hans.cc", ".gb.cc", "简体中文.cc", "双语.cc",
        ".chs.sdh", ".sc.sdh", ".zhs.sdh", "hans.sdh", ".gb.sdh", "简体中文.sdh", "双语.sdh",
    )

    @classmethod
    def get_alpha_type(cls, subtitle, subtitle_path=None):
        subtitle_path = str(subtitle_path).lower()
        extension = str(os.path.splitext(subtitle)[0]).lower()
        to_return = None

        # Simplified chinese
        if (
            extension.endswith(cls._extensions_disamb)
            or subtitle_path in cls._extensions_disamb_fuzzy
        ):
            to_return = "zh"
        elif any(ext in extension[-12:] for ext in cls._extensions_disamb_forced):
            to_return = "zh:forced"
        elif any(ext in extension[-12:] for ext in cls._extensions_disamb_hi):
            to_return = "zh:HI"
        # Traditional chinese
        elif (
            extension.endswith(cls._extensions)
            or subtitle_path[:-5] in cls._extensions_fuzzy
        ):
            to_return = "zt"
        elif any(ext in extension[-12:] for ext in cls._extensions_forced):
            to_return = "zt:forced"
        elif any(ext in extension[-12:] for ext in cls._extensions_hi):
            to_return = "zt:HI"

        if to_return is not None:
            logging.debug("BAZARR external subtitles detected: %s", to_return)

        return to_return
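# Illustrative results for the disambiguation above (hypothetical filenames,
# following the tuples defined in this class):
#   get_alpha_type("movie.zh-hans.srt", "/films/movie.zh-hans.srt")        -> "zh"
#   get_alpha_type("movie.cht.srt", "/films/movie.cht.srt")                -> "zt"
#   get_alpha_type("movie.zht.forced.srt", "/films/movie.zht.forced.srt")  -> "zt:forced"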
class LatinAmericanSpanish(CustomLanguage):
    alpha2 = "ea"  # Only one available I can think of
    alpha3 = "spl"
    language = "es-MX"
    official_alpha2 = "es"
    official_alpha3 = "spa"
    name = "Spanish (Latino)"
    iso = "MX"  # Not fair, but ok
    _scripts = ("419",)
    _possible_matches = (
        "es-la", "spa-la", "spl", "mx", "latin", "mexic", "argent", "latam",
    )
    _extensions = (".es-la", ".spl", ".spa-la", ".ea", ".es-mx", ".lat", ".es.ar")
    _extensions_forced = (
        ".es-la.forced", ".spl.forced", ".spa-la.forced", ".ea.forced", ".es-mx.forced", ".lat.forced", ".es.ar.forced",
    )
    _extensions_hi = (
        ".es-la.hi", ".spl.hi", ".spa-la.hi", ".ea.hi", ".es-mx.hi", ".lat.hi", ".es.ar.hi",
        ".es-la.cc", ".spl.cc", ".spa-la.cc", ".ea.cc", ".es-mx.cc", ".lat.cc", ".es.ar.cc",
        ".es-la.sdh", ".spl.sdh", ".spa-la.sdh", ".ea.sdh", ".es-mx.sdh", ".lat.sdh", ".es.ar.sdh",
    )
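An end-to-end sketch of the external-subtitle detection entry point (the filename and path are made up; the module path assumes the languages/ package layout inferred above):

from languages.custom_lang import CustomLanguage

code = CustomLanguage.found_external("movie.es-mx.srt", "/films/movie.es-mx.srt")
# walks the subclasses; LatinAmericanSpanish matches the .es-mx stem
assert code == "ea"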

116
bazarr/languages/get_languages.py Normal file

@@ -0,0 +1,116 @@
# coding=utf-8
import pycountry
from subzero.language import Language
from .custom_lang import CustomLanguage
from app.database import TableSettingsLanguages, database, insert, update, select
def load_language_in_db():
    # Get languages list in langs tuple
    langs = [{'code3': lang.alpha_3, 'code2': lang.alpha_2, 'name': lang.name, 'enabled': 0}
             for lang in pycountry.languages
             if hasattr(lang, 'alpha_2')]

    # Insert standard languages in database table
    database.execute(
        insert(TableSettingsLanguages)
        .values(langs)
        .on_conflict_do_nothing())

    # Update standard languages with code3b if available
    langs = [{'code3b': lang.bibliographic, 'code3': lang.alpha_3}
             for lang in pycountry.languages
             if hasattr(lang, 'alpha_2') and hasattr(lang, 'bibliographic')]

    # Update languages in database table
    database.execute(
        update(TableSettingsLanguages), langs)

    # Insert custom languages in database table
    CustomLanguage.register(TableSettingsLanguages)

    # Create languages dictionary for faster conversion than calling database
    create_languages_dict()
def create_languages_dict():
    global languages_dict

    # replace Chinese by Chinese Simplified
    database.execute(
        update(TableSettingsLanguages)
        .values(name='Chinese Simplified')
        .where(TableSettingsLanguages.code3 == 'zho'))

    # replace Modern Greek by Greek to match Sonarr and Radarr languages
    database.execute(
        update(TableSettingsLanguages)
        .values(name='Greek')
        .where(TableSettingsLanguages.code3 == 'ell'))

    languages_dict = [{
        'code3': x.code3,
        'code2': x.code2,
        'name': x.name,
        'code3b': x.code3b,
    } for x in database.execute(
        select(TableSettingsLanguages.code3, TableSettingsLanguages.code2, TableSettingsLanguages.name,
               TableSettingsLanguages.code3b))
        .all()]
def audio_language_from_name(lang):
    lang_map = {
        'Chinese': 'zh',
    }

    alpha2_code = lang_map.get(lang, None)

    if alpha2_code is None:
        return lang

    return language_from_alpha2(alpha2_code)


def language_from_alpha2(lang):
    return next((item['name'] for item in languages_dict if item['code2'] == lang[:2]), None)


def language_from_alpha3(lang):
    return next((item['name'] for item in languages_dict if lang[:3] in [item['code3'], item['code3b']]), None)


def alpha2_from_alpha3(lang):
    return next((item['code2'] for item in languages_dict if lang[:3] in [item['code3'], item['code3b']]), None)


def alpha2_from_language(lang):
    return next((item['code2'] for item in languages_dict if item['name'] == lang), None)


def alpha3_from_alpha2(lang):
    return next((item['code3'] for item in languages_dict if item['code2'] == lang[:2]), None)


def alpha3_from_language(lang):
    return next((item['code3'] for item in languages_dict if item['name'] == lang), None)
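# Interpreter-style examples (values follow the standard pycountry data loaded above):
#   language_from_alpha2('en')     -> 'English'
#   language_from_alpha3('fre')    -> 'French' (the bibliographic code3b also matches)
#   alpha2_from_alpha3('deu')      -> 'de'
#   alpha3_from_alpha2('es')       -> 'spa'
#   alpha3_from_language('Greek')  -> 'ell' (renamed from "Modern Greek" in create_languages_dict)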
def get_language_set():
    languages = database.execute(
        select(TableSettingsLanguages.code3)
        .where(TableSettingsLanguages.enabled == 1))\
        .all()

    language_set = set()

    for lang in languages:
        custom = CustomLanguage.from_value(lang.code3, "alpha3")
        if custom is None:
            language_set.add(Language(lang.code3))
        else:
            language_set.add(custom.subzero_language())

    return language_set

31
bazarr/literals.py Normal file

@@ -0,0 +1,31 @@
# coding=utf-8
# only primitive types can be specified here
# for other derived values, use constants.py
# bazarr environment variable names
ENV_STOPFILE = 'STOPFILE'
ENV_RESTARTFILE = 'RESTARTFILE'
ENV_BAZARR_ROOT_DIR = 'BAZARR_ROOT'
# bazarr subdirectories
DIR_BACKUP = 'backup'
DIR_CACHE = 'cache'
DIR_CONFIG = 'config'
DIR_DB = 'db'
DIR_LOG = 'log'
DIR_RESTORE = 'restore'
# bazarr special files
FILE_LOG = 'bazarr.log'
FILE_RESTART = 'bazarr.restart'
FILE_STOP = 'bazarr.stop'
# bazarr exit codes
EXIT_NORMAL = 0
EXIT_INTERRUPT = -100
EXIT_VALIDATION_ERROR = -101
EXIT_CONFIG_CREATE_ERROR = -102
EXIT_PYTHON_UPGRADE_NEEDED = -103
EXIT_REQUIREMENTS_ERROR = -104
EXIT_PORT_ALREADY_IN_USE_ERROR = -105
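A sketch of how these codes are meant to be used (mirroring the imports already visible in init.py and main.py; illustrative, not part of this module):

from literals import EXIT_PORT_ALREADY_IN_USE_ERROR
from utilities.central import stop_bazarr

# terminate Bazarr with a specific, machine-readable exit code
stop_bazarr(EXIT_PORT_ALREADY_IN_USE_ERROR)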

82
bazarr/main.py Normal file

@@ -0,0 +1,82 @@
# coding=utf-8
import os
from threading import Thread
bazarr_version = 'unknown'

version_file = os.path.join(os.path.dirname(__file__), '..', 'VERSION')
if os.path.isfile(version_file):
    with open(version_file, 'r') as f:
        bazarr_version = f.readline()
        bazarr_version = bazarr_version.rstrip('\n')

os.environ["BAZARR_VERSION"] = bazarr_version.lstrip('v')
import app.libs # noqa W0611
from app.get_args import args # noqa E402
from app.check_update import apply_update, check_releases, check_if_new_update # noqa E402
from app.config import settings, configure_proxy_func, base_url # noqa E402
from init import * # noqa E402
import logging # noqa E402
# Install downloaded update
if bazarr_version != '':
    apply_update()

# Check for new update and install latest
if args.no_update or not settings.general.auto_update:
    # the user has explicitly requested that we do not update, or is using a package/Docker image that prevents it
    check_releases()
else:
    # we want to update to the latest version before loading too much stuff; this should prevent deadlocks when
    # embedded packages are missing after a commit
    check_if_new_update()
from app.database import (System, database, update, migrate_db, create_db_revision, upgrade_languages_profile_hi_values,
                          fix_languages_profiles_with_duplicate_ids)  # noqa E402
from app.notifier import update_notifier # noqa E402
from languages.get_languages import load_language_in_db # noqa E402
from app.signalr_client import sonarr_signalr_client, radarr_signalr_client # noqa E402
from app.server import webserver, app # noqa E402
from app.announcements import get_announcements_to_file # noqa E402
from utilities.central import stop_bazarr # noqa E402
from literals import EXIT_NORMAL # noqa E402
if args.create_db_revision:
    create_db_revision(app)
    stop_bazarr(EXIT_NORMAL)
else:
    migrate_db(app)
    upgrade_languages_profile_hi_values()
    fix_languages_profiles_with_duplicate_ids()

configure_proxy_func()

get_announcements_to_file()

# Reset the updated flag once Bazarr has been restarted after an update
database.execute(
    update(System)
    .values(updated='0'))

# Load languages in database
load_language_in_db()

update_notifier()

if not args.no_signalr:
    if settings.general.use_sonarr:
        sonarr_signalr_thread = Thread(target=sonarr_signalr_client.start)
        sonarr_signalr_thread.daemon = True
        sonarr_signalr_thread.start()
    if settings.general.use_radarr:
        radarr_signalr_thread = Thread(target=radarr_signalr_client.start)
        radarr_signalr_thread.daemon = True
        radarr_signalr_thread.start()

if __name__ == "__main__":
    webserver.start()

1
bazarr/plex/__init__.py Normal file

@@ -0,0 +1 @@
# coding=utf-8

81
bazarr/plex/operations.py Normal file

@@ -0,0 +1,81 @@
# coding=utf-8
from datetime import datetime
from app.config import settings
from plexapi.server import PlexServer
import logging
logger = logging.getLogger(__name__)
# Constants
DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'
def get_plex_server() -> PlexServer:
    """Connect to the Plex server and return the server instance."""
    try:
        protocol = "https://" if settings.plex.ssl else "http://"
        baseurl = f"{protocol}{settings.plex.ip}:{settings.plex.port}"
        return PlexServer(baseurl, settings.plex.apikey)
    except Exception as e:
        logger.error(f"Failed to connect to Plex server: {e}")
        raise


def update_added_date(video, added_date: str) -> None:
    """Update the added date of a video in Plex."""
    try:
        updates = {"addedAt.value": added_date}
        video.edit(**updates)
        logger.info(f"Updated added date for {video.title} to {added_date}")
    except Exception as e:
        logger.error(f"Failed to update added date for {video.title}: {e}")
        raise
def plex_set_movie_added_date_now(movie_metadata) -> None:
    """
    Update the added date of a movie in Plex to the current datetime.

    :param movie_metadata: Metadata object containing the movie's IMDb ID.
    """
    try:
        plex = get_plex_server()
        library = plex.library.section(settings.plex.movie_library)
        video = library.getGuid(guid=movie_metadata.imdbId)
        current_date = datetime.now().strftime(DATETIME_FORMAT)
        update_added_date(video, current_date)
    except Exception as e:
        logger.error(f"Error in plex_set_movie_added_date_now: {e}")


def plex_set_episode_added_date_now(episode_metadata) -> None:
    """
    Update the added date of a TV episode in Plex to the current datetime.

    :param episode_metadata: Metadata object containing the episode's IMDb ID, season, and episode number.
    """
    try:
        plex = get_plex_server()
        library = plex.library.section(settings.plex.series_library)
        show = library.getGuid(episode_metadata.imdbId)
        episode = show.episode(season=episode_metadata.season, episode=episode_metadata.episode)
        current_date = datetime.now().strftime(DATETIME_FORMAT)
        update_added_date(episode, current_date)
    except Exception as e:
        logger.error(f"Error in plex_set_episode_added_date_now: {e}")
def plex_update_library(is_movie_library: bool) -> None:
    """
    Trigger a library update for the specified library type.

    :param is_movie_library: True for movie library, False for series library.
    """
    try:
        plex = get_plex_server()
        library_name = settings.plex.movie_library if is_movie_library else settings.plex.series_library
        library = plex.library.section(library_name)
        library.update()
        logger.info(f"Triggered update for library: {library_name}")
    except Exception as e:
        logger.error(f"Error in plex_update_library: {e}")


@@ -0,0 +1 @@
# coding=utf-8

Some files were not shown because too many files have changed in this diff.