1
0
mirror of https://github.com/Sonarr/Sonarr.git synced 2026-04-17 21:26:13 -04:00

Compare commits

..

30 Commits

Author SHA1 Message Date
Mark McDowall
da610a1f40 New: Parse 'BEN THE MAN' release group
Closes #7255
2024-09-27 17:27:50 -07:00
Mark McDowall
6d0f10b877 Fixed: Ignore extra spaces in path when not running on Windows
Closes #7251
2024-09-27 17:27:37 -07:00
Mark McDowall
4f0e1c54c1 Fixed: Don't reject revision upgrades if profile doesn't allow upgrades 2024-09-27 17:27:27 -07:00
Bogdan
2f0ca42341 New: Ignore '.DS_Store' and '.unmanic' files 2024-09-27 20:27:17 -04:00
Bogdan
768af433d1 Display naming example errors when all fields are empty 2024-09-27 17:26:47 -07:00
Bogdan
8bf0298227 Fix translation for Custom Colon Replacement label 2024-09-27 17:26:47 -07:00
Robin Dadswell
a7cb264cc8 Fixed: Telegram log message including token 2024-09-27 20:26:29 -04:00
Bogdan
10302323af Fixed: Parsing of Hybrid-Remux as Remux 2024-09-27 20:26:04 -04:00
Mark McDowall
dc1524c64f Fixed: Loading series images after placeholder in Safari 2024-09-27 17:25:30 -07:00
Mark McDowall
4d7a3d0909 New: Errors sending Telegram notifications when links aren't available
Closes #7240
2024-09-27 17:25:21 -07:00
Bogdan
30a52d11aa Fixed: Sorting queue by columns
Sort allowed keys

Co-authored-by: Mark McDowall <markus.mcd5@gmail.com>
2024-09-27 17:25:14 -07:00
Weblate
be4a9e9491 Multiple Translations updated by Weblate
ignore-downstream

Co-authored-by: GkhnGRBZ <gkhn.gurbuz@hotmail.com>
Co-authored-by: Lizandra Candido da Silva <lizandra.c.s@gmail.com>
Co-authored-by: Weblate <noreply@weblate.org>
Co-authored-by: liuwqq <843384478@qq.com>
Translate-URL: https://translate.servarr.com/projects/servarr/sonarr/pt_BR/
Translate-URL: https://translate.servarr.com/projects/servarr/sonarr/tr/
Translate-URL: https://translate.servarr.com/projects/servarr/sonarr/zh_CN/
Translation: Servarr/Sonarr
2024-09-27 17:25:02 -07:00
Sonarr
e196c1be69 Automated API Docs update
ignore-downstream
2024-09-21 10:32:16 -07:00
Mark McDowall
106ffd410c New: Persist sort in Select Episodes modal
Closes #7233
2024-09-21 10:17:09 -07:00
Mark McDowall
c199fd05d3 Fixed: Don't set last write time on episode files if difference is within the same second
Closes #7228
2024-09-21 10:16:59 -07:00
Mark McDowall
75fae9262c Update src/Sonarr.Http/Authentication/AuthenticationBuilderExtensions.cs
Co-authored-by: Bogdan <mynameisbogdan@users.noreply.github.com>
2024-09-21 10:16:52 -07:00
Mark McDowall
faf9173b3b Fixed: Unable to login when instance name contained brackets
Closes #7229
2024-09-21 10:16:52 -07:00
Bogdan
0fa8e24f48 New: Fetch up to 1000 series from Plex Watchlist 2024-09-21 10:16:43 -07:00
Mark McDowall
27da041388 Fixed: Reprocessing manual import items unable to detect sample
Closes #7221
2024-09-21 10:16:24 -07:00
Mark McDowall
ca38a9b577 Fixed: Aggregating media files with 576p resolution 2024-09-21 10:16:17 -07:00
Mark McDowall
4b72a0a4e8 Fixed: Rejections for Custom Format score increment 2024-09-21 10:16:17 -07:00
Mark McDowall
9875e550a8 Fixed: Adding Bluray 576p to some profiles 2024-09-21 10:16:17 -07:00
ManiMatter
c9aa59340c Add 'includeSeries' and 'includeEpisodeFile' to Episode API endpoint 2024-09-21 13:16:05 -04:00
momo
30c36fdc3b Fix description for API key as query parameter 2024-09-21 13:15:51 -04:00
Mark McDowall
3976e5daf7 Fixed: Interactive searches causing multiple requests to indexers 2024-09-21 10:12:13 -07:00
Bogdan
fca8c36156 Guard against using invalid sort keys 2024-09-21 13:12:01 -04:00
Stevie Robinson
85f53e8cb1 New: Parse KCRT as release group
Closes #7214
2024-09-21 13:10:44 -04:00
Weblate
a73a5cc85c Multiple Translations updated by Weblate
ignore-downstream

Co-authored-by: FloatStream <1213193613@qq.com>
Co-authored-by: Havok Dan <havokdan@yahoo.com.br>
Co-authored-by: Weblate <noreply@weblate.org>
Co-authored-by: fordas <fordas15@gmail.com>
Translate-URL: https://translate.servarr.com/projects/servarr/sonarr/es/
Translate-URL: https://translate.servarr.com/projects/servarr/sonarr/pt_BR/
Translate-URL: https://translate.servarr.com/projects/servarr/sonarr/zh_CN/
Translation: Servarr/Sonarr
2024-09-21 10:10:20 -07:00
Mark McDowall
89d730cdfd Fixed: Links for Trakt and TVMaze in Gotify notifications 2024-09-21 10:10:03 -07:00
Treycos
99fc52039f Convert ClipboardButton to TypeScript 2024-09-21 13:09:55 -04:00
58 changed files with 663 additions and 347 deletions

View File

@@ -42,7 +42,9 @@ function FormInputButton(props) {
FormInputButton.propTypes = {
className: PropTypes.string.isRequired,
isLastButton: PropTypes.bool.isRequired,
canSpin: PropTypes.bool.isRequired
canSpin: PropTypes.bool.isRequired,
children: PropTypes.element,
id: PropTypes.string
};
FormInputButton.defaultProps = {

View File

@@ -1,139 +0,0 @@
import Clipboard from 'clipboard';
import PropTypes from 'prop-types';
import React, { Component } from 'react';
import FormInputButton from 'Components/Form/FormInputButton';
import Icon from 'Components/Icon';
import { icons, kinds } from 'Helpers/Props';
import getUniqueElememtId from 'Utilities/getUniqueElementId';
import styles from './ClipboardButton.css';
class ClipboardButton extends Component {
//
// Lifecycle
constructor(props, context) {
super(props, context);
this._id = getUniqueElememtId();
this._successTimeout = null;
this._testResultTimeout = null;
this.state = {
showSuccess: false,
showError: false
};
}
componentDidMount() {
this._clipboard = new Clipboard(`#${this._id}`, {
text: () => this.props.value,
container: document.getElementById(this._id)
});
this._clipboard.on('success', this.onSuccess);
}
componentDidUpdate() {
const {
showSuccess,
showError
} = this.state;
if (showSuccess || showError) {
this._testResultTimeout = setTimeout(this.resetState, 3000);
}
}
componentWillUnmount() {
if (this._clipboard) {
this._clipboard.destroy();
}
if (this._testResultTimeout) {
clearTimeout(this._testResultTimeout);
}
}
//
// Control
resetState = () => {
this.setState({
showSuccess: false,
showError: false
});
};
//
// Listeners
onSuccess = () => {
this.setState({
showSuccess: true
});
};
onError = () => {
this.setState({
showError: true
});
};
//
// Render
render() {
const {
value,
className,
...otherProps
} = this.props;
const {
showSuccess,
showError
} = this.state;
const showStateIcon = showSuccess || showError;
const iconName = showError ? icons.DANGER : icons.CHECK;
const iconKind = showError ? kinds.DANGER : kinds.SUCCESS;
return (
<FormInputButton
id={this._id}
className={className}
{...otherProps}
>
<span className={showStateIcon ? styles.showStateIcon : undefined}>
{
showSuccess &&
<span className={styles.stateIconContainer}>
<Icon
name={iconName}
kind={iconKind}
/>
</span>
}
{
<span className={styles.clipboardIconContainer}>
<Icon name={icons.CLIPBOARD} />
</span>
}
</span>
</FormInputButton>
);
}
}
ClipboardButton.propTypes = {
className: PropTypes.string.isRequired,
value: PropTypes.string.isRequired
};
ClipboardButton.defaultProps = {
className: styles.button
};
export default ClipboardButton;

View File

@@ -0,0 +1,69 @@
import React, { useCallback, useEffect, useState } from 'react';
import FormInputButton from 'Components/Form/FormInputButton';
import Icon from 'Components/Icon';
import { icons, kinds } from 'Helpers/Props';
import { ButtonProps } from './Button';
import styles from './ClipboardButton.css';
export interface ClipboardButtonProps extends Omit<ButtonProps, 'children'> {
value: string;
}
export type ClipboardState = 'success' | 'error' | null;
export default function ClipboardButton({
id,
value,
className = styles.button,
...otherProps
}: ClipboardButtonProps) {
const [state, setState] = useState<ClipboardState>(null);
useEffect(() => {
if (!state) {
return;
}
const timeoutId = setTimeout(() => {
setState(null);
}, 3000);
return () => {
if (timeoutId) {
clearTimeout(timeoutId);
}
};
}, [state]);
const handleClick = useCallback(async () => {
try {
await navigator.clipboard.writeText(value);
setState('success');
} catch (_) {
setState('error');
}
}, [value]);
return (
<FormInputButton
className={className}
onClick={handleClick}
{...otherProps}
>
<span className={state ? styles.showStateIcon : undefined}>
{state ? (
<span className={styles.stateIconContainer}>
<Icon
name={state === 'error' ? icons.DANGER : icons.CHECK}
kind={state === 'error' ? kinds.DANGER : kinds.SUCCESS}
/>
</span>
) : null}
<span className={styles.clipboardIconContainer}>
<Icon name={icons.CLIPBOARD} />
</span>
</span>
</FormInputButton>
);
}

View File

@@ -161,13 +161,12 @@ function InteractiveSearch({ type, searchPayload }: InteractiveSearchProps) {
);
useEffect(() => {
// If search results are not yet isPopulated fetch them,
// otherwise re-show the existing props.
// Only fetch releases if they are not already being fetched and not yet populated.
if (!isPopulated) {
if (!isFetching && !isPopulated) {
dispatch(fetchReleases(searchPayload));
}
}, [isPopulated, searchPayload, dispatch]);
}, [isFetching, isPopulated, searchPayload, dispatch]);
const errorMessage = getErrorMessage(error);

View File

@@ -43,7 +43,7 @@ function SeriesImage({
}: SeriesImageProps) {
const [url, setUrl] = useState<string | null>(null);
const [hasError, setHasError] = useState(false);
const [isLoaded, setIsLoaded] = useState(false);
const [isLoaded, setIsLoaded] = useState(true);
const image = useRef<Image | null>(null);
const handleLoad = useCallback(() => {

View File

@@ -48,7 +48,7 @@ const COLUMNS: Column[] = [
isSortable: true,
},
{
name: 'tvdbid',
name: 'tvdbId',
label: () => translate('TvdbId'),
isVisible: true,
isSortable: true,

View File

@@ -266,7 +266,7 @@ class Naming extends Component {
{
replaceIllegalCharacters && settings.colonReplacementFormat.value === 5 ?
<FormGroup>
<FormLabel>{translate('ColonReplacement')}</FormLabel>
<FormLabel>{translate('CustomColonReplacement')}</FormLabel>
<FormInputGroup
type={inputTypes.TEXT}

View File

@@ -1,4 +1,3 @@
import _ from 'lodash';
import PropTypes from 'prop-types';
import React, { Component } from 'react';
import { connect } from 'react-redux';
@@ -15,11 +14,11 @@ function createMapStateToProps() {
(state) => state.settings.advancedSettings,
(state) => state.settings.namingExamples,
createSettingsSectionSelector(SECTION),
(advancedSettings, examples, sectionSettings) => {
(advancedSettings, namingExamples, sectionSettings) => {
return {
advancedSettings,
examples: examples.item,
examplesPopulated: !_.isEmpty(examples.item),
examples: namingExamples.item,
examplesPopulated: namingExamples.isPopulated,
...sectionSettings
};
}

View File

@@ -24,6 +24,11 @@ export const defaultState = {
items: []
};
export const persistState = [
'episodeSelection.sortKey',
'episodeSelection.sortDirection'
];
//
// Actions Types
@@ -54,7 +59,9 @@ export const reducers = createHandleActions({
[CLEAR_EPISODES]: (state) => {
return updateSectionState(state, section, {
...defaultState
...defaultState,
sortKey: state.sortKey,
sortDirection: state.sortDirection
});
}

View File

@@ -110,7 +110,6 @@ export const defaultState = {
{
name: 'actions',
columnLabel: () => translate('Actions'),
isSortable: true,
isVisible: true,
isModifiable: false
}

View File

@@ -1,7 +1,9 @@
let i = 0;
// returns a HTML 4.0 compliant element IDs (http://stackoverflow.com/a/79022)
/**
* @deprecated Use React's useId() instead
* @returns An HTML 4.0 compliant element IDs (http://stackoverflow.com/a/79022)
*/
export default function getUniqueElementId() {
return `id-${i++}`;
}

View File

@@ -33,7 +33,6 @@
"@types/react": "18.2.79",
"@types/react-dom": "18.2.25",
"classnames": "2.3.2",
"clipboard": "2.0.11",
"connected-react-router": "6.9.3",
"element-class": "0.2.2",
"filesize": "10.0.7",

View File

@@ -90,6 +90,10 @@ namespace NzbDrone.Common.Test.InstrumentationTests
[TestCase(@"https://discord.com/api/webhooks/mySecret")]
[TestCase(@"https://discord.com/api/webhooks/mySecret/01233210")]
// Telegram
[TestCase(@"https://api.telegram.org/bot1234567890:mySecret/sendmessage: chat_id=123456&parse_mode=HTML&text=<text>")]
[TestCase(@"https://api.telegram.org/bot1234567890:mySecret/")]
public void should_clean_message(string message)
{
var cleansedMessage = CleanseLogMessage.Cleanse(message);

View File

@@ -1,4 +1,4 @@
using System;
using System;
namespace NzbDrone.Common.Extensions
{
@@ -38,5 +38,10 @@ namespace NzbDrone.Common.Extensions
{
return dateTime >= afterDateTime && dateTime <= beforeDateTime;
}
public static DateTime WithoutTicks(this DateTime dateTime)
{
return dateTime.AddTicks(-(dateTime.Ticks % TimeSpan.TicksPerSecond));
}
}
}

View File

@@ -147,14 +147,14 @@ namespace NzbDrone.Common.Extensions
return false;
}
if (path.Trim() != path)
{
return false;
}
// Only check for leading or trailing spaces for path when running on Windows.
if (OsInfo.IsWindows)
{
if (path.Trim() != path)
{
return false;
}
var directoryInfo = new DirectoryInfo(path);
while (directoryInfo != null)

View File

@@ -54,7 +54,10 @@ namespace NzbDrone.Common.Instrumentation
new (@"api/v[0-9]/notification/sonarr/(?<secret>[\w-]+)", RegexOptions.Compiled | RegexOptions.IgnoreCase),
// Discord
new (@"discord.com/api/webhooks/((?<secret>[\w-]+)/)?(?<secret>[\w-]+)", RegexOptions.Compiled | RegexOptions.IgnoreCase)
new (@"discord.com/api/webhooks/((?<secret>[\w-]+)/)?(?<secret>[\w-]+)", RegexOptions.Compiled | RegexOptions.IgnoreCase),
// Telegram
new (@"api.telegram.org/bot(?<id>[\d]+):(?<secret>[\w-]+)/", RegexOptions.Compiled | RegexOptions.IgnoreCase)
};
private static readonly Regex CleanseRemoteIPRegex = new (@"(?:Auth-\w+(?<!Failure|Unauthorized) ip|from) (\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})", RegexOptions.Compiled);

View File

@@ -1,41 +0,0 @@
using System.Linq;
using FluentAssertions;
using NUnit.Framework;
using NzbDrone.Core.Datastore.Migration;
using NzbDrone.Core.Qualities;
using NzbDrone.Core.Test.Framework;
namespace NzbDrone.Core.Test.Datastore.Migration
{
[TestFixture]
public class add_bluray576p_in_profileFixture : MigrationTest<add_blurary576p_quality_in_profiles>
{
private string GenerateQualityJson(int quality, bool allowed)
{
return $"{{ \"quality\": {quality}, \"allowed\": {allowed.ToString().ToLowerInvariant()} }}";
}
[Test]
public void should_add_bluray576p_to_old_profile()
{
var db = WithMigrationTestDb(c =>
{
c.Insert.IntoTable("QualityProfiles").Row(new
{
Id = 0,
Name = "Bluray",
Cutoff = 7,
Items = $"[{GenerateQualityJson((int)Quality.DVD, true)}, {GenerateQualityJson((int)Quality.Bluray480p, true)}, {GenerateQualityJson((int)Quality.Bluray720p, false)}]"
});
});
var profiles = db.Query<Profile122>("SELECT \"Items\" FROM \"QualityProfiles\" LIMIT 1");
var items = profiles.First().Items;
items.Should().HaveCount(4);
items.Select(v => v.Quality).Should().Equal((int)Quality.DVD, (int)Quality.Bluray480p, (int)Quality.Bluray576p, (int)Quality.Bluray720p);
items.Select(v => v.Allowed).Should().Equal(true, true, true, false);
items.Select(v => v.Name).Should().Equal(null, null, null, null);
}
}
}

View File

@@ -0,0 +1,141 @@
using System.Linq;
using FluentAssertions;
using NUnit.Framework;
using NzbDrone.Core.Datastore.Migration;
using NzbDrone.Core.Qualities;
using NzbDrone.Core.Test.Framework;
namespace NzbDrone.Core.Test.Datastore.Migration
{
[TestFixture]
public class add_blurary576p_quality_in_profiles_with_grouped_blurary480pFixture : MigrationTest<add_blurary576p_quality_in_profiles_with_grouped_blurary480p>
{
private string GenerateQualityJson(int quality, bool allowed)
{
return $"{{ \"quality\": {quality}, \"allowed\": {allowed.ToString().ToLowerInvariant()} }}";
}
private string GenerateQualityGroupJson(int id, string name, int[] qualities, bool allowed)
{
return $"{{ \"id\": {id}, \"name\": \"{name}\", \"items\": [{string.Join(", ", qualities.Select(q => $"{{ \"quality\": {q} }}"))}], \"allowed\": {allowed.ToString().ToLowerInvariant()} }}";
}
[Test]
public void should_add_bluray576p_to_old_profile()
{
var db = WithMigrationTestDb(c =>
{
c.Insert.IntoTable("QualityProfiles").Row(new
{
Id = 0,
Name = "Bluray",
Cutoff = 7,
Items = $"[{GenerateQualityJson((int)Quality.DVD, true)}, {GenerateQualityJson((int)Quality.Bluray480p, true)}, {GenerateQualityJson((int)Quality.Bluray720p, false)}]"
});
});
var profiles = db.Query<Profile122>("SELECT \"Items\" FROM \"QualityProfiles\" LIMIT 1");
var items = profiles.First().Items;
items.Should().HaveCount(4);
items.Select(v => v.Quality).Should().Equal((int)Quality.DVD, (int)Quality.Bluray480p, (int)Quality.Bluray576p, (int)Quality.Bluray720p);
items.Select(v => v.Allowed).Should().Equal(true, true, true, false);
items.Select(v => v.Name).Should().Equal(null, null, null, null);
}
[Test]
public void should_not_allow_bluray576p_if_blurary480p_not_allowed()
{
var db = WithMigrationTestDb(c =>
{
c.Insert.IntoTable("QualityProfiles").Row(new
{
Id = 0,
Name = "Bluray",
Cutoff = 7,
Items = $"[{GenerateQualityJson((int)Quality.DVD, true)}, {GenerateQualityJson((int)Quality.Bluray480p, false)}, {GenerateQualityJson((int)Quality.Bluray720p, false)}]"
});
});
var profiles = db.Query<Profile122>("SELECT \"Items\" FROM \"QualityProfiles\" LIMIT 1");
var items = profiles.First().Items;
items.Should().HaveCount(4);
items.Select(v => v.Quality).Should().Equal((int)Quality.DVD, (int)Quality.Bluray480p, (int)Quality.Bluray576p, (int)Quality.Bluray720p);
items.Select(v => v.Allowed).Should().Equal(true, false, false, false);
items.Select(v => v.Name).Should().Equal(null, null, null, null);
}
[Test]
public void should_add_bluray576p_to_old_profile_with_grouped_bluray_480p()
{
var db = WithMigrationTestDb(c =>
{
c.Insert.IntoTable("QualityProfiles").Row(new
{
Id = 0,
Name = "Bluray",
Cutoff = 7,
Items = $"[{GenerateQualityGroupJson(1000, "DVD", new[] { (int)Quality.DVD, (int)Quality.Bluray480p }, true)}, {GenerateQualityJson((int)Quality.Bluray720p, false)}]"
});
});
var profiles = db.Query<Profile122>("SELECT \"Items\" FROM \"QualityProfiles\" LIMIT 1");
var items = profiles.First().Items;
items.Should().HaveCount(3);
items.Select(v => v.Quality).Should().Equal(null, (int)Quality.Bluray576p, (int)Quality.Bluray720p);
items.Select(v => v.Id).Should().Equal(1000, 0, 0);
items.Select(v => v.Allowed).Should().Equal(true, true, false);
items.Select(v => v.Name).Should().Equal("DVD", null, null);
}
[Test]
public void should_not_add_bluray576p_to_profile_with_bluray_576p()
{
var db = WithMigrationTestDb(c =>
{
c.Insert.IntoTable("QualityProfiles").Row(new
{
Id = 0,
Name = "Bluray",
Cutoff = 7,
Items = $"[{GenerateQualityJson((int)Quality.DVD, true)}, {GenerateQualityJson((int)Quality.Bluray480p, false)}, {GenerateQualityJson((int)Quality.Bluray576p, false)}, {GenerateQualityJson((int)Quality.Bluray720p, false)}]"
});
});
var profiles = db.Query<Profile122>("SELECT \"Items\" FROM \"QualityProfiles\" LIMIT 1");
var items = profiles.First().Items;
items.Should().HaveCount(4);
items.Select(v => v.Quality).Should().Equal((int)Quality.DVD, (int)Quality.Bluray480p, (int)Quality.Bluray576p, (int)Quality.Bluray720p);
items.Select(v => v.Allowed).Should().Equal(true, false, false, false);
items.Select(v => v.Name).Should().Equal(null, null, null, null);
}
[Test]
public void should_not_add_bluray576p_to_profile_with_grouped_bluray_576p()
{
var db = WithMigrationTestDb(c =>
{
c.Insert.IntoTable("QualityProfiles").Row(new
{
Id = 0,
Name = "Bluray",
Cutoff = 7,
Items = $"[{GenerateQualityGroupJson(1000, "DVD", new[] { (int)Quality.DVD, (int)Quality.Bluray480p, (int)Quality.Bluray576p }, true)}, {GenerateQualityJson((int)Quality.Bluray720p, false)}]"
});
});
var profiles = db.Query<Profile122>("SELECT \"Items\" FROM \"QualityProfiles\" LIMIT 1");
var items = profiles.First().Items;
items.Should().HaveCount(2);
items.Select(v => v.Quality).Should().Equal(null, (int)Quality.Bluray720p);
items.Select(v => v.Id).Should().Equal(1000, 0);
items.Select(v => v.Allowed).Should().Equal(true, false);
items.Select(v => v.Name).Should().Equal("DVD", null);
items.First().Items.Select(v => v.Quality).Should().Equal((int)Quality.DVD, (int)Quality.Bluray480p, (int)Quality.Bluray576p);
}
}
}

View File

@@ -206,5 +206,19 @@ namespace NzbDrone.Core.Test.DecisionEngineTests
new List<CustomFormat>())
.Should().BeTrue();
}
[Test]
public void should_returntrue_when_quality_is_revision_upgrade_for_same_quality()
{
_qualityProfile.UpgradeAllowed = false;
Subject.IsUpgradeAllowed(
_qualityProfile,
new QualityModel(Quality.DVD, new Revision(1)),
new List<CustomFormat> { _customFormatOne },
new QualityModel(Quality.DVD, new Revision(2)),
new List<CustomFormat> { _customFormatOne })
.Should().BeTrue();
}
}
}

View File

@@ -9,6 +9,8 @@ using NzbDrone.Common.Extensions;
using NzbDrone.Core.Configuration;
using NzbDrone.Core.MediaFiles;
using NzbDrone.Core.MediaFiles.EpisodeImport;
using NzbDrone.Core.MediaFiles.Events;
using NzbDrone.Core.Messaging.Events;
using NzbDrone.Core.RootFolders;
using NzbDrone.Core.Test.Framework;
using NzbDrone.Core.Tv;
@@ -457,5 +459,27 @@ namespace NzbDrone.Core.Test.MediaFiles.DiskScanServiceTests
Mocker.GetMock<IMakeImportDecision>()
.Verify(v => v.GetImportDecisions(It.Is<List<string>>(l => l.Count == 1), _series, false), Times.Once());
}
[Test]
public void should_not_scan_excluded_files()
{
GivenSeriesFolder();
GivenFiles(new List<string>
{
Path.Combine(_series.Path, ".DS_Store").AsOsAgnostic(),
Path.Combine(_series.Path, ".unmanic").AsOsAgnostic(),
Path.Combine(_series.Path, ".unmanic.part").AsOsAgnostic(),
Path.Combine(_series.Path, "24 The Status Quo Combustion.mkv").AsOsAgnostic()
});
Subject.Scan(_series);
Mocker.GetMock<IMakeImportDecision>()
.Verify(v => v.GetImportDecisions(It.Is<List<string>>(l => l.Count == 1), _series, false), Times.Once());
Mocker.GetMock<IEventAggregator>()
.Verify(v => v.PublishEvent(It.Is<SeriesScannedEvent>(c => c.Series != null && c.PossibleExtraFiles.Count == 0)), Times.Once());
}
}
}

View File

@@ -170,5 +170,41 @@ namespace NzbDrone.Core.Test.MediaFiles.EpisodeImport.Aggregation.Aggregators
result.Quality.Revision.Version.Should().Be(2);
result.Quality.RevisionDetectionSource.Should().Be(QualityDetectionSource.Name);
}
[Test]
public void should_return_Bluray576p_when_Bluray_came_from_name_and_mediainfo_indicates_576p()
{
_nameAugmenter.Setup(s => s.AugmentQuality(It.IsAny<LocalEpisode>(), It.IsAny<DownloadClientItem>()))
.Returns(new AugmentQualityResult(QualitySource.Bluray, Confidence.Default, 480, Confidence.Default, new Revision(0), Confidence.Tag));
_mediaInfoAugmenter.Setup(s => s.AugmentQuality(It.IsAny<LocalEpisode>(), It.IsAny<DownloadClientItem>()))
.Returns(AugmentQualityResult.ResolutionOnly(576, Confidence.MediaInfo));
GivenAugmenters(_nameAugmenter, _mediaInfoAugmenter);
var result = Subject.Aggregate(new LocalEpisode(), null);
result.Quality.SourceDetectionSource.Should().Be(QualityDetectionSource.Name);
result.Quality.ResolutionDetectionSource.Should().Be(QualityDetectionSource.MediaInfo);
result.Quality.Quality.Should().Be(Quality.Bluray576p);
}
[Test]
public void should_return_SDTV_when_HDTV_came_from_name_and_mediainfo_indicates_576p()
{
_nameAugmenter.Setup(s => s.AugmentQuality(It.IsAny<LocalEpisode>(), It.IsAny<DownloadClientItem>()))
.Returns(new AugmentQualityResult(QualitySource.Television, Confidence.Default, 480, Confidence.Default, new Revision(0), Confidence.Tag));
_mediaInfoAugmenter.Setup(s => s.AugmentQuality(It.IsAny<LocalEpisode>(), It.IsAny<DownloadClientItem>()))
.Returns(AugmentQualityResult.ResolutionOnly(576, Confidence.MediaInfo));
GivenAugmenters(_nameAugmenter, _mediaInfoAugmenter);
var result = Subject.Aggregate(new LocalEpisode(), null);
result.Quality.SourceDetectionSource.Should().Be(QualityDetectionSource.Name);
result.Quality.ResolutionDetectionSource.Should().Be(QualityDetectionSource.MediaInfo);
result.Quality.Quality.Should().Be(Quality.SDTV);
}
}
}

View File

@@ -47,6 +47,8 @@ namespace NzbDrone.Core.Test.MediaFiles.EpisodeImport.Aggregation.Aggregators.Au
[TestCase(1490, 1, 720)]
[TestCase(1280, 1, 720)] // HD
[TestCase(1200, 1, 720)]
[TestCase(1000, 1, 576)]
[TestCase(720, 576, 576)]
[TestCase(800, 1, 480)]
[TestCase(720, 1, 480)] // SDTV
[TestCase(600, 1, 480)]
@@ -108,5 +110,25 @@ namespace NzbDrone.Core.Test.MediaFiles.EpisodeImport.Aggregation.Aggregators.Au
result.Resolution.Should().Be(1080);
result.Source.Should().Be(QualitySource.Unknown);
}
[Test]
public void should_include_source_for_576_if_extracted_from_title()
{
var mediaInfo = Builder<MediaInfoModel>.CreateNew()
.With(m => m.Width = 1024)
.With(m => m.Height = 576)
.With(m => m.Title = "Series.Title.S01E05.Bluray.x264-Sonarr")
.Build();
var localEpisode = Builder<LocalEpisode>.CreateNew()
.With(l => l.MediaInfo = mediaInfo)
.Build();
var result = Subject.AugmentQuality(localEpisode, null);
result.Should().NotBe(null);
result.Resolution.Should().Be(576);
result.Source.Should().Be(QualitySource.Bluray);
}
}
}

View File

@@ -352,6 +352,8 @@ namespace NzbDrone.Core.Test.ParserTests
[TestCase("Series Title Season 2 (BDRemux 1080p HEVC FLAC) [Netaro]", false)]
[TestCase("[Vodes] Series Title - Other Title (2020) [BDRemux 1080p HEVC Dual-Audio]", false)]
[TestCase("Adventures.of.Sonic.the.Hedgehog.S01E01.Best.Hedgehog.1080p.DD.2.0.AVC.REMUX-FraMeSToR", false)]
[TestCase("Series Title S01 2018 1080p BluRay Hybrid-REMUX AVC TRUEHD 5.1 Dual Audio-ZR-", false)]
[TestCase("Series.Title.S01.2018.1080p.BluRay.Hybrid-REMUX.AVC.TRUEHD.5.1.Dual.Audio-ZR-", false)]
public void should_parse_bluray1080p_remux_quality(string title, bool proper)
{
ParseAndVerifyQuality(title, Quality.Bluray1080pRemux, proper);
@@ -373,6 +375,8 @@ namespace NzbDrone.Core.Test.ParserTests
[TestCase("Series.Title.2x11.Nato.Per.The.Sonarr.Bluray.Remux.AVC.2160p.AC3.ITA", false)]
[TestCase("[Dolby Vision] Sonarr.of.Series.S07.MULTi.UHD.BLURAY.REMUX.DV-NoTag", false)]
[TestCase("Adventures.of.Sonic.the.Hedgehog.S01E01.Best.Hedgehog.2160p.DD.2.0.AVC.REMUX-FraMeSToR", false)]
[TestCase("Series Title S01 2018 2160p BluRay Hybrid-REMUX AVC TRUEHD 5.1 Dual Audio-ZR-", false)]
[TestCase("Series.Title.S01.2018.2160p.BluRay.Hybrid-REMUX.AVC.TRUEHD.5.1.Dual.Audio-ZR-", false)]
public void should_parse_bluray2160p_remux_quality(string title, bool proper)
{
ParseAndVerifyQuality(title, Quality.Bluray2160pRemux, proper);

View File

@@ -88,6 +88,8 @@ namespace NzbDrone.Core.Test.ParserTests
[TestCase("Series Title S01 1080p Blu-ray Remux AVC FLAC 2.0 - KRaLiMaRKo", "KRaLiMaRKo")]
[TestCase("Series Title S01 1080p Blu-ray Remux AVC DTS-HD MA 2.0 - BluDragon", "BluDragon")]
[TestCase("Example (2013) S01E01 (1080p iP WEBRip x265 SDR AAC 2.0 English - DarQ)", "DarQ")]
[TestCase("Series.Title.S08E03.720p.WEB.DL.AAC2.0.H.264.KCRT", "KCRT")]
[TestCase("S02E05 2160p WEB-DL DV HDR ENG DDP5.1 Atmos H265 MP4-BEN THE MAN", "BEN THE MAN")]
public void should_parse_exception_release_group(string title, string expected)
{
Parser.Parser.ParseReleaseGroup(title).Should().Be(expected);

View File

@@ -0,0 +1,14 @@
using FluentMigrator;
using NzbDrone.Core.Datastore.Migration.Framework;
namespace NzbDrone.Core.Datastore.Migration
{
[Migration(214)]
public class add_blurary576p_quality_in_profiles : NzbDroneMigrationBase
{
protected override void MainDbUpgrade()
{
// Replaced with 215
}
}
}

View File

@@ -9,8 +9,8 @@ using NzbDrone.Core.Datastore.Migration.Framework;
namespace NzbDrone.Core.Datastore.Migration
{
[Migration(214)]
public class add_blurary576p_quality_in_profiles : NzbDroneMigrationBase
[Migration(215)]
public class add_blurary576p_quality_in_profiles_with_grouped_blurary480p : NzbDroneMigrationBase
{
protected override void MainDbUpgrade()
{
@@ -19,46 +19,46 @@ namespace NzbDrone.Core.Datastore.Migration
private void ConvertProfile(IDbConnection conn, IDbTransaction tran)
{
var updater = new ProfileUpdater214(conn, tran);
var updater = new ProfileUpdater215(conn, tran);
updater.InsertQualityAfter(13, 22); // Group Bluray576p with Bluray480p
updater.Commit();
}
}
public class Profile214
public class Profile215
{
public int Id { get; set; }
public string Name { get; set; }
public int Cutoff { get; set; }
public List<ProfileItem214> Items { get; set; }
public List<ProfileItem215> Items { get; set; }
}
public class ProfileItem214
public class ProfileItem215
{
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)]
public int Id { get; set; }
public string Name { get; set; }
public int? Quality { get; set; }
public List<ProfileItem214> Items { get; set; }
public List<ProfileItem215> Items { get; set; }
public bool Allowed { get; set; }
public ProfileItem214()
public ProfileItem215()
{
Items = new List<ProfileItem214>();
Items = new List<ProfileItem215>();
}
}
public class ProfileUpdater214
public class ProfileUpdater215
{
private readonly IDbConnection _connection;
private readonly IDbTransaction _transaction;
private List<Profile214> _profiles;
private HashSet<Profile214> _changedProfiles = new HashSet<Profile214>();
private List<Profile215> _profiles;
private HashSet<Profile215> _changedProfiles = new HashSet<Profile215>();
public ProfileUpdater214(IDbConnection conn, IDbTransaction tran)
public ProfileUpdater215(IDbConnection conn, IDbTransaction tran)
{
_connection = conn;
_transaction = tran;
@@ -86,11 +86,17 @@ namespace NzbDrone.Core.Datastore.Migration
{
foreach (var profile in _profiles)
{
var findIndex = profile.Items.FindIndex(v => v.Quality == find);
// Don't update if Bluray 576p was already added to the profile in 214
if (profile.Items.FindIndex(v => v.Quality == quality || v.Items.Any(i => i.Quality == quality)) > -1)
{
continue;
}
var findIndex = profile.Items.FindIndex(v => v.Quality == find || v.Items.Any(i => i.Quality == find));
if (findIndex > -1)
{
profile.Items.Insert(findIndex + 1, new ProfileItem214
profile.Items.Insert(findIndex + 1, new ProfileItem215
{
Quality = quality,
Allowed = profile.Items[findIndex].Allowed
@@ -101,9 +107,9 @@ namespace NzbDrone.Core.Datastore.Migration
}
}
private List<Profile214> GetProfiles()
private List<Profile215> GetProfiles()
{
var profiles = new List<Profile214>();
var profiles = new List<Profile215>();
using (var getProfilesCmd = _connection.CreateCommand())
{
@@ -114,12 +120,12 @@ namespace NzbDrone.Core.Datastore.Migration
{
while (profileReader.Read())
{
profiles.Add(new Profile214
profiles.Add(new Profile215
{
Id = profileReader.GetInt32(0),
Name = profileReader.GetString(1),
Cutoff = profileReader.GetInt32(2),
Items = Json.Deserialize<List<ProfileItem214>>(profileReader.GetString(3))
Items = Json.Deserialize<List<ProfileItem215>>(profileReader.GetString(3))
});
}
}

View File

@@ -79,19 +79,22 @@ namespace NzbDrone.Core.DecisionEngine.Specifications
switch (upgradeableRejectReason)
{
case UpgradeableRejectReason.BetterQuality:
return Decision.Reject("Release in queue on disk is of equal or higher preference: {0}", remoteEpisode.ParsedEpisodeInfo.Quality);
return Decision.Reject("Release in queue is of equal or higher preference: {0}", remoteEpisode.ParsedEpisodeInfo.Quality);
case UpgradeableRejectReason.BetterRevision:
return Decision.Reject("Release in queue on disk is of equal or higher revision: {0}", remoteEpisode.ParsedEpisodeInfo.Quality.Revision);
return Decision.Reject("Release in queue is of equal or higher revision: {0}", remoteEpisode.ParsedEpisodeInfo.Quality.Revision);
case UpgradeableRejectReason.QualityCutoff:
return Decision.Reject("Release in queue on disk meets quality cutoff: {0}", qualityProfile.Items[qualityProfile.GetIndex(qualityProfile.Cutoff).Index]);
return Decision.Reject("Release in queue meets quality cutoff: {0}", qualityProfile.Items[qualityProfile.GetIndex(qualityProfile.Cutoff).Index]);
case UpgradeableRejectReason.CustomFormatCutoff:
return Decision.Reject("Release in queue on disk meets Custom Format cutoff: {0}", qualityProfile.CutoffFormatScore);
return Decision.Reject("Release in queue meets Custom Format cutoff: {0}", qualityProfile.CutoffFormatScore);
case UpgradeableRejectReason.CustomFormatScore:
return Decision.Reject("Release in queue on disk has an equal or higher custom format score: {0}", qualityProfile.CalculateCustomFormatScore(queuedItemCustomFormats));
return Decision.Reject("Release in queue has an equal or higher Custom Format score: {0}", qualityProfile.CalculateCustomFormatScore(queuedItemCustomFormats));
case UpgradeableRejectReason.MinCustomFormatScore:
return Decision.Reject("Release in queue has Custom Format score within Custom Format score increment: {0}", qualityProfile.MinUpgradeFormatScore);
}
_logger.Debug("Checking if profiles allow upgrading. Queued: {0}", remoteEpisode.ParsedEpisodeInfo.Quality);

View File

@@ -93,6 +93,7 @@ namespace NzbDrone.Core.DecisionEngine.Specifications.RssSync
{
case UpgradeableRejectReason.None:
continue;
case UpgradeableRejectReason.BetterQuality:
return Decision.Reject("{0} grab event in history is of equal or higher preference: {1}", rejectionSubject, mostRecent.Quality);
@@ -106,7 +107,10 @@ namespace NzbDrone.Core.DecisionEngine.Specifications.RssSync
return Decision.Reject("{0} grab event in history meets Custom Format cutoff: {1}", rejectionSubject, qualityProfile.CutoffFormatScore);
case UpgradeableRejectReason.CustomFormatScore:
return Decision.Reject("{0} grab event in history has an equal or higher custom format score: {1}", rejectionSubject, qualityProfile.CalculateCustomFormatScore(customFormats));
return Decision.Reject("{0} grab event in history has an equal or higher Custom Format score: {1}", rejectionSubject, qualityProfile.CalculateCustomFormatScore(customFormats));
case UpgradeableRejectReason.MinCustomFormatScore:
return Decision.Reject("{0} grab event in history has Custom Format score within Custom Format score increment: {1}", rejectionSubject, qualityProfile.MinUpgradeFormatScore);
}
}
}

View File

@@ -178,6 +178,12 @@ namespace NzbDrone.Core.DecisionEngine.Specifications
var isQualityUpgrade = new QualityModelComparer(qualityProfile).Compare(newQuality, currentQuality) > 0;
var isCustomFormatUpgrade = qualityProfile.CalculateCustomFormatScore(newCustomFormats) > qualityProfile.CalculateCustomFormatScore(currentCustomFormats);
if (IsRevisionUpgrade(currentQuality, newQuality))
{
_logger.Debug("New quality '{0}' is a revision upgrade for '{1}'", newQuality, currentQuality);
return true;
}
if ((isQualityUpgrade || isCustomFormatUpgrade) && qualityProfile.UpgradeAllowed)
{
_logger.Debug("Quality profile allows upgrading");

View File

@@ -63,6 +63,7 @@ namespace NzbDrone.Core.DecisionEngine.Specifications
{
case UpgradeableRejectReason.None:
continue;
case UpgradeableRejectReason.BetterQuality:
return Decision.Reject("Existing file on disk is of equal or higher preference: {0}", file.Quality);
@@ -76,10 +77,10 @@ namespace NzbDrone.Core.DecisionEngine.Specifications
return Decision.Reject("Existing file on disk meets Custom Format cutoff: {0}", qualityProfile.CutoffFormatScore);
case UpgradeableRejectReason.CustomFormatScore:
return Decision.Reject("Existing file on disk has a equal or higher custom format score: {0}", qualityProfile.CalculateCustomFormatScore(customFormats));
return Decision.Reject("Existing file on disk has a equal or higher Custom Format score: {0}", qualityProfile.CalculateCustomFormatScore(customFormats));
case UpgradeableRejectReason.MinCustomFormatScore:
return Decision.Reject("Existing file differential between new release does not meet minimum Custom Format score increment: {0}", qualityProfile.MinFormatScore);
return Decision.Reject("Existing file on disk has Custom Format score within Custom Format score increment: {0}", qualityProfile.MinUpgradeFormatScore);
}
}

View File

@@ -14,11 +14,15 @@ namespace NzbDrone.Core.ImportLists.Plex
{
public class PlexImport : HttpImportListBase<PlexListSettings>
{
public readonly IPlexTvService _plexTvService;
public override string Name => _localizationService.GetLocalizedString("ImportListsPlexSettingsWatchlistName");
public override ImportListType ListType => ImportListType.Plex;
public override TimeSpan MinRefreshInterval => TimeSpan.FromHours(6);
public override int PageSize => 100;
public override TimeSpan RateLimit => TimeSpan.FromSeconds(5);
private readonly IPlexTvService _plexTvService;
public PlexImport(IPlexTvService plexTvService,
IHttpClient httpClient,
IImportListStatusService importListStatusService,
@@ -31,15 +35,10 @@ namespace NzbDrone.Core.ImportLists.Plex
_plexTvService = plexTvService;
}
public override string Name => _localizationService.GetLocalizedString("ImportListsPlexSettingsWatchlistName");
public override int PageSize => 50;
public override ImportListFetchResult Fetch()
{
Settings.Validate().Filter("AccessToken").ThrowOnError();
// var generator = GetRequestGenerator();
return FetchItems(g => g.GetListItems());
}
@@ -50,10 +49,7 @@ namespace NzbDrone.Core.ImportLists.Plex
public override IImportListRequestGenerator GetRequestGenerator()
{
return new PlexListRequestGenerator(_plexTvService, PageSize)
{
Settings = Settings
};
return new PlexListRequestGenerator(_plexTvService, Settings, PageSize);
}
public override object RequestAction(string action, IDictionary<string, string> query)

View File

@@ -5,13 +5,16 @@ namespace NzbDrone.Core.ImportLists.Plex
{
public class PlexListRequestGenerator : IImportListRequestGenerator
{
private readonly IPlexTvService _plexTvService;
private readonly int _pageSize;
public PlexListSettings Settings { get; set; }
private const int MaxPages = 10;
public PlexListRequestGenerator(IPlexTvService plexTvService, int pageSize)
private readonly IPlexTvService _plexTvService;
private readonly PlexListSettings _settings;
private readonly int _pageSize;
public PlexListRequestGenerator(IPlexTvService plexTvService, PlexListSettings settings, int pageSize)
{
_plexTvService = plexTvService;
_settings = settings;
_pageSize = pageSize;
}
@@ -26,11 +29,9 @@ namespace NzbDrone.Core.ImportLists.Plex
private IEnumerable<ImportListRequest> GetSeriesRequest()
{
var maxPages = 10;
for (var page = 0; page < maxPages; page++)
for (var page = 0; page < MaxPages; page++)
{
yield return new ImportListRequest(_plexTvService.GetWatchlist(Settings.AccessToken, _pageSize, page * _pageSize));
yield return new ImportListRequest(_plexTvService.GetWatchlist(_settings.AccessToken, _pageSize, page * _pageSize));
}
}
}

View File

@@ -78,7 +78,7 @@
"Torrents": "Torrents",
"Ui": "Interfaz",
"Underscore": "Guion bajo",
"UpdateMechanismHelpText": "Usar el actualizador integrado de {appName} o un script",
"UpdateMechanismHelpText": "Usa el actualizador integrado de {appName} o un script",
"Warn": "Advertencia",
"AutoTagging": "Etiquetado Automático",
"AddAutoTag": "Añadir etiqueta automática",
@@ -2112,5 +2112,12 @@
"CountCustomFormatsSelected": "{count} formato(s) personalizado(s) seleccionado(s)",
"LastSearched": "Último buscado",
"CustomFormatsSpecificationExceptLanguageHelpText": "Coincide si cualquier idioma distinto del seleccionado está presente",
"CustomFormatsSpecificationExceptLanguage": "Excepto idioma"
"CustomFormatsSpecificationExceptLanguage": "Excepto idioma",
"MinimumCustomFormatScoreIncrement": "Incremento mínimo de puntuación de formato personalizado",
"MinimumCustomFormatScoreIncrementHelpText": "Mejora mínima requerida de la puntuación de formato personalizado entre los lanzamientos existentes y nuevos antes de que {appName} lo considere una actualización",
"NotificationsGotifySettingsMetadataLinks": "Enlaces de metadatos",
"NotificationsGotifySettingsMetadataLinksHelpText": "Añade un enlace a los metadatos de la serie cuando se envían notificaciones",
"NotificationsGotifySettingsPreferredMetadataLink": "Enlace de metadatos preferido",
"NotificationsGotifySettingsPreferredMetadataLinkHelpText": "Enlace de metadatos para clientes que solo soportan un único enlace",
"SkipFreeSpaceCheckHelpText": "Se usa cuando {appName} no puede detectar el espacio libre de tu carpeta raíz"
}

View File

@@ -365,7 +365,7 @@
"RejectionCount": "Número de rejeição",
"SubtitleLanguages": "Idiomas das Legendas",
"UnmonitoredOnly": "Somente Não Monitorados",
"AddAutoTag": "Adicionar Tag Automática",
"AddAutoTag": "Adicionar tag automática",
"AddCondition": "Adicionar Condição",
"Conditions": "Condições",
"CloneAutoTag": "Clonar Tag Automática",
@@ -2110,5 +2110,10 @@
"ManageCustomFormats": "Gerenciar formatos personalizados",
"NoCustomFormatsFound": "Nenhum formato personalizado encontrado",
"CountCustomFormatsSelected": "{count} formato(s) personalizado(s) selecionado(s)",
"LastSearched": "Última Pesquisa"
"LastSearched": "Última Pesquisa",
"SkipFreeSpaceCheckHelpText": "Usar quando {appName} não consegue detectar espaço livre em sua pasta raiz",
"CustomFormatsSpecificationExceptLanguage": "Exceto Idioma",
"CustomFormatsSpecificationExceptLanguageHelpText": "Corresponde se qualquer idioma diferente do idioma selecionado estiver presente",
"MinimumCustomFormatScoreIncrement": "Incremento Mínimo da Pontuação de Formato Personalizado",
"MinimumCustomFormatScoreIncrementHelpText": "Melhoria mínima necessária da pontuação do formato personalizado entre versões existentes e novas antes que {appName} considere isso uma atualização"
}

View File

@@ -855,5 +855,17 @@
"LogSizeLimitHelpText": "Arşivlemeden önce MB cinsinden maksimum log dosya boyutu. Varsayılan 1 MB'tır.",
"ProgressBarProgress": "İlerleme Çubuğu %{progress} seviyesinde",
"CountVotes": "{votes} oy",
"UpdateAvailableHealthCheckMessage": "Yeni güncelleme mevcut: {version}"
"UpdateAvailableHealthCheckMessage": "Yeni güncelleme mevcut: {version}",
"MinimumCustomFormatScoreIncrement": "Minimum Özel Format Puanı Artışı",
"MinimumCustomFormatScoreIncrementHelpText": "{appName}'in bunu bir yükseltme olarak değerlendirmesi için mevcut ve yeni sürümler arasında özel biçim puanında gereken minimum iyileştirme",
"SkipFreeSpaceCheckHelpText": "{appName} kök klasörünüzde boş alan tespit edemediğinde bunu kullansın",
"DayOfWeekAt": "{day}, {time} saatinde",
"Logout": ıkış",
"TodayAt": "Bugün {time}'da",
"TomorrowAt": "Yarın {time}'da",
"NoBlocklistItems": "Engellenenler listesi öğesi yok",
"YesterdayAt": "Dün saat {time}'da",
"CustomFormatsSpecificationExceptLanguage": "Dil Dışında",
"CustomFormatsSpecificationExceptLanguageHelpText": "Seçilen dil dışında herhangi bir dil mevcutsa eşleşir",
"LastSearched": "Son Aranan"
}

View File

@@ -223,7 +223,7 @@
"EpisodeAirDate": "剧集播出日期",
"IndexerSearchNoInteractiveHealthCheckMessage": "没有启用交互式搜索的索引器,{appName}将不提供任何交互式搜索结果",
"ProxyFailedToTestHealthCheckMessage": "测试代理失败: {url}",
"About": "关于",
"About": "关于",
"Actions": "动作",
"AppDataDirectory": "AppData 目录",
"ApplyTagsHelpTextHowToApplySeries": "如何将标记应用于所选剧集",
@@ -1134,7 +1134,7 @@
"SeriesPremiere": "剧集首播",
"ShortDateFormat": "短日期格式",
"ShowEpisodes": "显示剧集",
"ShowMonitored": "显示追踪",
"ShowMonitored": "显示追踪状态",
"ShowMonitoredHelpText": "在海报下显示追踪状态",
"ShowNetwork": "显示网络",
"ShowPreviousAiring": "显示上一次播出",
@@ -1471,8 +1471,8 @@
"UrlBase": "基本URL",
"DownloadClientRemovesCompletedDownloadsHealthCheckMessage": "下载客户端 {downloadClientName} 已被设置为删除已完成的下载。这可能导致在 {appName} 导入之前,已下载的文件会被您的客户端移除。",
"ImportListSearchForMissingEpisodesHelpText": "将系列添加到{appName}后,自动搜索缺失的剧集",
"AutoRedownloadFailed": "重新下载失败",
"AutoRedownloadFailedFromInteractiveSearch": "来自手动搜索的资源重新下载失败",
"AutoRedownloadFailed": "失败时重新下载",
"AutoRedownloadFailedFromInteractiveSearch": "失败时重新下载来自手动搜索的资源",
"AutoRedownloadFailedFromInteractiveSearchHelpText": "当从手动搜索中抓取的发布资源下载失败时,自动搜索并尝试下载不同的发布资源",
"ImportListSearchForMissingEpisodes": "搜索缺失集",
"QueueFilterHasNoItems": "所选的队列过滤器中无项目",
@@ -1946,5 +1946,9 @@
"NotificationsTwitterSettingsMentionHelpText": "在发送的推文中提及此用户",
"NotificationsTwitterSettingsMention": "提及",
"ShowTags": "显示标签",
"ShowTagsHelpText": "在海报下显示标签"
"ShowTagsHelpText": "在海报下显示标签",
"SkipFreeSpaceCheckHelpText": "当 {appName} 无法检测到根目录的剩余空间时使用",
"MinimumCustomFormatScoreIncrement": "自定义格式分数最小增量",
"MinimumCustomFormatScoreIncrementHelpText": "{appName} 将新版本视为升级版本之前,新版本资源相较于现有版本在自定义格式分数上的最小提升",
"LastSearched": "最近搜索"
}

View File

@@ -72,7 +72,7 @@ namespace NzbDrone.Core.MediaFiles
private static readonly Regex ExcludedExtrasSubFolderRegex = new Regex(@"(?:\\|\/|^)(?:extras|extrafanart|behind the scenes|deleted scenes|featurettes|interviews|other|scenes|samples|shorts|trailers)(?:\\|\/)", RegexOptions.Compiled | RegexOptions.IgnoreCase);
private static readonly Regex ExcludedSubFoldersRegex = new Regex(@"(?:\\|\/|^)(?:@eadir|\.@__thumb|plex versions|\.[^\\/]+)(?:\\|\/)", RegexOptions.Compiled | RegexOptions.IgnoreCase);
private static readonly Regex ExcludedExtraFilesRegex = new Regex(@"(-(trailer|other|behindthescenes|deleted|featurette|interview|scene|short)\.[^.]+$)", RegexOptions.Compiled | RegexOptions.IgnoreCase);
private static readonly Regex ExcludedFilesRegex = new Regex(@"^\._|^Thumbs\.db$", RegexOptions.Compiled | RegexOptions.IgnoreCase);
private static readonly Regex ExcludedFilesRegex = new Regex(@"^\.(_|unmanic|DS_Store$)|^Thumbs\.db$", RegexOptions.Compiled | RegexOptions.IgnoreCase);
public void Scan(Series series)
{

View File

@@ -63,6 +63,12 @@ namespace NzbDrone.Core.MediaFiles.EpisodeImport.Aggregation.Aggregators.Augment
return AugmentQualityResult.SourceAndResolutionOnly(source, sourceConfidence, 720, Confidence.MediaInfo);
}
if (width >= 1000 || height >= 560)
{
_logger.Trace("Resolution {0}x{1} considered 576p", width, height);
return AugmentQualityResult.SourceAndResolutionOnly(source, sourceConfidence, 576, Confidence.MediaInfo);
}
if (width > 0 && height > 0)
{
_logger.Trace("Resolution {0}x{1} considered 480p", width, height);

View File

@@ -155,10 +155,19 @@ namespace NzbDrone.Core.MediaFiles.EpisodeImport.Manual
if (episodeIds.Any())
{
var downloadClientItem = GetTrackedDownload(downloadId)?.DownloadItem;
var episodes = _episodeService.GetEpisodes(episodeIds);
var finalReleaseGroup = releaseGroup.IsNullOrWhiteSpace()
? Parser.Parser.ParseReleaseGroup(path)
: releaseGroup;
var finalQuality = quality.Quality == Quality.Unknown ? QualityParser.ParseQuality(path) : quality;
var finalLanguges =
languages?.Count <= 1 && (languages?.SingleOrDefault() ?? Language.Unknown) == Language.Unknown
? languageParse
: languages;
var localEpisode = new LocalEpisode();
localEpisode.Series = series;
localEpisode.Episodes = _episodeService.GetEpisodes(episodeIds);
localEpisode.Episodes = episodes;
localEpisode.FileEpisodeInfo = Parser.Parser.ParsePath(path);
localEpisode.DownloadClientEpisodeInfo = downloadClientItem == null ? null : Parser.Parser.ParseTitle(downloadClientItem.Title);
localEpisode.DownloadItem = downloadClientItem;
@@ -166,15 +175,27 @@ namespace NzbDrone.Core.MediaFiles.EpisodeImport.Manual
localEpisode.SceneSource = SceneSource(series, rootFolder);
localEpisode.ExistingFile = series.Path.IsParentPath(path);
localEpisode.Size = _diskProvider.GetFileSize(path);
localEpisode.ReleaseGroup = releaseGroup.IsNullOrWhiteSpace() ? Parser.Parser.ParseReleaseGroup(path) : releaseGroup;
localEpisode.Languages = languages?.Count <= 1 && (languages?.SingleOrDefault() ?? Language.Unknown) == Language.Unknown ? languageParse : languages;
localEpisode.Quality = quality.Quality == Quality.Unknown ? QualityParser.ParseQuality(path) : quality;
localEpisode.ReleaseGroup = finalReleaseGroup;
localEpisode.Languages = finalLanguges;
localEpisode.Quality = finalQuality;
localEpisode.IndexerFlags = (IndexerFlags)indexerFlags;
localEpisode.ReleaseType = releaseType;
localEpisode.CustomFormats = _formatCalculator.ParseCustomFormat(localEpisode);
localEpisode.CustomFormatScore = localEpisode.Series?.QualityProfile?.Value.CalculateCustomFormatScore(localEpisode.CustomFormats) ?? 0;
// Augment episode file so imported files have all additional information an automatic import would
localEpisode = _aggregationService.Augment(localEpisode, downloadClientItem);
// Reapply the user-chosen values.
localEpisode.Series = series;
localEpisode.Episodes = episodes;
localEpisode.ReleaseGroup = finalReleaseGroup;
localEpisode.Quality = finalQuality;
localEpisode.Languages = finalLanguges;
localEpisode.IndexerFlags = (IndexerFlags)indexerFlags;
localEpisode.ReleaseType = releaseType;
return MapItem(_importDecisionMaker.GetDecision(localEpisode, downloadClientItem), rootFolder, downloadId, null);
}

View File

@@ -84,7 +84,7 @@ namespace NzbDrone.Core.MediaFiles
if (DateTime.TryParse(fileDate + ' ' + fileTime, out var airDate))
{
// avoiding false +ve checks and set date skewing by not using UTC (Windows)
var oldDateTime = _diskProvider.FileGetLastWrite(filePath);
var oldLastWrite = _diskProvider.FileGetLastWrite(filePath);
if (OsInfo.IsNotWindows && airDate < EpochTime)
{
@@ -92,12 +92,12 @@ namespace NzbDrone.Core.MediaFiles
airDate = EpochTime;
}
if (!DateTime.Equals(airDate, oldDateTime))
if (!DateTime.Equals(airDate.WithoutTicks(), oldLastWrite.WithoutTicks()))
{
try
{
_diskProvider.FileSetLastWriteTime(filePath, airDate);
_logger.Debug("Date of file [{0}] changed from '{1}' to '{2}'", filePath, oldDateTime, airDate);
_logger.Debug("Date of file [{0}] changed from '{1}' to '{2}'", filePath, oldLastWrite, airDate);
return true;
}
@@ -125,11 +125,11 @@ namespace NzbDrone.Core.MediaFiles
airDateUtc = EpochTime;
}
if (!DateTime.Equals(airDateUtc, oldLastWrite))
if (!DateTime.Equals(airDateUtc.WithoutTicks(), oldLastWrite.WithoutTicks()))
{
try
{
_diskProvider.FileSetLastWriteTime(filePath, airDateUtc);
_diskProvider.FileSetLastWriteTime(filePath, airDateUtc.AddMilliseconds(oldLastWrite.Millisecond));
_logger.Debug("Date of file [{0}] changed from '{1}' to '{2}'", filePath, oldLastWrite, airDateUtc);
return true;

View File

@@ -180,13 +180,13 @@ namespace NzbDrone.Core.Notifications.Gotify
if (linkType == MetadataLinkType.Trakt && series.TvdbId > 0)
{
linkText = "TVMaze";
linkText = "Trakt";
linkUrl = $"http://trakt.tv/search/tvdb/{series.TvdbId}?id_type=show";
}
if (linkType == MetadataLinkType.Tvmaze && series.TvMazeId > 0)
{
linkText = "Trakt";
linkText = "TVMaze";
linkUrl = $"http://www.tvmaze.com/shows/{series.TvMazeId}/_";
}

View File

@@ -69,28 +69,29 @@ namespace NzbDrone.Core.Notifications.Telegram
{
var title = Settings.IncludeAppNameInTitle ? HEALTH_ISSUE_TITLE_BRANDED : HEALTH_ISSUE_TITLE;
_proxy.SendNotification(title, healthCheck.Message, null, Settings);
_proxy.SendNotification(title, healthCheck.Message, new List<TelegramLink>(), Settings);
}
public override void OnHealthRestored(HealthCheck.HealthCheck previousCheck)
{
var title = Settings.IncludeAppNameInTitle ? HEALTH_RESTORED_TITLE_BRANDED : HEALTH_RESTORED_TITLE;
_proxy.SendNotification(title, $"The following issue is now resolved: {previousCheck.Message}", null, Settings);
_proxy.SendNotification(title, $"The following issue is now resolved: {previousCheck.Message}", new List<TelegramLink>(), Settings);
}
public override void OnApplicationUpdate(ApplicationUpdateMessage updateMessage)
{
var title = Settings.IncludeAppNameInTitle ? APPLICATION_UPDATE_TITLE_BRANDED : APPLICATION_UPDATE_TITLE;
_proxy.SendNotification(title, updateMessage.Message, null, Settings);
_proxy.SendNotification(title, updateMessage.Message, new List<TelegramLink>(), Settings);
}
public override void OnManualInteractionRequired(ManualInteractionRequiredMessage message)
{
var title = Settings.IncludeAppNameInTitle ? MANUAL_INTERACTION_REQUIRED_TITLE_BRANDED : MANUAL_INTERACTION_REQUIRED_TITLE;
var links = GetLinks(message.Series);
_proxy.SendNotification(title, message.Message, null, Settings);
_proxy.SendNotification(title, message.Message, links, Settings);
}
public override ValidationResult Test()
@@ -106,6 +107,11 @@ namespace NzbDrone.Core.Notifications.Telegram
{
var links = new List<TelegramLink>();
if (series == null)
{
return links;
}
foreach (var link in Settings.MetadataLinks)
{
var linkType = (MetadataLinkType)link;

View File

@@ -556,7 +556,7 @@ namespace NzbDrone.Core.Parser
// Handle Exception Release Groups that don't follow -RlsGrp; Manual List
// name only...be very careful with this last; high chance of false positives
private static readonly Regex ExceptionReleaseGroupRegexExact = new Regex(@"(?<releasegroup>(?:D\-Z0N3|Fight-BB|VARYG|E\.N\.D|KRaLiMaRKo|BluDragon|DarQ)\b)", RegexOptions.IgnoreCase | RegexOptions.Compiled);
private static readonly Regex ExceptionReleaseGroupRegexExact = new Regex(@"(?<releasegroup>(?:D\-Z0N3|Fight-BB|VARYG|E\.N\.D|KRaLiMaRKo|BluDragon|DarQ|KCRT|BEN THE MAN)\b)", RegexOptions.IgnoreCase | RegexOptions.Compiled);
// groups whose releases end with RlsGroup) or RlsGroup]
private static readonly Regex ExceptionReleaseGroupRegex = new Regex(@"(?<=[._ \[])(?<releasegroup>(Silence|afm72|Panda|Ghost|MONOLITH|Tigole|Joy|ImE|UTR|t3nzin|Anime Time|Project Angel|Hakata Ramen|HONE|Vyndros|SEV|Garshasp|Kappa|Natty|RCVR|SAMPA|YOGI|r00t|EDGE2020|RZeroX)(?=\]|\)))", RegexOptions.IgnoreCase | RegexOptions.Compiled);

View File

@@ -63,7 +63,7 @@ namespace NzbDrone.Core.Parser
private static readonly Regex HighDefPdtvRegex = new (@"hr[-_. ]ws", RegexOptions.Compiled | RegexOptions.IgnoreCase);
private static readonly Regex RemuxRegex = new (@"(?:[_. ]|\d{4}p-)(?<remux>(?:(BD|UHD)[-_. ]?)?Remux)\b|(?<remux>(?:(BD|UHD)[-_. ]?)?Remux[_. ]\d{4}p)", RegexOptions.Compiled | RegexOptions.IgnoreCase);
private static readonly Regex RemuxRegex = new (@"(?:[_. ]|\d{4}p-|\bHybrid-)(?<remux>(?:(BD|UHD)[-_. ]?)?Remux)\b|(?<remux>(?:(BD|UHD)[-_. ]?)?Remux[_. ]\d{4}p)", RegexOptions.Compiled | RegexOptions.IgnoreCase);
public static QualityModel ParseQuality(string name)
{

View File

@@ -17,6 +17,20 @@ namespace NzbDrone.Core.Qualities
return matchingQuality;
}
// Handle 576p releases that have a Television or Web source, so they don't get rolled up to Bluray 576p
if (resolution < 720)
{
switch (source)
{
case QualitySource.Television:
return Quality.SDTV;
case QualitySource.Web:
return Quality.WEBDL480p;
case QualitySource.WebRip:
return Quality.WEBRip480p;
}
}
var matchingResolution = Quality.All.Where(q => q.Resolution == resolution)
.OrderBy(q => q.Source)
.ToList();

View File

@@ -135,7 +135,7 @@ namespace NzbDrone.Host
Name = "apikey",
Type = SecuritySchemeType.ApiKey,
Scheme = "apiKey",
Description = "Apikey passed as header",
Description = "Apikey passed as query parameter",
In = ParameterLocation.Query,
Reference = new OpenApiReference
{

View File

@@ -1,3 +1,5 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.AspNetCore.Mvc;
using NzbDrone.Core.Blocklisting;
@@ -28,7 +30,16 @@ namespace Sonarr.Api.V3.Blocklist
public PagingResource<BlocklistResource> GetBlocklist([FromQuery] PagingRequestResource paging, [FromQuery] int[] seriesIds = null, [FromQuery] DownloadProtocol[] protocols = null)
{
var pagingResource = new PagingResource<BlocklistResource>(paging);
var pagingSpec = pagingResource.MapToPagingSpec<BlocklistResource, NzbDrone.Core.Blocklisting.Blocklist>("date", SortDirection.Descending);
var pagingSpec = pagingResource.MapToPagingSpec<BlocklistResource, NzbDrone.Core.Blocklisting.Blocklist>(
new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"date",
"indexer",
"series.sortTitle",
"sourceTitle"
},
"date",
SortDirection.Descending);
if (seriesIds?.Any() == true)
{

View File

@@ -25,24 +25,24 @@ namespace Sonarr.Api.V3.Episodes
[HttpGet]
[Produces("application/json")]
public List<EpisodeResource> GetEpisodes(int? seriesId, int? seasonNumber, [FromQuery]List<int> episodeIds, int? episodeFileId, bool includeImages = false)
public List<EpisodeResource> GetEpisodes(int? seriesId, int? seasonNumber, [FromQuery]List<int> episodeIds, int? episodeFileId, bool includeSeries = false, bool includeEpisodeFile = false, bool includeImages = false)
{
if (seriesId.HasValue)
{
if (seasonNumber.HasValue)
{
return MapToResource(_episodeService.GetEpisodesBySeason(seriesId.Value, seasonNumber.Value), false, false, includeImages);
return MapToResource(_episodeService.GetEpisodesBySeason(seriesId.Value, seasonNumber.Value), includeSeries, includeEpisodeFile, includeImages);
}
return MapToResource(_episodeService.GetEpisodeBySeries(seriesId.Value), false, false, includeImages);
return MapToResource(_episodeService.GetEpisodeBySeries(seriesId.Value), includeSeries, includeEpisodeFile, includeImages);
}
else if (episodeIds.Any())
{
return MapToResource(_episodeService.GetEpisodes(episodeIds), false, false, includeImages);
return MapToResource(_episodeService.GetEpisodes(episodeIds), includeSeries, includeEpisodeFile, includeImages);
}
else if (episodeFileId.HasValue)
{
return MapToResource(_episodeService.GetEpisodesByFileId(episodeFileId.Value), false, false, includeImages);
return MapToResource(_episodeService.GetEpisodesByFileId(episodeFileId.Value), includeSeries, includeEpisodeFile, includeImages);
}
throw new BadRequestException("seriesId or episodeIds must be provided");

View File

@@ -65,7 +65,14 @@ namespace Sonarr.Api.V3.History
public PagingResource<HistoryResource> GetHistory([FromQuery] PagingRequestResource paging, bool includeSeries, bool includeEpisode, [FromQuery(Name = "eventType")] int[] eventTypes, int? episodeId, string downloadId, [FromQuery] int[] seriesIds = null, [FromQuery] int[] languages = null, [FromQuery] int[] quality = null)
{
var pagingResource = new PagingResource<HistoryResource>(paging);
var pagingSpec = pagingResource.MapToPagingSpec<HistoryResource, EpisodeHistory>("date", SortDirection.Descending);
var pagingSpec = pagingResource.MapToPagingSpec<HistoryResource, EpisodeHistory>(
new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"date",
"series.sortTitle"
},
"date",
SortDirection.Descending);
if (eventTypes != null && eventTypes.Any())
{

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.Linq;
using FluentValidation;
using Microsoft.AspNetCore.Mvc;
using NzbDrone.Core.Datastore;
using NzbDrone.Core.ImportLists.Exclusions;
using Sonarr.Http;
using Sonarr.Http.Extensions;
@@ -46,7 +47,15 @@ namespace Sonarr.Api.V3.ImportLists
public PagingResource<ImportListExclusionResource> GetImportListExclusionsPaged([FromQuery] PagingRequestResource paging)
{
var pagingResource = new PagingResource<ImportListExclusionResource>(paging);
var pageSpec = pagingResource.MapToPagingSpec<ImportListExclusionResource, ImportListExclusion>();
var pageSpec = pagingResource.MapToPagingSpec<ImportListExclusionResource, ImportListExclusion>(
new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"id",
"title",
"tvdbId"
},
"id",
SortDirection.Descending);
return pageSpec.ApplyToPage(_importListExclusionService.Paged, ImportListExclusionResourceMapper.ToResource);
}

View File

@@ -1,3 +1,5 @@
using System;
using System.Collections.Generic;
using Microsoft.AspNetCore.Mvc;
using NzbDrone.Common.Extensions;
using NzbDrone.Core.Configuration;
@@ -29,7 +31,11 @@ namespace Sonarr.Api.V3.Logs
}
var pagingResource = new PagingResource<LogResource>(paging);
var pageSpec = pagingResource.MapToPagingSpec<LogResource, Log>();
var pageSpec = pagingResource.MapToPagingSpec<LogResource, Log>(new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"id",
"time"
});
if (pageSpec.SortKey == "time")
{

View File

@@ -139,7 +139,31 @@ namespace Sonarr.Api.V3.Queue
public PagingResource<QueueResource> GetQueue([FromQuery] PagingRequestResource paging, bool includeUnknownSeriesItems = false, bool includeSeries = false, bool includeEpisode = false, [FromQuery] int[] seriesIds = null, DownloadProtocol? protocol = null, [FromQuery] int[] languages = null, int? quality = null)
{
var pagingResource = new PagingResource<QueueResource>(paging);
var pagingSpec = pagingResource.MapToPagingSpec<QueueResource, NzbDrone.Core.Queue.Queue>("timeleft", SortDirection.Ascending);
var pagingSpec = pagingResource.MapToPagingSpec<QueueResource, NzbDrone.Core.Queue.Queue>(
new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"added",
"downloadClient",
"episode",
"episode.airDateUtc",
"episode.title",
"episodes.airDateUtc",
"episodes.title",
"estimatedCompletionTime",
"indexer",
"language",
"languages",
"progress",
"protocol",
"quality",
"series.sortTitle",
"size",
"status",
"timeleft",
"title"
},
"timeleft",
SortDirection.Ascending);
return pagingSpec.ApplyToPage((spec) => GetQueue(spec, seriesIds?.ToHashSet(), protocol, languages?.ToHashSet(), quality, includeUnknownSeriesItems), (q) => MapToResource(q, includeSeries, includeEpisode));
}

View File

@@ -1,3 +1,5 @@
using System;
using System.Collections.Generic;
using Microsoft.AspNetCore.Mvc;
using NzbDrone.Core.CustomFormats;
using NzbDrone.Core.Datastore;
@@ -31,13 +33,15 @@ namespace Sonarr.Api.V3.Wanted
public PagingResource<EpisodeResource> GetCutoffUnmetEpisodes([FromQuery] PagingRequestResource paging, bool includeSeries = false, bool includeEpisodeFile = false, bool includeImages = false, bool monitored = true)
{
var pagingResource = new PagingResource<EpisodeResource>(paging);
var pagingSpec = new PagingSpec<Episode>
{
Page = pagingResource.Page,
PageSize = pagingResource.PageSize,
SortKey = pagingResource.SortKey,
SortDirection = pagingResource.SortDirection
};
var pagingSpec = pagingResource.MapToPagingSpec<EpisodeResource, Episode>(
new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"episodes.airDateUtc",
"episodes.lastSearchTime",
"series.sortTitle"
},
"episodes.airDateUtc",
SortDirection.Ascending);
if (monitored)
{

View File

@@ -1,3 +1,5 @@
using System;
using System.Collections.Generic;
using Microsoft.AspNetCore.Mvc;
using NzbDrone.Core.CustomFormats;
using NzbDrone.Core.Datastore;
@@ -27,13 +29,15 @@ namespace Sonarr.Api.V3.Wanted
public PagingResource<EpisodeResource> GetMissingEpisodes([FromQuery] PagingRequestResource paging, bool includeSeries = false, bool includeImages = false, bool monitored = true)
{
var pagingResource = new PagingResource<EpisodeResource>(paging);
var pagingSpec = new PagingSpec<Episode>
{
Page = pagingResource.Page,
PageSize = pagingResource.PageSize,
SortKey = pagingResource.SortKey,
SortDirection = pagingResource.SortDirection
};
var pagingSpec = pagingResource.MapToPagingSpec<EpisodeResource, Episode>(
new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"episodes.airDateUtc",
"episodes.lastSearchTime",
"series.sortTitle"
},
"episodes.airDateUtc",
SortDirection.Ascending);
if (monitored)
{

View File

@@ -1927,6 +1927,22 @@
"format": "int32"
}
},
{
"name": "includeSeries",
"in": "query",
"schema": {
"type": "boolean",
"default": false
}
},
{
"name": "includeEpisodeFile",
"in": "query",
"schema": {
"type": "boolean",
"default": false
}
},
{
"name": "includeImages",
"in": "query",
@@ -12364,7 +12380,7 @@
},
"apikey": {
"type": "apiKey",
"description": "Apikey passed as header",
"description": "Apikey passed as query parameter",
"name": "apikey",
"in": "query"
}

View File

@@ -1,5 +1,6 @@
using System;
using System.Web;
using System.Text.RegularExpressions;
using Diacritical;
using Microsoft.AspNetCore.Authentication;
using Microsoft.AspNetCore.Authentication.Cookies;
using Microsoft.Extensions.DependencyInjection;
@@ -10,6 +11,8 @@ namespace Sonarr.Http.Authentication
{
public static class AuthenticationBuilderExtensions
{
private static readonly Regex CookieNameRegex = new Regex(@"[^a-z0-9]+", RegexOptions.Compiled | RegexOptions.IgnoreCase);
public static AuthenticationBuilder AddApiKey(this AuthenticationBuilder authenticationBuilder, string name, Action<ApiKeyAuthenticationOptions> options)
{
return authenticationBuilder.AddScheme<ApiKeyAuthenticationOptions, ApiKeyAuthenticationHandler>(name, options);
@@ -35,8 +38,10 @@ namespace Sonarr.Http.Authentication
services.AddOptions<CookieAuthenticationOptions>(AuthenticationType.Forms.ToString())
.Configure<IConfigFileProvider>((options, configFileProvider) =>
{
// Url Encode the cookie name to account for spaces or other invalid characters in the configured instance name
var instanceName = HttpUtility.UrlEncode(configFileProvider.InstanceName);
// Replace diacritics and replace non-word characters to ensure cookie name doesn't contain any valid URL characters not allowed in cookie names
var instanceName = configFileProvider.InstanceName;
instanceName = instanceName.RemoveDiacritics();
instanceName = CookieNameRegex.Replace(instanceName, string.Empty);
options.Cookie.Name = $"{instanceName}Auth";
options.AccessDeniedPath = "/login?loginFailed=true";

View File

@@ -38,7 +38,11 @@ namespace Sonarr.Http
public static class PagingResourceMapper
{
public static PagingSpec<TModel> MapToPagingSpec<TResource, TModel>(this PagingResource<TResource> pagingResource, string defaultSortKey = "Id", SortDirection defaultSortDirection = SortDirection.Ascending)
public static PagingSpec<TModel> MapToPagingSpec<TResource, TModel>(
this PagingResource<TResource> pagingResource,
HashSet<string> allowedSortKeys,
string defaultSortKey = "id",
SortDirection defaultSortDirection = SortDirection.Ascending)
{
var pagingSpec = new PagingSpec<TModel>
{
@@ -48,15 +52,15 @@ namespace Sonarr.Http
SortDirection = pagingResource.SortDirection,
};
if (pagingResource.SortKey == null)
{
pagingSpec.SortKey = defaultSortKey;
pagingSpec.SortKey = pagingResource.SortKey != null &&
allowedSortKeys is { Count: > 0 } &&
allowedSortKeys.Contains(pagingResource.SortKey)
? pagingResource.SortKey
: defaultSortKey;
if (pagingResource.SortDirection == SortDirection.Default)
{
pagingSpec.SortDirection = defaultSortDirection;
}
}
pagingSpec.SortDirection = pagingResource.SortDirection == SortDirection.Default
? defaultSortDirection
: pagingResource.SortDirection;
return pagingSpec;
}

View File

@@ -2477,15 +2477,6 @@ clean-stack@^2.0.0:
resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b"
integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==
clipboard@2.0.11:
version "2.0.11"
resolved "https://registry.yarnpkg.com/clipboard/-/clipboard-2.0.11.tgz#62180360b97dd668b6b3a84ec226975762a70be5"
integrity sha512-C+0bbOqkezLIsmWSvlsXS0Q0bmkugu7jcfMIACB+RDEntIzQIkdr148we28AfSloQLRdZlYL/QYyrq05j/3Faw==
dependencies:
good-listener "^1.2.2"
select "^1.1.2"
tiny-emitter "^2.0.0"
clone-deep@^4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387"
@@ -2880,11 +2871,6 @@ del@^6.1.1:
rimraf "^3.0.2"
slash "^3.0.0"
delegate@^3.1.2:
version "3.2.0"
resolved "https://registry.yarnpkg.com/delegate/-/delegate-3.2.0.tgz#b66b71c3158522e8ab5744f720d8ca0c2af59166"
integrity sha512-IofjkYBZaZivn0V8nnsMJGBr4jVLxHDheKSW88PyxS5QC4Vo9ZbZVvhzlSxY87fVq3STR6r+4cGepyHkcWOQSw==
detect-node-es@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/detect-node-es/-/detect-node-es-1.1.0.tgz#163acdf643330caa0b4cd7c21e7ee7755d6fa493"
@@ -3811,13 +3797,6 @@ globjoin@^0.1.4:
resolved "https://registry.yarnpkg.com/globjoin/-/globjoin-0.1.4.tgz#2f4494ac8919e3767c5cbb691e9f463324285d43"
integrity sha512-xYfnw62CKG8nLkZBfWbhWwDw02CHty86jfPcc2cr3ZfeuK9ysoVPPEUxf21bAD/rWAgk52SuBrLJlefNy8mvFg==
good-listener@^1.2.2:
version "1.2.2"
resolved "https://registry.yarnpkg.com/good-listener/-/good-listener-1.2.2.tgz#d53b30cdf9313dffb7dc9a0d477096aa6d145c50"
integrity sha512-goW1b+d9q/HIwbVYZzZ6SsTr4IgE+WA44A0GmPIQstuOrgsFcT7VEJ48nmr9GaRtNu0XTKacFLGnBPAM6Afouw==
dependencies:
delegate "^3.1.2"
gopd@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c"
@@ -6271,11 +6250,6 @@ section-iterator@^2.0.0:
resolved "https://registry.yarnpkg.com/section-iterator/-/section-iterator-2.0.0.tgz#bf444d7afeeb94ad43c39ad2fb26151627ccba2a"
integrity sha512-xvTNwcbeDayXotnV32zLb3duQsP+4XosHpb/F+tu6VzEZFmIjzPdNk6/O+QOOx5XTh08KL2ufdXeCO33p380pQ==
select@^1.1.2:
version "1.1.2"
resolved "https://registry.yarnpkg.com/select/-/select-1.1.2.tgz#0e7350acdec80b1108528786ec1d4418d11b396d"
integrity sha512-OwpTSOfy6xSs1+pwcNrv0RBMOzI39Lp3qQKUTPVVPRjCdNa5JH/oPRiqsesIskK8TVgmRiHwO4KXlV2Li9dANA==
"semver@2 || 3 || 4 || 5", semver@^5.6.0:
version "5.7.2"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8"
@@ -6756,11 +6730,6 @@ text-table@^0.2.0:
resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"
integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==
tiny-emitter@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/tiny-emitter/-/tiny-emitter-2.1.0.tgz#1d1a56edfc51c43e863cbb5382a72330e3555423"
integrity sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q==
tiny-invariant@^1.0.2:
version "1.3.3"
resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.3.3.tgz#46680b7a873a0d5d10005995eb90a70d74d60127"