Mirror of https://github.com/JackDallas/Premiumizearr.git (synced 2026-01-10 22:58:12 +01:00)

Compare commits: v1.2-rc6 ... download-h (109 Commits)
Commits (SHA1):
fafe685730, 49a796a658, 1f958fce00, befb6fa3d2, b32fe81250, 5bf34987cf, dbc65f0309, 20370cfcad, 5c224f092d, 38b030a598,
5dd7501255, 364ae74e30, ba74f04ea0, a47ce80bca, d990ab6245, f0699b8919, 7a8dd46d62, 0143d9e38d, efe4e30c5d, f65f2b9fff,
900e6bfd11, f22331af6e, cb384cab34, eb87786606, 9cfa60f6c9, 23a2987ecd, 14ef50267f, 5f3feddb9d, b2196b7242, 8b264c97d7,
ba061c03f6, 627097ca97, 5331c9336d, a85337d2ce, 2381d3b5b8, 62bbc25c43, 427e250bce, 2a04c26eda, 2542651e82, 0508f87969,
a79dcedfbe, b0a44c8ff2, e6e7e59fb6, 7f74ddc5d3, 369ef0759b, e62240a5cc, 6c6d8d8829, 1f7431d56f, 24847825db, b6cf141b68,
b4cf1e0a4f, 4cfdee6bc7, 80506f41d7, b63e16b596, 5c06dd4200, 2c05d8530f, 8c4c490db2, d9bd141951, 802eeedfef, 4f9d8299e6,
6bdf60f272, 742bd1e324, 1cd862c5dc, 9479ebe7fd, 4147817d6f, c48259cb12, 18ce2c95e9, ff832a5d18, 56d52d0b3a, 3e50ba2ae1,
898b53276a, 91cf5bcfc8, c86896e881, 3af570479f, e6825dcb26, 7c06cb050b, 4d610d3f59, 4bed257802, 07843219ef, b32ae333a8,
49a716764c, d6b123d7a3, 0a4d6923b1, da1a11dba5, 1e295c3608, 78fc4b8b39, bdda3ca793, d44204a8ed, 5fff9f9f53, 085d26c816,
b60ef30a93, 5af4083c67, e8e93c667f, 1893a1a5e4, fdddc40699, 828eb43a4c, 969a3ac2cb, 83f1d19dfb, ff36423729, cd0b5fba99,
df9c768066, 3786e1411c, 4bf929967a, 43ea4903c3, a55eea881c, 8c4c3a1b24, 2db7e04604, 7ec072a767, 935813b27f
.github/dependabot.yml (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
version: 2
updates:
  - package-ecosystem: "gomod"
    directory: "/"
    schedule:
      interval: "weekly"
  - package-ecosystem: "npm"
    directory: "/web"
    schedule:
      interval: "weekly"
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
.github/workflows/build.yml (vendored, 75 lines changed)
@@ -9,6 +9,8 @@ on:

permissions:
  contents: write
  packages: write
  security-events: write

jobs:
  build:
@@ -21,27 +23,90 @@ jobs:
      - uses: actions/setup-go@v2
        with:
          go-version: '1.17'
          go-version: '1.20'

      - name: Confirm Version
      - name: Go Version
        run: go version

      - name: Go Vet
        run: go vet ./...

      - name: Docker Version
        run: docker version

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v2

      # Standard Build
      - name: Build
        uses: goreleaser/goreleaser-action@v2
        if: ! startsWith(github.ref, 'refs/tags/')
        if: startsWith(github.ref, 'refs/tags/') == false
        with:
          distribution: goreleaser
          version: latest
          args: release --rm-dist --snapshot
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          DOCKER_BUILDKIT: 1
          COMPOSE_DOCKER_CLI_BUILD: 1

      - name: 'Get Previous tag'
        id: previoustag
        uses: "WyriHaximus/github-action-get-previous-tag@v1"

      - name: Run Trivy vulnerability scanner
        uses: aquasecurity/trivy-action@7b7aa264d83dc58691451798b4d117d53d21edfe
        with:
          image-ref: 'ghcr.io/jackdallas/premiumizearr:${{ steps.previoustag.outputs.tag }}-amd64'
          format: 'template'
          template: '@/contrib/sarif.tpl'
          output: 'trivy-results.sarif'
          severity: 'CRITICAL,HIGH'

      - name: Upload Trivy scan results to GitHub Security tab
        uses: github/codeql-action/upload-sarif@v2
        with:
          sarif_file: 'trivy-results.sarif'

      # Release build
      - uses: docker/login-action@v1
        if: startsWith(github.ref, 'refs/tags/')
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Release
        uses: goreleaser/goreleaser-action@v2
        if: startsWith(github.ref, 'refs/tags/')
        if: startsWith(github.ref, 'refs/tags/') && !contains(github.ref, '-rc')
        with:
          distribution: goreleaser
          version: latest
          args: release --rm-dist
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          DOCKER_BUILDKIT: 1
          COMPOSE_DOCKER_CLI_BUILD: 1

      # Pre-Release build
      - name: Pre-Release
        uses: goreleaser/goreleaser-action@v2
        if: startsWith(github.ref, 'refs/tags/') && contains(github.ref, '-rc')
        with:
          distribution: goreleaser
          version: latest
          args: release --rm-dist -f .prerelease.goreleaser.yaml
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          DOCKER_BUILDKIT: 1
          COMPOSE_DOCKER_CLI_BUILD: 1

      - name: Upload assets
        uses: actions/upload-artifact@v2
        with:
          name: artifacts
          path: dist/
.github/workflows/codeql-analysis.yml (vendored, 47 lines removed)
@@ -1,47 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'go', 'javascript' ]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v2

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v1
        with:
          languages: ${{ matrix.language }}

      - name: Autobuild
        uses: github/codeql-action/autobuild@v1

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v1
.github/workflows/dependabot-auto-merge.yaml (vendored, new file, 27 lines)
@@ -0,0 +1,27 @@
name: Dependabot auto-merge
on: pull_request

permissions:
  contents: write
  pull-requests: write

jobs:
  dependabot:
    runs-on: ubuntu-latest
    if: ${{ github.actor == 'dependabot[bot]' }}
    steps:
      - name: Dependabot metadata
        id: metadata
        uses: dependabot/fetch-metadata@v1
        with:
          github-token: "${{ secrets.GITHUB_TOKEN }}"
      - name: Enable auto-merge for Dependabot PRs
        run: gh pr merge --auto --rebase "$PR_URL"
        env:
          PR_URL: ${{github.event.pull_request.html_url}}
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
      - name: Auto Approve Dependabot PRs
        run: gh pr review --approve -b "Dependabot auto approve" "$PR_URL"
        env:
          PR_URL: ${{github.event.pull_request.html_url}}
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
.github/workflows/dependency-review.yml (vendored, new file, 20 lines)
@@ -0,0 +1,20 @@
# Dependency Review Action
#
# This Action will scan dependency manifest files that change as part of a Pull Request, surfacing known-vulnerable versions of the packages declared or updated in the PR. Once installed, if the workflow run is marked as required, PRs introducing known-vulnerable packages will be blocked from merging.
#
# Source repository: https://github.com/actions/dependency-review-action
# Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement
name: 'Dependency Review'
on: [pull_request]

permissions:
  contents: read

jobs:
  dependency-review:
    runs-on: ubuntu-latest
    steps:
      - name: 'Checkout Repository'
        uses: actions/checkout@v3
      - name: 'Dependency Review'
        uses: actions/dependency-review-action@v2
.gitignore (vendored, 6 lines changed)
@@ -18,5 +18,7 @@ build/
.vscode
premiumizearrd
premiumizearrd.exe

dist/

dist/

!cmd/premiumizearrd/
.goreleaser.yaml (135 lines changed)
@@ -1,3 +1,6 @@
env:
  - DOCKER_BUILDKIT=1

before:
  hooks:
    - go mod tidy
@@ -13,9 +16,14 @@ builds:
    goarch:
      - amd64
      - arm64
      - arm
    goarm:
      - 7
    ignore:
      - goos: windows
        goarch: arm64
      - goos: windows
        goarch: arm

archives:
  - format_overrides:
@@ -31,50 +39,129 @@ archives:
      - src: build/static/*
        dst: static
        strip_parent: true

checksum:
  name_template: 'checksums.txt'

snapshot:
  name_template: "{{ incpatch .Version }}-next"

changelog:
  sort: asc
  filters:
    exclude:
      - '^docs:'
      - '^test:'

nfpms:
  # note that this is an array of nfpm configs
  -
    # Name of the package.
    # Defaults to `ProjectName`.
    package_name: premiumizearr

    # Your app's vendor.
    # Default is empty.
    bindir: /opt/premiumizearrd
    vendor: Jack Dallas.

    # Template to your app's homepage.
    # Default is empty.
    homepage: https://github.com/JackDallas/Premiumizearr

    # Your app's maintainer (probably you).
    # Default is empty.
    maintainer: Dallas <jack-dallas@outlook.com>

    # Template to your app's description.
    # Default is empty.
    description: Service to connect premiumize.me to Arr clients.

    # Your app's license.
    # Default is empty.
    license: GPLv3

    # Formats to be generated.
    formats:
      - deb

    contents:
      # Basic file that applies to all packagers
      - src: build/static/*
        dst: /usr/local/bin/static/
        dst: /opt/premiumizearrd/static/
      - src: init/premiumizearrd.service
        dst: /etc/systemd/system/
        dst: /etc/systemd/system/premiumizearrd.service
    scripts:
      postinstall: "scripts/postinstall.sh"

dockers:
  -
    use: buildx
    goos: linux
    goarch: amd64
    image_templates:
      - "ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64"
    skip_push: "false"
    build_flag_templates:
      - "--pull"
      - "--label=org.opencontainers.image.created={{.Date}}"
      - "--label=org.opencontainers.image.title={{.ProjectName}}"
      - "--label=org.opencontainers.image.revision={{.FullCommit}}"
      - "--label=org.opencontainers.image.version={{.Version}}"
      - "--label=org.opencontainers.image.source=\"https://github.com/JackDallas/Premiumizearr\""
      - "--platform=linux/amd64"
    dockerfile: "docker/Dockerfile.amd64"
    extra_files:
      - build/static/
      - docker/
  -
    use: buildx
    goos: linux
    goarch: arm64
    image_templates:
      - "ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64"
    skip_push: "false"
    build_flag_templates:
      - "--pull"
      - "--label=org.opencontainers.image.created={{.Date}}"
      - "--label=org.opencontainers.image.title={{.ProjectName}}"
      - "--label=org.opencontainers.image.revision={{.FullCommit}}"
      - "--label=org.opencontainers.image.version={{.Version}}"
      - "--label=org.opencontainers.image.source=\"https://github.com/JackDallas/Premiumizearr\""
      - "--platform=linux/arm64"
    dockerfile: "docker/Dockerfile.arm64"
    extra_files:
      - build/static/
      - docker/
  -
    use: buildx
    goos: linux
    goarch: arm
    goarm: 7
    image_templates:
      - "ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7"
    skip_push: "false"
    build_flag_templates:
      - "--pull"
      - "--label=org.opencontainers.image.created={{.Date}}"
      - "--label=org.opencontainers.image.title={{.ProjectName}}"
      - "--label=org.opencontainers.image.revision={{.FullCommit}}"
      - "--label=org.opencontainers.image.version={{.Version}}"
      - "--label=org.opencontainers.image.source=\"https://github.com/JackDallas/Premiumizearr\""
      - "--platform=linux/arm/v7"
    dockerfile: "docker/Dockerfile.armv7"
    extra_files:
      - build/static/
      - docker/

docker_manifests:
  # Release variants not created on rc-$i tags
  - skip_push: auto
  - name_template: 'ghcr.io/jackdallas/premiumizearr:latest'
    image_templates:
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64'
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7'
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64'
  - name_template: 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}'
    image_templates:
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64'
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7'
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64'
  - name_template: 'ghcr.io/jackdallas/premiumizearr:{{ .Major }}'
    image_templates:
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64'
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7'
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64'
  - name_template: 'ghcr.io/jackdallas/premiumizearr:{{ .Major }}.{{ .Minor }}'
    image_templates:
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64'
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7'
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64'

release:
  # If set to auto, will mark the release as not ready for production
  # in case there is an indicator for this in the tag e.g. v1.0.0-rc1
  # If set to true, will mark the release as not ready for production.
  # Default is false.
  prerelease: auto
  footer: "**Full Changelog**: https://github.com/JackDallas/Premiumizearr/compare/{{ .PreviousTag }}...{{ .Tag }}"
.prerelease.goreleaser.yaml (new file, 147 lines)
@@ -0,0 +1,147 @@
before:
  hooks:
    - go mod tidy
    - make web
builds:
  - env:
      - CGO_ENABLED=0
    goos:
      - linux
      - windows
    main: ./cmd/premiumizearrd
    binary: premiumizearrd
    goarch:
      - amd64
      - arm64
      - arm
    goarm:
      - 7
    ignore:
      - goos: windows
        goarch: arm64
      - goos: windows
        goarch: arm

archives:
  - format_overrides:
      - goos: windows
        format: zip
    wrap_in_directory: true
    files:
      - README.md
      - LICENSE
      - src: build/*.service
        dst: ./
        strip_parent: true
      - src: build/static/*
        dst: static
        strip_parent: true

checksum:
  name_template: 'checksums.txt'

changelog:
  sort: asc
  filters:
    exclude:
      - '^docs:'
      - '^test:'

nfpms:
  -
    package_name: premiumizearr
    bindir: /opt/premiumizearrd
    vendor: Jack Dallas.
    homepage: https://github.com/JackDallas/Premiumizearr
    maintainer: Dallas <jack-dallas@outlook.com>
    description: Service to connect premiumize.me to Arr clients.
    license: GPLv3
    formats:
      - deb
    contents:
      - src: build/static/*
        dst: /opt/premiumizearrd/static/
      - src: init/premiumizearrd.service
        dst: /etc/systemd/system/premiumizearrd.service
    scripts:
      postinstall: "scripts/postinstall.sh"

dockers:
  -
    use: docker
    goos: linux
    goarch: amd64
    image_templates:
      - "ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64"
    skip_push: "false"
    build_flag_templates:
      - "--pull"
      - "--label=org.opencontainers.image.created={{.Date}}"
      - "--label=org.opencontainers.image.title={{.ProjectName}}"
      - "--label=org.opencontainers.image.revision={{.FullCommit}}"
      - "--label=org.opencontainers.image.version={{.Version}}"
      - "--label=org.opencontainers.image.source=\"https://github.com/JackDallas/Premiumizearr\""
      - "--platform=linux/amd64"
    dockerfile: "docker/Dockerfile.amd64"
    extra_files:
      - build/static/
      - docker/
  -
    use: buildx
    goos: linux
    goarch: arm64
    image_templates:
      - "ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64"
    skip_push: "false"
    build_flag_templates:
      - "--pull"
      - "--label=org.opencontainers.image.created={{.Date}}"
      - "--label=org.opencontainers.image.title={{.ProjectName}}"
      - "--label=org.opencontainers.image.revision={{.FullCommit}}"
      - "--label=org.opencontainers.image.version={{.Version}}"
      - "--label=org.opencontainers.image.source=\"https://github.com/JackDallas/Premiumizearr\""
      - "--platform=linux/arm64"
    dockerfile: "docker/Dockerfile.arm64"
    extra_files:
      - build/static/
      - docker/
  -
    use: buildx
    goos: linux
    goarch: arm
    goarm: 7
    image_templates:
      - "ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7"
    skip_push: "false"
    build_flag_templates:
      - "--pull"
      - "--label=org.opencontainers.image.created={{.Date}}"
      - "--label=org.opencontainers.image.title={{.ProjectName}}"
      - "--label=org.opencontainers.image.revision={{.FullCommit}}"
      - "--label=org.opencontainers.image.version={{.Version}}"
      - "--label=org.opencontainers.image.source=\"https://github.com/JackDallas/Premiumizearr\""
      - "--platform=linux/arm/v7"
    dockerfile: "docker/Dockerfile.armv7"
    extra_files:
      - build/static/
      - docker/

docker_manifests:
  - skip_push: false
  - name_template: 'ghcr.io/jackdallas/premiumizearr:dev'
    image_templates:
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64'
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7'
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64'
  - name_template: 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}'
    image_templates:
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64'
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7'
      - 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64'

release:
  prerelease: true
  header: "Premiumizearr Pre-Release {{ .Tag }}"
  footer: "**Full Changelog**: https://github.com/JackDallas/Premiumizearr/compare/{{ .PreviousTag }}...{{ .Tag }}"
README.md (110 lines changed)
@@ -1,11 +1,111 @@
# premiumizearr
# Premiumizearr

## Build

[](https://github.com/JackDallas/Premiumizearr/actions/workflows/build.yml)

## Features

- Monitor blackhole directory to push `.magnet` and `.nzb` to Premiumize.me
- Monitor and download Premiumize.me transfers
- Mark transfers as failed in sonarr
- Monitor and download Premiumize.me transfers (web ui on default port 8182)
- Mark transfers as failed in Radarr & Sonarr

## Up Next
## Support the project by using my invite code

- Radarr support
[Invite Code](https://www.premiumize.me/ref/446038083)

## Install

[Grab the latest release artifact links here](https://github.com/JackDallas/Premiumizearr/releases/)

### Binary

#### System Install

```cli
wget https://github.com/JackDallas/Premiumizearr/releases/download/x.x.x/Premiumizearr_x.x.x_linux_amd64.tar.gz
tar xf Premiumizearr_x.x.x.x_linux_amd64.tar.gz
cd Premiumizearr_x.x.x.x_linux_amd64
sudo mkdir /opt/premiumizearrd/
sudo cp -r premiumizearrd static/ /opt/premiumizearrd/
sudo cp premiumizearrd.service /etc/systemd/system/
sudo systemctl-reload
sudo systemctl enable premiumizearrd.service
sudo systemctl start premiumizearrd.service
```

#### User Install

```cli
wget https://github.com/JackDallas/Premiumizearr/releases/download/x.x.x/Premiumizearr_x.x.x_linux_amd64.tar.gz
tar xf Premiumizearr_x.x.x.x_linux_amd64.tar.gz
cd Premiumizearr_x.x.x.x_linux_amd64
mkdir -p ~/.local/bin/
cp -r premiumizearrd static/ ~/.local/bin/
echo -e "export PATH=~/.local/bin/:$PATH" >> ~/.bashrc
source ~/.bashrc
```

You're now able to run the daemon from anywhere just by typing `premiumizearrd`

### deb file

```cmd
wget https://github.com/JackDallas/Premiumizearr/releases/download/x.x.x/premiumizearr_x.x.x._linux_amd64.deb
sudo dpkg -i premiumizearr_x.x.x.x_linux_amd64.deb
```

### Docker

[Docker images are listed here](https://github.com/jackdallas/Premiumizearr/pkgs/container/premiumizearr)

```cmd
docker run \
  -v /home/dallas/test/data:/data \
  -v /home/dallas/test/blackhole:/blackhole \
  -v /home/dallas/test/downloads:/downloads \
  -p 8182:8182 \
  ghcr.io/jackdallas/premiumizearr:latest
```

If you wish to increase logging (which you'll be asked to do if you submit an issue) you can add `-e PREMIUMIZEARR_LOG_LEVEL=trace` to the command

> Note: The /data mount is where the `config.yaml` and log files are kept

## Setup

### Premiumizearrd

Running for the first time the server will start on `http://0.0.0.0:8182`

If you already use this binding for something else you can edit them in the `config.yaml`

> WARNING: This app exposes api keys in the ui and does not have authentication, it is strongly recommended you put it behind a reverse proxy with auth and set the host to `127.0.0.1` to hide the app from the web.

### Sonarr/Radarr

- Go to your Arr's `Download Client` settings page

- Add a new Torrent Blackhole client, set the `Torrent Folder` to the previously set `BlackholeDirectory` location, set the `Watch Folder` to the previously set `DownloadsDirectory` location

- Add a new Usenet Blackhole client, set the `Nzb Folder` to the previously set `BlackholeDirectory` location, set the `Watch Folder` to the previously set `DownloadsDirectory` location

### Reverse Proxy

Premiumizearr does not have authentication built in so it's strongly recommended you use a reverse proxy

#### Nginx

```nginx
location /premiumizearr/ {
    proxy_pass http://127.0.0.1:8182/;
    proxy_set_header Host $proxy_host;
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    proxy_set_header X-Forwarded-Host $host;
    proxy_set_header X-Forwarded-Proto $scheme;
    proxy_redirect off;
    proxy_http_version 1.1;
    proxy_set_header Upgrade $http_upgrade;
    proxy_set_header Connection $http_connection;
}
```
cmd/premiumizearrd/app.go (new file, 124 lines)
@@ -0,0 +1,124 @@
package main

import (
    "path"
    "time"

    "github.com/jackdallas/premiumizearr/internal/config"
    "github.com/jackdallas/premiumizearr/internal/service"
    "github.com/jackdallas/premiumizearr/pkg/premiumizeme"
    "github.com/orandin/lumberjackrus"
    log "github.com/sirupsen/logrus"
)

type App struct {
    config             config.Config
    premiumizemeClient premiumizeme.Premiumizeme
    transferManager    service.TransferManagerService
    directoryWatcher   service.DirectoryWatcherService
    webServer          service.WebServerService
    arrsManager        service.ArrsManagerService
    downloadManager    service.DownloadManagerService
    taskRunner         service.TaskRunnerService
}

// Makes go vet error - prevents copies
func (app *App) Lock()   {}
func (app *App) UnLock() {}

// Start
func (app *App) Start(logLevel string, configFile string, loggingDirectory string) error {
    //Setup static login
    lvl, err := log.ParseLevel(logLevel)
    if err != nil {
        log.Errorf("Error flag not recognized, defaulting to Info!! %v", err)
        lvl = log.InfoLevel
    }
    log.SetLevel(lvl)
    hook, err := lumberjackrus.NewHook(
        &lumberjackrus.LogFile{
            Filename:   path.Join(loggingDirectory, "premiumizearr.general.log"),
            MaxSize:    100,
            MaxBackups: 1,
            MaxAge:     1,
            Compress:   false,
            LocalTime:  false,
        },
        log.InfoLevel,
        &log.TextFormatter{},
        &lumberjackrus.LogFileOpts{
            log.InfoLevel: &lumberjackrus.LogFile{
                Filename:   path.Join(loggingDirectory, "premiumizearr.info.log"),
                MaxSize:    100,
                MaxBackups: 1,
                MaxAge:     1,
                Compress:   false,
                LocalTime:  false,
            },
            log.ErrorLevel: &lumberjackrus.LogFile{
                Filename:   path.Join(loggingDirectory, "premiumizearr.error.log"),
                MaxSize:    100,   // optional
                MaxBackups: 1,     // optional
                MaxAge:     1,     // optional
                Compress:   false, // optional
                LocalTime:  false, // optional
            },
        },
    )

    if err != nil {
        panic(err)
    }

    log.AddHook(hook)

    log.Info("---------- Starting premiumizearr daemon ----------")
    log.Info("")

    log.Trace("Running load or create config")
    log.Tracef("Reading config file location from flag or env: %s", configFile)
    app.config, err = config.LoadOrCreateConfig(configFile, app.ConfigUpdatedCallback)

    if err != nil {
        panic(err)
    }

    // Initialisation
    app.premiumizemeClient = premiumizeme.NewPremiumizemeClient(app.config.PremiumizemeAPIKey)

    app.transferManager = service.TransferManagerService{}.New()
    app.directoryWatcher = service.DirectoryWatcherService{}.New()
    app.webServer = service.WebServerService{}.New()
    app.arrsManager = service.ArrsManagerService{}.New()
    app.downloadManager = service.DownloadManagerService{}.New()
    app.taskRunner = service.TaskRunnerService{}.New()

    // Initialise Services
    app.taskRunner.Init(&app.config)

    // Must come after taskRunner initialised
    app.arrsManager.Init(&app.config)
    app.directoryWatcher.Init(&app.premiumizemeClient, &app.config)
    app.downloadManager.Init(&app.premiumizemeClient, &app.taskRunner, &app.config)

    // Must come after arrsManager
    app.transferManager.Init(&app.premiumizemeClient, &app.arrsManager, &app.config)
    // Must come after transfer, arrManager and directory
    app.webServer.Init(&app.transferManager, &app.directoryWatcher, &app.arrsManager, &app.config)

    app.arrsManager.Start()
    app.webServer.Start()
    app.directoryWatcher.Start()
    app.taskRunner.Start()
    //Block until the program is terminated
    app.transferManager.Run(15 * time.Second)

    return nil
}

func (app *App) ConfigUpdatedCallback(currentConfig config.Config, newConfig config.Config) {
    app.transferManager.ConfigUpdatedCallback(currentConfig, newConfig)
    app.directoryWatcher.ConfigUpdatedCallback(currentConfig, newConfig)
    app.webServer.ConfigUpdatedCallback(currentConfig, newConfig)
    app.arrsManager.ConfigUpdatedCallback(currentConfig, newConfig)
}
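The empty Lock/UnLock methods on App exist, per the comment above, so that `go vet` (now run as a step in the build workflow) complains when the struct is copied by value. Vet's copylocks check keys off pointer-receiver methods named exactly Lock and Unlock; a small standalone illustration of that pattern, independent of this repository:

```go
package main

import "fmt"

// noCopy mimics the trick: a struct with pointer-receiver Lock/Unlock methods
// is treated like a sync.Locker, so `go vet` reports code that copies it.
type noCopy struct{}

func (*noCopy) Lock()   {}
func (*noCopy) Unlock() {}

// Daemon is a hypothetical struct that must not be copied once started.
type Daemon struct {
	noCopy noCopy
	name   string
}

// describe takes Daemon by value, so `go vet ./...` flags it:
// "describe passes lock by value: Daemon contains noCopy"
func describe(d Daemon) {
	fmt.Println(d.name)
}

func main() {
	d := Daemon{name: "premiumizearrd"}
	describe(d) // the call site is flagged too, since it copies d
}
```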
@@ -1,90 +1,24 @@
package main

import (
    "flag"
    "io"
    "os"
    "time"

    "github.com/jackdallas/premiumizearr/internal/arr"
    "github.com/jackdallas/premiumizearr/internal/config"
    "github.com/jackdallas/premiumizearr/internal/service"
    "github.com/jackdallas/premiumizearr/internal/web_service"
    "github.com/jackdallas/premiumizearr/pkg/premiumizeme"
    log "github.com/sirupsen/logrus"
    "golift.io/starr"
    "golift.io/starr/radarr"
    "golift.io/starr/sonarr"
)

func main() {
    //Flags
    var logLevel string
    var configFile string

    //Parse flags
    flag.StringVar(&logLevel, "log", "info", "Logging level: \n \tinfo,debug,trace")
    flag.StringVar(&configFile, "config", "", "Config file path")
    flag.Parse()

    lvl, err := log.ParseLevel(logLevel)
    if err != nil {
        log.Errorf("Error flag not recognized, defaulting to Info!!", err)
        lvl = log.InfoLevel
    }
    log.SetLevel(lvl)

    logFile, err := os.OpenFile("premiumizearr.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
    if err != nil {
        log.Error(err)
    } else {
        log.SetOutput(io.MultiWriter(logFile, os.Stdout))
    }

    log.Info("")
    log.Info("---------- Starting premiumizearr daemon ----------")
    log.Info("")

    config, err := config.LoadOrCreateConfig(configFile)

    if err != nil {
        panic(err)
    }

    if config.PremiumizemeAPIKey == "" {
        panic("premiumizearr API Key is empty")
    }

    // Initialisation

    premiumizearr_client := premiumizeme.NewPremiumizemeClient(config.PremiumizemeAPIKey)

    starr_config_sonarr := starr.New(config.SonarrAPIKey, config.SonarrURL, 0)
    starr_config_radarr := starr.New(config.RadarrAPIKey, config.RadarrURL, 0)

    sonarr_wrapper := arr.SonarrArr{
        Client:     sonarr.New(starr_config_sonarr),
        History:    nil,
        LastUpdate: time.Now(),
    }
    radarr_wrapper := arr.RadarrArr{
        Client:     radarr.New(starr_config_radarr),
        History:    nil,
        LastUpdate: time.Now(),
    }

    arrs := []arr.IArr{
        &sonarr_wrapper,
        &radarr_wrapper,
    }

    transfer_manager := service.NewTransferManagerService(premiumizearr_client, &arrs, &config)

    directory_watcher := service.NewDirectoryWatcherService(premiumizearr_client, &config)

    go directory_watcher.Watch()

    go web_service.StartWebServer(&transfer_manager, &directory_watcher, &config)
    //Block until the program is terminated
    transfer_manager.Run(15 * time.Second)
}
package main

import (
    "flag"

    "github.com/jackdallas/premiumizearr/internal/utils"
)

func main() {
    //Flags
    var logLevel string
    var configFile string
    var loggingDirectory string

    //Parse flags
    flag.StringVar(&logLevel, "log", utils.EnvOrDefault("PREMIUMIZEARR_LOG_LEVEL", "info"), "Logging level: \n \tinfo,debug,trace")
    flag.StringVar(&configFile, "config", utils.EnvOrDefault("PREMIUMIZEARR_CONFIG_DIR_PATH", "./"), "The directory the config.yml is located in")
    flag.StringVar(&loggingDirectory, "logging-dir", utils.EnvOrDefault("PREMIUMIZEARR_LOGGING_DIR_PATH", "./"), "The directory logs are to be written to")
    flag.Parse()

    App := &App{}
    App.Start(logLevel, configFile, loggingDirectory)

}
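The rewritten main.go resolves its flag defaults through utils.EnvOrDefault, whose implementation is not part of this diff. A minimal sketch of such a helper, assuming only the name and signature used above:

```go
package utils

import "os"

// EnvOrDefault returns the value of the environment variable named by key,
// or def when the variable is unset or empty. Sketch only; the real
// internal/utils implementation is not shown in this comparison.
func EnvOrDefault(key string, def string) string {
	if v, ok := os.LookupEnv(key); ok && v != "" {
		return v
	}
	return def
}
```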
config.yaml (new file, 20 lines)
@@ -0,0 +1,20 @@
PremiumizemeAPIKey: xxxxxxxxx
Arrs:
  - Name: Sonarr
    URL: http://localhost:8989
    APIKey: xxxxxxxxx
    Type: Sonarr
  - Name: Radarr
    URL: http://localhost:7878
    APIKey: xxxxxxxxx
    Type: Radarr
BlackholeDirectory: ""
PollBlackholeDirectory: false
PollBlackholeIntervalMinutes: 10
DownloadsDirectory: ""
UnzipDirectory: ""
bindIP: 0.0.0.0
bindPort: "8182"
WebRoot: ""
SimultaneousDownloads: 5
ArrHistoryUpdateIntervalSeconds: 20
@@ -1,9 +0,0 @@
PremiumizemeAPIKey: ""
SonarrURL: http://localhost:8989
SonarrAPIKey: ""
RadarrURL: http://localhost:7878
RadarrAPIKey: ""
BlackholeDirectory: ""
DownloadsDirectory: ""
bindIP: 0.0.0.0
bindPort: "8182"
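The new Arrs list replaces the flat SonarrURL/RadarrURL keys of the old config. As a hedged sketch of how the keys above could map onto Go structs with yaml tags (the project's actual field names in internal/config are not shown in this excerpt, so these names are illustrative):

```go
package config

// ArrConfig mirrors one entry of the Arrs list in config.yaml.
type ArrConfig struct {
	Name   string `yaml:"Name"`
	URL    string `yaml:"URL"`
	APIKey string `yaml:"APIKey"`
	Type   string `yaml:"Type"` // "Sonarr" or "Radarr"
}

// Config mirrors the top-level keys of the new config.yaml.
type Config struct {
	PremiumizemeAPIKey              string      `yaml:"PremiumizemeAPIKey"`
	Arrs                            []ArrConfig `yaml:"Arrs"`
	BlackholeDirectory              string      `yaml:"BlackholeDirectory"`
	PollBlackholeDirectory          bool        `yaml:"PollBlackholeDirectory"`
	PollBlackholeIntervalMinutes    int         `yaml:"PollBlackholeIntervalMinutes"`
	DownloadsDirectory              string      `yaml:"DownloadsDirectory"`
	UnzipDirectory                  string      `yaml:"UnzipDirectory"`
	BindIP                          string      `yaml:"bindIP"`
	BindPort                        string      `yaml:"bindPort"`
	WebRoot                         string      `yaml:"WebRoot"`
	SimultaneousDownloads           int         `yaml:"SimultaneousDownloads"`
	ArrHistoryUpdateIntervalSeconds int         `yaml:"ArrHistoryUpdateIntervalSeconds"`
}
```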
docker/Dockerfile.amd64 (new file, 5 lines)
@@ -0,0 +1,5 @@
# syntax=edrevo/dockerfile-plus

FROM ghcr.io/linuxserver/baseimage-alpine:3.16-f525477c-ls6@sha256:c25011f564093f523b1a793658d19275d9eac5a7f21aa5d00ce6cdff29c2a8c1

INCLUDE+ docker/Dockerfile.common

docker/Dockerfile.arm64 (new file, 5 lines)
@@ -0,0 +1,5 @@
# syntax=edrevo/dockerfile-plus

FROM ghcr.io/linuxserver/baseimage-alpine:3.16-f525477c-ls6@sha256:611bc4a5a75132914dba740dffa4adcea5039fbe67e3704afd5731a55bf8c82f

INCLUDE+ docker/Dockerfile.common

docker/Dockerfile.armv7 (new file, 5 lines)
@@ -0,0 +1,5 @@
# syntax=edrevo/dockerfile-plus

FROM ghcr.io/linuxserver/baseimage-alpine:3.16-f525477c-ls6@sha256:a31127cd9764c95d6137764a1854402d3a33ee085edd139e08726e2fc98d2254

INCLUDE+ docker/Dockerfile.common

docker/Dockerfile.common (new file, 23 lines)
@@ -0,0 +1,23 @@
LABEL build_version="Premiumizearr version:- ${VERSION} Build-date:- ${BUILD_DATE}"
LABEL maintainer="JackDallas"

COPY docker/root/ /

EXPOSE 8182

RUN mkdir /data
RUN mkdir /unzip
RUN mkdir /downloads
RUN mkdir /transfers
RUN mkdir /blackhole
RUN mkdir -p /opt/app/

WORKDIR /opt/app/

ENV PREMIUMIZEARR_CONFIG_DIR_PATH=/data
ENV PREMIUMIZEARR_LOGGING_DIR_PATH=/data

COPY premiumizearrd /opt/app/
COPY build/static /opt/app/static

ENTRYPOINT ["/init"]
docker/root/etc/cont-init.d/30-config (new file, 10 lines)
@@ -0,0 +1,10 @@
#!/usr/bin/with-contenv bash

# permissions
chown -R abc:abc \
    /data \
    /unzip \
    /downloads \
    /transfers \
    /blackhole \
    /opt \

docker/root/etc/services.d/premiumizearr/run (new file, 6 lines)
@@ -0,0 +1,6 @@
#!/usr/bin/with-contenv bash

cd /opt/app/ || exit

exec \
    s6-setuidgid abc /opt/app/premiumizearrd
go.mod (17 lines changed)
@@ -1,17 +1,20 @@
module github.com/jackdallas/premiumizearr

go 1.17
go 1.20

require (
    github.com/dustin/go-humanize v1.0.0
    github.com/fsnotify/fsnotify v1.5.1
    github.com/dustin/go-humanize v1.0.1
    github.com/fsnotify/fsnotify v1.6.0
    github.com/gorilla/mux v1.8.0
    github.com/sirupsen/logrus v1.8.1
    golift.io/starr v0.13.0
    github.com/orandin/lumberjackrus v1.0.1
    github.com/sirupsen/logrus v1.9.3
    golift.io/starr v0.14.0
    gopkg.in/yaml.v2 v2.4.0
)

require (
    golang.org/x/net v0.0.0-20220114011407-0dd24b26b47d // indirect
    golang.org/x/sys v0.0.0-20211110154304-99a53858aa08 // indirect
    github.com/BurntSushi/toml v1.0.0 // indirect
    golang.org/x/net v0.7.0 // indirect
    golang.org/x/sys v0.5.0 // indirect
    gopkg.in/natefinch/lumberjack.v2 v2.0.0 // indirect
)
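fsnotify, bumped to v1.6.0 above, is the library the blackhole directory watcher builds on. A minimal, self-contained sketch of watching a blackhole folder for new `.magnet`/`.nzb` files, independent of the project's actual DirectoryWatcherService (the `/blackhole` path here is just the Docker default from earlier in this comparison):

```go
package main

import (
	"log"
	"path/filepath"

	"github.com/fsnotify/fsnotify"
)

func main() {
	watcher, err := fsnotify.NewWatcher()
	if err != nil {
		log.Fatal(err)
	}
	defer watcher.Close()

	// In the real daemon this path comes from BlackholeDirectory in config.yaml.
	if err := watcher.Add("/blackhole"); err != nil {
		log.Fatal(err)
	}

	for {
		select {
		case event, ok := <-watcher.Events:
			if !ok {
				return
			}
			// React only to newly created .magnet / .nzb files.
			ext := filepath.Ext(event.Name)
			if event.Op&fsnotify.Create != 0 && (ext == ".magnet" || ext == ".nzb") {
				log.Printf("new blackhole file: %s", event.Name)
			}
		case err, ok := <-watcher.Errors:
			if !ok {
				return
			}
			log.Println("watch error:", err)
		}
	}
}
```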
61
go.sum
61
go.sum
@@ -1,52 +1,77 @@
|
||||
github.com/BurntSushi/toml v1.0.0 h1:dtDWrepsVPfW9H/4y7dDgFc2MBUSeJhlaDtK13CxFlU=
|
||||
github.com/BurntSushi/toml v1.0.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
|
||||
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||
github.com/fsnotify/fsnotify v1.5.1 h1:mZcQUHVQUQWoPXXtuf9yuEXKudkV2sx1E06UadKWpgI=
|
||||
github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY=
|
||||
github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw=
|
||||
github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc=
|
||||
github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=
|
||||
github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
|
||||
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
|
||||
github.com/orandin/lumberjackrus v1.0.1 h1:7ysDQ0MHD79zIFN9/EiDHjUcgopNi5ehtxFDy8rUkWo=
|
||||
github.com/orandin/lumberjackrus v1.0.1/go.mod h1:xYLt6H8W93pKnQgUQaxsApS0Eb4BwHLOkxk5DVzf5H0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=
|
||||
github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
|
||||
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
|
||||
golang.org/x/net v0.0.0-20220114011407-0dd24b26b47d h1:1n1fc535VhN8SYtD4cDUyNlfpAF2ROMM9+11equK3hs=
|
||||
golang.org/x/net v0.0.0-20220114011407-0dd24b26b47d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g=
|
||||
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211110154304-99a53858aa08 h1:WecRHqgE09JBkh/584XIE6PMz5KKE/vER4izNUi30AQ=
|
||||
golang.org/x/sys v0.0.0-20211110154304-99a53858aa08/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golift.io/starr v0.13.0 h1:LoihBAH3DQ0ikPNHTVg47tUU+475mzbr1ahMcY5gdno=
|
||||
golift.io/starr v0.13.0/go.mod h1:IZIzdT5/NBdhM08xAEO5R1INgGN+Nyp4vCwvgHrbKVs=
|
||||
golift.io/starr v0.14.0 h1:G6bmXs0BNS0Kkwhv46FinlW09G6VILV+P6o62SPp2lY=
|
||||
golift.io/starr v0.14.0/go.mod h1:LpR7iazinHYn50wNcTkJeVYxbBYQbkU/DcVYBwc5D9I=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8=
|
||||
gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k=
|
||||
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
||||
@@ -2,10 +2,11 @@
Description=Premiumizearr Daemon
After=network.target
[Service]
User=$USER
Group=$USER
User=1000
Group=1000
UMask=0002
Type=simple
Environment=PREMIUMIZEARR_LOG_LEVEL=info
ExecStart=/opt/premiumizearrd/premiumizearrd
WorkingDirectory=/opt/premiumizearrd/
TimeoutStopSec=20
@@ -2,13 +2,10 @@ package arr
import (
    "fmt"
    "math"
    "time"

    "github.com/jackdallas/premiumizearr/internal/utils"
    "github.com/jackdallas/premiumizearr/pkg/premiumizeme"
    log "github.com/sirupsen/logrus"
    "golift.io/starr"
    "golift.io/starr/radarr"
)
@@ -18,7 +15,7 @@ import (
//Data Access

//GetHistory: Updates the history if it's been more than 15 seconds since last update
// GetHistory: Updates the history if it's been more than 15 seconds since last update
func (arr *RadarrArr) GetHistory() (radarr.History, error) {
    arr.LastUpdateMutex.Lock()
    defer arr.LastUpdateMutex.Unlock()
@@ -29,38 +26,19 @@ func (arr *RadarrArr) GetHistory() (radarr.History, error) {
    arr.LastUpdateCountMutex.Lock()
    defer arr.LastUpdateCountMutex.Unlock()

    if time.Since(arr.LastUpdate) > 60*time.Second || arr.History == nil {
        //Get first page of records
        his, err := arr.Client.GetHistoryPage(&starr.Req{PageSize: 250, Page: 1})
    if time.Since(arr.LastUpdate) > time.Duration(arr.Config.ArrHistoryUpdateIntervalSeconds)*time.Second || arr.History == nil {
        his, err := arr.Client.GetHistory(0, 1000)
        if err != nil {
            return radarr.History{}, fmt.Errorf("failed to get history from radarr: %+v", err)
        }

        if his.TotalRecords == arr.LastUpdateCount && his.TotalRecords > 0 {
            return *arr.History, nil
        }

        if his.TotalRecords > 250 {
            cachedPages := int(math.Ceil(float64(arr.LastUpdateCount) / 250))
            log.Tracef("Loaded %d cached pages of history\n", cachedPages)
            remotePages := int(math.Ceil(float64(his.TotalRecords) / float64(250)))
            log.Tracef("Found %d pages of history on the radarr server\n", cachedPages)
            for i := 2; i <= remotePages-cachedPages; i++ {
                log.Tracef("Radarr.GetHistory(): Getting History Page %d", i)
                h, err := arr.Client.GetHistoryPage(&starr.Req{PageSize: 250, Page: i})
                if err != nil {
                    return radarr.History{}, fmt.Errorf("failed to get history from radarr: %+v", err)
                }
                his.Records = append(his.Records, h.Records...)
            }
            return radarr.History{}, err
        }

        arr.History = his
        arr.LastUpdate = time.Now()
        arr.LastUpdateCount = his.TotalRecords
        log.Debugf("[Radarr] [%s]: Updated history, next update in %d seconds", arr.Name, arr.Config.ArrHistoryUpdateIntervalSeconds)
    }

    log.Tracef("Radarr.GetHistory(): Returning from GetHistory")
    log.Tracef("[Radarr] [%s]: Returning from GetHistory", arr.Name)
    return *arr.History, nil
}
@@ -78,20 +56,18 @@ func (arr *RadarrArr) GetArrName() string {
//Functions

func (arr *RadarrArr) HistoryContains(name string) (int64, bool) {
    log.Tracef("Radarr.HistoryContains(): Checking history for %s", name)
    log.Tracef("Radarr [%s]: Checking history for %s", arr.Name, name)
    his, err := arr.GetHistory()
    if err != nil {
        log.Errorf("Radarr.HistoryContains(): Failed to get history: %+v", err)
        log.Errorf("Radarr [%s]: Failed to get history: %+v", arr.Name, err)
        return -1, false
    }
    log.Trace("Radarr.HistoryContains(): Got History, now Locking History")
    log.Tracef("Radarr [%s]: Got History, now Locking History", arr.Name)
    arr.HistoryMutex.Lock()
    defer arr.HistoryMutex.Unlock()

    name = utils.StripDownloadTypesExtention(name)
    // name = strings.ReplaceAll(name, ".", " ")
    for _, item := range his.Records {
        if item.SourceTitle == name {
        if CompareFileNamesFuzzy(item.SourceTitle, name) {
            return item.ID, true
        }
    }
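The history cache above (and the Sonarr wrapper that follows) now gates refreshes on the configurable `ArrHistoryUpdateIntervalSeconds` instead of a hard-coded 60 seconds. The guard condition reduces to a small check; this is a sketch of that logic in isolation, not a helper that exists in the repository:

```go
package main

import (
	"fmt"
	"time"
)

// needsRefresh mirrors the guard used in GetHistory: refresh when the
// configured interval has elapsed since the last update, or when nothing
// has been cached yet.
func needsRefresh(lastUpdate time.Time, intervalSeconds int, haveCache bool) bool {
	return time.Since(lastUpdate) > time.Duration(intervalSeconds)*time.Second || !haveCache
}

func main() {
	last := time.Now().Add(-30 * time.Second)
	fmt.Println(needsRefresh(last, 20, true)) // true: 30s elapsed, 20s interval
	fmt.Println(needsRefresh(last, 60, true)) // false: still inside the window
}
```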
@@ -2,13 +2,10 @@ package arr
import (
    "fmt"
    "math"
    "time"

    "github.com/jackdallas/premiumizearr/internal/utils"
    "github.com/jackdallas/premiumizearr/pkg/premiumizeme"
    log "github.com/sirupsen/logrus"
    "golift.io/starr"
    "golift.io/starr/sonarr"
)
@@ -18,7 +15,7 @@ import (
//Data Access

//GetHistory: Updates the history if it's been more than 15 seconds since last update
// GetHistory: Updates the history if it's been more than 15 seconds since last update
func (arr *SonarrArr) GetHistory() (sonarr.History, error) {
    arr.LastUpdateMutex.Lock()
    defer arr.LastUpdateMutex.Unlock()
@@ -29,38 +26,19 @@ func (arr *SonarrArr) GetHistory() (sonarr.History, error) {
    arr.LastUpdateCountMutex.Lock()
    defer arr.LastUpdateCountMutex.Unlock()

    if time.Since(arr.LastUpdate) > 60*time.Second || arr.History == nil {
        //Get first page of records
        his, err := arr.Client.GetHistoryPage(&starr.Req{PageSize: 250, Page: 1})
    if time.Since(arr.LastUpdate) > time.Duration(arr.Config.ArrHistoryUpdateIntervalSeconds)*time.Second || arr.History == nil {
        his, err := arr.Client.GetHistory(0, 1000)
        if err != nil {
            return sonarr.History{}, fmt.Errorf("failed to get history from sonarr: %+v", err)
        }

        if his.TotalRecords == arr.LastUpdateCount && his.TotalRecords > 0 {
            return *arr.History, nil
        }

        if his.TotalRecords > 250 {
            cachedPages := int(math.Ceil(float64(arr.LastUpdateCount) / 250))
            fmt.Printf("Loaded %d cached pages of history\n", cachedPages)
            remotePages := int(math.Ceil(float64(his.TotalRecords) / float64(250)))
            fmt.Printf("Found %d pages of history on the sonarr server\n", cachedPages)
            for i := 2; i <= remotePages-cachedPages; i++ {
                log.Tracef("Sonarr.GetHistory(): Getting History Page %d", i)
                h, err := arr.Client.GetHistoryPage(&starr.Req{PageSize: 250, Page: i})
                if err != nil {
                    return sonarr.History{}, fmt.Errorf("failed to get history from sonarr: %+v", err)
                }
                his.Records = append(his.Records, h.Records...)
            }
            return sonarr.History{}, err
        }

        arr.History = his
        arr.LastUpdate = time.Now()
        arr.LastUpdateCount = his.TotalRecords
        log.Debugf("[Sonarr] [%s]: Updated history, next update in %d seconds", arr.Name, arr.Config.ArrHistoryUpdateIntervalSeconds)
    }

    log.Tracef("Sonarr.GetHistory(): Returning from GetHistory")
    log.Tracef("[Sonarr] [%s]: Returning from GetHistory", arr.Name)
    return *arr.History, nil
}
@@ -77,22 +55,21 @@ func (arr *SonarrArr) GetArrName() string {
// Functions

func (arr *SonarrArr) HistoryContains(name string) (int64, bool) {
    log.Tracef("Sonarr.HistoryContains(): Checking history for %s", name)
    log.Tracef("Sonarr [%s]: Checking history for %s", arr.Name, name)
    his, err := arr.GetHistory()
    if err != nil {
        return 0, false
    }
    log.Trace("Sonarr.HistoryContains(): Got History, now Locking History")
    log.Tracef("Sonarr [%s]: Got History, now Locking History", arr.Name)
    arr.HistoryMutex.Lock()
    defer arr.HistoryMutex.Unlock()

    name = utils.StripDownloadTypesExtention(name)
    for _, item := range his.Records {
        if utils.StripDownloadTypesExtention(item.SourceTitle) == name {
        if CompareFileNamesFuzzy(item.SourceTitle, name) {
            return item.ID, true
        }
    }
    log.Tracef("Sonarr.HistoryContains(): %s Not in History", name)
    log.Tracef("Sonarr [%s]: %s Not in History", arr.Name, name)

    return -1, false
}
@@ -1,15 +1,43 @@
|
||||
package arr
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/jackdallas/premiumizearr/internal/config"
|
||||
"github.com/jackdallas/premiumizearr/internal/utils"
|
||||
"github.com/jackdallas/premiumizearr/pkg/premiumizeme"
|
||||
"golift.io/starr/radarr"
|
||||
"golift.io/starr/sonarr"
|
||||
)
|
||||
|
||||
//TODO: Thread Safe version
|
||||
func CompareFileNamesFuzzy(a, b string) bool {
|
||||
//Strip file extension
|
||||
a = utils.StripDownloadTypesExtension(a)
|
||||
b = utils.StripDownloadTypesExtension(b)
|
||||
//Strip media type extension
|
||||
a = utils.StripMediaTypesExtension(a)
|
||||
b = utils.StripMediaTypesExtension(b)
|
||||
//Strip Spaces
|
||||
a = strings.ReplaceAll(a, " ", "")
|
||||
b = strings.ReplaceAll(b, " ", "")
|
||||
//Strip periods
|
||||
a = strings.ReplaceAll(a, ".", "")
|
||||
b = strings.ReplaceAll(b, ".", "")
|
||||
//Strip dashes
|
||||
a = strings.ReplaceAll(a, "-", "")
|
||||
b = strings.ReplaceAll(b, "-", "")
|
||||
//Strip underscores
|
||||
a = strings.ReplaceAll(a, "_", "")
|
||||
b = strings.ReplaceAll(b, "_", "")
|
||||
//Convert to lowercase
|
||||
a = strings.ToLower(a)
|
||||
b = strings.ToLower(b)
|
||||
|
||||
return a == b
|
||||
}
|
||||
|
||||
type IArr interface {
|
||||
HistoryContains(string) (int64, bool)
|
||||
MarkHistoryItemAsFailed(int64) error
|
||||
@@ -18,6 +46,7 @@ type IArr interface {
|
||||
}
|
||||
|
||||
type SonarrArr struct {
|
||||
Name string
|
||||
ClientMutex sync.Mutex
|
||||
Client *sonarr.Sonarr
|
||||
HistoryMutex sync.Mutex
|
||||
@@ -26,9 +55,11 @@ type SonarrArr struct {
|
||||
LastUpdate time.Time
|
||||
LastUpdateCount int
|
||||
LastUpdateCountMutex sync.Mutex
|
||||
Config *config.Config
|
||||
}
|
||||
|
||||
type RadarrArr struct {
|
||||
Name string
|
||||
ClientMutex sync.Mutex
|
||||
Client *radarr.Radarr
|
||||
HistoryMutex sync.Mutex
|
||||
@@ -37,4 +68,5 @@ type RadarrArr struct {
|
||||
LastUpdate time.Time
|
||||
LastUpdateCount int
|
||||
LastUpdateCountMutex sync.Mutex
|
||||
Config *config.Config
|
||||
}
|
||||
|
||||
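For clarity, a minimal test-style sketch (not part of this diff) showing what the new CompareFileNamesFuzzy helper above considers equal once extensions, separators and case are stripped:

// compare_fuzzy_example_test.go - illustrative only, assumes the arr package above.
package arr

import "testing"

func TestCompareFileNamesFuzzy(t *testing.T) {
	// Same release name, different separators and a media extension: should match.
	if !CompareFileNamesFuzzy("Some.Show.S01E01.1080p.mkv", "some show s01e01 1080p") {
		t.Error("expected titles to match after normalisation")
	}
	// Genuinely different titles: should not match.
	if CompareFileNamesFuzzy("Some.Show.S01E01.mkv", "Another.Show.S01E01.mkv") {
		t.Error("different titles should not match")
	}
}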
@@ -3,99 +3,218 @@ package config
|
||||
import (
|
||||
"errors"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"strings"
|
||||
|
||||
"github.com/jackdallas/premiumizearr/internal/utils"
|
||||
log "github.com/sirupsen/logrus"
|
||||
|
||||
"os"
|
||||
"path"
|
||||
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrInvalidConfigFile = errors.New("invalid Config File")
|
||||
ErrFailedToFindConfigFile = errors.New("failed to find config file")
|
||||
)
|
||||
|
||||
type Config struct {
|
||||
PremiumizemeAPIKey string `yaml:"PremiumizemeAPIKey"`
|
||||
|
||||
SonarrURL string `yaml:"SonarrURL"`
|
||||
SonarrAPIKey string `yaml:"SonarrAPIKey"`
|
||||
|
||||
RadarrURL string `yaml:"RadarrURL"`
|
||||
RadarrAPIKey string `yaml:"RadarrAPIKey"`
|
||||
|
||||
BlackholeDirectory string `yaml:"BlackholeDirectory"`
|
||||
DownloadsDirectory string `yaml:"DownloadsDirectory"`
|
||||
|
||||
BindIP string `yaml:"bindIP"`
|
||||
BindPort string `yaml:"bindPort"`
|
||||
}
|
||||
|
||||
func loadConfigFromDisk() (Config, error) {
|
||||
var config Config
|
||||
file, err := ioutil.ReadFile("config.yaml")
|
||||
// LoadOrCreateConfig - Loads the config from disk or creates a new one
|
||||
func LoadOrCreateConfig(altConfigLocation string, _appCallback AppCallback) (Config, error) {
|
||||
config, err := loadConfigFromDisk(altConfigLocation)
|
||||
|
||||
if err != nil {
|
||||
return config, ErrFailedToFindConfigFile
|
||||
if err == ErrFailedToFindConfigFile {
|
||||
log.Warn("No config file found, created default config file")
|
||||
config = defaultConfig()
|
||||
}
|
||||
if err == ErrInvalidConfigFile || err == ErrFailedToSaveConfig {
|
||||
return config, err
|
||||
}
|
||||
}
|
||||
|
||||
err = yaml.Unmarshal(file, &config)
|
||||
if err != nil {
|
||||
return config, ErrInvalidConfigFile
|
||||
// Override unzip directory if running in docker
|
||||
if utils.IsRunningInDockerContainer() {
|
||||
log.Info("Running in docker, overriding unzip directory!")
|
||||
config.UnzipDirectory = "/unzip"
|
||||
// Override config data directories if blank
|
||||
if config.BlackholeDirectory == "" {
|
||||
log.Trace("Running in docker, overriding blank directory settings for blackhole directory")
|
||||
config.BlackholeDirectory = "/blackhole"
|
||||
}
|
||||
if config.DownloadsDirectory == "" {
|
||||
log.Trace("Running in docker, overriding blank directory settings for downloads directory")
|
||||
config.DownloadsDirectory = "/downloads"
|
||||
}
|
||||
}
|
||||
|
||||
log.Tracef("Setting config location to %s", altConfigLocation)
|
||||
|
||||
config.appCallback = _appCallback
|
||||
config.altConfigLocation = altConfigLocation
|
||||
|
||||
config.Save()
|
||||
|
||||
return config, nil
|
||||
}
|
||||
|
||||
func createDefaultConfig() error {
|
||||
config := Config{
|
||||
PremiumizemeAPIKey: "",
|
||||
SonarrURL: "http://localhost:8989",
|
||||
SonarrAPIKey: "",
|
||||
RadarrURL: "http://localhost:7878",
|
||||
RadarrAPIKey: "",
|
||||
BlackholeDirectory: "",
|
||||
DownloadsDirectory: "",
|
||||
BindIP: "0.0.0.0",
|
||||
BindPort: "8182",
|
||||
}
|
||||
|
||||
file, err := yaml.Marshal(config)
|
||||
// Save - Saves the config to disk
|
||||
func (c *Config) Save() error {
|
||||
log.Trace("Marshaling & saving config")
|
||||
data, err := yaml.Marshal(*c)
|
||||
if err != nil {
|
||||
log.Error(err)
|
||||
return err
|
||||
}
|
||||
|
||||
err = ioutil.WriteFile("config.yaml", file, 0644)
|
||||
savePath := "./config.yaml"
|
||||
if c.altConfigLocation != "" {
|
||||
savePath = path.Join(c.altConfigLocation, "config.yaml")
|
||||
}
|
||||
|
||||
log.Tracef("Writing config to %s", savePath)
|
||||
err = ioutil.WriteFile(savePath, data, 0644)
|
||||
if err != nil {
|
||||
log.Errorf("Failed to save config file: %+v", err)
|
||||
return err
|
||||
}
|
||||
|
||||
log.Trace("Config saved")
|
||||
return nil
|
||||
}
|
||||
|
||||
func LoadOrCreateConfig(altConfigLocation string) (Config, error) {
|
||||
if altConfigLocation != "" {
|
||||
if _, err := ioutil.ReadFile(altConfigLocation); err != nil {
|
||||
log.Panicf("Failed to find config file at %s Error: %+v", altConfigLocation, err)
|
||||
}
|
||||
}
|
||||
func loadConfigFromDisk(altConfigLocation string) (Config, error) {
|
||||
var config Config
|
||||
|
||||
log.Trace("Trying to load config from disk")
|
||||
configLocation := path.Join(altConfigLocation, "config.yaml")
|
||||
|
||||
log.Tracef("Reading config from %s", configLocation)
|
||||
file, err := ioutil.ReadFile(configLocation)
|
||||
|
||||
config, err := loadConfigFromDisk()
|
||||
if err != nil {
|
||||
if err == ErrFailedToFindConfigFile {
|
||||
err = createDefaultConfig()
|
||||
if err != nil {
|
||||
return config, err
|
||||
}
|
||||
panic("Default config created, please fill it out")
|
||||
}
|
||||
if err == ErrInvalidConfigFile {
|
||||
return config, ErrInvalidConfigFile
|
||||
}
|
||||
}
|
||||
//Clean up url
|
||||
if strings.HasSuffix(config.SonarrURL, ("/")) {
|
||||
config.SonarrURL = config.SonarrURL[:len(config.SonarrURL)-1]
|
||||
log.Trace("Failed to find config file")
|
||||
return config, ErrFailedToFindConfigFile
|
||||
}
|
||||
|
||||
log.Trace("Loading to interface")
|
||||
var configInterface map[interface{}]interface{}
|
||||
err = yaml.Unmarshal(file, &configInterface)
|
||||
if err != nil {
|
||||
log.Errorf("Failed to unmarshal config file: %+v", err)
|
||||
return config, ErrInvalidConfigFile
|
||||
}
|
||||
|
||||
log.Trace("Unmarshalling to struct")
|
||||
err = yaml.Unmarshal(file, &config)
|
||||
if err != nil {
|
||||
log.Errorf("Failed to unmarshal config file: %+v", err)
|
||||
return config, ErrInvalidConfigFile
|
||||
}
|
||||
|
||||
log.Trace("Checking for missing config fields")
|
||||
updated := false
|
||||
|
||||
if configInterface["PollBlackholeDirectory"] == nil {
|
||||
log.Info("PollBlackholeDirectory not set, setting to false")
|
||||
config.PollBlackholeDirectory = false
|
||||
updated = true
|
||||
}
|
||||
|
||||
if configInterface["SimultaneousDownloads"] == nil {
|
||||
log.Info("SimultaneousDownloads not set, setting to 5")
|
||||
config.SimultaneousDownloads = 5
|
||||
updated = true
|
||||
}
|
||||
|
||||
if configInterface["PollBlackholeIntervalMinutes"] == nil {
|
||||
log.Info("PollBlackholeIntervalMinutes not set, setting to 10")
|
||||
config.PollBlackholeIntervalMinutes = 10
|
||||
updated = true
|
||||
}
|
||||
|
||||
if configInterface["ArrHistoryUpdateIntervalSeconds"] == nil {
|
||||
log.Info("ArrHistoryUpdateIntervalSeconds not set, setting to 20")
|
||||
config.ArrHistoryUpdateIntervalSeconds = 20
|
||||
updated = true
|
||||
}
|
||||
|
||||
config.altConfigLocation = altConfigLocation
|
||||
|
||||
if updated {
|
||||
log.Trace("Version updated saving")
|
||||
err = config.Save()
|
||||
|
||||
if err == nil {
|
||||
log.Trace("Config saved")
|
||||
return config, nil
|
||||
} else {
|
||||
log.Errorf("Failed to save config to %s", configLocation)
|
||||
log.Error(err)
|
||||
return config, ErrFailedToSaveConfig
|
||||
}
|
||||
}
|
||||
|
||||
log.Trace("Config loaded")
|
||||
return config, nil
|
||||
}
|
||||
|
||||
func defaultConfig() Config {
|
||||
return Config{
|
||||
PremiumizemeAPIKey: "xxxxxxxxx",
|
||||
Arrs: []ArrConfig{
|
||||
{Name: "Sonarr", URL: "http://localhost:8989", APIKey: "xxxxxxxxx", Type: Sonarr},
|
||||
{Name: "Radarr", URL: "http://localhost:7878", APIKey: "xxxxxxxxx", Type: Radarr},
|
||||
},
|
||||
BlackholeDirectory: "",
|
||||
PollBlackholeDirectory: false,
|
||||
PollBlackholeIntervalMinutes: 10,
|
||||
DownloadsDirectory: "",
|
||||
UnzipDirectory: "",
|
||||
BindIP: "0.0.0.0",
|
||||
BindPort: "8182",
|
||||
WebRoot: "",
|
||||
SimultaneousDownloads: 5,
|
||||
ArrHistoryUpdateIntervalSeconds: 20,
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
ErrUnzipDirectorySetToRoot = errors.New("unzip directory set to root")
|
||||
ErrUnzipDirectoryNotWriteable = errors.New("unzip directory not writeable")
|
||||
)
|
||||
|
||||
func (c *Config) GetUnzipBaseLocation() (string, error) {
|
||||
if c.UnzipDirectory == "" {
|
||||
log.Tracef("Unzip directory not set, using default: %s", os.TempDir())
|
||||
return path.Join(os.TempDir(), "premiumizearrd"), nil
|
||||
}
|
||||
|
||||
if c.UnzipDirectory == "/" || c.UnzipDirectory == "\\" || c.UnzipDirectory == "C:\\" {
|
||||
log.Error("Unzip directory set to root, please set a directory")
|
||||
return "", ErrUnzipDirectorySetToRoot
|
||||
}
|
||||
|
||||
if !utils.IsDirectoryWriteable(c.UnzipDirectory) {
|
||||
log.Errorf("Unzip directory not writeable: %s", c.UnzipDirectory)
|
||||
return c.UnzipDirectory, ErrUnzipDirectoryNotWriteable
|
||||
}
|
||||
|
||||
log.Tracef("Unzip directory set to: %s", c.UnzipDirectory)
|
||||
return c.UnzipDirectory, nil
|
||||
}
|
||||
|
||||
func (c *Config) GetNewUnzipLocation() (string, error) {
|
||||
// Create temp dir in os temp location or unzip-directory
|
||||
tempDir, err := c.GetUnzipBaseLocation()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
log.Trace("Creating unzip directory")
|
||||
err = os.MkdirAll(tempDir, os.ModePerm)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
log.Trace("Creating generated unzip directory")
|
||||
dir, err := ioutil.TempDir(tempDir, "unzip-")
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return dir, nil
|
||||
}
|
||||
|
||||
internal/config/config_update.go (new file, 13 lines)
@@ -0,0 +1,13 @@
package config

func (c *Config) UpdateConfig(_newConfig Config) {
	oldConfig := *c

	//move private fields over
	_newConfig.appCallback = c.appCallback
	_newConfig.altConfigLocation = c.altConfigLocation
	*c = _newConfig

	c.appCallback(oldConfig, *c)
	c.Save()
}
internal/config/types.go (new file, 54 lines)
@@ -0,0 +1,54 @@
package config

import "errors"

var (
	ErrInvalidConfigFile      = errors.New("invalid Config File")
	ErrFailedToFindConfigFile = errors.New("failed to find config file")
	ErrFailedToSaveConfig     = errors.New("failed to save config")
)

// ArrType enum for Sonarr/Radarr
type ArrType string

// AppCallback - Callback for the app to use
type AppCallback func(oldConfig Config, newConfig Config)

const (
	Sonarr ArrType = "Sonarr"
	Radarr ArrType = "Radarr"
)

type ArrConfig struct {
	Name   string  `yaml:"Name" json:"Name"`
	URL    string  `yaml:"URL" json:"URL"`
	APIKey string  `yaml:"APIKey" json:"APIKey"`
	Type   ArrType `yaml:"Type" json:"Type"`
}

type Config struct {
	altConfigLocation string
	appCallback       AppCallback

	//PremiumizemeAPIKey string with yaml and json tag
	PremiumizemeAPIKey string `yaml:"PremiumizemeAPIKey" json:"PremiumizemeAPIKey"`

	Arrs []ArrConfig `yaml:"Arrs" json:"Arrs"`

	BlackholeDirectory           string `yaml:"BlackholeDirectory" json:"BlackholeDirectory"`
	PollBlackholeDirectory       bool   `yaml:"PollBlackholeDirectory" json:"PollBlackholeDirectory"`
	PollBlackholeIntervalMinutes int    `yaml:"PollBlackholeIntervalMinutes" json:"PollBlackholeIntervalMinutes"`

	DownloadsDirectory string `yaml:"DownloadsDirectory" json:"DownloadsDirectory"`

	UnzipDirectory string `yaml:"UnzipDirectory" json:"UnzipDirectory"`

	BindIP   string `yaml:"bindIP" json:"BindIP"`
	BindPort string `yaml:"bindPort" json:"BindPort"`

	WebRoot string `yaml:"WebRoot" json:"WebRoot"`

	SimultaneousDownloads int `yaml:"SimultaneousDownloads" json:"SimultaneousDownloads"`

	ArrHistoryUpdateIntervalSeconds int `yaml:"ArrHistoryUpdateIntervalSeconds" json:"ArrHistoryUpdateIntervalSeconds"`
}
@@ -19,17 +19,16 @@ func NewDirectoryWatcher(path string, recursive bool, matchFunction func(string)
|
||||
}
|
||||
|
||||
func (w *WatchDirectory) Watch() error {
|
||||
watcher, err := fsnotify.NewWatcher()
|
||||
var err error
|
||||
w.Watcher, err = fsnotify.NewWatcher()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer watcher.Close()
|
||||
|
||||
done := make(chan bool)
|
||||
go func() {
|
||||
for {
|
||||
select {
|
||||
case event, ok := <-watcher.Events:
|
||||
case event, ok := <-w.Watcher.Events:
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
@@ -38,7 +37,7 @@ func (w *WatchDirectory) Watch() error {
|
||||
w.CallbackFunction(event.Name)
|
||||
}
|
||||
}
|
||||
case _, ok := <-watcher.Errors:
|
||||
case _, ok := <-w.Watcher.Errors:
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
@@ -52,10 +51,20 @@ func (w *WatchDirectory) Watch() error {
|
||||
return err
|
||||
}
|
||||
|
||||
err = watcher.Add(cleanPath)
|
||||
err = w.Watcher.Add(cleanPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
<-done
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (w *WatchDirectory) UpdatePath(path string) error {
|
||||
w.Watcher.Remove(w.Path)
|
||||
w.Path = path
|
||||
return w.Watcher.Add(w.Path)
|
||||
}
|
||||
|
||||
func (w *WatchDirectory) Stop() error {
|
||||
return w.Watcher.Close()
|
||||
}
|
||||
|
||||
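An illustrative sketch (not part of this diff) of how the watcher API above is consumed; the NewDirectoryWatcher signature is inferred from the calls made in DirectoryWatcherService further down, so treat it as an assumption:

package main

import (
	"strings"

	"github.com/jackdallas/premiumizearr/internal/directory_watcher"
	log "github.com/sirupsen/logrus"
)

func main() {
	w := directory_watcher.NewDirectoryWatcher(
		"/blackhole", // path to watch
		false,        // non-recursive
		func(p string) bool { // match: only care about .nzb/.magnet files
			return strings.HasSuffix(p, ".nzb") || strings.HasSuffix(p, ".magnet")
		},
		func(p string) { // callback for each matching file
			log.Infof("new blackhole file: %s", p)
		},
	)

	go func() {
		if err := w.Watch(); err != nil {
			log.Errorf("watcher stopped: %s", err)
		}
	}()

	// The helpers added in this change let callers retarget or stop the watcher:
	//   w.UpdatePath("/new/blackhole")
	//   w.Stop()
	select {} // keep the example process alive while watching
}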
@@ -1,5 +1,7 @@
|
||||
package directory_watcher
|
||||
|
||||
import "github.com/fsnotify/fsnotify"
|
||||
|
||||
// WatchDirectory watches a directory for changes.
|
||||
type WatchDirectory struct {
|
||||
// Path is the path to the directory to watch.
|
||||
@@ -12,4 +14,6 @@ type WatchDirectory struct {
|
||||
MatchFunction func(string) bool
|
||||
// Callback is the function to call when a file is created that matches with MatchFunction.
|
||||
CallbackFunction func(string)
|
||||
// watcher is the fsnotify watcher.
|
||||
Watcher *fsnotify.Watcher
|
||||
}
|
||||
|
||||
internal/service/arrs_manager_service.go (new file, 98 lines)
@@ -0,0 +1,98 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/jackdallas/premiumizearr/internal/arr"
|
||||
"github.com/jackdallas/premiumizearr/internal/config"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"golift.io/starr"
|
||||
"golift.io/starr/radarr"
|
||||
"golift.io/starr/sonarr"
|
||||
)
|
||||
|
||||
type ArrsManagerService struct {
|
||||
arrs []arr.IArr
|
||||
config *config.Config
|
||||
}
|
||||
|
||||
func (am ArrsManagerService) New() ArrsManagerService {
|
||||
am.arrs = []arr.IArr{}
|
||||
return am
|
||||
}
|
||||
|
||||
func (am *ArrsManagerService) Init(_config *config.Config) {
|
||||
am.config = _config
|
||||
}
|
||||
|
||||
func (am *ArrsManagerService) Start() {
|
||||
am.arrs = []arr.IArr{}
|
||||
log.Debugf("Starting ArrsManagerService")
|
||||
for _, arr_config := range am.config.Arrs {
|
||||
switch arr_config.Type {
|
||||
case config.Sonarr:
|
||||
c := starr.New(arr_config.APIKey, arr_config.URL, 0)
|
||||
wrapper := arr.SonarrArr{
|
||||
Name: arr_config.Name,
|
||||
Client: sonarr.New(c),
|
||||
History: nil,
|
||||
LastUpdate: time.Now(),
|
||||
Config: am.config,
|
||||
}
|
||||
am.arrs = append(am.arrs, &wrapper)
|
||||
log.Tracef("Added Sonarr arr: %s", arr_config.Name)
|
||||
case config.Radarr:
|
||||
c := starr.New(arr_config.APIKey, arr_config.URL, 0)
|
||||
wrapper := arr.RadarrArr{
|
||||
Name: arr_config.Name,
|
||||
Client: radarr.New(c),
|
||||
History: nil,
|
||||
LastUpdate: time.Now(),
|
||||
Config: am.config,
|
||||
}
|
||||
am.arrs = append(am.arrs, &wrapper)
|
||||
log.Tracef("Added Radarr arr: %s", arr_config.Name)
|
||||
default:
|
||||
log.Errorf("Unknown arr type: %s, not adding Arr %s", arr_config.Type, arr_config.Name)
|
||||
}
|
||||
}
|
||||
log.Debugf("Created %d Arrs", len(am.arrs))
|
||||
}
|
||||
|
||||
func (am *ArrsManagerService) Stop() {
|
||||
//noop
|
||||
}
|
||||
|
||||
func (am *ArrsManagerService) ConfigUpdatedCallback(currentConfig config.Config, newConfig config.Config) {
|
||||
if len(currentConfig.Arrs) != len(newConfig.Arrs) {
|
||||
am.Start()
|
||||
return
|
||||
}
|
||||
for i, arr_config := range newConfig.Arrs {
|
||||
if currentConfig.Arrs[i].Type != arr_config.Type ||
|
||||
currentConfig.Arrs[i].APIKey != arr_config.APIKey ||
|
||||
currentConfig.Arrs[i].URL != arr_config.URL {
|
||||
am.Start()
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (am *ArrsManagerService) GetArrs() []arr.IArr {
|
||||
return am.arrs
|
||||
}
|
||||
|
||||
func TestArrConnection(arr config.ArrConfig) error {
|
||||
c := starr.New(arr.APIKey, arr.URL, 0)
|
||||
|
||||
switch arr.Type {
|
||||
case config.Sonarr:
|
||||
_, err := sonarr.New(c).GetSystemStatus()
|
||||
return err
|
||||
case config.Radarr:
|
||||
_, err := radarr.New(c).GetSystemStatus()
|
||||
return err
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
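A short, illustrative call into the TestArrConnection helper above; the URL and API key are placeholders, not working values:

package main

import (
	"github.com/jackdallas/premiumizearr/internal/config"
	"github.com/jackdallas/premiumizearr/internal/service"
	log "github.com/sirupsen/logrus"
)

func main() {
	// Probe a Sonarr instance by fetching its system status.
	err := service.TestArrConnection(config.ArrConfig{
		Name:   "Sonarr",
		URL:    "http://localhost:8989",
		APIKey: "xxxxxxxxx",
		Type:   config.Sonarr,
	})
	if err != nil {
		log.Errorf("Sonarr connection failed: %s", err)
		return
	}
	log.Info("Sonarr connection OK")
}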
@@ -1,7 +1,7 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
@@ -21,6 +21,7 @@ type DirectoryWatcherService struct {
|
||||
Queue *stringqueue.StringQueue
|
||||
status string
|
||||
downloadsFolderID string
|
||||
watchDirectory *directory_watcher.WatchDirectory
|
||||
}
|
||||
|
||||
const (
|
||||
@@ -28,11 +29,32 @@ const (
|
||||
ERROR_ALREADY_UPLOADED = "You already added this job."
|
||||
)
|
||||
|
||||
func NewDirectoryWatcherService(pm *premiumizeme.Premiumizeme, con *config.Config) DirectoryWatcherService {
|
||||
func (DirectoryWatcherService) New() DirectoryWatcherService {
|
||||
return DirectoryWatcherService{
|
||||
premiumizemeClient: pm,
|
||||
config: con,
|
||||
premiumizemeClient: nil,
|
||||
config: nil,
|
||||
Queue: nil,
|
||||
status: "",
|
||||
downloadsFolderID: "",
|
||||
}
|
||||
}
|
||||
|
||||
func (dw *DirectoryWatcherService) Init(premiumizemeClient *premiumizeme.Premiumizeme, config *config.Config) {
|
||||
dw.premiumizemeClient = premiumizemeClient
|
||||
dw.config = config
|
||||
}
|
||||
|
||||
func (dw *DirectoryWatcherService) ConfigUpdatedCallback(currentConfig config.Config, newConfig config.Config) {
|
||||
if currentConfig.BlackholeDirectory != newConfig.BlackholeDirectory {
|
||||
log.Info("Blackhole directory changed, restarting directory watcher...")
|
||||
log.Info("Running initial directory scan...")
|
||||
go dw.directoryScan(dw.config.BlackholeDirectory)
|
||||
dw.watchDirectory.UpdatePath(newConfig.BlackholeDirectory)
|
||||
}
|
||||
|
||||
if currentConfig.PollBlackholeDirectory != newConfig.PollBlackholeDirectory {
|
||||
log.Info("Poll blackhole directory changed, restarting directory watcher...")
|
||||
dw.Start()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -40,49 +62,64 @@ func (dw *DirectoryWatcherService) GetStatus() string {
|
||||
return dw.status
|
||||
}
|
||||
|
||||
//TODO (Radarr): accept paths as a parameter, support multiple paths
|
||||
//Watch: This is the entrypoint for the directory watcher
|
||||
func (dw *DirectoryWatcherService) Watch() {
|
||||
// Start: This is the entrypoint for the directory watcher
|
||||
func (dw *DirectoryWatcherService) Start() {
|
||||
log.Info("Starting directory watcher...")
|
||||
|
||||
dw.downloadsFolderID = utils.GetDownloadsFolderIDFromPremiumizeme(dw.premiumizemeClient)
|
||||
|
||||
log.Info("Clearing tmp directory...")
|
||||
tempDir := utils.GetTempBaseDir()
|
||||
err := os.RemoveAll(tempDir)
|
||||
if err != nil {
|
||||
log.Errorf("Error clearing tmp directory %s", tempDir)
|
||||
}
|
||||
os.Mkdir(tempDir, os.ModePerm)
|
||||
|
||||
log.Info("Creating Queue...")
|
||||
dw.Queue = stringqueue.NewStringQueue()
|
||||
|
||||
log.Info("Starting uploads processor...")
|
||||
go dw.processUploads()
|
||||
|
||||
log.Info("Starting initial directory scans...")
|
||||
go dw.initialDirectoryScan(dw.config.BlackholeDirectory)
|
||||
log.Info("Running initial directory scan...")
|
||||
go dw.directoryScan(dw.config.BlackholeDirectory)
|
||||
|
||||
// Build and start a DirectoryWatcher
|
||||
watcher := directory_watcher.NewDirectoryWatcher(dw.config.BlackholeDirectory,
|
||||
false,
|
||||
dw.checkFile,
|
||||
dw.addFileToQueue,
|
||||
)
|
||||
if dw.watchDirectory != nil {
|
||||
log.Info("Stopping directory watcher...")
|
||||
err := dw.watchDirectory.Stop()
|
||||
if err != nil {
|
||||
log.Errorf("Error stopping directory watcher: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
watcher.Watch()
|
||||
if dw.config.PollBlackholeDirectory {
|
||||
log.Info("Starting directory poller...")
|
||||
go func() {
|
||||
for {
|
||||
if !dw.config.PollBlackholeDirectory {
|
||||
log.Info("Directory poller stopped")
|
||||
break
|
||||
}
|
||||
time.Sleep(time.Duration(dw.config.PollBlackholeIntervalMinutes) * time.Minute)
|
||||
log.Infof("Running directory scan of %s", dw.config.BlackholeDirectory)
|
||||
dw.directoryScan(dw.config.BlackholeDirectory)
|
||||
log.Infof("Scan complete, next scan in %d minutes", dw.config.PollBlackholeIntervalMinutes)
|
||||
}
|
||||
}()
|
||||
} else {
|
||||
log.Info("Starting directory watcher...")
|
||||
dw.watchDirectory = directory_watcher.NewDirectoryWatcher(dw.config.BlackholeDirectory,
|
||||
false,
|
||||
dw.checkFile,
|
||||
dw.addFileToQueue,
|
||||
)
|
||||
dw.watchDirectory.Watch()
|
||||
}
|
||||
}
|
||||
|
||||
func (dw *DirectoryWatcherService) initialDirectoryScan(p string) {
|
||||
log.Trace("Initial directory scan")
|
||||
files, err := ioutil.ReadDir(p)
|
||||
func (dw *DirectoryWatcherService) directoryScan(p string) {
|
||||
log.Trace("Running directory scan")
|
||||
files, err := os.ReadDir(p)
|
||||
if err != nil {
|
||||
log.Errorf("Error with initial directory scan %+v", err)
|
||||
log.Errorf("Error with directory scan %+v", err)
|
||||
return
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
go func(file os.FileInfo) {
|
||||
go func(file fs.DirEntry) {
|
||||
file_path := path.Join(p, file.Name())
|
||||
if dw.checkFile(file_path) {
|
||||
dw.addFileToQueue(file_path)
|
||||
@@ -101,7 +138,7 @@ func (dw *DirectoryWatcherService) checkFile(path string) bool {
|
||||
}
|
||||
|
||||
if fi.IsDir() {
|
||||
log.Errorf("Directory created in blackhole %s ignoring (Warning premiumizearrzed does not look in subfolders!)", path)
|
||||
log.Errorf("Directory created in blackhole %s ignoring (Warning premiumizearrd does not look in subfolders!)", path)
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -119,7 +156,6 @@ func (dw *DirectoryWatcherService) addFileToQueue(path string) {
|
||||
}
|
||||
|
||||
func (dw *DirectoryWatcherService) processUploads() {
|
||||
//TODO: Global running state
|
||||
for {
|
||||
if dw.Queue.Len() < 1 {
|
||||
log.Trace("No files in Queue, sleeping for 10 seconds")
|
||||
@@ -146,7 +182,7 @@ func (dw *DirectoryWatcherService) processUploads() {
|
||||
log.Trace("File already uploaded, removing from Disk")
|
||||
os.Remove(filePath)
|
||||
default:
|
||||
log.Error(err)
|
||||
log.Errorf("Error creating transfer: %s", err)
|
||||
}
|
||||
} else {
|
||||
dw.status = "Okay"
|
||||
@@ -158,7 +194,7 @@ func (dw *DirectoryWatcherService) processUploads() {
|
||||
}
|
||||
time.Sleep(time.Second * time.Duration(sleepTimeSeconds))
|
||||
} else {
|
||||
log.Errorf("Received %s from blackhole Queue. Appears to be an empty path.")
|
||||
log.Error("Received blank string from blackhole Queue.")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
internal/service/download_manager_service.go (new file, 166 lines)
@@ -0,0 +1,166 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/jackdallas/premiumizearr/internal/config"
|
||||
"github.com/jackdallas/premiumizearr/internal/utils"
|
||||
|
||||
"github.com/jackdallas/premiumizearr/pkg/downloadmanager"
|
||||
"github.com/jackdallas/premiumizearr/pkg/premiumizeme"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
type DownloadManagerService struct {
|
||||
downloadManager *downloadmanager.DownloadManager
|
||||
taskRunner *TaskRunnerService
|
||||
premiumizemeClient *premiumizeme.Premiumizeme
|
||||
config *config.Config
|
||||
downloadingIDs map[string]bool
|
||||
|
||||
downloadsFolderID string
|
||||
}
|
||||
|
||||
func (DownloadManagerService) New() DownloadManagerService {
|
||||
return DownloadManagerService{
|
||||
downloadsFolderID: "",
|
||||
downloadManager: &downloadmanager.DownloadManager{},
|
||||
downloadingIDs: make(map[string]bool),
|
||||
}
|
||||
}
|
||||
|
||||
func (manager *DownloadManagerService) Init(_premiumizemeClient *premiumizeme.Premiumizeme, taskRunner *TaskRunnerService, _config *config.Config) {
|
||||
manager.premiumizemeClient = _premiumizemeClient
|
||||
manager.taskRunner = taskRunner
|
||||
manager.config = _config
|
||||
|
||||
manager.downloadsFolderID = utils.GetDownloadsFolderIDFromPremiumizeme(manager.premiumizemeClient)
|
||||
manager.CleanUpUnzipDir()
|
||||
|
||||
log.Info("Starting download manager thread")
|
||||
go manager.downloadManager.Run()
|
||||
log.Info("Creating check premiumize downloads folder task")
|
||||
manager.taskRunner.AddTask("Check Premiumize Downloads Folder", 20*time.Second, manager.TaskCheckPremiumizeDownloadsFolder)
|
||||
}
|
||||
|
||||
func (manager *DownloadManagerService) CleanUpUnzipDir() {
|
||||
log.Info("Cleaning unzip directory")
|
||||
|
||||
unzipBase, err := manager.config.GetUnzipBaseLocation()
|
||||
if err != nil {
|
||||
log.Errorf("Error getting unzip base location: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
err = utils.RemoveContents(unzipBase)
|
||||
if err != nil {
|
||||
log.Errorf("Error cleaning unzip directory: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func (manager *DownloadManagerService) ConfigUpdatedCallback(currentConfig config.Config, newConfig config.Config) {
|
||||
if currentConfig.UnzipDirectory != newConfig.UnzipDirectory {
|
||||
manager.CleanUpUnzipDir()
|
||||
}
|
||||
}
|
||||
|
||||
func (manager *DownloadManagerService) TaskCheckPremiumizeDownloadsFolder() {
|
||||
log.Debug("Running Task CheckPremiumizeDownloadsFolder")
|
||||
|
||||
items, err := manager.premiumizemeClient.ListFolder(manager.downloadsFolderID)
|
||||
if err != nil {
|
||||
log.Errorf("Error listing downloads folder: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
for _, item := range items {
|
||||
if _, ok := manager.downloadingIDs[item.ID]; ok {
|
||||
continue
|
||||
}
|
||||
|
||||
manager.downloadingIDs[item.ID] = true
|
||||
manager.downloadFinishedTransfer(item, manager.config.DownloadsDirectory)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func (manager *DownloadManagerService) downloadFinishedTransfer(item premiumizeme.Item, downloadDirectory string) {
|
||||
log.Debug("Downloading: ", item.Name)
|
||||
log.Tracef("%+v", item)
|
||||
var link string
|
||||
var err error
|
||||
if item.Type == "file" {
|
||||
link, err = manager.premiumizemeClient.GenerateZippedFileLink(item.ID)
|
||||
} else if item.Type == "folder" {
|
||||
link, err = manager.premiumizemeClient.GenerateZippedFolderLink(item.ID)
|
||||
} else {
|
||||
log.Errorf("Item is not of type 'file' or 'folder' !! Can't download %s", item.Name)
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
log.Errorf("Error generating download link: %s", err)
|
||||
return
|
||||
}
|
||||
log.Trace("Downloading from: ", link)
|
||||
|
||||
tempDir, err := manager.config.GetNewUnzipLocation()
|
||||
if err != nil {
|
||||
log.Errorf("Could not create temp dir: %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
splitString := strings.Split(link, "/")
|
||||
savePath := path.Join(tempDir, splitString[len(splitString)-1])
|
||||
log.Trace("Downloading to: ", savePath)
|
||||
|
||||
out, err := os.Create(savePath)
|
||||
if err != nil {
|
||||
log.Errorf("Could not create save path: %s", err)
|
||||
return
|
||||
}
|
||||
defer out.Close()
|
||||
|
||||
transfer, err := manager.downloadManager.AddTransfer(link, savePath)
|
||||
if err != nil {
|
||||
log.Errorf("Could not add transfer: %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
go func() {
|
||||
<-transfer.Finished
|
||||
|
||||
if transfer.GetStatus() == downloadmanager.STATUS_ERROR || transfer.GetStatus() == downloadmanager.STATUS_CANCELED {
|
||||
log.Errorf("Could not download file: %s", strings.Join(transfer.GetErrorStrings(), ", "))
|
||||
return
|
||||
}
|
||||
|
||||
unzipped := true
|
||||
log.Tracef("Unzipping %s to %s", savePath, downloadDirectory)
|
||||
err = utils.Unzip(savePath, downloadDirectory)
|
||||
if err != nil {
|
||||
log.Errorf("Could not unzip file: %s", err)
|
||||
unzipped = false
|
||||
}
|
||||
|
||||
log.Tracef("Removing zip %s from system", savePath)
|
||||
err = os.RemoveAll(savePath)
|
||||
if err != nil {
|
||||
log.Errorf("Could not remove zip: %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
if unzipped {
|
||||
err = manager.premiumizemeClient.DeleteFolder(item.ID)
|
||||
if err != nil {
|
||||
log.Errorf("Error deleting folder on premiumize.me: %s", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
}()
|
||||
}
|
||||
internal/service/service.go (new file, 12 lines)
@@ -0,0 +1,12 @@
package service

import (
	"github.com/jackdallas/premiumizearr/internal/config"
)

//Service interface
type Service interface {
	New() (*config.Config, error)
	Start() error
	Stop() error
}
internal/service/task_runner_service.go (new file, 68 lines)
@@ -0,0 +1,68 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/jackdallas/premiumizearr/internal/config"
|
||||
)
|
||||
|
||||
type ServiceTask struct {
|
||||
TaskName string `json:"task_name"`
|
||||
LastCompleted time.Time `json:"last_completed"`
|
||||
Interval time.Duration `json:"interval"`
|
||||
IsRunning bool `json:"is_running"`
|
||||
function func()
|
||||
}
|
||||
|
||||
type TaskRunnerService struct {
|
||||
tasks []ServiceTask
|
||||
tasksMutex *sync.RWMutex
|
||||
config *config.Config
|
||||
}
|
||||
|
||||
func (TaskRunnerService) New() TaskRunnerService {
|
||||
return TaskRunnerService{
|
||||
tasks: []ServiceTask{},
|
||||
tasksMutex: &sync.RWMutex{},
|
||||
}
|
||||
}
|
||||
|
||||
func (manager *TaskRunnerService) Init(config *config.Config) {
|
||||
manager.config = config
|
||||
}
|
||||
|
||||
func (manager *TaskRunnerService) AddTask(taskName string, interval time.Duration, function func()) {
|
||||
manager.tasksMutex.Lock()
|
||||
defer manager.tasksMutex.Unlock()
|
||||
manager.tasks = append(manager.tasks, ServiceTask{
|
||||
TaskName: taskName,
|
||||
LastCompleted: time.Time{},
|
||||
Interval: interval,
|
||||
IsRunning: false,
|
||||
function: function,
|
||||
})
|
||||
}
|
||||
|
||||
func (manager *TaskRunnerService) Start() {
|
||||
go func() {
|
||||
for {
|
||||
manager.tasksMutex.Lock()
|
||||
for _, task := range manager.tasks {
|
||||
if task.IsRunning {
|
||||
continue
|
||||
}
|
||||
if time.Since(task.LastCompleted) > task.Interval {
|
||||
task.IsRunning = true
|
||||
go func(task ServiceTask) {
|
||||
task.function()
|
||||
task.LastCompleted = time.Now()
|
||||
task.IsRunning = false
|
||||
}(task)
|
||||
}
|
||||
}
|
||||
manager.tasksMutex.Unlock()
|
||||
time.Sleep(time.Millisecond * 50)
|
||||
}
|
||||
}()
|
||||
}
|
||||
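A minimal usage sketch (not part of this diff) for registering and running a periodic task with the TaskRunnerService defined above:

package main

import (
	"time"

	"github.com/jackdallas/premiumizearr/internal/service"
	log "github.com/sirupsen/logrus"
)

func main() {
	runner := service.TaskRunnerService{}.New()

	// Fire roughly every 20 seconds; the runner polls every 50ms and skips
	// tasks that are still marked as running.
	runner.AddTask("Example Task", 20*time.Second, func() {
		log.Info("task ran")
	})

	runner.Start() // spawns the scheduling goroutine
	select {}      // keep the process alive for the example
}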
@@ -1,71 +1,75 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path"
|
||||
"strings"
|
||||
"sync"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/jackdallas/premiumizearr/internal/arr"
|
||||
"github.com/jackdallas/premiumizearr/internal/config"
|
||||
"github.com/jackdallas/premiumizearr/internal/progress_downloader"
|
||||
"github.com/jackdallas/premiumizearr/internal/utils"
|
||||
"github.com/jackdallas/premiumizearr/pkg/premiumizeme"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
type DownloadDetails struct {
|
||||
Added time.Time
|
||||
Name string
|
||||
ProgressDownloader *progress_downloader.WriteCounter
|
||||
}
|
||||
|
||||
type TransferManagerService struct {
|
||||
premiumizemeClient *premiumizeme.Premiumizeme
|
||||
arrs *[]arr.IArr
|
||||
arrsManager *ArrsManagerService
|
||||
config *config.Config
|
||||
lastUpdated int64
|
||||
transfers []premiumizeme.Transfer
|
||||
runningTask bool
|
||||
downloadListMutex *sync.Mutex
|
||||
downloadList map[string]*DownloadDetails
|
||||
status string
|
||||
downloadsFolderID string
|
||||
}
|
||||
|
||||
func NewTransferManagerService(pme *premiumizeme.Premiumizeme, arrs *[]arr.IArr, config *config.Config) TransferManagerService {
|
||||
return TransferManagerService{
|
||||
premiumizemeClient: pme,
|
||||
arrs: arrs,
|
||||
config: config,
|
||||
lastUpdated: time.Now().Unix(),
|
||||
transfers: make([]premiumizeme.Transfer, 0),
|
||||
runningTask: false,
|
||||
downloadListMutex: &sync.Mutex{},
|
||||
downloadList: make(map[string]*DownloadDetails, 0),
|
||||
status: "",
|
||||
downloadsFolderID: "",
|
||||
// Handle
|
||||
func (t TransferManagerService) New() TransferManagerService {
|
||||
t.premiumizemeClient = nil
|
||||
t.arrsManager = nil
|
||||
t.config = nil
|
||||
t.lastUpdated = time.Now().Unix()
|
||||
t.transfers = make([]premiumizeme.Transfer, 0)
|
||||
t.runningTask = false
|
||||
t.status = ""
|
||||
return t
|
||||
}
|
||||
|
||||
func (t *TransferManagerService) Init(pme *premiumizeme.Premiumizeme, arrsManager *ArrsManagerService, config *config.Config) {
|
||||
t.premiumizemeClient = pme
|
||||
t.arrsManager = arrsManager
|
||||
t.config = config
|
||||
t.CleanUpUnzipDir()
|
||||
}
|
||||
|
||||
func (t *TransferManagerService) CleanUpUnzipDir() {
|
||||
log.Info("Cleaning unzip directory")
|
||||
|
||||
unzipBase, err := t.config.GetUnzipBaseLocation()
|
||||
if err != nil {
|
||||
log.Errorf("Error getting unzip base location: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
err = utils.RemoveContents(unzipBase)
|
||||
if err != nil {
|
||||
log.Errorf("Error cleaning unzip directory: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func (manager *TransferManagerService) ConfigUpdatedCallback(currentConfig config.Config, newConfig config.Config) {
|
||||
//NOOP
|
||||
}
|
||||
|
||||
func (manager *TransferManagerService) Run(interval time.Duration) {
|
||||
manager.downloadsFolderID = utils.GetDownloadsFolderIDFromPremiumizeme(manager.premiumizemeClient)
|
||||
for {
|
||||
manager.runningTask = true
|
||||
manager.TaskUpdateTransfersList()
|
||||
//TODO: Seperate loop maybe
|
||||
manager.TaskCheckPremiumizeDownloadsFolder()
|
||||
manager.runningTask = false
|
||||
manager.lastUpdated = time.Now().Unix()
|
||||
time.Sleep(interval)
|
||||
}
|
||||
}
|
||||
|
||||
func (manager *TransferManagerService) GetDownloads() map[string]*DownloadDetails {
|
||||
return manager.downloadList
|
||||
}
|
||||
|
||||
func (manager *TransferManagerService) GetTransfers() *[]premiumizeme.Transfer {
|
||||
return &manager.transfers
|
||||
}
|
||||
@@ -73,18 +77,48 @@ func (manager *TransferManagerService) GetStatus() string {
|
||||
return manager.status
|
||||
}
|
||||
|
||||
func (manager *TransferManagerService) updateTransfers(transfers []premiumizeme.Transfer) {
|
||||
manager.transfers = transfers
|
||||
}
|
||||
|
||||
func (manager *TransferManagerService) TaskUpdateTransfersList() {
|
||||
log.Debug("Running Task UpdateTransfersList")
|
||||
transfers, err := manager.premiumizemeClient.GetTransfers()
|
||||
if err != nil {
|
||||
log.Error(err)
|
||||
log.Errorf("Error getting transfers: %s", err.Error())
|
||||
return
|
||||
}
|
||||
manager.updateTransfers(transfers)
|
||||
|
||||
log.Tracef("Checking %d transfers against %d Arr clients", len(transfers), len(manager.arrsManager.GetArrs()))
|
||||
earlyReturn := false
|
||||
|
||||
if len(transfers) == 0 {
|
||||
manager.status = "No transfers"
|
||||
earlyReturn = true
|
||||
} else {
|
||||
manager.status = fmt.Sprintf("Got %d transfers", len(transfers))
|
||||
}
|
||||
|
||||
if len(manager.arrsManager.GetArrs()) == 0 {
|
||||
manager.status = fmt.Sprintf("%s, no ARRs available", manager.status)
|
||||
earlyReturn = true
|
||||
}
|
||||
//else {
|
||||
// //TODO: Test
|
||||
// // if manager.status[len(manager.status)-19:] == ", no ARRs available" {
|
||||
// // manager.status = manager.status[:len(manager.status)-19]
|
||||
// // }
|
||||
// fmt.Print(manager.status)
|
||||
// }
|
||||
|
||||
if earlyReturn {
|
||||
return
|
||||
}
|
||||
|
||||
for _, transfer := range transfers {
|
||||
found := false
|
||||
for _, arr := range *manager.arrs {
|
||||
for _, arr := range manager.arrsManager.GetArrs() {
|
||||
if found {
|
||||
break
|
||||
}
|
||||
@@ -97,143 +131,10 @@ func (manager *TransferManagerService) TaskUpdateTransfersList() {
|
||||
}
|
||||
log.Tracef("Found %s in %s history", transfer.Name, arr.GetArrName())
|
||||
found = true
|
||||
log.Debugf("Processing transfer that has errored: ", transfer.Name)
|
||||
log.Debugf("Processing transfer that has errored: %s", transfer.Name)
|
||||
go arr.HandleErrorTransfer(&transfer, arrID, manager.premiumizemeClient)
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (manager *TransferManagerService) TaskCheckPremiumizeDownloadsFolder() {
|
||||
log.Debug("Running Task CheckPremiumizeDownloadsFolder")
|
||||
items, err := manager.premiumizemeClient.ListFolder(manager.downloadsFolderID)
|
||||
if err != nil {
|
||||
log.Error(err)
|
||||
return
|
||||
}
|
||||
|
||||
for _, item := range items {
|
||||
log.Debugf("Processing completed item: %s", item.Name)
|
||||
go manager.HandleFinishedItem(item, manager.config.DownloadsDirectory)
|
||||
}
|
||||
}
|
||||
|
||||
func (manager *TransferManagerService) updateTransfers(transfers []premiumizeme.Transfer) {
|
||||
manager.transfers = transfers
|
||||
}
|
||||
|
||||
func (manager *TransferManagerService) addDownload(item *premiumizeme.Item) {
|
||||
manager.downloadListMutex.Lock()
|
||||
defer manager.downloadListMutex.Unlock()
|
||||
|
||||
manager.downloadList[item.Name] = &DownloadDetails{
|
||||
Added: time.Now(),
|
||||
Name: item.Name,
|
||||
ProgressDownloader: progress_downloader.NewWriteCounter(),
|
||||
}
|
||||
}
|
||||
|
||||
func (manager *TransferManagerService) removeDownload(name string) {
|
||||
manager.downloadListMutex.Lock()
|
||||
defer manager.downloadListMutex.Unlock()
|
||||
|
||||
delete(manager.downloadList, name)
|
||||
}
|
||||
|
||||
func (manager *TransferManagerService) downloadExists(itemName string) bool {
|
||||
manager.downloadListMutex.Lock()
|
||||
defer manager.downloadListMutex.Unlock()
|
||||
|
||||
for _, dl := range manager.downloadList {
|
||||
if dl.Name == itemName {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// Ran in a goroutine
|
||||
func (manager *TransferManagerService) HandleFinishedItem(item premiumizeme.Item, downloadDirectory string) {
|
||||
if manager.downloadExists(item.Name) {
|
||||
log.Tracef("Transfer %s is already downloading", item.Name)
|
||||
return
|
||||
}
|
||||
|
||||
manager.addDownload(&item)
|
||||
//Create entry in downloads map to lock item
|
||||
// manager.downloadList[item.Name] = progress_downloader.NewWriteCounter()
|
||||
|
||||
log.Debug("Downloading: ", item.Name)
|
||||
log.Tracef("%+v", item)
|
||||
var link string
|
||||
var err error
|
||||
if item.Type == "file" {
|
||||
link, err = manager.premiumizemeClient.GenerateZippedFileLink(item.ID)
|
||||
} else if item.Type == "folder" {
|
||||
link, err = manager.premiumizemeClient.GenerateZippedFolderLink(item.ID)
|
||||
} else {
|
||||
log.Errorf("Item is not of type 'file' or 'folder' !! Can't download %s", item.Name)
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
log.Error(err)
|
||||
manager.removeDownload(item.Name)
|
||||
return
|
||||
}
|
||||
log.Trace("Downloading: ", link)
|
||||
|
||||
tempDir, err := utils.GetTempDir()
|
||||
if err != nil {
|
||||
log.Errorf("Could not create temp dir: %s", err)
|
||||
manager.removeDownload(item.Name)
|
||||
return
|
||||
}
|
||||
|
||||
splitString := strings.Split(link, "/")
|
||||
savePath := path.Join(tempDir, splitString[len(splitString)-1])
|
||||
log.Trace("Downloading to: ", savePath)
|
||||
|
||||
out, err := os.Create(savePath)
|
||||
if err != nil {
|
||||
log.Errorf("Could not create save path: %s", err)
|
||||
manager.removeDownload(item.Name)
|
||||
return
|
||||
}
|
||||
defer out.Close()
|
||||
|
||||
err = progress_downloader.DownloadFile(link, savePath, manager.downloadList[item.Name].ProgressDownloader)
|
||||
|
||||
if err != nil {
|
||||
log.Errorf("Could not download file: %s", err)
|
||||
manager.removeDownload(item.Name)
|
||||
return
|
||||
}
|
||||
|
||||
log.Tracef("Unzipping %s to %s", savePath, downloadDirectory)
|
||||
err = utils.Unzip(savePath, downloadDirectory)
|
||||
if err != nil {
|
||||
log.Errorf("Could not unzip file: %s", err)
|
||||
manager.removeDownload(item.Name)
|
||||
return
|
||||
}
|
||||
|
||||
log.Tracef("Removing zip %s from system", savePath)
|
||||
err = os.RemoveAll(savePath)
|
||||
if err != nil {
|
||||
manager.removeDownload(item.Name)
|
||||
log.Errorf("Could not remove zip: %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
err = manager.premiumizemeClient.DeleteFolder(item.ID)
|
||||
if err != nil {
|
||||
manager.removeDownload(item.Name)
|
||||
log.Error(err)
|
||||
return
|
||||
}
|
||||
|
||||
//Remove download entry from downloads map
|
||||
manager.removeDownload(item.Name)
|
||||
}
|
||||
|
||||
internal/service/web_service.go (new file, 144 lines)
@@ -0,0 +1,144 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/jackdallas/premiumizearr/internal/config"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
type IndexTemplates struct {
|
||||
RootPath string
|
||||
}
|
||||
|
||||
var indexBytes []byte
|
||||
|
||||
type WebServerService struct {
|
||||
transferManager *TransferManagerService
|
||||
directoryWatcherService *DirectoryWatcherService
|
||||
arrsManagerService *ArrsManagerService
|
||||
config *config.Config
|
||||
srv *http.Server
|
||||
}
|
||||
|
||||
func (s WebServerService) New() WebServerService {
|
||||
s.config = nil
|
||||
s.transferManager = nil
|
||||
s.directoryWatcherService = nil
|
||||
s.arrsManagerService = nil
|
||||
s.srv = nil
|
||||
return s
|
||||
}
|
||||
|
||||
func (s *WebServerService) ConfigUpdatedCallback(currentConfig config.Config, newConfig config.Config) {
|
||||
if currentConfig.BindIP != newConfig.BindIP ||
|
||||
currentConfig.BindPort != newConfig.BindPort ||
|
||||
currentConfig.WebRoot != newConfig.WebRoot {
|
||||
log.Tracef("Config updated, restarting web server...")
|
||||
s.srv.Close()
|
||||
s.Start()
|
||||
}
|
||||
}
|
||||
|
||||
func (s *WebServerService) Init(transferManager *TransferManagerService, directoryWatcher *DirectoryWatcherService, arrManager *ArrsManagerService, config *config.Config) {
|
||||
s.transferManager = transferManager
|
||||
s.directoryWatcherService = directoryWatcher
|
||||
s.arrsManagerService = arrManager
|
||||
s.config = config
|
||||
}
|
||||
|
||||
func (s *WebServerService) Start() {
|
||||
log.Info("Starting web server...")
|
||||
tmpl, err := template.ParseFiles("./static/index.html")
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
var ibytes bytes.Buffer
|
||||
err = tmpl.Execute(&ibytes, &IndexTemplates{s.config.WebRoot})
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
indexBytes = ibytes.Bytes()
|
||||
|
||||
spa := spaHandler{
|
||||
staticPath: "static",
|
||||
indexPath: "index.html",
|
||||
webRoot: s.config.WebRoot,
|
||||
}
|
||||
|
||||
r := mux.NewRouter()
|
||||
|
||||
r.HandleFunc("/api/transfers", s.TransfersHandler)
|
||||
r.HandleFunc("/api/downloads", s.DownloadsHandler)
|
||||
r.HandleFunc("/api/blackhole", s.BlackholeHandler)
|
||||
r.HandleFunc("/api/config", s.ConfigHandler)
|
||||
r.HandleFunc("/api/testArr", s.TestArrHandler)
|
||||
|
||||
r.PathPrefix("/").Handler(spa)
|
||||
|
||||
address := fmt.Sprintf("%s:%s", s.config.BindIP, s.config.BindPort)
|
||||
|
||||
s.srv = &http.Server{
|
||||
Handler: r,
|
||||
Addr: address,
|
||||
// Good practice: enforce timeouts for servers you create!
|
||||
WriteTimeout: 15 * time.Second,
|
||||
ReadTimeout: 15 * time.Second,
|
||||
}
|
||||
|
||||
log.Infof("Web server started on %s", address)
|
||||
|
||||
go s.srv.ListenAndServe()
|
||||
}
|
||||
|
||||
// Shamelessly stolen from mux examples https://github.com/gorilla/mux#examples
|
||||
type spaHandler struct {
|
||||
staticPath string
|
||||
indexPath string
|
||||
webRoot string
|
||||
}
|
||||
|
||||
func (h spaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||
// get the absolute path to prevent directory traversal
|
||||
path, err := filepath.Abs(r.URL.Path)
|
||||
if err != nil {
|
||||
// if we failed to get the absolute path respond with a 400 bad request
|
||||
// and stop
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
if h.webRoot != "" {
|
||||
path = strings.Replace(path, h.webRoot, "", 1)
|
||||
}
|
||||
// prepend the path with the path to the static directory
|
||||
path = filepath.Join(h.staticPath, path)
|
||||
|
||||
// check whether a file exists at the given path
|
||||
_, err = os.Stat(path)
|
||||
if os.IsNotExist(err) || strings.HasSuffix(path, h.staticPath) {
|
||||
// file does not exist, serve index.html
|
||||
// http.ServeFile(w, r, filepath.Join(h.staticPath, h.indexPath))
|
||||
// file does not exist, serve index.html template
|
||||
w.Write(indexBytes)
|
||||
return
|
||||
} else if err != nil {
|
||||
// if we got an error (that wasn't that the file doesn't exist) stating the
|
||||
// file, return a 500 internal server error and stop
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
r.URL.Path = strings.Replace(path, h.staticPath, "", -1)
|
||||
// otherwise, use http.FileServer to serve the static dir
|
||||
http.FileServer(http.Dir(h.staticPath)).ServeHTTP(w, r)
|
||||
}
|
||||
internal/service/web_service_config_routes.go (new file, 56 lines)
@@ -0,0 +1,56 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
"github.com/jackdallas/premiumizearr/internal/config"
|
||||
)
|
||||
|
||||
type ConfigChangeResponse struct {
|
||||
Succeeded bool `json:"succeeded"`
|
||||
Status string `json:"status"`
|
||||
}
|
||||
|
||||
func (s *WebServerService) ConfigHandler(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
switch r.Method {
|
||||
case http.MethodGet:
|
||||
data, err := json.Marshal(s.config)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.Write(data)
|
||||
case http.MethodPost:
|
||||
var newConfig config.Config
|
||||
err := json.NewDecoder(r.Body).Decode(&newConfig)
|
||||
if err != nil {
|
||||
EncodeAndWriteConfigChangeResponse(w, &ConfigChangeResponse{
|
||||
Succeeded: false,
|
||||
Status: fmt.Sprintf("Config failed to update %s", err.Error()),
|
||||
})
|
||||
return
|
||||
}
|
||||
s.config.UpdateConfig(newConfig)
|
||||
EncodeAndWriteConfigChangeResponse(w, &ConfigChangeResponse{
|
||||
Succeeded: true,
|
||||
Status: "Config updated",
|
||||
})
|
||||
default:
|
||||
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func EncodeAndWriteConfigChangeResponse(w http.ResponseWriter, resp *ConfigChangeResponse) {
|
||||
data, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.Write(data)
|
||||
}
|
||||
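An illustrative client for the /api/config route above; the host and port are the defaults from defaultConfig(), so adjust for your deployment. Note that the handler decodes the body into a fresh Config and replaces the whole struct via UpdateConfig, so a client should send back the complete config rather than a partial patch:

package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Fetch the current config first...
	resp, err := http.Get("http://localhost:8182/api/config")
	if err != nil {
		panic(err)
	}
	current, _ := io.ReadAll(resp.Body)
	resp.Body.Close()
	fmt.Println("current config:", string(current))

	// ...then post back a (possibly edited) full config document.
	resp, err = http.Post("http://localhost:8182/api/config", "application/json", bytes.NewReader(current))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	status, _ := io.ReadAll(resp.Body)
	fmt.Println("update response:", string(status))
}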
internal/service/web_service_handlers.go (new file, 134 lines)
@@ -0,0 +1,134 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"path"
|
||||
|
||||
"github.com/jackdallas/premiumizearr/internal/config"
|
||||
"github.com/jackdallas/premiumizearr/pkg/premiumizeme"
|
||||
)
|
||||
|
||||
type TransfersResponse struct {
|
||||
Transfers []premiumizeme.Transfer `json:"data"`
|
||||
Status string `json:"status"`
|
||||
}
|
||||
|
||||
func (s *WebServerService) TransfersHandler(w http.ResponseWriter, r *http.Request) {
|
||||
var resp TransfersResponse
|
||||
resp.Transfers = *s.transferManager.GetTransfers()
|
||||
resp.Status = s.transferManager.GetStatus()
|
||||
data, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.Write(data)
|
||||
}
|
||||
|
||||
type BlackholeFile struct {
|
||||
ID int `json:"id"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
type BlackholeResponse struct {
|
||||
BlackholeFiles []BlackholeFile `json:"data"`
|
||||
Status string `json:"status"`
|
||||
}
|
||||
|
||||
type Download struct {
|
||||
ID int64 `json:"id"`
|
||||
Added int64 `json:"added"`
|
||||
Name string `json:"name"`
|
||||
Progress string `json:"progress"`
|
||||
Speed string `json:"speed"`
|
||||
}
|
||||
type DownloadsResponse struct {
|
||||
Downloads []Download `json:"data"`
|
||||
Status string `json:"status"`
|
||||
}
|
||||
|
||||
func (s *WebServerService) DownloadsHandler(w http.ResponseWriter, r *http.Request) {
|
||||
var resp DownloadsResponse
|
||||
|
||||
if s.transferManager == nil {
|
||||
resp.Status = "Not Initialized"
|
||||
} else {
|
||||
// for _, v := range s.transferManager.GetDownloads() {
|
||||
// resp.Downloads = append(resp.Downloads, Download{
|
||||
// ID: v.ID,
|
||||
// Added: v.Added.Unix(),
|
||||
// Name: v.Name,
|
||||
// Progress: v.ProgressDownloader.GetProgress(),
|
||||
// Speed: v.ProgressDownloader.GetSpeed(),
|
||||
// })
|
||||
// }
|
||||
resp.Status = ""
|
||||
}
|
||||
|
||||
data, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.Write(data)
|
||||
}
|
||||
|
||||
func (s *WebServerService) BlackholeHandler(w http.ResponseWriter, r *http.Request) {
|
||||
var resp BlackholeResponse
|
||||
|
||||
if s.directoryWatcherService == nil {
|
||||
resp.Status = "Not Initialized"
|
||||
} else {
|
||||
for i, n := range s.directoryWatcherService.Queue.GetQueue() {
|
||||
name := path.Base(n)
|
||||
resp.BlackholeFiles = append(resp.BlackholeFiles, BlackholeFile{
|
||||
ID: i,
|
||||
Name: name,
|
||||
})
|
||||
}
|
||||
|
||||
resp.Status = s.directoryWatcherService.GetStatus()
|
||||
}
|
||||
|
||||
data, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.Write(data)
|
||||
}
|
||||
|
||||
type TestArrResponse struct {
|
||||
Status string `json:"status"`
|
||||
Succeeded bool `json:"succeeded"`
|
||||
}
|
||||
|
||||
func (s *WebServerService) TestArrHandler(w http.ResponseWriter, r *http.Request) {
|
||||
var arr config.ArrConfig
|
||||
err := json.NewDecoder(r.Body).Decode(&arr)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
err = TestArrConnection(arr)
|
||||
|
||||
var resp TestArrResponse
|
||||
if err != nil {
|
||||
resp.Status = err.Error()
|
||||
resp.Succeeded = false
|
||||
} else {
|
||||
resp.Succeeded = true
|
||||
}
|
||||
|
||||
data, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.Write(data)
|
||||
}
|
||||
@@ -4,9 +4,7 @@ import (
|
||||
"archive/zip"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
@@ -14,7 +12,7 @@ import (
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
func StripDownloadTypesExtention(fileName string) string {
|
||||
func StripDownloadTypesExtension(fileName string) string {
|
||||
var exts = [...]string{".nzb", ".magnet"}
|
||||
for _, ext := range exts {
|
||||
fileName = strings.TrimSuffix(fileName, ext)
|
||||
@@ -23,19 +21,13 @@ func StripDownloadTypesExtention(fileName string) string {
|
||||
return fileName
|
||||
}
|
||||
|
||||
func GetTempBaseDir() string {
|
||||
return path.Join(os.TempDir(), "premiumizearrd")
|
||||
}
|
||||
|
||||
func GetTempDir() (string, error) {
|
||||
// Create temp dir in os temp location
|
||||
tempDir := GetTempBaseDir()
|
||||
err := os.Mkdir(tempDir, os.ModePerm)
|
||||
dir, err := ioutil.TempDir(tempDir, "unzip-")
|
||||
if err != nil {
|
||||
return "", err
|
||||
func StripMediaTypesExtension(fileName string) string {
|
||||
var exts = [...]string{".mkv", ".mp4", ".avi", ".mov", ".flv", ".wmv", ".mpg", ".mpeg", ".m4v", ".3gp", ".3g2", ".m2ts", ".mts", ".ts", ".webm", ".m4a", ".m4b", ".m4p", ".m4r", ".m4v"}
|
||||
for _, ext := range exts {
|
||||
fileName = strings.TrimSuffix(fileName, ext)
|
||||
}
|
||||
return dir, nil
|
||||
|
||||
return fileName
|
||||
}
|
||||
|
||||
// https://golangcode.com/unzip-files-in-go/
|
||||
@@ -101,10 +93,11 @@ func StringInSlice(a string, list []string) int {

func GetDownloadsFolderIDFromPremiumizeme(premiumizemeClient *premiumizeme.Premiumizeme) string {
	var downloadsFolderID string

	folders, err := premiumizemeClient.GetFolders()
	if err != nil {
		log.Errorf("Error getting folders: %s", err)
		log.Fatalf("Cannot read folders from premiumize.me, exiting!")
		log.Fatal("Cannot read folders from premiumize.me, application will not run!")
	}

	const folderName = "arrDownloads"
@@ -119,10 +112,71 @@ func GetDownloadsFolderIDFromPremiumizeme(premiumizemeClient *premiumizeme.Premi
	if len(downloadsFolderID) == 0 {
		id, err := premiumizemeClient.CreateFolder(folderName)
		if err != nil {
			log.Fatalf("Cannot create downloads folder on premiumize.me, exiting! %+v", err)
			log.Errorf("Cannot create downloads folder on premiumize.me, application will not run correctly! %+v", err)
		}
		downloadsFolderID = id
	}

	return downloadsFolderID
}

func EnvOrDefault(envName string, defaultValue string) string {
	envValue := os.Getenv(envName)
	if len(envValue) == 0 {
		return defaultValue
	}
	return envValue
}

func IsRunningInDockerContainer() bool {
	// docker creates a .dockerenv file at the root
	// of the directory tree inside the container.
	// if this file exists then the viewer is running
	// from inside a container so return true

	if _, err := os.Stat("/.dockerenv"); err == nil {
		return true
	}

	return false
}

func IsDirectoryWriteable(path string) bool {
	if _, err := os.Stat(path); os.IsNotExist(err) {
		log.Errorf("Directory does not exist: %s", path)
		return false
	}

	if _, err := os.Create(path + "/test.txt"); err != nil {
		log.Errorf("Cannot write test.txt to directory: %s", path)
		return false
	}

	// Delete test file
	if err := os.Remove(path + "/test.txt"); err != nil {
		log.Errorf("Cannot delete test.txt file in: %s", path)
		return false
	}

	return true
}

// https://stackoverflow.com/questions/33450980/how-to-remove-all-contents-of-a-directory-using-golang
func RemoveContents(dir string) error {
	d, err := os.Open(dir)
	if err != nil {
		return err
	}
	defer d.Close()
	names, err := d.Readdirnames(-1)
	if err != nil {
		return err
	}
	for _, name := range names {
		err = os.RemoveAll(filepath.Join(dir, name))
		if err != nil {
			return err
		}
	}
	return nil
}
@@ -1,201 +0,0 @@
|
||||
package web_service
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"net/http"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/jackdallas/premiumizearr/internal/config"
|
||||
"github.com/jackdallas/premiumizearr/internal/service"
|
||||
"github.com/jackdallas/premiumizearr/pkg/premiumizeme"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
type IndexTemplates struct {
|
||||
RootPath string
|
||||
}
|
||||
|
||||
var indexBytes []byte
|
||||
|
||||
const webRoot = "premiumizearr"
|
||||
|
||||
type server struct {
|
||||
transferManager *service.TransferManagerService
|
||||
directoryWatcherService *service.DirectoryWatcherService
|
||||
}
|
||||
|
||||
// http Router
|
||||
func StartWebServer(transferManager *service.TransferManagerService, directoryWatcher *service.DirectoryWatcherService, config *config.Config) {
|
||||
tmpl, err := template.ParseFiles("./static/index.html")
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
var ibytes bytes.Buffer
|
||||
err = tmpl.Execute(&ibytes, &IndexTemplates{webRoot})
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
indexBytes = ibytes.Bytes()
|
||||
|
||||
s := server{
|
||||
transferManager: transferManager,
|
||||
directoryWatcherService: directoryWatcher,
|
||||
}
|
||||
spa := spaHandler{
|
||||
staticPath: "static",
|
||||
indexPath: "index.html",
|
||||
}
|
||||
|
||||
r := mux.NewRouter()
|
||||
|
||||
log.Infof("Creating route: %s", webRoot+"/api/transfers")
|
||||
r.HandleFunc("/"+webRoot+"/api/transfers", s.TransfersHandler)
|
||||
|
||||
log.Infof("Creating route: %s", webRoot+"/api/downloads")
|
||||
r.HandleFunc("/"+webRoot+"/api/downloads", s.DownloadsHandler)
|
||||
|
||||
log.Infof("Creating route: %s", webRoot+"/api/blackhole")
|
||||
r.HandleFunc("/"+webRoot+"/api/blackhole", s.BlackholeHandler)
|
||||
|
||||
r.PathPrefix("/").Handler(spa)
|
||||
|
||||
srv := &http.Server{
|
||||
Handler: r,
|
||||
Addr: fmt.Sprintf("%s:%s", config.BindIP, config.BindPort),
|
||||
// Good practice: enforce timeouts for servers you create!
|
||||
WriteTimeout: 15 * time.Second,
|
||||
ReadTimeout: 15 * time.Second,
|
||||
}
|
||||
|
||||
srv.ListenAndServe()
|
||||
}
|
||||
|
||||
type TransfersResponse struct {
|
||||
Transfers []premiumizeme.Transfer `json:"data"`
|
||||
Status string `json:"status"`
|
||||
}
|
||||
|
||||
func (s *server) TransfersHandler(w http.ResponseWriter, r *http.Request) {
|
||||
var resp TransfersResponse
|
||||
resp.Transfers = *s.transferManager.GetTransfers()
|
||||
resp.Status = s.transferManager.GetStatus()
|
||||
data, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.Write(data)
|
||||
}
|
||||
|
||||
type BlackholeFile struct {
|
||||
ID int `json:"id"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
type BlackholeResponse struct {
|
||||
BlackholeFiles []BlackholeFile `json:"data"`
|
||||
Status string `json:"status"`
|
||||
}
|
||||
|
||||
type Download struct {
|
||||
Added int64 `json:"added"`
|
||||
Name string `json:"name"`
|
||||
Progress string `json:"progress"`
|
||||
Speed string `json:"speed"`
|
||||
}
|
||||
type DownloadsResponse struct {
|
||||
Downloads []Download `json:"data"`
|
||||
Status string `json:"status"`
|
||||
}
|
||||
|
||||
func (s *server) DownloadsHandler(w http.ResponseWriter, r *http.Request) {
|
||||
var resp DownloadsResponse
|
||||
|
||||
for _, v := range s.transferManager.GetDownloads() {
|
||||
resp.Downloads = append(resp.Downloads, Download{
|
||||
Added: v.Added.Unix(),
|
||||
Name: v.Name,
|
||||
Progress: v.ProgressDownloader.GetProgress(),
|
||||
Speed: v.ProgressDownloader.GetSpeed(),
|
||||
})
|
||||
}
|
||||
resp.Status = ""
|
||||
|
||||
data, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.Write(data)
|
||||
}
|
||||
|
||||
func (s *server) BlackholeHandler(w http.ResponseWriter, r *http.Request) {
|
||||
var resp BlackholeResponse
|
||||
for i, n := range s.directoryWatcherService.Queue.GetQueue() {
|
||||
name := path.Base(n)
|
||||
resp.BlackholeFiles = append(resp.BlackholeFiles, BlackholeFile{
|
||||
ID: i,
|
||||
Name: name,
|
||||
})
|
||||
}
|
||||
resp.Status = s.directoryWatcherService.GetStatus()
|
||||
|
||||
data, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.Write(data)
|
||||
}
|
||||
|
||||
// Shamlessly stolen from mux examples https://github.com/gorilla/mux#examples
|
||||
type spaHandler struct {
|
||||
staticPath string
|
||||
indexPath string
|
||||
}
|
||||
|
||||
func (h spaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||
// get the absolute path to prevent directory traversal
|
||||
path, err := filepath.Abs(r.URL.Path)
|
||||
if err != nil {
|
||||
// if we failed to get the absolute path respond with a 400 bad request
|
||||
// and stop
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
path = strings.Replace(path, webRoot, "", 1)
|
||||
|
||||
// prepend the path with the path to the static directory
|
||||
path = filepath.Join(h.staticPath, path)
|
||||
|
||||
// check whether a file exists at the given path
|
||||
_, err = os.Stat(path)
|
||||
if os.IsNotExist(err) || strings.HasSuffix(path, h.staticPath) {
|
||||
// file does not exist, serve index.html
|
||||
// http.ServeFile(w, r, filepath.Join(h.staticPath, h.indexPath))
|
||||
// file does not exist, serve index.html template
|
||||
w.Write(indexBytes)
|
||||
return
|
||||
} else if err != nil {
|
||||
// if we got an error (that wasn't that the file doesn't exist) stating the
|
||||
// file, return a 500 internal server error and stop
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
r.URL.Path = strings.Replace(path, h.staticPath, "", -1)
|
||||
// otherwise, use http.FileServer to serve the static dir
|
||||
http.FileServer(http.Dir(h.staticPath)).ServeHTTP(w, r)
|
||||
}
|
||||
@@ -1,6 +0,0 @@
package clouddownloader

// Interface for the CloudDownloader interface
type CloudDownloaderInterface interface {
	GetTransfers() []Transfer
}
@@ -1,4 +0,0 @@
package clouddownloader

type Transfer struct {
}
86 pkg/downloadmanager/downloadmanager.go Normal file
@@ -0,0 +1,86 @@
package downloadmanager

import (
	"time"

	log "github.com/sirupsen/logrus"
)

func (d *DownloadManager) Run() {
	for {
		select {
		case <-d.CancelChannel:
			return
		default:
			time.Sleep(time.Millisecond * 100)
			for i := 0; i < len(d.transfers); i++ {
				t := &d.transfers[i]
				switch t.GetStatus() {
				case STATUS_QUEUED:
					if d.GetActiveTransferCount() < d.MaxSimultaneousDownloads {
						if err := t.Download(); err != nil {
							log.Errorf("Error downloading: %s", err)
						}
					} else {
						log.Debugf("Too many active transfers, skipping %d", t.GetID())
					}
					return
				}
			}
		}
	}
}

func (d *DownloadManager) GetTransfers() []Transfer {
	d.transfersLock.Lock()
	defer d.transfersLock.Unlock()
	return d.transfers
}

func (d *DownloadManager) GetTransfer(id int64) (*Transfer, error) {
	d.transfersLock.Lock()
	defer d.transfersLock.Unlock()
	for i := 0; i < len(d.transfers); i++ {
		if d.transfers[i].GetID() == id {
			return &d.transfers[i], nil
		}
	}
	return nil, ErrorNoTransferWithID
}

func (d *DownloadManager) AddTransfer(url string, savePath string) (*Transfer, error) {
	d.transfersLock.Lock()
	defer d.transfersLock.Unlock()

	nextID := d.IdCounter.Add(1)

	d.transfers = append(d.transfers, NewTransfer(nextID, url, savePath))

	log.Debugf("Added transfer %d", nextID)
	return d.GetTransfer(nextID)
}

func (d *DownloadManager) GetActiveTransferCount() int {
	c := 0

	for i := 0; i < len(d.transfers); i++ {
		if d.transfers[i].GetStatus() == STATUS_DOWNLOADING {
			c++
		}
	}

	return c
}

func (d *DownloadManager) RemoveTransfer(id int64) error {
	d.transfersLock.Lock()
	defer d.transfersLock.Unlock()

	for i := range d.transfers {
		if d.transfers[i].GetID() == id {
			return d.transfers[i].Cancel()
		}
	}

	return ErrorNoTransferWithID
}
@@ -1,4 +1,4 @@
package progress_downloader
package downloadmanager

// https://golangcode.com/download-a-file-with-progress/

@@ -19,6 +19,7 @@ type WriteCounter struct {
	LastUpdate time.Time
	LastAmount uint64
	Total      uint64
	Closing    bool
}

func NewWriteCounter() *WriteCounter {
@@ -41,6 +42,9 @@ func (wc *WriteCounter) GetSpeed() string {
}

func (wc *WriteCounter) Write(p []byte) (int, error) {
	if wc.Closing {
		return 0, io.ErrClosedPipe
	}
	n := len(p)
	wc.LastAmount = wc.Total
	wc.Total += uint64(n)
@@ -52,9 +56,6 @@ func (wc WriteCounter) GetProgress() string {
	return fmt.Sprintf("%s complete", humanize.Bytes(wc.Total))
}

// DownloadFile will download a url to a local file. It's efficient because it will
// write as it downloads and not load the whole file into memory. We pass an io.TeeReader
// into Copy() to report progress on the download.
func DownloadFile(url string, filepath string, counter *WriteCounter) error {

	// Create the file, but give it a tmp file extension, this means we won't overwrite a
@@ -72,6 +73,7 @@ func DownloadFile(url string, filepath string, counter *WriteCounter) error {
	}
	defer resp.Body.Close()

	// resp.Body.
	if _, err = io.Copy(out, io.TeeReader(resp.Body, counter)); err != nil {
		out.Close()
		return err
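The comment kept in the hunk above describes the core pattern: DownloadFile streams the response body through io.TeeReader into the counter, so progress is tracked as bytes arrive rather than after the fact. A minimal usage sketch, assuming the renamed downloadmanager package exports NewWriteCounter and DownloadFile exactly as shown (the URL, save path and import path below are placeholders, not part of this diff):

package main

import (
	"fmt"
	"log"
	"time"

	"github.com/jackdallas/premiumizearr/pkg/downloadmanager" // assumed import path for the renamed package
)

func main() {
	counter := downloadmanager.NewWriteCounter()

	// Poll the counter while the copy runs; Total is bumped on every Write
	// because DownloadFile wraps the response body in io.TeeReader.
	done := make(chan struct{})
	go func() {
		ticker := time.NewTicker(time.Second)
		defer ticker.Stop()
		for {
			select {
			case <-done:
				return
			case <-ticker.C:
				fmt.Println(counter.GetProgress(), counter.GetSpeed())
			}
		}
	}()

	err := downloadmanager.DownloadFile("https://example.com/file.bin", "/tmp/file.bin", counter)
	close(done)
	if err != nil {
		log.Fatal(err)
	}
}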
208 pkg/downloadmanager/transfer.go Normal file
@@ -0,0 +1,208 @@
package downloadmanager

import (
	"errors"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"os"
	"strings"
	"sync"
	"sync/atomic"

	log "github.com/sirupsen/logrus"
)

var (
	ErrorNoTransferWithID = errors.New("no transfer with id")
)

type transferStatus int

const (
	STATUS_QUEUED transferStatus = iota
	STATUS_DOWNLOADING
	STATUS_PAUSED
	STATUS_COMPLETED
	STATUS_CANCELED
	STATUS_ERROR
)

type Transfer struct {
	id               int64
	totalSize        atomic.Int64
	downloaded       atomic.Int64
	savePath         string
	url              string
	urlLock          sync.Mutex
	status           transferStatus
	statusLock       sync.Mutex
	errorStrings     []string
	errorStringsLock sync.Mutex
	tempFileName     string
	Finished         chan bool
}

func NewTransfer(id int64, url string, savePath string) Transfer {
	return Transfer{
		id:               id,
		totalSize:        atomic.Int64{},
		downloaded:       atomic.Int64{},
		savePath:         savePath,
		url:              url,
		urlLock:          sync.Mutex{},
		status:           STATUS_QUEUED,
		statusLock:       sync.Mutex{},
		errorStrings:     make([]string, 0),
		errorStringsLock: sync.Mutex{},
		tempFileName:     "",
	}
}

func (t *Transfer) SetID(id int64) {
	atomic.StoreInt64(&t.id, id)
}

func (t *Transfer) GetID() int64 {
	return atomic.LoadInt64(&t.id)
}

func (t *Transfer) SetTotalSize(size int64) {
	t.totalSize.Store(size)
}

func (t *Transfer) GetTotalSize() int64 {
	return t.totalSize.Load()
}

func (t *Transfer) SetDownloaded(size int64) {
	t.downloaded.Store(size)
}

func (t *Transfer) GetDownloaded() int64 {
	return t.downloaded.Load()
}

func (t *Transfer) SetURL(url string) {
	t.urlLock.Lock()
	t.url = url
	t.urlLock.Unlock()
}

func (t *Transfer) GetURL() string {
	t.urlLock.Lock()
	defer t.urlLock.Unlock()
	return t.url
}

func (t *Transfer) SetStatus(status transferStatus) {
	t.statusLock.Lock()
	t.status = status
	t.statusLock.Unlock()
}

func (t *Transfer) GetStatus() transferStatus {
	t.statusLock.Lock()
	defer t.statusLock.Unlock()
	return t.status
}

func (t *Transfer) AddErrorString(str string) {
	t.errorStringsLock.Lock()
	t.errorStrings = append(t.errorStrings, str)
	t.errorStringsLock.Unlock()
}

func (t *Transfer) GetErrorStrings() []string {
	t.errorStringsLock.Lock()
	defer t.errorStringsLock.Unlock()
	return t.errorStrings
}

func (t *Transfer) GetTempFilePath() string {
	if t.tempFileName == "" {
		url, err := url.Parse(t.GetURL())
		if err != nil {
			t.tempFileName = fmt.Sprintf("download-%d", t.GetID())
		} else {
			finalPath := strings.Split(url.Path, "/")[len(strings.Split(url.Path, "/"))-1]
			t.tempFileName = fmt.Sprintf("download-%d-%s", t.GetID(), finalPath)
		}
	}

	return t.tempFileName
}

func (t *Transfer) Write(p []byte) (int, error) {
	if t.GetStatus() == STATUS_CANCELED || t.GetStatus() == STATUS_PAUSED {
		return 0, io.EOF
	}
	t.SetDownloaded(t.GetDownloaded() + int64(len(p)))
	return len(p), nil
}

func (t *Transfer) Pause() error {
	t.SetStatus(STATUS_PAUSED)
	return nil
}

func (t *Transfer) Cancel() error {
	t.SetStatus(STATUS_CANCELED)
	t.Finished <- true
	return nil
}

func (t *Transfer) Resume() error {
	return t.Download()
}

func (t *Transfer) Download() error {
	client := &http.Client{}

	//Built http get request with a content range header
	req, err := http.NewRequest("GET", t.GetURL(), nil)
	if err != nil {
		return err
	}
	if t.GetDownloaded() > 0 {
		req.Header.Set("Range", fmt.Sprintf("bytes=%d-", t.GetDownloaded()))
	}
	resp, err := client.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	t.SetTotalSize(resp.ContentLength)
	var out *os.File

	if t.GetDownloaded() > 0 {
		out, err = os.Open(t.GetTempFilePath())
	} else {
		out, err = os.Create(t.GetTempFilePath())
	}

	if err != nil {
		return err
	}

	t.SetStatus(STATUS_DOWNLOADING)
	go func() {
		defer out.Close()

		if _, err := io.Copy(out, io.TeeReader(resp.Body, t)); err != nil {
			t.AddErrorString(err.Error())
			t.SetStatus(STATUS_ERROR)
			log.Error(err)
		}
		t.SetStatus(STATUS_COMPLETED)
		t.Finished <- true
	}()

	return nil
}

func Start() {

}
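A Transfer can also be driven directly through its exported methods; the Range header set in Download means an interrupted or resumed transfer continues from GetDownloaded() bytes. A rough sketch of that call pattern, assuming the import path below; note that NewTransfer as written leaves the Finished channel nil, so the caller has to provide one before Download or Cancel try to signal on it:

package main

import (
	"fmt"
	"time"

	"github.com/jackdallas/premiumizearr/pkg/downloadmanager" // assumed import path
)

func main() {
	// Placeholder URL and save path.
	t := downloadmanager.NewTransfer(1, "https://example.com/file.bin", "/tmp/file.bin")
	t.Finished = make(chan bool, 1) // not initialised by NewTransfer above

	if err := t.Download(); err != nil {
		fmt.Println("download failed:", err)
		return
	}

	// Poll progress until the background copy signals completion.
	for {
		select {
		case <-t.Finished:
			fmt.Println("finished, errors:", t.GetErrorStrings())
			return
		case <-time.After(time.Second):
			fmt.Printf("%d / %d bytes\n", t.GetDownloaded(), t.GetTotalSize())
		}
	}
}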
25 pkg/downloadmanager/types.go Normal file
@@ -0,0 +1,25 @@
package downloadmanager

import (
	"sync"
	"sync/atomic"
)

// type DownloadManager interface {
// 	GetTransfers() []Transfer

// 	GetTransfer(id int64) (*Transfer, error)
// 	AddTransfer(url string) (*Transfer, error)
// 	RemoveTransfer(id int64) error
// }

type DownloadManager struct {
	MaxSimultaneousDownloads int

	transfers     []Transfer
	transfersLock sync.Mutex

	IdCounter atomic.Int64

	CancelChannel chan bool
}
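DownloadManager ties these pieces together: Run polls the transfer slice every 100ms and starts queued transfers while the active count stays under MaxSimultaneousDownloads. A rough sketch of the intended call pattern, using only the exported fields and methods shown in this diff (import path assumed; note that AddTransfer as written re-acquires transfersLock via GetTransfer, so that path needs adjusting before it can return):

package main

import (
	"time"

	"github.com/jackdallas/premiumizearr/pkg/downloadmanager" // assumed import path
)

func main() {
	dm := &downloadmanager.DownloadManager{
		MaxSimultaneousDownloads: 2,
		CancelChannel:            make(chan bool),
	}

	// Run blocks until CancelChannel is signalled, so give it its own goroutine.
	go dm.Run()

	// Queue a download (placeholder URL and save path).
	if _, err := dm.AddTransfer("https://example.com/file.zip", "/downloads/file.zip"); err != nil {
		panic(err)
	}

	time.Sleep(30 * time.Second)

	// Stop the scheduling loop.
	dm.CancelChannel <- true
}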
@@ -22,8 +22,8 @@ type Premiumizeme struct {
	APIKey string
}

func NewPremiumizemeClient(APIKey string) *Premiumizeme {
	return &Premiumizeme{APIKey: APIKey}
func NewPremiumizemeClient(APIKey string) Premiumizeme {
	return Premiumizeme{APIKey: APIKey}
}

func (pm *Premiumizeme) createPremiumizemeURL(urlPath string) (url.URL, error) {
@@ -38,7 +38,15 @@ func (pm *Premiumizeme) createPremiumizemeURL(urlPath string) (url.URL, error) {
	return *u, nil
}

var (
	ErrAPIKeyNotSet = fmt.Errorf("premiumize.me API key not set")
)

func (pm *Premiumizeme) GetTransfers() ([]Transfer, error) {
	if pm.APIKey == "" {
		return nil, ErrAPIKeyNotSet
	}

	log.Trace("Getting transfers list from premiumize.me")
	url, err := pm.createPremiumizemeURL("/transfer/list")
	if err != nil {
@@ -70,6 +78,10 @@ func (pm *Premiumizeme) GetTransfers() ([]Transfer, error) {
}

func (pm *Premiumizeme) ListFolder(folderID string) ([]Item, error) {
	if pm.APIKey == "" {
		return nil, ErrAPIKeyNotSet
	}

	var ret []Item
	url, err := pm.createPremiumizemeURL("/folder/list")
	if err != nil {
@@ -112,6 +124,10 @@ func (pm *Premiumizeme) ListFolder(folderID string) ([]Item, error) {
}

func (pm *Premiumizeme) GetFolders() ([]Item, error) {
	if pm.APIKey == "" {
		return nil, ErrAPIKeyNotSet
	}

	log.Trace("Getting folder list from premiumize.me")
	url, err := pm.createPremiumizemeURL("/folder/list")
	if err != nil {
@@ -119,30 +135,46 @@ func (pm *Premiumizeme) GetFolders() ([]Item, error) {
	}

	var ret []Item
	req, _ := http.NewRequest("GET", url.String(), nil)
	req, err := http.NewRequest("GET", url.String(), nil)
	if err != nil {
		return ret, err
	}

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return ret, err
	}

	defer resp.Body.Close()
	res := ListFoldersResponse{}
	err = json.NewDecoder(resp.Body).Decode(&res)
	if resp.StatusCode != 200 {
		return ret, fmt.Errorf("error listing folder: %s (%d)", resp.Status, resp.StatusCode)
	}

	if res.Status != "success" {
		return ret, fmt.Errorf("%s", res.Status)
	defer resp.Body.Close()
	list_folders_res := ListFoldersResponse{}
	err = json.NewDecoder(resp.Body).Decode(&list_folders_res)
	if err != nil {
		return ret, err
	}

	if list_folders_res.Status != "success" {
		fmt.Printf("%+v\n", resp)
		fmt.Printf("%+v\n", list_folders_res)
		return ret, fmt.Errorf(list_folders_res.Message)
	}

	if err != nil {
		return ret, err
	}

	log.Tracef("Received %d Folders", len(res.Content))
	return res.Content, nil
	log.Tracef("Received %d Folders", len(list_folders_res.Content))
	return list_folders_res.Content, nil
}

func (pm *Premiumizeme) CreateTransfer(filePath string, parentID string) error {
	if pm.APIKey == "" {
		return ErrAPIKeyNotSet
	}

	//TODO: handle file size, i.e. incorrect file being saved
	log.Trace("Opening file: ", filePath)
	file, err := os.Open(filePath)
@@ -203,6 +235,10 @@ func (pm *Premiumizeme) CreateTransfer(filePath string, parentID string) error {
}

func (pm *Premiumizeme) DeleteFolder(folderID string) error {
	if pm.APIKey == "" {
		return ErrAPIKeyNotSet
	}

	url, err := pm.createPremiumizemeURL("/folder/delete")
	if err != nil {
		return err
@@ -246,6 +282,10 @@ func (pm *Premiumizeme) DeleteFolder(folderID string) error {
}

func (pm *Premiumizeme) CreateFolder(folderName string) (string, error) {
	if pm.APIKey == "" {
		return "", ErrAPIKeyNotSet
	}

	url, err := pm.createPremiumizemeURL("/folder/create")
	if err != nil {
		return "", err
@@ -289,6 +329,10 @@ func (pm *Premiumizeme) CreateFolder(folderName string) (string, error) {
}

func (pm *Premiumizeme) DeleteTransfer(id string) error {
	if pm.APIKey == "" {
		return ErrAPIKeyNotSet
	}

	url, err := pm.createPremiumizemeURL("/transfer/delete")
	if err != nil {
		return err
@@ -437,6 +481,10 @@ func (pm *Premiumizeme) GenerateZippedFolderLink(fileID string) (string, error)
}

func (pm *Premiumizeme) generateZip(ID string, srcType SRCType) (string, error) {
	if pm.APIKey == "" {
		return "", ErrAPIKeyNotSet
	}

	// Build URL with apikey
	URL, err := pm.createPremiumizemeURL("/zip/generate")
	if err != nil {

@@ -49,7 +49,7 @@ type Item struct {
}
type FolderItems struct {
	Status   string `json:"status"`
	Contant  []Item `json:"content"`
	Content  []Item `json:"content"`
	Name     string `json:"name"`
	ParentID string `json:"parent_id"`
	FolderID string `json:"folder_id"`
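With the guard added at the top of every client method, callers can now tell a missing API key apart from a transport or API error. A short sketch of the value-returning constructor and the ErrAPIKeyNotSet sentinel in use (the key is a placeholder and the import path is inferred from the repository layout, not stated in this diff):

package main

import (
	"errors"
	"fmt"
	"log"

	"github.com/jackdallas/premiumizearr/pkg/premiumizeme" // assumed import path
)

func main() {
	// NewPremiumizemeClient now returns a value rather than a pointer.
	client := premiumizeme.NewPremiumizemeClient("your-api-key")

	folders, err := client.GetFolders()
	if errors.Is(err, premiumizeme.ErrAPIKeyNotSet) {
		log.Fatal("configure a premiumize.me API key first")
	} else if err != nil {
		log.Fatalf("listing folders failed: %v", err)
	}

	fmt.Printf("found %d folders\n", len(folders))
	for _, folder := range folders {
		fmt.Printf("%+v\n", folder)
	}
}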
6 scripts/postinstall.sh Normal file
@@ -0,0 +1,6 @@
#!/bin/bash

chown -R 1000:1000 /opt/premiumizearrd/
systemctl enable premiumizearrd.service
systemctl daemon-reload
systemctl start premiumizearrd.service
1 web/.node-version Normal file
@@ -0,0 +1 @@
v16
6522 web/package-lock.json generated
File diff suppressed because it is too large
@@ -2,25 +2,25 @@
  "name": "premiumizearr-ui",
  "version": "0.0.1",
  "devDependencies": {
    "carbon-components-svelte": "^0.49.0",
    "carbon-icons-svelte": "^10.38.0",
    "carbon-preprocess-svelte": "^0.6.0",
    "copy-webpack-plugin": "^9.1.0",
    "cross-env": "^7.0.3",
    "css-loader": "^5.0.1",
    "esbuild-loader": "^2.16.0",
    "mini-css-extract-plugin": "^1.3.4",
    "svelte": "^3.31.2",
    "svelte-loader": "^3.0.0",
    "webpack": "^5.16.0",
    "webpack-cli": "^4.4.0",
    "webpack-dev-server": "^4.7.3"
    "carbon-components-svelte": "^0.73.5",
    "carbon-icons-svelte": "^11.4.0",
    "carbon-preprocess-svelte": "^0.9.1",
    "copy-webpack-plugin": "^11.0.0",
    "cross-env": "^7.0.0",
    "css-loader": "^5.0.0",
    "esbuild-loader": "^2.0.0",
    "mini-css-extract-plugin": "^2.7.5",
    "svelte": "^3.59.1",
    "svelte-loader": "^3.1.8",
    "webpack": "^5.87.0",
    "webpack-cli": "^5.1.4",
    "webpack-dev-server": "^4.15.1"
  },
  "scripts": {
    "build": "cross-env NODE_ENV=production webpack",
    "dev": "webpack serve --content-base public"
    "dev": "webpack serve --static public"
  },
  "dependencies": {
    "luxon": "^2.3.0"
    "luxon": "^3.3.0"
  }
}
@@ -6,13 +6,13 @@

  <title>Premiumizearr</title>

  <link rel='icon' type='image/png' href='/favicon.png'>
  <link rel='stylesheet' href='/{{.RootPath}}/bundle.css'>
  <link rel='icon' type='image/png' href='./{{.RootPath}}/favicon.png'>
  <link rel='stylesheet' href='./{{.RootPath}}/bundle.css'>
  <!-- Material Icons -->
  <link rel="stylesheet" href="https://fonts.googleapis.com/icon?family=Material+Icons" />
  <!-- Roboto -->
  <link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Roboto:300,400,500,600,700" />
  <script defer src='/{{.RootPath}}/bundle.js'></script>
  <script defer src='./{{.RootPath}}/bundle.js'></script>
</head>

<body>
@@ -1,150 +1,31 @@
|
||||
<script>
|
||||
import APITable from "./components/APITable.svelte";
|
||||
import "carbon-components-svelte/css/g100.css";
|
||||
import { Grid, Row, Column } from "carbon-components-svelte";
|
||||
import DateTime from "luxon";
|
||||
|
||||
let dlSpeed = 0;
|
||||
|
||||
let webRoot = new URL(window.location.href).pathname;
|
||||
|
||||
function parseDLSpeedFromMessage(m) {
|
||||
if (m == "Loading..." || m == undefined) return 0;
|
||||
let speed = m.split(" ")[0];
|
||||
speed = speed.replace(",", "");
|
||||
let unit = m.split(" ")[1];
|
||||
if (Number.isNaN(speed)) {
|
||||
console.log("Speed is not a number: ", speed);
|
||||
console.log("Message: ", message);
|
||||
return 0;
|
||||
}
|
||||
if (unit === undefined || unit === null || unit == "") {
|
||||
console.log("Unit undefined in : " + m);
|
||||
return 0;
|
||||
} else {
|
||||
try {
|
||||
unit = unit.toUpperCase();
|
||||
} catch (error) {
|
||||
return 0;
|
||||
}
|
||||
unit = unit.replace("/", "");
|
||||
unit = unit.substring(0, 2);
|
||||
switch (unit) {
|
||||
case "KB":
|
||||
return speed * 1024;
|
||||
case "MB":
|
||||
return speed * 1024 * 1024;
|
||||
case "GB":
|
||||
return speed * 1024 * 1024 * 1024;
|
||||
default:
|
||||
console.log("Unknown unit: " + unit);
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function HumanReadableSpeed(bytes) {
|
||||
if (bytes < 1024) {
|
||||
return bytes + " B/s";
|
||||
} else if (bytes < 1024 * 1024) {
|
||||
return (bytes / 1024).toFixed(2) + " KB/s";
|
||||
} else if (bytes < 1024 * 1024 * 1024) {
|
||||
return (bytes / 1024 / 1024).toFixed(2) + " MB/s";
|
||||
} else {
|
||||
return (bytes / 1024 / 1024 / 1024).toFixed(2) + " GB/s";
|
||||
}
|
||||
}
|
||||
|
||||
function dataToRows(data) {
|
||||
let rows = [];
|
||||
dlSpeed = 0;
|
||||
if (!data) return rows;
|
||||
|
||||
for (let i = 0; i < data.length; i++) {
|
||||
let d = data[i];
|
||||
rows.push({
|
||||
id: d.id,
|
||||
name: d.name,
|
||||
status: d.status,
|
||||
progress: (d.progress * 100).toFixed(0) + "%",
|
||||
message: d.message,
|
||||
});
|
||||
|
||||
let speed = parseDLSpeedFromMessage(d.message);
|
||||
if (!Number.isNaN(speed)) {
|
||||
dlSpeed += speed;
|
||||
} else {
|
||||
console.error("Invalid speed: " + d.message);
|
||||
}
|
||||
}
|
||||
return rows;
|
||||
}
|
||||
|
||||
function downloadsToRows(downloads) {
|
||||
let rows = [];
|
||||
if (!downloads) return rows;
|
||||
|
||||
for (let i = 0; i < downloads.length; i++) {
|
||||
let d = downloads[i];
|
||||
rows.push({
|
||||
Added: DateTime.fromMillis(d.added).toFormat('dd hh:mm:ss a'),
|
||||
name: d.name,
|
||||
progress: (d.progress * 100).toFixed(0) + "%",
|
||||
});
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<main>
|
||||
<Grid fullWidth>
|
||||
<Row>
|
||||
<Column md={4} >
|
||||
<h3>Blackhole</h3>
|
||||
<APITable
|
||||
headers={[
|
||||
{ key: "id", value: "Pos" },
|
||||
{ key: "name", value: "Name", sort: false },
|
||||
]}
|
||||
{webRoot}
|
||||
APIpath="/api/blackhole"
|
||||
zebra={true}
|
||||
totalName="In Queue: "
|
||||
/>
|
||||
</Column>
|
||||
<Column md={4} >
|
||||
<h3>Downloads</h3>
|
||||
<APITable
|
||||
headers={[
|
||||
{ key: "added", value: "Added" },
|
||||
{ key: "name", value: "Name" },
|
||||
{ key: "progress", value: "Progress" },
|
||||
{ key: "speed", value: "Speed" },
|
||||
]}
|
||||
updateTimeSeconds={2}
|
||||
{webRoot}
|
||||
APIpath="/api/downloads"
|
||||
zebra={true}
|
||||
totalName="Downloading: "
|
||||
/>
|
||||
</Column>
|
||||
</Row>
|
||||
<Row>
|
||||
<Column>
|
||||
<h3>Transfers</h3>
|
||||
<p>Download Speed: {HumanReadableSpeed(dlSpeed)}</p>
|
||||
<APITable
|
||||
headers={[
|
||||
{ key: "name", value: "Name" },
|
||||
{ key: "status", value: "Status" },
|
||||
{ key: "progress", value: "Progress" },
|
||||
{ key: "message", value: "Message", sort: false },
|
||||
]}
|
||||
{webRoot}
|
||||
APIpath="/api/transfers"
|
||||
zebra={true}
|
||||
{dataToRows}
|
||||
/>
|
||||
</Column>
|
||||
</Row>
|
||||
</Grid>
|
||||
</main>
|
||||
<script>
|
||||
import "carbon-components-svelte/css/g100.css";
|
||||
|
||||
import {
|
||||
Grid,
|
||||
Row,
|
||||
Column,
|
||||
Tabs,
|
||||
Tab,
|
||||
TabContent,
|
||||
} from "carbon-components-svelte";
|
||||
import Config from "./pages/Config.svelte";
|
||||
import Info from "./pages/Info.svelte";
|
||||
</script>
|
||||
|
||||
<main>
|
||||
<Grid fullWidth>
|
||||
<Row>
|
||||
<Column>
|
||||
<Tabs>
|
||||
<Tab label="Info" />
|
||||
<Tab label="Config" />
|
||||
<svelte:fragment slot="content">
|
||||
<TabContent><Info /></TabContent>
|
||||
<TabContent><Config /></TabContent>
|
||||
</svelte:fragment>
|
||||
</Tabs>
|
||||
</Column>
|
||||
</Row>
|
||||
</Grid>
|
||||
</main>
|
||||
|
||||
21 web/src/Utilities/web_root.js Normal file
@@ -0,0 +1,21 @@
export function CalculateAPIPath(path) {
  let webRoot = window.location.href;

  if (webRoot.indexOf("index.html") > -1) {
    webRoot = webRoot.substring(0, webRoot.indexOf("index.html"));
  }

  if (webRoot[webRoot.length - 1] !== "/") {
    webRoot += "/";
  }

  if (path[0] == "/") {
    // console.log(webRoot + path.substring(1));
    return webRoot + path.substring(1);
  }

  // console.log(webRoot + path);

  return webRoot + path;
}
@@ -1,17 +1,20 @@
<script>
  import { DataTable, InlineLoading } from "carbon-components-svelte";
  import { CalculateAPIPath } from "../Utilities/web_root";

  export let sortable = true;
  export let sortKey;
  export let sortOrder;

  export let totalName = "";
  export let headers = {};
  export let webRoot = "";
  export let updateTimeSeconds = 10;
  export let APIpath = "/api/transfers";
  export let dataToRows = function (data) {
    if (!data)
      return [];
    if (!data) return [];
    return data;
  };

  let updating = false;
  let status = "";
  let rows = [];
@@ -21,7 +24,7 @@
    if (updating) return;
    // Refresh from endpoint
    updating = true;
    fetch(webRoot + APIpath)
    fetch(CalculateAPIPath(APIpath))
      .then((res) => res.json())
      .then((data) => {
        rows = dataToRows(data.data);
@@ -46,16 +49,17 @@
<main>
  {#if totalName !== ""}
    <p>
      {totalName} {safeLength(rows)}
      {totalName}
      {safeLength(rows)}
    </p>
  {/if}
  <p>
    <InlineLoading status={statusIndicator} description="Update status" />
    <InlineLoading status={statusIndicator} description="Update status" />
  </p>
  <p>
    Message: {status}
    Message: {status}
  </p>
  <p>
    <DataTable sortable {headers} {rows} />
    <DataTable {sortKey} {sortOrder} {sortable} {headers} {rows} />
  </p>
</main>
357 web/src/pages/Config.svelte Normal file
@@ -0,0 +1,357 @@
<script>
|
||||
import {
|
||||
Row,
|
||||
Column,
|
||||
Button,
|
||||
TextInput,
|
||||
Modal,
|
||||
FormGroup,
|
||||
Dropdown,
|
||||
Form,
|
||||
Checkbox,
|
||||
} from "carbon-components-svelte";
|
||||
import {
|
||||
Save,
|
||||
CheckmarkFilled,
|
||||
AddFilled,
|
||||
TrashCan,
|
||||
HelpFilled,
|
||||
MisuseOutline,
|
||||
WatsonHealthRotate_360,
|
||||
} from "carbon-icons-svelte";
|
||||
import { CalculateAPIPath } from "../Utilities/web_root";
|
||||
|
||||
let config = {
|
||||
BlackholeDirectory: "",
|
||||
PollBlackholeDirectory: false,
|
||||
PollBlackholeIntervalMinutes: 10,
|
||||
DownloadsDirectory: "",
|
||||
UnzipDirectory: "",
|
||||
BindIP: "",
|
||||
BindPort: "",
|
||||
WebRoot: "",
|
||||
SimultaneousDownloads: 0,
|
||||
Arrs: [],
|
||||
};
|
||||
const ERR_SAVE = "Error Saving Config";
|
||||
const ERR_TEST = "Error Testing *arr client";
|
||||
|
||||
let arrTesting = [];
|
||||
let arrTestIcons = [];
|
||||
let arrTestKind = [];
|
||||
|
||||
let inputDisabled = true;
|
||||
|
||||
let errorModal = false;
|
||||
let errorTitle = ERR_SAVE;
|
||||
let errorMessage = "";
|
||||
|
||||
let saveIcon = Save;
|
||||
|
||||
function getConfig() {
|
||||
inputDisabled = true;
|
||||
fetch(CalculateAPIPath("api/config"))
|
||||
.then((response) => response.json())
|
||||
.then((data) => {
|
||||
if (Array.isArray(data.Arrs)) {
|
||||
for (let i = 0; i < data.Arrs.length; i++) {
|
||||
SetTestArr(i, HelpFilled, "secondary", false);
|
||||
}
|
||||
}
|
||||
|
||||
config = data;
|
||||
inputDisabled = false;
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("Error: ", error);
|
||||
});
|
||||
}
|
||||
|
||||
function submit() {
|
||||
inputDisabled = true;
|
||||
fetch(CalculateAPIPath("api/config"), {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(config),
|
||||
})
|
||||
.then((response) => response.json())
|
||||
.then((data) => {
|
||||
if (data.succeeded) {
|
||||
saveIcon = CheckmarkFilled;
|
||||
getConfig();
|
||||
setTimeout(() => {
|
||||
saveIcon = Save;
|
||||
}, 1000);
|
||||
} else {
|
||||
errorMessage = data.status;
|
||||
errorTitle = ERR_SAVE;
|
||||
errorModal = true;
|
||||
getConfig();
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("Error: ", error);
|
||||
errorTitle = ERR_SAVE;
|
||||
errorMessage = error;
|
||||
errorModal = true;
|
||||
setTimeout(() => {
|
||||
getConfig();
|
||||
}, 1500);
|
||||
});
|
||||
}
|
||||
|
||||
function AddArr() {
|
||||
config.Arrs.push({
|
||||
Name: "New Arr",
|
||||
URL: "http://localhost:1234",
|
||||
APIKey: "xxxxxxxx",
|
||||
Type: "Sonarr",
|
||||
});
|
||||
//Force re-paint
|
||||
config.Arrs = [...config.Arrs];
|
||||
}
|
||||
|
||||
function RemoveArr(index) {
|
||||
config.Arrs.splice(index, 1);
|
||||
//Force re-paint
|
||||
config.Arrs = [...config.Arrs];
|
||||
}
|
||||
|
||||
function TestArr(index) {
|
||||
SetTestArr(index, WatsonHealthRotate_360, "secondary", true);
|
||||
|
||||
fetch(CalculateAPIPath("api/testArr"), {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(config.Arrs[index]),
|
||||
})
|
||||
.then((response) => response.json())
|
||||
.then((data) => {
|
||||
if (data.succeeded) {
|
||||
SetTestArr(index, CheckmarkFilled, "primary", false);
|
||||
ResetArrTestDelayed(index, 10);
|
||||
} else {
|
||||
SetTestArr(index, MisuseOutline, "danger", false);
|
||||
ResetArrTestDelayed(index, 5);
|
||||
errorTitle = ERR_TEST;
|
||||
errorMessage = data.status;
|
||||
errorModal = true;
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("Error: ", error);
|
||||
SetTestArr(index, MisuseOutline, "danger", false);
|
||||
ResetArrTestDelayed(index, 5);
|
||||
errorTitle = ERR_TEST;
|
||||
errorMessage = error;
|
||||
errorModal = true;
|
||||
});
|
||||
}
|
||||
|
||||
function UntestArr(index) {
|
||||
SetTestArr(index, HelpFilled, "secondary", false);
|
||||
}
|
||||
|
||||
function SetTestArr(index, icon, kind, testing) {
|
||||
arrTesting[index] = testing;
|
||||
arrTestIcons[index] = icon;
|
||||
arrTestKind[index] = kind;
|
||||
|
||||
arrTesting = [...arrTesting];
|
||||
arrTestIcons = [...arrTestIcons];
|
||||
arrTestKind = [...arrTestKind];
|
||||
}
|
||||
|
||||
function ResetArrTestDelayed(index, seconds) {
|
||||
setTimeout(() => {
|
||||
SetTestArr(index, HelpFilled, "secondary", false);
|
||||
}, 1000 * seconds);
|
||||
}
|
||||
|
||||
getConfig();
|
||||
</script>
|
||||
|
||||
<main>
|
||||
<Row>
|
||||
<Column>
|
||||
<h4>*Arr Settings</h4>
|
||||
<FormGroup>
|
||||
<TextInput
|
||||
type="number"
|
||||
disabled={inputDisabled}
|
||||
labelText="Arr Update History Interval (seconds)"
|
||||
bind:value={config.ArrHistoryUpdateIntervalSeconds}
|
||||
/>
|
||||
{#if config.Arrs !== undefined}
|
||||
{#each config.Arrs as arr, i}
|
||||
<h5>- {arr.Name ? arr.Name : i}</h5>
|
||||
<FormGroup>
|
||||
<TextInput
|
||||
labelText="Name"
|
||||
bind:value={arr.Name}
|
||||
disabled={inputDisabled}
|
||||
on:input={() => {
|
||||
UntestArr(i);
|
||||
}}
|
||||
/>
|
||||
<TextInput
|
||||
labelText="URL"
|
||||
bind:value={arr.URL}
|
||||
disabled={inputDisabled}
|
||||
on:input={() => {
|
||||
UntestArr(i);
|
||||
}}
|
||||
/>
|
||||
<TextInput
|
||||
labelText="APIKey"
|
||||
bind:value={arr.APIKey}
|
||||
disabled={inputDisabled}
|
||||
on:input={() => {
|
||||
UntestArr(i);
|
||||
}}
|
||||
/>
|
||||
<Dropdown
|
||||
titleText="Type"
|
||||
selectedId={arr.Type}
|
||||
on:select={(e) => {
|
||||
config.Arrs[i].Type = e.detail.selectedId;
|
||||
UntestArr(i);
|
||||
}}
|
||||
items={[
|
||||
{ id: "Sonarr", text: "Sonarr" },
|
||||
{ id: "Radarr", text: "Radarr" },
|
||||
]}
|
||||
disabled={inputDisabled}
|
||||
/>
|
||||
<Button
|
||||
style="margin-top: 10px;"
|
||||
on:click={() => {
|
||||
RemoveArr(i);
|
||||
}}
|
||||
kind="danger"
|
||||
icon={TrashCan}
|
||||
iconDescription="Delete Arr"
|
||||
/>
|
||||
<Button
|
||||
style="margin-top: 10px;"
|
||||
on:click={() => {
|
||||
TestArr(i);
|
||||
}}
|
||||
disabled={arrTesting[i]}
|
||||
kind={arrTestKind[i]}
|
||||
icon={arrTestIcons[i]}
|
||||
>
|
||||
Test
|
||||
</Button>
|
||||
</FormGroup>
|
||||
{/each}
|
||||
{/if}
|
||||
</FormGroup>
|
||||
<Button on:click={AddArr} disabled={inputDisabled} icon={AddFilled}>
|
||||
Add Arr
|
||||
</Button>
|
||||
</Column>
|
||||
<Column>
|
||||
<h4>Premiumize.me Settings</h4>
|
||||
<FormGroup>
|
||||
<TextInput
|
||||
disabled={inputDisabled}
|
||||
labelText="API Key"
|
||||
bind:value={config.PremiumizemeAPIKey}
|
||||
/>
|
||||
</FormGroup>
|
||||
<h4>Directory Settings</h4>
|
||||
<FormGroup>
|
||||
<TextInput
|
||||
disabled={inputDisabled}
|
||||
labelText="Blackhole Directory"
|
||||
bind:value={config.BlackholeDirectory}
|
||||
/>
|
||||
<Checkbox
|
||||
disabled={inputDisabled}
|
||||
bind:checked={config.PollBlackholeDirectory}
|
||||
labelText="Poll Blackhole Directory"
|
||||
/>
|
||||
<TextInput
|
||||
type="number"
|
||||
disabled={inputDisabled}
|
||||
labelText="Poll Blackhole Interval Minutes"
|
||||
bind:value={config.PollBlackholeIntervalMinutes}
|
||||
/>
|
||||
</FormGroup>
|
||||
<FormGroup>
|
||||
<TextInput
|
||||
disabled={inputDisabled}
|
||||
labelText="Download Directory"
|
||||
bind:value={config.DownloadsDirectory}
|
||||
/>
|
||||
<TextInput
|
||||
disabled={inputDisabled}
|
||||
labelText="Unzip Directory"
|
||||
bind:value={config.UnzipDirectory}
|
||||
/>
|
||||
</FormGroup>
|
||||
<h4>Web Server Settings</h4>
|
||||
<FormGroup>
|
||||
<TextInput
|
||||
disabled={inputDisabled}
|
||||
labelText="Bind IP"
|
||||
bind:value={config.BindIP}
|
||||
/>
|
||||
<TextInput
|
||||
disabled={inputDisabled}
|
||||
labelText="Bind Port"
|
||||
bind:value={config.BindPort}
|
||||
/>
|
||||
<TextInput
|
||||
disabled={inputDisabled}
|
||||
labelText="Web Root"
|
||||
bind:value={config.WebRoot}
|
||||
/>
|
||||
</FormGroup>
|
||||
<h4>Download Settings</h4>
|
||||
<FormGroup>
|
||||
<TextInput
|
||||
type="number"
|
||||
disabled={inputDisabled}
|
||||
labelText="Simultaneous Downloads"
|
||||
bind:value={config.SimultaneousDownloads}
|
||||
/>
|
||||
</FormGroup>
|
||||
<Button on:click={submit} icon={saveIcon} disabled={inputDisabled}
|
||||
>Save</Button
|
||||
>
|
||||
</Column>
|
||||
</Row>
|
||||
</main>
|
||||
|
||||
<Modal
|
||||
bind:open={errorModal}
|
||||
on:open={errorModal}
|
||||
passiveModal
|
||||
modalHeading={errorTitle}
|
||||
on:close={() => {
|
||||
errorModal = false;
|
||||
}}
|
||||
>
|
||||
<p>{errorMessage}</p>
|
||||
</Modal>
|
||||
<!--
|
||||
|
||||
{() => {
|
||||
console.log(testStatus.get(i));
|
||||
if (testStatus.get(i) == undefined)
|
||||
return "secondary";
|
||||
|
||||
if (testStatus.get(i) === 3) {
|
||||
return "danger";
|
||||
} else {
|
||||
return "secondary";
|
||||
}
|
||||
}}
|
||||
|
||||
-->
|
||||
148 web/src/pages/Info.svelte Normal file
@@ -0,0 +1,148 @@
<script>
|
||||
import APITable from "../components/APITable.svelte";
|
||||
import { Row, Column } from "carbon-components-svelte";
|
||||
import {DateTime} from "luxon";
|
||||
|
||||
let dlSpeed = 0;
|
||||
|
||||
function parseDLSpeedFromMessage(m) {
|
||||
if (m == "Loading..." || m == undefined) return 0;
|
||||
if (m == "too many missing articles") return 0;
|
||||
|
||||
let speed = m.split(" ")[0];
|
||||
speed = speed.replace(",", "");
|
||||
let unit = m.split(" ")[1];
|
||||
if (Number.isNaN(speed)) {
|
||||
console.log("Speed is not a number: ", speed);
|
||||
console.log("Message: ", message);
|
||||
return 0;
|
||||
}
|
||||
if (unit === undefined || unit === null || unit == "") {
|
||||
console.log("Unit undefined in : " + m);
|
||||
return 0;
|
||||
} else {
|
||||
try {
|
||||
unit = unit.toUpperCase();
|
||||
} catch (error) {
|
||||
return 0;
|
||||
}
|
||||
unit = unit.replace("/", "");
|
||||
unit = unit.substring(0, 2);
|
||||
switch (unit) {
|
||||
case "KB":
|
||||
return speed * 1024;
|
||||
case "MB":
|
||||
return speed * 1024 * 1024;
|
||||
case "GB":
|
||||
return speed * 1024 * 1024 * 1024;
|
||||
default:
|
||||
console.log("Unknown unit: " + unit + " in message '" + m + "'");
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function HumanReadableSpeed(bytes) {
|
||||
if (bytes < 1024) {
|
||||
return bytes + " B/s";
|
||||
} else if (bytes < 1024 * 1024) {
|
||||
return (bytes / 1024).toFixed(2) + " KB/s";
|
||||
} else if (bytes < 1024 * 1024 * 1024) {
|
||||
return (bytes / 1024 / 1024).toFixed(2) + " MB/s";
|
||||
} else {
|
||||
return (bytes / 1024 / 1024 / 1024).toFixed(2) + " GB/s";
|
||||
}
|
||||
}
|
||||
|
||||
function dataToRows(data) {
|
||||
let rows = [];
|
||||
dlSpeed = 0;
|
||||
if (!data) return rows;
|
||||
|
||||
for (let i = 0; i < data.length; i++) {
|
||||
let d = data[i];
|
||||
rows.push({
|
||||
id: d.id,
|
||||
name: d.name,
|
||||
status: d.status,
|
||||
progress: (d.progress * 100).toFixed(0) + "%",
|
||||
message: d.message,
|
||||
});
|
||||
|
||||
let speed = parseDLSpeedFromMessage(d.message);
|
||||
if (!Number.isNaN(speed)) {
|
||||
dlSpeed += speed;
|
||||
} else {
|
||||
console.error("Invalid speed: " + d.message);
|
||||
}
|
||||
}
|
||||
return rows;
|
||||
}
|
||||
|
||||
function downloadsToRows(downloads) {
|
||||
let rows = [];
|
||||
if (!downloads) return rows;
|
||||
|
||||
for (let i = 0; i < downloads.length; i++) {
|
||||
let d = downloads[i];
|
||||
rows.push({
|
||||
Added: DateTime.fromMillis(d.added).toFormat('dd hh:mm:ss a'),
|
||||
name: d.name,
|
||||
progress: (d.progress * 100).toFixed(0) + "%",
|
||||
});
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<main>
|
||||
<Row>
|
||||
<Column md={4} >
|
||||
<h3>Blackhole</h3>
|
||||
<APITable
|
||||
headers={[
|
||||
{ key: "id", value: "Pos" },
|
||||
{ key: "name", value: "Name", sort: false },
|
||||
]}
|
||||
APIpath="api/blackhole"
|
||||
zebra={true}
|
||||
totalName="In Queue: "
|
||||
/>
|
||||
</Column>
|
||||
<Column md={4} >
|
||||
<h3>Downloads</h3>
|
||||
<APITable
|
||||
headers={[
|
||||
{ key : "id", value : "ID" },
|
||||
{ key: "added", value: "Added" },
|
||||
{ key: "name", value: "Name" },
|
||||
{ key: "progress", value: "Progress" },
|
||||
{ key: "speed", value: "Speed" },
|
||||
]}
|
||||
sortable={false}
|
||||
sortKey={"id"}
|
||||
sortOrder={"desc"}
|
||||
updateTimeSeconds={2}
|
||||
APIpath="api/downloads"
|
||||
zebra={true}
|
||||
totalName="Downloading: "
|
||||
/>
|
||||
</Column>
|
||||
</Row>
|
||||
<Row>
|
||||
<Column>
|
||||
<h3>Transfers</h3>
|
||||
<p>Download Speed: {HumanReadableSpeed(dlSpeed)}</p>
|
||||
<APITable
|
||||
headers={[
|
||||
{ key: "name", value: "Name" },
|
||||
{ key: "status", value: "Status" },
|
||||
{ key: "progress", value: "Progress" },
|
||||
{ key: "message", value: "Message", sort: false },
|
||||
]}
|
||||
APIpath="api/transfers"
|
||||
zebra={true}
|
||||
{dataToRows}
|
||||
/>
|
||||
</Column>
|
||||
</Row>
|
||||
</main>
|
||||
@@ -61,7 +61,7 @@ module.exports = {
  devServer: {
    hot: true,
    proxy: {
      '/api': 'https://projectmouseion.com/premiumizearr/api'
      '/api': 'http://localhost:8182'
    }
  },
  optimization: {