Compare commits

...

44 Commits

Author SHA1 Message Date
Jack Dallas
2e75f89615 Update README.md 2022-06-04 21:29:11 +01:00
Jack Dallas
3769dcb030 Set default Docker directories 2022-06-04 21:29:11 +01:00
Jack Dallas
f1b43bc52c Update docker default config and log location 2022-06-04 21:08:23 +01:00
Jack Dallas
ba18439aa5 Add Config API Service & Refactor Config Implementation 2022-06-04 21:08:23 +01:00
Jack Dallas
3cd946e2ca Add Config page to UI 2022-06-04 21:08:23 +01:00
Jack Dallas
dadf648c64 Update default config 2022-06-04 21:06:29 +01:00
Jack Dallas
62eb20149d Update docker release method 2022-06-04 19:59:05 +01:00
Jack Dallas
8a1bd66175 [ci] Always setup docker 2022-06-04 04:04:37 +01:00
Jack Dallas
31b0e4e2fc Enable pre-releases 2022-06-04 04:04:37 +01:00
Jack Dallas
4964ff95af Add arm64 docker build 2022-05-24 23:46:20 +01:00
Jack Dallas
a42388ad5a Handle web calls if when services aren't initialised 2022-05-24 22:46:14 +01:00
Jack Dallas
f8d253c460 Fix broken log messages 2022-05-24 22:46:14 +01:00
Jack Dallas
06a6f4f885 Fix pointer deref 2022-05-24 22:46:14 +01:00
Jack Dallas
4742c0823e Update NPM 2022-05-24 22:46:14 +01:00
Jack Dallas
e7794d57cb UI: Support more errors 2022-05-24 22:46:14 +01:00
Jack Dallas
27d90324e0 Add environment variables for flags 2022-05-24 22:46:14 +01:00
Jack Dallas
3d67c6d8ab Rework simultaneous downloads cap 2022-05-24 22:46:14 +01:00
Jack Dallas
8c3d0dc8c4 Update existing configs 2022-05-15 19:40:27 +01:00
Jack Dallas
35dd81c204 update docker ubuntu version & expose port 2022-05-15 19:30:23 +01:00
Jack Dallas
4512262e73 Update Dockerfile (Thanks @JRDevo) 2022-05-15 19:16:46 +01:00
Jack Dallas
ef1cb2978d Add simultaneous downloads cap 2022-05-15 19:15:39 +01:00
dependabot[bot]
25b8431f7e Bump async from 2.6.3 to 2.6.4 in /web
Bumps [async](https://github.com/caolan/async) from 2.6.3 to 2.6.4.
- [Release notes](https://github.com/caolan/async/releases)
- [Changelog](https://github.com/caolan/async/blob/v2.6.4/CHANGELOG.md)
- [Commits](https://github.com/caolan/async/compare/v2.6.3...v2.6.4)

---
updated-dependencies:
- dependency-name: async
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-05-15 19:11:26 +01:00
Jack Dallas
0008b80df4 Fix docker build 2022-04-12 23:40:57 +01:00
Jack Dallas
c80fc3149d Make config change a breaking update 2022-04-12 20:23:58 +01:00
dependabot[bot]
00bf4e1966 Bump node-forge from 1.2.1 to 1.3.1 in /web
Bumps [node-forge](https://github.com/digitalbazaar/forge) from 1.2.1 to 1.3.1.
- [Release notes](https://github.com/digitalbazaar/forge/releases)
- [Changelog](https://github.com/digitalbazaar/forge/blob/main/CHANGELOG.md)
- [Commits](https://github.com/digitalbazaar/forge/compare/v1.2.1...v1.3.1)

---
updated-dependencies:
- dependency-name: node-forge
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-04-12 20:04:55 +01:00
dependabot[bot]
419854a757 Bump follow-redirects from 1.14.7 to 1.14.9 in /web
Bumps [follow-redirects](https://github.com/follow-redirects/follow-redirects) from 1.14.7 to 1.14.9.
- [Release notes](https://github.com/follow-redirects/follow-redirects/releases)
- [Commits](https://github.com/follow-redirects/follow-redirects/compare/v1.14.7...v1.14.9)

---
updated-dependencies:
- dependency-name: follow-redirects
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-04-12 20:04:26 +01:00
dependabot[bot]
e19b49c9c8 Bump minimist from 1.2.5 to 1.2.6 in /web
Bumps [minimist](https://github.com/substack/minimist) from 1.2.5 to 1.2.6.
- [Release notes](https://github.com/substack/minimist/releases)
- [Commits](https://github.com/substack/minimist/compare/1.2.5...1.2.6)

---
updated-dependencies:
- dependency-name: minimist
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-04-12 19:58:26 +01:00
Jack Dallas
6c0c53d029 Update README and docker location 2022-02-11 14:05:05 +00:00
Jack Dallas
ba9476cb34 Docker support 2022-02-11 13:44:48 +00:00
Jack Dallas
27534fe932 Make Arr's a list not locked to one of each type 2022-02-11 13:43:49 +00:00
Jack Dallas
73622eda17 Make unzip directory configurable 2022-02-03 12:17:52 +00:00
Jack Dallas
3dff845fd2 Tweak web paths to work on url root and subpaths 2022-02-01 15:18:12 +00:00
Jack Dallas
74c49799eb Enhance logging 2022-02-01 15:18:12 +00:00
Dallas
d19606c4dd Update README.md 2022-02-01 00:07:14 +00:00
Jack Dallas
f1d66d9532 Update README 2022-02-01 00:00:25 +00:00
Dallas
5d02813066 Merge pull request #10 from JackDallas/dpkg
Fix dpkg and perms
2022-01-31 19:11:08 +00:00
Jack Dallas
226b2067ff Fix dpkg and perms 2022-01-31 19:07:18 +00:00
Dallas
27037a9621 Merge pull request #7 from JackDallas/limit-logs
Limit log size
2022-01-31 17:33:08 +00:00
Jack Dallas
fd371c38d9 limit logs 2022-01-31 17:25:19 +00:00
Dallas
26c9a5e07a Don't glob 2022-01-31 17:25:00 +00:00
Dallas
dfe64f0c56 Merge pull request #9 from JackDallas/update-ci
update-ci
2022-01-31 17:15:59 +00:00
Jack Dallas
8f112500cc Upload artifacts 2022-01-31 17:15:45 +00:00
Dallas
395083f32f Merge pull request #8 from JackDallas/fix-ci
Fix ci
2022-01-31 16:56:08 +00:00
Jack Dallas
8dd342a7a5 fix ci 2022-01-31 16:51:03 +00:00
25 changed files with 6247 additions and 1104 deletions

View File

@@ -9,6 +9,7 @@ on:
permissions:
contents: write
packages: write
jobs:
build:
@@ -26,9 +27,16 @@ jobs:
- name: Confirm Version
run: go version
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v2
- name: Build
uses: goreleaser/goreleaser-action@v2
if: ! startsWith(github.ref, 'refs/tags/')
if: startsWith(github.ref, 'refs/tags/') == false
with:
distribution: goreleaser
version: latest
@@ -36,6 +44,13 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- uses: docker/login-action@v1
if: startsWith(github.ref, 'refs/tags/')
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Release
uses: goreleaser/goreleaser-action@v2
if: startsWith(github.ref, 'refs/tags/')
@@ -44,4 +59,10 @@ jobs:
version: latest
args: release --rm-dist
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload assets
uses: actions/upload-artifact@v2
with:
name: artifacts
path: dist/

View File

@@ -13,9 +13,14 @@ builds:
goarch:
- amd64
- arm64
- arm
goarm:
- 7
ignore:
- goos: windows
goarch: arm64
- goos: windows
goarch: arm
archives:
- format_overrides:
@@ -31,50 +36,120 @@ archives:
- src: build/static/*
dst: static
strip_parent: true
checksum:
name_template: 'checksums.txt'
snapshot:
name_template: "{{ incpatch .Version }}-next"
changelog:
sort: asc
filters:
exclude:
- '^docs:'
- '^test:'
nfpms:
# note that this is an array of nfpm configs
-
# Name of the package.
# Defaults to `ProjectName`.
package_name: premiumizearr
# Your app's vendor.
# Default is empty.
bindir: /opt/premiumizearrd
vendor: Jack Dallas.
# Template to your app's homepage.
# Default is empty.
homepage: https://github.com/JackDallas/Premiumizearr
# Your app's maintainer (probably you).
# Default is empty.
maintainer: Dallas <jack-dallas@outlook.com>
# Template to your app's description.
# Default is empty.
description: Service to connect premiumize.me to Arr clients.
# Your app's license.
# Default is empty.
license: GPLv3
# Formats to be generated.
formats:
- deb
contents:
# Basic file that applies to all packagers
- src: build/static/*
dst: /usr/local/bin/static/
dst: /opt/premiumizearrd/static/
- src: init/premiumizearrd.service
dst: /etc/systemd/system/
dst: /etc/systemd/system/premiumizearrd.service
scripts:
postinstall: "scripts/postinstall.sh"
dockers:
-
use: docker
goos: linux
goarch: amd64
image_templates:
- "ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64"
skip_push: "false"
build_flag_templates:
- "--pull"
- "--label=org.opencontainers.image.created={{.Date}}"
- "--label=org.opencontainers.image.title={{.ProjectName}}"
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
- "--label=org.opencontainers.image.version={{.Version}}"
- "--platform=linux/amd64"
extra_files:
- build/static/
-
use: buildx
goos: linux
goarch: arm64
image_templates:
- "ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64"
skip_push: "false"
build_flag_templates:
- "--pull"
- "--label=org.opencontainers.image.created={{.Date}}"
- "--label=org.opencontainers.image.title={{.ProjectName}}"
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
- "--label=org.opencontainers.image.version={{.Version}}"
- "--platform=linux/arm64"
extra_files:
- build/static/
-
use: buildx
goos: linux
goarch: arm
goarm: 7
image_templates:
- "ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7"
skip_push: "false"
build_flag_templates:
- "--pull"
- "--label=org.opencontainers.image.created={{.Date}}"
- "--label=org.opencontainers.image.title={{.ProjectName}}"
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
- "--label=org.opencontainers.image.version={{.Version}}"
- "--platform=linux/arm/v7"
extra_files:
- build/static/
docker_manifests:
# Release variants not created on rc-$i tags
- skip_push: auto
- name_template: 'ghcr.io/jackdallas/premiumizearr:latest'
image_templates:
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64'
- name_template: 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}'
image_templates:
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64'
- name_template: 'ghcr.io/jackdallas/premiumizearr:{{ .Major }}'
image_templates:
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64'
- name_template: 'ghcr.io/jackdallas/premiumizearr:{{ .Major }}.{{ .Minor }}'
image_templates:
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-amd64'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-armv7'
- 'ghcr.io/jackdallas/premiumizearr:{{ .Tag }}-arm64'
release:
# If set to auto, will mark the release as not ready for production
# in case there is an indicator for this in the tag e.g. v1.0.0-rc1
# If set to true, will mark the release as not ready for production.
# Default is false.
prerelease: auto
footer: "**Full Changelog**: https://github.com/JackDallas/Premiumizearr/compare/{{ .PreviousTag }}...{{ .Tag }}"

20
Dockerfile Normal file
View File

@@ -0,0 +1,20 @@
FROM ubuntu:latest
RUN apt update && \
apt install openssl -y && \
apt install ca-certificates \
&& rm -rf /var/lib/apt/lists/*
RUN mkdir /data
ENV PREMIUMIZEARR_CONFIG_DIR_PATH=/data
ENV PREMIUMIZEARR_LOGGING_DIR_PATH=/data
EXPOSE 8182
WORKDIR /opt/app/
COPY premiumizearrd /opt/app/
COPY build/static /opt/app/static
ENTRYPOINT [ "/opt/app/premiumizearrd" ]

View File

@@ -1,11 +1,62 @@
# premiumizearr
# Premiumizearr
## Features
- Monitor blackhole directory to push `.magnet` and `.nzb` to Premiumize.me
- Monitor and download Premiumize.me transfers
- Mark transfers as failed in sonarr
- Monitor and download Premiumize.me transfers (web ui on default port 8182)
- Mark transfers as failed in Radarr & Sonarr
## Up Next
## Support the project by using my invite code
- Radarr support
[Invite Code](https://www.premiumize.me/ref/446038083)
## Install
[Grab the latest release artifact links here](https://github.com/JackDallas/Premiumizearr/releases/)
### Binary
```
wget https://github.com/JackDallas/Premiumizearr/releases/download/x.x.x/Premiumizearr_x.x.x_linux_amd64.tar.gz
tar xf Premiumizearr_x.x.x.x_linux_amd64.tar.gz
cd Premiumizearr_x.x.x.x_linux_amd64
sudo mkdir /opt/premiumizearrd/
sudo cp -r premiumizearrd static/ /opt/premiumizearrd/
sudo cp premiumizearrd /etc/systemd/system/
sudo systemctl-reload
sudo systemctl enable premiumizearrd.service
sudo systemctl start premiumizearrd.service
```
### deb file
```
wget https://github.com/JackDallas/Premiumizearr/releases/download/x.x.x/premiumizearr_x.x.x._linux_amd64.deb
sudo dpkg -i premiumizearr_x.x.x.x_linux_amd64.deb
```
### Docker
[Docker images are listed here](https://github.com/jackdallas/Premiumizearr/pkgs/container/premiumizearr)
`docker run -p 8182:8182 -v /host/data/path:/data -v /host/downloads/path:/downloads -v /host/blackhole/path:/blackhole ghcr.io/jackdallas/premiumizearr:latest`
> Note: The /data mount is where the `config.yaml` and log files are kept
## Setup
### Premiumizearrd
Running for the first time the server will start on http://0.0.0.0:8182
If you already use this binding you can edit them in the `config.yaml`
> Note: Currently most changes in the config ui will not be used until a restart is complete
### Sonarr/Radarr
- Go to your Arr's `Download Client` settings page
- Add a new Torrent Blackhole client, set the `Torrent Folder` to the previously set `BlackholeDirectory` location, set the `Watch Folder` to the previously set `DownloadsDirectory` location
- Add a new Usenet Blackhole client, set the `Nzb Folder` to the previously set `BlackholeDirectory` location, set the `Watch Folder` to the previously set `DownloadsDirectory` location

View File

@@ -2,15 +2,16 @@ package main
import (
"flag"
"io"
"os"
"path"
"time"
"github.com/jackdallas/premiumizearr/internal/arr"
"github.com/jackdallas/premiumizearr/internal/config"
"github.com/jackdallas/premiumizearr/internal/service"
"github.com/jackdallas/premiumizearr/internal/utils"
"github.com/jackdallas/premiumizearr/internal/web_service"
"github.com/jackdallas/premiumizearr/pkg/premiumizeme"
"github.com/orandin/lumberjackrus"
log "github.com/sirupsen/logrus"
"golift.io/starr"
"golift.io/starr/radarr"
@@ -21,10 +22,12 @@ func main() {
//Flags
var logLevel string
var configFile string
var loggingDirectory string
//Parse flags
flag.StringVar(&logLevel, "log", "info", "Logging level: \n \tinfo,debug,trace")
flag.StringVar(&configFile, "config", "", "Config file path")
flag.StringVar(&logLevel, "log", utils.EnvOrDefault("PREMIUMIZEARR_LOG_LEVEL", "info"), "Logging level: \n \tinfo,debug,trace")
flag.StringVar(&configFile, "config", utils.EnvOrDefault("PREMIUMIZEARR_CONFIG_DIR_PATH", "./"), "The directory the config.yml is located in")
flag.StringVar(&loggingDirectory, "logging-dir", utils.EnvOrDefault("PREMIUMIZEARR_LOGGING_DIR_PATH", "./"), "The directory logs are to be written to")
flag.Parse()
lvl, err := log.ParseLevel(logLevel)
@@ -33,49 +36,98 @@ func main() {
lvl = log.InfoLevel
}
log.SetLevel(lvl)
hook, err := lumberjackrus.NewHook(
&lumberjackrus.LogFile{
Filename: path.Join(loggingDirectory, "premiumizearr.general.log"),
MaxSize: 100,
MaxBackups: 1,
MaxAge: 1,
Compress: false,
LocalTime: false,
},
log.InfoLevel,
&log.TextFormatter{},
&lumberjackrus.LogFileOpts{
log.InfoLevel: &lumberjackrus.LogFile{
Filename: path.Join(loggingDirectory, "premiumizearr.info.log"),
MaxSize: 100,
MaxBackups: 1,
MaxAge: 1,
Compress: false,
LocalTime: false,
},
log.ErrorLevel: &lumberjackrus.LogFile{
Filename: path.Join(loggingDirectory, "premiumizearr.error.log"),
MaxSize: 100, // optional
MaxBackups: 1, // optional
MaxAge: 1, // optional
Compress: false, // optional
LocalTime: false, // optional
},
},
)
logFile, err := os.OpenFile("premiumizearr.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
log.Error(err)
} else {
log.SetOutput(io.MultiWriter(logFile, os.Stdout))
panic(err)
}
log.Info("")
log.AddHook(hook)
log.Info("---------- Starting premiumizearr daemon ----------")
log.Info("")
config, err := config.LoadOrCreateConfig(configFile)
// Override config data directories if running in docker
if utils.IsRunningInDockerContainer() {
if config.BlackholeDirectory == "" {
config.BlackholeDirectory = "/blackhole"
}
if config.DownloadsDirectory == "" {
config.DownloadsDirectory = "/downloads"
}
if config.UnzipDirectory == "" {
config.UnzipDirectory = "/unzip"
}
}
if err != nil {
panic(err)
}
if config.PremiumizemeAPIKey == "" {
panic("premiumizearr API Key is empty")
log.Warn("Premiumizeme API key not set, application will not work until it's set")
}
// Initialisation
premiumizearr_client := premiumizeme.NewPremiumizemeClient(config.PremiumizemeAPIKey)
starr_config_sonarr := starr.New(config.SonarrAPIKey, config.SonarrURL, 0)
starr_config_radarr := starr.New(config.RadarrAPIKey, config.RadarrURL, 0)
arrs := []arr.IArr{}
sonarr_wrapper := arr.SonarrArr{
Client: sonarr.New(starr_config_sonarr),
History: nil,
LastUpdate: time.Now(),
}
radarr_wrapper := arr.RadarrArr{
Client: radarr.New(starr_config_radarr),
History: nil,
LastUpdate: time.Now(),
}
arrs := []arr.IArr{
&sonarr_wrapper,
&radarr_wrapper,
for _, arr_config := range config.Arrs {
switch arr_config.Type {
case "Sonarr":
config := starr.New(arr_config.APIKey, arr_config.URL, 0)
wrapper := arr.SonarrArr{
Name: arr_config.Name,
Client: sonarr.New(config),
History: nil,
LastUpdate: time.Now(),
}
arrs = append(arrs, &wrapper)
case "Radarr":
config := starr.New(arr_config.APIKey, arr_config.URL, 0)
wrapper := arr.RadarrArr{
Name: arr_config.Name,
Client: radarr.New(config),
History: nil,
LastUpdate: time.Now(),
}
arrs = append(arrs, &wrapper)
default:
log.Error("Unknown arr type: %s, not adding Arr %s", arr_config.Type, arr_config.Name)
}
}
transfer_manager := service.NewTransferManagerService(premiumizearr_client, &arrs, &config)

17
config.yaml Normal file
View File

@@ -0,0 +1,17 @@
PremiumizemeAPIKey: xxxxxxxxx
Arrs:
- Name: ""
URL: http://localhost:8989
APIKey: xxxxxxxxx
Type: Sonarr
- Name: ""
URL: http://localhost:7878
APIKey: xxxxxxxxx
Type: Radarr
BlackholeDirectory: ""
DownloadsDirectory: ""
UnzipDirectory: ""
bindIP: 0.0.0.0
bindPort: "8182"
WebRoot: ""
SimultaneousDownloads: 5

View File

@@ -1,9 +0,0 @@
PremiumizemeAPIKey: ""
SonarrURL: http://localhost:8989
SonarrAPIKey: ""
RadarrURL: http://localhost:7878
RadarrAPIKey: ""
BlackholeDirectory: ""
DownloadsDirectory: ""
bindIP: 0.0.0.0
bindPort: "8182"

3
go.mod
View File

@@ -6,12 +6,15 @@ require (
github.com/dustin/go-humanize v1.0.0
github.com/fsnotify/fsnotify v1.5.1
github.com/gorilla/mux v1.8.0
github.com/orandin/lumberjackrus v1.0.1
github.com/sirupsen/logrus v1.8.1
golift.io/starr v0.13.0
gopkg.in/yaml.v2 v2.4.0
)
require (
github.com/BurntSushi/toml v1.0.0 // indirect
golang.org/x/net v0.0.0-20220114011407-0dd24b26b47d // indirect
golang.org/x/sys v0.0.0-20211110154304-99a53858aa08 // indirect
gopkg.in/natefinch/lumberjack.v2 v2.0.0 // indirect
)

6
go.sum
View File

@@ -1,3 +1,5 @@
github.com/BurntSushi/toml v1.0.0 h1:dtDWrepsVPfW9H/4y7dDgFc2MBUSeJhlaDtK13CxFlU=
github.com/BurntSushi/toml v1.0.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
@@ -7,6 +9,8 @@ github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5
github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=
github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
github.com/orandin/lumberjackrus v1.0.1 h1:7ysDQ0MHD79zIFN9/EiDHjUcgopNi5ehtxFDy8rUkWo=
github.com/orandin/lumberjackrus v1.0.1/go.mod h1:xYLt6H8W93pKnQgUQaxsApS0Eb4BwHLOkxk5DVzf5H0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=
@@ -48,5 +52,7 @@ golift.io/starr v0.13.0 h1:LoihBAH3DQ0ikPNHTVg47tUU+475mzbr1ahMcY5gdno=
golift.io/starr v0.13.0/go.mod h1:IZIzdT5/NBdhM08xAEO5R1INgGN+Nyp4vCwvgHrbKVs=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8=
gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=

View File

@@ -2,8 +2,8 @@
Description=Premiumizearr Daemon
After=network.target
[Service]
User=$USER
Group=$USER
User=1000
Group=1000
UMask=0002
Type=simple
ExecStart=/opt/premiumizearrd/premiumizearrd

View File

@@ -18,6 +18,7 @@ type IArr interface {
}
type SonarrArr struct {
Name string
ClientMutex sync.Mutex
Client *sonarr.Sonarr
HistoryMutex sync.Mutex
@@ -29,6 +30,7 @@ type SonarrArr struct {
}
type RadarrArr struct {
Name string
ClientMutex sync.Mutex
Client *radarr.Radarr
HistoryMutex sync.Mutex

View File

@@ -3,8 +3,11 @@ package config
import (
"errors"
"io/ioutil"
"log"
"strings"
log "github.com/sirupsen/logrus"
"os"
"path"
"gopkg.in/yaml.v2"
)
@@ -14,25 +17,44 @@ var (
ErrFailedToFindConfigFile = errors.New("failed to find config file")
)
//ArrType enum for Sonarr/Radarr
type ArrType string
const (
Sonarr ArrType = "Sonarr"
Radarr ArrType = "Radarr"
)
type ArrConfig struct {
Name string `yaml:"Name"`
URL string `yaml:"URL"`
APIKey string `yaml:"APIKey"`
Type ArrType `yaml:"Type"`
}
type Config struct {
altConfigLocation string
PremiumizemeAPIKey string `yaml:"PremiumizemeAPIKey"`
SonarrURL string `yaml:"SonarrURL"`
SonarrAPIKey string `yaml:"SonarrAPIKey"`
RadarrURL string `yaml:"RadarrURL"`
RadarrAPIKey string `yaml:"RadarrAPIKey"`
Arrs []ArrConfig `yaml:"Arrs"`
BlackholeDirectory string `yaml:"BlackholeDirectory"`
DownloadsDirectory string `yaml:"DownloadsDirectory"`
UnzipDirectory string `yaml:"UnzipDirectory"`
BindIP string `yaml:"bindIP"`
BindPort string `yaml:"bindPort"`
WebRoot string `yaml:"WebRoot"`
SimultaneousDownloads int `yaml:"SimultaneousDownloads"`
}
func loadConfigFromDisk() (Config, error) {
func loadConfigFromDisk(altConfigLocation string) (Config, error) {
var config Config
file, err := ioutil.ReadFile("config.yaml")
file, err := ioutil.ReadFile(path.Join(altConfigLocation, "config.yaml"))
if err != nil {
return config, ErrFailedToFindConfigFile
@@ -43,59 +65,89 @@ func loadConfigFromDisk() (Config, error) {
return config, ErrInvalidConfigFile
}
config = versionUpdateConfig(config)
config.Save()
config.altConfigLocation = altConfigLocation
return config, nil
}
func createDefaultConfig() error {
config := Config{
PremiumizemeAPIKey: "",
SonarrURL: "http://localhost:8989",
SonarrAPIKey: "",
RadarrURL: "http://localhost:7878",
RadarrAPIKey: "",
BlackholeDirectory: "",
DownloadsDirectory: "",
BindIP: "0.0.0.0",
BindPort: "8182",
func (c *Config) Save() bool {
data, err := yaml.Marshal(*c)
if err == nil {
//Save config to disk to add missing fields
log.Tracef("Saving config to %s", path.Join(c.altConfigLocation, "config.yaml"))
err = ioutil.WriteFile(path.Join(c.altConfigLocation, "config.yaml"), data, 0644)
if err != nil {
log.Errorf("Failed to save config file: %+v", err)
return false
}
}
log.Trace("Config saved")
return true
}
func versionUpdateConfig(config Config) Config {
// 1.1.3
if config.SimultaneousDownloads == 0 {
config.SimultaneousDownloads = 5
}
file, err := yaml.Marshal(config)
if err != nil {
return err
}
return config
}
err = ioutil.WriteFile("config.yaml", file, 0644)
if err != nil {
return err
func defaultConfig(altConfigLocation string) Config {
return Config{
PremiumizemeAPIKey: "xxxxxxxxx",
Arrs: []ArrConfig{
{Name: "Sonarr", URL: "http://localhost:8989", APIKey: "xxxxxxxxx", Type: Sonarr},
{Name: "Radarr", URL: "http://localhost:7878", APIKey: "xxxxxxxxx", Type: Radarr},
},
BlackholeDirectory: "",
DownloadsDirectory: "",
UnzipDirectory: "",
BindIP: "0.0.0.0",
BindPort: "8182",
WebRoot: "",
SimultaneousDownloads: 5,
}
return nil
}
func LoadOrCreateConfig(altConfigLocation string) (Config, error) {
if altConfigLocation != "" {
if _, err := ioutil.ReadFile(altConfigLocation); err != nil {
log.Panicf("Failed to find config file at %s Error: %+v", altConfigLocation, err)
}
}
config, err := loadConfigFromDisk()
config, err := loadConfigFromDisk(altConfigLocation)
if err != nil {
if err == ErrFailedToFindConfigFile {
err = createDefaultConfig()
if err != nil {
return config, err
}
panic("Default config created, please fill it out")
config = defaultConfig(altConfigLocation)
log.Warn("No config file found, created default config file")
}
if err == ErrInvalidConfigFile {
return config, ErrInvalidConfigFile
}
}
//Clean up url
if strings.HasSuffix(config.SonarrURL, ("/")) {
config.SonarrURL = config.SonarrURL[:len(config.SonarrURL)-1]
}
return config, nil
}
func (c *Config) GetTempBaseDir() string {
if c.UnzipDirectory != "" {
return path.Dir(c.UnzipDirectory)
}
return path.Join(os.TempDir(), "premiumizearrd")
}
func (c *Config) GetTempDir() (string, error) {
// Create temp dir in os temp location
tempDir := c.GetTempBaseDir()
err := os.MkdirAll(tempDir, os.ModePerm)
if err != nil {
return "", err
}
dir, err := ioutil.TempDir(tempDir, "unzip-")
if err != nil {
return "", err
}
return dir, nil
}

View File

@@ -48,7 +48,7 @@ func (dw *DirectoryWatcherService) Watch() {
dw.downloadsFolderID = utils.GetDownloadsFolderIDFromPremiumizeme(dw.premiumizemeClient)
log.Info("Clearing tmp directory...")
tempDir := utils.GetTempBaseDir()
tempDir := dw.config.GetTempBaseDir()
err := os.RemoveAll(tempDir)
if err != nil {
log.Errorf("Error clearing tmp directory %s", tempDir)
@@ -146,7 +146,7 @@ func (dw *DirectoryWatcherService) processUploads() {
log.Trace("File already uploaded, removing from Disk")
os.Remove(filePath)
default:
log.Error(err)
log.Error("Error creating transfer: %s", err)
}
} else {
dw.status = "Okay"

View File

@@ -77,7 +77,7 @@ func (manager *TransferManagerService) TaskUpdateTransfersList() {
log.Debug("Running Task UpdateTransfersList")
transfers, err := manager.premiumizemeClient.GetTransfers()
if err != nil {
log.Error(err)
log.Errorf("Error getting transfers: %s", err.Error())
return
}
manager.updateTransfers(transfers)
@@ -97,7 +97,7 @@ func (manager *TransferManagerService) TaskUpdateTransfersList() {
}
log.Tracef("Found %s in %s history", transfer.Name, arr.GetArrName())
found = true
log.Debugf("Processing transfer that has errored: ", transfer.Name)
log.Debugf("Processing transfer that has errored: %s", transfer.Name)
go arr.HandleErrorTransfer(&transfer, arrID, manager.premiumizemeClient)
}
@@ -107,15 +107,21 @@ func (manager *TransferManagerService) TaskUpdateTransfersList() {
func (manager *TransferManagerService) TaskCheckPremiumizeDownloadsFolder() {
log.Debug("Running Task CheckPremiumizeDownloadsFolder")
items, err := manager.premiumizemeClient.ListFolder(manager.downloadsFolderID)
if err != nil {
log.Error(err)
log.Errorf("Error listing downloads folder: %s", err.Error())
return
}
for _, item := range items {
log.Debugf("Processing completed item: %s", item.Name)
go manager.HandleFinishedItem(item, manager.config.DownloadsDirectory)
if manager.countDownloads() < manager.config.SimultaneousDownloads {
log.Debugf("Processing completed item: %s", item.Name)
manager.HandleFinishedItem(item, manager.config.DownloadsDirectory)
} else {
log.Debugf("Not processing any more transfers, %d are running and cap is %d", manager.countDownloads(), manager.config.SimultaneousDownloads)
break
}
}
}
@@ -134,6 +140,13 @@ func (manager *TransferManagerService) addDownload(item *premiumizeme.Item) {
}
}
func (manager *TransferManagerService) countDownloads() int {
manager.downloadListMutex.Lock()
defer manager.downloadListMutex.Unlock()
return len(manager.downloadList)
}
func (manager *TransferManagerService) removeDownload(name string) {
manager.downloadListMutex.Lock()
defer manager.downloadListMutex.Unlock()
@@ -154,7 +167,7 @@ func (manager *TransferManagerService) downloadExists(itemName string) bool {
return false
}
// Ran in a goroutine
// Returns when the download has been added to the list
func (manager *TransferManagerService) HandleFinishedItem(item premiumizeme.Item, downloadDirectory string) {
if manager.downloadExists(item.Name) {
log.Tracef("Transfer %s is already downloading", item.Name)
@@ -162,78 +175,78 @@ func (manager *TransferManagerService) HandleFinishedItem(item premiumizeme.Item
}
manager.addDownload(&item)
//Create entry in downloads map to lock item
// manager.downloadList[item.Name] = progress_downloader.NewWriteCounter()
log.Debug("Downloading: ", item.Name)
log.Tracef("%+v", item)
var link string
var err error
if item.Type == "file" {
link, err = manager.premiumizemeClient.GenerateZippedFileLink(item.ID)
} else if item.Type == "folder" {
link, err = manager.premiumizemeClient.GenerateZippedFolderLink(item.ID)
} else {
log.Errorf("Item is not of type 'file' or 'folder' !! Can't download %s", item.Name)
return
}
if err != nil {
log.Error(err)
go func() {
log.Debug("Downloading: ", item.Name)
log.Tracef("%+v", item)
var link string
var err error
if item.Type == "file" {
link, err = manager.premiumizemeClient.GenerateZippedFileLink(item.ID)
} else if item.Type == "folder" {
link, err = manager.premiumizemeClient.GenerateZippedFolderLink(item.ID)
} else {
log.Errorf("Item is not of type 'file' or 'folder' !! Can't download %s", item.Name)
return
}
if err != nil {
log.Error("Error generating download link: %s", err)
manager.removeDownload(item.Name)
return
}
log.Trace("Downloading: ", link)
tempDir, err := manager.config.GetTempDir()
if err != nil {
log.Errorf("Could not create temp dir: %s", err)
manager.removeDownload(item.Name)
return
}
splitString := strings.Split(link, "/")
savePath := path.Join(tempDir, splitString[len(splitString)-1])
log.Trace("Downloading to: ", savePath)
out, err := os.Create(savePath)
if err != nil {
log.Errorf("Could not create save path: %s", err)
manager.removeDownload(item.Name)
return
}
defer out.Close()
err = progress_downloader.DownloadFile(link, savePath, manager.downloadList[item.Name].ProgressDownloader)
if err != nil {
log.Errorf("Could not download file: %s", err)
manager.removeDownload(item.Name)
return
}
log.Tracef("Unzipping %s to %s", savePath, downloadDirectory)
err = utils.Unzip(savePath, downloadDirectory)
if err != nil {
log.Errorf("Could not unzip file: %s", err)
manager.removeDownload(item.Name)
return
}
log.Tracef("Removing zip %s from system", savePath)
err = os.RemoveAll(savePath)
if err != nil {
manager.removeDownload(item.Name)
log.Errorf("Could not remove zip: %s", err)
return
}
err = manager.premiumizemeClient.DeleteFolder(item.ID)
if err != nil {
manager.removeDownload(item.Name)
log.Error("Error deleting folder on premiumuze.me: %s", err)
return
}
//Remove download entry from downloads map
manager.removeDownload(item.Name)
return
}
log.Trace("Downloading: ", link)
tempDir, err := utils.GetTempDir()
if err != nil {
log.Errorf("Could not create temp dir: %s", err)
manager.removeDownload(item.Name)
return
}
splitString := strings.Split(link, "/")
savePath := path.Join(tempDir, splitString[len(splitString)-1])
log.Trace("Downloading to: ", savePath)
out, err := os.Create(savePath)
if err != nil {
log.Errorf("Could not create save path: %s", err)
manager.removeDownload(item.Name)
return
}
defer out.Close()
err = progress_downloader.DownloadFile(link, savePath, manager.downloadList[item.Name].ProgressDownloader)
if err != nil {
log.Errorf("Could not download file: %s", err)
manager.removeDownload(item.Name)
return
}
log.Tracef("Unzipping %s to %s", savePath, downloadDirectory)
err = utils.Unzip(savePath, downloadDirectory)
if err != nil {
log.Errorf("Could not unzip file: %s", err)
manager.removeDownload(item.Name)
return
}
log.Tracef("Removing zip %s from system", savePath)
err = os.RemoveAll(savePath)
if err != nil {
manager.removeDownload(item.Name)
log.Errorf("Could not remove zip: %s", err)
return
}
err = manager.premiumizemeClient.DeleteFolder(item.ID)
if err != nil {
manager.removeDownload(item.Name)
log.Error(err)
return
}
//Remove download entry from downloads map
manager.removeDownload(item.Name)
}()
}

View File

@@ -4,9 +4,7 @@ import (
"archive/zip"
"fmt"
"io"
"io/ioutil"
"os"
"path"
"path/filepath"
"strings"
@@ -23,21 +21,6 @@ func StripDownloadTypesExtention(fileName string) string {
return fileName
}
func GetTempBaseDir() string {
return path.Join(os.TempDir(), "premiumizearrd")
}
// GetTempDir ensures the application's temp base directory exists and
// returns a fresh, unique "unzip-*" directory created inside it.
// The caller is responsible for removing the returned directory.
func GetTempDir() (string, error) {
	// Create temp dir in os temp location
	tempDir := GetTempBaseDir()
	// Bug fix: the original called os.Mkdir and discarded its error, so a
	// real failure (e.g. permissions) was only surfaced indirectly by the
	// TempDir call below. MkdirAll is a no-op when the directory exists
	// and its error is now checked.
	if err := os.MkdirAll(tempDir, os.ModePerm); err != nil {
		return "", err
	}
	dir, err := ioutil.TempDir(tempDir, "unzip-")
	if err != nil {
		return "", err
	}
	return dir, nil
}
// https://golangcode.com/unzip-files-in-go/
func Unzip(src string, dest string) error {
r, err := zip.OpenReader(src)
@@ -104,7 +87,8 @@ func GetDownloadsFolderIDFromPremiumizeme(premiumizemeClient *premiumizeme.Premi
folders, err := premiumizemeClient.GetFolders()
if err != nil {
log.Errorf("Error getting folders: %s", err)
log.Fatalf("Cannot read folders from premiumize.me, exiting!")
log.Errorf("Cannot read folders from premiumize.me, application will not run!")
return ""
}
const folderName = "arrDownloads"
@@ -119,10 +103,31 @@ func GetDownloadsFolderIDFromPremiumizeme(premiumizemeClient *premiumizeme.Premi
if len(downloadsFolderID) == 0 {
id, err := premiumizemeClient.CreateFolder(folderName)
if err != nil {
log.Fatalf("Cannot create downloads folder on premiumize.me, exiting! %+v", err)
log.Errorf("Cannot create downloads folder on premiumize.me, application will not run correctly! %+v", err)
}
downloadsFolderID = id
}
return downloadsFolderID
}
func EnvOrDefault(envName string, defaultValue string) string {
envValue := os.Getenv(envName)
if len(envValue) == 0 {
return defaultValue
}
return envValue
}
// IsRunningInDockerContainer reports whether the process appears to be
// running inside a Docker container. Docker creates a .dockerenv file at
// the root of the directory tree inside the container, so its presence
// is used as the signal.
func IsRunningInDockerContainer() bool {
	_, err := os.Stat("/.dockerenv")
	return err == nil
}

View File

@@ -0,0 +1,57 @@
package web_service
import (
"encoding/json"
"fmt"
"net/http"
"github.com/jackdallas/premiumizearr/internal/config"
)
// ConfigChangeResponse is the JSON body returned by ConfigHandler for
// POST requests, reporting whether the submitted config was applied.
type ConfigChangeResponse struct {
	// Succeeded is true when the new config was decoded and installed.
	Succeeded bool `json:"succeeded"`
	// Status is a human-readable outcome message shown in the UI.
	Status string `json:"status"`
}
// ConfigHandler serves the application configuration over HTTP.
//
// GET  responds with the current config serialized as JSON.
// POST decodes a replacement config from the request body, installs it
// on the server, persists it, and reports the outcome as a
// ConfigChangeResponse.
// Any other method is rejected with 405 Method Not Allowed.
func (s *server) ConfigHandler(w http.ResponseWriter, r *http.Request) {
	switch r.Method {
	case http.MethodGet:
		data, err := json.Marshal(s.config)
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		// Fix: declare the payload type; previously no Content-Type was
		// set, so clients received Go's sniffed text/plain default.
		w.Header().Set("Content-Type", "application/json")
		w.Write(data)
	case http.MethodPost:
		var newConfig config.Config
		err := json.NewDecoder(r.Body).Decode(&newConfig)
		if err != nil {
			EncodeAndWriteConfigChangeResponse(w, &ConfigChangeResponse{
				Succeeded: false,
				Status:    fmt.Sprintf("Config failed to update %s", err.Error()),
			})
			return
		}
		s.config = &newConfig
		// NOTE(review): any error from Save() is discarded here, so a
		// failed write to disk still reports success below — confirm
		// Save's signature and surface its error if it returns one.
		s.config.Save()
		EncodeAndWriteConfigChangeResponse(w, &ConfigChangeResponse{
			Succeeded: true,
			Status:    "Config updated",
		})
	default:
		http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
	}
}
// EncodeAndWriteConfigChangeResponse serializes resp as JSON and writes
// it to w. On marshal failure it responds with 500 Internal Server Error
// instead of a body.
func EncodeAndWriteConfigChangeResponse(w http.ResponseWriter, resp *ConfigChangeResponse) {
	data, err := json.Marshal(resp)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	// Fix: declare the payload type; previously no Content-Type was set.
	w.Header().Set("Content-Type", "application/json")
	w.Write(data)
}

View File

@@ -25,22 +25,22 @@ type IndexTemplates struct {
var indexBytes []byte
const webRoot = "premiumizearr"
type server struct {
transferManager *service.TransferManagerService
directoryWatcherService *service.DirectoryWatcherService
config *config.Config
}
// http Router
func StartWebServer(transferManager *service.TransferManagerService, directoryWatcher *service.DirectoryWatcherService, config *config.Config) {
log.Info("Starting web server...")
tmpl, err := template.ParseFiles("./static/index.html")
if err != nil {
log.Fatal(err)
}
var ibytes bytes.Buffer
err = tmpl.Execute(&ibytes, &IndexTemplates{webRoot})
err = tmpl.Execute(&ibytes, &IndexTemplates{config.WebRoot})
if err != nil {
log.Fatal(err)
}
@@ -49,33 +49,46 @@ func StartWebServer(transferManager *service.TransferManagerService, directoryWa
s := server{
transferManager: transferManager,
directoryWatcherService: directoryWatcher,
config: config,
}
spa := spaHandler{
staticPath: "static",
indexPath: "index.html",
webRoot: config.WebRoot,
}
r := mux.NewRouter()
log.Infof("Creating route: %s", webRoot+"/api/transfers")
r.HandleFunc("/"+webRoot+"/api/transfers", s.TransfersHandler)
transferPath := "/api/transfers"
downloadsPath := "/api/downloads"
blackholePath := "/api/blackhole"
configPathBase := "/api/config"
log.Infof("Creating route: %s", webRoot+"/api/downloads")
r.HandleFunc("/"+webRoot+"/api/downloads", s.DownloadsHandler)
if config.WebRoot != "" {
transferPath = path.Join(config.WebRoot, transferPath)
downloadsPath = path.Join(config.WebRoot, downloadsPath)
blackholePath = path.Join(config.WebRoot, blackholePath)
configPathBase = path.Join(config.WebRoot, configPathBase)
}
log.Infof("Creating route: %s", webRoot+"/api/blackhole")
r.HandleFunc("/"+webRoot+"/api/blackhole", s.BlackholeHandler)
r.HandleFunc(transferPath, s.TransfersHandler)
r.HandleFunc(downloadsPath, s.DownloadsHandler)
r.HandleFunc(blackholePath, s.BlackholeHandler)
r.HandleFunc(configPathBase, s.ConfigHandler)
r.PathPrefix("/").Handler(spa)
address := fmt.Sprintf("%s:%s", config.BindIP, config.BindPort)
srv := &http.Server{
Handler: r,
Addr: fmt.Sprintf("%s:%s", config.BindIP, config.BindPort),
Addr: address,
// Good practice: enforce timeouts for servers you create!
WriteTimeout: 15 * time.Second,
ReadTimeout: 15 * time.Second,
}
log.Infof("Web server started on %s", address)
srv.ListenAndServe()
}
@@ -120,15 +133,19 @@ type DownloadsResponse struct {
func (s *server) DownloadsHandler(w http.ResponseWriter, r *http.Request) {
var resp DownloadsResponse
for _, v := range s.transferManager.GetDownloads() {
resp.Downloads = append(resp.Downloads, Download{
Added: v.Added.Unix(),
Name: v.Name,
Progress: v.ProgressDownloader.GetProgress(),
Speed: v.ProgressDownloader.GetSpeed(),
})
if s.transferManager == nil {
resp.Status = "Not Initialized"
} else {
for _, v := range s.transferManager.GetDownloads() {
resp.Downloads = append(resp.Downloads, Download{
Added: v.Added.Unix(),
Name: v.Name,
Progress: v.ProgressDownloader.GetProgress(),
Speed: v.ProgressDownloader.GetSpeed(),
})
}
resp.Status = ""
}
resp.Status = ""
data, err := json.Marshal(resp)
if err != nil {
@@ -141,14 +158,20 @@ func (s *server) DownloadsHandler(w http.ResponseWriter, r *http.Request) {
func (s *server) BlackholeHandler(w http.ResponseWriter, r *http.Request) {
var resp BlackholeResponse
for i, n := range s.directoryWatcherService.Queue.GetQueue() {
name := path.Base(n)
resp.BlackholeFiles = append(resp.BlackholeFiles, BlackholeFile{
ID: i,
Name: name,
})
if s.directoryWatcherService == nil {
resp.Status = "Not Initialized"
} else {
for i, n := range s.directoryWatcherService.Queue.GetQueue() {
name := path.Base(n)
resp.BlackholeFiles = append(resp.BlackholeFiles, BlackholeFile{
ID: i,
Name: name,
})
}
resp.Status = s.directoryWatcherService.GetStatus()
}
resp.Status = s.directoryWatcherService.GetStatus()
data, err := json.Marshal(resp)
if err != nil {
@@ -159,10 +182,11 @@ func (s *server) BlackholeHandler(w http.ResponseWriter, r *http.Request) {
w.Write(data)
}
// Shamlessly stolen from mux examples https://github.com/gorilla/mux#examples
// Shamelessly stolen from mux examples https://github.com/gorilla/mux#examples
// spaHandler serves a single-page application: static assets are read
// from staticPath, unknown routes fall back to indexPath, and webRoot
// is stripped from incoming request paths before lookup.
type spaHandler struct {
	staticPath string
	indexPath  string
	webRoot    string
}
func (h spaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
@@ -175,8 +199,9 @@ func (h spaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
return
}
path = strings.Replace(path, webRoot, "", 1)
if h.webRoot != "" {
path = strings.Replace(path, h.webRoot, "", 1)
}
// prepend the path with the path to the static directory
path = filepath.Join(h.staticPath, path)

6
scripts/postinstall.sh Normal file
View File

@@ -0,0 +1,6 @@
#!/bin/bash
# Package post-install hook: hand the install tree to the service user
# (uid/gid 1000) and register + start the systemd unit.
chown -R 1000:1000 /opt/premiumizearrd/
# Fix: reload unit files BEFORE enable/start so systemd sees the freshly
# installed premiumizearrd.service (the previous order enabled a unit
# systemd might not have loaded yet).
systemctl daemon-reload
systemctl enable premiumizearrd.service
systemctl start premiumizearrd.service

5890
web/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -2,25 +2,25 @@
"name": "premiumizearr-ui",
"version": "0.0.1",
"devDependencies": {
"carbon-components-svelte": "^0.49.0",
"carbon-icons-svelte": "^10.38.0",
"carbon-preprocess-svelte": "^0.6.0",
"copy-webpack-plugin": "^9.1.0",
"cross-env": "^7.0.3",
"css-loader": "^5.0.1",
"esbuild-loader": "^2.16.0",
"mini-css-extract-plugin": "^1.3.4",
"svelte": "^3.31.2",
"carbon-components-svelte": "^0.64.0",
"carbon-icons-svelte": "^11.0.0",
"carbon-preprocess-svelte": "^0.9.0",
"copy-webpack-plugin": "^9.0.0",
"cross-env": "^7.0.0",
"css-loader": "^5.0.0",
"esbuild-loader": "^2.0.0",
"mini-css-extract-plugin": "^1.0.0",
"svelte": "^3.0.0",
"svelte-loader": "^3.0.0",
"webpack": "^5.16.0",
"webpack-cli": "^4.4.0",
"webpack-dev-server": "^4.7.3"
"webpack": "^5.0.0",
"webpack-cli": "^4.0.0",
"webpack-dev-server": "^4.0.0"
},
"scripts": {
"build": "cross-env NODE_ENV=production webpack",
"dev": "webpack serve --content-base public"
"dev": "webpack serve --static public"
},
"dependencies": {
"luxon": "^2.3.0"
"luxon": "^2.0.0"
}
}

View File

@@ -6,13 +6,13 @@
<title>Premiumizearr</title>
<link rel='icon' type='image/png' href='/favicon.png'>
<link rel='stylesheet' href='/{{.RootPath}}/bundle.css'>
<link rel='icon' type='image/png' href='./{{.RootPath}}/favicon.png'>
<link rel='stylesheet' href='./{{.RootPath}}/bundle.css'>
<!-- Material Icons -->
<link rel="stylesheet" href="https://fonts.googleapis.com/icon?family=Material+Icons" />
<!-- Roboto -->
<link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Roboto:300,400,500,600,700" />
<script defer src='/{{.RootPath}}/bundle.js'></script>
<script defer src='./{{.RootPath}}/bundle.js'></script>
</head>
<body>

View File

@@ -1,150 +1,31 @@
<script>
import APITable from "./components/APITable.svelte";
import "carbon-components-svelte/css/g100.css";
import { Grid, Row, Column } from "carbon-components-svelte";
import DateTime from "luxon";
let dlSpeed = 0;
let webRoot = new URL(window.location.href).pathname;
function parseDLSpeedFromMessage(m) {
if (m == "Loading..." || m == undefined) return 0;
let speed = m.split(" ")[0];
speed = speed.replace(",", "");
let unit = m.split(" ")[1];
if (Number.isNaN(speed)) {
console.log("Speed is not a number: ", speed);
console.log("Message: ", message);
return 0;
}
if (unit === undefined || unit === null || unit == "") {
console.log("Unit undefined in : " + m);
return 0;
} else {
try {
unit = unit.toUpperCase();
} catch (error) {
return 0;
}
unit = unit.replace("/", "");
unit = unit.substring(0, 2);
switch (unit) {
case "KB":
return speed * 1024;
case "MB":
return speed * 1024 * 1024;
case "GB":
return speed * 1024 * 1024 * 1024;
default:
console.log("Unknown unit: " + unit);
return 0;
}
}
}
function HumanReadableSpeed(bytes) {
if (bytes < 1024) {
return bytes + " B/s";
} else if (bytes < 1024 * 1024) {
return (bytes / 1024).toFixed(2) + " KB/s";
} else if (bytes < 1024 * 1024 * 1024) {
return (bytes / 1024 / 1024).toFixed(2) + " MB/s";
} else {
return (bytes / 1024 / 1024 / 1024).toFixed(2) + " GB/s";
}
}
function dataToRows(data) {
let rows = [];
dlSpeed = 0;
if (!data) return rows;
for (let i = 0; i < data.length; i++) {
let d = data[i];
rows.push({
id: d.id,
name: d.name,
status: d.status,
progress: (d.progress * 100).toFixed(0) + "%",
message: d.message,
});
let speed = parseDLSpeedFromMessage(d.message);
if (!Number.isNaN(speed)) {
dlSpeed += speed;
} else {
console.error("Invalid speed: " + d.message);
}
}
return rows;
}
function downloadsToRows(downloads) {
let rows = [];
if (!downloads) return rows;
for (let i = 0; i < downloads.length; i++) {
let d = downloads[i];
rows.push({
Added: DateTime.fromMillis(d.added).toFormat('dd hh:mm:ss a'),
name: d.name,
progress: (d.progress * 100).toFixed(0) + "%",
});
}
}
</script>
<main>
<Grid fullWidth>
<Row>
<Column md={4} >
<h3>Blackhole</h3>
<APITable
headers={[
{ key: "id", value: "Pos" },
{ key: "name", value: "Name", sort: false },
]}
{webRoot}
APIpath="/api/blackhole"
zebra={true}
totalName="In Queue: "
/>
</Column>
<Column md={4} >
<h3>Downloads</h3>
<APITable
headers={[
{ key: "added", value: "Added" },
{ key: "name", value: "Name" },
{ key: "progress", value: "Progress" },
{ key: "speed", value: "Speed" },
]}
updateTimeSeconds={2}
{webRoot}
APIpath="/api/downloads"
zebra={true}
totalName="Downloading: "
/>
</Column>
</Row>
<Row>
<Column>
<h3>Transfers</h3>
<p>Download Speed: {HumanReadableSpeed(dlSpeed)}</p>
<APITable
headers={[
{ key: "name", value: "Name" },
{ key: "status", value: "Status" },
{ key: "progress", value: "Progress" },
{ key: "message", value: "Message", sort: false },
]}
{webRoot}
APIpath="/api/transfers"
zebra={true}
{dataToRows}
/>
</Column>
</Row>
</Grid>
</main>
<script>
import "carbon-components-svelte/css/g100.css";
import {
Grid,
Row,
Column,
Tabs,
Tab,
TabContent,
} from "carbon-components-svelte";
import Config from "./pages/Config.svelte";
import Info from "./pages/Info.svelte";
</script>
<main>
<Grid fullWidth>
<Row>
<Column>
<Tabs>
<Tab label="Info" />
<Tab label="Config" />
<svelte:fragment slot="content">
<TabContent><Info /></TabContent>
<TabContent><Config /></TabContent>
</svelte:fragment>
</Tabs>
</Column>
</Row>
</Grid>
</main>

221
web/src/pages/Config.svelte Normal file
View File

@@ -0,0 +1,221 @@
<script>
import {
Row,
Column,
Button,
TextInput,
Modal,
FormGroup,
Dropdown,
} from "carbon-components-svelte";
import {
Save,
CheckmarkFilled,
AddFilled,
TrashCan,
} from "carbon-icons-svelte";
// Base URL for API calls.
// NOTE(review): assumes the page URL ends with "/" so that
// webRoot + "api/config" resolves correctly — confirm against routing.
let webRoot = window.location.href;
// Editable copy of the server configuration; field names mirror the
// JSON the /api/config endpoint returns.
let config = {
  BlackholeDirectory: "",
  DownloadsDirectory: "",
  UnzipDirectory: "",
  BindIP: "",
  BindPort: "",
  WebRoot: "",
  SimultaneousDownloads: 0,
  Arrs: [],
};
// Inputs stay disabled until the config has been fetched or while a
// save is in flight.
let inputDisabled = true;
// Error-modal visibility and the message it displays.
let errorModal = false;
let errorMessage = "";
// Save-button icon; briefly swapped to a checkmark after a successful save.
let saveIcon = Save;
// Fetch the current config from the server. Inputs are disabled while
// the request is in flight and re-enabled once the config arrives;
// a failed fetch leaves them disabled and logs the error.
function getConfig() {
  inputDisabled = true;
  fetch(`${webRoot}api/config`)
    .then((response) => response.json())
    .then((fetched) => {
      config = fetched;
      inputDisabled = false;
    })
    .catch((error) => {
      console.error("Error: ", error);
    });
}
// POST the edited config to the server and reflect the outcome in the
// UI: a transient checkmark on success, the error modal on failure.
function submit() {
  inputDisabled = true;

  // Server accepted or rejected the config (HTTP round-trip succeeded).
  const onResult = (data) => {
    if (!data.succeeded) {
      errorMessage = data.status;
      errorModal = true;
      getConfig();
      return;
    }
    saveIcon = CheckmarkFilled;
    getConfig();
    setTimeout(() => {
      saveIcon = Save;
    }, 1000);
  };

  // Network/parse failure: surface the error, then re-sync the form.
  const onFailure = (error) => {
    console.error("Error: ", error);
    errorModal = true;
    errorMessage = error;
    setTimeout(() => {
      getConfig();
    }, 1500);
  };

  fetch(webRoot + "api/config", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify(config),
  })
    .then((response) => response.json())
    .then(onResult)
    .catch(onFailure);
}
// Append a placeholder *Arr entry. Reassigning config.Arrs (rather than
// only mutating it) forces Svelte to re-paint the list.
function AddArr() {
  const placeholder = {
    Name: "New Arr",
    URL: "http://localhost:1234",
    APIKey: "xxxxxxxx",
    Type: "Sonarr",
  };
  config.Arrs = [...config.Arrs, placeholder];
}
// Remove the *Arr entry at `index`. Reassigning config.Arrs forces
// Svelte to re-paint the list.
// Fix: dropped a leftover debug console.log(index).
function RemoveArr(index) {
  config.Arrs.splice(index, 1);
  config.Arrs = [...config.Arrs];
}
getConfig();
</script>
<main>
<Row>
<Column>
<h4>*Arr Settings</h4>
<FormGroup>
{#if config.Arrs !== undefined}
{#each config.Arrs as arr, i}
<h5>- {arr.Name ? arr.Name : i}</h5>
<FormGroup>
<TextInput
labelText="Name"
bind:value={arr.Name}
disabled={inputDisabled}
/>
<TextInput
labelText="URL"
bind:value={arr.URL}
disabled={inputDisabled}
/>
<TextInput
labelText="APIKey"
bind:value={arr.APIKey}
disabled={inputDisabled}
/>
<Dropdown
titleText="Type"
selectedId={arr.Type}
on:select={(e) => {
config.Arrs[i].Type = e.detail.selectedId;
}}
items={[
{ id: "Sonarr", text: "Sonarr" },
{ id: "Radarr", text: "Radarr" },
]}
disabled={inputDisabled}
/>
<Button
style="margin-top: 10px;"
on:click={() => {
RemoveArr(i);
}}
kind="danger"
icon={TrashCan}
iconDescription="Delete Arr"
/>
</FormGroup>
{/each}
{/if}
</FormGroup>
<Button on:click={AddArr} disabled={inputDisabled} icon={AddFilled}>
Add Arr
</Button>
</Column>
<Column>
<h4>Directory Settings</h4>
<FormGroup>
<TextInput
disabled={inputDisabled}
labelText="Blackhole Directory"
bind:value={config.BlackholeDirectory}
/>
<TextInput
disabled={inputDisabled}
labelText="Download Directory"
bind:value={config.DownloadsDirectory}
/>
<TextInput
disabled={inputDisabled}
labelText="Unzip Directory"
bind:value={config.UnzipDirectory}
/>
</FormGroup>
<h4>Web Server Settings</h4>
<FormGroup>
<TextInput
disabled={inputDisabled}
labelText="Bind IP"
bind:value={config.BindIP}
/>
<TextInput
disabled={inputDisabled}
labelText="Bind Port"
bind:value={config.BindPort}
/>
<TextInput
disabled={inputDisabled}
labelText="Web Root"
bind:value={config.WebRoot}
/>
</FormGroup>
<h4>Download Settings</h4>
<FormGroup>
<TextInput
type="number"
disabled={inputDisabled}
labelText="Simultaneous Downloads"
bind:value={config.SimultaneousDownloads}
/>
</FormGroup>
<Button on:click={submit} icon={saveIcon} disabled={inputDisabled}
>Save</Button
>
</Column>
</Row>
</main>
<Modal
bind:open={errorModal}
on:open={errorModal}
passiveModal
modalHeading="Error Saving Config"
on:close={() => {
errorModal = false;
}}
>
<p>{errorMessage}</p>
</Modal>

149
web/src/pages/Info.svelte Normal file
View File

@@ -0,0 +1,149 @@
<script>
import APITable from "../components/APITable.svelte";
import { Row, Column } from "carbon-components-svelte";
import {DateTime} from "luxon";
let dlSpeed = 0;
let webRoot = window.location.href;
// Parse a transfer status message of the form "12.3 MB/s ..." into a
// speed in bytes per second. Placeholder or unparseable messages yield 0.
function parseDLSpeedFromMessage(m) {
  if (m == "Loading..." || m == undefined) return 0;
  if (m == "too many missing articles") return 0;
  const parts = m.split(" ");
  // Strip thousands separators before converting, e.g. "1,234".
  const raw = parts[0].replace(",", "");
  // Bug fix: the original tested Number.isNaN on the raw STRING (always
  // false), so garbage fell through and returned NaN; it also logged an
  // undefined `message` variable, throwing a ReferenceError when hit.
  const speed = Number(raw);
  if (Number.isNaN(speed)) {
    console.log("Speed is not a number: ", raw);
    console.log("Message: ", m);
    return 0;
  }
  let unit = parts[1];
  if (unit === undefined || unit === null || unit == "") {
    console.log("Unit undefined in : " + m);
    return 0;
  }
  try {
    unit = unit.toUpperCase();
  } catch (error) {
    return 0;
  }
  // Normalize "MB/s" → "MB" before dispatching on the unit.
  unit = unit.replace("/", "").substring(0, 2);
  switch (unit) {
    case "KB":
      return speed * 1024;
    case "MB":
      return speed * 1024 * 1024;
    case "GB":
      return speed * 1024 * 1024 * 1024;
    default:
      console.log("Unknown unit: " + unit + " in message '" + m + "'");
      return 0;
  }
}
// Format a bytes-per-second rate as a human-readable string, scaling to
// the largest unit below the next 1024 boundary (e.g. 2048 → "2.00 KB/s").
function HumanReadableSpeed(bytes) {
  const KB = 1024;
  const MB = KB * 1024;
  const GB = MB * 1024;
  if (bytes < KB) return bytes + " B/s";
  if (bytes < MB) return (bytes / KB).toFixed(2) + " KB/s";
  if (bytes < GB) return (bytes / MB).toFixed(2) + " MB/s";
  return (bytes / GB).toFixed(2) + " GB/s";
}
// Map transfer API entries onto APITable rows, and accumulate the total
// download speed into the module-level `dlSpeed` from each entry's
// message text.
function dataToRows(data) {
  const rows = [];
  dlSpeed = 0;
  if (!data) return rows;
  for (const entry of data) {
    rows.push({
      id: entry.id,
      name: entry.name,
      status: entry.status,
      progress: (entry.progress * 100).toFixed(0) + "%",
      message: entry.message,
    });
    const speed = parseDLSpeedFromMessage(entry.message);
    if (Number.isNaN(speed)) {
      console.error("Invalid speed: " + entry.message);
    } else {
      dlSpeed += speed;
    }
  }
  return rows;
}
// Map downloads API entries onto APITable rows.
// Bug fixes: the original built `rows` but never returned it (callers
// always got undefined), and keyed the timestamp as "Added" while the
// downloads table header uses the lowercase key "added".
function downloadsToRows(downloads) {
  const rows = [];
  if (!downloads) return rows;
  for (const d of downloads) {
    rows.push({
      added: DateTime.fromMillis(d.added).toFormat('dd hh:mm:ss a'),
      name: d.name,
      progress: (d.progress * 100).toFixed(0) + "%",
    });
  }
  return rows;
}
</script>
<main>
<Row>
<Column md={4} >
<h3>Blackhole</h3>
<APITable
headers={[
{ key: "id", value: "Pos" },
{ key: "name", value: "Name", sort: false },
]}
{webRoot}
APIpath="api/blackhole"
zebra={true}
totalName="In Queue: "
/>
</Column>
<Column md={4} >
<h3>Downloads</h3>
<APITable
headers={[
{ key: "added", value: "Added" },
{ key: "name", value: "Name" },
{ key: "progress", value: "Progress" },
{ key: "speed", value: "Speed" },
]}
updateTimeSeconds={2}
{webRoot}
APIpath="api/downloads"
zebra={true}
totalName="Downloading: "
/>
</Column>
</Row>
<Row>
<Column>
<h3>Transfers</h3>
<p>Download Speed: {HumanReadableSpeed(dlSpeed)}</p>
<APITable
headers={[
{ key: "name", value: "Name" },
{ key: "status", value: "Status" },
{ key: "progress", value: "Progress" },
{ key: "message", value: "Message", sort: false },
]}
{webRoot}
APIpath="api/transfers"
zebra={true}
{dataToRows}
/>
</Column>
</Row>
</main>

View File

@@ -61,7 +61,7 @@ module.exports = {
devServer: {
hot: true,
proxy: {
'/api': 'https://projectmouseion.com/premiumizearr/api'
'/api': 'http://localhost:8182'
}
},
optimization: {