Mirror of https://github.com/onedr0p/exportarr.git (synced 2026-02-06 10:57:32 +00:00)
Add Sabnzbd Support (#141)

* Add Sabnzbd Support
* Fix collector name in sabnzbd logger

Signed-off-by: Russell Troxel <russell@troxel.io>

parent 68ac4199b0
commit 6dcb7b4d92
go.mod (1 change)
@@ -15,6 +15,7 @@ require (
 	github.com/stretchr/testify v1.8.2
 	go.uber.org/zap v1.24.0
 	golang.org/x/exp v0.0.0-20230321023759-10a507213a29
+	golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4
 )
 
 require (
go.sum (3 changes)
@@ -278,8 +278,6 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
 golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
 golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
-golang.org/x/exp v0.0.0-20230315142452-642cacee5cc0 h1:pVgRXcIictcr+lBQIFeiwuwtDIs4eL21OuM9nyAADmo=
-golang.org/x/exp v0.0.0-20230315142452-642cacee5cc0/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc=
 golang.org/x/exp v0.0.0-20230321023759-10a507213a29 h1:ooxPy7fPvB4kwsA2h+iBNHkAbp/4JxTSwCmvdjEYmug=
 golang.org/x/exp v0.0.0-20230321023759-10a507213a29/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc=
 golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
@@ -353,6 +351,7 @@ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJ
 golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 h1:uVc8UZUe6tr40fFVnUP5Oj+veunVezqYl9z7DYw9xzw=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
internal/arr/client/auth.go (new file, 131 lines)
@@ -0,0 +1,131 @@
package client

import (
    "fmt"
    "net/http"
    "net/url"
    "strings"
    "time"

    "github.com/onedr0p/exportarr/internal/arr/config"
    "github.com/onedr0p/exportarr/internal/client"
    base_client "github.com/onedr0p/exportarr/internal/client"
)

func NewClient(config *config.ArrConfig) (*base_client.Client, error) {
    auth, err := NewAuth(config)
    if err != nil {
        return nil, err
    }
    return base_client.NewClient(config.BaseURL(), config.DisableSSLVerify, auth)
}

func NewAuth(config *config.ArrConfig) (client.Authenticator, error) {
    var auth client.Authenticator

    if config.UseFormAuth() {
        u, err := url.Parse(config.URL)
        if err != nil {
            return nil, err
        }
        auth = &FormAuth{
            Username:    config.AuthUsername,
            Password:    config.AuthPassword,
            ApiKey:      config.ApiKey,
            AuthBaseURL: u,
            Transport:   client.BaseTransport(config.DisableSSLVerify),
        }
    } else if config.UseBasicAuth() {
        auth = &BasicAuth{
            Username: config.AuthUsername,
            Password: config.AuthPassword,
            ApiKey:   config.ApiKey,
        }
    } else {
        auth = &ApiKeyAuth{
            ApiKey: config.ApiKey,
        }
    }
    return auth, nil
}

type ApiKeyAuth struct {
    ApiKey string
}

func (a *ApiKeyAuth) Auth(req *http.Request) error {
    req.Header.Add("X-Api-Key", a.ApiKey)
    return nil
}

type BasicAuth struct {
    Username string
    Password string
    ApiKey   string
}

func (a *BasicAuth) Auth(req *http.Request) error {
    req.SetBasicAuth(a.Username, a.Password)
    req.Header.Add("X-Api-Key", a.ApiKey)
    return nil
}

type FormAuth struct {
    Username    string
    Password    string
    ApiKey      string
    AuthBaseURL *url.URL
    Transport   http.RoundTripper
    cookie      *http.Cookie
}

func (a *FormAuth) Auth(req *http.Request) error {
    if a.cookie == nil || a.cookie.Expires.Before(time.Now().Add(-5*time.Minute)) {
        form := url.Values{
            "username":   {a.Username},
            "password":   {a.Password},
            "rememberMe": {"on"},
        }

        u := a.AuthBaseURL.JoinPath("login")
        u.Query().Add("ReturnUrl", "/general/settings")

        authReq, err := http.NewRequest("POST", u.String(), strings.NewReader(form.Encode()))
        if err != nil {
            return fmt.Errorf("Failed to renew FormAuth Cookie: %w", err)
        }

        authReq.Header.Add("Content-Type", "application/x-www-form-urlencoded")
        authReq.Header.Add("Content-Length", fmt.Sprintf("%d", len(form.Encode())))

        client := &http.Client{Transport: a.Transport, CheckRedirect: func(req *http.Request, via []*http.Request) error {
            if req.URL.Query().Get("loginFailed") == "true" {
                return fmt.Errorf("Failed to renew FormAuth Cookie: Login Failed")
            }
            return http.ErrUseLastResponse
        }}

        authResp, err := client.Do(authReq)
        if err != nil {
            return fmt.Errorf("Failed to renew FormAuth Cookie: %w", err)
        }

        if authResp.StatusCode != 302 {
            return fmt.Errorf("Failed to renew FormAuth Cookie: Received Status Code %d", authResp.StatusCode)
        }

        for _, cookie := range authResp.Cookies() {
            if strings.HasSuffix(cookie.Name, "arrAuth") {
                copy := *cookie
                a.cookie = &copy
                break
            }
            return fmt.Errorf("Failed to renew FormAuth Cookie: No Cookie with suffix 'arrAuth' found")
        }
    }

    req.AddCookie(a.cookie)
    req.Header.Add("X-Api-Key", a.ApiKey)

    return nil
}
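For orientation, here is a minimal sketch of how these authenticators are wired into an HTTP client. It assumes it is compiled inside the exportarr module (the internal/... packages are not importable from outside) and reuses NewExportarrTransport, which the tests below exercise; the endpoint URL is hypothetical.

package main

import (
    "net/http"

    arr_client "github.com/onedr0p/exportarr/internal/arr/client"
    base_client "github.com/onedr0p/exportarr/internal/client"
)

func main() {
    // Default case: only the X-Api-Key header is injected on every request.
    auth := &arr_client.ApiKeyAuth{ApiKey: "abcdef1234567890abcdef1234567890"}

    // The exportarr transport calls auth.Auth(req) before each request and,
    // per the tests below, retries failures and errors on non-2xx responses.
    transport := base_client.NewExportarrTransport(http.DefaultTransport, auth)
    httpClient := &http.Client{Transport: transport}

    resp, err := httpClient.Get("http://radarr:7878/api/v3/health") // hypothetical endpoint
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()
}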
internal/arr/client/auth_test.go (new file, 237 lines)
@@ -0,0 +1,237 @@
package client

import (
    "encoding/base64"
    "fmt"
    "net/http"
    "net/http/httptest"
    "net/url"
    "testing"
    "time"

    base_client "github.com/onedr0p/exportarr/internal/client"

    "github.com/stretchr/testify/require"
)

var (
    TEST_USER = "testuser1"
    TEST_PASS = "hunter2"
    TEST_KEY  = "abcdef1234567890abcdef1234567890"
)

type testRoundTripFunc func(req *http.Request) (*http.Response, error)

func (t testRoundTripFunc) RoundTrip(req *http.Request) (*http.Response, error) {
    return t(req)
}

func TestRoundTrip_Auth(t *testing.T) {
    require := require.New(t)
    parameters := []struct {
        name     string
        auth     base_client.Authenticator
        testFunc func(req *http.Request) (*http.Response, error)
    }{
        {
            name: "BasicAuth",
            auth: &BasicAuth{
                Username: TEST_USER,
                Password: TEST_PASS,
                ApiKey:   TEST_KEY,
            },
            testFunc: func(req *http.Request) (*http.Response, error) {
                require.NotNil(req, "Request should not be nil")
                require.NotNil(req.Header, "Request header should not be nil")
                require.NotEmpty(req.Header.Get("Authorization"), "Authorization header should be set")
                require.Equal(
                    "Basic "+base64.StdEncoding.EncodeToString([]byte(TEST_USER+":"+TEST_PASS)),
                    req.Header.Get("Authorization"),
                    "Authorization Header set to wrong value",
                )
                require.NotEmpty(req.Header.Get("X-Api-Key"), "X-Api-Key header should be set")
                require.Equal(TEST_KEY, req.Header.Get("X-Api-Key"), "X-Api-Key Header set to wrong value")
                return &http.Response{
                    StatusCode: 200,
                    Body:       nil,
                    Header:     make(http.Header),
                }, nil
            },
        },
        {
            name: "ApiKey",
            auth: &ApiKeyAuth{
                ApiKey: TEST_KEY,
            },
            testFunc: func(req *http.Request) (*http.Response, error) {
                require.NotNil(req, "Request should not be nil")
                require.NotNil(req.Header, "Request header should not be nil")
                require.Empty(req.Header.Get("Authorization"), "Authorization header should be empty")
                require.NotEmpty(req.Header.Get("X-Api-Key"), "X-Api-Key header should be set")
                require.Equal(TEST_KEY, req.Header.Get("X-Api-Key"), "X-Api-Key Header set to wrong value")
                return &http.Response{
                    StatusCode: 200,
                    Body:       nil,
                    Header:     make(http.Header),
                }, nil
            },
        },
    }
    for _, param := range parameters {
        t.Run(param.name, func(t *testing.T) {
            transport := base_client.NewExportarrTransport(testRoundTripFunc(param.testFunc), param.auth)
            client := &http.Client{Transport: transport}
            req, err := http.NewRequest("GET", "http://example.com", nil)
            require.NoError(err, "Error creating request: %s", err)
            _, err = client.Do(req)
            require.NoError(err, "Error sending request: %s", err)
        })
    }
}

func TestRoundTrip_FormAuth(t *testing.T) {
    require := require.New(t)
    ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        require.NotNil(r, "Request should not be nil")
        require.NotNil(r.Header, "Request header should not be nil")
        require.Empty(r.Header.Get("Authorization"), "Authorization header should be empty")
        require.Equal("POST", r.Method, "Request method should be POST")
        require.Equal("/login", r.URL.Path, "Request URL should be /login")
        require.Equal("application/x-www-form-urlencoded", r.Header.Get("Content-Type"), "Content-Type should be application/x-www-form-urlencoded")
        require.Equal(TEST_USER, r.FormValue("username"), "Username should be %s", TEST_USER)
        require.Equal(TEST_PASS, r.FormValue("password"), "Password should be %s", TEST_PASS)
        http.SetCookie(w, &http.Cookie{
            Name:    "RadarrAuth",
            Value:   "abcdef1234567890abcdef1234567890",
            Expires: time.Now().Add(24 * time.Hour),
        })
        w.WriteHeader(http.StatusFound)
        w.Write([]byte("OK"))
    }))
    defer ts.Close()
    tsUrl, _ := url.Parse(ts.URL)
    auth := &FormAuth{
        Username:    TEST_USER,
        Password:    TEST_PASS,
        ApiKey:      TEST_KEY,
        AuthBaseURL: tsUrl,
        Transport:   http.DefaultTransport,
    }
    transport := base_client.NewExportarrTransport(testRoundTripFunc(func(req *http.Request) (*http.Response, error) {
        require.NotNil(req, "Request should not be nil")
        require.NotNil(req.Header, "Request header should not be nil")
        cookie, err := req.Cookie("RadarrAuth")
        require.NoError(err, "Cookie should be set")
        require.Equal(cookie.Value, "abcdef1234567890abcdef1234567890", "Cookie should be set")
        return &http.Response{
            StatusCode: http.StatusOK,
            Body:       nil,
            Header:     make(http.Header),
        }, nil
    }), auth)
    client := &http.Client{Transport: transport}
    req, err := http.NewRequest("GET", "http://example.com", nil)
    require.NoError(err, "Error creating request: %s", err)
    _, err = client.Do(req)
    require.NoError(err, "Error sending request: %s", err)
}

func TestRoundTrip_FormAuthFailure(t *testing.T) {
    require := require.New(t)
    ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        http.Redirect(w, r, "/?loginFailed=true", http.StatusFound)
    }))
    u, _ := url.Parse(ts.URL)
    auth := &FormAuth{
        Username:    TEST_USER,
        Password:    TEST_PASS,
        ApiKey:      TEST_KEY,
        AuthBaseURL: u,
        Transport:   http.DefaultTransport,
    }
    transport := base_client.NewExportarrTransport(testRoundTripFunc(func(req *http.Request) (*http.Response, error) {
        return &http.Response{
            StatusCode: http.StatusOK,
            Body:       nil,
            Header:     make(http.Header),
        }, nil
    }), auth)
    client := &http.Client{Transport: transport}
    req, err := http.NewRequest("GET", "http://example.com", nil)
    require.NoError(err, "Error creating request: %s", err)
    require.NotPanics(func() {
        _, err = client.Do(req)
    }, "Form Auth should not panic on auth failure")
    require.Error(err, "Form Auth Transport should throw an error when auth fails")
}

func TestRoundTrip_Retries(t *testing.T) {
    parameters := []struct {
        name     string
        testFunc func(req *http.Request) (*http.Response, error)
    }{
        {
            name: "500",
            testFunc: func(req *http.Request) (*http.Response, error) {
                return &http.Response{
                    StatusCode: 500,
                    Body:       nil,
                    Header:     make(http.Header),
                }, nil
            },
        },
        {
            name: "Err",
            testFunc: func(req *http.Request) (*http.Response, error) {
                return nil, &http.ProtocolError{}
            },
        },
    }
    for _, param := range parameters {
        t.Run(param.name, func(t *testing.T) {
            require := require.New(t)
            auth := &ApiKeyAuth{
                ApiKey: TEST_KEY,
            }
            attempts := 0
            transport := base_client.NewExportarrTransport(testRoundTripFunc(func(req *http.Request) (*http.Response, error) {
                attempts++
                return param.testFunc(req)
            }), auth)
            client := &http.Client{Transport: transport}
            req, err := http.NewRequest("GET", "http://example.com", nil)
            require.NoError(err, "Error creating request: %s", err)
            _, err = client.Do(req)
            require.Error(err, "Error should be returned from Do()")
            require.Equal(3, attempts, "Should retry 3 times")
        })
    }
}

func TestRoundTrip_StatusCodes(t *testing.T) {
    parameters := []int{200, 201, 202, 204, 301, 302, 400, 401, 403, 404, 500, 503}
    for _, param := range parameters {
        t.Run(fmt.Sprintf("%d", param), func(t *testing.T) {
            require := require.New(t)
            auth := &ApiKeyAuth{
                ApiKey: TEST_KEY,
            }
            transport := base_client.NewExportarrTransport(testRoundTripFunc(func(req *http.Request) (*http.Response, error) {
                return &http.Response{
                    StatusCode: param,
                    Body:       nil,
                    Header:     make(http.Header),
                }, nil
            }), auth)
            client := &http.Client{Transport: transport}
            req, err := http.NewRequest("GET", "http://example.com", nil)
            require.Nil(err, "Error creating request: %s", err)
            _, err = client.Do(req)
            if param >= 200 && param < 300 {
                require.NoError(err, "Should Not error on 2XX: %s", err)
            } else {
                require.Error(err, "Should error on non-2XX")
            }
        })
    }
}
@@ -3,33 +3,33 @@ package collector
 import (
 	"fmt"
 
-	"github.com/onedr0p/exportarr/internal/client"
-	"github.com/onedr0p/exportarr/internal/config"
-	"github.com/onedr0p/exportarr/internal/model"
+	"github.com/onedr0p/exportarr/internal/arr/client"
+	"github.com/onedr0p/exportarr/internal/arr/config"
+	"github.com/onedr0p/exportarr/internal/arr/model"
 	"github.com/prometheus/client_golang/prometheus"
 	"go.uber.org/zap"
 )
 
 type systemHealthCollector struct {
-	config             *config.Config   // App configuration
-	systemHealthMetric *prometheus.Desc // Total number of health issues
-	errorMetric        *prometheus.Desc // Error Description for use with InvalidMetric
+	config             *config.ArrConfig // App configuration
+	systemHealthMetric *prometheus.Desc  // Total number of health issues
+	errorMetric        *prometheus.Desc  // Error Description for use with InvalidMetric
 }
 
-func NewSystemHealthCollector(c *config.Config) *systemHealthCollector {
+func NewSystemHealthCollector(c *config.ArrConfig) *systemHealthCollector {
 	return &systemHealthCollector{
 		config: c,
 		systemHealthMetric: prometheus.NewDesc(
-			fmt.Sprintf("%s_system_health_issues", c.Arr),
+			fmt.Sprintf("%s_system_health_issues", c.App),
 			"Total number of health issues by source, type, message and wikiurl",
 			[]string{"source", "type", "message", "wikiurl"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		errorMetric: prometheus.NewDesc(
-			fmt.Sprintf("%s_health_collector_error", c.Arr),
+			fmt.Sprintf("%s_health_collector_error", c.App),
 			"Error while collecting metrics",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 	}
 }
@@ -3,33 +3,33 @@ package collector
 import (
 	"fmt"
 
-	"github.com/onedr0p/exportarr/internal/client"
-	"github.com/onedr0p/exportarr/internal/config"
-	"github.com/onedr0p/exportarr/internal/model"
+	"github.com/onedr0p/exportarr/internal/arr/client"
+	"github.com/onedr0p/exportarr/internal/arr/config"
+	"github.com/onedr0p/exportarr/internal/arr/model"
 	"github.com/prometheus/client_golang/prometheus"
 	"go.uber.org/zap"
 )
 
 type historyCollector struct {
-	config        *config.Config   // App configuration
-	historyMetric *prometheus.Desc // Total number of history items
-	errorMetric   *prometheus.Desc // Error Description for use with InvalidMetric
+	config        *config.ArrConfig // App configuration
+	historyMetric *prometheus.Desc  // Total number of history items
+	errorMetric   *prometheus.Desc  // Error Description for use with InvalidMetric
 }
 
-func NewHistoryCollector(c *config.Config) *historyCollector {
+func NewHistoryCollector(c *config.ArrConfig) *historyCollector {
 	return &historyCollector{
 		config: c,
 		historyMetric: prometheus.NewDesc(
-			fmt.Sprintf("%s_history_total", c.Arr),
+			fmt.Sprintf("%s_history_total", c.App),
 			"Total number of item in the history",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		errorMetric: prometheus.NewDesc(
-			fmt.Sprintf("%s_history_collector_error", c.Arr),
+			fmt.Sprintf("%s_history_collector_error", c.App),
 			"Error while collecting metrics",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 	}
 }
@@ -3,110 +3,110 @@ package collector
 import (
 	"fmt"
 
-	"github.com/onedr0p/exportarr/internal/client"
-	"github.com/onedr0p/exportarr/internal/config"
-	"github.com/onedr0p/exportarr/internal/model"
+	"github.com/onedr0p/exportarr/internal/arr/client"
+	"github.com/onedr0p/exportarr/internal/arr/config"
+	"github.com/onedr0p/exportarr/internal/arr/model"
 	"github.com/prometheus/client_golang/prometheus"
 	"go.uber.org/zap"
 )
 
 type lidarrCollector struct {
-	config                 *config.Config   // App configuration
-	artistsMetric          *prometheus.Desc // Total number of artists
-	artistsMonitoredMetric *prometheus.Desc // Total number of monitored artists
-	artistGenresMetric     *prometheus.Desc // Total number of artists by genre
-	artistsFileSizeMetric  *prometheus.Desc // Total fizesize of all artists in bytes
-	albumsMetric           *prometheus.Desc // Total number of albums
-	albumsMonitoredMetric  *prometheus.Desc // Total number of monitored albums
-	albumsGenresMetric     *prometheus.Desc // Total number of albums by genre
-	albumsMissingMetric    *prometheus.Desc // Total number of missing albums
-	songsMetric            *prometheus.Desc // Total number of songs
-	songsMonitoredMetric   *prometheus.Desc // Total number of monitored songs
-	songsDownloadedMetric  *prometheus.Desc // Total number of downloaded songs
-	songsQualitiesMetric   *prometheus.Desc // Total number of songs by quality
-	errorMetric            *prometheus.Desc // Error Description for use with InvalidMetric
+	config                 *config.ArrConfig // App configuration
+	artistsMetric          *prometheus.Desc  // Total number of artists
+	artistsMonitoredMetric *prometheus.Desc  // Total number of monitored artists
+	artistGenresMetric     *prometheus.Desc  // Total number of artists by genre
+	artistsFileSizeMetric  *prometheus.Desc  // Total fizesize of all artists in bytes
+	albumsMetric           *prometheus.Desc  // Total number of albums
+	albumsMonitoredMetric  *prometheus.Desc  // Total number of monitored albums
+	albumsGenresMetric     *prometheus.Desc  // Total number of albums by genre
+	albumsMissingMetric    *prometheus.Desc  // Total number of missing albums
+	songsMetric            *prometheus.Desc  // Total number of songs
+	songsMonitoredMetric   *prometheus.Desc  // Total number of monitored songs
+	songsDownloadedMetric  *prometheus.Desc  // Total number of downloaded songs
+	songsQualitiesMetric   *prometheus.Desc  // Total number of songs by quality
+	errorMetric            *prometheus.Desc  // Error Description for use with InvalidMetric
 }
 
-func NewLidarrCollector(c *config.Config) *lidarrCollector {
+func NewLidarrCollector(c *config.ArrConfig) *lidarrCollector {
 	return &lidarrCollector{
 		config: c,
 		artistsMetric: prometheus.NewDesc(
 			"lidarr_artists_total",
 			"Total number of artists",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		artistsMonitoredMetric: prometheus.NewDesc(
 			"lidarr_artists_monitored_total",
 			"Total number of monitored artists",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		artistGenresMetric: prometheus.NewDesc(
 			"lidarr_artists_genres_total",
 			"Total number of artists by genre",
 			[]string{"genre"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		artistsFileSizeMetric: prometheus.NewDesc(
 			"lidarr_artists_filesize_bytes",
 			"Total fizesize of all artists in bytes",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		albumsMetric: prometheus.NewDesc(
 			"lidarr_albums_total",
 			"Total number of albums",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		albumsMonitoredMetric: prometheus.NewDesc(
 			"lidarr_albums_monitored_total",
 			"Total number of albums",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		albumsGenresMetric: prometheus.NewDesc(
 			"lidarr_albums_genres_total",
 			"Total number of albums by genre",
 			[]string{"genre"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		albumsMissingMetric: prometheus.NewDesc(
 			"lidarr_albums_missing_total",
 			"Total number of missing albums",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		songsMetric: prometheus.NewDesc(
 			"lidarr_songs_total",
 			"Total number of songs",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		songsMonitoredMetric: prometheus.NewDesc(
 			"lidarr_songs_monitored_total",
 			"Total number of monitored songs",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		songsDownloadedMetric: prometheus.NewDesc(
 			"lidarr_songs_downloaded_total",
 			"Total number of downloaded songs",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		songsQualitiesMetric: prometheus.NewDesc(
 			"lidarr_songs_quality_total",
 			"Total number of downloaded songs by quality",
 			[]string{"quality"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		errorMetric: prometheus.NewDesc(
 			"lidarr_collector_error",
 			"Error while collecting metrics",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 	}
 }
@@ -4,9 +4,9 @@ import (
 	"sync"
 	"time"
 
-	"github.com/onedr0p/exportarr/internal/client"
-	"github.com/onedr0p/exportarr/internal/config"
-	"github.com/onedr0p/exportarr/internal/model"
+	"github.com/onedr0p/exportarr/internal/arr/client"
+	"github.com/onedr0p/exportarr/internal/arr/config"
+	"github.com/onedr0p/exportarr/internal/arr/model"
 	"github.com/prometheus/client_golang/prometheus"
 	"go.uber.org/zap"
 )
@@ -92,7 +92,7 @@ func (u *userAgentStatCache) UpdateKey(key string, value model.UserAgentStats) m
 }
 
 type prowlarrCollector struct {
-	config             *config.Config     // App configuration
+	config             *config.ArrConfig  // App configuration
 	indexerStatCache   indexerStatCache   // Cache of indexer stats
 	userAgentStatCache userAgentStatCache // Cache of user agent stats
 	lastStatUpdate     time.Time          // Last time stat caches were updated
@@ -115,7 +115,7 @@ type prowlarrCollector struct {
 
 }
 
-func NewProwlarrCollector(c *config.Config) *prowlarrCollector {
+func NewProwlarrCollector(c *config.ArrConfig) *prowlarrCollector {
 	lastStatUpdate := time.Now()
 	if c.Prowlarr.Backfill || !c.Prowlarr.BackfillSinceTime.IsZero() {
 		lastStatUpdate = c.Prowlarr.BackfillSinceTime
@@ -129,97 +129,97 @@ func NewProwlarrCollector(c *config.Config) *prowlarrCollector {
 			"prowlarr_indexer_total",
 			"Total number of configured indexers",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		indexerEnabledMetric: prometheus.NewDesc(
 			"prowlarr_indexer_enabled_total",
 			"Total number of enabled indexers",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		indexerAverageResponseTimeMetric: prometheus.NewDesc(
 			"prowlarr_indexer_average_response_time_ms",
 			"Average response time of indexers in ms",
 			[]string{"indexer"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		indexerQueriesMetric: prometheus.NewDesc(
 			"prowlarr_indexer_queries_total",
 			"Total number of queries",
 			[]string{"indexer"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		indexerGrabsMetric: prometheus.NewDesc(
 			"prowlarr_indexer_grabs_total",
 			"Total number of grabs",
 			[]string{"indexer"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		indexerRssQueriesMetric: prometheus.NewDesc(
 			"prowlarr_indexer_rss_queries_total",
 			"Total number of rss queries",
 			[]string{"indexer"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		indexerAuthQueriesMetric: prometheus.NewDesc(
 			"prowlarr_indexer_auth_queries_total",
 			"Total number of auth queries",
 			[]string{"indexer"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		indexerFailedQueriesMetric: prometheus.NewDesc(
 			"prowlarr_indexer_failed_queries_total",
 			"Total number of failed queries",
 			[]string{"indexer"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		indexerFailedGrabsMetric: prometheus.NewDesc(
 			"prowlarr_indexer_failed_grabs_total",
 			"Total number of failed grabs",
 			[]string{"indexer"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		indexerFailedRssQueriesMetric: prometheus.NewDesc(
 			"prowlarr_indexer_failed_rss_queries_total",
 			"Total number of failed rss queries",
 			[]string{"indexer"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		indexerFailedAuthQueriesMetric: prometheus.NewDesc(
 			"prowlarr_indexer_failed_auth_queries_total",
 			"Total number of failed auth queries",
 			[]string{"indexer"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		indexerVipExpirationMetric: prometheus.NewDesc(
 			"prowlarr_indexer_vip_expires_in_seconds",
 			"VIP expiration date",
 			[]string{"indexer"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		userAgentMetric: prometheus.NewDesc(
 			"prowlarr_user_agent_total",
 			"Total number of active user agents",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		userAgentQueriesMetric: prometheus.NewDesc(
 			"prowlarr_user_agent_queries_total",
 			"Total number of queries",
 			[]string{"user_agent"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		userAgentGrabsMetric: prometheus.NewDesc(
 			"prowlarr_user_agent_grabs_total",
 			"Total number of grabs",
 			[]string{"user_agent"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		errorMetric: prometheus.NewDesc(
 			"prowlarr_collector_error",
 			"Error while collecting metrics",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 	}
 }
@@ -3,33 +3,33 @@ package collector
 import (
 	"fmt"
 
-	"github.com/onedr0p/exportarr/internal/client"
-	"github.com/onedr0p/exportarr/internal/config"
-	"github.com/onedr0p/exportarr/internal/model"
+	"github.com/onedr0p/exportarr/internal/arr/client"
+	"github.com/onedr0p/exportarr/internal/arr/config"
+	"github.com/onedr0p/exportarr/internal/arr/model"
 	"github.com/prometheus/client_golang/prometheus"
 	"go.uber.org/zap"
 )
 
 type queueCollector struct {
-	config      *config.Config   // App configuration
-	queueMetric *prometheus.Desc // Total number of queue items
-	errorMetric *prometheus.Desc // Error Description for use with InvalidMetric
+	config      *config.ArrConfig // App configuration
+	queueMetric *prometheus.Desc  // Total number of queue items
+	errorMetric *prometheus.Desc  // Error Description for use with InvalidMetric
 }
 
-func NewQueueCollector(c *config.Config) *queueCollector {
+func NewQueueCollector(c *config.ArrConfig) *queueCollector {
 	return &queueCollector{
 		config: c,
 		queueMetric: prometheus.NewDesc(
-			fmt.Sprintf("%s_queue_total", c.Arr),
+			fmt.Sprintf("%s_queue_total", c.App),
 			"Total number of items in the queue by status, download_status, and download_state",
 			[]string{"status", "download_status", "download_state"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		errorMetric: prometheus.NewDesc(
-			fmt.Sprintf("%s_queue_collector_error", c.Arr),
+			fmt.Sprintf("%s_queue_collector_error", c.App),
 			"Error while collecting metrics",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 	}
 }
@@ -50,9 +50,9 @@ func (collector *queueCollector) Collect(ch chan<- prometheus.Metric) {
 
 	params := map[string]string{"page": "1"}
 	if collector.config.EnableUnknownQueueItems {
-		if collector.config.Arr == "sonarr" {
+		if collector.config.App == "sonarr" {
 			params["includeUnknownSeriesItems"] = "true"
-		} else if collector.config.Arr == "radarr" {
+		} else if collector.config.App == "radarr" {
 			params["includeUnknownMovieItems"] = "true"
 		}
 	}
@@ -1,82 +1,82 @@
 package collector
 
 import (
-	"github.com/onedr0p/exportarr/internal/client"
-	"github.com/onedr0p/exportarr/internal/config"
-	"github.com/onedr0p/exportarr/internal/model"
+	"github.com/onedr0p/exportarr/internal/arr/client"
+	"github.com/onedr0p/exportarr/internal/arr/config"
+	"github.com/onedr0p/exportarr/internal/arr/model"
 	"github.com/prometheus/client_golang/prometheus"
 	"go.uber.org/zap"
 )
 
 type radarrCollector struct {
-	config                 *config.Config   // App configuration
-	movieMetric            *prometheus.Desc // Total number of movies
-	movieDownloadedMetric  *prometheus.Desc // Total number of downloaded movies
-	movieMonitoredMetric   *prometheus.Desc // Total number of monitored movies
-	movieUnmonitoredMetric *prometheus.Desc // Total number of unmonitored movies
-	movieWantedMetric      *prometheus.Desc // Total number of wanted movies
-	movieMissingMetric     *prometheus.Desc // Total number of missing movies
-	movieQualitiesMetric   *prometheus.Desc // Total number of movies by quality
-	movieFileSizeMetric    *prometheus.Desc // Total fizesize of all movies in bytes
-	errorMetric            *prometheus.Desc // Error Description for use with InvalidMetric
+	config                 *config.ArrConfig // App configuration
+	movieMetric            *prometheus.Desc  // Total number of movies
+	movieDownloadedMetric  *prometheus.Desc  // Total number of downloaded movies
+	movieMonitoredMetric   *prometheus.Desc  // Total number of monitored movies
+	movieUnmonitoredMetric *prometheus.Desc  // Total number of unmonitored movies
+	movieWantedMetric      *prometheus.Desc  // Total number of wanted movies
+	movieMissingMetric     *prometheus.Desc  // Total number of missing movies
+	movieQualitiesMetric   *prometheus.Desc  // Total number of movies by quality
+	movieFileSizeMetric    *prometheus.Desc  // Total fizesize of all movies in bytes
+	errorMetric            *prometheus.Desc  // Error Description for use with InvalidMetric
 }
 
-func NewRadarrCollector(c *config.Config) *radarrCollector {
+func NewRadarrCollector(c *config.ArrConfig) *radarrCollector {
 	return &radarrCollector{
 		config: c,
 		movieMetric: prometheus.NewDesc(
 			"radarr_movie_total",
 			"Total number of movies",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		movieDownloadedMetric: prometheus.NewDesc(
 			"radarr_movie_downloaded_total",
 			"Total number of downloaded movies",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		movieMonitoredMetric: prometheus.NewDesc(
 			"radarr_movie_monitored_total",
 			"Total number of monitored movies",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		movieUnmonitoredMetric: prometheus.NewDesc(
 			"radarr_movie_unmonitored_total",
 			"Total number of unmonitored movies",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		movieWantedMetric: prometheus.NewDesc(
 			"radarr_movie_wanted_total",
 			"Total number of wanted movies",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		movieMissingMetric: prometheus.NewDesc(
 			"radarr_movie_missing_total",
 			"Total number of missing movies",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		movieFileSizeMetric: prometheus.NewDesc(
 			"radarr_movie_filesize_total",
 			"Total filesize of all movies",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		movieQualitiesMetric: prometheus.NewDesc(
 			"radarr_movie_quality_total",
 			"Total number of downloaded movies by quality",
 			[]string{"quality"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		errorMetric: prometheus.NewDesc(
 			"radarr_collector_error",
 			"Error while collecting metrics",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 	}
 }
@@ -3,103 +3,103 @@ package collector
 import (
 	"time"
 
-	"github.com/onedr0p/exportarr/internal/client"
-	"github.com/onedr0p/exportarr/internal/config"
-	"github.com/onedr0p/exportarr/internal/model"
+	"github.com/onedr0p/exportarr/internal/arr/client"
+	"github.com/onedr0p/exportarr/internal/arr/config"
+	"github.com/onedr0p/exportarr/internal/arr/model"
 	"github.com/prometheus/client_golang/prometheus"
 	"go.uber.org/zap"
 )
 
 type readarrCollector struct {
-	config                  *config.Config   // App configuration
-	authorMetric            *prometheus.Desc // Total number of authors
-	authorDownloadedMetric  *prometheus.Desc // Total number of downloaded authors
-	authorMonitoredMetric   *prometheus.Desc // Total number of monitored authors
-	authorUnmonitoredMetric *prometheus.Desc // Total number of unmonitored authors
-	authorFileSizeMetric    *prometheus.Desc // Total filesize of all authors in bytes
-	bookMetric              *prometheus.Desc // Total number of monitored books
-	bookGrabbedMetric       *prometheus.Desc // Total number of grabbed books
-	bookDownloadedMetric    *prometheus.Desc // Total number of downloaded books
-	bookMonitoredMetric     *prometheus.Desc // Total number of monitored books
-	bookUnmonitoredMetric   *prometheus.Desc // Total number of unmonitored books
-	bookMissingMetric       *prometheus.Desc // Total number of missing books
-	errorMetric             *prometheus.Desc // Error Description for use with InvalidMetric
+	config                  *config.ArrConfig // App configuration
+	authorMetric            *prometheus.Desc  // Total number of authors
+	authorDownloadedMetric  *prometheus.Desc  // Total number of downloaded authors
+	authorMonitoredMetric   *prometheus.Desc  // Total number of monitored authors
+	authorUnmonitoredMetric *prometheus.Desc  // Total number of unmonitored authors
+	authorFileSizeMetric    *prometheus.Desc  // Total filesize of all authors in bytes
+	bookMetric              *prometheus.Desc  // Total number of monitored books
+	bookGrabbedMetric       *prometheus.Desc  // Total number of grabbed books
+	bookDownloadedMetric    *prometheus.Desc  // Total number of downloaded books
+	bookMonitoredMetric     *prometheus.Desc  // Total number of monitored books
+	bookUnmonitoredMetric   *prometheus.Desc  // Total number of unmonitored books
+	bookMissingMetric       *prometheus.Desc  // Total number of missing books
+	errorMetric             *prometheus.Desc  // Error Description for use with InvalidMetric
 }
 
-func NewReadarrCollector(c *config.Config) *readarrCollector {
+func NewReadarrCollector(c *config.ArrConfig) *readarrCollector {
 	return &readarrCollector{
 		config: c,
 		authorMetric: prometheus.NewDesc(
 			"readarr_author_total",
 			"Total number of authors",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		authorDownloadedMetric: prometheus.NewDesc(
 			"readarr_author_downloaded_total",
 			"Total number of downloaded authors",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		authorMonitoredMetric: prometheus.NewDesc(
 			"readarr_author_monitored_total",
 			"Total number of monitored authors",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		authorUnmonitoredMetric: prometheus.NewDesc(
 			"readarr_author_unmonitored_total",
 			"Total number of unmonitored authors",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		authorFileSizeMetric: prometheus.NewDesc(
 			"readarr_author_filesize_bytes",
 			"Total filesize of all authors in bytes",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		bookMetric: prometheus.NewDesc(
 			"readarr_book_total",
 			"Total number of books",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		bookGrabbedMetric: prometheus.NewDesc(
 			"readarr_book_grabbed_total",
 			"Total number of grabbed books",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		bookDownloadedMetric: prometheus.NewDesc(
 			"readarr_book_downloaded_total",
 			"Total number of downloaded books",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		bookMonitoredMetric: prometheus.NewDesc(
 			"readarr_book_monitored_total",
 			"Total number of monitored books",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		bookUnmonitoredMetric: prometheus.NewDesc(
 			"readarr_book_unmonitored_total",
 			"Total number of unmonitored books",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		bookMissingMetric: prometheus.NewDesc(
 			"readarr_book_missing_total",
 			"Total number of missing books",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		errorMetric: prometheus.NewDesc(
 			"readarr_collector_error",
 			"Error while collecting metrics",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 	}
 }
@@ -3,33 +3,33 @@ package collector
 import (
 	"fmt"
 
-	"github.com/onedr0p/exportarr/internal/client"
-	"github.com/onedr0p/exportarr/internal/config"
-	"github.com/onedr0p/exportarr/internal/model"
+	"github.com/onedr0p/exportarr/internal/arr/client"
+	"github.com/onedr0p/exportarr/internal/arr/config"
+	"github.com/onedr0p/exportarr/internal/arr/model"
 	"github.com/prometheus/client_golang/prometheus"
 	"go.uber.org/zap"
 )
 
 type rootFolderCollector struct {
-	config           *config.Config   // App configuration
-	rootFolderMetric *prometheus.Desc // Total number of root folders
-	errorMetric      *prometheus.Desc // Error Description for use with InvalidMetric
+	config           *config.ArrConfig // App configuration
+	rootFolderMetric *prometheus.Desc  // Total number of root folders
+	errorMetric      *prometheus.Desc  // Error Description for use with InvalidMetric
 }
 
-func NewRootFolderCollector(c *config.Config) *rootFolderCollector {
+func NewRootFolderCollector(c *config.ArrConfig) *rootFolderCollector {
 	return &rootFolderCollector{
 		config: c,
 		rootFolderMetric: prometheus.NewDesc(
-			fmt.Sprintf("%s_rootfolder_freespace_bytes", c.Arr),
+			fmt.Sprintf("%s_rootfolder_freespace_bytes", c.App),
 			"Root folder space in bytes by path",
 			[]string{"path"},
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		errorMetric: prometheus.NewDesc(
-			fmt.Sprintf("%s_rootfolder_collector_error", c.Arr),
+			fmt.Sprintf("%s_rootfolder_collector_error", c.App),
 			"Error while collecting metrics",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 	}
 }
@@ -4,131 +4,131 @@ import (
 	"fmt"
 	"time"
 
-	"github.com/onedr0p/exportarr/internal/client"
-	"github.com/onedr0p/exportarr/internal/config"
-	"github.com/onedr0p/exportarr/internal/model"
+	"github.com/onedr0p/exportarr/internal/arr/client"
+	"github.com/onedr0p/exportarr/internal/arr/config"
+	"github.com/onedr0p/exportarr/internal/arr/model"
 	"github.com/prometheus/client_golang/prometheus"
 	"go.uber.org/zap"
 )
 
 type sonarrCollector struct {
-	config                   *config.Config   // App configuration
-	seriesMetric             *prometheus.Desc // Total number of series
-	seriesDownloadedMetric   *prometheus.Desc // Total number of downloaded series
-	seriesMonitoredMetric    *prometheus.Desc // Total number of monitored series
-	seriesUnmonitoredMetric  *prometheus.Desc // Total number of unmonitored series
-	seriesFileSizeMetric     *prometheus.Desc // Total fizesize of all series in bytes
-	seasonMetric             *prometheus.Desc // Total number of seasons
-	seasonDownloadedMetric   *prometheus.Desc // Total number of downloaded seasons
-	seasonMonitoredMetric    *prometheus.Desc // Total number of monitored seasons
-	seasonUnmonitoredMetric  *prometheus.Desc // Total number of unmonitored seasons
-	episodeMetric            *prometheus.Desc // Total number of episodes
-	episodeMonitoredMetric   *prometheus.Desc // Total number of monitored episodes
-	episodeUnmonitoredMetric *prometheus.Desc // Total number of unmonitored episodes
-	episodeDownloadedMetric  *prometheus.Desc // Total number of downloaded episodes
-	episodeMissingMetric     *prometheus.Desc // Total number of missing episodes
-	episodeQualitiesMetric   *prometheus.Desc // Total number of episodes by quality
-	errorMetric              *prometheus.Desc // Error Description for use with InvalidMetric
+	config                   *config.ArrConfig // App configuration
+	seriesMetric             *prometheus.Desc  // Total number of series
+	seriesDownloadedMetric   *prometheus.Desc  // Total number of downloaded series
+	seriesMonitoredMetric    *prometheus.Desc  // Total number of monitored series
+	seriesUnmonitoredMetric  *prometheus.Desc  // Total number of unmonitored series
+	seriesFileSizeMetric     *prometheus.Desc  // Total fizesize of all series in bytes
+	seasonMetric             *prometheus.Desc  // Total number of seasons
+	seasonDownloadedMetric   *prometheus.Desc  // Total number of downloaded seasons
+	seasonMonitoredMetric    *prometheus.Desc  // Total number of monitored seasons
+	seasonUnmonitoredMetric  *prometheus.Desc  // Total number of unmonitored seasons
+	episodeMetric            *prometheus.Desc  // Total number of episodes
+	episodeMonitoredMetric   *prometheus.Desc  // Total number of monitored episodes
+	episodeUnmonitoredMetric *prometheus.Desc  // Total number of unmonitored episodes
+	episodeDownloadedMetric  *prometheus.Desc  // Total number of downloaded episodes
+	episodeMissingMetric     *prometheus.Desc  // Total number of missing episodes
+	episodeQualitiesMetric   *prometheus.Desc  // Total number of episodes by quality
+	errorMetric              *prometheus.Desc  // Error Description for use with InvalidMetric
 }
 
-func NewSonarrCollector(conf *config.Config) *sonarrCollector {
+func NewSonarrCollector(conf *config.ArrConfig) *sonarrCollector {
 	return &sonarrCollector{
 		config: conf,
 		seriesMetric: prometheus.NewDesc(
 			"sonarr_series_total",
 			"Total number of series",
 			nil,
-			prometheus.Labels{"url": conf.URLLabel()},
+			prometheus.Labels{"url": conf.URL},
 		),
 		seriesDownloadedMetric: prometheus.NewDesc(
 			"sonarr_series_downloaded_total",
 			"Total number of downloaded series",
 			nil,
-			prometheus.Labels{"url": conf.URLLabel()},
+			prometheus.Labels{"url": conf.URL},
 		),
 		seriesMonitoredMetric: prometheus.NewDesc(
 			"sonarr_series_monitored_total",
 			"Total number of monitored series",
 			nil,
-			prometheus.Labels{"url": conf.URLLabel()},
+			prometheus.Labels{"url": conf.URL},
 		),
 		seriesUnmonitoredMetric: prometheus.NewDesc(
 			"sonarr_series_unmonitored_total",
 			"Total number of unmonitored series",
 			nil,
-			prometheus.Labels{"url": conf.URLLabel()},
+			prometheus.Labels{"url": conf.URL},
 		),
 		seriesFileSizeMetric: prometheus.NewDesc(
 			"sonarr_series_filesize_bytes",
 			"Total fizesize of all series in bytes",
 			nil,
-			prometheus.Labels{"url": conf.URLLabel()},
+			prometheus.Labels{"url": conf.URL},
 		),
 		seasonMetric: prometheus.NewDesc(
 			"sonarr_season_total",
 			"Total number of seasons",
 			nil,
-			prometheus.Labels{"url": conf.URLLabel()},
+			prometheus.Labels{"url": conf.URL},
 		),
 		seasonDownloadedMetric: prometheus.NewDesc(
 			"sonarr_season_downloaded_total",
 			"Total number of downloaded seasons",
 			nil,
-			prometheus.Labels{"url": conf.URLLabel()},
+			prometheus.Labels{"url": conf.URL},
 		),
 		seasonMonitoredMetric: prometheus.NewDesc(
 			"sonarr_season_monitored_total",
 			"Total number of monitored seasons",
 			nil,
-			prometheus.Labels{"url": conf.URLLabel()},
+			prometheus.Labels{"url": conf.URL},
 		),
 		seasonUnmonitoredMetric: prometheus.NewDesc(
 			"sonarr_season_unmonitored_total",
 			"Total number of unmonitored seasons",
 			nil,
-			prometheus.Labels{"url": conf.URLLabel()},
+			prometheus.Labels{"url": conf.URL},
 		),
 		episodeMetric: prometheus.NewDesc(
 			"sonarr_episode_total",
 			"Total number of episodes",
 			nil,
-			prometheus.Labels{"url": conf.URLLabel()},
+			prometheus.Labels{"url": conf.URL},
 		),
 		episodeMonitoredMetric: prometheus.NewDesc(
 			"sonarr_episode_monitored_total",
 			"Total number of monitored episodes",
 			nil,
-			prometheus.Labels{"url": conf.URLLabel()},
+			prometheus.Labels{"url": conf.URL},
 		),
 		episodeUnmonitoredMetric: prometheus.NewDesc(
 			"sonarr_episode_unmonitored_total",
 			"Total number of unmonitored episodes",
 			nil,
-			prometheus.Labels{"url": conf.URLLabel()},
+			prometheus.Labels{"url": conf.URL},
 		),
 		episodeDownloadedMetric: prometheus.NewDesc(
 			"sonarr_episode_downloaded_total",
 			"Total number of downloaded episodes",
 			nil,
-			prometheus.Labels{"url": conf.URLLabel()},
+			prometheus.Labels{"url": conf.URL},
 		),
 		episodeMissingMetric: prometheus.NewDesc(
 			"sonarr_episode_missing_total",
 			"Total number of missing episodes",
 			nil,
-			prometheus.Labels{"url": conf.URLLabel()},
+			prometheus.Labels{"url": conf.URL},
 		),
 		episodeQualitiesMetric: prometheus.NewDesc(
 			"sonarr_episode_quality_total",
 			"Total number of downloaded episodes by quality",
 			[]string{"quality"},
-			prometheus.Labels{"url": conf.URLLabel()},
+			prometheus.Labels{"url": conf.URL},
 		),
 		errorMetric: prometheus.NewDesc(
 			"sonarr_collector_error",
 			"Error while collecting metrics",
 			nil,
-			prometheus.Labels{"url": conf.URLLabel()},
+			prometheus.Labels{"url": conf.URL},
 		),
 	}
 }
@@ -3,34 +3,34 @@ package collector
 import (
 	"fmt"
 
-	"github.com/onedr0p/exportarr/internal/client"
-	"github.com/onedr0p/exportarr/internal/config"
-	"github.com/onedr0p/exportarr/internal/model"
+	"github.com/onedr0p/exportarr/internal/arr/client"
+	"github.com/onedr0p/exportarr/internal/arr/config"
+	"github.com/onedr0p/exportarr/internal/arr/model"
 	"github.com/prometheus/client_golang/prometheus"
 	"go.uber.org/zap"
 )
 
 type systemStatusCollector struct {
-	config       *config.Config   // App configuration
-	configFile   *model.Config    // *arr configuration from config.xml
-	systemStatus *prometheus.Desc // Total number of system statuses
-	errorMetric  *prometheus.Desc // Error Description for use with InvalidMetric
+	config       *config.ArrConfig // App configuration
+	configFile   *model.Config     // *arr configuration from config.xml
+	systemStatus *prometheus.Desc  // Total number of system statuses
+	errorMetric  *prometheus.Desc  // Error Description for use with InvalidMetric
 }
 
-func NewSystemStatusCollector(c *config.Config) *systemStatusCollector {
+func NewSystemStatusCollector(c *config.ArrConfig) *systemStatusCollector {
 	return &systemStatusCollector{
 		config: c,
 		systemStatus: prometheus.NewDesc(
-			fmt.Sprintf("%s_system_status", c.Arr),
+			fmt.Sprintf("%s_system_status", c.App),
 			"System Status",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 		errorMetric: prometheus.NewDesc(
-			fmt.Sprintf("%s_status_collector_error", c.Arr),
+			fmt.Sprintf("%s_status_collector_error", c.App),
 			"Error while collecting metrics",
 			nil,
-			prometheus.Labels{"url": c.URLLabel()},
+			prometheus.Labels{"url": c.URL},
 		),
 	}
 }
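Every collector hunk above applies the same mechanical rename: the struct field moves from *config.Config to *config.ArrConfig, the per-app metric prefix is read from c.App (koanf key "arr") rather than c.Arr, and the url label is taken from the plain c.URL field. A minimal, self-contained sketch of the metric names this produces (the app values below are illustrative only):

package main

import "fmt"

func main() {
    // Hypothetical app prefixes; the real value comes from ArrConfig.App.
    for _, app := range []string{"radarr", "sonarr"} {
        fmt.Printf("%s_queue_total\n", app)   // e.g. radarr_queue_total
        fmt.Printf("%s_system_status\n", app) // e.g. radarr_system_status
    }
}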
internal/arr/config/arr.go (new file, 169 lines)
@ -0,0 +1,169 @@
|
||||
package config

import (
"fmt"
"net/url"
"strings"

"github.com/gookit/validate"
"github.com/knadh/koanf/providers/confmap"
"github.com/knadh/koanf/providers/env"
"github.com/knadh/koanf/providers/file"
"github.com/knadh/koanf/providers/posflag"
"github.com/knadh/koanf/v2"
flag "github.com/spf13/pflag"

base_config "github.com/onedr0p/exportarr/internal/config"
)

func RegisterArrFlags(flags *flag.FlagSet) {
flags.StringP("config", "c", "", "*arr config.xml file for parsing authentication information")
flags.String("auth-username", "", "Username for basic or form auth")
flags.String("auth-password", "", "Password for basic or form auth")
flags.Bool("form-auth", false, "Use form based authentication")
flags.Bool("enable-unknown-queue-items", false, "Enable unknown queue items")
flags.Bool("enable-additional-metrics", false, "Enable additional metrics")

// Backwards Compatibility - normalize function will hide these from --help. remove in v2.0.0
flags.String("basic-auth-username", "", "Username for basic or form auth")
flags.String("basic-auth-password", "", "Password for basic or form auth")
flags.SetNormalizeFunc(backwardsCompatibilityNormalizeFunc)
}

type ArrConfig struct {
App string `koanf:"arr"`
ApiVersion string `koanf:"api-version" validate:"required|in:v1,v3"`
XMLConfig string `koanf:"config"`
AuthUsername string `koanf:"auth-username"`
AuthPassword string `koanf:"auth-password"`
FormAuth bool `koanf:"form-auth"`
EnableUnknownQueueItems bool `koanf:"enable-unknown-queue-items"`
EnableAdditionalMetrics bool `koanf:"enable-additional-metrics"`
URL string `koanf:"url" validate:"required|url"` // stores rendered Arr URL (with api version)
ApiKey string `koanf:"api-key" validate:"required|regex:(^[a-z0-9]{32}$)"` // stores the API key
DisableSSLVerify bool `koanf:"disable-ssl-verify"` // stores the disable SSL verify flag
Prowlarr ProwlarrConfig `koanf:"prowlarr"`
k *koanf.Koanf
}

func (c *ArrConfig) UseBasicAuth() bool {
return !c.FormAuth && c.AuthUsername != "" && c.AuthPassword != ""
}

func (c *ArrConfig) UseFormAuth() bool {
return c.FormAuth
}

func (c *ArrConfig) BaseURL() string {
ret, _ := url.JoinPath(c.URL, "api", c.ApiVersion)
return ret
}

func LoadArrConfig(conf base_config.Config, flags *flag.FlagSet) (*ArrConfig, error) {
k := koanf.New(".")

// Defaults
err := k.Load(confmap.Provider(map[string]interface{}{
"api-version": "v3",
}, "."), nil)
if err != nil {
return nil, err
}

// Environment
err = k.Load(env.Provider("", ".", func(s string) string {
s = strings.ToLower(s)
s = strings.Replace(s, "__", ".", -1)
s = strings.Replace(s, "_", "-", -1)
return backwardsCompatibilityTransforms(s)
}), nil)
if err != nil {
return nil, err
}

// Flags
if err := k.Load(posflag.Provider(flags, ".", k), nil); err != nil {
return nil, err
}

// XMLConfig
xmlConfig := k.String("config")
if xmlConfig != "" {
err := k.Load(file.Provider(xmlConfig), XMLParser(), koanf.WithMergeFunc(XMLParser().Merge(conf.URL)))
if err != nil {
return nil, err
}
}

out := &ArrConfig{
URL: conf.URL,
ApiKey: conf.ApiKey,
DisableSSLVerify: conf.DisableSSLVerify,
k: k,
}
if err = k.Unmarshal("", out); err != nil {
return nil, err
}
return out, nil
}

func (c *ArrConfig) Validate() error {
v := validate.Struct(c)
if !v.Validate() {
return v.Errors
}

if c.AuthPassword != "" && c.AuthUsername == "" {
return fmt.Errorf("auth-username is required when auth-password is set")
}
if c.AuthUsername != "" && c.AuthPassword == "" {
return fmt.Errorf("auth-password is required when auth-username is set")
}
if c.FormAuth && (c.AuthUsername == "" || c.AuthPassword == "") {
return fmt.Errorf("auth-username and auth-password are required when form-auth is set")
}

return nil
}

func (c ArrConfig) Messages() map[string]string {
return validate.MS{
"ApiKey.regex": "api-key must be a 32 character hex string",
"LogLevel.ValidateLogLevel": "log-level must be one of: debug, info, warn, error, dpanic, panic, fatal",
}
}

func (c ArrConfig) Translates() map[string]string {
return validate.MS{
"ApiVersion": "api-version",
"XMLConfig": "config",
"AuthUsername": "auth-username",
"AuthPassword": "auth-password",
"FormAuth": "form-auth",
"EnableUnknownQueueItems": "enable-unknown-queue-items",
"EnableAdditionalMetrics": "enable-additional-metrics",
}
}

// Remove in v2.0.0
func backwardsCompatibilityNormalizeFunc(f *flag.FlagSet, name string) flag.NormalizedName {
if name == "basic-auth-username" {
return flag.NormalizedName("auth-username")
}
if name == "basic-auth-password" {
return flag.NormalizedName("auth-password")
}
return flag.NormalizedName(name)
}

// Remove in v2.0.0
func backwardsCompatibilityTransforms(s string) string {
switch s {
case "basic-auth-username":
return "auth-username"
case "basic-auth-password":
return "auth-password"
default:
return s
}
}
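For orientation, a hedged sketch (not part of this commit) of how a subcommand is expected to wire the new package together; the flag set name, URL, API key, and app name are placeholder assumptions. base_config.Config carries the generic exporter settings, and ArrConfig layers the *arr-specific ones on top.

package main

import (
	"fmt"
	"log"

	"github.com/onedr0p/exportarr/internal/arr/config"
	base_config "github.com/onedr0p/exportarr/internal/config"
	flag "github.com/spf13/pflag"
)

func main() {
	// Sketch only: the flag set, URL, and API key below are placeholder values.
	flags := flag.NewFlagSet("radarr", flag.ContinueOnError)
	config.RegisterArrFlags(flags)

	c, err := config.LoadArrConfig(base_config.Config{
		URL:    "http://localhost:7878",
		ApiKey: "abcdef0123456789abcdef0123456789",
	}, flags)
	if err != nil {
		log.Fatal(err)
	}
	c.App = "radarr" // set by the subcommand, as in internal/commands/arr.go
	if err := c.Validate(); err != nil {
		log.Fatal(err)
	}
	fmt.Println(c.BaseURL()) // http://localhost:7878/api/v3 with the default api-version
}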
255
internal/arr/config/arr_test.go
Normal file
@ -0,0 +1,255 @@
package config
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
base_config "github.com/onedr0p/exportarr/internal/config"
|
||||
"github.com/spf13/pflag"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func testFlagSet() *pflag.FlagSet {
|
||||
ret := pflag.NewFlagSet("test", pflag.ContinueOnError)
|
||||
RegisterArrFlags(ret)
|
||||
return ret
|
||||
}
|
||||
|
||||
func TestUseAuth(t *testing.T) {
|
||||
c := ArrConfig{
|
||||
AuthUsername: "user",
|
||||
AuthPassword: "pass",
|
||||
}
|
||||
require := require.New(t)
|
||||
require.True(c.UseBasicAuth())
|
||||
require.False(c.UseFormAuth())
|
||||
c.FormAuth = true
|
||||
require.True(c.UseFormAuth())
|
||||
require.False(c.UseBasicAuth())
|
||||
|
||||
}
|
||||
|
||||
func TestBaseURL(t *testing.T) {
|
||||
c := ArrConfig{
|
||||
URL: "http://localhost:8080",
|
||||
ApiVersion: "v1",
|
||||
}
|
||||
require := require.New(t)
|
||||
require.Equal("http://localhost:8080/api/v1", c.BaseURL())
|
||||
}
|
||||
|
||||
func TestLoadConfig_Defaults(t *testing.T) {
|
||||
flags := testFlagSet()
|
||||
c := base_config.Config{
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
DisableSSLVerify: true,
|
||||
}
|
||||
|
||||
require := require.New(t)
|
||||
|
||||
config, err := LoadArrConfig(c, flags)
|
||||
require.NoError(err)
|
||||
|
||||
require.Equal("v3", config.ApiVersion)
|
||||
|
||||
// base config values are not overwritten
|
||||
require.Equal("http://localhost", config.URL)
|
||||
require.Equal("abcdef0123456789abcdef0123456789", config.ApiKey)
|
||||
require.True(config.DisableSSLVerify)
|
||||
}
|
||||
|
||||
func TestLoadConfig_Environment(t *testing.T) {
|
||||
flags := testFlagSet()
|
||||
c := base_config.Config{
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
DisableSSLVerify: true,
|
||||
}
|
||||
|
||||
require := require.New(t)
|
||||
t.Setenv("AUTH_USERNAME", "user")
|
||||
t.Setenv("AUTH_PASSWORD", "pass")
|
||||
t.Setenv("FORM_AUTH", "true")
|
||||
t.Setenv("ENABLE_UNKNOWN_QUEUE_ITEMS", "true")
|
||||
t.Setenv("ENABLE_ADDITIONAL_METRICS", "true")
|
||||
|
||||
config, err := LoadArrConfig(c, flags)
|
||||
require.NoError(err)
|
||||
|
||||
require.Equal("user", config.AuthUsername)
|
||||
require.Equal("pass", config.AuthPassword)
|
||||
require.True(config.FormAuth)
|
||||
require.True(config.EnableUnknownQueueItems)
|
||||
require.True(config.EnableAdditionalMetrics)
|
||||
|
||||
// defaults are not overwritten
|
||||
require.Equal("v3", config.ApiVersion)
|
||||
|
||||
// base config values are not overwritten
|
||||
require.Equal("http://localhost", config.URL)
|
||||
require.Equal("abcdef0123456789abcdef0123456789", config.ApiKey)
|
||||
require.True(config.DisableSSLVerify)
|
||||
|
||||
}
|
||||
|
||||
func TestLoadConfig_PartialEnvironment(t *testing.T) {
|
||||
flags := testFlagSet()
|
||||
flags.Set("auth-username", "user")
|
||||
flags.Set("auth-password", "pass")
|
||||
|
||||
t.Setenv("ENABLE_UNKNOWN_QUEUE_ITEMS", "true")
|
||||
t.Setenv("ENABLE_ADDITIONAL_METRICS", "true")
|
||||
|
||||
c := base_config.Config{
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
}
|
||||
|
||||
require := require.New(t)
|
||||
config, err := LoadArrConfig(c, flags)
|
||||
require.NoError(err)
|
||||
|
||||
require.Equal("user", config.AuthUsername)
|
||||
require.Equal("pass", config.AuthPassword)
|
||||
require.True(config.EnableUnknownQueueItems)
|
||||
require.True(config.EnableAdditionalMetrics)
|
||||
|
||||
require.Equal("http://localhost", config.URL)
|
||||
require.Equal("abcdef0123456789abcdef0123456789", config.ApiKey)
|
||||
|
||||
require.Equal("v3", config.ApiVersion)
|
||||
|
||||
}
|
||||
|
||||
func TestLoadConfig_Flags(t *testing.T) {
|
||||
flags := testFlagSet()
|
||||
flags.Set("auth-username", "user")
|
||||
flags.Set("auth-password", "pass")
|
||||
flags.Set("form-auth", "true")
|
||||
flags.Set("enable-unknown-queue-items", "true")
|
||||
flags.Set("enable-additional-metrics", "true")
|
||||
c := base_config.Config{}
|
||||
|
||||
// should be overridden by flags
|
||||
t.Setenv("AUTH_USERNAME", "user2")
|
||||
|
||||
require := require.New(t)
|
||||
config, err := LoadArrConfig(c, flags)
|
||||
require.NoError(err)
|
||||
require.Equal("user", config.AuthUsername)
|
||||
require.Equal("pass", config.AuthPassword)
|
||||
require.True(config.FormAuth)
|
||||
require.True(config.EnableUnknownQueueItems)
|
||||
require.True(config.EnableAdditionalMetrics)
|
||||
|
||||
// defaults fall through
|
||||
require.Equal("v3", config.ApiVersion)
|
||||
}
|
||||
|
||||
func TestLoadConfig_XMLConfig(t *testing.T) {
|
||||
flags := testFlagSet()
|
||||
flags.Set("config", "test_fixtures/config.test_xml")
|
||||
c := base_config.Config{
|
||||
URL: "http://localhost",
|
||||
}
|
||||
|
||||
config, err := LoadArrConfig(c, flags)
|
||||
|
||||
require := require.New(t)
|
||||
require.NoError(err)
|
||||
|
||||
// scheme/host from the base config; port and the /asdf url-base from the XML; api-version defaulted in LoadArrConfig.
|
||||
require.Equal("http://localhost:7878/asdf", config.URL)
|
||||
require.Equal("abcdef0123456789abcdef0123456789", config.ApiKey)
|
||||
}
|
||||
|
||||
func TestValidate(t *testing.T) {
|
||||
params := []struct {
|
||||
name string
|
||||
config *ArrConfig
|
||||
valid bool
|
||||
}{
|
||||
{
|
||||
name: "good-basic-auth",
|
||||
config: &ArrConfig{
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
ApiVersion: "v3",
|
||||
AuthUsername: "user",
|
||||
AuthPassword: "pass",
|
||||
},
|
||||
valid: true,
|
||||
},
|
||||
{
|
||||
name: "good-form-auth",
|
||||
config: &ArrConfig{
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
ApiVersion: "v3",
|
||||
AuthUsername: "user",
|
||||
AuthPassword: "pass",
|
||||
FormAuth: true,
|
||||
},
|
||||
valid: true,
|
||||
},
|
||||
{
|
||||
name: "bad-api-key",
|
||||
config: &ArrConfig{
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef01234567",
|
||||
ApiVersion: "v3",
|
||||
},
|
||||
valid: false,
|
||||
},
|
||||
{
|
||||
name: "bad-api-version",
|
||||
config: &ArrConfig{
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
ApiVersion: "v2",
|
||||
},
|
||||
valid: false,
|
||||
},
|
||||
{
|
||||
name: "password-needs-username",
|
||||
config: &ArrConfig{
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
ApiVersion: "v3",
|
||||
AuthPassword: "password",
|
||||
},
|
||||
valid: false,
|
||||
},
|
||||
{
|
||||
name: "username-needs-password",
|
||||
config: &ArrConfig{
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
ApiVersion: "v3",
|
||||
AuthUsername: "username",
|
||||
},
|
||||
valid: false,
|
||||
},
|
||||
{
|
||||
name: "form-auth-needs-user-and-password",
|
||||
config: &ArrConfig{
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
ApiVersion: "v3",
|
||||
FormAuth: true,
|
||||
},
|
||||
valid: false,
|
||||
},
|
||||
}
|
||||
for _, p := range params {
|
||||
t.Run(p.name, func(t *testing.T) {
|
||||
require := require.New(t)
|
||||
err := p.config.Validate()
|
||||
if p.valid {
|
||||
require.NoError(err)
|
||||
} else {
|
||||
require.Error(err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -16,6 +16,10 @@ type ProwlarrConfig struct {
BackfillSinceTime time.Time
}

func RegisterProwlarrFlags(flags *flag.FlagSet) {
flags.Bool("backfill", false, "Backfill Prowlarr")
flags.String("backfill-since-date", "", "Date from which to start Prowlarr Backfill")
}
func (p ProwlarrConfig) Validate() error {
v := validate.Struct(p)
if !v.Validate() {
@ -40,7 +44,7 @@ func (p ProwlarrConfig) Translates() map[string]string {
}
}

func (c *Config) LoadProwlarrFlags(flags *flag.FlagSet) error {
func (c *ArrConfig) LoadProwlarrConfig(flags *flag.FlagSet) error {
err := c.k.Load(posflag.Provider(flags, ".", c.k), nil, koanf.WithMergeFunc(func(src, dest map[string]interface{}) error {
dest["prowlarr"] = src
return nil
@ -50,9 +54,9 @@ func (c *Config) LoadProwlarrFlags(flags *flag.FlagSet) error {
}

err = c.k.Unmarshal("prowlarr", &c.Prowlarr)
c.Prowlarr.BackfillSinceTime = c.k.Time("prowlarr.backfill-since-date", "2006-01-02")
if err != nil {
return err
}
c.Prowlarr.BackfillSinceTime = c.k.Time("prowlarr.backfill-since-date", "2006-01-02")
return nil
}
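A short usage sketch of the renamed LoadProwlarrConfig, mirroring how the prowlarr subcommand calls it; all values are placeholders, not taken from the diff. Note that the hunk above also moves the BackfillSinceTime assignment after the Unmarshal error check.

package main

import (
	"fmt"
	"log"

	"github.com/onedr0p/exportarr/internal/arr/config"
	base_config "github.com/onedr0p/exportarr/internal/config"
	flag "github.com/spf13/pflag"
)

func main() {
	// Sketch only: flag values stand in for real CLI input; URL and API key are placeholders.
	flags := flag.NewFlagSet("prowlarr", flag.ContinueOnError)
	config.RegisterArrFlags(flags)
	config.RegisterProwlarrFlags(flags)
	_ = flags.Set("backfill", "true")
	_ = flags.Set("backfill-since-date", "2021-01-01")

	c, err := config.LoadArrConfig(base_config.Config{
		URL:    "http://localhost:9696",
		ApiKey: "abcdef0123456789abcdef0123456789",
	}, flags)
	if err != nil {
		log.Fatal(err)
	}
	if err := c.LoadProwlarrConfig(flags); err != nil {
		log.Fatal(err)
	}
	fmt.Println(c.Prowlarr.Backfill, c.Prowlarr.BackfillSinceTime.Format("2006-01-02"))
}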
@ -4,9 +4,34 @@ import (
"testing"
"time"

"github.com/knadh/koanf/v2"
"github.com/spf13/pflag"
"github.com/stretchr/testify/require"
)

func TestLoadProwlarrConfig(t *testing.T) {
flags := pflag.FlagSet{}
RegisterProwlarrFlags(&flags)

flags.Set("backfill", "true")
flags.Set("backfill-since-date", "2021-01-01")
c := ArrConfig{
URL: "http://localhost",
ApiKey: "abcdef0123456789abcdef0123456789",
DisableSSLVerify: true,
k: koanf.New("."),
}
c.LoadProwlarrConfig(&flags)

require := require.New(t)
require.True(c.Prowlarr.Backfill)
require.Equal("2021-01-01", c.Prowlarr.BackfillSinceDate)
require.Equal("2021-01-01", c.Prowlarr.BackfillSinceTime.Format("2006-01-02"))
require.Equal("http://localhost", c.URL)
require.Equal("abcdef0123456789abcdef0123456789", c.ApiKey)
require.True(c.DisableSSLVerify)
}

func TestValidateProwlarr(t *testing.T) {
tm, _ := time.Parse("2006-01-02", "2021-01-01")
parameters := []struct {
@ -25,28 +25,34 @@ func (p *XML) Unmarshal(b []byte) (map[string]interface{}, error) {
return nil, err
}

return map[string]interface{}{
ret := map[string]interface{}{
"api-key": config.ApiKey,
"url-base": config.UrlBase,
"target-port": config.Port,
}, nil
}
return ret, nil
}

func (p *XML) Marshal(o map[string]interface{}) ([]byte, error) {
return nil, errors.New("not implemented")
}

func (p *XML) Merge(src, dest map[string]interface{}) error {
dest["api-key"] = src["api-key"]
func (p *XML) Merge(baseURL string) func(src, dest map[string]interface{}) error {
return func(src, dest map[string]interface{}) error {

u, err := url.Parse(dest["url"].(string))
if err != nil {
return err
if src["api-key"] != nil && src["api-key"].(string) != "" {
dest["api-key"] = src["api-key"]
}

u, err := url.Parse(baseURL)
if err != nil {
return err
}

// Add or replace target port
u.Host = u.Hostname() + ":" + src["target-port"].(string)
u = u.JoinPath(src["url-base"].(string))
dest["url"] = u.String()
return nil
}

// Add or replace target port
u.Host = u.Hostname() + ":" + src["target-port"].(string)
u = u.JoinPath(src["url-base"].(string))
dest["url"] = u.String()
return nil
}
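Purely illustrative (not part of the commit): the effect of the new Merge closure, reproduced with the standard library using the target-port and url-base values asserted in the XML config test above.

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Reproduces what Merge("http://localhost") does with target-port 7878
	// and url-base /asdf from the test fixture.
	u, err := url.Parse("http://localhost")
	if err != nil {
		panic(err)
	}
	u.Host = u.Hostname() + ":" + "7878" // add or replace the target port
	u = u.JoinPath("/asdf")              // append the url-base
	fmt.Println(u.String())              // http://localhost:7878/asdf
}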
@ -9,7 +9,6 @@ import (
"net/url"
"strings"

"github.com/onedr0p/exportarr/internal/config"
"go.uber.org/zap"
)

@ -20,37 +19,11 @@ type Client struct {
}

// NewClient method initializes a new *Arr client.
func NewClient(config *config.Config) (*Client, error) {
func NewClient(baseURL string, insecureSkipVerify bool, auth Authenticator) (*Client, error) {

baseURL, err := url.Parse(config.URL)
u, err := url.Parse(baseURL)
if err != nil {
return nil, fmt.Errorf("Failed to parse URL(%s): %w", config.URL, err)
}

baseTransport := http.DefaultTransport
if config.DisableSSLVerify {
baseTransport.(*http.Transport).TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
}

var auth Authenticator
if config.UseFormAuth() {
auth = &FormAuth{
Username: config.AuthUsername,
Password: config.AuthPassword,
ApiKey: config.ApiKey,
AuthBaseURL: baseURL,
Transport: baseTransport,
}
} else if config.UseBasicAuth() {
auth = &BasicAuth{
Username: config.AuthUsername,
Password: config.AuthPassword,
ApiKey: config.ApiKey,
}
} else {
auth = &ApiKeyAuth{
ApiKey: config.ApiKey,
}
return nil, fmt.Errorf("Failed to parse URL(%s): %w", baseURL, err)
}

return &Client{
@ -58,9 +31,9 @@ func NewClient(config *config.Config) (*Client, error) {
CheckRedirect: func(req *http.Request, via []*http.Request) error {
return http.ErrUseLastResponse
},
Transport: NewArrTransport(auth, baseTransport),
Transport: NewExportarrTransport(BaseTransport(insecureSkipVerify), auth),
},
URL: *baseURL.JoinPath("api", config.ApiVersion),
URL: *u,
}, nil
}

@ -112,3 +85,11 @@ func (c *Client) DoRequest(endpoint string, target interface{}, queryParams ...m
defer resp.Body.Close()
return c.unmarshalBody(resp.Body, target)
}

func BaseTransport(insecureSkipVerify bool) http.RoundTripper {
baseTransport := http.DefaultTransport
if insecureSkipVerify {
baseTransport.(*http.Transport).TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
}
return baseTransport
}
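A hedged sketch of the refactored NewClient used directly; the base URL, API key, and endpoint are placeholders, and the snippet assumes it is compiled inside this module since the package is internal.

package main

import (
	"fmt"
	"log"

	"github.com/onedr0p/exportarr/internal/client"
)

func main() {
	// Sketch only: placeholder URL and API key; auth may also be nil.
	c, err := client.NewClient(
		"http://localhost:8989/api/v3",
		false, // insecureSkipVerify
		&client.ApiKeyAuth{ApiKey: "abcdef0123456789abcdef0123456789"},
	)
	if err != nil {
		log.Fatal(err)
	}

	var status struct {
		Version string `json:"version"`
	}
	// "system/status" is a typical *arr endpoint; adjust for the target app.
	if err := c.DoRequest("system/status", &status); err != nil {
		log.Fatal(err)
	}
	fmt.Println(status.Version)
}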
|
||||
@ -8,24 +8,17 @@ import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/onedr0p/exportarr/internal/config"
|
||||
)
|
||||
|
||||
func TestNewClient(t *testing.T) {
|
||||
require := require.New(t)
|
||||
c := &config.Config{
|
||||
URL: "http://localhost:7878",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
ApiVersion: "v3",
|
||||
}
|
||||
u := "http://localhost"
|
||||
|
||||
client, err := NewClient(c)
|
||||
_, ok := client.httpClient.Transport.(*ArrTransport).auth.(*ApiKeyAuth)
|
||||
require.True(ok, "NewClient should return a client with an ApiKeyAuth authenticator")
|
||||
require.Nil(err, "NewClient should not return an error")
|
||||
require.NotNil(client, "NewClient should return a client")
|
||||
require.Equal(client.URL.String(), "http://localhost:7878/api/v3", "NewClient should return a client with the correct URL")
|
||||
require := require.New(t)
|
||||
c, err := NewClient(u, true, nil)
|
||||
require.NoError(err, "NewClient should not return an error")
|
||||
require.NotNil(c, "NewClient should return a client")
|
||||
require.Equal(u, c.URL.String(), "NewClient should set the correct URL")
|
||||
require.True(c.httpClient.Transport.(*ExportarrTransport).inner.(*http.Transport).TLSClientConfig.InsecureSkipVerify)
|
||||
}
|
||||
|
||||
// Need tests for FormAuth & BasicAuth
|
||||
@ -40,7 +33,7 @@ func TestDoRequest(t *testing.T) {
|
||||
{
|
||||
name: "noParams",
|
||||
endpoint: "queue",
|
||||
expectedURL: "/api/v3/queue",
|
||||
expectedURL: "/queue",
|
||||
},
|
||||
{
|
||||
name: "params",
|
||||
@ -49,7 +42,7 @@ func TestDoRequest(t *testing.T) {
|
||||
"page": "1",
|
||||
"testParam": "asdf",
|
||||
},
|
||||
expectedURL: "/api/v3/test?page=1&testParam=asdf",
|
||||
expectedURL: "/test?page=1&testParam=asdf",
|
||||
},
|
||||
}
|
||||
for _, param := range parameters {
|
||||
@ -61,17 +54,15 @@ func TestDoRequest(t *testing.T) {
|
||||
}))
|
||||
defer ts.Close()
|
||||
|
||||
c := &config.Config{
|
||||
URL: ts.URL,
|
||||
ApiVersion: "v3",
|
||||
}
|
||||
|
||||
target := struct {
|
||||
Test string `json:"test"`
|
||||
}{}
|
||||
expected := target
|
||||
expected.Test = "asdf2"
|
||||
client, err := NewClient(c)
|
||||
client, err := NewClient(ts.URL, false, nil)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
require.Nil(err, "NewClient should not return an error")
|
||||
require.NotNil(client, "NewClient should return a client")
|
||||
err = client.DoRequest(param.endpoint, &target, param.queryParams)
|
||||
@ -99,12 +90,7 @@ func TestDoRequest_PanicRecovery(t *testing.T) {
|
||||
}))
|
||||
defer ts.Close()
|
||||
|
||||
c := &config.Config{
|
||||
URL: ts.URL,
|
||||
ApiVersion: "v3",
|
||||
}
|
||||
|
||||
client, err := NewClient(c)
|
||||
client, err := NewClient(ts.URL, false, nil)
|
||||
require.Nil(err, "NewClient should not return an error")
|
||||
require.NotNil(client, "NewClient should return a client")
|
||||
|
||||
|
||||
@ -3,9 +3,6 @@ package client
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Authenticator interface {
|
||||
@ -13,22 +10,24 @@ type Authenticator interface {
|
||||
}
|
||||
|
||||
// ArrTransport is a http.RoundTripper that adds authentication to requests
|
||||
type ArrTransport struct {
|
||||
type ExportarrTransport struct {
|
||||
inner http.RoundTripper
|
||||
auth Authenticator
|
||||
}
|
||||
|
||||
func NewArrTransport(auth Authenticator, inner http.RoundTripper) *ArrTransport {
|
||||
return &ArrTransport{
|
||||
func NewExportarrTransport(inner http.RoundTripper, auth Authenticator) *ExportarrTransport {
|
||||
return &ExportarrTransport{
|
||||
inner: inner,
|
||||
auth: auth,
|
||||
}
|
||||
}
|
||||
|
||||
func (t *ArrTransport) RoundTrip(req *http.Request) (*http.Response, error) {
|
||||
err := t.auth.Auth(req)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Error authenticating request: %w", err)
|
||||
func (t *ExportarrTransport) RoundTrip(req *http.Request) (*http.Response, error) {
|
||||
if t.auth != nil {
|
||||
err := t.auth.Auth(req)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Error authenticating request: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
resp, err := t.inner.RoundTrip(req)
|
||||
@ -58,84 +57,3 @@ func (t *ArrTransport) RoundTrip(req *http.Request) (*http.Response, error) {
|
||||
}
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
type ApiKeyAuth struct {
|
||||
ApiKey string
|
||||
}
|
||||
|
||||
func (a *ApiKeyAuth) Auth(req *http.Request) error {
|
||||
req.Header.Add("X-Api-Key", a.ApiKey)
|
||||
return nil
|
||||
}
|
||||
|
||||
type BasicAuth struct {
|
||||
Username string
|
||||
Password string
|
||||
ApiKey string
|
||||
}
|
||||
|
||||
func (a *BasicAuth) Auth(req *http.Request) error {
|
||||
req.SetBasicAuth(a.Username, a.Password)
|
||||
req.Header.Add("X-Api-Key", a.ApiKey)
|
||||
return nil
|
||||
}
|
||||
|
||||
type FormAuth struct {
|
||||
Username string
|
||||
Password string
|
||||
ApiKey string
|
||||
AuthBaseURL *url.URL
|
||||
Transport http.RoundTripper
|
||||
cookie *http.Cookie
|
||||
}
|
||||
|
||||
func (a *FormAuth) Auth(req *http.Request) error {
|
||||
if a.cookie == nil || a.cookie.Expires.Before(time.Now().Add(-5*time.Minute)) {
|
||||
form := url.Values{
|
||||
"username": {a.Username},
|
||||
"password": {a.Password},
|
||||
"rememberMe": {"on"},
|
||||
}
|
||||
|
||||
u := a.AuthBaseURL.JoinPath("login")
|
||||
u.Query().Add("ReturnUrl", "/general/settings")
|
||||
|
||||
authReq, err := http.NewRequest("POST", u.String(), strings.NewReader(form.Encode()))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to renew FormAuth Cookie: %w", err)
|
||||
}
|
||||
|
||||
authReq.Header.Add("Content-Type", "application/x-www-form-urlencoded")
|
||||
authReq.Header.Add("Content-Length", fmt.Sprintf("%d", len(form.Encode())))
|
||||
|
||||
client := &http.Client{Transport: a.Transport, CheckRedirect: func(req *http.Request, via []*http.Request) error {
|
||||
if req.URL.Query().Get("loginFailed") == "true" {
|
||||
return fmt.Errorf("Failed to renew FormAuth Cookie: Login Failed")
|
||||
}
|
||||
return http.ErrUseLastResponse
|
||||
}}
|
||||
|
||||
authResp, err := client.Do(authReq)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to renew FormAuth Cookie: %w", err)
|
||||
}
|
||||
|
||||
if authResp.StatusCode != 302 {
|
||||
return fmt.Errorf("Failed to renew FormAuth Cookie: Received Status Code %d", authResp.StatusCode)
|
||||
}
|
||||
|
||||
for _, cookie := range authResp.Cookies() {
|
||||
if strings.HasSuffix(cookie.Name, "arrAuth") {
|
||||
copy := *cookie
|
||||
a.cookie = &copy
|
||||
break
|
||||
}
|
||||
return fmt.Errorf("Failed to renew FormAuth Cookie: No Cookie with suffix 'arrAuth' found")
|
||||
}
|
||||
}
|
||||
|
||||
req.AddCookie(a.cookie)
|
||||
req.Header.Add("X-Api-Key", a.ApiKey)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
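For orientation, a minimal hand-wired composition of BaseTransport, an Authenticator, and the renamed ExportarrTransport (not from the commit); the URL and credentials are placeholders, and the snippet again assumes it lives inside this module.

package main

import (
	"log"
	"net/http"

	"github.com/onedr0p/exportarr/internal/client"
)

func main() {
	// Sketch only: placeholder URL and credentials.
	auth := &client.BasicAuth{
		Username: "user",
		Password: "pass",
		ApiKey:   "abcdef0123456789abcdef0123456789",
	}
	httpClient := &http.Client{
		Transport: client.NewExportarrTransport(client.BaseTransport(false), auth),
	}
	resp, err := httpClient.Get("http://localhost:7878/api/v3/health")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	log.Println(resp.Status)
}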
@ -1,235 +1 @@
|
||||
package client
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"net/url"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
var (
|
||||
TEST_USER = "testuser1"
|
||||
TEST_PASS = "hunter2"
|
||||
TEST_KEY = "abcdef1234567890abcdef1234567890"
|
||||
)
|
||||
|
||||
type testRoundTripFunc func(req *http.Request) (*http.Response, error)
|
||||
|
||||
func (t testRoundTripFunc) RoundTrip(req *http.Request) (*http.Response, error) {
|
||||
return t(req)
|
||||
}
|
||||
|
||||
func TestRoundTrip_Auth(t *testing.T) {
|
||||
require := require.New(t)
|
||||
parameters := []struct {
|
||||
name string
|
||||
auth Authenticator
|
||||
testFunc func(req *http.Request) (*http.Response, error)
|
||||
}{
|
||||
{
|
||||
name: "BasicAuth",
|
||||
auth: &BasicAuth{
|
||||
Username: TEST_USER,
|
||||
Password: TEST_PASS,
|
||||
ApiKey: TEST_KEY,
|
||||
},
|
||||
testFunc: func(req *http.Request) (*http.Response, error) {
|
||||
require.NotNil(req, "Request should not be nil")
|
||||
require.NotNil(req.Header, "Request header should not be nil")
|
||||
require.NotEmpty(req.Header.Get("Authorization"), "Authorization header should be set")
|
||||
require.Equal(
|
||||
"Basic "+base64.StdEncoding.EncodeToString([]byte(TEST_USER+":"+TEST_PASS)),
|
||||
req.Header.Get("Authorization"),
|
||||
"Authorization Header set to wrong value",
|
||||
)
|
||||
require.NotEmpty(req.Header.Get("X-Api-Key"), "X-Api-Key header should be set")
|
||||
require.Equal(TEST_KEY, req.Header.Get("X-Api-Key"), "X-Api-Key Header set to wrong value")
|
||||
return &http.Response{
|
||||
StatusCode: 200,
|
||||
Body: nil,
|
||||
Header: make(http.Header),
|
||||
}, nil
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "ApiKey",
|
||||
auth: &ApiKeyAuth{
|
||||
ApiKey: TEST_KEY,
|
||||
},
|
||||
testFunc: func(req *http.Request) (*http.Response, error) {
|
||||
require.NotNil(req, "Request should not be nil")
|
||||
require.NotNil(req.Header, "Request header should not be nil")
|
||||
require.Empty(req.Header.Get("Authorization"), "Authorization header should be empty")
|
||||
require.NotEmpty(req.Header.Get("X-Api-Key"), "X-Api-Key header should be set")
|
||||
require.Equal(TEST_KEY, req.Header.Get("X-Api-Key"), "X-Api-Key Header set to wrong value")
|
||||
return &http.Response{
|
||||
StatusCode: 200,
|
||||
Body: nil,
|
||||
Header: make(http.Header),
|
||||
}, nil
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, param := range parameters {
|
||||
t.Run(param.name, func(t *testing.T) {
|
||||
transport := NewArrTransport(param.auth, testRoundTripFunc(param.testFunc))
|
||||
client := &http.Client{Transport: transport}
|
||||
req, err := http.NewRequest("GET", "http://example.com", nil)
|
||||
require.NoError(err, "Error creating request: %s", err)
|
||||
_, err = client.Do(req)
|
||||
require.NoError(err, "Error sending request: %s", err)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestRoundTrip_FormAuth(t *testing.T) {
|
||||
require := require.New(t)
|
||||
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
require.NotNil(r, "Request should not be nil")
|
||||
require.NotNil(r.Header, "Request header should not be nil")
|
||||
require.Empty(r.Header.Get("Authorization"), "Authorization header should be empty")
|
||||
require.Equal("POST", r.Method, "Request method should be POST")
|
||||
require.Equal("/login", r.URL.Path, "Request URL should be /login")
|
||||
require.Equal("application/x-www-form-urlencoded", r.Header.Get("Content-Type"), "Content-Type should be application/x-www-form-urlencoded")
|
||||
require.Equal(TEST_USER, r.FormValue("username"), "Username should be %s", TEST_USER)
|
||||
require.Equal(TEST_PASS, r.FormValue("password"), "Password should be %s", TEST_PASS)
|
||||
http.SetCookie(w, &http.Cookie{
|
||||
Name: "RadarrAuth",
|
||||
Value: "abcdef1234567890abcdef1234567890",
|
||||
Expires: time.Now().Add(24 * time.Hour),
|
||||
})
|
||||
w.WriteHeader(http.StatusFound)
|
||||
w.Write([]byte("OK"))
|
||||
}))
|
||||
defer ts.Close()
|
||||
tsUrl, _ := url.Parse(ts.URL)
|
||||
auth := &FormAuth{
|
||||
Username: TEST_USER,
|
||||
Password: TEST_PASS,
|
||||
ApiKey: TEST_KEY,
|
||||
AuthBaseURL: tsUrl,
|
||||
Transport: http.DefaultTransport,
|
||||
}
|
||||
transport := NewArrTransport(auth, testRoundTripFunc(func(req *http.Request) (*http.Response, error) {
|
||||
require.NotNil(req, "Request should not be nil")
|
||||
require.NotNil(req.Header, "Request header should not be nil")
|
||||
cookie, err := req.Cookie("RadarrAuth")
|
||||
require.NoError(err, "Cookie should be set")
|
||||
require.Equal(cookie.Value, "abcdef1234567890abcdef1234567890", "Cookie should be set")
|
||||
return &http.Response{
|
||||
StatusCode: http.StatusOK,
|
||||
Body: nil,
|
||||
Header: make(http.Header),
|
||||
}, nil
|
||||
}))
|
||||
client := &http.Client{Transport: transport}
|
||||
req, err := http.NewRequest("GET", "http://example.com", nil)
|
||||
require.NoError(err, "Error creating request: %s", err)
|
||||
_, err = client.Do(req)
|
||||
require.NoError(err, "Error sending request: %s", err)
|
||||
}
|
||||
|
||||
func TestRoundTrip_FormAuthFailure(t *testing.T) {
|
||||
require := require.New(t)
|
||||
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
http.Redirect(w, r, "/?loginFailed=true", http.StatusFound)
|
||||
}))
|
||||
u, _ := url.Parse(ts.URL)
|
||||
auth := &FormAuth{
|
||||
Username: TEST_USER,
|
||||
Password: TEST_PASS,
|
||||
ApiKey: TEST_KEY,
|
||||
AuthBaseURL: u,
|
||||
Transport: http.DefaultTransport,
|
||||
}
|
||||
transport := NewArrTransport(auth, testRoundTripFunc(func(req *http.Request) (*http.Response, error) {
|
||||
return &http.Response{
|
||||
StatusCode: http.StatusOK,
|
||||
Body: nil,
|
||||
Header: make(http.Header),
|
||||
}, nil
|
||||
}))
|
||||
client := &http.Client{Transport: transport}
|
||||
req, err := http.NewRequest("GET", "http://example.com", nil)
|
||||
require.NoError(err, "Error creating request: %s", err)
|
||||
require.NotPanics(func() {
|
||||
_, err = client.Do(req)
|
||||
}, "Form Auth should not panic on auth failure")
|
||||
require.Error(err, "Form Auth Transport should throw an error when auth fails")
|
||||
}
|
||||
|
||||
func TestRoundTrip_Retries(t *testing.T) {
|
||||
parameters := []struct {
|
||||
name string
|
||||
testFunc func(req *http.Request) (*http.Response, error)
|
||||
}{
|
||||
{
|
||||
name: "500",
|
||||
testFunc: func(req *http.Request) (*http.Response, error) {
|
||||
return &http.Response{
|
||||
StatusCode: 500,
|
||||
Body: nil,
|
||||
Header: make(http.Header),
|
||||
}, nil
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Err",
|
||||
testFunc: func(req *http.Request) (*http.Response, error) {
|
||||
return nil, &http.ProtocolError{}
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, param := range parameters {
|
||||
t.Run(param.name, func(t *testing.T) {
|
||||
require := require.New(t)
|
||||
auth := &ApiKeyAuth{
|
||||
ApiKey: TEST_KEY,
|
||||
}
|
||||
attempts := 0
|
||||
transport := NewArrTransport(auth, testRoundTripFunc(func(req *http.Request) (*http.Response, error) {
|
||||
attempts++
|
||||
return param.testFunc(req)
|
||||
}))
|
||||
client := &http.Client{Transport: transport}
|
||||
req, err := http.NewRequest("GET", "http://example.com", nil)
|
||||
require.NoError(err, "Error creating request: %s", err)
|
||||
_, err = client.Do(req)
|
||||
require.Error(err, "Error should be returned from Do()")
|
||||
require.Equal(3, attempts, "Should retry 3 times")
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestRoundTrip_StatusCodes(t *testing.T) {
|
||||
parameters := []int{200, 201, 202, 204, 301, 302, 400, 401, 403, 404, 500, 503}
|
||||
for _, param := range parameters {
|
||||
t.Run(fmt.Sprintf("%d", param), func(t *testing.T) {
|
||||
require := require.New(t)
|
||||
auth := &ApiKeyAuth{
|
||||
ApiKey: TEST_KEY,
|
||||
}
|
||||
transport := NewArrTransport(auth, testRoundTripFunc(func(req *http.Request) (*http.Response, error) {
|
||||
return &http.Response{
|
||||
StatusCode: param,
|
||||
Body: nil,
|
||||
Header: make(http.Header),
|
||||
}, nil
|
||||
}))
|
||||
client := &http.Client{Transport: transport}
|
||||
req, err := http.NewRequest("GET", "http://example.com", nil)
|
||||
require.Nil(err, "Error creating request: %s", err)
|
||||
_, err = client.Do(req)
|
||||
if param >= 200 && param < 300 {
|
||||
require.NoError(err, "Should Not error on 2XX: %s", err)
|
||||
} else {
|
||||
require.Error(err, "Should error on non-2XX")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
179
internal/commands/arr.go
Normal file
@ -0,0 +1,179 @@
package commands
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/onedr0p/exportarr/internal/arr/collector"
|
||||
"github.com/onedr0p/exportarr/internal/arr/config"
|
||||
)
|
||||
|
||||
func init() {
|
||||
config.RegisterArrFlags(radarrCmd.PersistentFlags())
|
||||
config.RegisterArrFlags(sonarrCmd.PersistentFlags())
|
||||
config.RegisterArrFlags(lidarrCmd.PersistentFlags())
|
||||
config.RegisterArrFlags(readarrCmd.PersistentFlags())
|
||||
config.RegisterArrFlags(prowlarrCmd.PersistentFlags())
|
||||
config.RegisterProwlarrFlags(prowlarrCmd.PersistentFlags())
|
||||
|
||||
rootCmd.AddCommand(
|
||||
radarrCmd,
|
||||
sonarrCmd,
|
||||
lidarrCmd,
|
||||
readarrCmd,
|
||||
prowlarrCmd,
|
||||
)
|
||||
}
|
||||
|
||||
func UsageOnError(cmd *cobra.Command, err error) {
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
cmd.Usage()
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
var radarrCmd = &cobra.Command{
|
||||
Use: "radarr",
|
||||
Aliases: []string{"r"},
|
||||
Short: "Prometheus Exporter for Radarr",
|
||||
Long: "Prometheus Exporter for Radarr.",
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
c, err := config.LoadArrConfig(*conf, cmd.PersistentFlags())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
c.App = "radarr"
|
||||
c.ApiVersion = "v3"
|
||||
UsageOnError(cmd, c.Validate())
|
||||
|
||||
serveHttp(func(r *prometheus.Registry) {
|
||||
r.MustRegister(
|
||||
collector.NewRadarrCollector(c),
|
||||
collector.NewQueueCollector(c),
|
||||
collector.NewHistoryCollector(c),
|
||||
collector.NewRootFolderCollector(c),
|
||||
collector.NewSystemStatusCollector(c),
|
||||
collector.NewSystemHealthCollector(c),
|
||||
)
|
||||
})
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
var sonarrCmd = &cobra.Command{
|
||||
Use: "sonarr",
|
||||
Aliases: []string{"s"},
|
||||
Short: "Prometheus Exporter for Sonarr",
|
||||
Long: "Prometheus Exporter for Sonarr.",
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
c, err := config.LoadArrConfig(*conf, cmd.PersistentFlags())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
c.App = "sonarr"
|
||||
c.ApiVersion = "v3"
|
||||
UsageOnError(cmd, c.Validate())
|
||||
|
||||
serveHttp(func(r *prometheus.Registry) {
|
||||
r.MustRegister(
|
||||
collector.NewSonarrCollector(c),
|
||||
collector.NewQueueCollector(c),
|
||||
collector.NewHistoryCollector(c),
|
||||
collector.NewRootFolderCollector(c),
|
||||
collector.NewSystemStatusCollector(c),
|
||||
collector.NewSystemHealthCollector(c),
|
||||
)
|
||||
})
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
var lidarrCmd = &cobra.Command{
|
||||
Use: "lidarr",
|
||||
Short: "Prometheus Exporter for Lidarr",
|
||||
Long: "Prometheus Exporter for Lidarr.",
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
c, err := config.LoadArrConfig(*conf, cmd.PersistentFlags())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
c.App = "lidarr"
|
||||
c.ApiVersion = "v1"
|
||||
UsageOnError(cmd, c.Validate())
|
||||
|
||||
serveHttp(func(r *prometheus.Registry) {
|
||||
r.MustRegister(
|
||||
collector.NewLidarrCollector(c),
|
||||
collector.NewQueueCollector(c),
|
||||
collector.NewHistoryCollector(c),
|
||||
collector.NewRootFolderCollector(c),
|
||||
collector.NewSystemStatusCollector(c),
|
||||
collector.NewSystemHealthCollector(c),
|
||||
)
|
||||
})
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
var readarrCmd = &cobra.Command{
|
||||
Use: "readarr",
|
||||
Aliases: []string{"b"},
|
||||
Short: "Prometheus Exporter for Readarr",
|
||||
Long: "Prometheus Exporter for Readarr.",
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
c, err := config.LoadArrConfig(*conf, cmd.PersistentFlags())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
c.App = "readarr"
|
||||
c.ApiVersion = "v1"
|
||||
UsageOnError(cmd, c.Validate())
|
||||
|
||||
serveHttp(func(r *prometheus.Registry) {
|
||||
r.MustRegister(
|
||||
collector.NewReadarrCollector(c),
|
||||
collector.NewQueueCollector(c),
|
||||
collector.NewHistoryCollector(c),
|
||||
collector.NewRootFolderCollector(c),
|
||||
collector.NewSystemStatusCollector(c),
|
||||
collector.NewSystemHealthCollector(c),
|
||||
)
|
||||
})
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
var prowlarrCmd = &cobra.Command{
|
||||
Use: "prowlarr",
|
||||
Aliases: []string{"p"},
|
||||
Short: "Prometheus Exporter for Prowlarr",
|
||||
Long: "Prometheus Exporter for Prowlarr.",
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
c, err := config.LoadArrConfig(*conf, cmd.PersistentFlags())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
c.App = "prowlarr"
|
||||
c.ApiVersion = "v1"
|
||||
c.LoadProwlarrConfig(cmd.PersistentFlags())
|
||||
if err := c.Prowlarr.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
UsageOnError(cmd, c.Validate())
|
||||
UsageOnError(cmd, c.Prowlarr.Validate())
|
||||
|
||||
serveHttp(func(r *prometheus.Registry) {
|
||||
r.MustRegister(
|
||||
collector.NewProwlarrCollector(c),
|
||||
collector.NewHistoryCollector(c),
|
||||
collector.NewSystemStatusCollector(c),
|
||||
collector.NewSystemHealthCollector(c),
|
||||
)
|
||||
})
|
||||
return nil
|
||||
},
|
||||
}
|
||||
51
internal/commands/arr_test.go
Normal file
@ -0,0 +1,51 @@
package commands
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/onedr0p/exportarr/internal/arr/config"
|
||||
base_config "github.com/onedr0p/exportarr/internal/config"
|
||||
"github.com/spf13/pflag"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestBackwardsCompatibility(t *testing.T) {
|
||||
params := []struct {
|
||||
name string
|
||||
flags *pflag.FlagSet
|
||||
}{
|
||||
{
|
||||
name: "radarr",
|
||||
flags: radarrCmd.PersistentFlags(),
|
||||
},
|
||||
{
|
||||
name: "sonarr",
|
||||
flags: sonarrCmd.PersistentFlags(),
|
||||
},
|
||||
{
|
||||
name: "lidarr",
|
||||
flags: lidarrCmd.PersistentFlags(),
|
||||
},
|
||||
{
|
||||
name: "readarr",
|
||||
flags: readarrCmd.PersistentFlags(),
|
||||
},
|
||||
{
|
||||
name: "prowlarr",
|
||||
flags: prowlarrCmd.PersistentFlags(),
|
||||
},
|
||||
}
|
||||
for _, p := range params {
|
||||
t.Run(p.name, func(t *testing.T) {
|
||||
p.flags.Set("basic-auth-username", "user")
|
||||
p.flags.Set("basic-auth-password", "pass")
|
||||
|
||||
require := require.New(t)
|
||||
config, err := config.LoadArrConfig(base_config.Config{}, p.flags)
|
||||
require.NoError(err)
|
||||
require.Equal("user", config.AuthUsername)
|
||||
require.Equal("pass", config.AuthPassword)
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,34 +0,0 @@
|
||||
package commands
|
||||
|
||||
import (
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
lidarrCollector "github.com/onedr0p/exportarr/internal/collector/lidarr"
|
||||
sharedCollector "github.com/onedr0p/exportarr/internal/collector/shared"
|
||||
)
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(lidarrCmd)
|
||||
}
|
||||
|
||||
var lidarrCmd = &cobra.Command{
|
||||
Use: "lidarr",
|
||||
Short: "Prometheus Exporter for Lidarr",
|
||||
Long: "Prometheus Exporter for Lidarr.",
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
conf.Arr = "lidarr"
|
||||
conf.ApiVersion = "v1"
|
||||
serveHttp(func(r *prometheus.Registry) {
|
||||
r.MustRegister(
|
||||
lidarrCollector.NewLidarrCollector(conf),
|
||||
sharedCollector.NewQueueCollector(conf),
|
||||
sharedCollector.NewHistoryCollector(conf),
|
||||
sharedCollector.NewRootFolderCollector(conf),
|
||||
sharedCollector.NewSystemStatusCollector(conf),
|
||||
sharedCollector.NewSystemHealthCollector(conf),
|
||||
)
|
||||
})
|
||||
return nil
|
||||
},
|
||||
}
|
||||
@ -1,40 +0,0 @@
|
||||
package commands
|
||||
|
||||
import (
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
prowlarrCollector "github.com/onedr0p/exportarr/internal/collector/prowlarr"
|
||||
sharedCollector "github.com/onedr0p/exportarr/internal/collector/shared"
|
||||
)
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(prowlarrCmd)
|
||||
|
||||
prowlarrCmd.PersistentFlags().Bool("backfill", false, "Backfill Prowlarr")
|
||||
prowlarrCmd.PersistentFlags().String("backfill-since-date", "", "Date from which to start Prowlarr Backfill")
|
||||
}
|
||||
|
||||
var prowlarrCmd = &cobra.Command{
|
||||
Use: "prowlarr",
|
||||
Aliases: []string{"p"},
|
||||
Short: "Prometheus Exporter for Prowlarr",
|
||||
Long: "Prometheus Exporter for Prowlarr.",
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
conf.Arr = "prowlarr"
|
||||
conf.ApiVersion = "v1"
|
||||
conf.LoadProwlarrFlags(cmd.PersistentFlags())
|
||||
if err := conf.Prowlarr.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
serveHttp(func(r *prometheus.Registry) {
|
||||
r.MustRegister(
|
||||
prowlarrCollector.NewProwlarrCollector(conf),
|
||||
sharedCollector.NewHistoryCollector(conf),
|
||||
sharedCollector.NewSystemStatusCollector(conf),
|
||||
sharedCollector.NewSystemHealthCollector(conf),
|
||||
)
|
||||
})
|
||||
return nil
|
||||
},
|
||||
}
|
||||
@ -1,34 +0,0 @@
|
||||
package commands
|
||||
|
||||
import (
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
radarrCollector "github.com/onedr0p/exportarr/internal/collector/radarr"
|
||||
sharedCollector "github.com/onedr0p/exportarr/internal/collector/shared"
|
||||
)
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(radarrCmd)
|
||||
}
|
||||
|
||||
var radarrCmd = &cobra.Command{
|
||||
Use: "radarr",
|
||||
Aliases: []string{"r"},
|
||||
Short: "Prometheus Exporter for Radarr",
|
||||
Long: "Prometheus Exporter for Radarr.",
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
conf.Arr = "radarr"
|
||||
serveHttp(func(r *prometheus.Registry) {
|
||||
r.MustRegister(
|
||||
radarrCollector.NewRadarrCollector(conf),
|
||||
sharedCollector.NewQueueCollector(conf),
|
||||
sharedCollector.NewHistoryCollector(conf),
|
||||
sharedCollector.NewRootFolderCollector(conf),
|
||||
sharedCollector.NewSystemStatusCollector(conf),
|
||||
sharedCollector.NewSystemHealthCollector(conf),
|
||||
)
|
||||
})
|
||||
return nil
|
||||
},
|
||||
}
|
||||
@ -1,35 +0,0 @@
|
||||
package commands
|
||||
|
||||
import (
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
readarrCollector "github.com/onedr0p/exportarr/internal/collector/readarr"
|
||||
sharedCollector "github.com/onedr0p/exportarr/internal/collector/shared"
|
||||
)
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(readarrCmd)
|
||||
}
|
||||
|
||||
var readarrCmd = &cobra.Command{
|
||||
Use: "readarr",
|
||||
Aliases: []string{"b"},
|
||||
Short: "Prometheus Exporter for Readarr",
|
||||
Long: "Prometheus Exporter for Readarr.",
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
conf.Arr = "readarr"
|
||||
conf.ApiVersion = "v1"
|
||||
serveHttp(func(r *prometheus.Registry) {
|
||||
r.MustRegister(
|
||||
readarrCollector.NewReadarrCollector(conf),
|
||||
sharedCollector.NewQueueCollector(conf),
|
||||
sharedCollector.NewHistoryCollector(conf),
|
||||
sharedCollector.NewRootFolderCollector(conf),
|
||||
sharedCollector.NewSystemStatusCollector(conf),
|
||||
sharedCollector.NewSystemHealthCollector(conf),
|
||||
)
|
||||
})
|
||||
return nil
|
||||
},
|
||||
}
|
||||
@ -12,7 +12,6 @@ import (
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
"github.com/prometheus/client_golang/prometheus/promhttp"
|
||||
"github.com/spf13/cobra"
|
||||
flag "github.com/spf13/pflag"
|
||||
"go.uber.org/zap"
|
||||
"go.uber.org/zap/zapcore"
|
||||
|
||||
@ -42,25 +41,7 @@ func init() {
|
||||
cobra.OnInitialize(initConfig, initLogger)
|
||||
cobra.OnFinalize(finalizeLogger)
|
||||
|
||||
rootCmd.PersistentFlags().StringP("log-level", "l", "info", "Log level (debug, info, warn, error, fatal, panic)")
|
||||
rootCmd.PersistentFlags().String("log-format", "console", "Log format (console, json)")
|
||||
rootCmd.PersistentFlags().StringP("config", "c", "", "*arr config.xml file for parsing authentication information")
|
||||
rootCmd.PersistentFlags().StringP("url", "u", "", "URL to *arr instance")
|
||||
rootCmd.PersistentFlags().StringP("api-key", "a", "", "API Key for *arr instance")
|
||||
rootCmd.PersistentFlags().String("api-key-file", "", "File containing API Key for *arr instance")
|
||||
rootCmd.PersistentFlags().IntP("port", "p", 0, "Port to listen on")
|
||||
rootCmd.PersistentFlags().StringP("interface", "i", "", "IP address to listen on")
|
||||
rootCmd.PersistentFlags().Bool("disable-ssl-verify", false, "Disable SSL verification")
|
||||
rootCmd.PersistentFlags().String("auth-username", "", "Username for basic or form auth")
|
||||
rootCmd.PersistentFlags().String("auth-password", "", "Password for basic or form auth")
|
||||
rootCmd.PersistentFlags().Bool("form-auth", false, "Use form based authentication")
|
||||
rootCmd.PersistentFlags().Bool("enable-unknown-queue-items", false, "Enable unknown queue items")
|
||||
rootCmd.PersistentFlags().Bool("enable-additional-metrics", false, "Enable additional metrics")
|
||||
|
||||
// Backwards Compatibility - normalize function will hide these from --help. remove in v2.0.0
|
||||
rootCmd.PersistentFlags().String("basic-auth-username", "", "Username for basic or form auth")
|
||||
rootCmd.PersistentFlags().String("basic-auth-password", "", "Password for basic or form auth")
|
||||
rootCmd.PersistentFlags().SetNormalizeFunc(backwardsCompatibilityNormalizeFunc)
|
||||
config.RegisterConfigFlags(rootCmd.PersistentFlags())
|
||||
}
|
||||
|
||||
func initConfig() {
|
||||
@ -162,13 +143,3 @@ func logRequest(handler http.Handler) http.Handler {
|
||||
handler.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
|
||||
func backwardsCompatibilityNormalizeFunc(f *flag.FlagSet, name string) flag.NormalizedName {
|
||||
if name == "basic-auth-username" {
|
||||
return flag.NormalizedName("auth-username")
|
||||
}
|
||||
if name == "basic-auth-password" {
|
||||
return flag.NormalizedName("auth-password")
|
||||
}
|
||||
return flag.NormalizedName(name)
|
||||
}
|
||||
|
||||
@ -1,22 +0,0 @@
|
||||
package commands
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/onedr0p/exportarr/internal/config"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestBackwardsCompatibility(t *testing.T) {
|
||||
flags := rootCmd.PersistentFlags()
|
||||
flags.Set("url", "http://localhost")
|
||||
flags.Set("api-key", "abcdef0123456789abcdef0123456789")
|
||||
flags.Set("basic-auth-username", "user")
|
||||
flags.Set("basic-auth-password", "pass")
|
||||
|
||||
require := require.New(t)
|
||||
config, err := config.LoadConfig(flags)
|
||||
require.NoError(err)
|
||||
require.Equal("user", config.AuthUsername)
|
||||
require.Equal("pass", config.AuthPassword)
|
||||
}
|
||||
37
internal/commands/sabnzbd.go
Normal file
@ -0,0 +1,37 @@
package commands

import (
"github.com/onedr0p/exportarr/internal/sabnzbd/collector"
"github.com/onedr0p/exportarr/internal/sabnzbd/config"
"github.com/prometheus/client_golang/prometheus"
"github.com/spf13/cobra"
)

func init() {
rootCmd.AddCommand(sabnzbdCmd)
}

var sabnzbdCmd = &cobra.Command{
Use: "sabnzbd",
Aliases: []string{"sab"},
Short: "Prometheus Exporter for Sabnzbd",
Long: "Prometheus Exporter for Sabnzbd.",
RunE: func(cmd *cobra.Command, args []string) error {
c, err := config.LoadSabnzbdConfig(*conf)
if err != nil {
return err
}
if err := c.Validate(); err != nil {
return err
}

collector, err := collector.NewSabnzbdCollector(c)
if err != nil {
return err
}
serveHttp(func(r *prometheus.Registry) {
r.MustRegister(collector)
})
return nil
},
}
@ -1,34 +0,0 @@
|
||||
package commands
|
||||
|
||||
import (
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
sharedCollector "github.com/onedr0p/exportarr/internal/collector/shared"
|
||||
sonarrCollector "github.com/onedr0p/exportarr/internal/collector/sonarr"
|
||||
)
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(sonarrCmd)
|
||||
}
|
||||
|
||||
var sonarrCmd = &cobra.Command{
|
||||
Use: "sonarr",
|
||||
Aliases: []string{"s"},
|
||||
Short: "Prometheus Exporter for Sonarr",
|
||||
Long: "Prometheus Exporter for Sonarr.",
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
conf.Arr = "sonarr"
|
||||
serveHttp(func(r *prometheus.Registry) {
|
||||
r.MustRegister(
|
||||
sonarrCollector.NewSonarrCollector(conf),
|
||||
sharedCollector.NewQueueCollector(conf),
|
||||
sharedCollector.NewHistoryCollector(conf),
|
||||
sharedCollector.NewRootFolderCollector(conf),
|
||||
sharedCollector.NewSystemStatusCollector(conf),
|
||||
sharedCollector.NewSystemHealthCollector(conf),
|
||||
)
|
||||
})
|
||||
return nil
|
||||
},
|
||||
}
|
||||
@ -2,14 +2,12 @@ package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/url"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/gookit/validate"
|
||||
"github.com/knadh/koanf/providers/confmap"
|
||||
"github.com/knadh/koanf/providers/env"
|
||||
"github.com/knadh/koanf/providers/file"
|
||||
"github.com/knadh/koanf/providers/posflag"
|
||||
"github.com/knadh/koanf/v2"
|
||||
flag "github.com/spf13/pflag"
|
||||
@ -17,47 +15,27 @@ import (
|
||||
"golang.org/x/exp/slices"
|
||||
)
|
||||
|
||||
func RegisterConfigFlags(flags *flag.FlagSet) {
|
||||
flags.StringP("log-level", "l", "info", "Log level (debug, info, warn, error, fatal, panic)")
|
||||
flags.String("log-format", "console", "Log format (console, json)")
|
||||
flags.StringP("url", "u", "", "URL to *arr instance")
|
||||
flags.StringP("api-key", "a", "", "API Key for *arr instance")
|
||||
flags.String("api-key-file", "", "File containing API Key for *arr instance")
|
||||
flags.Bool("disable-ssl-verify", false, "Disable SSL verification")
|
||||
flags.StringP("interface", "i", "", "IP address to listen on")
|
||||
flags.IntP("port", "p", 0, "Port to listen on")
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
Arr string `koanf:"arr"`
|
||||
LogLevel string `koanf:"log-level" validate:"ValidateLogLevel"`
|
||||
LogFormat string `koanf:"log-format" validate:"in:console,json"`
|
||||
URL string `koanf:"url" validate:"required|url"`
|
||||
ApiKey string `koanf:"api-key" validate:"required|regex:(^[a-z0-9]{32}$)"`
|
||||
ApiKeyFile string `koanf:"api-key-file"`
|
||||
ApiVersion string `koanf:"api-version" validate:"required|in:v3,v4"`
|
||||
XMLConfig string `koanf:"config"`
|
||||
Port int `koanf:"port" validate:"required"`
|
||||
Interface string `koanf:"interface" validate:"required|ip"`
|
||||
DisableSSLVerify bool `koanf:"disable-ssl-verify"`
|
||||
AuthUsername string `koanf:"auth-username"`
|
||||
AuthPassword string `koanf:"auth-password"`
|
||||
FormAuth bool `koanf:"form-auth"`
|
||||
EnableUnknownQueueItems bool `koanf:"enable-unknown-queue-items"`
|
||||
EnableAdditionalMetrics bool `koanf:"enable-additional-metrics"`
|
||||
Prowlarr ProwlarrConfig `koanf:"prowlarr"`
|
||||
k *koanf.Koanf
|
||||
}
|
||||
|
||||
func (c *Config) UseBasicAuth() bool {
|
||||
return !c.FormAuth && c.AuthUsername != "" && c.AuthPassword != ""
|
||||
}
|
||||
|
||||
func (c *Config) UseFormAuth() bool {
|
||||
return c.FormAuth
|
||||
}
|
||||
|
||||
// URLLabel() exists for backwards compatibility -- prior versions built the URL in the client,
|
||||
// meaning that the "url" metric label was missing the Port & base path that the XMLConfig provided.
|
||||
func (c *Config) URLLabel() string {
|
||||
if c.XMLConfig != "" {
|
||||
u, err := url.Parse(c.URL)
|
||||
if err != nil {
|
||||
// Should be unreachable as long as we validate that the URL is valid in LoadConfig/Validate
|
||||
return "Could Not Parse URL"
|
||||
}
|
||||
return u.Scheme + "://" + u.Host
|
||||
}
|
||||
return c.URL
|
||||
LogLevel string `koanf:"log-level" validate:"ValidateLogLevel"`
|
||||
LogFormat string `koanf:"log-format" validate:"in:console,json"`
|
||||
URL string `koanf:"url" validate:"required|url"`
|
||||
ApiKey string `koanf:"api-key" validate:"required"`
|
||||
ApiKeyFile string `koanf:"api-key-file"`
|
||||
Port int `koanf:"port" validate:"required"`
|
||||
Interface string `koanf:"interface" validate:"required|ip"`
|
||||
DisableSSLVerify bool `koanf:"disable-ssl-verify"`
|
||||
k *koanf.Koanf
|
||||
}
|
||||
|
||||
func LoadConfig(flags *flag.FlagSet) (*Config, error) {
|
||||
@ -91,15 +69,6 @@ func LoadConfig(flags *flag.FlagSet) (*Config, error) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// XMLConfig
|
||||
xmlConfig := k.String("config")
|
||||
if xmlConfig != "" {
|
||||
err = k.Load(file.Provider(xmlConfig), XMLParser(), koanf.WithMergeFunc(XMLParser().Merge))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// API Key File
|
||||
apiKeyFile := k.String("api-key-file")
|
||||
if apiKeyFile != "" {
|
||||
@ -134,15 +103,6 @@ func (c *Config) Validate() error {
|
||||
if !v.Validate() {
|
||||
return v.Errors
|
||||
}
|
||||
if c.AuthPassword != "" && c.AuthUsername == "" {
|
||||
return fmt.Errorf("auth-username is required when auth-password is set")
|
||||
}
|
||||
if c.AuthUsername != "" && c.AuthPassword == "" {
|
||||
return fmt.Errorf("auth-password is required when auth-username is set")
|
||||
}
|
||||
if c.FormAuth && (c.AuthUsername == "" || c.AuthPassword == "") {
|
||||
return fmt.Errorf("auth-username and auth-password are required when form-auth is set")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@ -155,21 +115,15 @@ func (c Config) Messages() map[string]string {
|
||||
|
||||
func (c Config) Translates() map[string]string {
|
||||
return validate.MS{
|
||||
"LogLevel": "log-level",
|
||||
"LogFormat": "log-format",
|
||||
"URL": "url",
|
||||
"ApiKey": "api-key",
|
||||
"ApiKeyFile": "api-key-file",
|
||||
"ApiVersion": "api-version",
|
||||
"XMLConfig": "config",
|
||||
"Port": "port",
|
||||
"Interface": "interface",
|
||||
"DisableSSLVerify": "disable-ssl-verify",
|
||||
"AuthUsername": "auth-username",
|
||||
"AuthPassword": "auth-password",
|
||||
"FormAuth": "form-auth",
|
||||
"EnableUnknownQueueItems": "enable-unknown-queue-items",
|
||||
"EnableAdditionalMetrics": "enable-additional-metrics",
|
||||
"LogLevel": "log-level",
|
||||
"LogFormat": "log-format",
|
||||
"URL": "url",
|
||||
"ApiKey": "api-key",
|
||||
"ApiKeyFile": "api-key-file",
|
||||
"ApiVersion": "api-version",
|
||||
"Port": "port",
|
||||
"Interface": "interface",
|
||||
"DisableSSLVerify": "disable-ssl-verify",
|
||||
}
|
||||
}
|
||||
|
||||
@ -180,10 +134,6 @@ func backwardsCompatibilityTransforms(s string) string {
|
||||
return "api-key-file"
|
||||
case "apikey":
|
||||
return "api-key"
|
||||
case "basic-auth-username":
|
||||
return "auth-username"
|
||||
case "basic-auth-password":
|
||||
return "auth-password"
|
||||
default:
|
||||
return s
|
||||
}
|
||||
|
||||
@ -17,11 +17,6 @@ func testFlagSet() *pflag.FlagSet {
|
||||
out.Int("port", 0, "Port to listen on")
|
||||
out.StringP("interface", "i", "", "IP address to listen on")
|
||||
out.Bool("disable-ssl-verify", false, "Disable SSL verification")
|
||||
out.String("auth-username", "", "Username for basic auth")
|
||||
out.String("auth-password", "", "Password for basic auth")
|
||||
out.Bool("form-auth", false, "Use form based authentication")
|
||||
out.Bool("enable-unknown-queue-items", false, "Enable unknown queue items")
|
||||
out.Bool("enable-additional-metrics", false, "Enable additional metrics")
|
||||
return out
|
||||
}
|
||||
func TestLoadConfig_Defaults(t *testing.T) {
|
||||
@ -31,7 +26,6 @@ func TestLoadConfig_Defaults(t *testing.T) {
|
||||
require.NoError(err)
|
||||
require.Equal("info", config.LogLevel)
|
||||
require.Equal("console", config.LogFormat)
|
||||
require.Equal("v3", config.ApiVersion)
|
||||
require.Equal(8081, config.Port)
|
||||
require.Equal("0.0.0.0", config.Interface)
|
||||
}
|
||||
@ -44,11 +38,6 @@ func TestLoadConfig_Flags(t *testing.T) {
|
||||
flags.Set("port", "1234")
|
||||
flags.Set("interface", "1.2.3.4")
|
||||
flags.Set("disable-ssl-verify", "true")
|
||||
flags.Set("auth-username", "user")
|
||||
flags.Set("auth-password", "pass")
|
||||
flags.Set("form-auth", "true")
|
||||
flags.Set("enable-unknown-queue-items", "true")
|
||||
flags.Set("enable-additional-metrics", "true")
|
||||
|
||||
require := require.New(t)
|
||||
config, err := LoadConfig(flags)
|
||||
@ -60,21 +49,10 @@ func TestLoadConfig_Flags(t *testing.T) {
|
||||
require.Equal(1234, config.Port)
|
||||
require.Equal("1.2.3.4", config.Interface)
|
||||
require.True(config.DisableSSLVerify)
|
||||
require.Equal("user", config.AuthUsername)
|
||||
require.Equal("pass", config.AuthPassword)
|
||||
require.True(config.FormAuth)
|
||||
require.True(config.EnableUnknownQueueItems)
|
||||
require.True(config.EnableAdditionalMetrics)
|
||||
// Defaults fall through
|
||||
require.Equal("v3", config.ApiVersion)
|
||||
require.True(config.UseFormAuth())
|
||||
require.False(config.UseBasicAuth())
|
||||
|
||||
flags.Set("form-auth", "false")
|
||||
config, err = LoadConfig(flags)
|
||||
require.NoError(err)
|
||||
require.False(config.UseFormAuth())
|
||||
require.True(config.UseBasicAuth())
|
||||
}
|
||||
|
||||
func TestLoadConfig_Environment(t *testing.T) {
|
||||
@ -86,11 +64,6 @@ func TestLoadConfig_Environment(t *testing.T) {
|
||||
t.Setenv("PORT", "1234")
|
||||
t.Setenv("INTERFACE", "1.2.3.4")
|
||||
t.Setenv("DISABLE_SSL_VERIFY", "true")
|
||||
t.Setenv("AUTH_USERNAME", "user")
|
||||
t.Setenv("AUTH_PASSWORD", "pass")
|
||||
t.Setenv("FORM_AUTH", "true")
|
||||
t.Setenv("ENABLE_UNKNOWN_QUEUE_ITEMS", "true")
|
||||
t.Setenv("ENABLE_ADDITIONAL_METRICS", "true")
|
||||
|
||||
config, err := LoadConfig(&pflag.FlagSet{})
|
||||
require.NoError(err)
|
||||
@ -100,13 +73,6 @@ func TestLoadConfig_Environment(t *testing.T) {
|
||||
require.Equal(1234, config.Port)
|
||||
require.Equal("1.2.3.4", config.Interface)
|
||||
require.True(config.DisableSSLVerify)
|
||||
require.Equal("user", config.AuthUsername)
|
||||
require.Equal("pass", config.AuthPassword)
|
||||
require.True(config.FormAuth)
|
||||
require.True(config.EnableUnknownQueueItems)
|
||||
require.True(config.EnableAdditionalMetrics)
|
||||
// Defaults fall through
|
||||
require.Equal("v3", config.ApiVersion)
|
||||
}
|
||||
|
||||
func TestLoadConfig_PartialEnvironment(t *testing.T) {
|
||||
@ -130,7 +96,6 @@ func TestLoadConfig_PartialEnvironment(t *testing.T) {
|
||||
require.Equal(1234, config.Port)
|
||||
|
||||
// Defaults
|
||||
require.Equal("v3", config.ApiVersion)
|
||||
require.Equal("info", config.LogLevel)
|
||||
require.Equal("console", config.LogFormat)
|
||||
}
|
||||
@ -149,8 +114,6 @@ func TestLoadConfig_BackwardsCompatibility_ApiKeyFile(t *testing.T) {
|
||||
require.NoError(err)
|
||||
|
||||
require.Equal("abcdef0123456789abcdef0123456783", config.ApiKey)
|
||||
require.Equal("user", config.AuthUsername)
|
||||
require.Equal("pass", config.AuthPassword)
|
||||
}
|
||||
|
||||
func TestLoadConfig_BackwardsCompatibility_ApiKey(t *testing.T) {
|
||||
@ -166,26 +129,6 @@ func TestLoadConfig_BackwardsCompatibility_ApiKey(t *testing.T) {
|
||||
|
||||
require.Equal("abcdef0123456789abcdef0123456780", config.ApiKey)
|
||||
}
|
||||
func TestLoadConfig_XMLConfig(t *testing.T) {
|
||||
flags := testFlagSet()
|
||||
flags.Set("config", "test_fixtures/config.test_xml")
|
||||
flags.Set("url", "http://localhost")
|
||||
|
||||
require := require.New(t)
|
||||
config, err := LoadConfig(flags)
|
||||
require.NoError(err)
|
||||
|
||||
require.Equal("http://localhost:7878/asdf", config.URL)
|
||||
require.Equal("abcdef0123456789abcdef0123456789", config.ApiKey)
|
||||
|
||||
// test defaults survive when not set in config
|
||||
require.Equal("info", config.LogLevel)
|
||||
require.Equal("console", config.LogFormat)
|
||||
require.Equal("v3", config.ApiVersion)
|
||||
require.Equal(8081, config.Port)
|
||||
require.Equal("0.0.0.0", config.Interface)
|
||||
|
||||
}
|
||||
|
||||
func TestLoadConfig_ApiKeyFile(t *testing.T) {
|
||||
flags := testFlagSet()
|
||||
@ -214,11 +157,6 @@ func TestLoadConfig_OverrideOrder(t *testing.T) {
|
||||
require.NoError(err)
|
||||
require.Equal("abcdef0123456789abcdef0123456780", config.ApiKey)
|
||||
|
||||
flags.Set("config", "test_fixtures/config.test_xml")
|
||||
config, err = LoadConfig(flags)
|
||||
require.NoError(err)
|
||||
require.Equal("abcdef0123456789abcdef0123456789", config.ApiKey)
|
||||
|
||||
flags.Set("api-key-file", "test_fixtures/api_key")
|
||||
config, err = LoadConfig(flags)
|
||||
require.NoError(err)
|
||||
@ -234,137 +172,43 @@ func TestValidate(t *testing.T) {
|
||||
{
|
||||
name: "good",
|
||||
config: &Config{
|
||||
LogLevel: "debug",
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
ApiVersion: "v3",
|
||||
Port: 1234,
|
||||
Interface: "0.0.0.0",
|
||||
LogLevel: "debug",
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
Port: 1234,
|
||||
Interface: "0.0.0.0",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "good-basic-auth",
|
||||
config: &Config{
|
||||
LogLevel: "debug",
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
ApiVersion: "v3",
|
||||
Port: 1234,
|
||||
Interface: "0.0.0.0",
|
||||
AuthUsername: "user",
|
||||
AuthPassword: "pass",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "good-form-auth",
|
||||
config: &Config{
|
||||
LogLevel: "debug",
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
ApiVersion: "v3",
|
||||
Port: 1234,
|
||||
Interface: "0.0.0.0",
|
||||
AuthUsername: "user",
|
||||
AuthPassword: "pass",
|
||||
FormAuth: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad-api-key",
|
||||
config: &Config{
|
||||
LogLevel: "debug",
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef01234567",
|
||||
ApiVersion: "v3",
|
||||
Port: 1234,
|
||||
Interface: "0.0.0.0",
|
||||
},
|
||||
shouldError: true,
|
||||
},
|
||||
{
|
||||
name: "bad-api-version",
|
||||
config: &Config{
|
||||
LogLevel: "debug",
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
ApiVersion: "v2",
|
||||
Port: 1234,
|
||||
Interface: "0.0.0.0",
|
||||
},
|
||||
shouldError: true,
|
||||
},
|
||||
{
|
||||
name: "missing-port",
|
||||
config: &Config{
|
||||
LogLevel: "debug",
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
ApiVersion: "v3",
|
||||
Port: 0,
|
||||
Interface: "0.0.0.0",
|
||||
LogLevel: "debug",
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
Port: 0,
|
||||
Interface: "0.0.0.0",
|
||||
},
|
||||
shouldError: true,
|
||||
},
|
||||
{
|
||||
name: "bad-interface",
|
||||
config: &Config{
|
||||
LogLevel: "debug",
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
ApiVersion: "v3",
|
||||
Port: 1234,
|
||||
Interface: "0.0.0",
|
||||
LogLevel: "debug",
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
Port: 1234,
|
||||
Interface: "0.0.0",
|
||||
},
|
||||
shouldError: true,
|
||||
},
|
||||
{
|
||||
name: "bad-log-level",
|
||||
config: &Config{
|
||||
LogLevel: "asdf",
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
ApiVersion: "v3",
|
||||
Port: 1234,
|
||||
Interface: "0.0.0.0",
|
||||
},
|
||||
shouldError: true,
|
||||
},
|
||||
{
|
||||
name: "password-needs-username",
|
||||
config: &Config{
|
||||
LogLevel: "debug",
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
ApiVersion: "v3",
|
||||
Port: 1234,
|
||||
Interface: "0.0.0.0",
|
||||
AuthPassword: "password",
|
||||
},
|
||||
shouldError: true,
|
||||
},
|
||||
{
|
||||
name: "username-needs-password",
|
||||
config: &Config{
|
||||
LogLevel: "debug",
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
ApiVersion: "v3",
|
||||
Port: 1234,
|
||||
Interface: "0.0.0.0",
|
||||
AuthUsername: "username",
|
||||
},
|
||||
shouldError: true,
|
||||
},
|
||||
{
|
||||
name: "form-auth-needs-user-and-password",
|
||||
config: &Config{
|
||||
LogLevel: "debug",
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
ApiVersion: "v3",
|
||||
Port: 1234,
|
||||
Interface: "0.0.0.0",
|
||||
FormAuth: true,
|
||||
LogLevel: "asdf",
|
||||
URL: "http://localhost",
|
||||
ApiKey: "abcdef0123456789abcdef0123456789",
|
||||
Port: 1234,
|
||||
Interface: "0.0.0.0",
|
||||
},
|
||||
shouldError: true,
|
||||
},
|
||||
|
||||
17
internal/sabnzbd/auth/auth.go
Normal file
@ -0,0 +1,17 @@
|
||||
package auth
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
)
|
||||
|
||||
type ApiKeyAuth struct {
|
||||
ApiKey string
|
||||
}
|
||||
|
||||
func (a ApiKeyAuth) Auth(req *http.Request) error {
|
||||
q := req.URL.Query()
|
||||
q.Add("apikey", a.ApiKey)
|
||||
q.Add("output", "json")
|
||||
req.URL.RawQuery = q.Encode()
|
||||
return nil
|
||||
}
|
||||
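For illustration, a small self-contained sketch of the query-parameter authentication pattern used by ApiKeyAuth above: the API key and the JSON output flag are appended to the request URL rather than sent as headers. The snippet mirrors the type shown in the diff but does not import the internal package, and the key value is a placeholder.

package main

import (
	"fmt"
	"net/http"
)

// apiKeyAuth mirrors ApiKeyAuth above: it authenticates by adding query
// parameters to the outgoing request instead of setting a header.
type apiKeyAuth struct {
	ApiKey string
}

func (a apiKeyAuth) Auth(req *http.Request) error {
	q := req.URL.Query()
	q.Add("apikey", a.ApiKey)
	q.Add("output", "json")
	req.URL.RawQuery = q.Encode()
	return nil
}

func main() {
	req, _ := http.NewRequest(http.MethodGet, "http://localhost:8080/sabnzbd/api?mode=queue", nil)
	_ = apiKeyAuth{ApiKey: "abcdef0123456789abcdef0123456789"}.Auth(req)
	// Prints: http://localhost:8080/sabnzbd/api?apikey=abcdef0123456789abcdef0123456789&mode=queue&output=json
	fmt.Println(req.URL.String())
}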
99
internal/sabnzbd/collector/cache.go
Normal file
@ -0,0 +1,99 @@
|
||||
package collector
|
||||
|
||||
import (
|
||||
"sync"
|
||||
|
||||
"github.com/onedr0p/exportarr/internal/sabnzbd/model"
|
||||
)
|
||||
|
||||
type ServerStats interface {
|
||||
Update(stat model.ServerStat) ServerStats
|
||||
GetTotal() int
|
||||
GetArticlesTried() int
|
||||
GetArticlesSuccess() int
|
||||
}
|
||||
|
||||
type serverStatCache struct {
|
||||
total int
|
||||
articlesTriedHistorical int
|
||||
articlesTriedToday int
|
||||
articlesSuccessHistorical int
|
||||
articlesSuccessToday int
|
||||
todayKey string
|
||||
}
|
||||
|
||||
func (s serverStatCache) Update(stat model.ServerStat) ServerStats {
|
||||
s.total = stat.Total
|
||||
|
||||
if stat.DayParsed != s.todayKey {
|
||||
s.articlesTriedHistorical += s.articlesTriedToday
|
||||
s.articlesSuccessHistorical += s.articlesSuccessToday
|
||||
s.articlesTriedToday = 0
|
||||
s.articlesSuccessToday = 0
|
||||
s.todayKey = stat.DayParsed
|
||||
}
|
||||
|
||||
s.articlesTriedToday = stat.ArticlesTried
|
||||
s.articlesSuccessToday = stat.ArticlesSuccess
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
func (s serverStatCache) GetTotal() int {
|
||||
return s.total
|
||||
}
|
||||
|
||||
func (s serverStatCache) GetArticlesTried() int {
|
||||
return s.articlesTriedHistorical + s.articlesTriedToday
|
||||
}
|
||||
|
||||
func (s serverStatCache) GetArticlesSuccess() int {
|
||||
return s.articlesSuccessHistorical + s.articlesSuccessToday
|
||||
}
|
||||
|
||||
type ServersStatsCache struct {
|
||||
lock sync.RWMutex
|
||||
Total int
|
||||
Servers map[string]serverStatCache
|
||||
}
|
||||
|
||||
func NewServersStatsCache() *ServersStatsCache {
|
||||
return &ServersStatsCache{
|
||||
Servers: make(map[string]serverStatCache),
|
||||
}
|
||||
}
|
||||
|
||||
func (c *ServersStatsCache) Update(stats model.ServerStats) {
|
||||
c.lock.Lock()
|
||||
defer c.lock.Unlock()
|
||||
|
||||
c.Total = stats.Total
|
||||
|
||||
for name, srv := range stats.Servers {
|
||||
var toCache serverStatCache
|
||||
if cached, ok := c.Servers[name]; ok {
|
||||
toCache = cached
|
||||
}
|
||||
|
||||
c.Servers[name] = toCache.Update(srv).(serverStatCache)
|
||||
}
|
||||
}
|
||||
|
||||
func (c *ServersStatsCache) GetTotal() int {
|
||||
c.lock.RLock()
|
||||
defer c.lock.RUnlock()
|
||||
|
||||
return c.Total
|
||||
}
|
||||
|
||||
func (c *ServersStatsCache) GetServerMap() map[string]ServerStats {
|
||||
c.lock.RLock()
|
||||
defer c.lock.RUnlock()
|
||||
|
||||
ret := make(map[string]ServerStats)
|
||||
for k, v := range c.Servers {
|
||||
ret[k] = v
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
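A self-contained sketch of the day-rollover bookkeeping implemented by serverStatCache above, under the assumption stated by the code: SABnzbd reports per-day article counters, so the previous day's values are folded into a historical total whenever the reported day key changes. The snippet re-implements the logic locally rather than importing the internal package.

package main

import "fmt"

// statCache mirrors serverStatCache above: today's counters are replaced on
// every update, and folded into the historical totals when the day changes.
type statCache struct {
	triedHistorical, triedToday     int
	successHistorical, successToday int
	todayKey                        string
}

func (s statCache) update(day string, tried, success int) statCache {
	if day != s.todayKey {
		s.triedHistorical += s.triedToday
		s.successHistorical += s.successToday
		s.todayKey = day
	}
	s.triedToday = tried
	s.successToday = success
	return s
}

func (s statCache) articlesTried() int   { return s.triedHistorical + s.triedToday }
func (s statCache) articlesSuccess() int { return s.successHistorical + s.successToday }

func main() {
	var c statCache
	c = c.update("2020-01-01", 2, 2)
	c = c.update("2020-01-02", 6, 6) // new day: the 2 articles from 2020-01-01 are retained
	fmt.Println(c.articlesTried(), c.articlesSuccess()) // 8 8, matching the DifferentDay test above
}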
244
internal/sabnzbd/collector/cache_test.go
Normal file
@ -0,0 +1,244 @@
|
||||
package collector
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/onedr0p/exportarr/internal/sabnzbd/model"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestUpdateServerStatsCache_SameDay(t *testing.T) {
|
||||
require := require.New(t)
|
||||
cache := NewServersStatsCache()
|
||||
cache.Update(model.ServerStats{
|
||||
Total: 1,
|
||||
Servers: map[string]model.ServerStat{
|
||||
"server1": {
|
||||
Total: 1,
|
||||
ArticlesTried: 2,
|
||||
ArticlesSuccess: 2,
|
||||
DayParsed: "2020-01-01",
|
||||
},
|
||||
"server2": {
|
||||
Total: 2,
|
||||
ArticlesTried: 4,
|
||||
ArticlesSuccess: 4,
|
||||
DayParsed: "2020-01-01",
|
||||
},
|
||||
},
|
||||
})
|
||||
require.Equal(1, cache.GetTotal())
|
||||
m := cache.GetServerMap()
|
||||
require.Equal(2, len(m))
|
||||
|
||||
server1 := m["server1"]
|
||||
require.Equal(1, server1.GetTotal())
|
||||
require.Equal(2, server1.GetArticlesTried())
|
||||
require.Equal(2, server1.GetArticlesSuccess())
|
||||
|
||||
server2 := m["server2"]
|
||||
require.Equal(2, server2.GetTotal())
|
||||
require.Equal(4, server2.GetArticlesTried())
|
||||
require.Equal(4, server2.GetArticlesSuccess())
|
||||
cache.Update(model.ServerStats{
|
||||
Total: 2,
|
||||
Servers: map[string]model.ServerStat{
|
||||
"server1": {
|
||||
Total: 2,
|
||||
ArticlesTried: 6,
|
||||
ArticlesSuccess: 6,
|
||||
DayParsed: "2020-01-01",
|
||||
},
|
||||
"server2": {
|
||||
Total: 3,
|
||||
ArticlesTried: 8,
|
||||
ArticlesSuccess: 8,
|
||||
DayParsed: "2020-01-01",
|
||||
},
|
||||
},
|
||||
})
|
||||
require.Equal(2, cache.GetTotal())
|
||||
m = cache.GetServerMap()
|
||||
require.Equal(2, len(m))
|
||||
|
||||
server1 = m["server1"]
|
||||
require.Equal(2, server1.GetTotal())
|
||||
require.Equal(6, server1.GetArticlesTried())
|
||||
require.Equal(6, server1.GetArticlesSuccess())
|
||||
|
||||
server2 = m["server2"]
|
||||
require.Equal(3, server2.GetTotal())
|
||||
require.Equal(8, server2.GetArticlesTried())
|
||||
require.Equal(8, server2.GetArticlesSuccess())
|
||||
}
|
||||
|
||||
func TestUpdateServerStatsCache_DifferentDay(t *testing.T) {
|
||||
require := require.New(t)
|
||||
cache := NewServersStatsCache()
|
||||
cache.Update(model.ServerStats{
|
||||
Total: 1,
|
||||
Servers: map[string]model.ServerStat{
|
||||
"server1": {
|
||||
Total: 1,
|
||||
ArticlesTried: 2,
|
||||
ArticlesSuccess: 2,
|
||||
DayParsed: "2020-01-01",
|
||||
},
|
||||
"server2": {
|
||||
Total: 2,
|
||||
ArticlesTried: 4,
|
||||
ArticlesSuccess: 4,
|
||||
DayParsed: "2020-01-01",
|
||||
},
|
||||
},
|
||||
})
|
||||
require.Equal(1, cache.GetTotal())
|
||||
m := cache.GetServerMap()
|
||||
require.Equal(2, len(m))
|
||||
|
||||
server1 := m["server1"]
|
||||
require.Equal(1, server1.GetTotal())
|
||||
require.Equal(2, server1.GetArticlesTried())
|
||||
require.Equal(2, server1.GetArticlesSuccess())
|
||||
|
||||
server2 := m["server2"]
|
||||
require.Equal(2, server2.GetTotal())
|
||||
require.Equal(4, server2.GetArticlesTried())
|
||||
require.Equal(4, server2.GetArticlesSuccess())
|
||||
cache.Update(model.ServerStats{
|
||||
Total: 2,
|
||||
Servers: map[string]model.ServerStat{
|
||||
"server1": {
|
||||
Total: 2,
|
||||
ArticlesTried: 6,
|
||||
ArticlesSuccess: 6,
|
||||
DayParsed: "2020-01-02",
|
||||
},
|
||||
"server2": {
|
||||
Total: 3,
|
||||
ArticlesTried: 8,
|
||||
ArticlesSuccess: 8,
|
||||
DayParsed: "2020-01-02",
|
||||
},
|
||||
},
|
||||
})
|
||||
require.Equal(2, cache.GetTotal())
|
||||
m = cache.GetServerMap()
|
||||
require.Equal(2, len(m))
|
||||
|
||||
server1 = m["server1"]
|
||||
require.Equal(2, server1.GetTotal())
|
||||
require.Equal(8, server1.GetArticlesTried())
|
||||
require.Equal(8, server1.GetArticlesSuccess())
|
||||
|
||||
server2 = m["server2"]
|
||||
require.Equal(3, server2.GetTotal())
|
||||
require.Equal(12, server2.GetArticlesTried())
|
||||
require.Equal(12, server2.GetArticlesSuccess())
|
||||
}
|
||||
|
||||
func TestNewServerStatsCache_SetsServers(t *testing.T) {
|
||||
require := require.New(t)
|
||||
cache := NewServersStatsCache()
|
||||
require.NotNil(cache.Servers)
|
||||
}
|
||||
|
||||
func TestUpdateServerStatsCache(t *testing.T) {
|
||||
require := require.New(t)
|
||||
cache := NewServersStatsCache()
|
||||
cache.Update(model.ServerStats{
|
||||
Total: 1,
|
||||
Servers: map[string]model.ServerStat{
|
||||
"server1": {
|
||||
Total: 1,
|
||||
ArticlesTried: 2,
|
||||
ArticlesSuccess: 2,
|
||||
DayParsed: "2020-01-01",
|
||||
},
|
||||
"server2": {
|
||||
Total: 2,
|
||||
ArticlesTried: 4,
|
||||
ArticlesSuccess: 4,
|
||||
DayParsed: "2020-01-01",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
server1 := cache.Servers["server1"]
|
||||
server2 := cache.Servers["server2"]
|
||||
|
||||
require.Equal(1, server1.GetTotal())
|
||||
require.Equal(2, server1.GetArticlesTried())
|
||||
require.Equal(2, server1.GetArticlesSuccess())
|
||||
require.Equal(2, server2.GetTotal())
|
||||
require.Equal(4, server2.GetArticlesTried())
|
||||
require.Equal(4, server2.GetArticlesSuccess())
|
||||
|
||||
cache.Update(model.ServerStats{
|
||||
Total: 2,
|
||||
Servers: map[string]model.ServerStat{
|
||||
"server1": {
|
||||
Total: 3,
|
||||
ArticlesTried: 6,
|
||||
ArticlesSuccess: 6,
|
||||
DayParsed: "2020-01-01",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
server1 = cache.Servers["server1"]
|
||||
server2 = cache.Servers["server2"]
|
||||
|
||||
require.Equal(2, cache.GetTotal())
|
||||
require.Equal(3, server1.GetTotal())
|
||||
require.Equal(6, server1.GetArticlesTried())
|
||||
require.Equal(6, server1.GetArticlesSuccess())
|
||||
require.Equal(2, server2.GetTotal())
|
||||
require.Equal(4, server2.GetArticlesTried())
|
||||
require.Equal(4, server2.GetArticlesSuccess())
|
||||
}
|
||||
|
||||
func TestGetServerMap_ReturnsCopy(t *testing.T) {
|
||||
// It's important to return a true copy to maintain thread safety
|
||||
require := require.New(t)
|
||||
|
||||
cache := NewServersStatsCache()
|
||||
cache.Update(model.ServerStats{
|
||||
Total: 1,
|
||||
Servers: map[string]model.ServerStat{
|
||||
"server1": {
|
||||
Total: 1,
|
||||
ArticlesTried: 2,
|
||||
ArticlesSuccess: 2,
|
||||
DayParsed: "2020-01-01",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
serverMap := cache.GetServerMap()
|
||||
|
||||
for k, v := range serverMap {
|
||||
require.Equal(cache.Servers[k], v)
|
||||
}
|
||||
|
||||
require.NotSame(&cache.Servers, &serverMap)
|
||||
|
||||
cache.Update(model.ServerStats{
|
||||
Total: 2,
|
||||
Servers: map[string]model.ServerStat{
|
||||
"server1": {
|
||||
Total: 3,
|
||||
ArticlesTried: 6,
|
||||
ArticlesSuccess: 6,
|
||||
DayParsed: "2020-01-01",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
cServer := cache.Servers["server1"]
|
||||
sServer := serverMap["server1"]
|
||||
|
||||
require.NotEqual(cServer.GetTotal(), sServer.GetTotal())
|
||||
require.NotEqual(cServer.GetArticlesTried(), sServer.GetArticlesTried())
|
||||
require.NotEqual(cServer.GetArticlesSuccess(), sServer.GetArticlesSuccess())
|
||||
}
|
||||
375
internal/sabnzbd/collector/collector.go
Normal file
@ -0,0 +1,375 @@
|
||||
package collector
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/onedr0p/exportarr/internal/client"
|
||||
"github.com/onedr0p/exportarr/internal/sabnzbd/auth"
|
||||
"github.com/onedr0p/exportarr/internal/sabnzbd/config"
|
||||
"github.com/onedr0p/exportarr/internal/sabnzbd/model"
|
||||
"golang.org/x/sync/errgroup"
|
||||
)
|
||||
|
||||
var METRIC_PREFIX = "sabnzbd"
|
||||
|
||||
var (
|
||||
downloadedBytes = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "downloaded_bytes"),
|
||||
"Total Bytes Downloaded by SABnzbd",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
serverDownloadedBytes = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "server_downloaded_bytes"),
|
||||
"Total Bytes Downloaded from UseNet Server",
|
||||
[]string{"target", "server"},
|
||||
nil,
|
||||
)
|
||||
serverArticlesTotal = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "server_articles_total"),
|
||||
"Total Articles Attempted to download from UseNet Server",
|
||||
[]string{"target", "server"},
|
||||
nil,
|
||||
)
|
||||
serverArticlesSuccess = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "server_articles_success"),
|
||||
"Total Articles Successfully downloaded from UseNet Server",
|
||||
[]string{"target", "server"},
|
||||
nil,
|
||||
)
|
||||
info = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "info"),
|
||||
"Info about the target SabnzbD instance",
|
||||
[]string{"target", "version", "status"},
|
||||
nil,
|
||||
)
|
||||
paused = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "paused"),
|
||||
"Is the target SabnzbD instance paused",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
pausedAll = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "paused_all"),
|
||||
"Are all the target SabnzbD instance's queues paused",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
pauseDuration = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "pause_duration_seconds"),
|
||||
"Duration until the SabnzbD instance is unpaused",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
diskUsed = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "disk_used_bytes"),
|
||||
"Used Bytes Used on the SabnzbD instance's disk",
|
||||
[]string{"target", "folder"},
|
||||
nil,
|
||||
)
|
||||
diskTotal = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "disk_total_bytes"),
|
||||
"Total Bytes on the SabnzbD instance's disk",
|
||||
[]string{"target", "folder"},
|
||||
nil,
|
||||
)
|
||||
remainingQuota = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "remaining_quota_bytes"),
|
||||
"Total Bytes Left in the SabnzbD instance's quota",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
quota = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "quota_bytes"),
|
||||
"Total Bytes in the SabnzbD instance's quota",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
cachedArticles = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "article_cache_articles"),
|
||||
"Total Articles Cached in the SabnzbD instance",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
cachedBytes = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "article_cache_bytes"),
|
||||
"Total Bytes Cached in the SabnzbD instance Article Cache",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
speed = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "speed_bps"),
|
||||
"Total Bytes Downloaded per Second by the SabnzbD instance",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
bytesRemaining = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "remaining_bytes"),
|
||||
"Total Bytes Remaining to Download by the SabnzbD instance",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
bytesTotal = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "total_bytes"),
|
||||
"Total Bytes in queue to Download by the SabnzbD instance",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
queueLength = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "queue_length"),
|
||||
"Total Number of Items in the SabnzbD instance's queue",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
status = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "status"),
|
||||
"Status of the SabnzbD instance's queue (0=Unknown, 1=Idle, 2=Paused, 3=Downloading)",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
timeEstimate = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "time_estimate_seconds"),
|
||||
"Estimated Time Remaining to Download by the SabnzbD instance",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
warnings = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "warnings"),
|
||||
"Total Warnings in the SabnzbD instance's queue",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
scrapeDuration = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "scrape_duration_seconds"),
|
||||
"Duration of the SabnzbD scrape",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
queueQueryDuration = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "queue_query_duration_seconds"),
|
||||
"Duration querying the queue endpoint of SabnzbD",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
serverStatsQueryDuration = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(METRIC_PREFIX, "", "server_stats_query_duration_seconds"),
|
||||
"Duration querying the server_stats endpoint of SabnzbD",
|
||||
[]string{"target"},
|
||||
nil,
|
||||
)
|
||||
)
|
||||
|
||||
func boolToFloat(b bool) float64 {
|
||||
if b {
|
||||
return 1
|
||||
}
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
type SabnzbdCollector struct {
|
||||
cache *ServersStatsCache
|
||||
client *client.Client
|
||||
baseURL string
|
||||
}
|
||||
|
||||
// TODO: Add a sab-specific config struct to abstract away the config parsing
|
||||
func NewSabnzbdCollector(config *config.SabnzbdConfig) (*SabnzbdCollector, error) {
|
||||
auther := auth.ApiKeyAuth{ApiKey: config.ApiKey}
|
||||
client, err := client.NewClient(config.URL, config.DisableSSLVerify, auther)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to build client: %w", err)
|
||||
}
|
||||
|
||||
return &SabnzbdCollector{
|
||||
cache: NewServersStatsCache(),
|
||||
client: client,
|
||||
baseURL: config.URL,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (s *SabnzbdCollector) doRequest(mode string, target interface{}) error {
|
||||
return s.client.DoRequest("/sabnzbd/api", target, map[string]string{"mode": mode})
|
||||
}
|
||||
|
||||
func (s *SabnzbdCollector) getQueueStats() (*model.QueueStats, error) {
|
||||
var stats = &model.QueueStats{}
|
||||
|
||||
err := s.doRequest("queue", stats)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to get queue stats: %w", err)
|
||||
}
|
||||
|
||||
return stats, nil
|
||||
}
|
||||
|
||||
func (s *SabnzbdCollector) getServerStats() (*model.ServerStats, error) {
|
||||
var stats = &model.ServerStats{}
|
||||
err := s.doRequest("server_stats", stats)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to get server stats: %w", err)
|
||||
}
|
||||
return stats, nil
|
||||
}
|
||||
|
||||
func (e *SabnzbdCollector) Describe(ch chan<- *prometheus.Desc) {
|
||||
ch <- downloadedBytes
|
||||
ch <- info
|
||||
ch <- paused
|
||||
ch <- pausedAll
|
||||
ch <- pauseDuration
|
||||
ch <- diskUsed
|
||||
ch <- diskTotal
|
||||
ch <- remainingQuota
|
||||
ch <- quota
|
||||
ch <- cachedArticles
|
||||
ch <- cachedBytes
|
||||
ch <- speed
|
||||
ch <- bytesRemaining
|
||||
ch <- bytesTotal
|
||||
ch <- queueLength
|
||||
ch <- status
|
||||
ch <- timeEstimate
|
||||
ch <- serverDownloadedBytes
|
||||
ch <- serverArticlesTotal
|
||||
ch <- serverArticlesSuccess
|
||||
ch <- warnings
|
||||
ch <- scrapeDuration
|
||||
ch <- queueQueryDuration
|
||||
ch <- serverStatsQueryDuration
|
||||
}
|
||||
|
||||
func (e *SabnzbdCollector) Collect(ch chan<- prometheus.Metric) {
|
||||
log := zap.S().With("collector", "sabnzbd")
|
||||
start := time.Now()
|
||||
defer func() { //nolint:wsl
|
||||
ch <- prometheus.MustNewConstMetric(scrapeDuration, prometheus.GaugeValue, time.Since(start).Seconds(), e.baseURL)
|
||||
}()
|
||||
|
||||
queueStats := &model.QueueStats{}
|
||||
serverStats := &model.ServerStats{}
|
||||
|
||||
g := new(errgroup.Group)
|
||||
|
||||
g.Go(func() error {
|
||||
qStart := time.Now()
|
||||
defer func() { //nolint:wsl
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
queueQueryDuration, prometheus.GaugeValue, time.Since(qStart).Seconds(), e.baseURL)
|
||||
}()
|
||||
|
||||
var err error
|
||||
queueStats, err = e.getQueueStats()
|
||||
if err != nil {
|
||||
log.Errorw("Failed to get queue stats", "error", err)
|
||||
return fmt.Errorf("failed to get queue stats: %w", err)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
g.Go(func() error {
|
||||
sStart := time.Now()
|
||||
defer func() { //nolint:wsl
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
serverStatsQueryDuration, prometheus.GaugeValue, time.Since(sStart).Seconds(), e.baseURL)
|
||||
}()
|
||||
|
||||
var err error
|
||||
serverStats, err = e.getServerStats()
|
||||
if err != nil {
|
||||
log.Errorw("Failed to get server stats", "error", err)
|
||||
return fmt.Errorf("failed to get server stats: %w", err)
|
||||
}
|
||||
|
||||
e.cache.Update(*serverStats)
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
if err := g.Wait(); err != nil {
|
||||
log.Errorw("Failed to get stats", "error", err)
|
||||
ch <- prometheus.NewInvalidMetric(
|
||||
prometheus.NewDesc("sabnzbd_collector_error", "Error getting stats", nil, prometheus.Labels{"target": e.baseURL}),
|
||||
err,
|
||||
)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
downloadedBytes, prometheus.CounterValue, float64(e.cache.GetTotal()), e.baseURL,
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
info, prometheus.GaugeValue, 1, e.baseURL, queueStats.Version, queueStats.Status.String(),
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
paused, prometheus.GaugeValue, boolToFloat(queueStats.Paused), e.baseURL,
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
pausedAll, prometheus.GaugeValue, boolToFloat(queueStats.PausedAll), e.baseURL,
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
pauseDuration, prometheus.GaugeValue, queueStats.PauseDuration.Seconds(), e.baseURL,
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
diskUsed, prometheus.GaugeValue, queueStats.DownloadDirDiskspaceUsed, e.baseURL, "download",
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
diskUsed, prometheus.GaugeValue, queueStats.CompletedDirDiskspaceUsed, e.baseURL, "complete",
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
diskTotal, prometheus.GaugeValue, queueStats.DownloadDirDiskspaceTotal, e.baseURL, "download",
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
diskTotal, prometheus.GaugeValue, queueStats.CompletedDirDiskspaceTotal, e.baseURL, "complete",
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
remainingQuota, prometheus.GaugeValue, queueStats.RemainingQuota, e.baseURL,
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
quota, prometheus.GaugeValue, queueStats.Quota, e.baseURL,
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
cachedArticles, prometheus.GaugeValue, queueStats.CacheArt, e.baseURL,
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
cachedBytes, prometheus.GaugeValue, queueStats.CacheSize, e.baseURL,
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
speed, prometheus.GaugeValue, queueStats.Speed, e.baseURL,
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
bytesRemaining, prometheus.GaugeValue, queueStats.RemainingSize, e.baseURL,
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
bytesTotal, prometheus.GaugeValue, queueStats.Size, e.baseURL,
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
queueLength, prometheus.GaugeValue, queueStats.ItemsInQueue, e.baseURL,
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
status, prometheus.GaugeValue, queueStats.Status.Float64(), e.baseURL,
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
timeEstimate, prometheus.GaugeValue, queueStats.TimeEstimate.Seconds(), e.baseURL,
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
warnings, prometheus.GaugeValue, queueStats.HaveWarnings, e.baseURL,
|
||||
)
|
||||
|
||||
for name, stats := range e.cache.GetServerMap() {
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
serverDownloadedBytes, prometheus.CounterValue, float64(stats.GetTotal()), e.baseURL, name,
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
serverArticlesTotal, prometheus.CounterValue, float64(stats.GetArticlesTried()), e.baseURL, name,
|
||||
)
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
serverArticlesSuccess, prometheus.CounterValue, float64(stats.GetArticlesSuccess()), e.baseURL, name,
|
||||
)
|
||||
}
|
||||
}
|
||||
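As a usage note, a Collector like the one above is typically registered with a Prometheus registry and exposed over HTTP. Below is a minimal sketch using the standard client_golang API; the exporter's actual server wiring lives elsewhere in the repository and may differ, and the placeholder counter merely stands in for the SabnzbdCollector.

package main

import (
	"log"
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

// serveMetrics registers any prometheus.Collector and serves it on /metrics.
func serveMetrics(c prometheus.Collector, addr string) error {
	registry := prometheus.NewRegistry()
	registry.MustRegister(c)

	mux := http.NewServeMux()
	mux.Handle("/metrics", promhttp.HandlerFor(registry, promhttp.HandlerOpts{}))
	return http.ListenAndServe(addr, mux)
}

func main() {
	// Placeholder collector; in the exporter this would be the value returned by NewSabnzbdCollector.
	example := prometheus.NewCounter(prometheus.CounterOpts{Name: "example_total", Help: "placeholder counter"})
	log.Fatal(serveMetrics(example, ":8081"))
}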
116
internal/sabnzbd/collector/collector_test.go
Normal file
@ -0,0 +1,116 @@
|
||||
package collector
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/onedr0p/exportarr/internal/sabnzbd/config"
|
||||
"github.com/prometheus/client_golang/prometheus/testutil"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
const API_KEY = "abcdef0123456789abcdef0123456789"
|
||||
|
||||
func newTestServer(t *testing.T, fn func(http.ResponseWriter, *http.Request)) (*httptest.Server, error) {
|
||||
queue, err := os.ReadFile("../test_fixtures/queue.json")
|
||||
require.NoError(t, err)
|
||||
serverStats, err := os.ReadFile("../test_fixtures/server_stats.json")
|
||||
require.NoError(t, err)
|
||||
|
||||
return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
fn(w, r)
|
||||
require.NotEmpty(t, r.URL.Query().Get("mode"))
|
||||
switch r.URL.Query().Get("mode") {
|
||||
case "queue":
|
||||
w.WriteHeader(http.StatusOK)
|
||||
_, err := w.Write(queue)
|
||||
require.NoError(t, err)
|
||||
case "server_stats":
|
||||
w.WriteHeader(http.StatusOK)
|
||||
_, err := w.Write(serverStats)
|
||||
require.NoError(t, err)
|
||||
}
|
||||
})), nil
|
||||
}
|
||||
|
||||
func TestCollect(t *testing.T) {
|
||||
require := require.New(t)
|
||||
ts, err := newTestServer(t, func(w http.ResponseWriter, r *http.Request) {
|
||||
require.Equal("/sabnzbd/api", r.URL.Path)
|
||||
require.Equal(API_KEY, r.URL.Query().Get("apikey"))
|
||||
require.Equal("json", r.URL.Query().Get("output"))
|
||||
})
|
||||
require.NoError(err)
|
||||
|
||||
defer ts.Close()
|
||||
|
||||
config := &config.SabnzbdConfig{
|
||||
URL: ts.URL,
|
||||
ApiKey: API_KEY,
|
||||
}
|
||||
collector, err := NewSabnzbdCollector(config)
|
||||
require.NoError(err)
|
||||
|
||||
b, err := os.ReadFile("../test_fixtures/expected_metrics.txt")
|
||||
require.NoError(err)
|
||||
|
||||
expected := strings.Replace(string(b), "http://127.0.0.1:39965", ts.URL, -1)
|
||||
f := strings.NewReader(expected)
|
||||
|
||||
require.NotPanics(func() {
|
||||
err = testutil.CollectAndCompare(collector, f,
|
||||
"sabnzbd_downloaded_bytes",
|
||||
"sabnzbd_server_downloaded_bytes",
|
||||
"sabnzbd_server_articles_total",
|
||||
"sabnzbd_server_articles_success",
|
||||
"sabnzbd_info",
|
||||
"sabnzbd_paused",
|
||||
"sabnzbd_paused_all",
|
||||
"sabnzbd_pause_duration_seconds",
|
||||
"sabnzbd_disk_used_bytes",
|
||||
"sabnzbd_disk_total_bytes",
|
||||
"sabnzbd_remaining_quota_bytes",
|
||||
"sabnzbd_quota_bytes",
|
||||
"sabnzbd_article_cache_articles",
|
||||
"sabnzbd_article_cache_bytes",
|
||||
"sabnzbd_speed_bps",
|
||||
"sabnzbd_remaining_bytes",
|
||||
"sabnzbd_total_bytes",
|
||||
"sabnzbd_queue_size",
|
||||
"sabnzbd_status",
|
||||
"sabnzbd_time_estimate_seconds",
|
||||
"sabnzbd_queue_length",
|
||||
"sabnzbd_warnings",
|
||||
)
|
||||
})
|
||||
require.NoError(err)
|
||||
|
||||
require.GreaterOrEqual(29, testutil.CollectAndCount(collector))
|
||||
}
|
||||
|
||||
func TestCollect_FailureDoesntPanic(t *testing.T) {
|
||||
require := require.New(t)
|
||||
|
||||
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
}))
|
||||
defer ts.Close()
|
||||
|
||||
config := &config.SabnzbdConfig{
|
||||
URL: ts.URL,
|
||||
ApiKey: API_KEY,
|
||||
}
|
||||
collector, err := NewSabnzbdCollector(config)
|
||||
require.NoError(err)
|
||||
|
||||
f := strings.NewReader("")
|
||||
|
||||
require.NotPanics(func() {
|
||||
err = testutil.CollectAndCompare(collector, f)
|
||||
require.Error(err)
|
||||
}, "Collecting metrics should not panic on failure")
|
||||
require.Error(err)
|
||||
}
|
||||
29
internal/sabnzbd/config/config.go
Normal file
@ -0,0 +1,29 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"github.com/gookit/validate"
|
||||
base_config "github.com/onedr0p/exportarr/internal/config"
|
||||
)
|
||||
|
||||
type SabnzbdConfig struct {
|
||||
URL string `validate:"required|url"`
|
||||
ApiKey string `validate:"required"`
|
||||
DisableSSLVerify bool
|
||||
}
|
||||
|
||||
func LoadSabnzbdConfig(conf base_config.Config) (*SabnzbdConfig, error) {
|
||||
ret := &SabnzbdConfig{
|
||||
URL: conf.URL,
|
||||
ApiKey: conf.ApiKey,
|
||||
DisableSSLVerify: conf.DisableSSLVerify,
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (c *SabnzbdConfig) Validate() error {
|
||||
v := validate.Struct(c)
|
||||
if !v.Validate() {
|
||||
return v.Errors
|
||||
}
|
||||
return nil
|
||||
}
|
||||
312
internal/sabnzbd/model/model.go
Normal file
@ -0,0 +1,312 @@
|
||||
package model
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Status int
|
||||
|
||||
const (
|
||||
KB = 1024
|
||||
MB = 1024 * KB
|
||||
)
|
||||
|
||||
const (
|
||||
UNKNOWN Status = iota
|
||||
IDLE
|
||||
PAUSED
|
||||
DOWNLOADING
|
||||
)
|
||||
|
||||
func (s Status) Float64() float64 {
|
||||
return float64(s)
|
||||
}
|
||||
func (s Status) String() string {
|
||||
switch s {
|
||||
case IDLE:
|
||||
return "Idle"
|
||||
case PAUSED:
|
||||
return "Paused"
|
||||
case DOWNLOADING:
|
||||
return "Downloading"
|
||||
default:
|
||||
return "Unknown"
|
||||
}
|
||||
}
|
||||
|
||||
func StatusFromString(s string) Status {
|
||||
switch s {
|
||||
case "Idle":
|
||||
return IDLE
|
||||
case "Paused":
|
||||
return PAUSED
|
||||
case "Downloading":
|
||||
return DOWNLOADING
|
||||
default:
|
||||
return UNKNOWN
|
||||
}
|
||||
}
|
||||
|
||||
type ServerStats struct {
|
||||
Total int `json:"total"` // Total Data Downloaded in bytes
|
||||
Servers map[string]ServerStat `json:"servers"`
|
||||
}
|
||||
|
||||
type ServerStat struct {
|
||||
Total int // Total Data Downloaded in bytes
|
||||
ArticlesTried int // Number of Articles Tried
|
||||
ArticlesSuccess int // Number of Articles Successfully Downloaded
|
||||
DayParsed string // Last Date Parsed
|
||||
}
|
||||
|
||||
func (s *ServerStat) UnmarshalJSON(data []byte) error {
|
||||
var tmp struct {
|
||||
Total int `json:"total"` // Total Data Downloaded in bytes
|
||||
ArticlesTried map[string]int `json:"articles_tried"` // Number of Articles Tried (YYYY-MM-DD -> count)
|
||||
ArticlesSuccess map[string]int `json:"articles_success"` // Number of Articles Successfully Downloaded (YYYY-MM-DD -> count)
|
||||
}
|
||||
if err := json.Unmarshal(data, &tmp); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
d, tried := latestStat(tmp.ArticlesTried)
|
||||
_, success := latestStat(tmp.ArticlesSuccess)
|
||||
s.Total = tmp.Total
|
||||
s.ArticlesTried = tried
|
||||
s.ArticlesSuccess = success
|
||||
s.DayParsed = d
|
||||
return nil
|
||||
}
|
||||
|
||||
// QueueStats is the response from the sabnzbd queue endpoint
|
||||
// Paused vs PausedAll -- as best I can tell, Paused is
|
||||
// "pause the queue but finish anything in flight"
|
||||
// PausedAll is "hard pause, including pausing in progress downloads"
|
||||
type QueueStats struct {
|
||||
Version string // Sabnzbd Version
|
||||
Paused bool // Is the sabnzbd queue globally paused?
|
||||
PausedAll bool // Pauses all actions that cause disk activity
|
||||
PauseDuration time.Duration // Duration sabnzbd will remain paused
|
||||
DownloadDirDiskspaceUsed float64 // Download Directory Used in bytes
|
||||
DownloadDirDiskspaceTotal float64 // Download Directory Total in bytes
|
||||
CompletedDirDiskspaceUsed float64 // Completed Directory Used in bytes
|
||||
CompletedDirDiskspaceTotal float64 // Completed Directory Total in bytes
|
||||
SpeedLimit float64 // The Speed Limit set as a percentage of configured line speed
|
||||
SpeedLimitAbs float64 // The Speed Limit set in B/s
|
||||
HaveWarnings float64 // Number of Warnings present
|
||||
Quota float64 // Total Quota configured Bytes
|
||||
HaveQuota bool // Is a Periodic Quota set for Sabnzbd?
|
||||
RemainingQuota float64 // Quota Remaining Bytes
|
||||
CacheArt float64 // Number of Articles in Cache
|
||||
CacheSize float64 // Size of Cache in bytes
|
||||
Speed float64 // Float String representing bps
|
||||
RemainingSize float64 // Bytes left to download in queue
|
||||
Size float64 // total bytes represented by queue
|
||||
ItemsInQueue float64 // Total number of items in queue
|
||||
Status Status // Status of sabnzbd (1 = Idle, 2 = Paused, 3 = Downloading)
|
||||
TimeEstimate time.Duration // Estimated time remaining to download queue
|
||||
}
|
||||
|
||||
func (q *QueueStats) UnmarshalJSON(data []byte) error {
|
||||
var v map[string]map[string]interface{}
|
||||
if err := json.Unmarshal(data, &v); err != nil {
|
||||
return err
|
||||
}
|
||||
queue := v["queue"]
|
||||
|
||||
q.Version, _ = queue["version"].(string)
|
||||
q.Paused, _ = queue["paused"].(bool)
|
||||
q.PausedAll, _ = queue["paused_all"].(bool)
|
||||
q.HaveQuota, _ = queue["have_quota"].(bool)
|
||||
q.ItemsInQueue, _ = queue["noofslots_total"].(float64)
|
||||
status, ok := queue["status"].(string)
|
||||
if ok {
|
||||
q.Status = StatusFromString(status)
|
||||
}
|
||||
|
||||
var err error
|
||||
q.PauseDuration, err = parseDuration(queue["pause_int"], err)
|
||||
q.DownloadDirDiskspaceUsed, err = parseFloat(queue["diskspace1"], err)
|
||||
q.DownloadDirDiskspaceTotal, err = parseFloat(queue["diskspacetotal1"], err)
|
||||
q.CompletedDirDiskspaceUsed, err = parseFloat(queue["diskspace2"], err)
|
||||
q.CompletedDirDiskspaceTotal, err = parseFloat(queue["diskspacetotal2"], err)
|
||||
q.SpeedLimit, err = parseSize(queue["speedlimit"], err)
|
||||
q.SpeedLimitAbs, err = parseSize(queue["speedlimit_abs"], err)
|
||||
q.HaveWarnings, err = parseFloat(queue["have_warnings"], err)
|
||||
q.Quota, err = parseSize(queue["quota"], err)
|
||||
q.RemainingQuota, err = parseSize(queue["left_quota"], err)
|
||||
q.CacheArt, err = parseSize(queue["cache_art"], err)
|
||||
q.CacheSize, err = parseSize(queue["cache_size"], err)
|
||||
q.Speed, err = parseFloat(queue["kbpersec"], err)
|
||||
q.RemainingSize, err = parseFloat(queue["mbleft"], err)
|
||||
q.Size, err = parseFloat(queue["mb"], err)
|
||||
q.TimeEstimate, err = parseDuration(queue["timeleft"], err)
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error parsing queue stats: %w", err)
|
||||
}
|
||||
|
||||
q.DownloadDirDiskspaceUsed *= MB
|
||||
q.DownloadDirDiskspaceTotal *= MB
|
||||
q.CompletedDirDiskspaceUsed *= MB
|
||||
q.CompletedDirDiskspaceTotal *= MB
|
||||
q.Speed *= KB
|
||||
q.RemainingSize *= MB
|
||||
q.Size *= MB
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// latestStat gets the most recent date's value from a map of dates to values
|
||||
func latestStat(m map[string]int) (string, int) {
|
||||
keys := make([]string, 0, len(m))
|
||||
for k := range m {
|
||||
keys = append(keys, k)
|
||||
}
|
||||
|
||||
sort.Strings(keys)
|
||||
key := keys[len(keys)-1]
|
||||
|
||||
return key, m[key]
|
||||
}
|
||||
|
||||
// monadCast is a stub: it has an empty body and is not referenced elsewhere in this diff.
func monadCast(i interface{}, t reflect.Type) {
|
||||
|
||||
}
|
||||
|
||||
// parseFloat is a monad version of strconv.ParseFloat
|
||||
func parseFloat(s interface{}, prevErr error) (float64, error) {
|
||||
if prevErr != nil {
|
||||
return 0, prevErr
|
||||
}
|
||||
|
||||
if s == nil {
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
f, ok := s.(string)
|
||||
if !ok {
|
||||
return 0, fmt.Errorf("Invalid float: %v", s)
|
||||
}
|
||||
|
||||
if f == "" {
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
ret, err := strconv.ParseFloat(f, 64)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
// parseSize is a monad which parses a size string in the format of "123.45 KB" or "123.45"
|
||||
func parseSize(s interface{}, prevErr error) (float64, error) {
|
||||
if prevErr != nil {
|
||||
return 0, prevErr
|
||||
}
|
||||
|
||||
if s == nil {
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
sz, ok := s.(string)
|
||||
if !ok {
|
||||
return 0, fmt.Errorf("Invalid float: %v", s)
|
||||
}
|
||||
|
||||
fields := strings.Fields(strings.TrimSpace(sz))
|
||||
if len(fields) == 0 {
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
if len(fields) > 2 {
|
||||
return 0, fmt.Errorf("Invalid size: %s", sz)
|
||||
}
|
||||
|
||||
ret, err := strconv.ParseFloat(fields[0], 64)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
if len(fields) == 1 {
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
switch fields[1] {
|
||||
case "B":
|
||||
return ret, nil
|
||||
case "KB", "K":
|
||||
return ret * 1024, nil
|
||||
case "MB", "M":
|
||||
return ret * 1024 * 1024, nil
|
||||
case "GB", "G":
|
||||
return ret * 1024 * 1024 * 1024, nil
|
||||
case "TB", "T":
|
||||
return ret * 1024 * 1024 * 1024 * 1024, nil
|
||||
case "PB", "P":
|
||||
return ret * 1024 * 1024 * 1024 * 1024 * 1024, nil
|
||||
default:
|
||||
return 0, fmt.Errorf("Invalid size suffix: %s", sz)
|
||||
}
|
||||
}
|
||||
|
||||
// parseDuration is a monad which parses a duration string in the format of "HH:MM:SS" or "MM:SS"
|
||||
func parseDuration(sd interface{}, prevErr error) (time.Duration, error) {
|
||||
if prevErr != nil {
|
||||
return 0, prevErr
|
||||
}
|
||||
|
||||
if sd == nil {
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
s, ok := sd.(string)
|
||||
if !ok {
|
||||
return 0, fmt.Errorf("Invalid float: %v", sd)
|
||||
}
|
||||
|
||||
if s == "" {
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
fields := strings.Split(strings.TrimSpace(s), ":")
|
||||
if len(fields) < 1 || len(fields) > 4 {
|
||||
return 0, fmt.Errorf("Invalid duration: %s", s)
|
||||
}
|
||||
|
||||
intFields := make([]int, len(fields))
|
||||
|
||||
for i, f := range fields {
|
||||
var err error
|
||||
// Reverse the order of the fields
|
||||
intFields[len(intFields)-1-i], err = strconv.Atoi(f)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("Invalid integer in duration: %s: %w", f, err)
|
||||
}
|
||||
}
|
||||
|
||||
ret := time.Duration(intFields[0]) * time.Second
|
||||
|
||||
fieldCount := len(intFields)
|
||||
if fieldCount > 1 {
|
||||
ret += time.Duration(intFields[1]) * time.Minute
|
||||
}
|
||||
|
||||
if fieldCount > 2 {
|
||||
ret += time.Duration(intFields[2]) * time.Hour
|
||||
}
|
||||
|
||||
if fieldCount > 3 {
|
||||
ret += time.Duration(intFields[3]) * 24 * time.Hour
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
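A worked example of the reversed-field duration parsing above: the test value "14:13:12:11" is read as DD:HH:MM:SS, i.e. 14*24h + 13h + 12m + 11s = 349h12m11s. The sketch below reproduces the same arithmetic for up to four fields without importing the internal package.

package main

import (
	"fmt"
	"strconv"
	"strings"
	"time"
)

// parseClockDuration mirrors parseDuration above: fields are split on ":",
// then read from the right so the last field is always seconds, scaling each
// successive field by minutes, hours, and days.
func parseClockDuration(s string) (time.Duration, error) {
	fields := strings.Split(strings.TrimSpace(s), ":")
	units := []time.Duration{time.Second, time.Minute, time.Hour, 24 * time.Hour}
	if len(fields) > len(units) {
		return 0, fmt.Errorf("invalid duration: %s", s)
	}

	var d time.Duration
	for i := 0; i < len(fields); i++ {
		n, err := strconv.Atoi(fields[len(fields)-1-i])
		if err != nil {
			return 0, err
		}
		d += time.Duration(n) * units[i]
	}
	return d, nil
}

func main() {
	d, _ := parseClockDuration("14:13:12:11")
	fmt.Println(d) // 349h12m11s
}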
183
internal/sabnzbd/model/model_test.go
Normal file
@ -0,0 +1,183 @@
|
||||
package model
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
type TestServerStatsResponse struct {
|
||||
Total int `json:"total"` // Total Data Downloaded in bytes
|
||||
Servers map[string]TestServerStatResponse `json:"servers"`
|
||||
}
|
||||
|
||||
type TestServerStatResponse struct {
|
||||
Total int `json:"total"` // Total Data Downloaded in bytes
|
||||
ArticlesTried map[string]int `json:"articles_tried"`
|
||||
ArticlesSuccess map[string]int `json:"articles_success"`
|
||||
}
|
||||
|
||||
func TestStatusToString(t *testing.T) {
|
||||
require := require.New(t)
|
||||
require.Equal("Downloading", DOWNLOADING.String())
|
||||
require.Equal("Paused", PAUSED.String())
|
||||
require.Equal("Idle", IDLE.String())
|
||||
require.Equal("Unknown", Status(999).String())
|
||||
}
|
||||
|
||||
func TestStatusFromString(t *testing.T) {
|
||||
require := require.New(t)
|
||||
require.Equal(DOWNLOADING, StatusFromString("Downloading"))
|
||||
require.Equal(PAUSED, StatusFromString("Paused"))
|
||||
require.Equal(IDLE, StatusFromString("Idle"))
|
||||
require.Equal(UNKNOWN, StatusFromString("Unknown"))
|
||||
require.Equal(UNKNOWN, StatusFromString("Unknown"))
|
||||
}
|
||||
|
||||
func TestStatusToFloat(t *testing.T) {
|
||||
require := require.New(t)
|
||||
require.Equal(3.0, DOWNLOADING.Float64())
|
||||
require.Equal(2.0, PAUSED.Float64())
|
||||
require.Equal(1.0, IDLE.Float64())
|
||||
require.Equal(0.0, UNKNOWN.Float64())
|
||||
}
|
||||
|
||||
func TestQueueStats_UnmarshalJSON(t *testing.T) {
|
||||
require := require.New(t)
|
||||
queue, err := os.ReadFile("../test_fixtures/queue.json")
|
||||
require.NoError(err)
|
||||
|
||||
var queueStats QueueStats
|
||||
err = queueStats.UnmarshalJSON(queue)
|
||||
require.NoError(err)
|
||||
require.Equal("3.7.2", queueStats.Version)
|
||||
require.False(queueStats.Paused)
|
||||
require.False(queueStats.PausedAll)
|
||||
require.Equal(time.Duration(0), queueStats.PauseDuration)
|
||||
require.Equal(3.64627623936e+10, queueStats.DownloadDirDiskspaceUsed)
|
||||
require.Equal(4.4971327488e+10, queueStats.DownloadDirDiskspaceTotal)
|
||||
require.Equal(3.64061392896e+10, queueStats.CompletedDirDiskspaceUsed)
|
||||
require.Equal(4.4972376064e+10, queueStats.CompletedDirDiskspaceTotal)
|
||||
require.Equal(100.0, queueStats.SpeedLimit)
|
||||
require.Equal(1.048576e+09, queueStats.SpeedLimitAbs)
|
||||
require.Equal(0.0, queueStats.HaveWarnings)
|
||||
require.Equal(1.07911053312e+12, queueStats.Quota)
|
||||
require.True(queueStats.HaveQuota)
|
||||
require.Equal(1.073741824e+12, queueStats.RemainingQuota)
|
||||
require.Equal(0.0, queueStats.CacheArt)
|
||||
require.Equal(0.0, queueStats.CacheSize)
|
||||
require.Equal(358.4, queueStats.Speed)
|
||||
require.Equal(3.21070825472e+09, queueStats.RemainingSize)
|
||||
require.Equal(3.21175683072e+09, queueStats.Size)
|
||||
require.Equal(2.0, queueStats.ItemsInQueue)
|
||||
require.Equal(DOWNLOADING, queueStats.Status)
|
||||
d, _ := time.ParseDuration("2495h59m3s")
|
||||
require.Equal(d, queueStats.TimeEstimate)
|
||||
|
||||
}
|
||||
|
||||
func TestQueueStats_ParseSize(t *testing.T) {
|
||||
parameters := []struct {
|
||||
input string
|
||||
expected float64
|
||||
}{
|
||||
{"0 B", 0.0},
|
||||
{"1 B", 1.0},
|
||||
{"1.0 B", 1.0},
|
||||
{"10 K", 10240.0},
|
||||
{"10.0 KB", 10240.0},
|
||||
{"10 M", 10485760.0},
|
||||
{"10.0 MB", 10485760.0},
|
||||
{"10 G", 10737418240.0},
|
||||
{"10.0 GB", 10737418240.0},
|
||||
{"10 T", 10995116277760.0},
|
||||
{"10.0 TB", 10995116277760.0},
|
||||
{"10 P", 11258999068426240.0},
|
||||
{"10.0 PB", 11258999068426240.0},
|
||||
}
|
||||
|
||||
require := require.New(t)
|
||||
|
||||
for _, parameter := range parameters {
|
||||
statsResponse := fmt.Sprintf(`{"queue": {"left_quota": "%s"}}`, parameter.input)
|
||||
var stats QueueStats
|
||||
err := json.Unmarshal([]byte(statsResponse), &stats)
|
||||
require.NoError(err)
|
||||
require.Equal(parameter.expected, stats.RemainingQuota)
|
||||
}
|
||||
}
|
||||
|
||||
func TestQueueStatus_ParseDuration(t *testing.T) {
|
||||
parameters := []struct {
|
||||
input string
|
||||
expected time.Duration
|
||||
}{
|
||||
{"", time.Duration(0)},
|
||||
{"10", time.Duration(10) * time.Second},
|
||||
{"10:01", time.Duration(10)*time.Minute + time.Duration(1)*time.Second},
|
||||
{"13:12:11", time.Duration(13)*time.Hour + time.Duration(12)*time.Minute + time.Duration(11)*time.Second},
|
||||
{"14:13:12:11", time.Duration(349)*time.Hour + time.Duration(12)*time.Minute + time.Duration(11)*time.Second},
|
||||
}
|
||||
|
||||
require := require.New(t)
|
||||
|
||||
for _, parameter := range parameters {
|
||||
var stats QueueStats
|
||||
statsResponse := fmt.Sprintf(`{ "queue": {"timeleft": "%s"}}`, parameter.input)
|
||||
err := json.Unmarshal([]byte(statsResponse), &stats)
|
||||
require.NoError(err)
|
||||
require.Equal(parameter.expected, stats.TimeEstimate)
|
||||
}
|
||||
}
|
||||
|
||||
func TestServerStats_UnmarshalJSON(t *testing.T) {
|
||||
require := require.New(t)
|
||||
|
||||
statsResponse := TestServerStatsResponse{
|
||||
Total: 123456789,
|
||||
Servers: map[string]TestServerStatResponse{
|
||||
"server1": {
|
||||
Total: 234567890,
|
||||
ArticlesTried: map[string]int{
|
||||
"2020-01-01": 1,
|
||||
"2020-01-02": 2,
|
||||
},
|
||||
ArticlesSuccess: map[string]int{
|
||||
"2020-01-01": 3,
|
||||
"2020-01-02": 4,
|
||||
},
|
||||
},
|
||||
"server2": {
|
||||
Total: 345678901,
|
||||
ArticlesTried: map[string]int{
|
||||
"2020-01-02": 6,
|
||||
"2020-01-01": 5,
|
||||
},
|
||||
ArticlesSuccess: map[string]int{
|
||||
"2020-01-02": 8,
|
||||
"2020-01-01": 7,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
b, err := json.Marshal(statsResponse)
|
||||
require.NoError(err)
|
||||
|
||||
var stats ServerStats
|
||||
err = json.Unmarshal(b, &stats)
|
||||
require.NoError(err)
|
||||
require.Equal(123456789, stats.Total)
|
||||
require.Equal(2, len(stats.Servers))
|
||||
require.Equal(234567890, stats.Servers["server1"].Total)
|
||||
require.Equal(2, stats.Servers["server1"].ArticlesTried)
|
||||
require.Equal(4, stats.Servers["server1"].ArticlesSuccess)
|
||||
require.Equal("2020-01-02", stats.Servers["server1"].DayParsed)
|
||||
require.Equal(345678901, stats.Servers["server2"].Total)
|
||||
require.Equal(6, stats.Servers["server2"].ArticlesTried)
|
||||
require.Equal(8, stats.Servers["server2"].ArticlesSuccess)
|
||||
require.Equal("2020-01-02", stats.Servers["server2"].DayParsed)
|
||||
}
|
||||
68
internal/sabnzbd/test_fixtures/expected_metrics.txt
Normal file
@ -0,0 +1,68 @@
|
||||
# HELP sabnzbd_article_cache_articles Total Articles Cached in the SabnzbD instance
|
||||
# TYPE sabnzbd_article_cache_articles gauge
|
||||
sabnzbd_article_cache_articles{target="http://127.0.0.1:39965"} 0
|
||||
# HELP sabnzbd_article_cache_bytes Total Bytes Cached in the SabnzbD instance Article Cache
|
||||
# TYPE sabnzbd_article_cache_bytes gauge
|
||||
sabnzbd_article_cache_bytes{target="http://127.0.0.1:39965"} 0
|
||||
# HELP sabnzbd_disk_total_bytes Total Bytes on the SabnzbD instance's disk
|
||||
# TYPE sabnzbd_disk_total_bytes gauge
|
||||
sabnzbd_disk_total_bytes{folder="complete",target="http://127.0.0.1:39965"} 4.4972376064e+10
|
||||
sabnzbd_disk_total_bytes{folder="download",target="http://127.0.0.1:39965"} 4.4971327488e+10
|
||||
# HELP sabnzbd_disk_used_bytes Bytes Used on the SabnzbD instance's disk
|
||||
# TYPE sabnzbd_disk_used_bytes gauge
|
||||
sabnzbd_disk_used_bytes{folder="complete",target="http://127.0.0.1:39965"} 3.64061392896e+10
|
||||
sabnzbd_disk_used_bytes{folder="download",target="http://127.0.0.1:39965"} 3.64627623936e+10
|
||||
# HELP sabnzbd_downloaded_bytes Total Bytes Downloaded by SABnzbd
|
||||
# TYPE sabnzbd_downloaded_bytes counter
|
||||
sabnzbd_downloaded_bytes{target="http://127.0.0.1:39965"} 5.869995742788e+12
|
||||
# HELP sabnzbd_info Info about the target SabnzbD instance
|
||||
# TYPE sabnzbd_info gauge
|
||||
sabnzbd_info{status="Downloading",target="http://127.0.0.1:39965",version="3.7.2"} 1
|
||||
# HELP sabnzbd_pause_duration_seconds Duration until the SabnzbD instance is unpaused
|
||||
# TYPE sabnzbd_pause_duration_seconds gauge
|
||||
sabnzbd_pause_duration_seconds{target="http://127.0.0.1:39965"} 0
|
||||
# HELP sabnzbd_paused Is the target SabnzbD instance paused
|
||||
# TYPE sabnzbd_paused gauge
|
||||
sabnzbd_paused{target="http://127.0.0.1:39965"} 0
|
||||
# HELP sabnzbd_paused_all Are all the target SabnzbD instance's queues paused
|
||||
# TYPE sabnzbd_paused_all gauge
|
||||
sabnzbd_paused_all{target="http://127.0.0.1:39965"} 0
|
||||
# HELP sabnzbd_queue_length Total Number of Items in the SabnzbD instance's queue
|
||||
# TYPE sabnzbd_queue_length gauge
|
||||
sabnzbd_queue_length{target="http://127.0.0.1:39965"} 2
|
||||
# HELP sabnzbd_quota_bytes Total Bytes in the SabnzbD instance's quota
|
||||
# TYPE sabnzbd_quota_bytes gauge
|
||||
sabnzbd_quota_bytes{target="http://127.0.0.1:39965"} 1.07911053312e+12
|
||||
# HELP sabnzbd_remaining_bytes Total Bytes Remaining to Download by the SabnzbD instance
|
||||
# TYPE sabnzbd_remaining_bytes gauge
|
||||
sabnzbd_remaining_bytes{target="http://127.0.0.1:39965"} 3.21070825472e+09
|
||||
# HELP sabnzbd_remaining_quota_bytes Total Bytes Left in the SabnzbD instance's quota
|
||||
# TYPE sabnzbd_remaining_quota_bytes gauge
|
||||
sabnzbd_remaining_quota_bytes{target="http://127.0.0.1:39965"} 1.073741824e+12
|
||||
# HELP sabnzbd_server_articles_success Total Articles Successfully downloaded from UseNet Server
|
||||
# TYPE sabnzbd_server_articles_success counter
|
||||
sabnzbd_server_articles_success{server="server1.example.tld",target="http://127.0.0.1:39965"} 12618
|
||||
sabnzbd_server_articles_success{server="server2.example.tld",target="http://127.0.0.1:39965"} 9869
|
||||
# HELP sabnzbd_server_articles_total Total Articles Attempted to download from UseNet Server
|
||||
# TYPE sabnzbd_server_articles_total counter
|
||||
sabnzbd_server_articles_total{server="server1.example.tld",target="http://127.0.0.1:39965"} 12622
|
||||
sabnzbd_server_articles_total{server="server2.example.tld",target="http://127.0.0.1:39965"} 9869
|
||||
# HELP sabnzbd_server_downloaded_bytes Total Bytes Downloaded from UseNet Server
|
||||
# TYPE sabnzbd_server_downloaded_bytes counter
|
||||
sabnzbd_server_downloaded_bytes{server="server1.example.tld",target="http://127.0.0.1:39965"} 4.8069637e+07
|
||||
sabnzbd_server_downloaded_bytes{server="server2.example.tld",target="http://127.0.0.1:39965"} 1.10895796e+08
|
||||
# HELP sabnzbd_speed_bps Total Bytes Downloaded per Second by the SabnzbD instance
|
||||
# TYPE sabnzbd_speed_bps gauge
|
||||
sabnzbd_speed_bps{target="http://127.0.0.1:39965"} 358.4
|
||||
# HELP sabnzbd_status Status of the SabnzbD instance's queue (0=Unknown, 1=Idle, 2=Paused, 3=Downloading)
|
||||
# TYPE sabnzbd_status gauge
|
||||
sabnzbd_status{target="http://127.0.0.1:39965"} 3
|
||||
# HELP sabnzbd_time_estimate_seconds Estimated Time Remaining to Download by the SabnzbD instance
|
||||
# TYPE sabnzbd_time_estimate_seconds gauge
|
||||
sabnzbd_time_estimate_seconds{target="http://127.0.0.1:39965"} 8.985543e+06
|
||||
# HELP sabnzbd_total_bytes Total Bytes in queue to Download by the SabnzbD instance
|
||||
# TYPE sabnzbd_total_bytes gauge
|
||||
sabnzbd_total_bytes{target="http://127.0.0.1:39965"} 3.21175683072e+09
|
||||
# HELP sabnzbd_warnings Total Warnings in the SabnzbD instance's queue
|
||||
# TYPE sabnzbd_warnings gauge
|
||||
sabnzbd_warnings{target="http://127.0.0.1:39965"} 0
|
||||
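Since the fixture above is plain Prometheus text exposition format, the natural way to exercise it is to diff it against the collector's live output with prometheus/client_golang's testutil package. A minimal sketch under assumed names; NewSabnzbdCollector and its URL argument are placeholders, not necessarily the constructor this commit adds, and the real test presumably substitutes the httptest server's port into the target label.

package sabnzbd

import (
	"os"
	"testing"

	"github.com/prometheus/client_golang/prometheus/testutil"
	"github.com/stretchr/testify/require"
)

// Sketch: compare every metric the collector emits against the fixture file.
func TestCollectorMatchesFixture(t *testing.T) {
	f, err := os.Open("test_fixtures/expected_metrics.txt")
	require.NoError(t, err)
	defer f.Close()

	collector := NewSabnzbdCollector("http://127.0.0.1:39965") // hypothetical constructor
	require.NoError(t, testutil.CollectAndCompare(collector, f))
}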
37
internal/sabnzbd/test_fixtures/queue.json
Normal file
@ -0,0 +1,37 @@
{
    "queue": {
        "version": "3.7.2",
        "paused": false,
        "pause_int": "0",
        "paused_all": false,
        "diskspace1": "34773.60",
        "diskspace2": "34719.60",
        "diskspace1_norm": "34.0 T",
        "diskspace2_norm": "34.0 T",
        "diskspacetotal1": "42888.00",
        "diskspacetotal2": "42889.00",
        "speedlimit": "100",
        "speedlimit_abs": "1048576000",
        "have_warnings": "0",
        "finishaction": null,
        "quota": "1005.0 G",
        "have_quota": true,
        "left_quota": "1000.0 G",
        "cache_art": "0",
        "cache_size": "0 B",
        "kbpersec": "0.35",
        "speed": "357 ",
        "mbleft": "3061.97",
        "mb": "3062.97",
        "sizeleft": "3.0 GB",
        "size": "3.0 GB",
        "noofslots_total": 2,
        "noofslots": 2,
        "start": 0,
        "limit": 0,
        "finish": 0,
        "status": "Downloading",
        "timeleft": "103:23:59:03",
        "slots": []
    }
}
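queue.json mirrors SABnzbd's mode=queue API response, where sizes come back in megabytes (mb, mbleft, diskspace*), speed in kilobytes per second (kbpersec), and timeleft as a D:HH:MM:SS string. Those units explain the expected_metrics values above: 0.35 KB/s * 1024 = 358.4 bytes/s for sabnzbd_speed_bps, 3061.97 MB * 1024^2 = 3.21070825472e+09 bytes for sabnzbd_remaining_bytes, and "103:23:59:03" = 103*86400 + 23*3600 + 59*60 + 3 = 8,985,543 s for sabnzbd_time_estimate_seconds. Below is a sketch of the timeleft conversion; parseTimeLeft is a hypothetical helper name, and the parser in this commit may be shaped differently.

import (
	"fmt"
	"strconv"
	"strings"
)

// parseTimeLeft converts SABnzbd's "D:HH:MM:SS" (or a shorter "HH:MM:SS")
// timeleft string into whole seconds, e.g. "103:23:59:03" -> 8985543.
func parseTimeLeft(s string) (int, error) {
	parts := strings.Split(strings.TrimSpace(s), ":")
	multipliers := []int{1, 60, 3600, 86400} // seconds, minutes, hours, days
	if len(parts) > len(multipliers) {
		return 0, fmt.Errorf("unexpected timeleft format: %q", s)
	}
	total := 0
	for i := 0; i < len(parts); i++ {
		v, err := strconv.Atoi(parts[len(parts)-1-i])
		if err != nil {
			return 0, err
		}
		total += v * multipliers[i]
	}
	return total, nil
}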
50
internal/sabnzbd/test_fixtures/server_stats.json
Normal file
@ -0,0 +1,50 @@
{
    "total": 5869995742788,
    "month": 338992874188,
    "week": 0,
    "day": 0,
    "servers": {
        "server1.example.tld": {
            "total": 48069637,
            "month": 1536,
            "week": 0,
            "day": 0,
            "daily": {
                "2022-12-27": 31593181,
                "2022-12-28": 1759145,
                "2022-12-29": 5017200
            },
            "articles_tried": {
                "2022-12-27": 2259,
                "2022-12-28": 8157,
                "2022-12-29": 12622
            },
            "articles_success": {
                "2022-12-27": 2259,
                "2022-12-28": 8157,
                "2022-12-29": 12618
            }
        },
        "server2.example.tld": {
            "total": 110895796,
            "month": 1536,
            "week": 0,
            "day": 0,
            "daily": {
                "2022-12-27": 30798776,
                "2022-12-28": 71492512,
                "2022-12-29": 2959967
            },
            "articles_tried": {
                "2022-12-27": 2151,
                "2022-12-28": 7891,
                "2022-12-29": 9869
            },
            "articles_success": {
                "2022-12-27": 2151,
                "2022-12-28": 7795,
                "2022-12-29": 9869
            }
        }
    }
}
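The per-server figures above line up with the per-server counters in expected_metrics.txt: each server's total feeds sabnzbd_server_downloaded_bytes, while the most recent day's articles_tried and articles_success (2022-12-29: 12622 and 12618 for server1) feed sabnzbd_server_articles_total and sabnzbd_server_articles_success. The sketch below shows how such per-server const metrics can be emitted with client_golang; the Desc wiring and function name are illustrative, not necessarily the collector this commit adds, and the target label seen in the fixture is presumably attached elsewhere as a constant label.

import "github.com/prometheus/client_golang/prometheus"

// Sketch: emit one counter sample per configured news server, labelled by
// server name, from an already-parsed ServerStats value.
var serverDownloadedDesc = prometheus.NewDesc(
	"sabnzbd_server_downloaded_bytes",
	"Total Bytes Downloaded from UseNet Server",
	[]string{"server"}, nil,
)

func collectServerStats(ch chan<- prometheus.Metric, stats ServerStats) {
	for name, server := range stats.Servers {
		ch <- prometheus.MustNewConstMetric(
			serverDownloadedDesc,
			prometheus.CounterValue,
			float64(server.Total),
			name,
		)
	}
}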