Compare commits
11 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 6efa4e95d9 |  |
|  | 2518c3f263 |  |
|  | 9c4c0da004 |  |
|  | e388723199 |  |
|  | e24cb20f3b |  |
|  | 8f3d0c2077 |  |
|  | 8d353b69f4 |  |
|  | a2ea6d7756 |  |
|  | 9dc8eaf285 |  |
|  | 21d5bad2a5 |  |
|  | 9faa727261 |  |
**.gitignore** (vendored, new file, 7 lines)
```diff
@@ -0,0 +1,7 @@
+rssmon3
+exec-rssmon3
+**.sw*
+**/testdata
+**/._*
+**/exec-*
+exec-*
```
```diff
@@ -26,7 +26,7 @@ func (h *Handler) Run() error {
 		case j := <-h.Jobs:
 			go func(key string) {
 				if err := h.Job(key); err != nil {
-					log.Println(err)
+					log.Println("[h.Run]", err)
 				}
 			}(j.Key)
 		}
@@ -41,12 +41,5 @@ func (h *Handler) Job(key string) error {
 	if err := f.Pull(); err != nil {
 		return err
 	}
-	for _, tag := range f.Tags {
-		if foo := ByTag(tag); foo != nil {
-			if err := foo(key); err != nil {
-				return err
-			}
-		}
-	}
 	return nil
 }
```
```diff
@@ -1,12 +0,0 @@
-package handlers
-
-func ByTag(tag string) func(string) error {
-	var foo func(string) error
-	switch tag {
-	case "torrent":
-		foo = torrent
-	case "podcast":
-		foo = podcast
-	}
-	return foo
-}
```
```diff
@@ -1,7 +0,0 @@
-package handlers
-
-import "errors"
-
-func podcast(key string) error {
-	return errors.New("not impl")
-}
```
**handlers/podcast/main.go** (new file, 234 lines)
```go
package main

import (
	"context"
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"local/args"
	"local/storage"
	"log"
	"net/http"
	"os"
	"path"
	"regexp"
	"sort"
	"strings"
	"time"

	"github.com/mmcdole/gofeed"
)

const sessionHeader = "X-Transmission-Session-Id"

type Config struct {
	url      string
	vpntor   string
	outdir   string
	interval time.Duration
	last     time.Time
	db       storage.DB
	ctx      context.Context
	can      context.CancelFunc
}

func main() {
	config, err := config()
	if err != nil {
		panic(err)
	}
	log.Println(config)

	for {
		if err := mainLoop(config); err != nil {
			panic(err)
		}
	}
}

func mainLoop(config *Config) error {
	block := config.interval - time.Since(config.last)
	log.Printf("Blocking %v", block)
	select {
	case <-time.After(block):
		if err := pull(config.db, config.vpntor, config.outdir, config.url); err != nil {
			log.Println(err)
		}
		config.last = time.Now()
	case <-config.ctx.Done():
		if err := config.ctx.Err(); err != nil {
			return err
		}
	}
	return nil
}

func config() (*Config, error) {
	as := args.NewArgSet()
	as.Append(args.STRING, "url", "url of rss feed", "http://192.168.0.86:33419/api/tag/podcast")
	as.Append(args.STRING, "vpntor", "url of vpntor", "http://192.168.0.86:9091/transmission/rpc")
	as.Append(args.DURATION, "interval", "interval to check feed", "30m")
	as.Append(args.STRING, "outdir", "save dir", "/data/completed-rss")
	as.Append(args.STRING, "db", "db type", "map")
	as.Append(args.STRING, "addr", "db addr", "")
	as.Append(args.STRING, "user", "db user", "")
	as.Append(args.STRING, "pass", "db pass", "")
	if err := as.Parse(); err != nil {
		return &Config{}, err
	}

	db, err := storage.New(
		storage.TypeFromString(as.Get("db").GetString()),
		as.Get("addr").GetString(),
		as.Get("user").GetString(),
		as.Get("pass").GetString(),
	)
	if err != nil {
		panic(err)
	}

	ctx, can := context.WithCancel(context.Background())
	return &Config{
		url:      as.Get("url").GetString(),
		vpntor:   as.Get("vpntor").GetString(),
		interval: as.Get("interval").GetDuration(),
		outdir:   as.Get("outdir").GetString(),
		db:       db,
		ctx:      ctx,
		can:      can,
	}, nil
}

func pull(db storage.DB, vpntor, outdir, url string) error {
	gofeed, err := getGoFeed(url)
	if err != nil {
		return err
	}
	log.Printf("feed: %v", gofeed.Title)
	for _, item := range gofeed.Items {
		if ok, err := isDone(db, item.Link); err != nil {
			return err
		} else if ok {
			continue
		}
		s, err := getItemContent(item)
		if err != nil {
			return err
		}
		if err := handle(vpntor, outdir, s); err != nil {
			return err
		}
		if err := db.Set(item.Link, []byte{}); err != nil {
			return err
		}
	}
	return nil
}

func getGoFeed(url string) (*gofeed.Feed, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	return gofeed.NewParser().Parse(resp.Body)
}

func getItemContent(item *gofeed.Item) (string, error) {
	s := item.Description
	if s == "" {
		s = item.Content
	}
	if s == "" {
		resp, err := http.Get(item.Link)
		if err != nil {
			return s, err
		}
		defer resp.Body.Close()
		b, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			return s, err
		}
		s = string(b)
	}
	return s, nil
}

func isDone(db storage.DB, url string) (bool, error) {
	_, err := db.Get(url)
	if err == storage.ErrNotFound {
		return false, nil
	}
	return true, err
}

func handle(vpntor, outdir, content string) error {
	links := findMagnets(content)
	sort.Strings(links)
	for i := range links {
		link := links[i]
		if i > 0 && link == links[i-1] {
			continue
		}
		log.Println(link)
		if err := fetch(link, outdir); err != nil {
			return err
		}
	}
	return nil
}

func findMagnets(s string) []string {
	magnetRegexp := regexp.MustCompile(`http[^"]*[0-9]+\.mp3`)
	return magnetRegexp.FindAllString(s, -1)
}

func fetch(link, outdir string) error {
	out := path.Join(outdir, path.Base(link))
	if _, err := os.Stat(out); err == nil {
		return nil
	} else if os.IsNotExist(err) {
	} else if err != nil {
		return err
	}

	resp, err := http.Get(link)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	f, err := os.Create(out)
	if err != nil {
		return err
	}
	defer f.Close()
	io.Copy(f, resp.Body)
	return nil
}

func buildReqBody(outdir, magnet string) io.Reader {
	return strings.NewReader(fmt.Sprintf(`
	{
		"method": "torrent-add",
		"arguments": {
			"filename": %q,
			"download-dir": %q
		}
	}
	`, magnet, outdir))
}

func getSessionID(vpntor string) (string, error) {
	resp, err := http.Get(vpntor)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	id := resp.Header.Get(sessionHeader)
	if id == "" {
		err = errors.New("session id header not found")
	}
	return id, err
}
```
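Despite its name, the podcast handler's findMagnets pulls direct .mp3 URLs out of the item content rather than magnet links, and handle then deduplicates and downloads them. A quick standalone sketch of what that regexp matches, not part of the diff; the sample enclosure markup and URL below are made up:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as findMagnets in handlers/podcast/main.go: an http URL that
	// ends in digits followed by ".mp3", bounded by the surrounding quote.
	mp3 := regexp.MustCompile(`http[^"]*[0-9]+\.mp3`)

	content := `<enclosure url="http://example.com/show/episode_549.mp3" type="audio/mpeg" />`
	fmt.Println(mp3.FindAllString(content, -1))
	// Output: [http://example.com/show/episode_549.mp3]
}
```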
```diff
@@ -1,7 +0,0 @@
-package handlers
-
-import "errors"
-
-func torrent(key string) error {
-	return errors.New("not impl")
-}
```
**handlers/torrent/main.go** (new file, 238 lines)
```go
package main

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"local/args"
	"local/storage"
	"log"
	"net/http"
	"path"
	"regexp"
	"strings"
	"time"

	"github.com/mmcdole/gofeed"
)

const sessionHeader = "X-Transmission-Session-Id"

type Config struct {
	url      string
	vpntor   string
	outdir   string
	interval time.Duration
	last     time.Time
	db       storage.DB
	ctx      context.Context
	can      context.CancelFunc
}

func main() {
	config, err := config()
	if err != nil {
		panic(err)
	}
	log.Println(config)

	for {
		if err := mainLoop(config); err != nil {
			panic(err)
		}
	}
}

func mainLoop(config *Config) error {
	block := config.interval - time.Since(config.last)
	log.Printf("Blocking %v", block)
	select {
	case <-time.After(block):
		if err := pull(config.db, config.vpntor, config.outdir, config.url); err != nil {
			log.Println(err)
		}
		config.last = time.Now()
	case <-config.ctx.Done():
		if err := config.ctx.Err(); err != nil {
			return err
		}
	}
	return nil
}

func config() (*Config, error) {
	as := args.NewArgSet()
	as.Append(args.STRING, "url", "url of rss feed", "http://192.168.0.86:33419/api/tag/torrent")
	as.Append(args.STRING, "vpntor", "url of vpntor", "http://192.168.0.86:9091/transmission/rpc")
	as.Append(args.DURATION, "interval", "interval to check feed", "30m")
	as.Append(args.STRING, "outdir", "save dir", "/data/completed-rss")
	as.Append(args.STRING, "db", "db type", "map")
	as.Append(args.STRING, "addr", "db addr", "")
	as.Append(args.STRING, "user", "db user", "")
	as.Append(args.STRING, "pass", "db pass", "")
	if err := as.Parse(); err != nil {
		return &Config{}, err
	}

	db, err := storage.New(
		storage.TypeFromString(as.Get("db").GetString()),
		as.Get("addr").GetString(),
		as.Get("user").GetString(),
		as.Get("pass").GetString(),
	)
	if err != nil {
		panic(err)
	}

	ctx, can := context.WithCancel(context.Background())
	return &Config{
		url:      as.Get("url").GetString(),
		vpntor:   as.Get("vpntor").GetString(),
		interval: as.Get("interval").GetDuration(),
		outdir:   as.Get("outdir").GetString(),
		db:       db,
		ctx:      ctx,
		can:      can,
	}, nil
}

func pull(db storage.DB, vpntor, outdir, url string) error {
	gofeed, err := getGoFeed(url)
	if err != nil {
		return err
	}
	log.Printf("feed: %v", gofeed.Title)
	for _, item := range gofeed.Items {
		if ok, err := isDone(db, item.Link); err != nil {
			return err
		} else if ok {
			continue
		}
		s, err := getItemContent(item)
		if err != nil {
			return err
		}
		if err := handle(vpntor, path.Join(outdir, gofeed.Copyright), s); err != nil {
			return err
		}
		if err := db.Set(item.Link, []byte{}); err != nil {
			return err
		}
	}
	return nil
}

func getGoFeed(url string) (*gofeed.Feed, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	return gofeed.NewParser().Parse(resp.Body)
}

func getItemContent(item *gofeed.Item) (string, error) {
	s := item.Description
	if s == "" {
		s = item.Content
	}
	if s == "" {
		resp, err := http.Get(item.Link)
		if err != nil {
			return s, err
		}
		defer resp.Body.Close()
		b, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			return s, err
		}
		s = string(b)
	}
	return s, nil
}

func isDone(db storage.DB, url string) (bool, error) {
	_, err := db.Get(url)
	if err == storage.ErrNotFound {
		return false, nil
	}
	return true, err
}

func handle(vpntor, outdir, content string) error {
	log.Printf("magnets: %v", findMagnets(content))
	for _, magnet := range findMagnets(content) {
		resp, err := submit(vpntor, outdir, magnet)
		if err != nil {
			return err
		}
		if err := succeeded(resp.Body); err != nil {
			return err
		}
	}
	return nil
}

func findMagnets(s string) []string {
	magnetRegexp := regexp.MustCompile(`magnet:.xt[^ $"]*`)
	return magnetRegexp.FindAllString(s, -1)
}

func submit(vpntor, outdir, magnet string) (*http.Response, error) {
	session, err := getSessionID(vpntor)
	if err != nil {
		return nil, err
	}
	req, err := http.NewRequest("POST", vpntor, buildReqBody(outdir, magnet))
	if err != nil {
		return nil, err
	}
	req.Header.Add(sessionHeader, session)
	return (&http.Client{}).Do(req)
}

func succeeded(body io.ReadCloser) error {
	defer body.Close()
	b, err := ioutil.ReadAll(body)
	if err != nil {
		return err
	}
	var result struct {
		Result string `json:"result"`
	}
	if err := json.Unmarshal(b, &result); err != nil {
		return err
	}
	if result.Result != "success" {
		return fmt.Errorf("denied: %s", b)
	}
	return nil
}

func buildReqBody(outdir, magnet string) io.Reader {
	return strings.NewReader(fmt.Sprintf(`
	{
		"method": "torrent-add",
		"arguments": {
			"filename": %q,
			"download-dir": %q
		}
	}
	`, magnet, outdir))
}

func getSessionID(vpntor string) (string, error) {
	resp, err := http.Get(vpntor)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	id := resp.Header.Get(sessionHeader)
	if id == "" {
		err = errors.New("session id header not found")
	}
	return id, err
}
```
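The torrent handler talks to Transmission's RPC endpoint in two steps: getSessionID reads the X-Transmission-Session-Id header, and submit POSTs the torrent-add body with that header attached. Below is a standalone sketch of that round trip against a fake endpoint; it is not part of the diff, the magnet and paths are invented, and the real daemon typically hands the id back on a 409 response, which the fake server skips by returning it on every request:

```go
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/http/httptest"
	"strings"
)

const sessionHeader = "X-Transmission-Session-Id"

func main() {
	// Fake RPC endpoint: hands out a session id, and answers torrent-add POSTs.
	rpc := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set(sessionHeader, "fake-session")
		if r.Method == http.MethodPost {
			fmt.Fprint(w, `{"result":"success"}`)
		}
	}))
	defer rpc.Close()

	// Step 1: learn the session id from the response header.
	resp, err := http.Get(rpc.URL)
	if err != nil {
		panic(err)
	}
	resp.Body.Close()
	session := resp.Header.Get(sessionHeader)

	// Step 2: POST the same JSON shape buildReqBody assembles, with the header attached.
	body := strings.NewReader(`{"method":"torrent-add","arguments":{"filename":"magnet:?xt=urn:btih:abc","download-dir":"/data/completed-rss"}}`)
	req, err := http.NewRequest(http.MethodPost, rpc.URL, body)
	if err != nil {
		panic(err)
	}
	req.Header.Add(sessionHeader, session)
	out, err := (&http.Client{}).Do(req)
	if err != nil {
		panic(err)
	}
	defer out.Body.Close()
	b, _ := ioutil.ReadAll(out.Body)
	fmt.Println(string(b)) // {"result":"success"}
}
```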
**handlers/torrent/main_test.go** (new file, 250 lines)
```go
package main

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"local/storage"
	"net/http"
	"net/http/httptest"
	"os"
	"strings"
	"testing"
	"time"

	"github.com/mmcdole/gofeed"
)

type stringReaderCloser struct {
	io.Reader
}

func mockReadClose(s string) io.ReadCloser {
	reader := strings.NewReader(s)
	return stringReaderCloser{Reader: reader}
}

func (src stringReaderCloser) Close() error {
	return nil
}

func fakeRSSServer() *httptest.Server {
	return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte(`
<?xml version="1.0" encoding="UTF-8" ?>
<rss version="2.0">
<channel>
<item>
<title>Item Title</title>
<link>https://roosterteeth.com/episode/rooster-teeth-podcast-2019-549</link>
<pubDate>Tue, 18 Jun 2019 19:00:00 +0000</pubDate>
<description>Gavin Free discuss raditation, toilet paper face, Chris's continued haircuts, and more on this week's RT Podcast! magnet:-xt1 magnet:-xt2 <a href="magnet:-xt3">link</a></description>
<enclosure url="http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/roosterteethpodcast/Rooster_Teeth_Podcast_549.mp3" type="audio/mpeg" />
</item>
</channel>
</rss>
`))
	}))
}

func TestMainLoopCtx(t *testing.T) {
	ctx, can := context.WithCancel(context.Background())
	can()
	c := &Config{
		interval: time.Hour,
		ctx:      ctx,
	}
	if err := mainLoop(c); err == nil || !strings.Contains(err.Error(), "cancel") {
		t.Fatal(err)
	}
}

func TestConfig(t *testing.T) {
	was := os.Args[:]
	defer func() {
		os.Args = was
	}()
	os.Args = []string{"a"}
	if _, err := config(); err != nil {
		t.Fatal(err)
	}

	os.Args = []string{"a", "-interval", "not a duration"}
	stderr := os.Stderr
	f, _ := os.Open("/dev/null")
	os.Stderr = f
	defer func() {
		os.Stderr = stderr
	}()
	if _, err := config(); err == nil {
		t.Fatal(err)
	}
}

func TestGetGoFeed(t *testing.T) {
	s := fakeRSSServer()
	defer s.Close()

	f, err := getGoFeed(s.URL)
	if err != nil {
		t.Fatal(err)
	}
	if len(f.Items) != 1 {
		t.Fatal(len(f.Items))
	}
}

func TestGetItemContent(t *testing.T) {
	s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte(`Hello`))
	}))
	defer s.Close()

	cases := []struct {
		item gofeed.Item
		body string
		err  error
	}{
		{
			item: gofeed.Item{
				Description: "hi",
				Content:     "hi2",
			},
			body: "hi",
			err:  nil,
		},
		{
			item: gofeed.Item{
				Content: "hi2",
			},
			body: "hi2",
			err:  nil,
		},
		{
			item: gofeed.Item{
				Link: s.URL,
			},
			body: "Hello",
			err:  nil,
		},
	}

	for i, c := range cases {
		body, err := getItemContent(&c.item)
		cerrS := fmt.Sprintf("%v", c.err)
		errS := fmt.Sprintf("%v", err)
		if cerrS != errS {
			t.Errorf("[%d] unexpected err %v, want %v", i, err, c.err)
		}
		if body != c.body {
			t.Errorf("[%d] unexpected body %v, want %v", i, body, c.body)
		}
	}
}

func TestIsDone(t *testing.T) {
	db, _ := storage.New(storage.MAP)
	db.Set("a", []byte("hi"))
	if ok, err := isDone(db, "a"); err != nil {
		t.Fatal(err)
	} else if !ok {
		t.Fatal(ok)
	}

	if ok, err := isDone(db, "b"); err != nil {
		t.Fatal(err)
	} else if ok {
		t.Fatal(ok)
	}
}

func TestGetSessionID(t *testing.T) {
	s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Add(sessionHeader, "id")
		w.Write([]byte(`Hello`))
	}))
	defer s.Close()

	session, err := getSessionID(s.URL)
	if err != nil {
		t.Fatal(err)
	}

	if session != "id" {
		t.Fatal(session)
	}
}

func TestBuildReqBody(t *testing.T) {
	var want struct {
		Method    string `json:"method"`
		Arguments struct {
			Filename    string `json:"filename"`
			DownloadDir string `json:"download-dir"`
		} `json:"arguments"`
	}

	b := buildReqBody("out", "mag")
	if err := json.NewDecoder(b).Decode(&want); err != nil {
		t.Fatal(err)
	}
	if want.Method != "torrent-add" {
		t.Fatal(want.Method)
	}
	if want.Arguments.Filename != "mag" {
		t.Fatal(want.Arguments.Filename)
	}
	if want.Arguments.DownloadDir != "out" {
		t.Fatal(want.Arguments.DownloadDir)
	}
}

func TestSucceeded(t *testing.T) {
	cases := []struct {
		s   string
		err error
	}{
		{
			s:   `{"result":"success"}`,
			err: nil,
		},
		{
			s:   `this isnt json`,
			err: errors.New("invalid character 'h' in literal true (expecting 'r')"),
		},
		{
			s:   `{"result":"failure"}`,
			err: errors.New(`denied: {"result":"failure"}`),
		},
	}

	for i, c := range cases {
		err := succeeded(mockReadClose(c.s))
		cerrS := fmt.Sprintf("%v", c.err)
		errS := fmt.Sprintf("%v", err)
		if cerrS != errS {
			t.Errorf("[%d] unexpected err %v, want %v", i, err, c.err)
		}
	}
}

func TestFindMagnets(t *testing.T) {
	cases := []struct {
		s string
		l int
	}{
		{
			s: `here is some magnet:-xt1 and magnet:-xt2 another one <a href="magnet:-xt3">link</a>`,
			l: 3,
		},
	}

	for i, c := range cases {
		out := findMagnets(c.s)
		if len(out) != c.l {
			t.Errorf("[%d] found %v magnets, want %v", i, len(out), c.l)
		}
	}
}
```
**main.go** (4 additions/deletions)
```diff
@@ -20,7 +20,7 @@ func main() {
 	if err := config.New(); err != nil {
 		panic(err)
 	}
-	log.Println(config.Values())
+	log.Println("[main]", config.Values())
 
 	m, err := monitor.New()
 	if err != nil {
@@ -50,7 +50,7 @@ func main() {
 
 func InterruptAfter(foo func() error, c chan os.Signal) {
 	if err := foo(); err != nil {
-		log.Println(err)
+		log.Println("[InterruptAfter]", err)
 	}
 	c <- syscall.SIGINT
 }
```
```diff
@@ -51,7 +51,7 @@ func (i *Item) Compare(other queue.Item) int {
 func (i *Item) Interval() time.Duration {
 	t, err := i.getInterval()
 	if err != nil {
-		log.Println(err)
+		log.Println("[Interval]", err)
 		return forever
 	}
 	return t
@@ -81,14 +81,14 @@ func (i *Item) setInterval(t time.Duration) error {
 func (i *Item) Last() time.Time {
 	t, err := i.getLast()
 	if err != nil {
-		log.Println(err)
+		log.Println("[Last]", err)
 	}
 	return t
 }
 
 func (i *Item) Mark() {
 	if err := i.setLast(time.Now()); err != nil {
-		log.Println(err)
+		log.Println("[Mark]", err)
 	}
 }
 
```
```diff
@@ -53,7 +53,7 @@ func (m *Monitor) triggered() <-chan time.Time {
 		return nil
 	}
 	block := time.Until(top.Last().Add(top.Interval()))
-	log.Printf("blocking %v until next task", block)
+	log.Printf("[triggered] blocking %v until next task", block)
 	return time.After(time.Until(top.Last().Add(top.Interval())))
 }
 
```
```diff
@@ -1,7 +1,9 @@
 package monitor
 
 import (
+	"fmt"
 	"local/rssmon3/config"
+	"local/storage"
 	"log"
 
 	"github.com/golang-collections/go-datastructures/queue"
@@ -17,13 +19,15 @@ func newQueue() (*Queue, error) {
 	q := queue.NewPriorityQueue(1)
 	db := config.Values().DB
 	keys, err := db.List([]string{nsQueued})
-	if err != nil {
+	if err == storage.ErrNotFound {
+		keys = []string{}
+	} else if err != nil {
 		return nil, err
 	}
 	for _, key := range keys {
 		b, err := db.Get(key, nsQueued)
 		if err != nil {
-			return nil, err
+			return nil, fmt.Errorf("failed to get %s:%s: %v", nsQueued, key, err)
 		}
 		i := &Item{}
 		if err := i.Decode(b); err != nil {
@@ -43,11 +47,11 @@ func (q *Queue) Push(i *Item) {
 	q.queue.Put(i)
 	b, err := i.Encode()
 	if err != nil {
-		log.Println(err)
+		log.Println("[Push]", err)
 		return
 	}
 	if err := config.Values().DB.Set(i.Key, b, nsQueued); err != nil {
-		log.Println(err)
+		log.Println("[Push]", err)
 		return
 	}
 }
```
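The newQueue change separates two failure modes: a missing namespace listing now counts as an empty queue, while a failure on an individual record still aborts, with the error wrapped so it names the namespace and key. A minimal standalone sketch of that pattern follows; the List and Get stubs are stand-ins and are not the real local/storage API:

```go
package main

import (
	"errors"
	"fmt"
)

var ErrNotFound = errors.New("not found")

// List and Get are assumed stand-ins for the storage calls used by newQueue.
func List(ns string) ([]string, error)   { return nil, ErrNotFound }
func Get(ns, key string) ([]byte, error) { return nil, ErrNotFound }

func loadQueued(ns string) ([][]byte, error) {
	keys, err := List(ns)
	if err == ErrNotFound {
		keys = []string{} // a missing namespace just means an empty queue on startup
	} else if err != nil {
		return nil, err
	}
	out := make([][]byte, 0, len(keys))
	for _, key := range keys {
		b, err := Get(ns, key)
		if err != nil {
			return nil, fmt.Errorf("failed to get %s:%s: %v", ns, key, err)
		}
		out = append(out, b)
	}
	return out, nil
}

func main() {
	items, err := loadQueued("nsQueued")
	fmt.Println(len(items), err) // 0 <nil>
}
```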
**rss/feed.go** (11 additions/deletions)
```diff
@@ -21,6 +21,7 @@ type Feed struct {
 	TitleFilter   string
 	ContentFilter string
 	Tags          []string
+	Copyright     string
 }
 
 func SubmitFeed(f *Feed) error {
@@ -72,22 +73,22 @@ func (f *Feed) Pull() error {
 
 	itemTSs := []*time.Time{}
 	for _, i := range gofeed.Items {
-		item, err := newItem(i, f.ContentFilter)
+		item, err := newItem(i, f.ContentFilter, f.Copyright)
 		if err != nil {
-			log.Println(err)
+			log.Println("[Pull]", err)
 			continue
 		}
 		itemTSs = append(itemTSs, &item.TS)
 		if item.TS.Before(f.Updated) {
-			log.Println("Skipping old item")
+			log.Println("[Pull]", "Skipping old item")
 			continue
 		}
 		if ok := regexp.MustCompile(f.TitleFilter).MatchString(item.Title); !ok {
-			log.Println("Skipping bad titled item")
+			log.Println("[Pull]", "Skipping bad titled item")
 			continue
 		}
 		if err := item.save(f.Key); err != nil {
-			log.Println(err)
+			log.Println("[Pull]", err)
 			continue
 		}
 	}
```
**rss/item.go** (23 additions/deletions)
```diff
@@ -16,20 +16,22 @@ import (
 const nsItems = "nsItems"
 
 type Item struct {
-	Title   string
-	Link    string
-	Content string
-	TS      time.Time
+	Title     string
+	Link      string
+	Content   string
+	TS        time.Time
+	Copyright string
 }
 
 type Items []*Item
 
-func newItem(i *gofeed.Item, contentFilter string) (*Item, error) {
+func newItem(i *gofeed.Item, contentFilter, copyright string) (*Item, error) {
 	item := &Item{
-		Title:   i.Title,
-		Link:    i.Link,
-		Content: i.Content,
-		TS:      latestTSPtr(i.UpdatedParsed, i.PublishedParsed),
+		Title:     i.Title,
+		Link:      i.Link,
+		Content:   i.Content,
+		TS:        latestTSPtr(i.UpdatedParsed, i.PublishedParsed),
+		Copyright: copyright,
 	}
 
 	if item.Content == "" {
@@ -47,6 +49,9 @@ func newItem(i *gofeed.Item, contentFilter string) (*Item, error) {
 		}
 		item.Content = string(b)
 	}
+	for _, enclosure := range i.Enclosures {
+		item.Content += fmt.Sprintf(`<br><a href="%s">%s</a>`, enclosure.URL, enclosure.URL)
+	}
 	if unescaped, err := url.QueryUnescape(item.Content); err == nil {
 		item.Content = unescaped
 	}
```
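The new enclosure loop is what surfaces enclosure URLs (podcast audio, for instance) to the downstream handlers: each one is appended to the item content as a plain link. A trivial standalone check of the string it builds, not part of the diff; the URL is made up:

```go
package main

import "fmt"

func main() {
	content := "episode notes"
	enclosureURL := "http://example.com/episode_549.mp3"
	// Same append as the enclosure loop in newItem.
	content += fmt.Sprintf(`<br><a href="%s">%s</a>`, enclosureURL, enclosureURL)
	fmt.Println(content)
	// episode notes<br><a href="http://example.com/episode_549.mp3">http://example.com/episode_549.mp3</a>
}
```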
```diff
@@ -30,12 +30,12 @@ func TestRSSItemNewEncodeDecode(t *testing.T) {
 	gofeed.Items[0].Content = ""
 	gofeed.Items[0].Description = ""
 
-	item, err := newItem(gofeed.Items[0], ".*")
+	item, err := newItem(gofeed.Items[0], ".*", "")
 	if err != nil {
 		t.Fatal(err)
 	}
 
-	itemB, err := newItem(gofeed.Items[0], "Podcast")
+	itemB, err := newItem(gofeed.Items[0], "Podcast", "")
 	if err != nil {
 		t.Fatal(err)
 	}
```
```diff
@@ -24,7 +24,7 @@ func (rss *RSS) Run() error {
 			return nil
 		case i := <-rss.items:
 			if err := rss.update(i); err != nil {
-				log.Println(err)
+				log.Println("[rss.Run]", err)
 			}
 		}
 	}
```
```diff
@@ -31,12 +31,18 @@ func TaggedFeeds(tag string) ([]*Feed, error) {
 }
 
 func WriteFeed(w io.Writer, tag string, items []*Item) error {
+	copyright := ""
+	if len(items) > 0 {
+		copyright = items[0].Copyright
+	}
+	copyright = "."
 	sort.Sort(Items(items))
 	feed := &feeds.Feed{
 		Title:       tag,
 		Link:        &feeds.Link{},
 		Description: tag,
 		Items:       make([]*feeds.Item, len(items)),
+		Copyright:   copyright,
 	}
 	for i, item := range items {
 		feed.Items[i] = &feeds.Item{
```
**rss/testdata/rss.xml** (vendored, 6061 additions/deletions)

File diff suppressed because it is too large.
**rss/testdata/rss.xml.REMOVED.git-id** (vendored, deleted)

```diff
@@ -1 +0,0 @@
-d36f6c94dfbbaaeac339bfaec9d0dd13b0ff099b
```

```diff
@@ -1 +0,0 @@
-49c1645f536cea7463202894e751295a3ec589f1
```
```diff
@@ -35,13 +35,13 @@ func (s *Server) notFound(w http.ResponseWriter, r *http.Request) {
 
 func (s *Server) userError(w http.ResponseWriter, r *http.Request, err error) {
 	status := http.StatusBadRequest
-	log.Printf("%d: %v", status, err)
+	log.Printf("[userError] %d: %v", status, err)
 	w.WriteHeader(status)
 }
 
 func (s *Server) error(w http.ResponseWriter, r *http.Request, err error) {
 	status := http.StatusInternalServerError
-	log.Printf("%d: %v", status, err)
+	log.Printf("[error] %d: %v", status, err)
 	w.WriteHeader(status)
 }
 
@@ -79,6 +79,7 @@ func (s *Server) feed(w http.ResponseWriter, r *http.Request) {
 		URL           string   `json:"url"`
 		Interval      string   `json:"refresh"`
 		TitleFilter   string   `json:"items"`
+		Copyright     string   `json:"copyright"`
 		ContentFilter string   `json:"content"`
 		Tags          []string `json:"tags"`
 	}
@@ -97,6 +98,7 @@ func (s *Server) feed(w http.ResponseWriter, r *http.Request) {
 		TitleFilter:   putFeed.TitleFilter,
 		ContentFilter: putFeed.ContentFilter,
 		Tags:          putFeed.Tags,
+		Copyright:     putFeed.Copyright,
 	}
 	if err := rss.SubmitFeed(f); err != nil {
 		s.error(w, r, err)
```
**testdata/2add** (vendored, deleted, 68 lines)
```bash
#! /bin/bash

set -e

remote="${remote:-192.168.0.86}"

feeds='
http://lizclimo.tumblr.com/rss comic na na "24h"
https://mangadex.org/rss/follows/ryZGFkVgv9DAPqf45EsTHuQcxmteKBN6 manga na na "1h"
https://siftrss.com/f/WXPo6AKQLX manga na na "1h"
https://xkcd.com/atom.xml comic na na "24h"
http://yaoi-blcd.tumblr.com/tagged/19-days/rss manga "Old.Xian.update.of..19" na "4h"
http://www.shanaproject.com/feeds/secure/user/5310/5MUGNU9B3I/ anime na na "24h"
http://www.manga.megchan.com/blog/?feed=rss2 manga na na "12h"
https://siftrss.com/f/6Q8a0ZK0RmL manga na na "12h"
https://dropoutmanga.wordpress.com/feed/ manga "(Hatsukoi|Tomo.chan)" na "6h"
http://merakiscans.com/manga-rss/solo-leveling/ manga na na "8h"
http://www.mangago.me/r/rsslink/a_man_like_you.xml manga na na "24h"
http://www.mangago.me/r/rsslink/unriped_expression.xml manga na na "24h"
http://www.mangago.me/r/rsslink/heesu_in_class_2.xml manga na na "24h"
http://www.mangago.me/r/rsslink/bj_alex.xml manga na na "24h"
https://heroscans.tumblr.com/rss manga "(Saizu)" na "24h"
http://feeds.pinboard.in/rss/popular/ news ".*" ".*" "24h"
https://www.vox.com/rss/index.xml news ".*" ".*" "24h"
https://www.techdirt.com/techdirt_rss.xml news ".*" ".*" "24h"
http://hotdiggedydemon.com/ comic ".*" ".*" "24h"
https://nyaa.si/?page=rss&u=Nauti anime "(akegurui.*XX)" ".*Anime...English.translated.*" "3h"
https://tapas.io/rss/series/72832 manga na na "24h"
https://tapas.io/rss/series/88570 manga na na "24h"
https://nyaa.si/?page=rss&q=kakegurui&c=0_0&f=0&u=Nauti anime "(akegurui.XX)" ".*Anime...English.translated.*" "3h"
https://nyaa.si/?page=rss&q=horriblesubs+mob+psycho+720&c=0_0&f=0 anime ".*" ".*Anime...English.translated.*" "3h"
https://www.youtube.com/feeds/videos.xml?channel_id=UC9CuvdOVfMPvKCiwdGKL3cQ youtube","gampegrumps ".*" ".*" "6h"
https://www.youtube.com/feeds/videos.xml?channel_id=UCbGO1tCyjMlzqIT-tyrJNqQ youtube","kshaway ".*" ".*" "6h"
https://www.youtube.com/feeds/videos.xml?channel_id=UC9YLd0REiXxLqQQH_CpJKZQ youtube","instalok ".*" ".*" "6h"
https://www.youtube.com/feeds/videos.xml?channel_id=UCsvn_Po0SmunchJYOWpOxMg youtube","videogamedunkey ".*" ".*" "6h"
https://eztv.ag/ezrss.xml momma","torrent "^(Wheel.[oO]f.Fortune.[S2]|Jeopardy.20|Gold.Rush.*720.*WEB|Survivor.S.*720p|Gold.Rush.*S.*E.*720.*WEB|Archer|Pointless.*720p.*WEB|Mock.The.Week.*480p|8.Out.Of.10.Cats.*480p|Masterchef.*Australia.*720p|Price.[iI]s.[rR]ight|Let.*[mM]ake.[aA].[dD]eal)" "<a.*?href=.magnet:.*?</a>" "2h"
https://www.youtube.com/feeds/videos.xml?channel_id=UCs-w7E2HZWwXmjt9RTvBB_A youtube","settled ".*" ".*" "6h"
https://roosterteeth.com/show/rt-podcast/feed/mp3 podcast ".*" ".*" "6h"
'
feedsB="$(printf "%s\n" "$feeds" | grep -v '^$' | tail -n 1)"
#feeds="$feedsB"

defaultIF='""'
defaultCF='"<img.*?/(img)?>"'
defaultIN='"6h"'

feeds="${feeds// /,,}"

for feed in $feeds; do
	feed="${feed//,,/ }"
	link="$(echo "$feed" | awk '{print $1}')"
	tag="$(echo "$feed" | awk '{print $2}')"
	if="$(echo "$feed" | awk '{print $3}')"
	cf="$(echo "$feed" | awk '{print $4}')"
	in="$(echo "$feed" | awk '{print $5}')"
	if [ "$if" == "na" ]; then
		if="$defaultIF"
	fi
	if [ "$cf" == "na" ]; then
		cf="$defaultCF"
	fi
	if [ "$in" == "na" ]; then
		in="$defaultIN"
	fi
	cmd="curl ${PROXY:-} -i ${remote}:33419/api/feed -X PUT --data '{\"url\":\"$link\", \"refresh\":$in, \"items\":$if, \"content\":$cf, \"tags\":[\"$tag\"]}'"
	echo "$cmd" >&2
	eval "$cmd"
done
```
**testdata/2do** (vendored, deleted, 7 lines)
```
* implement torret+podcast handlers
* fetch <img> and embed?

x render tags -> feeds for server
x load feed jobs on startup and queue
x push new jobs/queue items from server
x implement endpoints for server
```
**vendor/vendor.json** (vendored, new file, 6 lines)
```json
{
	"comment": "",
	"ignore": "test",
	"package": [],
	"rootPath": "local/rssmon3"
}
```