9 Commits
v0.0 ... v0.4

Author SHA1 Message Date
bel
9c4c0da004 Get enclosures in body
Former-commit-id: 54036d076a334394bc6e7b1f2071ce92bef96325
2019-06-26 18:00:58 -06:00
bel
e388723199 gitignore executable
Former-commit-id: 69dde9cfe2ed99ca2eefd25b96cb1328e622a6e2
2019-06-26 17:50:19 -06:00
bel
e24cb20f3b Fix torrent handler
Former-commit-id: 2bf1e2e21137e4020d2ace31e4560e9a97684403
2019-06-26 17:41:19 -06:00
Bel LaPointe
8f3d0c2077 create torrent handler
Former-commit-id: dc7a0cbdbbf2ddc985f5581e740ba59a9c18aff0
2019-06-25 09:49:50 -06:00
bel
8d353b69f4 betterer
Former-commit-id: c0f188d5d418c141ea9e47b80c1a0295a3c3be02
2019-06-22 17:10:35 -06:00
bel
a2ea6d7756 better
Former-commit-id: 3d39d00737f6606730bae559b4d73c4a64c5cba8
2019-06-22 17:10:26 -06:00
bel
9dc8eaf285 update gitignore
Former-commit-id: eac621f7f6f4bd953309e9624567feb58608ae8e
2019-06-22 17:10:07 -06:00
bel
21d5bad2a5 fix leveldb problems
Former-commit-id: f17fe73babf8910a2031323ffb2a28979082a4dd
2019-06-22 17:06:50 -06:00
bel
9faa727261 clarify logs and gitignore and skipping vendor from now on
Former-commit-id: c8b61b58379d768f04e1593c2a77cf429407cf62
2019-06-22 16:34:34 -06:00
19 changed files with 525 additions and 127 deletions

7
.gitignore vendored Normal file
View File

@@ -0,0 +1,7 @@
rssmon3
exec-rssmon3
**.sw*
**/testdata
**/._*
**/exec-*
exec-*

View File

@@ -26,7 +26,7 @@ func (h *Handler) Run() error {
case j := <-h.Jobs: case j := <-h.Jobs:
go func(key string) { go func(key string) {
if err := h.Job(key); err != nil { if err := h.Job(key); err != nil {
log.Println(err) log.Println("[h.Run]", err)
} }
}(j.Key) }(j.Key)
} }
@@ -41,12 +41,5 @@ func (h *Handler) Job(key string) error {
if err := f.Pull(); err != nil { if err := f.Pull(); err != nil {
return err return err
} }
for _, tag := range f.Tags {
if foo := ByTag(tag); foo != nil {
if err := foo(key); err != nil {
return err
}
}
}
return nil return nil
} }

View File

@@ -1,12 +0,0 @@
package handlers
func ByTag(tag string) func(string) error {
var foo func(string) error
switch tag {
case "torrent":
foo = torrent
case "podcast":
foo = podcast
}
return foo
}

View File

@@ -1,7 +0,0 @@
package handlers
import "errors"
func podcast(key string) error {
return errors.New("not impl")
}

View File

@@ -1,7 +0,0 @@
package handlers
import "errors"
func torrent(key string) error {
return errors.New("not impl")
}

237
handlers/torrent/main.go Normal file
View File

@@ -0,0 +1,237 @@
package main
import (
"context"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"local/args"
"local/storage"
"log"
"net/http"
"regexp"
"strings"
"time"
"github.com/mmcdole/gofeed"
)
// sessionHeader is Transmission's CSRF header: a session id obtained from
// the RPC server must be echoed back on every subsequent POST.
const sessionHeader = "X-Transmission-Session-Id"

// Config carries the runtime settings for the torrent-handler poll loop.
type Config struct {
	url      string             // RSS feed endpoint to poll (rssmon tag feed)
	vpntor   string             // Transmission RPC endpoint URL
	outdir   string             // download-dir passed to torrent-add
	interval time.Duration      // how often to poll the feed
	last     time.Time          // time of the last completed pull
	db       storage.DB         // store of already-handled item links (dedup)
	ctx      context.Context    // cancels mainLoop's blocking wait
	can      context.CancelFunc // cancels ctx; NOTE(review): no visible caller invokes it
}
// main loads the configuration, logs it, and runs the poll loop forever,
// panicking on any unrecoverable error.
func main() {
	cfg, err := config()
	if err != nil {
		panic(err)
	}
	log.Println(cfg)
	for {
		err := mainLoop(cfg)
		if err != nil {
			panic(err)
		}
	}
}
// mainLoop blocks until the next poll is due (or the context is cancelled),
// performs a single feed pull, and records the pull time. A failed pull is
// logged and swallowed; only context cancellation surfaces as an error.
func mainLoop(config *Config) error {
	wait := config.interval - time.Since(config.last)
	log.Printf("Blocking %v", wait)
	select {
	case <-config.ctx.Done():
		return config.ctx.Err()
	case <-time.After(wait):
		err := pull(config.db, config.vpntor, config.outdir, config.url)
		if err != nil {
			log.Println(err)
		}
		config.last = time.Now()
	}
	return nil
}
// config parses command-line flags and builds the handler Config, including
// the dedup storage backend and a cancellable context for mainLoop.
//
// Returns a nil Config alongside any error; callers must check the error
// before using the result.
func config() (*Config, error) {
	as := args.NewArgSet()
	as.Append(args.STRING, "url", "url of rss feed", "http://192.168.0.86:33419/api/tag/torrent")
	as.Append(args.STRING, "vpntor", "url of vpntor", "http://192.168.0.86:9091/transmission/rpc")
	as.Append(args.DURATION, "interval", "interval to check feed", "30m")
	as.Append(args.STRING, "outdir", "save dir", "/data/completed-rss")
	as.Append(args.STRING, "db", "db type", "map")
	as.Append(args.STRING, "addr", "db addr", "")
	as.Append(args.STRING, "user", "db user", "")
	as.Append(args.STRING, "pass", "db pass", "")
	if err := as.Parse(); err != nil {
		return nil, err
	}
	db, err := storage.New(
		storage.TypeFromString(as.Get("db").GetString()),
		as.Get("addr").GetString(),
		as.Get("user").GetString(),
		as.Get("pass").GetString(),
	)
	if err != nil {
		// Was a panic; surface the failure to the caller like every other
		// error path in this function instead of crashing here.
		return nil, fmt.Errorf("creating storage: %v", err)
	}
	ctx, can := context.WithCancel(context.Background())
	return &Config{
		url:      as.Get("url").GetString(),
		vpntor:   as.Get("vpntor").GetString(),
		interval: as.Get("interval").GetDuration(),
		outdir:   as.Get("outdir").GetString(),
		db:       db,
		ctx:      ctx,
		can:      can,
	}, nil
}
// pull fetches the RSS feed at url and, for every item whose link is not yet
// recorded in db, extracts and submits its magnet links to the Transmission
// RPC endpoint (vpntor), then marks the item done by storing its link.
//
// Fix: the local variable was named `gofeed`, shadowing the imported gofeed
// package within the loop; renamed to `feed`.
func pull(db storage.DB, vpntor, outdir, url string) error {
	feed, err := getGoFeed(url)
	if err != nil {
		return err
	}
	log.Printf("feed: %v", feed.Title)
	for _, item := range feed.Items {
		if ok, err := isDone(db, item.Link); err != nil {
			return err
		} else if ok {
			continue
		}
		s, err := getItemContent(item)
		if err != nil {
			return err
		}
		if err := handle(vpntor, outdir, s); err != nil {
			return err
		}
		// Record the item only after it was handled successfully, so a
		// failed submit is retried on the next pull.
		if err := db.Set(item.Link, []byte{}); err != nil {
			return err
		}
	}
	return nil
}
// getGoFeed downloads the document at url and parses it as an RSS/Atom feed.
func getGoFeed(url string) (*gofeed.Feed, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	parser := gofeed.NewParser()
	return parser.Parse(resp.Body)
}
// getItemContent returns the best available content for a feed item, in
// priority order: the item's Description, then its Content, and finally the
// raw body fetched from the item's Link.
func getItemContent(item *gofeed.Item) (string, error) {
	if item.Description != "" {
		return item.Description, nil
	}
	if item.Content != "" {
		return item.Content, nil
	}
	resp, err := http.Get(item.Link)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	b, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return "", err
	}
	return string(b), nil
}
// isDone reports whether url has already been recorded in db. Any storage
// error other than "not found" is returned with ok=true so callers abort
// rather than re-handle the item.
func isDone(db storage.DB, url string) (bool, error) {
	_, err := db.Get(url)
	switch err {
	case storage.ErrNotFound:
		return false, nil
	default:
		return true, err
	}
}
// handle extracts every magnet link from content and submits each one to the
// Transmission RPC endpoint, failing fast on the first submit or response
// error.
//
// Fix: findMagnets was called twice (once for the log line, once for the
// loop), scanning the content with the regex twice; compute it once.
func handle(vpntor, outdir, content string) error {
	magnets := findMagnets(content)
	log.Printf("magnets: %v", magnets)
	for _, magnet := range magnets {
		resp, err := submit(vpntor, outdir, magnet)
		if err != nil {
			return err
		}
		// succeeded closes resp.Body.
		if err := succeeded(resp.Body); err != nil {
			return err
		}
	}
	return nil
}
// magnetRegexp matches a magnet URI from "magnet:" up to (excluding) the
// first space, double quote, or literal '$' character — inside a character
// class '$' is literal, not end-of-line. The '.' after "magnet:" matches the
// '?' of "magnet:?xt".
//
// Fix: compiled once at package level instead of on every call.
var magnetRegexp = regexp.MustCompile(`magnet:.xt[^ $"]*`)

// findMagnets returns every magnet link found in s, in order of appearance
// (nil when there are none).
func findMagnets(s string) []string {
	return magnetRegexp.FindAllString(s, -1)
}
// submit POSTs a torrent-add request for magnet to the Transmission RPC
// endpoint at vpntor, first acquiring the CSRF session id the server
// requires on mutating requests. The caller owns the returned response body.
func submit(vpntor, outdir, magnet string) (*http.Response, error) {
	session, err := getSessionID(vpntor)
	if err != nil {
		return nil, err
	}
	body := buildReqBody(outdir, magnet)
	req, err := http.NewRequest("POST", vpntor, body)
	if err != nil {
		return nil, err
	}
	req.Header.Add(sessionHeader, session)
	client := &http.Client{}
	return client.Do(req)
}
func succeeded(body io.ReadCloser) error {
defer body.Close()
b, err := ioutil.ReadAll(body)
if err != nil {
return err
}
var result struct {
Result string `json:"result"`
}
if err := json.Unmarshal(b, &result); err != nil {
return err
}
if result.Result != "success" {
return fmt.Errorf("denied: %s", b)
}
return nil
}
func buildReqBody(outdir, magnet string) io.Reader {
return strings.NewReader(fmt.Sprintf(`
{
"method": "torrent-add",
"arguments": {
"filename": %q,
"download-dir": %q
}
}
`, magnet, outdir))
}
// getSessionID performs a throwaway GET against the Transmission RPC
// endpoint and returns the session id the server hands back in its
// X-Transmission-Session-Id response header.
func getSessionID(vpntor string) (string, error) {
	resp, err := http.Get(vpntor)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	id := resp.Header.Get(sessionHeader)
	if id != "" {
		return id, nil
	}
	return "", errors.New("session id header not found")
}

View File

@@ -0,0 +1,250 @@
package main
import (
"context"
"encoding/json"
"errors"
"fmt"
"io"
"local/storage"
"net/http"
"net/http/httptest"
"os"
"strings"
"testing"
"time"
"github.com/mmcdole/gofeed"
)
// stringReaderCloser adapts a plain io.Reader into an io.ReadCloser whose
// Close is a no-op, for feeding canned strings to succeeded().
type stringReaderCloser struct {
	io.Reader
}

// Close satisfies io.Closer; there is nothing to release.
func (src stringReaderCloser) Close() error {
	return nil
}

// mockReadClose wraps s in a ReadCloser with a no-op Close.
func mockReadClose(s string) io.ReadCloser {
	return stringReaderCloser{Reader: strings.NewReader(s)}
}
// fakeRSSServer returns an httptest server that serves a minimal one-item
// RSS 2.0 feed on every request. The item description embeds three
// magnet-like tokens (two bare, one inside an href) plus an enclosure, so
// tests can exercise magnet extraction and content handling. Callers must
// Close the server.
func fakeRSSServer() *httptest.Server {
	return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte(`
<?xml version="1.0" encoding="UTF-8" ?>
<rss version="2.0">
<channel>
<item>
<title>Item Title</title>
<link>https://roosterteeth.com/episode/rooster-teeth-podcast-2019-549</link>
<pubDate>Tue, 18 Jun 2019 19:00:00 +0000</pubDate>
<description>Gavin Free discuss raditation, toilet paper face, Chris's continued haircuts, and more on this week's RT Podcast! magnet:-xt1 magnet:-xt2 <a href="magnet:-xt3">link</a></description>
<enclosure url="http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/roosterteethpodcast/Rooster_Teeth_Podcast_549.mp3" type="audio/mpeg" />
</item>
</channel>
</rss>
`))
	}))
}
// TestMainLoopCtx verifies that mainLoop returns the cancellation error when
// its context is cancelled before the poll interval elapses.
func TestMainLoopCtx(t *testing.T) {
	ctx, cancel := context.WithCancel(context.Background())
	cancel()
	cfg := &Config{
		interval: time.Hour,
		ctx:      ctx,
	}
	err := mainLoop(cfg)
	if err == nil || !strings.Contains(err.Error(), "cancel") {
		t.Fatal(err)
	}
}
// TestConfig checks that config() succeeds with default flags and fails for
// an unparsable -interval value. Stderr is pointed at /dev/null while the
// bad flag is parsed so flag-usage output stays quiet; os.Args and os.Stderr
// are restored afterwards.
//
// Fixes: the /dev/null handle was leaked (os.Open error also unchecked), and
// the failure branch called t.Fatal(err) with a guaranteed-nil err.
func TestConfig(t *testing.T) {
	was := os.Args[:]
	defer func() {
		os.Args = was
	}()
	os.Args = []string{"a"}
	if _, err := config(); err != nil {
		t.Fatal(err)
	}
	os.Args = []string{"a", "-interval", "not a duration"}
	stderr := os.Stderr
	f, err := os.Open("/dev/null")
	if err != nil {
		t.Fatal(err)
	}
	defer f.Close()
	os.Stderr = f
	defer func() {
		os.Stderr = stderr
	}()
	if _, err := config(); err == nil {
		t.Fatal("expected error for bad -interval")
	}
}
// TestGetGoFeed confirms the fake RSS feed downloads and parses into exactly
// one item.
func TestGetGoFeed(t *testing.T) {
	srv := fakeRSSServer()
	defer srv.Close()
	feed, err := getGoFeed(srv.URL)
	if err != nil {
		t.Fatal(err)
	}
	if got := len(feed.Items); got != 1 {
		t.Fatal(got)
	}
}
// TestGetItemContent covers the three content sources in priority order:
// Description wins over Content, Content is used when Description is empty,
// and the item Link is fetched when both are empty.
func TestGetItemContent(t *testing.T) {
	// Stand-in for the item's Link target in the fallback case.
	s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte(`Hello`))
	}))
	defer s.Close()
	cases := []struct {
		item gofeed.Item
		body string // expected content returned for the item
		err  error  // expected error (compared by formatted string)
	}{
		{
			// Description takes precedence over Content.
			item: gofeed.Item{
				Description: "hi",
				Content:     "hi2",
			},
			body: "hi",
			err:  nil,
		},
		{
			// Content is used when Description is empty.
			item: gofeed.Item{
				Content: "hi2",
			},
			body: "hi2",
			err:  nil,
		},
		{
			// With neither set, the Link body is fetched.
			item: gofeed.Item{
				Link: s.URL,
			},
			body: "Hello",
			err:  nil,
		},
	}
	for i, c := range cases {
		body, err := getItemContent(&c.item)
		// Compare errors via their string form so nil == nil and messages
		// must match exactly.
		cerrS := fmt.Sprintf("%v", c.err)
		errS := fmt.Sprintf("%v", err)
		if cerrS != errS {
			t.Errorf("[%d] unexpected err %v, want %v", i, err, c.err)
		}
		if body != c.body {
			t.Errorf("[%d] unexpected body %v, want %v", i, body, c.body)
		}
	}
}
// TestIsDone verifies isDone against an in-memory store: a stored key is
// done, an unknown key is not.
func TestIsDone(t *testing.T) {
	db, _ := storage.New(storage.MAP)
	db.Set("a", []byte("hi"))
	done, err := isDone(db, "a")
	if err != nil {
		t.Fatal(err)
	}
	if !done {
		t.Fatal(done)
	}
	done, err = isDone(db, "b")
	if err != nil {
		t.Fatal(err)
	}
	if done {
		t.Fatal(done)
	}
}
// TestGetSessionID checks that the session id is extracted from the
// server's response header.
func TestGetSessionID(t *testing.T) {
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Add(sessionHeader, "id")
		w.Write([]byte(`Hello`))
	}))
	defer srv.Close()
	session, err := getSessionID(srv.URL)
	if err != nil {
		t.Fatal(err)
	}
	if session != "id" {
		t.Fatal(session)
	}
}
// TestBuildReqBody decodes the generated request body and checks the RPC
// method and arguments round-trip correctly. (The decoded struct is named
// got — it holds the actual output, not the expectation.)
func TestBuildReqBody(t *testing.T) {
	var got struct {
		Method    string `json:"method"`
		Arguments struct {
			Filename    string `json:"filename"`
			DownloadDir string `json:"download-dir"`
		} `json:"arguments"`
	}
	body := buildReqBody("out", "mag")
	if err := json.NewDecoder(body).Decode(&got); err != nil {
		t.Fatal(err)
	}
	if got.Method != "torrent-add" {
		t.Fatal(got.Method)
	}
	if got.Arguments.Filename != "mag" {
		t.Fatal(got.Arguments.Filename)
	}
	if got.Arguments.DownloadDir != "out" {
		t.Fatal(got.Arguments.DownloadDir)
	}
}
// TestSucceeded drives succeeded with a success payload, malformed JSON, and
// a non-success result, comparing errors by their exact formatted string.
// NOTE(review): the expected JSON error text is tied to the stdlib
// encoding/json message and may break across Go versions.
func TestSucceeded(t *testing.T) {
	cases := []struct {
		s   string // raw response body fed to succeeded
		err error  // expected error (string-compared; nil means success)
	}{
		{
			s:   `{"result":"success"}`,
			err: nil,
		},
		{
			s:   `this isnt json`,
			err: errors.New("invalid character 'h' in literal true (expecting 'r')"),
		},
		{
			s:   `{"result":"failure"}`,
			err: errors.New(`denied: {"result":"failure"}`),
		},
	}
	for i, c := range cases {
		err := succeeded(mockReadClose(c.s))
		// String-compare so nil matches nil and messages must match exactly.
		cerrS := fmt.Sprintf("%v", c.err)
		errS := fmt.Sprintf("%v", err)
		if cerrS != errS {
			t.Errorf("[%d] unexpected err %v, want %v", i, err, c.err)
		}
	}
}
// TestFindMagnets checks that all three magnet tokens (two bare, one inside
// an href attribute) are extracted from a sample string.
func TestFindMagnets(t *testing.T) {
	cases := []struct {
		s string // input text
		l int    // expected number of magnets found
	}{
		{
			s: `here is some magnet:-xt1 and magnet:-xt2 another one <a href="magnet:-xt3">link</a>`,
			l: 3,
		},
	}
	for i, c := range cases {
		got := findMagnets(c.s)
		if len(got) != c.l {
			t.Errorf("[%d] found %v magnets, want %v", i, len(got), c.l)
		}
	}
}

View File

@@ -20,7 +20,7 @@ func main() {
if err := config.New(); err != nil { if err := config.New(); err != nil {
panic(err) panic(err)
} }
log.Println(config.Values()) log.Println("[main]", config.Values())
m, err := monitor.New() m, err := monitor.New()
if err != nil { if err != nil {
@@ -50,7 +50,7 @@ func main() {
func InterruptAfter(foo func() error, c chan os.Signal) { func InterruptAfter(foo func() error, c chan os.Signal) {
if err := foo(); err != nil { if err := foo(); err != nil {
log.Println(err) log.Println("[InterruptAfter]", err)
} }
c <- syscall.SIGINT c <- syscall.SIGINT
} }

View File

@@ -51,7 +51,7 @@ func (i *Item) Compare(other queue.Item) int {
func (i *Item) Interval() time.Duration { func (i *Item) Interval() time.Duration {
t, err := i.getInterval() t, err := i.getInterval()
if err != nil { if err != nil {
log.Println(err) log.Println("[Interval]", err)
return forever return forever
} }
return t return t
@@ -81,14 +81,14 @@ func (i *Item) setInterval(t time.Duration) error {
func (i *Item) Last() time.Time { func (i *Item) Last() time.Time {
t, err := i.getLast() t, err := i.getLast()
if err != nil { if err != nil {
log.Println(err) log.Println("[Last]", err)
} }
return t return t
} }
func (i *Item) Mark() { func (i *Item) Mark() {
if err := i.setLast(time.Now()); err != nil { if err := i.setLast(time.Now()); err != nil {
log.Println(err) log.Println("[Mark]", err)
} }
} }

View File

@@ -53,7 +53,7 @@ func (m *Monitor) triggered() <-chan time.Time {
return nil return nil
} }
block := time.Until(top.Last().Add(top.Interval())) block := time.Until(top.Last().Add(top.Interval()))
log.Printf("blocking %v until next task", block) log.Printf("[triggered] blocking %v until next task", block)
return time.After(time.Until(top.Last().Add(top.Interval()))) return time.After(time.Until(top.Last().Add(top.Interval())))
} }

View File

@@ -1,7 +1,9 @@
package monitor package monitor
import ( import (
"fmt"
"local/rssmon3/config" "local/rssmon3/config"
"local/storage"
"log" "log"
"github.com/golang-collections/go-datastructures/queue" "github.com/golang-collections/go-datastructures/queue"
@@ -17,13 +19,15 @@ func newQueue() (*Queue, error) {
q := queue.NewPriorityQueue(1) q := queue.NewPriorityQueue(1)
db := config.Values().DB db := config.Values().DB
keys, err := db.List([]string{nsQueued}) keys, err := db.List([]string{nsQueued})
if err != nil { if err == storage.ErrNotFound {
keys = []string{}
} else if err != nil {
return nil, err return nil, err
} }
for _, key := range keys { for _, key := range keys {
b, err := db.Get(key, nsQueued) b, err := db.Get(key, nsQueued)
if err != nil { if err != nil {
return nil, err return nil, fmt.Errorf("failed to get %s:%s: %v", nsQueued, key, err)
} }
i := &Item{} i := &Item{}
if err := i.Decode(b); err != nil { if err := i.Decode(b); err != nil {
@@ -43,11 +47,11 @@ func (q *Queue) Push(i *Item) {
q.queue.Put(i) q.queue.Put(i)
b, err := i.Encode() b, err := i.Encode()
if err != nil { if err != nil {
log.Println(err) log.Println("[Push]", err)
return return
} }
if err := config.Values().DB.Set(i.Key, b, nsQueued); err != nil { if err := config.Values().DB.Set(i.Key, b, nsQueued); err != nil {
log.Println(err) log.Println("[Push]", err)
return return
} }
} }

View File

@@ -74,20 +74,20 @@ func (f *Feed) Pull() error {
for _, i := range gofeed.Items { for _, i := range gofeed.Items {
item, err := newItem(i, f.ContentFilter) item, err := newItem(i, f.ContentFilter)
if err != nil { if err != nil {
log.Println(err) log.Println("[Pull]", err)
continue continue
} }
itemTSs = append(itemTSs, &item.TS) itemTSs = append(itemTSs, &item.TS)
if item.TS.Before(f.Updated) { if item.TS.Before(f.Updated) {
log.Println("Skipping old item") log.Println("[Pull]", "Skipping old item")
continue continue
} }
if ok := regexp.MustCompile(f.TitleFilter).MatchString(item.Title); !ok { if ok := regexp.MustCompile(f.TitleFilter).MatchString(item.Title); !ok {
log.Println("Skipping bad titled item") log.Println("[Pull]", "Skipping bad titled item")
continue continue
} }
if err := item.save(f.Key); err != nil { if err := item.save(f.Key); err != nil {
log.Println(err) log.Println("[Pull]", err)
continue continue
} }
} }

View File

@@ -47,6 +47,9 @@ func newItem(i *gofeed.Item, contentFilter string) (*Item, error) {
} }
item.Content = string(b) item.Content = string(b)
} }
for _, enclosure := range i.Enclosures {
item.Content += fmt.Sprintf(`<br><a href="%s">%s</a>`, enclosure.URL, enclosure.URL)
}
if unescaped, err := url.QueryUnescape(item.Content); err == nil { if unescaped, err := url.QueryUnescape(item.Content); err == nil {
item.Content = unescaped item.Content = unescaped
} }

View File

@@ -24,7 +24,7 @@ func (rss *RSS) Run() error {
return nil return nil
case i := <-rss.items: case i := <-rss.items:
if err := rss.update(i); err != nil { if err := rss.update(i); err != nil {
log.Println(err) log.Println("[rss.Run]", err)
} }
} }
} }

View File

@@ -1 +0,0 @@
49c1645f536cea7463202894e751295a3ec589f1

View File

@@ -35,13 +35,13 @@ func (s *Server) notFound(w http.ResponseWriter, r *http.Request) {
func (s *Server) userError(w http.ResponseWriter, r *http.Request, err error) { func (s *Server) userError(w http.ResponseWriter, r *http.Request, err error) {
status := http.StatusBadRequest status := http.StatusBadRequest
log.Printf("%d: %v", status, err) log.Printf("[userError] %d: %v", status, err)
w.WriteHeader(status) w.WriteHeader(status)
} }
func (s *Server) error(w http.ResponseWriter, r *http.Request, err error) { func (s *Server) error(w http.ResponseWriter, r *http.Request, err error) {
status := http.StatusInternalServerError status := http.StatusInternalServerError
log.Printf("%d: %v", status, err) log.Printf("[error] %d: %v", status, err)
w.WriteHeader(status) w.WriteHeader(status)
} }

68
testdata/2add vendored
View File

@@ -1,68 +0,0 @@
#! /bin/bash
set -e
remote="${remote:-192.168.0.86}"
feeds='
http://lizclimo.tumblr.com/rss comic na na "24h"
https://mangadex.org/rss/follows/ryZGFkVgv9DAPqf45EsTHuQcxmteKBN6 manga na na "1h"
https://siftrss.com/f/WXPo6AKQLX manga na na "1h"
https://xkcd.com/atom.xml comic na na "24h"
http://yaoi-blcd.tumblr.com/tagged/19-days/rss manga "Old.Xian.update.of..19" na "4h"
http://www.shanaproject.com/feeds/secure/user/5310/5MUGNU9B3I/ anime na na "24h"
http://www.manga.megchan.com/blog/?feed=rss2 manga na na "12h"
https://siftrss.com/f/6Q8a0ZK0RmL manga na na "12h"
https://dropoutmanga.wordpress.com/feed/ manga "(Hatsukoi|Tomo.chan)" na "6h"
http://merakiscans.com/manga-rss/solo-leveling/ manga na na "8h"
http://www.mangago.me/r/rsslink/a_man_like_you.xml manga na na "24h"
http://www.mangago.me/r/rsslink/unriped_expression.xml manga na na "24h"
http://www.mangago.me/r/rsslink/heesu_in_class_2.xml manga na na "24h"
http://www.mangago.me/r/rsslink/bj_alex.xml manga na na "24h"
https://heroscans.tumblr.com/rss manga "(Saizu)" na "24h"
http://feeds.pinboard.in/rss/popular/ news ".*" ".*" "24h"
https://www.vox.com/rss/index.xml news ".*" ".*" "24h"
https://www.techdirt.com/techdirt_rss.xml news ".*" ".*" "24h"
http://hotdiggedydemon.com/ comic ".*" ".*" "24h"
https://nyaa.si/?page=rss&u=Nauti anime "(akegurui.*XX)" ".*Anime...English.translated.*" "3h"
https://tapas.io/rss/series/72832 manga na na "24h"
https://tapas.io/rss/series/88570 manga na na "24h"
https://nyaa.si/?page=rss&q=kakegurui&c=0_0&f=0&u=Nauti anime "(akegurui.XX)" ".*Anime...English.translated.*" "3h"
https://nyaa.si/?page=rss&q=horriblesubs+mob+psycho+720&c=0_0&f=0 anime ".*" ".*Anime...English.translated.*" "3h"
https://www.youtube.com/feeds/videos.xml?channel_id=UC9CuvdOVfMPvKCiwdGKL3cQ youtube","gampegrumps ".*" ".*" "6h"
https://www.youtube.com/feeds/videos.xml?channel_id=UCbGO1tCyjMlzqIT-tyrJNqQ youtube","kshaway ".*" ".*" "6h"
https://www.youtube.com/feeds/videos.xml?channel_id=UC9YLd0REiXxLqQQH_CpJKZQ youtube","instalok ".*" ".*" "6h"
https://www.youtube.com/feeds/videos.xml?channel_id=UCsvn_Po0SmunchJYOWpOxMg youtube","videogamedunkey ".*" ".*" "6h"
https://eztv.ag/ezrss.xml momma","torrent "^(Wheel.[oO]f.Fortune.[S2]|Jeopardy.20|Gold.Rush.*720.*WEB|Survivor.S.*720p|Gold.Rush.*S.*E.*720.*WEB|Archer|Pointless.*720p.*WEB|Mock.The.Week.*480p|8.Out.Of.10.Cats.*480p|Masterchef.*Australia.*720p|Price.[iI]s.[rR]ight|Let.*[mM]ake.[aA].[dD]eal)" "<a.*?href=.magnet:.*?</a>" "2h"
https://www.youtube.com/feeds/videos.xml?channel_id=UCs-w7E2HZWwXmjt9RTvBB_A youtube","settled ".*" ".*" "6h"
https://roosterteeth.com/show/rt-podcast/feed/mp3 podcast ".*" ".*" "6h"
'
feedsB="$(printf "%s\n" "$feeds" | grep -v '^$' | tail -n 1)"
#feeds="$feedsB"
defaultIF='""'
defaultCF='"<img.*?/(img)?>"'
defaultIN='"6h"'
feeds="${feeds// /,,}"
for feed in $feeds; do
feed="${feed//,,/ }"
link="$(echo "$feed" | awk '{print $1}')"
tag="$(echo "$feed" | awk '{print $2}')"
if="$(echo "$feed" | awk '{print $3}')"
cf="$(echo "$feed" | awk '{print $4}')"
in="$(echo "$feed" | awk '{print $5}')"
if [ "$if" == "na" ]; then
if="$defaultIF"
fi
if [ "$cf" == "na" ]; then
cf="$defaultCF"
fi
if [ "$in" == "na" ]; then
in="$defaultIN"
fi
cmd="curl ${PROXY:-} -i ${remote}:33419/api/feed -X PUT --data '{\"url\":\"$link\", \"refresh\":$in, \"items\":$if, \"content\":$cf, \"tags\":[\"$tag\"]}'"
echo "$cmd" >&2
eval "$cmd"
done

7
testdata/2do vendored
View File

@@ -1,7 +0,0 @@
* implement torrent+podcast handlers
* fetch <img> and embed?
x render tags -> feeds for server
x load feed jobs on startup and queue
x push new jobs/queue items from server
x implement endpoints for server

6
vendor/vendor.json vendored Normal file
View File

@@ -0,0 +1,6 @@
{
"comment": "",
"ignore": "test",
"package": [],
"rootPath": "local/rssmon3"
}