Compare commits
6 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e24cb20f3b | ||
|
|
8f3d0c2077 | ||
|
|
8d353b69f4 | ||
|
|
a2ea6d7756 | ||
|
|
9dc8eaf285 | ||
|
|
21d5bad2a5 |
2
.gitignore
vendored
2
.gitignore
vendored
@@ -1,3 +1,5 @@
|
||||
rssmon3
|
||||
exec-rssmon3
|
||||
**.sw*
|
||||
**/testdata
|
||||
**/._*
|
||||
|
||||
@@ -41,12 +41,5 @@ func (h *Handler) Job(key string) error {
|
||||
if err := f.Pull(); err != nil {
|
||||
return err
|
||||
}
|
||||
for _, tag := range f.Tags {
|
||||
if foo := ByTag(tag); foo != nil {
|
||||
if err := foo(key); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -1,12 +0,0 @@
|
||||
package handlers
|
||||
|
||||
func ByTag(tag string) func(string) error {
|
||||
var foo func(string) error
|
||||
switch tag {
|
||||
case "torrent":
|
||||
foo = torrent
|
||||
case "podcast":
|
||||
foo = podcast
|
||||
}
|
||||
return foo
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
package handlers
|
||||
|
||||
import "errors"
|
||||
|
||||
// podcast is the stub handler for podcast-tagged feed items; it is
// not implemented yet and always fails.
func podcast(key string) error {
	return errors.New("not impl")
}
|
||||
@@ -1,7 +0,0 @@
|
||||
package handlers
|
||||
|
||||
import "errors"
|
||||
|
||||
// torrent is the stub handler for torrent-tagged feed items; it is
// not implemented yet and always fails.
func torrent(key string) error {
	return errors.New("not impl")
}
|
||||
237
handlers/torrent/main.go
Normal file
237
handlers/torrent/main.go
Normal file
@@ -0,0 +1,237 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"local/args"
|
||||
"local/storage"
|
||||
"log"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/mmcdole/gofeed"
|
||||
)
|
||||
|
||||
const sessionHeader = "X-Transmission-Session-Id"
|
||||
|
||||
type Config struct {
|
||||
url string
|
||||
vpntor string
|
||||
outdir string
|
||||
interval time.Duration
|
||||
last time.Time
|
||||
db storage.DB
|
||||
ctx context.Context
|
||||
can context.CancelFunc
|
||||
}
|
||||
|
||||
func main() {
|
||||
config, err := config()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
log.Println(config)
|
||||
|
||||
for {
|
||||
if err := mainLoop(config); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func mainLoop(config *Config) error {
|
||||
block := config.interval - time.Since(config.last)
|
||||
log.Printf("Blocking %v", block)
|
||||
select {
|
||||
case <-time.After(block):
|
||||
if err := pull(config.db, config.vpntor, config.outdir, config.url); err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
config.last = time.Now()
|
||||
case <-config.ctx.Done():
|
||||
if err := config.ctx.Err(); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func config() (*Config, error) {
|
||||
as := args.NewArgSet()
|
||||
as.Append(args.STRING, "url", "url of rss feed", "http://192.168.0.86:33419/api/tag/torrent")
|
||||
as.Append(args.STRING, "vpntor", "url of vpntor", "http://192.168.0.86:9091/transmission/rpc")
|
||||
as.Append(args.DURATION, "interval", "interval to check feed", "30m")
|
||||
as.Append(args.STRING, "outdir", "save dir", "/data/completed-rss")
|
||||
as.Append(args.STRING, "db", "db type", "map")
|
||||
as.Append(args.STRING, "addr", "db addr", "")
|
||||
as.Append(args.STRING, "user", "db user", "")
|
||||
as.Append(args.STRING, "pass", "db pass", "")
|
||||
if err := as.Parse(); err != nil {
|
||||
return &Config{}, err
|
||||
}
|
||||
|
||||
db, err := storage.New(
|
||||
storage.TypeFromString(as.Get("db").GetString()),
|
||||
as.Get("addr").GetString(),
|
||||
as.Get("user").GetString(),
|
||||
as.Get("pass").GetString(),
|
||||
)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
ctx, can := context.WithCancel(context.Background())
|
||||
return &Config{
|
||||
url: as.Get("url").GetString(),
|
||||
vpntor: as.Get("vpntor").GetString(),
|
||||
interval: as.Get("interval").GetDuration(),
|
||||
outdir: as.Get("outdir").GetString(),
|
||||
db: db,
|
||||
ctx: ctx,
|
||||
can: can,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func pull(db storage.DB, vpntor, outdir, url string) error {
|
||||
gofeed, err := getGoFeed(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
log.Printf("feed: %v", gofeed.Title)
|
||||
for _, item := range gofeed.Items {
|
||||
if ok, err := isDone(db, item.Link); err != nil {
|
||||
return err
|
||||
} else if ok {
|
||||
continue
|
||||
}
|
||||
s, err := getItemContent(item)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := handle(vpntor, outdir, s); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := db.Set(item.Link, []byte{}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func getGoFeed(url string) (*gofeed.Feed, error) {
|
||||
resp, err := http.Get(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
return gofeed.NewParser().Parse(resp.Body)
|
||||
}
|
||||
|
||||
func getItemContent(item *gofeed.Item) (string, error) {
|
||||
s := item.Description
|
||||
if s == "" {
|
||||
s = item.Content
|
||||
}
|
||||
if s == "" {
|
||||
resp, err := http.Get(item.Link)
|
||||
if err != nil {
|
||||
return s, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
b, err := ioutil.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return s, err
|
||||
}
|
||||
s = string(b)
|
||||
}
|
||||
return s, nil
|
||||
}
|
||||
|
||||
func isDone(db storage.DB, url string) (bool, error) {
|
||||
_, err := db.Get(url)
|
||||
if err == storage.ErrNotFound {
|
||||
return false, nil
|
||||
}
|
||||
return true, err
|
||||
}
|
||||
|
||||
func handle(vpntor, outdir, content string) error {
|
||||
log.Printf("magnets: %v", findMagnets(content))
|
||||
for _, magnet := range findMagnets(content) {
|
||||
resp, err := submit(vpntor, outdir, magnet)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := succeeded(resp.Body); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// magnetRegexp matches a magnet URI from "magnet:" up to (not
// including) the first space, '$', or double quote. Compiled once at
// package scope; the original recompiled it on every call.
var magnetRegexp = regexp.MustCompile(`magnet:.xt[^ $"]*`)

// findMagnets returns every magnet link found in s, in order of
// appearance (nil when there are none).
func findMagnets(s string) []string {
	return magnetRegexp.FindAllString(s, -1)
}
|
||||
|
||||
func submit(vpntor, outdir, magnet string) (*http.Response, error) {
|
||||
session, err := getSessionID(vpntor)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req, err := http.NewRequest("POST", vpntor, buildReqBody(outdir, magnet))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.Header.Add(sessionHeader, session)
|
||||
return (&http.Client{}).Do(req)
|
||||
}
|
||||
|
||||
func succeeded(body io.ReadCloser) error {
|
||||
defer body.Close()
|
||||
b, err := ioutil.ReadAll(body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var result struct {
|
||||
Result string `json:"result"`
|
||||
}
|
||||
if err := json.Unmarshal(b, &result); err != nil {
|
||||
return err
|
||||
}
|
||||
if result.Result != "success" {
|
||||
return fmt.Errorf("denied: %s", b)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func buildReqBody(outdir, magnet string) io.Reader {
|
||||
return strings.NewReader(fmt.Sprintf(`
|
||||
{
|
||||
"method": "torrent-add",
|
||||
"arguments": {
|
||||
"filename": %q,
|
||||
"download-dir": %q
|
||||
}
|
||||
}
|
||||
`, magnet, outdir))
|
||||
}
|
||||
|
||||
func getSessionID(vpntor string) (string, error) {
|
||||
resp, err := http.Get(vpntor)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
id := resp.Header.Get(sessionHeader)
|
||||
if id == "" {
|
||||
err = errors.New("session id header not found")
|
||||
}
|
||||
return id, err
|
||||
}
|
||||
250
handlers/torrent/main_test.go
Normal file
250
handlers/torrent/main_test.go
Normal file
@@ -0,0 +1,250 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"local/storage"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/mmcdole/gofeed"
|
||||
)
|
||||
|
||||
type stringReaderCloser struct {
|
||||
io.Reader
|
||||
}
|
||||
|
||||
func mockReadClose(s string) io.ReadCloser {
|
||||
reader := strings.NewReader(s)
|
||||
return stringReaderCloser{Reader: reader}
|
||||
}
|
||||
|
||||
func (src stringReaderCloser) Close() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func fakeRSSServer() *httptest.Server {
|
||||
return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Write([]byte(`
|
||||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<rss version="2.0">
|
||||
<channel>
|
||||
<item>
|
||||
<title>Item Title</title>
|
||||
<link>https://roosterteeth.com/episode/rooster-teeth-podcast-2019-549</link>
|
||||
<pubDate>Tue, 18 Jun 2019 19:00:00 +0000</pubDate>
|
||||
<description>Gavin Free discuss raditation, toilet paper face, Chris's continued haircuts, and more on this week's RT Podcast! magnet:-xt1 magnet:-xt2 <a href="magnet:-xt3">link</a></description>
|
||||
<enclosure url="http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/roosterteethpodcast/Rooster_Teeth_Podcast_549.mp3" type="audio/mpeg" />
|
||||
</item>
|
||||
</channel>
|
||||
</rss>
|
||||
`))
|
||||
}))
|
||||
}
|
||||
|
||||
func TestMainLoopCtx(t *testing.T) {
|
||||
ctx, can := context.WithCancel(context.Background())
|
||||
can()
|
||||
c := &Config{
|
||||
interval: time.Hour,
|
||||
ctx: ctx,
|
||||
}
|
||||
if err := mainLoop(c); err == nil || !strings.Contains(err.Error(), "cancel") {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfig(t *testing.T) {
|
||||
was := os.Args[:]
|
||||
defer func() {
|
||||
os.Args = was
|
||||
}()
|
||||
os.Args = []string{"a"}
|
||||
if _, err := config(); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
os.Args = []string{"a", "-interval", "not a duration"}
|
||||
stderr := os.Stderr
|
||||
f, _ := os.Open("/dev/null")
|
||||
os.Stderr = f
|
||||
defer func() {
|
||||
os.Stderr = stderr
|
||||
}()
|
||||
if _, err := config(); err == nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetGoFeed(t *testing.T) {
|
||||
s := fakeRSSServer()
|
||||
defer s.Close()
|
||||
|
||||
f, err := getGoFeed(s.URL)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if len(f.Items) != 1 {
|
||||
t.Fatal(len(f.Items))
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetItemContent(t *testing.T) {
|
||||
s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Write([]byte(`Hello`))
|
||||
}))
|
||||
defer s.Close()
|
||||
|
||||
cases := []struct {
|
||||
item gofeed.Item
|
||||
body string
|
||||
err error
|
||||
}{
|
||||
{
|
||||
item: gofeed.Item{
|
||||
Description: "hi",
|
||||
Content: "hi2",
|
||||
},
|
||||
body: "hi",
|
||||
err: nil,
|
||||
},
|
||||
{
|
||||
item: gofeed.Item{
|
||||
Content: "hi2",
|
||||
},
|
||||
body: "hi2",
|
||||
err: nil,
|
||||
},
|
||||
{
|
||||
item: gofeed.Item{
|
||||
Link: s.URL,
|
||||
},
|
||||
body: "Hello",
|
||||
err: nil,
|
||||
},
|
||||
}
|
||||
|
||||
for i, c := range cases {
|
||||
body, err := getItemContent(&c.item)
|
||||
cerrS := fmt.Sprintf("%v", c.err)
|
||||
errS := fmt.Sprintf("%v", err)
|
||||
if cerrS != errS {
|
||||
t.Errorf("[%d] unexpected err %v, want %v", i, err, c.err)
|
||||
}
|
||||
if body != c.body {
|
||||
t.Errorf("[%d] unexpected body %v, want %v", i, body, c.body)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsDone(t *testing.T) {
|
||||
db, _ := storage.New(storage.MAP)
|
||||
db.Set("a", []byte("hi"))
|
||||
if ok, err := isDone(db, "a"); err != nil {
|
||||
t.Fatal(err)
|
||||
} else if !ok {
|
||||
t.Fatal(ok)
|
||||
}
|
||||
|
||||
if ok, err := isDone(db, "b"); err != nil {
|
||||
t.Fatal(err)
|
||||
} else if ok {
|
||||
t.Fatal(ok)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetSessionID(t *testing.T) {
|
||||
s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Add(sessionHeader, "id")
|
||||
w.Write([]byte(`Hello`))
|
||||
}))
|
||||
defer s.Close()
|
||||
|
||||
session, err := getSessionID(s.URL)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if session != "id" {
|
||||
t.Fatal(session)
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildReqBody(t *testing.T) {
|
||||
var want struct {
|
||||
Method string `json:"method"`
|
||||
Arguments struct {
|
||||
Filename string `json:"filename"`
|
||||
DownloadDir string `json:"download-dir"`
|
||||
} `json:"arguments"`
|
||||
}
|
||||
|
||||
b := buildReqBody("out", "mag")
|
||||
if err := json.NewDecoder(b).Decode(&want); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if want.Method != "torrent-add" {
|
||||
t.Fatal(want.Method)
|
||||
}
|
||||
if want.Arguments.Filename != "mag" {
|
||||
t.Fatal(want.Arguments.Filename)
|
||||
}
|
||||
if want.Arguments.DownloadDir != "out" {
|
||||
t.Fatal(want.Arguments.DownloadDir)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSucceeded(t *testing.T) {
|
||||
cases := []struct {
|
||||
s string
|
||||
err error
|
||||
}{
|
||||
{
|
||||
s: `{"result":"success"}`,
|
||||
err: nil,
|
||||
},
|
||||
{
|
||||
s: `this isnt json`,
|
||||
err: errors.New("invalid character 'h' in literal true (expecting 'r')"),
|
||||
},
|
||||
{
|
||||
s: `{"result":"failure"}`,
|
||||
err: errors.New(`denied: {"result":"failure"}`),
|
||||
},
|
||||
}
|
||||
|
||||
for i, c := range cases {
|
||||
err := succeeded(mockReadClose(c.s))
|
||||
cerrS := fmt.Sprintf("%v", c.err)
|
||||
errS := fmt.Sprintf("%v", err)
|
||||
if cerrS != errS {
|
||||
t.Errorf("[%d] unexpected err %v, want %v", i, err, c.err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestFindMagnets(t *testing.T) {
|
||||
cases := []struct {
|
||||
s string
|
||||
l int
|
||||
}{
|
||||
{
|
||||
s: `here is some magnet:-xt1 and magnet:-xt2 another one <a href="magnet:-xt3">link</a>`,
|
||||
l: 3,
|
||||
},
|
||||
}
|
||||
|
||||
for i, c := range cases {
|
||||
out := findMagnets(c.s)
|
||||
if len(out) != c.l {
|
||||
t.Errorf("[%d] found %v magnets, want %v", i, len(out), c.l)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,9 @@
|
||||
package monitor
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"local/rssmon3/config"
|
||||
"local/storage"
|
||||
"log"
|
||||
|
||||
"github.com/golang-collections/go-datastructures/queue"
|
||||
@@ -17,13 +19,15 @@ func newQueue() (*Queue, error) {
|
||||
q := queue.NewPriorityQueue(1)
|
||||
db := config.Values().DB
|
||||
keys, err := db.List([]string{nsQueued})
|
||||
if err != nil {
|
||||
if err == storage.ErrNotFound {
|
||||
keys = []string{}
|
||||
} else if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, key := range keys {
|
||||
b, err := db.Get(key, nsQueued)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, fmt.Errorf("failed to get %s:%s: %v", nsQueued, key, err)
|
||||
}
|
||||
i := &Item{}
|
||||
if err := i.Decode(b); err != nil {
|
||||
|
||||
68
testdata/2add
vendored
68
testdata/2add
vendored
@@ -1,68 +0,0 @@
|
||||
#! /bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
remote="${remote:-192.168.0.86}"
|
||||
|
||||
feeds='
|
||||
http://lizclimo.tumblr.com/rss comic na na "24h"
|
||||
https://mangadex.org/rss/follows/ryZGFkVgv9DAPqf45EsTHuQcxmteKBN6 manga na na "1h"
|
||||
https://siftrss.com/f/WXPo6AKQLX manga na na "1h"
|
||||
https://xkcd.com/atom.xml comic na na "24h"
|
||||
http://yaoi-blcd.tumblr.com/tagged/19-days/rss manga "Old.Xian.update.of..19" na "4h"
|
||||
http://www.shanaproject.com/feeds/secure/user/5310/5MUGNU9B3I/ anime na na "24h"
|
||||
http://www.manga.megchan.com/blog/?feed=rss2 manga na na "12h"
|
||||
https://siftrss.com/f/6Q8a0ZK0RmL manga na na "12h"
|
||||
https://dropoutmanga.wordpress.com/feed/ manga "(Hatsukoi|Tomo.chan)" na "6h"
|
||||
http://merakiscans.com/manga-rss/solo-leveling/ manga na na "8h"
|
||||
http://www.mangago.me/r/rsslink/a_man_like_you.xml manga na na "24h"
|
||||
http://www.mangago.me/r/rsslink/unriped_expression.xml manga na na "24h"
|
||||
http://www.mangago.me/r/rsslink/heesu_in_class_2.xml manga na na "24h"
|
||||
http://www.mangago.me/r/rsslink/bj_alex.xml manga na na "24h"
|
||||
https://heroscans.tumblr.com/rss manga "(Saizu)" na "24h"
|
||||
http://feeds.pinboard.in/rss/popular/ news ".*" ".*" "24h"
|
||||
https://www.vox.com/rss/index.xml news ".*" ".*" "24h"
|
||||
https://www.techdirt.com/techdirt_rss.xml news ".*" ".*" "24h"
|
||||
http://hotdiggedydemon.com/ comic ".*" ".*" "24h"
|
||||
https://nyaa.si/?page=rss&u=Nauti anime "(akegurui.*XX)" ".*Anime...English.translated.*" "3h"
|
||||
https://tapas.io/rss/series/72832 manga na na "24h"
|
||||
https://tapas.io/rss/series/88570 manga na na "24h"
|
||||
https://nyaa.si/?page=rss&q=kakegurui&c=0_0&f=0&u=Nauti anime "(akegurui.XX)" ".*Anime...English.translated.*" "3h"
|
||||
https://nyaa.si/?page=rss&q=horriblesubs+mob+psycho+720&c=0_0&f=0 anime ".*" ".*Anime...English.translated.*" "3h"
|
||||
https://www.youtube.com/feeds/videos.xml?channel_id=UC9CuvdOVfMPvKCiwdGKL3cQ youtube","gampegrumps ".*" ".*" "6h"
|
||||
https://www.youtube.com/feeds/videos.xml?channel_id=UCbGO1tCyjMlzqIT-tyrJNqQ youtube","kshaway ".*" ".*" "6h"
|
||||
https://www.youtube.com/feeds/videos.xml?channel_id=UC9YLd0REiXxLqQQH_CpJKZQ youtube","instalok ".*" ".*" "6h"
|
||||
https://www.youtube.com/feeds/videos.xml?channel_id=UCsvn_Po0SmunchJYOWpOxMg youtube","videogamedunkey ".*" ".*" "6h"
|
||||
https://eztv.ag/ezrss.xml momma","torrent "^(Wheel.[oO]f.Fortune.[S2]|Jeopardy.20|Gold.Rush.*720.*WEB|Survivor.S.*720p|Gold.Rush.*S.*E.*720.*WEB|Archer|Pointless.*720p.*WEB|Mock.The.Week.*480p|8.Out.Of.10.Cats.*480p|Masterchef.*Australia.*720p|Price.[iI]s.[rR]ight|Let.*[mM]ake.[aA].[dD]eal)" "<a.*?href=.magnet:.*?</a>" "2h"
|
||||
https://www.youtube.com/feeds/videos.xml?channel_id=UCs-w7E2HZWwXmjt9RTvBB_A youtube","settled ".*" ".*" "6h"
|
||||
https://roosterteeth.com/show/rt-podcast/feed/mp3 podcast ".*" ".*" "6h"
|
||||
'
|
||||
feedsB="$(printf "%s\n" "$feeds" | grep -v '^$' | tail -n 1)"
|
||||
#feeds="$feedsB"
|
||||
|
||||
defaultIF='""'
|
||||
defaultCF='"<img.*?/(img)?>"'
|
||||
defaultIN='"6h"'
|
||||
|
||||
feeds="${feeds// /,,}"
|
||||
|
||||
for feed in $feeds; do
|
||||
feed="${feed//,,/ }"
|
||||
link="$(echo "$feed" | awk '{print $1}')"
|
||||
tag="$(echo "$feed" | awk '{print $2}')"
|
||||
if="$(echo "$feed" | awk '{print $3}')"
|
||||
cf="$(echo "$feed" | awk '{print $4}')"
|
||||
in="$(echo "$feed" | awk '{print $5}')"
|
||||
if [ "$if" == "na" ]; then
|
||||
if="$defaultIF"
|
||||
fi
|
||||
if [ "$cf" == "na" ]; then
|
||||
cf="$defaultCF"
|
||||
fi
|
||||
if [ "$in" == "na" ]; then
|
||||
in="$defaultIN"
|
||||
fi
|
||||
cmd="curl ${PROXY:-} -i ${remote}:33419/api/feed -X PUT --data '{\"url\":\"$link\", \"refresh\":$in, \"items\":$if, \"content\":$cf, \"tags\":[\"$tag\"]}'"
|
||||
echo "$cmd" >&2
|
||||
eval "$cmd"
|
||||
done
|
||||
7
testdata/2do
vendored
7
testdata/2do
vendored
@@ -1,7 +0,0 @@
|
||||
* implement torrent+podcast handlers
|
||||
* fetch <img> and embed?
|
||||
|
||||
x render tags -> feeds for server
|
||||
x load feed jobs on startup and queue
|
||||
x push new jobs/queue items from server
|
||||
x implement endpoints for server
|
||||
Reference in New Issue
Block a user