cron/main_test uses proxy to read testdata/*.rss
parent 49c44b9df8
commit 0628a678d8
src/cmd/cron/main_test.go

@@ -1,10 +1,12 @@
 package cron_test
 
 import (
+	"bytes"
 	"context"
 	"fmt"
+	"io"
 	"net/http"
-	"net/http/httptest"
+	"os"
 	"show-rss/src/cmd/cron"
 	"show-rss/src/db"
 	"show-rss/src/feeds"
@@ -24,29 +26,46 @@ func TestOne(t *testing.T) {
 		},
 		"feeds": func(t *testing.T) context.Context {
 			gets := []string{}
-			s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
-				gets = append(gets, r.URL.String())
-				t.Logf("%s", gets[len(gets)-1])
-
-				switch r.URL.Query().Get("id") {
-				case "0":
-				case "1":
-				default:
-					http.NotFound(w, r)
-				}
-			}))
-			t.Cleanup(s.Close)
+			sURL := "http://192.168.0.167:10000/"
+			s := &http.Server{
+				Addr:    ":10000",
+				Handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+					gets = append(gets, r.URL.String())
+					t.Logf("serving fetch %s", gets[len(gets)-1])
+
+					switch r.URL.Query().Get("idx") {
+					case "0":
+					case "1":
+					default:
+						http.NotFound(w, r)
+					}
+					b, _ := os.ReadFile(fmt.Sprintf("testdata/%s.rss", r.URL.Query().Get("idx")))
+					io.Copy(w, bytes.NewReader(b))
+				}),
+			}
+			go s.ListenAndServe()
+			t.Cleanup(func() { s.Close() })
 			t.Cleanup(func() {
 				slices.Sort(gets)
-				if len(gets) != 2+2+2 { // id=0+id=1 for each of 2 unrecycled ctx, id=0+id=1 for one across shared ctx
-					t.Errorf("didn't call urls exactly twice: %+v", gets)
+				if len(gets) != 1+2+2+2 { // healthcheck, id=0+id=1 for each of 2 unrecycled ctx, id=0+id=1 for one across shared ctx
+					t.Errorf("didn't call urls: %+v", gets)
 				}
 			})
 
+			for {
+				resp, err := http.Get(sURL)
+				if err == nil {
+					resp.Body.Close()
+					break
+				}
+				time.Sleep(100 * time.Millisecond)
+			}
+
 			ctx := db.Test(t, ctx)
 
 			for i := 0; i < 2; i++ {
-				if _, err := feeds.Insert(ctx, fmt.Sprintf("%s?idx=%d", s.URL, i), "* * * * *", "matches", "tag"); err != nil {
+				if _, err := feeds.Insert(ctx, fmt.Sprintf("%s?idx=%d", sURL, i), "* * * * *", "matches", "tag"); err != nil {
 					t.Fatal(err)
 				}
 			}
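The move away from httptest.NewServer is the heart of this hunk: httptest.NewServer binds a random free port on the loopback interface, so the proxy named in the commit title, running as a separate process, cannot be handed a stable URL ahead of time. The new code binds a fixed port reachable at a known LAN address, starts the server itself, and polls until the socket answers before inserting feeds. A minimal, self-contained sketch of that start-then-probe pattern; serveFixtures, the addresses, and the handler are illustrative names, not taken from the repo:

package main

import (
	"net/http"
	"time"
)

// serveFixtures starts an HTTP server on a fixed, externally reachable
// address and blocks until it accepts requests. Only the
// go-ListenAndServe-then-poll shape mirrors the commit.
func serveFixtures(addr, probeURL string, h http.Handler) *http.Server {
	s := &http.Server{Addr: addr, Handler: h}
	go s.ListenAndServe() // returns only on failure; ignored in this sketch
	for {
		resp, err := http.Get(probeURL)
		if err == nil {
			resp.Body.Close() // server is up; release the probe connection
			return s
		}
		time.Sleep(100 * time.Millisecond) // not listening yet; retry
	}
}

func main() {
	h := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Write([]byte("ok")) })
	s := serveFixtures(":10000", "http://127.0.0.1:10000/", h)
	defer s.Close()
}

Note that the readiness probe itself hits the recording handler, which is why the expected request count in the cleanup check grows from 2+2+2 to 1+2+2+2: the extra 1 is the healthcheck GET logged alongside the real fetches.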

src/cmd/cron/testdata/0.rss

@@ -0,0 +1,29 @@
+<rdf:RDF>
+	<item>
+		<title>title matches</title>
+		<link>link</link>
+		<description>description</description>
+		<dc:date>2025-04-27T16:34:00+00:00</dc:date>
+	</item>
+
+	<item>
+		<title>title</title>
+		<link>link matches</link>
+		<description>description</description>
+		<dc:date>2025-04-27T16:34:00+00:00</dc:date>
+	</item>
+
+	<item>
+		<title>title</title>
+		<link>link</link>
+		<description>description matches</description>
+		<dc:date>2025-04-27T16:34:00+00:00</dc:date>
+	</item>
+
+	<item>
+		<title>title</title>
+		<link>link</link>
+		<description>description</description>
+		<dc:date>2025-04-27T16:34:00+00:00</dc:date>
+	</item>
+</rdf:RDF>

src/cmd/cron/testdata/1.rss

@@ -0,0 +1,30 @@
+<rdf:RDF>
+	<item>
+		<title>1 title matches</title>
+		<link>1 link</link>
+		<description>1 description</description>
+		<dc:date>2025-04-27T16:34:00+00:00</dc:date>
+	</item>
+
+	<item>
+		<title>1 title</title>
+		<link>1 link matches</link>
+		<description>1 description</description>
+		<dc:date>2025-04-27T16:34:00+00:00</dc:date>
+	</item>
+
+	<item>
+		<title>1 title</title>
+		<link>1 link</link>
+		<description>1 description matches</description>
+		<dc:date>2025-04-27T16:34:00+00:00</dc:date>
+	</item>
+
+	<item>
+		<title>1 title</title>
+		<link>1 link</link>
+		<description>1 description</description>
+		<dc:date>2025-04-27T16:34:00+00:00</dc:date>
+	</item>
+</rdf:RDF>
+
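Both fixtures carry the token "matches" (the pattern passed to feeds.Insert above) in a different field of each of the first three items: title, link, description; the fourth item carries it nowhere. So a matcher that consults all three fields should accept exactly three of the four items per feed. A hedged sketch of such a field-by-field check; the repo's real matching code is not part of this diff, and the Item shape here is assumed from the fixture layout:

package main

import (
	"fmt"
	"strings"
)

// Item mirrors the fields present in the testdata fixtures; the real
// type in the feeds package may differ.
type Item struct {
	Title, Link, Description string
}

// itemMatches reports whether pattern occurs in any user-visible field,
// which is what the fixture layout appears designed to exercise.
func itemMatches(it Item, pattern string) bool {
	return strings.Contains(it.Title, pattern) ||
		strings.Contains(it.Link, pattern) ||
		strings.Contains(it.Description, pattern)
}

func main() {
	items := []Item{
		{Title: "title matches", Link: "link", Description: "description"},
		{Title: "title", Link: "link matches", Description: "description"},
		{Title: "title", Link: "link", Description: "description matches"},
		{Title: "title", Link: "link", Description: "description"},
	}
	for i, it := range items {
		fmt.Println(i, itemMatches(it, "matches")) // true, true, true, false
	}
}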

src/feeds/…

@@ -28,7 +28,7 @@ var (
 )
 
 func (feed Feed) ShouldExecute() (bool, error) {
-	if feed.Entry.Deleted.IsZero() {
+	if !feed.Entry.Deleted.IsZero() {
 		return false, nil
 	}
 
@@ -45,7 +45,7 @@ func (feed Feed) ShouldExecute() (bool, error) {
 		return false, fmt.Errorf("illegal cron %q", feed.Version.Cron)
 	}
 	next := schedule.Next(feed.Execution.Executed)
-	return time.Now().Before(next), nil
+	return time.Now().After(next), nil
 }
 
 func (feed Feed) Fetch(ctx context.Context) (Items, error) {
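Both hunks flip an inverted condition. The old deletion guard returned early exactly when Deleted was the zero time, i.e. for feeds that had never been deleted, so no live feed could ever run; and the old time.Now().Before(next) reported a feed as due only while it was still waiting for its next slot. A standalone sketch of the corrected logic, assuming a robfig/cron-style parser for the schedule; the diff does not show which cron library the repo actually uses, and the flat parameters stand in for the Feed struct's fields:

package main

import (
	"fmt"
	"time"

	"github.com/robfig/cron/v3"
)

// shouldExecute mirrors the fixed checks: a deleted feed is never due, and a
// live feed is due once the slot after its last execution has passed.
func shouldExecute(deleted, lastExecuted time.Time, spec string) (bool, error) {
	if !deleted.IsZero() {
		return false, nil // feed was deleted; never due
	}
	schedule, err := cron.ParseStandard(spec)
	if err != nil {
		return false, fmt.Errorf("illegal cron %q", spec)
	}
	next := schedule.Next(lastExecuted)
	return time.Now().After(next), nil
}

func main() {
	due, _ := shouldExecute(time.Time{}, time.Now().Add(-2*time.Minute), "* * * * *")
	fmt.Println(due) // true: a minute boundary has passed since the last run
}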