Compare commits

...

23 Commits

Author SHA1 Message Date
Bel LaPointe
dad5803297 fix err msg 2026-01-27 15:12:26 -07:00
bel
2e4e4b9b06 i said PLS no multi handle file 2025-05-09 07:52:36 -06:00
bel
61f9b9c724 hrm 2025-05-09 07:39:22 -06:00
Bel LaPointe
5b9bead96f ntfy webhook format 2025-04-22 20:40:19 -06:00
Bel LaPointe
54bbca8fea accept $RECURSIVE_MISSING_WEBHOOK $RECURSIVE_MISSING_WEBHOOK_CACHE_D 2025-04-22 20:28:53 -06:00
bel
3e8e33816e dogs 2025-04-05 11:47:51 -06:00
bel
00fdd4f976 test more 2025-04-05 11:44:40 -06:00
bel
1e3d6ee0a2 se 2025-04-05 11:26:48 -06:00
Bel LaPointe
d3c9b1a564 dual first 2025-04-05 11:20:10 -06:00
bel
e653c35275 debug, dry, more env 2025-04-05 11:16:36 -06:00
Bel LaPointe
596865ede3 noisy log may as well be useful 2025-04-05 11:06:00 -06:00
bel
62ea963807 typo 2025-04-05 11:04:10 -06:00
Bel LaPointe
82a13aea65 yaml files overriden via env 2025-04-05 11:03:27 -06:00
Bel LaPointe
d40a1f8fd4 test one smart default parser 2025-04-05 10:59:33 -06:00
Bel LaPointe
e85fec9bbf for each pattern { for each entry { try } } so patterns serve as tier list 2025-04-05 10:44:22 -06:00
bel
5f5015e152 default for me 2025-04-05 01:05:10 -06:00
bel
12bb9c808b go run ./ i to init conf file 2025-04-05 01:03:19 -06:00
bel
984b53c6f1 overrides win 2025-04-05 00:55:26 -06:00
bel
57d9b74c31 outd is a template 2025-04-05 00:51:30 -06:00
bel
d57206357e format dry run 2025-04-05 00:42:27 -06:00
bel
4c32fb0411 log recursive find 2025-04-05 00:37:27 -06:00
Bel LaPointe
daa520de7d test recursive and run as go run ./ r 2025-04-05 00:34:01 -06:00
Bel LaPointe
6af1f231df mv into func 2025-04-04 23:51:12 -06:00
4 changed files with 576 additions and 63 deletions

2
go.mod
View File

@@ -1,3 +1,5 @@
module gitea/show-ingestion module gitea/show-ingestion
go 1.23.3 go 1.23.3
require gopkg.in/yaml.v3 v3.0.1 // indirect

3
go.sum Normal file
View File

@@ -0,0 +1,3 @@
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

343
main.go
View File

@@ -1,19 +1,49 @@
package main package main
import ( import (
"bytes"
"context" "context"
"encoding/json" "encoding/json"
"flag" "flag"
"fmt" "fmt"
"io" "io"
"io/fs"
"io/ioutil"
"log"
"net/http"
"net/url"
"os" "os"
"os/signal" "os/signal"
"path" "path"
"regexp" "regexp"
"strings" "strings"
"syscall" "syscall"
"text/template"
yaml "gopkg.in/yaml.v3"
) )
var (
	// Debug enables verbose per-pattern match logging throughout the run.
	Debug = os.Getenv("DEBUG") == "true"
	// YAML_C_* values override the corresponding Yaml.C fields in NewYaml.
	ConstTitle   = os.Getenv("YAML_C_TITLE")
	ConstSeason  = os.Getenv("YAML_C_SEASON")
	ConstEpisode = os.Getenv("YAML_C_EPISODE")
	// ConstOutd overrides the per-directory output template (Yaml.O).
	ConstOutd = os.Getenv("YAML_O")
	// Dry forces dry-run mode regardless of the per-directory config.
	Dry = os.Getenv("YAML_D") == "true" || os.Getenv("DRY") == "true"
	// ConstPatterns is a comma-separated pattern list overriding Yaml.P.
	ConstPatterns = os.Getenv("YAML_P")
	// WebhookOnRecursiveMiss, when set together with the cache dir below,
	// is PUT once per directory that lacks a config file during Recursive.
	WebhookOnRecursiveMiss = os.Getenv("RECURSIVE_MISSING_WEBHOOK")
	// WebhookOnRecursiveMissCacheD holds marker files so each missing
	// directory is only reported once across runs.
	WebhookOnRecursiveMissCacheD = os.Getenv("RECURSIVE_MISSING_WEBHOOK_CACHE_D")
)
// Yaml is the per-directory .show-ingestion.yaml configuration consumed
// by Recursive. Environment variables may override any field (see NewYaml).
type Yaml struct {
	C Fields   // constant field values applied over whatever the patterns parse
	O string   // output directory: a text/template rendered with the parsed Fields
	D bool     // dry run: log moves instead of performing them
	P []string // extra filename patterns, tried before the built-in defaults
}

// YamlFile is the config filename Recursive looks for in each directory.
const YamlFile = ".show-ingestion.yaml"
type Fields struct { type Fields struct {
Title string Title string
Season string Season string
@@ -26,26 +56,117 @@ func main() {
ctx, can := signal.NotifyContext(context.Background(), syscall.SIGINT) ctx, can := signal.NotifyContext(context.Background(), syscall.SIGINT)
defer can() defer can()
if len(os.Args) < 2 { foo := Main
if err := Recursive(ctx); err != nil { if len(os.Args) == 2 && os.Args[1] == "r" {
panic(err) foo = Recursive
} else if len(os.Args) == 2 && os.Args[1] == "i" {
foo = Stage
} }
} else { if err := foo(ctx); err != nil {
if err := Main(ctx); err != nil {
panic(err) panic(err)
} }
} }
func Stage(ctx context.Context) error {
if _, err := os.Stat(YamlFile); err == nil {
return nil
}
b, _ := yaml.Marshal(Yaml{
D: true,
O: "/volume1/video/Bel/Anime/{{.Title}}/Season_{{.Season}}",
})
return ioutil.WriteFile(YamlFile, b, os.ModePerm)
} }
func Recursive(ctx context.Context) error { func Recursive(ctx context.Context) error {
return io.EOF q := []string{"./"}
for len(q) > 0 {
d := q[0]
q = q[1:]
p := path.Join(d, YamlFile)
if _, err := os.Stat(p); err != nil {
log.Printf("%s has no %s", d, YamlFile)
if WebhookOnRecursiveMiss != "" && WebhookOnRecursiveMissCacheD != "" {
cacheP := regexp.MustCompile(`[^a-zA-Z0-9]`).ReplaceAllString(p, `_`)
cacheP = path.Join(WebhookOnRecursiveMissCacheD, cacheP)
if _, err := os.Stat(cacheP); err != nil {
req, err := http.NewRequest(http.MethodPut, WebhookOnRecursiveMiss, strings.NewReader(p))
if err != nil {
panic(err)
}
u, err := url.Parse(WebhookOnRecursiveMiss)
if err != nil {
return fmt.Errorf("WebhookOnRecursiveMiss (%s) invalid: %w", WebhookOnRecursiveMiss, err)
}
user := u.User
u.User = nil
if username := user.Username(); username != "" {
password, _ := user.Password()
req.SetBasicAuth(username, password)
}
req.URL = u
resp, err := http.DefaultClient.Do(req)
if err != nil {
return fmt.Errorf("failed to call %s for missing %s: %w", WebhookOnRecursiveMiss, p, err)
}
defer resp.Body.Close()
defer io.Copy(io.Discard, resp.Body)
if resp.StatusCode > 250 {
b, _ := io.ReadAll(resp.Body)
return fmt.Errorf("unexpected status code from %s for %s: (%d) %s", WebhookOnRecursiveMiss, p, resp.StatusCode, b)
}
os.MkdirAll(path.Dir(cacheP), os.ModePerm)
ioutil.WriteFile(cacheP, []byte{}, os.ModePerm)
}
}
} else if err := func() error {
y, err := NewYaml(path.Join(d, YamlFile))
if err != nil {
return err
}
was, err := os.Getwd()
if err != nil {
return err
}
if err := os.Chdir(d); err != nil {
return err
}
defer os.Chdir(was)
log.Printf("Run(outd=%s, ind=%s, patterns=%+v, const=%+v, dry=%v)", y.O, d, y.P, y.C, y.D)
if err := Run(ctx, y.O, "./", y.P, y.C, y.D); err != nil {
return err
}
return os.Chdir(was)
}(); err != nil {
return fmt.Errorf("%s: %w", p, err)
}
entries, err := readDir(d)
if err != nil {
return err
}
for _, entry := range entries {
if entry.IsDir() {
q = append(q, path.Join(d, entry.Name()))
}
}
}
return nil
} }
func Main(ctx context.Context) error { func Main(ctx context.Context) error {
flags := flag.NewFlagSet(os.Args[0], flag.ContinueOnError) flags := flag.NewFlagSet(os.Args[0], flag.ContinueOnError)
overridesS := flags.String("c", `{"title":"","season":"","episode":""}`, "overrides") overridesS := flags.String("c", `{"title":"","season":"","episode":""}`, "overrides")
ind := flags.String("i", "/dev/null", "in dir") ind := flags.String("i", "/dev/null", "in dir")
outd := flags.String("o", "/dev/null", "out dir") outd := flags.String("o", "/dev/null", "out dir template accepts overrides format title case")
dry := flags.Bool("d", true, "dry run") dry := flags.Bool("d", true, "dry run")
if err := flags.Parse(os.Args[1:]); err != nil { if err := flags.Parse(os.Args[1:]); err != nil {
panic(err) panic(err)
@@ -54,60 +175,114 @@ func Main(ctx context.Context) error {
var overrides Fields var overrides Fields
json.Unmarshal([]byte(*overridesS), &overrides) json.Unmarshal([]byte(*overridesS), &overrides)
mvNLn := RealMvNLn
if *dry {
outd := map[string]struct{}{}
mvNLn = func(outf, inf string) error {
if _, err := os.Stat(outf); err == nil {
return nil
}
if _, ok := outd[outf]; ok {
return nil
}
outd[outf] = struct{}{}
fmt.Fprintf(os.Stderr, "mv %q %q\n", inf, outf)
return nil
}
}
return Run(ctx, return Run(ctx,
*outd, *outd,
*ind, *ind,
append(flags.Args(), flags.Args(),
`^\[[^\]]*\] (?P<title>.*) - (?P<episode>[0-9]+) .*`, overrides,
*dry,
)
}
// Built-in filename patterns, appended after any user-supplied patterns
// by Run and therefore tried in this order (most specific first; the
// "Dual" variants prefer dual-audio releases over their plain twins).
const (
	// "[Group] Title - S01E02 ..." or dotted "Title.S01E02..." with dual-audio marker.
	PatternGroupTitleHyphenSEDual = `^(\[[^\]]*\] )?(?P<title>.*?)( -)?[ \.](S(?P<season>[0-9]{2})E)?(?P<episode>[0-9]{2})[^0-9].*[dD][uU][aA][lL].*`
	// Same shape without requiring the dual-audio marker; season is optional.
	PatternGroupTitleHyphenSE = `^(\[[^\]]*\] )?(?P<title>.*?)( -)?[ \.](S(?P<season>[0-9]{2})E)?(?P<episode>[0-9]{2})[^0-9].*`
	// "Title S1E2 ... Dual ..."
	PatternTitleSEDual = `^(?P<title>.*) S(?P<season>[0-9]+)E(?P<episode>[0-9]+).*[dD][uU][aA][lL].*`
	// "Title S1E2 ..."
	PatternTitleSE = `^(?P<title>.*) S(?P<season>[0-9]+)E(?P<episode>[0-9]+).*`
	// Bare "S1E2 ..." with dual-audio marker (title must come from overrides).
	SEDual = `^S(?P<season>[0-9]+)E(?P<episode>[0-9]+).*[dD][uU][aA][lL].*`
	// Bare "S1E2 ...".
	SE = `^S(?P<season>[0-9]+)E(?P<episode>[0-9]+).*`
)
func Run(ctx context.Context, outd, ind string, patterns []string, overrides Fields, dry bool) error {
mvNLn := RealMvNLn
if dry {
mvNLn = DryMvNLn()
}
return RunWith(ctx,
outd,
ind,
append(patterns,
PatternGroupTitleHyphenSEDual,
PatternGroupTitleHyphenSE,
PatternTitleSEDual,
PatternTitleSE,
SEDual,
SE,
), ),
overrides, overrides,
mvNLn, mvNLn,
) )
} }
func Run(ctx context.Context, outd, ind string, patterns []string, overrides Fields, mvNLn MvNLn) error { func RunWith(ctx context.Context, outd, ind string, patterns []string, overrides Fields, mvNLn MvNLn) error {
entries, err := os.ReadDir(ind) entries, err := readDir(ind)
if err != nil { if err != nil {
return err return err
} }
for _, entry := range entries { done := map[int]bool{}
if !entry.Type().IsRegular() { for _, pattern := range patterns {
for i, entry := range entries {
if done[i] {
continue continue
} }
if err := one(ctx, outd, path.Join(ind, entry.Name()), patterns, overrides, mvNLn); err != nil { if !entry.Type().IsRegular() && !(Debug && Dry) {
continue
}
if match, err := one(ctx, outd, path.Join(ind, entry.Name()), []string{pattern}, overrides, mvNLn); err != nil {
return err return err
} else if match {
done[i] = true
}
} }
} }
return nil return nil
} }
func one(ctx context.Context, outd, inf string, patterns []string, overrides Fields, mvNLn MvNLn) error { func one(ctx context.Context, outd, inf string, patterns []string, overrides Fields, mvNLn MvNLn) (bool, error) {
f := path.Base(inf) f := path.Base(inf)
for _, pattern := range patterns { for _, pattern := range patterns {
re := regexp.MustCompile(pattern) found, match := Parse(f, pattern)
if !re.MatchString(f) { if !match {
if Debug {
log.Printf("%q does not match %q", pattern, f)
}
continue continue
} }
found := overrides for _, wr := range [][2]*string{
[2]*string{&found.Title, &overrides.Title},
[2]*string{&found.Season, &overrides.Season},
[2]*string{&found.Episode, &overrides.Episode},
} {
if *wr[1] != "" {
*wr[0] = *wr[1]
}
}
if found.Title == "" || found.Season == "" || found.Episode == "" {
if Debug {
log.Printf("%q does not match all %q: %+v", pattern, f, found)
}
continue
}
found.Title = strings.ReplaceAll(found.Title, ".", " ")
found.Title = strings.Join(strings.Fields(found.Title), "_")
if Debug {
log.Printf("%q matches %q as %+v", pattern, f, found)
}
return true, foundOne(ctx, outd, inf, found, mvNLn)
}
return false, nil
}
func Parse(f string, pattern string) (Fields, bool) {
re := regexp.MustCompile(pattern)
if !re.MatchString(f) {
return Fields{}, false
}
var found Fields
groupNames := re.SubexpNames() groupNames := re.SubexpNames()
groups := re.FindStringSubmatch(f) groups := re.FindStringSubmatch(f)
for i := 1; i < len(groupNames); i++ { for i := 1; i < len(groupNames); i++ {
@@ -120,22 +295,23 @@ func one(ctx context.Context, outd, inf string, patterns []string, overrides Fie
case "episode": case "episode":
found.Episode = v found.Episode = v
default: default:
return fmt.Errorf("unexpected capture group %q", groupNames[i]) //return fmt.Errorf("unexpected capture group %q", groupNames[i])
} }
} }
return found, true
if found.Title == "" || found.Season == "" || found.Episode == "" {
continue
}
found.Title = strings.Join(strings.Fields(found.Title), "_")
return foundOne(ctx, outd, inf, found, mvNLn)
}
return nil
} }
func foundOne(ctx context.Context, outd, inf string, fields Fields, mvNLn MvNLn) error { func foundOne(ctx context.Context, outd, inf string, fields Fields, mvNLn MvNLn) error {
outf := path.Join(outd, fmt.Sprintf("%s_S%sE%s%s", fields.Title, fields.Season, fields.Episode, path.Ext(inf))) tmpl, err := template.New(inf).Parse(outd)
if err != nil {
return err
}
buff := bytes.NewBuffer(nil)
if err := tmpl.Execute(buff, fields); err != nil {
return err
}
outf := path.Join(string(buff.Bytes()), fmt.Sprintf("%s_S%sE%s%s", fields.Title, fields.Season, fields.Episode, path.Ext(inf)))
return mvNLn(outf, inf) return mvNLn(outf, inf)
} }
@@ -144,10 +320,79 @@ func RealMvNLn(outf, inf string) error {
return fmt.Errorf("cannot mv_n_ln(%s): (%v) mode=%v", inf, err, stat.Mode()) return fmt.Errorf("cannot mv_n_ln(%s): (%v) mode=%v", inf, err, stat.Mode())
} }
if _, err := os.Stat(outf); err == nil { if _, err := os.Stat(outf); err == nil {
return nil // fmt.Errorf("conflict: %s already exists", path.Base(outf)) return nil
}
if err := os.MkdirAll(path.Dir(outf), os.ModePerm); err != nil {
return err
} }
if err := os.Rename(inf, outf); err != nil { if err := os.Rename(inf, outf); err != nil {
return err return err
} }
return os.Symlink(outf, inf) return os.Symlink(outf, inf)
} }
// DryMvNLn returns a mover that only prints what would be moved. Each
// distinct destination is announced once per returned closure; moves are
// also skipped (silently, unless Debug) when the destination already
// exists on disk.
func DryMvNLn() func(string, string) error {
	announced := map[string]struct{}{}
	return func(outf, inf string) error {
		_, onDisk := os.Stat(outf)
		_, seen := announced[outf]
		if onDisk == nil || seen {
			if Debug {
				fmt.Fprintf(os.Stderr, "no mv %q\n %q\n", inf, outf)
			}
			return nil
		}
		announced[outf] = struct{}{}
		fmt.Fprintf(os.Stderr, "mv %q\n %q\n", inf, outf)
		return nil
	}
}
func readDir(d string) ([]fs.DirEntry, error) {
entries, err := os.ReadDir(d)
result := []fs.DirEntry{}
for _, entry := range entries {
if !strings.HasPrefix(entry.Name(), ".") {
result = append(result, entry)
}
}
return result, err
}
// NewYaml loads the config file at p and applies the environment-variable
// overrides (YAML_C_*, YAML_O, YAML_D/DRY, YAML_P) on top of it.
func NewYaml(p string) (Yaml, error) {
	var y Yaml
	b, err := os.ReadFile(p)
	if err != nil {
		// Previously the read error was discarded, so an unreadable file
		// silently produced an all-defaults config instead of failing.
		return y, fmt.Errorf("%s: %w", p, err)
	}
	if err := yaml.Unmarshal(b, &y); err != nil {
		return y, fmt.Errorf("%s: %w", p, err)
	}
	if ConstTitle != "" {
		y.C.Title = ConstTitle
	}
	if ConstSeason != "" {
		y.C.Season = ConstSeason
	}
	if ConstEpisode != "" {
		y.C.Episode = ConstEpisode
	}
	if ConstOutd != "" {
		y.O = ConstOutd
	}
	if Dry {
		y.D = true
	}
	if ConstPatterns != "" {
		y.P = strings.Split(ConstPatterns, ",")
	}
	return y, nil
}

View File

@@ -3,6 +3,9 @@ package main_test
import ( import (
"context" "context"
"io/ioutil" "io/ioutil"
"net/http"
"net/http/httptest"
"net/url"
"os" "os"
"path" "path"
"slices" "slices"
@@ -28,7 +31,7 @@ func TestRunChoosesOne(t *testing.T) {
"Australian_Survivor_S12E12.mkv": false, "Australian_Survivor_S12E12.mkv": false,
} }
if err := main.Run(context.Background(), if err := main.RunWith(context.Background(),
outd, outd,
ind, ind,
[]string{ []string{
@@ -53,7 +56,7 @@ func TestRunChoosesOne(t *testing.T) {
} }
} }
func TestRun(t *testing.T) { func TestRunWith(t *testing.T) {
cases := map[string]struct { cases := map[string]struct {
given []string given []string
patterns []string patterns []string
@@ -79,6 +82,17 @@ func TestRun(t *testing.T) {
"Australian_Survivor_S12E12.mkv", "Australian_Survivor_S12E12.mkv",
}, },
}, },
"hard w group": {
given: []string{
"[Yameii] Dr. Stone - S04E12 [English Dub] [CR WEB-DL 720p] [F6EF1948].mkv",
},
patterns: []string{
main.PatternGroupTitleHyphenSE,
},
want: []string{
"Dr_Stone_S04E12.mkv",
},
},
"easy w group": { "easy w group": {
given: []string{ given: []string{
"[SubsPlease] Tokidoki Bosotto Russia-go de Dereru Tonari no Alya-san - 01 (720p) [A12844D5].mkv", "[SubsPlease] Tokidoki Bosotto Russia-go de Dereru Tonari no Alya-san - 01 (720p) [A12844D5].mkv",
@@ -106,9 +120,9 @@ func TestRun(t *testing.T) {
} }
outd := t.TempDir() outd := t.TempDir()
if err := main.Run(context.Background(), outd, ind, c.patterns, c.overrides, main.RealMvNLn); err != nil { if err := main.RunWith(context.Background(), outd, ind, c.patterns, c.overrides, main.RealMvNLn); err != nil {
t.Fatal("err on first run:", err) t.Fatal("err on first run:", err)
} else if err := main.Run(context.Background(), outd, ind, c.patterns, c.overrides, main.RealMvNLn); err != nil { } else if err := main.RunWith(context.Background(), outd, ind, c.patterns, c.overrides, main.RealMvNLn); err != nil {
t.Fatal("err on second run:", err) t.Fatal("err on second run:", err)
} }
@@ -147,3 +161,252 @@ func TestRun(t *testing.T) {
}) })
} }
} }
// TestRecursive builds a tree exercising every Recursive code path —
// constant-field config, parsed-then-overridden fields, custom pattern,
// dry run, missing config (webhook), and built-in default patterns —
// then runs Recursive twice to prove idempotence and webhook dedup.
func TestRecursive(t *testing.T) {
	webhooks := []string{}
	s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.Method != http.MethodPut {
			t.Errorf("unexpected webhook method %s", r.Method)
		}
		if r.URL.User.String() != "" {
			t.Errorf("unexpected auth on url %s", r.URL.String())
		}
		if u, p, _ := r.BasicAuth(); u != "u" || p != "p" {
			t.Errorf("webhook didnt translate u:p to basic auth")
		}
		b, _ := ioutil.ReadAll(r.Body)
		t.Logf("%s { %s }", r.URL.String(), b)
		webhooks = append(webhooks, string(b))
	}))
	t.Cleanup(s.Close)
	t.Cleanup(func() {
		t.Logf("webhooks: %+v", webhooks)
		if len(webhooks) == 0 {
			t.Errorf("expected webhook calls but got none")
		}
		deduped := slices.Clone(webhooks)
		slices.Sort(deduped)
		// FIX: slices.Compact returns the shortened slice; the result was
		// previously discarded, so len(deduped) always equaled
		// len(webhooks) and the duplicate check could never fire.
		deduped = slices.Compact(deduped)
		if len(deduped) != len(webhooks) {
			t.Errorf("expected no duplicate webhooks but got %+v", webhooks)
		}
	})
	u, _ := url.Parse(s.URL)
	u.User = url.UserPassword("u", "p")
	main.WebhookOnRecursiveMiss = u.String()
	main.WebhookOnRecursiveMissCacheD = t.TempDir()
	t.Cleanup(func() {
		main.WebhookOnRecursiveMiss = ""
		main.WebhookOnRecursiveMissCacheD = ""
	})
	was, _ := os.Getwd()
	t.Cleanup(func() { os.Chdir(was) })
	os.Chdir(t.TempDir())
	outd := t.TempDir()
	os.MkdirAll(path.Join(outd, "A"), os.ModePerm)
	// use config
	write("./showA/.show-ingestion.yaml", `{
	"c": {
		"title": "A",
		"season": "A",
		"episode": "A"
	},
	"p": [".*"],
	"o": "`+outd+`/A"
}`)
	write("./showA/file.a")
	// parse files and const wins
	write("./showB/.show-ingestion.yaml", `{
	"o": "`+outd+`/B_{{.Title}}_{{.Season}}_{{.Episode}}",
	"p": [],
	"c": {"title": "TITLE"}
}`)
	write("./showB/title S01E02.b")
	// use file pattern
	write("./dirA/showC/.show-ingestion.yaml", `{
	"o": "`+outd+`/C",
	"p": ["^(?P<title>.) (?P<season>.) (?P<episode>.)"]
}`)
	write("./dirA/showC/t s e.c")
	// dry run
	write("./dirA/showD/.show-ingestion.yaml", `{
	"o": "`+outd+`/D",
	"d": true
}`)
	write("./dirA/showD/title S02E04.d")
	// not configured
	os.MkdirAll("./dirB/showE", os.ModePerm)
	write("./dirB/showE/title S03E06.e")
	// defaults
	write("./dirA/showF/.show-ingestion.yaml", `{
	"o": "`+outd+`/F"
}`)
	write("./dirA/showF/[Yameii] Dr. Stone - S04E12 [English Dub] [CR WEB-DL 720p] [F6EF1948].mkv")
	if err := main.Recursive(context.Background()); err != nil {
		t.Fatal(err)
	} else if err := main.Recursive(context.Background()); err != nil {
		t.Fatalf("failed second run: %v", err)
	}
	exists(t, path.Join(outd, "A", "A_SAEA.a"))
	exists(t, path.Join(outd, "B_TITLE_01_02", "TITLE_S01E02.b"))
	exists(t, path.Join(outd, "C", "t_SsEe.c"))
	notExists(t, path.Join(outd, "D", "title_S02E04.d"))
	notExists(t, path.Join(outd, "title_S03E06.e"))
	exists(t, path.Join(outd, "F", "Dr_Stone_S04E12.mkv"))
	notExists(t, path.Join(outd, "F", "[Yameii]_Dr_Stone_-_S04E12.mkv"))
}
func write(f string, b ...string) {
if len(b) == 0 {
b = append(b, "")
}
os.MkdirAll(path.Dir(f), os.ModePerm)
os.WriteFile(f, []byte(b[0]), os.ModePerm)
}
// exists fails the test when file p is absent, listing the grandparent
// directory's contents to make the failure actionable.
func exists(t *testing.T, p string) {
	_, err := os.Stat(p)
	if !os.IsNotExist(err) {
		return
	}
	dir := path.Dir(path.Dir(p))
	t.Errorf("expected %s of %s (%+v)", path.Base(p), dir, ls(dir))
}
// notExists fails the test when file p is present, listing the
// grandparent directory's contents to make the failure actionable.
func notExists(t *testing.T, p string) {
	_, err := os.Stat(p)
	if os.IsNotExist(err) {
		return
	}
	dir := path.Dir(path.Dir(p))
	t.Errorf("unexpected %s of %s (%+v)", path.Base(p), dir, ls(dir))
}
func ls(d string) []string {
result := []string{}
entries, _ := os.ReadDir(d)
for _, entry := range entries {
p := path.Join(d, entry.Name())
if entry.IsDir() {
result = append(result, ls(p)...)
} else {
result = append(result, p)
}
}
slices.Sort(result)
return result
}
// TestParse pins the fields PatternGroupTitleHyphenSE extracts from a
// corpus of real-world release filenames: bracketed group prefixes,
// dotted titles, missing seasons, and hyphenated/parenthesized titles.
func TestParse(t *testing.T) {
	cases := map[string]struct {
		pattern string
		want    main.Fields
	}{
		// No season in the name: Season stays empty (filled by overrides later).
		"[SubsPlease] Tokidoki Bosotto Russia-go de Dereru Tonari no Alya-san - 01 (720p) [A12844D5].mkv": {
			pattern: main.PatternGroupTitleHyphenSE,
			want: main.Fields{
				Title:   "Tokidoki Bosotto Russia-go de Dereru Tonari no Alya-san",
				Season:  "",
				Episode: "01",
			},
		},
		// Dotted scene-style name: dots stay in the raw parsed Title.
		"Survivor.AU.S12E11.1080p.HEVC.x265-MeGusta[EZTVx.to].mkv": {
			pattern: main.PatternGroupTitleHyphenSE,
			want: main.Fields{
				Title:   "Survivor.AU",
				Season:  "12",
				Episode: "11",
			},
		},
		"DAN DA DAN (2024) S01E01v2 (1080p WEB-DL H264 AAC DDP 2.0 Dual-Audio) [MALD].mkv": {
			pattern: main.PatternGroupTitleHyphenSE,
			want: main.Fields{
				Title:   "DAN DA DAN (2024)",
				Season:  "01",
				Episode: "01",
			},
		},
		"ZENSHU.S01E01.1080p.AMZN.WEB-DL.MULTi.DDP2.0.H.264.MSubs-ToonsHub.mkv": {
			pattern: main.PatternGroupTitleHyphenSE,
			want: main.Fields{
				Title:   "ZENSHU",
				Season:  "01",
				Episode: "01",
			},
		},
		"[Yameii] My Hero Academia - S07E08 [English Dub] [CR WEB-DL 720p] [DE5FFC3E].mkv": {
			pattern: main.PatternGroupTitleHyphenSE,
			want: main.Fields{
				Title:   "My Hero Academia",
				Season:  "07",
				Episode: "08",
			},
		},
		// Hyphen inside the title must not be confused with the group separator.
		"Ranma1-2.2024.S01E03.Because.Theres.Someone.He.Likes.1080p.NF.WEB-DL.AAC2.0.H.264-VARYG.mkv": {
			pattern: main.PatternGroupTitleHyphenSE,
			want: main.Fields{
				Title:   "Ranma1-2.2024",
				Season:  "01",
				Episode: "03",
			},
		},
		"[Yameii] The Apothecary Diaries - S02E03 [English Dub] [CR WEB-DL 720p] [FD3E7434].mkv": {
			pattern: main.PatternGroupTitleHyphenSE,
			want: main.Fields{
				Title:   "The Apothecary Diaries",
				Season:  "02",
				Episode: "03",
			},
		},
		"The.Dinner.Table.Detective.S01E01.Welcome.to.the.murderous.party.File.1.1080p.AMZN.WEB-DL.DDP2.0.H.264-VARYG.mkv": {
			pattern: main.PatternGroupTitleHyphenSE,
			want: main.Fields{
				Title:   "The.Dinner.Table.Detective",
				Season:  "01",
				Episode: "01",
			},
		},
		"[Reza] Wistoria Wand and Sword - S01E01.mkv": {
			pattern: main.PatternGroupTitleHyphenSE,
			want: main.Fields{
				Title:   "Wistoria Wand and Sword",
				Season:  "01",
				Episode: "01",
			},
		},
		"[EMBER] Ao no Hako - 01.mkv": {
			pattern: main.PatternGroupTitleHyphenSE,
			want: main.Fields{
				Title:   "Ao no Hako",
				Season:  "",
				Episode: "01",
			},
		},
		"Niehime to Kemono no Ou - 12 [darkflux].mkv": {
			pattern: main.PatternGroupTitleHyphenSE,
			want: main.Fields{
				Title:   "Niehime to Kemono no Ou",
				Season:  "",
				Episode: "12",
			},
		},
	}
	for f, d := range cases {
		// Copy the case value so the subtest closure does not share it
		// (pre-Go-1.22 loop-variable idiom).
		c := d
		t.Run(f, func(t *testing.T) {
			got, _ := main.Parse(f, c.pattern)
			if got != c.want {
				t.Errorf("expected \n\t%+v but got \n\t%+v", c.want, got)
			}
		})
	}
}