neverends
parent ea1c5b982c
commit 529abd37e9
@@ -2,7 +2,7 @@
 cache() (
 path() {
-echo "$CACHE/$(echo "$*" | base64)"
+echo "$CACHE/$(echo "$*" | base64 | md5sum | awk '{print $1}')"
 }
 get() {
 local path="$(path "$*")"
 
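For reference, a minimal standalone sketch of what the new key derivation yields (the CACHE variable and the cache()/path() helpers come from the script being patched; the default value and demo call below are illustrative only): hashing the base64 output through md5sum gives every key a fixed 32-character hex name instead of a base64 string that grows with the length of the arguments.

# Illustrative only; the CACHE default here is an assumption for the demo.
CACHE="${CACHE:-$HOME/.cache/demo}"
demo_path() {
  echo "$CACHE/$(echo "$*" | base64 | md5sum | awk '{print $1}')"
}
demo_path "a very long query string"   # -> $CACHE/<32-character md5 hex digest>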
@@ -12,7 +12,10 @@ gitlab() (
 local project="$(_url_to_project_root "$url" | head -n 1)"
 project="$(urlencode "$project")"
 local root="$(_url_to_project_root "$url" | tail -n 1)"
+blob="$(urlencode "$blob")"
+if [ -n "$root" ]; then
 blob="$(urlencode "$root/$blob")"
+fi
 
 local path="api/v4/projects/$project/repository/files/$blob/raw"
 _gcurl "https://gitlab-app.eng.qops.net/$path"
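The surrounding code targets GitLab's repository files endpoint (GET /api/v4/projects/:id/repository/files/:file_path/raw), which expects both the project path and the file path to be URL-encoded; the added lines only prepend $root when it is non-empty. A rough plain-curl sketch of the same request, with made-up project/file values, jq standing in for the script's urlencode helper, and the token/ref parameters assumed rather than taken from this repo:

# Illustrative values; the real script derives these via _url_to_project_root and _gcurl.
project="$(printf 'group/repo' | jq -sRr @uri)"      # group%2Frepo
blob="$(printf 'docs/readme.md' | jq -sRr @uri)"     # docs%2Freadme.md
curl -H "PRIVATE-TOKEN: $GITLAB_TOKEN" \
  "https://gitlab-app.eng.qops.net/api/v4/projects/$project/repository/files/$blob/raw?ref=main"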
@@ -65,7 +65,7 @@ _crawl() {
 }
 
 extract_crawlable_source() {
-echo "$*" | head -n 1 | awk '{print $NF}' | sed 's/^<//' | sed 's/>$//'
+echo "$*" | head -n 1 | awk '{print $NF}' | sed 's/^<//' | sed 's/>$//' | sed 's/^\///' | sed 's/\/$//'
 }
 
 crawl_with() {
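To see what the two added sed expressions do, here is the pipeline run on a made-up value (pure illustration, nothing repo-specific): after the angle brackets are removed, they strip one leading and one trailing slash.

echo "</path/to/source/>" | sed 's/^<//' | sed 's/>$//' | sed 's/^\///' | sed 's/\/$//'
# -> path/to/source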
@@ -86,7 +86,7 @@ crawl_with() {
 log expand $crawlable_source:
 notes_mkdir_p() {
 local id="$1"
-local subtitle="$2"
+local subtitle="${2%/}"
 notes put "$id" "$subtitle" "autogenerated content"
 }
 one() {
@@ -107,7 +107,9 @@ crawl_with() {
 for subtitle in $(echo "$i" | base64 --decode | tr '/' '\n' | while read -r subtitle; do echo "$subtitle" | base64; done); do
 local subtitle="$(echo "$subtitle" | base64 --decode)"
 if [ -n "$title_so_far" ]; then
-notes_mkdir_p "${sum%/}" "${title_so_far%/}" >&2
+local mkdir_p_title="${title_so_far%/}"
+mkdir_p_title="${mkdir_p_title##*/}"
+notes_mkdir_p "${sum%/}" "${mkdir_p_title}" >&2
 fi
 sum+="$(echo "$subtitle" | encode)/"
 title_so_far+="$subtitle/"
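The replacement lines rely on two standard bash parameter expansions, shown here on a made-up title (illustration only, not repo data): ${var%/} removes a single trailing slash and ${var##*/} keeps only the last /-separated component, so notes_mkdir_p now receives just the final subtitle rather than the whole accumulated path.

title_so_far="projects/gitlab/crawler/"
mkdir_p_title="${title_so_far%/}"      # projects/gitlab/crawler
mkdir_p_title="${mkdir_p_title##*/}"   # crawler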
@@ -118,7 +120,7 @@ crawl_with() {
 log " $ID ($TITLE): ${#CONTENT}"
 push_crawled "$PID/$ID" "$TITLE" "$CONTENT"
 }
-for i in $(seq 1 $(("${#expanded[@]}"-1))); do
+for i in $(seq 0 $(("${#expanded[@]}"-1))); do
 one "${expanded[i]}"
 done
 }
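A small reminder of why the loop now starts at 0 (generic bash behavior; the demo array is made up): bash arrays are zero-indexed, so seq 1 would skip the first element of expanded.

expanded=(alpha beta gamma)
for i in $(seq 0 $((${#expanded[@]} - 1))); do
  echo "$i: ${expanded[i]}"
done
# prints 0: alpha, 1: beta, 2: gamma; starting seq at 1 would drop alpha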
@@ -296,7 +296,7 @@ func (server *Server) urlFileId(id []string) string {
 
 func (server *Server) fileId(r *http.Request) []string {
 return strings.Split(
-strings.TrimPrefix(
+strings.Trim(
 strings.TrimPrefix(
 strings.Trim(r.URL.Path, "/"),
 "api/v0/files",