no submod
parent 026e7718ed
commit 8970da11e5
|
|
@@ -1 +0,0 @@
|
|||
Subproject commit 0d9139cd73baef3e1647f2c0dd0a3a3f1d84286b
|
||||
|
|
@@ -0,0 +1,14 @@
|
|||
**/*.sw*
|
||||
server/exec-server
|
||||
server/ezmded
|
||||
server/exec-ezmded
|
||||
server/server
|
||||
server/testdata/files/**/*
|
||||
server/testdata/workd/**/*
|
||||
server/testdata/media/**/*
|
||||
server/testdata/index.html
|
||||
ui/render
|
||||
server/public/ui/**/.*.html
|
||||
**/*.ctmpl.html
|
||||
server/public/ui/render
|
||||
server/releasedata
|
||||
|
|
@@ -0,0 +1,28 @@
|
|||
#! /bin/bash
|
||||
|
||||
ODO_TOKEN="${ODO_TOKEN:-"ac9a9e4d-9c6b-4049-9e8d-c8b97fe053aa"}"
|
||||
BLOB="$BLOB"
|
||||
|
||||
urlencode() {
|
||||
# urlencode <string>
|
||||
|
||||
old_lc_collate=$LC_COLLATE
|
||||
LC_COLLATE=C
|
||||
|
||||
local length="${#1}"
|
||||
for (( i = 0; i < length; i++ )); do
|
||||
local c="${1:$i:1}"
|
||||
case $c in
|
||||
[a-zA-Z0-9.~_-]) printf '%s' "$c" ;;
|
||||
*) printf '%%%02X' "'$c" ;;
|
||||
esac
|
||||
done
|
||||
|
||||
LC_COLLATE=$old_lc_collate
|
||||
}
|
||||
|
||||
blob="$(urlencode "$BLOB")"
|
||||
|
||||
echo curl -i -sS -H "Authorization: Bearer $ODO_TOKEN" "https://odo-public-api.corp.qualtrics.com/odo-api/parsoid/odo.corp.qualtrics.com/v3/page/html/$blob?body_only=true"
|
||||
|
||||
echo
|
||||
|
|
@@ -0,0 +1,3 @@
|
|||
export GITLAB_PAT=LkaMCFFfqdHd-r4Cubnw
|
||||
export ODO_TOKEN=ac9a9e4d-9c6b-4049-9e8d-c8b97fe053aa
|
||||
export GDOC_TOKEN=
|
||||
|
|
@@ -0,0 +1 @@
|
|||
../gitlab/expand.sh
|
||||
|
|
@@ -0,0 +1,29 @@
|
|||
#! /bin/bash
|
||||
|
||||
GITLAB_PAT="$GITLAB_PAT"
|
||||
PROJECT="$PROJECT"
|
||||
BLOB="$BLOB"
|
||||
|
||||
urlencode() {
|
||||
# urlencode <string>
|
||||
|
||||
old_lc_collate=$LC_COLLATE
|
||||
LC_COLLATE=C
|
||||
|
||||
local length="${#1}"
|
||||
for (( i = 0; i < length; i++ )); do
|
||||
local c="${1:$i:1}"
|
||||
case $c in
|
||||
[a-zA-Z0-9.~_-]) printf '%s' "$c" ;;
|
||||
*) printf '%%%02X' "'$c" ;;
|
||||
esac
|
||||
done
|
||||
|
||||
LC_COLLATE=$old_lc_collate
|
||||
}
|
||||
|
||||
project="$(urlencode $PROJECT)"
|
||||
blob="$(urlencode $BLOB)"
|
||||
path="api/v4/projects/$project/wikis/$blob"
|
||||
|
||||
curl -sS -H "Authorization: Bearer $GITLAB_PAT" "https://gitlab-app.eng.qops.net/$path" | jq -r .content
|
||||
|
|
@@ -0,0 +1,69 @@
|
|||
#! /bin/bash
|
||||
|
||||
GITLAB_PAT="$GITLAB_PAT"
|
||||
PROJECT="$PROJECT"
|
||||
GITLAB_PATH="$GITLAB_PATH"
|
||||
|
||||
urlencode() {
|
||||
# urlencode <string>
|
||||
|
||||
old_lc_collate=$LC_COLLATE
|
||||
LC_COLLATE=C
|
||||
|
||||
local length="${#1}"
|
||||
for (( i = 0; i < length; i++ )); do
|
||||
local c="${1:$i:1}"
|
||||
case $c in
|
||||
[a-zA-Z0-9.~_-]) printf '%s' "$c" ;;
|
||||
*) printf '%%%02X' "'$c" ;;
|
||||
esac
|
||||
done
|
||||
|
||||
LC_COLLATE=$old_lc_collate
|
||||
}
|
||||
|
||||
pat="${GITLAB_PAT:-"$(security find-generic-password -a "${USER}" -s GITLAB_PAT -w 2> /dev/null )"}"
|
||||
project="$(urlencode ${PROJECT:-"data-store/orchestration/runbooks"})"
|
||||
path="/api/v4/projects/$project/repository/tree"
|
||||
list() {
|
||||
local query="recursive=true&path=$(test -n "$GITLAB_PATH" && echo "$GITLAB_PATH")"
|
||||
curl -sS \
|
||||
-H "Authorization: Bearer $pat" \
|
||||
"https://gitlab-app.eng.qops.net/$path?$query"
|
||||
}
|
||||
list_wiki() {
|
||||
curl -sS \
|
||||
-H "Authorization: Bearer $pat" \
|
||||
"https://gitlab-app.eng.qops.net/api/v4/projects/$project/wikis?with_content=0"
|
||||
}
|
||||
files=()
|
||||
trees=("${GITLAB_PATH:-"/"}")
|
||||
i=0
|
||||
while [ "$i" -lt "${#trees[@]}" ]; do
|
||||
gitlab_path="${trees[i]}"
|
||||
echo gitlab_path=$gitlab_path, i=$i, trees=${#trees[@]}, files=${#files[@]}... >&2
|
||||
got="$(GITLAB_PATH=$gitlab_path list)"
|
||||
echo got="$got" >&2
|
||||
for nested_path in $(echo "$got" | jq -c .[] | grep '"type":"tree"' | jq -r .path | while read -r line; do echo "$line" | base64; done | grep .); do
|
||||
nested_path="$(echo "$nested_path" | base64 --decode)"
|
||||
if echo "${trees[@]}" | grep -q "[ ^]$nested_path[ $]"; then
|
||||
continue
|
||||
fi
|
||||
trees+=("$nested_path")
|
||||
done
|
||||
for nested_path in $(echo "$got" | jq -c .[] | grep '"type":"blob"' | jq -r .path | while read -r line; do echo "$line" | base64; done | grep .); do
|
||||
nested_path="$(echo "$nested_path" | base64 --decode)"
|
||||
if echo "${files[@]}" | grep -q "[ ^]$nested_path[ $]"; then
|
||||
continue
|
||||
fi
|
||||
files+=("$PROJECT.git/$nested_path")
|
||||
done
|
||||
i=$((i+1))
|
||||
if ((i>5)); then
|
||||
break
|
||||
fi
|
||||
done
|
||||
for file in "${files[@]}"; do
|
||||
echo "gitlab://$file"
|
||||
done | sort
|
||||
list_wiki | jq -c .[] | jq -r .title | sed 's/^/wiki:\/\//'
|
||||
|
|
@@ -0,0 +1,29 @@
|
|||
#! /bin/bash
|
||||
|
||||
GITLAB_PAT="$GITLAB_PAT"
|
||||
PROJECT="$PROJECT"
|
||||
BLOB="$BLOB"
|
||||
|
||||
urlencode() {
|
||||
# urlencode <string>
|
||||
|
||||
old_lc_collate=$LC_COLLATE
|
||||
LC_COLLATE=C
|
||||
|
||||
local length="${#1}"
|
||||
for (( i = 0; i < length; i++ )); do
|
||||
local c="${1:$i:1}"
|
||||
case $c in
|
||||
[a-zA-Z0-9.~_-]) printf '%s' "$c" ;;
|
||||
*) printf '%%%02X' "'$c" ;;
|
||||
esac
|
||||
done
|
||||
|
||||
LC_COLLATE=$old_lc_collate
|
||||
}
|
||||
|
||||
project="$(urlencode $PROJECT)"
|
||||
blob="$(urlencode $BLOB)"
|
||||
path="api/v4/projects/$project/repository/files/$blob/raw"
|
||||
|
||||
curl -sS -H "Authorization: Bearer $GITLAB_PAT" "https://gitlab-app.eng.qops.net/$path"
|
||||
|
|
@@ -0,0 +1 @@
|
|||
# for each file, map potential-original-links to internal links
|
||||
|
|
@@ -0,0 +1,6 @@
|
|||
#! /bin/bash
|
||||
|
||||
curl -i -sS localhost:3000/api/notes/$ID
|
||||
|
||||
curl -sS localhost:3000/api/notes/$ID \
|
||||
| jq '{id:.id, content:.content, title:.title}'
|
||||
|
|
@@ -0,0 +1,3 @@
|
|||
#! /bin/bash
|
||||
|
||||
curl -sS localhost:3000/api/tree | jq -r '.items | to_entries[].value.id' | grep -v ^root$
|
||||
|
|
@@ -0,0 +1,8 @@
|
|||
#! /bin/bash
|
||||
|
||||
api_response="$(curl -i -sS localhost:3000/api)"
|
||||
csrf_key="xsrf-token"
|
||||
csrf_token="$(echo "$api_response" | grep -o '"csrfToken":[^,]*' | tr ':' '\n' | jq -r . | tail -n 1)"
|
||||
notea_auth_cookie="$(echo "$api_response" | grep ^set.cookie: | sed 's/^set.cookie: //' | tr ';' '\n' | head -n 1)"
|
||||
curl --fail -sS -X POST localhost:3000/api/notes/$ID -H "$csrf_key: $csrf_token" -b "$notea_auth_cookie" -H 'Content-Type: application/json' -d '{"content": "g"}'
|
||||
echo $?
|
||||
|
|
@@ -0,0 +1,7 @@
|
|||
* last write wins, no clobber detection
|
||||
* not made for multi-user
|
||||
* editor blips around a bit
|
||||
* doesn't import Obsidian even close to correctly
|
||||
* very pleasant with auto-save and always-editing
|
||||
* `[[` linking exists but doesn't work well, doesn't like being changed, and un-renders on bad setup via click-away and back
|
||||
* hedge*: a collab editor for bigger use cases, but per-bucket and minio-based storage, easy backup, and edit-a-page-to-create-an-import-page are all too good
|
||||
|
|
@@ -0,0 +1,26 @@
|
|||
#! /bin/bash
|
||||
|
||||
killall -9 minio
|
||||
d=$(mktemp -d)
|
||||
cleanup() {
|
||||
killall -9 minio
|
||||
rm -rf $d
|
||||
}
|
||||
trap cleanup EXIT
|
||||
MINIO_ACCESS_KEY=access_key MINIO_SECRET_KEY=secret_key minio server --address :9000 $d &
|
||||
sleep 1
|
||||
mc mb sink/notea
|
||||
|
||||
docker run --rm -it \
|
||||
--name notea \
|
||||
-p 3001:3000 \
|
||||
-e STORE_ACCESS_KEY=access_key \
|
||||
-e STORE_SECRET_KEY=secret_key \
|
||||
-e STORE_BUCKET=notea \
|
||||
-e STORE_PREFIX=notea/ \
|
||||
-e STORE_END_POINT=http://$(ifconfig en0 | grep inet\ | awk '{print $2}'):9000 \
|
||||
-e STORE_FORCE_PATH_STYLE=true \
|
||||
-e PASSWORD=notea \
|
||||
-e DISABLE_PASSWORD=true \
|
||||
-e COOKIE_SECURE=false \
|
||||
cinwell/notea
|
||||
|
|
@@ -0,0 +1,3 @@
|
|||
* https://github.com/Ionaru/easy-markdown-editor
|
||||
* https://www.npmjs.com/package/rich-markdown-editor
|
||||
* npm + more dependencies, but is notea's and BookStack's bread and butter
|
||||
|
|
@@ -0,0 +1,10 @@
|
|||
## running
|
||||
|
||||
export RCLONE_CONFIG=/tmp/rclone.temp.conf
|
||||
export RCLONE_CONFIG_PASS=abc
|
||||
export CACHE=/tmp/notea-team3
|
||||
export NOTES_ADDR=${NOTES_ADDR:-http://localhost:3004}
|
||||
export GITLAB_PAT=$(get_secret GITLAB_PAT)
|
||||
mkdir -p $CACHE
|
||||
bash main.sh
|
||||
echo $?
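`get_secret` is not defined in this snippet; a minimal sketch, assuming the same macOS keychain lookup that the gitlab expand script falls back to, might be:

```bash
# hypothetical helper, not one of this repo's files; mirrors the
# `security find-generic-password` fallback used in gitlab/expand.sh
get_secret() {
  security find-generic-password -a "${USER}" -s "$1" -w 2> /dev/null
}
```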
|
||||
|
|
@@ -0,0 +1,28 @@
|
|||
#! /bin/bash
|
||||
|
||||
cache() (
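# tiny file cache: the key is an md5 of the arguments, and freshness is
# judged by the cache file's mtime against CACHE_DURATION (seconds)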
|
||||
path() {
|
||||
echo "$CACHE/$(echo "$*" | base64 | md5sum | awk '{print $1}')"
|
||||
}
|
||||
get() {
|
||||
local path="$(path "$*")"
|
||||
if ! [ -f "$path" ]; then
|
||||
return 1
|
||||
fi
|
||||
if wc -c < "$path" | grep -q '^[ ]*0[ ]*$'; then # an empty cache file counts as a miss
|
||||
return 1
|
||||
fi
|
||||
local created="$(date -r "$path" +%s)"
|
||||
local now="$(date +%s)"
|
||||
if ((now-created > CACHE_DURATION)); then
|
||||
return 1
|
||||
fi
|
||||
cat "$path"
|
||||
}
|
||||
put() {
|
||||
local path="$(path "$*")"
|
||||
tee "$path"
|
||||
}
|
||||
"$@"
|
||||
)
|
||||
|
||||
|
|
@@ -0,0 +1,12 @@
|
|||
#! /bin/bash
|
||||
|
||||
test_path() {
|
||||
cache path abc | tr '/' '\n' | tail -n 1 | grep -q .
|
||||
}
|
||||
|
||||
test_get_put_get() {
|
||||
export CACHE_DURATION=10
|
||||
! cache get abc
|
||||
echo hi | cache put abc > /dev/null
|
||||
cache get abc | grep -q .
|
||||
}
|
||||
|
|
@@ -0,0 +1,156 @@
|
|||
#! /bin/bash
|
||||
|
||||
gitlab() (
|
||||
_is_gitlab() {
|
||||
echo "$*" | grep -q gitlab.app
|
||||
}
|
||||
|
||||
_is_wiki() {
|
||||
echo "$*" | grep -q '/wikis'
|
||||
}
|
||||
|
||||
is() {
|
||||
_is_gitlab "$@" && ! _is_wiki "$@"
|
||||
}
|
||||
|
||||
human_url() {
|
||||
_url "$@" | sed 's/api.v4.projects.//' | sed 's/%2F/\//g' | sed 's/.raw$//' | sed 's/repository\/files/-\/tree\/master/'
|
||||
}
|
||||
|
||||
_url() {
|
||||
local base_url="$1"
|
||||
local blob="$(echo "$2" | base64 --decode)"
|
||||
|
||||
local project="$(_url_to_project_root "$base_url" | head -n 1)"
|
||||
project="$(urlencode "$project")"
|
||||
local root="$(_url_to_project_root "$base_url" | tail -n 1)"
|
||||
if [ -n "$root" ]; then
|
||||
blob="${root%/}/${blob#/}"
|
||||
blob="${blob#/}"
|
||||
blob="${blob%/}"
|
||||
fi
|
||||
blob="$(urlencode "$blob")"
|
||||
|
||||
local path="api/v4/projects/$project/repository/files/$blob/raw"
|
||||
echo "https://gitlab-app.eng.qops.net/$path"
|
||||
}
|
||||
|
||||
get() {
|
||||
_gcurl "$(_url "$@")"
|
||||
}
|
||||
|
||||
expand() {
|
||||
local cache_key="gitlab expand $*"
|
||||
if cache get "$cache_key"; then
|
||||
return 0
|
||||
fi
|
||||
_expand "$@" | sort | cache put "$cache_key"
|
||||
}
|
||||
|
||||
_expand() {
|
||||
local url="$1"
|
||||
local project="$(_url_to_project_root "$url" | head -n 1)"
|
||||
local root="$(_url_to_project_root "$url" | tail -n 1)"
|
||||
__expand "$project" "$root"
|
||||
}
|
||||
|
||||
_url_to_project_root() {
|
||||
local url="$1"
|
||||
local url_path="${url#http*://gitlab*.net/}"
|
||||
local project=""
|
||||
if [[ "$url_path" == *"/-/"* ]]; then
|
||||
project="${url_path%%/-/*}"
|
||||
elif [[ "$url_path" == *"/tree/"* ]]; then
|
||||
project="${url_path%%/tree/*}"
|
||||
else
|
||||
project="$url_path"
|
||||
fi
|
||||
local root="${url_path#*"$project"}"
|
||||
root="${root#*/-/}"
|
||||
root="${root#/}"
|
||||
root="${root#blob/}"
|
||||
root="${root#tree/}"
|
||||
root="$(echo "$root" | sed 's/^[^\/]*//')"
|
||||
root="${root#/}"
|
||||
echo "$project"
|
||||
echo "$root"
|
||||
}
|
||||
|
||||
__expand() {
|
||||
local project="$1"
|
||||
local root="${2:-"/"}"
|
||||
|
||||
local b64_files=()
|
||||
local b64_trees=("$(echo "$root" | base64)")
|
||||
local i=0
|
||||
|
||||
find_each() {
|
||||
local type="$1"
|
||||
shift
|
||||
echo "$*" \
|
||||
| jq -c .[] \
|
||||
| grep "\"type\":\"$type\"" \
|
||||
| jq -r .path \
|
||||
| while read -r line; do echo "$line" | base64; done \
|
||||
| grep .
|
||||
}
|
||||
while [ "$i" -lt "${#b64_trees[@]}" ]; do
|
||||
got="$(_list_tree "$project" "$(echo "${b64_trees[i]}" | base64 --decode)")"
|
||||
for b64_tree in $(find_each "tree" "$got"); do
|
||||
if ! echo "${b64_trees[@]}" | grep -q "[ ^]$b64_tree[ $]"; then
|
||||
b64_trees+=("$b64_tree")
|
||||
fi
|
||||
done
|
||||
for b64_file in $(find_each "blob" "$got"); do
|
||||
if ! echo "${b64_files[@]}" | grep -q "[ ^]$b64_file[ $]"; then
|
||||
b64_files+=("$b64_file")
|
||||
fi
|
||||
done
|
||||
i=$((i+1))
|
||||
done
|
||||
for b64_file in "${b64_files[@]}"; do
|
||||
local file="$(echo "$b64_file" | base64 --decode)"
|
||||
file="${file#$root}"
|
||||
file="${file#/}"
|
||||
case "${file##*.}" in
|
||||
md|txt )
|
||||
echo "$file" | base64
|
||||
;;
|
||||
esac
|
||||
done
|
||||
}
|
||||
|
||||
_list_tree() {
|
||||
local project="$(urlencode "$1")"
|
||||
local path="api/v4/projects/$project/repository/tree"
|
||||
local query="recursive=true&path=$2"
|
||||
_gcurl "https://gitlab-app.eng.qops.net/$path?$query"
|
||||
}
|
||||
|
||||
_gcurl() {
|
||||
local cache_key="gitlab _gcurl $*"
|
||||
if cache get "$cache_key"; then
|
||||
return 0
|
||||
fi
|
||||
__gcurl "$@" | cache put "$cache_key"
|
||||
}
|
||||
|
||||
__gcurl() {
|
||||
curl -sS -H "Authorization: Bearer $GITLAB_PAT" "$@"
|
||||
}
|
||||
|
||||
"$@"
|
||||
)
|
||||
|
||||
urlencode() (
|
||||
LC_COLLATE=C
|
||||
local length="${#1}"
|
||||
for (( i = 0; i < length; i++ )); do
|
||||
local c="${1:$i:1}"
|
||||
case $c in
|
||||
[a-zA-Z0-9.~_-]) printf '%s' "$c" ;;
|
||||
*) printf '%%%02X' "'$c" ;;
|
||||
esac
|
||||
done
|
||||
)
|
||||
|
||||
|
|
@@ -0,0 +1,69 @@
|
|||
#! /bin/bash
|
||||
|
||||
test___expand() {
|
||||
gitlab eval "$(cat <<EOF
|
||||
_gcurl() {
|
||||
case "\$1" in
|
||||
'https://gitlab-app.eng.qops.net/api/v4/projects/project/repository/tree?recursive=true&path=dir' )
|
||||
echo '[
|
||||
{"id": "a", "name": "dir2", "type": "tree", "path": "dir/dir2", "mode": "040000"},
|
||||
{"id": "b", "name": "blob.md", "type": "blob", "path": "dir/blob.md", "mode": "100644"}
|
||||
]'
|
||||
;;
|
||||
'https://gitlab-app.eng.qops.net/api/v4/projects/project/repository/tree?recursive=true&path=dir/dir2' )
|
||||
echo '[
|
||||
{"id": "c", "name": "blob2.txt", "type": "blob", "path": "dir/dir2/blob2.txt", "mode": "100644"},
|
||||
{"id": "c", "name": "blob3.jpg", "type": "blob", "path": "dir/dir2/blob3.jpg", "mode": "100644"}
|
||||
]'
|
||||
;;
|
||||
* )
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
local blob=false
|
||||
local dir2blob2=false
|
||||
local others=0
|
||||
for result in \$(__expand project dir); do
|
||||
if echo \$result | base64 --decode | grep -q ^blob.md$; then
|
||||
blob=true
|
||||
elif echo \$result | base64 --decode | grep -q ^dir2.blob2.txt$; then
|
||||
dir2blob2=true
|
||||
else
|
||||
others=\$((others+1))
|
||||
fi
|
||||
done
|
||||
if [ \$others != 0 ]; then
|
||||
return 101
|
||||
fi
|
||||
if ! \$blob; then
|
||||
return 102
|
||||
fi
|
||||
if ! \$dir2blob2; then
|
||||
return 103
|
||||
fi
|
||||
EOF
|
||||
)"
|
||||
}
|
||||
|
||||
test_url_to_project_root() {
|
||||
log() { true; };
|
||||
gitlab _url_to_project_root https://gitlab-app.eng.qops.net/data-store/orchestration/runbooks/tree/master | grep -q '^data-store/orchestration/runbooks$'
|
||||
gitlab _url_to_project_root https://gitlab-app.eng.qops.net/data-store/orchestration/runbooks/tree/master | tail -n 1 | grep ^$
|
||||
|
||||
gitlab _url_to_project_root https://gitlab-app.eng.qops.net/data-store/orchestration/runbooks/-/blob/master/Alerts/rems/README.md | grep -q 'data-store/orchestration/runbooks'
|
||||
gitlab _url_to_project_root https://gitlab-app.eng.qops.net/data-store/orchestration/runbooks/-/blob/master/Alerts/rems/README.md | grep -q 'Alerts/rems/README.md'
|
||||
|
||||
gitlab _url_to_project_root https://gitlab-app.eng.qops.net/data-store/orchestration/runbooks/-/tree/master/Alerts | grep -q 'data-store/orchestration/runbooks'
|
||||
gitlab _url_to_project_root https://gitlab-app.eng.qops.net/data-store/orchestration/runbooks/-/tree/master/Alerts | grep -q 'Alerts'
|
||||
|
||||
gitlab _url_to_project_root https://gitlab-app.eng.qops.net/data-store/orchestration/runbooks | grep -q 'data-store/orchestration/runbooks'
|
||||
gitlab _url_to_project_root https://gitlab-app.eng.qops.net/data-store/orchestration/runbooks | grep -q '^$'
|
||||
}
|
||||
|
||||
test_is() {
|
||||
gitlab is https://gitlab-app.eng.qops.net/data-store/orchestration/runbooks/-/blob/master/Alerts/rems/README.md
|
||||
gitlab is https://gitlab-app.eng.qops.net/data-store/orchestration/runbooks/-/tree/master/Alerts
|
||||
gitlab is https://gitlab-app.eng.qops.net/data-store/orchestration/runbooks
|
||||
! gitlab is https://gitlab-app.eng.qops.net/surveys/marauders-map/wikis/Customer-impact-of-an-outage
|
||||
}
|
||||
|
|
@@ -0,0 +1,87 @@
|
|||
#! /bin/bash
|
||||
|
||||
gitlab_wiki() (
|
||||
is() {
|
||||
gitlab _is_gitlab "$@" && gitlab _is_wiki "$@"
|
||||
}
|
||||
|
||||
human_url() {
|
||||
local url="${1%/}"
|
||||
url="${url%%#*}"
|
||||
echo "$url/$(echo "$2" | base64 --decode)"
|
||||
}
|
||||
|
||||
_host() {
|
||||
local id="$1"
|
||||
local host="${id%%.net*}.net"
|
||||
echo "$host"
|
||||
}
|
||||
|
||||
_project() {
|
||||
local id="$1"
|
||||
local host="$(_host "$@")"
|
||||
local path="${id#$host}"
|
||||
local project="${path%%/wikis*}"
|
||||
project="${project%/-}"
|
||||
project="${project%/-/}"
|
||||
project="${project#/}"
|
||||
project="${project%/}"
|
||||
echo "${project%%#*}"
|
||||
}
|
||||
|
||||
_blob() {
|
||||
local id="$1"
|
||||
local host="$(_host "$@")"
|
||||
local project="$(_project "$@")"
|
||||
local path="${id#$host}"
|
||||
local blob="${path#*/wikis}"
|
||||
blob="${blob#/}"
|
||||
blob="${blob%/}"
|
||||
echo "${blob%%#*}"
|
||||
}
|
||||
|
||||
get() {
|
||||
local base="$1"
|
||||
local host="$(_host "$base")"
|
||||
local project="$(_project "$base")"
|
||||
local blob="$(_blob "$base")"
|
||||
if [ "$(echo "$2" | base64 --decode)" != "" ]; then
|
||||
blob="$blob/$(echo "$2" | base64 --decode)"
|
||||
fi
|
||||
log project=$project
|
||||
log "$host/api/v4/projects/$(urlencode "$project")/wikis/$(urlencode "$blob")"
|
||||
gitlab \
|
||||
_gcurl \
|
||||
"$host/api/v4/projects/$(urlencode "$project")/wikis/$(urlencode "$blob")" \
|
||||
| jq -r .content
|
||||
}
|
||||
|
||||
expand() {
|
||||
local cache_key="gitlab_wiki expand $*"
|
||||
if cache get "$cache_key"; then
|
||||
return 0
|
||||
fi
|
||||
_expand "$@" | sort | cache put "$cache_key"
|
||||
}
|
||||
|
||||
_expand() {
|
||||
local host="$(_host "$1")"
|
||||
local project="$(_project "$1")"
|
||||
local blob="$(_blob "$1")"
|
||||
if [ -n "$blob" ] && [ "$blob" != "" ]; then
|
||||
echo "" | base64
|
||||
return
|
||||
fi
|
||||
log host=$host, project=$project, blob=$blob
|
||||
gitlab \
|
||||
_gcurl \
|
||||
"$host/api/v4/projects/$(urlencode "$project")/wikis?with_content=0" \
|
||||
| jq -r .[].slug \
|
||||
| while read -r line; do
|
||||
echo "$line" | base64
|
||||
done
|
||||
}
|
||||
|
||||
"$@"
|
||||
)
|
||||
|
||||
|
|
@@ -0,0 +1,76 @@
|
|||
#! /bin/bash
|
||||
|
||||
google() (
|
||||
_is_slides() {
|
||||
echo "$@" | grep -q 'docs.google.com.presentation'
|
||||
}
|
||||
|
||||
_is_sheets() {
|
||||
echo "$@" | grep -q 'docs.google.com.spreadsheets'
|
||||
}
|
||||
|
||||
_is_doc() {
|
||||
echo "$@" | grep -q 'docs.google.com.document'
|
||||
}
|
||||
|
||||
is() {
|
||||
_is_sheets "$@" || _is_doc "$@" || _is_slides "$@"
|
||||
}
|
||||
|
||||
human_url() {
|
||||
echo "$1"
|
||||
}
|
||||
|
||||
get() {
|
||||
local url="$1"
|
||||
local id="${url%/*}"
|
||||
id="${id##*/}"
|
||||
local downloaded="$(rclone get_google "$id")"
|
||||
echo "# ${downloaded##*/}"
|
||||
echo ""
|
||||
if [ "${downloaded##*.}" == "csv" ]; then
|
||||
_csv_to_md "$downloaded"
|
||||
elif [ "${downloaded##*.}" == "html" ]; then
|
||||
_html_to_md "$downloaded"
|
||||
else
|
||||
cat "$downloaded"
|
||||
fi
|
||||
}
|
||||
|
||||
_html_to_md() {
|
||||
which pandoc &> /dev/null || return 1 # pandoc is required for the html -> markdown conversion below
|
||||
local f="$1"
|
||||
#log f=$f
|
||||
cat "$f" \
|
||||
| sed 's/.*<body/<body/' \
|
||||
| sed 's/<\/body>.*/<\/body>/' \
|
||||
| sed 's/<[\/]*span[^>]*>//g' \
|
||||
| perl -pe 's|<div class="c[0-9][0-9]*">.*?<\/div>||g' \
|
||||
| sed 's/<\([a-z][a-z]*\)[^>]*/<\1/g' \
|
||||
| pandoc - -f html -t commonmark -s -o - \
|
||||
| sed 's/^<[\/]*div>$//g'
|
||||
}
|
||||
|
||||
_csv_to_md() {
|
||||
local f="$1"
|
||||
(
|
||||
head -n 1 "$f"
|
||||
head -n 1 "$f" \
|
||||
| sed 's/^[^,][^,]*/--- /' \
|
||||
| sed 's/[^,][^,]*$/ ---/' \
|
||||
| sed 's/,[^,][^,]*/, --- /g' \
|
||||
| sed 's/[^|]$/|/'
|
||||
tail -n +2 "$f"
|
||||
) \
|
||||
| grep . \
|
||||
| sed 's/,/ | /g' \
|
||||
| sed 's/^/| /'
|
||||
}
|
||||
|
||||
expand() {
|
||||
get "$@" | head -n 1 | sed 's/^[#]* //' | base64
|
||||
}
|
||||
|
||||
"$@"
|
||||
)
|
||||
|
||||
|
|
@@ -0,0 +1,191 @@
|
|||
#! /bin/bash
|
||||
|
||||
main() {
|
||||
config
|
||||
log crawling ids...
|
||||
for id in $(crawlable_ids); do
|
||||
log crawling id $id
|
||||
crawl "$id"
|
||||
done
|
||||
log rewriting ids...
|
||||
for id in $(ids); do
|
||||
rewrite "$id"
|
||||
done
|
||||
}
|
||||
|
||||
config() {
|
||||
set -o pipefail
|
||||
set -e
|
||||
export CACHE="${CACHE:-"$(mktemp -d)"}"
|
||||
mkdir -p "$CACHE"
|
||||
export CACHE_DURATION=$((60*50))
|
||||
export NOTES_ADDR="${NOTES_ADDR:-"http://localhost:3004"}"
|
||||
export GITLAB_PAT="$GITLAB_PAT"
|
||||
export RCLONE_CONFIG="$RCLONE_CONFIG"
|
||||
export RCLONE_CONFIG_PASS="$RCLONE_CONFIG_PASS"
|
||||
source ./gitlab.sh
|
||||
source ./gitlab_wiki.sh
|
||||
source ./google.sh
|
||||
source ./rclone.sh
|
||||
source ./cache.sh
|
||||
source ./notes.sh
|
||||
}
|
||||
|
||||
log() {
|
||||
echo "$(echo "$(date +%H:%M:%S)> $*" | tr '\n' ' ')" >&2
|
||||
}
|
||||
|
||||
ids() {
|
||||
notes ids | sort
|
||||
}
|
||||
|
||||
crawlable_ids() {
|
||||
local all_ids=($(ids))
|
||||
local crawlable_ids=()
|
||||
for id in "${all_ids[@]}"; do
|
||||
if for crawlable_id in "${crawlable_ids[@]}"; do
|
||||
if [ "$id" != "${id#$crawlable_id/}" ]; then
|
||||
echo true
|
||||
fi
|
||||
done | grep -q true; then
|
||||
continue
|
||||
fi
|
||||
local content="$(notes get "$id")"
|
||||
if is_crawlable "$content"; then
|
||||
crawlable_ids+=("$id")
|
||||
fi
|
||||
done
|
||||
for crawlable_id in "${crawlable_ids[@]}"; do
|
||||
echo "$crawlable_id"
|
||||
done
|
||||
}
|
||||
|
||||
crawl() {
|
||||
_crawl "$@"
|
||||
}
|
||||
|
||||
_crawl() {
|
||||
local id="$1"
|
||||
local content="$(notes get "$id")"
|
||||
local json="$(
|
||||
printf '{"content": %s, "id": "%s"}' \
|
||||
"$(echo "$content" | jq -Rs)" \
|
||||
"$id"
|
||||
)"
|
||||
local crawlable_source="$(extract_crawlable_source "$content")"
|
||||
for backend in gitlab gitlab_wiki google; do
|
||||
if $backend is "$crawlable_source"; then
|
||||
crawl_with $backend "$json"
|
||||
return $?
|
||||
fi
|
||||
done
|
||||
log "unknown backend for $crawlable_source"
|
||||
return 1
|
||||
}
|
||||
|
||||
extract_crawlable_source() {
|
||||
echo "$*" | head -n 1 | awk '{print $NF}' | sed 's/^<//' | sed 's/>$//' | sed 's/^\///' | sed 's/\/$//'
|
||||
}
|
||||
|
||||
crawl_with() {
|
||||
local backend="$1"
|
||||
local json="$2"
|
||||
local pid="$(echo "$json" | jq -r .id)"
|
||||
|
||||
local content="$(echo "$json" | jq -r .content)"
|
||||
local crawlable_source="$(extract_crawlable_source "$content")"
|
||||
|
||||
notes put "$pid" "$(notes meta "$pid" | jq -r .Meta.Title)" "$crawlable_source"
|
||||
|
||||
local expanded=($($backend expand "$crawlable_source"))
|
||||
|
||||
log purge $crawlable_source:
|
||||
for subid in $(notes ids | grep "^$pid/"); do
|
||||
notes del "$subid"
|
||||
done
|
||||
|
||||
log expand $crawlable_source:"${#expanded[@]}: ${expanded[@]}"
|
||||
notes_mkdir_p() {
|
||||
local id="$1"
|
||||
local subtitle="${2%/}"
|
||||
notes put "$id" "$subtitle" "autogenerated content"
|
||||
}
|
||||
one() {
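# handle a single expanded entry: build a stable sub-id from the base64'd
# title path, rewrite relative links against the source's human URL, drop
# image links that point back into the repo tree, and push a read-only note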
|
||||
encode() {
|
||||
base64 | md5sum | cut -c 1-10 | awk '{print $1}' | tr -d '\n'
|
||||
}
|
||||
local i="$1"
|
||||
local full_title="$(
|
||||
echo "$i" | base64 --decode | grep . || echo "${crawlable_source##*/}"
|
||||
)"
|
||||
full_title="${full_title%/}"
|
||||
full_title="${full_title#/}"
|
||||
export TITLE="${full_title##*/}"
|
||||
local human_url="$($backend human_url "$crawlable_source" "$i")"
|
||||
export CONTENT="$(
|
||||
echo "**!! WARNING !! This page is autogenerated and prone to destruction and replacement**"
|
||||
echo "**[See the original]($human_url)**"
|
||||
echo ""
|
||||
$backend get "$crawlable_source" "$i" \
|
||||
| sed 's/](\([^#h]\)/]\(%%%\1/g'
|
||||
)"
|
||||
export CONTENT="${CONTENT//"%%%"/"${human_url%/*}/"}"
|
||||
export CONTENT="$(
|
||||
printf "%s\n" "$CONTENT" \
|
||||
| sed 's/!\[\([^]]*\)](\([^)]*\)\/-\/tree\/\([^)]*\))//g'
|
||||
)"
|
||||
export ID="$(
|
||||
local sum="$pid/"
|
||||
local title_so_far=""
|
||||
for subtitle in $(echo $full_title | tr '/' '\n' | while read -r subtitle; do echo "$subtitle" | base64; done); do
|
||||
local subtitle="$(echo "$subtitle" | base64 --decode)"
|
||||
if [ -n "$title_so_far" ]; then
|
||||
local mkdir_p_title="${title_so_far%/}"
|
||||
mkdir_p_title="${mkdir_p_title##*/}"
|
||||
notes_mkdir_p "${sum%/}" "${mkdir_p_title}" >&2
|
||||
fi
|
||||
sum+="$(echo "$subtitle" | encode)/"
|
||||
title_so_far+="$subtitle/"
|
||||
done
|
||||
echo "$sum"
|
||||
)"
|
||||
ID="${ID%/}"
|
||||
if [ "${#expanded[@]}" -lt 2 ]; then
|
||||
ID="$pid"
|
||||
TITLE="$(notes meta "$ID" | jq -r .Meta.Title)"
|
||||
CONTENT="$(printf "%s\n\n%s", "$crawlable_source" "$CONTENT")"
|
||||
fi
|
||||
log " $ID ($TITLE): ${#CONTENT}"
|
||||
push_crawled "$ID" "$TITLE" "$CONTENT"
|
||||
log " /$ID ($TITLE): ${#CONTENT}"
|
||||
}
|
||||
if [ "${#expanded[@]}" -gt 0 ]; then
|
||||
for i in $(seq 0 $(("${#expanded[@]}"-1))); do
|
||||
one "${expanded[i]}"
|
||||
done
|
||||
else
|
||||
one ""
|
||||
fi
|
||||
}
|
||||
|
||||
push_crawled() {
|
||||
notes put "$@"
|
||||
}
|
||||
|
||||
is_crawlable() {
|
||||
local crawlable_source="$(extract_crawlable_source "$*")"
|
||||
# https://unix.stackexchange.com/questions/181254/how-to-use-grep-and-cut-in-script-to-obtain-website-urls-from-an-html-file
|
||||
local url_pattern="(http|https)://[a-zA-Z0-9./?=_%:\-\#--]*"
|
||||
echo "$crawlable_source" | cut -c 1-300 | grep -q -E "^[ ]*$url_pattern[ ]*$"
|
||||
}
|
||||
|
||||
rewrite() {
|
||||
log not impl: rewrite "./asdf" to "absolute.com/asdf"
|
||||
log not impl: rewrite "#abc-def?f=abc" to "#h-abc-def?f=abc" or better dont depend on query params so much
|
||||
log not impl rewrite, change images
|
||||
return 1
|
||||
}
|
||||
|
||||
if [ "$0" == "$BASH_SOURCE" ]; then
|
||||
main "$@"
|
||||
fi
|
||||
|
|
@@ -0,0 +1,116 @@
|
|||
#! /bin/bash
|
||||
|
||||
notes() (
|
||||
ids() {
|
||||
_recurse_ids "$(_tree)"
|
||||
}
|
||||
|
||||
_tree() {
|
||||
local cache_key="notes _tree"
|
||||
if CACHE_DURATION=5 cache get "$cache_key"; then
|
||||
return 0
|
||||
fi
|
||||
__tree "$@" | cache put "$cache_key"
|
||||
}
|
||||
|
||||
__tree() {
|
||||
_nncurl $NOTES_ADDR/api/v0/tree
|
||||
}
|
||||
|
||||
_nncurl() {
|
||||
curl -sS "$@"
|
||||
}
|
||||
|
||||
_recurse_ids() {
|
||||
local json="$1"
|
||||
if echo "$json" | jq .Branches | grep -q ^null$; then
|
||||
return 0
|
||||
fi
|
||||
local b64lines="$(echo "$json" | jq -r '.Branches | keys[]' | while read -r line; do echo "$line" | base64; done)"
|
||||
if [ -z "$b64lines" ]; then
|
||||
return 0
|
||||
fi
|
||||
for line in $b64lines; do
|
||||
line="$(echo "$line" | base64 --decode)"
|
||||
if ! _is_deleted "$line"; then
|
||||
echo "$line"
|
||||
_recurse_ids "$(echo "$json" | jq -c ".Branches[\"$line\"]")"
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
meta() {
|
||||
local cache_key="notes meta $*"
|
||||
if CACHE_DURATION=5 cache get "$cache_key"; then
|
||||
return 0
|
||||
fi
|
||||
_meta "$@" | cache put "$cache_key"
|
||||
}
|
||||
|
||||
_meta() {
|
||||
local id="$1"
|
||||
local tree="$(_tree)"
|
||||
local pid="${id%%/*}"
|
||||
while [ "$id" != "$pid" ]; do
|
||||
tree="$(echo "$tree" | jq ".Branches[\"$pid\"]")"
|
||||
local to_add="${id#$pid/}"
|
||||
to_add="${to_add%%/*}"
|
||||
pid="$pid/$to_add"
|
||||
done
|
||||
echo "$tree" | jq ".Branches[\"$id\"].Leaf"
|
||||
}
|
||||
|
||||
_is_deleted() {
|
||||
local id="$1"
|
||||
while [ -n "$id" ]; do
|
||||
if meta "$id" | jq .Deleted | grep -q true; then
|
||||
return 0
|
||||
fi
|
||||
if [ "$id" == "${id%/*}" ]; then
|
||||
return 1
|
||||
fi
|
||||
id="${id%/*}"
|
||||
done
|
||||
return 1
|
||||
}
|
||||
|
||||
get() {
|
||||
_get "$@"
|
||||
}
|
||||
|
||||
_get() {
|
||||
_nncurl $NOTES_ADDR/api/v0/files/$1
|
||||
}
|
||||
|
||||
del() {
|
||||
local id="$1"
|
||||
_nncurl \
|
||||
-X DELETE \
|
||||
$NOTES_ADDR/api/v0/files/$id
|
||||
}
|
||||
|
||||
put() {
|
||||
set -u
|
||||
local ret=0
|
||||
if ! _put "$@"; then
|
||||
ret=1
|
||||
fi
|
||||
set +u
|
||||
return $ret
|
||||
}
|
||||
|
||||
_put() {
|
||||
local id="$1"
|
||||
local title="$2"
|
||||
local body="$3"
|
||||
_nncurl \
|
||||
-X PUT \
|
||||
-H "Title: $title" \
|
||||
-H "Read-Only: true" \
|
||||
-d "$body" \
|
||||
$NOTES_ADDR/api/v0/files/$id >&2
|
||||
}
|
||||
|
||||
"$@"
|
||||
)
|
||||
|
||||
|
|
@@ -0,0 +1,66 @@
|
|||
#! /bin/bash
|
||||
|
||||
test_ids() {
|
||||
local two_levels='{
|
||||
"Branches": {
|
||||
"id": {
|
||||
"Branches": {
|
||||
"subid": {
|
||||
"Branches": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}'
|
||||
notes eval "$(cat <<EOF
|
||||
_tree() { echo '$two_levels'; true; }
|
||||
(ids; true) | grep '^id$' > /dev/null || return 101
|
||||
(ids; true) | grep '^id\/subid$' > /dev/null || return 102
|
||||
ids | wc -l | grep 2 > /dev/null || return 103
|
||||
EOF
|
||||
)"
|
||||
}
|
||||
|
||||
test_meta() {
|
||||
local two_levels='{
|
||||
"Branches": {
|
||||
"id": {
|
||||
"Leaf": {"Title": "top level"},
|
||||
"Branches": {
|
||||
"subid": {
|
||||
"Leaf": {"Title": "sub level"},
|
||||
"Branches": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}'
|
||||
notes eval "$(cat <<EOF
|
||||
_tree() { echo '$two_levels'; }
|
||||
meta id | jq .Title | grep -q top.level || return 201
|
||||
meta id/subid | jq .Title | grep -q sub.level || return 202
|
||||
EOF
|
||||
)"
|
||||
}
|
||||
|
||||
test__is_deleted() {
|
||||
local two_levels='{
|
||||
"Branches": {
|
||||
"id": {
|
||||
"Leaf": {"Title": "top level", "Deleted": true},
|
||||
"Branches": {
|
||||
"subid": {
|
||||
"Leaf": {"Title": "sub level"},
|
||||
"Branches": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}'
|
||||
notes eval "$(cat <<EOF
|
||||
_tree() { echo '$two_levels'; }
|
||||
_is_deleted id || return 301
|
||||
_is_deleted id/subid || return 302
|
||||
EOF
|
||||
)"
|
||||
}
|
||||
|
|
@@ -0,0 +1,62 @@
|
|||
#! /bin/bash
|
||||
|
||||
rclone() (
|
||||
get_google() {
|
||||
local cache_key="rclone get google 2 $*"
|
||||
if cache get "$cache_key"; then
|
||||
return 0
|
||||
fi
|
||||
_get_google "$@" | cache put "$cache_key"
|
||||
}
|
||||
|
||||
_get_google() {
|
||||
_rate_limit
|
||||
local id="$1"
|
||||
local out="$(mktemp -d)"
|
||||
_cmd backend copyid work-notes-google: --drive-export-formats=csv,html,txt "$id" "$out/"
|
||||
find "$out" -type f
|
||||
}
|
||||
|
||||
_rate_limit() {
|
||||
local f="/tmp/rclone.rate.limit"
|
||||
local last=0
|
||||
if [ -f "$f" ]; then
|
||||
last="$(date -r "$f" +%s)"
|
||||
fi
|
||||
local now="$(date +%s)"
|
||||
local since_last=$((now-last))
|
||||
local dur=2
|
||||
if ((since_last>2)); then
|
||||
dur=0
|
||||
fi
|
||||
sleep "$dur"
|
||||
touch "$f"
|
||||
}
|
||||
|
||||
_ensure() {
|
||||
which rclone &> /dev/null && rclone version &> /dev/null
|
||||
}
|
||||
|
||||
_cmd() {
|
||||
_ensure_google_config
|
||||
__cmd "$@"
|
||||
}
|
||||
|
||||
__cmd() {
|
||||
_ensure
|
||||
RCLONE_CONFIG_PASS="$RCLONE_CONFIG_PASS" \
|
||||
$(which rclone) \
|
||||
--config "$RCLONE_CONFIG" \
|
||||
--size-only \
|
||||
--fast-list \
|
||||
--retries 10 \
|
||||
--retries-sleep 10s \
|
||||
"$@"
|
||||
}
|
||||
|
||||
_ensure_google_config() {
|
||||
__cmd config show | grep -q work-notes-google
|
||||
}
|
||||
|
||||
"$@"
|
||||
)
|
||||
|
|
@@ -0,0 +1,52 @@
|
|||
#! /bin/bash
|
||||
|
||||
main() {
|
||||
local ret=0
|
||||
for f in ./*_test.sh; do
|
||||
if ! one_main "$f"; then
|
||||
echo failed $f >&2
|
||||
ret=$((ret+1))
|
||||
fi
|
||||
done
|
||||
if [ $ret != 0 ]; then
|
||||
echo failed >&2
|
||||
fi
|
||||
return $ret
|
||||
}
|
||||
|
||||
one_main() (
|
||||
local f="$1"
|
||||
local ret=0
|
||||
for t in $(grep ^test_ "$f" | sed 's/(.*//'); do
|
||||
one_test "$f" "$t"
|
||||
local test_ret=$?
|
||||
if [ $test_ret != 0 ]; then
|
||||
echo failed $f:$t: $test_ret >&2
|
||||
ret=$((ret+1))
|
||||
fi
|
||||
done
|
||||
return $ret
|
||||
)
|
||||
|
||||
one_test() (
|
||||
local f="$1"
|
||||
local t="$2"
|
||||
each
|
||||
source "${f%_test.sh}.sh"
|
||||
source "$f"
|
||||
eval "$t"
|
||||
)
|
||||
|
||||
each() {
|
||||
export CACHE=$(mktemp -d)
|
||||
export GITLAB_PAT=gibberish
|
||||
export NOTES_ADDR=http://127.0.0.1:61111
|
||||
source ./cache.sh
|
||||
set -e
|
||||
set -o pipefail
|
||||
log() { echo "> $*" >&2; }
|
||||
}
|
||||
|
||||
if [ "$0" == "$BASH_SOURCE" ]; then
|
||||
main "$@"
|
||||
fi
|
||||
|
|
@@ -0,0 +1,3 @@
|
|||
.*
|
||||
**/.*
|
||||
**/*.sw*
|
||||
|
|
@@ -0,0 +1,32 @@
|
|||
FROM registry-app.eng.qops.net:5001/imported/alpine:3.15 as certs
|
||||
RUN apk update && apk add --no-cache ca-certificates
|
||||
|
||||
FROM registry-app.eng.qops.net:5001/imported/alpine:3.15 as encoder
|
||||
WORKDIR /main
|
||||
RUN apk update && apk add --no-cache gpg gpg-agent
|
||||
ARG KEY=""
|
||||
COPY ./releasedata ./releasedata
|
||||
RUN cat ./releasedata/users.yaml \
|
||||
| gpg --batch --no-tty --passphrase="$KEY" --cipher-algo AES256 --symmetric -z 0 \
|
||||
> ./users.yaml.gpg
|
||||
|
||||
FROM registry-app.eng.qops.net:5001/imported/alpine:3.15 as runner
|
||||
RUN apk update && apk --no-cache upgrade && apk add --no-cache bash gpg gpg-agent
|
||||
WORKDIR /main
|
||||
COPY --from=certs /etc/ssl/certs /etc/ssl/certs
|
||||
COPY --from=encoder /main/users.yaml.gpg ./
|
||||
|
||||
COPY ./exec-server ./
|
||||
COPY ./public ./public
|
||||
RUN test -e /main/exec-server
|
||||
RUN test -d /main/public
|
||||
RUN mkdir -p /var/log /main/public/files /main/public/media
|
||||
|
||||
ENV GOPATH=""
|
||||
VOLUME /main/public/files
|
||||
VOLUME /main/public/media
|
||||
ENV COOKIE_SECRET=""
|
||||
ENV KEY=""
|
||||
RUN echo 'cat /main/users.yaml.gpg | gpg --batch --no-tty --passphrase="$KEY" --decrypt > /main/users.yaml && /main/exec-server "$@"' > /main/entrypoint.sh
|
||||
ENTRYPOINT ["bash", "/main/entrypoint.sh"]
|
||||
CMD []
|
||||
|
|
@@ -0,0 +1,19 @@
|
|||
## Using File Auth
|
||||
|
||||
1. Build a linux binary with `GOOS=linux CGO_ENABLED=0 go build -o ./exec-server -a -installsuffix cgo -ldflags "-s -w"`
|
||||
1. Add your usernames, passwords, and groups to `releasedata/users.yaml`
|
||||
1. {one time} Generate and store an encryption `KEY` in Vault+Lastpass
|
||||
1. Build a Docker image with `docker build -t registry-app.eng.qops.net:5001/breel/work-notes:latest --build-arg KEY='{{INSERT YOUR KEY HERE}}' .`
|
||||
1. Push with `docker push registry-app.eng.qops.net:5001/breel/work-notes:latest`
|
||||
1. Run like `docker run -v /mnt/files:/main/public/files -v /mnt/media:/main/public/media -e KEY='{{INSERT YOUR KEY HERE}}' -e COOKIE_SECRET='{{INSERT ANOTHER KEY HERE}}' -p 3005:3005 --rm -it registry-app.eng.qops.net:5001/breel/work-notes:latest -auth ./users.yaml -p 3005`
|
||||
|
||||
### `users.yaml` Format
|
||||
|
||||
```yaml
|
||||
users:
|
||||
breel:
|
||||
password: breel
|
||||
groups:
|
||||
- g1
|
||||
- g2
|
||||
```
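
A quick smoke test once the container is up (a sketch; the host, port, and the `breel` user are just the example values from this README):

```bash
# unauthenticated requests should be prompted for basic auth
curl -i -sS http://localhost:3005/ | head -n 1          # expect HTTP/1.1 401 Unauthorized

# logging in with the example user should succeed and set the signed login cookie
curl -i -sS -u breel:breel http://localhost:3005/ | grep -i '^set-cookie: login='
```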
|
||||
|
|
@@ -0,0 +1,61 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"io/ioutil"
|
||||
|
||||
yaml "gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
type auth interface {
|
||||
Login(string, string) (bool, error)
|
||||
Groups(string) ([]string, error)
|
||||
}
|
||||
|
||||
type FileAuth struct {
|
||||
path string
|
||||
}
|
||||
|
||||
type fileAuthContent struct {
|
||||
Users map[string]struct {
|
||||
Password string
|
||||
Groups []string
|
||||
}
|
||||
}
|
||||
|
||||
func NewFileAuth(path string) FileAuth {
|
||||
return FileAuth{path: path}
|
||||
}
|
||||
|
||||
func (fileAuth FileAuth) Login(u, p string) (bool, error) {
|
||||
content, err := fileAuth.load()
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
entry, ok := content.Users[u]
|
||||
return ok && entry.Password == p, nil
|
||||
}
|
||||
|
||||
func (fileAuth FileAuth) Groups(u string) ([]string, error) {
|
||||
content, err := fileAuth.load()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
entry, ok := content.Users[u]
|
||||
if !ok {
|
||||
return nil, errors.New("invalid user")
|
||||
}
|
||||
return entry.Groups, nil
|
||||
}
|
||||
|
||||
func (fileAuth FileAuth) load() (fileAuthContent, error) {
|
||||
var fileAuthContent fileAuthContent
|
||||
b, err := ioutil.ReadFile(fileAuth.path)
|
||||
if err != nil {
|
||||
return fileAuthContent, err
|
||||
}
|
||||
if err := yaml.Unmarshal(b, &fileAuthContent); err != nil {
|
||||
return fileAuthContent, err
|
||||
}
|
||||
return fileAuthContent, nil
|
||||
}
|
||||
|
|
@@ -0,0 +1,118 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestFileAuth(t *testing.T) {
|
||||
user := "username"
|
||||
passw := "password"
|
||||
g := "group"
|
||||
emptyp := func() string {
|
||||
d := t.TempDir()
|
||||
f, err := ioutil.TempFile(d, "login.yaml.*")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
f.Close()
|
||||
return f.Name() // TempFile's Name() already includes the directory
|
||||
}
|
||||
goodp := func() string {
|
||||
p := emptyp()
|
||||
if err := ensureAndWrite(p, []byte(fmt.Sprintf(`{
|
||||
"users": {
|
||||
%q: {
|
||||
"password": %q,
|
||||
"groups": [%q]
|
||||
}
|
||||
}
|
||||
}`, user, passw, g))); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
t.Run("no file", func(t *testing.T) {
|
||||
p := emptyp()
|
||||
os.Remove(p)
|
||||
fa := NewFileAuth(p)
|
||||
if _, err := fa.Login(user, passw); err == nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("bad file", func(t *testing.T) {
|
||||
p := emptyp()
|
||||
if err := ensureAndWrite(p, []byte(`{"hello:}`)); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
fa := NewFileAuth(p)
|
||||
if _, err := fa.Login(user, passw); err == nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("bad user", func(t *testing.T) {
|
||||
p := goodp()
|
||||
fa := NewFileAuth(p)
|
||||
if ok, err := fa.Login("bad"+user, passw); err != nil {
|
||||
t.Fatal(err)
|
||||
} else if ok {
|
||||
t.Fatal(ok)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("bad pass", func(t *testing.T) {
|
||||
p := goodp()
|
||||
fa := NewFileAuth(p)
|
||||
if ok, err := fa.Login(user, "bad"+passw); err != nil {
|
||||
t.Fatal(err)
|
||||
} else if ok {
|
||||
t.Fatal(ok)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("good load", func(t *testing.T) {
|
||||
p := goodp()
|
||||
fa := NewFileAuth(p)
|
||||
got, err := fa.load()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if len(got.Users) != 1 {
|
||||
t.Error(got.Users)
|
||||
}
|
||||
if entry, ok := got.Users[user]; !ok {
|
||||
t.Error(ok)
|
||||
} else if entry.Password != passw {
|
||||
t.Error(entry)
|
||||
} else if len(entry.Groups) != 1 {
|
||||
t.Error(entry.Groups)
|
||||
} else if entry.Groups[0] != g {
|
||||
t.Error(entry.Groups)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("good", func(t *testing.T) {
|
||||
p := goodp()
|
||||
b, _ := ioutil.ReadFile(p)
|
||||
t.Logf("goodp: %s: %s", p, b)
|
||||
fa := NewFileAuth(p)
|
||||
if ok, err := fa.Login(user, passw); err != nil {
|
||||
t.Fatal(err)
|
||||
} else if !ok {
|
||||
t.Fatal(ok)
|
||||
}
|
||||
if groups, err := fa.Groups(user); err != nil {
|
||||
t.Fatal(err)
|
||||
} else if len(groups) != 1 {
|
||||
t.Fatal(groups)
|
||||
} else if groups[0] != g {
|
||||
t.Fatal(groups)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
|
@@ -0,0 +1,251 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"hash/crc32"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
var cookieSecret = os.Getenv("COOKIE_SECRET")
|
||||
|
||||
type User struct {
|
||||
User string
|
||||
Group string
|
||||
Groups []string
|
||||
}
|
||||
|
||||
func (user User) Is(other User) bool {
|
||||
for i := range user.Groups {
|
||||
if i >= len(other.Groups) || user.Groups[i] != other.Groups[i] {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return user.User == other.User &&
|
||||
user.Group == other.Group &&
|
||||
len(user.Groups) == len(other.Groups)
|
||||
}
|
||||
|
||||
type Cookie struct {
|
||||
Hash string
|
||||
Salt string
|
||||
Value string
|
||||
}
|
||||
|
||||
func (server *Server) authenticate(w http.ResponseWriter, r *http.Request) (*Server, bool, error) {
|
||||
if done, err := server.parseLogin(w, r); err != nil {
|
||||
log.Printf("error parsing login: %v", err)
|
||||
return nil, false, err
|
||||
} else if done {
|
||||
log.Printf("login rendered body")
|
||||
return nil, true, nil
|
||||
}
|
||||
|
||||
if ok, err := needsLogin(r); err != nil {
|
||||
log.Printf("error checking if login needed: %v", err)
|
||||
return nil, false, err
|
||||
} else if ok {
|
||||
log.Printf("needs login")
|
||||
promptLogin(w)
|
||||
return nil, true, nil
|
||||
}
|
||||
|
||||
if done, err := changeNamespace(w, r); err != nil {
|
||||
return nil, false, err
|
||||
} else if done {
|
||||
return nil, true, nil
|
||||
}
|
||||
|
||||
user, _ := loginCookie(r)
|
||||
return server.WithUser(user.User, user.Group, user.Groups), false, nil
|
||||
}
|
||||
|
||||
func promptLogin(w http.ResponseWriter) {
|
||||
w.Header().Set("WWW-Authenticate", "Basic")
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
}
|
||||
|
||||
func (server *Server) parseLogin(w http.ResponseWriter, r *http.Request) (bool, error) {
|
||||
username, password, ok := r.BasicAuth()
|
||||
if !ok {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
ok, err := server.auth.Login(username, password)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
if !ok {
|
||||
promptLogin(w)
|
||||
return true, nil
|
||||
}
|
||||
|
||||
groups, err := server.auth.Groups(username)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
if len(groups) == 0 {
|
||||
return false, errors.New("user has no groups")
|
||||
}
|
||||
|
||||
user := User{
|
||||
User: username,
|
||||
Groups: groups,
|
||||
Group: groups[0],
|
||||
}
|
||||
|
||||
olduser, _ := loginCookie(r)
|
||||
for i := range groups {
|
||||
if groups[i] == olduser.Group {
|
||||
user.Group = olduser.Group
|
||||
}
|
||||
}
|
||||
log.Printf("%+v => %+v", olduser, user)
|
||||
|
||||
setLoginCookie(w, r, user)
|
||||
return false, nil
|
||||
}
|
||||
|
||||
func changeNamespace(w http.ResponseWriter, r *http.Request) (bool, error) {
|
||||
want := r.URL.Query().Get("namespace")
|
||||
if want == "" {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
user, ok := loginCookie(r)
|
||||
if !ok {
|
||||
promptLogin(w)
|
||||
return true, nil
|
||||
}
|
||||
|
||||
if user.Group == want {
|
||||
return false, nil
|
||||
}
|
||||
for i := range user.Groups {
|
||||
if want == user.Groups[i] {
|
||||
user.Group = want
|
||||
setLoginCookie(w, r, user)
|
||||
return false, nil
|
||||
}
|
||||
}
|
||||
return false, nil
|
||||
}
|
||||
|
||||
func needsLogin(r *http.Request) (bool, error) {
|
||||
user, ok := loginCookie(r)
|
||||
if !ok {
|
||||
return true, nil
|
||||
}
|
||||
for i := range user.Groups {
|
||||
if user.Group == user.Groups[i] {
|
||||
return false, nil
|
||||
}
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func setLoginCookie(w http.ResponseWriter, r *http.Request, user User) {
|
||||
cookie := &http.Cookie{
|
||||
Name: "login",
|
||||
Value: encodeUserCookie(user),
|
||||
Expires: time.Now().Add(time.Hour * 24),
|
||||
Path: "/",
|
||||
}
|
||||
if was, ok := requestLoginCookie(r); !ok || !was.Is(user) {
|
||||
w.Header().Set("Set-Cookie", cookie.String())
|
||||
}
|
||||
log.Printf("setting login cookie: %+v", user)
|
||||
*r = *r.WithContext(context.WithValue(r.Context(), "LOGIN_COOKIE", cookie.Value))
|
||||
}
|
||||
|
||||
func loginCookie(r *http.Request) (User, bool) {
|
||||
if v := r.Context().Value("LOGIN_COOKIE"); v != nil {
|
||||
log.Printf("login cookie from ctx")
|
||||
return decodeUserCookie(v.(string))
|
||||
}
|
||||
return requestLoginCookie(r)
|
||||
}
|
||||
|
||||
func requestLoginCookie(r *http.Request) (User, bool) {
|
||||
c, ok := getCookie("login", r)
|
||||
log.Printf("request login cookie: %v, %v", c, ok)
|
||||
if !ok {
|
||||
return User{}, false
|
||||
}
|
||||
return decodeUserCookie(c)
|
||||
}
|
||||
|
||||
func getCookie(key string, r *http.Request) (string, bool) {
|
||||
var cookie *http.Cookie
|
||||
cookies := r.Cookies()
|
||||
for i := range cookies {
|
||||
if cookies[i].Name == key && (cookies[i].Expires.IsZero() || time.Now().Before(cookies[i].Expires)) {
|
||||
cookie = cookies[i]
|
||||
}
|
||||
}
|
||||
if cookie == nil {
|
||||
return "", false
|
||||
}
|
||||
return cookie.Value, cookie.Expires.IsZero() || time.Now().Before(cookie.Expires)
|
||||
}
|
||||
|
||||
func decodeUserCookie(raw string) (User, bool) {
|
||||
decoded, ok := decodeCookie(raw)
|
||||
if !ok {
|
||||
return User{}, ok
|
||||
}
|
||||
var user User
|
||||
err := json.Unmarshal([]byte(decoded), &user)
|
||||
return user, err == nil
|
||||
}
|
||||
|
||||
func encodeUserCookie(user User) string {
|
||||
b, err := json.Marshal(user)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return encodeCookie(string(b))
|
||||
}
|
||||
|
||||
func encodeCookie(s string) string {
|
||||
cookie := Cookie{
|
||||
Salt: uuid.New().String(),
|
||||
Value: s,
|
||||
}
|
||||
hash := crc32.NewIEEE()
|
||||
hash.Write([]byte(cookieSecret))
|
||||
hash.Write([]byte(cookie.Salt))
|
||||
hash.Write([]byte(cookie.Value))
|
||||
cookie.Hash = base64.StdEncoding.EncodeToString(hash.Sum(nil))
|
||||
b, err := json.Marshal(cookie)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return base64.StdEncoding.EncodeToString(b)
|
||||
}
|
||||
|
||||
func decodeCookie(s string) (string, bool) {
|
||||
b, err := base64.StdEncoding.DecodeString(s)
|
||||
if err != nil {
|
||||
return "", false
|
||||
}
|
||||
var cookie Cookie
|
||||
if err := json.Unmarshal(b, &cookie); err != nil {
|
||||
return "", false
|
||||
}
|
||||
hash := crc32.NewIEEE()
|
||||
hash.Write([]byte(cookieSecret))
|
||||
hash.Write([]byte(cookie.Salt))
|
||||
hash.Write([]byte(cookie.Value))
|
||||
if got := base64.StdEncoding.EncodeToString(hash.Sum(nil)); cookie.Hash != got {
|
||||
return "", false
|
||||
}
|
||||
return cookie.Value, true
|
||||
}
|
||||
|
|
@@ -0,0 +1,361 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"path"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
func TestEncodeDecodeCookie(t *testing.T) {
|
||||
newTestServer(t)
|
||||
|
||||
for i := 0; i < 5; i++ {
|
||||
value := uuid.New().String()
|
||||
encoded := encodeCookie(value)
|
||||
for j := 0; j < 5; j++ {
|
||||
decoded, ok := decodeCookie(encoded)
|
||||
if !ok || decoded != value {
|
||||
t.Errorf("value=%s, encoded=%s, decoded=%s", value, encoded, decoded)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestEncodeDecodeUserCookie(t *testing.T) {
|
||||
newTestServer(t)
|
||||
|
||||
user := User{
|
||||
User: "abc",
|
||||
Groups: []string{"def", "ghi"},
|
||||
}
|
||||
encoded := encodeUserCookie(user)
|
||||
decoded, ok := decodeUserCookie(encoded)
|
||||
if !ok {
|
||||
t.Fatal(ok)
|
||||
}
|
||||
if fmt.Sprint(user) != fmt.Sprint(decoded) {
|
||||
t.Fatal(user, decoded)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetCookie(t *testing.T) {
|
||||
r := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
r.AddCookie(&http.Cookie{
|
||||
Name: "abc",
|
||||
Value: "def",
|
||||
Expires: time.Now().Add(time.Hour),
|
||||
})
|
||||
got, _ := getCookie("abc", r)
|
||||
if got != "def" {
|
||||
t.Fatal(r.Cookies(), got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetSetLoginCookie(t *testing.T) {
|
||||
w := httptest.NewRecorder()
|
||||
r := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
user := User{User: "a", Groups: []string{"g"}}
|
||||
|
||||
setLoginCookie(w, r, user)
|
||||
if w.Header().Get("Set-Cookie") == "" {
|
||||
t.Error(w.Header())
|
||||
}
|
||||
|
||||
got, ok := loginCookie(r)
|
||||
if !ok {
|
||||
t.Error(ok)
|
||||
}
|
||||
if fmt.Sprint(user) != fmt.Sprint(got) {
|
||||
t.Error(user, got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestChangeNamespace(t *testing.T) {
|
||||
newTestServer(t)
|
||||
user := User{
|
||||
User: "user",
|
||||
Groups: []string{"group", "othergroup"},
|
||||
Group: "group",
|
||||
}
|
||||
|
||||
t.Run("noop", func(t *testing.T) {
|
||||
r := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
w := httptest.NewRecorder()
|
||||
done, err := changeNamespace(w, r)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
if done {
|
||||
t.Error(done)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("change to ``", func(t *testing.T) {
|
||||
r := httptest.NewRequest(http.MethodGet, "/?namespace=", nil)
|
||||
w := httptest.NewRecorder()
|
||||
done, err := changeNamespace(w, r)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
if done {
|
||||
t.Error(done)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("change to bad", func(t *testing.T) {
|
||||
r := httptest.NewRequest(http.MethodGet, "/?namespace=never", nil)
|
||||
w := httptest.NewRecorder()
|
||||
setLoginCookie(w, r, user)
|
||||
done, err := changeNamespace(w, r)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
if done {
|
||||
t.Error(done)
|
||||
}
|
||||
user, ok := loginCookie(r)
|
||||
if !ok {
|
||||
t.Error(ok)
|
||||
}
|
||||
if user.Group == "never" {
|
||||
t.Error("change namespace acknowledged bad change")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("change without login", func(t *testing.T) {
|
||||
r := httptest.NewRequest(http.MethodGet, "/?namespace="+user.Group, nil)
|
||||
w := httptest.NewRecorder()
|
||||
done, err := changeNamespace(w, r)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
if !done {
|
||||
t.Error(done)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("change to same", func(t *testing.T) {
|
||||
r := httptest.NewRequest(http.MethodGet, "/?namespace="+user.Group, nil)
|
||||
w := httptest.NewRecorder()
|
||||
setLoginCookie(w, r, user)
|
||||
done, err := changeNamespace(w, r)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
if done {
|
||||
t.Error(done)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("change to ok", func(t *testing.T) {
|
||||
r := httptest.NewRequest(http.MethodGet, "/?namespace="+user.Groups[1], nil)
|
||||
w := httptest.NewRecorder()
|
||||
setLoginCookie(w, r, user)
|
||||
done, err := changeNamespace(w, r)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
if done {
|
||||
t.Error(done)
|
||||
}
|
||||
user, ok := loginCookie(r)
|
||||
if !ok {
|
||||
t.Error(ok)
|
||||
}
|
||||
if user.Group != user.Groups[1] {
|
||||
t.Error(user.Group)
|
||||
}
|
||||
if w.Header().Get("Set-Cookie") == "" {
|
||||
t.Error(w.Header())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestNeedsLogin(t *testing.T) {
|
||||
w := httptest.NewRecorder()
|
||||
user := User{User: "user", Groups: []string{"group0", "group1"}, Group: "group0"}
|
||||
|
||||
t.Run("no login provided", func(t *testing.T) {
|
||||
r := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
if ok, err := needsLogin(r); err != nil {
|
||||
t.Fatal(err)
|
||||
} else if !ok {
|
||||
t.Fatal(ok)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("no namespace provided", func(t *testing.T) {
|
||||
r := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
u2 := user
|
||||
u2.Group = ""
|
||||
setLoginCookie(w, r, u2)
|
||||
if ok, err := needsLogin(r); err != nil {
|
||||
t.Fatal(err)
|
||||
} else if !ok {
|
||||
t.Fatal(ok)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("cookie tampered", func(t *testing.T) {
|
||||
r := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
setLoginCookie(w, r, user)
|
||||
cookieSecret += "modified"
|
||||
if ok, err := needsLogin(r); err != nil {
|
||||
t.Fatal(err)
|
||||
} else if !ok {
|
||||
t.Fatal(ok)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("bad namespace", func(t *testing.T) {
|
||||
r := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
u2 := user
|
||||
u2.Group = "teehee"
|
||||
setLoginCookie(w, r, u2)
|
||||
if ok, err := needsLogin(r); err != nil {
|
||||
t.Fatal(err)
|
||||
} else if !ok {
|
||||
t.Fatal(ok)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("ok", func(t *testing.T) {
|
||||
r := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
setLoginCookie(w, r, user)
|
||||
if ok, err := needsLogin(r); err != nil {
|
||||
t.Fatal(err)
|
||||
} else if ok {
|
||||
t.Fatal(ok)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestServerParseLogin(t *testing.T) {
|
||||
server := newTestServer(t)
|
||||
|
||||
t.Run("no basic auth", func(t *testing.T) {
|
||||
w := httptest.NewRecorder()
|
||||
r := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
if done, err := server.parseLogin(w, r); done || err != nil {
|
||||
t.Fatal(done, err)
|
||||
}
|
||||
if w.Code == http.StatusUnauthorized {
|
||||
t.Error(w.Code)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("bad basic auth", func(t *testing.T) {
|
||||
w := httptest.NewRecorder()
|
||||
r := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
r.SetBasicAuth("junk", "junk")
|
||||
if done, err := server.parseLogin(w, r); !done || err != nil {
|
||||
t.Fatal(done, err)
|
||||
}
|
||||
if w.Code != http.StatusUnauthorized {
|
||||
t.Error(w.Code)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("ok", func(t *testing.T) {
|
||||
w := httptest.NewRecorder()
|
||||
r := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
r.SetBasicAuth("user", "passw")
|
||||
if done, err := server.parseLogin(w, r); done || err != nil {
|
||||
t.Fatal(done, err)
|
||||
}
|
||||
if w.Code == http.StatusUnauthorized {
|
||||
t.Error(w.Code)
|
||||
}
|
||||
if len(w.Header()["Set-Cookie"]) != 1 {
|
||||
t.Error(w.Header())
|
||||
}
|
||||
if user, ok := loginCookie(r); !ok || user.User != "user" || user.Groups[0] != "group" || user.Groups[1] != "othergroup" {
|
||||
t.Error(user)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestServerAuthenticate(t *testing.T) {
|
||||
server := newTestServer(t)
|
||||
|
||||
t.Run("ok: already logged in", func(t *testing.T) {
|
||||
r := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
setLoginCookie(httptest.NewRecorder(), r, User{User: "user", Group: "othergroup", Groups: []string{"group", "othergroup"}})
|
||||
s2, done, err := server.authenticate(nil, r)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
if done {
|
||||
t.Error(done)
|
||||
}
|
||||
if server == s2 {
|
||||
t.Error(done)
|
||||
}
|
||||
if server.user != nil {
|
||||
t.Error(server.user)
|
||||
}
|
||||
if s2.user == nil {
|
||||
t.Error(s2.user)
|
||||
}
|
||||
if s2.user.User != "user" {
|
||||
t.Error(s2.user)
|
||||
}
|
||||
if s2.user.Group != "othergroup" {
|
||||
t.Error(s2.user)
|
||||
}
|
||||
if fmt.Sprint(s2.user.Groups) != fmt.Sprint([]string{"group", "othergroup"}) {
|
||||
t.Error(s2.user)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("ok: basic auth", func(t *testing.T) {
|
||||
r := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
w := httptest.NewRecorder()
|
||||
r.SetBasicAuth("user", "passw")
|
||||
s2, done, err := server.authenticate(w, r)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
if done {
|
||||
t.Error(done)
|
||||
}
|
||||
if server == s2 {
|
||||
t.Error(done)
|
||||
}
|
||||
if server.user != nil {
|
||||
t.Error(server.user)
|
||||
}
|
||||
if s2.user == nil {
|
||||
t.Error(s2.user)
|
||||
}
|
||||
if s2.user.User != "user" {
|
||||
t.Error(s2.user)
|
||||
}
|
||||
if s2.user.Group != "group" {
|
||||
t.Error(s2.user)
|
||||
}
|
||||
if fmt.Sprint(s2.user.Groups) != fmt.Sprint([]string{"group", "othergroup"}) {
|
||||
t.Error(s2.user)
|
||||
}
|
||||
if w.Code != http.StatusOK {
|
||||
t.Error(w.Code)
|
||||
}
|
||||
if len(w.Header()["Set-Cookie"]) != 1 {
|
||||
t.Error(w.Header())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func newTestServer(t *testing.T) *Server {
|
||||
cookieSecret = uuid.New().String()
|
||||
p := path.Join(t.TempDir(), "auth.yaml")
|
||||
if err := ensureAndWrite(p, []byte(`{"users":{"user":{"password":"passw", "groups":["group", "othergroup"]}}}`)); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
return &Server{
|
||||
auth: NewFileAuth(p),
|
||||
}
|
||||
}
|
||||
|
|
@@ -0,0 +1,29 @@
|
|||
module ezmded
|
||||
|
||||
go 1.17
|
||||
|
||||
require (
|
||||
github.com/gomarkdown/markdown v0.0.0-20220114203417-14399d5448c4
|
||||
github.com/google/uuid v1.3.0
|
||||
gopkg.in/yaml.v2 v2.4.0
|
||||
local/args v0.0.0-00010101000000-000000000000
|
||||
local/gziphttp v0.0.0-00010101000000-000000000000
|
||||
local/router v0.0.0-00010101000000-000000000000
|
||||
local/simpleserve v0.0.0-00010101000000-000000000000
|
||||
)
|
||||
|
||||
replace local/args => ../../../../args
|
||||
|
||||
replace local/logb => ../../../../logb
|
||||
|
||||
replace local/storage => ../../../../storage
|
||||
|
||||
replace local/router => ../../../../router
|
||||
|
||||
replace local/simpleserve => ../../../../simpleserve
|
||||
|
||||
replace local/gziphttp => ../../../../gziphttp
|
||||
|
||||
replace local/notes-server => ../../../../notes-server
|
||||
|
||||
replace local/oauth2 => ../../../../oauth2
|
||||
|
|
@@ -0,0 +1,209 @@
|
|||
bazil.org/fuse v0.0.0-20180421153158-65cc252bf669/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8=
|
||||
cloud.google.com/go v0.33.1/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
github.com/Azure/azure-pipeline-go v0.1.8/go.mod h1:XA1kFWRVhSK+KNFiOhfv83Fv8L9achrP7OxIzeTn1Yg=
|
||||
github.com/Azure/azure-storage-blob-go v0.0.0-20181023070848-cf01652132cc/go.mod h1:oGfmITT1V6x//CswqY2gtAHND+xIP64/qL7a5QJix0Y=
|
||||
github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
|
||||
github.com/Unknwon/goconfig v0.0.0-20181105214110-56bd8ab18619/go.mod h1:wngxua9XCNjvHjDiTiV26DaKDT+0c63QR6H5hjVUUxw=
|
||||
github.com/a8m/tree v0.0.0-20180321023834-3cf936ce15d6/go.mod h1:FSdwKX97koS5efgm8WevNf7XS3PqtyFkKDDXrz778cg=
|
||||
github.com/abbot/go-http-auth v0.4.0/go.mod h1:Cz6ARTIzApMJDzh5bRMSUou6UMSp0IEXg9km/ci7TJM=
|
||||
github.com/anacrolix/dms v0.0.0-20180117034613-8af4925bffb5/go.mod h1:DGqLjaZ3ziKKNRt+U5Q9PLWJ52Q/4rxfaaH/b3QYKaE=
|
||||
github.com/aws/aws-sdk-go v1.15.81/go.mod h1:E3/ieXAlvM0XWO57iftYVDLLvQ824smPP3ATZkfNZeM=
|
||||
github.com/billziss-gh/cgofuse v1.1.0/go.mod h1:LJjoaUojlVjgo5GQoEJTcJNqZJeRU0nCR84CyxKt2YM=
|
||||
github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps=
|
||||
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA=
|
||||
github.com/buraksezer/consistent v0.9.0/go.mod h1:6BrVajWq7wbKZlTOUPs/XVfR8c0maujuPowduSpZqmw=
|
||||
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
|
||||
github.com/coreos/bbolt v0.0.0-20180318001526-af9db2027c98/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
|
||||
github.com/cpuguy83/go-md2man v1.0.8/go.mod h1:N6JayAiVKtlHSnuTCeuLSQVs75hb8q+dYQLjr7cDsKY=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/djherbis/times v1.1.0/go.mod h1:CGMZlo255K5r4Yw0b9RRfFQpM2y7uOmxg4jm9HsaVf8=
|
||||
github.com/dropbox/dropbox-sdk-go-unofficial v5.4.0+incompatible/go.mod h1:lr+LhMM3F6Y3lW1T9j2U5l7QeuWm87N9+PPXo3yH4qY=
|
||||
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||
github.com/fairlyblank/md2min v0.0.0-20171213131418-39cd6e9904ac/go.mod h1:QAobgT+CwT/SRphqV6Jrz5jt3wkW9Q72QNquEvh6dLk=
|
||||
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
||||
github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0=
|
||||
github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY=
|
||||
github.com/gobuffalo/depgen v0.1.0/go.mod h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg=
|
||||
github.com/gobuffalo/envy v1.6.15/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI=
|
||||
github.com/gobuffalo/envy v1.7.0/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI=
|
||||
github.com/gobuffalo/flect v0.1.0/go.mod h1:d2ehjJqGOH/Kjqcoz+F7jHTBbmDb38yXA598Hb50EGs=
|
||||
github.com/gobuffalo/flect v0.1.1/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI=
|
||||
github.com/gobuffalo/flect v0.1.3/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI=
|
||||
github.com/gobuffalo/genny v0.0.0-20190329151137-27723ad26ef9/go.mod h1:rWs4Z12d1Zbf19rlsn0nurr75KqhYp52EAGGxTbBhNk=
|
||||
github.com/gobuffalo/genny v0.0.0-20190403191548-3ca520ef0d9e/go.mod h1:80lIj3kVJWwOrXWWMRzzdhW3DsrdjILVil/SFKBzF28=
|
||||
github.com/gobuffalo/genny v0.1.0/go.mod h1:XidbUqzak3lHdS//TPu2OgiFB+51Ur5f7CSnXZ/JDvo=
|
||||
github.com/gobuffalo/genny v0.1.1/go.mod h1:5TExbEyY48pfunL4QSXxlDOmdsD44RRq4mVZ0Ex28Xk=
|
||||
github.com/gobuffalo/gitgen v0.0.0-20190315122116-cc086187d211/go.mod h1:vEHJk/E9DmhejeLeNt7UVvlSGv3ziL+djtTr3yyzcOw=
|
||||
github.com/gobuffalo/gogen v0.0.0-20190315121717-8f38393713f5/go.mod h1:V9QVDIxsgKNZs6L2IYiGR8datgMhB577vzTDqypH360=
|
||||
github.com/gobuffalo/gogen v0.1.0/go.mod h1:8NTelM5qd8RZ15VjQTFkAW6qOMx5wBbW4dSCS3BY8gg=
|
||||
github.com/gobuffalo/gogen v0.1.1/go.mod h1:y8iBtmHmGc4qa3urIyo1shvOD8JftTtfcKi+71xfDNE=
|
||||
github.com/gobuffalo/logger v0.0.0-20190315122211-86e12af44bc2/go.mod h1:QdxcLw541hSGtBnhUc4gaNIXRjiDppFGaDqzbrBd3v8=
|
||||
github.com/gobuffalo/mapi v1.0.1/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc=
|
||||
github.com/gobuffalo/mapi v1.0.2/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc=
|
||||
github.com/gobuffalo/packd v0.0.0-20190315124812-a385830c7fc0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4=
|
||||
github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4=
|
||||
github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ=
|
||||
github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0=
|
||||
github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw=
|
||||
github.com/goftp/file-driver v0.0.0-20180502053751-5d604a0fc0c9/go.mod h1:GpOj6zuVBG3Inr9qjEnuVTgBlk2lZ1S9DcoFiXWyKss=
|
||||
github.com/goftp/server v0.0.0-20190111142836-88de73f463af/go.mod h1:k/SS6VWkxY7dHPhoMQ8IdRu8L4lQtmGbhyXGg+vCnXE=
|
||||
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/gomarkdown/markdown v0.0.0-20210208175418-bda154fe17d8/go.mod h1:aii0r/K0ZnHv7G0KF7xy1v0A7s2Ljrb5byB7MO5p6TU=
|
||||
github.com/gomarkdown/markdown v0.0.0-20220114203417-14399d5448c4 h1:6GlsnS3GQYfrJZTJEUsheoyLE6kLXQJDvQKIKxgL/9Q=
|
||||
github.com/gomarkdown/markdown v0.0.0-20220114203417-14399d5448c4/go.mod h1:JDGcbDT52eL4fju3sZ4TeHGsQwhG9nbDV21aMyhwPoA=
|
||||
github.com/gomodule/redigo v1.8.5/go.mod h1:P9dn9mFrCBvWhGE1wpxx6fgq7BAeLBk+UUUzlpkBYO0=
|
||||
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
|
||||
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
|
||||
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
|
||||
github.com/jlaffaye/ftp v0.0.0-20181101112434-47f21d10f0ee/go.mod h1:lli8NYPQOFy3O++YmYbqVgOcQ1JPCwdOy+5zSjKJ9qY=
|
||||
github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
|
||||
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
|
||||
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
|
||||
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||
github.com/kardianos/osext v0.0.0-20170510131534-ae77be60afb1/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8=
|
||||
github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4=
|
||||
github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA=
|
||||
github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
|
||||
github.com/klauspost/cpuid v1.2.3/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
|
||||
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE=
|
||||
github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0=
|
||||
github.com/mattn/go-runewidth v0.0.3/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
|
||||
github.com/minio/md5-simd v1.1.0/go.mod h1:XpBqgZULrMYD3R+M28PcmP0CkI7PEMzB3U77ZrKZ0Gw=
|
||||
github.com/minio/minio-go/v6 v6.0.57/go.mod h1:5+R/nM9Pwrh0vqF+HbYYDQ84wdUFPyXHkrdT4AIkifM=
|
||||
github.com/minio/sha256-simd v0.1.1/go.mod h1:B5e1o+1/KgNmWrSQK08Y6Z1Vb5pwIktudl0J58iy0KM=
|
||||
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
|
||||
github.com/ncw/go-acd v0.0.0-20171120105400-887eb06ab6a2/go.mod h1:MLIrzg7gp/kzVBxRE1olT7CWYMCklcUWU+ekoxOD9x0=
|
||||
github.com/ncw/rclone v1.46.0/go.mod h1:+uFY4HNpat/yXXIEin5ETWXxIwEplC+eDe/vT8vlk1w=
|
||||
github.com/ncw/swift v1.0.44/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM=
|
||||
github.com/nsf/termbox-go v0.0.0-20181027232701-60ab7e3d12ed/go.mod h1:IuKpRQcYE1Tfu+oAQqaLisqDeXgjyyltCfsaoYN18NQ=
|
||||
github.com/okzk/sdnotify v0.0.0-20180710141335-d9becc38acbd/go.mod h1:4soZNh0zW0LtYGdQ416i0jO0EIqMGcbtaspRS4BDvRQ=
|
||||
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
||||
github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
|
||||
github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE=
|
||||
github.com/pengsrc/go-shared v0.2.0/go.mod h1:jVblp62SafmidSkvWrXyxAme3gaTfEtWwRPGz5cpvHg=
|
||||
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pkg/sftp v1.8.3/go.mod h1:NxmoDg/QLVWluQDUYG7XBZTLUpKeFa8e3aMf1BfjyHk=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/rfjakob/eme v0.0.0-20171028163933-2222dbd4ba46/go.mod h1:U2bmx0hDj8EyDdcxmD5t3XHDnBFnyNNc22n1R4008eM=
|
||||
github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
|
||||
github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/sevlyar/go-daemon v0.1.4/go.mod h1:6dJpPatBT9eUwM5VCw9Bt6CdX9Tk6UWvhW3MebLDRKE=
|
||||
github.com/shurcooL/httpfs v0.0.0-20171119174359-809beceb2371/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
|
||||
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw=
|
||||
github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
|
||||
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
|
||||
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
||||
github.com/sirupsen/logrus v1.5.0/go.mod h1:+F7Ogzej0PZc/94MaYx/nvG9jOFMD2osvC3s+Squfpo=
|
||||
github.com/skratchdot/open-golang v0.0.0-20160302144031-75fb7ed4208c/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog=
|
||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
|
||||
github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s=
|
||||
github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
|
||||
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
|
||||
github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
|
||||
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ=
|
||||
github.com/t3rm1n4l/go-mega v0.0.0-20190205172012-55a226cf41da/go.mod h1:XWL4vDyd3JKmJx+hZWUVgCNmmhZ2dTBcaNDcxH465s0=
|
||||
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
|
||||
github.com/xanzy/ssh-agent v0.2.0/go.mod h1:0NyE30eGUDliuLEHJgYte/zncp2zdTStcOnWhgSqHD8=
|
||||
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
|
||||
github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs=
|
||||
github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM=
|
||||
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
|
||||
github.com/yuin/goldmark v1.3.4-0.20210326114109-75d8cce5b78c/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
||||
github.com/yunify/qingstor-sdk-go v2.2.15+incompatible/go.mod h1:w6wqLDQ5bBTzxGJ55581UrSwLrsTAsdo9N6yX/8d9RY=
|
||||
go.mongodb.org/mongo-driver v1.7.2/go.mod h1:Q4oFMbo1+MSNqICAdYMlC/zSTrwCogR4R8NzkI+yfU8=
|
||||
golang.org/dl v0.0.0-20190829154251-82a15e2f2ead/go.mod h1:IUMfjQLJQd4UTqG1Z90tenwKoCX93Gn3MAQJMOSBsDQ=
|
||||
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20190131182504-b8fe1690c613/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
|
||||
golang.org/x/crypto v0.0.0-20190513172903-22d7a77e9e5f/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
|
||||
golang.org/x/oauth2 v0.0.0-20181120190819-8f65e3013eba/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190419153524-e8e3143a4f4a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190329151228-23e29df326fe/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/api v0.0.0-20181120235003-faade3cbb06a/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
|
||||
google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
||||
gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
||||
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
|
@@ -0,0 +1,55 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
"os"
|
||||
"path"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type ID string
|
||||
|
||||
func NewID(s string) ID {
|
||||
return ID(path.Clean(s)).withClean()
|
||||
}
|
||||
|
||||
func (id ID) Push(child string) ID {
|
||||
return NewID(path.Join(id.String(), child)).withClean()
|
||||
}
|
||||
|
||||
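// Pop returns the parent ID; popping at (or past) the root yields the empty ID.
|
||||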
func (id ID) Pop() ID {
|
||||
pid := path.Clean(NewID(path.Dir(id.String())).withClean().String())
|
||||
if strings.HasPrefix(pid, ".") {
|
||||
return ""
|
||||
}
|
||||
return NewID(pid)
|
||||
}
|
||||
|
||||
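// URLSafeString percent-escapes each path segment so the ID can safely appear in a URL path.
|
||||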
func (id ID) URLSafeString() string {
|
||||
splits := strings.Split(string(id), "/")
|
||||
for i := range splits {
|
||||
splits[i] = url.PathEscape(splits[i])
|
||||
}
|
||||
return strings.Join(splits, "/")
|
||||
}
|
||||
|
||||
func (id ID) String() string {
|
||||
return string(id)
|
||||
}
|
||||
|
||||
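// withClean trims separators from each segment, URL-unescapes it when possible, and rejoins the segments with path.Join; the root collapses to the empty ID.
|
||||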
func (id ID) withClean() ID {
|
||||
splits := strings.Split(id.String(), string([]rune{os.PathSeparator}))
|
||||
for i := range splits {
|
||||
splits[i] = strings.Trim(splits[i], string([]rune{os.PathSeparator}))
|
||||
splits[i] = strings.Trim(splits[i], "/")
|
||||
t, err := url.PathUnescape(splits[i])
|
||||
if err == nil {
|
||||
splits[i] = t
|
||||
}
|
||||
}
|
||||
clean := path.Join(splits...)
|
||||
if clean == "" || clean == "." {
|
||||
clean = ""
|
||||
}
|
||||
return ID(clean)
|
||||
}
|
||||
|
|
@@ -0,0 +1,68 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type Leaf struct {
|
||||
Meta Meta
|
||||
Content string
|
||||
}
|
||||
|
||||
type Meta struct {
|
||||
Title string
|
||||
ReadOnly bool
|
||||
Deleted bool
|
||||
}
|
||||
|
||||
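// NewHTTPRequestLeaf builds a Leaf from a request: the body becomes the content, the Title and Read-Only headers fill the metadata, and a DELETE marks the leaf deleted.
|
||||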
func NewHTTPRequestLeaf(r *http.Request) (Leaf, error) {
|
||||
var leaf Leaf
|
||||
if b, err := ioutil.ReadAll(r.Body); err != nil {
|
||||
return leaf, err
|
||||
} else {
|
||||
leaf.Content = string(b)
|
||||
}
|
||||
if leaf.Meta.Title = r.Header.Get("Title"); leaf.Meta.Title == "" {
|
||||
leaf.Meta.Title = "Untitled"
|
||||
}
|
||||
if readOnly := r.Header.Get("Read-Only"); readOnly == "true" {
|
||||
leaf.Meta.ReadOnly = true
|
||||
} else if readOnly == "false" {
|
||||
leaf.Meta.ReadOnly = false
|
||||
}
|
||||
leaf.Meta.Deleted = r.Method == http.MethodDelete
|
||||
return leaf, nil
|
||||
}
|
||||
|
||||
func NewLeaf(title string, content string) (Leaf, error) {
|
||||
return NewHTTPRequestLeaf(&http.Request{
|
||||
Header: http.Header{
|
||||
"Title": []string{title},
|
||||
},
|
||||
Body: io.NopCloser(strings.NewReader(content)),
|
||||
})
|
||||
}
|
||||
|
||||
func (leaf Leaf) WriteHTTP(w http.ResponseWriter) error {
|
||||
w.Header().Set("Title", leaf.Meta.Title)
|
||||
w.Header().Set("Read-Only", strconv.FormatBool(leaf.Meta.ReadOnly))
|
||||
_, err := w.Write([]byte(leaf.Content))
|
||||
return err
|
||||
}
|
||||
|
||||
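// Merge overlays updated onto base: Content, ReadOnly, and Deleted always come from updated, while an empty updated title keeps the base title (defaulting to "Untitled").
|
||||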
func (base Leaf) Merge(updated Leaf) Leaf {
|
||||
if updated.Meta.Title != "" {
|
||||
base.Meta.Title = updated.Meta.Title
|
||||
}
|
||||
if base.Meta.Title == "" {
|
||||
base.Meta.Title = "Untitled"
|
||||
}
|
||||
base.Meta.Deleted = updated.Meta.Deleted
|
||||
base.Meta.ReadOnly = updated.Meta.ReadOnly
|
||||
base.Content = updated.Content
|
||||
return base
|
||||
}
|
||||
|
|
@@ -0,0 +1,52 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"local/args"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"path"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func main() {
|
||||
as := args.NewArgSet()
|
||||
as.Append(args.INT, "p", "port to listen on", 3004)
|
||||
as.Append(args.STRING, "d", "root dir with /index.html and /media and /files", "./public")
|
||||
as.Append(args.STRING, "auth", "auth mode [none, path/to/some.yaml, ldap", "none")
|
||||
if err := as.Parse(); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
auth, err := authFactory(as.GetString("auth"))
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
s := NewServer(as.GetString("d"), auth)
|
||||
if err := s.Routes(); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
log.Printf("listening on %v with %s", as.GetInt("p"), as.GetString("auth"))
|
||||
if err := http.ListenAndServe(":"+strconv.Itoa(as.GetInt("p")), s); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
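// authFactory maps the -auth flag to a backend: "none" (or empty) disables auth, "ldap" is not implemented yet, and anything else is treated as the path to a YAML users file for NewFileAuth.
|
||||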
func authFactory(key string) (auth, error) {
|
||||
switch path.Base(strings.ToLower(key)) {
|
||||
case "none", "":
|
||||
return nil, nil
|
||||
case "ldap":
|
||||
return nil, errors.New("not impl ldap auth")
|
||||
}
|
||||
stat, err := os.Stat(key)
|
||||
if os.IsNotExist(err) {
|
||||
return nil, errors.New("looks like auth path does not exist")
|
||||
} else if err != nil {
|
||||
return nil, err
|
||||
} else if stat.IsDir() {
|
||||
return nil, errors.New("looks like auth path is a dir")
|
||||
}
|
||||
return NewFileAuth(key), nil
|
||||
}
|
||||
|
|
@@ -0,0 +1,28 @@
|
|||
{{ define "files" }}
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>{{ .This.Title }}</title>
|
||||
{{ template "_import" }}
|
||||
</head>
|
||||
<body class="fullscreen tb_fullscreen lr_fullscreen" style="position: absolute">
|
||||
<div class="rows" style="height: inherit;">
|
||||
{{ template "_topbar" . }}
|
||||
<div class="columns thic_flex tb_buffer" style="height: calc(100% - 4rem);">
|
||||
{{ template "_filetree" . }}
|
||||
<div class="thic_flex lr_fullscreen" style="margin-left: 1em; width: 5px;">
|
||||
{{ if eq .This.ID "" }}
|
||||
{{ template "_about" . }}
|
||||
{{ else }}
|
||||
{{ if .This.ReadOnly }}
|
||||
{{ template "_readonly" . }}
|
||||
{{ else }}
|
||||
{{ template "_editor" . }}
|
||||
{{ end }}
|
||||
{{ end }}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
{{ end }}
|
||||
|
|
@@ -0,0 +1,179 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"html/template"
|
||||
"log"
|
||||
"os"
|
||||
"path"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func main() {
|
||||
all := []string{}
|
||||
always := []string{}
|
||||
if err := recursePwd(func(p string) error {
|
||||
switch path.Ext(p) {
|
||||
case ".ctmpl":
|
||||
if path.Base(p)[0] == '_' {
|
||||
all = append(all, p)
|
||||
}
|
||||
}
|
||||
switch path.Base(p) {
|
||||
case "_import.ctmpl":
|
||||
always = append(always, strings.TrimSuffix(path.Base(p), path.Ext(p)))
|
||||
}
|
||||
return nil
|
||||
}); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
t := func(p ...string) *template.Template {
|
||||
p = append(all, p...)
|
||||
oneT, err := template.ParseFiles(p...)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return oneT
|
||||
}
|
||||
data := map[string]interface{}{
|
||||
"Namespaces": []string{"datastore", "dp-orchestration"},
|
||||
"This": map[string]interface{}{
|
||||
"ID": "id00/id11",
|
||||
"Title": "title id11",
|
||||
"ReadOnly": false,
|
||||
"PID": "id00",
|
||||
"PTitle": "title id00",
|
||||
"Content": `# hello
|
||||
|
||||
## world
|
||||
|
||||
| this | is | my | table |
|
||||
| ---- | --- | --- | ----- |
|
||||
| hey | ya | hey | ya |
|
||||
| a | b | c | d |
|
||||
|
||||
* and
|
||||
* a bulleted
|
||||
* list
|
||||
|
||||
> but here is a quote
|
||||
|
||||
` + "```" + `go
|
||||
// and some go code
|
||||
func main() {
|
||||
log.Println("hi")
|
||||
}
|
||||
` + "```" + `
|
||||
|
||||
and
|
||||
|
||||
now
|
||||
|
||||
the
|
||||
|
||||
newlines
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
`,
|
||||
},
|
||||
"Results": []struct {
|
||||
Title string
|
||||
ID string
|
||||
}{
|
||||
{Title: "title id00", ID: "id00"},
|
||||
{Title: "title id07 but it's really really really long", ID: "id07"},
|
||||
{Title: "title id00 / title id10", ID: "id00/id10/id10"},
|
||||
{Title: "title id00 / title id10 / title id20", ID: "id00/id10/id20"},
|
||||
},
|
||||
"Tree": `{
|
||||
"Leaf": {"Meta":{"Title": "","ReadOnly":false}},
|
||||
"Branches": {
|
||||
"id00": {
|
||||
"Leaf": {"Meta":{"Title": "title id00","ReadOnly":false}},
|
||||
"Branches": {
|
||||
"id10": {"Leaf":{"Meta":{"Title":"title id10","ReadOnly":false}},"Branches":{
|
||||
"id20": {"Leaf":{"Meta":{"Title":"title id20","ReadOnly":false}},"Branches":{}}
|
||||
}},
|
||||
"id11": {"Leaf":{"Meta":{"Title":"title id11","ReadOnly":false}},"Branches":{}}
|
||||
}
|
||||
},
|
||||
"id01": {"Leaf":{"Meta":{"Title":"title id01","ReadOnly":false}},"Branches":{}},
|
||||
"id02": {"Leaf":{"Meta":{"Title":"title id02","ReadOnly":false}},"Branches":{}},
|
||||
"id03": {"Leaf":{"Meta":{"Title":"title id03","ReadOnly":false}},"Branches":{}},
|
||||
"id04": {"Leaf":{"Meta":{"Title":"title id04","ReadOnly":false}},"Branches":{}},
|
||||
"id04": {"Leaf":{"Meta":{"Title":"title id04","ReadOnly":false}},"Branches":{}},
|
||||
"id05": {"Leaf":{"Meta":{"Title":"title id05","ReadOnly":false}},"Branches":{}},
|
||||
"id06": {"Leaf":{"Meta":{"Title":"title id06","ReadOnly":false}},"Branches":{}},
|
||||
"id07": {"Leaf":{"Meta":{"Title":"title id07 but it's really really really long","ReadOnly":false}},"Branches":{}}
|
||||
}
|
||||
}`,
|
||||
}
|
||||
if err := recursePwd(func(p string) error {
|
||||
switch path.Ext(p) {
|
||||
case ".ctmpl":
|
||||
target := path.Join(path.Dir(p), "."+path.Base(p)+".html")
|
||||
f, err := os.Create(target)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
templateToExecute := strings.TrimSuffix(path.Base(p), path.Ext(p))
|
||||
tmpl := t(p)
|
||||
defer log.Printf("rendering %s (...%s) as %s", templateToExecute, path.Join(path.Base(path.Dir(p)), path.Base(p)), target)
|
||||
if strings.HasPrefix(templateToExecute, "_") {
|
||||
testTemplate := `
|
||||
{{ define "test" }}
|
||||
<body class="fullscreen" style="border: 10px solid red;">
|
||||
`
|
||||
for _, subtemplate := range always {
|
||||
testTemplate += fmt.Sprintf(`{{ template %q . }}`, subtemplate)
|
||||
}
|
||||
testTemplate += fmt.Sprintf(`{{ template %q . }}{{ end }}`, templateToExecute)
|
||||
testTemplate += `
|
||||
</body>
|
||||
`
|
||||
tmpl = template.Must(tmpl.Parse(testTemplate))
|
||||
templateToExecute = "test"
|
||||
}
|
||||
return tmpl.Lookup(templateToExecute).Execute(f, data)
|
||||
}
|
||||
return nil
|
||||
}); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
func recursePwd(foo func(string) error) error {
|
||||
wd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return recurseD(wd, foo)
|
||||
}
|
||||
|
||||
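// recurseD walks d depth-first, skipping dot-files, and calls foo on every other file it finds.
|
||||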
func recurseD(d string, foo func(string) error) error {
|
||||
entries, err := os.ReadDir(d)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, entry := range entries {
|
||||
if entry.IsDir() {
|
||||
if err := recurseD(path.Join(d, entry.Name()), foo); err != nil {
|
||||
return err
|
||||
}
|
||||
} else if strings.HasPrefix(entry.Name(), ".") {
|
||||
} else if err := foo(path.Join(d, entry.Name())); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
@@ -0,0 +1,20 @@
|
|||
{{ define "search" }}
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Search</title>
|
||||
{{ template "_import" }}
|
||||
</head>
|
||||
<body class="fullscreen tb_fullscreen lr_fullscreen" style="position: absolute">
|
||||
<div class="rows" style="height: inherit;">
|
||||
{{ template "_topbar" . }}
|
||||
<div class="columns thic_flex tb_buffer" style="height: calc(100% - 4rem);">
|
||||
{{ template "_filetree" . }}
|
||||
<div class="thic_flex lr_fullscreen" style="margin-left: 1em; width: 5px;">
|
||||
{{ template "_results" . }}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
{{ end }}
|
||||
|
|
@@ -0,0 +1,57 @@
|
|||
{{ define "_about" }}
|
||||
<div class="fullscreen tb_fullscreen">
|
||||
<h1>Welcome!</h1>
|
||||
|
||||
<h2>TLDR; how do I write something?</h2>
|
||||
<div>
|
||||
<ol>
|
||||
<li>Click a `+` button somewhere in the tree on the left</li>
|
||||
<li>Hit "save"</li>
|
||||
<li>You'll see "Success!" in green at the bottom on save</li>
|
||||
</ol>
|
||||
</div>
|
||||
|
||||
<h2>What is this?</h2>
|
||||
<div>
|
||||
This is a one-stop shop for reading, searching, and optionally writing docs.
|
||||
</div>
|
||||
|
||||
<h2>Why would I use it? (It looks a little... "janky", no offense)</h2>
|
||||
<div>
|
||||
<ul>
|
||||
<li>Load your Gitlab, Gitlab Wikis, Google Docs, Google Spreadsheets, and Google Slides and enjoy that search bar above.</li>
|
||||
<li>No version control BUT very fast to edit and hit "save"</li>
|
||||
<li>Automagically updates, so throw a link to your docs here and continue using Gitlab/Google/etc. as you were</li>
|
||||
<li>Link to a Gitlab repo/path/wiki and automagically get the entire tree</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<h2>What's this about magic?</h2>
|
||||
<div>
|
||||
<ul>
|
||||
<li>Create a file that just contains "https://gitlab.com/my/repo/-/tree/master/README.md" or "https://docs.google.com/docs/my-doc/edit", wait some time, and now it's an updating version of that doc</li>
|
||||
<li>Create a file that just contains "https://gitlab.com/my/repo/-/tree/master/runbooks", wait some time, and now it's an updating version of all those docs</li>
|
||||
</ul>
|
||||
|
||||
<h3>But how do I use it?</h3>
|
||||
<div>
|
||||
<ol>
|
||||
<li>Make or edit a file</li>
|
||||
<li>The first line is a link to Gitlab or Google</li>
|
||||
<li>Save</li>
|
||||
<li>Wait</li>
|
||||
</ol>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<h2>I got a bone to pick with you!!! Who are you exactly?</h2>
|
||||
<div>
|
||||
<table>
|
||||
<tr><td> Slack User </td><td> @breel </td></tr>
|
||||
<tr><td> Email </td><td> breel@qualtrics.com </td></tr>
|
||||
<tr><td> Slack Channel </td><td> #storage-platform </td></tr>
|
||||
<tr><td> Gitlab </td><td> TODO </td></tr>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
{{ end }}
|
||||
|
|
@@ -0,0 +1,142 @@
|
|||
{{ define "_editor" }}
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/easymde/dist/easymde.min.css">
|
||||
<script src="https://cdn.jsdelivr.net/npm/easymde/dist/easymde.min.js"></script>
|
||||
<style>
|
||||
#easyMDEwrap {
|
||||
flex-grow: 1;
|
||||
}
|
||||
.CodeMirror {
|
||||
min-height: 7em;
|
||||
}
|
||||
.CodeMirror-scroll, .CodeMirror-sizer {
|
||||
height: auto !important;
|
||||
}
|
||||
.CodeMirror-sizer {
|
||||
min-height: 10rem !important;
|
||||
}
|
||||
#article {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
#titlePath, #title {
|
||||
font-size: 2rem;
|
||||
font-weight: 600;
|
||||
}
|
||||
.EasyMDEContainer button {
|
||||
color: black;
|
||||
}
|
||||
img {
|
||||
max-width: 100%;
|
||||
max-height: 100%;
|
||||
}
|
||||
.monospace {
|
||||
font-family: Consolas,Monaco,Lucida Console,Liberation Mono,DejaVu Sans Mono,Bitstream Vera Sans Mono,Courier New, monospace;
|
||||
}
|
||||
.lil_btn {
|
||||
width: initial;
|
||||
display: inline-block;
|
||||
}
|
||||
input, label, textarea {
|
||||
margin: initial;
|
||||
}
|
||||
.editor-toolbar > button.preview {
|
||||
color: #08c;
|
||||
}
|
||||
</style>
|
||||
<script>
|
||||
var saveFeedbackInterval = null
|
||||
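// pushFile PUTs the editor contents to /api/v0/files/<id> with the page title in the Title header, then briefly shows the "Saved!" banner.
|
||||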
function pushFile() {
|
||||
const title = document.getElementById("title").innerHTML ? document.getElementById("title").innerHTML : ""
|
||||
const body = easyMDE.value() ? easyMDE.value() : ""
|
||||
const id = {{ js .This.ID }}
|
||||
const headers = {}
|
||||
if (title)
|
||||
headers["Title"] = title
|
||||
http("PUT", "/api/v0/files/" + id, (body, status) => {
|
||||
if (status != 200) {
|
||||
alert(`failed to push file ${id}: ${status}: ${body}`)
|
||||
throw `failed to push file ${id}: ${status}: ${body}`
|
||||
}
|
||||
document.getElementById("saveFeedback").style.display = "block"
|
||||
if (saveFeedbackInterval) {
|
||||
clearTimeout(saveFeedbackInterval)
|
||||
}
|
||||
saveFeedbackInterval = setTimeout(() => {document.getElementById("saveFeedback").style.display = "none"}, 2500)
|
||||
}, body, headers)
|
||||
}
|
||||
function deleteFile() {
|
||||
const id = {{ js .This.ID }}
|
||||
const pid = {{ js .This.PID }}
|
||||
http("DELETE", "/api/v0/files/" + id, (body, status) => {
|
||||
if (status != 200) {
|
||||
alert(`failed to delete file ${id}: ${status}: ${body}`)
|
||||
throw `failed to delete file ${id}: ${status}: ${body}`
|
||||
}
|
||||
window.location.href = `${window.location.protocol}\/\/${window.location.host}/ui/files/${pid}`
|
||||
})
|
||||
}
|
||||
</script>
|
||||
<div class="fullscreen tb_fullscreen">
|
||||
<article id="article">
|
||||
<div class="columns">
|
||||
<span class="r_buffer">
|
||||
<form action="#" onsubmit="pushFile(); return false;">
|
||||
<input class="button-info lil_btn" type="submit" value="SAVE"/>
|
||||
</form>
|
||||
</span>
|
||||
<span id="titlePath">
|
||||
/
|
||||
{{ if ne .This.PID "" }}
|
||||
<a href="/ui/files/{{ .This.PID }}">{{ .This.PTitle }}</a> /
|
||||
{{ end }}
|
||||
</span>
|
||||
<span id="title" class="thic_flex" contenteditable>{{ .This.Title }}</span>
|
||||
<span class="l_buffer">
|
||||
<form onsubmit="deleteFile(); return false;"> <!-- TODO -->
|
||||
<input class="button-error lil_btn" type="submit" onclick="confirm('are you sure?');" value="DELETE"/>
|
||||
</form>
|
||||
</span>
|
||||
</div>
|
||||
<!-- todo: each line no is an anchor -->
|
||||
<div id="easyMDEwrap" class="monospace">
|
||||
<textarea id="my-text-area"></textarea>
|
||||
</div>
|
||||
<div style="min-height: 2em;"></div>
|
||||
<div id="saveFeedback" class="button success" style="text-align: right; cursor: auto; display: none;">
|
||||
Saved!
|
||||
</div>
|
||||
</article>
|
||||
</div>
|
||||
<script>
|
||||
const easyMDE = new EasyMDE({
|
||||
autoDownloadFontAwesome: true,
|
||||
autofocus: true,
|
||||
autosave: {
|
||||
enabled: false,
|
||||
},
|
||||
element: document.getElementById('my-text-area'),
|
||||
forceSync: true,
|
||||
indentWithTabs: false,
|
||||
initialValue: "{{ .This.Content }}",
|
||||
showIcons: ["code", "table"],
|
||||
spellChecker: false,
|
||||
sideBySideFullscreen: false,
|
||||
tabSize: 3,
|
||||
previewImagesInEditor: true,
|
||||
insertTexts: {
|
||||
image: [""],
|
||||
link: ["[](", ")"],
|
||||
},
|
||||
lineNumbers: true,
|
||||
lineWrapping: false,
|
||||
uploadImage: true,
|
||||
imageUploadEndpoint: "/api/v0/media", // POST wants {data: {filePath: "/..."}}
|
||||
imagePathAbsolute: false,
|
||||
renderingConfig: {
|
||||
codeSyntaxHighlighting: true,
|
||||
},
|
||||
status: ["lines", "words", "cursor"],
|
||||
})
|
||||
</script>
|
||||
{{ end }}
|
||||
|
|
@@ -0,0 +1,87 @@
|
|||
{{ define "_filetree" }}
|
||||
<style>
|
||||
details > details details {
|
||||
padding-inline-start: 2em;
|
||||
}
|
||||
summary {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
}
|
||||
summary.no-children {
|
||||
list-style: none;
|
||||
}
|
||||
summary.no-children::-webkit-details-marker {
|
||||
display: none;
|
||||
}
|
||||
#filetree {
|
||||
padding-right: 1em;
|
||||
}
|
||||
details > summary > .hamburger::before {
|
||||
content: "+";
|
||||
}
|
||||
details[open] > summary > .hamburger::before {
|
||||
content: "-";
|
||||
}
|
||||
</style>
|
||||
<div class="fullscreen tb_fullscreen" style="max-width: 25em; margin: auto;">
|
||||
<details open>
|
||||
<summary style="outline: none;"><span class="border button hamburger"></span></summary>
|
||||
<details open id="filetree">
|
||||
</details>
|
||||
</details>
|
||||
</div>
|
||||
<script>
|
||||
function drawTree(tree) {
|
||||
document.getElementById("filetree").innerHTML = branchHTML("", tree)
|
||||
}
|
||||
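// branchHTML renders one tree node: a <summary> row for the leaf itself followed by nested <details> blocks for its children.
|
||||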
function branchHTML(id, branch) {
|
||||
return `
|
||||
<summary class="${branchesHaveContent(branch.Branches) ? "" : "no-children"}">
|
||||
${leafHTML(id, branch)}
|
||||
</summary>
|
||||
${branchesHTML(id, branch.Branches)}
|
||||
`
|
||||
}
|
||||
function leafHTML(id, branch) {
|
||||
const href="/ui/files/" + (id ? id : "")
|
||||
var nameSafeId = id.replace(/\//g, "-")
|
||||
var parentNameSafeId = nameSafeId
|
||||
if (id.includes("/"))
|
||||
parentNameSafeId = id.slice(0, id.lastIndexOf("/")).replace(/\//g, "-")
|
||||
const name=`filetree-leaf-${nameSafeId}`
|
||||
const parentname=`filetree-leaf-${parentNameSafeId}`
|
||||
const title=id ? branch.Leaf.Meta.Title : "ROOT"
|
||||
const isLiveParent = '{{ .This.ID }}'.slice(0, id.length) == id
|
||||
const isLive = '{{ .This.ID }}' == id
|
||||
const linkToFile = `
|
||||
<div style="margin: 0; padding: 0; height: 0; width: 0;" id="${name}"></div>
|
||||
<a style="flex-grow: 1;" href="${href}#${parentname}">
|
||||
<button style="width: 100%; text-align: left; outline: none;" class="${isLiveParent ? `button button-info ${!isLive ? "button-border" : ""}` : ""}">
|
||||
${title}
|
||||
</button>
|
||||
</a>
|
||||
`
|
||||
return linkToFile + (branch.Leaf.Meta.ReadOnly ? "" : `<a href="${href}/${generateUUID().split("-")[0]}#${parentname}"><button>+</button></a>`)
|
||||
}
|
||||
function branchesHTML(id, branches) {
|
||||
if (!branchesHaveContent(branches))
|
||||
return ""
|
||||
var html = []
|
||||
var out = ``
|
||||
for(var i in branches) {
|
||||
html.push([branches[i].Leaf.Meta.Title, `<details open>` + branchHTML(i, branches[i]) + `</details>`])
|
||||
}
|
||||
html.sort()
|
||||
for(var i in html)
|
||||
out += html[i][1]
|
||||
return out
|
||||
}
|
||||
function branchesHaveContent(branches) {
|
||||
var n = 0
|
||||
for (var i in branches)
|
||||
n += 1
|
||||
return n > 0
|
||||
}
|
||||
drawTree(JSON.parse({{ .Tree }}))
|
||||
</script>
|
||||
{{ end }}
|
||||
|
|
@@ -0,0 +1,117 @@
|
|||
{{ define "_import" }}
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
|
||||
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/easymde/dist/easymde.min.css">
|
||||
<script src="https://cdn.jsdelivr.net/npm/easymde/dist/easymde.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/highlight.js/latest/highlight.min.js"></script>
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/highlight.js/latest/styles/github.min.css">
|
||||
|
||||
<link rel="stylesheet" href="https://unpkg.com/turretcss/dist/turretcss.min.css" crossorigin="anonymous">
|
||||
<!-- todo css
|
||||
<link rel="stylesheet" href="https://cdn.concisecss.com/concise.min.css">
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/water.css@2/out/light.css">
|
||||
<link rel="stylesheet" href="https://cdn.simplecss.org/simple.min.css">
|
||||
-->
|
||||
<style>
|
||||
html, body {
|
||||
background-color: #f8f8f8;
|
||||
}
|
||||
.columns {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
}
|
||||
.rows {
|
||||
width: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
.thic_flex {
|
||||
text-align: left;
|
||||
flex-grow: 1;
|
||||
}
|
||||
.mia {
|
||||
display: none;
|
||||
}
|
||||
.align_left {
|
||||
text-align: left;
|
||||
}
|
||||
.tb_buffer {
|
||||
margin-top: 1em;
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
.r_buffer {
|
||||
margin-right: 1em;
|
||||
}
|
||||
.l_buffer {
|
||||
margin-left: 1em;
|
||||
}
|
||||
.monospace {
|
||||
font-family: Consolas,Monaco,Lucida Console,Liberation Mono,DejaVu Sans Mono,Bitstream Vera Sans Mono,Courier New, monospace;
|
||||
}
|
||||
.lil_btn {
|
||||
width: initial;
|
||||
display: inline-block;
|
||||
}
|
||||
input, label, textarea {
|
||||
margin: initial;
|
||||
}
|
||||
.fullscreen {
|
||||
position: relative;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
padding: 5px;
|
||||
overflow: scroll;
|
||||
}
|
||||
.lr_fullscreen {
|
||||
width: 100%;
|
||||
/*max-width: 1024px;*/
|
||||
margin-right: auto;
|
||||
margin-left: auto;
|
||||
}
|
||||
.tb_fullscreen {
|
||||
height: 100%;
|
||||
}
|
||||
.button, button, input[type="button"] {
|
||||
height: auto;
|
||||
}
|
||||
</style>
|
||||
<script>
|
||||
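// http is a tiny XMLHttpRequest wrapper: it sends the request and invokes callback(responseText, status, getHeader) once the request is done.
|
||||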
function http(method, remote, callback, body, headers) {
|
||||
var xmlhttp = new XMLHttpRequest();
|
||||
xmlhttp.onreadystatechange = function() {
|
||||
if (xmlhttp.readyState == XMLHttpRequest.DONE) {
|
||||
callback(xmlhttp.responseText, xmlhttp.status, (key) => xmlhttp.getResponseHeader(key))
|
||||
}
|
||||
};
|
||||
xmlhttp.open(method, remote, true);
|
||||
if (typeof body == "undefined") {
|
||||
body = null
|
||||
}
|
||||
if (headers) {
|
||||
for (var key in headers)
|
||||
xmlhttp.setRequestHeader(key, headers[key])
|
||||
}
|
||||
xmlhttp.send(body);
|
||||
}
|
||||
function generateUUID() { // Public Domain/MIT // https://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid
|
||||
var d = new Date().getTime();//Timestamp
|
||||
var d2 = ((typeof performance !== 'undefined') && performance.now && (performance.now()*1000)) || 0;//Time in microseconds since page-load or 0 if unsupported
|
||||
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
|
||||
var r = Math.random() * 16;//random number between 0 and 16
|
||||
if(d > 0){//Use timestamp until depleted
|
||||
r = (d + r)%16 | 0;
|
||||
d = Math.floor(d/16);
|
||||
} else {//Use microseconds since page-load if supported
|
||||
r = (d2 + r)%16 | 0;
|
||||
d2 = Math.floor(d2/16);
|
||||
}
|
||||
return (c === 'x' ? r : (r & 0x3 | 0x8)).toString(16);
|
||||
}
|
||||
);
|
||||
}
|
||||
</script>
|
||||
{{ end }}
|
||||
|
|
@@ -0,0 +1,16 @@
|
|||
{{ define "_namespace" }}
|
||||
<script>
|
||||
function setNamespace() {
|
||||
document.getElementById("namespace").disabled = true
|
||||
window.location.href = `${window.location.protocol}`+"//"+`${window.location.host}/ui/files?namespace=${document.getElementById("namespace").value}`
|
||||
}
|
||||
</script>
|
||||
{{ $cur := .Namespace }}
|
||||
{{ if .Namespaces }}
|
||||
<select id="namespace" onload="markNamespace()" onchange="setNamespace()" style="max-width: 7rem;">
|
||||
{{ range .Namespaces }}
|
||||
<option {{ if eq $cur . }}selected{{ end }}>{{ . }}</option>
|
||||
{{ end }}
|
||||
</select>
|
||||
{{ end }}
|
||||
{{ end }}
|
||||
|
|
@@ -0,0 +1,9 @@
|
|||
{{ define "_readonly" }}
|
||||
<div class="fullscreen tb_fullscreen">
|
||||
<a href="/ui/files/{{ .This.ID }}?edit"><button>Edit this page</button></a>
|
||||
<article id="article"></article>
|
||||
<script>
|
||||
document.getElementById("article").innerHTML = {{ .This.Content }}
|
||||
</script>
|
||||
</div>
|
||||
{{ end }}
|
||||
|
|
@@ -0,0 +1,14 @@
|
|||
{{ define "_results" }}
|
||||
<style>
|
||||
</style>
|
||||
<div class="fullscreen tb_fullscreen">
|
||||
<ul id="results">
|
||||
{{ range .Results }}
|
||||
<li>
|
||||
<a href="/ui/files/{{ .ID }}">{{ .Title }}</a>
|
||||
</li>
|
||||
{{ end }}
|
||||
</ul>
|
||||
</div>
|
||||
{{ end }}
|
||||
|
|
@@ -0,0 +1,6 @@
|
|||
{{ define "_searchbar" }}
|
||||
<form class="columns thic_flex" action="/ui/search" method="GET">
|
||||
<input class="thic_flex" type="text" name="q" placeholder="space delimited search regexp"/>
|
||||
<input class="info lil_btn" type="submit" value="search"/>
|
||||
</form>
|
||||
{{ end }}
|
||||
|
|
@@ -0,0 +1,6 @@
|
|||
{{ define "_topbar" }}
|
||||
<div class="columns lr_fullscreen">
|
||||
{{ template "_namespace" . }}
|
||||
{{ template "_searchbar" . }}
|
||||
</div>
|
||||
{{ end }}
|
||||
|
|
@@ -0,0 +1,582 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"local/gziphttp"
|
||||
"local/router"
|
||||
"local/simpleserve/simpleserve"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"path"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/gomarkdown/markdown"
|
||||
"github.com/gomarkdown/markdown/html"
|
||||
"github.com/gomarkdown/markdown/parser"
|
||||
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
type Server struct {
|
||||
router *router.Router
|
||||
root string
|
||||
auth auth
|
||||
user *User
|
||||
}
|
||||
|
||||
func NewServer(root string, auth auth) *Server {
|
||||
return &Server{
|
||||
root: root,
|
||||
auth: auth,
|
||||
}
|
||||
}
|
||||
|
||||
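// WithUser returns a shallow copy of the server bound to the given user, leaving the original server's user untouched.
|
||||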
func (server *Server) WithUser(user, group string, groups []string) *Server {
|
||||
s2 := *server
|
||||
s2.user = &User{
|
||||
User: user,
|
||||
Group: group,
|
||||
Groups: groups,
|
||||
}
|
||||
return &s2
|
||||
}
|
||||
|
||||
func (server *Server) Routes() error {
|
||||
server.router = router.New()
|
||||
wildcard := func(s string) string {
|
||||
return strings.TrimSuffix(s, "/") + "/" + router.Wildcard
|
||||
}
|
||||
wildcards := func(s string) string {
|
||||
return wildcard(s) + router.Wildcard
|
||||
}
|
||||
for path, handler := range map[string]func(http.ResponseWriter, *http.Request) error{
|
||||
"/": server.rootHandler,
|
||||
"/api/v0/tree": server.apiV0TreeHandler,
|
||||
"/api/v0/media": server.apiV0MediaHandler,
|
||||
wildcard("/api/v0/media"): server.apiV0MediaIDHandler,
|
||||
wildcards("/api/v0/files"): server.apiV0FilesHandler,
|
||||
"/api/v0/search": server.apiV0SearchHandler,
|
||||
"/ui": server.rootHandler,
|
||||
"/ui/search": server.uiSearchHandler,
|
||||
wildcards("/ui/files"): server.uiFilesHandler,
|
||||
} {
|
||||
if err := server.router.Add(path, server.tryCatchHttpHandler(handler)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (server *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||
if server.auth != nil {
|
||||
s2, done, err := server.authenticate(w, r)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
if done {
|
||||
return
|
||||
}
|
||||
if s2 != nil {
|
||||
server = s2
|
||||
}
|
||||
}
|
||||
if err := server.Routes(); err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
server.router.ServeHTTP(w, r)
|
||||
}
|
||||
|
||||
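// tryCatchHttpHandler adapts an error-returning handler into an http.HandlerFunc, gzipping the response when the client accepts it and turning any returned error into a 500 plus a log line.
|
||||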
func (server *Server) tryCatchHttpHandler(handler func(http.ResponseWriter, *http.Request) error) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
if gziphttp.Can(r) {
|
||||
w2 := gziphttp.New(w)
|
||||
defer w2.Close()
|
||||
w = w2
|
||||
}
|
||||
if err := handler(w, r); err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
log.Printf("failed handling %s: %v", r.URL.String(), err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (server *Server) apiV0TreeHandler(w http.ResponseWriter, r *http.Request) error {
|
||||
tree := server.tree()
|
||||
branches, err := tree.GetRootMeta()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return json.NewEncoder(w).Encode(branches)
|
||||
}
|
||||
|
||||
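// ensureAndWrite creates any missing parent directories of p and then writes b to it.
|
||||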
func ensureAndWrite(p string, b []byte) error {
|
||||
if err := os.MkdirAll(path.Dir(p), os.ModePerm); err != nil {
|
||||
return err
|
||||
}
|
||||
return ioutil.WriteFile(p, b, os.ModePerm)
|
||||
}
|
||||
|
||||
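// apiV0MediaHandler stores the uploaded file under a fresh UUID and responds with {"data":{"filePath":"/api/v0/media/<name>"}}, the shape EasyMDE expects from imageUploadEndpoint.
|
||||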
func (server *Server) apiV0MediaHandler(w http.ResponseWriter, r *http.Request) error {
|
||||
id := uuid.New().String()
|
||||
filePath, err := server.postContentHandler(server.diskMediaPath(id), w, r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return json.NewEncoder(w).Encode(map[string]map[string]string{
|
||||
"data": map[string]string{
|
||||
"filePath": path.Join("/api/v0/media", path.Base(filePath)),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func (server *Server) apiV0MediaIDHandler(w http.ResponseWriter, r *http.Request) error {
|
||||
switch r.Method {
|
||||
case http.MethodGet:
|
||||
return server.apiV0MediaIDGetHandler(w, r)
|
||||
case http.MethodPut:
|
||||
return server.apiV0MediaIDPutHandler(w, r)
|
||||
case http.MethodDelete:
|
||||
return server.apiV0MediaIDDelHandler(w, r)
|
||||
}
|
||||
http.NotFound(w, r)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (server *Server) apiV0MediaIDPutHandler(w http.ResponseWriter, r *http.Request) error {
|
||||
panic("not impl")
|
||||
}
|
||||
|
||||
func (server *Server) apiV0MediaIDDelHandler(w http.ResponseWriter, r *http.Request) error {
|
||||
id := path.Base(r.URL.Path)
|
||||
os.Remove(server.diskMediaPath(id))
|
||||
return nil
|
||||
}
|
||||
|
||||
func (server *Server) apiV0MediaIDGetHandler(w http.ResponseWriter, r *http.Request) error {
|
||||
id := path.Base(r.URL.Path)
|
||||
return server.getContentHandler(server.diskMediaPath(id), w, r)
|
||||
}
|
||||
|
||||
func (server *Server) getContentHandler(filePath string, w http.ResponseWriter, r *http.Request) error {
|
||||
if r.Method != http.MethodGet {
|
||||
return errors.New("not found")
|
||||
}
|
||||
f, err := os.Open(filePath)
|
||||
if os.IsNotExist(err) {
|
||||
http.NotFound(w, r)
|
||||
return nil
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
simpleserve.SetContentTypeIfMedia(w, r)
|
||||
io.Copy(w, f)
|
||||
return nil
|
||||
}
|
||||
|
||||
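// postContentHandler accepts either a single multipart file upload or a raw body, writes it to filePath (extended with a best-guess extension for uploads), and returns the path actually written.
|
||||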
func (server *Server) postContentHandler(filePath string, w http.ResponseWriter, r *http.Request) (string, error) {
	if r.Method != http.MethodPost {
		return "", errors.New("not found")
	}
	if strings.HasPrefix(r.Header.Get("Content-Type"), "multipart/form-data") {
		kb := int64(1 << 10)
		mb := kb << 10
		if err := r.ParseMultipartForm(10 * mb); err != nil {
			return "", err
		}
		if len(r.MultipartForm.File) != 1 {
			return "", errors.New("not exactly 1 file found in request")
		}
		for _, infos := range r.MultipartForm.File {
			if len(infos) != 1 {
				return "", errors.New("not exactly 1 file info found in request")
			}
			ext := path.Ext(infos[0].Filename)
			if h, ok := infos[0].Header["Content-Type"]; ok {
				ext = path.Base(h[0])
			}
			filePath += "." + ext
			f, err := infos[0].Open()
			if err != nil {
				return "", err
			}
			defer f.Close()
			r.Body = f
		}
	} else if strings.HasPrefix(r.Header.Get("Content-Type"), "application/x-www-form-urlencoded") {
		if err := r.ParseForm(); err != nil {
			return "", err
		}
		return "", fmt.Errorf("parse form: %+v", r.PostForm)
	}
	return filePath, server.putContentHandler(filePath, w, r)
}

func (server *Server) putContentHandler(filePath string, w http.ResponseWriter, r *http.Request) error {
	defer r.Body.Close()
	b, err := ioutil.ReadAll(r.Body)
	if err != nil {
		return err
	}
	return ensureAndWrite(filePath, b)
}

func (server *Server) uiSearchHandler(w http.ResponseWriter, r *http.Request) error {
	t, err := server.uiSubTemplates()
	if err != nil {
		return err
	}
	t, err = t.ParseFiles(path.Join(server.root, "ui", "search.ctmpl"))
	if err != nil {
		return err
	}
	idsTitles, err := server._apiV0SearchHandler(r.URL.Query().Get("q"))
	if err != nil {
		return err
	}
	data := make([]struct {
		Title string
		ID    ID
	}, len(idsTitles))
	for i := range idsTitles {
		data[i].ID = NewID(idsTitles[i][0])
		data[i].Title = idsTitles[i][1]
	}
	tree := server.tree()
	branches, err := tree.GetRootMeta()
	if err != nil {
		return err
	}
	branchesJSON, err := json.Marshal(branches)
	if err != nil {
		return err
	}
	return t.Lookup("search").Execute(w, map[string]interface{}{
		"Results":    data,
		"Tree":       string(branchesJSON),
		"Namespaces": server.getUser().Groups,
		"Namespace":  server.getUser().Group,
	})
}

func (server *Server) getUser() User {
	if server.user != nil {
		return *server.user
	}
	return User{}
}

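// uiFilesHandler renders the files page for the id taken from the URL path:
// it parses the shared sub-templates plus files.ctmpl, resolves the leaf and
// its parent, pre-renders read-only markdown unless ?edit is present, and
// executes the "files" template with the tree and the user's namespaces.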
func (server *Server) uiFilesHandler(w http.ResponseWriter, r *http.Request) error {
	id := NewID(strings.TrimPrefix(r.URL.Path, "/ui/files"))
	t, err := server.uiSubTemplates()
	if err != nil {
		return err
	}
	t, err = t.ParseFiles(path.Join(server.root, "ui", "files.ctmpl"))
	if err != nil {
		return err
	}
	tree := server.tree()
	branches, err := tree.GetRootMeta()
	if err != nil {
		return err
	}
	branchesJSON, err := json.Marshal(branches)
	if err != nil {
		return err
	}
	var parent Leaf
	var leaf Leaf
	if id != "" {
		if id.Pop() != "" {
			parent, err = tree.Get(id.Pop())
			if err != nil {
				return fmt.Errorf("failed to get pid %q: %v", id.Pop(), err)
			}
		}
		leaf, err = tree.Get(id)
		if err != nil {
			leaf.Meta.Title = "My New File"
		}
	}
	if leaf.Meta.ReadOnly {
		if _, ok := r.URL.Query()["edit"]; !ok {
			leaf.Content = Gomarkdown([]byte(leaf.Content))
		} else {
			leaf.Meta.ReadOnly = false
		}
	}
	data := map[string]interface{}{
		"This": map[string]interface{}{
			"Title":    leaf.Meta.Title,
			"ReadOnly": leaf.Meta.ReadOnly,
			"Content":  leaf.Content,
			"ID":       id.String(),
			"PID":      id.Pop().String(),
			"PTitle":   parent.Meta.Title,
		},
		"Tree":       string(branchesJSON),
		"Namespaces": server.getUser().Groups,
		"Namespace":  server.getUser().Group,
	}
	return t.Lookup("files").Execute(w, data)
}

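// uiSubTemplates recursively collects every "_"-prefixed *.ctmpl file under
// <root>/ui and parses them into one shared template set for the UI pages.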
func (server *Server) uiSubTemplates() (*template.Template, error) {
	templateFiles := []string{}
	var loadTemplateFilesFromDir func(string) error
	loadTemplateFilesFromDir = func(root string) error {
		entries, err := os.ReadDir(root)
		if err != nil {
			return err
		}
		for _, entry := range entries {
			entryPath := path.Join(root, entry.Name())
			if entry.IsDir() {
				if err := loadTemplateFilesFromDir(entryPath); err != nil {
					return err
				}
			} else if !strings.HasPrefix(path.Base(entryPath), "_") {
			} else if strings.HasSuffix(entryPath, ".ctmpl") {
				templateFiles = append(templateFiles, entryPath)
			}
		}
		return nil
	}
	if err := loadTemplateFilesFromDir(path.Join(server.root, "ui")); err != nil {
		return nil, err
	}
	return template.ParseFiles(templateFiles...)
}

func (server *Server) rootHandler(w http.ResponseWriter, r *http.Request) error {
	http.Redirect(w, r, "/ui/files", 302)
	return nil
}

func (server *Server) tree() Tree {
	return NewTree(path.Join(server.root, "files", server.getUser().Group))
}

func (server *Server) diskMediaPath(id string) string {
	return path.Join(server.root, "media", id)
}

func (server *Server) apiV0FilesHandler(w http.ResponseWriter, r *http.Request) error {
	switch r.Method {
	case http.MethodPost:
		return server.apiV0FilesPostHandler(w, r)
	case http.MethodGet:
		return server.apiV0FilesIDGetHandler(w, r)
	case http.MethodPut:
		return server.apiV0FilesIDPutHandler(w, r)
	case http.MethodDelete:
		return server.apiV0FilesIDDelHandler(w, r)
	}
	http.NotFound(w, r)
	return nil
}

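// apiV0FilesPostHandler spools the posted content to a temp file, derives a
// fresh child id under the parent taken from the URL, stores the new leaf in
// the tree, and returns its API path.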
func (server *Server) apiV0FilesPostHandler(w http.ResponseWriter, r *http.Request) error {
	f, err := ioutil.TempFile(os.TempDir(), "filesPost*")
	if err != nil {
		return err
	}
	f.Close()
	defer os.Remove(f.Name())

	filePath, err := server.postContentHandler(f.Name(), w, r)
	if err != nil {
		return err
	}
	defer os.Remove(filePath)

	b, err := ioutil.ReadFile(filePath)
	if err != nil {
		return err
	}
	r.Body = io.NopCloser(bytes.NewReader(b))

	pid := server.fileId(r)
	id := NewID(pid).Push(strings.Split(uuid.New().String(), "-")[0])
	leaf, err := NewHTTPRequestLeaf(r)
	if err != nil {
		return err
	}
	if err := server.tree().Put(id, leaf); err != nil {
		return err
	}
	return json.NewEncoder(w).Encode(map[string]map[string]string{
		"data": map[string]string{
			"filePath": path.Join("/api/v0/files/", id.URLSafeString()),
		},
	})
}

func (server *Server) apiV0FilesIDGetHandler(w http.ResponseWriter, r *http.Request) error {
	id := NewID(server.fileId(r))
	if id.String() == "" {
		return fmt.Errorf("no id found: %+v", id)
	}

	leaf, err := server.tree().Get(id)
	if os.IsNotExist(err) {
		http.NotFound(w, r)
		return nil
	} else if err != nil {
		return err
	}

	return leaf.WriteHTTP(w)
}

func (server *Server) apiV0FilesIDDelHandler(w http.ResponseWriter, r *http.Request) error {
	id := NewID(server.fileId(r))
	if id.String() == "" {
		return fmt.Errorf("no id found: %+v", id)
	}

	leaf, err := server.tree().Get(id)
	if os.IsNotExist(err) {
		return nil
	} else if err != nil {
		return err
	}
	leaf.Meta.Deleted = true

	return server.tree().Put(id, leaf)
}

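// fileId extracts the file id from the request path by stripping the leading
// "api/v0/files" segment and the surrounding slashes.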
func (server *Server) fileId(r *http.Request) string {
	return strings.Trim(
		strings.TrimPrefix(
			strings.Trim(r.URL.Path, "/"),
			"api/v0/files",
		),
		"/",
	)
}

func (server *Server) apiV0FilesIDPutHandler(w http.ResponseWriter, r *http.Request) error {
	id := NewID(server.fileId(r))
	if id.String() == "" {
		return fmt.Errorf("no id found: %+v", id)
	}

	leaf, err := server.tree().Get(id)
	if os.IsNotExist(err) {
	} else if err != nil {
		return err
	}

	updatedLeaf, err := NewHTTPRequestLeaf(r)
	if err != nil {
		return err
	}
	leaf = leaf.Merge(updatedLeaf)

	if err := server.tree().Put(id, leaf); err != nil {
		return err
	}
	return json.NewEncoder(w).Encode(map[string]map[string]string{
		"data": map[string]string{
			"filePath": path.Join("/api/v0/files/", id.URLSafeString()),
		},
	})
}

func (server *Server) apiV0SearchHandler(w http.ResponseWriter, r *http.Request) error {
	query := r.URL.Query().Get("q")
	idsTitles, err := server._apiV0SearchHandler(query)
	if err != nil {
		return err
	}
	result := make([]string, len(idsTitles))
	for i := range idsTitles {
		result[i] = idsTitles[i][0]
	}
	return json.NewEncoder(w).Encode(result)
}

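// _apiV0SearchHandler ANDs the space-separated query terms as
// case-insensitive patterns over each leaf's content and title and returns
// the matching [id, breadcrumbed title] pairs.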
func (server *Server) _apiV0SearchHandler(query string) ([][2]string, error) {
	queries := strings.Split(query, " ")
	if len(queries) == 0 {
		return [][2]string{}, nil
	}
	patterns := []*regexp.Regexp{}
	unsafepattern := regexp.MustCompile(`[^a-zA-Z0-9]`)
	for _, query := range queries {
		if len(query) > 0 {
			query = unsafepattern.ReplaceAllString(query, ".")
			patterns = append(patterns, regexp.MustCompile("(?i)"+query))
		}
	}
	if len(patterns) == 0 {
		return [][2]string{}, nil
	}
	tree, err := server.tree().GetRoot()
	if err != nil {
		return nil, err
	}
	result := [][2]string{}
	if err := tree.ForEach(func(id ID, leaf Leaf) error {
		for _, pattern := range patterns {
			if !pattern.MatchString(leaf.Content) && !pattern.MatchString(leaf.Meta.Title) {
				return nil
			}
		}
		title := leaf.Meta.Title
		pid := id.Pop()
		for pid != "" {
			parent, err := server.tree().Get(pid)
			if err != nil {
				return err
			}
			title = path.Join(parent.Meta.Title, title)
			pid = pid.Pop()
		}
		result = append(result, [2]string{id.URLSafeString(), title})
		return nil
	}); err != nil {
		return nil, fmt.Errorf("failed for each: %v", err)
	}
	return result, nil
}

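// Gomarkdown renders markdown to HTML with a table of contents and a broad
// set of gomarkdown parser extensions enabled.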
func Gomarkdown(b []byte) string {
	renderer := html.NewRenderer(html.RendererOptions{
		Flags: html.CommonFlags | html.TOC,
	})
	ext := parser.NoExtensions
	for _, extension := range []parser.Extensions{
		parser.NoIntraEmphasis,
		parser.Tables,
		parser.FencedCode,
		parser.Autolink,
		parser.Strikethrough,
		parser.SpaceHeadings,
		parser.HeadingIDs,
		parser.BackslashLineBreak,
		parser.DefinitionLists,
		parser.MathJax,
		parser.Titleblock,
		parser.AutoHeadingIDs,
		parser.Includes,
	} {
		ext |= extension
	}
	parser := parser.NewWithExtensions(ext)
	content := markdown.ToHTML(b, parser, renderer)
	return string(content) + "\n"
}

@ -0,0 +1,255 @@
package main

import (
	"bytes"
	"encoding/json"
	"net/http"
	"net/http/httptest"
	"net/url"
	"path"
	"strings"
	"testing"
)

func TestServerRoutes(t *testing.T) {
	server := NewServer(t.TempDir(), nil)
	if err := server.Routes(); err != nil {
		t.Fatal(err)
	}

	if err := ensureAndWrite(path.Join(server.root, "ui", "files.ctmpl"), []byte(`{{ define "files" }}{{ template "_import" }}HI FROM FILES{{ end }}`)); err != nil {
		t.Fatal(err)
	} else if err := ensureAndWrite(path.Join(server.root, "ui", "search.ctmpl"), []byte(`{{ define "search" }}{{ template "_import" }}HI FROM SEARCH{{ end }}`)); err != nil {
		t.Fatal(err)
	} else if err := ensureAndWrite(path.Join(server.root, "ui", "templates", "_import.ctmpl"), []byte(`{{ define "_import" }}HI FROM IMPORT{{ end }}`)); err != nil {
		t.Fatal(err)
	}

	if err := ensureAndWrite(server.diskMediaPath("id"), []byte("hi")); err != nil {
		t.Fatal(err)
	}
	ensureAndWrite(server.diskMediaPath("delid"), []byte("hi"))

	tree := server.tree()
	leaf, _ := NewLeaf("", "getfid body")
	if err := tree.Put(NewID("getfid"), leaf); err != nil {
		t.Fatal(err)
	}

	leaf, _ = NewLeaf("putfid title", "initial putfid body")
	tree.Put(NewID("putfid"), leaf)

	leaf, _ = NewLeaf("delfid title", "delfid body")
	tree.Put(NewID("delfid"), leaf)

	t.Log(tree.GetRoot())

	ensureAndWrite(path.Join(server.root, "index.html"), []byte("mom"))

	cases := map[string]struct {
		path   string
		method string
		body   string
		want   string
	}{
		"v0: /: get": {
			path:   "/",
			method: http.MethodGet,
			want:   "/ui/files",
		},
		"v0: search: get": {
			path:   "/api/v0/search?q=getf%20bod",
			method: http.MethodGet,
			want:   `["getfid"]`,
		},
		"v0: tree: get": {
			path:   "/api/v0/tree",
			method: http.MethodGet,
			want:   `"Title":"Untitled",`,
		},
		"v0: media: post": {
			path:   "/api/v0/media",
			method: http.MethodPost,
			want:   `{"data":{"filePath":"/api/v0/media/`,
		},
		"v0: media id: del": {
			path:   "/api/v0/media/delid",
			method: http.MethodDelete,
		},
		"v0: media id: get": {
			path:   "/api/v0/media/id",
			method: http.MethodGet,
			want:   "hi",
		},
		"v0: files: post": {
			path:   "/api/v0/files",
			method: http.MethodPost,
		},
		"v0: files id: get": {
			path:   "/api/v0/files/getfid",
			method: http.MethodGet,
			want:   "getfid body",
		},
		"v0: files id: put": {
			path:   "/api/v0/files/putfid",
			method: http.MethodPut,
			body:   "putfid body",
		},
		"v0: files id: del": {
			path:   "/api/v0/files/delfid",
			method: http.MethodDelete,
		},
		"v0: /: redir": {
			path:   "/",
			method: http.MethodGet,
			want:   "/ui/files",
		},
		"v0: /ui/: redir": {
			path:   "/ui/",
			method: http.MethodGet,
			want:   "/ui/files",
		},
		"v0: /ui: redir": {
			path:   "/ui",
			method: http.MethodGet,
			want:   "/ui/files",
		},
		"v0: /ui/search": {
			path:   "/ui/search",
			method: http.MethodGet,
		},
		"v0: /ui/search?q=abc": {
			path:   "/ui/search?q=abc",
			method: http.MethodGet,
		},
		"v0: /ui/files/getfid": {
			path:   "/ui/files/getfid",
			method: http.MethodGet,
		},
		"v0: /ui/files": {
			path:   "/ui/files",
			method: http.MethodGet,
		},
	}

	for name, d := range cases {
		c := d
		t.Run(name, func(t *testing.T) {
			r := httptest.NewRequest(c.method, c.path, strings.NewReader(c.body))
			w := httptest.NewRecorder()
			server.ServeHTTP(w, r)
			if w.Code == http.StatusNotFound {
				t.Fatal(w)
			}
			if w.Code >= 400 {
				t.Fatal(w)
			}
			if len(c.want) > 0 && !strings.Contains(string(w.Body.Bytes()), c.want) {
				t.Fatal(w)
			}
			t.Logf("%s %s (%+v) => %s", c.method, c.path, w.Header(), w.Body.Bytes())
		})
	}
}

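// TestServerPutTreeGetFile exercises the create/read round trip: a PUT mints
// an explicit id under an existing parent, a POST creates a sibling with a
// generated id, and the tree and GET handlers are then checked against what
// was written.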
func TestServerPutTreeGetFile(t *testing.T) {
	server := NewServer(t.TempDir(), nil)
	if err := server.Routes(); err != nil {
		t.Fatal(err)
	}
	server.tree().Put(NewID("my pid"), Leaf{})
	var id string
	t.Run("put to create an id", func(t *testing.T) {
		r := httptest.NewRequest(http.MethodPut, "/my%20pid/my-put-id", strings.NewReader("body"))
		r.Header.Set("Title", "my put title")
		w := httptest.NewRecorder()
		if err := server.apiV0FilesIDPutHandler(w, r); err != nil {
			t.Fatal(err)
		}
		t.Logf("%s", w.Body.Bytes())
		if w.Code != http.StatusOK {
			t.Fatal(w)
		}
		var resp struct {
			Data struct {
				FilePath string `json:"filePath"`
			} `json:"data"`
		}
		if err := json.NewDecoder(w.Body).Decode(&resp); err != nil {
			t.Fatal(err)
		}
		if path.Base(resp.Data.FilePath) != "my-put-id" {
			t.Fatal(resp.Data.FilePath)
		}
	})
	t.Run("post", func(t *testing.T) {
		r := httptest.NewRequest(http.MethodPost, "/my%20pid", strings.NewReader("body"))
		r.Header.Set("Title", "my title")
		w := httptest.NewRecorder()
		if err := server.apiV0FilesHandler(w, r); err != nil {
			t.Fatal(err)
		}
		t.Logf("%s", w.Body.Bytes())
		if w.Code != http.StatusOK {
			t.Fatal(w)
		}
		var resp struct {
			Data struct {
				FilePath string `json:"filePath"`
			} `json:"data"`
		}
		if err := json.NewDecoder(w.Body).Decode(&resp); err != nil {
			t.Fatal(err)
		}
		id = strings.TrimPrefix(resp.Data.FilePath, "/api/v0/files/")
	})
	t.Run("tree", func(t *testing.T) {
		r := httptest.NewRequest(http.MethodGet, "/", nil)
		w := httptest.NewRecorder()
		if err := server.apiV0TreeHandler(w, r); err != nil {
			t.Fatal(err)
		}
		t.Logf("%s", w.Body.Bytes())
		if w.Code != http.StatusOK {
			t.Fatal(w)
		}
		if !bytes.Contains(w.Body.Bytes(), []byte(`{"Meta":{"Title":"my title","ReadOnly":false,"Deleted":false},"Content":"`)) {
			t.Fatal(w)
		}
		var branch Branch
		if err := json.NewDecoder(w.Body).Decode(&branch); err != nil {
			t.Fatal(err)
		}
		t.Logf("TODO: %+v", branch)
		if branch.Leaf != (Leaf{}) {
			t.Error(branch.Leaf)
		}
		if parent, ok := branch.Branches["my pid"]; !ok {
			t.Error(ok, branch)
		} else if parent.Leaf.Meta.Title != "Untitled" {
			t.Error(parent.Leaf)
		} else if child, ok := parent.Branches[NewID(id)]; !ok {
			t.Error(ok, NewID("my pid").Push(id), parent)
		} else if child.Leaf.Meta.Title != "my title" {
			t.Error(child.Leaf)
		}
	})
	t.Run("get", func(t *testing.T) {
		r := httptest.NewRequest(http.MethodGet, "/"+url.PathEscape(id), nil)
		t.Logf("%s", r.URL.String())
		w := httptest.NewRecorder()
		if err := server.apiV0FilesIDGetHandler(w, r); err != nil {
			t.Fatal(err)
		}
		t.Logf("%s", w.Body.Bytes())
		if w.Code != http.StatusOK {
			t.Fatal(w)
		}
		if !bytes.Contains(w.Body.Bytes(), []byte(`body`)) {
			t.Fatal(w)
		}
		if title := w.Header().Get("Title"); title != "my title" {
			t.Fatal(title)
		}
	})
}

@ -0,0 +1,7 @@
#! /bin/bash

curl localhost:3004/api/v0/files/A -X PUT -d 'A' -H 'Title: A'
curl localhost:3004/api/v0/files/A/AA -X PUT -d 'AA' -H 'Title: AA'
curl localhost:3004/api/v0/files/A/AA/AAA -X PUT -d 'AAA' -H 'Title: AAA'
curl localhost:3004/api/v0/files/A/AB -X PUT -d 'AB' -H 'Title: AB'
curl localhost:3004/api/v0/files/B -X PUT -d 'B' -H 'Title: B'
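
# Read-back sketch (assumes the server targeted above is still on :3004):
# curl localhost:3004/api/v0/tree
# curl localhost:3004/api/v0/files/A/AA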
@ -0,0 +1,21 @@
#! /bin/bash

cd "$(dirname "$BASH_SOURCE")"
files_d="$PWD/files"
runbooks_d="$HOME/repos/runbooks"
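# For every markdown runbook, write files/<relative path>/data.yaml holding
# the file's markdown as a JSON-encoded "content" string (JSON is valid YAML)
# plus a "title" taken from the basename.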
find "$runbooks_d" -type f -name "*.md" | while read -r line; do
|
||||
relative="${line#$runbooks_d/}"
|
||||
echo relative=$relative
|
||||
mkdir -p "$files_d"/"${relative%.md}"
|
||||
content="$(cat "$line" | jq -sR)"
|
||||
printf '{
|
||||
"content": %s,
|
||||
"title": "%s"
|
||||
}' "$content" "${relative##*/}" > "$files_d"/"${relative%.md}"/data.yaml
|
||||
done
|
||||
find "$files_d" -type d | while read -r files_d; do
|
||||
echo asserting "$files_d/data.yaml"
|
||||
if [ ! -f "$files_d"/data.yaml ]; then
|
||||
printf '{"title": "%s"}' "${files_d##*/}" > "$files_d"/data.yaml
|
||||
fi
|
||||
done
|
||||
|
|
@ -0,0 +1 @@
../public/ui
@ -0,0 +1,6 @@
users:
  breel:
    password: breel
    groups:
      - g1
      - g2
@ -0,0 +1,215 @@
package main

import (
	"encoding/json"
	"io/ioutil"
	"os"
	"path"

	yaml "gopkg.in/yaml.v2"
)

type Branch struct {
	Leaf     Leaf
	Branches map[ID]Branch
}

func (branch Branch) IsZero() bool {
	return branch.Leaf == (Leaf{}) && len(branch.Branches) == 0
}

func (branch Branch) ForEach(foo func(ID, Leaf) error) error {
	return branch.forEach(NewID(""), foo)
}

func (branch Branch) forEach(preid ID, foo func(ID, Leaf) error) error {
	if err := foo(preid, branch.Leaf); err != nil {
		return err
	}
	for id, child := range branch.Branches {
		if err := child.forEach(id, foo); err != nil {
			return err
		}
	}
	return nil
}

type Tree struct {
	root string
}

func NewTree(root string) Tree {
	return Tree{root: root}
}

func (tree Tree) WithRoot(root string) Tree {
	tree.root = root
	return tree
}

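// GetRootMeta returns the whole tree without leaf contents, reusing a cached
// root_meta.json snapshot when one exists; GetRoot below does the same
// including contents via root.json.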
func (tree Tree) GetRootMeta() (Branch, error) {
	if meta, ok := tree.getCachedRootMeta(); ok {
		return meta, nil
	}
	got, err := tree.getRoot(NewID(""), false, false)
	if err != nil {
		return Branch{}, err
	}
	tree.cacheRootMeta(got)
	return got, err
}

func (tree Tree) GetRoot() (Branch, error) {
	if root, ok := tree.getCachedRoot(); ok {
		return root, nil
	}
	got, err := tree.getRoot(NewID(""), true, false)
	if err != nil {
		return Branch{}, err
	}
	tree.cacheRoot(got)
	return got, err
}

func (tree Tree) getCachedRoot() (Branch, bool) {
	return tree.getCachedFrom("root.json")
}

func (tree Tree) getCachedRootMeta() (Branch, bool) {
	return tree.getCachedFrom("root_meta.json")
}

func (tree Tree) getCachedFrom(name string) (Branch, bool) {
	b, err := ioutil.ReadFile(path.Join(tree.root, name))
	if err != nil {
		return Branch{}, false
	}
	var branch Branch
	err = json.Unmarshal(b, &branch)
	return branch, err == nil
}

func (tree Tree) cacheRoot(branch Branch) {
	tree.cacheRootFrom("root.json", branch)
}

func (tree Tree) cacheRootMeta(branch Branch) {
	tree.cacheRootFrom("root_meta.json", branch)
}

func (tree Tree) cacheRootFrom(name string, branch Branch) {
	b, err := json.Marshal(branch)
	if err != nil {
		return
	}
	ensureAndWrite(path.Join(tree.root, name), b)
}

func (tree Tree) cacheClear() {
	os.Remove(path.Join(tree.root, "root.json"))
	os.Remove(path.Join(tree.root, "root_meta.json"))
}

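// getRoot walks the directory under tree.root: a data.yaml becomes the
// branch's Leaf and each subdirectory a child Branch, optionally stripping
// contents and skipping soft-deleted leaves.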
func (tree Tree) getRoot(pid ID, withContent, withDeleted bool) (Branch, error) {
	m := Branch{Branches: map[ID]Branch{}}
	entries, err := os.ReadDir(tree.root)
	if os.IsNotExist(err) {
		return m, nil
	}
	if err != nil {
		return Branch{}, err
	}
	for _, entry := range entries {
		if entry.Name() == "data.yaml" {
			if b, err := peekLeaf(withContent, path.Join(tree.root, entry.Name())); err != nil {
				return Branch{}, err
			} else if err := yaml.Unmarshal(b, &m.Leaf); err != nil {
				return Branch{}, err
			}
			if !withContent {
				m.Leaf.Content = ""
			}
			if m.Leaf.Meta.Deleted && !withDeleted {
				return Branch{Branches: map[ID]Branch{}}, nil
			}
		} else if entry.IsDir() {
			subtree := tree.WithRoot(path.Join(tree.root, entry.Name()))
			if branch, err := subtree.getRoot(pid.Push(entry.Name()), withContent, withDeleted); err != nil {
				return Branch{}, err
			} else if !branch.IsZero() && (!branch.Leaf.Meta.Deleted || withDeleted) {
				m.Branches[pid.Push(entry.Name())] = branch
			}
		}
	}
	return m, nil
}

func peekLeaf(all bool, path string) ([]byte, error) {
	return ioutil.ReadFile(path)
}

func (tree Tree) toDir(id ID) string {
	return path.Dir(tree.toData(id))
}

func (tree Tree) toData(id ID) string {
	return path.Join(tree.root, string(id), "data.yaml")
}

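// Put merges input over whatever is already stored at id, creating an empty
// data.yaml first when the leaf is new, and clears the cached root snapshots.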
func (tree Tree) Put(id ID, input Leaf) error {
	tree.cacheClear()
	if _, err := os.Stat(tree.toData(id)); os.IsNotExist(err) {
		b, err := yaml.Marshal(Leaf{})
		if err != nil {
			return err
		}
		if err := ensureAndWrite(tree.toData(id), b); err != nil {
			return err
		}
	}
	old, err := tree.Get(id)
	if err != nil {
		return err
	}
	b, err := yaml.Marshal(old.Merge(input))
	if err != nil {
		return err
	}
	if err := ensureAndWrite(tree.toData(id), b); err != nil {
		return err
	}
	return nil
}

func (tree Tree) Del(id ID) error {
	tree.cacheClear()
	got, err := tree.Get(id)
	if os.IsNotExist(err) {
		return nil
	}
	if err != nil {
		return err
	}
	if got.Meta.Deleted {
		return nil
	}
	got.Meta.Deleted = true
	return tree.Put(id, got)
}

func (tree Tree) HardDel(id ID) error {
	tree.cacheClear()
	os.RemoveAll(tree.toDir(id))
	return nil
}

func (tree Tree) Get(id ID) (Leaf, error) {
	f, err := os.Open(tree.toData(id))
	if err != nil {
		return Leaf{}, err
	}
	defer f.Close()
	var got Leaf
	err = yaml.NewDecoder(f).Decode(&got)
	return got, err
}

@ -0,0 +1,98 @@
package main

import (
	"encoding/json"
	"fmt"
	"path"
	"strconv"
	"testing"
)

func TestTreeForEach(t *testing.T) {
	tree := NewTree(t.TempDir())
	id := ""
	for i := 0; i < 5; i++ {
		id = path.Join(id, strconv.Itoa(i))
		leaf := Leaf{Content: id}
		leaf.Meta.Title = id
		if err := tree.Put(NewID(id), leaf); err != nil {
			t.Fatal(err)
		}
	}
	branch, err := tree.GetRoot()
	if err != nil {
		t.Fatal(err)
	}
	if err := branch.ForEach(func(id ID, leaf Leaf) error {
		t.Logf("id=%+v, leaf=%+v", id, leaf)
		return nil
	}); err != nil {
		t.Fatal(err)
	}
}

func TestTreeDel(t *testing.T) {
	tree := NewTree(t.TempDir())
	if err := tree.Put(NewID("id"), Leaf{}); err != nil {
		t.Fatal(err)
	}
	if err := tree.Put(NewID("id/subid"), Leaf{}); err != nil {
		t.Fatal(err)
	}

	if err := tree.Del(NewID("id")); err != nil {
		t.Fatal(err)
	} else if got, err := tree.Get(NewID("id")); err != nil {
		t.Fatal(err)
	} else if !got.Meta.Deleted {
		t.Fatal(got)
	}

	if root, err := tree.GetRoot(); err != nil {
		t.Fatal(err)
	} else if len(root.Branches) != 0 {
		t.Fatal(root.Branches)
	}

	if root, err := tree.getRoot(NewID(""), false, true); err != nil {
		t.Fatal(err)
	} else if len(root.Branches) != 1 {
		t.Fatal(root.Branches)
	}
}

func TestTreeCrud(t *testing.T) {
	tree := NewTree(t.TempDir())

	if m, err := tree.GetRoot(); err != nil {
		t.Fatal(err)
	} else if m.Branches == nil {
		t.Fatal(m)
	}

	if err := tree.Del(NewID("id")); err != nil {
		t.Fatal(err)
	}

	want := Leaf{}
	want.Meta.Title = "leaf title"
	want.Meta.Deleted = false
	want.Content = "leaf content"
	if err := tree.Put(NewID("id"), want); err != nil {
		t.Fatal(err)
	} else if l, err := tree.Get(NewID("id")); err != nil {
		t.Fatal(err)
	} else if l != want {
		t.Fatal(want, l)
	}

	if withContent, err := tree.GetRoot(); err != nil {
		t.Fatal(err)
	} else if withoutContent, err := tree.GetRootMeta(); err != nil {
		t.Fatal(err)
	} else if fmt.Sprint(withContent) == fmt.Sprint(withoutContent) {
		with, _ := json.MarshalIndent(withContent, "", " ")
		without, _ := json.MarshalIndent(withoutContent, "", " ")
		t.Fatalf("without content == with content: \n\twith=%s\n\twout=%s", with, without)
	}
}

@ -0,0 +1,50 @@
todo:
- create fileauth login file
- secret for cookie encrypt+decrypt
- secrets
- team-specific deployment;; prob grab a VM
- mark generated via meta so other files in the dir can be created, deleted, replaced safely
- links like `/Smoktests` in user-files home wiki don't rewrite
- map fullURLScraped->internalURL for relative links sometimes
- LDAP login
- scrape odo
- rewrite links if available to local
- anchor per line
- anchor links work
- ui; last updated; 2022.02.01T12:34:56
done:
- encrypt files at docker build time, put decrypt key in vault
- gitlab/-/blob/about.md does NOT map to exactly 1 file
- crawler does NOT modify title cause readme.md everywhere
- use `meta` so no need for extra level for explicit single files
- table of contents
- min-height for easymde
- /ui/files does not redir in b1
- anchors on gitlab wikis at least are bad
- gitlab wiki original links are empty
- /ui/files is an about page over a redir
- use `read-only` for autogenerated things;; could skip easymde and make google docs much faster
- new line after original link
- scrape gslide
- scrape gsheet
- scrape gdoc
- alert box; https://concisecss.com/documentation/ui
- hide checkbox for tree
- do not rewrite .md title vs. link cause hrefs to ./gobs.md wont work
- only one scroll bar
- https://codepen.io/bisserof/pen/nrMveb
- delete button does nothing
- search page tree is empty
- highlight current page
- fix links
- rewrite anchors (maybe gitlab already works :^))
- link to original in generated/scraped
- buttons to invis
- damned width css
- css
- https://developer.mozilla.org/en-US/docs/Web/API/History/pushState#change_a_query_parameter
- preview default via q param
- only 1 pid link in tree as title
- fix images
- breadcrumb; https://concisecss.com/documentation/ui
- convert hardcore to IDs are / so things can ignore this fact rather than partially [] and partially modified in frontend