commit c8eb52f9ba
Author: bel
Date: 2020-01-13 03:37:51 +00:00
2023 changed files with 702080 additions and 0 deletions

.rclone_repo/bin/cross-compile.go (Executable file, 297 lines)

@@ -0,0 +1,297 @@
// +build ignore
// Cross compile rclone - in go because I hate bash ;-)
package main
import (
"flag"
"fmt"
"io/ioutil"
"log"
"os"
"os/exec"
"path"
"path/filepath"
"regexp"
"runtime"
"sort"
"strings"
"sync"
"text/template"
"time"
)
var (
// Flags
debug = flag.Bool("d", false, "Print commands instead of running them.")
parallel = flag.Int("parallel", runtime.NumCPU(), "Number of commands to run in parallel.")
copyAs = flag.String("release", "", "Make copies of the releases with this name")
gitLog = flag.String("git-log", "", "git log to include as well")
include = flag.String("include", "^.*$", "os/arch regexp to include")
exclude = flag.String("exclude", "^$", "os/arch regexp to exclude")
cgo = flag.Bool("cgo", false, "Use cgo for the build")
noClean = flag.Bool("no-clean", false, "Don't clean the build directory before running.")
tags = flag.String("tags", "", "Space separated list of build tags")
compileOnly = flag.Bool("compile-only", false, "Just build the binary, not the zip.")
)
// GOOS/GOARCH pairs we build for
var osarches = []string{
"windows/386",
"windows/amd64",
"darwin/386",
"darwin/amd64",
"linux/386",
"linux/amd64",
"linux/arm",
"linux/arm64",
"linux/mips",
"linux/mipsle",
"freebsd/386",
"freebsd/amd64",
"freebsd/arm",
"netbsd/386",
"netbsd/amd64",
"netbsd/arm",
"openbsd/386",
"openbsd/amd64",
"plan9/386",
"plan9/amd64",
"solaris/amd64",
}
// Special environment flags for a given arch
var archFlags = map[string][]string{
"386": {"GO386=387"},
}
// runEnv - run a shell command with env
func runEnv(args, env []string) error {
if *debug {
args = append([]string{"echo"}, args...)
}
cmd := exec.Command(args[0], args[1:]...)
if env != nil {
cmd.Env = append(os.Environ(), env...)
}
if *debug {
log.Printf("args = %v, env = %v\n", args, cmd.Env)
}
out, err := cmd.CombinedOutput()
if err != nil {
log.Print("----------------------------")
log.Printf("Failed to run %v: %v", args, err)
log.Printf("Command output was:\n%s", out)
log.Print("----------------------------")
}
return err
}
// run a shell command
func run(args ...string) {
err := runEnv(args, nil)
if err != nil {
log.Fatalf("Exiting after error: %v", err)
}
}
// chdir or die
func chdir(dir string) {
err := os.Chdir(dir)
if err != nil {
log.Fatalf("Couldn't cd into %q: %v", dir, err)
}
}
// substitute data into the go template inFile, writing the result to outFile
func substitute(inFile, outFile string, data interface{}) {
t, err := template.ParseFiles(inFile)
if err != nil {
log.Fatalf("Failed to read template file %q: %v %v", inFile, err)
}
out, err := os.Create(outFile)
if err != nil {
log.Fatalf("Failed to create output file %q: %v %v", outFile, err)
}
defer func() {
err := out.Close()
if err != nil {
log.Fatalf("Failed to close output file %q: %v %v", outFile, err)
}
}()
err = t.Execute(out, data)
if err != nil {
log.Fatalf("Failed to substitute template file %q: %v %v", inFile, err)
}
}
// build the zip package and return its name
func buildZip(dir string) string {
// Now build the zip
run("cp", "-a", "../MANUAL.txt", filepath.Join(dir, "README.txt"))
run("cp", "-a", "../MANUAL.html", filepath.Join(dir, "README.html"))
run("cp", "-a", "../rclone.1", dir)
if *gitLog != "" {
run("cp", "-a", *gitLog, dir)
}
zip := dir + ".zip"
run("zip", "-r9", zip, dir)
return zip
}
// Build .deb and .rpm packages
//
// It returns a list of artifacts it has made
func buildDebAndRpm(dir, version, goarch string) []string {
// Make internal version number acceptable to .deb and .rpm
pkgVersion := version[1:]
pkgVersion = strings.Replace(pkgVersion, "β", "-beta", -1)
pkgVersion = strings.Replace(pkgVersion, "-", ".", -1)
// Make nfpm.yaml from the template
substitute("../bin/nfpm.yaml", path.Join(dir, "nfpm.yaml"), map[string]string{
"Version": pkgVersion,
"Arch": goarch,
})
// build them
var artifacts []string
for _, pkg := range []string{".deb", ".rpm"} {
artifact := dir + pkg
run("bash", "-c", "cd "+dir+" && nfpm -f nfpm.yaml pkg -t ../"+artifact)
artifacts = append(artifacts, artifact)
}
return artifacts
}
// build the binary in dir returning success or failure
func compileArch(version, goos, goarch, dir string) bool {
log.Printf("Compiling %s/%s", goos, goarch)
output := filepath.Join(dir, "rclone")
if goos == "windows" {
output += ".exe"
}
err := os.MkdirAll(dir, 0777)
if err != nil {
log.Fatalf("Failed to mkdir: %v", err)
}
args := []string{
"go", "build",
"--ldflags", "-s -X github.com/ncw/rclone/fs.Version=" + version,
"-i",
"-o", output,
"-tags", *tags,
"..",
}
env := []string{
"GOOS=" + goos,
"GOARCH=" + goarch,
}
if !*cgo {
env = append(env, "CGO_ENABLED=0")
} else {
env = append(env, "CGO_ENABLED=1")
}
if flags, ok := archFlags[goarch]; ok {
env = append(env, flags...)
}
err = runEnv(args, env)
if err != nil {
log.Printf("Error compiling %s/%s: %v", goos, goarch, err)
return false
}
if !*compileOnly {
artifacts := []string{buildZip(dir)}
// build a .deb and .rpm if appropriate
if goos == "linux" {
artifacts = append(artifacts, buildDebAndRpm(dir, version, goarch)...)
}
if *copyAs != "" {
for _, artifact := range artifacts {
run("ln", artifact, strings.Replace(artifact, "-"+version, "-"+*copyAs, 1))
}
}
// tidy up
run("rm", "-rf", dir)
}
log.Printf("Done compiling %s/%s", goos, goarch)
return true
}
func compile(version string) {
start := time.Now()
wg := new(sync.WaitGroup)
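// Feed the per-arch build closures through a buffered channel to a fixed pool of *parallel worker goroutines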
run := make(chan func(), *parallel)
for i := 0; i < *parallel; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for f := range run {
f()
}
}()
}
includeRe, err := regexp.Compile(*include)
if err != nil {
log.Fatalf("Bad -include regexp: %v", err)
}
excludeRe, err := regexp.Compile(*exclude)
if err != nil {
log.Fatalf("Bad -exclude regexp: %v", err)
}
compiled := 0
var failuresMu sync.Mutex
var failures []string
for _, osarch := range osarches {
if excludeRe.MatchString(osarch) || !includeRe.MatchString(osarch) {
continue
}
parts := strings.Split(osarch, "/")
if len(parts) != 2 {
log.Fatalf("Bad osarch %q", osarch)
}
goos, goarch := parts[0], parts[1]
userGoos := goos
if goos == "darwin" {
userGoos = "osx"
}
dir := filepath.Join("rclone-" + version + "-" + userGoos + "-" + goarch)
run <- func() {
if !compileArch(version, goos, goarch, dir) {
failuresMu.Lock()
failures = append(failures, goos+"/"+goarch)
failuresMu.Unlock()
}
}
compiled++
}
close(run)
wg.Wait()
log.Printf("Compiled %d arches in %v", compiled, time.Since(start))
if len(failures) > 0 {
sort.Strings(failures)
log.Printf("%d compile failures:\n %s\n", len(failures), strings.Join(failures, "\n "))
os.Exit(1)
}
}
func main() {
flag.Parse()
args := flag.Args()
if len(args) != 1 {
log.Fatalf("Syntax: %s <version>", os.Args[0])
}
version := args[0]
if !*noClean {
run("rm", "-rf", "build")
run("mkdir", "build")
}
chdir("build")
err := ioutil.WriteFile("version.txt", []byte(fmt.Sprintf("rclone %s\n", version)), 0666)
if err != nil {
log.Fatalf("Couldn't write version.txt: %v", err)
}
compile(version)
}
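For context, a hedged sketch of how this script might be invoked from the repository root (the tag and flag values are illustrative, and without -compile-only it expects MANUAL.txt, MANUAL.html and rclone.1 to exist already):

# Build only the linux targets for an example tag, four at a time, with an extra build tag:
go run bin/cross-compile.go -include 'linux/.*' -parallel 4 -tags cmount v1.46
# Dry run: print the commands instead of executing them:
go run bin/cross-compile.go -d v1.46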


@@ -0,0 +1,59 @@
#!/usr/bin/env python
"""
This is a tool to decrypt file names in rclone logs.
Pass two files in, the first should be a crypt mapping generated by
rclone ls --crypt-show-mapping remote:path
The second should be a log file that you want the paths decrypted in.
Note that if the crypt mappings file is large it can take some time to
run.
"""
import re
import sys
# Crypt line
match_crypt = re.compile(r'NOTICE: (.*?): Encrypts to "(.*?)"$')
def read_crypt_map(mapping_file):
    """
    Read the crypt mapping file in, creating a dictionary of substitutions
    """
    mapping = {}
    with open(mapping_file) as fd:
        for line in fd:
            match = match_crypt.search(line)
            if match:
                plaintext, ciphertext = match.groups()
                plaintexts = plaintext.split("/")
                ciphertexts = ciphertext.split("/")
                for plain, cipher in zip(plaintexts, ciphertexts):
                    mapping[cipher] = plain
    return mapping
def map_log_file(crypt_map, log_file):
    """
    Substitute the crypt_map in the log file.
    This uses a straightforward O(N**2) algorithm. I tried using
    regexps to speed it up but it made it slower!
    """
    with open(log_file) as fd:
        for line in fd:
            for cipher, plain in crypt_map.iteritems():
                line = line.replace(cipher, plain)
            sys.stdout.write(line)
def main():
    if len(sys.argv) < 3:
        print "Syntax: %s <crypt-mapping-file> <log-file>" % sys.argv[0]
        raise SystemExit(1)
    mapping_file, log_file = sys.argv[1:]
    crypt_map = read_crypt_map(mapping_file)
    map_log_file(crypt_map, log_file)
if __name__ == "__main__":
    main()
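A hedged sketch of the workflow the docstring describes (the script and remote names are placeholders, since this diff does not show the file's path):

# Capture the mappings; rclone emits them as NOTICE lines, which go to stderr by default:
rclone ls --crypt-show-mapping secret: 2> mappings.txt
# Decrypt the names in an existing log, writing the result to stdout:
python decrypt_names.py mappings.txt rclone-debug.log > decrypted.log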


@@ -0,0 +1,264 @@
// +build ignore
// Get the latest release from a github project
//
// If GITHUB_USER and GITHUB_TOKEN are set then these will be used to
// authenticate the request which is useful to avoid rate limits.
package main
import (
"encoding/json"
"flag"
"fmt"
"io"
"io/ioutil"
"log"
"net/http"
"os"
"os/exec"
"path/filepath"
"regexp"
"strings"
"time"
"golang.org/x/sys/unix"
)
var (
// Flags
install = flag.Bool("install", false, "Install the downloaded package using sudo dpkg -i.")
extract = flag.String("extract", "", "Extract the named executable from the .tar.gz and install into bindir.")
bindir = flag.String("bindir", defaultBinDir(), "Directory to install files downloaded with -extract.")
// Globals
matchProject = regexp.MustCompile(`^(\w+)/(\w+)$`)
)
// A github release
//
// Made by pasting the JSON into https://mholt.github.io/json-to-go/
type Release struct {
URL string `json:"url"`
AssetsURL string `json:"assets_url"`
UploadURL string `json:"upload_url"`
HTMLURL string `json:"html_url"`
ID int `json:"id"`
TagName string `json:"tag_name"`
TargetCommitish string `json:"target_commitish"`
Name string `json:"name"`
Draft bool `json:"draft"`
Author struct {
Login string `json:"login"`
ID int `json:"id"`
AvatarURL string `json:"avatar_url"`
GravatarID string `json:"gravatar_id"`
URL string `json:"url"`
HTMLURL string `json:"html_url"`
FollowersURL string `json:"followers_url"`
FollowingURL string `json:"following_url"`
GistsURL string `json:"gists_url"`
StarredURL string `json:"starred_url"`
SubscriptionsURL string `json:"subscriptions_url"`
OrganizationsURL string `json:"organizations_url"`
ReposURL string `json:"repos_url"`
EventsURL string `json:"events_url"`
ReceivedEventsURL string `json:"received_events_url"`
Type string `json:"type"`
SiteAdmin bool `json:"site_admin"`
} `json:"author"`
Prerelease bool `json:"prerelease"`
CreatedAt time.Time `json:"created_at"`
PublishedAt time.Time `json:"published_at"`
Assets []struct {
URL string `json:"url"`
ID int `json:"id"`
Name string `json:"name"`
Label string `json:"label"`
Uploader struct {
Login string `json:"login"`
ID int `json:"id"`
AvatarURL string `json:"avatar_url"`
GravatarID string `json:"gravatar_id"`
URL string `json:"url"`
HTMLURL string `json:"html_url"`
FollowersURL string `json:"followers_url"`
FollowingURL string `json:"following_url"`
GistsURL string `json:"gists_url"`
StarredURL string `json:"starred_url"`
SubscriptionsURL string `json:"subscriptions_url"`
OrganizationsURL string `json:"organizations_url"`
ReposURL string `json:"repos_url"`
EventsURL string `json:"events_url"`
ReceivedEventsURL string `json:"received_events_url"`
Type string `json:"type"`
SiteAdmin bool `json:"site_admin"`
} `json:"uploader"`
ContentType string `json:"content_type"`
State string `json:"state"`
Size int `json:"size"`
DownloadCount int `json:"download_count"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
BrowserDownloadURL string `json:"browser_download_url"`
} `json:"assets"`
TarballURL string `json:"tarball_url"`
ZipballURL string `json:"zipball_url"`
Body string `json:"body"`
}
// checks if a path has write access
func writable(path string) bool {
return unix.Access(path, unix.W_OK) == nil
}
// Directory to install releases in by default
//
// Find writable directories on $PATH. Use the first writable
// directory which is in $HOME or failing that the first writable
// directory.
//
// Returns "" if none of the above were found
func defaultBinDir() string {
home := os.Getenv("HOME")
var binDir string
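// binDir remembers the first writable directory seen, as a fallback if none of them is under $HOME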
for _, dir := range strings.Split(os.Getenv("PATH"), ":") {
if writable(dir) {
if strings.HasPrefix(dir, home) {
return dir
}
if binDir != "" {
binDir = dir
}
}
}
return binDir
}
// read the body or an error message
func readBody(in io.Reader) string {
data, err := ioutil.ReadAll(in)
if err != nil {
return fmt.Sprintf("Error reading body: %v", err.Error())
}
return string(data)
}
// Get an asset URL and name
func getAsset(project string, matchName *regexp.Regexp) (string, string) {
url := "https://api.github.com/repos/" + project + "/releases/latest"
log.Printf("Fetching asset info for %q from %q", project, url)
user, pass := os.Getenv("GITHUB_USER"), os.Getenv("GITHUB_TOKEN")
req, err := http.NewRequest("GET", url, nil)
if err != nil {
log.Fatalf("Failed to make http request %q: %v", url, err)
}
if user != "" && pass != "" {
log.Printf("Fetching using GITHUB_USER and GITHUB_TOKEN")
req.SetBasicAuth(user, pass)
}
resp, err := http.DefaultClient.Do(req)
if err != nil {
log.Fatalf("Failed to fetch release info %q: %v", url, err)
}
if resp.StatusCode != http.StatusOK {
log.Printf("Error: %s", readBody(resp.Body))
log.Fatalf("Bad status %d when fetching %q release info: %s", resp.StatusCode, url, resp.Status)
}
var release Release
err = json.NewDecoder(resp.Body).Decode(&release)
if err != nil {
log.Fatalf("Failed to decode release info: %v", err)
}
err = resp.Body.Close()
if err != nil {
log.Fatalf("Failed to close body: %v", err)
}
for _, asset := range release.Assets {
if matchName.MatchString(asset.Name) {
return asset.BrowserDownloadURL, asset.Name
}
}
log.Fatalf("Didn't find asset in info")
return "", ""
}
// get a file for download
func getFile(url, fileName string) {
log.Printf("Downloading %q from %q", fileName, url)
out, err := os.Create(fileName)
if err != nil {
log.Fatalf("Failed to open %q: %v", fileName, err)
}
resp, err := http.Get(url)
if err != nil {
log.Fatalf("Failed to fetch asset %q: %v", url, err)
}
if resp.StatusCode != http.StatusOK {
log.Printf("Error: %s", readBody(resp.Body))
log.Fatalf("Bad status %d when fetching %q asset: %s", resp.StatusCode, url, resp.Status)
}
n, err := io.Copy(out, resp.Body)
if err != nil {
log.Fatalf("Error while downloading: %v", err)
}
err = resp.Body.Close()
if err != nil {
log.Fatalf("Failed to close body: %v", err)
}
err = out.Close()
if err != nil {
log.Fatalf("Failed to close output file: %v", err)
}
log.Printf("Downloaded %q (%d bytes)", fileName, n)
}
// run a shell command
func run(args ...string) {
cmd := exec.Command(args[0], args[1:]...)
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
err := cmd.Run()
if err != nil {
log.Fatalf("Failed to run %v: %v", args, err)
}
}
func main() {
flag.Parse()
args := flag.Args()
if len(args) != 2 {
log.Fatalf("Syntax: %s <user/project> <name reg exp>", os.Args[0])
}
project, nameRe := args[0], args[1]
if !matchProject.MatchString(project) {
log.Fatalf("Project %q must be in form user/project", project)
}
matchName, err := regexp.Compile(nameRe)
if err != nil {
log.Fatalf("Invalid regexp for name %q: %v", nameRe, err)
}
assetURL, assetName := getAsset(project, matchName)
fileName := filepath.Join(os.TempDir(), assetName)
getFile(assetURL, fileName)
if *install {
log.Printf("Installing %s", fileName)
run("sudo", "dpkg", "--force-bad-version", "-i", fileName)
log.Printf("Installed %s", fileName)
} else if *extract != "" {
if *bindir == "" {
log.Fatalf("Need to set -bindir")
}
log.Printf("Unpacking %s from %s and installing into %s", *extract, fileName, *bindir)
run("tar", "xf", fileName, *extract)
run("chmod", "a+x", *extract)
run("mv", "-f", *extract, *bindir+"/")
}
}
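A hedged usage sketch (the file name is assumed from its bin/ siblings; project, asset regexp and credentials are illustrative):

# Download and install the latest .deb of a project with dpkg:
GITHUB_USER=example GITHUB_TOKEN=xxxx go run bin/get-github-release.go -install example/project 'project_.*_amd64\.deb'
# Or extract a single binary from a release tarball into the first writable directory on $PATH:
go run bin/get-github-release.go -extract nfpm goreleaser/nfpm 'nfpm_.*\.tar\.gz'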


@@ -0,0 +1,173 @@
#!/usr/bin/python
"""
Generate a markdown changelog for the rclone project
"""
import os
import sys
import re
import datetime
import subprocess
from collections import defaultdict
IGNORE_RES = [
    r"^Add .* to contributors$",
    r"^Start v\d+.\d+-DEV development$",
    r"^Version v\d.\d+$",
]
IGNORE_RE = re.compile("(?:" + "|".join(IGNORE_RES) + ")")
CATEGORY = re.compile(r"(^[\w/ ]+(?:, *[\w/ ]+)*):\s*(.*)$")
backends = [ x for x in os.listdir("backend") if x != "all"]
backend_aliases = {
    "amazon cloud drive" : "amazonclouddrive",
    "acd" : "amazonclouddrive",
    "google cloud storage" : "googlecloudstorage",
    "gcs" : "googlecloudstorage",
    "azblob" : "azureblob",
    "mountlib": "mount",
    "cmount": "mount",
    "mount/cmount": "mount",
}
backend_titles = {
    "amazonclouddrive": "Amazon Cloud Drive",
    "googlecloudstorage": "Google Cloud Storage",
    "azureblob": "Azure Blob",
    "ftp": "FTP",
    "sftp": "SFTP",
    "http": "HTTP",
    "webdav": "WebDAV",
}
STRIP_FIX_RE = re.compile(r"(\s+-)?\s+((fixes|addresses)\s+)?#\d+", flags=re.I)
STRIP_PATH_RE = re.compile(r"^(backend|fs)/")
IS_FIX_RE = re.compile(r"\b(fix|fixes)\b", flags=re.I)
def make_out(data, indent=""):
"""Return a out, lines the first being a function for output into the second"""
out_lines = []
def out(category, title=None):
if title == None:
title = category
lines = data.get(category)
if not lines:
return
del(data[category])
if indent != "" and len(lines) == 1:
out_lines.append(indent+"* " + title+": " + lines[0])
return
out_lines.append(indent+"* " + title)
for line in lines:
out_lines.append(indent+" * " + line)
return out, out_lines
def process_log(log):
    """Process the incoming log into a category dict of lists"""
    by_category = defaultdict(list)
    for log_line in reversed(log.split("\n")):
        log_line = log_line.strip()
        hash, author, timestamp, message = log_line.split("|", 3)
        message = message.strip()
        if IGNORE_RE.search(message):
            continue
        match = CATEGORY.search(message)
        categories = "UNKNOWN"
        if match:
            categories = match.group(1).lower()
            message = match.group(2)
        message = STRIP_FIX_RE.sub("", message)
        message = message +" ("+author+")"
        message = message[0].upper()+message[1:]
        seen = set()
        for category in categories.split(","):
            category = category.strip()
            category = STRIP_PATH_RE.sub("", category)
            category = backend_aliases.get(category, category)
            if category in seen:
                continue
            by_category[category].append(message)
            seen.add(category)
        #print category, hash, author, timestamp, message
    return by_category
def main():
    if len(sys.argv) != 3:
        print >>sys.stderr, "Syntax: %s vX.XX vX.XY" % sys.argv[0]
        sys.exit(1)
    version, next_version = sys.argv[1], sys.argv[2]
    log = subprocess.check_output(["git", "log", '''--pretty=format:%H|%an|%aI|%s'''] + [version+".."+next_version])
    by_category = process_log(log)
    # Output backends first so remaining in by_category are core items
    out, backend_lines = make_out(by_category)
    out("mount", title="Mount")
    out("vfs", title="VFS")
    out("local", title="Local")
    out("cache", title="Cache")
    out("crypt", title="Crypt")
    backend_names = sorted(x for x in by_category.keys() if x in backends)
    for backend_name in backend_names:
        if backend_name in backend_titles:
            backend_title = backend_titles[backend_name]
        else:
            backend_title = backend_name.title()
        out(backend_name, title=backend_title)
    # Split remaining in by_category into new features and fixes
    new_features = defaultdict(list)
    bugfixes = defaultdict(list)
    for name, messages in by_category.iteritems():
        for message in messages:
            if IS_FIX_RE.search(message):
                bugfixes[name].append(message)
            else:
                new_features[name].append(message)
    # Output new features
    out, new_features_lines = make_out(new_features, indent=" ")
    for name in sorted(new_features.keys()):
        out(name)
    # Output bugfixes
    out, bugfix_lines = make_out(bugfixes, indent=" ")
    for name in sorted(bugfixes.keys()):
        out(name)
    # Read old changelog and split
    with open("docs/content/changelog.md") as fd:
        old_changelog = fd.read()
    heading = "# Changelog"
    i = old_changelog.find(heading)
    if i < 0:
        raise AssertionError("Couldn't find heading in old changelog")
    i += len(heading)
    old_head, old_tail = old_changelog[:i], old_changelog[i:]
    # Update the build date
    old_head = re.sub(r"\d\d\d\d-\d\d-\d\d", str(datetime.date.today()), old_head)
    # Output combined changelog with new part
    sys.stdout.write(old_head)
    sys.stdout.write("""
## %s - %s
* New backends
* New commands
* New Features
%s
* Bug Fixes
%s
%s""" % (version, datetime.date.today(), "\n".join(new_features_lines), "\n".join(bugfix_lines), "\n".join(backend_lines)))
    sys.stdout.write(old_tail)
if __name__ == "__main__":
    main()
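A hedged example of running this generator from the repository root, where backend/ and docs/content/changelog.md can be found (the script path and tags are assumptions, since the diff does not show this file's name):

# Write the combined changelog to stdout and capture it for review:
python bin/make_changelog.py v1.45 v1.46 > /tmp/changelog.md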

.rclone_repo/bin/make_manual.py (Executable file, 154 lines)

@@ -0,0 +1,154 @@
#!/usr/bin/env python
"""
Make single page versions of the documentation for release and
conversion into man pages etc.
"""
import os
import re
from datetime import datetime
docpath = "docs/content"
outfile = "MANUAL.md"
# Order to add docs segments to make outfile
docs = [
    "about.md",
    "install.md",
    "docs.md",
    "remote_setup.md",
    "filtering.md",
    "rc.md",
    "overview.md",
    # Keep these alphabetical by full name
    "alias.md",
    "amazonclouddrive.md",
    "s3.md",
    "b2.md",
    "box.md",
    "cache.md",
    "crypt.md",
    "dropbox.md",
    "ftp.md",
    "googlecloudstorage.md",
    "drive.md",
    "http.md",
    "hubic.md",
    "jottacloud.md",
    "mega.md",
    "azureblob.md",
    "onedrive.md",
    "opendrive.md",
    "qingstor.md",
    "swift.md",
    "pcloud.md",
    "sftp.md",
    "webdav.md",
    "yandex.md",
    "local.md",
    "changelog.md",
    "bugs.md",
    "faq.md",
    "licence.md",
    "authors.md",
    "contact.md",
]
# Order to put the commands in - any not on here will be in sorted order
commands_order = [
    "rclone_config.md",
    "rclone_copy.md",
    "rclone_sync.md",
    "rclone_move.md",
    "rclone_delete.md",
    "rclone_purge.md",
    "rclone_mkdir.md",
    "rclone_rmdir.md",
    "rclone_check.md",
    "rclone_ls.md",
    "rclone_lsd.md",
    "rclone_lsl.md",
    "rclone_md5sum.md",
    "rclone_sha1sum.md",
    "rclone_size.md",
    "rclone_version.md",
    "rclone_cleanup.md",
    "rclone_dedupe.md",
]
# Docs which aren't made into outfile
ignore_docs = [
    "downloads.md",
    "privacy.md",
    "donate.md",
]
def read_doc(doc):
    """Read file as a string"""
    path = os.path.join(docpath, doc)
    with open(path) as fd:
        contents = fd.read()
    parts = contents.split("---\n", 2)
    if len(parts) != 3:
        raise ValueError("Couldn't find --- markers: found %d parts" % len(parts))
    contents = parts[2].strip()+"\n\n"
    # Remove icons
    contents = re.sub(r'<i class="fa.*?</i>\s*', "", contents)
    # Make [...](/links/) absolute
    contents = re.sub(r'\((\/.*?\/)\)', r"(https://rclone.org\1)", contents)
    # Interpret provider shortcode
    # {{< provider name="Amazon S3" home="https://aws.amazon.com/s3/" config="/s3/" >}}
    contents = re.sub(r'\{\{<\s+provider.*?name="(.*?)".*?>\}\}', r"\1", contents)
    return contents
def check_docs(docpath):
    """Check all the docs are in docpath"""
    files = set(f for f in os.listdir(docpath) if f.endswith(".md"))
    files -= set(ignore_docs)
    docs_set = set(docs)
    if files == docs_set:
        return
    print "Files on disk but not in docs variable: %s" % ", ".join(files - docs_set)
    print "Files in docs variable but not on disk: %s" % ", ".join(docs_set - files)
    raise ValueError("Missing files")
def read_command(command):
    doc = read_doc("commands/"+command)
    doc = re.sub(r"### Options inherited from parent commands.*$", "", doc, 0, re.S)
    doc = doc.strip()+"\n"
    return doc
def read_commands(docpath):
    """Reads the commands and makes them into a single page"""
    files = set(f for f in os.listdir(docpath + "/commands") if f.endswith(".md"))
    docs = []
    for command in commands_order:
        docs.append(read_command(command))
        files.remove(command)
    for command in sorted(files):
        if command != "rclone.md":
            docs.append(read_command(command))
    return "\n".join(docs)
def main():
    check_docs(docpath)
    command_docs = read_commands(docpath)
    with open(outfile, "w") as out:
        out.write("""\
%% rclone(1) User Manual
%% Nick Craig-Wood
%% %s
""" % datetime.now().strftime("%b %d, %Y"))
        for doc in docs:
            contents = read_doc(doc)
            # Substitute the commands into doc.md
            if doc == "docs.md":
                contents = re.sub(r"The main rclone commands.*?for the full list.", command_docs, contents, 0, re.S)
            out.write(contents)
    print "Written '%s'" % outfile
if __name__ == "__main__":
    main()
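The script takes no arguments and is meant to run from the repository root, where it writes MANUAL.md (presumably converted elsewhere into the MANUAL.txt, MANUAL.html and rclone.1 files that cross-compile.go packages):

./bin/make_manual.py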


@@ -0,0 +1,21 @@
#!/bin/bash
# Insert the rc docs into docs/content/rc.md
set -e
go install
mkdir -p /tmp/rclone_cache_test
export RCLONE_CONFIG_RCDOCS_TYPE=cache
export RCLONE_CONFIG_RCDOCS_REMOTE=/tmp/rclone/cache_test
rclone -q --rc mount rcdocs: /mnt/tmp/ &
sleep 0.5
rclone rc > /tmp/z.md
fusermount -z -u /mnt/tmp/
awk '
BEGIN {p=1}
/^<!--- autogenerated start/ {print;system("cat /tmp/z.md");p=0}
/^<!--- autogenerated stop/ {p=1}
p' docs/content/rc.md > /tmp/rc.md
mv /tmp/rc.md docs/content/rc.md


@@ -0,0 +1,146 @@
// +build ignore
// Build a directory structure with the required number of files in
//
// Run with go run make_test_files.go [flag] <directory>
package main
import (
cryptrand "crypto/rand"
"flag"
"io"
"log"
"math/rand"
"os"
"path/filepath"
)
var (
// Flags
numberOfFiles = flag.Int("n", 1000, "Number of files to create")
averageFilesPerDirectory = flag.Int("files-per-directory", 10, "Average number of files per directory")
maxDepth = flag.Int("max-depth", 10, "Maximum depth of directory hierarchy")
minFileSize = flag.Int64("min-size", 0, "Minimum size of file to create")
maxFileSize = flag.Int64("max-size", 100, "Maximum size of files to create")
minFileNameLength = flag.Int("min-name-length", 4, "Minimum length of file names to create")
maxFileNameLength = flag.Int("max-name-length", 12, "Maximum length of file names to create")
directoriesToCreate int
totalDirectories int
fileNames = map[string]struct{}{} // keep a note of which file name we've used already
)
// randomString creates a random string for test purposes
func randomString(n int) string {
const (
vowel = "aeiou"
consonant = "bcdfghjklmnpqrstvwxyz"
digit = "0123456789"
)
pattern := []string{consonant, vowel, consonant, vowel, consonant, vowel, consonant, digit}
out := make([]byte, n)
p := 0
for i := range out {
source := pattern[p]
p = (p + 1) % len(pattern)
out[i] = source[rand.Intn(len(source))]
}
return string(out)
}
// fileName creates a unique random file or directory name
func fileName() (name string) {
for {
length := rand.Intn(*maxFileNameLength-*minFileNameLength) + *minFileNameLength
name = randomString(length)
if _, found := fileNames[name]; !found {
break
}
}
fileNames[name] = struct{}{}
return name
}
// dir is a directory in the directory hierarchy being built up
type dir struct {
name string
depth int
children []*dir
parent *dir
}
// Create a random directory hierarchy under d
func (d *dir) createDirectories() {
for totalDirectories < directoriesToCreate {
newDir := &dir{
name: fileName(),
depth: d.depth + 1,
parent: d,
}
d.children = append(d.children, newDir)
totalDirectories++
switch rand.Intn(4) {
case 0:
if d.depth < *maxDepth {
newDir.createDirectories()
}
case 1:
return
}
}
return
}
// list the directory hierarchy
func (d *dir) list(path string, output []string) []string {
dirPath := filepath.Join(path, d.name)
output = append(output, dirPath)
for _, subDir := range d.children {
output = subDir.list(dirPath, output)
}
return output
}
// writeFile writes a random file at dir/name
func writeFile(dir, name string) {
err := os.MkdirAll(dir, 0777)
if err != nil {
log.Fatalf("Failed to make directory %q: %v", dir, err)
}
path := filepath.Join(dir, name)
fd, err := os.Create(path)
if err != nil {
log.Fatalf("Failed to open file %q: %v", path, err)
}
size := rand.Int63n(*maxFileSize-*minFileSize) + *minFileSize
_, err = io.CopyN(fd, cryptrand.Reader, size)
if err != nil {
log.Fatalf("Failed to write %v bytes to file %q: %v", size, path, err)
}
err = fd.Close()
if err != nil {
log.Fatalf("Failed to close file %q: %v", path, err)
}
}
func main() {
flag.Parse()
args := flag.Args()
if len(args) != 1 {
log.Fatalf("Require 1 directory argument")
}
outputDirectory := args[0]
log.Printf("Output dir %q", outputDirectory)
directoriesToCreate = *numberOfFiles / *averageFilesPerDirectory
log.Printf("directoriesToCreate %v", directoriesToCreate)
root := &dir{name: outputDirectory, depth: 1}
for totalDirectories < directoriesToCreate {
root.createDirectories()
}
dirs := root.list("", []string{})
for i := 0; i < *numberOfFiles; i++ {
dir := dirs[rand.Intn(len(dirs))]
writeFile(dir, fileName())
}
}
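A hedged invocation sketch (directory and counts are illustrative; with the default of 10 files per directory this spreads the files over roughly 1000 directories):

go run make_test_files.go -n 10000 -max-size 1024 /tmp/test-tree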

.rclone_repo/bin/nfpm.yaml (Executable file, 23 lines)

@@ -0,0 +1,23 @@
name: "rclone"
arch: "{{.Arch}}"
platform: "linux"
version: "{{.Version}}"
section: "default"
priority: "extra"
provides:
  - rclone
maintainer: "Nick Craig-Wood <nick@craig-wood.com>"
description: |
  Rclone - "rsync for cloud storage"
  is a command line program to sync files and directories to and
  from most cloud providers. It can also mount, tree, ncdu and lots
  of other useful things.
vendor: "rclone"
homepage: "https://rclone.org"
license: "MIT"
bindir: "/usr/bin"
files:
  ./rclone: "/usr/bin/rclone"
  ./README.html: "/usr/share/doc/rclone/README.html"
  ./README.txt: "/usr/share/doc/rclone/README.txt"
  ./rclone.1: "/usr/share/man/man1/rclone.1"
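This file is a Go text/template: substitute() in cross-compile.go fills in {{.Version}} and {{.Arch}} and writes the result into each build directory. A hedged sketch of the packaging step buildDebAndRpm then effectively runs (directory and version are illustrative):

cd build/rclone-v1.46-linux-amd64 && nfpm -f nfpm.yaml pkg -t ../rclone-v1.46-linux-amd64.deb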


@@ -0,0 +1,4 @@
# Encrypted rclone configuration File
RCLONE_ENCRYPT_V0:
XIkAr3p+y+zai82cHFH8UoW1y1XTe6dpTzo/g4uSwqI2pfsnSSJ4JbAsRZ9nGVpx3NzROKEewlusVHNokiA4/nD4NbT+2DJrpMLg/OtLREICfuRk3tVWPKLGsmA+TLKU+IfQMO4LfrrCe2DF/lW0qA5Xu16E0Vn++jNhbwW2oB+JTkaGka8Ae3CyisM/3NUGnCOG/yb5wLH7ybUstNYPHsNFCiU1brFXQ4DNIbUFMmca+5S44vrOWvhp9QijQXlG7/JjwrkqbB/LK2gMJPTuhY2OW+4tRw1IoCXbWmwJXv5xmhPqanW92A==


@@ -0,0 +1,46 @@
#!/usr/bin/env python
"""
Update the authors.md file with the authors from the git log
"""
import re
import subprocess
AUTHORS = "docs/content/authors.md"
IGNORE = [ "nick@raig-wood.com" ]
def load():
    """
    returns a set of emails already in authors.md
    """
    with open(AUTHORS) as fd:
        authors = fd.read()
    emails = set(re.findall(r"<(.*?)>", authors))
    emails.update(IGNORE)
    return emails
def add_email(name, email):
    """
    adds the email passed in to the end of authors.md
    """
    print "Adding %s <%s>" % (name, email)
    with open(AUTHORS, "a+") as fd:
        print >>fd, " * %s <%s>" % (name, email)
    subprocess.check_call(["git", "commit", "-m", "Add %s to contributors" % name, AUTHORS])
def main():
    out = subprocess.check_output(["git", "log", '--reverse', '--format=%an|%ae', "master"])
    previous = load()
    for line in out.split("\n"):
        line = line.strip()
        if line == "":
            continue
        name, email = line.split("|")
        if email in previous:
            continue
        previous.add(email)
        add_email(name, email)
if __name__ == "__main__":
    main()
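A hedged usage note: the script takes no arguments; run from the repository root it scans git log on master and commits one "Add X to contributors" change per address not already in docs/content/authors.md (the path below is a guess, since the diff omits this file's name):

./bin/update-authors.py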

.rclone_repo/bin/upload-github (Executable file, 50 lines)

@@ -0,0 +1,50 @@
#!/usr/bin/env bash
#
# Upload a release
#
# Needs github-release from https://github.com/aktau/github-release
set -e
REPO="rclone"
if [ "$1" == "" ]; then
echo "Syntax: $0 Version"
exit 1
fi
VERSION="$1"
if [ "$GITHUB_USER" == "" ]; then
echo 1>&2 "Need GITHUB_USER environment variable"
exit 1
fi
if [ "$GITHUB_TOKEN" == "" ]; then
echo 1>&2 "Need GITHUB_TOKEN environment variable"
exit 1
fi
echo "Making release ${VERSION}"
github-release release \
--repo ${REPO} \
--tag ${VERSION} \
--name "rclone" \
--description "Rclone - rsync for cloud storage. Sync files to and from many cloud storage providers."
for build in `ls build | grep -v current`; do
echo "Uploading ${build}"
base="${build%.*}"
parts=(${base//-/ })
os=${parts[3]}
arch=${parts[4]}
github-release upload \
--repo ${REPO} \
--tag ${VERSION} \
--name "${build}" \
--file build/${build}
done
github-release info \
--repo ${REPO} \
--tag ${VERSION}
echo "Done"

.rclone_repo/bin/win-build.bat (Executable file, 5 lines)

@@ -0,0 +1,5 @@
@echo off
echo Setting environment variables for mingw+WinFsp compile
set GOPATH=X:\go
set PATH=C:\Program Files\mingw-w64\i686-7.1.0-win32-dwarf-rt_v5-rev0\mingw32\bin;%PATH%
set CPATH=C:\Program Files\WinFsp\inc\fuse;C:\Program Files (x86)\WinFsp\inc\fuse