Merge pull request #185 from mmcgrana/tools-standard-formatting

Allows tools to use Go-standard formatting
Authored by Mark McGranaghan on 2018-02-01 14:09:45 -08:00; committed by GitHub
commit 791db5d8c4
3 changed files with 239 additions and 239 deletions
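
"Go-standard formatting" here means the tools' sources are left exactly as gofmt emits them (tab indentation, standard alignment), so the tab-to-4-space display conversion is only needed for the example programs. As a rough illustration of what "gofmt-clean" means — not part of this commit, and with a hypothetical target path — a file can be checked by comparing it against go/format output:

	// Illustration only: report whether a Go source file is already in the
	// form gofmt would produce.
	package main

	import (
		"bytes"
		"fmt"
		"go/format"
		"io/ioutil"
	)

	func main() {
		path := "tools/generate.go" // hypothetical target
		src, err := ioutil.ReadFile(path)
		if err != nil {
			panic(err)
		}
		formatted, err := format.Source(src) // applies gofmt-style formatting
		if err != nil {
			panic(err)
		}
		if bytes.Equal(src, formatted) {
			fmt.Println("gofmt-clean:", path)
		} else {
			fmt.Println("needs gofmt:", path)
		}
	}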

File 1 of 3: shell formatting script

@@ -2,7 +2,7 @@
 set -eo pipefail
-paths=$(ls tools/*.go examples/*/*.go)
+paths=$(ls examples/*/*.go)
 gbe_to_4spaces() {
     local os=$(tr [A-Z] [a-z] <<< "`uname`")
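
The one functional change in the diff is here: tools/*.go is dropped from the path list, so the tab-to-4-space display conversion now runs only over the example programs, while the tools keep gofmt's tab indentation. The body of gbe_to_4spaces is not shown in this hunk; assuming it simply expands leading tabs (via sed, with the uname check selecting GNU vs BSD behavior), a minimal Go sketch of the same transformation looks like this:

	// Hypothetical sketch; the real script shells out to sed.
	package main

	import (
		"fmt"
		"strings"
	)

	// tabsToSpaces expands each leading tab to four spaces, which is what the
	// gbe_to_4spaces step is assumed to do for the example sources.
	func tabsToSpaces(src string) string {
		lines := strings.Split(src, "\n")
		for i, line := range lines {
			n := 0
			for n < len(line) && line[n] == '\t' {
				n++
			}
			lines[i] = strings.Repeat("    ", n) + line[n:]
		}
		return strings.Join(lines, "\n")
	}

	func main() {
		src := "func main() {\n\tfmt.Println(\"hi\")\n}\n"
		fmt.Print(tabsToSpaces(src))
	}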

File 2 of 3: Go site generator (whitespace-only reformat; shown once, as it reads after the change)

@@ -1,17 +1,17 @@
package main

import (
	"crypto/sha1"
	"fmt"
	"github.com/russross/blackfriday"
	"io/ioutil"
	"net/http"
	"os"
	"os/exec"
	"path/filepath"
	"regexp"
	"strings"
	"text/template"
)

var cacheDir = "/tmp/gobyexample-cache"
@@ -19,99 +19,99 @@ var siteDir = "./public"
var pygmentizeBin = "./vendor/pygments/pygmentize"

func check(err error) {
	if err != nil {
		panic(err)
	}
}

func ensureDir(dir string) {
	err := os.MkdirAll(dir, 0755)
	check(err)
}

func copyFile(src, dst string) {
	dat, err := ioutil.ReadFile(src)
	check(err)
	err = ioutil.WriteFile(dst, dat, 0644)
	check(err)
}

func pipe(bin string, arg []string, src string) []byte {
	cmd := exec.Command(bin, arg...)
	in, err := cmd.StdinPipe()
	check(err)
	out, err := cmd.StdoutPipe()
	check(err)
	err = cmd.Start()
	check(err)
	_, err = in.Write([]byte(src))
	check(err)
	err = in.Close()
	check(err)
	bytes, err := ioutil.ReadAll(out)
	check(err)
	err = cmd.Wait()
	check(err)
	return bytes
}

func sha1Sum(s string) string {
	h := sha1.New()
	h.Write([]byte(s))
	b := h.Sum(nil)
	return fmt.Sprintf("%x", b)
}

func mustReadFile(path string) string {
	bytes, err := ioutil.ReadFile(path)
	check(err)
	return string(bytes)
}

func cachedPygmentize(lex string, src string) string {
	ensureDir(cacheDir)
	arg := []string{"-l", lex, "-f", "html"}
	cachePath := cacheDir + "/pygmentize-" + strings.Join(arg, "-") + "-" + sha1Sum(src)
	cacheBytes, cacheErr := ioutil.ReadFile(cachePath)
	if cacheErr == nil {
		return string(cacheBytes)
	}
	renderBytes := pipe(pygmentizeBin, arg, src)
	// Newer versions of Pygments add silly empty spans.
	renderCleanString := strings.Replace(string(renderBytes), "<span></span>", "", -1)
	writeErr := ioutil.WriteFile(cachePath, []byte(renderCleanString), 0600)
	check(writeErr)
	return renderCleanString
}

func markdown(src string) string {
	return string(blackfriday.MarkdownCommon([]byte(src)))
}

func readLines(path string) []string {
	src := mustReadFile(path)
	return strings.Split(src, "\n")
}

func mustGlob(glob string) []string {
	paths, err := filepath.Glob(glob)
	check(err)
	return paths
}

func whichLexer(path string) string {
	if strings.HasSuffix(path, ".go") {
		return "go"
	} else if strings.HasSuffix(path, ".sh") {
		return "console"
	}
	panic("No lexer for " + path)
}

func debug(msg string) {
	if os.Getenv("DEBUG") == "1" {
		fmt.Fprintln(os.Stderr, msg)
	}
}

var docsPat = regexp.MustCompile("^\\s*(\\/\\/|#)\\s")
@@ -119,169 +119,169 @@ var dashPat = regexp.MustCompile("\\-+")

// Seg is a segment of an example
type Seg struct {
	Docs, DocsRendered              string
	Code, CodeRendered              string
	CodeEmpty, CodeLeading, CodeRun bool
}

// Example is info extracted from an example file
type Example struct {
	ID, Name                    string
	GoCode, GoCodeHash, URLHash string
	Segs                        [][]*Seg
	NextExample                 *Example
}

func parseHashFile(sourcePath string) (string, string) {
	lines := readLines(sourcePath)
	return lines[0], lines[1]
}

func resetURLHashFile(codehash, code, sourcePath string) string {
	payload := strings.NewReader(code)
	resp, err := http.Post("https://play.golang.org/share", "text/plain", payload)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	urlkey := string(body)
	data := fmt.Sprintf("%s\n%s\n", codehash, urlkey)
	ioutil.WriteFile(sourcePath, []byte(data), 0644)
	return urlkey
}

func parseSegs(sourcePath string) ([]*Seg, string) {
	lines := readLines(sourcePath)
	filecontent := strings.Join(lines, "\n")
	segs := []*Seg{}
	lastSeen := ""
	for _, line := range lines {
		if line == "" {
			lastSeen = ""
			continue
		}
		matchDocs := docsPat.MatchString(line)
		matchCode := !matchDocs
		newDocs := (lastSeen == "") || ((lastSeen != "docs") && (segs[len(segs)-1].Docs != ""))
		newCode := (lastSeen == "") || ((lastSeen != "code") && (segs[len(segs)-1].Code != ""))
		if newDocs || newCode {
			debug("NEWSEG")
		}
		if matchDocs {
			trimmed := docsPat.ReplaceAllString(line, "")
			if newDocs {
				newSeg := Seg{Docs: trimmed, Code: ""}
				segs = append(segs, &newSeg)
			} else {
				segs[len(segs)-1].Docs = segs[len(segs)-1].Docs + "\n" + trimmed
			}
			debug("DOCS: " + line)
			lastSeen = "docs"
		} else if matchCode {
			if newCode {
				newSeg := Seg{Docs: "", Code: line}
				segs = append(segs, &newSeg)
			} else {
				segs[len(segs)-1].Code = segs[len(segs)-1].Code + "\n" + line
			}
			debug("CODE: " + line)
			lastSeen = "code"
		}
	}
	for i, seg := range segs {
		seg.CodeEmpty = (seg.Code == "")
		seg.CodeLeading = (i < (len(segs) - 1))
		seg.CodeRun = strings.Contains(seg.Code, "package main")
	}
	return segs, filecontent
}

func parseAndRenderSegs(sourcePath string) ([]*Seg, string) {
	segs, filecontent := parseSegs(sourcePath)
	lexer := whichLexer(sourcePath)
	for _, seg := range segs {
		if seg.Docs != "" {
			seg.DocsRendered = markdown(seg.Docs)
		}
		if seg.Code != "" {
			seg.CodeRendered = cachedPygmentize(lexer, seg.Code)
		}
	}
	// we are only interested in the 'go' code to pass to play.golang.org
	if lexer != "go" {
		filecontent = ""
	}
	return segs, filecontent
}

func parseExamples() []*Example {
	exampleNames := readLines("examples.txt")
	examples := make([]*Example, 0)
	for _, exampleName := range exampleNames {
		if (exampleName != "") && !strings.HasPrefix(exampleName, "#") {
			example := Example{Name: exampleName}
			exampleID := strings.ToLower(exampleName)
			exampleID = strings.Replace(exampleID, " ", "-", -1)
			exampleID = strings.Replace(exampleID, "/", "-", -1)
			exampleID = strings.Replace(exampleID, "'", "", -1)
			exampleID = dashPat.ReplaceAllString(exampleID, "-")
			example.ID = exampleID
			example.Segs = make([][]*Seg, 0)
			sourcePaths := mustGlob("examples/" + exampleID + "/*")
			for _, sourcePath := range sourcePaths {
				if strings.HasSuffix(sourcePath, ".hash") {
					example.GoCodeHash, example.URLHash = parseHashFile(sourcePath)
				} else {
					sourceSegs, filecontents := parseAndRenderSegs(sourcePath)
					if filecontents != "" {
						example.GoCode = filecontents
					}
					example.Segs = append(example.Segs, sourceSegs)
				}
			}
			newCodeHash := sha1Sum(example.GoCode)
			if example.GoCodeHash != newCodeHash {
				example.URLHash = resetURLHashFile(newCodeHash, example.GoCode, "examples/"+example.ID+"/"+example.ID+".hash")
			}
			examples = append(examples, &example)
		}
	}
	for i, example := range examples {
		if i < (len(examples) - 1) {
			example.NextExample = examples[i+1]
		}
	}
	return examples
}

func renderIndex(examples []*Example) {
	indexTmpl := template.New("index")
	_, err := indexTmpl.Parse(mustReadFile("templates/index.tmpl"))
	check(err)
	indexF, err := os.Create(siteDir + "/index.html")
	check(err)
	err = indexTmpl.Execute(indexF, examples)
	check(err)
}

func renderExamples(examples []*Example) {
	exampleTmpl := template.New("example")
	_, err := exampleTmpl.Parse(mustReadFile("templates/example.tmpl"))
	check(err)
	for _, example := range examples {
		exampleF, err := os.Create(siteDir + "/" + example.ID)
		check(err)
		exampleTmpl.Execute(exampleF, example)
	}
}

func main() {
	copyFile("templates/site.css", siteDir+"/site.css")
	copyFile("templates/favicon.ico", siteDir+"/favicon.ico")
	copyFile("templates/404.html", siteDir+"/404.html")
	copyFile("templates/play.png", siteDir+"/play.png")
	examples := parseExamples()
	renderIndex(examples)
	renderExamples(examples)
}

File 3 of 3: Go line-length checker ("measure"; whitespace-only reformat, shown once, as it reads after the change)

@@ -1,45 +1,45 @@
package main

import (
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
	"regexp"
	"strings"
	"unicode/utf8"
)

func check(err error) {
	if err != nil {
		panic(err)
	}
}

func readLines(path string) []string {
	srcBytes, err := ioutil.ReadFile(path)
	check(err)
	return strings.Split(string(srcBytes), "\n")
}

var commentPat = regexp.MustCompile("\\s*\\/\\/")

func main() {
	sourcePaths, err := filepath.Glob("./examples/*/*")
	check(err)
	foundLongFile := false
	for _, sourcePath := range sourcePaths {
		foundLongLine := false
		lines := readLines(sourcePath)
		for i, line := range lines {
			if !foundLongLine && !commentPat.MatchString(line) && (utf8.RuneCountInString(line) > 58) {
				fmt.Printf("measure: %s:%d\n", sourcePath, i+1)
				foundLongLine = true
				foundLongFile = true
			}
		}
	}
	if foundLongFile {
		os.Exit(1)
	}
}
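
The measure tool enforces the example line-length budget by counting runes rather than bytes, so multi-byte UTF-8 characters do not trip the 58-character limit, and lines matching the comment pattern are skipped. A small standalone illustration — not part of this commit, with a hypothetical input line — of the difference the utf8.RuneCountInString call makes:

	package main

	import (
		"fmt"
		"unicode/utf8"
	)

	func main() {
		line := `fmt.Println("héllo, wörld")` // hypothetical line with multi-byte runes
		fmt.Println("bytes:", len(line))                    // é and ö count as two bytes each
		fmt.Println("runes:", utf8.RuneCountInString(line)) // what measure compares against 58
	}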