Merge branch 'master' into clipboard

tools/build (36 changed lines)
@@ -2,6 +2,40 @@
set -e

verbose() {
    ! test -z "$VERBOSE"
}

verbose && echo "Running tests..."
tools/test

verbose && echo "Formatting code..."
tools/format

verbose && echo "Measuring line lengths..."
tools/measure

tools/generate

# SITE_DIR is the final location where we want generated content to be
SITE_DIR="public"

# GENERATE_DIR is where the content will be generated initially
GENERATE_DIR="$(mktemp -d)"

function cleanup() {
    rm -rf "$GENERATE_DIR"
}
trap cleanup EXIT

verbose && echo "Generating HTML to $GENERATE_DIR..."
tools/generate $GENERATE_DIR

# In TESTING mode, make sure that the generated content is identical to
# what's already in SITE_DIR. If a difference is found, this script exits
# with an error.
if [[ ! -z "$TESTING" ]]; then
    echo "Comparing $GENERATE_DIR with $SITE_DIR..."
    diff -r "$GENERATE_DIR" "$SITE_DIR"
fi

verbose && echo "Copying $GENERATE_DIR to $SITE_DIR"
cp -rf "${GENERATE_DIR}/." "$SITE_DIR"

tools/format (20 changed lines)
@@ -2,26 +2,8 @@
set -eo pipefail

paths=$(ls tools/*.go examples/*/*.go)

gbe_to_4spaces() {
    local os=$(tr [A-Z] [a-z] <<< "`uname`")
    case $os in
    darwin*)
        sed -i '' -e 's/	/    /g' $1
        ;;
    linux*)
        sed -i -e 's/	/    /g' $1
        ;;
    *)
        echo "$os is not supported."
        echo "Add a proper 'sed' command for your platform to ./tools/format"
        return 1
        ;;
    esac
}

paths=$(ls examples/*/*.go)

for path in $paths; do
    gofmt -w=true $path
    gbe_to_4spaces $path
done

tools/generate
@@ -1,3 +1,3 @@
#!/bin/bash

exec go run tools/generate.go
exec go run tools/generate.go $@

tools/generate.go
@@ -1,291 +1,330 @@
package main

import (
    "crypto/sha1"
    "fmt"
    "github.com/russross/blackfriday"
    "io/ioutil"
    "net/http"
    "os"
    "os/exec"
    "path/filepath"
    "regexp"
    "strings"
    "text/template"

    "crypto/sha1"
    "fmt"
    "io/ioutil"
    "net/http"
    "os"
    "os/exec"
    "path/filepath"
    "regexp"
    "strings"
    "text/template"

    "github.com/russross/blackfriday"
)

var cacheDir = "/tmp/gobyexample-cache"

// siteDir is the target directory into which the HTML gets generated. Its
// default is set here but can be changed by an argument passed into the
// program.
var siteDir = "./public"

var cacheDir = "/tmp/gobyexample-cache"
var pygmentizeBin = "./vendor/pygments/pygmentize"

func verbose() bool {
    return len(os.Getenv("VERBOSE")) > 0
}

func check(err error) {
    if err != nil {
        panic(err)
    }
}

func ensureDir(dir string) {
    err := os.MkdirAll(dir, 0755)
    check(err)
}

func copyFile(src, dst string) {
    dat, err := ioutil.ReadFile(src)
    check(err)
    err = ioutil.WriteFile(dst, dat, 0644)
    check(err)
}

func pipe(bin string, arg []string, src string) []byte {
    cmd := exec.Command(bin, arg...)
    in, err := cmd.StdinPipe()
    check(err)
    out, err := cmd.StdoutPipe()
    check(err)
    err = cmd.Start()
    check(err)
    _, err = in.Write([]byte(src))
    check(err)
    err = in.Close()
    check(err)
    bytes, err := ioutil.ReadAll(out)
    check(err)
    err = cmd.Wait()
    check(err)
    return bytes
}

func sha1Sum(s string) string {
    h := sha1.New()
    h.Write([]byte(s))
    b := h.Sum(nil)
    return fmt.Sprintf("%x", b)
}

func mustReadFile(path string) string {
    bytes, err := ioutil.ReadFile(path)
    check(err)
    return string(bytes)
}

func cachedPygmentize(lex string, src string) string {
    ensureDir(cacheDir)
    arg := []string{"-l", lex, "-f", "html"}
    cachePath := cacheDir + "/pygmentize-" + strings.Join(arg, "-") + "-" + sha1Sum(src)
    cacheBytes, cacheErr := ioutil.ReadFile(cachePath)
    if cacheErr == nil {
        return string(cacheBytes)
    }
    renderBytes := pipe(pygmentizeBin, arg, src)
    // Newer versions of Pygments add silly empty spans.
    renderCleanString := strings.Replace(string(renderBytes), "<span></span>", "", -1)
    writeErr := ioutil.WriteFile(cachePath, []byte(renderCleanString), 0600)
    check(writeErr)
    return renderCleanString
}

func markdown(src string) string {
    return string(blackfriday.MarkdownCommon([]byte(src)))
}

func readLines(path string) []string {
    src := mustReadFile(path)
    return strings.Split(src, "\n")
}

func mustGlob(glob string) []string {
    paths, err := filepath.Glob(glob)
    check(err)
    return paths
}

func whichLexer(path string) string {
    if strings.HasSuffix(path, ".go") {
        return "go"
    } else if strings.HasSuffix(path, ".sh") {
        return "console"
    }
    panic("No lexer for " + path)
    return ""
    if strings.HasSuffix(path, ".go") {
        return "go"
    } else if strings.HasSuffix(path, ".sh") {
        return "console"
    }
    panic("No lexer for " + path)
}

func debug(msg string) {
    if os.Getenv("DEBUG") == "1" {
        fmt.Fprintln(os.Stderr, msg)
    }
}

var docsPat = regexp.MustCompile("^\\s*(\\/\\/|#)\\s")
var dashPat = regexp.MustCompile("\\-+")

// Seg is a segment of an example
type Seg struct {
    Docs, DocsRendered string
    Code, CodeRendered, CodeForJs string
    CodeEmpty, CodeLeading, CodeRun bool
}

// Example is info extracted from an example file
type Example struct {
    Id, Name string
    GoCode, GoCodeHash, UrlHash string
    Segs [][]*Seg
    NextExample *Example
    ID, Name string
    GoCode, GoCodeHash, URLHash string
    Segs [][]*Seg
    PrevExample *Example
    NextExample *Example
}

func parseHashFile(sourcePath string) (string, string) {
    lines := readLines(sourcePath)
    return lines[0], lines[1]
}

func resetUrlHashFile(codehash, code, sourcePath string) string {
    payload := strings.NewReader(code)
    resp, err := http.Post("https://play.golang.org/share", "text/plain", payload)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()
    body, err := ioutil.ReadAll(resp.Body)
    urlkey := string(body)
    data := fmt.Sprintf("%s\n%s\n", codehash, urlkey)
    ioutil.WriteFile(sourcePath, []byte(data), 0644)
    return urlkey
func resetURLHashFile(codehash, code, sourcePath string) string {
    if verbose() {
        fmt.Println(" Sending request to play.golang.org")
    }
    payload := strings.NewReader(code)
    resp, err := http.Post("https://play.golang.org/share", "text/plain", payload)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()
    body, err := ioutil.ReadAll(resp.Body)
    urlkey := string(body)
    data := fmt.Sprintf("%s\n%s\n", codehash, urlkey)
    ioutil.WriteFile(sourcePath, []byte(data), 0644)
    return urlkey
}

func parseSegs(sourcePath string) ([]*Seg, string) {
    lines := readLines(sourcePath)
    filecontent := strings.Join(lines, "\n")
    segs := []*Seg{}
    lastSeen := ""
    for _, line := range lines {
        if line == "" {
            lastSeen = ""
            continue
        }
        matchDocs := docsPat.MatchString(line)
        matchCode := !matchDocs
        newDocs := (lastSeen == "") || ((lastSeen != "docs") && (segs[len(segs)-1].Docs != ""))
        newCode := (lastSeen == "") || ((lastSeen != "code") && (segs[len(segs)-1].Code != ""))
        if newDocs || newCode {
            debug("NEWSEG")
        }
        if matchDocs {
            trimmed := docsPat.ReplaceAllString(line, "")
            if newDocs {
                newSeg := Seg{Docs: trimmed, Code: ""}
                segs = append(segs, &newSeg)
            } else {
                segs[len(segs)-1].Docs = segs[len(segs)-1].Docs + "\n" + trimmed
            }
            debug("DOCS: " + line)
            lastSeen = "docs"
        } else if matchCode {
            if newCode {
                newSeg := Seg{Docs: "", Code: line}
                segs = append(segs, &newSeg)
            } else {
                segs[len(segs)-1].Code = segs[len(segs)-1].Code + "\n" + line
            }
            debug("CODE: " + line)
            lastSeen = "code"
        }
    }
    for i, seg := range segs {
        seg.CodeEmpty = (seg.Code == "")
        seg.CodeLeading = (i < (len(segs) - 1))
        seg.CodeRun = strings.Contains(seg.Code, "package main")
    }
    return segs, filecontent
    var lines []string
    // Convert tabs to spaces for uniform rendering.
    for _, line := range readLines(sourcePath) {
        lines = append(lines, strings.Replace(line, "\t", "    ", -1))
    }
    filecontent := strings.Join(lines, "\n")
    segs := []*Seg{}
    lastSeen := ""
    for _, line := range lines {
        if line == "" {
            lastSeen = ""
            continue
        }
        matchDocs := docsPat.MatchString(line)
        matchCode := !matchDocs
        newDocs := (lastSeen == "") || ((lastSeen != "docs") && (segs[len(segs)-1].Docs != ""))
        newCode := (lastSeen == "") || ((lastSeen != "code") && (segs[len(segs)-1].Code != ""))
        if newDocs || newCode {
            debug("NEWSEG")
        }
        if matchDocs {
            trimmed := docsPat.ReplaceAllString(line, "")
            if newDocs {
                newSeg := Seg{Docs: trimmed, Code: ""}
                segs = append(segs, &newSeg)
            } else {
                segs[len(segs)-1].Docs = segs[len(segs)-1].Docs + "\n" + trimmed
            }
            debug("DOCS: " + line)
            lastSeen = "docs"
        } else if matchCode {
            if newCode {
                newSeg := Seg{Docs: "", Code: line}
                segs = append(segs, &newSeg)
            } else {
                segs[len(segs)-1].Code = segs[len(segs)-1].Code + "\n" + line
            }
            debug("CODE: " + line)
            lastSeen = "code"
        }
    }
    for i, seg := range segs {
        seg.CodeEmpty = (seg.Code == "")
        seg.CodeLeading = (i < (len(segs) - 1))
        seg.CodeRun = strings.Contains(seg.Code, "package main")
    }
    return segs, filecontent
}

func parseAndRenderSegs(sourcePath string) ([]*Seg, string) {
    segs, filecontent := parseSegs(sourcePath)
    lexer := whichLexer(sourcePath)
    for _, seg := range segs {
        if seg.Docs != "" {
            seg.DocsRendered = markdown(seg.Docs)
        }
        if seg.Code != "" {
            seg.CodeRendered = cachedPygmentize(lexer, seg.Code)
            // adding the content to the js code for copying to the clipboard
            if strings.HasSuffix(sourcePath, ".go") {
                seg.CodeForJs = strings.Trim(seg.Code, "\n") + "\n"
            }
        }
    }
    // we are only interested in the 'go' code to pass to play.golang.org
    if lexer != "go" {
        filecontent = ""
    }
    return segs, filecontent
}

func parseExamples() []*Example {
    exampleNames := readLines("examples.txt")
    examples := make([]*Example, 0)
    for _, exampleName := range exampleNames {
        if (exampleName != "") && !strings.HasPrefix(exampleName, "#") {
            example := Example{Name: exampleName}
            exampleId := strings.ToLower(exampleName)
            exampleId = strings.Replace(exampleId, " ", "-", -1)
            exampleId = strings.Replace(exampleId, "/", "-", -1)
            exampleId = strings.Replace(exampleId, "'", "", -1)
            exampleId = dashPat.ReplaceAllString(exampleId, "-")
            example.Id = exampleId
            example.Segs = make([][]*Seg, 0)
            sourcePaths := mustGlob("examples/" + exampleId + "/*")
            for _, sourcePath := range sourcePaths {
                if strings.HasSuffix(sourcePath, ".hash") {
                    example.GoCodeHash, example.UrlHash = parseHashFile(sourcePath)
                } else {
                    sourceSegs, filecontents := parseAndRenderSegs(sourcePath)
                    if filecontents != "" {
                        example.GoCode = filecontents
                    }
                    example.Segs = append(example.Segs, sourceSegs)
                }
            }
            newCodeHash := sha1Sum(example.GoCode)
            if example.GoCodeHash != newCodeHash {
                example.UrlHash = resetUrlHashFile(newCodeHash, example.GoCode, "examples/"+example.Id+"/"+example.Id+".hash")
            }
            examples = append(examples, &example)
        }
    }
    for i, example := range examples {
        if i < (len(examples) - 1) {
            example.NextExample = examples[i+1]
        }
    }
    return examples
    var exampleNames []string
    for _, line := range readLines("examples.txt") {
        if line != "" && !strings.HasPrefix(line, "#") {
            exampleNames = append(exampleNames, line)
        }
    }
    examples := make([]*Example, 0)
    for i, exampleName := range exampleNames {
        if verbose() {
            fmt.Printf("Processing %s [%d/%d]\n", exampleName, i+1, len(exampleNames))
        }
        example := Example{Name: exampleName}
        exampleID := strings.ToLower(exampleName)
        exampleID = strings.Replace(exampleID, " ", "-", -1)
        exampleID = strings.Replace(exampleID, "/", "-", -1)
        exampleID = strings.Replace(exampleID, "'", "", -1)
        exampleID = dashPat.ReplaceAllString(exampleID, "-")
        example.ID = exampleID
        example.Segs = make([][]*Seg, 0)
        sourcePaths := mustGlob("examples/" + exampleID + "/*")
        for _, sourcePath := range sourcePaths {
            if strings.HasSuffix(sourcePath, ".hash") {
                example.GoCodeHash, example.URLHash = parseHashFile(sourcePath)
            } else {
                sourceSegs, filecontents := parseAndRenderSegs(sourcePath)
                if filecontents != "" {
                    example.GoCode = filecontents
                }
                example.Segs = append(example.Segs, sourceSegs)
            }
        }
        newCodeHash := sha1Sum(example.GoCode)
        if example.GoCodeHash != newCodeHash {
            example.URLHash = resetURLHashFile(newCodeHash, example.GoCode, "examples/"+example.ID+"/"+example.ID+".hash")
        }
        examples = append(examples, &example)
    }
    for i, example := range examples {
        if i > 0 {
            example.PrevExample = examples[i-1]
        }
        if i < (len(examples) - 1) {
            example.NextExample = examples[i+1]
        }
    }
    return examples
}

func renderIndex(examples []*Example) {
    indexTmpl := template.New("index")
    _, err := indexTmpl.Parse(mustReadFile("templates/index.tmpl"))
    check(err)
    indexF, err := os.Create(siteDir + "/index.html")
    check(err)
    indexTmpl.Execute(indexF, examples)
    if verbose() {
        fmt.Println("Rendering index")
    }
    indexTmpl := template.New("index")
    _, err := indexTmpl.Parse(mustReadFile("templates/index.tmpl"))
    check(err)
    indexF, err := os.Create(siteDir + "/index.html")
    check(err)
    err = indexTmpl.Execute(indexF, examples)
    check(err)
}

func renderExamples(examples []*Example) {
    exampleTmpl := template.New("example")
    _, err := exampleTmpl.Parse(mustReadFile("templates/example.tmpl"))
    check(err)
    for _, example := range examples {
        exampleF, err := os.Create(siteDir + "/" + example.Id)
        check(err)
        exampleTmpl.Execute(exampleF, example)
    }
    if verbose() {
        fmt.Println("Rendering examples")
    }
    exampleTmpl := template.New("example")
    _, err := exampleTmpl.Parse(mustReadFile("templates/example.tmpl"))
    check(err)
    for _, example := range examples {
        exampleF, err := os.Create(siteDir + "/" + example.ID)
        check(err)
        exampleTmpl.Execute(exampleF, example)
    }
}

func main() {
    copyFile("templates/site.css", siteDir+"/site.css")
    copyFile("templates/site.js", siteDir+"/site.js")
    copyFile("templates/favicon.ico", siteDir+"/favicon.ico")
    copyFile("templates/404.html", siteDir+"/404.html")
    copyFile("templates/play.png", siteDir+"/play.png")
    copyFile("templates/clipboard.png", siteDir+"/clipboard.png")
    examples := parseExamples()
    renderIndex(examples)
    renderExamples(examples)
    if len(os.Args) > 1 {
        siteDir = os.Args[1]
    }
    ensureDir(siteDir)

    copyFile("templates/site.css", siteDir+"/site.css")
    copyFile("templates/site.js", siteDir+"/site.js")
    copyFile("templates/favicon.ico", siteDir+"/favicon.ico")
    copyFile("templates/404.html", siteDir+"/404.html")
    copyFile("templates/play.png", siteDir+"/play.png")
    copyFile("templates/clipboard.png", siteDir+"/clipboard.png")
    examples := parseExamples()
    renderIndex(examples)
    renderExamples(examples)
}

tools/measure.go
@@ -1,45 +1,48 @@
package main

import (
    "fmt"
    "io/ioutil"
    "os"
    "path/filepath"
    "regexp"
    "strings"
    "unicode/utf8"
)

func check(err error) {
    if err != nil {
        panic(err)
    }
}

func readLines(path string) []string {
    srcBytes, err := ioutil.ReadFile(path)
    check(err)
    return strings.Split(string(srcBytes), "\n")
}

var commentPat = regexp.MustCompile("\\s*\\/\\/")

func main() {
    sourcePaths, err := filepath.Glob("./examples/*/*")
    check(err)
    foundLongFile := false
    for _, sourcePath := range sourcePaths {
        foundLongLine := false
        lines := readLines(sourcePath)
        for i, line := range lines {
            if !foundLongLine && !commentPat.MatchString(line) && (utf8.RuneCountInString(line) > 58) {
                fmt.Printf("measure: %s:%d\n", sourcePath, i+1)
                foundLongLine = true
                foundLongFile = true
            }
        }
    }
    if foundLongFile {
        os.Exit(1)
    }
    sourcePaths, err := filepath.Glob("./examples/*/*")
    check(err)
    foundLongFile := false
    for _, sourcePath := range sourcePaths {
        foundLongLine := false
        lines := readLines(sourcePath)
        for i, line := range lines {
            // Convert tabs to spaces before measuring, so we get an accurate measure
            // of how long the output will end up being.
            line := strings.Replace(line, "\t", "    ", -1)
            if !foundLongLine && !commentPat.MatchString(line) && (utf8.RuneCountInString(line) > 58) {
                fmt.Printf("measure: %s:%d\n", sourcePath, i+1)
                foundLongLine = true
                foundLongFile = true
            }
        }
    }
    if foundLongFile {
        os.Exit(1)
    }
}

tools/serve (new executable file, 3 lines)
@@ -0,0 +1,3 @@
#!/bin/bash

exec go run tools/serve.go

tools/serve.go (new file, 13 lines)
@@ -0,0 +1,13 @@
package main

import (
    "fmt"
    "net/http"
)

func main() {
    port := "8000"
    publicDir := "public"
    fmt.Printf("Serving Go by Example at http://127.0.0.1:%s\n", port)
    http.ListenAndServe(":"+port, http.FileServer(http.Dir(publicDir)))
}

tools/test (new executable file, 10 lines)
@@ -0,0 +1,10 @@
#!/bin/bash

# Sanity testing of the examples.

set -eo pipefail

# go vet will attempt to build each example, making sure it compiles. It will
# also report known issues with the code. Disabling the -unreachable check
# because it will fire false positives for some examples demonstrating panics.
go vet -unreachable=false ./examples/...