sorting more chapter and formatting stuff

This commit is contained in:
Mark McGranaghan 2012-10-02 08:01:28 -07:00
parent ca787947b1
commit 3d8912204b
12 changed files with 156 additions and 135 deletions

View File

@ -5,9 +5,9 @@ package main
import "strings"
import "fmt"
func Index(elems []string, val string) int {
for i, v := range elems {
if v == val {
func Index(strs []string, s string) int {
for i, str := range strs {
if s == str {
return i
}
}
@ -36,7 +36,7 @@ func All(elems []string, f func(string) bool) bool {
return true
}
func Filter(elems []string, f func(string) bool) []string {
func Filter(vs []string, f func(string) bool) []string {
filtered := []string{}
for _, v := range elems {
if f(v) {
@ -46,47 +46,47 @@ func Filter(elems []string, f func(string) bool) []string {
return filtered
}
func Map(elems []string, f func(string) string) []string {
mapped := make([]string, len(elems))
for i, v := range elems {
func Map(strs []string, f func(string) string) []string {
mapped := make([]string, len(strs))
for i, v := range strs {
mapped[i] = f(v)
}
return mapped
}
func main() {
var elems = []string{"peach", "apple", "pear", "banana"}
var strs = []string{"peach", "apple", "pear", "plum"}
fmt.Println(Index(elems, "pear"))
fmt.Println(Index(elems, "grape"))
fmt.Println(Index(strs, "pear"))
fmt.Println(Index(strs, "grape"))
fmt.Println()
fmt.Println(Include(elems, "pear"))
fmt.Println(Include(elems, "grape"))
fmt.Println(Include(strs, "pear"))
fmt.Println(Include(strs, "grape"))
fmt.Println()
fmt.Println(Any(elems, func(v string) bool {
fmt.Println(Any(strs, func(v string) bool {
return strings.HasPrefix(v, "p")
}))
fmt.Println(Any(elems, func(v string) bool {
fmt.Println(Any(strs, func(v string) bool {
return strings.HasPrefix(v, "g")
}))
fmt.Println()
fmt.Println(All(elems, func(v string) bool {
fmt.Println(All(strs, func(v string) bool {
return strings.Contains(v, "a")
}))
fmt.Println(All(elems, func(v string) bool {
fmt.Println(All(strs, func(v string) bool {
return strings.Contains(v, "p")
}))
fmt.Println()
fmt.Println(Filter(elems, func(v string) bool {
fmt.Println(Filter(strs, func(v string) bool {
return strings.Contains(v, "p")
}))
fmt.Println()
fmt.Println(Map(elems, func(s string) string {
fmt.Println(Map(strs, func(s string) string {
return strings.ToUpper(s)
}))
fmt.Println()

View File

@ -20,7 +20,8 @@ func main() {
d, _ := strconv.ParseInt("0x1b3e", 0, 64)
println(d)
// `Atoi` is a convenience function for `int` parsing.
// `Atoi` is a convenience function for `int`
// parsing.
k, _ := strconv.Atoi("456")
println(k)

View File

@ -7,7 +7,7 @@ import "net/url"
import "strings"
func main() {
s := "postgres://user:pass@host.com:5432/path?k=v#frag"
s := "postgres://user:pass@host.com:5432/path?k=v#f"
u, err := url.Parse(s)
if err != nil {
panic(err)

View File

@ -1,6 +1,6 @@
package main
import "encoding/base64"
import b64 "encoding/base64"
import "fmt"
func main() {
@ -10,15 +10,15 @@ func main() {
fmt.Println()
// Standard base64 encoding/decoding.
sEnc := base64.StdEncoding.EncodeToString([]byte(data))
sEnc := b64.StdEncoding.EncodeToString([]byte(data))
fmt.Println(sEnc)
sDec, _ := base64.StdEncoding.DecodeString(sEnc)
sDec, _ := b64.StdEncoding.DecodeString(sEnc)
fmt.Println(string(sDec))
fmt.Println()
// URL base64 encoding/decoding.
uEnc := base64.URLEncoding.EncodeToString([]byte(data))
uEnc := b64.URLEncoding.EncodeToString([]byte(data))
fmt.Println(uEnc)
uDec, _ := base64.URLEncoding.DecodeString(uEnc)
uDec, _ := b64.URLEncoding.DecodeString(uEnc)
fmt.Println(string(uDec))
}

View File

@ -22,7 +22,8 @@ func main() {
out := os.Stdout
// If successful, each `ReadLine` returns bytes and a
// boolean indicating if we don't have the whole line yet.
// boolean indicating if we don't have the whole line
// yet.
for {
inBytes, pfx, err := in.ReadLine()

View File

@ -9,7 +9,7 @@ import "fmt"
func main() {
// `os.Args` includes the program name as the first
// value.
// value.
argsWithProg := os.Args
argsWithoutProg := os.Args[1:]

View File

@ -15,21 +15,22 @@ func runLogging(logs chan string) {
func wrapLogging(f http.HandlerFunc) http.HandlerFunc {
logs := make(chan string, 10000)
go runLogging(logs)
return func(rs http.ResponseWriter, rq *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
start := time.Now()
f(rs, rq)
method := req.Method
path := req.URL.Path
f(w, r)
method := r.Method
path := r.URL.Path
elapsed := float64(time.Since(start)) / 1000000.0
logs <- fmt.Sprintf("method=%s path=%s elapsed=%f",
logs <- fmt.Sprintf(
"method=%s path=%s elapsed=%f",
method, path, elapsed)
}
}
func hello(rs http.ResponseWriter, rq *http.Request) {
rs.Header().Set("Content-Type", "text/plain")
func hello(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/plain")
time.Sleep(time.Millisecond * 50)
fmt.Fprintln(rs, "Hello logged world")
fmt.Fprintln(w, "Hello logged world")
}
func main() {

View File

@ -10,6 +10,7 @@ import (
)
type Auth func(string, string) bool
type handler http.HandlerFunc
func testAuth(r *http.Request, auth Auth) bool {
header := r.Header.Get("Authorization")
@ -35,7 +36,7 @@ func requireAuth(w http.ResponseWriter, r *http.Request) {
w.Write([]byte("401 Unauthorized\n"))
}
func wrapAuth(h http.HandlerFunc, a Auth) http.HandlerFunc {
func wrapAuth(h handler, a Auth) handler {
return func(w http.ResponseWriter, r *http.Request) {
if testAuth(r, a) {
h(w, r)

View File

@ -49,7 +49,8 @@ func main() {
stop := make(chan bool, 1)
sig := make(chan os.Signal, 1)
server := &http.Server{Handler: http.HandlerFunc(slow)}
handler := http.HandlerFunc(slow)
server := &http.Server{Handler: handler}
fmt.Println("listen at=start")
listener, listenErr := net.Listen("tcp", ":5000")
if listenErr != nil {
@ -69,7 +70,9 @@ func main() {
}()
go func() {
signal.Notify(sig, syscall.SIGINT, syscall.SIGTERM)
signal.Notify(
sig, syscall.SIGINT,
syscall.SIGTERM)
fmt.Println("trap at=start")
<-sig
stop <- true

View File

@ -1,5 +1,5 @@
/* PDF Formatting */
@page { margin: 10pt 0pt 10pt 0pt }
@page { margin: 10px 10px 10px 10px }
/*--------------------- Layout and Typography ----------------------------*/
body {
@ -30,8 +30,9 @@ hr {
#container {
position: relative;
}
div.chapter {
table {
page-break-inside: avoid;
width: 775px;
}
table td {
border: 0;
@ -47,8 +48,8 @@ td.docs {
text-align: left;
}
td.code {
max-width: 450px;
min-width: 450px;
max-width: 400px;
min-width: 400px;
padding: 10px 10px 10px 10px;
vertical-align: top;
background: #f0f0f0;

View File

@ -64,6 +64,12 @@ func readLines(path string) []string {
return strings.Split(string(srcBytes), "\n")
}
// mustGlob returns all filesystem paths matching the given glob
// pattern. Any error from filepath.Glob is passed to check, which is
// defined elsewhere in this file — presumably it aborts the program
// on a non-nil error (TODO confirm against its body).
func mustGlob(glob string) []string {
paths, err := filepath.Glob(glob)
check(err)
return paths
}
func whichLexer(path string) string {
if strings.HasSuffix(path, ".go") {
return "go"
@ -88,102 +94,106 @@ type seg struct {
docs, code, docsRendered, codeRendered string
}
func main() {
ensureCache()
sourcePaths, err := filepath.Glob("./src/0*/*")
check(err)
fmt.Print(`<!DOCTYPE html>
<html>
<head>
<meta http-eqiv="content-type" content="text/html;charset=utf-8">
<title>Go by Example</title>
<link rel=stylesheet href="../style/book.css">
</head>
<body>
<div id="container">
<table cellspacing="0" cellpadding="0">
<tbody>`)
for _, sourcePath := range sourcePaths {
lexer := whichLexer(sourcePath)
lines := readLines(sourcePath)
segs := []*seg{}
segs = append(segs, &seg{code: "", docs: ""})
lastSeen := ""
for _, line := range lines {
if todoPat.MatchString(line) {
continue
}
headerMatch := headerPat.MatchString(line)
docsMatch := docsPat.MatchString(line)
emptyMatch := line == ""
lastSeg := segs[len(segs)-1]
lastHeader := lastSeen == "header"
lastDocs := lastSeen == "docs"
newHeader := lastSeen != "header" && lastSeg.docs != ""
newDocs := lastSeen == "code" || lastSeen == "header"
newCode := (lastSeen != "code" && lastSeg.code != "") || lastSeen == "header"
if newHeader || newDocs || newCode {
debug("NEWSEG")
}
if headerMatch || (emptyMatch && lastHeader) {
trimmed := docsPat.ReplaceAllString(line, "")
if newHeader {
newSeg := seg{docs: trimmed, code: ""}
segs = append(segs, &newSeg)
} else {
lastSeg.docs = lastSeg.docs + "\n" + trimmed
}
debug("HEAD")
lastSeen = "header"
} else if docsMatch || (emptyMatch && lastDocs) {
trimmed := docsPat.ReplaceAllString(line, "")
if newDocs {
debug("NEWSEG")
newSeg := seg{docs: trimmed, code: ""}
segs = append(segs, &newSeg)
} else {
lastSeg.docs = lastSeg.docs + "\n" + trimmed
}
debug("DOCS")
lastSeen = "docs"
func parseSegs(sourcePath string) []*seg {
lines := readLines(sourcePath)
segs := []*seg{}
segs = append(segs, &seg{code: "", docs: ""})
lastSeen := ""
for _, line := range lines {
if todoPat.MatchString(line) {
continue
}
headerMatch := headerPat.MatchString(line)
docsMatch := docsPat.MatchString(line)
emptyMatch := line == ""
lastSeg := segs[len(segs)-1]
lastHeader := lastSeen == "header"
lastDocs := lastSeen == "docs"
newHeader := lastSeen != "header" && lastSeg.docs != ""
newDocs := lastSeen == "code" || lastSeen == "header"
newCode := (lastSeen != "code" && lastSeg.code != "") || lastSeen == "header"
if newHeader || newDocs || newCode {
debug("NEWSEG")
}
if headerMatch || (emptyMatch && lastHeader) {
trimmed := docsPat.ReplaceAllString(line, "")
if newHeader {
newSeg := seg{docs: trimmed, code: ""}
segs = append(segs, &newSeg)
} else {
if newCode {
newSeg := seg{docs: "", code: line}
segs = append(segs, &newSeg)
} else {
lastSeg.code = lastSeg.code + "\n" + line
}
debug("CODE")
lastSeen = "code"
lastSeg.docs = lastSeg.docs + "\n" + trimmed
}
}
segs = append(segs, &seg{code: "", docs: ""})
for _, seg := range segs {
if seg.docs != "" {
seg.docsRendered = string(blackfriday.MarkdownCommon([]byte(seg.docs)))
debug("HEAD")
lastSeen = "header"
} else if docsMatch || (emptyMatch && lastDocs) {
trimmed := docsPat.ReplaceAllString(line, "")
if newDocs {
debug("NEWSEG")
newSeg := seg{docs: trimmed, code: ""}
segs = append(segs, &newSeg)
} else {
lastSeg.docs = lastSeg.docs + "\n" + trimmed
}
if seg.code != "" {
seg.codeRendered = cachedRender("/usr/local/bin/pygmentize", []string{"-l", lexer, "-f", "html"}, seg.code)
debug("DOCS")
lastSeen = "docs"
} else {
if newCode {
newSeg := seg{docs: "", code: line}
segs = append(segs, &newSeg)
} else {
lastSeg.code = lastSeg.code + "\n" + line
}
}
for _, seg := range segs {
codeClasses := "code"
if seg.code == "" {
codeClasses = codeClasses + " empty"
}
fmt.Printf(
`<tr>
<td class=docs>%s</td>
<td class="%s">%s</td>
</tr>`, seg.docsRendered, codeClasses, seg.codeRendered)
debug("CODE")
lastSeen = "code"
}
}
fmt.Print(`</tbody></table></div></body></html>`)
return append(segs, &seg{code: "", docs: ""})
}
// parseAndRenderSegs parses the file at sourcePath into doc/code
// segments (via parseSegs) and fills in each segment's rendered
// fields: docs are converted to HTML with blackfriday's Markdown
// renderer, code is highlighted by shelling out to pygmentize
// through cachedRender (defined elsewhere in this file).
func parseAndRenderSegs(sourcePath string) []*seg {
segs := parseSegs(sourcePath)
// The pygmentize lexer is chosen from the file extension.
lexer := whichLexer(sourcePath)
for _, seg := range segs {
if seg.docs != "" {
seg.docsRendered = string(blackfriday.MarkdownCommon([]byte(seg.docs)))
}
if seg.code != "" {
// NOTE(review): hard-coded pygmentize path — breaks on systems
// where it is installed elsewhere; consider resolving via $PATH.
seg.codeRendered = cachedRender("/usr/local/bin/pygmentize", []string{"-l", lexer, "-f", "html"}, seg.code)
}
}
return segs
}
// main renders the whole book as one HTML page on stdout: a fixed
// header, then one <table> of doc/code rows per chapter directory
// matching ./src/0*, then the closing tags.
func main() {
ensureCache()
// NOTE(review): `http-eqiv` in the meta tag below looks like a typo
// for `http-equiv` — confirm before relying on the charset hint.
fmt.Print(`<!DOCTYPE html>
<html>
<head>
<meta http-eqiv="content-type" content="text/html;charset=utf-8">
<title>Go by Example</title>
<link rel=stylesheet href="../style/book.css">
</head>
<body>
<div id="container">`)
chapterPaths := mustGlob("./src/0*")
for _, chapterPath := range chapterPaths {
// One table per chapter, so page breaks can fall between chapters
// (the stylesheet sets `page-break-inside: avoid` on tables).
fmt.Print(`<table cellspacing="0" cellpadding="0"><tbody>`)
sourcePaths := mustGlob(chapterPath + "/*")
for _, sourcePath := range sourcePaths {
segs := parseAndRenderSegs(sourcePath)
for _, seg := range segs {
codeClasses := "code"
if seg.code == "" {
// Segments without code get an extra class for styling.
codeClasses = codeClasses + " empty"
}
fmt.Printf(
`<tr>
<td class=docs>%s</td>
<td class="%s">%s</td>
</tr>`, seg.docsRendered, codeClasses, seg.codeRendered)
}
}
fmt.Print(`</tbody></table>`)
}
fmt.Print(`</div></body></html>`)
}

View File

@ -5,6 +5,7 @@ import (
"io/ioutil"
"os"
"path/filepath"
"regexp"
"strings"
)
@ -20,6 +21,8 @@ func readLines(path string) []string {
return strings.Split(string(srcBytes), "\n")
}
var todoPat = regexp.MustCompile("\\/\\/ todo: ")
func main() {
sourcePaths, err := filepath.Glob("./src/0*/*")
check(err)
@ -28,7 +31,7 @@ func main() {
foundLongLine := false
lines := readLines(sourcePath)
for _, line := range lines {
if len(line) > 60 && !foundLongLine {
if !foundLongLine && !todoPat.MatchString(line) && (len(line) > 58) {
fmt.Println(sourcePath)
foundLongLine = true
foundLongFile = true