sorting more chapter and formatting stuff

parent ca787947b1
commit 3d8912204b
@@ -5,9 +5,9 @@ package main
 import "strings"
 import "fmt"

-func Index(elems []string, val string) int {
-	for i, v := range elems {
-		if v == val {
+func Index(strs []string, s string) int {
+	for i, str := range strs {
+		if s == str {
 			return i
 		}
 	}
@@ -36,7 +36,7 @@ func All(elems []string, f func(string) bool) bool {
 	return true
 }

-func Filter(elems []string, f func(string) bool) []string {
+func Filter(vs []string, f func(string) bool) []string {
 	filtered := []string{}
 	for _, v := range elems {
 		if f(v) {
@@ -46,47 +46,47 @@ func Filter(elems []string, f func(string) bool) []string {
 	return filtered
 }

-func Map(elems []string, f func(string) string) []string {
-	mapped := make([]string, len(elems))
-	for i, v := range elems {
+func Map(strs []string, f func(string) string) []string {
+	mapped := make([]string, len(strs))
+	for i, v := range strs {
 		mapped[i] = f(v)
 	}
 	return mapped
 }

 func main() {
-	var elems = []string{"peach", "apple", "pear", "banana"}
+	var strs = []string{"peach", "apple", "pear", "plum"}

-	fmt.Println(Index(elems, "pear"))
-	fmt.Println(Index(elems, "grape"))
+	fmt.Println(Index(strs, "pear"))
+	fmt.Println(Index(strs, "grape"))
 	fmt.Println()

-	fmt.Println(Include(elems, "pear"))
-	fmt.Println(Include(elems, "grape"))
+	fmt.Println(Include(strs, "pear"))
+	fmt.Println(Include(strs, "grape"))
 	fmt.Println()

-	fmt.Println(Any(elems, func(v string) bool {
+	fmt.Println(Any(strs, func(v string) bool {
 		return strings.HasPrefix(v, "p")
 	}))
-	fmt.Println(Any(elems, func(v string) bool {
+	fmt.Println(Any(strs, func(v string) bool {
 		return strings.HasPrefix(v, "g")
 	}))
 	fmt.Println()

-	fmt.Println(All(elems, func(v string) bool {
+	fmt.Println(All(strs, func(v string) bool {
 		return strings.Contains(v, "a")
 	}))
-	fmt.Println(All(elems, func(v string) bool {
+	fmt.Println(All(strs, func(v string) bool {
 		return strings.Contains(v, "p")
 	}))
 	fmt.Println()

-	fmt.Println(Filter(elems, func(v string) bool {
+	fmt.Println(Filter(strs, func(v string) bool {
 		return strings.Contains(v, "p")
 	}))
 	fmt.Println()

-	fmt.Println(Map(elems, func(s string) string {
+	fmt.Println(Map(strs, func(s string) string {
 		return strings.ToUpper(s)
 	}))
 	fmt.Println()
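
For reference, a minimal, self-contained sketch of how the renamed string-collection helpers read at a call site. This is not part of the commit; it only restates two of the helpers above (the chapter also defines Include, Any, All, and Map) so the `strs`/`s` naming can be seen in isolation:

```go
package main

import (
	"fmt"
	"strings"
)

// Index returns the first index of s in strs, or -1 if s is absent.
func Index(strs []string, s string) int {
	for i, str := range strs {
		if s == str {
			return i
		}
	}
	return -1
}

// Filter returns every string for which the predicate f holds.
func Filter(strs []string, f func(string) bool) []string {
	filtered := []string{}
	for _, v := range strs {
		if f(v) {
			filtered = append(filtered, v)
		}
	}
	return filtered
}

func main() {
	strs := []string{"peach", "apple", "pear", "plum"}
	fmt.Println(Index(strs, "pear")) // 2
	fmt.Println(Filter(strs, func(v string) bool {
		return strings.Contains(v, "e")
	})) // [peach apple pear]
}
```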
@@ -20,7 +20,8 @@ func main() {
 	d, _ := strconv.ParseInt("0x1b3e", 0, 64)
 	println(d)

-	// `Atoi` is a convenienice function for `int` parsing.
+	// `Atoi` is a convenienice function for `int`
+	// parsing.
 	k, _ := strconv.Atoi("456")
 	println(k)

@@ -7,7 +7,7 @@ import "net/url"
 import "strings"

 func main() {
-	s := "postgres://user:pass@host.com:5432/path?k=v#frag"
+	s := "postgres://user:pass@host.com:5432/path?k=v#f"
 	u, err := url.Parse(s)
 	if err != nil {
 		panic(err)
@@ -1,6 +1,6 @@
 package main

-import "encoding/base64"
+import b64 "encoding/base64"
 import "fmt"

 func main() {
@@ -10,15 +10,15 @@ func main() {
 	fmt.Println()

 	// Standard base64 encoding/decoding.
-	sEnc := base64.StdEncoding.EncodeToString([]byte(data))
+	sEnc := b64.StdEncoding.EncodeToString([]byte(data))
 	fmt.Println(sEnc)
-	sDec, _ := base64.StdEncoding.DecodeString(sEnc)
+	sDec, _ := b64.StdEncoding.DecodeString(sEnc)
 	fmt.Println(string(sDec))
 	fmt.Println()

 	// URL base64 encoding/decoding.
-	uEnc := base64.URLEncoding.EncodeToString([]byte(data))
+	uEnc := b64.URLEncoding.EncodeToString([]byte(data))
 	fmt.Println(uEnc)
-	uDec, _ := base64.URLEncoding.DecodeString(uEnc)
+	uDec, _ := b64.URLEncoding.DecodeString(uEnc)
 	fmt.Println(string(uDec))
 }
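
A standalone sketch of the aliased-import pattern this hunk switches to. The `data` value below is a placeholder (the real value is defined outside the hunk); the calls themselves are the standard library's encoding/base64 API:

```go
package main

import (
	// Aliasing keeps the long package name out of every call site.
	b64 "encoding/base64"
	"fmt"
)

func main() {
	data := "abc123!?$*&()'-=@~" // sample input

	// Standard base64 encoding/decoding.
	sEnc := b64.StdEncoding.EncodeToString([]byte(data))
	fmt.Println(sEnc)
	sDec, _ := b64.StdEncoding.DecodeString(sEnc)
	fmt.Println(string(sDec))

	// URL-safe base64 uses '-' and '_' in place of '+' and '/'.
	uEnc := b64.URLEncoding.EncodeToString([]byte(data))
	fmt.Println(uEnc)
}
```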
@@ -22,7 +22,8 @@ func main() {
 	out := os.Stdout

 	// If successful, each `ReadLine` returns bytes and a
-	// boolean indicating if don't have the whole line yet.
+	// boolean indicating if don't have the whole line
+	// yet.
 	for {
 		inBytes, pfx, err := in.ReadLine()

@@ -9,7 +9,7 @@ import "fmt"

 func main() {
 	// `os.Args` includes the program name as the first
 	// value.
 	argsWithProg := os.Args
 	argsWithoutProg := os.Args[1:]

@@ -15,21 +15,22 @@ func runLogging(logs chan string) {
 func wrapLogging(f http.HandlerFunc) http.HandlerFunc {
 	logs := make(chan string, 10000)
 	go runLogging(logs)
-	return func(rs http.ResponseWriter, rq *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
 		start := time.Now()
-		f(rs, rq)
-		method := req.Method
-		path := req.URL.Path
+		f(w, r)
+		method := r.Method
+		path := r.URL.Path
 		elapsed := float64(time.Since(start)) / 1000000.0
-		logs <- fmt.Sprintf("method=%s path=%s elapsed=%f",
+		logs <- fmt.Sprintf(
+			"method=%s path=%s elapsed=%f",
 			method, path, elapsed)
 	}
 }

-func hello(rs http.ResponseWriter, rq *http.Request) {
-	rs.Header().Set("Content-Type", "text/plain")
+func hello(w http.ResponseWriter, r *http.Request) {
+	w.Header().Set("Content-Type", "text/plain")
 	time.Sleep(time.Millisecond * 50)
-	fmt.Fprintln(rs, "Hello logged world")
+	fmt.Fprintln(w, "Hello logged world")
 }

 func main() {
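
A sketch of how this logging wrapper would typically be wired up. The hunk does not show `runLogging`'s body or the handler registration, so both are assumptions here (a simple channel-draining goroutine and the default mux):

```go
package main

import (
	"fmt"
	"net/http"
	"time"
)

// runLogging drains the log channel; assumed to simply print each entry.
func runLogging(logs chan string) {
	for entry := range logs {
		fmt.Println(entry)
	}
}

// wrapLogging mirrors the wrapper in the hunk above: it times each request
// and emits a structured log line on a buffered channel.
func wrapLogging(f http.HandlerFunc) http.HandlerFunc {
	logs := make(chan string, 10000)
	go runLogging(logs)
	return func(w http.ResponseWriter, r *http.Request) {
		start := time.Now()
		f(w, r)
		elapsed := float64(time.Since(start)) / 1000000.0
		logs <- fmt.Sprintf(
			"method=%s path=%s elapsed=%f",
			r.Method, r.URL.Path, elapsed)
	}
}

func hello(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "text/plain")
	fmt.Fprintln(w, "Hello logged world")
}

func main() {
	// Assumed registration; the original file's main is not in this hunk.
	http.HandleFunc("/", wrapLogging(hello))
	http.ListenAndServe(":5000", nil)
}
```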
@@ -10,6 +10,7 @@ import (
 )

 type Auth func(string, string) bool
+type handler http.HandlerFunc

 func testAuth(r *http.Request, auth Auth) bool {
 	header := r.Header.Get("Authorization")
@@ -35,7 +36,7 @@ func requireAuth(w http.ResponseWriter, r *http.Request) {
 	w.Write([]byte("401 Unauthorized\n"))
 }

-func wrapAuth(h http.HandlerFunc, a Auth) http.HandlerFunc {
+func wrapAuth(h handler, a Auth) handler {
 	return func(w http.ResponseWriter, r *http.Request) {
 		if testAuth(r, a) {
 			h(w, r)
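
A self-contained sketch of the new `handler` type and `wrapAuth` signature in use. The file's real `testAuth` parses the Authorization header itself; that body is not shown in the hunk, so this sketch swaps in the standard library's `r.BasicAuth()` and an invented `checkAuth` credential check purely for illustration:

```go
package main

import "net/http"

type Auth func(string, string) bool

type handler http.HandlerFunc

// checkAuth is a hypothetical stand-in for a real credential check.
func checkAuth(user, pass string) bool {
	return user == "admin" && pass == "secret"
}

func requireAuth(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("WWW-Authenticate", `Basic realm="private"`)
	w.WriteHeader(401)
	w.Write([]byte("401 Unauthorized\n"))
}

// wrapAuth guards h behind the supplied Auth function.
func wrapAuth(h handler, a Auth) handler {
	return func(w http.ResponseWriter, r *http.Request) {
		user, pass, ok := r.BasicAuth() // stand-in for the file's testAuth
		if ok && a(user, pass) {
			h(w, r)
		} else {
			requireAuth(w, r)
		}
	}
}

func hello(w http.ResponseWriter, r *http.Request) {
	w.Write([]byte("hello\n"))
}

func main() {
	http.HandleFunc("/", http.HandlerFunc(wrapAuth(hello, checkAuth)))
	http.ListenAndServe(":5000", nil)
}
```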
@@ -49,7 +49,8 @@ func main() {
 	stop := make(chan bool, 1)
 	sig := make(chan os.Signal, 1)

-	server := &http.Server{Handler: http.HandlerFunc(slow)}
+	handler := http.HandlerFunc(slow)
+	server := &http.Server{Handler: handler}
 	fmt.Println("listen at=start")
 	listener, listenErr := net.Listen("tcp", ":5000")
 	if listenErr != nil {
@@ -69,7 +70,9 @@ func main() {
 	}()

 	go func() {
-		signal.Notify(sig, syscall.SIGINT, syscall.SIGTERM)
+		signal.Notify(
+			sig, syscall.SIGINT,
+			syscall.SIGTERM)
 		fmt.Println("trap at=start")
 		<-sig
 		stop <- true
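
The reflowed `signal.Notify` call is only a line-length change; the signal-trapping pattern itself runs as below. This is a minimal sketch of just that pattern, without the HTTP server the full example shuts down:

```go
package main

import (
	"fmt"
	"os"
	"os/signal"
	"syscall"
)

func main() {
	stop := make(chan bool, 1)
	sig := make(chan os.Signal, 1)

	go func() {
		// Relay SIGINT/SIGTERM onto the stop channel, mirroring the
		// reformatted Notify call above.
		signal.Notify(
			sig, syscall.SIGINT,
			syscall.SIGTERM)
		fmt.Println("trap at=start")
		<-sig
		stop <- true
	}()

	<-stop
	fmt.Println("stop at=done")
}
```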
@@ -1,5 +1,5 @@
 /* PDF Formatting */
-@page { margin: 10pt 0pt 10pt 0pt }
+@page { margin: 10px 10px 10px 10px }

 /*--------------------- Layout and Typography ----------------------------*/
 body {
@@ -30,8 +30,9 @@ hr {
 #container {
   position: relative;
 }
-div.chapter {
+table {
   page-break-inside: avoid;
+  width: 775px;
 }
 table td {
   border: 0;
@@ -47,8 +48,8 @@ td.docs {
   text-align: left;
 }
 td.code {
-  max-width: 450px;
-  min-width: 450px;
+  max-width: 400px;
+  min-width: 400px;
   padding: 10px 10px 10px 10px;
   vertical-align: top;
   background: #f0f0f0;
@@ -64,6 +64,12 @@ func readLines(path string) []string {
 	return strings.Split(string(srcBytes), "\n")
 }

+func mustGlob(glob string) []string {
+	paths, err := filepath.Glob(glob)
+	check(err)
+	return paths
+}
+
 func whichLexer(path string) string {
 	if strings.HasSuffix(path, ".go") {
 		return "go"
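
`mustGlob` leans on the tool's existing `check` helper, which this hunk does not show. A runnable sketch, assuming `check` simply panics on a non-nil error:

```go
package main

import (
	"fmt"
	"path/filepath"
)

// check is assumed to panic on error, in the must-style of the tool.
func check(err error) {
	if err != nil {
		panic(err)
	}
}

// mustGlob returns every path matching glob, aborting on error.
func mustGlob(glob string) []string {
	paths, err := filepath.Glob(glob)
	check(err)
	return paths
}

func main() {
	// e.g. list each chapter directory and the source files inside it.
	for _, chapterPath := range mustGlob("./src/0*") {
		fmt.Println(chapterPath, mustGlob(chapterPath+"/*"))
	}
}
```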
@@ -88,102 +94,106 @@ type seg struct {
 	docs, code, docsRendered, codeRendered string
 }

-func main() {
-	ensureCache()
-
-	sourcePaths, err := filepath.Glob("./src/0*/*")
-	check(err)
-
-	fmt.Print(`<!DOCTYPE html>
-<html>
-<head>
-<meta http-eqiv="content-type" content="text/html;charset=utf-8">
-<title>Go by Example</title>
-<link rel=stylesheet href="../style/book.css">
-</head>
-<body>
-<div id="container">
-<table cellspacing="0" cellpadding="0">
-<tbody>`)
-
-	for _, sourcePath := range sourcePaths {
-		lexer := whichLexer(sourcePath)
-		lines := readLines(sourcePath)
-
-		segs := []*seg{}
-		segs = append(segs, &seg{code: "", docs: ""})
-		lastSeen := ""
-		for _, line := range lines {
-			if todoPat.MatchString(line) {
-				continue
-			}
-			headerMatch := headerPat.MatchString(line)
-			docsMatch := docsPat.MatchString(line)
-			emptyMatch := line == ""
-			lastSeg := segs[len(segs)-1]
-			lastHeader := lastSeen == "header"
-			lastDocs := lastSeen == "docs"
-			newHeader := lastSeen != "header" && lastSeg.docs != ""
-			newDocs := lastSeen == "code" || lastSeen == "header"
-			newCode := (lastSeen != "code" && lastSeg.code != "") || lastSeen == "header"
-			if newHeader || newDocs || newCode {
-				debug("NEWSEG")
-			}
-			if headerMatch || (emptyMatch && lastHeader) {
-				trimmed := docsPat.ReplaceAllString(line, "")
-				if newHeader {
-					newSeg := seg{docs: trimmed, code: ""}
-					segs = append(segs, &newSeg)
-				} else {
-					lastSeg.docs = lastSeg.docs + "\n" + trimmed
-				}
-				debug("HEAD")
-				lastSeen = "header"
-			} else if docsMatch || (emptyMatch && lastDocs) {
-				trimmed := docsPat.ReplaceAllString(line, "")
-				if newDocs {
-					debug("NEWSEG")
-					newSeg := seg{docs: trimmed, code: ""}
-					segs = append(segs, &newSeg)
-				} else {
-					lastSeg.docs = lastSeg.docs + "\n" + trimmed
-				}
-				debug("DOCS")
-				lastSeen = "docs"
-			} else {
-				if newCode {
-					newSeg := seg{docs: "", code: line}
-					segs = append(segs, &newSeg)
-				} else {
-					lastSeg.code = lastSeg.code + "\n" + line
-				}
-				debug("CODE")
-				lastSeen = "code"
-			}
-		}
-		segs = append(segs, &seg{code: "", docs: ""})
-
-		for _, seg := range segs {
-			if seg.docs != "" {
-				seg.docsRendered = string(blackfriday.MarkdownCommon([]byte(seg.docs)))
-			}
-			if seg.code != "" {
-				seg.codeRendered = cachedRender("/usr/local/bin/pygmentize", []string{"-l", lexer, "-f", "html"}, seg.code)
-			}
-		}
-
-		for _, seg := range segs {
-			codeClasses := "code"
-			if seg.code == "" {
-				codeClasses = codeClasses + " empty"
-			}
-			fmt.Printf(
-				`<tr>
-<td class=docs>%s</td>
-<td class="%s">%s</td>
-</tr>`, seg.docsRendered, codeClasses, seg.codeRendered)
-		}
-	}
-
-	fmt.Print(`</tbody></table></div></body></html>`)
+func parseSegs(sourcePath string) []*seg {
+	lines := readLines(sourcePath)
+	segs := []*seg{}
+	segs = append(segs, &seg{code: "", docs: ""})
+	lastSeen := ""
+	for _, line := range lines {
+		if todoPat.MatchString(line) {
+			continue
+		}
+		headerMatch := headerPat.MatchString(line)
+		docsMatch := docsPat.MatchString(line)
+		emptyMatch := line == ""
+		lastSeg := segs[len(segs)-1]
+		lastHeader := lastSeen == "header"
+		lastDocs := lastSeen == "docs"
+		newHeader := lastSeen != "header" && lastSeg.docs != ""
+		newDocs := lastSeen == "code" || lastSeen == "header"
+		newCode := (lastSeen != "code" && lastSeg.code != "") || lastSeen == "header"
+		if newHeader || newDocs || newCode {
+			debug("NEWSEG")
+		}
+		if headerMatch || (emptyMatch && lastHeader) {
+			trimmed := docsPat.ReplaceAllString(line, "")
+			if newHeader {
+				newSeg := seg{docs: trimmed, code: ""}
+				segs = append(segs, &newSeg)
+			} else {
+				lastSeg.docs = lastSeg.docs + "\n" + trimmed
+			}
+			debug("HEAD")
+			lastSeen = "header"
+		} else if docsMatch || (emptyMatch && lastDocs) {
+			trimmed := docsPat.ReplaceAllString(line, "")
+			if newDocs {
+				debug("NEWSEG")
+				newSeg := seg{docs: trimmed, code: ""}
+				segs = append(segs, &newSeg)
+			} else {
+				lastSeg.docs = lastSeg.docs + "\n" + trimmed
+			}
+			debug("DOCS")
+			lastSeen = "docs"
+		} else {
+			if newCode {
+				newSeg := seg{docs: "", code: line}
+				segs = append(segs, &newSeg)
+			} else {
+				lastSeg.code = lastSeg.code + "\n" + line
+			}
+			debug("CODE")
+			lastSeen = "code"
+		}
+	}
+	return append(segs, &seg{code: "", docs: ""})
+}
+
+func parseAndRenderSegs(sourcePath string) []*seg {
+	segs := parseSegs(sourcePath)
+	lexer := whichLexer(sourcePath)
+	for _, seg := range segs {
+		if seg.docs != "" {
+			seg.docsRendered = string(blackfriday.MarkdownCommon([]byte(seg.docs)))
+		}
+		if seg.code != "" {
+			seg.codeRendered = cachedRender("/usr/local/bin/pygmentize", []string{"-l", lexer, "-f", "html"}, seg.code)
+		}
+	}
+	return segs
+}
+
+func main() {
+	ensureCache()
+	fmt.Print(`<!DOCTYPE html>
+<html>
+<head>
+<meta http-eqiv="content-type" content="text/html;charset=utf-8">
+<title>Go by Example</title>
+<link rel=stylesheet href="../style/book.css">
+</head>
+<body>
+<div id="container">`)
+	chapterPaths := mustGlob("./src/0*")
+	for _, chapterPath := range chapterPaths {
+		fmt.Print(`<table cellspacing="0" cellpadding="0"><tbody>`)
+		sourcePaths := mustGlob(chapterPath + "/*")
+		for _, sourcePath := range sourcePaths {
+			segs := parseAndRenderSegs(sourcePath)
+			for _, seg := range segs {
+				codeClasses := "code"
+				if seg.code == "" {
+					codeClasses = codeClasses + " empty"
+				}
+				fmt.Printf(
+					`<tr>
+<td class=docs>%s</td>
+<td class="%s">%s</td>
+</tr>`, seg.docsRendered, codeClasses, seg.codeRendered)
+			}
+		}
+		fmt.Print(`</tbody></table>`)
+	}
+	fmt.Print(`</div></body></html>`)
 }
@@ -5,6 +5,7 @@ import (
 	"io/ioutil"
 	"os"
 	"path/filepath"
+	"regexp"
 	"strings"
 )

@@ -20,6 +21,8 @@ func readLines(path string) []string {
 	return strings.Split(string(srcBytes), "\n")
 }

+var todoPat = regexp.MustCompile("\\/\\/ todo: ")
+
 func main() {
 	sourcePaths, err := filepath.Glob("./src/0*/*")
 	check(err)
@@ -28,7 +31,7 @@ func main() {
 		foundLongLine := false
 		lines := readLines(sourcePath)
 		for _, line := range lines {
-			if len(line) > 60 && !foundLongLine {
+			if !foundLongLine && !todoPat.MatchString(line) && (len(line) > 58) {
 				fmt.Println(sourcePath)
 				foundLongLine = true
 				foundLongFile = true
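
The tightened check above exempts `// todo:` lines and lowers the limit to 58 characters. A runnable sketch of that loop in isolation; the `check` and `readLines` bodies are assumptions pieced together from the context lines in the earlier hunks, and per-book bookkeeping such as `foundLongFile` is omitted:

```go
package main

import (
	"fmt"
	"io/ioutil"
	"path/filepath"
	"regexp"
	"strings"
)

var todoPat = regexp.MustCompile("\\/\\/ todo: ")

func check(err error) {
	if err != nil {
		panic(err)
	}
}

func readLines(path string) []string {
	srcBytes, err := ioutil.ReadFile(path)
	check(err)
	return strings.Split(string(srcBytes), "\n")
}

func main() {
	sourcePaths, err := filepath.Glob("./src/0*/*")
	check(err)
	for _, sourcePath := range sourcePaths {
		// Report each file at most once, skipping todo lines and
		// flagging anything longer than 58 characters.
		foundLongLine := false
		for _, line := range readLines(sourcePath) {
			if !foundLongLine && !todoPat.MatchString(line) && (len(line) > 58) {
				fmt.Println(sourcePath)
				foundLongLine = true
			}
		}
	}
}
```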