My solutions to exercises from A Tour of Go
Updated for Go 1.
Errors
type ErrNegativeSqrt float64

func (e ErrNegativeSqrt) Error() string {
    // Convert to float64 first: passing e itself to fmt.Sprint
    // would call Error again and recurse forever.
    return fmt.Sprint("cannot Sqrt negative number: ", float64(e))
}

func Sqrt(f float64) (float64, error) {
    if f < 0.0 {
        return 0, ErrNegativeSqrt(f)
    }
    if f == 0.0 {
        // Avoid a division by zero in the loop below.
        return 0, nil
    }
    // Newton's method: z converges to the square root of f;
    // stop once the value no longer changes.
    z, z1 := 1.0, 0.0
    for z != z1 {
        z1 = z
        z = z - (z*z-f)/(2*z)
    }
    return z, nil
}
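For reference, a minimal skeleton to try this out; the package clause and main are mine, not part of the exercise:

package main

import "fmt"

func main() {
    fmt.Println(Sqrt(2))  // 1.4142135623730951 <nil>
    fmt.Println(Sqrt(-2)) // 0 cannot Sqrt negative number: -2
}

The second call prints the text from Error(), since fmt prints error values via that method.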
HTTP Handlers
package main

import (
    "fmt"
    "log"
    "net/http"
)

type String string

type Struct struct {
    Greeting string
    Punct    string
    Who      string
}

func (s String) ServeHTTP(w http.ResponseWriter, r *http.Request) {
    fmt.Fprint(w, s)
}

func (s Struct) ServeHTTP(w http.ResponseWriter, r *http.Request) {
    fmt.Fprintf(w, "%s%s %s", s.Greeting, s.Punct, s.Who)
}

func main() {
    http.Handle("/string", String("I'm a frayed knot."))
    http.Handle("/struct", &Struct{"Hello", ":", "Gophers!"})
    log.Fatal(http.ListenAndServe("localhost:4000", nil))
}
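With the server running, http://localhost:4000/string and http://localhost:4000/struct serve the two handlers. Registering &Struct{...} works because a pointer's method set includes the value-receiver ServeHTTP, so *Struct satisfies http.Handler too.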
Images
type Image struct {
    dx, dy int
    p      [][]uint8
}

func Pic(dx, dy int) Image {
    img := Image{dx: dx, dy: dy}
    img.p = make([][]uint8, dx)
    for x := 0; x < dx; x++ {
        img.p[x] = make([]uint8, dy)
        for y := 0; y < dy; y++ {
            img.p[x][y] = uint8(x * y)
        }
    }
    return img
}

func (img Image) Bounds() image.Rectangle {
    return image.Rect(0, 0, img.dx, img.dy)
}

func (img Image) ColorModel() color.Model {
    return color.RGBAModel
}

func (img Image) At(x, y int) color.Color {
    i := img.p[x][y]
    return color.RGBA{i, i, 255, 255}
}
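A sketch of the wrapper needed to display this, assuming the Tour's pic package at its current import path golang.org/x/tour/pic (the Go 1-era tour shipped it under a different path):

package main

import (
    "image"
    "image/color"

    "golang.org/x/tour/pic"
)

func main() {
    pic.ShowImage(Pic(256, 65))
}

Image satisfies the image.Image interface because it implements ColorModel, Bounds, and At, which is all pic.ShowImage needs.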
Rot13 Reader
func (r *rot13Reader) Read(p []byte) (n int, err error) {
    n, err = r.r.Read(p)
    // Rotate each letter by 13 places; leave other bytes untouched.
    for i := 0; i < n; i++ {
        c := p[i]
        switch {
        case c >= 'A' && c <= 'M' || c >= 'a' && c <= 'm':
            p[i] = c + 13
        case c >= 'N' && c <= 'Z' || c >= 'n' && c <= 'z':
            p[i] = c - 13
        }
    }
    return
}
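The exercise's skeleton wraps the reader roughly like this (the rot13Reader type itself is given by the exercise):

package main

import (
    "io"
    "os"
    "strings"
)

type rot13Reader struct {
    r io.Reader
}

func main() {
    s := strings.NewReader("Lbh penpxrq gur pbqr!")
    r := rot13Reader{s}
    io.Copy(os.Stdout, &r)
}

which decodes to "You cracked the code!".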
Equivalent Binary Trees
Version 1
// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int) {
    var F func(*tree.Tree)
    F = func(t *tree.Tree) {
        if t != nil {
            F(t.Left)
            ch <- t.Value
            F(t.Right)
        }
    }
    F(t)
    close(ch)
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
    ch1 := make(chan int)
    ch2 := make(chan int)
    go Walk(t1, ch1)
    go Walk(t2, ch2)
    for {
        i1, ok1 := <-ch1
        i2, ok2 := <-ch2
        if !(ok1 || ok2) {
            return true
        }
        if ok1 != ok2 || i1 != i2 {
            if ok1 {
                fmt.Print("Same(): Flushing channel 1:")
                for i1 = range ch1 {
                    // drain so the Walk goroutine can finish
                    fmt.Print(" ", i1)
                }
                fmt.Println()
            }
            if ok2 {
                fmt.Print("Same(): Flushing channel 2:")
                for i2 = range ch2 {
                    // drain so the Walk goroutine can finish
                    fmt.Print(" ", i2)
                }
                fmt.Println()
            }
            return false
        }
    }
}
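A quick check, assuming the Tour's tree package (tree.New(k) builds a randomly structured tree holding the values k, 2k, ..., 10k):

package main

import (
    "fmt"

    "golang.org/x/tour/tree"
)

func main() {
    fmt.Println(Same(tree.New(1), tree.New(1))) // true
    fmt.Println(Same(tree.New(1), tree.New(2))) // false
}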
Version 2
// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int, abort chan bool) {
    aborted := false
    var F func(*tree.Tree)
    F = func(t *tree.Tree) {
        if t != nil && !aborted {
            F(t.Left)
            if !aborted {
                select {
                case <-abort:
                    aborted = true
                case ch <- t.Value:
                }
            }
            F(t.Right)
        }
    }
    F(t)
    close(ch)
    if aborted {
        fmt.Println("Walk aborted")
    }
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
    ch1 := make(chan int)
    ch2 := make(chan int)
    ab1 := make(chan bool)
    ab2 := make(chan bool)
    go Walk(t1, ch1, ab1)
    go Walk(t2, ch2, ab2)
    for {
        i1, ok1 := <-ch1
        i2, ok2 := <-ch2
        if !(ok1 || ok2) {
            return true
        }
        if ok1 != ok2 || i1 != i2 {
            // Close the abort channels instead of sending on them:
            // a walker that has just sent its last value and returned
            // would never receive, so ab1 <- true could block forever.
            if ok1 {
                close(ab1)
            }
            if ok2 {
                close(ab2)
            }
            return false
        }
    }
}
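Compared to version 1, this avoids draining: instead of reading every remaining value just to let the walkers finish, Same signals them to stop at the next node, which is cheaper on large trees.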
Web Crawler
// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
    // DONE: Fetch URLs in parallel.
    // DONE: Don't fetch the same URL twice.

    if depth < 1 {
        return
    }

    type Target struct {
        url   string
        depth int
    }

    // Fetched pages report their links (and completion) here.
    targets := make(chan *Target)

    // Work for the fetcher goroutines.
    queue := make(chan *Target)

    // This marks the end of a series of new targets.
    eot := &Target{depth: -1}

    // Start some fetchers; each fetcher fetches at most 1 url at a time.
    for i := 0; i < 3; i++ {
        go func() {
            for t := range queue {
                body, urls, err := fetcher.Fetch(t.url)
                if err != nil {
                    fmt.Println(err)
                } else {
                    fmt.Printf("found: %s %q\n", t.url, body)
                    if d := t.depth - 1; d > 0 {
                        for _, u := range urls {
                            targets <- &Target{u, d}
                        }
                    }
                }
                targets <- eot
            }
        }()
    }

    seen := make(map[string]bool)

    // Mark the start URL as seen, so a link back to it
    // is not fetched a second time.
    seen[url] = true
    queue <- &Target{url, depth}

    // counter tracks targets that are queued but whose eot
    // has not yet arrived.
    for counter := 1; counter > 0; {
        t := <-targets
        if t.depth < 0 {
            counter--
        } else if !seen[t.url] {
            seen[t.url] = true
            counter++
            // Requeue from a new goroutine: all fetchers may be
            // blocked sending to targets, so a direct send here
            // could deadlock.
            go func() {
                queue <- t
            }()
        }
    }

    // All work is done; closing queue lets the fetchers exit.
    close(queue)
}
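The termination argument: every target put on queue eventually yields exactly one eot on targets, and a fetcher sends a page's new links to targets before its eot, so by the time counter drops to zero every reachable URL within depth has been handled.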