Compare commits

..

No commits in common. "f16bfef10354f1f70bed6c342f9644a171ba8eca" and "2a77e80aec524f139258794a43418cf5cff21889" have entirely different histories.

3 changed files with 1 additions and 173 deletions

View File

@ -36,16 +36,3 @@ select {
// receive when c is blocked // receive when c is blocked
} }
``` ```
## sync.Mutex
Channels are great for communication, but what if we don't need communication?
What if we just want to make sure only one goroutine can access a variable at a time to avoid conflicts? This concept is called `mutual exclusion`, and the conventional name for the data structure that provides it is a `mutex`.
Go's standard library provides mutual exclusion with `sync.Mutex` and its two methods: `Lock` and `Unlock`.
## sync.WaitGroup
https://tutorialedge.net/golang/go-waitgroup-tutorial/

View File

@ -1,39 +0,0 @@
package main
import (
"fmt"
"sync"
"time"
)
// SafeCounter is a string-keyed counter that is safe for concurrent use.
// The zero value is not usable: v must be a non-nil map, so construct it
// as SafeCounter{v: make(map[string]int)}.
type SafeCounter struct {
	mu sync.Mutex     // guards v
	v  map[string]int // counts, keyed by caller-supplied names
}

// Inc increments the counter for the given key.
func (c *SafeCounter) Inc(key string) {
	// Lock so only one goroutine at a time can access the map c.v.
	c.mu.Lock()
	defer c.mu.Unlock()
	c.v[key]++
}

// Value returns the current value of the counter for the given key.
// A key that was never incremented reports 0 (the map's zero value).
func (c *SafeCounter) Value(key string) int {
	// Lock so only one goroutine at a time can access the map c.v.
	c.mu.Lock()
	defer c.mu.Unlock()
	return c.v[key]
}
// main hammers a SafeCounter from many goroutines and prints the total.
func main() {
	counter := SafeCounter{v: make(map[string]int)}
	const workers = 1000
	for i := 0; i < workers; i++ {
		go counter.Inc("somekey")
	}
	// Crude synchronization: assume one second is enough for all
	// goroutines to finish. NOTE(review): a sync.WaitGroup would be
	// deterministic; the sleep is kept here to preserve behavior.
	time.Sleep(time.Second)
	fmt.Println(counter.Value("somekey"))
}

View File

@ -1,120 +0,0 @@
package main
import (
"fmt"
"sync"
)
// Fetcher fetches a single page and reports the links found on it.
// Implementations may be real HTTP clients or canned fakes (see
// fakeFetcher below).
type Fetcher interface {
// Fetch returns the body of URL and
// a slice of URLs found on that page.
Fetch(url string) (body string, urls []string, err error)
}
// SafeCounter bundles the crawler's shared state: a mutex-guarded set
// of visited URLs plus a WaitGroup that tracks in-flight Crawl calls.
// NOTE(review): the name is misleading — it holds a set and a
// WaitGroup, not a counter; renaming would require touching callers.
type SafeCounter struct {
mu sync.Mutex // guards visited
wg sync.WaitGroup // one Add per spawned Crawl goroutine
visited map[string]struct{} // URLs already claimed for fetching
}
var (
// safeCounter is the process-wide crawl state, initialized in main
// and shared by every Crawl goroutine.
safeCounter *SafeCounter
)
// Crawl uses fetcher to recursively crawl pages starting with url, to
// a maximum of depth. URLs are fetched in parallel (one goroutine per
// link) and each URL is fetched at most once, via the shared visited
// set in safeCounter.
//
// Every invocation corresponds to exactly one safeCounter.wg.Add(1)
// performed by its spawner; the single deferred Done balances it on
// every return path. (The original called Done twice on the depth<=0
// path — once explicitly and once via defer — which would panic the
// WaitGroup with a negative counter.)
func Crawl(url string, depth int, fetcher Fetcher) {
	defer safeCounter.wg.Done()
	// Claim the URL under the lock, then release it before fetching:
	// holding the mutex across fetcher.Fetch would serialize the
	// entire crawl. Empty URLs bypass the visited set, matching the
	// original behavior.
	if url != "" {
		safeCounter.mu.Lock()
		_, seen := safeCounter.visited[url]
		if !seen {
			safeCounter.visited[url] = struct{}{}
		}
		safeCounter.mu.Unlock()
		if seen {
			return
		}
	}
	if depth <= 0 {
		return
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		// Account for each child before it starts so wg.Wait in
		// main cannot observe a transient zero count.
		safeCounter.wg.Add(1)
		go Crawl(u, depth-1, fetcher)
	}
}
// main seeds the shared crawl state, launches the root crawl in its
// own goroutine, and blocks until every spawned Crawl has finished.
func main() {
	safeCounter = &SafeCounter{visited: make(map[string]struct{})}
	// Account for the root goroutine before starting it.
	safeCounter.wg.Add(1)
	go Crawl("https://golang.org/", 4, fetcher)
	safeCounter.wg.Wait()
}
// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult
// fakeResult is one canned page: its body text and outgoing links.
type fakeResult struct {
body string // page body returned by Fetch
urls []string // links "found" on the page
}
func (f fakeFetcher) Fetch(url string) (string, []string, error) {
if res, ok := f[url]; ok {
return res.body, res.urls, nil
}
return "", nil, fmt.Errorf("not found: %s", url)
}
// fetcher is a populated fakeFetcher: a tiny four-page slice of
// golang.org whose pages link to one another (and to one page,
// https://golang.org/cmd/, that is deliberately missing).
var fetcher = fakeFetcher{
	"https://golang.org/": &fakeResult{
		body: "The Go Programming Language",
		urls: []string{
			"https://golang.org/pkg/",
			"https://golang.org/cmd/",
		},
	},
	"https://golang.org/pkg/": &fakeResult{
		body: "Packages",
		urls: []string{
			"https://golang.org/",
			"https://golang.org/cmd/",
			"https://golang.org/pkg/fmt/",
			"https://golang.org/pkg/os/",
		},
	},
	"https://golang.org/pkg/fmt/": &fakeResult{
		body: "Package fmt",
		urls: []string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
	"https://golang.org/pkg/os/": &fakeResult{
		body: "Package os",
		urls: []string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
}