From 06dcef8884bba341119e10a460611bd39408447e Mon Sep 17 00:00:00 2001
From: Michael Thomson
Date: Mon, 3 Jun 2024 12:48:21 -0400
Subject: [PATCH] concurrency

---
 concurrency/websitechecker.go      | 25 ++++++++++++++++
 concurrency/websitechecker_test.go | 47 ++++++++++++++++++++++++++++++
 2 files changed, 72 insertions(+)
 create mode 100644 concurrency/websitechecker.go
 create mode 100644 concurrency/websitechecker_test.go

diff --git a/concurrency/websitechecker.go b/concurrency/websitechecker.go
new file mode 100644
index 0000000..495fa3b
--- /dev/null
+++ b/concurrency/websitechecker.go
@@ -0,0 +1,25 @@
+package concurrency
+
+type WebsiteChecker func(string) bool
+type result struct {
+	string
+	bool
+}
+
+func CheckWebsites(wc WebsiteChecker, urls []string) map[string]bool {
+	results := make(map[string]bool)
+	resultChannel := make(chan result)
+
+	for _, url := range urls {
+		go func(u string) {
+			resultChannel <- result{u, wc(u)}
+		}(url)
+	}
+
+	for i := 0; i < len(urls); i++ {
+		r := <-resultChannel
+		results[r.string] = r.bool
+	}
+
+	return results
+}
diff --git a/concurrency/websitechecker_test.go b/concurrency/websitechecker_test.go
new file mode 100644
index 0000000..83fa69d
--- /dev/null
+++ b/concurrency/websitechecker_test.go
@@ -0,0 +1,47 @@
+package concurrency
+
+import (
+	"reflect"
+	"testing"
+	"time"
+)
+
+func mockWebsiteChecker(url string) bool {
+	return url != "waat://furhurterwe.geds"
+}
+
+func TestCheckWebsites(t *testing.T) {
+	websites := []string{
+		"http://google.com",
+		"http://blog.gypsydave5.com",
+		"waat://furhurterwe.geds",
+	}
+
+	want := map[string]bool{
+		"http://google.com":          true,
+		"http://blog.gypsydave5.com": true,
+		"waat://furhurterwe.geds":    false,
+	}
+
+	got := CheckWebsites(mockWebsiteChecker, websites)
+
+	if !reflect.DeepEqual(want, got) {
+		t.Fatalf("wanted %v, got %v", want, got)
+	}
+}
+
+func slowStubWebsiteChecker(_ string) bool {
+	time.Sleep(20 * time.Millisecond)
+	return true
+}
+
+func BenchmarkCheckWebsites(b *testing.B) {
+	urls := make([]string, 100)
+	for i := 0; i < len(urls); i++ {
+		urls[i] = "a url"
+	}
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		CheckWebsites(slowStubWebsiteChecker, urls)
+	}
+}