You can use two wait groups: one for the y goroutines and another for the remaining (x−y) goroutines. For example: `
package main
import (
"fmt"
"sync"
)
// Fan-out/fan-in pattern demo:
// scrape each URL in a list concurrently using a fixed pool of workers
// (10 URLs and 3 workers in this example).
// fanOut distributes the URLs across numOfWorkers goroutines, waits for
// every worker to drain the job queue, and then closes results so the
// fan-in side knows no more values will arrive. Only this sender closes
// the results channel.
func fanOut(results chan string, numOfWorkers int, urls []string, pwg *sync.WaitGroup) {
	// Buffered to len(urls) so the synchronous enqueue below never blocks.
	jobs := make(chan string, len(urls))
	addUrlToChannel(urls, jobs)

	for worker := 0; worker < numOfWorkers; worker++ {
		pwg.Add(1)
		go processWorker(pwg, jobs, results)
	}

	pwg.Wait()
	close(results)
}
// addUrlToChannel enqueues every URL onto urlChannel and closes it when
// done, signalling the workers that the job queue is exhausted.
func addUrlToChannel(urls []string, urlChannel chan string) {
	defer close(urlChannel)
	for i := range urls {
		urlChannel <- urls[i]
	}
}
// processWorker drains urlChannel, scraping each URL and sending the result
// on results. The range loop exits when the producer closes urlChannel.
// Done is deferred so the WaitGroup is released even if a scrape panics;
// the original called Done only on the normal path, which would deadlock
// fanOut's Wait on a panic.
func processWorker(pwg *sync.WaitGroup, urlChannel chan string, results chan string) {
	defer pwg.Done()
	for url := range urlChannel {
		scrapeUrl(url, results)
	}
}
// scrapeUrl simulates scraping a single URL and reports the outcome on
// results. The original format string ended with a dangling ": " separator
// with nothing after it ("Successfully scraped %s: "); the stray separator
// is removed.
func scrapeUrl(url string, results chan<- string) {
	results <- fmt.Sprintf("Successfully scraped %s", url)
}
func fanIn(scrapedUrls chan string, cwg *sync.WaitGroup) {
defer cwg.Done()
for url := range scrapedUrls {
fmt.Println("Scraped url", url)
}
}
// main wires the pipeline together: a single fan-in consumer drains the
// unbuffered results channel while fanOut feeds it through a fixed pool of
// workers; main then waits for the consumer to print everything before
// exiting.
func main() {
	urls := []string{
		"https://www.google.com",
		"https://www.github.com",
		"https://www.stackoverflow.com",
		"https://www.github.com",
		"https://www.stackoverflow.com",
		"https://www.google.com",
		"https://www.github.com",
		"https://www.stackoverflow.com",
		"https://www.google.com",
		"https://www.github.com",
	}

	numOfWorkers := 3
	results := make(chan string)

	var producerWG, consumerWG sync.WaitGroup

	// Start the consumer before producing so sends on the unbuffered
	// results channel never block forever.
	consumerWG.Add(1)
	go fanIn(results, &consumerWG)

	// fanOut blocks until all workers finish, then closes results.
	fanOut(results, numOfWorkers, urls, &producerWG)

	consumerWG.Wait()
	fmt.Println("Application ended")
}
`