
csv - Processing a large CSV file while limiting goroutines


I am trying to find the most efficient way to read a CSV file (~1M rows). Each row contains an HTTP link to an image that I need to download.

This is my current code, using a worker pool:

func worker(queue chan []string, worknumber int, done, ks chan bool) {
    for {
        select {
        case url := <-queue:
            fmt.Println("doing work!", url, "worknumber", worknumber)
            processData(url) // HTTP download
            done <- true
        case <-ks:
            fmt.Println("worker halted, number", worknumber)
            return
        }
    }
}

func main() {
    start := time.Now()
    flag.Parse()
    fmt.Print(strings.Join(flag.Args(), "\n"))
    if *filename == "REQUIRED" {
        return
    }

    csvfile, err := os.Open(*filename)
    if err != nil {
        fmt.Println(err)
        return
    }
    count, _ := lineCounter(csvfile)
    fmt.Printf("Total count: %d\n", count)
    csvfile.Seek(0, 0)

    defer csvfile.Close()

    //bar := pb.StartNew(count)
    bar := progressbar.NewOptions(count)
    bar.RenderBlank()

    reader := csv.NewReader(csvfile)

    // channel for terminating the workers
    killsignal := make(chan bool)

    // queue of jobs
    q := make(chan []string)
    // done channel takes the result of the job
    done := make(chan bool)

    numberOfWorkers := *numChannels
    for i := 0; i < numberOfWorkers; i++ {
        go worker(q, i, done, killsignal)
    }

    i := 0
    for {
        record, err := reader.Read()
        if err == io.EOF {
            break
        } else if err != nil {
            fmt.Println(err)
            return
        }
        i++

        go func(r []string, i int) {
            q <- r
            bar.Add(1)
        }(record, i)
    }

    // a deadlock occurs if c >= numberOfJobs
    for c := 0; c < count; c++ {
        <-done
    }

    fmt.Println("finished")

    // cleaning workers
    close(killsignal)
    time.Sleep(2 * time.Second)

    fmt.Printf("\n%.2fs", time.Since(start).Seconds())
}

My problem is that it spawns a goroutine for every record, so it opens far too many goroutines, uses up all the memory, and crashes.

What is the best way to limit this?

Best Answer

I removed the progress bar, as I did not want to bother with it, but overall this is much closer to what you are looking for.

It does not really handle errors; it just fails fatally on them.

I added context and cancellation support.

You might want to check https://godoc.org/golang.org/x/sync/errgroup#Group.Go
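
Purely to illustrate that suggestion, here is a minimal errgroup sketch; it is not part of the original answer. The downloadImage helper is hypothetical, the URLs are assumed to already be extracted from the CSV records, and Group.SetLimit requires a reasonably recent golang.org/x/sync.

package main

import (
    "context"
    "fmt"

    "golang.org/x/sync/errgroup"
)

// downloadImage is a hypothetical stand-in for the real HTTP download.
func downloadImage(ctx context.Context, url string) error {
    fmt.Println("downloading", url)
    return nil
}

func processAll(ctx context.Context, urls []string, workers int) error {
    g, ctx := errgroup.WithContext(ctx)
    g.SetLimit(workers) // g.Go blocks once this many goroutines are active.
    for _, url := range urls {
        url := url // capture the loop variable (needed before Go 1.22).
        g.Go(func() error {
            return downloadImage(ctx, url)
        })
    }
    return g.Wait() // the first non-nil error cancels ctx for the others.
}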

As general advice, you need to learn Go patterns and how they are used.

It is obvious that you have not put enough effort into it yet, or that you are still learning.

It is by no means the fastest program, but it does the job.

This is only a draft to point you back in a better direction.

package main

import (
    "context"
    "encoding/csv"
    "flag"
    "fmt"
    "io"
    "log"
    "os"
    "os/signal"
    "sync"
    "time"
)

func worker(ctx context.Context, dst chan string, src chan []string) {
    for {
        select {
        case url, ok := <-src: // you must check for readable state of the channel.
            if !ok {
                return
            }
            dst <- fmt.Sprintf("out of %v", url) // do something useful.
        case <-ctx.Done(): // if the context is cancelled, quit.
            return
        }
    }
}

func main() {

    // create a context
    ctx, cancel := context.WithCancel(context.Background())
    defer cancel()
    // that cancels at ctrl+C
    go onSignal(os.Interrupt, cancel)

    // parse command line arguments
    var filename string
    var numberOfWorkers int
    flag.StringVar(&filename, "filename", "", "src file")
    flag.IntVar(&numberOfWorkers, "c", 2, "concurrent workers")
    flag.Parse()

    // check arguments
    if filename == "" {
        log.Fatal("filename required")
    }

    start := time.Now()

    csvfile, err := os.Open(filename)
    if err != nil {
        log.Fatal(err)
    }
    defer csvfile.Close()

    reader := csv.NewReader(csvfile)

    // create the pair of input/output channels for the controller=>workers com.
    src := make(chan []string)
    out := make(chan string)

    // use a waitgroup to manage synchronization
    var wg sync.WaitGroup

    // declare the workers
    for i := 0; i < numberOfWorkers; i++ {
        wg.Add(1)
        go func() {
            defer wg.Done()
            worker(ctx, out, src)
        }()
    }

    // read the csv and write it to src
    go func() {
        for {
            record, err := reader.Read()
            if err == io.EOF {
                break
            } else if err != nil {
                log.Fatal(err)
            }
            src <- record // you might select on ctx.Done().
        }
        close(src) // close src to signal workers that no more jobs are incoming.
    }()

    // wait for worker group to finish and close out
    go func() {
        wg.Wait()  // wait for writers to quit.
        close(out) // when you close(out) it breaks the below loop.
    }()

    // drain the output
    for res := range out {
        fmt.Println(res)
    }

    fmt.Printf("\n%.2fs", time.Since(start).Seconds())
}

func onSignal(s os.Signal, h func()) {
    c := make(chan os.Signal, 1)
    signal.Notify(c, s)
    <-c
    h()
}
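
As the "you might select on ctx.Done()" comment hints, the reading goroutine above can block forever on src <- record if the workers have already quit. Here is a hedged variant of just that goroutine, a sketch rather than the answer author's code, that also honors cancellation:

    go func() {
        defer close(src) // still signals the workers that no more jobs are incoming.
        for {
            record, err := reader.Read()
            if err == io.EOF {
                return
            } else if err != nil {
                log.Fatal(err)
            }
            select {
            case src <- record:
            case <-ctx.Done(): // stop feeding work once ctrl+C cancels the context.
                return
            }
        }
    }()

With either version, invocation looks something like go run main.go -filename images.csv -c 8, where -c bounds the number of concurrent workers.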

On csv - processing a large CSV file while limiting goroutines, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/56325466/
