
dictionary - Trouble with nested maps (dictionaries) in Golang


The Go code:

package main

import (
	"bufio"
	_ "bytes"
	"fmt"
	_ "io"
	"log"
	"os"
	"os/user"
	"path/filepath"
	_ "reflect"
	"regexp"
	"runtime"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
	"github.com/aws/aws-sdk-go/service/s3/s3manager"
)

var (
	LocalDirectory   = "s3logs" // Into this directory
	Lock             sync.Mutex
	totalImpressions int
)

var data = make(map[string]map[string]int)

func main() {
	start := time.Now()
	// Let the scheduler run goroutines in parallel on up to 4 cores.
	// REMEMBER TO ADD LOCKS TO STOP RACE CONDITIONS
	runtime.GOMAXPROCS(4)
	var wg sync.WaitGroup

	var year, month, day = time.Now().Date()
	str_year := strconv.Itoa(year)
	str_month := strconv.Itoa(int(month))
	str_day := strconv.Itoa(day)

	// Zero-pad single-digit months and days to match the S3 key layout.
	if int(month) < 10 {
		str_month = "0" + strconv.Itoa(int(month))
	}
	if day < 10 {
		str_day = "0" + strconv.Itoa(day)
	}

	regBuckets := map[string]string{
		"us-west-1": "pubgears-ca",
		"test":      "test",
	}

	for region, bucket := range regBuckets {
		prefix := fmt.Sprintf("tagserver/logs/AWSLogs/978380792767/elasticloadbalancing/%s/%s/%s/%s/", region, str_year, str_month, str_day)
		wg.Add(1)
		go getLogs(region, bucket, LocalDirectory+bucket, &prefix, &wg)
	}
	wg.Wait()

	//salon/t1/728x90/index
	//totalImpressions := 0
	// var provider = make(map[string]int)
	// for key, value := range data {
	// 	key = strings.TrimSpace(key)
	// 	pro := strings.Split(key, "_")[3]
	//
	// 	if strings.Contains(pro, "pp") == true || (pro == "pulsepoint") || (pro == "cweb") {
	// 		provider["pulsepoint"] += value
	// 	} else if (pro == "openx") || (pro == "openx3") {
	// 		provider["openx"] += value
	// 	} else if key == " " {
	// 		continue
	// 	} else {
	// 		provider[pro] += value
	// 	}
	// 	totalImpressions += value
	// }

	for tag, value := range data {
		for hour, imp := range value {
			fmt.Printf("tag: %s\n hour: %s impressions: %d\n", tag, hour, imp)
		}
	}

	//sl = sl[:len(sl)-1]

	elapsed := time.Since(start)
	fmt.Printf("\nTime took %s\n", elapsed)
}

func getLogs(region string, bucket string, directory string, prefix *string, wg *sync.WaitGroup) {
	defer wg.Done()

	sess := session.New()
	client := s3.New(sess, &aws.Config{Region: aws.String(region)})

	params := &s3.ListObjectsInput{Bucket: &bucket, Prefix: prefix}
	manager := s3manager.NewDownloaderWithClient(client, func(d *s3manager.Downloader) {
		d.PartSize = 5 * 1024 * 1024 // 5MB per part
		d.Concurrency = 4
	})
	d := downloader{bucket: bucket, dir: directory, Downloader: manager}
	client.ListObjectsPages(params, d.eachPage)
}

// downloader object and methods
type downloader struct {
	*s3manager.Downloader
	bucket, dir string
}

func (d *downloader) eachPage(page *s3.ListObjectsOutput, more bool) bool {
	for _, obj := range page.Contents {
		// fmt.Println(obj)
		// return true
		d.downloadToFile(*obj.Key)
	}
	return true
}

func (d *downloader) downloadToFile(key string) {
	// Create the directories in the path (under the user's Desktop)
	user, errs := user.Current()
	if errs != nil {
		panic(errs)
	}
	homedir := user.HomeDir
	desktop := homedir + "/Desktop/" + d.dir
	file := filepath.Join(desktop, key)
	if err := os.MkdirAll(filepath.Dir(file), 0775); err != nil {
		panic(err)
	}

	// Set up the local file
	fd, err := os.Create(file)
	if err != nil {
		panic(err)
	}
	defer fd.Close()

	// Download the file using the AWS SDK
	//fmt.Printf("Downloading s3://%s/%s to %s...\n", d.bucket, key, file)
	params := &s3.GetObjectInput{Bucket: &d.bucket, Key: &key}
	_, e := d.Download(fd, params)
	if e != nil {
		panic(e)
	}

	f, err := os.Open(file)
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	tag := regexp.MustCompile("/([a-zA-Z0-9_]+/{1}[a-zA-Z0-9_]+/{1}[a-zA-Z0-9_]+/{1}[a-zA-Z0-9_]+)")
	date := regexp.MustCompile("T([^:]+)")
	scanner := bufio.NewScanner(f)

	// HAVING TROUBLE HERE
	for scanner.Scan() {
		//dateCollection := make(map[string]int)
		m := tag.FindString(scanner.Text())
		if m != "" {
			// stop races
			Lock.Lock()
			arr := strings.Split(m, "/")
			taghash := strings.Join(arr, "_")
			taghash = strings.TrimLeft(taghash, "_")
			//data[taghash]++
			m = date.FindString(scanner.Text())
			if m != "" {
				hour := m
				data[taghash] = make(map[string]int)
				data[taghash][hour]++
			}
			Lock.Unlock()
		}
	}
	fmt.Println(file)
	os.Remove(file)
	if err := scanner.Err(); err != nil {
		log.Fatal(err)
	}
}

The section I'm having trouble with starts around line 167, shown below:

for scanner.Scan() {
	//dateCollection := make(map[string]int)
	m := tag.FindString(scanner.Text())
	if m != "" {
		// stop races
		Lock.Lock()
		arr := strings.Split(m, "/")
		taghash := strings.Join(arr, "_")
		taghash = strings.TrimLeft(taghash, "_")
		//data[taghash]++
		m = date.FindString(scanner.Text())
		if m != "" {
			hour := m
			data[taghash] = make(map[string]int)
			data[taghash][hour]++
		}
		Lock.Unlock()
	}
}
fmt.Println(file)
os.Remove(file)
if err := scanner.Err(); err != nil {
	log.Fatal(err)
}

The output I get when I print the values in main:

fstvt1_ros_300x600_pp8_1 T07 1

I'm expecting output that looks something like this:

fstvt1_ros_300x600_pp8_1
T00 213434
T01 23432
T02 3324
T03 324
T04 324
T05 0 ...
(this isn't real data)

What I'm trying to do: I have a data map, var data = make(map[string]map[string]int), whose keys are taghash values such as fstvt1_ros_300x600_pp8_1. Each value is another map whose keys are strings (the hours, T01, T02, ...) and whose values are ints, so I want one inner map per taghash. Currently the output only contains the last item from the last iteration, rather than the full collection of hour keys and counts for every taghash. How can I make it add new hour keys (T01, T02, ...) instead of overwriting the data, and increment the count when the taghash and hour already exist?

The line of code I'm currently using:

data[taghash][hour]++

If the taghash and hour already exist, the count should be incremented. If they don't exist, the taghash entry should be created, the new hour key added, and its count incremented.
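
To illustrate, here is a minimal sketch of the structure being described; the tag, hours, and counts are invented, mirroring the sample output above:

package main

import "fmt"

func main() {
	// Desired shape: taghash -> hour -> impression count (values invented).
	data := map[string]map[string]int{
		"fstvt1_ros_300x600_pp8_1": {
			"T00": 213434,
			"T01": 23432,
			"T02": 3324,
		},
	}

	for taghash, hours := range data {
		fmt.Println(taghash)
		for hour, imp := range hours {
			fmt.Printf("  %s %d\n", hour, imp)
		}
	}
}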

Best answer

The bug comes from this line:

data[taghash] = make(map[string]int)

It sets data[taghash] to a freshly allocated map on every matching line, throwing away the counts accumulated so far, which is definitely not what you want. You do still need to allocate the inner map once per taghash, because writing to a nil map panics; so guard the allocation with an existence check instead:

if _, ok := data[taghash]; !ok {
	// data[taghash] does not exist -- create it!
	data[taghash] = make(map[string]int)
}
data[taghash][hour]++

This is roughly equivalent to the following Python:

# Python
inner = data.setdefault(taghash, {})
inner[hour] = inner.get(hour, 0) + 1

or, more explicitly:

if taghash not in data:
    data[taghash] = {}
if hour not in data[taghash]:
    data[taghash][hour] = 1
else:
    data[taghash][hour] += 1
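
Applied to the scanner loop in the question, a minimal self-contained sketch of the corrected logic could look like the following; the record helper and the sample taghash/hour values are invented for illustration, not part of the original program:

package main

import (
	"fmt"
	"sync"
)

var (
	lock sync.Mutex
	data = make(map[string]map[string]int)
)

// record increments the impression count for a taghash/hour pair,
// allocating the inner map only the first time a taghash is seen.
func record(taghash, hour string) {
	lock.Lock()
	defer lock.Unlock()
	if _, ok := data[taghash]; !ok {
		data[taghash] = make(map[string]int)
	}
	data[taghash][hour]++
}

func main() {
	// Invented sample events; in the real program these come from the log lines.
	record("fstvt1_ros_300x600_pp8_1", "T00")
	record("fstvt1_ros_300x600_pp8_1", "T00")
	record("fstvt1_ros_300x600_pp8_1", "T01")

	for taghash, hours := range data {
		fmt.Println(taghash)
		for hour, imp := range hours {
			fmt.Printf("  %s %d\n", hour, imp)
		}
	}
}

Note that once the inner map exists, data[taghash][hour]++ works even for hours that have not been seen yet, because reading a missing key from a map yields the zero value 0.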

Regarding "dictionary - Trouble with nested maps (dictionaries) in Golang", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/34500527/
