gpt4 book ai didi

c# - 在 Azure 缓存中存储大于 8MB 的对象的技术

转载 作者:太空宇宙 更新时间:2023-11-03 21:48:05 25 4
gpt4 key购买 nike

对于如何在 Azure 缓存中存储大于 8MB 的对象,有人有任何建议吗?就我而言,我使用 byte[] 将文件存储在 blob 中。但是,如果我能以某种方式将 byte[] 分割成更小的块(block),分段存入缓存,然后在从缓存中检索后再将其合并,那就更好了。

伪代码:

存储

bs <- split file into byte[] that are smaller than 8MB
s <- string[bs.Length]
foreach bs with index i
s[i] <- name of bs[i]
Add bs[i] to Azure cache using s[i] as key

Add s to cache

检索:

s <- Get list of byte[] names
bs <- byte[s.Length]
foreach s with index i
bs[i] <- Get byte[] using s[i]

outBs <- Join bs to one byte[]
  • 这里是否存在任何性能问题?

  • 还有其他方法可以超越 Azure 缓存吗?

最佳答案

经过几个小时的工作,我发现可以将大文件拆分为较小的文件并将其存储到 Azure 缓存中。我想与您分享代码。

用于分割和连接 byte[] 的类

    /// <summary>
    /// Splits a file's byte[] into chunks small enough for Azure Cache
    /// (which rejects entries larger than 8 MB) and joins them back together.
    /// </summary>
    public class CacheHelper
    {
        // Maximum size in bytes of a single cache entry. Public (was private)
        // because Cache.Put reads CacheHelper.kMaxFileSize to decide whether
        // a file needs splitting — as private the file did not compile.
        public const int kMaxFileSize = 8000000;

        private readonly int fFileSize;    // declared size of the file being cached
        private readonly string fFileName; // base cache key; chunk keys derive from it

        /// <summary>Creates a helper for one file.</summary>
        /// <param name="sizeOfFile">Size of the file in bytes.</param>
        /// <param name="nameOfFile">Base cache key for the file.</param>
        public CacheHelper(int sizeOfFile, string nameOfFile)
        {
            fFileSize = sizeOfFile;
            fFileName = nameOfFile;
        }

        /// <summary>
        /// Splits <paramref name="file"/> into chunks of at most kMaxFileSize bytes.
        /// An empty file yields a single empty chunk (matching the original behavior).
        /// </summary>
        public CachingObjectHolder Split(byte[] file)
        {
            var partialList = new List<byte[]>();
            var offset = 0;
            // Buffer.BlockCopy replaces the original byte-by-byte loop.
            do
            {
                var chunkSize = Math.Min(kMaxFileSize, file.Length - offset);
                var partial = new byte[chunkSize];
                Buffer.BlockCopy(file, offset, partial, 0, chunkSize);
                partialList.Add(partial);
                offset += chunkSize;
            } while (offset < file.Length);

            return new CachingObjectHolder(fFileName, partialList);
        }

        /// <summary>
        /// Concatenates the partial byte arrays of <paramref name="cachingObjectHolder"/>
        /// back into a single byte[] in list order.
        /// </summary>
        public static byte[] Join(CachingObjectHolder cachingObjectHolder)
        {
            var output = new byte[cachingObjectHolder.Partials.Sum(x => x.Length)];
            var offset = 0;
            foreach (var partial in cachingObjectHolder.Partials)
            {
                Buffer.BlockCopy(partial, 0, output, offset, partial.Length);
                offset += partial.Length;
            }

            return output;
        }

        /// <summary>
        /// Returns a zero-filled byte[] of the given size (test helper).
        /// Goes through a temp file like the original, but now deletes it —
        /// the original leaked one temp file per call.
        /// </summary>
        public static byte[] CreateFile(int size)
        {
            var tempFile = Path.GetTempFileName();
            try
            {
                using (var stream = new FileStream(tempFile, FileMode.OpenOrCreate))
                using (var memStream = new MemoryStream())
                {
                    stream.SetLength(size);
                    stream.CopyTo(memStream);
                    return memStream.ToArray();
                }
            }
            finally
            {
                File.Delete(tempFile);
            }
        }
    }

这是与Azure缓存通信的代码

    /// <summary>
    /// Stores byte[] files in Azure Cache, transparently splitting files larger
    /// than CacheHelper.kMaxFileSize into chunks. A dictionary stored under
    /// kFileListName maps each split file name to its ordered (chunkKey, index) list.
    /// </summary>
    public class Cache
    {
        // Cache key of the name -> chunk-list index dictionary.
        private const string kFileListName = "FileList";

        // NOTE(review): the original property returned a NEW DataCacheFactory on
        // every access; factory construction is expensive, so we keep one instance.
        private static DataCacheFactory fDataCacheFactory;
        public static DataCacheFactory DataCacheFactory
        {
            get
            {
                if (fDataCacheFactory == null)
                {
                    fDataCacheFactory = new DataCacheFactory();
                }

                return fDataCacheFactory;
            }
        }

        // Lazily-initialized default cache client. NOTE(review): not thread-safe;
        // acceptable here because the worst case is creating an extra client.
        private static DataCache fDataCache;
        public static DataCache DataCache
        {
            get
            {
                if (fDataCache == null)
                {
                    fDataCache = DataCacheFactory.GetDefaultCache();
                }

                return fDataCache;
            }
        }

        /// <summary>
        /// Retrieves a file. If the file-list index knows the name, its chunks are
        /// fetched and joined; otherwise the name is read as a single cache entry.
        /// </summary>
        public static byte[] Get(string name)
        {
            var dic = GetFileList();
            if (dic == null || !dic.ContainsKey(name))
            {
                // Not a split file — stored (or absent) as one entry.
                return (byte[])DataCache.Get(name);
            }

            var cache = DataCache;
            // Order by chunk index so the join reassembles bytes correctly.
            var input = dic[name]
                .OrderBy(x => x.Item2)
                .Select(t => cache.Get(t.Item1) as byte[])
                .ToList();

            return CacheHelper.Join(new CachingObjectHolder(name, input));
        }

        /// <summary>
        /// Stores a file, splitting it into chunks when it exceeds the per-entry
        /// limit. Uses Put (upsert) semantics throughout — the original used
        /// DataCache.Add and Dictionary.Add, both of which throw when the same
        /// file name is cached twice.
        /// </summary>
        public static void Put(string name, byte[] file)
        {
            if (file.Length > CacheHelper.kMaxFileSize)
            {
                var helper = new CacheHelper(file.Length, name);
                var output = helper.Split(file);
                var dic = GetFileList();
                if (dic == null)
                {
                    dic = new Dictionary<string, List<Tuple<string, int>>>();
                }

                var partials = new List<Tuple<string, int>>();
                for (int i = 0; i < output.CachingObjects.Count; i++)
                {
                    var chunk = output.CachingObjects[i];
                    DataCache.Put(chunk.Name, output.Partials[chunk.Index]);
                    partials.Add(new Tuple<string, int>(chunk.Name, chunk.Index));
                }

                // Indexer (not Add) so re-caching an existing name overwrites.
                dic[name] = partials.OrderBy(x => x.Item2).ToList();
                PutFileList(dic);
            }
            else
            {
                DataCache.Put(name, file);
            }
        }

        /// <summary>
        /// Removes a file: all its chunks plus its index entry when it was split,
        /// otherwise the single entry under the name.
        /// </summary>
        public static void Remove(string name)
        {
            var dic = GetFileList();
            if (dic == null || !dic.ContainsKey(name))
            {
                DataCache.Remove(name);
                return;
            }

            foreach (var chunk in dic[name])
            {
                DataCache.Remove(chunk.Item1);
            }

            dic.Remove(name);
            PutFileList(dic);
        }

        // Persists the file-list index back to the cache (upsert).
        private static void PutFileList(Dictionary<string, List<Tuple<string, int>>> input)
        {
            DataCache.Put(kFileListName, input);
        }

        // Returns the file-list index, or null when none has been stored yet.
        private static Dictionary<string, List<Tuple<string, int>>> GetFileList()
        {
            return DataCache.Get(kFileListName) as Dictionary<string, List<Tuple<string, int>>>;
        }
    }

以及两个用作数据持有者(data holder)的类

    /// <summary>
    /// Holds the chunks of one split file together with the generated
    /// (key, index) descriptors used to store each chunk in the cache.
    /// Chunk keys follow the pattern "{CacheName}_{index}".
    /// </summary>
    public class CachingObjectHolder
    {
        public readonly List<byte[]> Partials;              // the raw chunks, in order
        public readonly List<CachingObject> CachingObjects; // one descriptor per chunk
        public readonly string CacheName;                   // base cache key of the file

        public CachingObjectHolder(string name, List<byte[]> partialList)
        {
            CacheName = name;
            Partials = partialList;
            CachingObjects = new List<CachingObject>();
            CreateCachingObjects();
        }

        // Builds one descriptor per chunk; key is "<CacheName>_<index>".
        private void CreateCachingObjects()
        {
            var index = 0;
            foreach (var _ in Partials)
            {
                CachingObjects.Add(new CachingObject(string.Format("{0}_{1}", CacheName, index), index));
                index++;
            }
        }
    }

/// <summary>
/// Descriptor for one cached chunk: the cache key it is stored under
/// and its position within the original file's chunk sequence.
/// </summary>
public class CachingObject
{
    public int Index { get; set; } // zero-based chunk position
    public string Name { get; set; } // cache key of this chunk

    public CachingObject(string name, int index)
    {
        Name = name;
        Index = index;
    }
}

以下是在云上测试该解决方案的结果。读/写时间以毫秒为单位。 Results from live testing

关于c# - 在 Azure 缓存中存储大于 8MB 的对象的技术,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/15895685/

25 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com