gpt4 book ai didi

c# - System.OutOfMemoryException - 当 Entity Framework 正在查询 Varbinary 类型的过大数据时

转载 作者:行者123 更新时间:2023-11-30 15:28:58 25 4
gpt4 key购买 nike

我正在尝试查询一个包含大文件（1.2 GB）内容的 varbinary 列。

我正在使用 Entity Framework 。见下文:

要测试的数据库

-- Test table: an IDENTITY key plus a VARBINARY(MAX) column that will hold
-- the large (~1 GB) blob used to reproduce the OutOfMemoryException.
CREATE TABLE [dbo].[BIGDATA]
(
[id] [bigint] IDENTITY(1,1) NOT NULL,
[BIGDATA] [varbinary](max) NULL,

CONSTRAINT [PK_BIGDATA] PRIMARY KEY CLUSTERED ([id] ASC)
) ON [PRIMARY]

要测试的数据(任何 1 Gb 的文件)

-- Load an arbitrary large file (~1 GB) into the table in a single statement.
-- OPENROWSET(BULK ..., SINGLE_BLOB) reads the whole file as one varbinary value.
INSERT INTO [dbo].[BIGDATA]([BIGDATA])
VALUES
((SELECT BulkColumn FROM OPENROWSET(BULK N'C:\BigTest.txt', SINGLE_BLOB) AS Document))

Controller 下载文件

/// <summary>
/// Returns the blob stored in row 1 of [dbo].[BIGDATA] as a downloadable file.
/// NOTE(review): materializing a 1+ GB varbinary through EF still loads the
/// entire blob into memory (the cause of the OutOfMemoryException); for very
/// large files prefer the streaming SqlDataReader approach shown below.
/// </summary>
public FileResult Download()
{
    const int idArquivo = 1; // id of the row to fetch

    // Dispose the context deterministically; the byte[] is fully materialized
    // by Single(), so it remains valid after the context is disposed.
    using (var context = new Models.ELOGTESTEEntities())
    {
        var arquivo = (from item in context.BIGDATA
                       where item.id == idArquivo
                       select item).Single();

        // "text/plain" is a MIME type; ".txt" (the original value) is a file
        // extension and is not a valid content type.
        const string mimeType = "text/plain";

        // The original code round-tripped the name through ISO-8859-8 and
        // UTF-8, which only works by accident for ASCII; pass it directly.
        return File(arquivo.BIGDATA1, mimeType, "BigTest.txt");
    }
    // The original try/catch did `throw ex;`, which resets the stack trace.
    // With nothing to handle here, no catch at all is the correct form
    // (use bare `throw;` if a catch is ever reintroduced).
}

我可以使用 Select * From BigData 在 SQL Server 上正常查询。

但是,在 Entity Framework (或使用 ADO 的命令)中我得到了这个异常:

System.OutOfMemoryException

有人知道如何解决这个问题吗?

最佳答案

哇，数据量真大。我认为您真的不需要用 EF 来读取这份数据，改用老牌可靠的 SqlDataReader 即可。

考虑到您受限于 .NET 4.0，我找到了一个可以从超大 varbinary 列流式读取数据的自定义实现。这段代码并非我原创——我只是检查了代码，确认其中没有用到 .NET 4.5 才有的特性：

http://www.syntaxwarriors.com/2013/stream-varbinary-data-to-and-from-mssql-using-csharp/

Mods - 让我知道是否应该将类似的内容复制/粘贴到答案中,因为原始 URL 可能不会持久存在。

编辑:以下是链接中的代码,以防 URL 消失:

用法:

// Open a read-only stream over the [FileContents].[Content] varbinary column
// of the row whose [ID] equals filepost.ID, reusing the EF connection string.
// (DbContext and filepost are project-declared — presumably an EF context and
// a file-post entity; confirm against the caller.)
VarbinaryStream filestream = new VarbinaryStream(
DbContext.Database.Connection.ConnectionString,
"FileContents",
"Content",
"ID",
(int)filepost.ID,
true); // AllowRead = true: the SELECT executes immediately with SequentialAccess

// Do what you want with the stream here (e.g. copy it to the HTTP response in chunks).

代码:

/// <summary>
/// A <see cref="Stream"/> over a single VARBINARY(MAX) cell, identified by table
/// name, binary column, key column and key value.
/// Reads use <see cref="CommandBehavior.SequentialAccess"/> so the blob is pulled
/// from SQL Server in chunks instead of being buffered whole in memory; writes use
/// the T-SQL .WRITE() clause to append chunks server-side.
/// </summary>
public class VarbinaryStream : Stream
{
    private readonly SqlConnection _Connection;

    private readonly string _TableName;
    private readonly string _BinaryColumn;
    private readonly string _KeyColumn;
    private readonly int _KeyValue;

    // Number of bytes written so far; 0 means the next Write replaces the column.
    private long _Offset;

    private readonly SqlDataReader _SQLReader;
    private long _SQLReadPosition;

    private readonly bool _AllowedToRead;

    /// <summary>
    /// Creates the stream with its own connection. When <paramref name="AllowRead"/>
    /// is true the SELECT is executed immediately; a connection or query failure
    /// now propagates to the caller (the original swallowed it, leaving the
    /// stream silently unusable).
    /// </summary>
    public VarbinaryStream(
        string ConnectionString,
        string TableName,
        string BinaryColumn,
        string KeyColumn,
        int KeyValue,
        bool AllowRead = false)
    {
        // Own connection, independent of any ORM context.
        _Connection = new SqlConnection(ConnectionString);

        _TableName = TableName;
        _BinaryColumn = BinaryColumn;
        _KeyColumn = KeyColumn;
        _KeyValue = KeyValue;

        // Only hit the database if the caller intends to read.
        _AllowedToRead = AllowRead;
        if (_AllowedToRead)
        {
            if (_Connection.State != ConnectionState.Open)
                _Connection.Open();

            // Identifiers cannot be parameterized; escape ']' so a hostile
            // table/column name cannot break out of the brackets.
            SqlCommand cmd = new SqlCommand(
                @"SELECT TOP 1 [" + Escape(_BinaryColumn) + @"]
                    FROM [dbo].[" + Escape(_TableName) + @"]
                    WHERE [" + Escape(_KeyColumn) + "] = @id",
                _Connection);

            cmd.Parameters.Add(new SqlParameter("@id", _KeyValue));

            // SequentialAccess is what makes streaming possible: GetBytes reads
            // the blob incrementally instead of buffering the entire row.
            _SQLReader = cmd.ExecuteReader(
                CommandBehavior.SequentialAccess |
                CommandBehavior.SingleResult |
                CommandBehavior.SingleRow |
                CommandBehavior.CloseConnection);

            _SQLReader.Read();
        }
    }

    // Escapes a T-SQL bracketed identifier (']' -> ']]').
    private static string Escape(string identifier)
    {
        return identifier.Replace("]", "]]");
    }

    /// <summary>
    /// Writes <paramref name="count"/> bytes from <paramref name="buffer"/>
    /// starting at <paramref name="index"/>. The first call replaces the column
    /// value; subsequent calls append via .WRITE(). Errors now propagate
    /// (the original swallowed them, silently losing data).
    /// </summary>
    public override void Write(byte[] buffer, int index, int count)
    {
        if (_Connection.State != ConnectionState.Open)
            _Connection.Open();

        // The original sent the whole buffer and ignored index/count for the
        // first chunk; honor the Stream contract by slicing when necessary.
        byte[] chunk = buffer;
        if (index != 0 || count != buffer.Length)
        {
            chunk = new byte[count];
            Buffer.BlockCopy(buffer, index, chunk, 0, count);
        }

        if (_Offset == 0)
        {
            // First chunk: plain UPDATE replaces whatever the column held.
            using (SqlCommand cmd = new SqlCommand(
                @"UPDATE [dbo].[" + Escape(_TableName) + @"]
                    SET [" + Escape(_BinaryColumn) + @"] = @firstchunk
                    WHERE [" + Escape(_KeyColumn) + "] = @id",
                _Connection))
            {
                cmd.Parameters.Add(new SqlParameter("@firstchunk", chunk));
                cmd.Parameters.Add(new SqlParameter("@id", _KeyValue));
                cmd.ExecuteNonQuery();
            }
        }
        else
        {
            // Subsequent chunks: .WRITE(@chunk, NULL, @length) appends server-side.
            using (SqlCommand cmd = new SqlCommand(
                @"UPDATE [dbo].[" + Escape(_TableName) + @"]
                    SET [" + Escape(_BinaryColumn) + @"].WRITE(@chunk, NULL, @length)
                    WHERE [" + Escape(_KeyColumn) + "] = @id",
                _Connection))
            {
                cmd.Parameters.Add(new SqlParameter("@chunk", chunk));
                cmd.Parameters.Add(new SqlParameter("@length", count));
                cmd.Parameters.Add(new SqlParameter("@id", _KeyValue));
                cmd.ExecuteNonQuery();
            }
        }

        _Offset += count;
    }

    /// <summary>
    /// Reads up to <paramref name="count"/> bytes into <paramref name="buffer"/>.
    /// Returns the number of bytes read (0 at end of stream). The original
    /// swallowed exceptions and returned -1, which violates the
    /// <see cref="Stream.Read(byte[], int, int)"/> contract (result must be >= 0).
    /// </summary>
    public override int Read(byte[] buffer, int offset, int count)
    {
        if (!_AllowedToRead)
            throw new NotSupportedException("Stream was opened write-only.");

        // GetBytes with a running data offset streams the blob chunk by chunk.
        long bytesRead = _SQLReader.GetBytes(0, _SQLReadPosition, buffer, offset, count);
        _SQLReadPosition += bytesRead;
        return (int)bytesRead;
    }

    public override bool CanRead
    {
        get { return _AllowedToRead; }
    }

    public override bool CanSeek
    {
        get { return false; }
    }

    public override bool CanWrite
    {
        get { return true; }
    }

    // Writes are executed immediately against the server, so Flush is a no-op
    // (the original threw NotImplementedException, which breaks callers such
    // as Stream.CopyTo / StreamWriter that flush as part of normal operation).
    public override void Flush()
    {
    }

    // Non-seekable stream: NotSupportedException is the documented Stream
    // convention (NotImplementedException signals unfinished code).
    public override long Length
    {
        get { throw new NotSupportedException(); }
    }

    public override long Position
    {
        get { throw new NotSupportedException(); }
        set { throw new NotSupportedException(); }
    }

    public override long Seek(long offset, SeekOrigin origin)
    {
        throw new NotSupportedException();
    }

    public override void SetLength(long value)
    {
        throw new NotSupportedException();
    }

    /// <summary>
    /// Releases the reader and the owned connection. The original class never
    /// disposed either, leaking a pooled connection per stream instance.
    /// </summary>
    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            if (_SQLReader != null)
                _SQLReader.Dispose(); // CommandBehavior.CloseConnection also closes _Connection
            _Connection.Dispose();
        }
        base.Dispose(disposing);
    }
}

关于c# - System.OutOfMemoryException - 当 Entity Framework 正在查询 Varbinary 类型的过大数据时,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/24613201/

25 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com