AES encryption of files larger than 1GB

Keywords: AES, encryption, file, 1GB | Updated: 2023-09-27 18:25:08

I'm writing an application that encrypts five video files. The problem is that it only encrypts four of the five (the ones under 1 GB). On the fifth file, which is larger than 1 GB, a System.OutOfMemoryException is thrown. (I know I've asked about this before, but I made the suggested changes and it still doesn't work; I'm not trying to spam.) Here is my code:

//Encrypts single file
public void EncryptFile(string file, string password)
{
    byte[] bytesToBeEncrypted = File.ReadAllBytes(file);
    byte[] passwordBytes = Encoding.UTF8.GetBytes(password);
    // Hash the password with SHA256
    passwordBytes = SHA256.Create().ComputeHash(passwordBytes);
    byte[] salt = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 };
    FileStream fsCrypt = new FileStream(file + ".enc", FileMode.Create);
    //Set Rijndael symmetric encryption algorithm
    RijndaelManaged AES = new RijndaelManaged();
    AES.KeySize = AES.LegalKeySizes[0].MaxSize;
    AES.BlockSize = AES.LegalBlockSizes[0].MaxSize;
    AES.Padding = PaddingMode.PKCS7;
    //"What it does is repeatedly hash the user password along with the salt." High iteration counts.
    var key = new Rfc2898DeriveBytes(passwordBytes, salt, 1000);
    AES.Key = key.GetBytes(AES.KeySize / 8);
    AES.IV = key.GetBytes(AES.BlockSize / 8);
    AES.Mode = CipherMode.CFB;
    //write the salt to the beginning of the output file, so it could even be random every time
    fsCrypt.Write(salt, 0, salt.Length);
    CryptoStream cs = new CryptoStream(fsCrypt, AES.CreateEncryptor(), CryptoStreamMode.Write);
    FileStream fsIn = new FileStream(file, FileMode.Open);
    //create a buffer so only this amount is allocated in memory, not the whole file
    byte[] buffer = new byte[5048576];
    int read;
    try
    {
        while ((read = fsIn.Read(buffer, 0, buffer.Length)) > 0)
        {
            Application.DoEvents();
            cs.Write(buffer, 0, read);
        }
        fsIn.Close();
        fsIn.Dispose();
    }
    catch (System.OutOfMemoryException ex)
    {
        cs.Flush();
        cs.Dispose();
    }
    finally
    {
        cs.Close();
        fsCrypt.Close();
    }
}

AES encryption of files larger than 1GB

You read the whole file into memory at the start of the method:

byte[] bytesToBeEncrypted = File.ReadAllBytes(file); 

This is what causes the OutOfMemoryException. Here is an idea of how to do it instead: read and encrypt the file in chunks through a single CryptoStream, so only one buffer's worth of data is in memory at a time.

static void EncryptFile(string file, string password)
{
    byte[] passwordBytes = Encoding.UTF8.GetBytes(password);
    byte[] salt = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 };
    // Hash the password with SHA256 before deriving the key
    passwordBytes = SHA256.Create().ComputeHash(passwordBytes);

    RijndaelManaged AES = new RijndaelManaged();
    AES.KeySize = AES.LegalKeySizes[0].MaxSize;
    AES.BlockSize = AES.LegalBlockSizes[0].MaxSize;
    AES.Padding = PaddingMode.PKCS7;
    AES.Mode = CipherMode.CFB;

    //"What it does is repeatedly hash the user password along with the salt." High iteration counts.
    using (var key = new Rfc2898DeriveBytes(passwordBytes, salt, 1000)) // automatically dispose key
    {
        AES.Key = key.GetBytes(AES.KeySize / 8);
        AES.IV = key.GetBytes(AES.BlockSize / 8);
    }

    using (FileStream fsCrypt = new FileStream(file + ".enc", FileMode.Create)) // automatically dispose fsCrypt
    {
        //write the salt to the beginning of the output file, so it could even be random every time
        fsCrypt.Write(salt, 0, salt.Length);

        // a single CryptoStream for the whole file keeps the cipher state and the final padding block consistent
        using (CryptoStream cs = new CryptoStream(fsCrypt, AES.CreateEncryptor(), CryptoStreamMode.Write))
        using (FileStream fsIn = new FileStream(file, FileMode.Open, FileAccess.Read))
        {
            int bytesToRead = 128 * 1024 * 1024; // 128MB chunk; only this much of the file is in memory at once
            byte[] buffer = new byte[bytesToRead];
            int read;
            while ((read = fsIn.Read(buffer, 0, buffer.Length)) > 0) // read the next chunk
            {
                cs.Write(buffer, 0, read); // encrypt it and append it to the output
            }
        } // disposing the CryptoStream flushes the final padded block
    }
}
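
For completeness, decryption is just the mirror image: read the 8-byte salt back from the start of the encrypted file, re-derive the same key and IV, and stream the rest of the file through a CryptoStream in Read mode. The sketch below is not part of the original answer; the output name file + ".dec" and the 4MB buffer are just placeholder choices.

// Minimal decryption sketch (assumes the file layout produced by EncryptFile above).
// Requires: using System.IO; using System.Security.Cryptography; using System.Text;
static void DecryptFile(string file, string password) // "file" is the ".enc" file
{
    byte[] passwordBytes = SHA256.Create().ComputeHash(Encoding.UTF8.GetBytes(password));

    RijndaelManaged AES = new RijndaelManaged();
    AES.KeySize = AES.LegalKeySizes[0].MaxSize;
    AES.BlockSize = AES.LegalBlockSizes[0].MaxSize;
    AES.Padding = PaddingMode.PKCS7;
    AES.Mode = CipherMode.CFB;

    using (FileStream fsIn = new FileStream(file, FileMode.Open, FileAccess.Read))
    {
        byte[] salt = new byte[8];
        fsIn.Read(salt, 0, salt.Length); // the salt was written unencrypted at the start of the file

        using (var key = new Rfc2898DeriveBytes(passwordBytes, salt, 1000))
        {
            AES.Key = key.GetBytes(AES.KeySize / 8);
            AES.IV = key.GetBytes(AES.BlockSize / 8);
        }

        using (CryptoStream cs = new CryptoStream(fsIn, AES.CreateDecryptor(), CryptoStreamMode.Read))
        using (FileStream fsOut = new FileStream(file + ".dec", FileMode.Create)) // placeholder output name
        {
            byte[] buffer = new byte[4 * 1024 * 1024]; // again, only one chunk in memory at a time
            int read;
            while ((read = cs.Read(buffer, 0, buffer.Length)) > 0)
            {
                fsOut.Write(buffer, 0, read);
            }
        }
    }
}

Because the key and IV are derived from the salt stored in the file header, the same password is enough to decrypt, even if you later switch to a random salt per file.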