Web API之基于H5客户端分段上传大文件

2021-05-03 12:26

阅读:477

标签:9.png   position   后台   match   order   cell   sea   使用   cto   

http://www.cnblogs.com/OneDirection/articles/7285739.html

查询很多资料没有遇到合适的,对于MultipartFormDataStreamProvider 也并不是很适合,总会出现问题。于是放弃,使用了传统的InputStream 分段处理完之后做merge处理。

前台分段规则 命名要规范,传递分段总文件数便于判定分段上传完成做merge处理。merge需要根据分段顺序并在merge成功后删除分段文件。

var FilePartName = file.name + ".part_" + PartCount + "." + TotalParts;

 前台JS代码:

技术分享
技术分享
MaxFileSizeMB参数设置分段大小 ,测试例子为5M
chunk = FileChunk.shift() 在分段上传成功之后删除已上传分段
1
HTML代码:
  

Test Multiple Chunk Upload


Upload file

 

后台webapi controller代码:

技术分享
   /// 
        /// Upload sanger camera order 
        /// 
        /// user id and user email are required
        /// 
        [HttpPost]
        [Route("api/customer/GA/SaveSangerCameraOrder")]
        [MimeMultipart]
        public HttpResponseMessage SaveSangerCameraOrder()
        {
            // Each request carries one chunk named "<file>.part_N.X"; we save the
            // chunk and then attempt a merge, which only succeeds once all X
            // chunks have arrived.
            var files = HttpContext.Current.Request.Files;
            // NOTE(review): the article's listing was garbled here ("if (files.Count  0)")
            // and the declaration of 'comment' was lost — reconstructed from context;
            // confirm the form field name against the front-end code.
            var comment = HttpContext.Current.Request.Form["comment"] ?? string.Empty;

            for (int i = 0; i < files.Count; i++)
            {
                var FileDataContent = files[i];
                if (FileDataContent != null && FileDataContent.ContentLength > 0)
                {
                    // take the input stream, and save it to a temp folder using
                    // the original file.part name posted
                    var stream = FileDataContent.InputStream;
                    var fileName = Path.GetFileName(FileDataContent.FileName);
                    var UploadPath = HttpContext.Current.Request.MapPath(fileSavePath);
                    Directory.CreateDirectory(UploadPath);
                    string path = Path.Combine(UploadPath, fileName);

                    if (System.IO.File.Exists(path))
                        System.IO.File.Delete(path);

                    using (var fileStream = System.IO.File.Create(path))
                    {
                        stream.CopyTo(fileStream);
                    }

                    // Once the file part is saved, see if we have enough to merge it
                    Utils UT = new Utils();
                    var isMergedSuccess = UT.MergeFile(path);

                    if (isMergedSuccess)
                    {
                        // generate a txt document for the customer comment; the
                        // timestamp keeps repeated orders from overwriting each other
                        string timeTicket = string.Format("{0:yyyyMMddHHmmss}", System.DateTime.Now);
                        string commentPath = Path.Combine(UploadPath, timeTicket + "OrderComment.txt");
                        // File.WriteAllText disposes the stream even on failure
                        // (the original FileStream leaked if Write threw) and UTF-8
                        // replaces machine-dependent Encoding.Default so non-ASCII
                        // comments survive across servers.
                        System.IO.File.WriteAllText(commentPath, comment, System.Text.Encoding.UTF8);
                    }
                }
            }

            return new HttpResponseMessage()
            {
                StatusCode = System.Net.HttpStatusCode.OK,
                Content = new StringContent(ConstantStringHelper.API_SUCCESS)
            };
        }
技术分享

 

 工具类代码:

记得关闭stream

技术分享
public class Utils
    {
        /// <summary>Full path of the source file to split (used by <see cref="SplitFiles"/>).</summary>
        public string FileName { get; set; }

        /// <summary>Destination folder for the generated part files.</summary>
        public string TempFolder { get; set; }

        /// <summary>Maximum size of one chunk in megabytes; 0 falls back to the 5 MB default.</summary>
        public int MaxFileSizeMB { get; set; }

        /// <summary>Paths of the part files produced by <see cref="SplitFiles"/>.</summary>
        public List<string> FileParts { get; set; }

        public Utils()
        {
            // NOTE(review): the article's listing lost the generic type arguments
            // ("List FileParts") — restored as List<string> from how the list is used.
            FileParts = new List<string>();
        }

        /// <summary>
        /// Looks in the folder of <paramref name="FileName"/> for every chunk of a
        /// split file. Chunks follow the convention "base.part_N.X" (N = chunk
        /// number, X = total chunks). When all X chunks are present they are merged,
        /// in part order, into "base" and the chunk files are deleted.
        /// </summary>
        /// <param name="FileName">Full path of any single chunk file.</param>
        /// <returns>true when the merge completed; false when chunks are still
        /// missing or another merge of the same base file is already running.</returns>
        public bool MergeFile(string FileName)
        {
            bool rslt = false;
            // parse out the different tokens from the filename according to the convention
            string partToken = ".part_";
            string baseFileName = FileName.Substring(0, FileName.IndexOf(partToken));
            string trailingTokens = FileName.Substring(FileName.IndexOf(partToken) + partToken.Length);
            int FileIndex = 0;
            int FileCount = 0;
            int.TryParse(trailingTokens.Substring(0, trailingTokens.IndexOf(".")), out FileIndex);
            int.TryParse(trailingTokens.Substring(trailingTokens.IndexOf(".") + 1), out FileCount);
            // get a list of all file parts in the temp folder
            string Searchpattern = Path.GetFileName(baseFileName) + partToken + "*";
            string[] FilesList = Directory.GetFiles(Path.GetDirectoryName(FileName), Searchpattern);
            // only proceed if we have received all the file chunks
            if (FilesList.Length == FileCount)
            {
                // use the singleton to stop overlapping merges of the same base file
                if (!MergeFileManager.Instance.InUse(baseFileName))
                {
                    MergeFileManager.Instance.AddFile(baseFileName);
                    try
                    {
                        if (File.Exists(baseFileName))
                            File.Delete(baseFileName);
                        // pair each part with its part number so the file can be
                        // rebuilt in the correct order
                        List<SortedFile> MergeList = new List<SortedFile>();
                        foreach (string file in FilesList)
                        {
                            trailingTokens = file.Substring(file.IndexOf(partToken) + partToken.Length);
                            int.TryParse(trailingTokens.Substring(0, trailingTokens.IndexOf(".")), out FileIndex);
                            MergeList.Add(new SortedFile { FileName = file, FileOrder = FileIndex });
                        }
                        // sort by the file-part number to ensure we merge back in the correct order
                        var MergeOrder = MergeList.OrderBy(s => s.FileOrder).ToList();
                        using (FileStream FS = new FileStream(baseFileName, FileMode.Create))
                        {
                            // merge each file chunk back into one contiguous file stream;
                            // 'using' disposes both streams, so the original's explicit
                            // Flush/Close/Dispose calls were redundant
                            foreach (var chunk in MergeOrder)
                            {
                                using (FileStream fileChunk = new FileStream(chunk.FileName, FileMode.Open))
                                {
                                    fileChunk.CopyTo(FS);
                                }
                            }
                        }
                        // merge succeeded: remove the now-redundant chunk files
                        foreach (var item in FilesList)
                        {
                            if (File.Exists(item))
                                File.Delete(item);
                        }
                        rslt = true;
                    }
                    finally
                    {
                        // always unlock the base file, even when the merge throws —
                        // the original skipped this on failure, locking the file forever,
                        // and rethrew with "new Exception(ex.Message)" which destroyed
                        // the stack trace and exception type.
                        MergeFileManager.Instance.RemoveFile(baseFileName);
                    }
                }
            }
            return rslt;
        }

        /// <summary>
        /// Splits <see cref="FileName"/> into sequentially numbered chunks inside
        /// <see cref="TempFolder"/>, named "name.part_N.X" to match what
        /// <see cref="MergeFile"/> expects.
        /// </summary>
        /// <returns>The paths of the chunk files created (also kept in <see cref="FileParts"/>).</returns>
        public List<string> SplitFiles()
        {
            string BaseFileName = Path.GetFileName(FileName);
            // size of each chunk: MaxFileSizeMB when set, otherwise the article's 5 MB
            int BufferChunkSize = (MaxFileSizeMB > 0 ? MaxFileSizeMB : 5) * (1024 * 1024);
            // small read buffer reused for every copy iteration
            const int READBUFFER_SIZE = 1024;
            byte[] FSBuffer = new byte[READBUFFER_SIZE];
            // open the file to read it into chunks
            using (FileStream FS = new FileStream(FileName, FileMode.Open, FileAccess.Read, FileShare.Read))
            {
                // calculate the number of files that will be created (at least one)
                int TotalFileParts = FS.Length < BufferChunkSize
                    ? 1
                    : (int)Math.Ceiling((double)FS.Length / BufferChunkSize);
                int FilePartCount = 0;
                // NOTE(review): the middle of this method was garbled in the article's
                // listing; the loop below is reconstructed to produce the part-naming
                // convention MergeFile parses — confirm against the original post.
                while (FS.Position < FS.Length)
                {
                    string FilePartName = string.Format("{0}.part_{1}.{2}", BaseFileName, FilePartCount + 1, TotalFileParts);
                    FilePartName = Path.Combine(TempFolder, FilePartName);
                    FileParts.Add(FilePartName);
                    using (FileStream FilePart = new FileStream(FilePartName, FileMode.Create))
                    {
                        int bytesRemaining = BufferChunkSize;
                        int bytesRead;
                        // copy at most BufferChunkSize bytes into this chunk
                        while (bytesRemaining > 0 && (bytesRead = FS.Read(FSBuffer, 0, Math.Min(bytesRemaining, READBUFFER_SIZE))) > 0)
                        {
                            FilePart.Write(FSBuffer, 0, bytesRead);
                            bytesRemaining -= bytesRead;
                        }
                    }
                    // file written, loop for next chunk
                    FilePartCount++;
                }
            }
            return FileParts;
        }
    }

    public struct SortedFile
    {
        /// <summary>Part number parsed from the ".part_N.X" file name; used to sort chunks before merging.</summary>
        public int FileOrder { get; set; }

        /// <summary>Full path of the chunk file on disk.</summary>
        public string FileName { get; set; }
    }

    /// <summary>
    /// Process-wide registry of base file names whose merge is in progress, used
    /// to stop two concurrent requests from merging the same file at once.
    /// </summary>
    public class MergeFileManager
    {
        // Guards both lazy singleton creation and list access: upload requests
        // run in parallel on the web server, and the original implementation
        // was not thread-safe.
        private static readonly object SyncRoot = new object();
        private static MergeFileManager instance;
        // NOTE(review): the article's listing lost the generic argument
        // ("List MergeFileList") — restored as List<string>.
        private readonly List<string> MergeFileList;

        private MergeFileManager()
        {
            // The original wrapped this in an empty catch, silently swallowing
            // failures; List construction cannot meaningfully fail, so let any
            // exception surface.
            MergeFileList = new List<string>();
        }

        /// <summary>Lazily-created singleton instance (thread-safe).</summary>
        public static MergeFileManager Instance
        {
            get
            {
                if (instance == null)
                {
                    lock (SyncRoot)
                    {
                        if (instance == null)
                            instance = new MergeFileManager();
                    }
                }
                return instance;
            }
        }

        /// <summary>Marks a base file as having a merge in progress.</summary>
        public void AddFile(string BaseFileName)
        {
            lock (SyncRoot)
            {
                MergeFileList.Add(BaseFileName);
            }
        }

        /// <summary>Returns true when a merge for this base file is already running.</summary>
        public bool InUse(string BaseFileName)
        {
            lock (SyncRoot)
            {
                return MergeFileList.Contains(BaseFileName);
            }
        }

        /// <summary>Releases the in-progress mark; true when it was present.</summary>
        public bool RemoveFile(string BaseFileName)
        {
            lock (SyncRoot)
            {
                return MergeFileList.Remove(BaseFileName);
            }
        }
    }
技术分享

 

技术分享

 

技术分享

 

如该文件分六段上传,则会提示六次success,可以按照自己业务处理。

技术分享

 

Web API之基于H5客户端分段上传大文件

标签:9.png   position   后台   match   order   cell   sea   使用   cto   

原文地址:http://www.cnblogs.com/yibinboy/p/7746973.html


评论


亲,登录后才可以留言!