Note that there is some explanatory text on larger screens.

plurals
  1. PO
    primarykey
    data
    text
    <p>After a few hours of work i found that it is possible to split files that are large into smaller files and store it to Azure cache. I would like to share the code with you.</p> <p>Class for splitting and joining byte[]</p> <pre><code> public class CacheHelper { private const int kMaxFileSize = 8000000; private readonly int fFileSize; private readonly string fFileName; public CacheHelper(int sizeOfFile, string nameOfFile) { fFileSize = sizeOfFile; fFileName = nameOfFile; } public CachingObjectHolder Split(byte[] file) { var remainingSize = file.Length; var partialList = new List&lt;byte[]&gt;(); var partial = new byte[file.Length &gt; kMaxFileSize ? kMaxFileSize : file.Length]; for (int i = 0; i &lt; file.Length; i++) { if (i % kMaxFileSize == 0 &amp;&amp; i &gt; 0) { partialList.Add(partial); partial = new byte[remainingSize &gt; kMaxFileSize ? kMaxFileSize : remainingSize]; } partial[i % kMaxFileSize] = file[i]; remainingSize--; } partialList.Add(partial); return new CachingObjectHolder(fFileName, partialList); } public static byte[] Join(CachingObjectHolder cachingObjectHolder) { var totalByteSize = cachingObjectHolder.Partials.Sum(x =&gt; x.Length); var output = new byte[totalByteSize]; var globalCounter = 0; for (int i = 0; i &lt; cachingObjectHolder.Partials.Count; i++) { for (int j = 0; j &lt; cachingObjectHolder.Partials[i].Length; j++) { output[globalCounter] = cachingObjectHolder.Partials[i][j]; globalCounter++; } } return output; } public static byte[] CreateFile(int size) { var tempFile = Path.GetTempFileName(); using (var stream = new FileStream(tempFile, FileMode.OpenOrCreate)) { using (var memStream = new MemoryStream()) { stream.SetLength(size); stream.CopyTo(memStream); return memStream.ToArray(); } } } } </code></pre> <p>Here is the code for communication with the Azure Cache</p> <pre><code> public class Cache { private const string kFileListName = "FileList"; public static DataCacheFactory DataCacheFactory { get { return new 
DataCacheFactory(); } } private static DataCache fDataCache; public static DataCache DataCache { get { if(fDataCache == null) { fDataCache = DataCacheFactory.GetDefaultCache(); } return fDataCache; } } public static byte[] Get(string name) { var dic = GetFileList(); if (dic == null) { return (byte[])DataCache.Get(name); } if (dic.ContainsKey(name)) { var list = dic[name]; var input = new List&lt;byte[]&gt;(); var cache = DataCache; list = list.OrderBy(x =&gt; x.Item2).ToList(); for (int i = 0; i &lt; list.Count; i++) { input.Add(cache.Get(list[i].Item1) as byte[]); } var holder = new CachingObjectHolder(name, input); return CacheHelper.Join(holder); } else { return (byte[])DataCache.Get(name); } } public static void Put(string name, byte[] file) { if (file.Length &gt; CacheHelper.kMaxFileSize) { var helper = new CacheHelper(file.Length, name); var output = helper.Split(file); var dic = GetFileList(); if (dic == null) { dic = new Dictionary&lt;string, List&lt;Tuple&lt;string, int&gt;&gt;&gt;(); } var partials = new List&lt;Tuple&lt;string, int&gt;&gt;(); for (int i = 0; i &lt; output.CachingObjects.Count; i++) { DataCache.Add(output.CachingObjects[i].Name, output.Partials[output.CachingObjects[i].Index]); partials.Add(new Tuple&lt;string, int&gt;(output.CachingObjects[i].Name, output.CachingObjects[i].Index)); } dic.Add(name, partials.OrderBy(x =&gt; x.Item2).ToList()); PutFileList(dic); } else { DataCache.Add(name, file); } } public static void Remove(string name) { var dic = GetFileList(); if (dic == null) { DataCache.Remove(name); return; } if (dic.ContainsKey(name)) { var list = dic[name]; for (int i = 0; i &lt; list.Count; i++) { DataCache.Remove(list[i].Item1); } dic.Remove(name); PutFileList(dic); } else { DataCache.Remove(name); } } private static void PutFileList(Dictionary&lt;string, List&lt;Tuple&lt;string, int&gt;&gt;&gt; input) { DataCache.Put(kFileListName, input); } private static Dictionary&lt;string, List&lt;Tuple&lt;string, int&gt;&gt;&gt; 
GetFileList() { return DataCache.Get(kFileListName) as Dictionary&lt;string, List&lt;Tuple&lt;string, int&gt;&gt;&gt;; } } </code></pre> <p>Aaaand two classes used for data holders</p> <pre><code> public class CachingObjectHolder { public readonly List&lt;byte[]&gt; Partials; public readonly List&lt;CachingObject&gt; CachingObjects; public readonly string CacheName; public CachingObjectHolder(string name, List&lt;byte[]&gt; partialList) { Partials = partialList; CacheName = name; CachingObjects = new List&lt;CachingObject&gt;(); CreateCachingObjects(); } private void CreateCachingObjects() { for (int i = 0; i &lt; Partials.Count; i++) { CachingObjects.Add(new CachingObject(string.Format("{0}_{1}", CacheName, i), i)); } } } public class CachingObject { public int Index { get; set; } public string Name { get; set; } public CachingObject(string name, int index) { Index = index; Name = name; } } </code></pre> <p>Here are the results from testing the solution on the cloud. The R/W times are in ms. <img src="https://i.stack.imgur.com/Spsq8.png" alt="Results from live testing"></p>
    singulars
    1. This table or related slice is empty.
    plurals
    1. This table or related slice is empty.
    1. This table or related slice is empty.
    1. This table or related slice is empty.
    1. VO
      singulars
      1. This table or related slice is empty.
    2. VO
      singulars
      1. This table or related slice is empty.
 

Querying!

 
Guidance

SQuiL has stopped working due to an internal error.

If you are curious you may find further information in the browser console, which is accessible through the devtools (F12).

Reload