//-----------------------------------------------------------------------
// <copyright file="MemoryCache.cs" company="Microsoft Corporation">
//     Copyright (c) Microsoft Corporation.
// </copyright>
//-----------------------------------------------------------------------
namespace Microsoft.Isam.Esent.Interop
{
    using System;
    using System.Diagnostics;
    using System.Threading;
    /// <summary>
    /// Cache allocated chunks of memory that are needed for very short periods
    /// of time. The memory is not zeroed on allocation.
    /// </summary>
    internal sealed class MemoryCache
    {
        /// <summary>
        /// A zero-length array that should be used whenever we want to return one.
        /// </summary>
        private static readonly byte[] ZeroLengthArray = new byte[0];

        /// <summary>
        /// Default size for newly allocated buffers.
        /// </summary>
        private readonly int bufferSize;

        /// <summary>
        /// Currently cached buffers. A null slot means "empty"; a non-null slot
        /// holds a buffer of exactly <see cref="bufferSize"/> bytes.
        /// </summary>
        private readonly byte[][] cachedBuffers;

        /// <summary>
        /// Initializes a new instance of the <see cref="MemoryCache"/> class.
        /// </summary>
        /// <param name="bufferSize">
        /// The size of the buffers to cache. Must not be negative.
        /// </param>
        /// <param name="maxCachedBuffers">
        /// The maximum number of buffers to cache. Must be positive.
        /// </param>
        /// <exception cref="ArgumentOutOfRangeException">
        /// Thrown when <paramref name="bufferSize"/> is negative or
        /// <paramref name="maxCachedBuffers"/> is not positive.
        /// </exception>
        public MemoryCache(int bufferSize, int maxCachedBuffers)
        {
            // Validate eagerly: a zero-length cache array would otherwise cause a
            // DivideByZeroException inside GetStartingOffset() on the first
            // Allocate()/Free(), and a negative buffer size would fail later
            // when Allocate() tries to create the array.
            if (bufferSize < 0)
            {
                throw new ArgumentOutOfRangeException("bufferSize", bufferSize, "buffer size cannot be negative");
            }

            if (maxCachedBuffers < 1)
            {
                throw new ArgumentOutOfRangeException("maxCachedBuffers", maxCachedBuffers, "must cache at least one buffer");
            }

            this.bufferSize = bufferSize;
            this.cachedBuffers = new byte[maxCachedBuffers][];
        }

        /// <summary>
        /// Gets the size of the buffers that this cache returns.
        /// </summary>
        public int BufferSize
        {
            get
            {
                return this.bufferSize;
            }
        }

        /// <summary>
        /// Creates a new array containing a copy of 'length' bytes of data.
        /// </summary>
        /// <param name="data">The data to copy.</param>
        /// <param name="length">The length of data to copy.</param>
        /// <returns>An array containing the first length bytes of data.</returns>
        public static byte[] Duplicate(byte[] data, int length)
        {
            Debug.Assert(data.Length >= length, "length parameter is too long");
            if (0 == length)
            {
                // Share one immutable zero-length array rather than allocating.
                return ZeroLengthArray;
            }

            var output = new byte[length];
            Buffer.BlockCopy(data, 0, output, 0, length);
            return output;
        }

        /// <summary>
        /// Allocates a chunk of memory. If memory is cached it is returned. If no memory
        /// is cached then it is allocated. Check the size of the returned buffer to determine
        /// how much memory was allocated. The memory is not zeroed.
        /// </summary>
        /// <returns>A new memory buffer.</returns>
        public byte[] Allocate()
        {
            int offset = this.GetStartingOffset();
            for (int i = 0; i < this.cachedBuffers.Length; ++i)
            {
                int index = (i + offset) % this.cachedBuffers.Length;

                // Atomically take ownership of the slot's buffer (if any) so two
                // threads can never hand out the same cached buffer.
                byte[] buffer = Interlocked.Exchange(ref this.cachedBuffers[index], null);
                if (null != buffer)
                {
                    return buffer;
                }
            }

            return new byte[this.bufferSize];
        }

        /// <summary>
        /// Frees an unused buffer. This may be added to the cache.
        /// </summary>
        /// <param name="data">The memory to free. Set to null on return.</param>
        /// <exception cref="ArgumentNullException">Thrown when data is null.</exception>
        /// <exception cref="ArgumentOutOfRangeException">
        /// Thrown when the buffer is not the size this cache manages.
        /// </exception>
        public void Free(ref byte[] data)
        {
            if (null == data)
            {
                throw new ArgumentNullException("data");
            }

            if (data.Length != this.bufferSize)
            {
                throw new ArgumentOutOfRangeException("data", data.Length, "buffer is not correct size for this MemoryCache");    
            }

            int offset = this.GetStartingOffset();

            // The buffers are garbage collected so we don't need to make Free()
            // completely safe. In a multi-threaded situation we may see a null
            // slot and then overwrite a buffer which was just freed into the slot.
            // That will cause us to lose a buffer which could have been placed
            // in a different slot, but in return we can do the Free() without
            // expensive interlocked operations.
            for (int i = 0; i < this.cachedBuffers.Length; ++i)
            {
                int index = (i + offset) % this.cachedBuffers.Length;
                if (null == this.cachedBuffers[index])
                {
                    this.cachedBuffers[index] = data;
                    break;
                }
            }

            // Null the caller's reference so the freed buffer cannot be reused
            // by the caller after it has (possibly) been handed to another thread.
            data = null;
        }

        /// <summary>
        /// Get the offset in the cached buffers array to start allocating or freeing 
        /// buffers to. This is done so that all threads don't start operating on
        /// slot zero, which would increase contention.
        /// </summary>
        /// <returns>The starting offset for Allocate/Free operations.</returns>
        private int GetStartingOffset()
        {
            // Using the current CPU number would be ideal, but there doesn't seem to 
            // be a cheap way to get that information in managed code.
            return LibraryHelpers.GetCurrentManagedThreadId() % this.cachedBuffers.Length;
        }
    }
}