github.com/mono/corert.git
author     dotnet-bot <dotnet-bot@microsoft.com>  2017-01-21 04:23:21 +0300
committer  Jan Kotas <jkotas@microsoft.com>       2017-01-21 04:23:21 +0300
commit     08b78f8c9c8195637ae16a41ec96343f7092d90b (patch)
tree       1f09ccf0a791a5871d36b8596efcd23dbf7ced3a
parent     a8e83020658bc908fe6f829fba828ec04905d440 (diff)
Add sources for Dictionary, BinaryReader and MemoryStream from corefx
-rw-r--r--  src/System.Private.CoreLib/src/System/Collections/Generic/Dictionary.cs            1382
-rw-r--r--  src/System.Private.CoreLib/src/System/Collections/Generic/IDictionaryDebugView.cs    80
-rw-r--r--  src/System.Private.CoreLib/src/System/IO/BinaryReader.cs                            681
-rw-r--r--  src/System.Private.CoreLib/src/System/IO/MemoryStream.cs                            802
4 files changed, 2945 insertions, 0 deletions
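
For orientation only (not part of the patch below): a minimal sketch of how the three public types added in this commit are typically used together. It assumes the standard BinaryWriter API from the BCL; the literal values and names are illustrative.

    using System;
    using System.Collections.Generic;
    using System.IO;

    class Sample
    {
        static void Main()
        {
            // Write an Int32 and a length-prefixed string into an expandable MemoryStream.
            var payload = new MemoryStream();
            using (var writer = new BinaryWriter(payload, System.Text.Encoding.UTF8, leaveOpen: true))
            {
                writer.Write(42);
                writer.Write("hello");   // BinaryWriter prefixes the UTF-8 bytes with a 7-bit encoded length
            }

            // Rewind and read the same values back in the same order.
            payload.Position = 0;
            using (var reader = new BinaryReader(payload))
            {
                var table = new Dictionary<string, int>();
                int number = reader.ReadInt32();
                string key = reader.ReadString();
                table[key] = number;
                Console.WriteLine($"{key} = {table[key]}");   // hello = 42
            }
        }
    }
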
diff --git a/src/System.Private.CoreLib/src/System/Collections/Generic/Dictionary.cs b/src/System.Private.CoreLib/src/System/Collections/Generic/Dictionary.cs
new file mode 100644
index 000000000..8dd5d845b
--- /dev/null
+++ b/src/System.Private.CoreLib/src/System/Collections/Generic/Dictionary.cs
@@ -0,0 +1,1382 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+using System.Collections;
+using System.Diagnostics;
+using System.Diagnostics.Contracts;
+using System.Runtime.CompilerServices;
+using System.Runtime.Serialization;
+
+namespace System.Collections.Generic
+{
+ [DebuggerTypeProxy(typeof(IDictionaryDebugView<,>))]
+ [DebuggerDisplay("Count = {Count}")]
+ [Serializable]
+ public class Dictionary<TKey, TValue> : IDictionary<TKey, TValue>, IDictionary, IReadOnlyDictionary<TKey, TValue>, ISerializable, IDeserializationCallback
+ {
+ private struct Entry
+ {
+ public int hashCode; // Lower 31 bits of hash code, -1 if unused
+ public int next; // Index of next entry, -1 if last
+ public TKey key; // Key of entry
+ public TValue value; // Value of entry
+ }
+
+ private int[] buckets;
+ private Entry[] entries;
+ private int count;
+ private int version;
+
+ private int freeList;
+ private int freeCount;
+ private IEqualityComparer<TKey> comparer;
+ private KeyCollection keys;
+ private ValueCollection values;
+ private object _syncRoot;
+
+ // constants for serialization
+ private const string VersionName = "Version";
+ private const string HashSizeName = "HashSize"; // Must save buckets.Length
+ private const string KeyValuePairsName = "KeyValuePairs";
+ private const string ComparerName = "Comparer";
+
+ public Dictionary() : this(0, null) { }
+
+ public Dictionary(int capacity) : this(capacity, null) { }
+
+ public Dictionary(IEqualityComparer<TKey> comparer) : this(0, comparer) { }
+
+ public Dictionary(int capacity, IEqualityComparer<TKey> comparer)
+ {
+ if (capacity < 0) throw new ArgumentOutOfRangeException(nameof(capacity), capacity, SR.ArgumentOutOfRange_NeedNonNegNum);
+ if (capacity > 0) Initialize(capacity);
+ this.comparer = comparer ?? EqualityComparer<TKey>.Default;
+ }
+
+ public Dictionary(IDictionary<TKey, TValue> dictionary) : this(dictionary, null) { }
+
+ public Dictionary(IDictionary<TKey, TValue> dictionary, IEqualityComparer<TKey> comparer) :
+ this(dictionary != null ? dictionary.Count : 0, comparer)
+ {
+ if (dictionary == null)
+ {
+ throw new ArgumentNullException(nameof(dictionary));
+ }
+
+ // It is likely that the passed-in dictionary is Dictionary<TKey,TValue>. When this is the case,
+ // avoid the enumerator allocation and overhead by looping through the entries array directly.
+ // We only do this when dictionary is Dictionary<TKey,TValue> and not a subclass, to maintain
+ // back-compat with subclasses that may have overridden the enumerator behavior.
+ if (dictionary.GetType() == typeof(Dictionary<TKey, TValue>))
+ {
+ Dictionary<TKey, TValue> d = (Dictionary<TKey, TValue>)dictionary;
+ int count = d.count;
+ Entry[] entries = d.entries;
+ for (int i = 0; i < count; i++)
+ {
+ if (entries[i].hashCode >= 0)
+ {
+ Add(entries[i].key, entries[i].value);
+ }
+ }
+ return;
+ }
+
+ foreach (KeyValuePair<TKey, TValue> pair in dictionary)
+ {
+ Add(pair.Key, pair.Value);
+ }
+ }
+
+ public Dictionary(IEnumerable<KeyValuePair<TKey, TValue>> collection) : this(collection, null) { }
+
+ public Dictionary(IEnumerable<KeyValuePair<TKey, TValue>> collection, IEqualityComparer<TKey> comparer) :
+ this((collection as ICollection<KeyValuePair<TKey, TValue>>)?.Count ?? 0, comparer)
+ {
+ if (collection == null)
+ {
+ throw new ArgumentNullException(nameof(collection));
+ }
+
+ foreach (KeyValuePair<TKey, TValue> pair in collection)
+ {
+ Add(pair.Key, pair.Value);
+ }
+ }
+
+ protected Dictionary(SerializationInfo info, StreamingContext context)
+ {
+ // We can't do anything with the keys and values until the entire graph has been deserialized
+ // and we have a reasonable estimate that GetHashCode is not going to fail. For the time being,
+ // we'll just cache this. The graph is not valid until OnDeserialization has been called.
+ HashHelpers.SerializationInfoTable.Add(this, info);
+ }
+
+ public IEqualityComparer<TKey> Comparer
+ {
+ get
+ {
+ return comparer;
+ }
+ }
+
+ public int Count
+ {
+ get { return count - freeCount; }
+ }
+
+ public KeyCollection Keys
+ {
+ get
+ {
+ Contract.Ensures(Contract.Result<KeyCollection>() != null);
+ if (keys == null) keys = new KeyCollection(this);
+ return keys;
+ }
+ }
+
+ ICollection<TKey> IDictionary<TKey, TValue>.Keys
+ {
+ get
+ {
+ if (keys == null) keys = new KeyCollection(this);
+ return keys;
+ }
+ }
+
+ IEnumerable<TKey> IReadOnlyDictionary<TKey, TValue>.Keys
+ {
+ get
+ {
+ if (keys == null) keys = new KeyCollection(this);
+ return keys;
+ }
+ }
+
+ public ValueCollection Values
+ {
+ get
+ {
+ Contract.Ensures(Contract.Result<ValueCollection>() != null);
+ if (values == null) values = new ValueCollection(this);
+ return values;
+ }
+ }
+
+ ICollection<TValue> IDictionary<TKey, TValue>.Values
+ {
+ get
+ {
+ if (values == null) values = new ValueCollection(this);
+ return values;
+ }
+ }
+
+ IEnumerable<TValue> IReadOnlyDictionary<TKey, TValue>.Values
+ {
+ get
+ {
+ if (values == null) values = new ValueCollection(this);
+ return values;
+ }
+ }
+
+ public TValue this[TKey key]
+ {
+ get
+ {
+ int i = FindEntry(key);
+ if (i >= 0) return entries[i].value;
+ throw new KeyNotFoundException();
+ }
+ set
+ {
+ Insert(key, value, false);
+ }
+ }
+
+ public void Add(TKey key, TValue value)
+ {
+ Insert(key, value, true);
+ }
+
+ void ICollection<KeyValuePair<TKey, TValue>>.Add(KeyValuePair<TKey, TValue> keyValuePair)
+ {
+ Add(keyValuePair.Key, keyValuePair.Value);
+ }
+
+ bool ICollection<KeyValuePair<TKey, TValue>>.Contains(KeyValuePair<TKey, TValue> keyValuePair)
+ {
+ int i = FindEntry(keyValuePair.Key);
+ if (i >= 0 && EqualityComparer<TValue>.Default.Equals(entries[i].value, keyValuePair.Value))
+ {
+ return true;
+ }
+ return false;
+ }
+
+ bool ICollection<KeyValuePair<TKey, TValue>>.Remove(KeyValuePair<TKey, TValue> keyValuePair)
+ {
+ int i = FindEntry(keyValuePair.Key);
+ if (i >= 0 && EqualityComparer<TValue>.Default.Equals(entries[i].value, keyValuePair.Value))
+ {
+ Remove(keyValuePair.Key);
+ return true;
+ }
+ return false;
+ }
+
+ public void Clear()
+ {
+ if (count > 0)
+ {
+ for (int i = 0; i < buckets.Length; i++) buckets[i] = -1;
+ Array.Clear(entries, 0, count);
+ freeList = -1;
+ count = 0;
+ freeCount = 0;
+ version++;
+ }
+ }
+
+ public bool ContainsKey(TKey key)
+ {
+ return FindEntry(key) >= 0;
+ }
+
+ public bool ContainsValue(TValue value)
+ {
+ if (value == null)
+ {
+ for (int i = 0; i < count; i++)
+ {
+ if (entries[i].hashCode >= 0 && entries[i].value == null) return true;
+ }
+ }
+ else
+ {
+ EqualityComparer<TValue> c = EqualityComparer<TValue>.Default;
+ for (int i = 0; i < count; i++)
+ {
+ if (entries[i].hashCode >= 0 && c.Equals(entries[i].value, value)) return true;
+ }
+ }
+ return false;
+ }
+
+ private void CopyTo(KeyValuePair<TKey, TValue>[] array, int index)
+ {
+ if (array == null)
+ {
+ throw new ArgumentNullException(nameof(array));
+ }
+
+ if (index < 0 || index > array.Length)
+ {
+ throw new ArgumentOutOfRangeException(nameof(index), index, SR.ArgumentOutOfRange_Index);
+ }
+
+ if (array.Length - index < Count)
+ {
+ throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall);
+ }
+
+ int count = this.count;
+ Entry[] entries = this.entries;
+ for (int i = 0; i < count; i++)
+ {
+ if (entries[i].hashCode >= 0)
+ {
+ array[index++] = new KeyValuePair<TKey, TValue>(entries[i].key, entries[i].value);
+ }
+ }
+ }
+
+ public Enumerator GetEnumerator()
+ {
+ return new Enumerator(this, Enumerator.KeyValuePair);
+ }
+
+ IEnumerator<KeyValuePair<TKey, TValue>> IEnumerable<KeyValuePair<TKey, TValue>>.GetEnumerator()
+ {
+ return new Enumerator(this, Enumerator.KeyValuePair);
+ }
+
+ public virtual void GetObjectData(SerializationInfo info, StreamingContext context)
+ {
+ if (info == null)
+ {
+ throw new ArgumentNullException(nameof(info));
+ }
+
+ info.AddValue(VersionName, version);
+ info.AddValue(ComparerName, HashHelpers.GetEqualityComparerForSerialization(comparer), typeof(IEqualityComparer<TKey>));
+ info.AddValue(HashSizeName, buckets == null ? 0 : buckets.Length); // This is the length of the bucket array
+
+ if (buckets != null)
+ {
+ var array = new KeyValuePair<TKey, TValue>[Count];
+ CopyTo(array, 0);
+ info.AddValue(KeyValuePairsName, array, typeof(KeyValuePair<TKey, TValue>[]));
+ }
+ }
+
+ private int FindEntry(TKey key)
+ {
+ if (key == null)
+ {
+ throw new ArgumentNullException(nameof(key));
+ }
+
+ if (buckets != null)
+ {
+ int hashCode = comparer.GetHashCode(key) & 0x7FFFFFFF;
+ for (int i = buckets[hashCode % buckets.Length]; i >= 0; i = entries[i].next)
+ {
+ if (entries[i].hashCode == hashCode && comparer.Equals(entries[i].key, key)) return i;
+ }
+ }
+ return -1;
+ }
+
+ private void Initialize(int capacity)
+ {
+ int size = HashHelpers.GetPrime(capacity);
+ buckets = new int[size];
+ for (int i = 0; i < buckets.Length; i++) buckets[i] = -1;
+ entries = new Entry[size];
+ freeList = -1;
+ }
+
+ private void Insert(TKey key, TValue value, bool add)
+ {
+ if (key == null)
+ {
+ throw new ArgumentNullException(nameof(key));
+ }
+
+ if (buckets == null) Initialize(0);
+ int hashCode = comparer.GetHashCode(key) & 0x7FFFFFFF;
+ int targetBucket = hashCode % buckets.Length;
+
+#if FEATURE_RANDOMIZED_STRING_HASHING
+ int collisionCount = 0;
+#endif
+
+ for (int i = buckets[targetBucket]; i >= 0; i = entries[i].next)
+ {
+ if (entries[i].hashCode == hashCode && comparer.Equals(entries[i].key, key))
+ {
+ if (add)
+ {
+ throw new ArgumentException(SR.Format(SR.Argument_AddingDuplicate, key));
+ }
+ entries[i].value = value;
+ version++;
+ return;
+ }
+#if FEATURE_RANDOMIZED_STRING_HASHING
+ collisionCount++;
+#endif
+ }
+
+ int index;
+
+ if (freeCount > 0)
+ {
+ index = freeList;
+ freeList = entries[index].next;
+ freeCount--;
+ }
+ else
+ {
+ if (count == entries.Length)
+ {
+ Resize();
+ targetBucket = hashCode % buckets.Length;
+ }
+ index = count;
+ count++;
+ }
+
+ entries[index].hashCode = hashCode;
+ entries[index].next = buckets[targetBucket];
+ entries[index].key = key;
+ entries[index].value = value;
+ buckets[targetBucket] = index;
+ version++;
+#if FEATURE_RANDOMIZED_STRING_HASHING
+ if (collisionCount > HashHelpers.HashCollisionThreshold && HashHelpers.IsWellKnownEqualityComparer(comparer))
+ {
+ comparer = (IEqualityComparer<TKey>)HashHelpers.GetRandomizedEqualityComparer(comparer);
+ Resize(entries.Length, true);
+ }
+#endif
+ }
+
+ public virtual void OnDeserialization(object sender)
+ {
+ SerializationInfo siInfo;
+ HashHelpers.SerializationInfoTable.TryGetValue(this, out siInfo);
+ if (siInfo == null)
+ {
+ // We can return immediately if this function is called twice.
+ // Note we remove the serialization info from the table at the end of this method.
+ return;
+ }
+
+ int realVersion = siInfo.GetInt32(VersionName);
+ int hashsize = siInfo.GetInt32(HashSizeName);
+ comparer = (IEqualityComparer<TKey>)siInfo.GetValue(ComparerName, typeof(IEqualityComparer<TKey>));
+
+ if (hashsize != 0)
+ {
+ buckets = new int[hashsize];
+ for (int i = 0; i < buckets.Length; i++) buckets[i] = -1;
+ entries = new Entry[hashsize];
+ freeList = -1;
+
+ KeyValuePair<TKey, TValue>[] array =
+ (KeyValuePair<TKey, TValue>[])siInfo.GetValue(KeyValuePairsName, typeof(KeyValuePair<TKey, TValue>[]));
+
+ if (array == null)
+ {
+ throw new SerializationException(SR.Serialization_MissingKeys);
+ }
+
+ for (int i = 0; i < array.Length; i++)
+ {
+ if (array[i].Key == null)
+ {
+ throw new SerializationException(SR.Serialization_NullKey);
+ }
+ Insert(array[i].Key, array[i].Value, true);
+ }
+ }
+ else
+ {
+ buckets = null;
+ }
+
+ version = realVersion;
+ HashHelpers.SerializationInfoTable.Remove(this);
+ }
+
+ private void Resize()
+ {
+ Resize(HashHelpers.ExpandPrime(count), false);
+ }
+
+ private void Resize(int newSize, bool forceNewHashCodes)
+ {
+ Debug.Assert(newSize >= entries.Length);
+ int[] newBuckets = new int[newSize];
+ for (int i = 0; i < newBuckets.Length; i++) newBuckets[i] = -1;
+
+ Entry[] newEntries = new Entry[newSize];
+ Array.Copy(entries, 0, newEntries, 0, count);
+
+ if (forceNewHashCodes)
+ {
+ for (int i = 0; i < count; i++)
+ {
+ if (newEntries[i].hashCode != -1)
+ {
+ newEntries[i].hashCode = (comparer.GetHashCode(newEntries[i].key) & 0x7FFFFFFF);
+ }
+ }
+ }
+
+ for (int i = 0; i < count; i++)
+ {
+ if (newEntries[i].hashCode >= 0)
+ {
+ int bucket = newEntries[i].hashCode % newSize;
+ newEntries[i].next = newBuckets[bucket];
+ newBuckets[bucket] = i;
+ }
+ }
+
+ buckets = newBuckets;
+ entries = newEntries;
+ }
+
+ public bool Remove(TKey key)
+ {
+ if (key == null)
+ {
+ throw new ArgumentNullException(nameof(key));
+ }
+
+ if (buckets != null)
+ {
+ int hashCode = comparer.GetHashCode(key) & 0x7FFFFFFF;
+ int bucket = hashCode % buckets.Length;
+ int last = -1;
+ for (int i = buckets[bucket]; i >= 0; last = i, i = entries[i].next)
+ {
+ if (entries[i].hashCode == hashCode && comparer.Equals(entries[i].key, key))
+ {
+ if (last < 0)
+ {
+ buckets[bucket] = entries[i].next;
+ }
+ else
+ {
+ entries[last].next = entries[i].next;
+ }
+ entries[i].hashCode = -1;
+ entries[i].next = freeList;
+ entries[i].key = default(TKey);
+ entries[i].value = default(TValue);
+ freeList = i;
+ freeCount++;
+ version++;
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+ public bool TryGetValue(TKey key, out TValue value)
+ {
+ int i = FindEntry(key);
+ if (i >= 0)
+ {
+ value = entries[i].value;
+ return true;
+ }
+ value = default(TValue);
+ return false;
+ }
+
+ // This is a convenience method for the internal callers that were converted from using Hashtable.
+ // Many were combining the "key doesn't exist" and "key exists but has a null value (for reference types)" checks.
+ // This allows them to continue getting that behavior with minimal code delta. This is basically
+ // TryGetValue without the out param
+ internal TValue GetValueOrDefault(TKey key)
+ {
+ int i = FindEntry(key);
+ if (i >= 0)
+ {
+ return entries[i].value;
+ }
+ return default(TValue);
+ }
+
+ bool ICollection<KeyValuePair<TKey, TValue>>.IsReadOnly
+ {
+ get { return false; }
+ }
+
+ void ICollection<KeyValuePair<TKey, TValue>>.CopyTo(KeyValuePair<TKey, TValue>[] array, int index)
+ {
+ CopyTo(array, index);
+ }
+
+ void ICollection.CopyTo(Array array, int index)
+ {
+ if (array == null)
+ {
+ throw new ArgumentNullException(nameof(array));
+ }
+
+ if (array.Rank != 1)
+ {
+ throw new ArgumentException(SR.Arg_RankMultiDimNotSupported, nameof(array));
+ }
+
+ if (array.GetLowerBound(0) != 0)
+ {
+ throw new ArgumentException(SR.Arg_NonZeroLowerBound, nameof(array));
+ }
+
+ if (index < 0 || index > array.Length)
+ {
+ throw new ArgumentOutOfRangeException(nameof(index), index, SR.ArgumentOutOfRange_Index);
+ }
+
+ if (array.Length - index < Count)
+ {
+ throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall);
+ }
+
+ KeyValuePair<TKey, TValue>[] pairs = array as KeyValuePair<TKey, TValue>[];
+ if (pairs != null)
+ {
+ CopyTo(pairs, index);
+ }
+ else if (array is DictionaryEntry[])
+ {
+ DictionaryEntry[] dictEntryArray = array as DictionaryEntry[];
+ Entry[] entries = this.entries;
+
+ for (int i = 0; i < count; i++)
+ {
+ if (entries[i].hashCode >= 0)
+ {
+ dictEntryArray[index++] = new DictionaryEntry(entries[i].key, entries[i].value);
+ }
+ }
+ }
+ else
+ {
+ object[] objects = array as object[];
+ if (objects == null)
+ {
+ throw new ArgumentException(SR.Argument_InvalidArrayType, nameof(array));
+ }
+
+ try
+ {
+ int count = this.count;
+ Entry[] entries = this.entries;
+ for (int i = 0; i < count; i++)
+ {
+ if (entries[i].hashCode >= 0)
+ {
+ objects[index++] = new KeyValuePair<TKey, TValue>(entries[i].key, entries[i].value);
+ }
+ }
+ }
+ catch (ArrayTypeMismatchException)
+ {
+ throw new ArgumentException(SR.Argument_InvalidArrayType, nameof(array));
+ }
+ }
+ }
+
+ IEnumerator IEnumerable.GetEnumerator()
+ {
+ return new Enumerator(this, Enumerator.KeyValuePair);
+ }
+
+ bool ICollection.IsSynchronized
+ {
+ get { return false; }
+ }
+
+ object ICollection.SyncRoot
+ {
+ get
+ {
+ if (_syncRoot == null)
+ {
+ System.Threading.Interlocked.CompareExchange<object>(ref _syncRoot, new object(), null);
+ }
+ return _syncRoot;
+ }
+ }
+
+ bool IDictionary.IsFixedSize
+ {
+ get { return false; }
+ }
+
+ bool IDictionary.IsReadOnly
+ {
+ get { return false; }
+ }
+
+ ICollection IDictionary.Keys
+ {
+ get { return (ICollection)Keys; }
+ }
+
+ ICollection IDictionary.Values
+ {
+ get { return (ICollection)Values; }
+ }
+
+ object IDictionary.this[object key]
+ {
+ get
+ {
+ if (IsCompatibleKey(key))
+ {
+ int i = FindEntry((TKey)key);
+ if (i >= 0)
+ {
+ return entries[i].value;
+ }
+ }
+ return null;
+ }
+ set
+ {
+ if (key == null)
+ {
+ throw new ArgumentNullException(nameof(key));
+ }
+ if (value == null && !(default(TValue) == null))
+ throw new ArgumentNullException(nameof(value));
+
+ try
+ {
+ TKey tempKey = (TKey)key;
+ try
+ {
+ this[tempKey] = (TValue)value;
+ }
+ catch (InvalidCastException)
+ {
+ throw new ArgumentException(SR.Format(SR.Arg_WrongType, value, typeof(TValue)), nameof(value));
+ }
+ }
+ catch (InvalidCastException)
+ {
+ throw new ArgumentException(SR.Format(SR.Arg_WrongType, key, typeof(TKey)), nameof(key));
+ }
+ }
+ }
+
+ private static bool IsCompatibleKey(object key)
+ {
+ if (key == null)
+ {
+ throw new ArgumentNullException(nameof(key));
+ }
+ return (key is TKey);
+ }
+
+ void IDictionary.Add(object key, object value)
+ {
+ if (key == null)
+ {
+ throw new ArgumentNullException(nameof(key));
+ }
+
+ if (value == null && !(default(TValue) == null))
+ throw new ArgumentNullException(nameof(value));
+
+ try
+ {
+ TKey tempKey = (TKey)key;
+
+ try
+ {
+ Add(tempKey, (TValue)value);
+ }
+ catch (InvalidCastException)
+ {
+ throw new ArgumentException(SR.Format(SR.Arg_WrongType, value, typeof(TValue)), nameof(value));
+ }
+ }
+ catch (InvalidCastException)
+ {
+ throw new ArgumentException(SR.Format(SR.Arg_WrongType, key, typeof(TKey)), nameof(key));
+ }
+ }
+
+ bool IDictionary.Contains(object key)
+ {
+ if (IsCompatibleKey(key))
+ {
+ return ContainsKey((TKey)key);
+ }
+
+ return false;
+ }
+
+ IDictionaryEnumerator IDictionary.GetEnumerator()
+ {
+ return new Enumerator(this, Enumerator.DictEntry);
+ }
+
+ void IDictionary.Remove(object key)
+ {
+ if (IsCompatibleKey(key))
+ {
+ Remove((TKey)key);
+ }
+ }
+
+ [Serializable]
+ public struct Enumerator : IEnumerator<KeyValuePair<TKey, TValue>>,
+ IDictionaryEnumerator
+ {
+ private Dictionary<TKey, TValue> dictionary;
+ private int version;
+ private int index;
+ private KeyValuePair<TKey, TValue> current;
+ private int getEnumeratorRetType; // What should Enumerator.Current return?
+
+ internal const int DictEntry = 1;
+ internal const int KeyValuePair = 2;
+
+ internal Enumerator(Dictionary<TKey, TValue> dictionary, int getEnumeratorRetType)
+ {
+ this.dictionary = dictionary;
+ version = dictionary.version;
+ index = 0;
+ this.getEnumeratorRetType = getEnumeratorRetType;
+ current = new KeyValuePair<TKey, TValue>();
+ }
+
+ public bool MoveNext()
+ {
+ if (version != dictionary.version)
+ {
+ throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
+ }
+
+ // Use unsigned comparison since we set index to dictionary.count+1 when the enumeration ends.
+ // dictionary.count+1 could be negative if dictionary.count is Int32.MaxValue
+ while ((uint)index < (uint)dictionary.count)
+ {
+ if (dictionary.entries[index].hashCode >= 0)
+ {
+ current = new KeyValuePair<TKey, TValue>(dictionary.entries[index].key, dictionary.entries[index].value);
+ index++;
+ return true;
+ }
+ index++;
+ }
+
+ index = dictionary.count + 1;
+ current = new KeyValuePair<TKey, TValue>();
+ return false;
+ }
+
+ public KeyValuePair<TKey, TValue> Current
+ {
+ get { return current; }
+ }
+
+ public void Dispose()
+ {
+ }
+
+ object IEnumerator.Current
+ {
+ get
+ {
+ if (index == 0 || (index == dictionary.count + 1))
+ {
+ throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
+ }
+
+ if (getEnumeratorRetType == DictEntry)
+ {
+ return new System.Collections.DictionaryEntry(current.Key, current.Value);
+ }
+ else
+ {
+ return new KeyValuePair<TKey, TValue>(current.Key, current.Value);
+ }
+ }
+ }
+
+ void IEnumerator.Reset()
+ {
+ if (version != dictionary.version)
+ {
+ throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
+ }
+
+ index = 0;
+ current = new KeyValuePair<TKey, TValue>();
+ }
+
+ DictionaryEntry IDictionaryEnumerator.Entry
+ {
+ get
+ {
+ if (index == 0 || (index == dictionary.count + 1))
+ {
+ throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
+ }
+
+ return new DictionaryEntry(current.Key, current.Value);
+ }
+ }
+
+ object IDictionaryEnumerator.Key
+ {
+ get
+ {
+ if (index == 0 || (index == dictionary.count + 1))
+ {
+ throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
+ }
+
+ return current.Key;
+ }
+ }
+
+ object IDictionaryEnumerator.Value
+ {
+ get
+ {
+ if (index == 0 || (index == dictionary.count + 1))
+ {
+ throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
+ }
+
+ return current.Value;
+ }
+ }
+ }
+
+ [DebuggerTypeProxy(typeof(DictionaryKeyCollectionDebugView<,>))]
+ [DebuggerDisplay("Count = {Count}")]
+ [Serializable]
+ public sealed class KeyCollection : ICollection<TKey>, ICollection, IReadOnlyCollection<TKey>
+ {
+ private Dictionary<TKey, TValue> dictionary;
+
+ public KeyCollection(Dictionary<TKey, TValue> dictionary)
+ {
+ if (dictionary == null)
+ {
+ throw new ArgumentNullException(nameof(dictionary));
+ }
+ this.dictionary = dictionary;
+ }
+
+ public Enumerator GetEnumerator()
+ {
+ return new Enumerator(dictionary);
+ }
+
+ public void CopyTo(TKey[] array, int index)
+ {
+ if (array == null)
+ {
+ throw new ArgumentNullException(nameof(array));
+ }
+
+ if (index < 0 || index > array.Length)
+ {
+ throw new ArgumentOutOfRangeException(nameof(index), index, SR.ArgumentOutOfRange_Index);
+ }
+
+ if (array.Length - index < dictionary.Count)
+ {
+ throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall);
+ }
+
+ int count = dictionary.count;
+ Entry[] entries = dictionary.entries;
+
+ for (int i = 0; i < count; i++)
+ {
+ if (entries[i].hashCode >= 0) array[index++] = entries[i].key;
+ }
+ }
+
+ public int Count
+ {
+ get { return dictionary.Count; }
+ }
+
+ bool ICollection<TKey>.IsReadOnly
+ {
+ get { return true; }
+ }
+
+ void ICollection<TKey>.Add(TKey item)
+ {
+ throw new NotSupportedException(SR.NotSupported_KeyCollectionSet);
+ }
+
+ void ICollection<TKey>.Clear()
+ {
+ throw new NotSupportedException(SR.NotSupported_KeyCollectionSet);
+ }
+
+ bool ICollection<TKey>.Contains(TKey item)
+ {
+ return dictionary.ContainsKey(item);
+ }
+
+ bool ICollection<TKey>.Remove(TKey item)
+ {
+ throw new NotSupportedException(SR.NotSupported_KeyCollectionSet);
+ }
+
+ IEnumerator<TKey> IEnumerable<TKey>.GetEnumerator()
+ {
+ return new Enumerator(dictionary);
+ }
+
+ IEnumerator IEnumerable.GetEnumerator()
+ {
+ return new Enumerator(dictionary);
+ }
+
+ void ICollection.CopyTo(Array array, int index)
+ {
+ if (array == null)
+ {
+ throw new ArgumentNullException(nameof(array));
+ }
+
+ if (array.Rank != 1)
+ {
+ throw new ArgumentException(SR.Arg_RankMultiDimNotSupported, nameof(array));
+ }
+
+ if (array.GetLowerBound(0) != 0)
+ {
+ throw new ArgumentException(SR.Arg_NonZeroLowerBound, nameof(array));
+ }
+
+ if (index < 0 || index > array.Length)
+ {
+ throw new ArgumentOutOfRangeException(nameof(index), index, SR.ArgumentOutOfRange_Index);
+ }
+
+ if (array.Length - index < dictionary.Count)
+ {
+ throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall);
+ }
+
+ TKey[] keys = array as TKey[];
+ if (keys != null)
+ {
+ CopyTo(keys, index);
+ }
+ else
+ {
+ object[] objects = array as object[];
+ if (objects == null)
+ {
+ throw new ArgumentException(SR.Argument_InvalidArrayType, nameof(array));
+ }
+
+ int count = dictionary.count;
+ Entry[] entries = dictionary.entries;
+
+ try
+ {
+ for (int i = 0; i < count; i++)
+ {
+ if (entries[i].hashCode >= 0) objects[index++] = entries[i].key;
+ }
+ }
+ catch (ArrayTypeMismatchException)
+ {
+ throw new ArgumentException(SR.Argument_InvalidArrayType, nameof(array));
+ }
+ }
+ }
+
+ bool ICollection.IsSynchronized
+ {
+ get { return false; }
+ }
+
+ object ICollection.SyncRoot
+ {
+ get { return ((ICollection)dictionary).SyncRoot; }
+ }
+
+ [Serializable]
+ public struct Enumerator : IEnumerator<TKey>, System.Collections.IEnumerator
+ {
+ private Dictionary<TKey, TValue> dictionary;
+ private int index;
+ private int version;
+ private TKey currentKey;
+
+ internal Enumerator(Dictionary<TKey, TValue> dictionary)
+ {
+ this.dictionary = dictionary;
+ version = dictionary.version;
+ index = 0;
+ currentKey = default(TKey);
+ }
+
+ public void Dispose()
+ {
+ }
+
+ public bool MoveNext()
+ {
+ if (version != dictionary.version)
+ {
+ throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
+ }
+
+ while ((uint)index < (uint)dictionary.count)
+ {
+ if (dictionary.entries[index].hashCode >= 0)
+ {
+ currentKey = dictionary.entries[index].key;
+ index++;
+ return true;
+ }
+ index++;
+ }
+
+ index = dictionary.count + 1;
+ currentKey = default(TKey);
+ return false;
+ }
+
+ public TKey Current
+ {
+ get
+ {
+ return currentKey;
+ }
+ }
+
+ object System.Collections.IEnumerator.Current
+ {
+ get
+ {
+ if (index == 0 || (index == dictionary.count + 1))
+ {
+ throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
+ }
+
+ return currentKey;
+ }
+ }
+
+ void System.Collections.IEnumerator.Reset()
+ {
+ if (version != dictionary.version)
+ {
+ throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
+ }
+
+ index = 0;
+ currentKey = default(TKey);
+ }
+ }
+ }
+
+ [DebuggerTypeProxy(typeof(DictionaryValueCollectionDebugView<,>))]
+ [DebuggerDisplay("Count = {Count}")]
+ [Serializable]
+ public sealed class ValueCollection : ICollection<TValue>, ICollection, IReadOnlyCollection<TValue>
+ {
+ private Dictionary<TKey, TValue> dictionary;
+
+ public ValueCollection(Dictionary<TKey, TValue> dictionary)
+ {
+ if (dictionary == null)
+ {
+ throw new ArgumentNullException(nameof(dictionary));
+ }
+ this.dictionary = dictionary;
+ }
+
+ public Enumerator GetEnumerator()
+ {
+ return new Enumerator(dictionary);
+ }
+
+ public void CopyTo(TValue[] array, int index)
+ {
+ if (array == null)
+ {
+ throw new ArgumentNullException(nameof(array));
+ }
+
+ if (index < 0 || index > array.Length)
+ {
+ throw new ArgumentOutOfRangeException(nameof(index), index, SR.ArgumentOutOfRange_Index);
+ }
+
+ if (array.Length - index < dictionary.Count)
+ {
+ throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall);
+ }
+
+ int count = dictionary.count;
+ Entry[] entries = dictionary.entries;
+
+ for (int i = 0; i < count; i++)
+ {
+ if (entries[i].hashCode >= 0) array[index++] = entries[i].value;
+ }
+ }
+
+ public int Count
+ {
+ get { return dictionary.Count; }
+ }
+
+ bool ICollection<TValue>.IsReadOnly
+ {
+ get { return true; }
+ }
+
+ void ICollection<TValue>.Add(TValue item)
+ {
+ throw new NotSupportedException(SR.NotSupported_ValueCollectionSet);
+ }
+
+ bool ICollection<TValue>.Remove(TValue item)
+ {
+ throw new NotSupportedException(SR.NotSupported_ValueCollectionSet);
+ }
+
+ void ICollection<TValue>.Clear()
+ {
+ throw new NotSupportedException(SR.NotSupported_ValueCollectionSet);
+ }
+
+ bool ICollection<TValue>.Contains(TValue item)
+ {
+ return dictionary.ContainsValue(item);
+ }
+
+ IEnumerator<TValue> IEnumerable<TValue>.GetEnumerator()
+ {
+ return new Enumerator(dictionary);
+ }
+
+ IEnumerator IEnumerable.GetEnumerator()
+ {
+ return new Enumerator(dictionary);
+ }
+
+ void ICollection.CopyTo(Array array, int index)
+ {
+ if (array == null)
+ {
+ throw new ArgumentNullException(nameof(array));
+ }
+
+ if (array.Rank != 1)
+ {
+ throw new ArgumentException(SR.Arg_RankMultiDimNotSupported, nameof(array));
+ }
+
+ if (array.GetLowerBound(0) != 0)
+ {
+ throw new ArgumentException(SR.Arg_NonZeroLowerBound, nameof(array));
+ }
+
+ if (index < 0 || index > array.Length)
+ {
+ throw new ArgumentOutOfRangeException(nameof(index), index, SR.ArgumentOutOfRange_Index);
+ }
+
+ if (array.Length - index < dictionary.Count)
+ throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall);
+
+ TValue[] values = array as TValue[];
+ if (values != null)
+ {
+ CopyTo(values, index);
+ }
+ else
+ {
+ object[] objects = array as object[];
+ if (objects == null)
+ {
+ throw new ArgumentException(SR.Argument_InvalidArrayType, nameof(array));
+ }
+
+ int count = dictionary.count;
+ Entry[] entries = dictionary.entries;
+
+ try
+ {
+ for (int i = 0; i < count; i++)
+ {
+ if (entries[i].hashCode >= 0) objects[index++] = entries[i].value;
+ }
+ }
+ catch (ArrayTypeMismatchException)
+ {
+ throw new ArgumentException(SR.Argument_InvalidArrayType, nameof(array));
+ }
+ }
+ }
+
+ bool ICollection.IsSynchronized
+ {
+ get { return false; }
+ }
+
+ object ICollection.SyncRoot
+ {
+ get { return ((ICollection)dictionary).SyncRoot; }
+ }
+
+ [Serializable]
+ public struct Enumerator : IEnumerator<TValue>, System.Collections.IEnumerator
+ {
+ private Dictionary<TKey, TValue> dictionary;
+ private int index;
+ private int version;
+ private TValue currentValue;
+
+ internal Enumerator(Dictionary<TKey, TValue> dictionary)
+ {
+ this.dictionary = dictionary;
+ version = dictionary.version;
+ index = 0;
+ currentValue = default(TValue);
+ }
+
+ public void Dispose()
+ {
+ }
+
+ public bool MoveNext()
+ {
+ if (version != dictionary.version)
+ {
+ throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
+ }
+
+ while ((uint)index < (uint)dictionary.count)
+ {
+ if (dictionary.entries[index].hashCode >= 0)
+ {
+ currentValue = dictionary.entries[index].value;
+ index++;
+ return true;
+ }
+ index++;
+ }
+ index = dictionary.count + 1;
+ currentValue = default(TValue);
+ return false;
+ }
+
+ public TValue Current
+ {
+ get
+ {
+ return currentValue;
+ }
+ }
+
+ object System.Collections.IEnumerator.Current
+ {
+ get
+ {
+ if (index == 0 || (index == dictionary.count + 1))
+ {
+ throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
+ }
+
+ return currentValue;
+ }
+ }
+
+ void System.Collections.IEnumerator.Reset()
+ {
+ if (version != dictionary.version)
+ {
+ throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
+ }
+ index = 0;
+ currentValue = default(TValue);
+ }
+ }
+ }
+ }
+}
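
A brief usage note on the surface added above (an illustrative sketch, not part of the commit): TryGetValue is the non-throwing counterpart of the indexer, and foreach binds to the public struct Enumerator, so enumerating a Dictionary<TKey, TValue> allocates no enumerator object.

    using System;
    using System.Collections.Generic;

    class DictionarySample
    {
        static void Main()
        {
            // A custom comparer is passed straight through to the hashing and equality checks above.
            var ages = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase)
            {
                ["Ada"] = 36,
                ["Grace"] = 45
            };

            // The indexer throws KeyNotFoundException for a missing key; TryGetValue does not.
            if (ages.TryGetValue("ADA", out int age))
                Console.WriteLine(age);   // 36, found via the case-insensitive comparer

            // foreach uses the struct Enumerator returned by the public GetEnumerator().
            foreach (KeyValuePair<string, int> pair in ages)
                Console.WriteLine($"{pair.Key}: {pair.Value}");
        }
    }
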
diff --git a/src/System.Private.CoreLib/src/System/Collections/Generic/IDictionaryDebugView.cs b/src/System.Private.CoreLib/src/System/Collections/Generic/IDictionaryDebugView.cs
new file mode 100644
index 000000000..4721642fe
--- /dev/null
+++ b/src/System.Private.CoreLib/src/System/Collections/Generic/IDictionaryDebugView.cs
@@ -0,0 +1,80 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System.Diagnostics;
+
+namespace System.Collections.Generic
+{
+ internal sealed class IDictionaryDebugView<K, V>
+ {
+ private readonly IDictionary<K, V> _dict;
+
+ public IDictionaryDebugView(IDictionary<K, V> dictionary)
+ {
+ if (dictionary == null)
+ throw new ArgumentNullException(nameof(dictionary));
+
+ _dict = dictionary;
+ }
+
+ [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
+ public KeyValuePair<K, V>[] Items
+ {
+ get
+ {
+ KeyValuePair<K, V>[] items = new KeyValuePair<K, V>[_dict.Count];
+ _dict.CopyTo(items, 0);
+ return items;
+ }
+ }
+ }
+
+ internal sealed class DictionaryKeyCollectionDebugView<TKey, TValue>
+ {
+ private readonly ICollection<TKey> _collection;
+
+ public DictionaryKeyCollectionDebugView(ICollection<TKey> collection)
+ {
+ if (collection == null)
+ throw new ArgumentNullException(nameof(collection));
+
+ _collection = collection;
+ }
+
+ [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
+ public TKey[] Items
+ {
+ get
+ {
+ TKey[] items = new TKey[_collection.Count];
+ _collection.CopyTo(items, 0);
+ return items;
+ }
+ }
+ }
+
+ internal sealed class DictionaryValueCollectionDebugView<TKey, TValue>
+ {
+ private readonly ICollection<TValue> _collection;
+
+ public DictionaryValueCollectionDebugView(ICollection<TValue> collection)
+ {
+ if (collection == null)
+ throw new ArgumentNullException(nameof(collection));
+
+ _collection = collection;
+ }
+
+ [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
+ public TValue[] Items
+ {
+ get
+ {
+ TValue[] items = new TValue[_collection.Count];
+ _collection.CopyTo(items, 0);
+ return items;
+ }
+ }
+ }
+}
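
The debug views above have no effect outside a debugging session: the debugger instantiates them and shows their Items property in place of the collection's private fields. A sketch of the same pattern on a hypothetical collection (the NameSet type and its proxy are assumptions, not part of this commit):

    using System.Collections.Generic;
    using System.Diagnostics;

    [DebuggerTypeProxy(typeof(NameSetDebugView))]
    [DebuggerDisplay("Count = {Count}")]
    internal sealed class NameSet
    {
        private readonly List<string> _names = new List<string>();
        public int Count => _names.Count;
        public void Add(string name) => _names.Add(name);
        internal string[] Snapshot() => _names.ToArray();
    }

    // Plays the same role for NameSet that IDictionaryDebugView<,> plays for Dictionary<TKey, TValue>.
    internal sealed class NameSetDebugView
    {
        private readonly NameSet _set;

        public NameSetDebugView(NameSet set) => _set = set;

        [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
        public string[] Items => _set.Snapshot();
    }
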
diff --git a/src/System.Private.CoreLib/src/System/IO/BinaryReader.cs b/src/System.Private.CoreLib/src/System/IO/BinaryReader.cs
new file mode 100644
index 000000000..4ca3a7648
--- /dev/null
+++ b/src/System.Private.CoreLib/src/System/IO/BinaryReader.cs
@@ -0,0 +1,681 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System.Text;
+using System.Diagnostics;
+
+namespace System.IO
+{
+ public class BinaryReader : IDisposable
+ {
+ private const int MaxCharBytesSize = 128;
+
+ private Stream _stream;
+ private byte[] _buffer;
+ private Decoder _decoder;
+ private byte[] _charBytes;
+ private char[] _singleChar;
+ private char[] _charBuffer;
+ private int _maxCharsSize; // From MaxCharBytesSize & Encoding
+
+ // Performance optimization for Read() w/ Unicode. Speeds us up by ~40%
+ private bool _2BytesPerChar;
+ private bool _isMemoryStream; // "do we sit on MemoryStream?" for Read/ReadInt32 perf
+ private bool _leaveOpen;
+
+ public BinaryReader(Stream input) : this(input, Encoding.UTF8, false)
+ {
+ }
+
+ public BinaryReader(Stream input, Encoding encoding) : this(input, encoding, false)
+ {
+ }
+
+ public BinaryReader(Stream input, Encoding encoding, bool leaveOpen)
+ {
+ if (input == null)
+ {
+ throw new ArgumentNullException(nameof(input));
+ }
+ if (encoding == null)
+ {
+ throw new ArgumentNullException(nameof(encoding));
+ }
+ if (!input.CanRead)
+ {
+ throw new ArgumentException(SR.Argument_StreamNotReadable);
+ }
+
+ _stream = input;
+ _decoder = encoding.GetDecoder();
+ _maxCharsSize = encoding.GetMaxCharCount(MaxCharBytesSize);
+ int minBufferSize = encoding.GetMaxByteCount(1); // max bytes per one char
+ if (minBufferSize < 16)
+ {
+ minBufferSize = 16;
+ }
+
+ _buffer = new byte[minBufferSize];
+ // _charBuffer and _charBytes will be left null.
+
+ // For Encodings that always use 2 bytes per char (or more),
+ // special case them here to make Read() & Peek() faster.
+ _2BytesPerChar = encoding is UnicodeEncoding;
+ // check if BinaryReader is based on MemoryStream, and keep this for its life
+ // we cannot use "as" operator, since derived classes are not allowed
+ _isMemoryStream = (_stream.GetType() == typeof(MemoryStream));
+ _leaveOpen = leaveOpen;
+
+ Debug.Assert(_decoder != null, "[BinaryReader.ctor]_decoder!=null");
+ }
+
+ public virtual Stream BaseStream
+ {
+ get
+ {
+ return _stream;
+ }
+ }
+
+ protected virtual void Dispose(bool disposing)
+ {
+ if (disposing)
+ {
+ Stream copyOfStream = _stream;
+ _stream = null;
+ if (copyOfStream != null && !_leaveOpen)
+ {
+ copyOfStream.Dispose();
+ }
+ }
+ _stream = null;
+ _buffer = null;
+ _decoder = null;
+ _charBytes = null;
+ _singleChar = null;
+ _charBuffer = null;
+ }
+
+ public void Dispose()
+ {
+ Dispose(true);
+ }
+
+ /// <remarks>
+ /// Override Dispose(bool) instead of Close(). This API exists for compatibility purposes.
+ /// </remarks>
+ public virtual void Close()
+ {
+ Dispose(true);
+ }
+
+ public virtual int PeekChar()
+ {
+ if (_stream == null)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_FileClosed);
+ }
+
+ if (!_stream.CanSeek)
+ {
+ return -1;
+ }
+
+ long origPos = _stream.Position;
+ int ch = Read();
+ _stream.Position = origPos;
+ return ch;
+ }
+
+ public virtual int Read()
+ {
+ if (_stream == null)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_FileClosed);
+ }
+ return InternalReadOneChar();
+ }
+
+ public virtual bool ReadBoolean()
+ {
+ FillBuffer(1);
+ return (_buffer[0] != 0);
+ }
+
+ public virtual byte ReadByte()
+ {
+ // Inlined to avoid some method call overhead with FillBuffer.
+ if (_stream == null)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_FileClosed);
+ }
+
+ int b = _stream.ReadByte();
+ if (b == -1)
+ {
+ throw new EndOfStreamException(SR.IO_EOF_ReadBeyondEOF);
+ }
+
+ return (byte)b;
+ }
+
+ [CLSCompliant(false)]
+ public virtual sbyte ReadSByte()
+ {
+ FillBuffer(1);
+ return (sbyte)(_buffer[0]);
+ }
+
+ public virtual char ReadChar()
+ {
+ int value = Read();
+ if (value == -1)
+ {
+ throw new EndOfStreamException(SR.IO_EOF_ReadBeyondEOF);
+ }
+ return (char)value;
+ }
+
+ public virtual short ReadInt16()
+ {
+ FillBuffer(2);
+ return (short)(_buffer[0] | _buffer[1] << 8);
+ }
+
+ [CLSCompliant(false)]
+ public virtual ushort ReadUInt16()
+ {
+ FillBuffer(2);
+ return (ushort)(_buffer[0] | _buffer[1] << 8);
+ }
+
+ public virtual int ReadInt32()
+ {
+ if (_isMemoryStream)
+ {
+ if (_stream == null)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_FileClosed);
+ }
+
+ // read directly from MemoryStream buffer
+ MemoryStream mStream = _stream as MemoryStream;
+ Debug.Assert(mStream != null, "_stream as MemoryStream != null");
+
+ return mStream.InternalReadInt32();
+ }
+ else
+ {
+ FillBuffer(4);
+ return (int)(_buffer[0] | _buffer[1] << 8 | _buffer[2] << 16 | _buffer[3] << 24);
+ }
+ }
+
+ [CLSCompliant(false)]
+ public virtual uint ReadUInt32()
+ {
+ FillBuffer(4);
+ return (uint)(_buffer[0] | _buffer[1] << 8 | _buffer[2] << 16 | _buffer[3] << 24);
+ }
+
+ public virtual long ReadInt64()
+ {
+ FillBuffer(8);
+ uint lo = (uint)(_buffer[0] | _buffer[1] << 8 |
+ _buffer[2] << 16 | _buffer[3] << 24);
+ uint hi = (uint)(_buffer[4] | _buffer[5] << 8 |
+ _buffer[6] << 16 | _buffer[7] << 24);
+ return (long)((ulong)hi) << 32 | lo;
+ }
+
+ [CLSCompliant(false)]
+ public virtual ulong ReadUInt64()
+ {
+ FillBuffer(8);
+ uint lo = (uint)(_buffer[0] | _buffer[1] << 8 |
+ _buffer[2] << 16 | _buffer[3] << 24);
+ uint hi = (uint)(_buffer[4] | _buffer[5] << 8 |
+ _buffer[6] << 16 | _buffer[7] << 24);
+ return ((ulong)hi) << 32 | lo;
+ }
+
+ public virtual unsafe float ReadSingle()
+ {
+ FillBuffer(4);
+ uint tmpBuffer = (uint)(_buffer[0] | _buffer[1] << 8 | _buffer[2] << 16 | _buffer[3] << 24);
+ return *((float*)&tmpBuffer);
+ }
+
+ public virtual unsafe double ReadDouble()
+ {
+ FillBuffer(8);
+ uint lo = (uint)(_buffer[0] | _buffer[1] << 8 |
+ _buffer[2] << 16 | _buffer[3] << 24);
+ uint hi = (uint)(_buffer[4] | _buffer[5] << 8 |
+ _buffer[6] << 16 | _buffer[7] << 24);
+
+ ulong tmpBuffer = ((ulong)hi) << 32 | lo;
+ return *((double*)&tmpBuffer);
+ }
+
+ public virtual decimal ReadDecimal()
+ {
+ FillBuffer(16);
+ int[] ints = new int[4];
+ Buffer.BlockCopy(_buffer, 0, ints, 0, 16);
+ try
+ {
+ return new decimal(ints);
+ }
+ catch (ArgumentException e)
+ {
+ // ReadDecimal cannot leak out ArgumentException
+ throw new IOException(SR.Arg_DecBitCtor, e);
+ }
+ }
+
+ public virtual string ReadString()
+ {
+ if (_stream == null)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_FileClosed);
+ }
+
+ int currPos = 0;
+ int n;
+ int stringLength;
+ int readLength;
+ int charsRead;
+
+ // Length of the string in bytes, not chars
+ stringLength = Read7BitEncodedInt();
+ if (stringLength < 0)
+ {
+ throw new IOException(SR.Format(SR.IO_IO_InvalidStringLen_Len, stringLength));
+ }
+
+ if (stringLength == 0)
+ {
+ return string.Empty;
+ }
+
+ if (_charBytes == null)
+ {
+ _charBytes = new byte[MaxCharBytesSize];
+ }
+
+ if (_charBuffer == null)
+ {
+ _charBuffer = new char[_maxCharsSize];
+ }
+
+ StringBuilder sb = null;
+ do
+ {
+ readLength = ((stringLength - currPos) > MaxCharBytesSize) ? MaxCharBytesSize : (stringLength - currPos);
+
+ n = _stream.Read(_charBytes, 0, readLength);
+ if (n == 0)
+ {
+ throw new EndOfStreamException(SR.IO_EOF_ReadBeyondEOF);
+ }
+
+ charsRead = _decoder.GetChars(_charBytes, 0, n, _charBuffer, 0);
+
+ if (currPos == 0 && n == stringLength)
+ {
+ return new string(_charBuffer, 0, charsRead);
+ }
+
+ if (sb == null)
+ {
+ sb = StringBuilderCache.Acquire(stringLength); // Actual string length in chars may be smaller.
+ }
+
+ sb.Append(_charBuffer, 0, charsRead);
+ currPos += n;
+ } while (currPos < stringLength);
+
+ return StringBuilderCache.GetStringAndRelease(sb);
+ }
+
+ public virtual int Read(char[] buffer, int index, int count)
+ {
+ if (buffer == null)
+ {
+ throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
+ }
+ if (index < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum);
+ }
+ if (count < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
+ }
+ if (buffer.Length - index < count)
+ {
+ throw new ArgumentException(SR.Argument_InvalidOffLen);
+ }
+ if (_stream == null)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_FileClosed);
+ }
+
+ // SafeCritical: index and count have already been verified to be a valid range for the buffer
+ return InternalReadChars(buffer, index, count);
+ }
+
+ private int InternalReadChars(char[] buffer, int index, int count)
+ {
+ Debug.Assert(buffer != null);
+ Debug.Assert(index >= 0 && count >= 0);
+ Debug.Assert(_stream != null);
+
+ int numBytes = 0;
+ int charsRemaining = count;
+
+ if (_charBytes == null)
+ {
+ _charBytes = new byte[MaxCharBytesSize];
+ }
+
+ while (charsRemaining > 0)
+ {
+ int charsRead = 0;
+ // We really want to know what the minimum number of bytes per char
+ // is for our encoding. Otherwise for UnicodeEncoding we'd have to
+ // do ~1+log(n) reads to read n characters.
+ numBytes = charsRemaining;
+
+ if (_2BytesPerChar)
+ {
+ numBytes <<= 1;
+ }
+ if (numBytes > MaxCharBytesSize)
+ {
+ numBytes = MaxCharBytesSize;
+ }
+
+ int position = 0;
+ byte[] byteBuffer = null;
+ if (_isMemoryStream)
+ {
+ MemoryStream mStream = _stream as MemoryStream;
+ Debug.Assert(mStream != null, "_stream as MemoryStream != null");
+
+ position = mStream.InternalGetPosition();
+ numBytes = mStream.InternalEmulateRead(numBytes);
+ byteBuffer = mStream.InternalGetBuffer();
+ }
+ else
+ {
+ numBytes = _stream.Read(_charBytes, 0, numBytes);
+ byteBuffer = _charBytes;
+ }
+
+ if (numBytes == 0)
+ {
+ return (count - charsRemaining);
+ }
+
+ Debug.Assert(byteBuffer != null, "expected byteBuffer to be non-null");
+ charsRead = _decoder.GetChars(byteBuffer, position, numBytes, buffer, index, flush: false);
+
+ charsRemaining -= charsRead;
+ index += charsRead;
+ }
+
+ // this should never fail
+ Debug.Assert(charsRemaining >= 0, "We read too many characters.");
+
+ // we may have read fewer than the number of characters requested if end of stream reached
+ // or if the encoding makes the char count too big for the buffer (e.g. fallback sequence)
+ return (count - charsRemaining);
+ }
+
+ private int InternalReadOneChar()
+ {
+ // I know having a separate InternalReadOneChar method seems a little
+ // redundant, but this makes a scenario like the security parser code
+ // 20% faster, in addition to the optimizations for UnicodeEncoding I
+ // put in InternalReadChars.
+ int charsRead = 0;
+ int numBytes = 0;
+ long posSav = 0;
+
+ if (_stream.CanSeek)
+ {
+ posSav = _stream.Position;
+ }
+
+ if (_charBytes == null)
+ {
+ _charBytes = new byte[MaxCharBytesSize]; //REVIEW: We need at most 2 bytes/char here?
+ }
+ if (_singleChar == null)
+ {
+ _singleChar = new char[1];
+ }
+
+ while (charsRead == 0)
+ {
+ // We really want to know what the minimum number of bytes per char
+ // is for our encoding. Otherwise for UnicodeEncoding we'd have to
+ // do ~1+log(n) reads to read n characters.
+ // Assume 1 byte can be 1 char unless _2BytesPerChar is true.
+ numBytes = _2BytesPerChar ? 2 : 1;
+
+ int r = _stream.ReadByte();
+ _charBytes[0] = (byte)r;
+ if (r == -1)
+ {
+ numBytes = 0;
+ }
+ if (numBytes == 2)
+ {
+ r = _stream.ReadByte();
+ _charBytes[1] = (byte)r;
+ if (r == -1)
+ {
+ numBytes = 1;
+ }
+ }
+
+ if (numBytes == 0)
+ {
+ // Console.WriteLine("Found no bytes. We're outta here.");
+ return -1;
+ }
+
+ Debug.Assert(numBytes == 1 || numBytes == 2, "BinaryReader::InternalReadOneChar assumes it's reading one or 2 bytes only.");
+
+ try
+ {
+ charsRead = _decoder.GetChars(_charBytes, 0, numBytes, _singleChar, 0);
+ }
+ catch
+ {
+ // Handle surrogate char
+
+ if (_stream.CanSeek)
+ {
+ _stream.Seek((posSav - _stream.Position), SeekOrigin.Current);
+ }
+ // else - we can't do much here
+
+ throw;
+ }
+
+ Debug.Assert(charsRead < 2, "InternalReadOneChar - assuming we only got 0 or 1 char, not 2!");
+ // Console.WriteLine("That became: " + charsRead + " characters.");
+ }
+
+ Debug.Assert(charsRead != 0);
+
+ return _singleChar[0];
+ }
+
+ public virtual char[] ReadChars(int count)
+ {
+ if (count < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
+ }
+ if (_stream == null)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_FileClosed);
+ }
+
+ if (count == 0)
+ {
+ return Array.Empty<char>();
+ }
+
+ // SafeCritical: we own the chars buffer, and therefore can guarantee that the index and count are valid
+ char[] chars = new char[count];
+ int n = InternalReadChars(chars, 0, count);
+ if (n != count)
+ {
+ char[] copy = new char[n];
+ Buffer.BlockCopy(chars, 0, copy, 0, 2 * n); // sizeof(char)
+ chars = copy;
+ }
+
+ return chars;
+ }
+
+ public virtual int Read(byte[] buffer, int index, int count)
+ {
+ if (buffer == null)
+ {
+ throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
+ }
+ if (index < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum);
+ }
+ if (count < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
+ }
+ if (buffer.Length - index < count)
+ {
+ throw new ArgumentException(SR.Argument_InvalidOffLen);
+ }
+ if (_stream == null)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_FileClosed);
+ }
+
+ return _stream.Read(buffer, index, count);
+ }
+
+ public virtual byte[] ReadBytes(int count)
+ {
+ if (count < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
+ }
+ if (_stream == null)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_FileClosed);
+ }
+
+ if (count == 0)
+ {
+ return Array.Empty<byte>();
+ }
+
+ byte[] result = new byte[count];
+ int numRead = 0;
+ do
+ {
+ int n = _stream.Read(result, numRead, count);
+ if (n == 0)
+ {
+ break;
+ }
+
+ numRead += n;
+ count -= n;
+ } while (count > 0);
+
+ if (numRead != result.Length)
+ {
+ // Trim array. This should happen on EOF & possibly net streams.
+ byte[] copy = new byte[numRead];
+ Buffer.BlockCopy(result, 0, copy, 0, numRead);
+ result = copy;
+ }
+
+ return result;
+ }
+
+ protected virtual void FillBuffer(int numBytes)
+ {
+ if (_buffer != null && (numBytes < 0 || numBytes > _buffer.Length))
+ {
+ throw new ArgumentOutOfRangeException(nameof(numBytes), SR.ArgumentOutOfRange_BinaryReaderFillBuffer);
+ }
+
+ int bytesRead = 0;
+ int n = 0;
+
+ if (_stream == null)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_FileClosed);
+ }
+
+ // Need to find a good threshold for calling ReadByte() repeatedly
+ // vs. calling Read(byte[], int, int) for both buffered & unbuffered
+ // streams.
+ if (numBytes == 1)
+ {
+ n = _stream.ReadByte();
+ if (n == -1)
+ {
+ throw new EndOfStreamException(SR.IO_EOF_ReadBeyondEOF);
+ }
+
+ _buffer[0] = (byte)n;
+ return;
+ }
+
+ do
+ {
+ n = _stream.Read(_buffer, bytesRead, numBytes - bytesRead);
+ if (n == 0)
+ {
+ throw new EndOfStreamException(SR.IO_EOF_ReadBeyondEOF);
+ }
+ bytesRead += n;
+ } while (bytesRead < numBytes);
+ }
+
+ protected internal int Read7BitEncodedInt()
+ {
+ // Read out an Int32 7 bits at a time. The high bit
+ // of the byte when on means to continue reading more bytes.
+ int count = 0;
+ int shift = 0;
+ byte b;
+ do
+ {
+ // Check for a corrupted stream. Read a max of 5 bytes.
+ // In a future version, add a DataFormatException.
+ if (shift == 5 * 7) // 5 bytes max per Int32, shift += 7
+ {
+ throw new FormatException(SR.Format_Bad7BitInt32);
+ }
+
+ // ReadByte handles end of stream cases for us.
+ b = ReadByte();
+ count |= (b & 0x7F) << shift;
+ shift += 7;
+ } while ((b & 0x80) != 0);
+ return count;
+ }
+ }
+}
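
Read7BitEncodedInt above decodes the variable-length Int32 format that ReadString uses for its length prefix: seven bits per byte, least-significant group first, with the high bit marking a continuation. A standalone sketch of the matching encoder (a hypothetical helper for illustration, mirroring what BinaryWriter does internally when it writes a string's length prefix):

    using System.Collections.Generic;

    static class SevenBit
    {
        // Emits 7 bits at a time, setting 0x80 on every byte except the last.
        public static byte[] Encode(int value)
        {
            var bytes = new List<byte>();
            uint v = (uint)value;          // unsigned, so negative inputs use the full 5 bytes
            while (v >= 0x80)
            {
                bytes.Add((byte)(v | 0x80));
                v >>= 7;
            }
            bytes.Add((byte)v);
            return bytes.ToArray();
        }
    }

    // Example: 300 (binary 1_0010_1100) encodes as AC 02, and Read7BitEncodedInt
    // reconstructs it as (0x2C << 0) | (0x02 << 7) = 300.
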
diff --git a/src/System.Private.CoreLib/src/System/IO/MemoryStream.cs b/src/System.Private.CoreLib/src/System/IO/MemoryStream.cs
new file mode 100644
index 000000000..f073ed242
--- /dev/null
+++ b/src/System.Private.CoreLib/src/System/IO/MemoryStream.cs
@@ -0,0 +1,802 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System.Threading;
+using System.Threading.Tasks;
+using System.Diagnostics;
+using System.Diagnostics.Contracts;
+
+namespace System.IO
+{
+ // A MemoryStream represents a Stream in memory (i.e., it has no backing store).
+ // This stream may reduce the need for temporary buffers and files in
+ // an application.
+ //
+ // There are two ways to create a MemoryStream. You can initialize one
+ // from an unsigned byte array, or you can create an empty one. Empty
+ // memory streams are resizable, while ones created with a byte array provide
+ // a stream "view" of the data.
+ [Serializable]
+ public class MemoryStream : Stream
+ {
+ private byte[] _buffer; // Either allocated internally or externally.
+ private int _origin; // For user-provided arrays, start at this origin
+ private int _position; // read/write head.
+ [ContractPublicPropertyName("Length")]
+ private int _length; // Number of bytes within the memory stream
+ private int _capacity; // length of usable portion of buffer for stream
+ // Note that _capacity == _buffer.Length for non-user-provided byte[]'s
+
+ private bool _expandable; // User-provided buffers aren't expandable.
+ private bool _writable; // Can user write to this stream?
+ private bool _exposable; // Whether the array can be returned to the user.
+ private bool _isOpen; // Is this stream open or closed?
+
+ // <TODO>In V2, if we get support for arrays of more than 2 GB worth of elements,
+ // consider removing this constraint, or setting it to Int64.MaxValue.</TODO>
+ private const int MemStreamMaxLength = int.MaxValue;
+
+ public MemoryStream()
+ : this(0)
+ {
+ }
+
+ public MemoryStream(int capacity)
+ {
+ if (capacity < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(capacity), SR.ArgumentOutOfRange_NegativeCapacity);
+ }
+
+ _buffer = capacity != 0 ? new byte[capacity] : Array.Empty<byte>();
+ _capacity = capacity;
+ _expandable = true;
+ _writable = true;
+ _exposable = true;
+ _origin = 0; // Must be 0 for byte[]'s created by MemoryStream
+ _isOpen = true;
+ }
+
+ public MemoryStream(byte[] buffer)
+ : this(buffer, true)
+ {
+ }
+
+ public MemoryStream(byte[] buffer, bool writable)
+ {
+ if (buffer == null)
+ {
+ throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
+ }
+
+ _buffer = buffer;
+ _length = _capacity = buffer.Length;
+ _writable = writable;
+ _exposable = false;
+ _origin = 0;
+ _isOpen = true;
+ }
+
+ public MemoryStream(byte[] buffer, int index, int count)
+ : this(buffer, index, count, true, false)
+ {
+ }
+
+ public MemoryStream(byte[] buffer, int index, int count, bool writable)
+ : this(buffer, index, count, writable, false)
+ {
+ }
+
+ public MemoryStream(byte[] buffer, int index, int count, bool writable, bool publiclyVisible)
+ {
+ if (buffer == null)
+ {
+ throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
+ }
+ if (index < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum);
+ }
+ if (count < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
+ }
+ if (buffer.Length - index < count)
+ {
+ throw new ArgumentException(SR.Argument_InvalidOffLen);
+ }
+
+ _buffer = buffer;
+ _origin = _position = index;
+ _length = _capacity = index + count;
+ _writable = writable;
+ _exposable = publiclyVisible; // Can TryGetBuffer return the array?
+ _expandable = false;
+ _isOpen = true;
+ }
+
+ public override bool CanRead
+ {
+ [Pure]
+ get
+ { return _isOpen; }
+ }
+
+ public override bool CanSeek
+ {
+ [Pure]
+ get
+ { return _isOpen; }
+ }
+
+ public override bool CanWrite
+ {
+ [Pure]
+ get
+ { return _writable; }
+ }
+
+ private void EnsureWriteable()
+ {
+ if (!CanWrite)
+ {
+ throw new NotSupportedException(SR.NotSupported_UnwritableStream);
+ }
+ }
+
+ protected override void Dispose(bool disposing)
+ {
+ try
+ {
+ if (disposing)
+ {
+ _isOpen = false;
+ _writable = false;
+ _expandable = false;
+ // Don't set buffer to null - allow TryGetBuffer & ToArray to work.
+ }
+ }
+ finally
+ {
+ // Call base.Close() to cleanup async IO resources
+ base.Dispose(disposing);
+ }
+ }
+
+ // returns a bool saying whether we allocated a new array.
+ private bool EnsureCapacity(int value)
+ {
+ // Check for overflow
+ if (value < 0)
+ {
+ throw new IOException(SR.IO_IO_StreamTooLong);
+ }
+ if (value > _capacity)
+ {
+ int newCapacity = value;
+ if (newCapacity < 256)
+ {
+ newCapacity = 256;
+ }
+ if (newCapacity < _capacity * 2)
+ {
+ newCapacity = _capacity * 2;
+ }
+
+ Capacity = newCapacity;
+ return true;
+ }
+ return false;
+ }
+
+ public override void Flush()
+ {
+ }
+
+#pragma warning disable 1998 //async method with no await operators
+ public override async Task FlushAsync(CancellationToken cancellationToken)
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+
+ Flush();
+ }
+#pragma warning restore 1998
+
+ public virtual bool TryGetBuffer(out ArraySegment<byte> buffer)
+ {
+ if (!_exposable)
+ {
+ buffer = default(ArraySegment<byte>);
+ return false;
+ }
+
+ buffer = new ArraySegment<byte>(_buffer, offset: _origin, count: (_length - _origin));
+ return true;
+ }
+
+ public virtual byte[] GetBuffer()
+ {
+ if (!_exposable)
+ throw new UnauthorizedAccessException(SR.UnauthorizedAccess_MemStreamBuffer);
+ return _buffer;
+ }
+
+ // PERF: Internal sibling of GetBuffer, always returns a buffer (cf. GetBuffer())
+ internal byte[] InternalGetBuffer()
+ {
+ return _buffer;
+ }
+
+ // PERF: True cursor position, we don't need _origin for direct access
+ internal int InternalGetPosition()
+ {
+ if (!_isOpen)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_StreamClosed);
+ }
+ return _position;
+ }
+
+ // PERF: Takes out Int32 as fast as possible
+ internal int InternalReadInt32()
+ {
+ if (!_isOpen)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_StreamClosed);
+ }
+
+ int pos = (_position += 4); // use temp to avoid race
+ if (pos > _length)
+ {
+ _position = _length;
+ throw new EndOfStreamException(SR.IO_EOF_ReadBeyondEOF);
+ }
+ return (int)(_buffer[pos - 4] | _buffer[pos - 3] << 8 | _buffer[pos - 2] << 16 | _buffer[pos - 1] << 24);
+ }
+
+ // PERF: Get actual length of bytes available for read; do sanity checks; shift position - i.e. everything except actual copying bytes
+ internal int InternalEmulateRead(int count)
+ {
+ if (!_isOpen)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_StreamClosed);
+ }
+
+ int n = _length - _position;
+ if (n > count)
+ {
+ n = count;
+ }
+ if (n < 0)
+ {
+ n = 0;
+ }
+
+ Debug.Assert(_position + n >= 0, "_position + n >= 0"); // len is less than 2^31 -1.
+ _position += n;
+ return n;
+ }
+
+ // Gets & sets the capacity (number of bytes allocated) for this stream.
+ // The capacity cannot be set to a value less than the current length
+ // of the stream.
+ //
+ public virtual int Capacity
+ {
+ get
+ {
+ if (!_isOpen)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_StreamClosed);
+ }
+
+ return _capacity - _origin;
+ }
+ set
+ {
+ // Only update the capacity if the MS is expandable and the value is different from the current capacity.
+ // Special behavior if the MS isn't expandable: we don't throw if value is the same as the current capacity.
+ if (value < Length)
+ {
+ throw new ArgumentOutOfRangeException(nameof(value), SR.ArgumentOutOfRange_SmallCapacity);
+ }
+ if (!_isOpen)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_StreamClosed);
+ }
+ if (!_expandable && (value != Capacity))
+ {
+ throw new NotSupportedException(SR.NotSupported_MemStreamNotExpandable);
+ }
+
+ // MemoryStream has this invariant: _origin > 0 => !expandable (see ctors)
+ if (_expandable && value != _capacity)
+ {
+ if (value > 0)
+ {
+ byte[] newBuffer = new byte[value];
+ if (_length > 0)
+ {
+ Buffer.BlockCopy(_buffer, 0, newBuffer, 0, _length);
+ }
+
+ _buffer = newBuffer;
+ }
+ else
+ {
+ _buffer = null;
+ }
+ _capacity = value;
+ }
+ }
+ }
+
+ public override long Length
+ {
+ get
+ {
+ if (!_isOpen)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_StreamClosed);
+ }
+
+ return _length - _origin;
+ }
+ }
+
+ public override long Position
+ {
+ get
+ {
+ if (!_isOpen)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_StreamClosed);
+ }
+
+ return _position - _origin;
+ }
+ set
+ {
+ if (value < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(value), SR.ArgumentOutOfRange_NeedNonNegNum);
+ }
+ if (!_isOpen)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_StreamClosed);
+ }
+ if (value > MemStreamMaxLength)
+ {
+ throw new ArgumentOutOfRangeException(nameof(value), SR.ArgumentOutOfRange_StreamLength);
+ }
+
+ _position = _origin + (int)value;
+ }
+ }
+
+ public override int Read(byte[] buffer, int offset, int count)
+ {
+ if (buffer == null)
+ {
+ throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
+ }
+ if (offset < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(offset), SR.ArgumentOutOfRange_NeedNonNegNum);
+ }
+ if (count < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
+ }
+ if (buffer.Length - offset < count)
+ {
+ throw new ArgumentException(SR.Argument_InvalidOffLen);
+ }
+ if (!_isOpen)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_StreamClosed);
+ }
+
+ int n = _length - _position;
+ if (n > count)
+ {
+ n = count;
+ }
+ if (n <= 0)
+ {
+ return 0;
+ }
+
+ Debug.Assert(_position + n >= 0, "_position + n >= 0"); // len is less than 2^31 -1.
+
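+ // For very small reads, a manual byte-copy loop is cheaper than the call overhead of Buffer.BlockCopy.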
+ if (n <= 8)
+ {
+ int byteCount = n;
+ while (--byteCount >= 0)
+ buffer[offset + byteCount] = _buffer[_position + byteCount];
+ }
+ else
+ Buffer.BlockCopy(_buffer, _position, buffer, offset, n);
+ _position += n;
+
+ return n;
+ }
+
+ public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
+ {
+ if (buffer == null)
+ {
+ throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
+ }
+ if (offset < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(offset), SR.ArgumentOutOfRange_NeedNonNegNum);
+ }
+ if (count < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
+ }
+ if (buffer.Length - offset < count)
+ {
+ throw new ArgumentException(SR.Argument_InvalidOffLen);
+ }
+
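+ // The argument checks above run synchronously; the async wrapper below only performs the in-memory
+ // read, so invalid arguments throw immediately rather than faulting the returned task.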
+ return ReadAsyncImpl(buffer, offset, count, cancellationToken);
+ }
+
+#pragma warning disable 1998 //async method with no await operators
+ private async Task<int> ReadAsyncImpl(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+
+ return Read(buffer, offset, count);
+ }
+#pragma warning restore 1998
+
+ public override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback callback, object state) =>
+ TaskToApm.Begin(ReadAsync(buffer, offset, count, CancellationToken.None), callback, state);
+
+ public override int EndRead(IAsyncResult asyncResult) =>
+ TaskToApm.End<int>(asyncResult);
+
+ public override int ReadByte()
+ {
+ if (!_isOpen)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_StreamClosed);
+ }
+
+ if (_position >= _length)
+ {
+ return -1;
+ }
+
+ return _buffer[_position++];
+ }
+
+ public override void CopyTo(Stream destination, int bufferSize)
+ {
+ // Since we did not originally override this method, validate the arguments
+ // the same way Stream does for back-compat.
+ StreamHelpers.ValidateCopyToArgs(this, destination, bufferSize);
+
+ // If we have been inherited into a subclass, the following implementation could be incorrect
+ // since it does not call through to Read() which a subclass might have overridden.
+ // To be safe we will only use this implementation in cases where we know it is safe to do so,
+ // and delegate to our base class (which will call into Read) when we are not sure.
+ if (GetType() != typeof(MemoryStream))
+ {
+ base.CopyTo(destination, bufferSize);
+ return;
+ }
+
+ int originalPosition = _position;
+
+ // Seek to the end of the MemoryStream.
+ int remaining = InternalEmulateRead(_length - originalPosition);
+
+ // If we were already at or past the end, there's no copying to do, so just quit.
+ if (remaining > 0)
+ {
+ // Call Write() on the other Stream, using our internal buffer and avoiding any
+ // intermediary allocations.
+ destination.Write(_buffer, originalPosition, remaining);
+ }
+ }
+
+ public override Task CopyToAsync(Stream destination, int bufferSize, CancellationToken cancellationToken)
+ {
+ // This implementation offers better performance compared to the base class version.
+
+ StreamHelpers.ValidateCopyToArgs(this, destination, bufferSize);
+
+ // If we have been inherited into a subclass, the following implementation could be incorrect
+ // since it does not call through to ReadAsync() which a subclass might have overridden.
+ // To be safe we will only use this implementation in cases where we know it is safe to do so,
+ // and delegate to our base class (which will call into ReadAsync) when we are not sure.
+ if (GetType() != typeof(MemoryStream))
+ {
+ return base.CopyToAsync(destination, bufferSize, cancellationToken);
+ }
+
+ return CopyToAsyncImpl(destination, bufferSize, cancellationToken);
+ }
+
+ private async Task CopyToAsyncImpl(Stream destination, int bufferSize, CancellationToken cancellationToken)
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+
+ // Avoid copying data from this buffer into a temp buffer:
+ // (this requires that InternalEmulateRead does not throw; otherwise it would need to be
+ // wrapped in a try/catch returning Task.FromException, like the memStrDest.Write call below)
+
+ int pos = _position;
+ int n = InternalEmulateRead(_length - _position);
+
+ // If destination is not a memory stream, write there asynchronously:
+ MemoryStream memStrDest = destination as MemoryStream;
+ if (memStrDest == null)
+ {
+ await destination.WriteAsync(_buffer, pos, n, cancellationToken).ConfigureAwait(false);
+ }
+ else
+ {
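+ // Copying into another MemoryStream is a pure in-memory copy, so perform it synchronously.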
+ memStrDest.Write(_buffer, pos, n);
+ }
+ }
+
+
+ public override long Seek(long offset, SeekOrigin loc)
+ {
+ if (!_isOpen)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_StreamClosed);
+ }
+ if (offset > MemStreamMaxLength)
+ {
+ throw new ArgumentOutOfRangeException(nameof(offset), SR.ArgumentOutOfRange_StreamLength);
+ }
+
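+ // Offsets are combined using unchecked arithmetic; an overflowed (wrapped) result falls below
+ // _origin and is rejected by the checks in each case below.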
+ switch (loc)
+ {
+ case SeekOrigin.Begin:
+ {
+ int tempPosition = unchecked(_origin + (int)offset);
+ if (offset < 0 || tempPosition < _origin)
+ {
+ throw new IOException(SR.IO_IO_SeekBeforeBegin);
+ }
+
+ _position = tempPosition;
+ break;
+ }
+ case SeekOrigin.Current:
+ {
+ int tempPosition = unchecked(_position + (int)offset);
+ if (unchecked(_position + offset) < _origin || tempPosition < _origin)
+ {
+ throw new IOException(SR.IO_IO_SeekBeforeBegin);
+ }
+
+ _position = tempPosition;
+ break;
+ }
+ case SeekOrigin.End:
+ {
+ int tempPosition = unchecked(_length + (int)offset);
+ if (unchecked(_length + offset) < _origin || tempPosition < _origin)
+ {
+ throw new IOException(SR.IO_IO_SeekBeforeBegin);
+ }
+
+ _position = tempPosition;
+ break;
+ }
+ default:
+ throw new ArgumentException(SR.Argument_InvalidSeekOrigin);
+ }
+
+ Debug.Assert(_position >= 0, "_position >= 0");
+ return _position;
+ }
+
+ // Sets the length of the stream to a given value.  The new
+ // value must be nonnegative and no greater than the space remaining in
+ // the array, Int32.MaxValue - origin.
+ // Origin is 0 in all cases except for a MemoryStream created on
+ // top of an existing array with a specific starting offset passed
+ // into the MemoryStream constructor.  The upper bound prevents a
+ // stream created on top of an array from being made longer than the
+ // maximum possible length of the array (Int32.MaxValue).
+ //
+ public override void SetLength(long value)
+ {
+ if (value < 0 || value > int.MaxValue)
+ {
+ throw new ArgumentOutOfRangeException(nameof(value), SR.ArgumentOutOfRange_StreamLength);
+ }
+ EnsureWriteable();
+
+ // Origin wasn't publicly exposed above.
+ Debug.Assert(MemStreamMaxLength == int.MaxValue); // Check parameter validation logic in this method if this fails.
+ if (value > (int.MaxValue - _origin))
+ {
+ throw new ArgumentOutOfRangeException(nameof(value), SR.ArgumentOutOfRange_StreamLength);
+ }
+
+ int newLength = _origin + (int)value;
+ bool allocatedNewArray = EnsureCapacity(newLength);
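+ // A freshly allocated array is already zero-filled; otherwise clear any bytes newly exposed by
+ // the longer length.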
+ if (!allocatedNewArray && newLength > _length)
+ {
+ Array.Clear(_buffer, _length, newLength - _length);
+ }
+
+ _length = newLength;
+ if (_position > newLength)
+ {
+ _position = newLength;
+ }
+ }
+
+ public virtual byte[] ToArray()
+ {
+ //BCLDebug.Perf(_exposable, "MemoryStream::GetBuffer will let you avoid a copy.");
+ int count = _length - _origin;
+ if (count == 0)
+ {
+ return Array.Empty<byte>();
+ }
+
+ byte[] copy = new byte[count];
+ Buffer.BlockCopy(_buffer, _origin, copy, 0, count);
+ return copy;
+ }
+
+ public override void Write(byte[] buffer, int offset, int count)
+ {
+ if (buffer == null)
+ {
+ throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
+ }
+ if (offset < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(offset), SR.ArgumentOutOfRange_NeedNonNegNum);
+ }
+ if (count < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
+ }
+ if (buffer.Length - offset < count)
+ {
+ throw new ArgumentException(SR.Argument_InvalidOffLen);
+ }
+
+ if (!_isOpen)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_StreamClosed);
+ }
+ EnsureWriteable();
+
+ int i = _position + count;
+ // Check for overflow
+ if (i < 0)
+ {
+ throw new IOException(SR.IO_IO_StreamTooLong);
+ }
+
+ if (i > _length)
+ {
+ bool mustZero = _position > _length;
+ if (i > _capacity)
+ {
+ bool allocatedNewArray = EnsureCapacity(i);
+ if (allocatedNewArray)
+ {
+ mustZero = false;
+ }
+ }
+ if (mustZero)
+ {
+ Array.Clear(_buffer, _length, i - _length);
+ }
+ _length = i;
+ }
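+ // For small writes from a different array, a manual copy loop beats the call overhead of
+ // Buffer.BlockCopy.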
+ if ((count <= 8) && (buffer != _buffer))
+ {
+ int byteCount = count;
+ while (--byteCount >= 0)
+ {
+ _buffer[_position + byteCount] = buffer[offset + byteCount];
+ }
+ }
+ else
+ {
+ Buffer.BlockCopy(buffer, offset, _buffer, _position, count);
+ }
+ _position = i;
+ }
+
+ public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
+ {
+ if (buffer == null)
+ {
+ throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
+ }
+ if (offset < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(offset), SR.ArgumentOutOfRange_NeedNonNegNum);
+ }
+ if (count < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
+ }
+ if (buffer.Length - offset < count)
+ {
+ throw new ArgumentException(SR.Argument_InvalidOffLen);
+ }
+
+ return WriteAsyncImpl(buffer, offset, count, cancellationToken);
+ }
+
+#pragma warning disable 1998 //async method with no await operators
+ private async Task WriteAsyncImpl(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+
+ Write(buffer, offset, count);
+ }
+#pragma warning restore 1998
+
+ public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback callback, object state) =>
+ TaskToApm.Begin(WriteAsync(buffer, offset, count, CancellationToken.None), callback, state);
+
+ public override void EndWrite(IAsyncResult asyncResult) =>
+ TaskToApm.End(asyncResult);
+
+ public override void WriteByte(byte value)
+ {
+ if (!_isOpen)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_StreamClosed);
+ }
+ EnsureWriteable();
+
+ if (_position >= _length)
+ {
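+ // Writing past the current length may require growing the buffer and zeroing any gap left by an
+ // earlier Seek beyond the end of the stream.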
+ int newLength = _position + 1;
+ bool mustZero = _position > _length;
+ if (newLength >= _capacity)
+ {
+ bool allocatedNewArray = EnsureCapacity(newLength);
+ if (allocatedNewArray)
+ {
+ mustZero = false;
+ }
+ }
+ if (mustZero)
+ {
+ Array.Clear(_buffer, _length, _position - _length);
+ }
+ _length = newLength;
+ }
+ _buffer[_position++] = value;
+ }
+
+ // Writes this MemoryStream to another stream.
+ public virtual void WriteTo(Stream stream)
+ {
+ if (stream == null)
+ {
+ throw new ArgumentNullException(nameof(stream), SR.ArgumentNull_Stream);
+ }
+ if (!_isOpen)
+ {
+ throw new ObjectDisposedException(null, SR.ObjectDisposed_StreamClosed);
+ }
+
+ stream.Write(_buffer, _origin, _length - _origin);
+ }
+ }
+}