/* Copyright (c) 2003-2006 Niels Kokholm and Peter Sestoft Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #define HASHINDEX using System; using System.Diagnostics; using SCG = System.Collections.Generic; namespace C5 { /// /// A list collection class based on a doubly linked list data structure. /// [Serializable] public class HashedLinkedList : SequencedBase, IList, SCG.IList #if HASHINDEX #else , IStack, IQueue #endif { #region Fields /// /// IExtensible.Add(T) always does AddLast(T), fIFO determines /// if T Remove() does RemoveFirst() or RemoveLast() /// bool fIFO = true; #region Events /// /// /// /// public override EventTypeEnum ListenableEvents { get { return underlying == null ? 
EventTypeEnum.All : EventTypeEnum.None; } } #endregion //Invariant: startsentinel != null && endsentinel != null //If size==0: startsentinel.next == endsentinel && endsentinel.prev == startsentinel //Else: startsentinel.next == First && endsentinel.prev == Last) /// /// Node to the left of first node /// Node startsentinel; /// /// Node to the right of last node /// Node endsentinel; /// /// Offset of this view in underlying list /// #if HASHINDEX int? offset; #else int offset; #endif /// /// underlying list of this view (or null for the underlying list) /// HashedLinkedList underlying; //Note: all views will have the same views list since all view objects are created by MemberwiseClone() WeakViewList> views; WeakViewList>.Node myWeakReference; /// /// Has this list or view not been invalidated by some operation (by someone calling Dispose()) /// bool isValid = true; #if HASHINDEX HashDictionary dict; /// /// Number of taggroups /// int taggroups; /// /// /// /// int Taggroups { get { return underlying == null ? taggroups : underlying.taggroups; } set { if (underlying == null) taggroups = value; else underlying.taggroups = value; } } #endif #endregion #region Util bool equals(T i1, T i2) { return itemequalityComparer.Equals(i1, i2); } #region Check utilities /// /// Check if it is valid to perform updates and increment stamp of /// underlying if this is a view. /// This method should be called in every public modifying /// methods before any modifications are performed. /// /// /// if check fails. protected override void updatecheck() { validitycheck(); base.updatecheck(); if (underlying != null) underlying.stamp++; } /// /// Check if we are a view that the underlyinglist has only been updated through us. ///
/// This method should be called from enumerators etc. to guard against
/// concurrent modification of the base collection.
/// </summary>
/// <exception cref="ViewDisposedException">if the check fails, i.e. this list
/// or view has been invalidated by someone calling Dispose().</exception>
void validitycheck() { if (!isValid) throw new ViewDisposedException(); }

/// <summary>
/// Check that the list has not been updated since a particular time.
/// </summary>
/// <param name="stamp">The stamp indicating the time.</param>
/// <exception cref="CollectionModifiedException">if the check fails, i.e. the
/// (underlying) list has been modified after the stamp was taken.</exception>
protected override void modifycheck(int stamp)
{
    validitycheck();
    // A view shares the modification stamp of its underlying list.
    if ((underlying != null ? underlying.stamp : this.stamp) != stamp)
        throw new CollectionModifiedException();
}

#endregion

#region Searching
// Search for item; on success, node is set to the node holding it.
bool contains(T item, out Node node)
{
#if HASHINDEX
    // The hash index locates the node in the whole underlying list; for a
    // view we must additionally check the node lies between our sentinels.
    if (dict.Find(item, out node))
        return insideview(node);
#else
    //TODO: search from both ends? Or search from the end selected by FIFO?
    node = startsentinel.next;
    while (node != endsentinel)
    {
        if (equals(item, node.item))
            return true;

        node = node.next;
    }
#endif
    return false;
}

/// <summary>
/// Search forwards from a node for a node with a particular item.
/// </summary>
/// <param name="item">The item to look for</param>
/// <param name="node">On input, the node to start at. If item was found, the node found on output.</param>
/// <param name="index">If node was found, the value will be the number of links followed higher than
/// the value on input. If item was not found, the value on output is undefined.</param>
/// <returns>True if node was found.</returns>
bool find(T item, ref Node node, ref int index)
{
    while (node != endsentinel)
    {
        //if (item.Equals(node.item))
        if (itemequalityComparer.Equals(item, node.item))
            return true;

        index++;
        node = node.next;
    }

    return false;
}

// "find" backwards: search from node towards the start of the list,
// decrementing index for each link followed.
bool dnif(T item, ref Node node, ref int index)
{
    while (node != startsentinel)
    {
        //if (item.Equals(node.item))
        if (itemequalityComparer.Equals(item, node.item))
            return true;

        index--;
        node = node.prev;
    }

    return false;
}

#if HASHINDEX
// True if node lies strictly between this view's sentinels; trivially true
// when this is the underlying list itself (underlying == null).
bool insideview(Node node)
{
    if (underlying == null)
        return true;

    return (startsentinel.precedes(node) && node.precedes(endsentinel));
}
#endif

#endregion

#region Indexing

/// <summary>
/// Return the node at position pos. Walks from whichever sentinel is
/// nearer, so the cost is at most size/2 link traversals.
/// </summary>
/// <param name="pos">The zero-based position in this view</param>
/// <exception cref="IndexOutOfRangeException">if pos is negative or &gt;= size</exception>
/// <returns>The node at position pos</returns>
Node get(int pos)
{
    if (pos < 0 || pos >= size)
        throw new IndexOutOfRangeException();
    else if (pos < size / 2)
    {
        // Closer to front: walk forwards from the start sentinel.
        Node node = startsentinel;

        for (int i = 0; i <= pos; i++)
            node = node.next;

        return node;
    }
    else
    {
        // Closer to end: walk backwards from the end sentinel.
        Node node = endsentinel;

        for (int i = size; i > pos; i--)
            node = node.prev;

        return node;
    }
}

/// <summary>
/// Find the distance from pos to the set given by positions. Return the
/// signed distance as return value and as an out parameter, the
/// array index of the nearest position. This is used for up to length 5 of
/// positions, and we do not assume it is sorted.
/// </summary>
/// <param name="pos">The position to measure from</param>
/// <param name="nearest">Index into positions of the nearest entry (-1 if positions is empty)</param>
/// <param name="positions">Candidate positions (unsorted, small)</param>
/// <returns>The signed distance positions[nearest] - pos</returns>
int dist(int pos, out int nearest, int[] positions)
{
    nearest = -1;

    int bestdist = int.MaxValue;
    int signeddist = bestdist;

    for (int i = 0; i < positions.Length; i++)
    {
        int thisdist = positions[i] - pos;

        // Candidate at or after pos.
        if (thisdist >= 0 && thisdist < bestdist)
        {
            nearest = i;
            bestdist = thisdist;
            signeddist = thisdist;
        }

        // Candidate before pos.
        if (thisdist < 0 && -thisdist < bestdist)
        {
            nearest = i;
            bestdist = -thisdist;
            signeddist = thisdist;
        }
    }

    return signeddist;
}

/// <summary>
/// Find the node at position pos, given known positions of several nodes.
/// </summary>
/// <param name="pos">The position to find the node for</param>
/// <param name="positions">Known positions (parallel to nodes)</param>
/// <param name="nodes">Nodes at the known positions</param>
/// <returns>The node at position pos</returns>
Node get(int pos, int[] positions, Node[] nodes)
{
    int nearest;
    int delta = dist(pos, out nearest, positions);
    Node node = nodes[nearest];

    // delta > 0 means the nearest known node lies after pos, so walk
    // backwards; otherwise walk forwards.
    if (delta > 0)
        for (int i = 0; i < delta; i++)
            node = node.prev;
    else
        for (int i = 0; i > delta; i--)
            node = node.next;

    return node;
}

/// <summary>
/// Get nodes at positions p1 and p2, given nodes at several positions.
/// Resolves the cheaper of the two first and then uses the result as an
/// extra known position when resolving the other.
/// </summary>
/// <param name="p1">First position to resolve</param>
/// <param name="p2">Second position to resolve</param>
/// <param name="n1">Node at position p1</param>
/// <param name="n2">Node at position p2</param>
/// <param name="positions">Known positions (parallel to nodes)</param>
/// <param name="nodes">Nodes at the known positions</param>
void getPair(int p1, int p2, out Node n1, out Node n2, int[] positions, Node[] nodes)
{
    int nearest1, nearest2;
    int delta1 = dist(p1, out nearest1, positions), d1 = delta1 < 0 ? -delta1 : delta1;
    int delta2 = dist(p2, out nearest2, positions), d2 = delta2 < 0 ? -delta2 : delta2;

    if (d1 < d2)
    {
        n1 = get(p1, positions, nodes);
        n2 = get(p2, new int[] { positions[nearest2], p1 }, new Node[] { nodes[nearest2], n1 });
    }
    else
    {
        n2 = get(p2, positions, nodes);
        n1 = get(p1, new int[] { positions[nearest1], p2 }, new Node[] { nodes[nearest1], n2 });
    }
}

#endregion

#region Insertion

#if HASHINDEX
/// <summary>
/// Register item in the hash index and link a new node in before succ.
/// </summary>
/// <param name="index">The index in this view (unused in the hashed version)</param>
/// <param name="succ">The node that should follow the new node</param>
/// <param name="item">The item to insert</param>
/// <exception cref="DuplicateNotAllowedException">if an equal item is already
/// present (a hashed list does not allow duplicates)</exception>
void insert(int index, Node succ, T item)
{
    Node newnode = new Node(item);

    if (dict.FindOrAdd(item, ref newnode))
        throw new DuplicateNotAllowedException("Item already in indexed list");

    insertNode(true, succ, newnode);
}

/// <summary>
/// Insert a Node before another one. Unchecked version.
/// /// The successor to be /// Node to insert /// update overlapping view in this call void insertNode(bool updateViews, Node succ, Node newnode) { newnode.next = succ; Node pred = newnode.prev = succ.prev; succ.prev.next = newnode; succ.prev = newnode; size++; if (underlying != null) underlying.size++; settag(newnode); if (updateViews) fixViewsAfterInsert(succ, pred, 1, 0); } #else /// /// /// /// The index in this view /// /// /// Node insert(int index, Node succ, T item) { Node newnode = new Node(item, succ.prev, succ); succ.prev.next = newnode; succ.prev = newnode; size++; if (underlying != null) underlying.size++; fixViewsAfterInsert(succ, newnode.prev, 1, Offset + index); return newnode; } #endif #endregion #region Removal T remove(Node node, int index) { fixViewsBeforeSingleRemove(node, Offset + index); node.prev.next = node.next; node.next.prev = node.prev; size--; if (underlying != null) underlying.size--; #if HASHINDEX removefromtaggroup(node); #endif return node.item; } #if HASHINDEX private bool dictremove(T item, out Node node) { if (underlying == null) { if (!dict.Remove(item, out node)) return false; } else { //We cannot avoid calling dict twice - have to intersperse the listorder test! 
if (!contains(item, out node)) return false; dict.Remove(item); } return true; } #endif #endregion #region fixView utilities /// /// /// /// The actual number of inserted nodes /// The predecessor of the inserted nodes /// The successor of the added nodes /// void fixViewsAfterInsert(Node succ, Node pred, int added, int realInsertionIndex) { if (views != null) foreach (HashedLinkedList view in views) { if (view != this) { #if HASHINDEX if (pred.precedes(view.startsentinel) || (view.startsentinel == pred && view.size > 0)) view.offset += added; if (view.startsentinel.precedes(pred) && succ.precedes(view.endsentinel)) view.size += added; if (view.startsentinel == pred && view.size > 0) view.startsentinel = succ.prev; if (view.endsentinel == succ) view.endsentinel = pred.next; #else if (view.Offset == realInsertionIndex && view.size > 0) view.startsentinel = succ.prev; if (view.Offset + view.size == realInsertionIndex) view.endsentinel = pred.next; if (view.Offset < realInsertionIndex && view.Offset + view.size > realInsertionIndex) view.size += added; if (view.Offset > realInsertionIndex || (view.Offset == realInsertionIndex && view.size > 0)) view.offset += added; #endif } } } void fixViewsBeforeSingleRemove(Node node, int realRemovalIndex) { if (views != null) foreach (HashedLinkedList view in views) { if (view != this) { #if HASHINDEX if (view.startsentinel.precedes(node) && node.precedes(view.endsentinel)) view.size--; if (!view.startsentinel.precedes(node)) view.offset--; if (view.startsentinel == node) view.startsentinel = node.prev; if (view.endsentinel == node) view.endsentinel = node.next; #else if (view.offset - 1 == realRemovalIndex) view.startsentinel = node.prev; if (view.offset + view.size == realRemovalIndex) view.endsentinel = node.next; if (view.offset <= realRemovalIndex && view.offset + view.size > realRemovalIndex) view.size--; if (view.offset > realRemovalIndex) view.offset--; #endif } } } #if HASHINDEX #else void fixViewsBeforeRemove(int start, 
int count, Node first, Node last) { int clearend = start + count - 1; if (views != null) foreach (HashedLinkedList view in views) { if (view == this) continue; int viewoffset = view.Offset, viewend = viewoffset + view.size - 1; //sentinels if (start < viewoffset && viewoffset - 1 <= clearend) view.startsentinel = first.prev; if (start <= viewend + 1 && viewend < clearend) view.endsentinel = last.next; //offsets and sizes if (start < viewoffset) { if (clearend < viewoffset) view.offset = viewoffset - count; else { view.offset = start; view.size = clearend < viewend ? viewend - clearend : 0; } } else if (start <= viewend) view.size = clearend <= viewend ? view.size - count : start - viewoffset; } } #endif /// /// /// /// /// The position of View(otherOffset, otherSize) wrt. this view MutualViewPosition viewPosition(HashedLinkedList otherView) { #if HASHINDEX Node otherstartsentinel = otherView.startsentinel, otherendsentinel = otherView.endsentinel, first = startsentinel.next, last = endsentinel.prev, otherfirst = otherstartsentinel.next, otherlast = otherendsentinel.prev; if (last.precedes(otherfirst) || otherlast.precedes(first)) return MutualViewPosition.NonOverlapping; if (size == 0 || (otherstartsentinel.precedes(first) && last.precedes(otherendsentinel))) return MutualViewPosition.Contains; if (otherView.size == 0 || (startsentinel.precedes(otherfirst) && otherlast.precedes(endsentinel))) return MutualViewPosition.ContainedIn; return MutualViewPosition.Overlapping; #else int end = offset + size, otherOffset = otherView.offset, otherSize = otherView.size, otherEnd = otherOffset + otherSize; if (otherOffset >= end || otherEnd <= offset) return MutualViewPosition.NonOverlapping; if (size == 0 || (otherOffset <= offset && end <= otherEnd)) return MutualViewPosition.Contains; if (otherSize == 0 || (offset <= otherOffset && otherEnd <= end)) return MutualViewPosition.ContainedIn; return MutualViewPosition.Overlapping; #endif } void disposeOverlappingViews(bool 
reverse) { if (views != null) { foreach (HashedLinkedList view in views) { if (view != this) { switch (viewPosition(view)) { case MutualViewPosition.ContainedIn: if (reverse) { } else view.Dispose(); break; case MutualViewPosition.Overlapping: view.Dispose(); break; case MutualViewPosition.Contains: case MutualViewPosition.NonOverlapping: break; } } } } } #endregion #endregion #region Constructors /// /// Create a linked list with en external item equalityComparer /// /// The external equalityComparer public HashedLinkedList(SCG.IEqualityComparer itemequalityComparer) : base(itemequalityComparer) { offset = 0; size = stamp = 0; startsentinel = new Node(default(T)); endsentinel = new Node(default(T)); startsentinel.next = endsentinel; endsentinel.prev = startsentinel; #if HASHINDEX //It is important that the sentinels are different: startsentinel.taggroup = new TagGroup(); startsentinel.taggroup.tag = int.MinValue; startsentinel.taggroup.count = 0; endsentinel.taggroup = new TagGroup(); endsentinel.taggroup.tag = int.MaxValue; endsentinel.taggroup.count = 0; dict = new HashDictionary(itemequalityComparer); #endif } /// /// Create a linked list with the natural item equalityComparer /// public HashedLinkedList() : this(EqualityComparer.Default) { } #endregion #region Node nested class /// /// An individual cell in the linked list /// [Serializable] class Node { public Node prev; public Node next; public T item; #region Tag support #if HASHINDEX internal int tag; internal TagGroup taggroup; internal bool precedes(Node that) { //Debug.Assert(taggroup != null, "taggroup field null"); //Debug.Assert(that.taggroup != null, "that.taggroup field null"); int t1 = taggroup.tag; int t2 = that.taggroup.tag; return t1 < t2 ? true : t1 > t2 ? 
false : tag < that.tag; } #endif #endregion [Tested] internal Node(T item) { this.item = item; } [Tested] internal Node(T item, Node prev, Node next) { this.item = item; this.prev = prev; this.next = next; } public override string ToString() { #if HASHINDEX return String.Format("Node: (item={0}, tag={1})", item, tag); #else return String.Format("Node(item={0})", item); #endif } } #endregion #region Taggroup nested class and tag maintenance utilities #if HASHINDEX /// /// A group of nodes with the same high tag. Purpose is to be /// able to tell the sequence order of two nodes without having to scan through /// the list. /// [Serializable] class TagGroup { internal int tag, count; internal Node first, last; /// /// Pretty print a tag group /// /// Formatted tag group public override string ToString() { return String.Format("TagGroup(tag={0}, cnt={1}, fst={2}, lst={3})", tag, count, first, last); } } //Constants for tag maintenance const int wordsize = 32; const int lobits = 3; const int hibits = lobits + 1; const int losize = 1 << lobits; const int hisize = 1 << hibits; const int logwordsize = 5; TagGroup gettaggroup(Node pred, Node succ, out int lowbound, out int highbound) { TagGroup predgroup = pred.taggroup, succgroup = succ.taggroup; if (predgroup == succgroup) { lowbound = pred.tag + 1; highbound = succ.tag - 1; return predgroup; } else if (predgroup.first != null) { lowbound = pred.tag + 1; highbound = int.MaxValue; return predgroup; } else if (succgroup.first != null) { lowbound = int.MinValue; highbound = succ.tag - 1; return succgroup; } else { lowbound = int.MinValue; highbound = int.MaxValue; return new TagGroup(); } } /// /// Put a tag on a node (already inserted in the list). Split taggroups and renumber as /// necessary. 
/// /// The node to tag void settag(Node node) { Node pred = node.prev, succ = node.next; TagGroup predgroup = pred.taggroup, succgroup = succ.taggroup; if (predgroup == succgroup) { node.taggroup = predgroup; predgroup.count++; if (pred.tag + 1 == succ.tag) splittaggroup(predgroup); else node.tag = (pred.tag + 1) / 2 + (succ.tag - 1) / 2; } else if (predgroup.first != null) { node.taggroup = predgroup; predgroup.last = node; predgroup.count++; if (pred.tag == int.MaxValue) splittaggroup(predgroup); else node.tag = pred.tag / 2 + int.MaxValue / 2 + 1; } else if (succgroup.first != null) { node.taggroup = succgroup; succgroup.first = node; succgroup.count++; if (succ.tag == int.MinValue) splittaggroup(node.taggroup); else node.tag = int.MinValue / 2 + (succ.tag - 1) / 2; } else { Debug.Assert(Taggroups == 0); TagGroup newgroup = new TagGroup(); Taggroups = 1; node.taggroup = newgroup; newgroup.first = newgroup.last = node; newgroup.count = 1; return; } } /// /// Remove a node from its taggroup. ///
When this is called, node must already have been removed from the underlying list ///
/// The node to remove void removefromtaggroup(Node node) { TagGroup taggroup = node.taggroup; if (--taggroup.count == 0) { Taggroups--; return; } if (node == taggroup.first) taggroup.first = node.next; if (node == taggroup.last) taggroup.last = node.prev; //node.taggroup = null; if (taggroup.count != losize || Taggroups == 1) return; TagGroup otg; // bug20070911: Node neighbor; if ((neighbor = taggroup.first.prev) != startsentinel && (otg = neighbor.taggroup).count <= losize) taggroup.first = otg.first; else if ((neighbor = taggroup.last.next) != endsentinel && (otg = neighbor.taggroup).count <= losize) taggroup.last = otg.last; else return; Node n = otg.first; for (int i = 0, length = otg.count; i < length; i++) { n.taggroup = taggroup; n = n.next; } taggroup.count += otg.count; Taggroups--; n = taggroup.first; const int ofs = wordsize - hibits; for (int i = 0, count = taggroup.count; i < count; i++) { n.tag = (i - losize) << ofs; //(i-8)<<28 n = n.next; } } /// /// Split a tag group to make rom for more tags. /// /// The tag group void splittaggroup(TagGroup taggroup) { Node n = taggroup.first; int ptgt = taggroup.first.prev.taggroup.tag; int ntgt = taggroup.last.next.taggroup.tag; Debug.Assert(ptgt + 1 <= ntgt - 1); int ofs = wordsize - hibits; int newtgs = (taggroup.count - 1) / hisize; int tgtdelta = (int)((ntgt + 0.0 - ptgt) / (newtgs + 2)), tgtag = ptgt; tgtdelta = tgtdelta == 0 ? 1 : tgtdelta; for (int j = 0; j < newtgs; j++) { TagGroup newtaggroup = new TagGroup(); newtaggroup.tag = (tgtag = tgtag >= ntgt - tgtdelta ? ntgt : tgtag + tgtdelta); newtaggroup.first = n; newtaggroup.count = hisize; for (int i = 0; i < hisize; i++) { n.taggroup = newtaggroup; n.tag = (i - losize) << ofs; //(i-8)<<28 n = n.next; } newtaggroup.last = n.prev; } int rest = taggroup.count - hisize * newtgs; taggroup.first = n; taggroup.count = rest; taggroup.tag = (tgtag = tgtag >= ntgt - tgtdelta ? 
ntgt : tgtag + tgtdelta); ofs--; for (int i = 0; i < rest; i++) { n.tag = (i - hisize) << ofs; //(i-16)<<27 n = n.next; } taggroup.last = n.prev; Taggroups += newtgs; if (tgtag == ntgt) redistributetaggroups(taggroup); } private void redistributetaggroups(TagGroup taggroup) { TagGroup pred = taggroup, succ = taggroup, tmp; double limit = 1, bigt = Math.Pow(Taggroups, 1.0 / 30);//????? int bits = 1, count = 1, lowmask = 0, himask = 0, target = 0; do { bits++; lowmask = (1 << bits) - 1; himask = ~lowmask; target = taggroup.tag & himask; while ((tmp = pred.first.prev.taggroup).first != null && (tmp.tag & himask) == target) { count++; pred = tmp; } while ((tmp = succ.last.next.taggroup).last != null && (tmp.tag & himask) == target) { count++; succ = tmp; } limit *= bigt; } while (count > limit); //redistibute tags int lob = pred.first.prev.taggroup.tag, upb = succ.last.next.taggroup.tag; int delta = upb / (count + 1) - lob / (count + 1); Debug.Assert(delta > 0); for (int i = 0; i < count; i++) { pred.tag = lob + (i + 1) * delta; pred = pred.last.next.taggroup; } } #endif #endregion #region Position, PositionComparer and ViewHandler nested types class PositionComparer : SCG.IComparer { static PositionComparer _default; PositionComparer() { } public static PositionComparer Default { get { return _default ?? (_default = new PositionComparer()); } } public int Compare(Position a, Position b) { #if HASHINDEX return a.Endpoint == b.Endpoint ? 0 : a.Endpoint.precedes(b.Endpoint) ? -1 : 1; #else return a.Index.CompareTo(b.Index); #endif } } /// /// During RemoveAll, we need to cache the original endpoint indices of views /// struct Position { public readonly HashedLinkedList View; public bool Left; #if HASHINDEX public readonly Node Endpoint; #else public readonly int Index; #endif public Position(HashedLinkedList view, bool left) { View = view; Left = left; #if HASHINDEX Endpoint = left ? view.startsentinel.next : view.endsentinel.prev; #else Index = left ? 
view.Offset : view.Offset + view.size - 1; #endif } #if HASHINDEX public Position(Node node, int foo) { this.Endpoint = node; View = null; Left = false; } #else public Position(int index) { this.Index = index; View = null; Left = false; } #endif } //TODO: merge the two implementations using Position values as arguments /// /// Handle the update of (other) views during a multi-remove operation. /// struct ViewHandler { ArrayList leftEnds; ArrayList rightEnds; int leftEndIndex, rightEndIndex, leftEndIndex2, rightEndIndex2; internal readonly int viewCount; internal ViewHandler(HashedLinkedList list) { leftEndIndex = rightEndIndex = leftEndIndex2 = rightEndIndex2 = viewCount = 0; leftEnds = rightEnds = null; if (list.views != null) foreach (HashedLinkedList v in list.views) if (v != list) { if (leftEnds == null) { leftEnds = new ArrayList(); rightEnds = new ArrayList(); } leftEnds.Add(new Position(v, true)); rightEnds.Add(new Position(v, false)); } if (leftEnds == null) return; viewCount = leftEnds.Count; leftEnds.Sort(PositionComparer.Default); rightEnds.Sort(PositionComparer.Default); } #if HASHINDEX internal void skipEndpoints(int removed, Node n) { if (viewCount > 0) { Position endpoint; while (leftEndIndex < viewCount && ((endpoint = leftEnds[leftEndIndex]).Endpoint.prev.precedes(n))) { HashedLinkedList view = endpoint.View; view.offset = view.offset - removed;//TODO: extract offset.Value? 
view.size += removed; leftEndIndex++; } while (rightEndIndex < viewCount && (endpoint = rightEnds[rightEndIndex]).Endpoint.precedes(n)) { HashedLinkedList view = endpoint.View; view.size -= removed; rightEndIndex++; } } if (viewCount > 0) { Position endpoint; while (leftEndIndex2 < viewCount && (endpoint = leftEnds[leftEndIndex2]).Endpoint.prev.precedes(n)) leftEndIndex2++; while (rightEndIndex2 < viewCount && (endpoint = rightEnds[rightEndIndex2]).Endpoint.next.precedes(n)) rightEndIndex2++; } } /// /// To be called with n pointing to the right of each node to be removed in a stretch. /// And at the endsentinel. /// /// Update offset of a view whose left endpoint (has not already been handled and) is n or precedes n. /// I.e. startsentinel precedes n. /// Also update the size as a prelude to handling the right endpoint. /// /// Update size of a view not already handled and whose right endpoint precedes n. /// /// The number of nodes left of n to be removed /// internal void updateViewSizesAndCounts(int removed, Node n) { if (viewCount > 0) { Position endpoint; while (leftEndIndex < viewCount && ((endpoint = leftEnds[leftEndIndex]).Endpoint.prev.precedes(n))) { HashedLinkedList view = endpoint.View; view.offset = view.offset - removed; //TODO: fix use of offset view.size += removed; leftEndIndex++; } while (rightEndIndex < viewCount && (endpoint = rightEnds[rightEndIndex]).Endpoint.precedes(n)) { HashedLinkedList view = endpoint.View; view.size -= removed; rightEndIndex++; } } } /// /// To be called with n being the first not-to-be-removed node after a (stretch of) node(s) to be removed. /// /// It will update the startsentinel of views (that have not been handled before and) /// whose startsentinel precedes n, i.e. is to be deleted. /// /// It will update the endsentinel of views (...) whose endsentinel precedes n, i.e. is to be deleted. /// /// PROBLEM: DOESNT WORK AS ORIGINALLY ADVERTISED. WE MUST DO THIS BEFORE WE ACTUALLY REMOVE THE NODES. 
WHEN THE /// NODES HAVE BEEN REMOVED, THE precedes METHOD WILL NOT WORK! /// /// /// /// internal void updateSentinels(Node n, Node newstart, Node newend) { if (viewCount > 0) { Position endpoint; while (leftEndIndex2 < viewCount && (endpoint = leftEnds[leftEndIndex2]).Endpoint.prev.precedes(n)) { HashedLinkedList view = endpoint.View; view.startsentinel = newstart; leftEndIndex2++; } while (rightEndIndex2 < viewCount && (endpoint = rightEnds[rightEndIndex2]).Endpoint.next.precedes(n)) { HashedLinkedList view = endpoint.View; view.endsentinel = newend; rightEndIndex2++; } } } #else /// /// This is to be called with realindex pointing to the first node to be removed after a (stretch of) node that was not removed /// /// /// internal void skipEndpoints(int removed, int realindex) { if (viewCount > 0) { Position endpoint; while (leftEndIndex < viewCount && (endpoint = leftEnds[leftEndIndex]).Index <= realindex) { HashedLinkedList view = endpoint.View; view.offset = view.offset - removed; view.size += removed; leftEndIndex++; } while (rightEndIndex < viewCount && (endpoint = rightEnds[rightEndIndex]).Index < realindex) { HashedLinkedList view = endpoint.View; view.size -= removed; rightEndIndex++; } } if (viewCount > 0) { Position endpoint; while (leftEndIndex2 < viewCount && (endpoint = leftEnds[leftEndIndex2]).Index <= realindex) leftEndIndex2++; while (rightEndIndex2 < viewCount && (endpoint = rightEnds[rightEndIndex2]).Index < realindex - 1) rightEndIndex2++; } } internal void updateViewSizesAndCounts(int removed, int realindex) { if (viewCount > 0) { Position endpoint; while (leftEndIndex < viewCount && (endpoint = leftEnds[leftEndIndex]).Index <= realindex) { HashedLinkedList view = endpoint.View; view.offset = view.Offset - removed; view.size += removed; leftEndIndex++; } while (rightEndIndex < viewCount && (endpoint = rightEnds[rightEndIndex]).Index < realindex) { HashedLinkedList view = endpoint.View; view.size -= removed; rightEndIndex++; } } } internal void 
updateSentinels(int realindex, Node newstart, Node newend) { if (viewCount > 0) { Position endpoint; while (leftEndIndex2 < viewCount && (endpoint = leftEnds[leftEndIndex2]).Index <= realindex) { HashedLinkedList view = endpoint.View; view.startsentinel = newstart; leftEndIndex2++; } while (rightEndIndex2 < viewCount && (endpoint = rightEnds[rightEndIndex2]).Index < realindex - 1) { HashedLinkedList view = endpoint.View; view.endsentinel = newend; rightEndIndex2++; } } } #endif } #endregion #region Range nested class class Range : DirectedCollectionValueBase, IDirectedCollectionValue { int start, count, rangestamp; Node startnode, endnode; HashedLinkedList list; bool forwards; internal Range(HashedLinkedList list, int start, int count, bool forwards) { this.list = list; this.rangestamp = list.underlying != null ? list.underlying.stamp : list.stamp; this.start = start; this.count = count; this.forwards = forwards; if (count > 0) { startnode = list.get(start); endnode = list.get(start + count - 1); } } public override bool IsEmpty { get { list.modifycheck(rangestamp); return count == 0; } } [Tested] public override int Count { [Tested]get { list.modifycheck(rangestamp); return count; } } public override Speed CountSpeed { get { list.modifycheck(rangestamp); return Speed.Constant; } } public override T Choose() { list.modifycheck(rangestamp); if (count > 0) return startnode.item; throw new NoSuchItemException(); } [Tested] public override SCG.IEnumerator GetEnumerator() { int togo = count; list.modifycheck(rangestamp); if (togo == 0) yield break; Node cursor = forwards ? startnode : endnode; yield return cursor.item; while (--togo > 0) { cursor = forwards ? 
cursor.next : cursor.prev; list.modifycheck(rangestamp); yield return cursor.item; } } [Tested] public override IDirectedCollectionValue Backwards() { list.modifycheck(rangestamp); Range b = (Range)MemberwiseClone(); b.forwards = !forwards; return b; } [Tested] IDirectedEnumerable IDirectedEnumerable.Backwards() { return Backwards(); } [Tested] public override EnumerationDirection Direction { [Tested] get { return forwards ? EnumerationDirection.Forwards : EnumerationDirection.Backwards; } } } #endregion #region IDisposable Members /// /// Invalidate this list. If a view, just invalidate the view. /// If not a view, invalidate the list and all views on it. /// public virtual void Dispose() { Dispose(false); } void Dispose(bool disposingUnderlying) { if (isValid) { if (underlying != null) { isValid = false; if (!disposingUnderlying && views != null) views.Remove(myWeakReference); endsentinel = null; startsentinel = null; underlying = null; views = null; myWeakReference = null; } else { //isValid = false; //endsentinel = null; //startsentinel = null; if (views != null) foreach (HashedLinkedList view in views) view.Dispose(true); //views = null; Clear(); } } } #endregion IDisposable stuff #region IList Members /// /// /// if this list is empty. /// The first item in this list. [Tested] public virtual T First { [Tested] get { validitycheck(); if (size == 0) throw new NoSuchItemException(); return startsentinel.next.item; } } /// /// /// if this list is empty. /// The last item in this list. [Tested] public virtual T Last { [Tested] get { validitycheck(); if (size == 0) throw new NoSuchItemException(); return endsentinel.prev.item; } } /// /// Since Add(T item) always add at the end of the list, /// this describes if list has FIFO or LIFO semantics. /// /// True if the Remove() operation removes from the /// start of the list, false if it removes from the end. THe default for a new linked list is true. 
[Tested]
public virtual bool FIFO
{
    [Tested]
    get { validitycheck(); return fIFO; }
    [Tested]
    set { updatecheck(); fIFO = value; }
}

/// <summary>
/// A hashed linked list is never fixed-size.
/// </summary>
public virtual bool IsFixedSize
{
    get { validitycheck(); return false; }
}

/// <summary>
/// On this list, this indexer is read/write.
/// <exception cref="IndexOutOfRangeException"/> if index is negative or
/// &gt;= the size of the collection.
/// </summary>
/// <value>The index'th item of this list.</value>
/// <param name="index">The index of the item to fetch or store.</param>
[Tested]
public virtual T this[int index]
{
    [Tested]
    get { validitycheck(); return get(index).item; }
    [Tested]
    set
    {
        updatecheck();
        Node n = get(index);
        //
        T item = n.item;
#if HASHINDEX
        if (itemequalityComparer.Equals(value, item))
        {
            // Same key: just replace the payload and refresh the dictionary entry.
            n.item = value;
            dict.Update(value, n);
        }
        else if (!dict.FindOrAdd(value, ref n))
        {
            // New key: drop the old mapping and store the new item in the node.
            dict.Remove(item);
            n.item = value;
        }
        else
            throw new ArgumentException("Item already in indexed list");
#else
        n.item = value;
#endif
        (underlying ?? this).raiseForSetThis(index, value, item);
    }
}

/// <summary>
/// Indexing on a linked list requires a traversal.
/// </summary>
/// <value>Speed.Linear</value>
public virtual Speed IndexingSpeed { get { return Speed.Linear; } }

/// <summary>
/// Insert an item at a specific index location in this list.
/// <exception cref="IndexOutOfRangeException"/> if i is negative or
/// &gt; the size of the collection.
/// </summary>
/// <param name="i">The index at which to insert.</param>
/// <param name="item">The item to insert.</param>
[Tested]
public virtual void Insert(int i, T item)
{
    updatecheck();
    insert(i, i == size ? endsentinel : get(i), item);
    if (ActiveEvents != EventTypeEnum.None)
        (underlying ?? this).raiseForInsert(i + Offset, item);
}

/// <summary>
/// Insert an item at the end of a compatible view, used as a pointer.
/// The pointer must be a view on the same list as this, and the endpoint
/// of the pointer must be a valid insertion point of this.
/// </summary>
/// <exception cref="IncompatibleViewException">If pointer is not a view
/// on the same list as this.</exception>
/// <exception cref="DuplicateNotAllowedException">If the item is already
/// in the list (AllowsDuplicates is false here).</exception>
/// <param name="pointer">The view whose endpoint is the insertion point.</param>
/// <param name="item">The item to insert.</param>
public void Insert(IList<T> pointer, T item)
{
    updatecheck();
    if ((pointer == null) || ((pointer.Underlying ?? pointer) != (underlying ?? this)))
        throw new IncompatibleViewException();
#warning INEFFICIENT
    //TODO: make this efficient (the whole point of the method:
    //Do NOT use Insert, but insert the node at pointer.endsentinel, checking
    //via the ordering that this is a valid insertion point
    Insert(pointer.Offset + pointer.Count - Offset, item);
}

/// <summary>
/// Insert into this list all items from an enumerable collection starting
/// at a particular index.
/// <exception cref="IndexOutOfRangeException"/> if i is negative or
/// &gt; the size of the collection.
/// </summary>
/// <param name="i">Index to start inserting at.</param>
/// <param name="items">Items to insert.</param>
[Tested]
public virtual void InsertAll<U>(int i, SCG.IEnumerable<U> items) where U : T
{
    insertAll(i, items, true);
}

// Shared implementation for InsertAll and related insertions; "insertion"
// controls whether ItemInserted events are raised alongside ItemsAdded.
void insertAll<U>(int i, SCG.IEnumerable<U> items, bool insertion) where U : T
{
    updatecheck();
    Node succ, node, pred;
    int count = 0;
    succ = i == size ? endsentinel : get(i);
    pred = node = succ.prev;
#if HASHINDEX
    TagGroup taggroup = null;
    int taglimit = 0, thetag = 0;
    taggroup = gettaggroup(node, succ, out thetag, out taglimit);
    try
    {
        foreach (T item in items)
        {
            Node tmp = new Node(item, node, null);
            if (!dict.FindOrAdd(item, ref tmp))
            {
                tmp.tag = thetag < taglimit ? ++thetag : thetag;
                tmp.taggroup = taggroup;
                node.next = tmp;
                count++;
                node = tmp;
            }
            else
                throw new DuplicateNotAllowedException("Item already in indexed list");
        }
    }
    finally
    {
        // Even on a duplicate exception, splice in what was inserted so far
        // and keep the structure consistent.
        if (count != 0)
        {
            taggroup.count += count;
            if (taggroup != pred.taggroup)
                taggroup.first = pred.next;
            if (taggroup != succ.taggroup)
                taggroup.last = node;
            succ.prev = node;
            node.next = succ;
            if (node.tag == node.prev.tag)
                splittaggroup(taggroup);
            size += count;
            if (underlying != null)
                underlying.size += count;
            fixViewsAfterInsert(succ, pred, count, 0);
            raiseForInsertAll(pred, i, count, insertion);
        }
    }
#else
    foreach (T item in items)
    {
        Node tmp = new Node(item, node, null);
        node.next = tmp;
        count++;
        node = tmp;
    }
    if (count == 0)
        return;
    succ.prev = node;
    node.next = succ;
    size += count;
    if (underlying != null)
        underlying.size += count;
    if (count > 0)
    {
        fixViewsAfterInsert(succ, pred, count, offset + i);
        raiseForInsertAll(pred, i, count, insertion);
    }
#endif
}

// Raise Inserted/Added/Changed events for a consecutive run of "added"
// nodes following "node"; i is the view-relative index of the first one.
private void raiseForInsertAll(Node node, int i, int added, bool insertion)
{
    if (ActiveEvents != 0)
    {
        int index = Offset + i;
        if ((ActiveEvents & (EventTypeEnum.Added | EventTypeEnum.Inserted)) != 0)
            for (int j = index; j < index + added; j++)
            {
#warning must we check stamps here?
                node = node.next;
                T item = node.item;
                if (insertion) raiseItemInserted(item, j);
                raiseItemsAdded(item, 1);
            }
        raiseCollectionChanged();
    }
}

/// <summary>
/// Insert an item at the front of this list.
/// </summary>
/// <param name="item">The item to insert.</param>
[Tested]
public virtual void InsertFirst(T item)
{
    updatecheck();
    insert(0, startsentinel.next, item);
    if (ActiveEvents != EventTypeEnum.None)
        (underlying ?? this).raiseForInsert(0 + Offset, item);
}

/// <summary>
/// Insert an item at the back of this list.
/// </summary>
/// <param name="item">The item to insert.</param>
[Tested]
public virtual void InsertLast(T item)
{
    updatecheck();
    insert(size, endsentinel, item);
    if (ActiveEvents != EventTypeEnum.None)
        (underlying ?? this).raiseForInsert(size - 1 + Offset, item);
}

/// <summary>
/// Create a new list consisting of the results of mapping all items of this list.
/// </summary>
/// <param name="mapper">The delegate defining the map.</param>
/// <returns>The new list.</returns>
[Tested]
public IList<V> Map<V>(Fun<T, V> mapper)
{
    validitycheck();
    HashedLinkedList<V> retval = new HashedLinkedList<V>();
    return map<V>(mapper, retval);
}

/// <summary>
/// Create a new list consisting of the results of mapping all items of this
/// list. The new list will use a specified equalityComparer for the item type.
/// </summary>
/// <typeparam name="V">The type of items of the new list.</typeparam>
/// <param name="mapper">The delegate defining the map.</param>
/// <param name="equalityComparer">The equalityComparer to use for the new list.</param>
/// <returns>The new list.</returns>
public IList<V> Map<V>(Fun<T, V> mapper, SCG.IEqualityComparer<V> equalityComparer)
{
    validitycheck();
    HashedLinkedList<V> retval = new HashedLinkedList<V>(equalityComparer);
    return map<V>(mapper, retval);
}

// Build the mapped list node by node, spreading hash-index tags evenly
// over one shared tag group; modifycheck guards against the mapper
// mutating this list.
private IList<V> map<V>(Fun<T, V> mapper, HashedLinkedList<V> retval)
{
    if (size == 0)
        return retval;
    int stamp = this.stamp;
    Node cursor = startsentinel.next;
    HashedLinkedList<V>.Node mcursor = retval.startsentinel;
#if HASHINDEX
    double tagdelta = int.MaxValue / (size + 1.0);
    int count = 1;
    HashedLinkedList<V>.TagGroup taggroup = null;
    taggroup = new HashedLinkedList<V>.TagGroup();
    retval.taggroups = 1;
    taggroup.count = size;
#endif
    while (cursor != endsentinel)
    {
        V v = mapper(cursor.item);
        modifycheck(stamp);
        mcursor.next = new HashedLinkedList<V>.Node(v, mcursor, null);
        cursor = cursor.next;
        mcursor = mcursor.next;
#if HASHINDEX
        retval.dict.Add(v, mcursor);
        mcursor.taggroup = taggroup;
        mcursor.tag = (int)(tagdelta * count++);
#endif
    }
#if HASHINDEX
    taggroup.first = retval.startsentinel.next;
    taggroup.last = mcursor;
#endif
    retval.endsentinel.prev = mcursor;
    mcursor.next = retval.endsentinel;
    retval.size = size;
    return retval;
}

/// <summary>
/// Remove one item from the list: from the front if <code>FIFO</code>
/// is true, else from the back.
/// <exception cref="NoSuchItemException"/> if this list is empty.
/// </summary>
/// <returns>The removed item.</returns>
[Tested]
public virtual T Remove()
{
    updatecheck();
    if (size == 0)
        throw new NoSuchItemException("List is empty");
    // FIFO removes from the front, LIFO from the back.
    T item = fIFO ? remove(startsentinel.next, 0) : remove(endsentinel.prev, size - 1);
#if HASHINDEX
    dict.Remove(item);
#endif
    (underlying ?? this).raiseForRemove(item);
    return item;
}

/// <summary>
/// Remove one item from the front of the list.
/// <exception cref="NoSuchItemException"/> if this list is empty.
/// </summary>
/// <returns>The removed item.</returns>
[Tested]
public virtual T RemoveFirst()
{
    updatecheck();
    if (size == 0)
        throw new NoSuchItemException("List is empty");
    T item = remove(startsentinel.next, 0);
#if HASHINDEX
    dict.Remove(item);
#endif
    if (ActiveEvents != EventTypeEnum.None)
        (underlying ?? this).raiseForRemoveAt(Offset, item);
    return item;
}

/// <summary>
/// Remove one item from the back of the list.
/// <exception cref="NoSuchItemException"/> if this list is empty.
/// </summary>
/// <returns>The removed item.</returns>
[Tested]
public virtual T RemoveLast()
{
    updatecheck();
    if (size == 0)
        throw new NoSuchItemException("List is empty");
    T item = remove(endsentinel.prev, size - 1);
#if HASHINDEX
    dict.Remove(item);
#endif
    if (ActiveEvents != EventTypeEnum.None)
        // size has already been decremented, so size + Offset is the old last index.
        (underlying ?? this).raiseForRemoveAt(size + Offset, item);
    return item;
}

/// <summary>
/// Create a list view on this list.
/// <exception cref="ArgumentOutOfRangeException"/> if the start or count is negative.
/// <exception cref="ArgumentException"/> if the range does not fit within list.
/// </summary>
/// <param name="start">The index in this list of the start of the view.</param>
/// <param name="count">The size of the view.</param>
/// <returns>The new list view.</returns>
[Tested]
public virtual IList<T> View(int start, int count)
{
    checkRange(start, count);
    validitycheck();
    if (views == null)
        views = new WeakViewList<HashedLinkedList<T>>();
    HashedLinkedList<T> retval = (HashedLinkedList<T>)MemberwiseClone();
    retval.underlying = underlying != null ? underlying : this;
    retval.offset = offset + start;
    retval.size = count;
    getPair(start - 1, start + count, out retval.startsentinel, out retval.endsentinel,
        new int[] { -1, size }, new Node[] { startsentinel, endsentinel });
    //retval.startsentinel = start == 0 ? startsentinel : get(start - 1);
    //retval.endsentinel = start + count == size ? endsentinel : get(start + count);
    //TODO: for the purpose of Dispose, we need to retain a ref to the node
    retval.myWeakReference = views.Add(retval);
    return retval;
}

/// <summary>
/// Create a list view on this list containing the (first) occurrence of a particular item.
/// Returns null if the item is not in this list.
/// </summary>
/// <param name="item">The item to find.</param>
/// <returns>The new list view.</returns>
public virtual IList<T> ViewOf(T item)
{
#if HASHINDEX
    Node n;
    validitycheck();
    if (!contains(item, out n))
        return null;
    HashedLinkedList<T> retval = (HashedLinkedList<T>)MemberwiseClone();
    retval.underlying = underlying != null ? underlying : this;
    // Offset is left unknown (null) and computed lazily by the Offset property.
    retval.offset = null;
    retval.startsentinel = n.prev;
    retval.endsentinel = n.next;
    retval.size = 1;
    return retval;
#else
    int index = 0;
    Node n = startsentinel.next;
    if (!find(item, ref n, ref index))
        return null;
    //TODO: optimize with getpair!
    return View(index, 1);
#endif
}

/// <summary>
/// Create a list view on this list containing the last occurrence of a particular item.
/// Returns null if the item is not in this list. (With the hash index, items are
/// unique, so this is the same view as ViewOf.)
/// </summary>
/// <param name="item">The item to find.</param>
/// <returns>The new list view.</returns>
public virtual IList<T> LastViewOf(T item)
{
#if HASHINDEX
    return ViewOf(item);
#else
    int index = size - 1;
    Node n = endsentinel.prev;
    if (!dnif(item, ref n, ref index))
        return null;
    return View(index, 1);
#endif
}

/// <summary>
/// Null if this list is not a view.
/// </summary>
/// <value>Underlying list for view.</value>
[Tested]
public virtual IList<T> Underlying { [Tested] get { validitycheck(); return underlying; } }

/// <summary>
/// True unless this list or view has been invalidated by Dispose().
/// </summary>
public virtual bool IsValid { get { return isValid; } }

/// <summary>
/// </summary>
/// <value>Offset for this list view or 0 for an underlying list.</value>
[Tested]
public virtual int Offset
{
    [Tested]
    get
    {
        validitycheck();
#if HASHINDEX
        if (offset == null && underlying != null)
        {
            //TODO: search from both ends simultaneously!
            Node n = underlying.startsentinel;
            int i = 0;
            while (n != startsentinel) { n = n.next; i++; }
            offset = i;
        }
#endif
        return (int)offset;
    }
}

/// <summary>
/// Slide this list view along the underlying list.
/// </summary>
/// <exception cref="NotAViewException"/> if this list is not a view.
/// <summary>
/// Slide this list view along the underlying list.
/// </summary>
/// <exception cref="NotAViewException"/> if this list is not a view.
/// <exception cref="ArgumentOutOfRangeException"/> if the operation
/// would bring either end of the view outside the underlying list.
/// <param name="offset">The signed amount to slide: positive to slide
/// towards the end.</param>
[Tested]
public IList<T> Slide(int offset)
{
    if (!TrySlide(offset, size))
        throw new ArgumentOutOfRangeException();
    return this;
}

//TODO: more test cases
/// <summary>
/// Slide this list view along the underlying list, perhaps changing its size.
/// </summary>
/// <exception cref="NotAViewException"/> if this list is not a view.
/// <exception cref="ArgumentOutOfRangeException"/> if the operation
/// would bring either end of the view outside the underlying list.
/// <param name="offset">The signed amount to slide: positive to slide
/// towards the end.</param>
/// <param name="size">The new size of the view.</param>
public IList<T> Slide(int offset, int size)
{
    if (!TrySlide(offset, size))
        throw new ArgumentOutOfRangeException();
    return this;
}

/// <summary>
/// Non-throwing version of Slide keeping the current size.
/// </summary>
/// <param name="offset">The signed amount to slide.</param>
/// <returns>True if the slide was legal and performed.</returns>
public virtual bool TrySlide(int offset) { return TrySlide(offset, size); }

/// <summary>
/// Non-throwing version of Slide with a new size.
/// </summary>
/// <param name="offset">The signed amount to slide.</param>
/// <param name="size">The new size of the view.</param>
/// <returns>True if the slide was legal and performed.</returns>
public virtual bool TrySlide(int offset, int size)
{
    updatecheck();
    if (underlying == null)
        throw new NotAViewException("List not a view");
#pragma warning disable 472
    if (this.offset == null) //Note: only possible with HASHINDEX
#pragma warning restore 472
    {
#pragma warning disable 162
        try
        {
            getPair(offset - 1, offset + size, out startsentinel, out endsentinel,
                new int[] { -1, this.size }, new Node[] { startsentinel, endsentinel });
            //TODO: maybe-update offset field
        }
        catch (NullReferenceException)
        {
            // Walked off the end of the underlying list: slide was illegal.
            return false;
        }
#pragma warning restore 162
    }
    else
    {
        if (offset + this.offset < 0 || offset + this.offset + size > underlying.size)
            return false;
        int oldoffset = (int)(this.offset);
        getPair(offset - 1, offset + size, out startsentinel, out endsentinel,
            new int[] { -oldoffset - 1, -1, this.size, underlying.size - oldoffset },
            new Node[] { underlying.startsentinel, startsentinel, endsentinel, underlying.endsentinel });
    }
    this.size = size;
    this.offset += offset;
    return true;
}

//TODO: improve the complexity of the implementation
/// <summary>
/// Create the smallest view containing both this view and otherView.
/// Returns null if otherView is strictly to the left of this view.
/// </summary>
/// <exception cref="IncompatibleViewException">If otherView does not have
/// the same underlying list as this.</exception>
/// <param name="otherView">The other view.</param>
/// <returns>The spanning view, or null.</returns>
public virtual IList<T> Span(IList<T> otherView)
{
    if ((otherView == null) || ((otherView.Underlying ?? otherView) != (underlying ?? this)))
        throw new IncompatibleViewException();
    if (otherView.Offset + otherView.Count - Offset < 0)
        return null;
    return (underlying ?? this).View(Offset, otherView.Offset + otherView.Count - Offset);
}

//Question: should we swap items or move nodes around?
//The first seems much more efficient unless the items are value types
//with a large memory footprint.
//(Swapping will do count*3/2 T assignments, linking around will do
// 4*count ref assignments; note that ref assignments are more expensive
//than copying non-ref bits)
/// <summary>
/// Reverse the list so the items are in the opposite sequence order.
/// </summary>
[Tested]
public virtual void Reverse()
{
    updatecheck();
    if (size == 0)
        return;
    Position[] positions = null;
    int poslow = 0, poshigh = 0;
    if (views != null)
    {
        // Collect the endpoints of views contained in this one so their
        // sentinels can be mirrored; overlapping views are invalidated.
        CircularQueue<Position> _positions = null;
        foreach (HashedLinkedList<T> view in views)
        {
            if (view != this)
            {
                switch (viewPosition(view))
                {
                    case MutualViewPosition.ContainedIn:
                        (_positions ?? (_positions = new CircularQueue<Position>())).Enqueue(new Position(view, true));
                        _positions.Enqueue(new Position(view, false));
                        break;
                    case MutualViewPosition.Overlapping:
                        view.Dispose();
                        break;
                    case MutualViewPosition.Contains:
                    case MutualViewPosition.NonOverlapping:
                        break;
                }
            }
        }
        if (_positions != null)
        {
            positions = _positions.ToArray();
            Sorting.IntroSort(positions, 0, positions.Length, PositionComparer.Default);
            poshigh = positions.Length - 1;
        }
    }
    // Swap item payloads pairwise from both ends towards the middle.
    Node a = get(0), b = get(size - 1);
    for (int i = 0; i < size / 2; i++)
    {
        T swap;
        swap = a.item; a.item = b.item; b.item = swap;
#if HASHINDEX
        dict[a.item] = a; dict[b.item] = b;
#endif
        if (positions != null)
            mirrorViewSentinelsForReverse(positions, ref poslow, ref poshigh, a, b, i);
        a = a.next; b = b.prev;
    }
    if (positions != null && size % 2 != 0)
        mirrorViewSentinelsForReverse(positions, ref poslow, ref poshigh, a, b, size / 2);
    (underlying ?? this).raiseCollectionChanged();
}

// Mirror sentinels of contained views whose endpoints sit at the pair of
// nodes (a, b) currently being swapped during Reverse.
private void mirrorViewSentinelsForReverse(Position[] positions, ref int poslow, ref int poshigh, Node a, Node b, int i)
{
#if HASHINDEX
    int? aindex = offset + i, bindex = offset + size - 1 - i;
#else
    int aindex = offset + i, bindex = offset + size - 1 - i;
#endif
    Position pos;
#if HASHINDEX
    while (poslow <= poshigh && (pos = positions[poslow]).Endpoint == a)
#else
    while (poslow <= poshigh && (pos = positions[poslow]).Index == aindex)
#endif
    {
        //TODO: Note: in the case og hashed linked list, if this.offset == null, but pos.View.offset!=null
        //we may at this point compute this.offset and non-null values of aindex and bindex
        if (pos.Left)
            pos.View.endsentinel = b.next;
        else
        {
            pos.View.startsentinel = b.prev;
            pos.View.offset = bindex;
        }
        poslow++;
    }
#if HASHINDEX
    while (poslow < poshigh && (pos = positions[poshigh]).Endpoint == b)
#else
    while (poslow < poshigh && (pos = positions[poshigh]).Index == bindex)
#endif
    {
        if (pos.Left)
            pos.View.endsentinel = a.next;
        else
        {
            pos.View.startsentinel = a.prev;
            pos.View.offset = aindex;
        }
        poshigh--;
    }
}

/// <summary>
/// Check if this list is sorted according to the default sorting order
/// for the item type T, as defined by the Comparer class.
/// </summary>
/// <exception cref="NotComparableException">if T is not comparable.</exception>
/// <returns>True if the list is sorted, else false.</returns>
public bool IsSorted() { return IsSorted(Comparer<T>.Default); }

/// <summary>
/// Check if this list is sorted according to a specific sorting order.
/// </summary>
/// <param name="c">The comparer defining the sorting order.</param>
/// <returns>True if the list is sorted, else false.</returns>
[Tested]
public virtual bool IsSorted(SCG.IComparer<T> c)
{
    validitycheck();
    if (size <= 1)
        return true;
    Node node = startsentinel.next;
    T prevItem = node.item;
    node = node.next;
    while (node != endsentinel)
    {
        if (c.Compare(prevItem, node.item) > 0)
            return false;
        else
        {
            prevItem = node.item;
            node = node.next;
        }
    }
    return true;
}

/// <summary>
/// Sort the items of the list according to the default sorting order
/// for the item type T, as defined by the Comparer class.
/// The sorting is stable.
/// </summary>
/// <exception cref="NotComparableException">if T is not comparable.</exception>
public virtual void Sort() { Sort(Comparer<T>.Default); }

// Sort the linked list using mergesort
/// <summary>
/// Sort the items of the list according to a specific sorting order.
/// The sorting is stable.
/// </summary>
/// <param name="c">The comparer defining the sorting order.</param>
[Tested]
public virtual void Sort(SCG.IComparer<T> c)
{
    updatecheck();
    if (size == 0)
        return;
    disposeOverlappingViews(false);
#if HASHINDEX
    if (underlying != null)
    {
        // Detach this view's nodes from their tag groups; tags are rebuilt below.
        Node cursor = startsentinel.next;
        while (cursor != endsentinel)
        {
            cursor.taggroup.count--;
            cursor = cursor.next;
        }
    }
#endif
    // Build a linked list of non-empty runs.
    // The prev field in first node of a run points to next run's first node
    Node runTail = startsentinel.next;
    Node prevNode = startsentinel.next;
    endsentinel.prev.next = null;
    while (prevNode != null)
    {
        Node node = prevNode.next;
        while (node != null && c.Compare(prevNode.item, node.item) <= 0)
        {
            prevNode = node;
            node = prevNode.next;
        }
        // Completed a run; prevNode is the last node of that run
        prevNode.next = null;   // Finish the run
        runTail.prev = node;    // Link it into the chain of runs
        runTail = node;
        if (c.Compare(endsentinel.prev.item, prevNode.item) <= 0)
            endsentinel.prev = prevNode;  // Update last pointer to point to largest
        prevNode = node;        // Start a new run
    }
    // Repeatedly merge runs two and two, until only one run remains
    while (startsentinel.next.prev != null)
    {
        Node run = startsentinel.next;
        Node newRunTail = null;
        while (run != null && run.prev != null)
        {
            // At least two runs, merge
            Node nextRun = run.prev.prev;
            Node newrun = mergeRuns(run, run.prev, c);
            if (newRunTail != null)
                newRunTail.prev = newrun;
            else
                startsentinel.next = newrun;
            newRunTail = newrun;
            run = nextRun;
        }
        if (run != null) // Add the last run, if any
            newRunTail.prev = run;
    }
    endsentinel.prev.next = endsentinel;
    startsentinel.next.prev = startsentinel;
    //assert invariant();
    //assert isSorted();
#if HASHINDEX
    {
        // Rebuild the hash-index tags: one shared tag group with tags
        // spread evenly over the old tag range.
        Node cursor = startsentinel.next, end = endsentinel;
        int tag, taglimit;
        TagGroup t = gettaggroup(startsentinel, endsentinel, out tag, out taglimit);
        int tagdelta = taglimit / (size + 1) - tag / (size + 1);
        tagdelta = tagdelta == 0 ? 1 : tagdelta;
        if (underlying == null)
            taggroups = 1;
        while (cursor != end)
        {
            tag = tag + tagdelta > taglimit ? taglimit : tag + tagdelta;
            cursor.tag = tag;
            t.count++;
            cursor.taggroup = t;
            cursor = cursor.next;
        }
        if (t != startsentinel.taggroup)
            t.first = startsentinel.next;
        if (t != endsentinel.taggroup)
            t.last = endsentinel.prev;
        if (tag == taglimit)
            splittaggroup(t);
    }
#endif
    (underlying ?? this).raiseCollectionChanged();
}

// Merge two sorted runs into one; returns the head of the merged run.
// Uses the next fields for the run chains, preserving stability.
private static Node mergeRuns(Node run1, Node run2, SCG.IComparer<T> c)
{
    //assert run1 != null && run2 != null;
    Node prev;
    bool prev1; // is prev from run1?
    if (c.Compare(run1.item, run2.item) <= 0)
    {
        prev = run1;
        prev1 = true;
        run1 = run1.next;
    }
    else
    {
        prev = run2;
        prev1 = false;
        run2 = run2.next;
    }
    Node start = prev;
    //assert start != null;
    start.prev = null;
    while (run1 != null && run2 != null)
    {
        if (prev1)
        {
            //assert prev.next == run1;
            //Comparable run2item = (Comparable)run2.item;
            while (run1 != null && c.Compare(run2.item, run1.item) >= 0)
            {
                prev = run1;
                run1 = prev.next;
            }
            if (run1 != null)
            {
                // prev.item <= run2.item < run1.item; insert run2
                prev.next = run2;
                run2.prev = prev;
                prev = run2;
                run2 = prev.next;
                prev1 = false;
            }
        }
        else
        {
            //assert prev.next == run2;
            //Comparable run1item = (Comparable)run1.item;
            while (run2 != null && c.Compare(run1.item, run2.item) > 0)
            {
                prev = run2;
                run2 = prev.next;
            }
            if (run2 != null)
            {
                // prev.item < run1.item <= run2.item; insert run1
                prev.next = run1;
                run1.prev = prev;
                prev = run1;
                run1 = prev.next;
                prev1 = true;
            }
        }
    }
    //assert !(run1 != null && prev1) && !(run2 != null && !prev1);
    if (run1 != null)
    {
        // last run2 < all of run1; attach run1 at end
        prev.next = run1;
        run1.prev = prev;
    }
    else if (run2 != null)
    {
        // last run1
        prev.next = run2;
        run2.prev = prev;
    }
    return start;
}

/// <summary>
/// Randomly shuffle the items of this list.
/// Will invalidate overlapping views??? /// public virtual void Shuffle() { Shuffle(new C5Random()); } /// /// Shuffle the items of this list according to a specific random source. /// Will invalidate overlapping views??? /// /// The random source. public virtual void Shuffle(Random rnd) { updatecheck(); if (size == 0) return; disposeOverlappingViews(false); ArrayList a = new ArrayList(); a.AddAll(this); a.Shuffle(rnd); Node cursor = startsentinel.next; int j = 0; while (cursor != endsentinel) { cursor.item = a[j++]; #if HASHINDEX dict[cursor.item] = cursor; #endif cursor = cursor.next; } (underlying ?? this).raiseCollectionChanged(); } #endregion #region IIndexed Members /// /// . /// /// The directed collection of items in a specific index interval. /// The low index of the interval (inclusive). /// The size of the range. [Tested] public IDirectedCollectionValue this[int start, int count] { [Tested] get { validitycheck(); checkRange(start, count); return new Range(this, start, count, true); } } /// /// Searches for an item in the list going forwrds from the start. /// /// Item to search for. /// Index of item from start. [Tested] public virtual int IndexOf(T item) { validitycheck(); Node node; #if HASHINDEX if (!dict.Find(item, out node) || !insideview(node)) return ~size; #endif node = startsentinel.next; int index = 0; if (find(item, ref node, ref index)) return index; else return ~size; } /// /// Searches for an item in the list going backwords from the end. /// /// Item to search for. /// Index of of item from the end. [Tested] public virtual int LastIndexOf(T item) { #if HASHINDEX return IndexOf(item); #else validitycheck(); Node node = endsentinel.prev; int index = size - 1; if (dnif(item, ref node, ref index)) return index; else return ~size; #endif } /// /// Remove the item at a specific position of the list. /// if i is negative or /// >= the size of the collection. /// /// The index of the item to remove. /// The removed item. 
[Tested] public virtual T RemoveAt(int i) { updatecheck(); T retval = remove(get(i), i); #if HASHINDEX dict.Remove(retval); #endif if (ActiveEvents != EventTypeEnum.None) (underlying ?? this).raiseForRemoveAt(Offset + i, retval); return retval; } /// /// Remove all items in an index interval. /// ???. /// /// The index of the first item to remove. /// The number of items to remove. [Tested] public virtual void RemoveInterval(int start, int count) { #if HASHINDEX updatecheck(); checkRange(start, count); if (count == 0) return; View(start, count).Clear(); #else //Note: this is really almost equaivalent to Clear on a view updatecheck(); checkRange(start, count); if (count == 0) return; //for small count: optimize //use an optimal get(int i, int j, ref Node ni, ref Node nj)? Node a = get(start), b = get(start + count - 1); fixViewsBeforeRemove(start, count, a, b); a.prev.next = b.next; b.next.prev = a.prev; if (underlying != null) underlying.size -= count; size -= count; if (ActiveEvents != EventTypeEnum.None) (underlying ?? this).raiseForRemoveInterval(start + Offset, count); #endif } void raiseForRemoveInterval(int start, int count) { if (ActiveEvents != 0) { raiseCollectionCleared(size == 0, count, start); raiseCollectionChanged(); } } #endregion #region ISequenced Members /// /// /// /// [Tested] public override int GetSequencedHashCode() { validitycheck(); return base.GetSequencedHashCode(); } /// /// /// /// /// [Tested] public override bool SequencedEquals(ISequenced that) { validitycheck(); return base.SequencedEquals(that); } #endregion #region IDirectedCollection Members /// /// Create a collection containing the same items as this collection, but /// whose enumerator will enumerate the items backwards. The new collection /// will become invalid if the original is modified. Method typicaly used as in /// foreach (T x in coll.Backwards()) {...} /// /// The backwards collection. 
[Tested] public override IDirectedCollectionValue Backwards() { return this[0, size].Backwards(); } #endregion #region IDirectedEnumerable Members [Tested] IDirectedEnumerable IDirectedEnumerable.Backwards() { return Backwards(); } #endregion #region IEditableCollection Members /// /// The value is symbolic indicating the type of asymptotic complexity /// in terms of the size of this collection (worst-case or amortized as /// relevant). /// /// Speed.Linear [Tested] public virtual Speed ContainsSpeed { [Tested] get { #if HASHINDEX return Speed.Constant; #else return Speed.Linear; #endif } } /// /// Performs a check for view validity before calling base.GetUnsequencedHashCode() /// /// [Tested] public override int GetUnsequencedHashCode() { validitycheck(); return base.GetUnsequencedHashCode(); } /// /// /// /// /// [Tested] public override bool UnsequencedEquals(ICollection that) { validitycheck(); return base.UnsequencedEquals(that); } /// /// Check if this collection contains (an item equivalent to according to the /// itemequalityComparer) a particular value. /// /// The value to check for. /// True if the items is in this collection. [Tested] public virtual bool Contains(T item) { validitycheck(); Node node; return contains(item, out node); } /// /// Check if this collection contains an item equivalent according to the /// itemequalityComparer to a particular value. If so, return in the ref argument (a /// binary copy of) the actual value found. /// /// The value to look for. /// True if the items is in this collection. [Tested] public virtual bool Find(ref T item) { validitycheck(); Node node; if (contains(item, out node)) { item = node.item; return true; } return false; } /// /// Check if this collection contains an item equivalent according to the /// itemequalityComparer to a particular value. If so, update the item in the collection /// to with a binary copy of the supplied value. Will update a single item. /// /// Value to update. 
/// True if the item was found and hence updated. [Tested] public virtual bool Update(T item) { T olditem; return Update(item, out olditem); } /// /// /// /// /// /// public virtual bool Update(T item, out T olditem) { updatecheck(); Node node; if (contains(item, out node)) { olditem = node.item; node.item = item; #if HASHINDEX //Avoid clinging onto a reference to olditem via dict! dict.Update(item, node); #endif (underlying ?? this).raiseForUpdate(item, olditem); return true; } olditem = default(T); return false; } /// /// Check if this collection contains an item equivalent according to the /// itemequalityComparer to a particular value. If so, return in the ref argument (a /// binary copy of) the actual value found. Else, add the item to the collection. /// /// The value to look for. /// True if the item was found (hence not added). [Tested] public virtual bool FindOrAdd(ref T item) { updatecheck(); #if HASHINDEX //This is an extended myinsert: Node node = new Node(item); if (!dict.FindOrAdd(item, ref node)) { insertNode(true, endsentinel, node); (underlying ?? this).raiseForAdd(item); return false; } if (!insideview(node)) throw new ArgumentException("Item alredy in indexed list but outside view"); item = node.item; return true; #else if (Find(ref item)) return true; Add(item); return false; #endif } /// /// Check if this collection contains an item equivalent according to the /// itemequalityComparer to a particular value. If so, update the item in the collection /// to with a binary copy of the supplied value; else add the value to the collection. /// /// Value to add or update. /// True if the item was found and updated (hence not added). 
[Tested] public virtual bool UpdateOrAdd(T item) { T olditem; return UpdateOrAdd(item, out olditem); } /// /// /// /// /// /// public virtual bool UpdateOrAdd(T item, out T olditem) { updatecheck(); #if HASHINDEX Node node = new Node(item); //NOTE: it is hard to do this without double access to the dictionary //in the update case if (dict.FindOrAdd(item, ref node)) { if (!insideview(node)) throw new ArgumentException("Item in indexed list but outside view"); olditem = node.item; //Avoid clinging onto a reference to olditem via dict! dict.Update(item, node); node.item = item; (underlying ?? this).raiseForUpdate(item, olditem); return true; } insertNode(true, endsentinel, node); (underlying ?? this).raiseForAdd(item); #else if (Update(item, out olditem)) return true; Add(item); #endif olditem = default(T); return false; } /// /// Remove a particular item from this collection. Since the collection has bag /// semantics only one copy equivalent to the supplied item is removed. /// /// The value to remove. /// True if the item was found (and removed). [Tested] public virtual bool Remove(T item) { updatecheck(); int i = 0; Node node; #if HASHINDEX if (!dictremove(item, out node)) #else node = fIFO ? startsentinel.next : endsentinel.prev; if (!(fIFO ? find(item, ref node, ref i) : dnif(item, ref node, ref i))) #endif return false; T removeditem = remove(node, i); (underlying ?? this).raiseForRemove(removeditem); return true; } /// /// Remove a particular item from this collection if found (only one copy). /// If an item was removed, report a binary copy of the actual item removed in /// the argument. /// /// The value to remove on input. /// The value removed. /// True if the item was found (and removed). [Tested] public virtual bool Remove(T item, out T removeditem) { updatecheck(); int i = 0; Node node; #if HASHINDEX if (!dictremove(item, out node)) #else node = fIFO ? startsentinel.next : endsentinel.prev; if (!(fIFO ? 
// NOTE(review): this chunk was flattened by text extraction; generic type
// arguments (<T>, <U>, ...) appear stripped from the tokens. Tokens below are
// preserved verbatim; only line structure and comments were restored.
//
// Tail of a Remove overload whose head lies outside this chunk (kept verbatim):
// on lookup failure report false, otherwise unlink the node and raise the
// removal event on the underlying list (views delegate events to it).
find(item, ref node, ref i) : dnif(item, ref node, ref i)))
#endif
{
    removeditem = default(T);
    return false;
}
removeditem = node.item;
remove(node, i);
(underlying ?? this).raiseForRemove(removeditem);
return true;
}

/// <summary>
/// Remove all items in another collection from this one, taking multiplicities into account.
/// Always removes from the front of the list.
/// <para>The asymptotic running time complexity of this method is O(n+m+v*log(v)),
/// where n is the size of this list, m is the size of the items collection and
/// v is the number of views. The method will temporarily allocate memory of size O(m+v).</para>
/// </summary>
/// <param name="items">The items to remove.</param>
[Tested]
public virtual void RemoveAll(SCG.IEnumerable items) where U : T
{
    updatecheck();
    if (size == 0)
        return;
    RaiseForRemoveAllHandler raiseHandler = new RaiseForRemoveAllHandler(underlying ?? this);
    bool mustFire = raiseHandler.MustFire;
#if HASHINDEX
    // Hash-indexed variant: each item is unique, so one dictionary removal per item.
    Node node;
    foreach (T item in items)
        if (dictremove(item, out node))
        {
            if (mustFire)
                raiseHandler.Remove(node.item);
            // NOTE(review): the literal 118 is passed as the index argument of
            // remove(Node,int); remove() is outside this chunk, so the meaning of
            // this marker value cannot be confirmed here — verify against remove().
            remove(node, 118);
        }
#else
    HashBag toremove = new HashBag(itemequalityComparer);
    toremove.AddAll(items);
    ViewHandler viewHandler = new ViewHandler(this);
    int index = 0, removed = 0, myoffset = Offset;
    Node node = startsentinel.next;
    while (node != endsentinel)
    {
        //pass by a stretch of nodes
        while (node != endsentinel && !toremove.Contains(node.item))
        {
            node = node.next;
            index++;
        }
        viewHandler.skipEndpoints(removed, myoffset + index);
        //Remove a stretch of nodes
        Node localend = node.prev; //Latest node not to be removed
        while (node != endsentinel && toremove.Remove(node.item))
        {
            if (mustFire)
                raiseHandler.Remove(node.item);
            removed++;
            node = node.next;
            index++;
            viewHandler.updateViewSizesAndCounts(removed, myoffset + index);
        }
        // Splice the removed stretch out of the chain in one step.
        viewHandler.updateSentinels(myoffset + index, localend, node);
        localend.next = node;
        node.prev = localend;
    }
    index = underlying != null ? underlying.size + 1 - myoffset : size + 1 - myoffset;
    viewHandler.updateViewSizesAndCounts(removed, myoffset + index);
    size -= removed;
    if (underlying != null)
        underlying.size -= removed;
#endif
    raiseHandler.Raise();
}

/// <summary>
/// Remove all items satisfying the predicate. Events are raised for each removed item.
/// </summary>
/// <param name="predicate">The T -> bool condition selecting items to remove.</param>
void RemoveAll(Fun predicate)
{
    updatecheck();
    if (size == 0)
        return;
    RaiseForRemoveAllHandler raiseHandler = new RaiseForRemoveAllHandler(underlying ?? this);
    bool mustFire = raiseHandler.MustFire;
#if HASHINDEX
    {
        Node n = startsentinel.next;
        while (n != endsentinel)
        {
            bool removeIt = predicate(n.item);
            // updatecheck() after each predicate call: the predicate must not
            // have modified the list behind our back.
            updatecheck();
            if (removeIt)
            {
                dict.Remove(n.item);
                remove(n, 119);
                if (mustFire)
                    raiseHandler.Remove(n.item);
            }
            n = n.next;
        }
    }
#else
    ViewHandler viewHandler = new ViewHandler(this);
    int index = 0, removed = 0, myoffset = Offset;
    Node node = startsentinel.next;
    while (node != endsentinel)
    {
        //pass by a stretch of nodes
        while (node != endsentinel && !predicate(node.item))
        {
            updatecheck();
            node = node.next;
            index++;
        }
        updatecheck();
        viewHandler.skipEndpoints(removed, myoffset + index);
        //Remove a stretch of nodes
        Node localend = node.prev; //Latest node not to be removed
        while (node != endsentinel && predicate(node.item))
        {
            updatecheck();
            if (mustFire)
                raiseHandler.Remove(node.item);
            removed++;
            node = node.next;
            index++;
            viewHandler.updateViewSizesAndCounts(removed, myoffset + index);
        }
        updatecheck();
        viewHandler.updateSentinels(myoffset + index, localend, node);
        localend.next = node;
        node.prev = localend;
    }
    index = underlying != null ? underlying.size + 1 - myoffset : size + 1 - myoffset;
    viewHandler.updateViewSizesAndCounts(removed, myoffset + index);
    size -= removed;
    if (underlying != null)
        underlying.size -= removed;
#endif
    raiseHandler.Raise();
}

/// <summary>
/// Remove all items from this collection.
/// </summary>
[Tested]
public virtual void Clear()
{
    updatecheck();
    if (size == 0)
        return;
    int oldsize = size;
#if HASHINDEX
    // A view only owns its own slice of the dictionary entries; the underlying
    // list can drop the whole dictionary at once.
    if (underlying == null)
        dict.Clear();
    else
        foreach (T item in this)
            dict.Remove(item);
#endif
    clear();
    (underlying ?? this).raiseForRemoveInterval(Offset, oldsize);
}

// Unlink every node in this (view of the) list and fix up sibling views
// and (under HASHINDEX) tag groups. Does not touch the dictionary — callers do.
void clear()
{
    if (size == 0)
        return;
#if HASHINDEX
    //TODO: mix with tag maintenance to only run through list once?
    ViewHandler viewHandler = new ViewHandler(this);
    if (viewHandler.viewCount > 0)
    {
        int removed = 0;
        Node n = startsentinel.next;
        viewHandler.skipEndpoints(0, n);
        while (n != endsentinel)
        {
            removed++;
            n = n.next;
            viewHandler.updateViewSizesAndCounts(removed, n);
        }
        viewHandler.updateSentinels(endsentinel, startsentinel, endsentinel);
        if (underlying != null)
            viewHandler.updateViewSizesAndCounts(removed, underlying.endsentinel);
    }
#else
    fixViewsBeforeRemove(Offset, size, startsentinel.next, endsentinel.prev);
#endif
#if HASHINDEX
    if (underlying != null)
    {
        // Remove nodes one by one so their tag groups are maintained.
        Node n = startsentinel.next;
        while (n != endsentinel)
        {
            n.next.prev = startsentinel;
            startsentinel.next = n.next;
            removefromtaggroup(n);
            n = n.next;
        }
    }
    else
        taggroups = 0;
#endif
    endsentinel.prev = startsentinel;
    startsentinel.next = endsentinel;
    if (underlying != null)
        underlying.size -= size;
    size = 0;
}

/// <summary>
/// Remove all items not in some other collection from this one, taking multiplicities into account.
/// <para>The asymptotic running time complexity of this method is O(n+m+v*log(v)),
/// where n is the size of this collection, m is the size of the items collection and
/// v is the number of views. The method will temporarily allocate memory of size O(m+v).
/// The stated complexity holds under the assumption that the itemequalityComparer
/// of this list is well-behaved.</para>
/// </summary>
/// <param name="items">The items to retain.</param>
[Tested]
public virtual void RetainAll(SCG.IEnumerable items) where U : T
{
    updatecheck();
    if (size == 0)
        return;
    RaiseForRemoveAllHandler raiseHandler = new RaiseForRemoveAllHandler(underlying ?? this);
    bool mustFire = raiseHandler.MustFire;
#if HASHINDEX
    /*if (underlying == null) { HashDictionary newdict = new HashDictionary(itemequalityComparer); foreach (T item in items) { Node node; if (dict.Remove(item, out node)) newdict.Add(item, node); } foreach (KeyValuePair pair in dict) { Node n = pair.Value; fixViewsBeforeSingleRemove(n, 117); Node p = n.prev, s = n.next; s.prev = p; p.next = s; removefromtaggroup(n); } dict = newdict; size = dict.Count; //For a small number of items to retain it might be faster to //iterate through the list and splice out the chunks not needed } else*/
    {
        // Build the set of items to drop: everything currently here minus the keep-set.
        HashSet toremove = new HashSet(itemequalityComparer);
        foreach (T item in this)
            toremove.Add(item);
        foreach (T item in items)
            toremove.Remove(item);
        Node n = startsentinel.next;
        while (n != endsentinel && toremove.Count > 0)
        {
            if (toremove.Contains(n.item))
            {
                dict.Remove(n.item);
                remove(n, 119);
                if (mustFire)
                    raiseHandler.Remove(n.item);
            }
            n = n.next;
        }
    }
#else
    HashBag toretain = new HashBag(itemequalityComparer);
    toretain.AddAll(items);
    ViewHandler viewHandler = new ViewHandler(this);
    int index = 0, removed = 0, myoffset = Offset;
    Node node = startsentinel.next;
    while (node != endsentinel)
    {
        //Skip a stretch of nodes
        while (node != endsentinel && toretain.Remove(node.item))
        {
            node = node.next;
            index++;
        }
        viewHandler.skipEndpoints(removed, myoffset + index);
        //Remove a stretch of nodes
        Node localend = node.prev; //Latest node not to be removed
        while (node != endsentinel && !toretain.Contains(node.item))
        {
            if (mustFire)
                raiseHandler.Remove(node.item);
            removed++;
            node = node.next;
            index++;
            viewHandler.updateViewSizesAndCounts(removed, myoffset + index);
        }
        viewHandler.updateSentinels(myoffset + index, localend, node);
        localend.next = node;
        node.prev = localend;
    }
    index = underlying != null ? underlying.size + 1 - myoffset : size + 1 - myoffset;
    viewHandler.updateViewSizesAndCounts(removed, myoffset + index);
    size -= removed;
    if (underlying != null)
        underlying.size -= removed;
#endif
    raiseHandler.Raise();
}

/// <summary>
/// Remove all items not satisfying the predicate. Events are raised for each removed item.
/// </summary>
/// <param name="predicate">The T -> bool condition selecting items to retain.</param>
void RetainAll(Fun predicate)
{
    updatecheck();
    if (size == 0)
        return;
    RaiseForRemoveAllHandler raiseHandler = new RaiseForRemoveAllHandler(underlying ?? this);
    bool mustFire = raiseHandler.MustFire;
#if HASHINDEX
    {
        Node n = startsentinel.next;
        while (n != endsentinel)
        {
            bool removeIt = !predicate(n.item);
            // Guard against the predicate mutating the list.
            updatecheck();
            if (removeIt)
            {
                dict.Remove(n.item);
                remove(n, 119);
                if (mustFire)
                    raiseHandler.Remove(n.item);
            }
            n = n.next;
        }
    }
#else
    ViewHandler viewHandler = new ViewHandler(this);
    int index = 0, removed = 0, myoffset = Offset;
    Node node = startsentinel.next;
    while (node != endsentinel)
    {
        //Skip a stretch of nodes
        while (node != endsentinel && predicate(node.item))
        {
            updatecheck();
            node = node.next;
            index++;
        }
        updatecheck();
        viewHandler.skipEndpoints(removed, myoffset + index);
        //Remove a stretch of nodes
        Node localend = node.prev; //Latest node not to be removed
        while (node != endsentinel && !predicate(node.item))
        {
            updatecheck();
            if (mustFire)
                raiseHandler.Remove(node.item);
            removed++;
            node = node.next;
            index++;
            viewHandler.updateViewSizesAndCounts(removed, myoffset + index);
        }
        updatecheck();
        viewHandler.updateSentinels(myoffset + index, localend, node);
        localend.next = node;
        node.prev = localend;
    }
    index = underlying != null ? underlying.size + 1 - myoffset : size + 1 - myoffset;
    viewHandler.updateViewSizesAndCounts(removed, myoffset + index);
    size -= removed;
    if (underlying != null)
        underlying.size -= removed;
#endif
    raiseHandler.Raise();
}

/// <summary>
/// Check if this collection contains all the values in another collection
/// with respect to multiplicities.
/// </summary>
/// <param name="items">The items to look for.</param>
/// <returns>True if all values in items are in this collection.</returns>
// NOTE(review): generic type arguments appear stripped by text extraction
// throughout this region (e.g. "ICollectionValue>" below); tokens are
// preserved verbatim, only line structure and comments were restored.
[Tested]
public virtual bool ContainsAll(SCG.IEnumerable items) where U : T
{
    validitycheck();
#if HASHINDEX
    // Items are unique here, so a dictionary probe per item suffices.
    Node node;
    foreach (T item in items)
        if (!contains(item, out node))
            return false;
    return true;
#else
    HashBag tocheck = new HashBag(itemequalityComparer);
    tocheck.AddAll(items);
    if (tocheck.Count > size)
        return false;
    Node node = startsentinel.next;
    while (node != endsentinel)
    {
        tocheck.Remove(node.item);
        node = node.next;
    }
    return tocheck.IsEmpty;
#endif
}

/// <summary>
/// Create a new list consisting of the items of this list satisfying a
/// certain predicate.
/// </summary>
/// <param name="filter">The filter delegate defining the predicate.</param>
/// <returns>The new list.</returns>
[Tested]
public IList FindAll(Fun filter)
{
    validitycheck();
    int stamp = this.stamp;
    HashedLinkedList retval = new HashedLinkedList();
    Node cursor = startsentinel.next;
    Node mcursor = retval.startsentinel;
#if HASHINDEX
    // Spread tags of the result evenly over the int range: all result nodes
    // share one tag group.
    double tagdelta = int.MaxValue / (size + 1.0);
    int count = 1;
    TagGroup taggroup = new TagGroup();
    retval.taggroups = 1;
#endif
    while (cursor != endsentinel)
    {
        bool found = filter(cursor.item);
        // The filter must not have modified this list.
        modifycheck(stamp);
        if (found)
        {
            mcursor.next = new Node(cursor.item, mcursor, null);
            mcursor = mcursor.next;
            retval.size++;
#if HASHINDEX
            retval.dict.Add(cursor.item, mcursor);
            mcursor.taggroup = taggroup;
            mcursor.tag = (int)(tagdelta * count++);
#endif
        }
        cursor = cursor.next;
    }
#if HASHINDEX
    if (retval.size > 0)
    {
        taggroup.count = retval.size;
        taggroup.first = retval.startsentinel.next;
        taggroup.last = mcursor;
    }
#endif
    retval.endsentinel.prev = mcursor;
    mcursor.next = retval.endsentinel;
    return retval;
}

/// <summary>
/// Count the number of items of the collection equal to a particular value.
/// Returns 0 if and only if the value is not in the collection.
/// </summary>
/// <param name="item">The value to count.</param>
/// <returns>The number of copies found.</returns>
[Tested]
public virtual int ContainsCount(T item)
{
#if HASHINDEX
    // Set semantics: multiplicity is at most 1.
    return Contains(item) ? 1 : 0;
#else
    validitycheck();
    int retval = 0;
    Node node = startsentinel.next;
    while (node != endsentinel)
    {
        if (itemequalityComparer.Equals(node.item, item))
            retval++;
        node = node.next;
    }
    return retval;
#endif
}

/// <summary>
/// The distinct items of this collection. With HASHINDEX each item occurs once,
/// so the collection itself is returned.
/// </summary>
public virtual ICollectionValue UniqueItems()
{
#if HASHINDEX
    return this;
#else
    HashBag hashbag = new HashBag(itemequalityComparer);
    hashbag.AddAll(this);
    return hashbag.UniqueItems();
#endif
}

/// <summary>
/// The distinct items of this collection paired with their multiplicities.
/// </summary>
public virtual ICollectionValue> ItemMultiplicities()
{
#if HASHINDEX
    return new MultiplicityOne(this);
#else
    HashBag hashbag = new HashBag(itemequalityComparer);
    hashbag.AddAll(this);
    return hashbag.ItemMultiplicities();
#endif
}

/// <summary>
/// Remove all items equivalent to a given value.
/// <para>The asymptotic complexity of this method is O(n+v*log(v)),
/// where n is the size of the collection and v is the number of views.</para>
/// </summary>
/// <param name="item">The value to remove.</param>
[Tested]
public virtual void RemoveAllCopies(T item)
{
#if HASHINDEX
    // At most one copy can exist under set semantics.
    Remove(item);
#else
    updatecheck();
    if (size == 0)
        return;
    RaiseForRemoveAllHandler raiseHandler = new RaiseForRemoveAllHandler(underlying ?? this);
    bool mustFire = raiseHandler.MustFire;
    ViewHandler viewHandler = new ViewHandler(this);
    int index = 0, removed = 0, myoffset = Offset; //
    Node node = startsentinel.next;
    while (node != endsentinel)
    {
        //pass by a stretch of nodes
        while (node != endsentinel && !itemequalityComparer.Equals(node.item, item))
        {
            node = node.next;
            index++;
        }
        viewHandler.skipEndpoints(removed, myoffset + index);
        //Remove a stretch of nodes
        Node localend = node.prev; //Latest node not to be removed
        while (node != endsentinel && itemequalityComparer.Equals(node.item, item))
        {
            if (mustFire)
                raiseHandler.Remove(node.item);
            removed++;
            node = node.next;
            index++;
            viewHandler.updateViewSizesAndCounts(removed, myoffset + index);
        }
        viewHandler.updateSentinels(myoffset + index, localend, node);
        localend.next = node;
        node.prev = localend;
    }
    index = underlying != null ? underlying.size + 1 - myoffset : size + 1 - myoffset;
    viewHandler.updateViewSizesAndCounts(removed, myoffset + index);
    size -= removed;
    if (underlying != null)
        underlying.size -= removed;
    raiseHandler.Raise();
#endif
}

#endregion

#region ICollectionValue Members

/// <summary>
/// The number of items in this collection.
/// </summary>
[Tested]
public override int Count { [Tested]get { validitycheck(); return size; } }

/// <summary>
/// Choose some item of this collection.
/// </summary>
/// <exception cref="NoSuchItemException">if collection is empty.</exception>
[Tested]
public override T Choose() { return First; }

/// <summary>
/// Create an enumerable, enumerating the items of this collection that satisfies
/// a certain condition.
/// </summary>
/// <param name="filter">The T->bool filter delegate defining the condition</param>
/// <returns>The filtered enumerable</returns>
public override SCG.IEnumerable Filter(Fun filter) { validitycheck(); return base.Filter(filter); }

#endregion

#region IEnumerable Members

/// <summary>
/// Create an enumerator for the collection
/// </summary>
/// <returns>The enumerator</returns>
[Tested]
public override SCG.IEnumerator GetEnumerator()
{
    validitycheck();
    Node cursor = startsentinel.next;
    // Views share the stamp of the underlying list for modification detection.
    int enumeratorstamp = underlying != null ? underlying.stamp : this.stamp;
    while (cursor != endsentinel)
    {
        modifycheck(enumeratorstamp);
        yield return cursor.item;
        cursor = cursor.next;
    }
}

#endregion

#region IExtensible Members

/// <summary>
/// Add an item to this collection if possible.
/// </summary>
/// <param name="item">The item to add.</param>
/// <returns>True if the item was added (i.e. not already present under HASHINDEX).</returns>
[Tested]
public virtual bool Add(T item)
{
    updatecheck();
#if HASHINDEX
    Node node = new Node(item);
    if (!dict.FindOrAdd(item, ref node))
    {
        insertNode(true, endsentinel, node);
        (underlying ?? this).raiseForAdd(item);
        return true;
    }
    return false;
#else
    insert(size, endsentinel, item);
    (underlying ?? this).raiseForAdd(item);
    return true;
#endif
}

/// <summary>
/// False under HASHINDEX (set semantics), true otherwise (bag semantics).
/// </summary>
[Tested]
public virtual bool AllowsDuplicates
{
    [Tested]
    get
    {
#if HASHINDEX
        return false;
#else
        return true;
#endif
    }
}

/// <summary>
/// By convention this is true for any collection with set semantics.
/// </summary>
/// <value>True if only one representative of a group of equal items
/// is kept in the collection together with the total count.</value>
public virtual bool DuplicatesByCounting
{
    get
    {
#if HASHINDEX
        return true;
#else
        return false;
#endif
    }
}

/// <summary>
/// Add the elements from another collection with a more specialized item type
/// to this collection.
/// </summary>
/// <param name="items">The items to add</param>
[Tested]
public virtual void AddAll(SCG.IEnumerable items) where U : T
{
#if HASHINDEX
    updatecheck();
    int added = 0;
    Node pred = endsentinel.prev;
    foreach (U item in items)
    {
        Node node = new Node(item);
        // Only insert items not already present.
        if (!dict.FindOrAdd(item, ref node))
        {
            insertNode(false, endsentinel, node);
            added++;
        }
    }
    if (added > 0)
    {
        fixViewsAfterInsert(endsentinel, pred, added, 0);
        raiseForInsertAll(pred, size - added, added, false);
    }
#else
    insertAll(size, items, false);
#endif
}

#endregion

#if HASHINDEX
#else
#region IStack Members

/// <summary>
/// Push an item to the top of the stack.
/// </summary>
/// <param name="item">The item</param>
[Tested]
public void Push(T item) { InsertLast(item); }

/// <summary>
/// Pop the item at the top of the stack from the stack.
/// </summary>
/// <returns>The popped item.</returns>
[Tested]
public T Pop() { return RemoveLast(); }

#endregion

#region IQueue Members

/// <summary>
/// Enqueue an item at the back of the queue.
/// </summary>
/// <param name="item">The item</param>
[Tested]
public virtual void Enqueue(T item) { InsertLast(item); }

/// <summary>
/// Dequeue an item from the front of the queue.
/// </summary>
/// <returns>The item</returns>
[Tested]
public virtual T Dequeue() { return RemoveFirst(); }

#endregion
#endif

#region Diagnostic

// Verify that every registered view of this (underlying) list is internally
// consistent: valid flag, offset/size bounds, sentinel identity against the
// actual node chain, shared views list and back-pointer to this list.
// Writes diagnostics to Console and returns false on any violation.
private bool checkViews()
{
    if (underlying != null)
        throw new InternalException(System.Reflection.MethodInfo.GetCurrentMethod() + " called on a view");
    if (views == null)
        return true;
    bool retval = true;
    // Snapshot the chain (both sentinels included) for index-based sentinel checks.
    Node[] nodes = new Node[size + 2];
    int i = 0;
    Node n = startsentinel;
    while (n != null)
    {
        nodes[i++] = n;
        n = n.next;
    }
    //Console.WriteLine("###");
    foreach (HashedLinkedList view in views)
    {
        if (!view.isValid)
        {
            Console.WriteLine("Invalid view(hash {0}, offset {1}, size {2})",
              view.GetHashCode(), view.offset, view.size);
            retval = false;
            continue;
        }
        if (view.Offset > size || view.Offset < 0)
        {
            Console.WriteLine("Bad view(hash {0}, offset {1}, size {2}), Offset > underlying.size ({2})",
              view.GetHashCode(), view.offset, view.size, size);
            retval = false;
        }
        else if (view.startsentinel != nodes[view.Offset])
        {
            Console.WriteLine("Bad view(hash {0}, offset {1}, size {2}), startsentinel {3} should be {4}",
              view.GetHashCode(), view.offset, view.size,
              view.startsentinel + " " + view.startsentinel.GetHashCode(),
              nodes[view.Offset] + " " + nodes[view.Offset].GetHashCode());
            retval = false;
        }
        if (view.Offset + view.size > size || view.Offset + view.size < 0)
        {
            Console.WriteLine("Bad view(hash {0}, offset {1}, size {2}), end index > underlying.size ({3})",
              view.GetHashCode(), view.offset, view.size, size);
            retval = false;
        }
        else if (view.endsentinel != nodes[view.Offset + view.size + 1])
        {
            Console.WriteLine("Bad view(hash {0}, offset {1}, size {2}), endsentinel {3} should be {4}",
              view.GetHashCode(), view.offset, view.size,
              view.endsentinel + " " + view.endsentinel.GetHashCode(),
              nodes[view.Offset + view.size + 1] + " " + nodes[view.Offset + view.size + 1].GetHashCode());
            retval = false;
        }
        if (view.views != views)
        {
            Console.WriteLine("Bad view(hash {0}, offset {1}, size {2}), wrong views list {3} <> {4}",
              view.GetHashCode(), view.offset, view.size,
              view.views.GetHashCode(), views.GetHashCode());
            retval = false;
        }
        if (view.underlying != this)
        {
            Console.WriteLine("Bad view(hash {0}, offset {1}, size {2}), wrong underlying {3} <> this {4}",
              view.GetHashCode(), view.offset, view.size,
              view.underlying.GetHashCode(), GetHashCode());
            retval = false;
        }
        if (view.stamp != stamp)
        {
            // Stamp mismatch check deliberately disabled.
            //Console.WriteLine("Bad view(hash {0}, offset {1}, size {2}), wrong stamp view:{2} underlying: {3}", view.GetHashCode(),view.offset, view.size, view.stamp, stamp);
            //retval = false;
        }
    }
    return retval;
}

// Null-safe item-to-string helper for diagnostics.
string zeitem(Node node) { return node == null ? "(null node)" : node.item.ToString(); }

/// <summary>
/// Check the sanity of this list
/// </summary>
/// <returns>true if sane</returns>
[Tested]
public virtual bool Check()
{
    bool retval = true;
    /*if (underlying != null && underlying.stamp != stamp) { Console.WriteLine("underlying != null && underlying.stamp({0}) != stamp({1})", underlying.stamp, stamp); retval = false; }*/
    if (underlying != null)
    {
        //TODO: check that this view is included in viewsEndpoints tree
        return underlying.Check();
    }
    if (startsentinel == null)
    {
        Console.WriteLine("startsentinel == null");
        retval = false;
    }
    if (endsentinel == null)
    {
        Console.WriteLine("endsentinel == null");
        retval = false;
    }
    if (size == 0)
    {
        if (startsentinel != null && startsentinel.next != endsentinel)
        {
            Console.WriteLine("size == 0 but startsentinel.next != endsentinel");
            retval = false;
        }
        if (endsentinel != null && endsentinel.prev != startsentinel)
        {
            Console.WriteLine("size == 0 but endsentinel.prev != startsentinel");
            retval = false;
        }
    }
    if (startsentinel == null)
    {
        Console.WriteLine("NULL startsentinel");
        return retval;
    }
    int count = 0;
    Node node = startsentinel.next, prev = startsentinel;
#if HASHINDEX
    int taggroupsize = 0, oldtaggroupsize = losize + 1, seentaggroups = 0;
    TagGroup oldtg = null;
    if (underlying == null)
    {
        // Sentinels carry empty tag groups pinned at the extreme tag values.
        TagGroup tg = startsentinel.taggroup;
        if (tg.count != 0 || tg.first != null || tg.last != null || tg.tag != int.MinValue)
        {
            Console.WriteLine("Bad startsentinel tag group: {0}", tg);
            retval = false;
        }
        tg = endsentinel.taggroup;
        if (tg.count != 0 || tg.first != null || tg.last != null || tg.tag != int.MaxValue)
        {
            Console.WriteLine("Bad endsentinel tag group: {0}", tg);
            retval = false;
        }
    }
#endif
    while (node != endsentinel)
    {
        count++;
        if (node.prev != prev)
        {
            Console.WriteLine("Bad backpointer at node {0}", count);
            retval = false;
        }
#if HASHINDEX
        if (underlying == null)
        {
            // Tags must be strictly increasing along the list.
            if (!node.prev.precedes(node))
            {
                Console.WriteLine("node.prev.tag ({0}, {1}) >= node.tag ({2}, {3}) at index={4} item={5} ",
                  node.prev.taggroup.tag, node.prev.tag, node.taggroup.tag, node.tag, count, node.item);
                retval = false;
            }
            if (node.taggroup != oldtg)
            {
                // Entering a new tag group: validate the one we just left.
                if (node.taggroup.first != node)
                {
                    string ntfi = zeitem(node.taggroup.first);
                    Console.WriteLine("Bad first pointer in taggroup: node.taggroup.first.item ({0}), node.item ({1}) at index={2} item={3}",
                      ntfi, node.item, count, node.item);
                    retval = false;
                }
                if (oldtg != null)
                {
                    if (oldtg.count != taggroupsize)
                    {
                        Console.WriteLine("Bad taggroupsize: oldtg.count ({0}) != taggroupsize ({1}) at index={2} item={3}",
                          oldtg.count, taggroupsize, count, node.item);
                        retval = false;
                    }
                    if (oldtaggroupsize <= losize && taggroupsize <= losize)
                    {
                        Console.WriteLine("Two small taggroups in a row: oldtaggroupsize ({0}), taggroupsize ({1}) at index={2} item={3}",
                          oldtaggroupsize, taggroupsize, count, node.item);
                        retval = false;
                    }
                    if (node.taggroup.tag <= oldtg.tag)
                    {
                        Console.WriteLine("Taggroup tags not strictly increasing: oldtaggrouptag ({0}), taggrouptag ({1}) at index={2} item={3}",
                          oldtg.tag, node.taggroup.tag, count, node.item);
                        retval = false;
                    }
                    if (oldtg.last != node.prev)
                    {
                        Console.WriteLine("Bad last pointer in taggroup: oldtg.last.item ({0}), node.prev.item ({1}) at index={2} item={3}",
                          oldtg.last.item, node.prev.item, count, node.item);
                        retval = false;
                    }
                    oldtaggroupsize = taggroupsize;
                }
                seentaggroups++;
                oldtg = node.taggroup;
                taggroupsize = 1;
            }
            else
            {
                taggroupsize++;
            }
        }
#endif
        prev = node;
        node = node.next;
        if (node == null)
        {
            Console.WriteLine("Null next pointer at node {0}", count);
            return false;
        }
    }
#if HASHINDEX
    if (underlying == null && size == 0 && taggroups != 0)
    {
        Console.WriteLine("Bad taggroups for empty list: size={0} taggroups={1}", size, taggroups);
        retval = false;
    }
    if (underlying == null && size > 0)
    {
        // Validate the final tag group, which the loop above never closes.
        oldtg = node.prev.taggroup;
        if (oldtg != null)
        {
            if (oldtg.count != taggroupsize)
            {
                Console.WriteLine("Bad taggroupsize: oldtg.count ({0}) != taggroupsize ({1}) at index={2} item={3}",
                  oldtg.count, taggroupsize, count, node.item);
                retval = false;
            }
            if (oldtaggroupsize <= losize && taggroupsize <= losize)
            {
                Console.WriteLine("Two small taggroups in a row: oldtaggroupsize ({0}), taggroupsize ({1}) at index={2} item={3}",
                  oldtaggroupsize, taggroupsize, count, node.item);
                retval = false;
            }
            if (node.taggroup.tag <= oldtg.tag)
            {
                Console.WriteLine("Taggroup tags not strictly increasing: oldtaggrouptag ({0}), taggrouptag ({1}) at index={2} item={3}",
                  oldtg.tag, node.taggroup.tag, count, node.item);
                retval = false;
            }
            if (oldtg.last != node.prev)
            {
                Console.WriteLine("Bad last pointer in taggroup: oldtg.last.item ({0}), node.prev.item ({1}) at index={2} item={3}",
                  zeitem(oldtg.last), zeitem(node.prev), count, node.item);
                retval = false;
            }
        }
        if (seentaggroups != taggroups)
        {
            Console.WriteLine("seentaggroups ({0}) != taggroups ({1}) (at size {2})", seentaggroups, taggroups, size);
            retval = false;
        }
    }
#endif
    if (count != size)
    {
        Console.WriteLine("size={0} but enumeration gives {1} nodes ", size, count);
        retval = false;
    }
    retval = checkViews() && retval;
#if HASHINDEX
    if (!retval)
        return false;
    if (underlying == null)
    {
        // The index dictionary must agree exactly with the node chain.
        if (size != dict.Count)
        {
            Console.WriteLine("list.size ({0}) != dict.Count ({1})", size, dict.Count);
            retval = false;
        }
        Node n = startsentinel.next, n2;
        while (n != endsentinel)
        {
            if (!dict.Find(n.item, out n2))
            {
                Console.WriteLine("Item in list but not dict: {0}", n.item);
                retval = false;
            }
            else if (n != n2)
            {
                Console.WriteLine("Wrong node in dict for item: {0}", n.item);
                retval = false;
            }
            n = n.next;
        }
    }
#endif
    return retval;
}

#endregion

#region ICloneable Members

/// <summary>
/// Make a shallow copy of this HashedLinkedList.
/// </summary>
/// <returns>The clone (same items, same equality comparer, fresh node chain).</returns>
public virtual object Clone()
{
    HashedLinkedList clone = new HashedLinkedList(itemequalityComparer);
    clone.AddAll(this);
    return clone;
}

#endregion

#region System.Collections.Generic.IList Members

void System.Collections.Generic.IList.RemoveAt(int index) { RemoveAt(index); }

void System.Collections.Generic.ICollection.Add(T item) { Add(item); }

#endregion

#region System.Collections.ICollection Members

bool System.Collections.ICollection.IsSynchronized { get { return false; } }

[Obsolete]
Object System.Collections.ICollection.SyncRoot
{
    // Presumably safe to use the startsentinel (of type Node, always != null) as SyncRoot
    // since the class Node is private.
    get { return underlying != null ? ((System.Collections.ICollection)underlying).SyncRoot : startsentinel; }
}

void System.Collections.ICollection.CopyTo(Array arr, int index)
{
    if (index < 0 || index + Count > arr.Length)
        throw new ArgumentOutOfRangeException();
    foreach (T item in this)
        arr.SetValue(item, index++);
}

#endregion

#region System.Collections.IList Members

Object System.Collections.IList.this[int index] { get { return this[index]; } set { this[index] = (T)value; } }

int System.Collections.IList.Add(Object o)
{
    bool added = Add((T)o);
    // What position to report if item not added? SC.IList.Add doesn't say
    return added ? Count - 1 : -1;
}

bool System.Collections.IList.Contains(Object o) { return Contains((T)o); }

int System.Collections.IList.IndexOf(Object o) { return Math.Max(-1, IndexOf((T)o)); }

void System.Collections.IList.Insert(int index, Object o) { Insert(index, (T)o); }

void System.Collections.IList.Remove(Object o) { Remove((T)o); }

void System.Collections.IList.RemoveAt(int index) { RemoveAt(index); }

#endregion
}
}