using System;
using System.Collections.Generic;

public class Test
{
    public static void Main()
    {
        var listM = new List<int>();
        var listN = new List<int>();
        for(int i = 0, x = 0; x < 50; i += 13, x++) { listM.Add(i); }
        for(int i = 0, x = 0; x < 10000; i += 7, x++) { listN.Add(i); }
        Console.WriteLine(SortedExcept(listM, listN).Count);
    }

    // O(M log N): one binary search over n for each element of m.
    public static List<T> SortedExcept<T>(List<T> m, List<T> n)
    {
        var result = new List<T>();
        foreach(var itm in m)
        {
            var index = n.BinarySearch(itm);
            if(index < 0) // negative means itm is not present in n
            {
                result.Add(itm);
            }
        }
        return result;
    }
}
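(For reference: List&lt;T&gt;.BinarySearch returns the bitwise complement of the insertion point when the item is absent, which is always negative; that is why the index &lt; 0 test identifies the elements of m to keep.)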
EDIT: Here is an O(M + N) version as well:
public static List<T> SortedExcept2<T>(List<T> m, List<T> n) where T : IComparable<T>
{
    var result = new List<T>();
    int i = 0, j = 0;
    if(n.Count == 0)
    {
        result.AddRange(m);
        return result;
    }
    while(i < m.Count)
    {
        if(m[i].CompareTo(n[j]) < 0)      // m[i] cannot occur in n; keep it
        {
            result.Add(m[i]);
            i++;
        }
        else if(m[i].CompareTo(n[j]) > 0) // n[j] is behind; advance n
        {
            j++;
        }
        else                              // equal; m[i] is in n, skip it
        {
            i++;
        }
        if(j >= n.Count) // n exhausted; everything left in m is kept
        {
            for(; i < m.Count; i++)
            {
                result.Add(m[i]);
            }
            break;
        }
    }
    return result;
}
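For illustration, a small driver (my own example, not from the original test) showing that both versions agree on the same sorted inputs:

    var a = new List<int> { 1, 3, 5, 7, 9 };
    var b = new List<int> { 3, 4, 7, 8 };
    Console.WriteLine(string.Join(", ", SortedExcept(a, b)));  // 1, 5, 9
    Console.WriteLine(string.Join(", ", SortedExcept2(a, b))); // 1, 5, 9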
In a quick and dirty test, http://ideone.com/Y2oEQD the M + N version is always faster, even when N is 10 million. BinarySearch pays a penalty because it accesses the array memory in a non-linear pattern, causing cache misses that slow the algorithm down; the larger N is, the more cache misses the BinarySearch version incurs.
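A quick-and-dirty timing harness along those lines might look like the sketch below (the sizes and measurement details are my assumptions; the linked ideone snippet may differ). It uses System.Diagnostics.Stopwatch and assumes SortedExcept and SortedExcept2 from above are in scope:

    // assumed setup: small sorted m, large sorted n
    var m = new List<int>();
    var n = new List<int>();
    for(int i = 0; i < 50; i++) { m.Add(i * 13); }
    for(int i = 0; i < 10000000; i++) { n.Add(i * 7); }

    var sw = Stopwatch.StartNew();
    var r1 = SortedExcept(m, n);   // O(M log N), non-linear memory access
    sw.Stop();
    Console.WriteLine("BinarySearch: {0} ms, {1} items", sw.ElapsedMilliseconds, r1.Count);

    sw.Restart();
    var r2 = SortedExcept2(m, n);  // O(M + N), sequential memory access
    sw.Stop();
    Console.WriteLine("Merge:        {0} ms, {1} items", sw.ElapsedMilliseconds, r2.Count);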