Unless you are absolutely sure that it is critical to avoid redundant requests, I would generally avoid blocking. The ASP.NET cache is inherently thread-safe, so the only drawback of the following code is that you may temporarily see a few redundant requests racing each other when their associated cache entry expires:
/// <summary>
/// Returns the search results for <paramref name="query"/>, serving them from
/// the ASP.NET cache when present and falling back to the slow database lookup
/// on a miss. No locking: concurrent misses may each hit the database, but the
/// cache itself is thread-safe, so the duplicates are merely redundant work.
/// </summary>
/// <param name="query">The search query; also used as the cache key.</param>
/// <returns>The (possibly cached) result string for the query.</returns>
public static string DoSearch(string query)
{
    var results = (string)HttpContext.Current.Cache[query];
    if (results == null)
    {
        results = GetResultsFromSlowDb(query);

        // UtcNow, not Now: absolute expirations should be expressed in UTC so
        // a daylight-saving transition cannot shorten or stretch the lifetime.
        HttpContext.Current.Cache.Insert(query, results, null,
            DateTime.UtcNow.AddHours(1), Cache.NoSlidingExpiration);
    }
    return results;
}
If you decide that you really do need to avoid all redundant requests, you can use a set of more granular locks — one lock per distinct query — so that different queries never block each other:
/// <summary>
/// Returns the search results for <paramref name="query"/>, guaranteeing that
/// at most one thread performs the slow database lookup per query at a time.
/// Cache misses are serialised on a per-query lock object, so distinct queries
/// never block one another.
/// </summary>
/// <param name="query">The search query; also used as the cache key.</param>
/// <returns>The (possibly cached) result string for the query.</returns>
public static string DoSearch(string query)
{
    var results = (string)HttpContext.Current.Cache[query];
    if (results == null)
    {
        // Take (or create) the lock object dedicated to this query.
        object miniLock = _miniLocks.GetOrAdd(query, k => new object());
        lock (miniLock)
        {
            // Double-check: another thread may have populated the cache
            // while we were waiting to acquire the lock.
            results = (string)HttpContext.Current.Cache[query];
            if (results == null)
            {
                results = GetResultsFromSlowDb(query);

                // UtcNow, not Now: absolute expirations should be in UTC so a
                // daylight-saving transition cannot alter the entry's lifetime.
                HttpContext.Current.Cache.Insert(query, results, null,
                    DateTime.UtcNow.AddHours(1), Cache.NoSlidingExpiration);
            }

            // Best-effort cleanup so _miniLocks does not grow without bound.
            // BUG FIX: ConcurrentDictionary has no TryRemove(key) overload, so
            // the original `_miniLocks.TryRemove(query)` does not compile.
            // Removing through the ICollection<KeyValuePair<...>> interface
            // atomically removes the entry only if it still maps to *this*
            // lock object, so we never evict a newer lock taken by a racer.
            object temp;
            if (_miniLocks.TryGetValue(query, out temp) && (temp == miniLock))
                ((ICollection<KeyValuePair<string, object>>)_miniLocks)
                    .Remove(new KeyValuePair<string, object>(query, miniLock));
        }
    }
    return results;
}

// One lock object per in-flight query; entries are removed (best-effort)
// once the corresponding result has been cached.
private static readonly ConcurrentDictionary<string, object> _miniLocks =
    new ConcurrentDictionary<string, object>();
Lukeh
source share