The scenario is as follows:
There are a couple of low priority threads that can be interrupted by high priority threads. Whenever a high priority thread asks the low priority threads to pause, they go into a Wait state (if they are not waiting already). However, when a high priority thread signals that the low priority threads can resume, they should not resume until all of the high priority threads that asked them to pause have consented. For example, if two high priority threads A and B both ask for a pause, the low priority threads must stay paused after A signals Resume and may only continue once B has signalled Resume as well.
To solve this I keep track of the Pause() calls from the high priority threads in a counter variable. Whenever a high priority thread calls Pause(), the counter is incremented by 1; if the value after the increment is 1, the low priority threads were not waiting yet, so they are asked to go into the Wait state. Otherwise only the counter changes. Conversely, when a high priority thread calls Resume(), the counter is decremented, and if the value after the decrement is 0, the low priority threads can resume.
Here is a simplified implementation of my problem. The comparison against the value returned by Interlocked.XXX inside the if statements is not correct, i.e.

if (Interlocked.Increment(ref _remain) == 1)

because the read/modify and the comparison are not performed as a single atomic operation.

What am I missing here? I don't want to use thread priorities.
using System;
using System.Collections.Generic;
using System.Threading;

namespace TestConcurrency
{
    // I borrowed this class from Joe Duffy's blog and modified it
    public class LatchCounter
    {
        private long _remain;
        private EventWaitHandle m_event;
        private readonly object _lockObject;

        public LatchCounter()
        {
            _remain = 0;
            m_event = new ManualResetEvent(true);
            _lockObject = new object();
        }

        public void Check()
        {
            if (Interlocked.Read(ref _remain) > 0)
            {
                m_event.WaitOne();
            }
        }

        public void Increment()
        {
            lock (_lockObject)
            {
                if (Interlocked.Increment(ref _remain) == 1)
                    m_event.Reset();
            }
        }

        public void Decrement()
        {
            lock (_lockObject)
            {
                // The last thread to signal also sets the event.
                if (Interlocked.Decrement(ref _remain) == 0)
                    m_event.Set();
            }
        }
    }

    public class LowPriorityThreads
    {
        private List<Thread> _threads;
        private LatchCounter _latch;
        private int _threadCount = 1;

        internal LowPriorityThreads(int threadCount)
        {
            _threadCount = threadCount;
            _threads = new List<Thread>();
            for (int i = 0; i < _threadCount; i++)
            {
                _threads.Add(new Thread(ThreadProc));
            }
            _latch = new LatchCounter();
        }

        public void Start()
        {
            foreach (Thread t in _threads)
            {
                t.Start();
            }
        }

        void ThreadProc()
        {
            while (true)
            {
                // Do something
                Thread.Sleep(Rand.Next());
                _latch.Check();
            }
        }

        internal void Pause()
        {
            _latch.Increment();
        }

        internal void Resume()
        {
            _latch.Decrement();
        }
    }

    public class HighPriorityThreads
    {
        private Thread _thread;
        private LowPriorityThreads _lowPriorityThreads;

        internal HighPriorityThreads(LowPriorityThreads lowPriorityThreads)
        {
            _lowPriorityThreads = lowPriorityThreads;
            _thread = new Thread(RandomlyInterruptLowPriortyThreads);
        }

        public void Start()
        {
            _thread.Start();
        }

        void RandomlyInterruptLowPriortyThreads()
        {
            while (true)
            {
                Thread.Sleep(Rand.Next());
                _lowPriorityThreads.Pause();
                Thread.Sleep(Rand.Next());
                _lowPriorityThreads.Resume();
            }
        }
    }

    class Program
    {
        static void Main(string[] args)
        {
            LowPriorityThreads lowPriorityThreads = new LowPriorityThreads(3);
            HighPriorityThreads highPriorityThreadOne = new HighPriorityThreads(lowPriorityThreads);
            HighPriorityThreads highPriorityThreadTwo = new HighPriorityThreads(lowPriorityThreads);

            lowPriorityThreads.Start();
            highPriorityThreadOne.Start();
            highPriorityThreadTwo.Start();
        }
    }

    class Rand
    {
        internal static int Next()
        {
            // Guid idea has been borrowed from somewhere on StackOverFlow coz I like it
            return new System.Random(Guid.NewGuid().GetHashCode()).Next() % 30000;
        }
    }
}
I don't know your requirements, so I won't discuss them here. As far as the implementation goes, I would introduce a "dispatcher" class that handles the inter-thread interaction and also acts as a factory for "runnable" objects.
The implementation, of course, is very rough and open to criticism.
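To make the "dispatcher" idea a bit more concrete, here is a minimal sketch of the kind of thing I mean, reusing the LatchCounter from the question and assuming it sits in the same namespace. The Dispatcher and IRunnable names are placeholders I picked for illustration, not an existing API:

public interface IRunnable
{
    // One unit of low priority work; kept short so that pause requests
    // take effect at the next iteration.
    void Step();
}

public class Dispatcher
{
    private readonly LatchCounter _latch = new LatchCounter();
    private readonly List<Thread> _workers = new List<Thread>();

    // Factory method: wraps a runnable in a worker thread that checks the
    // shared latch between steps.
    public Thread CreateWorker(IRunnable runnable)
    {
        Thread worker = new Thread(() =>
        {
            while (true)
            {
                _latch.Check();   // blocks while any pause request is outstanding
                runnable.Step();
            }
        });
        _workers.Add(worker);
        return worker;
    }

    public void StartAll()
    {
        foreach (Thread worker in _workers)
            worker.Start();
    }

    // High priority callers use these instead of talking to the workers directly.
    public void RequestPause()  { _latch.Increment(); }
    public void ConsentResume() { _latch.Decrement(); }
}

High priority threads would then call RequestPause() and ConsentResume() on the dispatcher, and the workers block at their next Check() until every outstanding pause request has been matched by a resume.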