Best way to accept multiple tcp clients?

I have a client / server infrastructure. They currently use TcpClient and TcpListener to send and receive data between all the clients and the server.

The way it works now is: when data is received (on its own thread), it is added to a queue for another thread to process, in order to free the socket so that it is ready to receive new data.

                // Enter the listening loop.
                while (true)
                {
                    Debug.WriteLine("Waiting for a connection... ");

                    // Perform a blocking call to accept requests.
                    using (client = server.AcceptTcpClient())
                    {
                        data = new List<byte>();

                        // Get a stream object for reading and writing
                        using (NetworkStream stream = client.GetStream())
                        {
                            // Loop to receive all the data sent by the client.
                            int length;

                            while ((length = stream.Read(bytes, 0, bytes.Length)) != 0)
                            {
                                var copy = new byte[length];
                                Array.Copy(bytes, 0, copy, 0, length);
                                data.AddRange(copy);
                            }
                        }
                    }

                    receivedQueue.Add(data);
                }

However, I would like to know if there is a better way to do this. For example, if there are 10 clients and all of them want to send data to the server at the same time, only one gets through while all the others have to wait. And if one client has a slow connection and hogs the socket, all other communication stops.

Is there a way to receive data from several clients at the same time, without one of them blocking the others?

+5
4 answers

There are better ways to structure this: what you want is asynchronous socket I/O, so that a slow or busy client never blocks the others.

.NET exposes asynchronous socket operations through the Begin*/End* method pairs, for example BeginReceive and EndReceive. These are the asynchronous counterparts of the blocking calls (such as Receive); they return immediately instead of blocking the calling thread.

Under the hood, on Windows these are backed by IOCP (IO Completion Ports; on Linux/Mono a different mechanism is used), which scales very well: with IOCP no thread sits blocked waiting for data, and a thread is only borrowed from the pool when data actually arrives.

Asynchronous sending / receiving

Every Begin* method takes two extra parameters compared to its synchronous counterpart: an AsyncCallback and a user state object. The callback means "call this method when the operation completes"; the state object means "pass this object along to that callback". Inside the callback you call the matching End* method, which completes the operation and gives you its result, for example the number of bytes received. A simple example:

private Socket _socket;                  // the connected socket (set up elsewhere)
private byte[] buffer = new byte[4096];  // receive buffer

private void BeginReceiveBuffer()
{
   _socket.BeginReceive(buffer, 0, buffer.Length, SocketFlags.None, EndReceiveBuffer, buffer);
}

private void EndReceiveBuffer(IAsyncResult state)
{
   var buffer = (byte[])state.AsyncState; // The state object - the last parameter passed to BeginReceive.
   var length = _socket.EndReceive(state); // Number of bytes received (what the blocking Receive would return).
   DataReceived(buffer, 0, length); // Do something with the data.
}

.NET will call EndReceiveBuffer for you when data arrives; the state object you passed in (here the buffer) comes back through state.AsyncState, and EndReceive returns the number of bytes that were read (it will throw if something went wrong on the socket).

- , "" (, TCP , , Stream ).

private Socket _socket;
private ArraySegment<byte> _buffer;
public void StartReceive()
{
    ReceiveAsyncLoop(null);
}

// Note that this method is not guaranteed (in fact
// unlikely) to remain on a single thread across
// async invocations.
private void ReceiveAsyncLoop(IAsyncResult result)
{
    try
    {
        // This only gets called once - via StartReceive()
        if (result != null)
        {
            int numberOfBytesRead = _socket.EndReceive(result);
            if(numberOfBytesRead == 0)
            {
                OnDisconnected(null); // 'null' being the exception. The client disconnected normally in this case.
                return;
            }

            var newSegment = new ArraySegment<byte>(_buffer.Array, _buffer.Offset, numberOfBytesRead);
            // This method needs its own error handling. Don't let it throw exceptions unless you
            // want to disconnect the client.
            OnDataReceived(newSegment);
        }

        // Because this method passes itself as the BeginReceive callback,
        // it effectively forms a 'while' loop - just an asynchronous one.
        _socket.BeginReceive(_buffer.Array, _buffer.Offset, _buffer.Count, SocketFlags.None, ReceiveAsyncLoop, null);
    }
    catch (Exception ex)
    {
        // Socket error handling here.
    }
}

On the server side you then create one such receiving object per accepted client, i.e. each connection gets its own socket, buffer and receive loop. For example:

public class InboundConnection
{
    private Socket _socket;
    private ArraySegment<byte> _buffer;

    public InboundConnection(Socket clientSocket)
    {
        _socket = clientSocket;
        _buffer = new ArraySegment<byte>(new byte[4096], 0, 4096);
        StartReceive(); // Start the read async loop.
    }

    private void StartReceive() ...
    private void ReceiveAsyncLoop() ...
    private void OnDataReceived() ...
}

(Obviously you would also need things like error handling, a way to send data, and connect/disconnect notifications.)
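For completeness, the accept side can use the same Begin*/End* pattern, so the server never ties up a thread waiting for new clients either. A minimal sketch, assuming the InboundConnection class above (the Server class name, port and backlog here are arbitrary):

using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;

public class Server
{
    private readonly Socket _listener =
        new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
    private readonly List<InboundConnection> _clients = new List<InboundConnection>();

    public void Start()
    {
        _listener.Bind(new IPEndPoint(IPAddress.Any, 9000)); // arbitrary port
        _listener.Listen(100);                               // arbitrary backlog
        _listener.BeginAccept(AcceptAsyncLoop, null);        // start the async accept loop
    }

    // Same shape as ReceiveAsyncLoop: each completed accept immediately
    // posts the next one, so no thread blocks waiting for new clients.
    private void AcceptAsyncLoop(IAsyncResult result)
    {
        try
        {
            Socket clientSocket = _listener.EndAccept(result);
            lock (_clients)
            {
                _clients.Add(new InboundConnection(clientSocket)); // starts its own receive loop
            }

            _listener.BeginAccept(AcceptAsyncLoop, null); // wait for the next client
        }
        catch (ObjectDisposedException)
        {
            // The listener was closed; stop accepting.
        }
    }
}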

+15

Hand each accepted client off to a thread-pool thread, so the accept loop is never blocked by a slow client, for example:

// Enter the listening loop.
while (true)
{
    Debug.WriteLine("Waiting for a connection... ");

    client = server.AcceptTcpClient();

    ThreadPool.QueueUserWorkItem(new WaitCallback(HandleTcp), client);
}

private void HandleTcp(object tcpClientObject)
{
    TcpClient client = (TcpClient)tcpClientObject;

    // Local buffers so that concurrent clients do not share state.
    var data = new List<byte>();
    var bytes = new byte[4096];

    // Get a stream object for reading and writing
    using (NetworkStream stream = client.GetStream())
    {
        // Loop to receive all the data sent by the client.
        int length;

        while ((length = stream.Read(bytes, 0, bytes.Length)) != 0)
        {
            var copy = new byte[length];
            Array.Copy(bytes, 0, copy, 0, length);
            data.AddRange(copy);
        }
    }

    receivedQueue.Add(data);
} 

To wake the processing thread when new data is queued you could use an AutoResetEvent or a ManualResetEvent, but if you are on .NET 4.0 it is simpler to replace the plain Queue with a BlockingCollection.
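A minimal sketch of the BlockingCollection approach (the wrapper class and the processData delegate are illustrative names, not part of the answer above):

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading;

public class ReceiveQueue
{
    // Thread-safe queue; Add can be called from any receiving thread.
    private readonly BlockingCollection<List<byte>> _receivedQueue =
        new BlockingCollection<List<byte>>();

    public void Add(List<byte> data)
    {
        _receivedQueue.Add(data);
    }

    public void StartProcessing(Action<List<byte>> processData)
    {
        new Thread(() =>
        {
            // GetConsumingEnumerable blocks while the queue is empty, so no
            // AutoResetEvent/ManualResetEvent signalling is needed.
            foreach (List<byte> data in _receivedQueue.GetConsumingEnumerable())
                processData(data);
        }) { IsBackground = true }.Start();
    }
}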

+1

Use the asynchronous socket methods; there is a good example of an asynchronous server on MSDN.

+1

Accept the connections in a single loop and hand each accepted client (the object you currently wrap in a using block) off to a worker thread taken from a pool of limited size.

That way you get both throughput, because several connections are serviced at the same time, and a bound on the resources (threads, etc.) you allocate to handling incoming connections.

You have a good example here
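As a rough sketch of that idea (this is not the linked example; the class name, port and handler below are placeholders): accept in one loop and let a fixed number of worker threads pull accepted clients from a queue.

using System.Collections.Concurrent;
using System.Net;
using System.Net.Sockets;
using System.Threading;

public class BoundedTcpServer
{
    private readonly TcpListener _listener = new TcpListener(IPAddress.Any, 9000); // arbitrary port
    private readonly BlockingCollection<TcpClient> _pending = new BlockingCollection<TcpClient>();

    public void Run(int workerCount)
    {
        // Fixed-size pool: at most workerCount clients are serviced at once,
        // which bounds the threads spent on incoming connections.
        for (int i = 0; i < workerCount; i++)
            new Thread(Worker) { IsBackground = true }.Start();

        _listener.Start();
        while (true)
            _pending.Add(_listener.AcceptTcpClient()); // the accept loop never waits on slow clients
    }

    private void Worker()
    {
        foreach (TcpClient client in _pending.GetConsumingEnumerable())
        {
            using (client)                               // dispose the client when done, as in the question
            using (NetworkStream stream = client.GetStream())
            {
                HandleClient(stream);                    // placeholder for the per-connection read logic
            }
        }
    }

    private void HandleClient(NetworkStream stream)
    {
        // Read and queue data here, e.g. the loop from the question.
    }
}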

Good luck.

0