I'm having problems with the streaming code that I use with Oracle ManagedDataAccess.dll. To simulate a problem, I created a small test application that opens a connection, executes a request, and closes the connection.
The thing that happens is that the random connection time request (ODP-1000) occurs in conn.open () when this code is run. Now I have done a lot of Googeling there, and he tells me that the pool size is too small, but here it is not, because then I get the exception "Connection time to pool" (ODP-1012). I also played with the properties of the connection string, and in the test application I can achieve a perfect war when I set a higher "connection timeout", but the fact is that this does not help me with the application I am working with.
Any help or understanding will be greatly appreciated!
class Program
{
static readonly object _object = new object();
static string connectionstring = @"Data Source=(DESCRIPTION=(ADDRESS_LIST=(ADDRESS=(PROTOCOL=TCP)(HOST=oralinux.contoso.com)(PORT=1521)))(CONNECT_DATA=(SERVER=DEDICATED)(SERVICE_NAME=DB)));User Id=system;Password=xxxxxx;Pooling=True;Min Pool Size=;Max Pool Size=20;Incr Pool Size=10;Decr Pool Size=1;Connection Lifetime=0;Connection Timeout=1;Self Tuning=false";
static string query = "select to_char(max(end_time),'Mon-DD-YYYY HH24:MI:SS') \"SPFILE\" from V$RMAN_STATUS where object_type='SPFILE' and status='COMPLETED'";
static void Main(string[] args)
{
for (int i = 0; i < 1000; i++)
{
Thread myNewThread = new Thread(DoWork);
Console.WriteLine(i.ToString());
}
}
static void DoWork()
{
lock (_object)
{
DataTable dt = new DataTable();
using (OracleConnection conn = new OracleConnection(connectionstring))
{
conn.Open();
using (OracleCommand cmd = new OracleCommand(query, conn))
{
using (OracleDataAdapter adap = new OracleDataAdapter(cmd))
{
adap.Fill(dt);
}
while (conn.State != ConnectionState.Closed) conn.Close();
}
conn.Dispose();
}
}
}
}
source
share