Reputation:
I am at a loss. I have a table with 5 years of data; each year has about 4 million records, so the table has 20 million records in total. I wrote this C# app that will select the year and then the quarter and move that data to the Archive table. I tried playing with the BatchSize and BulkCopyTimeout, but it always times out at 1,129,500 or 2,093,000
records.
Is there a better way to do this or something that I am missing in the code?
Copied 1129500 so far...
Exception = Execution Timeout Expired. The timeout period elapsed prior to completion of the operation or the server is not responding.
/// <summary>
/// Streams one quarter of GuidelineLog rows from the backup database into
/// dbo.GuidelineLogArchive using SqlBulkCopy. Returns true on success,
/// false if the bulk copy throws (the error is logged to Log_10_11_18.txt).
/// </summary>
public static bool SqlBulkCopy()
{
    // Destination (archive) and source (backup) connection strings.
    string connectionString = ConfigurationManager.AppSettings.Get("EcommerceConnectionString");
    string BKConnectionString = ConfigurationManager.AppSettings.Get("BKConnectionString");

    using (SqlConnection sourceConnection = new SqlConnection(BKConnectionString))
    {
        sourceConnection.Open();

        // Count rows already in the archive so we can report how many were added.
        // NOTE(review): this counts via the *source* connection — presumably the
        // archive table lives on the same server; confirm against the environment.
        using (SqlCommand commandRowCount = new SqlCommand(
            "SELECT COUNT(*) FROM dbo.GuidelineLogArchive", sourceConnection))
        {
            commandRowCount.CommandTimeout = 900;
            // BUG FIX: was Convert.ToInt32 assigned to a long; use ToInt64.
            long countStart = Convert.ToInt64(commandRowCount.ExecuteScalar());
            Console.WriteLine("Starting row count = {0}", countStart);
            WriteLog("Log_10_11_18.txt", String.Format("Starting row count = {0}", countStart));

            // Parameterized source query (was string.Format with quoted numbers).
            // NOTE(review): DATEPART on the column prevents an index seek on
            // LogDate; a range predicate (LogDate >= @start AND LogDate < @end)
            // would perform better — confirm indexing before changing.
            const string TSQL =
                "SELECT * FROM GuidelineLog " +
                "WHERE DATEPART(YEAR, LogDate) = @year AND DATEPART(QUARTER, LogDate) = @quarter";
            Console.WriteLine("Source table = {0}", TSQL);

            using (SqlCommand commandSourceData = new SqlCommand(TSQL, sourceConnection))
            {
                commandSourceData.Parameters.AddWithValue("@year", 2015);
                commandSourceData.Parameters.AddWithValue("@quarter", 3);
                // BUG FIX: CommandTimeout is the cumulative network-read timeout
                // for the WHOLE result set, so 900 s expired mid-transfer after
                // ~1.1M rows. 0 = no limit while the reader streams ~1M+ rows.
                commandSourceData.CommandTimeout = 0;

                using (SqlDataReader reader = commandSourceData.ExecuteReader())
                using (SqlBulkCopy bulkCopy = new SqlBulkCopy(connectionString))
                {
                    bulkCopy.DestinationTableName = "dbo.GuidelineLogArchive";
                    // Rows committed per batch; small batches keep the destination
                    // log pressure low at the cost of more round trips.
                    bulkCopy.BatchSize = 500;
                    // 0 = no timeout on the destination side.
                    bulkCopy.BulkCopyTimeout = 0;
                    // Stream rows from the reader instead of buffering them all.
                    bulkCopy.EnableStreaming = true;
                    // Progress callback fires every 4500 rows.
                    bulkCopy.SqlRowsCopied += new SqlRowsCopiedEventHandler(OnSqlRowsCopied);
                    bulkCopy.NotifyAfter = 4500;

                    try
                    {
                        // Write from the source to the destination.
                        bulkCopy.WriteToServer(reader);
                    }
                    catch (Exception ex)
                    {
                        Console.WriteLine(ex.Message);
                        WriteLog("Log_10_11_18.txt", String.Format("Exception = {0}", ex.Message));
                        return false;
                    }
                    // reader and bulkCopy are disposed by the using blocks.
                }
            }

            // BUG FIX: in the original this final count sat AFTER `return true;`
            // and was unreachable dead code; it now runs before the success return.
            long countEnd = Convert.ToInt64(commandRowCount.ExecuteScalar());
            Console.WriteLine("Ending row count = {0}", countEnd);
            WriteLog("Log_10_11_18.txt", String.Format("Ending row count = {0}", countEnd));
            Console.WriteLine("{0} rows were added.", countEnd - countStart);
            WriteLog("Log_10_11_18.txt", String.Format("{0} rows were added.", countEnd - countStart));
            return true;
        }
    }
}
Upvotes: 1
Views: 1018
Reputation:
I had to increase the timeout on the reader command — 900 seconds isn't enough to execute the query and transfer all the rows.
See this note for SqlCommand.CommandTimeout
This property is the cumulative time-out (for all network packets that are read during the invocation of a method) for all network reads during command execution or processing of the results. A time-out can still occur after the first row is returned, and does not include user processing time, only network read time.
Upvotes: 1