Reputation: 321
I am importing Excel data into SQL Server using SqlBulkCopy, but I want to prevent any duplicate records from being inserted. Is there a way to have duplicates ignored or deleted automatically?
protected void Button1_Click(object sender, EventArgs e)
{
    // Save the uploaded workbook under ~/Import/
    string strFileType = System.IO.Path.GetExtension(FileUpload1.FileName).ToLower();
    string strFileName = System.IO.Path.GetFileNameWithoutExtension(FileUpload1.FileName);
    string strNewPath = Server.MapPath("~/Import/" + strFileName + strFileType);
    FileUpload1.SaveAs(strNewPath);

    string excelConnectionString = String.Format(
        @"Provider=Microsoft.Jet.OLEDB.4.0;Data Source={0};Extended Properties=Excel 8.0;",
        strNewPath);

    // SQL Server connection string
    string sqlConnectionString = "Data Source=ARBAAZ-1B14C081;Initial Catalog=abc;Integrated Security=True";

    // Create connection to the Excel workbook
    using (OleDbConnection connection = new OleDbConnection(excelConnectionString))
    using (OleDbCommand command = new OleDbCommand("SELECT ID, Data FROM [Sheet1$]", connection))
    {
        connection.Open();

        // Create DbDataReader over the worksheet
        using (DbDataReader dr = command.ExecuteReader())
        using (SqlConnection con = new SqlConnection(sqlConnectionString))
        {
            con.Open();

            // Row count before the bulk copy
            SqlCommand cmd1 = new SqlCommand("SELECT COUNT(*) FROM ExcelTable", con);
            int prv = Convert.ToInt32(cmd1.ExecuteScalar());
            Label1.Text = prv.ToString();

            // Bulk copy to SQL Server
            using (SqlBulkCopy bulkCopy = new SqlBulkCopy(sqlConnectionString))
            {
                bulkCopy.DestinationTableName = "ExcelTable";
                SqlBulkCopyColumnMapping mapping1 = new SqlBulkCopyColumnMapping("excelid", "id");
                SqlBulkCopyColumnMapping mapping2 = new SqlBulkCopyColumnMapping("exceldata", "data");
                bulkCopy.ColumnMappings.Add(mapping1);
                bulkCopy.ColumnMappings.Add(mapping2);
                bulkCopy.WriteToServer(dr);
            }

            // Row count after the bulk copy
            SqlCommand cmd = new SqlCommand("SELECT COUNT(*) FROM ExcelTable", con);
            int ltr = Convert.ToInt32(cmd.ExecuteScalar());

            if (prv == ltr)
            {
                Label1.Text = "No records Added";
            }
            else
            {
                Label1.Text = "Records Added Successfully!";
            }
        }
    }
}
Can this problem be fixed by creating a trigger that deletes duplicates? If so, how? I am a newbie and have never created a trigger.
Another problem is that no matter what I try, I cannot get the column mapping to work: I am unable to upload data when the column names in the Excel sheet and in the database table are different.
Upvotes: 4
Views: 9196
Reputation: 1
As far as I remember, you can filter out the redundant rows while importing from the Excel file itself.
Use the following SQL query in the OleDbCommand constructor:
var command = new OleDbCommand("Select DISTINCT ID,Data FROM [Sheet1$]", connection);
Upvotes: 0
Reputation: 36156
No. Either you handle it on your dr object in your code before you load it into the database (for example, by running a DISTINCT operation), or you create a trigger on the database to check. The trigger will reduce the bulk insert's performance, though.
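If you do go the trigger route, a minimal sketch might look like the following (the trigger name is made up; the table and column names are taken from the question's code). Keep in mind that SqlBulkCopy only fires triggers when you pass SqlBulkCopyOptions.FireTriggers to its constructor.

CREATE TRIGGER trg_ExcelTable_DeleteDuplicates
ON ExcelTable
AFTER INSERT
AS
BEGIN
    SET NOCOUNT ON;
    -- keep one row per (id, data) pair and delete the rest
    ;WITH Numbered AS
    (
        SELECT ROW_NUMBER() OVER (PARTITION BY id, data ORDER BY (SELECT NULL)) AS rn
        FROM ExcelTable
    )
    DELETE FROM Numbered
    WHERE rn > 1;
END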
Another option is to bulk insert into a temp table and then insert into your destination table from that temp table using a SELECT DISTINCT.
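A rough sketch of that approach, assuming a staging table named ExcelTable_Staging (the table name and column types here are assumptions, not taken from the question):

CREATE TABLE ExcelTable_Staging (id INT, data NVARCHAR(255));

-- point SqlBulkCopy's DestinationTableName at ExcelTable_Staging, then:
INSERT INTO ExcelTable (id, data)
SELECT DISTINCT s.id, s.data
FROM ExcelTable_Staging AS s
WHERE NOT EXISTS (
    SELECT 1 FROM ExcelTable AS t
    WHERE t.id = s.id AND t.data = s.data
);

TRUNCATE TABLE ExcelTable_Staging;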
Upvotes: 0
Reputation: 19296
You can create a unique index with IGNORE_DUP_KEY:
CREATE UNIQUE INDEX MyIndex ON ExcelTable(id, data) WITH (IGNORE_DUP_KEY = ON)
When you then bulk insert into the table, rows that would create duplicates are simply not inserted: they are discarded with a warning instead of failing the whole bulk copy.
Upvotes: 13