我的程序目前可以正常运行,能够将日志文件中的数据导入远程 SQL Server 数据库。日志文件大小约为 80MB,共约 470000 行,其中约 25000 行是有效数据。但我的程序只能插入 300 行/秒,速度实在太慢了。:( 从文本文件导入到 SQL Server 数据库,是 ADO.NET 本身太慢了吗?
/// <summary>
/// Reads the log file at <paramref name="strPath"/> line by line, skips the
/// three header lines and any blank lines, and inserts every remaining line
/// into the database via <see cref="InsertData"/>.
/// </summary>
/// <param name="strPath">Path of the log file to import.</param>
/// <returns>The running total of imported rows (the shared _count field).</returns>
public static int ImportData(string strPath)
{
    using (TextReader sr = new StreamReader(strPath))
    // BUG FIX: the connection was created outside any using/try-finally, so it
    // leaked whenever InsertData threw. The using block guarantees disposal,
    // which also closes the connection — the explicit Close() is no longer needed.
    using (var cn = new SqlConnection(ConnectionString))
    {
        // The first three lines of the log file are header lines — skip them.
        sr.ReadLine();
        sr.ReadLine();
        sr.ReadLine();

        cn.Open();

        string strLine;
        while ((strLine = sr.ReadLine()) != null)
        {
            // Blank lines carry no data; import everything else.
            if (strLine.Trim() != "")
            {
                InsertData(strLine, cn);
                _count++;
            }
        }

        return _count;
    }
}
InsertData 只是用 ADO.NET 做普通的单行插入。日志行的解析逻辑在下面这个构造函数里:
/// <summary>
/// Parses one tab-separated log line into the fields of this record.
/// </summary>
/// <param name="strLine">
/// A single log line with at least 20 tab-separated columns; the first two
/// columns together form the sent timestamp.
/// </param>
/// <remarks>
/// The numeric Convert.ToXxx calls below will still throw FormatException on
/// malformed columns, matching the original behavior. Only the timestamp
/// fields are treated as best-effort.
/// </remarks>
public Data(string strLine)
{
    string[] list = strLine.Split(new[] {'\t'});

    // BUG FIX: the original wrapped DateTime.Parse in an empty catch block,
    // silently leaving Senttime at default(DateTime) on bad input and paying
    // exception-throw cost on every malformed line. TryParse expresses the
    // same best-effort intent explicitly and cheaply; the on-failure result
    // (default value) is unchanged.
    DateTime sent;
    if (DateTime.TryParse(list[0] + " " + list[1], out sent))
    {
        Senttime = sent;
    }

    Clientip = list[2];
    Clienthostname = list[3];
    Partnername = list[4];
    Serverhostname = list[5];
    Serverip = list[6];
    Recipientaddress = list[7];
    Eventid = Convert.ToInt16(list[8]);
    Msgid = list[9];
    Priority = Convert.ToInt16(list[10]);
    Recipientreportstatus = Convert.ToByte(list[11]);
    Totalbytes = Convert.ToInt32(list[12]);
    Numberrecipient = Convert.ToInt16(list[13]);

    // Origination-Time is optional in the log; store null when absent/invalid.
    DateTime temp;
    if (DateTime.TryParse(list[14], out temp))
    {
        OriginationTime = temp;
    }
    else
    {
        OriginationTime = null;
    }

    Encryption = list[15];
    ServiceVersion = list[16];
    LinkedMsgid = list[17];
    MessageSubject = list[18];
    SenderAddress = list[19];
}
InsertData 方法如下:
/// <summary>
/// Parses one log line and inserts it as a single row into the LOGDATA table
/// using a parameterized INSERT on the supplied open connection.
/// </summary>
/// <param name="strLine">One tab-separated log line.</param>
/// <param name="cn">An already-open SQL Server connection (owned by the caller).</param>
private static void InsertData(string strLine, SqlConnection cn)
{
    var dt = new Data(strLine); // parse the log line into typed fields

    // Renamed from "cnnStr": this is the INSERT statement, not a connection string.
    const string insertSql =
        "INSERT INTO LOGDATA ([SentTime],[client-ip],[Client-hostname]," +
        "[Partner-Name],[Server-hostname],[server-IP],[Recipient-Address]," +
        "[Event-ID],[MSGID],[Priority],[Recipient-Report-Status],[total-bytes]," +
        "[Number-Recipients],[Origination-Time],[Encryption],[service-Version]," +
        "[Linked-MSGID],[Message-Subject],[Sender-Address]) VALUES (" +
        "@Senttime,@Clientip,@Clienthostname,@Partnername,@Serverhostname," +
        "@Serverip,@Recipientaddress,@Eventid,@Msgid,@Priority," +
        "@Recipientreportstatus,@Totalbytes,@Numberrecipient,@OriginationTime," +
        "@Encryption,@ServiceVersion,@LinkedMsgid,@MessageSubject,@SenderAddress)";

    // BUG FIX: SqlCommand is IDisposable and was never disposed; with
    // ~470,000 rows that leaks a command object per line.
    using (var cmd = new SqlCommand(insertSql, cn) { CommandType = CommandType.Text })
    {
        cmd.Parameters.AddWithValue("@Senttime", dt.Senttime);
        cmd.Parameters.AddWithValue("@Clientip", dt.Clientip);
        cmd.Parameters.AddWithValue("@Clienthostname", dt.Clienthostname);
        cmd.Parameters.AddWithValue("@Partnername", dt.Partnername);
        cmd.Parameters.AddWithValue("@Serverhostname", dt.Serverhostname);
        cmd.Parameters.AddWithValue("@Serverip", dt.Serverip);
        cmd.Parameters.AddWithValue("@Recipientaddress", dt.Recipientaddress);
        cmd.Parameters.AddWithValue("@Eventid", dt.Eventid);
        cmd.Parameters.AddWithValue("@Msgid", dt.Msgid);
        cmd.Parameters.AddWithValue("@Priority", dt.Priority);
        cmd.Parameters.AddWithValue("@Recipientreportstatus", dt.Recipientreportstatus);
        cmd.Parameters.AddWithValue("@Totalbytes", dt.Totalbytes);
        cmd.Parameters.AddWithValue("@Numberrecipient", dt.Numberrecipient);
        // Origination-Time is optional: a null field must be sent as DBNull,
        // because AddWithValue omits the parameter entirely for a null value.
        cmd.Parameters.AddWithValue("@OriginationTime",
            (object)dt.OriginationTime ?? DBNull.Value);
        cmd.Parameters.AddWithValue("@Encryption", dt.Encryption);
        cmd.Parameters.AddWithValue("@ServiceVersion", dt.ServiceVersion);
        cmd.Parameters.AddWithValue("@LinkedMsgid", dt.LinkedMsgid);
        cmd.Parameters.AddWithValue("@MessageSubject", dt.MessageSubject);
        cmd.Parameters.AddWithValue("@SenderAddress", dt.SenderAddress);

        cmd.ExecuteNonQuery();
    }
}
如何才能让我的程序运行得更快?非常感谢!
SqlBulkCopy 才是正确的做法。我早在 SQL 6.5/7.0 的时代就用 bcp 从 CSV 导入数据,发现它的速度非常快。SqlBulkCopy 本质上就是把同样的功能暴露给了托管代码。 – davewasthere 2009-07-20 09:03:08
我的日志文件开头有三行头部,而且日期时间分散在两个字段里,我必须把它们拼接后才能转换为 DateTime 值。这种情况该怎么处理? – Vimvq1987 2009-07-20 09:46:25