S
Sudip_inn
Guest
Here I am pasting my code, which reads multiple files in a loop. I am reading many files, and the file sizes are huge, so it takes a long time. Please tell me the best way to read multiple large files simultaneously and append their data to a StringBuilder.
I could use Parallel.ForEach, but I am afraid the data may overlap, and if I use a lock{} statement then using multiple threads makes no sense. So please look at my code and suggest the best way to speed up reading multiple large XML files.
/// <summary>
/// Builds a combined XML fragment for every broker marked "Select" in the grid:
/// for each selected broker, reads its &lt;Ticker&gt;~&lt;Broker&gt;~Bogey.xml file,
/// keeps only rows with a non-empty "StandardValue", and wraps the resulting XML
/// in a &lt;Broker Code="..."&gt; element. Returns the concatenated fragments.
/// </summary>
/// <returns>The concatenated broker XML fragments; empty string when no broker qualifies.</returns>
private string GetXMLFromBrokerBogey()
{
    StringBuilder sbBrokerBogey = new StringBuilder();

    var GridRows = from row in dgvBrokers.ToDataTable("dtdgvBrokers").AsEnumerable()
                   orderby row["BrokerName"] ascending
                   select row;
    DataTable dtdgvBrokers = GridRows.AsDataView().ToTable();

    for (int i = 0; i < dtdgvBrokers.Rows.Count; i++)
    {
        DataRow gridRow = dtdgvBrokers.Rows[i];

        // BUG FIX: the original indexed the row collection by column name
        // (dtdgvBrokers.Rows["Brokers"]), which does not compile; the row must
        // be selected by position first, then the column by name.
        string strbrokercode = gridRow["Brokers"].ToString();

        // Read "Select" directly from the current row instead of re-scanning the
        // whole table with FirstOrDefault (O(n^2) overall and NRE-prone when no
        // match is found).
        bool selectedBroker = gridRow["Select"].ToString().Equals("True");
        if (!selectedBroker)
        {
            continue;
        }

        string brokerBogeypath = Path.Combine(
            ConfigurationManager.AppSettings["DBPath"],
            Ticker,
            "###~$$$~Bogey.xml".Replace("###", Ticker).Replace("$$$", strbrokercode).Trim());

        if (!File.Exists(brokerBogeypath))
        {
            continue;
        }

        // "using" guarantees disposal even if ReadXml or the LINQ query throws;
        // the original's manual Dispose() calls were skipped on exceptions.
        using (DataSet ds = new DataSet())
        {
            ds.ReadXml(brokerBogeypath);

            // BUG FIX: the original checked Tables.Count > 1 but then accessed
            // Tables[2], which throws IndexOutOfRangeException when the file
            // yields exactly two tables. Require at least three tables.
            if (ds.Tables.Count <= 2)
            {
                continue;
            }

            DataRow[] rowsWithValue = ds.Tables[2]
                .AsEnumerable()
                .Where(myRow => !string.IsNullOrEmpty(myRow.Field<string>("StandardValue")))
                .ToArray();

            // BUG FIX: CopyToDataTable() throws InvalidOperationException on an
            // empty sequence; skip brokers whose bogey file has no populated rows.
            if (rowsWithValue.Length == 0)
            {
                continue;
            }

            using (DataSet ds2 = new DataSet())
            {
                ds2.Tables.Add(rowsWithValue.CopyToDataTable());
                sbBrokerBogey.Append("<Broker Code=\"")
                             .Append(strbrokercode)
                             .Append("\">")
                             .Append(ds2.GetXml())
                             .Append("</Broker>");
            }
        }
    }

    return sbBrokerBogey.ToString();
}
Continue reading...
I could use Parallel.ForEach, but I am afraid the data may overlap, and if I use a lock{} statement then using multiple threads makes no sense. So please look at my code and suggest the best way to speed up reading multiple large XML files.
/// <summary>
/// Builds a combined XML fragment for every broker marked "Select" in the grid:
/// for each selected broker, reads its &lt;Ticker&gt;~&lt;Broker&gt;~Bogey.xml file,
/// keeps only rows with a non-empty "StandardValue", and wraps the resulting XML
/// in a &lt;Broker Code="..."&gt; element. Returns the concatenated fragments.
/// </summary>
/// <returns>The concatenated broker XML fragments; empty string when no broker qualifies.</returns>
private string GetXMLFromBrokerBogey()
{
    StringBuilder sbBrokerBogey = new StringBuilder();

    var GridRows = from row in dgvBrokers.ToDataTable("dtdgvBrokers").AsEnumerable()
                   orderby row["BrokerName"] ascending
                   select row;
    DataTable dtdgvBrokers = GridRows.AsDataView().ToTable();

    for (int i = 0; i < dtdgvBrokers.Rows.Count; i++)
    {
        DataRow gridRow = dtdgvBrokers.Rows[i];

        // BUG FIX: the original indexed the row collection by column name
        // (dtdgvBrokers.Rows["Brokers"]), which does not compile; the row must
        // be selected by position first, then the column by name.
        string strbrokercode = gridRow["Brokers"].ToString();

        // Read "Select" directly from the current row instead of re-scanning the
        // whole table with FirstOrDefault (O(n^2) overall and NRE-prone when no
        // match is found).
        bool selectedBroker = gridRow["Select"].ToString().Equals("True");
        if (!selectedBroker)
        {
            continue;
        }

        string brokerBogeypath = Path.Combine(
            ConfigurationManager.AppSettings["DBPath"],
            Ticker,
            "###~$$$~Bogey.xml".Replace("###", Ticker).Replace("$$$", strbrokercode).Trim());

        if (!File.Exists(brokerBogeypath))
        {
            continue;
        }

        // "using" guarantees disposal even if ReadXml or the LINQ query throws;
        // the original's manual Dispose() calls were skipped on exceptions.
        using (DataSet ds = new DataSet())
        {
            ds.ReadXml(brokerBogeypath);

            // BUG FIX: the original checked Tables.Count > 1 but then accessed
            // Tables[2], which throws IndexOutOfRangeException when the file
            // yields exactly two tables. Require at least three tables.
            if (ds.Tables.Count <= 2)
            {
                continue;
            }

            DataRow[] rowsWithValue = ds.Tables[2]
                .AsEnumerable()
                .Where(myRow => !string.IsNullOrEmpty(myRow.Field<string>("StandardValue")))
                .ToArray();

            // BUG FIX: CopyToDataTable() throws InvalidOperationException on an
            // empty sequence; skip brokers whose bogey file has no populated rows.
            if (rowsWithValue.Length == 0)
            {
                continue;
            }

            using (DataSet ds2 = new DataSet())
            {
                ds2.Tables.Add(rowsWithValue.CopyToDataTable());
                sbBrokerBogey.Append("<Broker Code=\"")
                             .Append(strbrokercode)
                             .Append("\">")
                             .Append(ds2.GetXml())
                             .Append("</Broker>");
            }
        }
    }

    return sbBrokerBogey.ToString();
}
Continue reading...