
I need to insert 500,000 rows into a table, but my version of the code takes too much time. The Question table has 5 fields: id, NameQuestion, DetailsQuestion, UserId (a Guid), and Category (a many-to-many relation to another table). Here is the code that adds the 500,000 rows to the Question table:

    public void AddFive()
    {
        int counQ = 0;
        for (int i = 1; i <= 100000; i++)
        {
            foreach (var x in GetUsers()) // 5 users
            {
                ++counQ;
                var q = new Question
                {
                    NameQuestion = "TestQuestion" + counQ,
                    DetailsQuestion = "TestQuestion" + counQ,
                    UserId = x,
                    Category = GetcategoryList()
                };
                _context.Question.Add(q);
            }
            _context.SaveChanges();
        }
    }

        public List<Guid> GetUsers()
        {
            List<Guid> userList = (from x in _context.aspnet_Users
                select x.UserId).ToList();
            return userList;
        }

    public List<Category> GetcategoryList()
    {
        // Note: creating a new Random on every call can repeat seeds when
        // called in a tight loop; a single shared instance would be safer.
        var category = new List<Category>();
        var rand = new Random();
        var count = rand.Next(1, 7);

        for (int i = 1; i <= count; i++)
        {
            var idCategory = rand.Next(1, 7);
            category.Add(_context.Category.Find(idCategory));
        }

        return category;
    }

How can I speed up the process?

Daniil_Kosta

2 Answers


You should dispose of the context and create a new one every 100 iterations or so; this clears the change tracker and keeps it from becoming overloaded.

Also consider turning off the Configuration.AutoDetectChangesEnabled property on your context to prevent automatic calls to the DetectChanges() method on every Add.
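Putting both suggestions together, a minimal sketch of the batching idea might look like this (MyDbContext is a placeholder for your actual context type, and the Category assignment is omitted because the categories would have to be re-loaded from each fresh context):

    public void AddFiveBatched()
    {
        var users = GetUsers();            // 5 users
        const int batchSize = 100;
        int counQ = 0;

        var context = new MyDbContext();
        context.Configuration.AutoDetectChangesEnabled = false;
        try
        {
            for (int i = 1; i <= 100000; i++)
            {
                foreach (var x in users)
                {
                    ++counQ;
                    context.Question.Add(new Question
                    {
                        NameQuestion = "TestQuestion" + counQ,
                        DetailsQuestion = "TestQuestion" + counQ,
                        UserId = x
                    });
                }

                // Flush and replace the context periodically so the
                // change tracker never grows past batchSize * 5 entries.
                if (i % batchSize == 0)
                {
                    context.SaveChanges();
                    context.Dispose();
                    context = new MyDbContext();
                    context.Configuration.AutoDetectChangesEnabled = false;
                }
            }
            context.SaveChanges();         // flush any remaining partial batch
        }
        finally
        {
            context.Dispose();
        }
    }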

Source

Paradise228

Such a huge number of records is best inserted with bulk copy (SqlBulkCopy / bcp):

    // requires: using System.Data; using System.Data.SqlClient;
    var questionDT = new DataTable();
    questionDT.Columns.Add(new DataColumn("NameQuestion", typeof(string)));
    questionDT.Columns.Add(new DataColumn("DetailsQuestion", typeof(string)));
    questionDT.Columns.Add(new DataColumn("UserId", typeof(Guid))); // UserId is a Guid, not an int
    int counQ = 0;
    for (int i = 1; i <= 100000; i++)
    {
        foreach (var x in GetUsers())
        {
            ++counQ;
            var dr = questionDT.NewRow();

            dr["NameQuestion"] = "TestQuestion" + counQ;
            dr["DetailsQuestion"] = "TestQuestion" + counQ;
            dr["UserId"] = x;
            questionDT.Rows.Add(dr);
        }
    }

    using (var c = new SqlConnection(connectionstring))
    {
        c.Open();
        using (var bcp = new SqlBulkCopy(c))
        {
            bcp.BatchSize = 10000;
            bcp.ColumnMappings.Add("NameQuestion", "NameQuestion");
            bcp.ColumnMappings.Add("DetailsQuestion", "DetailsQuestion");
            bcp.ColumnMappings.Add("UserId", "UserId");
            bcp.DestinationTableName = "Questions"; // adjust to your actual table name
            bcp.BulkCopyTimeout = 0;
            bcp.WriteToServer(questionDT);
        }
    }

Bulk copy is made for huge inserts: by default it bypasses per-row overhead such as constraint checking and triggers, which is why it is dramatically faster than inserting row by row.
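One caveat: this fills only the Questions table itself. The many-to-many Category relation from the question would need a second bulk copy into the join table. A minimal sketch of that step, assuming the join table is named QuestionCategories with int QuestionId and CategoryId columns, and that the ids of the newly inserted questions are available in a list (insertedQuestionIds is hypothetical; adjust all names to your actual schema):

    // Sketch only. Requires System.Data, System.Data.SqlClient and
    // System.Collections.Generic. Mimics GetcategoryList(): 1-6 distinct
    // random categories per question.
    var linkDT = new DataTable();
    linkDT.Columns.Add(new DataColumn("QuestionId", typeof(int)));
    linkDT.Columns.Add(new DataColumn("CategoryId", typeof(int)));

    var rand = new Random();
    foreach (var questionId in insertedQuestionIds) // hypothetical id list
    {
        var chosen = new HashSet<int>();
        var count = rand.Next(1, 7);
        while (chosen.Count < count)
            chosen.Add(rand.Next(1, 7));    // distinct ids avoid PK violations

        foreach (var categoryId in chosen)
        {
            var row = linkDT.NewRow();
            row["QuestionId"] = questionId;
            row["CategoryId"] = categoryId;
            linkDT.Rows.Add(row);
        }
    }

    using (var c = new SqlConnection(connectionstring))
    using (var bcp = new SqlBulkCopy(c))
    {
        c.Open();
        bcp.DestinationTableName = "QuestionCategories";
        bcp.ColumnMappings.Add("QuestionId", "QuestionId");
        bcp.ColumnMappings.Add("CategoryId", "CategoryId");
        bcp.WriteToServer(linkDT);
    }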

Luc