Issue in bulk data import in SQL Server with NodeJS

102 Views Asked by At

I'm having trouble importing data from an Excel sheet into SQL Server. When I try to insert 4-5 entries, everything goes smoothly, but if the Excel sheet has more than 100 records, things go wrong and nothing is inserted into the SQL Server table.

I've tried a number of things:

I've tried using bulk method:

router.post('/dnatestres', upload.single('file'), async (req, res) => {
  try {
    const filePath = req.file.path;
    const fileExtension = path.extname(filePath).toLowerCase();

    let jsonData = [];

    if (fileExtension === '.csv') {
      jsonData = await csvtojson().fromFile(filePath);
    } else if (fileExtension === '.xlsx') {
      await new Promise((resolve, reject) => {
        xlsxj(
          {
            input: filePath,
            output: null,
          },
          function (err, result) {
            if (err) {
              reject(err);
            } else {
              jsonData = result;
              resolve();
            }
          }
        );
      });
    } else {
      return res.status(400).send('Unsupported file type');
    }

    const pool = await sql.connect(config);

    const batchSize = 100; // Number of records to insert at a time
    let start = 0;

    while (start < jsonData.length) {
      const end = Math.min(start + batchSize, jsonData.length);
      const batchData = jsonData.slice(start, end);

      const table = new sql.Table(config.database + '.dbo.labImport_tempGEN');

      table.create = true;

      table.columns.add('AssociationCode', sql.NVarChar(25));
      table.columns.add('[Case]', sql.NVarChar(50));
      table.columns.add('SampleID', sql.NVarChar(50));
      table.columns.add('Marker', sql.NVarChar(50));
      table.columns.add('ResultsAllele', sql.NVarChar(50));
      table.columns.add('Units', sql.NVarChar(250));
      table.columns.add('DateTested', sql.DateTime);

      for (const data of batchData) {
        table.rows.add(
          data['Association Code'],
          data['Case'],
          data['Sample ID'],
          data['Marker'],
          data['Results'],
          '',
          new Date()
        );
      }

      try {
        
        const request = new sql.Request(pool);
        const result = request.bulk(table, (err, res) => {
          if(err) {
            console.log('err===>', err);
          } else {
            console.log('res===>', res);
          }
        });
        
        if (result && result.rowsAffected) {
          console.log('Bulk insert completed successfully');
          console.log('Rows affected:', result.rowsAffected);
          console.log(`All records inserted successfully`);
        } else {
          console.log('No rows were affected by the bulk insert.');
        }
        res.sendStatus(200);
      } catch (error) {
        console.error('Error executing bulk insert:', error);
        res.status(500).send('Internal Server Error');
        return;
      }

      start += batchSize;
    }

  } catch (error) {
    console.error('Error processing request:', error);
    res.status(500).send('Internal Server Error');
  }
});

I've tried to use the BULK INSERT query like this:

router.post('/dnatestres', upload.single('file'), async (req, res) => {
  try {
    const filePath = req.file.path;
    const fileExtension = path.extname(filePath).toLowerCase();

    let jsonData = [];

    if (fileExtension === '.csv') {
      jsonData = await csvtojson().fromFile(filePath);
    } else if (fileExtension === '.xlsx') {
      jsonData = await new Promise((resolve, reject) => {
        xlsxj({
          input: filePath,
          output: null,
        }, function (err, result) {
          if (err) {
            reject(err);
          } else {
            resolve(result);
          }
        });
      });
    } else {
      return res.status(400).send('Unsupported file type');
    }

    const pool = await sql.connect(config);

    const batchSize = 100; // Number of records to insert at a time
    let start = 0;

    while (start < jsonData.length) {
      const end = Math.min(start + batchSize, jsonData.length);
      const batchData = jsonData.slice(start, end);

      // Prepare the data as a CSV string
      const csvData = batchData.map((data) => (
        `${data['Association Code']},${data['Case']},${data['Sample ID']},${data['Marker']},${data['Results']}`
      )).join('\n');
      

      try {
        // Execute the BULK INSERT statement
        const request = new sql.Request(pool);
        await request.query(`BULK INSERT dbo.labImport_tempGEN
          FROM '${filePath}'
          WITH (
            FIELDTERMINATOR = ',',
            ROWTERMINATOR = '\\n',
            FIRSTROW = 2 
          )`);

        console.log("Bulk insert completed successfully");
      } catch (error) {
        console.error('Error executing bulk insert:', error);
        res.status(500).send('Internal Server Error');
        return;
      }

      start += batchSize;
    }

    res.sendStatus(200);
  } catch (error) {
    res.status(500).send('Internal Server Error');
  }
});

Insert data into batches of 10-10 with making one insert query with multiple values:

router.post('/dnatestres', upload.single('file'), async (req, res) => {
  try {
    const filePath = req.file.path;
    const fileExtension = path.extname(filePath).toLowerCase();
    let jsonData = [];
    if (fileExtension === '.csv') {
      jsonData = await csvtojson().fromFile(filePath);
    } else if (fileExtension === '.xlsx') {
      await new Promise((resolve, reject) => {
        xlsxj(
          {
            input: filePath,
            output: null,
          },
          function (err, result) {
            if (err) {
              reject(err);
            } else {
              jsonData = result;
              resolve();
            }
          }
        );
      });
    } else {
      res.status(400).send('Unsupported file type');
      return;
    }

    const data = jsonData.map(row => ({
      AssociationCode: row['Association Code'],
      Case: row['Case'],
      SampleID: row['Sample ID'],
      Marker: row['Marker'],
      Results: row['Results']
    }));

    const batchSize = 10; // Set the batch size according to your needs

    for (let i = 0; i < data.length; i += batchSize) {
      const batchData = data.slice(i, i + batchSize);

      const sql = batchData.map(element => `(
        '${element.AssociationCode}',
        '${element.Case}',
        '${element.SampleID}',
        '${element.Marker}',
        '${element.Results}'
      )`).join(',');

      const query = `INSERT INTO labImport_tempGEN (AssociationCode, [Case], SampleID, Marker, ResultsAllele) VALUES ${sql}`;

      try {
        await request.query(query);
      } catch (err) {
        console.log("catch--err", err);
      }
    }

    res.send('Data inserted successfully');
  } catch (error) {
    console.error("Error processing request:", error);
    res.status(500).send('Internal Server Error');
  }
});

I tried using sql.Transaction();:

router.post('/dnatestres', upload.single('file'), async (req, res) => {
  try {
    const filePath = req.file.path;
    const fileExtension = path.extname(filePath).toLowerCase();
    if (fileExtension === '.csv') {
      const csv = require('csvtojson');
      const jsonObj = await csv().fromFile(filePath);

      const request = new sql.Request();
      await sql.connect(config);

      const transaction = new sql.Transaction();
      await transaction.begin();

      try {
        for (const record of jsonObj) {
          const { AssociationCode, Case, SampleID, Marker, Results } = record;
          await insertIntoLabImportTempGEN(request, AssociationCode, Case, SampleID, Marker, Results, transaction);
        }

        await transaction.commit();
        res.sendStatus(200);
      } catch (error) {
        await transaction.rollback();
        throw error;
      }
    } else if (fileExtension === '.xlsx') {
      const XLSX = require('xlsx');
      const workbook = XLSX.readFile(filePath);
      const sheet_name_list = workbook.SheetNames;
      const jsonObj = XLSX.utils.sheet_to_json(workbook.Sheets[sheet_name_list[0]]);
    
      const request = new sql.Request();
      await sql.connect(config);
    
      const transaction = new sql.Transaction();
      await transaction.begin();
    
      try {
        for (const record of jsonObj) {
          const { AssociationCode, Case, SampleID, Marker, Results } = record;
          await insertIntoLabImportTempGEN(request, AssociationCode, Case, SampleID, Marker, Results, transaction);
        }
    
        await transaction.commit();
        res.sendStatus(200);
      } catch (error) {
        await transaction.rollback();
        throw error;
      }
    } else {
      res.status(400).send('Unsupported file type');
    }
  } catch (error) {
    res.status(500).send('Internal Server Error');
  }
});

const insertIntoLabImportTempGEN = async (request, AssociationCode, Case, SampleID, Marker, Results, transaction) => {
  return new Promise((resolve, reject) => {
    request.input('AssociationCode', sql.NVarChar(25), AssociationCode);
    request.input('Case', sql.NVarChar(50), Case);
    request.input('SampleID', sql.NVarChar(50), SampleID);
    request.input('Marker', sql.NVarChar(50), Marker);
    request.input('Results', sql.NVarChar(50), Results);

    const sqlQuery = `
      INSERT INTO labImport_tempGEN (AssociationCode, [Case], SampleID, Marker, ResultsAllele)
      VALUES (@AssociationCode, @Case, @SampleID, @Marker, @Results)
    `;
    request.query(sqlQuery, (err, result) => {
      if (err) {
        reject(err);
      } else {
        resolve(result);
      }
    });
  });
};

I created a multi-input query pass as a string, then execute this:

router.post('/dnatestres', upload.single('file'), async (req, res) => {
    try {
      const filePath = req.file.path;
      const fileExtension = path.extname(filePath).toLowerCase();
      if (fileExtension === '.xlsx') {
        const XLSX = require('xlsx');
  
        const workbook = XLSX.readFile(filePath);
        const sheet_name_list = workbook.SheetNames;
        const jsonObj = XLSX.utils.sheet_to_json(workbook.Sheets[sheet_name_list[0]]);
        
        try {
          const pool = await sql.connect(config);
      
          let query = '';
          for (let i = 0; i < jsonObj.length; i++) {
            const element = jsonObj[i];
            console.log("element",element);
            const AssociationCode = element['Association Code'];
            const Case = element['Case'];
            const SampleID = element['Sample ID'];
            const Marker = element['Marker'];
            const Results = element['Results'];
            
            query += `INSERT INTO labImport_tempGEN (AssociationCode, [Case], SampleID, Marker, ResultsAllele) VALUES ('${AssociationCode}', '${Case}', '${SampleID}', '${Marker}', '${Results}'); `;
                    
          }
          console.log("query----",query);
          let que = pool.request().query(query);
          res.sendStatus(200);
        } catch (err) {
          console.error('Error:', err.message);
        }
  
      } else {
        res.status(400).send('Unsupported file type');
      }
    } catch (error) {
      console.error("Error processing request:", error);
      res.status(500).send('Internal Server Error');
    }
  });

I've tried with a stored procedure:

router.post('/dnatestres', upload.single('file'), function (req, res) {
  sql.connect(config, function (err) {
    if (err) console.log(err);
    // create Request object
    var request = new sql.Request();
    const csvFilePath = req.file.path;
    const csv = require('csvtojson');
    csv()
      .fromFile(csvFilePath)
      .then((jsonObj) => {
        var data = JSON.stringify(jsonObj);
        request.input('data', sql.VarChar('max'), data);
        request.execute(
          'ImportLabRes',
          function (err, recordsets, returnValue, affected) {
            if (err) {
              console.log(err);
            } else {
              res.sendStatus(200);
            }
          }
        );
      });
  });
});

Any ideas or solutions that might be helpful?

0

There are 0 best solutions below