Postgres NodeJS bulk upsert timeout

Problem description

I'm trying to insert a 10k-row CSV into Postgres. It works for the first 400 rows, then fails with the following error:

"Error: connection terminated" 

I'm not sure how to debug this because the error is so generic. Here is my code:

const fs = require("fs");
const Pool = require("pg").Pool;
const csv = require("csvtojson");

const csvFilePath = "../../li.csv";

async function run() {
  const jsonArray = await csv().fromFile(csvFilePath);
  let datas = [];

  jsonArray.map((item) => {
    datas.push(item);
  });

  // create a new connection to the database
  const pool = new Pool({
    host: "host",
    user: "host",
    database: "db",
    password: "host",
    port: 5432
  });

  const query =
    "INSERT INTO table_name (profile, name, tle, loc, rel, current, time) VALUES ($1, $2, $3, $4, $5, $6, $7)";

  pool.connect((err, client, done) => {
    if (err) throw err;

    try {
      datas.forEach((row) => {
        client.query(query, [row.profile, row.name, row.tle, row.loc, row.rel, row.current, row.time], (err, res) => {
          if (err) {
            console.log(err.stack);
          } else {
            console.log("inserted " + res.rowCount + " row:", row);
          }
        });
      });
    } finally {
      done();
    }
  });
}

run();

Tags: node.js, postgresql, pg

Solution
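The error is almost certainly not coming from Postgres itself but from how the queries are issued. client.query() is asynchronous, so the forEach loop fires all 10,000 INSERTs at once over a single connection and then falls straight through to the finally block, which calls done() and hands the client back to the pool while thousands of queries are still pending. The connection gets torn down underneath them, and that surfaces as the generic "Error: connection terminated".

The fix is to await each insert so the client is released only after the last query has finished. Below is a minimal rewrite under that assumption; the pool settings, table name, and column list are taken verbatim from the question, so adjust them to your environment:

const csv = require("csvtojson");
const { Pool } = require("pg");

const csvFilePath = "../../li.csv";

const query =
  "INSERT INTO table_name (profile, name, tle, loc, rel, current, time) VALUES ($1, $2, $3, $4, $5, $6, $7)";

async function run() {
  // csvtojson already returns an array of row objects,
  // so the extra copy into datas is unnecessary
  const rows = await csv().fromFile(csvFilePath);

  const pool = new Pool({
    host: "host",
    user: "host",
    database: "db",
    password: "host",
    port: 5432
  });

  // check out one client and keep it until every insert has finished
  const client = await pool.connect();
  try {
    for (const row of rows) {
      // awaiting serializes the inserts: the next query is not sent
      // until the previous one has completed
      const res = await client.query(query, [
        row.profile, row.name, row.tle, row.loc,
        row.rel, row.current, row.time
      ]);
      console.log("inserted " + res.rowCount + " row:", row);
    }
  } finally {
    // only now is it safe to return the client to the pool
    client.release();
    await pool.end();
  }
}

run().catch((err) => console.error(err.stack));

With node-postgres, client.query() returns a promise when no callback is passed, so the async/await version needs no extra dependencies.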

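Row-by-row inserts are also slow for 10k rows, since each statement is a separate round trip to the server. If throughput matters, batching many rows into a single multi-value INSERT is usually much faster. Here is a sketch using the pg-format package (a separate npm dependency, not part of pg itself) to build the VALUES list safely; the chunk size of 1000 is an arbitrary choice:

const format = require("pg-format");

// rows: the array returned by csv().fromFile(...)
// client: a connected pg client, as in the snippet above
async function bulkInsert(client, rows, chunkSize = 1000) {
  for (let i = 0; i < rows.length; i += chunkSize) {
    const values = rows.slice(i, i + chunkSize).map((row) => [
      row.profile, row.name, row.tle, row.loc,
      row.rel, row.current, row.time
    ]);
    // %L expands the nested array into a properly escaped
    // list of value tuples: ('a','b',...),('c','d',...)
    const sql = format(
      "INSERT INTO table_name (profile, name, tle, loc, rel, current, time) VALUES %L",
      values
    );
    await client.query(sql);
  }
}

For truly large files, Postgres's COPY command (exposed to Node through packages such as pg-copy-streams) is faster still, but a batched INSERT like the one above is usually enough for 10k rows.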
