// inserter.js
const Promise = require('bluebird');
const { chunk } = require('../../utils/objectUtils');
const { isPostgres } = require('../../utils/knexUtils');

// Maximum number of rows inserted with a single query when batching is
// possible (see the postgres note in the exported factory below).
const POSTGRES_INSERT_BATCH_SIZE = 100;
  5. module.exports = builder => {
  6. // Postgres is the only db engine that returns identifiers of all inserted rows. Therefore
  7. // we can insert batches only with postgres.
  8. const batchSize = isPostgres(builder.knex()) ? POSTGRES_INSERT_BATCH_SIZE : 1;
  9. return tableInsertion => {
  10. const inputs = [];
  11. const others = [];
  12. const queries = [];
  13. let insertQuery = tableInsertion.modelClass.query().childQueryOf(builder);
  14. const items = tableInsertion.items;
  15. for (let i = 0, l = items.length; i < l; ++i) {
  16. const item = items[i];
  17. const { model, relation } = item;
  18. // We need to validate here since at this point the models should no longer contain any special properties.
  19. const json = model.$validate(model, {
  20. dataPath: item.node.dataPath
  21. });
  22. // Set the return value back to model in case defaults were set.
  23. model.$set(json);
  24. if (relation) {
  25. others.push(model);
  26. } else {
  27. inputs.push(model);
  28. }
  29. }
  30. batchInsert(inputs, insertQuery.clone().copyFrom(builder, /returning/), batchSize, queries);
  31. batchInsert(others, insertQuery.clone(), batchSize, queries);
  32. return Promise.map(queries, query => query.query, {
  33. concurrency: tableInsertion.modelClass.concurrency
  34. });
  35. };
  36. };
  37. function batchInsert(models, queryBuilder, batchSize, queries) {
  38. const batches = chunk(models, batchSize);
  39. for (let i = 0, l = batches.length; i < l; ++i) {
  40. queries.push({
  41. query: queryBuilder.clone().insert(batches[i])
  42. });
  43. }
  44. }