perf: do a bulk insert in fetchCollection

Improves performance compared to inserting each item individually.
This commit is contained in:
Alexander Navarro 2024-11-22 20:20:22 -03:00
parent 8c660053e5
commit 3cf643c83d
5 changed files with 118 additions and 12 deletions

View file

@ -451,3 +451,67 @@ func (conn *DB) DeleteRelation(from int64, to int64) error {
return nil
}
// BulkCreateNode inserts all given nodes with a single INSERT statement and
// assigns the database-generated ids back to them via SetId/SetConnection.
//
// The insert runs inside a transaction: on any error the transaction is
// rolled back and the error is returned. An empty slice is a no-op.
func BulkCreateNode[T StandardNode](
	conn *DB,
	nodes []T,
) error {
	if len(nodes) == 0 {
		return nil
	}
	tx, err := conn.Connection.Begin()
	if err != nil {
		return err
	}
	// Rollback after a successful Commit is a harmless no-op, so this defer
	// safely covers every early-return path.
	defer tx.Rollback()
	// Build "($1, $2, $3), ($4, $5, $6), ..." placeholders and the matching
	// flat argument list — three columns per node.
	valueStrings := make([]string, 0, len(nodes))
	valueArgs := make([]any, 0, len(nodes)*3)
	for i := range nodes {
		n := i * 3
		valueStrings = append(valueStrings, fmt.Sprintf("($%d, $%d, $%d)", n+1, n+2, n+3))
		valueArgs = append(
			valueArgs,
			nodes[i].GetClass(),
			nodes[i].GetName(),
			nodes[i].GetMetadata(),
		)
	}
	sql := fmt.Sprintf(`
	INSERT INTO nodes (_class, name, metadata)
	VALUES %s
	RETURNING id;`, strings.Join(valueStrings, ","))
	conn.log(DEBUG, "Bulk creating nodes:", sql, valueArgs)
	// Execute and scan the returned ids.
	rows, err := tx.Query(sql, valueArgs...)
	if err != nil {
		return fmt.Errorf("bulk insert failed: %w", err)
	}
	defer rows.Close()
	// Assign ids back to the nodes. The i < len(nodes) bound guards against
	// the database returning more rows than we inserted, which would
	// otherwise index past the slice and panic.
	i := 0
	for i < len(nodes) && rows.Next() {
		var id int64
		if err := rows.Scan(&id); err != nil {
			return fmt.Errorf("scanning returned id failed: %w", err)
		}
		nodes[i].SetId(id)
		nodes[i].SetConnection(conn)
		i++
	}
	// Surface iteration errors BEFORE committing (previously checked after).
	if err := rows.Err(); err != nil {
		return err
	}
	// Release the result set before Commit — some drivers refuse to commit
	// while a result set is still open. Close is idempotent, so the deferred
	// Close above remains safe.
	if err := rows.Close(); err != nil {
		return err
	}
	if i != len(nodes) {
		return fmt.Errorf("bulk insert returned %d ids for %d nodes", i, len(nodes))
	}
	// The Commit error was previously discarded; report it to the caller.
	return tx.Commit()
}