
I am inserting around 2 million records from an Excel sheet, split into chunks. I added some config lines in case it was a config issue, but the outcome is still the same. This is my seeder code: it contains the class that reads the CSV file and the seeder itself. When I run the seeder command on this file, "Killed" is printed on the terminal after around 30 minutes.

class readLargeCSV
{
  private $file;
  private $delimiter;
  private $iterator;
  private $header;

  public function __construct($filename, $delimiter = "\t")
  {
    $this->file = fopen($filename, 'r');
    $this->delimiter = $delimiter;
    $this->iterator = 0;
    $this->header = null;
  }

  public function csvToArray()
  {
    $data = array();
    while (($row = fgetcsv($this->file, 1000, $this->delimiter)) !== false) {
      if (!$this->header) {
        // The first row is the header; reuse it as keys for every record.
        $this->header = $row;
      } else {
        $this->iterator++;
        $data[] = array_combine($this->header, $row);
        if ($this->iterator % 1000 == 0) {
          // Yield a chunk of 1000 records and reset the buffer.
          $chunk = $data;
          $data = array();
          yield $chunk;
        }
      }
    }
    fclose($this->file);
    // Yield whatever is left after the last full chunk.
    if (!empty($data)) {
      yield $data;
    }
  }
}

class ImportAreasSeeder extends Seeder
{
  /**
   * Run the database seeds.
   *
   * @return void
   */
  public function run()
  {
    ini_set('memory_limit', '-1');
    ini_set('max_execution_time', 0);
    DB::connection()->disableQueryLog();

    DB::beginTransaction();

    try {
      $file = base_path("..../cities.csv");
      $csv_reader = new readLargeCSV($file, ",");

      foreach ($csv_reader->csvToArray() as $data) {
        // Preprocessing of the array.
        foreach ($data as $record) {
          DB::table('areas')->insert([
            [
              'name' => $record['name'],
              'zone_id' => ....,
              'country_id' => $record['country_id'],
              'created_at' => ....,
              'updated_at' => ....
            ]
          ]);
        }
      }
    } catch (Exception $e) {
      DB::rollBack();
      throw $e;
    }

    DB::commit();
  }
}

2 Answers

  1. Please try this; it should work for you.

    When your CSV file is large, the script needs a high enough memory limit on the server; if there is not enough, the script will fail. You can set it like this:

    ini_set('max_execution_time', 0); // 0 = Unlimited
    ini_set('memory_limit','5G');
    
  2. If you do not care about the data that already exists in the table (you truncate it first), another approach may be more suitable.

    For MySQL (LOAD DATA):

    LOAD DATA INFILE '/path/to/cities.csv'
    INTO TABLE areas
    FIELDS TERMINATED BY ','
    ENCLOSED BY '"'
    LINES TERMINATED BY '\n'
    IGNORE 1 ROWS;
    

    For PostgreSQL (COPY):

    COPY areas FROM '/path/to/cities.csv' WITH (FORMAT csv, HEADER true);
    

    PS: Another option is to create a temporary staging table, load the CSV into it,
    and then move the data into the original table with insert into ... select ...
    (a sketch of this is shown below).
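
    As a rough sketch of that staging-table idea, the whole import could be driven from the seeder with plain SQL statements. This is only an illustration: the areas_staging table name, the column list and the file path are placeholders, not from the original post, and LOAD DATA LOCAL INFILE assumes local infile is enabled on both the connection (PDO::MYSQL_ATTR_LOCAL_INFILE) and the server.

    // Inside the seeder's run() method; assumes the DB facade is imported as in the question.

    // Hypothetical staging table mirroring only the CSV columns we need.
    DB::statement('CREATE TABLE IF NOT EXISTS areas_staging (name VARCHAR(255), country_id INT)');

    // Bulk-load the CSV into the staging table in one pass, skipping the header row.
    DB::unprepared("
        LOAD DATA LOCAL INFILE '/path/to/cities.csv'
        INTO TABLE areas_staging
        FIELDS TERMINATED BY ',' ENCLOSED BY '\"'
        LINES TERMINATED BY '\\n'
        IGNORE 1 ROWS
    ");

    // Move the rows into the real table with a single INSERT ... SELECT,
    // filling the timestamp columns with constants (zone_id etc. omitted here).
    DB::statement("
        INSERT INTO areas (name, country_id, created_at, updated_at)
        SELECT name, country_id, NOW(), NOW()
        FROM areas_staging
    ");

    // Drop the staging table once the data has been moved.
    DB::statement('DROP TABLE areas_staging');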
