Reputation: 93
I created a WordPress plugin that adds new products and categories from a custom CSV file. It works fine: within the time limit (60 s) my script adds 4500 products to 'wp_posts' and about 90k rows to 'wp_postmeta'.
Now, when I try to update the stock for these products from a CSV (UPDATE, or DELETE + INSERT), the script stops with a timeout after about 1k rows.
Do you know a better way (better than '$wpdb->query' in a PHP loop) to update this stock?
if ( isset( $_POST['wool_stock_import_form_submitted'] ) ) {
    $hidden_field = esc_html( $_POST['wool_stock_import_form_submitted'] );
    if ( $hidden_field == 'Y' ) {
        // Prepare csv file
        $addlist          = $_FILES['addlist'];
        $addlist_tmp_name = $_FILES['addlist']['tmp_name'];
        $addlist_name     = $_FILES['addlist']['name'];
        $addlist_size     = $_FILES['addlist']['size'];
        $addlist_type     = $_FILES['addlist']['type'];
        $addlist_error    = $_FILES['addlist']['error'];
        $addlist_array    = array();
    }
    if ( $_POST['submit'] ) {
        $row = 1;
        if ( ( $handle_addlist = fopen( $addlist_tmp_name, "r" ) ) !== FALSE ) {
            while ( ( $data = fgetcsv( $handle_addlist, 1000, ";" ) ) !== FALSE ) {
                $row++;
                $addlist_array_tmp = array();
                for ( $i = 0; $i < 99; $i++ ) {
                    if ( ! empty( $data[ $i ] ) ) {
                        array_push( $addlist_array_tmp, $data[ $i ] );
                    }
                }
                array_push( $addlist_array, $addlist_array_tmp );
            }
            fclose( $handle_addlist );
        }
    }
    foreach ( $addlist_array as $stock ) {
        // Look up the product whose meta value matches the first CSV column
        $results = $wpdb->get_var( $wpdb->prepare(
            "
            SELECT post_id
            FROM wp_postmeta
            WHERE meta_value = %s
            ",
            $stock[0]
        ) );
        /* $wpdb->update(
            'wp_postmeta',
            array(
                'meta_value' => $stock[2]
            ),
            array(
                'post_id'  => $results,
                'meta_key' => '_stock'
            ),
            array( '%s' ),
            array( '%d', '%s' )
        ); */
        if ( ! empty( $results ) ) {
            $wpdb->query(
                $wpdb->prepare(
                    "
                    DELETE FROM wp_postmeta
                    WHERE post_id = %d
                    AND meta_key = %s
                    ",
                    $results, '_stock'
                )
            );
            $wpdb->insert(
                'wp_postmeta',
                array(
                    'post_id'    => $results,
                    'meta_key'   => '_stock',
                    'meta_value' => $stock[2]
                ),
                array(
                    '%d',
                    '%s',
                    '%s'
                )
            );
            $wpdb->show_errors();
            echo 'blabla ' . $stock[0] . ' OK ' . date( 'Y-m-d H:i:s' ) . '<br>';
        } else {
            echo 'blabla ' . $stock[0] . ' ' . date( 'Y-m-d H:i:s' ) . '<br>';
        }
    }
}
Upvotes: 0
Views: 653
Reputation: 34285
Create a temporary table within the database with the same structure as the data in the csv file.
Import the contents of the csv file into the temporary table using the load data infile command (a rough sketch of these two steps is shown below).
Either use an insert ... on duplicate key update ... statement (if you have appropriate unique indexes or primary keys defined), or use delete with a subquery and an insert ... select ... statement to update the data.
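A minimal sketch of the first two steps could look like this. The table name your_temp_table, the column names and the file path are placeholders, the csv is assumed to be semicolon separated as in the question, and load data local infile must be allowed by your MySQL server:
-- create a temporary table matching the csv layout (placeholder names)
create temporary table your_temp_table (
    first_column_name varchar(255),
    second_column_name varchar(255),
    third_column_name varchar(255)
);
-- bulk load the csv in a single statement
load data local infile '/path/to/stock.csv'
into table your_temp_table
fields terminated by ';'
lines terminated by '\n';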
-- delete matching records
delete from wp_postmeta
using wp_postmeta
inner join your_temp_table on wp_postmeta.meta_value = your_temp_table.second_column_name;

-- insert the data from the temp table
insert into wp_postmeta
select * -- if the fields are in a different order in your temp table than in wp_postmeta, then list the fields explicitly
from your_temp_table;
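For the first option, a hedged sketch of the upsert form could look like the statement below. It assumes a unique index on (post_id, meta_key), which wp_postmeta does not define out of the box, and the column names are placeholders only:
-- single upsert, only works with a suitable unique key in place
insert into wp_postmeta (post_id, meta_key, meta_value)
select pm.post_id, '_stock', t.third_column_name
from your_temp_table t
inner join wp_postmeta pm on pm.meta_value = t.second_column_name
on duplicate key update meta_value = values(meta_value);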
Drop the temporary table.
This way there is no need for any loop in PHP. Load data infile is optimised to load large amounts of data into MySQL quickly.
If you would like to keep the loop, even then do not prepare the delete statement on every iteration. You can prepare it once and then reuse it with different parameters; that is what prepared statements are primarily for. Also, build a bulk insert statement in the loop so that multiple records are inserted in one SQL query (see the sketch below).
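If the loop stays in PHP, a rough sketch of that idea with $wpdb could look like the code below. It assumes the post IDs have already been resolved into a non-empty $rows array of post_id => quantity pairs; all variable names are illustrative only:
// assumption: $rows maps post_id => new stock quantity, built from the csv
$post_ids = array_map( 'intval', array_keys( $rows ) );

// one DELETE for all affected '_stock' rows instead of one per product
$wpdb->query(
    "DELETE FROM {$wpdb->postmeta}
    WHERE meta_key = '_stock'
    AND post_id IN (" . implode( ',', $post_ids ) . ")"
);

// one multi-row INSERT instead of one INSERT per product
$placeholders = array();
$values       = array();
foreach ( $rows as $post_id => $qty ) {
    $placeholders[] = '(%d, %s, %s)';
    array_push( $values, $post_id, '_stock', $qty );
}
$wpdb->query( $wpdb->prepare(
    "INSERT INTO {$wpdb->postmeta} (post_id, meta_key, meta_value) VALUES "
    . implode( ', ', $placeholders ),
    $values
) );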
Upvotes: 2