I'm using the mysqli API to query a large table in batches, but the memory usage on my server grows very quickly: free memory drops to zero and even swap gets exhausted. I don't know how to fix it.
The table has 4 million rows, so I read it one batch at a time instead of all at once.
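The core of it boils down to this loop (a stripped-down sketch of the full script below; the per-row work is commented out while I test, and the memory still grows on every batch):

$start = 0;
$step = 10000;
$sql = "SELECT id,filepath,ids,duplicatefile,filesize FROM duplicate_attachment WHERE id > $start AND duplicatefile IS NOT NULL LIMIT $step";
$result = $mysqli->query($sql);
while ($result->num_rows > 0) {
    while ($row = $result->fetch_row()) {
        // per-row processing is commented out for the test
    }
    echo memory_get_peak_usage(true) . "\n"; // climbs every iteration
    $result->close();  // free the buffered result set
    unset($result);
    $start += $step;
    $sql = "SELECT id,filepath,ids,duplicatefile,filesize FROM duplicate_attachment WHERE id > $start AND duplicatefile IS NOT NULL LIMIT $step";
    $result = $mysqli->query($sql);
}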
Here is the full script:
<?php
ini_set('memory_limit', '32M');

$config = require_once('config.php');
$attachmentRoot = $config['attachment_root'];

$mysqli = new mysqli($config['DB_HOST'], $config['DB_USER'], $config['DB_PASSWORD'], $config['DB_NAME']);
if ($mysqli->connect_errno)
    throw new Exception('DB connect failed: ' . mysqli_connect_errno() . ' ' . mysqli_connect_error());
$mysqli->set_charset('gbk');
echo "\nRename The Dup Files With Suffix: .es201704111728es \n";
$startTime = microtime(true);
/**
 * Move dup file to $name + .es201704111728es
 */
$suffix = ".es201704111728es";
$fileLinesLimit = 100000;
$listSuffix = 0;
$lines = 0;
/**
 * Create File List.
 */
$fileList = '/tmp/Dupfilelist.txt';
$baseListName = $fileList.$listSuffix;
//$fs = fopen($baseListName,'w');
$totalSize = 0;
$start = 0;
$step = 10000;
// Read the table in chunks of $step rows, advancing $start each pass.
$sql = "SELECT id,filepath,ids,duplicatefile,filesize FROM duplicate_attachment WHERE id > $start AND duplicatefile IS NOT NULL LIMIT $step";
$result = $mysqli->query($sql);
while ($result->num_rows > 0)
{
    // The row-handling body is commented out while testing:
    // memory grows even when rows are only fetched and discarded.
    while ($row = $result->fetch_row())
    {
        /*$fiepath = $row[1];
        $uniqueIdsArray = array_unique(explode(',', $row[2]));
        if (empty($row[3]))
            throw new \Exception("\n" . 'ERROR:' . $row[0] . "\n" . var_export($row[3], true) . "\n");
        $uniqueFilesArray = array_unique(explode(',', $row[3]));
        $hasFile = array_search($fiepath, $uniqueFilesArray);
        if ($hasFile !== false)
            unset($uniqueFilesArray[$hasFile]);
        $num = count($uniqueIdsArray);
        $fileNum = count($uniqueFilesArray);
        $ids = implode(',', $uniqueIdsArray);
        if ($num > 1 && $fileNum > 0) {
            //echo "\nID: $row[0] . File Need To Rename:" . var_export($uniqueFilesArray, true) . "\n";
            $size = intval($row[4]);
            if ($lines >= $fileLinesLimit) {
                $lines = 0;
                $listSuffix++;
                //$fileList .= $listSuffix;
            }
            array_map(function ($file) use ($attachmentRoot, $suffix, $fiepath, $totalSize, $size, $fileLinesLimit, &$listSuffix, &$lines, $fileList) {
                //$fs = fopen($fileList.$listSuffix,'a');
                if ($file === $fiepath)
                    return -1;
                $source = $file;
                $target = $source . $suffix;
                //rename($source,$target);
                //fwrite($fs,$source.','.$target."\n");
                //file_put_contents($fileList.$listSuffix, $source.','.$target."\n",FILE_APPEND);
                //$totalSize += intval($size);
                $lines++;
                //echo memory_get_usage()."\n";
                //fclose($fs);
                //unset($fs);
                //try to write to a file without a large memory cost
                //$ts = fopen('/tmp/tempfile-0412','w');
            }, $uniqueFilesArray);
            //echo "Test Just One Attachment Record.\n";
            //echo "Ids:$ids\n";
            //exit();
        }*/
    }
    echo memory_get_peak_usage(true) . "\n"; // peak memory keeps climbing on every batch

    if (!$mysqli->ping())
    {
        echo "MySQL connection failed. Reconnecting.\n";
        $mysqli = new mysqli($config['DB_HOST'], $config['DB_USER'], $config['DB_PASSWORD'], $config['DB_NAME']);
        if ($mysqli->connect_errno)
            throw new Exception('DB connect failed: ' . mysqli_connect_errno() . ' ' . mysqli_connect_error());
        $mysqli->set_charset('gbk');
    }

    //mysqli_free_result($result);
    $result->close(); // free the buffered result set before the next batch
    unset($result);

    $start += $step;
    $sql = "SELECT id,filepath,ids,duplicatefile,filesize FROM duplicate_attachment WHERE id > $start AND duplicatefile IS NOT NULL LIMIT $step";
    $result = $mysqli->query($sql);
}
echo "Dup File Total Size: $totalSize\n";
echo "Script cost time :".(microtime(true)-$startTime)." ms\n";sleep(1000*10);
mysqli_close($mysqli);
exit();
Update: it turned out I had the Xdebug extension enabled, sorry about that. After disabling it, everything runs fine and the memory no longer grows.
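In case someone else hits the same symptom: with Xdebug loaded, a long-running CLI loop like this can apparently keep accumulating memory for its own bookkeeping even though the PHP code frees everything. A quick, hypothetical check (not part of the original script) to confirm whether Xdebug is active for the CLI:

if (extension_loaded('xdebug')) {
    echo 'Xdebug ' . phpversion('xdebug') . " is loaded\n";
} else {
    echo "Xdebug is not loaded\n";
}

Running the script once with php -n (start without the usual ini files, so Xdebug is not loaded) is another quick way to compare, assuming mysqli is compiled in rather than loaded from an ini file.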