Saving data from localhost to online - php

I am trying to save data from my localhost database to our live server database using PHP. I get a successful response, but whenever I check our live database server there is no data in it; nothing was inserted or updated.
Also, the response I get is "Import successful! Found a total of 4 records in patient.file" even though the file I am importing contains only two records of data.
Here is my code:
<?php
//============FUNCTIONS=================
function export_table($target_table,$sdate,$edate,$station){
$i = mysql_num_rows(mysql_query("DESCRIBE $target_table"));
$get_columns = mysql_query("SHOW COLUMNS FROM " . $target_table);
while($row_columns = mysql_fetch_array($get_columns)){
$column_name = $row_columns['Field'] . "|";
$csv_output .= $column_name;
}
$csv_output = rtrim($csv_output, "|");
$csv_output .= "\r\n";
$values = mysql_query("SELECT * FROM ".$target_table." WHERE
dateEncoded >= '2017-07-01' and dateEncoded <= '2017-07-07'");
while ($rowr = mysql_fetch_row($values)) {
for ($j=0;$j<$i;$j++) {
if($rowr[$j] == NULL){$rowr[$j] = "NULL";}
if($j==($i-1)){$csv_output .= str_replace(array("\n", "\r"), '', $rowr[$j]);
}else{$csv_output .= str_replace(array("\n", "\r"), '', $rowr[$j])."|";}
}// end for
$csv_output .= "\r\n";
}// end while
return $csv_output;
}// end function export_table
//========CREATION OF THE FILE===========================
include_once "../ewcfg8.php";
include_once "../dbcon.php";
date_default_timezone_set('Asia/Manila');
ini_set('memory_limit', '-1');
set_time_limit(0);
// $startdate = '2017-06-01';
// $enddate = '2017-07-07'; ;
// $defaulStation = $_POST['defaultStation'];
$tables = array('patient',
'tb_adr',
'tb_case',
'tb_casecomment',
'tb_comorbidity',
'tb_consilium',
'tb_consultations',
'tb_contact',
'tb_pe',
'tb_prescript',
'tb_prevcase',
'tb_resultculture',
'tb_resultgx',
'tb_resultdst',
'tb_resultdstdrug',
'tb_resulthiv',
'tb_resultxray',
'tb_symptom',
'tb_resultdssm',
'tb_dot');
//put tables in an array within an array(make a multi-dimensional array)
$dl_table = array();
foreach ($tables as $table) {
$content = export_table($table,$startdate,$enddate,$defaulStation);
$dl_table[$table] = $content;//multi-dimensional array
}
//#mysql_close($con); //close localhost connection
//=========INSERTING VALUES TO DATABASE===============
//===============Open New Connection And Connect to Live Database========
$dbhost = "http/mywebsite.example";
$localport = "3306";
$dbuser = "user";
$dbpass = "12345";
$dbname = "myonlinedb";
$conn = mysql_connect($dbhost, $dbuser, $dbpass);
if(! $conn ) {
die('Could not connect: ' . mysql_error());
}
mysql_select_db($dbname, $conn);
$fieldseparator = "|";
$lineseparator = "\n";
foreach ($tables as $table) {
$databasetable = $table;
$csvfile = $dl_table[$table]; //variable that hold the multi-dimensional array
$lines = 0;
$header = 0;
$queries = "";
$linearray = array();
$columns = array();
//====================================Get the Columns (First line in the CSV)===================================
while(($header == 0 && $columns = fgetcsv(${$table},0,"|")) != false){
$header = 1;
$num_columns = count($columns);
$list_columns = "";
for ($c=0; $c < $num_columns; $c++) {
$list_columns .= "`" . $columns[$c] . "`" . ", ";
}
$list_columns = substr($list_columns,0,-2);
//echo $list_columns . "<br /><br />";
break;
}// close while(($columns = fgetcsv($file,0,"|")) !== false && $header == 0)
//====================================Get the contents of the CSV===================================
//Set the header to skip the first row
$header = 0;
//opening the csv file
// $size = filesize($csvfile);
// echo $size;
// if(!$size) {echo "File is empty.\n";}
// $csvcontent = fread($file,$size);
// fclose($file);
//foreach(split($lineseparator,$csvcontent) as $line)
foreach(explode($lineseparator,$csvfile) as $line) {
//if($header == 0){
//$header = 1;
//}else{
$lines++;
$line = trim($line," \t");
$line = str_replace("\r","",$line);
/************************************
This line escapes the special character. remove it if entries are already escaped in the csv file
************************************/
$line = str_replace("'","\'",$line);
/*************************************/
$linearray = explode($fieldseparator,$line);
//*********get the csvID id and the columns in the db table to know if an insert or update will be performed****************
if ($databasetable == "patient"){
$csvID = $linearray[2];
$searchField = "patientID";
}else if($databasetable == "tb_case"){
$csvID = $linearray[0];
$searchField = "caseID";
}else if($databasetable == "tb_resultdstdrug"){
$csvID = $linearray[1]. $linearray[2];
$searchField = "CONCAT(caseIDplus, series)";
}else{
$csvID = $linearray[1]. $linearray[2];
$searchField = "CONCAT(caseID, series)";
}
//***********convert id of each row to 0*******************************
if($databasetable != "tb_case"){$linearray['0'] = '';}
//*****************search the csvID in the database************************************
$searchID = mysql_query("SELECT * FROM $databasetable WHERE $searchField = '$csvID'");
//******************count the number of columns in csv and the db table**************
$countcsv = count($linearray);
$countColumns = $num_columns;
//$countColumns = mysql_num_fields($searchID);
//****************count if the csvID exists in the database********************************
$countResults = mysql_num_rows($searchID);
//*************************get the date encoded in the csv and db table***************
$row = mysql_fetch_assoc($searchID);
$db_dateEncoded = $row['dateEncoded'];
if($databasetable == "tb_dot"){$csv_dateEncoded = $linearray[($countcsv-3)];}
else{$csv_dateEncoded = $linearray[($countcsv-2)];}
//*****************if else statement for checking number of fields****************************************
if ($countResults > 0){
//if a result was found, UPDATE the row
if ($csv_dateEncoded>$db_dateEncoded){
// if table is dot, delete all dot of the tb_case
$query = "UPDATE $databasetable SET ";
$i = 1;
while ($i < $countColumns){
$meta = mysql_fetch_field($searchID, $i);
if($linearray[$i] == "NULL"){
$query .= $columns[$i] . "=" . $linearray[$i];
//if last field
if($i == ($countColumns-1)){$query .= " WHERE $searchField = '$csvID';";}else{$query .= ",";}
}else{
$query .= $columns[$i] . "='" . $linearray[$i];
//if last field
if($i == ($countColumns-1)){$query .= "' WHERE $searchField = '$csvID';";}else{$query .= "',";}
}//end if($linearray[$i] == "NULL")
$i = $i + 1;
}//close while ($i < $countColumns)
}// end if($csv_dateEncoded>=$db_dateEncoded)
}else{//if no id was found, INSERT a new row
//$query = "INSERT INTO $databasetable VALUES(";
if($databasetable == 'patient'){$query = "INSERT INTO $databasetable VALUES(";}
else{$query = "INSERT INTO $databasetable ($list_columns) VALUES(";}
$i = 0;
while ($i < $countColumns){
if($linearray[$i] == "NULL"){
$query .= $linearray[$i];
if($i == ($countColumns-1)){$query .= ");";}else{$query .= ",";}
}else{
$query .= "'" . $linearray[$i];
if($i == ($countColumns-1)){$query .= "');";}else{$query .= "',";}
}
$i = $i + 1;
}//end While
}// close if ($countResults > 0){
//$queries .= $query . "\n";
//print "$query<br />";
#mysql_query($query);
//}//close if($header == 0)
}// close foreach(split($lineseparator,$csvcontent) as $line)
//unlink("data_uploading/files/" . $user . "/" . $databasetable . ".csv");
echo "Import successful! Found a total of $lines records in $table.file.\n<br /><br />";
//mysql_close($conn);
#mysql_close($con);
//====================================Get the Contents===================================
}//close foreach ($tables as $table)
//rmdir("data_uploading/files/" . $user);
//==========END OF INSERTING VALUES TO DATABASE ===========

Related

Improve Export speed of database using PHP

I am trying to create a function that exports a MySQL database. I have researched many ways to do this and arrived at the function below. The problem is that the export is very slow even though my database is only 10 MB. Is there a way to make this faster or otherwise improve it?
public function backup_database($tables = '*'){
if ($this->databaseConnection()) {
$return = '';
if($tables == '*') {
$tables = array();
$sql = $this->db_connection->prepare('SHOW TABLES');
if($sql->execute()){
while($row = $sql->fetch()){
$tables[] = $row[0];
}
}
}
else {
$tables = is_array($tables) ? $tables : explode(',',$tables);
}
foreach($tables as $table) {
$result = $this->db_connection->prepare('SELECT * FROM ' . $table);
$result->execute();
$num_fields = $result->rowCount();
$return .= 'DROP TABLE ' . $table . ';';
$row2 = $this->db_connection->prepare('SHOW CREATE TABLE '.$table);
$row2->execute();
$row2 = $row2->fetch();
$return .= "\n\n".$row2[1].";\n\n";
for ($i = 0; $i < $num_fields; $i++){
while($row = $result->fetch())
{
$return .= 'INSERT INTO '. $table .' VALUES(';
for($j = 0; $j < $num_fields; $j++){
$row[$j] = addslashes($row[$j]);
$row[$j] = preg_replace("/\\n/","\\n",$row[$j]);
if (isset($row[$j])) { $return .= '"'. $row[$j] .'"' ; } else { $return .= '""'; }
if ($j<($num_fields-1)) { $return .= ','; }
}
$return .= ");\n";
}
}
$return .="\n\n\n";
}
if (!file_exists('database_backups')) {
mkdir('database_backups', 0777, true);
}
$filename = 'database_backups/db-backup-'.time().'-'.(md5(implode(',',$tables))).'.sql.gz';
$handle = fopen($filename,'w+');
$gzdata = gzencode($return, 9);
fwrite($handle,$gzdata);
fclose($handle);
return '1';
}
If you want to export all database tables at once, you can just use this PHP code:
$backup_file = "/var/www/html/DB_backup/evidyaa" . date("Y-m-d-H-i-s") . '.sql'; //this would the path of the filename for backup
$command = "mysqldump --user=dbuser --password=dbpass --host=dbhost ". "dbname > $backup_file";
passthru($command);
echo 'done';
If you are using Windows, then the command would be
$backup_file = "/var/www/html/DB_backup/evidyaa" . date("Y-m-d-H-i-s") . '.sql'; //this would the path of the filename for backup
$command = "c:\xampp\mysql\bin\mysqldump.exe --user=dbuser --password=dbpass --host=dbhost ". "dbname > $backup_file";

Backup MySQL tables in Laravel without third-party libraries

I'm new to Laravel. I've been trying to create a controller that backs up tables to a backup_date_.sql file without using any third-party library, but I'm getting frustrated. I've searched and found some code examples and tried to use them within my BackupsController, but things are getting more and more difficult. Any help is really appreciated. This is my code; thanks in advance.
<?php
public function query($data, $mode = \PDO::FETCH_ASSOC)
{
$pdo = DB::connection()->getPdo();
$stmt = $pdo->query($data);
$results = $stmt->fetchAll($mode);
// $results = $stmt->fetch($mode);
return $results;
}
public function backup(Request $request)
{
if ($request->all()) {
$output = '';
foreach (request('table') as $table) {
$show_table_query = $this->query("SHOW CREATE TABLE " . stripslashes($table) . "");
foreach ($show_table_query as $show_table_row)
{
array_shift($show_table_row);
$output .= implode(", ", $show_table_row);
}
$single_result = DB::select('select * from ' . stripslashes($table));
foreach ($single_result as $key => $value)
{
$value = array_map(function($obj) {
return (array) $obj;
}, $single_result);
$keys = array_keys($value[$key]);
$val = array_values($value[$key]);
$get_keys = array_shift($keys);
$get_values = array_shift($val);
$table_column = implode(",", $keys);
// $table_value ="'" . implode("','", $val) . "'\n";
$table_value ="'" . implode("','", $val) . "'";
$output .= DB::insert(
"INSERT INTO " . stripslashes($table) . "("
. $table_column . ") VALUES(" . $table_value . ")"
);
}
}
?>
Write a command (https://laravel.com/docs/5.6/artisan#writing-commands) that runs mysqldump -uUSERNAME -p DATABASE > backup.sql via SSH, schedule it (https://laravel.com/docs/5.6/scheduling), and DONE :)) A sketch of such a command is shown below.
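A minimal sketch of such a command (assuming Laravel 5.6+, mysqldump available on the server's PATH, and the default mysql connection; the class name and backup path are illustrative, not part of the original answer):
<?php
// app/Console/Commands/BackupDatabase.php (illustrative location)
namespace App\Console\Commands;

use Illuminate\Console\Command;

class BackupDatabase extends Command
{
    protected $signature = 'db:backup';
    protected $description = 'Dump the database to storage/backups using mysqldump';

    public function handle()
    {
        $dir = storage_path('backups');
        if (!is_dir($dir)) {
            mkdir($dir, 0755, true);
        }
        $path = $dir . '/backup_' . date('Y-m-d_H-i-s') . '.sql';

        // Build the mysqldump command from the app's database config.
        $command = sprintf(
            'mysqldump --user=%s --password=%s --host=%s %s > %s',
            escapeshellarg(config('database.connections.mysql.username')),
            escapeshellarg(config('database.connections.mysql.password')),
            escapeshellarg(config('database.connections.mysql.host')),
            escapeshellarg(config('database.connections.mysql.database')),
            escapeshellarg($path)
        );

        exec($command, $output, $exitCode);
        $this->info($exitCode === 0 ? "Backup written to {$path}" : 'Backup failed');
    }
}
It can then be scheduled in app/Console/Kernel.php, for example with $schedule->command('db:backup')->daily();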
This is a function I found and later modified to export my databases, including all the data plus stored procedures and functions if any exist in the database. The code was written for a CodeIgniter application, but you can easily convert it to Laravel.
CodeIgniter version:
<?php
if(!function_exists("export_database")){
function export_database($bkpFileName = null){
$ci =& get_instance();
$targetTables = [];
$newLine = "\r\n";
$queryTables = $ci->db->query('SHOW TABLES');
foreach($queryTables->result() as $table){
$targetTables[] = $table->Tables_in_my_db;
}
foreach($targetTables as $table){
$tableData = $ci->db->query('SELECT * FROM '.$table);
$res = $ci->db->query('SHOW CREATE TABLE '.$table);
$cnt = 0;
$content = (!isset($content) ? '' : $content) . $res->row_array()["Create Table"].";" . $newLine . $newLine;
foreach($tableData->result_array() as $row){
$subContent = "";
$firstQueryPart = "";
if($cnt == 0 || $cnt % 100 == 0){
$firstQueryPart .= "INSERT INTO {$table} VALUES ";
if($tableData->num_rows() > 1)
$firstQueryPart .= $newLine;
}
$valuesQuery = "(";
foreach($row as $key => $value){
$valuesQuery .= $ci->db->escape($value) . ", ";
}
$subContent = $firstQueryPart . rtrim($valuesQuery, ", ") . ")";
if( (($cnt+1) % 100 == 0 && $cnt != 0) || $cnt+1 == $tableData->num_rows())
$subContent .= ";" . $newLine;
else
$subContent .= ",";
$content .= $subContent;
$cnt++;
}
$content .= $newLine;
}
$content = trim($content);
//check for stored procedures
$storedProcedures = $ci->db->query("SHOW PROCEDURE STATUS WHERE Db = '{$ci->db->database}'");
if($storedProcedures->num_rows() > 0){
foreach($storedProcedures->result() as $procedure){
$data = $ci->db->query("SHOW CREATE PROCEDURE {$procedure->Name}");
if($data->num_rows() > 0){
$dropProcedureSQL = "DROP PROCEDURE IF EXISTS {$procedure->Name};";
$sqlQuery = $data->row_array()["Create Procedure"];
$sqlQuery = preg_replace("/CREATE DEFINER=.+? PROCEDURE/", "CREATE PROCEDURE IF NOT EXISTS", $sqlQuery);
$sqlQuery = "\r\n" . $sqlQuery . "//";
$content .= $newLine . $newLine . $dropProcedureSQL . $sqlQuery ;
}
}
}
//check for functions
$functions = $ci->db->query("SHOW FUNCTION STATUS WHERE Db = '{$ci->db->database}';");
if($functions->num_rows() > 0){
foreach($functions->result() as $function){
$data = $ci->db->query("SHOW CREATE FUNCTION {$function->Name}");
if($data->num_rows() > 0){
$dropFunctionSQL = "DROP function IF EXISTS {$function->Name};";
$sqlQuery = $data->row_array()["Create Function"];
$sqlQuery = preg_replace("/CREATE DEFINER=.+? FUNCTION/", "CREATE FUNCTION IF NOT EXISTS", $sqlQuery);
$sqlQuery = "\r\n" . $sqlQuery . "//";
$content .= $newLine . $newLine . $dropFunctionSQL . $sqlQuery ;
}
}
}
$dbBackupFile = FCPATH . BKP_FILE_DIR;
if(is_null($bkpFileName))
$dbBackupFile .= "{$ci->db->database}.sql";
else
$dbBackupFile .= "{$bkpFileName}.sql";
$handle = fopen($dbBackupFile, "w+");
fwrite($handle, $content);
fclose($handle);
return $dbBackupFile;
}
}
Laravel version:
<?php
if(!function_exists("export_database")){
function export_database($bkpFileName = null){
//$ci =& get_instance();
$targetTables = [];
$newLine = "\r\n";
$queryTables = DB::select(DB::raw('SHOW TABLES'));
foreach($queryTables->result() as $table){
$targetTables[] = $table->Tables_in_my_database;
}
foreach($targetTables as $table){
$tableData = DB::select(DB::raw('SELECT * FROM '.$table));
$res = DB::select(DB::raw('SHOW CREATE TABLE '.$table));
$cnt = 0;
$content = (!isset($content) ? '' : $content) . $res->row_array()["Create Table"].";" . $newLine . $newLine;
foreach($tableData as $row){
$subContent = "";
$firstQueryPart = "";
if($cnt == 0 || $cnt % 100 == 0){
$firstQueryPart .= "INSERT INTO {$table} VALUES ";
if(count($tableData) > 1)
$firstQueryPart .= $newLine;
}
$valuesQuery = "(";
foreach($row as $key => $value){
$valuesQuery .= $value . ", ";
}
$subContent = $firstQueryPart . rtrim($valuesQuery, ", ") . ")";
if( (($cnt+1) % 100 == 0 && $cnt != 0) || $cnt+1 == count($tableData))
$subContent .= ";" . $newLine;
else
$subContent .= ",";
$content .= $subContent;
$cnt++;
}
$content .= $newLine;
}
$content = trim($content);
//check for stored procedures
$storedProcedures = DB::select(DB::raw("SHOW PROCEDURE STATUS WHERE Db = '{$ci->db->database}'"));
if($storedProcedures->count() > 0){
foreach($storedProcedures->result() as $procedure){
$data = DB::select(DB::raw("SHOW CREATE PROCEDURE {$procedure->Name}"));
if($data->count() > 0){
$dropProcedureSQL = "DROP PROCEDURE IF EXISTS {$procedure->Name};";
$sqlQuery = $data->row_array()["Create Procedure"];
$sqlQuery = preg_replace("/CREATE DEFINER=.+? PROCEDURE/", "CREATE PROCEDURE IF NOT EXISTS", $sqlQuery);
$sqlQuery = "\r\n" . $sqlQuery . "//";
$content .= $newLine . $newLine . $dropProcedureSQL . $sqlQuery ;
}
}
}
//check for functions
$functions = DB::select(DB::raw("SHOW FUNCTION STATUS WHERE Db = '{$ci->db->database}';"));
if($functions->count() > 0){
foreach($functions->result() as $function){
$data = DB::select(DB::raw("SHOW CREATE FUNCTION {$function->Name}"));
if($data->count() > 0){
$dropFunctionSQL = "DROP function IF EXISTS {$function->Name};";
$sqlQuery = $data->row_array()["Create Function"];
$sqlQuery = preg_replace("/CREATE DEFINER=.+? FUNCTION/", "CREATE FUNCTION IF NOT EXISTS", $sqlQuery);
$sqlQuery = "\r\n" . $sqlQuery . "//";
$content .= $newLine . $newLine . $dropFunctionSQL . $sqlQuery ;
}
}
}
/*$dbBackupFile = FCPATH . BKP_FILE_DIR;
if(is_null($bkpFileName))
$dbBackupFile .= "{$ci->db->database}.sql";
else
$dbBackupFile .= "{$bkpFileName}.sql";
$handle = fopen($dbBackupFile, "w+");
fwrite($handle, $content);
fclose($handle);*/
return $content;
}
}
Note:
I have tried my best to convert the above code from CodeIgniter to Laravel, but since I don't have a running instance of Laravel to test it, I'm not sure it will work.
I refactored @Igor Ilic's answer to be Laravel-compatible and improved it a little bit; I hope it is useful :)
It is well tested with Laravel 9.
function ExportDatabase(array $tablesToBackup = null, string $backupFilename = null): string
{
$targetTables = [];
$newLine = "\n";
if ($tablesToBackup == null)
{
$queryTables = DB::select(DB::raw('SHOW TABLES'));
foreach ($queryTables as $table)
{
$targetTables[] = $table->Tables_in_my_database;
}
}
else
{
foreach ($tablesToBackup as $table)
{
$targetTables[] = $table;
}
}
foreach ($targetTables as $table)
{
$tableData = DB::select(DB::raw('SELECT * FROM ' . $table));
$res = DB::select(DB::raw('SHOW CREATE TABLE ' . $table))[0];
$cnt = 0;
$content = (!isset($content) ? '' : $content) . $res->{"Create Table"} . ";" . $newLine . $newLine;
foreach ($tableData as $row)
{
$subContent = "";
$firstQueryPart = "";
if ($cnt == 0 || $cnt % 100 == 0)
{
$firstQueryPart .= "INSERT INTO {$table} VALUES ";
if (count($tableData) > 1)
{
$firstQueryPart .= $newLine;
}
}
$valuesQuery = "(";
foreach ($row as $key => $value)
{
$valuesQuery .= "'$value'" . ", ";
}
$subContent = $firstQueryPart . rtrim($valuesQuery, ", ") . ")";
if ((($cnt + 1) % 100 == 0 && $cnt != 0) || $cnt + 1 == count($tableData))
{
$subContent .= ";" . $newLine;
}
else
{
$subContent .= ",";
}
$content .= $subContent;
$cnt++;
}
$content .= $newLine;
}
$content = trim($content);
if (is_null($backupFilename))
{
return $content;
}
$dbBackupFile = storage_path('backups/database/');
if (!File::exists($dbBackupFile))
{
File::makeDirectory($dbBackupFile, 0755, true);
}
$dbBackupFile .= "{$backupFilename}.sql";
$handle = fopen($dbBackupFile, "w+");
fwrite($handle, $content);
fclose($handle);
return $content;
}
I created this specifically to clone WordPress subsite tables from one database to another (hence, the $prefix parameter). Leaving the default value of $prefix ('%') will get all tables in the source database.
This has been tested with Laravel 9.x.
use Illuminate\Support\Facades\DB;
use Illuminate\Support\Collection;
class CloneService
{
public function clone(string $sourceDb, string $destDb, string $prefix): void
{
$tables = $this->getTables($sourceDb, $prefix);
if ($tables->count() > 0) {
$tables->each(function ($table) use ($sourceDb, $destDb) {
$success = DB::statement("CREATE TABLE {$destDb}.{$table} LIKE {$sourceDb}.{$table};");
if ($success) {
$this->insertData($sourceDb, $destDb, $table);
}
});
}
}
public function getTables(string $dbName, string $prefix = '%'): Collection
{
$tables = collect();
// Escape underscores
$prefix = str_replace('_', '\_', $prefix);
collect(DB::select("SHOW TABLES FROM {$dbName} LIKE '{$prefix}%';"))
->each(function ($result) use (&$tables) {
// Convert the stdClass to an array, and get the first element
$table = current((array)$result);
$tables->push($table);
});
return $tables;
}
protected function insertData(string $sourceDb, string $destDb, string $table): void
{
$tableData = DB::select(DB::raw("SELECT * FROM {$sourceDb}.{$table};"));
collect($tableData)->each(function ($row) use ($destDb, $table) {
$rowData = get_object_vars($row);
// Create a comma-separated string of the columns
$columns = implode(',', array_keys($rowData));
$values = array_values($rowData);
// Create a comma-separated string of "?'s"
$prep = implode(',', array_fill(0, count($values), '?'));
$query = "INSERT INTO {$destDb}.{$table} ({$columns}) VALUES ({$prep})";
DB::insert($query, $values);
});
}
}

Need Help on Export SQL to CSV through PHP

I have this PHP script which is supposed to take an SQL query and output it to a CSV file. I know that when I run it I'm getting the right statement passed in, but it does not seem to generate a file in my uploads folder.
Could anyone debug this for me?
<?php
function ExportExcel($statement)
{
$filename = "uploads/".strtotime("now").'.csv';
$sql = mysql_query("$statement") or die(mysql_error());
$num_rows = mysql_num_rows($sql);
if($num_rows >= 1)
{
$row = mysql_fetch_assoc($sql);
$fp = fopen($filename, "w");
$seperator = "";
$comma = "";
foreach ($row as $name => $value)
{
$seperator .= $comma . '' .str_replace('', '""', $name);
$comma = ",";
}
$seperator .= "\n";
fputs($fp, $seperator);
mysql_data_seek($sql, 0);
while($row = mysql_fetch_assoc($sql))
{
$seperator = "";
$comma = "";
foreach ($row as $name => $value)
{
$seperator .= $comma . '' .str_replace('', '""', $value);
$comma = ",";
}
$seperator .= "\n";
fputs($fp, $seperator);
}
fclose($fp);
echo "<a href='$filename'>Download</a>";
echo $statement;
}
else
{
echo "error";
}
}
?>
If someone has a similar script that uses mysqli, that would be nice.
Here is an example of an export script I use in my apps with MySQLi. This will get you started...
<?php
function ExportExcel($db, $statement){ // $db is your mysqli connection
$output = "";
$sql = mysqli_query($db, $statement);
$columns_total = mysqli_num_fields($sql);
// Get The Field Name
for ($i = 0; $i < $columns_total; $i++) {
$heading = mysqli_fetch_field_direct($sql, $i);
$output .= '"'.$heading->name.'",';
}
$output .="\n";
// Get Records from the table
while ($row = mysqli_fetch_array($sql)) {
for ($i = 0; $i < $columns_total; $i++) {
$output .='"'.$row["$i"].'",';
}
$output .="\n";
}
// Download the file
$filename = "CSV_NAME_GOES_HERE.csv";
header('Content-type: application/csv');
header('Content-Disposition: attachment; filename='.$filename);
echo $output;
exit;
}
?>

Export MySQL database using PHP [closed]

Closed. This question does not meet Stack Overflow guidelines. It is not currently accepting answers.
We don’t allow questions seeking recommendations for books, tools, software libraries, and more. You can edit the question so it can be answered with facts and citations.
Closed 4 years ago.
I've built a PHP/MySQL (WAMP) application and deployed it on a local workstation.
My customer wants to save the DB and restore it whenever he likes.
I've found this code for saving:
<?php
$DB_HOST = "localhost";
$DB_USER = "root";
$DB_PASS = "admin";
$DB_NAME = "dbname";
$con = new mysqli($DB_HOST, $DB_USER, $DB_PASS, $DB_NAME);
$tables = array();
$result = mysqli_query($con,"SHOW TABLES");
while ($row = mysqli_fetch_row($result)) {
$tables[] = $row[0];
}
$return = '';
foreach ($tables as $table) {
$result = mysqli_query($con, "SELECT * FROM ".$table);
$num_fields = mysqli_num_fields($result);
$return .= 'DROP TABLE '.$table.';';
$row2 = mysqli_fetch_row(mysqli_query($con, 'SHOW CREATE TABLE '.$table));
$return .= "\n\n".$row2[1].";\n\n";
for ($i=0; $i < $num_fields; $i++) {
while ($row = mysqli_fetch_row($result)) {
$return .= 'INSERT INTO '.$table.' VALUES(';
for ($j=0; $j < $num_fields; $j++) {
$row[$j] = addslashes($row[$j]);
if (isset($row[$j])) {
$return .= '"'.$row[$j].'"';} else { $return .= '""';}
if($j<$num_fields-1){ $return .= ','; }
}
$return .= ");\n";
}
}
$return .= "\n\n\n";
}
$handle = fopen('backup.sql', 'w+');
fwrite($handle, $return);
fclose($handle);
echo "success";
?>
This code saves the file in a default folder.
What I need is to let the user decide where to save the backup file, or simply download it through the browser.
On the other hand, the user needs to restore from a file of his choosing, so I need a 'browse' button to let him pick the file from any of his folders.
My database is utf8_general_ci and contains English, French and Italian text. I don't need complex code because I wouldn't know how to manage it :-(
Thanks in advance.
Best way to export a database using a PHP script.
Optionally pass a 5th parameter (an array of specific tables), e.g. array("mytable1","mytable2","mytable3"), to export only those tables.
<?php
//ENTER THE RELEVANT INFO BELOW
$mysqlUserName = "Your Username";
$mysqlPassword = "Your Password";
$mysqlHostName = "Your Host";
$DbName = "Your Database Name here";
$backup_name = "mybackup.sql";
$tables = "Your tables";
//or add 5th parameter(array) of specific tables: array("mytable1","mytable2","mytable3") for multiple tables
Export_Database($mysqlHostName,$mysqlUserName,$mysqlPassword,$DbName, $tables=false, $backup_name=false );
function Export_Database($host,$user,$pass,$name, $tables=false, $backup_name=false )
{
$mysqli = new mysqli($host,$user,$pass,$name);
$mysqli->select_db($name);
$mysqli->query("SET NAMES 'utf8'");
$queryTables = $mysqli->query('SHOW TABLES');
while($row = $queryTables->fetch_row())
{
$target_tables[] = $row[0];
}
if($tables !== false)
{
$target_tables = array_intersect( $target_tables, $tables);
}
foreach($target_tables as $table)
{
$result = $mysqli->query('SELECT * FROM '.$table);
$fields_amount = $result->field_count;
$rows_num=$mysqli->affected_rows;
$res = $mysqli->query('SHOW CREATE TABLE '.$table);
$TableMLine = $res->fetch_row();
$content = (!isset($content) ? '' : $content) . "\n\n".$TableMLine[1].";\n\n";
for ($i = 0, $st_counter = 0; $i < $fields_amount; $i++, $st_counter=0)
{
while($row = $result->fetch_row())
{ //when started (and every after 100 command cycle):
if ($st_counter%100 == 0 || $st_counter == 0 )
{
$content .= "\nINSERT INTO ".$table." VALUES";
}
$content .= "\n(";
for($j=0; $j<$fields_amount; $j++)
{
$row[$j] = str_replace("\n","\\n", addslashes($row[$j]) );
if (isset($row[$j]))
{
$content .= '"'.$row[$j].'"' ;
}
else
{
$content .= '""';
}
if ($j<($fields_amount-1))
{
$content.= ',';
}
}
$content .=")";
//every after 100 command cycle [or at last line] ....p.s. but should be inserted 1 cycle eariler
if ( (($st_counter+1)%100==0 && $st_counter!=0) || $st_counter+1==$rows_num)
{
$content .= ";";
}
else
{
$content .= ",";
}
$st_counter=$st_counter+1;
}
} $content .="\n\n\n";
}
//$backup_name = $backup_name ? $backup_name : $name."___(".date('H-i-s')."_".date('d-m-Y').")__rand".rand(1,11111111).".sql";
$backup_name = $backup_name ? $backup_name : $name.".sql";
header('Content-Type: application/octet-stream');
header("Content-Transfer-Encoding: Binary");
header("Content-disposition: attachment; filename=\"".$backup_name."\"");
echo $content; exit;
}
?>
This tool might be useful, it's a pure PHP based export utility: https://github.com/2createStudio/shuttle-export
Try the following.
Execute a database backup query from a PHP file. Below is an example of using a SELECT ... INTO OUTFILE query to create a table backup:
<?php
$DB_HOST = "localhost";
$DB_USER = "xxx";
$DB_PASS = "xxx";
$DB_NAME = "xxx";
$con = new mysqli($DB_HOST, $DB_USER, $DB_PASS, $DB_NAME);
if($con->connect_errno > 0) {
die('Connection failed [' . $con->connect_error . ']');
}
$tableName = 'yourtable';
$backupFile = 'backup/yourtable.sql';
$query = "SELECT * INTO OUTFILE '$backupFile' FROM $tableName";
$result = mysqli_query($con,$query);
?>
To restore the backup you just need to run a LOAD DATA INFILE query like this:
<?php
$DB_HOST = "localhost";
$DB_USER = "xxx";
$DB_PASS = "xxx";
$DB_NAME = "xxx";
$con = new mysqli($DB_HOST, $DB_USER, $DB_PASS, $DB_NAME);
if($con->connect_errno > 0) {
die('Connection failed [' . $con->connect_error . ']');
}
$tableName = 'yourtable';
$backupFile = 'yourtable.sql';
$query = "LOAD DATA INFILE 'backupFile' INTO TABLE $tableName";
$result = mysqli_query($con,$query);
?>
On *nix systems, use the which command to find the location of mysqldump, then try this:
<?php
$dbhost = 'localhost';
$dbuser = 'root';
$dbpass = 'password';
$dbname = 'test';
$mysqldump=exec('which mysqldump');
$command = "$mysqldump --opt -h $dbhost -u $dbuser -p $dbpass $dbname > $dbname.sql";
exec($command);
?>
<?php
$dbhost = 'localhost:3306';
$dbuser = 'root';
$dbpass = 'rootpassword';
$conn = mysql_connect($dbhost, $dbuser, $dbpass);
if(! $conn ) {
die('Could not connect: ' . mysql_error());
}
$table_name = "employee";
$backup_file = "/tmp/employee.sql";
$sql = "SELECT * INTO OUTFILE '$backup_file' FROM $table_name";
mysql_select_db('test_db');
$retval = mysql_query( $sql, $conn );
if(! $retval ) {
die('Could not take data backup: ' . mysql_error());
}
echo "Backedup data successfully\n";
mysql_close($conn);
?>
Here is my code. It will back up the MySQL database and store it in the specified path.
<?php
function backup_mysql_database($options){
$mtables = array(); $contents = "-- Database: `".$options['db_to_backup']."` --\n";
$mysqli = new mysqli($options['db_host'], $options['db_uname'], $options['db_password'], $options['db_to_backup']);
if ($mysqli->connect_error) {
die('Error : ('. $mysqli->connect_errno .') '. $mysqli->connect_error);
}
$results = $mysqli->query("SHOW TABLES");
while($row = $results->fetch_array()){
if (!in_array($row[0], $options['db_exclude_tables'])){
$mtables[] = $row[0];
}
}
foreach($mtables as $table){
$contents .= "-- Table `".$table."` --\n";
$results = $mysqli->query("SHOW CREATE TABLE ".$table);
while($row = $results->fetch_array()){
$contents .= $row[1].";\n\n";
}
$results = $mysqli->query("SELECT * FROM ".$table);
$row_count = $results->num_rows;
$fields = $results->fetch_fields();
$fields_count = count($fields);
$insert_head = "INSERT INTO `".$table."` (";
for($i=0; $i < $fields_count; $i++){
$insert_head .= "`".$fields[$i]->name."`";
if($i < $fields_count-1){
$insert_head .= ', ';
}
}
$insert_head .= ")";
$insert_head .= " VALUES\n";
if($row_count>0){
$r = 0;
while($row = $results->fetch_array()){
if(($r % 400) == 0){
$contents .= $insert_head;
}
$contents .= "(";
for($i=0; $i < $fields_count; $i++){
$row_content = str_replace("\n","\\n",$mysqli->real_escape_string($row[$i]));
switch($fields[$i]->type){
case 8: case 3:
$contents .= $row_content;
break;
default:
$contents .= "'". $row_content ."'";
}
if($i < $fields_count-1){
$contents .= ', ';
}
}
if(($r+1) == $row_count || ($r % 400) == 399){
$contents .= ");\n\n";
}else{
$contents .= "),\n";
}
$r++;
}
}
}
if (!is_dir ( $options['db_backup_path'] )) {
mkdir ( $options['db_backup_path'], 0777, true );
}
$backup_file_name = $options['db_to_backup'] . " sql-backup- " . date( "d-m-Y--h-i-s").".sql";
$fp = fopen($options['db_backup_path'] . '/' . $backup_file_name ,'w+');
if (($result = fwrite($fp, $contents))) {
echo "Backup file created '--$backup_file_name' ($result)";
}
fclose($fp);
return $backup_file_name;
}
$options = array(
'db_host'=> 'localhost', //mysql host
'db_uname' => 'root', //user
'db_password' => '', //pass
'db_to_backup' => 'attendance', //database name
'db_backup_path' => '/htdocs', //where to backup
'db_exclude_tables' => array() //tables to exclude
);
$backup_file_name=backup_mysql_database($options);
If you don't have phpMyAdmin, you can run CLI commands from PHP, such as logging in to MySQL and performing a DB dump. In this case you would use the shell_exec function; a sketch of that approach is shown below.
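A minimal sketch of that approach (the credentials and target path are placeholders, and it assumes mysqldump is on the server's PATH):
<?php
// Placeholder credentials -- replace with your own.
$dbhost = 'localhost';
$dbuser = 'dbuser';
$dbpass = 'dbpass';
$dbname = 'dbname';
$backupFile = '/tmp/' . $dbname . '_' . date('Y-m-d_H-i-s') . '.sql';

// Run mysqldump through the shell; escapeshellarg() guards against shell metacharacters.
$command = sprintf(
    'mysqldump --user=%s --password=%s --host=%s %s > %s 2>&1',
    escapeshellarg($dbuser),
    escapeshellarg($dbpass),
    escapeshellarg($dbhost),
    escapeshellarg($dbname),
    escapeshellarg($backupFile)
);
$output = shell_exec($command);

echo (file_exists($backupFile) && filesize($backupFile) > 0)
    ? "Backup written to $backupFile"
    : "Backup may have failed: $output";
?>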
I would suggest that you do the following:
<?php
function EXPORT_TABLES($host, $user, $pass, $name, $tables = false, $backup_name = false)
{
$mysqli = new mysqli($host, $user, $pass, $name);
$mysqli->select_db($name);
$mysqli->query("SET NAMES 'utf8'");
$queryTables = $mysqli->query('SHOW TABLES');
while ($row = $queryTables->fetch_row())
{
$target_tables[] = $row[0];
}
if ($tables !== false)
{
$target_tables = array_intersect($target_tables, $tables);
}
$content = "SET SQL_MODE = \"NO_AUTO_VALUE_ON_ZERO\";\r\nSET time_zone = \"+00:00\";\r\n\r\n\r\n/*!40101 SET #OLD_CHARACTER_SET_CLIENT=##CHARACTER_SET_CLIENT */;\r\n/*!40101 SET #OLD_CHARACTER_SET_RESULTS=##CHARACTER_SET_RESULTS */;\r\n/*!40101 SET #OLD_COLLATION_CONNECTION=##COLLATION_CONNECTION */;\r\n/*!40101 SET NAMES utf8 */;\r\n--Database: `" . $name . "`\r\n\r\n\r\n";
foreach ($target_tables as $table)
{
$result = $mysqli->query('SELECT * FROM ' . $table);
$fields_amount = $result->field_count;
$rows_num = $mysqli->affected_rows;
$res = $mysqli->query('SHOW CREATE TABLE ' . $table);
$TableMLine = $res->fetch_row();
$content .= "\n\n" . $TableMLine[1] . ";\n\n";
for ($i = 0, $st_counter = 0; $i < $fields_amount; $i++, $st_counter = 0)
{
while ($row = $result->fetch_row())
{ //when started (and every after 100 command cycle):
if ($st_counter % 100 == 0 || $st_counter == 0)
{
$content .= "\nINSERT INTO " . $table . " VALUES";
}
$content .= "\n(";
for ($j = 0; $j < $fields_amount; $j++)
{
$row[$j] = str_replace("\n", "\\n", addslashes($row[$j]));
if (isset($row[$j]))
{
$content .= '"' . $row[$j] . '"';
}
else
{
$content .= '""';
} if ($j < ($fields_amount - 1))
{
$content.= ',';
}
}
$content .=")";
//every after 100 command cycle [or at last line] ....p.s. but should be inserted 1 cycle eariler
if ((($st_counter + 1) % 100 == 0 && $st_counter != 0) || $st_counter + 1 == $rows_num)
{
$content .= ";";
}
else
{
$content .= ",";
} $st_counter = $st_counter + 1;
}
} $content .="\n\n\n";
}
$content .= "\r\n\r\n/*!40101 SET CHARACTER_SET_CLIENT=#OLD_CHARACTER_SET_CLIENT */;\r\n/*!40101 SET CHARACTER_SET_RESULTS=#OLD_CHARACTER_SET_RESULTS */;\r\n/*!40101 SET COLLATION_CONNECTION=#OLD_COLLATION_CONNECTION */;";
$backup_name = $backup_name ? $backup_name : $name . "___(" . date('H-i-s') . "_" . date('d-m-Y') . ")__rand" . rand(1, 11111111) . ".sql";
header('Content-Type: application/octet-stream');
header("Content-Transfer-Encoding: Binary");
header("Content-disposition: attachment; filename=\"" . $backup_name . "\"");
echo $content;
exit;
}
?>
The enitre project for export and import can be found at https://github.com/tazotodua/useful-php-scripts.
I would suggest that you do the following:
<?php
$con = mysqli_connect('HostName', 'UserName', 'Password', 'DatabaseName');
$tables = array();
$result = mysqli_query($con,"SHOW TABLES");
while ($row = mysqli_fetch_row($result)) {
$tables[] = $row[0];
}
$return = '';
foreach ($tables as $table) {
$result = mysqli_query($con, "SELECT * FROM ".$table);
$num_fields = mysqli_num_fields($result);
$return .= 'DROP TABLE '.$table.';';
$row2 = mysqli_fetch_row(mysqli_query($con, 'SHOW CREATE TABLE '.$table));
$return .= "\n\n".$row2[1].";\n\n";
for ($i=0; $i < $num_fields; $i++) {
while ($row = mysqli_fetch_row($result)) {
$return .= 'INSERT INTO '.$table.' VALUES(';
for ($j=0; $j < $num_fields; $j++) {
$row[$j] = addslashes($row[$j]);
if (isset($row[$j])) {
$return .= '"'.$row[$j].'"';} else { $return .= '""';}
if($j<$num_fields-1){ $return .= ','; }
}
$return .= ");\n";
}
}
$return .= "\n\n\n";
}
$handle = fopen('backup.sql', 'w+');
fwrite($handle, $return);
fclose($handle);
echo "success";
?>
Update: fixed an error in the code by adding a space before VALUES in the line $return .= 'INSERT INTO '.$table.' VALUES(';
You can use this command; it works for me 100%:
exec('C:\\wamp\\bin\\mysql\\mysql5.6.17\\bin\\mysqldump.exe -uroot DatabaseName> c:\\database_backup.sql');
Note:
C:\\wamp\\bin\\mysql\\mysql5.6.17\\bin\\mysqldump.exe is the path to the mysqldump executable; check the path on your PC.
-uroot means -u{UserName}.
If your database is protected with a password, then add -p{YourPassword} right after -uroot, as in the example below.
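For example (the path, user name and password here are placeholders):
exec('C:\\wamp\\bin\\mysql\\mysql5.6.17\\bin\\mysqldump.exe -uroot -pYourPassword DatabaseName > c:\\database_backup.sql');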

"PHP Fatal error: Allowed memory size of 134217728 bytes exhausted" while generating database backup

I get the error in the subject line when I try to make a backup of my database to a text file.
function backup_tables($backup_filename, $tables = '*')
{
$conf = new JConfig();
$dbhost = $conf->host;
$dbuser = $conf->user;
$dbpassword = $conf->password;
$dbname = $conf->db;
$link = mysql_connect($dbhost, $dbuser, $dbpassword);
mysql_select_db($dbname, $link) or die(mysql_error());
$return = "drop database if exists `$dbname`;\n\ncreate database `$dbname`;\n\nuse `$dbname`;\n\n";
$return .= "/*!40101 SET #OLD_CHARACTER_SET_CLIENT=##CHARACTER_SET_CLIENT */;\n\n";
$return .= "/*!40101 SET #OLD_CHARACTER_SET_RESULTS=##CHARACTER_SET_RESULTS */;\n\n";
$return .= "/*!40101 SET #OLD_COLLATION_CONNECTION=##COLLATION_CONNECTION */;\n\n";
$return .= "/*!40101 SET NAMES utf8 */;\n\n";
$handle = fopen($backup_filename, 'w+');
fwrite($handle, $return); $return = "";
// get all of the tables
if ($tables == '*') {
$tables = array();
$result = mysql_query('SHOW TABLES');
while ($row = mysql_fetch_row($result)) {
$tables[] = $row[0];
}
} else {
$tables = is_array($tables) ? $tables : explode(',', $tables);
}
// cycle through
foreach ($tables as $table) {
$result = mysql_query('SELECT * FROM ' . $table);
$num_fields = mysql_num_fields($result);
$return .= 'DROP TABLE IF EXISTS `' . $table . '`;';
$return .= "\n\n" . mysql_fetch_row(mysql_query('SHOW CREATE TABLE `' . $table . '`;'))[1] . " DEFAULT CHARSET=cp1251;\n\n";
while ($row = mysql_fetch_row($result)) {
$return .= 'INSERT INTO ' . $table . ' VALUES(';
for ($i = 0; $i < $num_fields; $i++) {
$row[$i] = str_replace("\n", "\\n", addslashes($row[$i]));
$return .= '"' . (isset($row[$i])? $row[$i] : '') . '"';
if ($num_fields - $i - 1) {
$return .= ',';
}
}
$return .= ");\n";
fwrite($handle, $return); $return = "";
}
if($return) {
fwrite($handle, $return);
$return .= "\n\n\n";
}
}
fclose($handle);
}
This function works well except that there is a memory leak somewhere. It creates a file of about 30 MiB and then hangs with the mentioned error. Memory usage of the httpd process increases steadily while file generation is in progress. One more thing: generation hangs at a large table (containing a log), but I think this doesn't matter because the information is written row by row.
One more thing: generation hangs at a large table (containing a log),
but I think this doesn't matter because the information is written row by row.
Actually, this is the cause: I should use mysql_unbuffered_query instead of mysql_query. Now the function looks like this:
function backup_tables($backup_filename, $tables = '*')
{
$conf = new JConfig();
$dbhost = $conf->host;
$dbuser = $conf->user;
$dbpassword = $conf->password;
$dbname = $conf->db;
$link = mysql_connect($dbhost, $dbuser, $dbpassword);
mysql_select_db($dbname, $link) or die(mysql_error());
$return = "drop database if exists `$dbname`;\n\ncreate database `$dbname`;\n\nuse `$dbname`;\n\n";
$return .= "/*!40101 SET #OLD_CHARACTER_SET_CLIENT=##CHARACTER_SET_CLIENT */;\n\n";
$return .= "/*!40101 SET #OLD_CHARACTER_SET_RESULTS=##CHARACTER_SET_RESULTS */;\n\n";
$return .= "/*!40101 SET #OLD_COLLATION_CONNECTION=##COLLATION_CONNECTION */;\n\n";
$return .= "/*!40101 SET NAMES utf8 */;\n\n";
$handle = fopen($backup_filename, 'w+');
fwrite($handle, $return); $return = "";
// get all of the tables
if ($tables == '*') {
$tables = array();
$result = mysql_query("SHOW TABLES");
while ($row = mysql_fetch_row($result)) {
$tables[] = $row[0];
}
} else {
$tables = is_array($tables) ? $tables : explode(',', $tables);
}
// cycle through
foreach ($tables as $table) {
$return .= "DROP TABLE IF EXISTS `$table`;";
$return .= "\n\n" . mysql_fetch_row(mysql_query("SHOW CREATE TABLE `$table`;"))[1] . " DEFAULT CHARSET=cp1251;\n\n";
$result = mysql_unbuffered_query("SELECT * FROM `$table`");
$num_fields = mysql_num_fields($result);
while ($row = mysql_fetch_row($result)) {
$return .= "INSERT INTO `$table` VALUES(";
for ($i = 0; $i < $num_fields; $i++) {
$row[$i] = str_replace("\n", "\\n", addslashes($row[$i]));
$return .= '"' . (isset($row[$i])? $row[$i] : '') . '"';
if ($num_fields - $i - 1) {
$return .= ',';
}
}
$return .= ");\n";
fwrite($handle, $return); $return = "";
}
if($return)
fwrite($handle, $return);
$return = "\n\n\n";
}
fclose($handle);
}
The PHP-level answer here is to increase your memory limit, and possibly your max execution time at the same time (see the snippet below).
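For example, a minimal snippet at the top of the backup script (the values are illustrative; '-1' removes the memory cap entirely, which is only sensible for a one-off admin or CLI script):
<?php
ini_set('memory_limit', '512M'); // raise the per-request memory cap (or '-1' for unlimited)
set_time_limit(0);               // remove the max execution time limit
?>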
Outside of this being an exercise in re-creating the mysqldump command, is there a reason to perform this from within PHP code?
You might be better off using mysqldump or something like Holland http://hollandbackup.org/ to go through and dump each table individually.
The current answer uses a deprecated function. The new way to do this is to use mysqli::use_result.
In my case I ran into the exhausted-memory error while trying to write a large SQL table to a file. Here's how I used it.
$conn = new mysqli("localhost", "my_user", "my_password", "my_db");
$sql = 'SELECT row1, row2 from table';
$fp = fopen('output.json', 'w');
if ($conn->multi_query($sql)) {
do {
if ($result = $conn->use_result()) {
while ($row = $result->fetch_row()) {
$row1 = $row[0];
$row2 = $row[1];
$item = array('row1'=>$row1, 'row2'=>$row2);
fwrite($fp, json_encode($item));
}
$result->close();
}
} while ($conn->more_results() && $conn->next_result());
}
fclose($fp);
$conn->close();
