Saving binary string to file in php sent from POST - php

I have a drag and drop uploader for various file types (.jpg, .ai, .pdf, .flv, .psd, etc.).
I'm reading the file as a binary string and sending it in a jQuery POST:
function importZoneDrop(evt) {
    evt.stopPropagation();
    evt.preventDefault();
    var files = evt.dataTransfer.files; // FileList object.
    // files is a FileList of File objects. List some properties.
    for (var i = 0, f; f = files[i]; i++) {
        var start = 0;
        var stop = files[0].size - 1;
        var reader1 = new FileReader();
        var reader2 = new FileReader();
        var ext = f.name.substring(f.name.indexOf(".") + 1);
        if (ext == "JPEG" || ext == "jpeg" || ext == "JPG") {
            ext = "jpg";
        }
        reader1.onload = (function(theFile) {
            return function(e) {
                // Render thumbnail.
                $("#import-drop-zone").append('<img src="' + e.target.result + '" />');
            };
        })(f);
        reader2.onloadend = function(evt) {
            if (evt.target.readyState == FileReader.DONE) { // DONE == 2
                $.post("/process/upload.php", {"blob": evt.target.result, "extension": ext}, function(data) {
                    console.log(data);
                });
            }
        };
        reader1.readAsDataURL(f);
        var blob = f.slice(start, stop + 1);
        reader2.readAsBinaryString(f);
    }
}
This works and sends the file. Next, I get the string on the server and write it using file_put_contents:
$extension = $_POST['extension'];
$file = $_POST['blob']; // sent from jQuery post
$filePath = "../_temp/monkey.".$extension;
file_put_contents($filePath, $file);
if (file_put_contents($filePath, $file)) {
    echo json_encode("it worked");
} else {
    echo json_encode("it failed");
}
This successfully writes a file, but the file does not work; it's broken.
What am I doing wrong?

You need to use base64_decode.
file_put_contents($filePath, base64_decode($file));
Note, you're currently writing the data twice. Don't.
if (file_put_contents($filePath, base64_decode($file))) {
is fine
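Putting both fixes together, a minimal upload.php could look like this (just a sketch: it assumes the posted blob really is base64-encoded data and that the ../_temp/ directory is writable):

<?php
// Minimal sketch of upload.php based on the answer above.
// Assumes the client posts base64-encoded file data in "blob"
// and one of the expected extensions in "extension".
$allowed   = array('jpg', 'ai', 'pdf', 'flv', 'psd');
$extension = strtolower($_POST['extension']);
$file      = base64_decode($_POST['blob'], true);

if (!in_array($extension, $allowed, true) || $file === false) {
    echo json_encode("it failed");
    exit;
}

$filePath = "../_temp/monkey." . $extension;

// Write once and report the result.
if (file_put_contents($filePath, $file) !== false) {
    echo json_encode("it worked");
} else {
    echo json_encode("it failed");
}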
Edit
Also worth noting that it's more efficient to upload the binary file directly; then you can skip base64_decode. Something like this:
var xhr = new XMLHttpRequest(),
data = new FormData();
data.append("file", f); // You don't need to use a FileReader
// append your post fields
// attach your events
xhr.addEventListener('load', function(e) {});
xhr.upload.addEventListener('progress', function(e) {});
xhr.open('POST', '/process/upload.php', true);
xhr.send(data);
You can view the rest of the events, along with some samples, in the XMLHttpRequest documentation.
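If you go that route, the server side changes as well: the file arrives in $_FILES instead of $_POST. A sketch of a matching upload.php, assuming the field is named "file" as in the snippet above:

<?php
// Sketch of upload.php for the direct FormData upload above.
// "file" matches data.append("file", f); any extra fields appended
// to the FormData show up in $_POST as usual.
if (!isset($_FILES['file']) || $_FILES['file']['error'] !== UPLOAD_ERR_OK) {
    echo json_encode("it failed");
    exit;
}

$extension = strtolower(pathinfo($_FILES['file']['name'], PATHINFO_EXTENSION));
$filePath  = "../_temp/monkey." . $extension;

// move_uploaded_file streams the temp file into place; no decoding needed.
if (move_uploaded_file($_FILES['file']['tmp_name'], $filePath)) {
    echo json_encode("it worked");
} else {
    echo json_encode("it failed");
}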

Related

Upload multiple files to server side with php from dart

.dart
Future<http.Response> uploadFile(String fileName, List<int> fileBytes) async {
try {
var request = new http.MultipartRequest("POST", Uri.parse("https://****/***/fileupload.php"));
for (var i = 0; i < uploadedImage.length; i++) {
selectedFilesBytes = List.from(uploadedImage[i]);
request.files.add(http.MultipartFile.fromBytes('file', selectedFilesBytes, contentType: MediaType('application', 'octet-stream'), filename: files[i].name));
}
print("request.files.length");
print(request.files.length);
var streamedResponse = await request.send();
return await http.Response.fromStream(streamedResponse);
} catch (e) {
print(e);
}
fileupload.php
<?php
// Count total files
$countfiles = count($_FILES['file']);
error_log($countfiles);
// Looping all files
for($i=0;$i<$countfiles;$i++){
$filename = $_FILES['file']['name'][$i];
// Upload file
move_uploaded_file($_FILES['file']['tmp_name'][$i],'upload/'.$filename);
}
?>
Hi,
I have a problem with file upload from Dart to PHP. When I print out print(request.files.length); it gives me the number of files I chose for upload.
But on the PHP side $countfiles = count($_FILES['file']); always returns 1.
Why could that be?
Thanks for the answer, @CBroe. It works (naming the upload field file[] makes PHP collect all the files into arrays under $_FILES['file']). I added the updated code below.
.dart
Future<http.Response> uploadFile(String fileName, List<int> fileBytes) async {
try {
var request = new http.MultipartRequest("POST", Uri.parse("https://****/***/fileupload.php"));
for (var i = 0; i < uploadedImage.length; i++) {
selectedFilesBytes = List.from(uploadedImage[i]);
request.files.add(http.MultipartFile.fromBytes('file[]', selectedFilesBytes, contentType: MediaType('application', 'octet-stream'), filename: files[i].name));
}
print("request.files.length");
print(request.files.length);
var streamedResponse = await request.send();
return await http.Response.fromStream(streamedResponse);
} catch (e) {
print(e);
}
.php
<?php
// Count total files
$countfiles = count($_FILES['file']['name']);
error_log($countfiles);
// Looping all files
for($i=0;$i<$countfiles;$i++){
$filename = $_FILES['file']['name'][$i];
// Upload file
move_uploaded_file($_FILES['file']['tmp_name'][$i],'upload/'.$filename);
}
?>

Export multiple Highchart as Zip file to my project directory

I have successfully created a total of 4 different charts on one of my pages.
I have a button "Download Selected Chart", and on click of this button I need to create one ZIP file with the selected charts as PDFs. That is, if I select three charts, it should create a ZIP with three chart PDFs.
Highcharts provides functionality to download a chart as PDF, SVG, PNG, etc., but I am not able to do this for multiple selected charts.
I checked the Highcharts export server documentation -
http://www.highcharts.com/docs/export-module/setting-up-the-server - but I don't understand how to use it.
Please help me if anyone has an idea how I can do this.
An example solution to this would be:
Create your charts
var options1 = {
// ...
};
$('#chart1').highcharts(options1);
Ask the Highcharts export server to generate an image of the chart
var exportUrl = 'http://export.highcharts.com/';
var d1 = $.ajax({
url: exportUrl,
// ...
});
Fetch the contents of the generated image
$.when(d1).done(function(v1) {
var p1 = new JSZip.external.Promise(function (resolve, reject) {
JSZipUtils.getBinaryContent(exportUrl + v1[0], function(err, data) {
// ...
});
});
// ...
Use JSZip to construct and save the ZIP file with the contents of the generated images
// ...
Promise.all([p1]).then(function(values) {
var zip = new JSZip();
zip.file("chart1.png", values[0], {binary:true});
zip.generateAsync({type:"blob"})
.then(function(content) {
saveAs(content, "charts.zip");
});
});
});
You can see this (very scrappy) JSFiddle demonstration of how you could get the ZIP file. The steps are as described above, but not connected to any button and instead executed immediately upon entering the site.
Here I post my solution, which works for me.
On button click, get the SVG of each selected chart and post it to the export script:
// get selected checkbox
$('.selected_checkbox').each(function( index ){
var obj = {},chart;
chart = $('#each_chart').highcharts();
obj.svg = chart.getSVG();
obj.type = 'image/png';
obj.async = true;
obj.id = chart_id;
// ajax call for svg
$.ajax({
type: "POST",
url: url,// u need to save svg in your folder
data: obj,
success: function(data)
{
// redirect to php function and create zip
window.location = 'php function call';
}
});
});
The PHP that the AJAX call posts to, which saves the SVG:
ini_set('magic_quotes_gpc', 'off');
$type = $_POST['type'];
$svg = (string) $_POST['svg'];
$filename = 'name';
$id = $_POST['id'];
if (get_magic_quotes_gpc()) {
$svg = stripslashes($svg);
}
// check for malicious attack in SVG
if(strpos($svg,"<!ENTITY") !== false || strpos($svg,"<!DOCTYPE") !== false){
exit("the posted SVG could contain code for a malicious attack");
}
$tempName = $filename.'_'.$id;
// allow no other than predefined types
if ($type == 'image/png') {
$typeString = '-m image/png';
$ext = 'png';
} elseif ($type == 'image/jpeg') {
$typeString = '-m image/jpeg';
$ext = 'jpg';
} elseif ($type == 'application/pdf') {
$typeString = '-m application/pdf';
$ext = 'pdf';
} elseif ($type == 'image/svg+xml') {
$ext = 'svg';
} else { // prevent fallthrough from global variables
$ext = 'txt';
}
$outfile = APPPATH."tempp/$tempName.$ext";
if (!file_put_contents(APPPATH."tempp/$tempName.svg", $svg)) {
die("Couldn't create temporary file.");
}
In the AJAX success function's redirect target, you then need to read the files from that directory, create a PDF from each SVG, and add each PDF to the ZIP.
This is an example solution; you have to change the code as per your requirements. A rough sketch of that last step follows.
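The sketch assumes the SVG files were saved under the APPPATH . "tempp/" directory, as in the code above; it bundles whatever chart files are there into a ZIP with PHP's ZipArchive and leaves the SVG-to-PDF conversion out, since that needs an external converter:

<?php
// Sketch: bundle the previously saved chart files into one ZIP.
// SVG-to-PDF conversion is not shown; a real implementation would run
// an external converter on each SVG before adding the PDFs here.
$tempDir = APPPATH . "tempp/";
$zipPath = $tempDir . "charts.zip";

$zip = new ZipArchive();
if ($zip->open($zipPath, ZipArchive::CREATE | ZipArchive::OVERWRITE) !== true) {
    die("Couldn't create ZIP file.");
}

foreach (glob($tempDir . "*") as $file) {
    $ext = strtolower(pathinfo($file, PATHINFO_EXTENSION));
    if ($ext === 'svg' || $ext === 'pdf') {
        // Store each chart file under its base name inside the archive.
        $zip->addFile($file, basename($file));
    }
}

$zip->close();

// Send the archive to the browser for download.
header('Content-Type: application/zip');
header('Content-Disposition: attachment; filename="charts.zip"');
header('Content-Length: ' . filesize($zipPath));
readfile($zipPath);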

Move uploaded file fails after ajax request

I know this issue has been tackled a few times, but no solution works for me.
I have a JavaScript function which pulls a file referenced by an input element, and it is as follows:
function imagePreload(str)
{
var timestamp = new Date().getTime();
str = str + "&timestamp=" + timestamp;
var key = [];
var value = [];
var queriesarray = str.split('&');
for(i = 0; i < queriesarray.length; i++)
{
var pair = queriesarray[i].split('=');
key[i]= pair[0];
value[i]= pair[1];
}
for(i = 0; i < queriesarray.length; i++)
{
if (key[i]=="menu_id") {var menuid = value[i];}
if (key[i]=="menucategories_id") {var catid = value[i];}
}
for(i = 0; i < queriesarray.length; i++)
{
if (value[i]=="business") {var fileurlfield = "uploadbizimageid";}
if (value[i]=="category") {var fileurlfield = "uploadcatimageid" + catid;}
if (value[i]=="item") {var fileurlfield = "uploaditemimageid" + menuid;}
}
var fileInput = document.getElementById(fileurlfield);
var file = fileInput.files[0];
var imageType = /image.*/;
if (file.type.match(imageType)) {
var reader = new FileReader();
reader.onload = function(e) {
var img = new Image();
img.src = reader.result;
}
reader.readAsDataURL(file);
} else {
alert("File not supported!");
}
document.getElementById("maskid").style.display = "block";
document.getElementById("imageuploadcontainerid").style.display = "block";
var filetosend = new FormData();
filetosend.append( 'image', file);
$.ajax({
url: "index.php?option=com_jumi&fileid=13&format=raw&" + encodeURI(str),
type: "POST",
data: filetosend,
processData: false,
contentType: false,
error: function (jqXHR, textStatus, errorThrown) {
alert("AJAX error: " + textStatus + ' : ' + errorThrown);
},
success: function(html) {alert("Orwight!");
document.getElementById('imageuploadcontainerid').innerHTML = html;
}
});
}
As you can see, it is designed to make an AJAX call to a PHP file which is supposed to save that image file to a directory on the same web server running the above function.
The PHP in that file looks like this:
$rest_id = $_GET['rest_id'];
$menu_id = $_GET['menu_id'];
$menucategories_id = $_GET['menucategories_id'];
$imagetype = $_GET['imagetype'];
if($imagetype=="business")
{
$db = &JFactory::getDBO();
$db->setQuery("SELECT * FROM g56s_restaurants WHERE rest_id = '$rest_id'");
$det = $db->loadObject();
$ext = pathinfo($_FILES['image']['name'], PATHINFO_EXTENSION);
$target_path = "/images/restaurants/".$rest_id."/";
$target_path = $target_path ."businesslogo.".$ext."";
echo $target_path;
if(move_uploaded_file($_FILES['image']['tmp_name'], $target_path)) {
echo "The file ".basename( $_FILES['image']['name'])." has been uploaded";
} else {
echo "Not uploaded because of error #".$_FILES["file"]["error"];
}
}
Every time I call this script, the upload fails and no error is reported (i.e. no error number). A var_dump shows that the file error variable has a value of 0, the file size is reported to be in the same order as that of the original file, and it has a tmp name. So in other words the file IS there in the TMP directory.
Permissions on the directory being written to are 777. There are no cross-domain issues (and I guess no CORS issues) since the script is called from the same website (the PHP is actually in a JUMI application in a Joomla 3.4 website). However, the script ALWAYS fails to upload the file: the page returns "/images/restaurants/1/businesslogo.jpgNot uploaded because of error #." (since I also echoed the target_path before the error string).
Does anyone know the reason for this and how to get the script to upload correctly? I am completely stuck on this issue because, as far as I can see, everything should work.
I solved the issue quicker than I thought. It turns out that I also have to specify the document root in the target path, so I amended
$target_path = "/images/restaurants/".$rest_id."/";
as
$target_path = $_SERVER['DOCUMENT_ROOT']."/images/restaurants/".$rest_id."/";
and it now works :-)
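For reference, a sketch of the corrected upload handling with the document root in the path and a check that the target directory exists (field and path names are taken from the question; the directory creation is an extra precaution, not part of the original fix):

<?php
// Sketch of the corrected move_uploaded_file handling from above.
$rest_id = (int) $_GET['rest_id'];
$ext     = pathinfo($_FILES['image']['name'], PATHINFO_EXTENSION);

// Build an absolute filesystem path; a bare "/images/..." path is resolved
// against the filesystem root, not the web root.
$target_dir  = $_SERVER['DOCUMENT_ROOT'] . "/images/restaurants/" . $rest_id . "/";
$target_path = $target_dir . "businesslogo." . $ext;

// Make sure the restaurant's directory exists before moving the file.
if (!is_dir($target_dir)) {
    mkdir($target_dir, 0755, true);
}

if (move_uploaded_file($_FILES['image']['tmp_name'], $target_path)) {
    echo "The file " . basename($_FILES['image']['name']) . " has been uploaded";
} else {
    echo "Not uploaded because of error #" . $_FILES['image']['error'];
}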

How to decode and save a file sent to php from js

I have a drag and drop uploader. I am using FileReader to send the data as a data URL.
var files = evt.dataTransfer.files;
for (var i = 0, f; f = files[i]; i++) {
var start = 0;
var stop = files[0].size - 1;
var reader2 = new FileReader();
var ext = f.name.substring(f.name.indexOf(".")+1);
reader2.onloadend = function(evt) {
if (evt.target.readyState == FileReader.DONE) { // DONE == 2
$.post("/process/upload.php",{"blob":evt.target.result,"extension":ext},function(data){
console.log(data);
});
}
};
var blob = f.slice(start, stop + 1);
reader2.readAsDataURL(blob);
}
PHP receives it, but once I decode it I get "null" returned:
$extension = $_POST['extension'];
$file = base64_decode($_POST['blob']);
$filePath = "../tmp/monkey.".$extension;
echo json_encode(base64_decode($_POST['blob']));
I also tried:
if(file_put_contents($filePath,$file)){
echo json_encode("it worked");
}else{
echo json_encode("it failed");
}
EDIT: The POST data is intact all the way up to the point where I decode it. So once I decode it, how do I determine that the file is intact and can be saved?
Is there a better way to decode and save the file?
What exactly am I doing wrong?
Thanks in advance!
$_POST['extension'] and $_POST['blob'] don't magically get populated into the $_POST array when posting in JSON format. In fact, based on your JSON format, the JSON will decode into an object, not an array.
You will need to get the contents directly from the input stream like this:
$post = file_get_contents('php://input');
$post_obj = json_decode($post);
$extension = $post_obj->extension;
$file = base64_decode($post_obj->blob);
Or using $HTTP_RAW_POST_DATA variable like:
$post_obj = json_decode($HTTP_RAW_POST_DATA);
$extension = $post_obj->extension;
$file = base64_decode($post_obj->blob);
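Putting that together, a sketch of the whole handler (this assumes the client really does post a JSON body; note also that readAsDataURL produces a string with a "data:<mime>;base64," prefix, which has to be stripped before base64_decode will give you the original bytes):

<?php
// Sketch of upload.php reading a raw JSON body, per the answer above.
$post     = file_get_contents('php://input');
$post_obj = json_decode($post);

if ($post_obj === null) {
    echo json_encode("could not decode request body");
    exit;
}

$extension = $post_obj->extension;
$blob      = $post_obj->blob;

// readAsDataURL yields "data:<mime>;base64,<data>"; keep only the data part.
$comma = strpos($blob, ',');
if ($comma !== false) {
    $blob = substr($blob, $comma + 1);
}

$file     = base64_decode($blob);
$filePath = "../tmp/monkey." . $extension;

if ($file !== false && file_put_contents($filePath, $file) !== false) {
    echo json_encode("it worked");
} else {
    echo json_encode("it failed");
}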

How to upload multiple file under 1 http request

Using HTML5 chunking, I can upload a file in smaller pieces. But the problem starts when it uses multiple HTTP POST requests, which will slow the computer down or probably crash it. Is there any way to have the split file under one HTTP request, so if I have 5 files it would be only 5 HTTP requests even though I use HTML5 chunk splitting?
E.g.: if I upload 5 files, each file will be split into 1 MB chunks, so if the first file is 10 MB, it will become 10 pieces of 1 MB chunks. And the problem is, each chunk will be one HTTP request, so just the first file will be 10 HTTP requests.
Imagine if I have 1 GB files; that will become 1000 HTTP requests and slow down the computer.
This is example code:
//Prepare element progress after the page load completely
var uploaders = [];
var totalChunks = 0;
var progress;
var bars;
$(document).ready(function() {
//progress = document.querySelector('progress');
//bars = document.querySelector('#bars');
});
//function for after the button is clicked, slice the file
//and call upload function
function sendRequest() {
//clean the screen
//bars.innerHTML = '';
var file = document.getElementById('fileToUpload');
for(var i = 0; i < file.files.length; i++) {
var blob = file.files[i];
var originalFileName = blob.name;
var filePart = 0
const BYTES_PER_CHUNK = 10 * 1024 * 1024; // 10MB chunk sizes.
const SIZE = blob.size;
var start = 0;
var end = BYTES_PER_CHUNK;
totalChunks = Math.ceil(SIZE / BYTES_PER_CHUNK);
while( start < SIZE ) {
if (blob.webkitSlice) {
//for Google Chrome
var chunk = blob.webkitSlice(start, end);
} else if (blob.mozSlice) {
//for Mozilla Firefox
var chunk = blob.mozSlice(start, end);
}
uploadFile(chunk, originalFileName, filePart, totalChunks, i);
filePart++;
start = end;
end = start + BYTES_PER_CHUNK;
}
}
}
function uploadFile(blobFile, fileName) {
var fd = new FormData();
fd.append("fileToUpload", blobFile);
var xm = $.ajax({
url: "upload.php"+"?"+"file1="+fileName,
type: "POST",
data: fd,
processData: false,
contentType: false,
});
}
function uploadFile(blobFile, fileName, filePart, totalChunks, divBarsSelector) {
if(filePart == 0) {
bars = document.querySelector('#bars' + divBarsSelector);
}
var progress = document.createElement('progress');
progress.min = 0;
progress.max = 100;
progress.value = 0;
bars.appendChild(progress);
var fd = new FormData();
fd.append("fileToUpload", blobFile);
var xhr = new XMLHttpRequest();
xhr.open("POST", "upload.php"+"?"+"file="+fileName + filePart, true);
xhr.onload = function(e) {
//make sure if finish progress bar at 100%
progress.value = 100;
//counter if everything is done using stack
uploaders.pop();
if (!uploaders.length) {
bars.appendChild(document.createElement('br'));
bars.appendChild(document.createTextNode('DONE :)'));
//mergeFile(fileName, totalChunks);
}
};
// Listen to the upload progress for each upload.
xhr.upload.onprogress = function(e) {;
if (e.lengthComputable) {
progress.value = (e.loaded / e.total) * 100;
}
};
uploaders.push(xhr);
xhr.send(fd);
}
and the server-side part receiving it is upload.php:
$target_path = "uploads/";
$tmp_name = $_FILES['fileToUpload']['tmp_name'];
$size = $_FILES['fileToUpload']['size'];
$name = $_FILES['fileToUpload']['name'];
$originalName = $_GET['file'];
print_r("*******************************************\n");
print_r($originalName);
print_r("\n");
print_r($_FILES);
print_r("\n");
print_r("*******************************************\n");
$target_file = $target_path . basename($name);
//Result File
$complete = $originalName;
$com = fopen("uploads/".$complete, "ab");
error_log($target_path);
if ( $com ) {
// Read binary input stream and append it to temp file
$in = fopen($tmp_name, "rb");
if ( $in ) {
while ( $buff = fread( $in, 1048576 ) ) {
fwrite($com, $buff);
}
}
fclose($in);
fclose($com);
}
After reading the motivation in your comment I would like to point out a few 'misconceptions'. First of all, it's not advisable to split a file up and then upload all the split parts at once. The point of splitting a file up is not just to bypass the PHP upload limit (which, if that is the real issue, should simply be raised, and that may be the actual solution*); rather, uploading the parts sequentially keeps the load on the client computer minimal, especially if you are considering uploading 1 GB of content. Either way, there is really no reason to split a file up and then combine it again in a single request (this would be theoretically possible with XMLHttpRequest2, but if you can use XMLHttpRequest2 then you shouldn't worry about splitting the file up at all, as it provides the necessary controls to upload multiple files cleanly).
*Please note that if you do that, you will have to make sure your PHP memory settings are set up correctly (to prevent PHP from trying to load the upload entirely into memory before writing it to a temp file, although this shouldn't happen on recent versions of PHP with the default settings, I believe). (I feel obliged to add that I haven't worked with PHP and PHP uploads for a few years, so I might very well be mistaken with this last comment.)
Either way, chunking the files to about 5-25 MB (depending on how good you expect the connection to be :P) plus sequential uploads (with a nice progress bar if XMLHttpRequest2 is available, otherwise a progress bar per chunk) seems a sensible way to go while preventing the browser from getting overloaded. (Oh, and if you need to support older browsers, I would really advise you to look into Flash uploaders, because despite Apple preaching that Flash is evil, on the majority of (outdated) computers it will give the best experience by far.)
Java uploaders [namely, JumpLoader] - I am not saying "use them", but learn how they work. So far, the best upload practice I have seen is: 1) split files into chunks of a certain size, 2) upload the chunks sequentially (additionally providing hashes of the chunks, if the data is sensitive), 3) unite the chunks server-side (and verify data integrity through the hashes, if you are using them).
That way you bypass PHP's upload_max_filesize restriction. Otherwise, I personally don't see any merit in splitting the data into chunks in the first place.
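To make step 3 concrete, here is a rough server-side sketch that receives one chunk per request, checks an optional hash, and appends the chunk to the target file; the field names chunkIndex, totalChunks and chunkHash are made up for illustration and are not part of any existing API:

<?php
// Sketch: receive one chunk per request and append it to the target file.
// chunkIndex / totalChunks / chunkHash are illustrative field names only.
$fileName    = basename($_GET['file']);
$chunkIndex  = (int) $_POST['chunkIndex'];
$totalChunks = (int) $_POST['totalChunks'];
$tmpName     = $_FILES['fileToUpload']['tmp_name'];

// Optional integrity check: compare this chunk's hash with the one sent.
if (isset($_POST['chunkHash']) && md5_file($tmpName) !== $_POST['chunkHash']) {
    http_response_code(400);
    exit("chunk hash mismatch");
}

// Append the chunk to a partial file (this assumes chunks arrive in order).
$out = fopen("uploads/" . $fileName . ".part", $chunkIndex === 0 ? "wb" : "ab");
fwrite($out, file_get_contents($tmpName));
fclose($out);

// When the last chunk has been written, move the finished file into place.
if ($chunkIndex === $totalChunks - 1) {
    rename("uploads/" . $fileName . ".part", "uploads/" . $fileName);
    echo "done";
}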
Try this:
<script type="text/javascript">
//Prepare element progress after the page load completely
var uploaders = [];
var totalChunks = 0;
var progress;
var bars;
$(document).ready(function() {
//progress = document.querySelector('progress');
//bars = document.querySelector('#bars');
});
//function for after the button is clicked, slice the file
//and call upload function
function sendRequest() {
//clean the screen
//bars.innerHTML = '';
var file = document.getElementById('fileToUpload');
for(var i = 0; i < file.files.length; i++) {
var blob = file.files[i];
var originalFileName = blob.name;
var filePart = 0
const BYTES_PER_CHUNK = 10 * 1024 * 1024; // 10MB chunk sizes.
const SIZE = blob.size;
var start = 0;
var end = BYTES_PER_CHUNK;
totalChunks = Math.ceil(SIZE / BYTES_PER_CHUNK);
while( start < SIZE ) {
if (blob.webkitSlice) {
//for Google Chrome
var chunk = blob.webkitSlice(start, end);
} else if (blob.mozSlice) {
//for Mozilla Firefox
var chunk = blob.mozSlice(start, end);
}
uploadFile(chunk, originalFileName, filePart, totalChunks, i);
filePart++;
start = end;
end = start + BYTES_PER_CHUNK;
}
}
}
function uploadFile(blobFile, fileName) {
var fd = new FormData();
fd.append("fileToUpload", blobFile);
var xm = $.ajax({
url: "upload.php"+"?"+"file1="+fileName,
type: "POST",
data: fd,
processData: false,
contentType: false,
});
}
function uploadFile(blobFile, fileName, filePart, totalChunks, divBarsSelector) {
if(filePart == 0) {
bars = document.querySelector('#bars' + divBarsSelector);
}
var progress = document.createElement('progress');
progress.min = 0;
progress.max = 100;
progress.value = 0;
bars.appendChild(progress);
var fd = new FormData();
fd.append("fileToUpload", blobFile);
var xhr = new XMLHttpRequest();
xhr.open("POST", "upload.php"+"?"+"file="+fileName + filePart, true);
xhr.onload = function(e) {
//make sure if finish progress bar at 100%
progress.value = 100;
//counter if everything is done using stack
uploaders.pop();
if (!uploaders.length) {
bars.appendChild(document.createElement('br'));
bars.appendChild(document.createTextNode('DONE :) '));
//mergeFile(fileName, totalChunks);
}
};
// Listen to the upload progress for each upload.
xhr.upload.onprogress = function(e) {;
if (e.lengthComputable) {
progress.value = (e.loaded / e.total) * 100;
}
};
uploaders.push(xhr);
xhr.send(fd);
}
</script>
