I have a JSON file in text form (one object per line) with sample data:
{"msisdn":"xxxxxxxxxx","productID":"YYYYYYYY","subdate":"2018-09-28 16:30:35","Status":"1"}
{"msisdn":"xxxxxxxxxx","productID":"YYYYYYYY","subdate":"2018-09-28 16:30:35","Status":"1"}
and I have PHP code that checks the JSON file for an existing msisdn:
/**
 * Wraps a decoded JSON document as an object graph (nested arrays become
 * nested JSONObject instances) and re-serializes it unchanged.
 */
#[\AllowDynamicProperties] // parsed as a comment on PHP 7; silences 8.2+ dynamic-property deprecation
class JSONObject implements JsonSerializable
{
    /**
     * @param string|false $json Optional JSON text to decode into this object.
     */
    public function __construct($json = false)
    {
        if ($json)
            $this->set(json_decode($json, true));
    }

    /**
     * Copy key/value pairs onto this object.
     *
     * @param mixed $data Decoded JSON; ignored unless it is an array.
     */
    public function set($data)
    {
        // Bug fix: json_decode() returns null for invalid JSON, and the
        // original foreach over null raised a warning on every bad line.
        if (!is_array($data)) {
            return;
        }
        foreach ($data AS $key => $value) {
            if (is_array($value)) {
                $sub = new JSONObject;
                $sub->set($value);
                $value = $sub;
            }
            $this->{$key} = $value;
        }
    }

    /**
     * @return object All dynamic properties, so json_encode() round-trips.
     */
    #[\ReturnTypeWillChange] // comment on PHP 7; silences 8.1+ tentative-return-type deprecation
    public function jsonSerialize()
    {
        return (object) get_object_vars($this);
    }
}
/**
 * Return true when $msisdn has a subscription record in $file that is
 * fewer than 31 days old; false otherwise (including missing file).
 *
 * The file holds one JSON object per line:
 *   {"msisdn":"...","productID":"...","subdate":"Y-m-d H:i:s","Status":"1"}
 *
 * @param string $file   Path to the newline-delimited JSON file.
 * @param string $msisdn Subscriber number to look up.
 * @return bool
 */
function checkmsisdnallreadyexists($file, $msisdn)
{
    if (!is_file($file)) {
        return false;
    }
    if (!($handle = fopen($file, 'r'))) {
        return false;
    }
    while (($line = fgets($handle)) !== false) {
        // Decode each line directly; the old code round-tripped through
        // json_encode(json_decode(...)) and a JSONObject per line, and
        // warned on the blank/invalid line produced at EOF by feof().
        $record = json_decode(trim($line), true);
        if (!is_array($record) || !isset($record['msisdn'], $record['subdate'])) {
            continue; // skip blank or malformed lines silently
        }
        if ($record['msisdn'] == $msisdn) {
            $subscribed = date_create($record['subdate']);
            $now = date_create(date('Y-m-d H:i:s'));
            $diff = date_diff($subscribed, $now);
            // Active window: fewer than 31 whole days since subscription.
            if ($diff->format('%a') < 31) {
                fclose($handle);
                return true;
            }
        }
    }
    fclose($handle);
    return false;
}
Everything worked fine initially, but once my JSON file grew past 30,000 records we started hitting read timeouts. We receive a huge volume of requests on the server — roughly 200k per hour — which hurts the efficiency of the whole process.
Can anyone suggest a solution or an alternate method?
Note: I can't use database here
You can use file() instead of fopen() and fclose()
/**
 * file()-based variant: true when $msisdn has a record younger than 31
 * days, false otherwise.
 *
 * @param string $msisdn Subscriber number to look up.
 * @param string $file   Path to the newline-delimited JSON file; kept as
 *                       an optional parameter for backward compatibility
 *                       with the original hard-coded-path signature.
 * @return bool
 */
function checkmsisdnallreadyexists($msisdn, $file = 'give file path'){
    // Bug fix: the original warned when the path was invalid and then
    // iterated over false; bail out cleanly instead.
    if (!is_file($file)) {
        return false;
    }
    $file_array = file($file, FILE_IGNORE_NEW_LINES | FILE_SKIP_EMPTY_LINES);
    foreach ($file_array as $arr) {
        $msisdn_array = json_decode($arr, true);
        // Skip malformed lines instead of emitting notices.
        if (!is_array($msisdn_array) || !isset($msisdn_array['msisdn'], $msisdn_array['subdate'])) {
            continue;
        }
        if ($msisdn_array['msisdn'] == $msisdn) {
            $date1 = date_create($msisdn_array['subdate']);
            $date2 = date_create(date('Y-m-d H:i:s'));
            $diff = date_diff($date1, $date2);
            if ($diff->format('%a') < 31) {
                return true;
            }
        }
    }
    // Bug fix: the original fell off the end and implicitly returned null.
    return false;
}
Related
{"Files": [
{"file_name": "Text_file.txt","path": "to_folder","file_id": "abc12345"},
{"file_name": "Img.jpg","path": "to_folder","file_id": "abc12346"}
]}
I want to save information of file uploads to JSON file using php.
//Code: PHP class : Save()
<?php
/**
 * Tiny append-only JSON file store used to record upload metadata.
 *
 * Bug fixes vs. the original:
 *  - write() used to overwrite $this->jsonarray with the json_encode()
 *    STRING, so a second push() on the same instance operated on a string
 *    and corrupted/reset the store (the intermittent "resets the JSON
 *    file" symptom); encoding is now confined to flush().
 *  - The "a || b && c" condition had a precedence bug that made the
 *    null/collection check dead code; the effective test was just
 *    empty($this->jsonarray), which is what is written now.
 *  - Writes take LOCK_EX so rapid-fire AJAX uploads don't interleave.
 */
class Save
{
    // $file: backing store path; $jsonarray: decoded contents (always an
    // array after read()); $temp: object mirror used for property_exists().
    private $file = './files/data.json', $jsonstring = null, $jsonarray = array(), $temp = array(), $data = array();

    public function __construct()
    {
        $this->init();
    }

    // Create the backing file on first use, then load whatever it holds.
    private function init()
    {
        if (!file_exists($this->file)) {
            touch($this->file);
        }
        $this->read();
    }

    // Load and decode the store. Missing, empty, or corrupted JSON yields
    // an empty array (the original treated corrupted JSON the same way).
    private function read()
    {
        $this->jsonstring = file_get_contents($this->file);
        $this->jsonarray = empty($this->jsonstring) ? array() : json_decode($this->jsonstring, true);
        if (!is_array($this->jsonarray)) {
            $this->jsonarray = array();
        }
        $this->temp = (object) $this->jsonarray;
    }

    /**
     * Append $data, optionally under a named collection.
     *
     * Returns the same status codes as before: 1 first collection write,
     * 2 append to existing collection, 3 new collection, 4 first root
     * write, 5 root append.
     */
    private function write($data, $collection = false)
    {
        if ($collection) {
            if (empty($this->jsonarray)) {
                $this->jsonarray = array($collection => array($data));
                $this->flush();
                return 1;
            } elseif (property_exists($this->temp, $collection)) {
                $this->jsonarray[$collection][] = $data;
                $this->flush();
                return 2;
            } else {
                $this->jsonarray[$collection] = array($data);
                $this->flush();
                return 3;
            }
        } else {
            if (empty($this->jsonarray)) {
                $this->jsonarray = array($data);
                $this->flush();
                return 4;
            } else {
                $this->jsonarray = array_values($this->jsonarray);
                $this->jsonarray[] = $data;
                $this->flush();
                return 5;
            }
        }
    }

    // Persist the current array (LOCK_EX guards against concurrent
    // writers) and keep the property_exists() mirror in sync so repeated
    // pushes on one instance keep working.
    private function flush()
    {
        file_put_contents($this->file, json_encode($this->jsonarray), LOCK_EX);
        $this->temp = (object) $this->jsonarray;
    }

    /**
     * Validate and store an entry.
     *
     * @param array        $data       Record to append.
     * @param string|false $collection Optional collection name.
     * @return int|false Status code 1-5, or false for non-array input.
     */
    public function push($data, $collection = false)
    {
        if (is_array($data)) {
            $a = $this->write($data, $collection);
            if ($a) {
                return $a;
            }
            return false;
        }
        return false;
    }

    /**
     * @param string|false $collection
     * @return string JSON of the whole store, or of one collection
     *                ("null" when the collection does not exist).
     */
    public function get($collection = false)
    {
        if ($collection) {
            return json_encode(isset($this->jsonarray[$collection]) ? $this->jsonarray[$collection] : null);
        }
        return json_encode($this->jsonarray);
    }
}
The problem: when I upload a single file it works fine 3–6 times, then the JSON file gets reset because of a null value or an error in the JSON format. And when I upload 30 files together, sending each file automatically with the JS code [...files].forEach(upload(file)), it behaves oddly. Here is the callback from the write function.
Update JS:
// Normalize a change/drop event to its FileList and hand non-empty
// selections off to handleFiles().
function handelSelect(e) {
    const files = e.type == 'drop'
        ? e.originalEvent.dataTransfer.files
        : e.target.files;
    if (files.length > 0) handleFiles(files);
}
//handelFiles(files) send files using ajax(single request).
Where am I doing it wrong?
After examining the execution time i came to conclusion:
Ajax sends requests far faster than PHP can write to the file;
https://www.php.net/manual/en/function.file-put-contents.php
Benchmark below:
file_put_contents() for 1,000,000 writes - average of 3 benchmarks:
real 0m3.932s
user 0m2.487s
sys 0m1.437s
fopen() fwrite() for 1,000,000 writes, fclose() - average of 3 benchmarks:
real 0m2.265s
user 0m1.819s
sys 0m0.445s
so I delayed the request in javascript.
// Stagger the uploads 2.5 s apart so PHP finishes each write before the
// next request arrives; keys of `files` are the numeric indices, and the
// string key coerces to a number in `key * 2500` exactly as before.
for (const key of Object.keys(files)) {
    setTimeout(() => {
        uploadFile(files[key], key);
    }, key * 2500);
}
If anyone has a better solution please share.
In the following code, I don't get 'handled' in my output. I checked that the file handle is a resource, that the file gets opened, and that the constructor of FqReader is called. But when executing FqReader::getread() I see no output and the returned array is empty. The first while loop also does not get executed when I put while(1) in place of the logical test shown in the code.
<?php
/**
 * Opens a FASTQ file for reading, transparently handling gzip input.
 * The open stream is exposed as the public $handle property.
 */
class FastqFile {
    public $handle; // declared so PHP 8.2+ does not warn about dynamic properties

    /**
     * @param string $filename Path; a ".gz" suffix selects gzopen().
     */
    function __construct($filename) {
        // Bug fix: the original "return"ed from the constructor, which PHP
        // silently ignores — only the property assignment ever mattered.
        // It also lacked braces on the else branch.
        if (substr($filename, -3, 3) == '.gz') {
            $this->handle = gzopen($filename, 'r');
        } else {
            $this->handle = fopen($filename, 'r');
        }
    }
}
/**
 * Iterates FASTQ records (4 lines each) from an open stream resource.
 */
class FqReader {
    /**
     * @param resource $file_handle Open stream positioned at a record start.
     */
    function __construct($file_handle) {
        $this->handle = $file_handle;
    }

    /**
     * Generator yielding array($header, $bases, $scores) per record.
     */
    function getread() {
        // Bug fix: "$header = fgets(...) !== false" bound as
        // "$header = (fgets(...) !== false)" and assigned a boolean;
        // the added parentheses keep the actual line.
        while (($header = fgets($this->handle)) !== false) {
            echo "handled";
            $bases = fgets($this->handle);
            $plus = fgets($this->handle);   // "+" separator line, not yielded
            $scores = fgets($this->handle);
            // Bug fix: the original yielded $plus where $bases belongs.
            yield array($header, $bases, $scores);
        }
    }
}
// Debug driver: open the file named on the command line, dump the wrapper
// object, then dump the (not-yet-started) generator from FqReader.
$inputPath = $argv[1];
$fastqFile = new FastqFile($inputPath);
var_dump($fastqFile);
$fqReader = new FqReader($fastqFile);
var_dump($fqReader->getread());
It outputs:
object(FastqFile)#1 (1) {
["handle"]=>
resource(5) of type (stream)
}
object(Generator)#3 (0) {
}
$file_handle is a FastqFile instance. Then you pass that object to fgets(), but you need to pass that object's handle to fgets(). For instance:
/**
 * Reads FASTQ records from a wrapper object that exposes an open stream
 * as its public $handle property (e.g. a FastqFile instance).
 */
class FqReader {
    function __construct($file_handle) {
        // Unwrap the stream: fgets() needs the resource, not the object.
        $this->handle = $file_handle->handle;
    }

    /**
     * Generator yielding array($header, $plus, $scores) per 4-line record.
     */
    function getread() {
        // Bug fix: without parentheses, "!== false" bound first and
        // $header received a boolean instead of the line text.
        while (($header = fgets($this->handle)) !== false) {
            echo "handled";
            $bases = fgets($this->handle);  // read to advance the stream
            $plus = fgets($this->handle);
            $scores = fgets($this->handle);
            // NOTE(review): yields $plus (the "+" line), not $bases —
            // kept as the author wrote it; confirm which was intended.
            yield array($header, $plus, $scores);
        }
    }
}
The usage of yield was not showing you that error.
Exactly, this works like a charm:
(using a function to open file, not a class)
/**
 * Open a FASTQ file for reading; ".gz" files go through zlib.
 *
 * @param string $filename
 * @return resource Open read handle.
 */
function openfq($filename)
{
    $isGzip = substr($filename, -3, 3) == '.gz';
    if ($isGzip) {
        return gzopen($filename, 'r');
    }
    return fopen($filename, 'r');
}
/**
 * Streams FASTQ records from an already-open file handle.
 */
class FqReader {
    function __construct($file_handle) {
        $this->handle = $file_handle;
    }

    /**
     * Generator: one array($header, $bases, $scores) per 4-line record.
     */
    function getread() {
        for (;;) {
            $header = fgets($this->handle);
            if ($header === false) {
                break; // end of file
            }
            echo "handled";
            $bases = fgets($this->handle);
            $plus = fgets($this->handle);   // "+" separator, not yielded
            $scores = fgets($this->handle);
            yield array($header, $bases, $scores);
        }
    }
}
// Driver: open the file named on the command line, dump the raw handle
// and an unstarted generator, then iterate a fresh generator and dump
// every FASTQ record (two getread() calls, exactly as before).
$inputPath = $argv[1];
$inputHandle = openfq($inputPath);
var_dump($inputHandle);
$fqReader = new FqReader($inputHandle);
var_dump($fqReader->getread());
foreach ($fqReader->getread() as $read) {
    var_dump($read);
}
Hello, I am trying to get all the data from an XML file, so I have used an xml-to-assoc function. It works for a 5 MB file but not for files larger than 9 MB.
Here is my code:
I have modified this function to get json code,
// NOTE(review): this looks like a mangled transcription of the classic
// XMLReader xml2assoc() helper and cannot work as posted: $node['tag'] is
// read but never written, $attributes is collected then discarded, $name
// is unused, and $xmlArr is used without ever being defined. Kept
// byte-identical here with the problems flagged inline.
//
// Intended purpose (from the surrounding question): recursively walk an
// XMLReader stream and build an associative tree of the document.
function xml2assoc($xml, $name)
{
$tree = null;
while($xml->read())
{
// Closing tag: hand the subtree built so far back to the caller.
if($xml->nodeType == XMLReader::END_ELEMENT)
{
return $tree;
}
else if($xml->nodeType == XMLReader::ELEMENT)
{
$node = array();
if($xml->hasAttributes)
{
// NOTE(review): $attributes is filled but never attached to $node
// or $tree, so attribute data is silently lost.
$attributes = array();
while($xml->moveToNextAttribute())
{
$attributes[$xml->name] = $xml->value;
}
}
if(!$xml->isEmptyElement)
{
// NOTE(review): $node['tag'] was never assigned above — this
// passes null and raises an undefined-index notice on every call.
$childs = xml2assoc($xml, $node['tag']);
if(isset($childs['text']))
{
$tree = $childs;
} else {
$tree['text'] = $childs[0];
}
}
}
else if($xml->nodeType == XMLReader::TEXT)
{
// NOTE(review): $xmlArr is undefined in this scope; this branch can
// never capture $xml->value as presumably intended.
if(isset($xmlArr['text']))
{
$tree = $xmlArr;
} else {
$tree['text'] = $xmlArr[0];
}
}
}
return $tree;
}
I have used this function to return JSON by passing URL.
/**
 * Parse the XML document at $url with xml2assoc() and return its text
 * payload.
 *
 * @param string $url XML source (URL or local path).
 * @return string The 'text' entry of the parsed tree, or "" if absent.
 */
function PARSE_XML_JSON($url)
{
    $reader = new XMLReader();
    $reader->open($url);
    $assoc = xml2assoc($reader, "root");
    $reader->close();
    //StoreInTxtFile($text);
    return isset($assoc['text']) ? $assoc['text'] : "";
}
I have also tried to save data in files by doing this:
/**
 * Dump $data to a timestamped "jsonfile-<unix time>.txt" file in the
 * working directory. Dies when the file cannot be opened (original
 * behavior preserved).
 *
 * @param string $data Payload to write verbatim.
 */
function StoreInTxtFile($data)
{
    $targetFile = 'jsonfile-' . time() . '.txt';
    $handle = fopen($targetFile, 'w') or die("can't open file");
    fwrite($handle, $data);
    fclose($handle);
}
Please tell me what I'm missing.
Thanks
use LIBXML_PARSEHUGE
$xml = new XMLReader();
$xml->open($url, NULL, LIBXML_PARSEHUGE);
I need some help ^^
What I need is a script that opens and reads all .csv files in the folder 'csv/files' and then does the work in the "if" block. It worked fine when I had only one file. I put together a script for multiple files, but it is not working — and no error message pops up either...
So can somebody look at my code and tell me what I am doing wrong?
<?php
// For every ";"-separated CSV file in the current directory, download
// each listed URL (column 0) to the local path in column 1 via cURL.
foreach (glob("*.csv") as $filename) {
    echo $filename."<br />";
    if (($handle = fopen($filename, "r")) !== FALSE) {
        while (($data = fgetcsv($handle, 1000, ";")) !== FALSE) {
            // Bug fix: rows without both columns used to warn on the
            // missing index and hand garbage to cURL.
            if (count($data) < 2) {
                continue;
            }
            $url = $data[0];
            $path = $data[1];
            // Bug fix: fopen() can fail (bad path, permissions); the old
            // code passed false to CURLOPT_FILE and leaked the cURL handle.
            $fp = fopen($path, 'wb');
            if ($fp === false) {
                continue;
            }
            $ch = curl_init($url);
            curl_setopt($ch, CURLOPT_FILE, $fp);
            curl_setopt($ch, CURLOPT_HEADER, 0);
            curl_exec($ch);
            curl_close($ch);
            fclose($fp);
        }
        fclose($handle);
    }
}
?>
This is a prime candidate for multi-threading, and here's some code to do it:
<?php
// pthreads Worker with no per-thread initialization; it exists only as a
// thread context onto which WebPool stacks WebTask jobs.
class WebWorker extends Worker {
// Intentionally empty: all real work happens in WebTask::run().
public function run() {}
}
/**
 * One download-and-save job executed on a pooled worker thread:
 * fetch $input with file_get_contents() and write it to $output,
 * recording the number of bytes copied (0 on failure).
 */
class WebTask extends Stackable {
    public $input;
    public $output;
    public $copied;

    public function __construct($input, $output) {
        $this->input = $input;
        $this->output = $output;
        $this->copied = 0;
    }

    public function run() {
        $payload = file_get_contents($this->input);
        if (!$payload) {
            return; // fetch failed: leave copied at 0 so the report shows FAIL
        }
        file_put_contents($this->output, $payload);
        $this->copied = strlen($payload);
    }
}
/**
 * Minimal thread pool: lazily spawns up to $max WebWorker threads and
 * stacks each submitted task onto a randomly chosen worker.
 */
class WebPool {
    protected $max;     // upper bound on concurrently spawned workers
    protected $workers; // slot => started WebWorker

    public function __construct($max) {
        $this->max = $max;
        $this->workers = [];
    }

    /**
     * Stack $task onto a (possibly newly started) worker.
     */
    public function submit(WebTask $task) {
        // Bug fix: rand(0, $max) is inclusive on BOTH ends, so a "pool of
        // 8" could actually spawn 9 workers; slots are now 0 .. $max - 1.
        $slot = rand(0, $this->max - 1);
        if (!isset($this->workers[$slot])) {
            $this->workers[$slot] = new WebWorker();
            $this->workers[$slot]->start();
        }
        return $this->workers[$slot]->stack($task);
    }

    // Shut down every worker that was actually started.
    public function shutdown() {
        foreach ($this->workers as $worker) {
            $worker->shutdown();
        }
    }
}
// Queue one WebTask per CSV row ("input;output") across the worker pool,
// wait for shutdown, then report per-job status and total throughput.
$pool = new WebPool(8);
$work = [];
$start = microtime(true);
foreach (glob("csv/*.csv") as $file) {
    // Bug fix: the original clobbered $file with the resource and never
    // closed it, leaking one handle per CSV file.
    $handle = fopen($file, "r");
    if ($handle) {
        while (($line = fgetcsv($handle, 0, ";"))) {
            $wid = count($work);
            $work[$wid] = new WebTask(
                $line[0], $line[1]);
            $pool->submit($work[$wid]);
        }
        fclose($handle);
    }
}
$pool->shutdown(); // blocks until all stacked tasks have run
$runtime = microtime(true) - $start;
$total = 0;
foreach ($work as $job) {
    printf(
        "[%s] %s -> %s %.3f kB\n",
        $job->copied ? "OK" : "FAIL",
        $job->input,
        $job->output,
        $job->copied/1024);
    $total += $job->copied;
}
printf(
    "[TOTAL] %.3f kB in %.3f seconds\n",
    $total/1024, $runtime);
?>
This will create a maximum number of pooled threads; it will then read through a directory of semicolon-separated CSV files where each line is input;output, and submit a task to read the input and write the output asynchronously to the pool for execution, while the main thread continues to read CSV files.
I have used the simplest input/output file_get_contents and file_put_contents so that you can see how it works without cURL.
The worker selected when a task is submitted to the pool is random, this may not be desirable, it's possible to detect if a worker is busy but this would complicate the example.
Further reading:
https://gist.github.com/krakjoe/6437782
http://php.net/pthreads
I want to select files from a directory where the file size is less than 100 KB. Please check my code — it's not working.
<?php
// Echo every .mp3 in $dir whose name matches $_GET['s'] and whose size
// is under 100 KB.
ob_start();
$dir = '/home/couponsc/public_html/testfile';
$ext = '.mp3';
// Bug fix: avoid an undefined-index notice when "s" is absent.
$search = isset($_GET['s']) ? $_GET['s'] : '';
// Bug fix: glob() can return false on error; never foreach over false.
$results = glob("$dir/*$search*$ext") ?: array();
foreach ($results as $item) {
    $sizes = filesize($item);
    // Bug fixes: the original had a stray ";" after the if (an empty
    // statement, so the echo ALWAYS ran) and compared against 100 bytes —
    // filesize() returns bytes, so "less than 100 KB" is 100 * 1024.
    if ($sizes < 100 * 1024) {
        echo $item;
    }
}
?>
if($sizes < 100);
You have a semi-colon after your if-clause -> empty statement.
// Print every matched file smaller than 40 KiB (filesize() returns bytes).
foreach ($results as $match) {
    $byteSize = filesize($match);
    if ($byteSize < 40 * 1024) {
        echo $match, "\n";
    }
}
Or with a bit more spl/lambda fun + recursion:
<?php
// Recursively walk $path and print "size name" for every file whose
// size is below 40 KiB, using the generic FooFilterIterator below.
$path = 'c:/temp';
$walker = new RecursiveIteratorIterator(
    new RecursiveDirectoryIterator($path),
    RecursiveIteratorIterator::LEAVES_ONLY
);
$walker = new FooFilterIterator(
    $walker,
    function($e) { return $e->getSize(); },   // getter: element -> value
    function($e) { return $e < 40*1024; }     // filter: value -> keep?
);
foreach ($walker as $f) {
    printf("% 6d %s\n", $f->getSize(), $f);
}
/**
 * Generic FilterIterator: keeps elements for which
 * $filter($getter($element)) is truthy.
 */
class FooFilterIterator extends FilterIterator {
    protected $getter; // callable: element -> comparable value
    protected $filter; // callable: value -> bool (keep element?)

    public function __construct(Iterator $source, $getter, $filter) {
        parent::__construct($source);
        $this->getter = $getter;
        $this->filter = $filter;
    }

    public function accept() {
        $extract = $this->getter;
        $predicate = $this->filter;
        return $predicate($extract($this->current()));
    }
}
there's also the GlobIterator.
The filesize() function returns bytes, and not kilobytes, thus your condition doesn't work.
the correct condition (note: without the trailing semicolon) is:
if($sizes < 102400)