The application Pipedrive gives me inconsistent JSON data. For example, some array elements come back as "formatted_value":"$3,500","weighted_value":2100,"formatted_weighted_value":"$2,100","rotten_time":null, while others come back as "formatted_value":"$2,950","rotten_time":null,"weighted_value":2950,"formatted_weighted_value":"$2,950". I would like every array element to be in the order formatted_value, weighted_value, formatted_weighted_value, rotten_time, but sadly that's not the case.
Does anyone know of a way to check that the right data is written to the right column, based on the column name and the key name?
Below is my code to parse the json data:
function parseFunction($startPos) {
$url = 'urlToCallJsonData';
$ch = curl_init($url); //initialize connection with a URL
if(is_callable('curl_init'))
{
echo "Enabled";
}
else
{
echo "Not enabled";
}
curl_setopt($ch, CURLOPT_URL, $url);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, true);
curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, 2);
curl_setopt ($ch, CURLOPT_CAINFO, dirname(__FILE__)."/cacert.pem");
$json_response = curl_exec($ch);
$info = curl_getinfo($ch);
$status = curl_getinfo($ch, CURLINFO_HTTP_CODE);
if ( $status != 200 )
{
die("Error: call to URL $url failed with status $status, response $json_response, curl_error " . curl_error($ch) . ", curl_errno " . curl_errno($ch));
}
curl_close($ch);
$response = json_decode($json_response, true);
$count = Count($response['data']);
for ($x=0; $x<$count; $x++)
{
$currentRecord = $response['data'][$x];
}
//open writing to file
if($startPos == 0)
{
$fp = fopen('cacheDeals.csv', 'w');
}
else
{
$fp = fopen('cacheDeals.csv', 'a');
}
$test_array = $response['data'][0];//test_array = first row of data
// writes the headers to the csv file.
if($startPos == 0)
{
$keys = array_keys($test_array);
fputcsv($fp, $keys);
}
$array_records = $response['data'];//all of incoming data
//write data to csv file
foreach ($array_records as $fields)
{
fputcsv($fp, $fields);
}
//check to see if more data should be written
$more = $response[additional_data][pagination][more_items_in_collection];
$nextStart = $response[additional_data][pagination][next_start];
if($more =="true")
{
downloadPipedriveDealsData($nextStart);
}
}//end of function
parseFunction(0);
Sorry, but I can't point this out in comments alone; some of this could be cleaner, such as
//open writing to file
if($startPos == 0)
{
$fp = fopen('cacheDeals.csv', 'w');
}
else
{
$fp = fopen('cacheDeals.csv', 'a');
}
$test_array = $response['data'][0];//test_array = first row of data
// writes the headers to the csv file.
if($startPos == 0)
{
$keys = array_keys($test_array);
fputcsv($fp, $keys);
}
Could be simply this
// writes the headers to the csv file.
if($startPos == 0){
$fp = fopen('cacheDeals.csv', 'w');
fputcsv($fp, array_keys($response['data'][0]));
}else{
$fp = fopen('cacheDeals.csv', 'a');
}
I don't see a purpose to this whole block at all
$count = Count($response['data']);
for ($x=0; $x<$count; $x++)
{
$currentRecord = $response['data'][$x];
}
This syntax is invalid
$more = $response[additional_data][pagination][more_items_in_collection];
$nextStart = $response[additional_data][pagination][next_start];
And it will issue a Notice ("Use of undefined constant ... assumed ...") etc., because there are no quotes around the string keys in the arrays. In the unlikely event that one of those keys actually is a constant, that's a whole other can of worms, because then you will never get your data out. They should be written with ' or " around them. See also: What does the PHP error message "Notice: Use of undefined constant" mean?
$more = $response['additional_data']['pagination']['more_items_in_collection'];
$nextStart = $response['additional_data']['pagination']['next_start'];
Just saying.
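As for the key-order problem in the original question, one option (not from the original post, just a minimal sketch) is to write each row against a fixed list of column names instead of relying on the order Pipedrive returns the keys in, so every value lands under the right header by key name:
// fixed column order; extend this list to whichever columns you actually want
$columns = array('formatted_value', 'weighted_value', 'formatted_weighted_value', 'rotten_time');
if ($startPos == 0) {
    fputcsv($fp, $columns); // header row in the fixed order
}
foreach ($response['data'] as $record) {
    $row = array();
    foreach ($columns as $col) {
        // look each value up by key name, so the key order in the JSON no longer matters
        $row[] = isset($record[$col]) ? $record[$col] : '';
    }
    fputcsv($fp, $row);
}
Because every cell is looked up by key name, this also covers the "right data in the right column based on column name and key name" part of the question.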
I would like to know if you can help me get WordPress to behave the same way as when I simply open the file directly via its URL, rather than through a hook like
add_action( 'admin_post_uploadImage', 'uploadImage' );
Is there a way I can do that?
It says: fopen(uploads/916224322.jpg): failed to open stream: No such file or directory in ********\wp-content\plugins\tandhjuletodderapi\functions.php on line 135
[31-Jul-2022 16:19:09 UTC] PHP Warning: curl_setopt(): supplied argument is not a valid File-Handle resource in
Is it something else that prevents it from working? I have looked at chmod and the permissions are as they should be; it works fine when I don't run it from the hooked function but simply open the file directly, without any POST submit.
What I had in mind was to have the file perform that job only through an admin process, so that it cannot be run just by opening the file directly.
I hope that makes sense.
ini_set('memory_limit', '-1');
ini_set('max_execution_time', 1300);
function uploadImage()
{
if (isset($_POST['submitUploadImage'])) {
if (!current_user_can('manage_options')) {
echo 'Fejl du har ikke ret til at gøre denne handling'; // Danish: "Error, you do not have permission to perform this action"
exit;
}
if (!wp_verify_nonce($_POST['UploadBilleder_nonce'], 'UploadBilleder_nonce')) {
wp_die('Validering fejlede!!'); // Danish: "Validation failed!!"
} else {
function multiple_download(array $urls, $save_path = 'uploads')
{
$multi_handle = curl_multi_init();
$file_pointers = [];
$curl_handles = [];
foreach ($urls as $key => $url) {
$file = $save_path . '/' . basename($url);
if(!is_file($file)) {
$curl_handles[$key] = curl_init($url);
$file_pointers[$key] = fopen($file, "x");
curl_setopt($curl_handles[$key], CURLOPT_FILE, $file_pointers[$key]);
curl_setopt($curl_handles[$key], CURLOPT_HEADER, 0);
curl_setopt($curl_handles[$key], CURLOPT_CONNECTTIMEOUT, 60);
curl_multi_add_handle($multi_handle,$curl_handles[$key]);
}
}
// Download the files
do {
curl_multi_exec($multi_handle,$running);
} while ($running > 0);
foreach ($urls as $key => $url) {
curl_multi_remove_handle($multi_handle, $curl_handles[$key]);
curl_close($curl_handles[$key]);
fclose ($file_pointers[$key]);
}
curl_multi_close($multi_handle);
}
$getFunction = new TandhjuletCSV;
$getData = $getFunction->getImageToUpload();
$getUrl = array();
$count = 0;
foreach ($getData as $key => $values) {
$count++;
$getUrl[] = $values;
if(count($getData) == $count) {
break;
}
}
multiple_big_download($getUrl);
multiple_download($getUrl);
}
wp_redirect(admin_url('options-general.php?page=cykel_visning'));
}
}
add_action( 'admin_post_uploadImage', 'uploadImage' );
Best regards
Morten
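Not part of Morten's post, but one hedged guess at the fopen() failure: when the code runs through admin-post.php, the current working directory is wp-admin, so a relative path like uploads/ no longer points where it does when the file is opened directly via its URL. A minimal sketch that builds an absolute path with WordPress's wp_upload_dir() instead:
// assumption: the downloaded images should land in the standard WordPress uploads directory
$upload_dir = wp_upload_dir();
$save_path  = trailingslashit($upload_dir['basedir']); // absolute path, e.g. .../wp-content/uploads/
$file = $save_path . basename($url);
if (!is_file($file)) {
    $file_pointers[$key] = fopen($file, 'x'); // now opened with an absolute path
}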
So, I have one cURL API call which works fine when I do the foreach outside the while loop. Once I move the foreach inside (because I need the values inside), it becomes an infinite loop.
This is the setup
$query = "SELECT id, vote FROM `administrators` WHERE type = 'approved'";
$result = $DB->query($query);
$offset = 0;
$length = 5000;
$ch = curl_init();
curl_setopt($ch, CURLOPT_HEADER, 0);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false);
do {
curl_setopt($ch, CURLOPT_URL, "https://api.gov/data?api_key=xxxxxxxxxx&start=1960&sort[0][direction]=desc&offset=$offset&length=$length");
$jsonData = curl_exec($ch);
$response = json_decode($jsonData);
foreach($response->response->data as $finalData){
$allData[] = $finalData;
}
$offset += count($response->response->data);
} while ( count($response->response->data) > 0 );
curl_close($ch);
while($row = $DB->fetch_object($result)) {
foreach ( $allData as $key => $finalData1 ) {
// rest of the code
}
}
Once I run the page, it runs forever, or until my browser crashes. If I move foreach ( $allData as $key => $finalData1 ) { } outside the while(){}, there is no such problem.
Any ideas what the problem could be here?
UPDATE: // rest of the code
$dataValue = str_replace(array("--","(s)","NA"),"NULL",$finalData1->value);
if($frequency == "dayly") {
if($dataValue) {
$query = "UPDATE table SET $data_field = $dataValue WHERE year = $finalData1->period AND id = $row->id LIMIT 1";
}
}
if(isset($query))
$DB->query($query);
unset($query);
One of the issues could be that where
// rest of the code
is, you have duplicate variable names, thus overwriting the current positions in your arrays and loops.
However, you should change your approach to something like
$rows = Array();
while($row = $DB->fetch_object($result)) $rows[] = $row;
foreach ($rows as $row) {
foreach ($allData as $key => $finalData1) {
// rest of the code
}
}
That way you can read the result set from the database faster and free it before you continue.
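For illustration only (the $DB wrapper is not shown in the question, so plain mysqli is an assumption here), the same pattern of buffering the result set and freeing it before the heavy nested loop looks like this:
// assumption: $mysqli is a mysqli connection equivalent to the $DB wrapper
$result = $mysqli->query("SELECT id, vote FROM `administrators` WHERE type = 'approved'");
$rows = array();
while ($row = $result->fetch_object()) {
    $rows[] = $row;  // buffer every row in PHP
}
$result->free();     // release the result set before the expensive loops
foreach ($rows as $row) {
    foreach ($allData as $key => $finalData1) {
        // rest of the code
    }
}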
I've been trying to run CURL in a foreach loop to extract information from the cryptocompare.com API. As soon as I call the following function, my code just stops working. There is no output.
$fullArray[$symbol]['Price'] = getThePrice($fullArray[$symbol]['Symbol']);
What am I doing wrong? I pasted the code below
include 'helper.php';
$fullArray = array();
//Get List of All Coins and store symbol and ID
$url = "https://min-api.cryptocompare.com/data/all/coinlist";
$jsonArray = getConnection($url);
foreach($jsonArray['Data'] as $value)
{
$symbol = $value['Symbol'];
$fullArray[$symbol]['Symbol'] = $value['Symbol'];
$fullArray[$symbol]['Id'] = $value['Id'];
//call getThePrice function to get Price of ticker
$fullArray[$symbol]['Price'] = getThePrice($fullArray[$symbol]['Symbol']);
}
function getThePrice($input)
{
//Get current price of each coin and store in full array
$url = "https://www.cryptocompare.com/api/data/coinsnapshot/?fsym=".$input."&tsym=USD";
$jsonNewArray = getConnection($url);
if(array_key_exists('PRICE',$jsonNewArray['Data']['AggregatedData']))
{
$returnVariable = $jsonNewArray['Data']['AggregatedData']['PRICE'];
echo "The price of : ".$input." is ".$returnVariable;
}
else{
$returnVariable = "NA";
echo "This price is not available";
}
return $returnVariable;
}
The code in helper.php:
function getConnection($inputHelp)
{
$ch = curl_init();
curl_setopt($ch,CURLOPT_URL,$inputHelp);
curl_setopt($ch,CURLOPT_RETURNTRANSFER,true);
curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false);
//curl_setopt($ch,CURLOPT_CONNECTTIMEOUT, 4);
$json = curl_exec($ch);
if(!$json) {
echo curl_error($ch);
}
curl_close($ch);
$jsonArray = json_decode($json, true);
return $jsonArray;
}
Appreciate any help. Thanks in advance.
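One thing worth noting (not from the original post, just a hedged sketch): getConnection() sets no timeouts, so a single slow cryptocompare request can make the whole foreach appear to hang with no output. Adding timeouts and a status check makes failures visible instead of silent:
function getConnection($inputHelp)
{
    $ch = curl_init();
    curl_setopt($ch, CURLOPT_URL, $inputHelp);
    curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
    curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false);
    curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 5);  // give up connecting after 5 seconds
    curl_setopt($ch, CURLOPT_TIMEOUT, 15);        // give up on the whole request after 15 seconds
    $json = curl_exec($ch);
    if ($json === false) {
        echo 'cURL error: ' . curl_error($ch);
    }
    $status = curl_getinfo($ch, CURLINFO_HTTP_CODE);
    if ($status !== 200) {
        echo "Unexpected HTTP status $status for $inputHelp";
    }
    curl_close($ch);
    return json_decode($json, true);
}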
Currently, when I execute this function with, say, 60 URLs, I get an HTTP 504 error. Is there any way to multithread this so that I no longer get a 504 error and can iterate through the entire list of URLs?
<?php
namespace App\Http\Controllers;
use Request;
use App\Http\Controllers\Controller;
class MainController extends Controller
{
public function parse()
{
$input = Request::all();
$csv = $input['laraCsv'];
$new_csv = trim(preg_replace('/\s\s+/', ',', $csv));
$headerInfo = [];
//$titles = [];
$csvArray = str_getcsv($new_csv, ",");
$csvLength = count($csvArray);
$i = 0;
while ($i < $csvLength) {
if(strpos($csvArray[$i], '.pdf') !== false) {
print_r($csvArray[$i]);
}
else{
array_push($headerInfo, get_headers($csvArray[$i], 1));
}
//sleep(3);
//echo file_get_contents($csvArray[$i]);
$i++;
}
return view('csvViewer')->with('data', $headerInfo)->with('urls', $csvArray);
}
}
I've used DigitalOcean in the past, but I'm not sure what error codes they give when you run out of time (also, set_time_limit(0); should already be in your code).
See if this works:
<?php
function getHeaders($data) {
$curly = array();
$result = array();
$mh = curl_multi_init();
foreach ($data as $id => $url) {
$curly[$id] = curl_init();
curl_setopt($curly[$id], CURLOPT_URL, $url);
curl_setopt($curly[$id], CURLOPT_HEADER, true);
curl_setopt($curly[$id], CURLOPT_NOBODY, true);
curl_setopt($curly[$id], CURLOPT_RETURNTRANSFER, true);
curl_multi_add_handle($mh, $curly[$id]);
}
$running = null;
do {
curl_multi_exec($mh, $running);
} while ($running > 0);
foreach($curly as $id => $c) {
$result[$id] = array_filter(explode("\n", curl_multi_getcontent($c)));
curl_multi_remove_handle($mh, $c);
}
curl_multi_close($mh);
return $result;
}
$urls = array(
'http://google.com',
'http://yahoo.com',
'http://doesnotexistwillitplease.com'
);
$r = getHeaders($urls);
echo '<pre>';
print_r($r);
So once you've gotten all your URLs into an array, run it like getHeaders($urls);.
If it doesn't work, try it with only 3 or 4 URLs first. Also add set_time_limit(0); at the top, as mentioned before.
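If firing off all 60 requests at once is still too much for the target servers, a small variation (an assumption, not part of the original answer) is to feed getHeaders() the URLs in batches so only a handful of requests are in flight at a time:
$results = array();
foreach (array_chunk($urls, 10, true) as $batch) { // 10 concurrent requests per batch
    $results += getHeaders($batch);                // keys are preserved, so += merges by id
}
print_r($results);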
Are you sure it is because of your code? It could also be the server configuration.
About HTTP 504:
This problem is entirely due to slow IP communication between back-end computers, possibly including the Web server. Only the people who set up the network at the site which hosts the Web server can fix this problem.
I am trying to get this script to output results based on a greater-than/less-than check. When I run this script, all it does is output the first line in the text file. Any suggestions as to what I am missing?
<?php
$lines = file('unique.txt'); // Reads the file with the list of user numbers
$timestamp = time(); // Defines time for below renaming
foreach ($lines as $usernumber) { // Loops line by line
$link = 'http://backpack.tf/api/IGetUsers/v2/?&steamids=' . $usernumber . '&format=json';
$json = file_get_contents($link); // Reads link (this ^)
$data = json_decode($json); // Defines decode as json
if (!empty($data)) {
$profiles = array(); //init array so we can use $profiles[] later
foreach ($data->response->players as $player) { // Loop thrugh all the players
$player2 = $player->backpack_value;
if ($player2 < 9999999) { // Check the backpack_value
$profiles[] = $player; // Assign the required players to a new array
var_dump($profiles); // Dump the array to browser for debugning
$fh = fopen("final." . $timestamp . ".txt", 'a') or die("can't open file"); // Opens final.txt to write in
fwrite($fh, $usernumber); // Writes the parsed results to final.txt
} //closes if $playtime
} //closes foreach $data
} //closes if !empty
else {
echo $data;
}
} //closes foreach $lines
?>
Unique.txt contains
76561197992831594
76561197992707820
76561197992146126
76561197992694522
76561197992707820
76561197992831594
JSON Example
{
"response": {
"success": 1,
"current_time": 1369685515,
"players": {
"0": {
"steamid": "76561197992831594",
"success": 1,
"backpack_value": 47.97,
"backpack_update": 1369683750,
"name": "WesFox13",
"notifications": 0
}
}
}
}
Okay, there are two fundamental problems.
The fopen call needs to move outside of the loop.
The file() call has an annoying habit of keeping the trailing newline, so when you are building up your URL you should use trim($usernumber) to get rid of it.
Here is an update with those two things in place.
<?php
$lines = file('unique.txt'); // Reads the file with the list of user numbers
$timestamp = time(); // Defines time for below renaming
$fh = fopen("final." . $timestamp . ".txt", 'a') or die("can't open file"); // Opens final.txt to write in
foreach ($lines as $usernumber) { // Loops line by line
$link = 'http://backpack.tf/api/IGetUsers/v2/?&steamids=' . trim($usernumber) . '&format=json';
$json = file_get_contents($link); // Reads link (this ^)
$data = json_decode($json); // Defines decode as json
print_r($json);
if (!empty($data)) {
$profiles = array(); //init array so we can use $profiles[] later
foreach ($data->response->players as $player) { // Loop thrugh all the players
$player2 = $player->backpack_value;
if ($player2 < 9999999) { // Check the backpack_value
$profiles[] = $player; // Assign the required players to a new array
var_dump($profiles); // Dump the array to browser for debugning
fwrite($fh, $usernumber); // Writes the parsed results to final.txt
} //closes if $playtime
} //closes foreach $data
} //closes if !empty
else {
echo $data;
}
} //closes foreach $lines
I've done this with CURL and it works too.
The code:
$lines = array('76561197992831594','76561197992707820','76561197992146126');
$timestamp = time(); // Defines time for below renaming
foreach ($lines as $usernumber) { // Loops line by line
$link = 'http://backpack.tf/api/IGetUsers/v2/?&steamids=' . $usernumber . '&format=json';
$json = curl_download($link); // Reads link (this ^)
$data = json_decode($json); // Defines decode as json
if (!empty($data)) {
$profiles = array(); //init array so we can use $profiles[] later
foreach ($data->response->players as $player) { // Loop thrugh all the players
$player2 = $player->backpack_value;
if ($player2 < 9999999) { // Check the backpack_value
$profiles[] = $player; // Assign the required players to a new array
var_dump($profiles); // Dump the array to browser for debugning
file_put_contents("final." . $timestamp . ".txt", $usernumber);
} //closes if $playtime
} //closes foreach $data
} //closes if !empty
else {
echo $data;
}
}
curl download function:
function curl_download($Url){
// is cURL installed yet?
if (!function_exists('curl_init')){
die('Sorry cURL is not installed!');
}
// OK cool - then let's create a new cURL resource handle
$ch = curl_init();
// Now set some options (most are optional)
// Set URL to download
curl_setopt($ch, CURLOPT_URL, $Url);
// Set a referer
curl_setopt($ch, CURLOPT_REFERER, "http://www.google.pl");
// User agent
curl_setopt($ch, CURLOPT_USERAGENT, "Mozilla Firefox/1.0");
// Include header in result? (0 = yes, 1 = no)
curl_setopt($ch, CURLOPT_HEADER, 0);
// Should cURL return or print out the data? (true = return, false = print)
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
// Timeout in seconds
curl_setopt($ch, CURLOPT_TIMEOUT, 10);
// Download the given URL, and return output
$output = curl_exec($ch);
// Close the cURL resource, and free system resources
curl_close($ch);
return $output;
}
Output:
array(1) { [0]=> object(stdClass)#102 (6) { ["steamid"]=> string(17) "76561197992831594" ["success"]=> int(1) ["backpack_value"]=> float(47.97) ["backpack_update"]=> int(1369683750) ["name"]=> string(8) "WesFox13" ["notifications"]=> int(0) } } array(1) { [0]=> object(stdClass)#106 (6) { ["steamid"]=> string(17) "76561197992707820" ["success"]=> int(1) ["backpack_value"]=> float(59.78) ["backpack_update"]=> int(1369689171) ["name"]=> string(10) "Alexsutton" ["notifications"]=> int(0) } } array(1) { [0]=> object(stdClass)#98 (6) { ["steamid"]=> string(17) "76561197992146126" ["success"]=> int(1) ["backpack_value"]=> float(36181.59) ["backpack_update"]=> int(1369689000) ["name"]=> string(25) ":HIT: Bobo the Monkey Boy" ["notifications"]=> int(0) } }
My suggestion to you is to use cURL when you want to download something from the web. Alternatively, file_get_contents() and file_put_contents() do the same thing, their syntax is shorter, and they are easier to use.
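For what it's worth, the same download-and-append step written with those two functions would look roughly like this (a minimal sketch; note FILE_APPEND so each matching user number is added rather than overwriting the file):
$json = file_get_contents($link); // download the JSON for one user number
file_put_contents("final." . $timestamp . ".txt", $usernumber, FILE_APPEND); // append the ID to the output file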