<?php
function cuload($url, $got = array()){
//include(MDL.'socket_adapter.php');
$user_agent = "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:5.0) Gecko/20100101 Firefox/5.0";
$default_options = array(
'data' => 'og',
'post_data' => false,
'referer' => false,
'cookie' => false,
'auth' => false,
'proxy' => false,
'pauth' => false,
'returndata' => true,
);
foreach($default_options as $opt=>$value) {
if(!isset($default_options[$opt])) {$got[$opt] = $value; }
}
$curl = curl_init();
//if(strstr($referer,"://")){
//curl_setopt ($curl, CURLOPT_REFERER, $got['referer']);}
curl_setopt ($curl, CURLOPT_URL, $url);
curl_setopt ($curl, CURLOPT_USERAGENT, $user_agent);
curl_setopt ($curl, CURLOPT_HEADER, 1);
if(isset($got['returndata'])){
curl_setopt ($curl, CURLOPT_RETURNTRANSFER, 1);
}else{
curl_setopt ($curl, CURLOPT_RETURNTRANSFER, false);
}
curl_setopt ($curl, CURLOPT_ENCODING, 'gzip, deflate');
curl_setopt ($curl, CURLOPT_SSL_VERIFYPEER, 0);
//curl_setopt($curl ,CURLOPT_USERAGENT, $got_opt['user_agent']); //The Name of the UserAgent we will be using ;)
if(isset($got['post'])){curl_setopt($curl ,CURLOPT_POST , true); curl_setpot($curl ,CURLOPT_POSTFIELDS , $post); }
if(isset($got['referer'])) curl_setopt($curl,CURLOPT_REFERER, $options['referer']);
if(isset($got['cookie'])){ if($got['cookie'] == "0"){
curl_setopt($curl ,CURLOPT_COOKIEJAR, TBP."cookie.txt"); }//If ever needed...
elseif($got['cookie'] != "0"){
curl_setopt($curl ,CURLOPT_COOKIE, $got['cookie']); }}
//curl_setopt($curl ,CURLOPT_FOLLOWLOCATION, true);
//curl_setopt($curl ,CURLOPT_MAXREDIRS, 5);
//curl_setopt($curl ,CURLOPT_SSL_VERIFYPEER, false);
$custom_headers = array();
$custom_headers[] = "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8";
$custom_headers[] = "Pragma: no-cache";
$custom_headers[] = "Cache-Control: no-cache";
$custom_headers[] = "Accept-Language: en-us;q=0.7,en;q=0.3";
$custom_headers[] = "Accept-Charset: utf-8,windows-1251;q=0.7,*;q=0.7";
if(isset($url_parts['user']) and isset($url_parts['pass'])) {
$custom_headers[] = "Authorization: Basic ".base64_encode($url_parts['user'].':'.$url_parts['pass']);
}elseif(isset($got['auth'])){
$uj = explode(":",$got['auth']); $custom_headers[] = "Authorization: Basic ".base64_encode($uj[0].':'.$uj[1]); }
if(isset($got['pauth'])){
curl_setopt($curl ,CURLOPT_PROXYUSERPWD ,$pauth); }
if(isset($got['proxy'])){
curl_setopt($curl ,CURLOPT_PROXY ,$proxy); }
curl_setopt($curl ,CURLOPT_HTTPHEADER, $custom_headers);
$response = curl_exec($curl);
$info = curl_getinfo($curl);
curl_close ($curl);
ob_end_clean();
return $response;
}
?>
OK, now when I try this script on a sample page:
<?php
$page = cuload('http://www.google.com');
?>
But now, without echoing the $page variable, it echoes the content. I don't need to echo anything here; I just need to find some links in the content of $page. After exploring the script deeply, I found that the problem may be in curl_exec, because even if I remove the return $response line it still shows the content! I can't understand why it outputs the page when I am not echoing $page.
Please help me!
Instead of
foreach($default_options as $opt=>$value) {
if(!isset($default_options[$opt])) {$got[$opt] = $value; }
}
try this:
foreach($default_options as $opt=>$value) {
if(!empty($default_options[$opt])) {$got[$opt] = $value;}
}
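The reason the content is printed even though you never echo $page: the original loop tests $default_options[$opt], which is always set, so none of the defaults (including returndata) are ever copied into $got. isset($got['returndata']) is then false, CURLOPT_RETURNTRANSFER gets set to false, and curl_exec() prints the response directly instead of returning it. For what it's worth, a minimal sketch of a guard that fills in only the missing options without clobbering anything the caller passed; note that it makes every option set, so the later isset($got[...]) checks would have to become truthiness checks such as if ($got['proxy']):
foreach($default_options as $opt=>$value) {
// copy the default only when the caller did not supply this option
if(!isset($got[$opt])) {$got[$opt] = $value; }
}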
I can't loop through an array, I still get cURL error "3"
I have checked, and the array contains all the links I wanted to get.
Function to make a GET request:
<?php
function geturl($url){
$starttime = microtime(TRUE);
$ch = curl_init();
curl_setopt($ch, CURLOPT_URL,$url);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_FOLLOWLOCATION, true);
curl_setopt($ch, CURLOPT_USERAGENT, "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:47.0) Gecko/20100101 Firefox/47.0");
curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 4);
curl_setopt($ch, CURLOPT_TIMEOUT, 8);
curl_setopt($ch, CURLOPT_HTTPHEADER,
[
'https://betsapi.com/',
'authority: betsapi.com',
'upgrade-insecure-requests: 1',
'accept:text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
'sec-fetch-site: cross-site' ,
'sec-fetch-mode: navigate',
'sec-fetch-user: ?1',
'sec-fetch-dest: document',
//'sec-ch-ua: " Not A;Brand";v="99", "Chromium";v="98", "Microsoft Edge";v="98"',
'sec-ch-ua-mobile: ?0',
'sec-ch-ua-platform: "Windows"',
'referer: https://betsapi.com/',
'accept-language: pl,en;q=0.9,en-GB;q=0.8,en-US;q=0.7',
]);
$response = curl_exec($ch);
// here I check errors and request time
if(curl_errno($ch))
{
echo "Curl error no: ". curl_errno($ch);
exit;
}
curl_close($ch);
$endtime = microtime(TRUE);
echo "Request time"." ". $endtime - $starttime."\n";
return $response;
}
Function to load HTML:
function simplehtml($item){
include_once('simple_html_dom.php');
$html = new simple_html_dom();
$html->load($item);
return $html;
}
When I call the geturl function and the simplehtml function for the first time, it works and puts all the links into an array:
$starturl= geturl('https://betsapi.com/cin/soccer');
$starturlhtml= simplehtml($starturl);
foreach($starturlhtml->find("tr[class='c_1'] td[class='text-center'] a")as $url){
$url="https://betsapi.com".$url->href."\n";
$urlarray[]=$url;
}
Here I call it in a loop and I get cURL error 3:
foreach($urlarray as $urls){
$urlss= geturl($urls);
$urlshtml=simplehtml($urlss);
foreach($urlshtml->find("div[class='col-md-6 text-center'] p b")as $stadium){
echo $stadium;
sleep(2);
}
}
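cURL error 3 is CURLE_URL_MALFORMAT, and the first loop appends a literal "\n" to every URL it stores, so each entry in $urlarray carries a trailing newline that cURL rejects as malformed. A sketch of the likely fix: keep the newline out of the stored URL, or trim defensively before requesting. (As an aside, the first entry of the CURLOPT_HTTPHEADER array, 'https://betsapi.com/', has no header name and probably belongs in the referer header.)
// build the array without baking a newline into each URL
foreach($starturlhtml->find("tr[class='c_1'] td[class='text-center'] a")as $url){
$urlarray[] = "https://betsapi.com".$url->href;
}
// or, defensively, trim whatever is in the array before requesting it
foreach($urlarray as $urls){
$urlss = geturl(trim($urls));
}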
I am attempting to buy Notes on the secondary market of Lending Club and I keep getting "Internal server error". I have asked Lending Club support multiple times too, but they are clueless. I also tried following this post, but no luck: LendingClub.com API 500 Error for Buying Notes on Secondary Market.
Please help.
<?php
$invester_id = "516xxxxxx";
$api_key = "GVsZuxxxxxxxxx";
$ContentType = "application/json";
define("DEBUG_LENDING_API", true);
$buy = buy_notes($invester_id, $api_key);
print_r($buy);die;
function buy_notes($invester_id, $api_key){
$buy_notes_url = "https://api.lendingclub.com/api/investor/v1/accounts/$invester_id/trades/buy";
$note = array("loanId" => "97277470", "orderId" => "139320895", "noteID" => "149206918", "bidPrice" => "19.45");
$datas = array("aid" => "70654", "notes" => $note);
$buy_notes = call_curl($buy_notes_url, $api_key, json_encode($datas));
$notes = json_decode($buy_notes['data']);
return $notes;
}
function call_curl($url, $api_key, $post = "0"){
$invester_id = "516xxxxxx";
$ContentType = "application/json";
$ch = curl_init();
curl_setopt($ch, CURLOPT_URL, $url);
curl_setopt ( $ch, CURLOPT_USERAGENT, "Mozilla/5.0 (Windows; U; Windows NT 5.1; pl; rv:1.9) Gecko/2008052906 Firefox/3.0" );
if($post != "0"){
curl_setopt($ch,CURLOPT_POST, 1);
curl_setopt($ch,CURLOPT_POSTFIELDS, $post);
}
curl_setopt ( $ch, CURLOPT_AUTOREFERER, true );
curl_setopt ( $ch, CURLOPT_FOLLOWLOCATION, true );
$headers = array();
$headers[] = "Authorization: $api_key";
$headers[] = "Content-type: $ContentType";
$headers[] = "Accept: $ContentType";
$headers[] = "X-LC-Application-Key: $invester_id";
//print_r(array_values($headers));
//exit;
curl_setopt($ch, CURLOPT_HTTPHEADER, $headers);
$server_output = curl_exec ($ch);
// debug left in the original post; with the exit the function would never return,
// so both lines are commented out here
//echo $server_output. "<br>";
//exit;
$info = curl_getinfo($ch);
curl_close ($ch);
if(DEBUG_LENDING_API == true){
return array("data" => $server_output, "response" => $info);
}else{
return json_decode($server_output);
}
}
?>
I managed to figure it out. I set "aid" to $invester_id and that finally did the trick. Thanks Aynber for your tips along the way.
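For reference, a minimal sketch of the corrected request body per that resolution ($invester_id, $api_key, $buy_notes_url and call_curl() are the poster's own variables and function):
$note = array("loanId" => "97277470", "orderId" => "139320895", "noteID" => "149206918", "bidPrice" => "19.45");
// "aid" must be the investor ID, not a hard-coded account number
$datas = array("aid" => $invester_id, "notes" => $note);
$buy_notes = call_curl($buy_notes_url, $api_key, json_encode($datas));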
I have a PHP Simple HTML DOM Parser locally in MAMP that pulls information and works well with the Japan version of a website, since I'm located in Japan. However, I would like to pull information from the UK version of the site. What is the simplest way to do this?
I tried the following from the documentation and it didn't work.
$context = array('http' => array('proxy' => '212.82.126.32:80','request_fulluri' => true,),);
$stream = stream_context_create($context);
$html = file_get_html('http://www.supremenewyork.com/shop/new', false, $stream);
I also tried the cURL version with modifications, as the site has safe mode enabled. That didn't work either.
function curl_exec_follow(/*resource*/ $ch, /*int*/ &$maxredirect = null) {
$mr = $maxredirect === null ? 5 : intval($maxredirect);
if (ini_get('open_basedir') == '' && ini_get('safe_mode') == 'Off') {
curl_setopt($ch, CURLOPT_FOLLOWLOCATION, $mr > 0);
curl_setopt($ch, CURLOPT_MAXREDIRS, $mr);
} else {
curl_setopt($ch, CURLOPT_FOLLOWLOCATION, false);
if ($mr > 0) {
$newurl = curl_getinfo($ch, CURLINFO_EFFECTIVE_URL);
$rch = curl_copy_handle($ch);
curl_setopt($rch, CURLOPT_HEADER, true);
curl_setopt($rch, CURLOPT_NOBODY, true);
curl_setopt($rch, CURLOPT_FORBID_REUSE, false);
curl_setopt($rch, CURLOPT_RETURNTRANSFER, true);
do {
curl_setopt($rch, CURLOPT_URL, $newurl);
$header = curl_exec($rch);
if (curl_errno($rch)) {
$code = 0;
} else {
$code = curl_getinfo($rch, CURLINFO_HTTP_CODE);
if ($code == 301 || $code == 302) {
preg_match('/Location:(.*?)\n/', $header, $matches);
$newurl = trim(array_pop($matches));
} else {
$code = 0;
}
}
} while ($code && --$mr);
curl_close($rch);
if (!$mr) {
if ($maxredirect === null) {
trigger_error('Too many redirects. When following redirects, libcurl hit the maximum amount.', E_USER_WARNING);
} else {
$maxredirect = 0;
}
return false;
}
curl_setopt($ch, CURLOPT_URL, $newurl);
}
}
return curl_exec($ch);
}
$url = 'http://www.supremenewyork.com/shop/new';
$proxy = '212.82.126.32:80';
$options = array(
CURLOPT_PROXY => $proxy,
CURLOPT_HTTPPROXYTUNNEL => 0,
CURLOPT_REFERER => "http://www.google.com",
CURLOPT_FOLLOWLOCATION => true,
CURLOPT_RETURNTRANSFER => true,
CURLOPT_USERAGENT => "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.1) Gecko/20061204 Firefox/2.0.0.1",
CURLOPT_CONNECTTIMEOUT => 20,
CURLOPT_TIMEOUT => 20,
CURLOPT_MAXREDIRS => 10,
CURLOPT_HEADER => true,
);
$ch = curl_init( $url );
//curl_setopt_array( $ch, $options );
$content = curl_exec_follow( $ch );
$html = new simple_html_dom();
$html->load($content,true,false);
I tried uploading to US and UK servers as well, but that didn't work either; it just pulls US data. Some help, please?
cURL works whether safe mode is enabled or disabled.
Your cURL script is too complex; make it simple and try again.
$content = curl_exec_follow('http://www.supremenewyork.com/shop/new');
$html = new simple_html_dom();
$html->load($content,true,false);
I modified your code; you can try it:
// define cookie file path here
define('CRAWLER_COOKIE_FILENAME', 'cookie.txt');
function curl_exec_follow($url) {
$proxy = '212.82.126.32:80';
$agent = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.1) Gecko/20061204 Firefox/2.0.0.1';
// Some websites check referrer
$host = parse_url($url, PHP_URL_HOST);
$scheme = parse_url($url, PHP_URL_SCHEME);
$referrer = $scheme . '://' . $host;
$ch = curl_init();
$curl_defaults = array(
CURLOPT_HEADER => 0,
CURLOPT_FOLLOWLOCATION => 1,
CURLOPT_RETURNTRANSFER => 1,
);
curl_setopt_array($ch, $curl_defaults);
curl_setopt($ch, CURLOPT_URL, $url);
curl_setopt($ch, CURLOPT_PROXY, $proxy);
curl_setopt($ch, CURLOPT_USERAGENT, $agent);
curl_setopt($ch, CURLOPT_REFERER, $referrer);
if ( !file_exists(CRAWLER_COOKIE_FILENAME) || !is_writable(CRAWLER_COOKIE_FILENAME) ) {
echo 'Cookie file is missing or not writable.';
exit;
}
curl_setopt($ch, CURLOPT_COOKIESESSION, 0);
curl_setopt($ch, CURLOPT_COOKIEFILE, CRAWLER_COOKIE_FILENAME);
curl_setopt($ch, CURLOPT_COOKIEJAR, CRAWLER_COOKIE_FILENAME);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 5);
// allow to crawl https webpages
curl_setopt($ch,CURLOPT_SSL_VERIFYHOST,0);
curl_setopt($ch,CURLOPT_SSL_VERIFYPEER,0);
// the download speed must be at least 1 byte per second
curl_setopt($ch,CURLOPT_LOW_SPEED_LIMIT, 1);
// if the download speed is below 1 byte per second for more than 30 seconds curl will give up
curl_setopt($ch,CURLOPT_LOW_SPEED_TIME, 30);
$content = curl_exec($ch);
if ($content === FALSE) {
echo curl_error($ch);
}
$code = curl_getinfo($ch, CURLINFO_HTTP_CODE);
if ( $code != '200' ) echo 'http error code: ' . $code;
curl_close($ch);
return $content;
}
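As for the stream-context attempt at the top of the question: PHP's HTTP stream wrapper expects the proxy option as a tcp:// URI, so the bare host:port may simply not take effect. A sketch of the same call with that adjustment (same proxy address as in the question):
$context = array('http' => array(
'proxy' => 'tcp://212.82.126.32:80', // note the tcp:// scheme
'request_fulluri' => true,
));
$stream = stream_context_create($context);
$html = file_get_html('http://www.supremenewyork.com/shop/new', false, $stream);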
I'm using cURL to scrape the HTML from URLs. It works great for 80% of the URLs I use, but some URLs don't seem "scrapeable". For example, when I try to scrape http://www.thefancy.com, it doesn't work: the website keeps loading and in the end it doesn't return a result. The problem is testable at http://www.itemmized.com/test/test/. This is my code:
if($_POST['submit']) {
function curl_exec_follow($ch, &$maxredirect = null) {
$mr = $maxredirect === null ? 5 : intval($maxredirect);
if (ini_get('open_basedir') == '' && ini_get('safe_mode') == 'Off') {
curl_setopt($ch, CURLOPT_FOLLOWLOCATION, $mr > 0);
curl_setopt($ch, CURLOPT_MAXREDIRS, $mr);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false);
} else {
curl_setopt($ch, CURLOPT_FOLLOWLOCATION, false);
if ($mr > 0)
{
$original_url = curl_getinfo($ch, CURLINFO_EFFECTIVE_URL);
$newurl = $original_url;
$rch = curl_copy_handle($ch);
curl_setopt($rch, CURLOPT_HEADER, true);
curl_setopt($rch, CURLOPT_NOBODY, true);
curl_setopt($rch, CURLOPT_FORBID_REUSE, false);
do
{
curl_setopt($rch, CURLOPT_URL, $newurl);
$header = curl_exec($rch);
if (curl_errno($rch)) {
$code = 0;
} else {
$code = curl_getinfo($rch, CURLINFO_HTTP_CODE);
if ($code == 301 || $code == 302) {
preg_match('/Location:(.*?)\n/', $header, $matches);
$newurl = trim(array_pop($matches));
// if no scheme is present then the new url is a
// relative path and thus needs some extra care
if(!preg_match("/^https?:/i", $newurl)){
$newurl = $original_url . $newurl;
}
} else {
$code = 0;
}
}
} while ($code && --$mr);
curl_close($rch);
if (!$mr)
{
if ($maxredirect === null)
trigger_error('Too many redirects.', E_USER_WARNING);
else
$maxredirect = 0;
return false;
}
curl_setopt($ch, CURLOPT_URL, $newurl);
}
}
return curl_exec($ch);
}
$ch = curl_init($_POST['form_url']);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
$data = curl_exec_follow($ch);
curl_close($ch);
echo $data;
}
Try this... hope this helps:
<?php
class Curl
{
public $cookieJar = "";
public $curl;
public function __construct($cookieJarFile = 'cookies.txt') {
$this->cookieJar = $cookieJarFile;
}
function setup()
{
$header = array();
$header[0] = "Accept: text/xml,application/xml,application/xhtml+xml,";
$header[0] .= "text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5";
$header[] = "Cache-Control: max-age=0";
$header[] = "Connection: keep-alive";
$header[] = "Keep-Alive: 300";
$header[] = "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7";
$header[] = "Accept-Language: en-us,en;q=0.5";
$header[] = "Pragma: "; // browsers keep this blank.
curl_setopt($this->curl, CURLOPT_USERAGENT, 'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.8.1.7) Gecko/20070914 Firefox/2.0.0.7');
curl_setopt($this->curl, CURLOPT_HTTPHEADER, $header);
curl_setopt($this->curl,CURLOPT_COOKIEJAR, $this->cookieJar);
curl_setopt($this->curl,CURLOPT_COOKIEFILE, $this->cookieJar);
curl_setopt($this->curl,CURLOPT_AUTOREFERER, true);
curl_setopt($this->curl,CURLOPT_FOLLOWLOCATION, true);
curl_setopt($this->curl,CURLOPT_RETURNTRANSFER, true);
}
function get($url)
{
$this->curl = curl_init($url);
$this->setup();
return $this->request();
}
function getAll($reg,$str)
{
preg_match_all($reg,$str,$matches);
return $matches[1];
}
function postForm($url, $fields, $referer='')
{
$this->curl = curl_init($url);
$this->setup();
curl_setopt($this->curl, CURLOPT_URL, $url);
curl_setopt($this->curl, CURLOPT_POST, 1);
curl_setopt($this->curl, CURLOPT_REFERER, $referer);
curl_setopt($this->curl, CURLOPT_POSTFIELDS, $fields);
return $this->request();
}
function getInfo($info)
{
$info = ($info == 'lasturl') ? curl_getinfo($this->curl, CURLINFO_EFFECTIVE_URL) : curl_getinfo($this->curl, $info);
return $info;
}
function request()
{
return curl_exec($this->curl);
}
}
$curl = new Curl();
$html = $curl->get("http://www.thefancy.com");
echo $html;
?>
You're probably unable to scrape http://www.thefancy.com because every time you reach the bottom of the page new content is loaded, so you are actually trying to fetch an enormous amount of information with cURL, and that is probably where the problem is: you just get a timeout. Try setting a larger timeout in php.ini and give it another try. It will probably take a while to load, but I think it will work this way.
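Rather than raising a global limit in php.ini, you could also give cURL itself more generous per-request timeouts; a minimal sketch (the timeout values are arbitrary examples):
$ch = curl_init('http://www.thefancy.com');
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 20); // seconds allowed for establishing the connection
curl_setopt($ch, CURLOPT_TIMEOUT, 120); // seconds allowed for the whole transfer
$data = curl_exec($ch);
curl_close($ch);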
Well, the problem is simple: today this is not working!
<?php
function cload($url, $got = array()){
//include(MDL.'socket_adapter.php');
$user_agent = "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:5.0) Gecko/20100101 Firefox/5.0";
$default_options = array(
'data' => 'og',
'post_data' => false,
'referer' => false,
'cookie' => false,
'auth' => false,
'proxy' => false,
'pauth' => false,
'returndata' => true,
);
foreach($default_options as $opt=>$value) {
if(!empty($default_options[$opt])) {$got[$opt] = $value; }
//if(!empty($got[$opt])) {$got[$opt] = $value; }
}
//echo "<hr>"; print_r($got); echo "<hr>";
$curl = curl_init();
//if(strstr($referer,"://")){
//curl_setopt ($curl, CURLOPT_REFERER, $got['referer']);}
curl_setopt ($curl, CURLOPT_URL, $url);
curl_setopt ($curl, CURLOPT_USERAGENT, $user_agent);
curl_setopt ($curl, CURLOPT_HEADER, 1);
if(isset($got['returndata'])){
curl_setopt ($curl, CURLOPT_RETURNTRANSFER, 1);
}else{
curl_setopt ($curl, CURLOPT_RETURNTRANSFER, false);
}
curl_setopt ($curl, CURLOPT_ENCODING, 'gzip, deflate');
curl_setopt ($curl, CURLOPT_SSL_VERIFYPEER, false);
if(isset($got['post_data'])){ curl_setopt($curl ,CURLOPT_POST , true);
curl_setpot($curl ,CURLOPT_POSTFIELDS ,$got['post_data']); }
if(isset($got['referer'])) curl_setopt($curl,CURLOPT_REFERER, $got['referer']);
if(isset($got['cookie'])){ if($got['cookie'] == "0"){
curl_setopt($curl ,CURLOPT_COOKIEJAR, TBP."cookie.txt"); }//If ever needed...
else{
curl_setopt($curl ,CURLOPT_COOKIE, $got['cookie']); }}
$custom_headers = array();
$custom_headers[] = "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8";
$custom_headers[] = "Pragma: no-cache";
$custom_headers[] = "Cache-Control: no-cache";
$custom_headers[] = "Accept-Language: en-us;q=0.7,en;q=0.3";
$custom_headers[] = "Accept-Charset: utf-8,windows-1251;q=0.7,*;q=0.7";
if(isset($url_parts['user']) and isset($url_parts['pass'])) {
$custom_headers[] = "Authorization: Basic ".base64_encode($url_parts['user'].':'.$url_parts['pass']);
}elseif(isset($got['auth'])){
$uj = explode(":",$got['auth']); $custom_headers[] = "Authorization: Basic ".base64_encode($uj[0].':'.$uj[1]); }
if(isset($got['pauth'])){
curl_setopt($curl ,CURLOPT_PROXYUSERPWD ,$pauth); }
if(isset($got['proxy'])){
curl_setopt($curl ,CURLOPT_PROXY ,$proxy); }
curl_setopt($curl ,CURLOPT_HTTPHEADER, $custom_headers);
//curl_setopt($curl, CURLINFO_HEADER_OUT, true);
$response = curl_exec($curl);
$info = curl_getinfo($curl);// , CURLINFO_HEADER_OUT);
curl_close ($curl);
//echo "<hr>"; echo $info; echo "<hr>";
return $response; }
?>
Now, when I am using this code:
<?php
$cv = cload('https://localhost/a/ac.php?a=io',array('cookie' => $fbcook,'referer' => $ref,'post_data' => 'odl=lop&isi=837&io'));
echo $cv;
?>
But I am getting this error:
Fatal error: Call to undefined function curl_setpot() in C:\xampp\htdocs\a\mack\curl.php on line 41
Please help me! Why is this happening? Line 41 is this line: curl_setpot($curl ,CURLOPT_POSTFIELDS ,$got['post_data']);
Thanks!
Simple typo: curl_setpot should be curl_setopt.
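The corrected line would read:
curl_setopt($curl, CURLOPT_POSTFIELDS, $got['post_data']);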
When a cURL function is undefined, the most likely cause is that cURL is not available.
In this case, however, I think you meant curl_setopt().
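If you want to rule out a missing extension before hunting for typos, a quick check:
// false means the cURL extension is not loaded at all;
// true means it is fine and the error is just a typo
var_dump(function_exists('curl_setopt'));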
curl_setpot should be curl_setopt.
You use XAMPP, so you need to activate the cURL library by following these steps:
Locate XAMPP install directory
Open php/php.ini (probably C:\xampp\php\php.ini
or C:\program files\apachefriends\xampp\php\php.ini)
Do a search for the word 'curl' and uncomment that line (remove the leading semicolon). Before removing: ;extension=php_curl.dll. After removing: extension=php_curl.dll
Save and close
Open apache/bin/php.ini (probably C:\xampp\apache\php.ini
or C:\ program files\apachefriends\xampp\apache\php.ini)
Search for curl and uncomment it as before (step 3)
Save and close
Do not forget to restart Apache
Good luck
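After restarting Apache, you can verify that the extension was actually picked up with a short test script:
<?php
// should print bool(true) once extension=php_curl.dll is enabled and Apache restarted
var_dump(extension_loaded('curl'));
?>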