I tried implementing a login via PayPal. Essentially, I just copied this example: How to Implement User Log-in with PayPal.
However, I am getting a 400 Bad Request error on the second request (in the PayPal login include file). I cannot make much of the error; maybe someone knows what's up. The result:
object(Httpful\Response)#14 (13) {
["body"]=>
string(0) ""
["raw_body"]=>
string(0) ""
["headers"]=>
object(Httpful\Response\Headers)#15 (1) {
["headers":"Httpful\Response\Headers":private]=>
array(10) {
["date"]=>
string(29) "Sun, 23 Jul 2017 13:30:12 GMT"
["server"]=>
string(6) "Apache"
["paypal-debug-id"]=>
string(13) "bcd5c7f1d86b2"
["www-authenticate"]=>
string(255) "Bearer error_description="GET /v1/oauth2/token/userinfo?schema=openidconnect returned a response status of 400 Bad Request",correlation_id="1283a4f54dc2a",error="400",information_link="https://developer.paypal.com/docs/api/#errors",realm="UserInfoService""
["set-cookie"]=>
string(52) "X-PP-SILOVER=; Expires=Thu, 01 Jan 1970 00:00:01 GMT"
["vary"]=>
string(29) "Accept-Encoding,Authorization"
["connection"]=>
string(5) "close"
["http_x_pp_az_locator"]=>
string(11) "sandbox.slc"
["transfer-encoding"]=>
string(7) "chunked"
["content-type"]=>
string(29) "text/html; charset=ISO-8859-1"
}
}
["raw_headers"]=>
string(983) "HTTP/1.1 400 Bad Request
Date: Sun, 23 Jul 2017 13:30:12 GMT
Server: Apache
Paypal-Debug-Id: 1283a4f54dc2a
WWW-Authenticate: Bearer error_description="GET /v1/oauth2/token/userinfo?schema=openidconnect returned a response status of 400 Bad Request",correlation_id="1283a4f54dc2a",error="400",information_link="https://developer.paypal.com/docs/api/#errors",realm="UserInfoService"
Set-Cookie: Apache=10.72.108.11.1500816612903307; path=/; expires=Tue, 16-Jul-47 13:30:12 GMT
Vary: Accept-Encoding,Authorization
Connection: close
HTTP_X_PP_AZ_LOCATOR: sandbox.slc
Paypal-Debug-Id: bcd5c7f1d86b2
Set-Cookie: X-PP-SILOVER=name%3DSANDBOX3.API.1%26silo_version%3D1880%26app%3Didentityspartaweb_api%26TIME%3D3835982937%26HTTP_X_PP_AZ_LOCATOR%3Dsandbox.slc; Expires=Sun, 23 Jul 2017 14:00:12 GMT; domain=.paypal.com; path=/; Secure; HttpOnly
Set-Cookie: X-PP-SILOVER=; Expires=Thu, 01 Jan 1970 00:00:01 GMT
Transfer-Encoding: chunked
Content-Type: text/html; charset=ISO-8859-1"
["request"]=>
object(Httpful\Request)#13 (22) {
["uri"]=>
string(80) "https://api.sandbox.paypal.com/v1/identity/openidconnect/userinfo/?schema=openid"
["method"]=>
string(3) "GET"
["headers"]=>
array(2) {
["Authorization"]=>
string(97) "A23AAHJ_6sujsm8hanJJWRQ8WqIjhbVvxqG-Z3g4Te3QzwkVdw6cLWgCeidGOgPng0kFx24dYlCoWhZNKlhTuDs-_knOqOERQ"
["Content-Length"]=>
int(0)
}
["raw_headers"]=>
string(493) "GET /v1/identity/openidconnect/userinfo/?schema=openid HTTP/1.1
Host: api.sandbox.paypal.com
Expect:
User-Agent: Httpful/0.2.19 (cURL/7.47.0 PHP/7.0.18-0ubuntu0.16.04.1 (Linux) nginx/1.10.0 Mozilla/5.0 (Windows NT 10.0; WOW64; rv:54.0) Gecko/20100101 Firefox/54.0)
Content-Type: application/json
Accept: */*; q=0.5, text/plain; q=0.8, text/html;level=3;
Authorization: A23AAHJ_6sujsm8hanJJWRQ8WqIjhbVvxqG-Z3g4Te3QzwkVdw6cLWgCeidGOgPng0kFx24dYlCoWhZNKlhTuDs-_knOqOERQ
Content-Length: 0
"
["strict_ssl"]=>
bool(false)
["content_type"]=>
string(16) "application/json"
["expected_type"]=>
NULL
["additional_curl_opts"]=>
array(0) {
}
["auto_parse"]=>
bool(true)
["serialize_payload_method"]=>
int(2)
["username"]=>
string(80) "[I removed this.]"
["password"]=>
string(80) "[I removed this.]"
["serialized_payload"]=>
NULL
["payload"]=>
NULL
["parse_callback"]=>
NULL
["error_callback"]=>
NULL
["send_callback"]=>
NULL
["follow_redirects"]=>
bool(false)
["max_redirects"]=>
int(25)
["payload_serializers"]=>
array(0) {
}
["_ch"]=>
resource(4) of type (Unknown)
["_debug"]=>
NULL
}
["code"]=>
int(400)
["content_type"]=>
string(9) "text/html"
["parent_type"]=>
string(9) "text/html"
["charset"]=>
string(10) "ISO-8859-1"
["meta_data"]=>
array(26) {
["url"]=>
string(80) "https://api.sandbox.paypal.com/v1/identity/openidconnect/userinfo/?schema=openid"
["content_type"]=>
string(29) "text/html; charset=ISO-8859-1"
["http_code"]=>
int(400)
["header_size"]=>
int(987)
["request_size"]=>
int(486)
["filetime"]=>
int(-1)
["ssl_verify_result"]=>
int(0)
["redirect_count"]=>
int(0)
["total_time"]=>
float(0.919636)
["namelookup_time"]=>
float(5.1E-5)
["connect_time"]=>
float(0.193331)
["pretransfer_time"]=>
float(0.708154)
["size_upload"]=>
float(0)
["size_download"]=>
float(0)
["speed_download"]=>
float(0)
["speed_upload"]=>
float(0)
["download_content_length"]=>
float(-1)
["upload_content_length"]=>
float(-1)
["starttransfer_time"]=>
float(0.919562)
["redirect_time"]=>
float(0)
["redirect_url"]=>
string(0) ""
["primary_ip"]=>
string(11) "173.0.82.78"
["certinfo"]=>
array(0) {
}
["primary_port"]=>
int(443)
["local_ip"]=>
string(9) "10.0.2.15"
["local_port"]=>
int(50988)
}
["is_mime_vendor_specific"]=>
bool(false)
["is_mime_personal"]=>
bool(false)
["parsers":"Httpful\Response":private]=>
NULL
}
My code: (ppinit.php just defines constants)
require_once(dirname(__FILE__) . "/ppinit.php");

$requestData = '?grant_type=authorization_code&code=' . getGet("code") . '&return_url=http://localhost/paypal/return';
$response = \Httpful\Request::get("https://" . PAYPAL_API_URL . "/v1/identity/openidconnect/tokenservice" . $requestData)
    ->authenticateWith(PAYPAL_CLIENT_ID, PAYPAL_CLIENT_SECRET)
    ->send();
$jsonResponse = json_decode($response->raw_body);
if (isset($jsonResponse->error))
{
    echo "Fehler bei der Anmeldung mit PayPal."; // "Error logging in with PayPal."
    echo "<br/>";
    exit;
}
// This is the request that does not work.
$response = \Httpful\Request::get("https://" . PAYPAL_API_URL . "/v1/identity/openidconnect/userinfo/?schema=openid")
    ->contentType("application/json")
    ->authorization($jsonResponse->access_token)
    ->authenticateWith(PAYPAL_CLIENT_ID, PAYPAL_CLIENT_SECRET)
    ->send();
$ppuser = json_decode($response);
Generally speaking, an HTTP 400 Bad Request means that the request you sent was malformed, i.e. the data stream sent by the client to the server didn't follow the rules.
It's hard to pin down the exact source of such errors since there could be many, but I suggest wrapping your HTTP calls in try/catch blocks to track down the cause. In this case, you can replace your code with this:
try {
    $response = \Httpful\Request::get("https://" . PAYPAL_API_URL . "/v1/identity/openidconnect/tokenservice" . $requestData)
        ->authenticateWith(PAYPAL_CLIENT_ID, PAYPAL_CLIENT_SECRET)
        ->send();
} catch (Exception $e) {
    var_dump($e->getMessage());
    exit(1);
}
If you are using a namespace, put a backslash before Exception so it resolves to the global class: \Exception.
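For example, inside a namespaced file the catch block above would look like this ($url stands in for the full request built earlier; it is just a placeholder here):

try {
    $response = \Httpful\Request::get($url)->send();
} catch (\Exception $e) {
    // \Exception refers to the global exception class from inside a namespace
    var_dump($e->getMessage());
    exit(1);
}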
Replacing the second request with the code below works.
try
{
    $params   = array('access_token' => $jsonResponse->access_token);
    $userInfo = OpenIdUserinfo::getUserinfo($params, $paypal);
} catch (Exception $ex)
{
    echo $ex;
    exit(1);
}
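For context, OpenIdUserinfo here is presumably the PayPal\Api\OpenIdUserinfo class from the PayPal REST API PHP SDK, and $paypal is the ApiContext it expects. A minimal sketch of how that context might be created, assuming the paypal/rest-api-sdk-php package and the same PAYPAL_CLIENT_ID / PAYPAL_CLIENT_SECRET constants as before (the sandbox mode line is my assumption):

use PayPal\Api\OpenIdUserinfo;
use PayPal\Auth\OAuthTokenCredential;
use PayPal\Rest\ApiContext;

// Build the API context from the same client credentials used for the token request.
$paypal = new ApiContext(new OAuthTokenCredential(PAYPAL_CLIENT_ID, PAYPAL_CLIENT_SECRET));
$paypal->setConfig(array('mode' => 'sandbox')); // 'live' outside the sandbox

// $userInfo is then an OpenIdUserinfo object, e.g. $userInfo->getEmail().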
Related
I am using cURL in PHP to upload a video to Wistia. Everything works fine on my local server, but on the dev server the video is not uploading. Using var_dump(curl_getinfo($ch)), I can see the content_type differs between the local and dev servers. I am confused about it. Can anyone help me resolve this problem?
Here is my code:
public function video_upload($filePath)
{
    $data = array(
        'api_password' => '0XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX5',
        'file' => '@' . $filePath,
    );
    $url = 'https://upload.wistia.com';
    $ch = curl_init();
    curl_setopt_array($ch, array(
        CURLOPT_URL => $url,
        CURLOPT_POST => true,
        CURLOPT_POSTFIELDS => $data,
        CURLOPT_HEADER => false,
        CURLOPT_RETURNTRANSFER => true,
    ));
    curl_setopt($ch, CURLINFO_HEADER_OUT, true);
    $response = curl_exec($ch);
    var_dump(curl_getinfo($ch));
    var_dump(curl_errno($ch));
    curl_close($ch);
    return $response;
}
Response received on my local server:
array(27) {
["url"]=> string(26) "https://upload.wistia.com/"
**["content_type"]=> string(30) "application/json;charset=utf-8"**
["http_code"]=> int(200)
["header_size"]=> int(688)
["request_size"]=> int(189)
["filetime"]=> int(-1)
["ssl_verify_result"]=> int(0)
["redirect_count"]=> int(0)
["total_time"]=> float(17.850026)
["namelookup_time"]=> float(0.252903)
["connect_time"]=> float(0.253271)
["pretransfer_time"]=> float(1.903306)
["size_upload"]=> float(279250)
["size_download"]=> float(417)
["speed_download"]=> float(23)
["speed_upload"]=> float(15644)
["download_content_length"]=> float(417)
["upload_content_length"]=> float(279250)
["starttransfer_time"]=> float(2.173591)
["redirect_time"]=> float(0)
["redirect_url"]=> string(0) ""
["primary_ip"]=> string(13) "162.209.95.19"
["certinfo"]=> array(0) { }
["primary_port"]=> int(443)
["local_ip"]=> string(13) "192.168.1.157"
["local_port"]=> int(54999)
["request_header"]=> string(189) "POST / HTTP/1.1 Host: upload.wistia.com Accept: */* Content-Length: 279250 Expect: 100-continue Content-Type: multipart/form-data; boundary=------------------------370a5719d6336ecc "
} int(0)
Response received on my dev server:
array(27) {
["url"]=> string(26) "https://upload.wistia.com/"
**["content_type"]=> string(23) "text/html;charset=utf-8"**
["http_code"]=> int(500)
["header_size"]=> int(718)
["request_size"]=> int(186)
["filetime"]=> int(-1)
["ssl_verify_result"]=> int(0)
["redirect_count"]=> int(0)
["total_time"]=> float(0.437061)
["namelookup_time"]=> float(0.004766)
["connect_time"]=> float(0.023656)
["pretransfer_time"]=> float(0.194844)
["size_upload"]=> float(319)
["size_download"]=> float(30)
["speed_download"]=> float(68)
["speed_upload"]=> float(729)
["download_content_length"]=> float(30)
["upload_content_length"]=> float(319)
["starttransfer_time"]=> float(0.216544)
["redirect_time"]=> float(0)
["redirect_url"]=> string(0) ""
["primary_ip"]=> string(15) "162.242.168.223"
["certinfo"]=> array(0) { }
["primary_port"]=> int(443)
["local_ip"]=> string(14) "224.178.240.48"
["local_port"]=> int(55164)
["request_header"]=> string(186) "POST / HTTP/1.1 Host: upload.wistia.com Accept: */* Content-Length: 319 Expect: 100-continue Content-Type: multipart/form-data; boundary=----------------------------d45c07c28860 "
} int(0)
It's hard to say why it is not working, but the response you get from the server includes a 500 HTTP status code, which indicates that something is wrong on the server.
It is possible that something is wrong on your end, but without more information from the server, it's really hard to tell what is going wrong. In general, a 500 response indicates a server-side (Wistia) error, not a client-side (your) error.
You might want to send the information to Wistia to get more details.
You are getting a 500 error from the server in the second response; that's why it is not JSON.
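So before decoding the response it helps to check the status code and content type. A small sketch (this is not taken from the question's code, just an illustration):

$response = curl_exec($ch);
$info = curl_getinfo($ch);

// Only decode when the request succeeded and the body is actually JSON.
if ($info['http_code'] == 200 && stripos($info['content_type'], 'application/json') === 0) {
    $result = json_decode($response, true);
} else {
    // Log the raw body; on a 500 it is usually an HTML or plain-text error page.
    error_log('Wistia upload failed: HTTP ' . $info['http_code'] . ' - ' . $response);
    $result = null;
}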
I bet you're sending a different POST request altogether.
Do something like this:
$verbosefileh = tmpfile();
$verbosefile  = stream_get_meta_data($verbosefileh)['uri'];
curl_setopt_array($ch, array(
    CURLOPT_CERTINFO => true,
    CURLOPT_VERBOSE  => true,
    CURLOPT_STDERR   => $verbosefileh
));
curl_exec($ch);
$postrequest = file_get_contents($verbosefile);
Then study the POST request closely: what's the difference between the requests sent by the two servers? I bet there is something, unless you're blocked by an IP ban.
Edit: a common gotcha is that on some installations cURL has a default user agent and on others it doesn't (in Debian 6 it's something like "curl/7.21.3 (x86_64-unknown-linux-gnu) libcurl/7.21.3 OpenSSL/1.0.0c zlib/1.2.5", while in Debian 8 there IS no default string, or was it the other way around?), and many websites will block requests that don't contain a user agent. To make sure you send one, you can use curl_setopt($ch, CURLOPT_USERAGENT, 'curl php');
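One concrete thing I would compare between the two servers (this is my guess, not something confirmed in the question): the dev server only uploads 319 bytes, which is roughly what you would see if '@'.$filePath were sent as a literal string instead of an attached file. The '@' upload syntax is deprecated since PHP 5.5 and ignored when CURLOPT_SAFE_UPLOAD is enabled (the default since PHP 5.6), so if the dev server runs a newer PHP, something like CURLFile is needed:

// Prefer CURLFile (PHP >= 5.5); fall back to the legacy '@' prefix on older PHP.
if (class_exists('CURLFile')) {
    $data['file'] = new CURLFile($filePath);
} else {
    $data['file'] = '@' . $filePath;
}
curl_setopt($ch, CURLOPT_POSTFIELDS, $data);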
I need to determine if a URL exists. I ran across this post.
How can I check if a URL exists via PHP?
$file = 'http://godaddy';
$file_headers = @get_headers($file);
if ($file_headers[0] == 'HTTP/1.1 404 Not Found')
{
    $exists = false;
}
else
{
    $exists = true;
}
I implemented this code, and when I tested it as a user who forgot to put in the .com, it comes back with true. That isn't correct, because if you go to http://godaddy there is no website.
I tried validating $file beforehand, but
filter_var($url, FILTER_VALIDATE_URL);
views http://godaddy as a valid URL.
Any idea how to handle this sort of input?
var_dump($file_headers)= array(8) {
[0]=> string(15) "HTTP/1.1 200 OK"
[1]=> string(13) "Server: nginx"
[2]=> string(35) "Date: Mon, 29 Jun 2015 14:23:07 GMT"
[3]=> string(23) "Content-Type: text/html"
[4]=> string(17) "Connection: close"
[5]=> string(21) "Vary: Accept-Encoding"
[6]=> string(38) "Expires: Mon, 29 Jun 2015 14:23:06 GMT"
[7]=> string(23) "Cache-Control: no-cache"
}
Try it without the @ character; that way you can see the error directly. I think the error suppression prevents the response of get_headers() from being read correctly.
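Beyond removing the error suppression, here is a hedged sketch of a stricter check: require a dot in the host and a resolvable DNS name before calling get_headers(), and inspect the status code instead of comparing one exact string (the helper name url_exists is mine, not from the question):

function url_exists($url) {
    $host = parse_url($url, PHP_URL_HOST);

    // "http://godaddy" has no dot in the host, so treat it as invalid input.
    if (!is_string($host) || strpos($host, '.') === false) {
        return false;
    }
    // Reject hosts that don't resolve at all.
    if (!checkdnsrr($host, 'A') && !checkdnsrr($host, 'AAAA')) {
        return false;
    }

    $headers = get_headers($url);
    if ($headers === false) {
        return false;
    }
    // Treat any 2xx/3xx status line as "exists".
    return (bool) preg_match('#^HTTP/\S+\s+[23]\d\d#', $headers[0]);
}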
I'm trying to set up a cURL request that fetches remote files only if they have been modified since my stored timestamp.
I want to act on the HTTP code of my cURL request; here is an example.
I have stored a timestamp of the last download of file XX: 2014-12-08 06:56:03.
My cURL request:
$ch = curl_init($url);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
curl_setopt($ch, CURLOPT_HEADER, true);
curl_setopt($ch, CURLOPT_FILETIME, true);
curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 5);
curl_setopt($ch, CURLOPT_TIMEVALUE, strtotime($timestamp));
curl_setopt($ch, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
$res = curl_exec($ch);
$info = curl_getinfo($ch);
As the documentation says
CURLOPT_TIMECONDITION
How CURLOPT_TIMEVALUE is treated. Use CURL_TIMECOND_IFMODSINCE to return the page only if it has been modified since the time specified in CURLOPT_TIMEVALUE. If it hasn't been modified, a "304 Not Modified" header will be returned assuming CURLOPT_HEADER is TRUE. Use CURL_TIMECOND_IFUNMODSINCE for the reverse effect. CURL_TIMECOND_IFMODSINCE is the default.
So, if my timestamp is 2014-12-08 06:56:03 and the filetime of the remote file is 2014-12-08 04:59:03, it should return an HTTP code of 304, but I'm always getting an HTTP code of 200.
Have I misunderstood the docs?
These are the timestamps in seconds:
local file 1418021941
remote file 1418014742
and this is the info of the above cURL request:
array(26) {
["url"]=> string(32) "XXX"
["content_type"]=> string(24) "application/octet-stream"
["http_code"]=> int(200)
["header_size"]=> int(251)
["request_size"]=> int(113)
["filetime"]=> int(1418014742)
["ssl_verify_result"]=> int(0)
["redirect_count"]=> int(0)
["total_time"]=> float(0.100412)
["namelookup_time"]=> float(0.010285)
["connect_time"]=> float(0.05576)
["pretransfer_time"]=> float(0.055878)
["size_upload"]=> float(0)
["size_download"]=> float(0)
["speed_download"]=> float(0)
["speed_upload"]=> float(0)
["download_content_length"]=> float(371712)
["upload_content_length"]=> float(0)
["starttransfer_time"]=> float(0.100382)
["redirect_time"]=> float(0)
["redirect_url"]=> string(0) ""
["primary_ip"]=> string(11) "XXX"
["certinfo"]=> array(0) { }
["primary_port"]=> int(80)
["local_ip"]=> string(11) "XXX"
["local_port"]=> int(XX)
}
This is the info if I change the timestamp of my local file to be earlier than the remote one (2014-12-06 06:56:03):
array(26) {
["url"]=> string(32) "XXX"
["content_type"]=> string(24) "application/octet-stream"
["http_code"]=> int(200)
["header_size"]=> int(251)
["request_size"]=> int(113)
["filetime"]=> int(1418014742)
["ssl_verify_result"]=> int(0)
["redirect_count"]=> int(0)
["total_time"]=> float(0.583712)
["namelookup_time"]=> float(0.011975)
["connect_time"]=> float(0.056813)
["pretransfer_time"]=> float(0.056977)
["size_upload"]=> float(0)
["size_download"]=> float(371712)
["speed_download"]=> float(636807)
["speed_upload"]=> float(0)
["download_content_length"]=> float(371712)
["upload_content_length"]=> float(0)
["starttransfer_time"]=> float(0.103772)
["redirect_time"]=> float(0)
["redirect_url"]=> string(0) ""
["primary_ip"]=> string(11) "XXX"
["certinfo"]=> array(0) { }
["primary_port"]=> int(80)
["local_ip"]=> string(11) "XX"
["local_port"]=> int(XX)
}
As you can see, there is a difference between the two: size_download is 0 in the first one and greater than 0 in the second.
Any suggestions? Will I always get an HTTP code of 200?
--------- edit
This is the header I get back with local timestamp 2014-12-08 06:59:01
HTTP/1.1 200 OK
Server: nginx
Date: Mon, 08 Dec 2014 10:23:06 GMT
Content-Type: application/octet-stream
Content-Length: 371712
Last-Modified: Mon, 08 Dec 2014 04:59:02 GMT
Connection: keep-alive
ETag: "54853016-5ac00"
Accept-Ranges: bytes
Yes, you misunderstood the docs.
If it hasn't been modified, a "304 Not Modified" header will be returned assuming CURLOPT_HEADER is TRUE.
No change -> Return a "304 Not Modified" header.
Modified -> Return 200
---------round 2---------
File's Last-Modified: Thu, 18 Dec 2014 05:37:48 GMT.
date_default_timezone_set("Etc/GMT");
$url = "...";
$timestamp = "2014-12-18 05:37:48";
$ch = curl_init($url);
curl_setopt($ch, CURLOPT_HEADER, true);
curl_setopt($ch, CURLOPT_TIMEVALUE, strtotime($timestamp));
curl_setopt($ch, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
$res = curl_exec($ch);
1. If I use the above test code, it returns a header like this:
HTTP/1.1 304 Not Modified
Date: Thu, 18 Dec 2014 08:27:55 GMT
Server: Apache/2.4.7 (Ubuntu)
ETag: "17c-50a76fe108d7a"
2. When I change the $timestamp, it returns this:
HTTP/1.1 200 OK
Date: Thu, 18 Dec 2014 08:31:02 GMT
Server: Apache/2.4.7 (Ubuntu)
Last-Modified: Thu, 18 Dec 2014 05:37:48 GMT
ETag: "17c-50a76fe108d7a"
Accept-Ranges: bytes
Content-Length: 380
Vary: Accept-Encoding
Content-Type: text/html
I think you should set the timezone; if I don't set the timezone, it always returns 200.
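To tie this back to the original goal (only downloading when the file has changed), here is a small sketch of how the 200/304 split might be handled once the timezone is set. $url, $timestamp, and $localPath are placeholders of mine, and I drop CURLOPT_HEADER so $body contains only the file contents:

date_default_timezone_set('Etc/GMT'); // compare against Last-Modified in GMT

$ch = curl_init($url);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
curl_setopt($ch, CURLOPT_FILETIME, true);
curl_setopt($ch, CURLOPT_TIMEVALUE, strtotime($timestamp));
curl_setopt($ch, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
$body = curl_exec($ch);
$code = curl_getinfo($ch, CURLINFO_HTTP_CODE);
curl_close($ch);

if ($code == 304) {
    // Not modified since $timestamp: keep the local copy.
} elseif ($code == 200) {
    file_put_contents($localPath, $body); // modified: store the fresh download
}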
I'm trying to drop invalid URLs from my flash games site.
Here is my code:
function valid($URL1) {
    $headers = get_headers($URL1);
    $headers = substr($headers[8], 38, 5); // leaves only the word "flash"
    if ($headers == 'flash')
        return true;
    else
        return false;
}
$URL1 = 'http://www.ht83.com/medias/media-16/ht83com-cde-house-decoration.swf';
if (valid($URL1))
    echo 'SWF are word>';
That code returns true even when the Content-Type is not SWF.
By the way, I already tried
$headers = $headers['Content-Type'];
but it gives me no result.
When I tried
var_dump($headers);
it returns this for the valid SWF URL
http://www.ht83.com/medias/media-16/ht83com-spongebob-squarepants-gone-fishing.swf
array(9) {
  [0]=> string(15) "HTTP/1.1 200 OK"
  [1]=> string(35) "Date: Sat, 01 Feb 2014 01:36:35 GMT"
  [2]=> string(144) "Server: Apache/2.2.17 (Unix) mod_ssl/2.2.17 OpenSSL/0.9.8m DAV/2 mod_auth_passthrough/2.1 mod_bwlimited/1.4 FrontPage/5.0.2.2635 mod_fcgid/2.3.5"
  [3]=> string(20) "Accept-Ranges: bytes"
  [4]=> string(22) "Content-Length: 342771"
  [5]=> string(39) "Cache-Control: max-age=62208000, public"
  [6]=> string(38) "Expires: Mon, 03 Mar 2014 01:36:35 GMT"
  [7]=> string(17) "Connection: close"
  [8]=> string(43) "Content-Type: application/x-shockwave-flash"
}
and this for the invalid SWF URL
http://www.ht83.com/medias/media-16/ht83com-cde-house-decoration.swf
array(12) {
  [0]=> string(15) "HTTP/1.1 200 OK"
  [1]=> string(35) "Date: Sat, 01 Feb 2014 01:40:06 GMT"
  [2]=> string(144) "Server: Apache/2.2.17 (Unix) mod_ssl/2.2.17 OpenSSL/0.9.8m DAV/2 mod_auth_passthrough/2.1 mod_bwlimited/1.4 FrontPage/5.0.2.2635 mod_fcgid/2.3.5"
  [3]=> string(24) "X-Powered-By: PHP/5.2.16"
  [4]=> string(38) "Expires: Thu, 19 Nov 1981 08:52:00 GMT"
  [5]=> string(77) "Cache-Control: no-store, no-cache, must-revalidate, post-check=0, pre-check=0"
  [6]=> string(16) "Pragma: no-cache"
  [7]=> string(62) "Set-Cookie: PHPSESSID=359cf391842876b3cc79066dcc3a08f4; path=/"
  [8]=> string(21) "Vary: Accept-Encoding"
  [9]=> string(52) "Cache-Control: max-age=600, private, must-revalidate"
  [10]=> string(17) "Connection: close"
  [11]=> string(23) "Content-Type: text/html"
}
So is there an easier way to get the correct Content-Type of a URL?
Looks like I was using get_headers() with numeric keys only. This code from Sean Johnson works:
function valid($URL) {
    $headers = get_headers($URL, 1);
    return stripos($headers['Content-Type'], "application/x-shockwave-flash") !== false;
}
According to the very first example in the get_headers documentation, you need to use the second argument if you want to be able to access the headers by their key names.
Try this:
function valid($URL) {
    $headers = get_headers($URL, 1);
    return stripos($headers['Content-Type'], "flash") !== false;
}
Your code is assuming that the Content-Type header will always be the 9th header returned by the server, which is not the case.
You will need to loop through the headers and examine only the correct one (that is, the one that starts with Content-Type:).
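A sketch of that loop, keeping the plain (numeric) get_headers() output from the question:

function valid($URL) {
    $headers = get_headers($URL);
    if ($headers === false) {
        return false;
    }
    foreach ($headers as $header) {
        // Only look at the Content-Type header, wherever it appears in the list.
        if (stripos($header, 'Content-Type:') === 0) {
            return stripos($header, 'application/x-shockwave-flash') !== false;
        }
    }
    return false;
}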
$curl = new Curl();
$data = 'Email=' . urlencode('MYEMAIL@EMAIL.COM') . '&Passwd=' . urlencode('MYPASSWORD') . '&accountType=GOOGLE&source=Google-cURL-Example&service=jotspot';
$curl->post('https://www.google.com/accounts/ClientLogin', $data);

// match authorization token
preg_match("!Auth=(.*)!", $curl->response, $match);
$auth = $match[1];

// set curl headers
$curl->set_headers(array(
    'Content-Type: application/atom+xml; charset=utf-8',
    'Host: sites.google.com',
    'GData-Version: 1.4',
    'Authorization: GoogleLogin auth=' . trim($auth)));

// get a list of sites associated with my domain
$curl->get('https://sites.google.com/feeds/site/clevertechie.mygbiz.com');

// contains data returned by $curl->get();
echo $curl->response;
So instead of getting the list of sites from $curl->response, I get the message "Content is not allowed in prolog." I've looked everywhere and haven't been able to find a solution. Please help! Thanks! :)
This is the XML that is supposed to be returned by the response:
<?xml version='1.0' encoding='UTF-8'?>
<entry xmlns='http://www.w3.org/2005/Atom' xmlns:gAcl='http://schemas.google.com/acl/2007' xmlns:sites='http://schemas.google.com/sites/2008' xmlns:gs='http://schemas.google.com/spreadsheets/2006' xmlns:dc='http://purl.org/dc/terms' xmlns:batch='http://schemas.google.com/gdata/batch' xmlns:gd='http://schemas.google.com/g/2005' xmlns:thr='http://purl.org/syndication/thread/1.0'>
<updated>2012-10-31T19:00:17.297Z</updated>
<app:edited xmlns:app='http://www.w3.org/2007/app'>2012-10-31T19:00:17.297Z</app:edited>
<title>My Site Title</title>
<summary>My Site Summary</summary>
<sites:siteName>my-site-title</sites:siteName>
<sites:theme>slate</sites:theme>
</entry>
I can't paste the source of "https://sites.google.com/feeds/site/clevertechie.mygbiz.com" because it can't be accessed directly without an authorization token, which is specified in the headers. The only way to retrieve its data is by using the token in the headers, which I've done. Instead of getting the above XML, I'm getting "Content is not allowed in prolog".
var_dump of $curl:
object(Curl)#1 (11) {
  ["curl_resource":protected]=> resource(4) of type (Unknown)
  ["proxy":protected]=> bool(false)
  ["proxy_type":protected]=> NULL
  ["response"]=> string(33) "Content is not allowed in prolog."
  ["time"]=> float(249)
  ["info"]=> array(26) {
    ["url"]=> string(59) "https://sites.google.com/feeds/site/clevertechie.mygbiz.com"
    ["content_type"]=> string(24) "text/html; charset=UTF-8"
    ["http_code"]=> int(400)
    ["header_size"]=> int(676)
    ["request_size"]=> int(1935)
    ["filetime"]=> int(-1)
    ["ssl_verify_result"]=> int(20)
    ["redirect_count"]=> int(0)
    ["total_time"]=> float(0.249)
    ["namelookup_time"]=> float(0.015)
    ["connect_time"]=> float(0.046)
    ["pretransfer_time"]=> float(0.109)
    ["size_upload"]=> float(111)
    ["size_download"]=> float(33)
    ["speed_download"]=> float(132)
    ["speed_upload"]=> float(445)
    ["download_content_length"]=> float(-1)
    ["upload_content_length"]=> float(111)
    ["starttransfer_time"]=> float(0.249)
    ["redirect_time"]=> float(0)
    ["certinfo"]=> array(0) { }
    ["primary_ip"]=> string(14) "74.125.224.194"
    ["primary_port"]=> int(443)
    ["local_ip"]=> string(13) "192.168.1.133"
    ["local_port"]=> int(61985)
    ["redirect_url"]=> string(0) ""
  }
  ["error"]=> NULL
  ["custom_headers"]=> NULL
  ["cookie_file"]=> string(46) "cookies.txt"
  ["custom_curl_options":protected]=> array(3) {
    [47]=> int(1)
    [10015]=> string(111) "Email=MYEMAIL&Passwd=MYPASSWORD&accountType=GOOGLE&source=Google-cURL-Example&service=jotspot"
    [10023]=> array(4) {
      [0]=> string(49) "Content-Type: application/atom+xml; charset=utf-8"
      [1]=> string(22) "Host: sites.google.com"
      [2]=> string(18) "GData-Version: 1.4"
      [3]=> string(320) "Authorization: GoogleLogin auth=DQAAAMMAAAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
    }
  }
  ["curl_options":protected]=> array(9) {
    [10018]=> string(74) "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:14.0) Gecko/20100101 Firefox/14.0.1"
    [10016]=> string(22) "http://www.google.com/"
    [13]=> int(60)
    [78]=> int(60)
    [19913]=> int(1)
    [52]=> int(1)
    [64]=> int(0)
    [81]=> int(0)
    [42]=> int(0)
  }
}
$auth is just a string; it's not supposed to be formatted as XML. I verified that there are no extra spaces or characters and that it exactly matches the one returned by the first $curl->post request.
Leave the request type as a POST (with the content being the Atom XML payload) and set the content type to "application/atom+xml", but pass all of the OAuth values as GET parameters, i.e. as escaped (urlencoded) values on the URL query string.
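If I read that right, it amounts to something like the following plain-cURL sketch: keep the request a POST whose body is the Atom XML, and put anything that must travel as GET onto a urlencoded query string. The $atomXml payload and the example_param query value are placeholders of mine, not documented GData parameters:

// Placeholders (mine): a real Atom entry and the token from the ClientLogin step above.
$atomXml   = '<entry xmlns="http://www.w3.org/2005/Atom">...</entry>';
$authToken = trim($auth);

// Values that have to travel as GET go urlencoded onto the query string...
$url = 'https://sites.google.com/feeds/site/clevertechie.mygbiz.com'
     . '?' . http_build_query(array('example_param' => 'example value'));

// ...while the request itself stays a POST whose body is the Atom XML payload.
$ch = curl_init($url);
curl_setopt_array($ch, array(
    CURLOPT_POST           => true,
    CURLOPT_POSTFIELDS     => $atomXml,
    CURLOPT_RETURNTRANSFER => true,
    CURLOPT_HTTPHEADER     => array(
        'Content-Type: application/atom+xml; charset=utf-8',
        'GData-Version: 1.4',
        'Authorization: GoogleLogin auth=' . $authToken,
    ),
));
$response = curl_exec($ch);
curl_close($ch);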