Extract JSON MultiLineString coordinates from MySQL with PHP - php

How do I insert the following into, and extract it from, a MySQL database using PHP? I have tried 'MultiPolygon', 'MultiLineString' and 'GeometryCollection' but cannot get the correct output shown below.
1) JSON file to insert
{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "properties": { "GID": 4728339, "PRCL_KEY": "0000T0JT005300000056000010", "PRCL_TYPE": "E", "LSTATUS": "R", "WSTATUS": "C", "GEOM_AREA": 1558.723715, "COMMENTS": "", "TAG_X": 30.920674, "TAG_Y": -25.452585, "TAG_VALUE": "1\/56", "TAG_SIZE": 0.000020, "TAG_ANGLE": 6.282645, "TAG_JUST": "MC", "ID": "T0JT00530000005600001", "DATE_STAMP": "2010\/10\/05" }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 30.920838, -25.452746 ], [ 30.920876, -25.452431 ], [ 30.920876, -25.452431 ], [ 30.921498, -25.452367 ], [ 30.921498, -25.452367 ], [ 30.921492, -25.452341 ], [ 30.921492, -25.452341 ], [ 30.920476, -25.452445 ], [ 30.920476, -25.452445 ], [ 30.920441, -25.452719 ], [ 30.920441, -25.452719 ], [ 30.920838, -25.452746 ], [ 30.920838, -25.452746 ] ] ] } }
,
{ "type": "Feature", "properties": { "GID": 4822420, "PRCL_KEY": "0000T0JU004000004445000000", "PRCL_TYPE": "E", "LSTATUS": "R", "WSTATUS": "C", "GEOM_AREA": 391.799538, "COMMENTS": "", "TAG_X": 31.337096, "TAG_Y": -25.508292, "TAG_VALUE": "4445", "TAG_SIZE": 0.000020, "TAG_ANGLE": 0.002299, "TAG_JUST": "MC", "ID": "T0JU00400000444500000", "DATE_STAMP": "2013\/05\/22" }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 31.336972, -25.508217 ], [ 31.337103, -25.508419 ], [ 31.337103, -25.508419 ], [ 31.337234, -25.508350 ], [ 31.337234, -25.508350 ], [ 31.337101, -25.508149 ], [ 31.337101, -25.508149 ], [ 31.336972, -25.508217 ], [ 31.336972, -25.508217 ] ] ] } }
2) Output needed
{"type":"FeatureCollection","features":[{"type":"Feature","geometry":{"properties":{"name":"T0IS00210000007100000"},"type":"Polyline","coordinates":[29.245262,-26.95622,29.245272,-26.956412,29.245272,-26.956412,29.245282,-26.956603,29.245282,-26.956603,29.245826,-26.956581,29.245826,-26.956581,29.246128,-26.956398,29.246128,-26.956398]}},{"type":"Feature","geometry":{"properties":{"name":"T0IS00210000007300000"},"type":"Polyline","coordinates":[29.245211,-26.955206,29.245231,-26.955589,29.245231,-26.955589,29.246086,-26.955554,29.246086,-26.955554,29.246066,-26.95517,29.246066,-26.95517,29.245211,-26.955206,29.245211,-26.955206]}},{"type":"Feature","geometry":{"properties":{"name":"T0IS00210000007500000"},"type":"Polyline","coordinates":[29.245171,-26.954439,29.245191,-26.954822,29.245191,-26.954822,29.246047,-26.954787,29.246047,-26.954787,29.246037,-26.954594,29.246037,-26.954594,29.245558,-26.954614,29.245558,-26.954614]}},{"type":"Feature","geometry":{"properties":{"name":"T0IS00210000010200000"},"type":"Polyline","coordinates":[29.2436,-26.954444,29.243605,-26.95454,29.243605,-26.95454,29.243623,-26.954887,29.243623,-26.954887,29.244051,-26.95487,29.244051,-26.95487,29.244028,-26.954427,29.244028,-26.954427]}},{"type":"Feature","geometry":{"properties":{"name":"T0IS00210000030400000"},"type":"Polyline","coordinates":[29.242942,-26.949925,29.242963,-26.950314,29.242963,-26.950314,29.243388,-26.950298,29.243388,-26.950298,29.243374,-26.950036,29.243374,-26.950036,29.243367,-26.949915,29.243367,-26.949915]}},{"type":"Feature","geometry":{"properties":{"name":"T0IS00210000010400001"},"type":"Polyline","coordinates":[29.243429,-26.951122,29.243439,-26.951314,29.243439,-26.951314,29.243867,-26.951296,29.243867,-26.951296,29.243857,-26.951105,29.243857,-26.951105,29.243477,-26.95112,29.243477,-26.95112]}},{"type":"Feature","geometry":{"properties":{"name":"T0IS00210000010500000"},"type":"Polyline","coordinates":[29.243288,-26.956694,29.243309,-26.957098,29.243309,-26.957098,29.243328,-26.95746,29.243328,-26.95746,29.243755,-26.957443,29.243755,-26.957443,29.243716,-26.956676,29.243716,-26.956676]}},{"type":"Feature","geometry":{"properties":{"name":"T0IS00210000010600000"},"type":"Polyline","coordinates":[29.243261,-26.956168,29.243288,-26.956694,29.243288,-26.956694,29.243716,-26.956676,29.243716,-26.956676,29.243697,-26.956312,29.243697,-26.956312,29.243688,-26.956151,29.243688,-26.956151]}},{"type":"Feature","geometry":{"properties":{"name":"T0IS00210000010800000"},"type":"Polyline","coordinates":[29.243178,-26.954558,29.243195,-26.954905,29.243195,-26.954905,29.243304,-26.954901,29.243304,-26.954901,29.243623,-26.954887,29.243623,-26.954887,29.243605,-26.95454,29.243605,-26.95454]}},{"type":"Feature","geometry":{"properties":{"name":"T0IS00210000009400000"},"type":"Polyline","coordinates":[29.244647,-26.952287,29.244653,-26.952395,29.244653,-26.952395,29.244682,-26.952394,29.244682,-26.952394,29.244697,-26.952669,29.244697,-26.952669,29.245078,-26.952654,29.245078,-26.952654]}},{"type":"Feature","geometry":{"properties":{"name":"T0IS00210000009500000"},"type":"Polyline","coordinates":[29.244599,-26.95165,29.244609,-26.951846,29.244609,-26.951846,29.244652,-26.951844,29.244652,-26.951844,29.244661,-26.952032,29.244661,-26.952032,29.244825,-26.952025,29.244825,-26.952025]}},{"type":"Feature","geometry":{"properties":{"name":"T0IS00210000009800000"},"type":"Polyline","coordinates":[29.244753,-26.950491,29.244773,-26.950875,29.244773,-26.950875,29.244987,-26.950866,29.244987,-26.950866,29.244
967,-26.950483,29.244967,-26.950483,29.244753,-26.950491,29.244753,-26.950491]}}
3) Current PHP code
$geojson = array('type' => 'FeatureCollection', 'features' => array());
while ($row = mysql_fetch_assoc($dbquery)) {
    $feature = array(
        'type' => 'Feature',
        'geometry' => array(
            'properties' => array('name' => $row['LINK_ID']),
            'type' => 'Polyline',
            'coordinates' => array((float)$row['lon1'], (float)$row['lat1'], (float)$row['lon2'], (float)$row['lat2'], (float)$row['lon3'], (float)$row['lat3'], (float)$row['lon4'], (float)$row['lat4'], (float)$row['lon5'], (float)$row['lat5'], (float)$row['lon6'], (float)$row['lat6'], (float)$row['lon7'], (float)$row['lat7'], (float)$row['lon8'], (float)$row['lat8'], (float)$row['lon9'], (float)$row['lat9'])
        )
    );
    // array_push($geojson, $feature);
    array_push($geojson['features'], $feature);
}
Thank you

Unfortunately, MySQL (prior to 5.7) does not have a native ST_AsGeoJSON function like PostGIS. Fortunately, you can use geoPHP in combination with a little script I wrote to achieve the same result.
Export your GeoJSON file to a MySQL spatial table using OGR or QGIS.
Download geoPHP and my MySQL to GeoJSON script.
Fill in the proper db connection settings and this script should output your MySQL spatial table in proper GeoJSON format.
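Roughly, the conversion step of such a script boils down to the sketch below: read each geometry back from MySQL as WKB and let geoPHP turn it into a GeoJSON geometry. The table and column names (parcels, geom, prcl_id), the mysqli credentials, and the include path are placeholders:
<?php
// Sketch: MySQL spatial table -> GeoJSON FeatureCollection via geoPHP.
// Table/column names and connection settings are placeholders.
include_once 'geoPHP/geoPHP.inc';

$db = new mysqli('localhost', 'user', 'password', 'spatial_db');

$geojson = array('type' => 'FeatureCollection', 'features' => array());

// AsBinary() returns standard WKB; on MySQL 5.7+ use ST_AsBinary() instead.
$result = $db->query("SELECT prcl_id, AsBinary(geom) AS wkb FROM parcels");
while ($row = $result->fetch_assoc()) {
    $geom = geoPHP::load($row['wkb'], 'wkb');   // parse the WKB blob
    $geojson['features'][] = array(
        'type'       => 'Feature',
        'properties' => array('name' => $row['prcl_id']),
        // geoPHP emits a GeoJSON geometry string; decode it so it nests cleanly
        'geometry'   => json_decode($geom->out('json'), true),
    );
}

header('Content-Type: application/json');
echo json_encode($geojson);
On MySQL 5.7.5 and later you could skip geoPHP entirely and SELECT ST_AsGeoJSON(geom) straight from the database.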

Related

Illegal Argument Exception when using Suggesters in Elasticsearch in PHP

I have tried implementing a completion suggester query in PHP as described here. My code is:
$params = [
"index" => $myIndex,
"body" => [
"try" => [
"text" => "ram",
"completion" => [ "value" => "suggest"]
]
]
];
$response = $client->suggest($params);
I have done indexing like this:
$params = [
"index" => $myIndex,
"body" => [
"settings"=> [
"analysis"=> [
"analyzer"=> [
"start_with_analyzer"=> [
"tokenizer"=> "my_edge_ngram",
"filter"=> [
"lowercase"
]
]
],
"tokenizer"=> [
"my_edge_ngram"=> [
"type"=> "edge_ngram",
"min_gram"=> 3,
"max_gram"=> 15
]
]
]
],
"mappings"=> [
"doc"=> [
"properties"=> [
"label"=> [
"type"=> "text",
"fields"=> [
"keyword"=> [
"type"=> "keyword"
],
"ngramed"=> [
"type"=> "text",
"analyzer"=> "start_with_analyzer"
]
]
]
]
]
]
]
];
$response = $client->indices()->create($params); // create an index
and I am getting the following error:
{
"error": {
"root_cause": [
{
"type": "illegal_argument_exception",
"reason": "[completion] unknown field [value], parser not found"
}
],
"type": "illegal_argument_exception",
"reason": "[completion] unknown field [value], parser not found"
},
"status": 400
}
I have tried changing value to value.keyword but it shows the same error. I am using Elasticsearch 5.3.2. How do I resolve this error?
In the query you are using 'value' inside the completion object, but the completion suggester does not accept a parameter with that name, which is exactly what the error is stating. The suggester expects a "field" parameter that names the field to suggest on.
You can try the solution below:
$params = [
    "index" => $myIndex,
    "body" => [
        "try" => [
            "text" => "ram",
            "completion" => [ "field" => "label" ]
        ]
    ]
];
$response = $client->suggest($params);
Hope this will work.
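One more thing worth checking: in Elasticsearch 5.x the completion suggester only works against a field mapped with type completion, and the mapping in the question does not have one. A minimal sketch of how the label mapping could be extended (the sub-field name suggest is an assumption):
"label" => [
    "type" => "text",
    "fields" => [
        "keyword" => [
            "type" => "keyword"
        ],
        "ngramed" => [
            "type" => "text",
            "analyzer" => "start_with_analyzer"
        ],
        "suggest" => [
            "type" => "completion" // completion-typed sub-field for the suggester
        ]
    ]
]
With such a mapping the suggest call would use "completion" => [ "field" => "label.suggest" ], and existing documents would need to be reindexed before any suggestions show up.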

No results once implementing an analyzer in Elasticsearch

I need to ignore the apostrophe in indexed results so that searching for "Johns potato" will show results for "John's potato".
I was able to get the analyzer accepted, but now I get no search results. Does anyone see something obvious that I am missing?
$params = [
'index' => $index,
'body' => [
'settings' => [
'number_of_shards' => 5,
'number_of_replicas' => 2,
'analysis' => [
"analyzer" => [
"my_analyzer" => [
"tokenizer" => "keyword",
"char_filter" => [
"my_char_filter"
]
]
],
"char_filter" => [
"my_char_filter" => [
"type" => "mapping",
"mappings" => [
"' => "
]
]
]
]
],
'mappings' => [
$type => [
'_source' => [
'enabled' => true
],
'properties' => [
'title' => [
'type' => 'text',
'analyzer' => 'my_analyzer'
],
'content' => [
'type' => 'text',
'analyzer' => 'my_analyzer'
]
]
]
]
]
];
I did find out that removing the analyzer from my field mappings allowed results to reappear, but I get no results the second I add the analyzer.
Here's an example query that I make.
{
"body": {
"query": {
"bool": {
"must": {
"multi_match": {
"query": "apples",
"fields": [
"title",
"content"
]
}
},
"filter": {
"terms": {
"site_id": [
"1351",
"1349"
]
}
},
"must_not": [
{
"match": {
"visible": "false"
}
},
{
"match": {
"locked": "true"
}
}
]
}
}
}
}
Probably what you really want is to use the built-in english analyzer. (The custom analyzer in the question uses the keyword tokenizer, which indexes the whole field value as a single token, so a multi_match for a single word like "apples" will not match.) The standard analyzer, which is the default, tokenizes on whitespace and some punctuation but leaves apostrophes alone. The english analyzer can stem and remove stop words since the language is known.
Here is the standard analyzer's output, where you can see "john's":
POST _analyze
{
"analyzer": "standard",
"text": "John's potato"
}
{
"tokens": [
{
"token": "john's",
"start_offset": 0,
"end_offset": 6,
"type": "<ALPHANUM>",
"position": 0
},
{
"token": "potato",
"start_offset": 7,
"end_offset": 13,
"type": "<ALPHANUM>",
"position": 1
}
]
}
And here is the english analyzer where you can see the 's is removed. The stemming will allow "John's", "Johns", and "John" to all match the document.
POST _analyze
{
"analyzer": "english",
"text": "John's potato"
}
{
"tokens": [
{
"token": "john",
"start_offset": 0,
"end_offset": 6,
"type": "<ALPHANUM>",
"position": 0
},
{
"token": "potato",
"start_offset": 7,
"end_offset": 13,
"type": "<ALPHANUM>",
"position": 1
}
]
}
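If you go that route, the change is small: drop the custom analyzer from the field mappings and point the fields at the built-in english analyzer. A minimal sketch of the relevant part of the PHP index body, reusing the title/content fields and the $type variable from the question:
'mappings' => [
    $type => [
        '_source' => [
            'enabled' => true
        ],
        'properties' => [
            'title' => [
                'type' => 'text',
                'analyzer' => 'english' // built-in analyzer: stems and drops the possessive 's
            ],
            'content' => [
                'type' => 'text',
                'analyzer' => 'english'
            ]
        ]
    ]
]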

ElasticSearch fulltext search with bool query

I am trying to do a full-text search across the whole document using
"query" => [
"query_string" => [
"fields" => ["_all"],
"query" => "fooA AND fooB"
]
]
It works pretty well. In the same query I need to run a bool query:
"query" => [
"bool" => [
"must" => [
"term" => [
"name" => "My_name"
]
],
"should" => [
....
]
]
]
Is it possible to combine these two queries? Is this the proper way to do full-text search?
As a result I need all documents that contain "fooA AND fooB" in any field and whose field name equals "My_name".
I found a solution to my question thanks to this post.
"query" => [
"bool" => [
"must" => [
[
"term" => [
"name" => "My_name"
]
],
[
"query_string" => [
"fields" => ["_all"],
"query" => "fooA AND fooB"
]
]
]
]
]
This combination works for me.
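For completeness, here is a sketch of how that combined query plugs into a full request with the official elasticsearch-php client; the index name and client setup are placeholders:
<?php
require 'vendor/autoload.php'; // elasticsearch/elasticsearch installed via Composer

$client = Elasticsearch\ClientBuilder::create()->build();

$params = [
    'index' => 'my_index', // placeholder index name
    'body' => [
        'query' => [
            'bool' => [
                'must' => [
                    [
                        'term' => ['name' => 'My_name']
                    ],
                    [
                        'query_string' => [
                            'fields' => ['_all'],
                            'query' => 'fooA AND fooB'
                        ]
                    ]
                ]
            ]
        ]
    ]
];

$response = $client->search($params);
print_r($response['hits']['hits']);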
Yes, you can combine these together as shown below:
GET _search
{
"from": 0,
"size": 20,
"query": {
"bool": {
"must": [
{
"term": {
"name": "MY_NAME"
}
},
{
"query_string": {
"query": "fooA AND fooB",
"fields": [
"_all"
]
}
}
]
}
}
}

Using "aggregate" to combine a list of all subdocuments that match query?

I'm trying to use a PHP mongo library to "aggregate" on a data structure like this:
{
"_id": 100,
"name": "Joe",
"pets":[
{
"name": "Kill me",
"animal": "Frog"
},
{
"name": "Petrov",
"animal": "Cat"
},
{
"name": "Joe",
"animal": "Frog"
}
]
},
{
"_id": 101,
"name": "Jane",
"pets":[
{
"name": "James",
"animal": "Hedgehog"
},
{
"name": "Franklin",
"animal": "Frog"
}
]
}
For example, say I want to get all subdocuments where the animal is a frog. Note that I do NOT want all matching "super-documents" (i.e. the ones with _id). I want to get an ARRAY that looks like this:
[
{
"name": "Kill me",
"animal": "Frog"
},
{
"name": "Joe",
"animal": "Frog"
},
{
"name": "Franklin",
"animal": "Frog"
}
]
What syntax am I supposed to use (in PHP) to accomplish this? I know it has to do with aggregate, but I couldn't find anything that matches this specific scenario.
You can use the aggregation below: $match to find documents whose pets array contains a Frog entry, $unwind the pets array, $match again to keep only the Frog subdocuments, and finally $group to push the matching subdocuments into a single array.
<?php
$mongo = new MongoDB\Driver\Manager("mongodb://localhost:27017");
$pipeline =
[
[
'$match' =>
[
'pets.animal' => 'Frog',
],
],
[
'$unwind' =>'$pets',
],
[
'$match' =>
[
'pets.animal' => 'Frog',
],
],
[
'$group' =>
[
'_id' => null,
'animals' => ['$push' => '$pets'],
],
],
];
$command = new \MongoDB\Driver\Command([
'aggregate' => 'insert_collection_name',
'pipeline' => $pipeline,
'cursor' => new \stdClass() // the aggregate command requires a cursor option on MongoDB 3.6+
]);
$cursor = $mongo->executeCommand('insert_db_name', $command);
foreach($cursor as $key => $document) {
//do something
}
?>
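As an aside, if the higher-level mongodb/mongodb Composer library is installed (it sits on top of the raw driver used above), the same pipeline can be run a bit more concisely; a sketch reusing $pipeline and the placeholder database/collection names:
<?php
require 'vendor/autoload.php'; // mongodb/mongodb library

$client = new MongoDB\Client('mongodb://localhost:27017');
$collection = $client->selectCollection('insert_db_name', 'insert_collection_name');

// $pipeline is the same array built in the snippet above.
$cursor = $collection->aggregate($pipeline);

foreach ($cursor as $document) {
    // $document['animals'] holds the array of matching pet subdocuments
}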

Parse Huge GeoJSON file and get polygon coordinates of a specific property

How do I get the polygon coordinates of a specific property? It is a huge file, so the time to parse it is a factor.
Is there a library to do that?
Sample of the geojson:
{
"type": "FeatureCollection",
"crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::37001" } },
"features": [
{ "type": "Feature", "properties": { "HOOD_ID": 2799.000000, "HOOD_NAME": "Overtown", "MARKET_ID": "MK1245000", "MARKET": "Miami", "STATE": "12", "STATENAME": "Florida", "LATITUDE": 25.784659, "LONGITUDE": -80.202625, "AREA": 1.495920, "HLEVEL": 2.000000, "DATE_ADDED": "2012\/08\/04", "FLAG1": 0, "OB_GEO_ID": "NH2799" }, "geometry": { "type": "Polygon", "coordinates": [ [ [ -80.21463341110001, 25.782154451300002 ], [ -80.21588353300001, 25.782696872700001 ], [ -80.217973576800006, 25.7833078056 ], [ -80.219539583200003, 25.784199528800002 ], [ -80.211531118000011, 25.787386122500003 ], [ -80.20836940560001, 25.789128957700001 ], [ -80.206422272200001, 25.789848709300003 ], [ -80.2060101207, 25.7907922853 ], [ -80.206013661300005, 25.793566844899999 ], [ -80.206013794, 25.7968569831 ], [ -80.202368489099996, 25.796952708299997 ], [ -80.202379, 25.797313 ], [ -80.199836, 25.797309 ], [ -80.199819759600004, 25.7970196375 ], [ -80.1993398571, 25.797032239699998 ], [ -80.193583490500004, 25.797234161599999 ], [ -80.193806159800005, 25.796203267299997 ], [ -80.194272724399994, 25.7951752727 ], [ -80.193944, 25.795182 ], [ -80.194266, 25.793434 ], [ -80.195336, 25.789592 ], [ -80.195534, 25.787847 ], [ -80.195514, 25.778409 ], [ -80.195969425200005, 25.778397321299998 ], [ -80.19557104899999, 25.773179598799999 ], [ -80.195360063199999, 25.768486166300001 ], [ -80.196768768399991, 25.7682545324 ], [ -80.198226099099998, 25.768721241800002 ], [ -80.199164023899996, 25.769800189500003 ], [ -80.199997701599997, 25.770738292499999 ], [ -80.200414826200003, 25.772286616100001 ], [ -80.200936435800003, 25.773272690900001 ], [ -80.202343232900006, 25.7749143389 ], [ -80.204375245, 25.776884093299998 ], [ -80.205990323199998, 25.777259031 ], [ -80.206835373600001, 25.777897973199998 ], [ -80.207587, 25.777601 ], [ -80.210881, 25.78 ], [ -80.21463341110001, 25.782154451300002 ] ] ] } },
{ "type": "Feature", "properties": { "HOOD_ID": 2169.000000, "HOOD_NAME": "Church District", "MARKET_ID": "MK1235000", "MARKET": "Jacksonville", "STATE": "12", "STATENAME": "Florida", "LATITUDE": 30.332174, "LONGITUDE": -81.660212, "AREA": 0.131745, "HLEVEL": 1.000000, "DATE_ADDED": "2012\/08\/04", "FLAG1": 0, "OB_GEO_ID": "NH2169" }, "geometry": { "type": "Polygon", "coordinates": [ [ [ -81.664799, 30.331204 ], [ -81.663868, 30.334826 ], [ -81.655617, 30.333239 ], [ -81.656717, 30.329439 ], [ -81.664799, 30.331204 ] ] ] } }
]
}
Large files can best be parsed using an event-based JSON parser (here I use one by kuma-giyomu). The idea is to use callbacks when certain tokens are encountered, so that the data can be processed as it is being parsed.
In the following code, the property "coordinates" triggers the creation of a new polygon object; the array handler's start callback then opens a new coordinate array, which is submitted to the polygon object when the corresponding array-end token is encountered.
<?php
include "JSONParser.php";
class Polygon {
public $coordinates = array();
}
$coords = null;
$polygons = array();
$polygon = null;
$j = new JSONParser();
$j->setPropertyHandler(function($value, $property) {
global $polygons, $polygon;
if ($value != "coordinates") {
if (!is_null($polygon)) {
$polygons[] = $polygon;
$polygon = null;
}
return;
}
if (is_null($polygon)) {
$polygon = new Polygon;
}
});
$j->setArrayHandlers(function($value, $property) {
global $coords, $polygon;
if (!is_null($polygon)) {
$coords = array();
}
}, function($value, $property) {
global $coords, $polygon;
if (!is_null($coords)) {
if (!is_null($polygon)) {
$polygon->coordinates[] = $coords;
}
$coords = null;
}
});
$j->setScalarHandler(function($value, $property) {
global $coords;
if (!is_null($coords)) {
$coords[] = $value;
}
});
try {
$j->parseDocument("test.json");
} catch (JSONParserException $e) {
}
if (!is_null($polygon)) {
$polygons[] = $polygon;
$polygon = null;
}
print_r($polygons);
outputs
Array
(
[0] => Polygon Object
(
[coordinates] => Array
(
[0] => Array
(
[0] => -80.21463341110001
[1] => 25.782154451300002
)
[1] => Array
(
[0] => -80.21588353300001
[1] => 25.782696872700001
)
[...]
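As a side note, if the file does fit in memory, a plain json_decode() pass is a simpler way to pull the coordinates for one specific property value. A minimal sketch (the property HOOD_NAME and the value Overtown are taken from the sample above, and test.json is the same input file):
<?php
// In-memory alternative for files small enough to load at once (sketch).
$fc = json_decode(file_get_contents('test.json'), true);

$wanted = 'Overtown'; // value taken from the sample data
$coordinates = [];

foreach ($fc['features'] as $feature) {
    if (($feature['properties']['HOOD_NAME'] ?? null) === $wanted) {
        // A Polygon's coordinates are an array of linear rings,
        // each ring being a list of [longitude, latitude] pairs.
        $coordinates[] = $feature['geometry']['coordinates'];
    }
}

print_r($coordinates);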
