MediaWiki version: ≥ 1.11
GET request to find all the links on the provided page(s).
This module can be used as a generator.
The following documentation is the output of Special:ApiHelp/query+links, automatically generated by the pre-release version of MediaWiki that is running on this site (MediaWiki.org). Returns all links from the given pages.
Specific parameters:
Show links in these namespaces only.
How many links to return.
When more results are available, use this to continue. More detailed information on how to continue queries can be found on mediawiki.org.
Only list links to these titles. Useful for checking whether a certain page links to a certain title.
The direction in which to list.
{ "query": { "pages": [ { "pageid": 736, "ns": 0, "title": "Albert Einstein", "links": [ { "ns": 0, "title": "2dF Galaxy Redshift Survey" }, { "ns": 0, "title": "A priori and a posteriori" }, { "ns": 0, "title": "Aage Bohr" }, ... ] } ] } }
#!/usr/bin/python3 """ get_links.py MediaWiki API Demos Demo of `Links` module: Get all links on the given page(s) MIT License """ import requests S = requests.Session() URL = "https://en.wikipedia.org/w/api.php" PARAMS = { "action": "query", "format": "json", "titles": "Albert Einstein", "prop": "links" } R = S.get(url=URL, params=PARAMS) DATA = R.json() PAGES = DATA["query"]["pages"] for k, v in PAGES.items(): for l in v["links"]: print(l["title"])
<?php
/*
    get_links.php

    MediaWiki API Demos
    Demo of `Links` module: Get all links on the given page(s)

    MIT License
*/

$endPoint = "https://en.wikipedia.org/w/api.php";
$params = [
    "action" => "query",
    "format" => "json",
    "titles" => "Albert Einstein",
    "prop" => "links"
];

$url = $endPoint . "?" . http_build_query( $params );

$ch = curl_init( $url );
curl_setopt( $ch, CURLOPT_RETURNTRANSFER, true );
$output = curl_exec( $ch );
curl_close( $ch );

$result = json_decode( $output, true );

// Use distinct loop variables: the original reused $k/$v in the inner
// loop, silently clobbering the outer iteration variables.
foreach ( $result["query"]["pages"] as $pageid => $page ) {
    // A page with no outgoing links carries no "links" key.
    if ( isset( $page["links"] ) ) {
        foreach ( $page["links"] as $link ) {
            echo( $link["title"] . "\n" );
        }
    }
}
/*
    get_links.js

    MediaWiki API Demos
    Demo of `Links` module: Get all links on the given page(s)

    MIT License
*/

var endpoint = "https://en.wikipedia.org/w/api.php";

var params = {
    action: "query",
    format: "json",
    titles: "Albert Einstein",
    prop: "links",
    origin: "*" // required for CORS requests from another origin
};

// URLSearchParams percent-encodes every value; the original built the
// query string by plain concatenation, which breaks for titles that
// contain spaces or other reserved characters.
var url = endpoint + "?" + new URLSearchParams(params).toString();

fetch(url)
    .then(function (response) { return response.json(); })
    .then(function (response) {
        var pages = response.query.pages;
        for (var p in pages) {
            // A page with no outgoing links carries no "links" array.
            for (var l of pages[p].links || []) {
                console.log(l.title);
            }
        }
    })
    .catch(function (error) { console.log(error); });
/* get_links.js MediaWiki API Demos Demo of `Links` module: Get all links on the given page(s) MIT License */ var params = { action: 'query', format: 'json', titles: 'Albert Einstein', prop: 'links' }, api = new mw.Api(); api.get( params ).done( function ( data ) { var pages = data.query.pages, p; for ( p in pages ) { pages[ p ].links.forEach( function ( l ) { console.log( l.title ); } ); } } );Example 2: Fetch missing links[edit]
GET request to fetch missing or red links on Wikipedia:Most-wanted_articles. Two steps to do so:
GET request to the Action API to return all links embedded on the provided page.

generator=links: the query module's submodule links, used as a generator module to get the set of links embedded on a page.
Response:

{ "batchcomplete": "", "continue": { "continue": "gplcontinue||", "gplcontinue": "297177|0|1965_in_sumo" }, "query": { "pages": { "-1": { "missing": "", "ns": 0, "title": "(viii)" }, "-10": { "missing": "", "ns": 0, "title": "1954 in sumo" } ... } } }

get_red_links.py

Python[edit]
#!/usr/bin/python3 """ get_red_links.py MediaWiki API Demos Demo of `Links` module to identify red or missing links on a page. MIT License """ import requests S = requests.Session() URL = "https://en.wikipedia.org/w/api.php" PARAMS = { "action": "query", "titles": "Wikipedia:Most-wanted_articles", "gpllimit": "20", "format": "json", "generator": "links" } R = S.get(url=URL, params=PARAMS) DATA = R.json() PAGES = DATA['query']['pages'] for page in PAGES.values(): if 'missing' in page: print(page['title'])PHP[edit]
<?php
/*
    get_red_links.php

    MediaWiki API Demos
    Demo of `Links` module to identify red
    or missing links on a page.

    MIT License
*/

$endPoint = "https://en.wikipedia.org/w/api.php";

$params = [
    "action"    => "query",
    "generator" => "links",
    "titles"    => "Wikipedia:Most-wanted_articles",
    "gpllimit"  => "20",
    "format"    => "json"
];

$queryUrl = $endPoint . "?" . http_build_query( $params );

$handle = curl_init( $queryUrl );
curl_setopt( $handle, CURLOPT_RETURNTRANSFER, true );
$response = curl_exec( $handle );
curl_close( $handle );

$data = json_decode( $response, true );

// Pages flagged with a "missing" key are red links.
foreach ( $data["query"]["pages"] as $page ) {
    if ( array_key_exists( "missing", $page ) ) {
        echo( $page["title"] . "\n" );
    }
}

JavaScript[edit]
/* get_red_links.js MediaWiki API Demos Demo of `Links` module to identify red or missing links on a page. MIT License */ var url = "https://en.wikipedia.org/w/api.php"; var params = { action: "query", generator: "links", titles: "Wikipedia:Most-wanted_articles", gpllimit: "20", format: "json" }; url = url + "?origin=*"; Object.keys(params).forEach(function(key){url += "&" + key + "=" + params[key];}); fetch(url) .then(function(response){return response.json();}) .then(function(response) { var pages = response.query.pages; for (var p in pages) { if(pages[p].hasOwnProperty("missing")){ console.log(pages[p].title); } } }) .catch(function(error){console.log(error);});MediaWiki JS[edit]
/*
    get_red_links.js

    MediaWiki API Demos
    Demo of `Links` module to identify red
    or missing links on a page.

    MIT License
*/

var api = new mw.Api();

api.get( {
    action: 'query',
    generator: 'links',
    titles: 'Wikipedia:Most-wanted_articles',
    gpllimit: '20',
    format: 'json'
} ).done( function ( data ) {
    var pages = data.query.pages;
    // Missing (red-linked) pages carry a "missing" key.
    Object.keys( pages ).forEach( function ( id ) {
        if ( pages[ id ].hasOwnProperty( 'missing' ) ) {
            console.log( pages[ id ].title );
        }
    } );
} );
pldir
pltitles
pllimit
plcontinue
RetroSearch is an open source project built by @garambo | Open a GitHub Issue
Search and Browse the WWW like it's 1997 | Search results from DuckDuckGo
HTML:
3.2
| Encoding:
UTF-8
| Version:
0.7.4