# Source: Meta, a Wikimedia project coordination wiki
/**
 * Extract one section from a block of wikitext.
 *
 * Section 0 is everything before the first heading; section N (N >= 1) is
 * the Nth heading — either wiki style (`== Title ==`) or a literal
 * <h1>..</h1>-style HTML heading — together with its body text and all
 * subsections of a strictly deeper level.
 *
 * Side effect: sets the global $wgUseEditor to true when the configured
 * $wgEditorToken pattern occurs (case-insensitively) in the stripped text.
 *
 * @param string $text    Raw wikitext.
 * @param int    $section Section index (0 = lead section).
 * @return string Trimmed wikitext of the requested section.
 */
function getSection($text,$section) {
	# strip NOWIKI etc. to avoid confusion (true-parameter causes HTML
	# comments to be stripped as well)
	$striparray=array();
	$parser=new Parser();
	$parser->mOutputType=OT_WIKI;
	$parser->mOptions = new ParserOptions();
	$striptext=$parser->strip($text, $striparray, true);
	# patch: flag editor use when the configured token matches the text.
	# eregi() was deprecated in PHP 5.3 and removed in PHP 7; use a
	# case-insensitive preg_match instead. The token is still treated as a
	# regular expression (as eregi did); only the delimiter is escaped.
	global $wgUseEditor, $wgEditorToken;
	if (preg_match('/' . str_replace('/', '\\/', $wgEditorToken) . '/i', $striptext)) {
		$wgUseEditor = true;
	}
	# now that we can be sure that no pseudo-sections are in the source,
	# split it up by section
	$secs =
	preg_split(
	'/(^=+.+?=+|^<h[1-6].*?>.*?<\/h[1-6].*?>)(?!\S)/mi',
	$striptext, -1,
	PREG_SPLIT_DELIM_CAPTURE);
	# after the split, even indices hold section bodies and odd indices
	# hold the captured heading lines.
	if($section==0) {
		# lead section: everything before the first heading
		$rv=$secs[0];
	} else {
		$headline=$secs[$section*2-1];
		preg_match( '/^(=+).+?=+|^<h([1-6]).*?>.*?<\/h[1-6].*?>(?!\S)/mi',$headline,$matches);
		# translate the heading into a numeric level: wiki headings encode
		# the level as the number of '=' signs (group 1); HTML headings
		# carry it as the digit in <hN> (group 2). The original code left
		# $hlevel as an empty string for HTML headings, which broke the
		# subsection-level comparison below.
		if(strpos($matches[1],'=')!==false) {
			$hlevel=strlen($matches[1]);
		} else {
			$hlevel=intval($matches[2]);
		}
		$rv=$headline. $secs[$section*2];
		# append every following section whose level is strictly deeper,
		# stopping at the first heading of the same or shallower level
		$count=$section+1;
		$break=false;
		while(!empty($secs[$count*2-1]) && !$break) {
			$subheadline=$secs[$count*2-1];
			preg_match( '/^(=+).+?=+|^<h([1-6]).*?>.*?<\/h[1-6].*?>(?!\S)/mi',$subheadline,$matches);
			if(strpos($matches[1],'=')!==false) {
				$subhlevel=strlen($matches[1]);
			} else {
				$subhlevel=intval($matches[2]);
			}
			if($subhlevel > $hlevel) {
				$rv.=$subheadline.$secs[$count*2];
			}
			if($subhlevel <= $hlevel) {
				$break=true;
			}
			$count++;
		}
	}
	# reinsert stripped tags
	$rv=$parser->unstrip($rv,$striparray);
	$rv=$parser->unstripNoWiki($rv,$striparray);
	$rv=trim($rv);
	return $rv;
}