1
0
mirror of https://github.com/e107inc/e107.git synced 2025-08-18 20:31:41 +02:00

Basic per-item search engine robots handling added to News and Pages.

This commit is contained in:
Cameron
2019-12-02 13:32:21 -08:00
parent e1504b91c0
commit 5cf54d07d2
11 changed files with 80 additions and 3 deletions

View File

@@ -3961,6 +3961,8 @@ class eResponse
protected $_meta_name_only = array('keywords', 'viewport', 'robots'); // Keep FB happy.
protected $_meta_property_only = array('article:section', 'article:tag'); // Keep FB happy.
protected $_meta = array();
protected $_meta_robot_types = array('noindex'=>'NoIndex', 'nofollow'=>'NoFollow','noarchive'=>'NoArchive','noimageindex'=>'NoImageIndex' );
protected $_meta_robot_descriptions = array('noindex'=>LAN_ROBOTS_NOINDEX, 'nofollow'=>LAN_ROBOTS_NOFOLLOW,'noarchive'=>LAN_ROBOTS_NOARCHIVE,'noimageindex'=>LAN_ROBOTS_NOIMAGE );
protected $_title_separator = ' » ';
protected $_content_type = 'html';
protected $_content_type_arr = array(
@@ -3980,6 +3982,16 @@ class eResponse
'jsonRender' => false,
);
/**
 * Accessor for the supported robots meta-tag directives.
 *
 * Returns the map defined in $_meta_robot_types, keyed by the lowercase
 * directive (e.g. 'noindex') with its display label as the value
 * (e.g. 'NoIndex').
 *
 * @return array directive => label map
 */
public function getRobotTypes()
{
	$directiveMap = $this->_meta_robot_types;

	return $directiveMap;
}
/**
 * Accessor for the human-readable descriptions of the robots directives.
 *
 * Returns the map defined in $_meta_robot_descriptions, keyed by the
 * lowercase directive (e.g. 'noindex') with its language-pack description
 * constant as the value (e.g. LAN_ROBOTS_NOINDEX).
 *
 * @return array directive => description map
 */
public function getRobotDescriptions()
{
	$descriptionMap = $this->_meta_robot_descriptions;

	return $descriptionMap;
}
public function setParam($key, $value)
{
$this->_params[$key] = $value;
@@ -4368,6 +4380,7 @@ class eResponse
e107::getDebug()->log($this->_meta);
foreach ($this->_meta as $attr)
{
$attrData .= '<meta';