<?php
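/**
 * Implements hook_install().
 */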
function robotstxt_install() {
  // Seed the robotstxt variable from an existing robots.txt file in the site
  // root, falling back to the default file shipped with the module.
  if (file_exists('./robots.txt')) {
    variable_set('robotstxt', file_get_contents('./robots.txt'));
  }
  elseif (file_exists(drupal_get_path('module', 'robotstxt') . '/robots.txt')) {
    variable_set('robotstxt', file_get_contents(drupal_get_path('module', 'robotstxt') . '/robots.txt'));
  }
}
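
/**
 * Implements hook_uninstall().
 */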
function robotstxt_uninstall() {
  variable_del('robotstxt');
}
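
/**
 * Implements hook_requirements().
 */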
function robotstxt_requirements($phase) {
  $requirements = array();
  $t = get_t();

  switch ($phase) {
    case 'runtime':
      // Without clean URLs, requests for /robots.txt never reach Drupal, so
      // the module cannot serve its content.
      if (!variable_get('clean_url', 0)) {
        $requirements['robotstxt_cleanurl'] = array(
          'title' => $t('RobotsTxt'),
          'severity' => REQUIREMENT_ERROR,
          'value' => $t('<a href="!clean_url">Clean URLs</a> are mandatory for this module.', array(
            '!clean_url' => url('admin/settings/clean-urls'),
          )),
        );
      }
      // A physical robots.txt file in the web root is served directly by the
      // web server and shadows the content generated by this module.
      if (file_exists('./robots.txt')) {
        $requirements['robotstxt_file'] = array(
          'title' => $t('RobotsTxt'),
          'severity' => REQUIREMENT_WARNING,
          'value' => $t('The RobotsTxt module only works if you remove the existing robots.txt file from your website root.'),
        );
      }
      break;
  }
  return $requirements;
}
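
/**
 * Add the 'administer robots.txt' permission to roles that can already
 * administer site configuration.
 */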
function robotstxt_update_6100() {
  $ret = array();

  // Append 'administer robots.txt' to the permission string of every role
  // that currently holds 'administer site configuration'.
  $res = db_query('SELECT rid, perm FROM {permission}');
  $perms = array();
  while ($p = db_fetch_object($res)) {
    $perm = $p->perm;
    $perm = preg_replace('/administer site configuration/', 'administer site configuration, administer robots.txt', $perm);
    $perms[$p->rid] = $perm;
  }
  foreach ($perms as $key => $value) {
    db_query("UPDATE {permission} SET perm = '%s' WHERE rid = %d", $value, $key);
  }
  return $ret;
}