public function RobotsTxtBasicTestCase::testRobotsTxtCachingAndCompressionTestCase in RobotsTxt 7
Tests robots.txt file encoding with caching and compression enabled.
File
- ./robotstxt.test, line 99 - Tests for robotstxt.module.
Class
- RobotsTxtBasicTestCase - Tests basic functionality of configured robots.txt files.
Code
public function testRobotsTxtCachingAndCompressionTestCase() {
  // Create an admin user, log in and access the settings form.
  $admin_user = $this->drupalCreateUser(array(
    'administer site configuration',
    'administer robots.txt',
  ));
  $this->drupalLogin($admin_user);

  // Enable caching and compression.
  $edit = array();
  $edit['cache'] = 1;
  $edit['cache_lifetime'] = 60;
  $edit['page_cache_maximum_age'] = 60;
  $edit['page_compression'] = 1;
  $this->drupalPost('admin/config/development/performance', $edit, t('Save configuration'));
  $this->drupalLogout();

  $url = url('robots-test.txt', array('absolute' => TRUE));
  // Use drupal_http_request() so the gzipped response is not automatically
  // uncompressed.
  $response = drupal_http_request($url, array(
    'headers' => array(
      'Accept-encoding' => 'gzip',
    ),
  ));
  // Strip the 10-byte gzip header and 8-byte trailer, inflate the remaining
  // raw DEFLATE data, and compare it to the configured robots.txt content.
  $this->assertEqual(trim(variable_get('robotstxt', '')), trim(gzinflate(substr($response->data, 10, -8))), 'The robots.txt content is properly served with compression enabled.');
  // Note: the header may have a charset appended.
  $header = $response->headers['content-type'];
  $this->assertIdentical(strpos($header, 'text/plain'), 0, 'The robots.txt file was served with header Content-Type: text/plain');
}
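
The assertEqual() call relies on the layout of a gzip stream (RFC 1952): a plain gzip-encoded body starts with a fixed 10-byte header and ends with an 8-byte trailer (CRC32 plus original length), and gzinflate() accepts only the raw DEFLATE data in between. A minimal sketch of that same decoding step, outside the test harness; the $original string is just an example and assumes PHP's zlib extension is available:

<?php
// Produce a gzip-encoded string, as page compression would for robots.txt.
$original = "User-agent: *\nDisallow: /admin/\n";
$compressed = gzencode($original);

// A default gzencode() stream has a fixed 10-byte header and an 8-byte
// trailer (CRC32 + uncompressed size); raw DEFLATE data sits between them.
$deflate_data = substr($compressed, 10, -8);

// gzinflate() understands only raw DEFLATE data, so the stripped payload
// round-trips back to the original content.
var_dump(gzinflate($deflate_data) === $original); // bool(true)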