You are here

public function S3fsTest::testCacheRefresh in S3 File System 8.3

Same name and namespace in other branches
  1. 4.0.x tests/src/Functional/S3fsTest.php \Drupal\Tests\s3fs\Functional\S3fsTest::testCacheRefresh()

Test the cache refresh.

File

tests/src/Functional/S3fsTest.php, line 180

Class

S3fsTest
S3 File System Tests.

Namespace

Drupal\Tests\s3fs\Functional

Code

/**
 * Tests the s3fs metadata cache refresh.
 *
 * Uploads objects directly through the AWS SDK (bypassing s3fs, so nothing
 * gets pre-cached), then verifies that both refreshCache() and the batch
 * refresh populate the s3fs_file table correctly, with and without S3
 * version syncing, and that the root_folder prefix is stripped from the
 * cached URIs.
 */
public function testCacheRefresh() {

  // Add several files to the bucket using the AWS SDK directly, so that
  // s3fs won't cache them.
  $filenames = [
    'files/test2.txt',
    'parts/test3.txt',
    'test.txt',
  ];
  foreach ($filenames as $filename) {
    // No 'Body' is supplied: an empty object is enough to exercise the
    // metadata cache.
    $this->s3
      ->putObject([
      'Bucket' => $this->s3Config['bucket'],
      'Key' => $this->remoteTestsFolderKey . '/' . $filename,
      'ACL' => 'public-read',
    ]);
  }
  $config = $this->s3Config;

  // Set the current test folder as the root prefix.
  $config['root_folder'] = $this->remoteTestsFolderKey;
  \Drupal::service('s3fs')
    ->refreshCache($config);

  // Query the DB to confirm that all the new files are cached.
  $this
    ->assertEqual($filenames, $this->getCachedFileKeys(), 'The test files were all cached.');

  // Flush the cache, then do a refresh without versions support.
  $this->flushFileCache();

  // Disable Version Syncing.
  $config['disable_version_sync'] = TRUE;
  \Drupal::service('s3fs')
    ->refreshCache($config);
  $config['disable_version_sync'] = FALSE;

  // Query the DB to confirm that all the new files are cached.
  $this
    ->assertEqual($filenames, $this->getCachedFileKeys(), 'The test files were all cached without versions.');

  // Flush the cache, then do a refresh using the root_folder setting.
  // Only the file in the root folder (test3.txt) should become cached.
  $this->flushFileCache();
  $config['root_folder'] = $this->remoteTestsFolderKey . '/parts';
  \Drupal::service('s3fs')
    ->refreshCache($config);

  // Confirm that only the file in the "parts" folder was cached.
  $records = $this->getCachedFileRecords();
  $this
    ->assertEqual(count($records), 1, 'There was only one file in the partially refreshed cache.');
  $this
    ->assertEqual($records[0]->uri, 's3://test3.txt', 'That file was the one in the "parts" folder, which is now the root folder, so "parts" is not in the URI.');

  // Now test using the Batch system.
  // Set the current test folder as the root prefix.
  $config['root_folder'] = $this->remoteTestsFolderKey;
  $this
    ->cacheBatchExecute($config);

  // Query the DB to confirm that all the new files are cached.
  $this
    ->assertEqual($filenames, $this->getCachedFileKeys(), 'Batch refresh cached all files.');

  // Flush the cache, then do a refresh using the root_folder setting.
  // Only the file in the root folder (test3.txt) should become cached.
  $this->flushFileCache();
  $config['root_folder'] = $this->remoteTestsFolderKey . '/parts';
  $this
    ->cacheBatchExecute($config);

  // Confirm that only the file in the "parts" folder was cached.
  $records = $this->getCachedFileRecords();
  $this
    ->assertEqual(count($records), 1, 'Batch partial refresh cached only one file.');
  $this
    ->assertEqual($records[0]->uri, 's3://test3.txt', 'Batched refresh successfully stripped the "parts" folder which is now the root folder.');

  // Batch with disable_version_sync.
  $config['root_folder'] = $this->remoteTestsFolderKey;
  $config['disable_version_sync'] = TRUE;
  $this
    ->cacheBatchExecute($config);

  // Query the DB to confirm that all the new files are cached.
  $this
    ->assertEqual($filenames, $this->getCachedFileKeys(), 'Batch refresh with disable_version_sync cached all files.');

  // Flush the cache, then do a refresh using the root_folder setting.
  // Only the file in the root folder (test3.txt) should become cached.
  $this->flushFileCache();
  $config['root_folder'] = $this->remoteTestsFolderKey . '/parts';
  $this
    ->cacheBatchExecute($config);

  // Confirm that only the file in the "parts" folder was cached.
  $records = $this->getCachedFileRecords();
  $this
    ->assertEqual(count($records), 1, 'Batch partial refresh with disable_version_sync  cached only one file.');
  $this
    ->assertEqual($records[0]->uri, 's3://test3.txt', 'Batched refresh with disable_version_sync  successfully stripped the "parts" folder which is now the root folder.');
}

/**
 * Returns the keys of all cached files, with the s3:// scheme stripped.
 *
 * Reads every non-directory row from the s3fs_file table, in the table's
 * natural (key-sorted) order.
 *
 * @return string[]
 *   The cached object keys, relative to the configured root folder.
 */
protected function getCachedFileKeys() {
  $result = $this->connection
    ->select('s3fs_file', 's')
    ->fields('s')
    ->condition('dir', 0, '=')
    ->execute();
  $keys = [];
  foreach ($result as $record) {
    $keys[] = str_replace('s3://', '', $record->uri);
  }
  return $keys;
}

/**
 * Returns the raw s3fs_file rows for all cached (non-directory) files.
 *
 * @return object[]
 *   The row objects, each exposing at least a uri property.
 */
protected function getCachedFileRecords() {
  return $this->connection
    ->select('s3fs_file', 's')
    ->fields('s')
    ->condition('dir', 0, '=')
    ->execute()
    ->fetchAll();
}

/**
 * Empties the s3fs_file metadata cache table.
 */
protected function flushFileCache() {
  $this->connection
    ->delete('s3fs_file')
    ->execute();
}