<?php

namespace Drupal\s3fs;

use Aws\Credentials\CredentialProvider;
use Aws\DoctrineCacheAdapter;
use Aws\S3\Exception\S3Exception;
use Aws\S3\S3Client;
use Doctrine\Common\Cache\FilesystemCache;
use Drupal\Component\Datetime\TimeInterface;
use Drupal\Core\Cache\Cache;
use Drupal\Core\Config\ConfigFactoryInterface;
use Drupal\Core\Database\Connection;
use Drupal\Core\Database\SchemaObjectExistsException;
use Drupal\Core\Extension\ModuleHandlerInterface;
use Drupal\Core\Messenger\MessengerTrait;
use Drupal\Core\Site\Settings;
use Drupal\Core\StreamWrapper\StreamWrapperManager;
use Drupal\Core\StringTranslation\StringTranslationTrait;
use Drupal\s3fs\StreamWrapper\S3fsStream;
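
/**
 * Provides the S3 File System (s3fs) service.
 *
 * Wraps the AWS SDK for PHP S3 client and maintains the {s3fs_file} metadata
 * cache table that mirrors the contents of the configured bucket.
 */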
class S3fsService implements S3fsServiceInterface {

  use MessengerTrait;
  use StringTranslationTrait;

  /** Default S3 region used when no region is configured. */
  const DEFAULT_S3_REGION = 'us-east-1';

  /** @var \Drupal\Core\Database\Connection */
  protected $connection;
  /** @var \Drupal\Core\Config\ConfigFactoryInterface */
  protected $configFactory;
  /** @var \Drupal\Component\Datetime\TimeInterface */
  protected $time;
  /** @var \Drupal\Core\Extension\ModuleHandlerInterface */
  protected $moduleHandler;
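
  /**
   * Constructs an S3fsService object.
   *
   * @param \Drupal\Core\Database\Connection $connection
   *   The database connection.
   * @param \Drupal\Core\Config\ConfigFactoryInterface $config_factory
   *   The config factory.
   * @param \Drupal\Component\Datetime\TimeInterface $time
   *   The time service.
   * @param \Drupal\Core\Extension\ModuleHandlerInterface $module_handler
   *   The module handler.
   */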
  public function __construct(Connection $connection, ConfigFactoryInterface $config_factory, TimeInterface $time, ModuleHandlerInterface $module_handler) {
    $this->connection = $connection;
    $this->configFactory = $config_factory;
    $this->time = $time;
    $this->moduleHandler = $module_handler;
  }
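
  /**
   * Validates the S3 File System configuration.
   *
   * Checks that the AWS SDK is available and the bucket settings are
   * complete, then performs test object writes against the bucket to verify
   * that the supplied credentials behave as expected (including the
   * read-only and public-ACL cases) and that listObjectVersions is supported
   * when version sync is enabled.
   *
   * @param array $config
   *   The s3fs configuration array.
   *
   * @return array
   *   An array of translated error messages; empty if validation passed.
   */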
  public function validate(array $config) {
    $errors = [];

    if (!class_exists('Aws\\S3\\S3Client')) {
      $errors[] = $this->t('Cannot load Aws\\S3\\S3Client class. Please ensure that the AWS SDK for PHP library is installed correctly.');
    }
    if (!empty($config['credentials_file'])) {
      if (!is_file($config['credentials_file']) || !is_readable($config['credentials_file'])) {
        $errors[] = $this->t('Unable to read the custom credentials file. Please verify that @file exists and its permissions are valid.', [
          '@file' => $config['credentials_file'],
        ]);
      }
    }
    if (empty($config['bucket'])) {
      $errors[] = $this->t('Your Amazon S3 bucket name is not configured.');
    }
    if (!empty($config['use_customhost']) && empty($config['hostname'])) {
      $errors[] = $this->t('You must specify a Hostname to use the Custom Host feature.');
    }
    if (!empty($config['use_cname']) && empty($config['domain'])) {
      $errors[] = $this->t('You must specify a CDN Domain Name to use the CNAME feature.');
    }
    switch ($config['domain_root']) {
      case 'root':
        if (empty($config['root_folder'])) {
          $errors[] = $this->t('You must specify a Root folder to map the Domain Name to.');
        }
        break;

      default:
        break;
    }

    try {
      $s3 = $this->getAmazonS3Client($config);
    } catch (\Exception $e) {
      $errors[] = $this->t('An unexpected error occurred while obtaining the S3 client. @message', [
        '@message' => $e->getMessage(),
      ]);
    }
    // Attempt to write, read back and delete a test object so the
    // credentials and bucket permissions are exercised end to end.
    $date = date('dmy-Hi');
    $key_path = "s3fs-tests-results";
    if (!empty($config['root_folder'])) {
      $key_path = "{$config['root_folder']}/{$key_path}";
    }
    $key = "{$key_path}/write-test-{$date}.txt";
    $successPut = FALSE;
    $successDelete = FALSE;
    $exceptionCaught = FALSE;
    try {
      $putOptions = [
        'Body' => 'Example file uploaded successfully.',
        'Bucket' => $config['bucket'],
        'Key' => $key,
      ];
      if (!empty($config['encryption'])) {
        $putOptions['ServerSideEncryption'] = $config['encryption'];
      }
      if (!empty($config['cache_control_header'])) {
        $putOptions['CacheControl'] = $config['cache_control_header'];
      }
      $s3->putObject($putOptions);
      $object = $s3->getObject([
        'Bucket' => $config['bucket'],
        'Key' => $key,
      ]);
      if ($object) {
        $successPut = TRUE;
        $s3->deleteObject([
          'Bucket' => $config['bucket'],
          'Key' => $key,
        ]);
        $successDelete = TRUE;
      }
    } catch (\Exception $e) {
      $exceptionCaught = $e;
    }
    if (!empty($config['read_only']) && ($successPut || $successDelete)) {
      $errors[] = $this->t('The provided credentials are not read-only.');
    }
    elseif ($exceptionCaught) {
      $errors[] = $this->t('An unexpected error occurred. @message', [
        '@message' => $exceptionCaught->getMessage(),
      ]);
    }
    // When the credentials are not read-only and uploads are not forced to
    // be private, verify that a publicly readable object can be created.
    if (empty($config['read_only']) && !Settings::get('s3fs.upload_as_private')) {
      try {
        $key = "{$key_path}/public-write-test-{$date}.txt";
        $putOptions = [
          'Body' => 'Example public file uploaded successfully.',
          'Bucket' => $config['bucket'],
          'Key' => $key,
          'ACL' => 'public-read',
        ];
        if (!empty($config['encryption'])) {
          $putOptions['ServerSideEncryption'] = $config['encryption'];
        }
        $s3->putObject($putOptions);
        if ($object = $s3->getObject([
          'Bucket' => $config['bucket'],
          'Key' => $key,
        ])) {
          $s3->deleteObject([
            'Bucket' => $config['bucket'],
            'Key' => $key,
          ]);
        }
      } catch (S3Exception $e) {
        $errors[] = $this->t("Could not upload the file as publicly accessible. If the bucket security policy is set to BlockPublicAcl, ensure that upload_as_private is enabled in your settings.php: \$settings['s3fs.upload_as_private'] = TRUE;");
        $errors[] = $this->t('Error message: @message', [
          '@message' => $e->getMessage(),
        ]);
      } catch (\Exception $e) {
        $errors[] = $this->t('An unexpected error occurred. @message', [
          '@message' => $e->getMessage(),
        ]);
      }
    }
    // listObjectVersions support is required unless version sync is disabled.
    if (empty($config['disable_version_sync'])) {
      $args = $this->getListObjectVersionArgs($config);
      $args['MaxKeys'] = '1';
      try {
        $s3->listObjectVersions($args);
      } catch (\Exception $e) {
        $errors[] = $this->t('Unable to call listObjectVersions. Is listObjectVersions supported by your bucket? @message', [
          '@message' => $e->getMessage(),
        ]);
      }
    }
    return $errors;
  }
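
  /**
   * Instantiates (or returns a statically cached) AWS S3 client.
   *
   * Credentials are resolved in order from settings.php, the Key module (if
   * installed), a custom credentials INI file, and finally the SDK's default
   * credential provider chain.
   *
   * @param array $config
   *   The s3fs configuration array.
   *
   * @return \Aws\S3\S3Client
   *   The configured S3 client.
   */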
  public function getAmazonS3Client(array $config) {
    $s3 =& drupal_static(__METHOD__ . '_S3Client');
    $static_config =& drupal_static(__METHOD__ . '_static_config');

    // Only build a new client when none is cached or the config has changed.
    if (!isset($s3) || $static_config != $config) {
      $client_config = [];
      $access_key = Settings::get('s3fs.access_key', '');
      $secret_key = Settings::get('s3fs.secret_key', '');
      $noKeyInSettings = empty($access_key) || empty($secret_key);

      // Fall back to the Key module for any key not defined in settings.php.
      if ($noKeyInSettings && $this->moduleHandler->moduleExists('key')) {
        if (!$access_key && !empty($config['keymodule']['access_key_name'])) {
          $key = \Drupal::service('key.repository')->getKey($config['keymodule']['access_key_name']);
          $key_value = $key->getKeyValue();
          if (!empty($key_value)) {
            $access_key = $key_value;
          }
        }
        if (!$secret_key && !empty($config['keymodule']['secret_key_name'])) {
          $key = \Drupal::service('key.repository')->getKey($config['keymodule']['secret_key_name']);
          $key_value = $key->getKeyValue();
          if (!empty($key_value)) {
            $secret_key = $key_value;
          }
        }
      }

      if (!empty($access_key) && !empty($secret_key)) {
        $client_config['credentials'] = [
          'key' => $access_key,
          'secret' => $secret_key,
        ];
      }
      else {
        // No explicit keys: build a credential provider chain, optionally
        // prepending a custom INI file and caching the resolved credentials.
        $provider = CredentialProvider::defaultProvider();
        if (!empty($config['credentials_file'])) {
          $iniProvider = CredentialProvider::ini(NULL, $config['credentials_file']);
          $provider = CredentialProvider::chain($iniProvider, $provider);
        }
        $provider = CredentialProvider::memoize($provider);
        $doctrineInstalled = class_exists('\\Doctrine\\Common\\Cache\\FilesystemCache');
        if (!empty($config['use_credentials_cache']) && !empty($config['credentials_cache_dir']) && $doctrineInstalled) {
          $cache = new DoctrineCacheAdapter(new FilesystemCache($config['credentials_cache_dir'] . '/s3fscache', '.doctrine.cache', 017));
          $provider = CredentialProvider::cache($provider, $cache);
        }
        $client_config['credentials'] = $provider;
      }

      if (!empty($config['region'])) {
        $client_config['region'] = $config['region'];
        $client_config['signature'] = 'v4';
      }
      if (!empty($config['use_customhost']) && !empty($config['hostname'])) {
        if (preg_match('#http(s)?://#i', $config['hostname']) === 1) {
          $client_config['endpoint'] = $config['hostname'];
        }
        else {
          $client_config['endpoint'] = ($config['use_https'] ? 'https://' : 'http://') . $config['hostname'];
        }
      }
      if (!empty($config['use_path_style_endpoint'])) {
        $client_config['use_path_style_endpoint'] = TRUE;
      }
      $client_config['version'] = S3fsStream::API_VERSION;
      if (!empty($config['disable_cert_verify'])) {
        $client_config['http']['verify'] = FALSE;
      }
      if (!empty($config['disable_shared_config_files'])) {
        $client_config['use_aws_shared_config_files'] = FALSE;
      }

      $s3 = new S3Client($client_config);
      $static_config = $config;
    }
    return $s3;
  }
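
  /**
   * Builds the base arguments for bucket listing calls.
   *
   * @param array $config
   *   The s3fs configuration array.
   *
   * @return array
   *   An arguments array containing the Bucket and, if a root folder is
   *   configured, a Prefix.
   */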
  public static function getListObjectVersionArgs(array $config) {
    $args = [
      'Bucket' => $config['bucket'],
    ];
    if (!empty($config['root_folder'])) {
      $args['Prefix'] = "{$config['root_folder']}/";
    }
    return $args;
  }
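
  /**
   * Refreshes the s3fs metadata cache.
   *
   * Pages through the bucket (via ListObjectVersions or ListObjectsV2,
   * depending on configuration), writes the object metadata and discovered
   * folders into a temporary table, swaps that table in as the live
   * {s3fs_file} table, and invalidates the s3fs cache tag.
   *
   * @param array $config
   *   The s3fs configuration array.
   */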
  public function refreshCache(array $config) {
    $s3 = $this->getAmazonS3Client($config);
    $args = $this->getListObjectVersionArgs($config);

    try {
      $operation = empty($config['disable_version_sync']) ? "ListObjectVersions" : "ListObjectsV2";
      $paginator = $s3->getPaginator($operation, $args);
    } catch (\Exception $e) {
      watchdog_exception('S3FS', $e);
      $this->messenger()->addStatus($this->t('Error refreshing cache. Please check the logs for more info.'));
      return;
    }

    $file_metadata_list = [];
    $folders = $this->getExistingFolders();
    $this->setupTempTable();

    try {
      foreach ($paginator as $result) {
        if ($result->hasKey('Versions')) {
          foreach ($result->get('Versions') as $s3_metadata) {
            $this->getObjectMetadata($file_metadata_list, $folders, $s3_metadata, $config);
            if (count($file_metadata_list) >= 10000) {
              // Flush the metadata buffer to the temporary table in batches.
              $this->writeTemporaryMetadata($file_metadata_list, $folders);
            }
          }
        }
        elseif ($result->hasKey('Contents')) {
          foreach ($result->get('Contents') as $s3_metadata) {
            $this->getObjectMetadata($file_metadata_list, $folders, $s3_metadata, $config);
            if (count($file_metadata_list) >= 10000) {
              $this->writeTemporaryMetadata($file_metadata_list, $folders);
            }
          }
        }
      }
    } catch (\Exception $e) {
      watchdog_exception('S3FS', $e);
      $this->messenger()->addStatus($this->t('Error refreshing cache. Please check the logs for more info.'));
      return;
    }

    // Write any remaining metadata, record the folders, swap the tables in
    // and invalidate cached s3fs data.
    $this->writeTemporaryMetadata($file_metadata_list, $folders);
    $this->writeFolders($folders);
    $this->setTables();
    Cache::invalidateTags([
      S3FS_CACHE_TAG,
    ]);
    $this->messenger()->addStatus($this->t('S3 File System cache refreshed.'));
  }
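
  /**
   * Writes folder records to the temporary metadata table.
   *
   * @param array $folders
   *   An array keyed by folder URI.
   */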
  public function writeFolders(array $folders) {
    if ($folders) {
      // Chunk the insert to keep individual queries to a manageable size.
      $chunks = array_chunk($folders, 10000, TRUE);
      foreach ($chunks as $chunk) {
        $insert_query = \Drupal::database()
          ->insert('s3fs_file_temp')
          ->fields([
            'uri',
            'filesize',
            'timestamp',
            'dir',
            'version',
          ]);
        foreach ($chunk as $folder_uri => $ph) {
          $metadata = $this->convertMetadata($folder_uri, []);
          $insert_query->values($metadata);
        }
        $insert_query->execute();
      }
    }
  }
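
  /**
   * Converts one S3 listing record into cache metadata.
   *
   * Strips the configured root folder, maps the public/private folders onto
   * their stream wrapper schemes, and either registers a folder or appends
   * the converted record to the metadata buffer.
   *
   * @param array &$file_metadata_list
   *   The metadata buffer to append to.
   * @param array &$folders
   *   The set of known folder URIs.
   * @param array $s3_metadata
   *   A single record from the S3 listing.
   * @param array $config
   *   The s3fs configuration array.
   */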
  public function getObjectMetadata(array &$file_metadata_list, array &$folders, array $s3_metadata, array $config) {
    $key = $s3_metadata['Key'];
    if (!empty($config['root_folder'])) {
      $key = substr_replace($key, '', 0, strlen($config['root_folder']) + 1);
    }
    // Map the public/private folders in the bucket to their schemes;
    // everything else belongs to the s3:// scheme.
    $public_folder_name = !empty($config['public_folder']) ? $config['public_folder'] : 's3fs-public';
    $private_folder_name = !empty($config['private_folder']) ? $config['private_folder'] : 's3fs-private';
    if (strpos($key, "{$public_folder_name}/") === 0) {
      $key = substr_replace($key, '', 0, strlen($public_folder_name) + 1);
      $uri = "public://{$key}";
    }
    elseif (strpos($key, "{$private_folder_name}/") === 0) {
      $key = substr_replace($key, '', 0, strlen($private_folder_name) + 1);
      $uri = "private://{$key}";
    }
    else {
      $uri = "s3://{$key}";
    }
    if (mb_strlen(rtrim($uri, '/')) > S3fsServiceInterface::MAX_URI_LENGTH) {
      return;
    }
    if ($uri[strlen($uri) - 1] == '/') {
      // Keys with a trailing slash are treated as folder placeholders.
      $folders[rtrim($uri, '/')] = TRUE;
    }
    else {
      // Skip records that are not the latest version or that have no
      // StorageClass, and drop the literal 'null' VersionId reported for
      // unversioned buckets.
      if (isset($s3_metadata['IsLatest']) && !$s3_metadata['IsLatest']) {
        return;
      }
      if (!isset($s3_metadata['StorageClass'])) {
        return;
      }
      if (isset($s3_metadata['VersionId']) && $s3_metadata['VersionId'] == 'null') {
        unset($s3_metadata['VersionId']);
      }
      $file_metadata_list[] = $this->convertMetadata($uri, $s3_metadata);
    }
  }
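
  /**
   * Loads the folder URIs currently recorded in the {s3fs_file} table.
   *
   * @return array
   *   An array keyed by folder URI (without trailing slash).
   */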
  public function getExistingFolders() {
    $folders = [];
    $existing_folders = \Drupal::database()
      ->select('s3fs_file', 's')
      ->fields('s', [
        'uri',
      ])
      ->condition('dir', 1, '=');
    foreach ($existing_folders->execute()->fetchCol(0) as $folder_uri) {
      $folders[rtrim($folder_uri, '/')] = TRUE;
    }
    return $folders;
  }
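
  /**
   * Creates (or empties) the s3fs_file_temp working table.
   */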
  public function setupTempTable() {
    module_load_install('s3fs');
    $schema = s3fs_schema();
    try {
      \Drupal::database()->schema()->dropTable('s3fs_file_temp');
      \Drupal::database()->schema()->createTable('s3fs_file_temp', $schema['s3fs_file']);
      s3fs_fix_table_indexes('s3fs_file_temp');
    } catch (SchemaObjectExistsException $e) {
      // The table already exists; reuse it after emptying it.
      \Drupal::database()->truncate('s3fs_file_temp')->execute();
    }
  }
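
  /**
   * Swaps the temporary metadata table in as the live {s3fs_file} table.
   */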
  public function setTables() {
    \Drupal::database()->schema()->renameTable('s3fs_file', 's3fs_file_old');
    \Drupal::database()->schema()->renameTable('s3fs_file_temp', 's3fs_file');
    \Drupal::database()->schema()->dropTable('s3fs_file_old');
  }
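
  /**
   * Converts S3 object metadata into an {s3fs_file} table row.
   *
   * @param string $uri
   *   The stream wrapper URI of the object.
   * @param array $s3_metadata
   *   The S3 metadata; an empty array marks the URI as a folder.
   *
   * @return array
   *   The row values: uri, filesize, timestamp, dir and version.
   */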
  public function convertMetadata($uri, array $s3_metadata) {
    $metadata = [
      'uri' => $uri,
      'filesize' => 0,
      'timestamp' => $this->time->getRequestTime(),
      'dir' => 0,
      'version' => '',
    ];
    if (empty($s3_metadata)) {
      $metadata['dir'] = 1;
    }
    else {
      if (isset($s3_metadata['ContentLength'])) {
        $metadata['filesize'] = $s3_metadata['ContentLength'];
      }
      elseif (isset($s3_metadata['Size'])) {
        $metadata['filesize'] = $s3_metadata['Size'];
      }
      if (isset($s3_metadata['LastModified'])) {
        $metadata['timestamp'] = date('U', strtotime($s3_metadata['LastModified']));
      }
      if (isset($s3_metadata['VersionId']) && $s3_metadata['VersionId'] != 'null') {
        $metadata['version'] = $s3_metadata['VersionId'];
      }
    }
    return $metadata;
  }
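
  /**
   * Writes buffered file metadata to the temporary table.
   *
   * Also registers every ancestor directory of each written URI as a folder,
   * then empties the buffer.
   *
   * @param array &$file_metadata_list
   *   The metadata rows to write; emptied on return.
   * @param array &$folders
   *   The set of known folder URIs, updated with the parent directories.
   */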
  public function writeTemporaryMetadata(array &$file_metadata_list, array &$folders) {
    if ($file_metadata_list) {
      $insert_query = \Drupal::database()
        ->insert('s3fs_file_temp')
        ->fields([
          'uri',
          'filesize',
          'timestamp',
          'dir',
          'version',
        ]);
      foreach ($file_metadata_list as $metadata) {
        $insert_query->values($metadata);
        // Walk up the directory tree so every ancestor is recorded as a
        // folder.
        $uri = \Drupal::service('file_system')->dirname($metadata['uri']);
        $root = StreamWrapperManager::getScheme($uri) . '://';
        while ($uri != $root && \Drupal::service('stream_wrapper_manager')->isValidUri($uri)) {
          $folders[$uri] = TRUE;
          $uri = \Drupal::service('file_system')->dirname($uri);
        }
      }
      $insert_query->execute();
    }
    $file_metadata_list = [];
  }
}