public static function S3fsFileMigrationBatch::copyOperation in S3 File System 8.3
Same name and namespace in other branches
- 4.0.x src/Batch/S3fsFileMigrationBatch.php \Drupal\s3fs\Batch\S3fsFileMigrationBatch::copyOperation()
Batch operation callback that copies files to S3.
Parameters
array $config: Array of configuration settings used to configure the S3 client.
array $file_paths: Array of file paths to process.
int $total: Total number of files to process in the batch.
string $source_folder: Folder to copy files from.
string $target_folder: Folder to copy files to.
string $scheme: Scheme of the files being copied, e.g. 'public'.
array $uploadOptions: Options that control the upload operations.
array|\DrushBatchContext $context: Batch context.
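As a usage illustration, here is a minimal, hedged sketch of queuing this callback through Drupal's Batch API. The configuration source, file list, folder paths, and target prefix are assumptions for the example, not values taken from the module:

use Drupal\s3fs\Batch\S3fsFileMigrationBatch;

// Illustrative values only: a real migration derives the client
// configuration from the s3fs settings and discovers the file list
// under the scheme's root folder.
$config = \Drupal::config('s3fs.settings')->get();
$file_paths = [
  'sites/default/files/a.jpg',
  'sites/default/files/docs/b.pdf',
];

$batch = [
  'title' => t('Copying files to S3'),
  'operations' => [
    [
      [S3fsFileMigrationBatch::class, 'copyOperation'],
      // The Batch API appends the &$context argument automatically.
      [
        $config,
        $file_paths,
        count($file_paths),
        'sites/default/files',
        's3fs-public/',
        'public',
        ['upload_conditions' => []],
      ],
    ],
  ],
];
batch_set($batch);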
Overrides S3fsFileMigrationBatchInterface::copyOperation
File
- src/Batch/S3fsFileMigrationBatch.php, line 135
Class
- S3fsFileMigrationBatch
- Batch migrates files to an S3 bucket.
Namespace
Drupal\s3fs\Batch
Code
public static function copyOperation(array $config, array $file_paths, $total, $source_folder, $target_folder, $scheme, array $uploadOptions, &$context) {
  $s3fs = \Drupal::service('s3fs');
  $s3 = $s3fs->getAmazonS3Client($config);
  $streamWrapper = \Drupal::service('stream_wrapper.s3fs');
  $mimeGuesser = \Drupal::service('file.mime_type.guesser');

  // Initialize the batch results on the first invocation.
  if (!isset($context['results']['scheme'])) {
    $context['results']['scheme'] = $scheme;
    $context['results']['progress'] = 0;
    $context['results']['percent_progress'] = 0;
    $context['results']['total'] = $total;
    $context['results']['time_start'] = time();
    $context['results']['errors'] = [];
  }

  foreach ($file_paths as $path) {
    // Strip the source folder (plus its trailing slash) to get the
    // path relative to the scheme root.
    $relative_path = substr_replace($path, '', 0, strlen($source_folder) + 1);
    $key_path = $target_folder . $relative_path;
    $uri = $scheme . '://' . $relative_path;

    // Skip files whose URI would be too long for the s3fs cache.
    if (mb_strlen($uri) > S3fsServiceInterface::MAX_URI_LENGTH) {
      $context['results']['errors'][] = new TranslatableMarkup('Path @path is too long, upload skipped.', [
        '@path' => $uri,
      ]);
      // Update our progress information.
      self::updateProgress($context);
      continue;
    }

    $uploadConditions = [];
    if (isset($uploadOptions['upload_conditions'])) {
      $uploadConditions = $uploadOptions['upload_conditions'];
    }

    // Skip files that are already in the bucket according to the
    // configured upload conditions.
    if (static::isFileAlreadyUploaded($path, $uri, $uploadConditions)) {
      self::updateProgress($context);
      continue;
    }

    // guessMimeType() is the newer Symfony API; fall back to guess()
    // on older versions.
    if (method_exists($mimeGuesser, 'guessMimeType')) {
      $contentType = $mimeGuesser->guessMimeType($key_path);
    }
    else {
      $contentType = $mimeGuesser->guess($key_path);
    }

    $uploadParams = [
      'Bucket' => $config['bucket'],
      'Key' => $key_path,
      'SourceFile' => $path,
      'ContentType' => $contentType,
    ];
    if (!empty($config['encryption'])) {
      $uploadParams['ServerSideEncryption'] = $config['encryption'];
    }

    $uploadAsPrivate = Settings::get('s3fs.upload_as_private');
    if ($scheme !== 'private' && !$uploadAsPrivate) {
      $uploadParams['ACL'] = 'public-read';
    }

    // Set the Cache-Control header, if the user specified one.
    if (!empty($config['cache_control_header'])) {
      $uploadParams['CacheControl'] = $config['cache_control_header'];
    }

    // Let other modules alter the upload parameters.
    \Drupal::moduleHandler()->alter('s3fs_upload_params', $uploadParams);

    try {
      $s3->putObject($uploadParams);
    }
    catch (\Exception $e) {
      $context['results']['errors'][] = new TranslatableMarkup('Failed to upload @file', [
        '@file' => $path,
      ]);
      self::updateProgress($context);
      continue;
    }

    // Register the newly uploaded object in the s3fs metadata cache.
    $streamWrapper->writeUriToCache($uri);
    self::updateProgress($context);
  }
}
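The 'public-read' ACL above can be suppressed site-wide via the s3fs.upload_as_private setting, which Settings::get() reads from settings.php. A minimal sketch:

// In settings.php: migrate every object without the 'public-read'
// ACL, even for the public scheme.
$settings['s3fs.upload_as_private'] = TRUE;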
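Because the upload parameters are passed through the 's3fs_upload_params' alter hook before putObject() runs, other modules can adjust them. A hedged sketch of an implementation, assuming a module named mymodule; the key prefix and storage class are illustrative, not module defaults:

/**
 * Implements hook_s3fs_upload_params_alter().
 */
function mymodule_s3fs_upload_params_alter(array &$uploadParams) {
  // Illustrative: send objects under an 'archive/' prefix to the
  // S3 Standard-IA storage class.
  if (strpos($uploadParams['Key'], 'archive/') === 0) {
    $uploadParams['StorageClass'] = 'STANDARD_IA';
  }
}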