Switch to s3 multipart uploads for backups

This commit is contained in:
Matthew Penner 2020-10-31 17:44:20 -06:00
parent 23d2352a9b
commit 85af073438
7 changed files with 137 additions and 41 deletions
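Before this change, InitiateBackupService generated a single presigned PutObject URL and passed it to wings alongside the backup request; that flow is removed below. Instead, wings asks the new BackupRemoteUploadController for upload URLs (passing the final archive size via ?size=), and the panel opens an S3 multipart upload and returns presigned UploadPart, CompleteMultipartUpload, and AbortMultipartUpload URLs, with the archive split into parts of at most PART_SIZE (5 GiB). A minimal sketch of that part arithmetic, using a hypothetical 12 GiB archive (the numbers are illustrative, not from the commit):

<?php
// Hypothetical example of the part split performed by BackupRemoteUploadController.
const PART_SIZE = 5 * 1024 * 1024 * 1024;           // 5 GiB, same as the controller below

$size = 12 * 1024 * 1024 * 1024;                     // archive size reported by wings via ?size=
$partCount = (int) ceil($size / PART_SIZE);          // => 3 presigned UploadPart URLs
$finalPart = $size - ($partCount - 1) * PART_SIZE;   // => 2 GiB; only the last part may be undersized

echo $partCount . ' parts, final part of ' . $finalPart . ' bytes' . PHP_EOL;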

View file

@@ -0,0 +1,131 @@
<?php
namespace Pterodactyl\Http\Controllers\Api\Remote\Backups;
use Carbon\CarbonImmutable;
use Illuminate\Http\Request;
use Illuminate\Http\JsonResponse;
use League\Flysystem\AwsS3v3\AwsS3Adapter;
use Pterodactyl\Http\Controllers\Controller;
use Pterodactyl\Extensions\Backups\BackupManager;
use Pterodactyl\Repositories\Eloquent\BackupRepository;
class BackupRemoteUploadController extends Controller
{
// I went with 1024 here, but I'm unsure whether AWS or other S3 servers measure
// this limit in SI gigabytes (base 10) or proper IEC gibibytes (base 2).
// const PART_SIZE = 5 * 1000 * 1000 * 1000;
const PART_SIZE = 5 * 1024 * 1024 * 1024;
/**
* @var \Pterodactyl\Repositories\Eloquent\BackupRepository
*/
private $repository;
/**
* @var \Pterodactyl\Extensions\Backups\BackupManager
*/
private $backupManager;
/**
* BackupRemoteUploadController constructor.
*
* @param \Pterodactyl\Repositories\Eloquent\BackupRepository $repository
* @param \Pterodactyl\Extensions\Backups\BackupManager $backupManager
*/
public function __construct(BackupRepository $repository, BackupManager $backupManager)
{
$this->repository = $repository;
$this->backupManager = $backupManager;
}
/**
* Returns the presigned URLs required to upload a backup archive to S3.
*
* @param \Illuminate\Http\Request $request
* @param string $backup
*
* @return \Illuminate\Http\JsonResponse
*
* @throws \Pterodactyl\Exceptions\Repository\RecordNotFoundException
* @throws \Exception
*/
public function __invoke(Request $request, string $backup)
{
$size = $request->query('size', null);
if ($size === null) {
return new JsonResponse([], JsonResponse::HTTP_BAD_REQUEST);
}
/** @var \Pterodactyl\Models\Backup $model */
$model = $this->repository->findFirstWhere([[ 'uuid', '=', $backup ]]);
// Prevent backups that have already been completed from trying to
// be uploaded again.
if (! is_null($model->completed_at)) {
return new JsonResponse([], JsonResponse::HTTP_CONFLICT);
}
// Ensure we are using the S3 adapter.
$adapter = $this->backupManager->adapter();
if (! $adapter instanceof AwsS3Adapter) {
return new JsonResponse([], JsonResponse::HTTP_BAD_REQUEST);
}
$path = sprintf('%s/%s.tar.gz', $model->server->uuid, $model->uuid);
$client = $adapter->getClient();
$result = $client->execute($client->getCommand('CreateMultipartUpload', [
'Bucket' => $adapter->getBucket(),
'Key' => $path,
'ContentType' => 'application/x-gzip',
]));
$uploadId = $result->get('UploadId');
$completeMultipartUpload = $client->createPresignedRequest(
$client->getCommand('CompleteMultipartUpload', [
'Bucket' => $adapter->getBucket(),
'Key' => $path,
'ContentType' => 'application/x-gzip',
'UploadId' => $uploadId,
]),
CarbonImmutable::now()->addMinutes(30)
);
$abortMultipartUpload = $client->createPresignedRequest(
$client->getCommand('AbortMultipartUpload', [
'Bucket' => $adapter->getBucket(),
'Key' => $path,
'ContentType' => 'application/x-gzip',
'UploadId' => $uploadId,
]),
CarbonImmutable::now()->addMinutes(45)
);
// Number of presigned part URLs required to cover the reported archive size.
$partCount = (int) ceil($size / self::PART_SIZE);
$parts = [];
for ($i = 0; $i < $partCount; $i++) {
$part = $client->createPresignedRequest(
$client->getCommand('UploadPart', [
'Bucket' => $adapter->getBucket(),
'Key' => $path,
'ContentType' => 'application/x-gzip',
'UploadId' => $uploadId,
'PartNumber' => $i + 1,
]),
CarbonImmutable::now()->addMinutes(30)
);
array_push($parts, $part->getUri()->__toString());
}
return new JsonResponse([
'CompleteMultipartUpload' => $completeMultipartUpload->getUri()->__toString(),
'AbortMultipartUpload' => $abortMultipartUpload->getUri()->__toString(),
'Parts' => $parts,
'PartSize' => self::PART_SIZE,
], JsonResponse::HTTP_OK);
}
}
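
The controller above only hands out presigned URLs; the upload itself is performed by wings (written in Go), so nothing in this commit shows the consuming side. The sketch below is a hedged illustration of standard S3 multipart semantics, written in PHP to match the rest of the commit: PUT each chunk to its UploadPart URL, collect the returned ETag headers, then POST the completion XML to the CompleteMultipartUpload URL (or DELETE the AbortMultipartUpload URL on failure). Guzzle, the sample URLs, and the archive path are assumptions for illustration, not part of the change.

<?php
use GuzzleHttp\Client;

// A hedged sketch, not the daemon's implementation. Assumes Guzzle is installed
// via Composer; URLs and the local archive path are hypothetical.
require __DIR__ . '/vendor/autoload.php';

// Decoded response from BackupRemoteUploadController (hypothetical sample values).
$payload = [
    'Parts' => [
        'https://s3.example.com/server-uuid/backup-uuid.tar.gz?partNumber=1&...',
        'https://s3.example.com/server-uuid/backup-uuid.tar.gz?partNumber=2&...',
    ],
    'PartSize' => 5 * 1024 * 1024 * 1024,
    'CompleteMultipartUpload' => 'https://s3.example.com/server-uuid/backup-uuid.tar.gz?complete&...',
    'AbortMultipartUpload' => 'https://s3.example.com/server-uuid/backup-uuid.tar.gz?abort&...',
];

$http = new Client();
$etags = [];
$handle = fopen('/tmp/backup.tar.gz', 'rb'); // hypothetical local archive

foreach ($payload['Parts'] as $i => $url) {
    // A real client would stream each part rather than buffering it in memory.
    $chunk = fread($handle, $payload['PartSize']);
    $response = $http->put($url, ['body' => $chunk]);

    // S3 returns an ETag header for every uploaded part; the ETags are required
    // later to complete the multipart upload.
    $etags[$i + 1] = $response->getHeaderLine('ETag');
}
fclose($handle);

// CompleteMultipartUpload expects an XML body listing every part number and ETag.
$xml = '<CompleteMultipartUpload>';
foreach ($etags as $number => $etag) {
    $xml .= sprintf('<Part><PartNumber>%d</PartNumber><ETag>%s</ETag></Part>', $number, $etag);
}
$xml .= '</CompleteMultipartUpload>';

$http->post($payload['CompleteMultipartUpload'], ['body' => $xml]);

// On failure, the upload should be aborted so S3 does not keep orphaned parts:
// $http->delete($payload['AbortMultipartUpload']);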

View file

@@ -2,12 +2,10 @@
namespace Pterodactyl\Http\Controllers\Api\Remote\Backups;
use Carbon\Carbon;
use Carbon\CarbonImmutable;
use Illuminate\Http\JsonResponse;
use Pterodactyl\Http\Controllers\Controller;
use Pterodactyl\Repositories\Eloquent\BackupRepository;
use Pterodactyl\Exceptions\Http\HttpForbiddenException;
use Symfony\Component\HttpKernel\Exception\BadRequestHttpException;
use Pterodactyl\Http\Requests\Api\Remote\ReportBackupCompleteRequest;
@@ -42,7 +40,7 @@ class BackupStatusController extends Controller
/** @var \Pterodactyl\Models\Backup $model */
$model = $this->repository->findFirstWhere([[ 'uuid', '=', $backup ]]);
if (!is_null($model->completed_at)) {
if (! is_null($model->completed_at)) {
throw new BadRequestHttpException(
'Cannot update the status of a backup that is already marked as completed.'
);

View file

@@ -7,7 +7,7 @@ use Illuminate\Database\Eloquent\SoftDeletes;
/**
* @property int $id
* @property int $server_id
* @property int $uuid
* @property string $uuid
* @property bool $is_successful
* @property string $name
* @property string[] $ignored_files

View file

@@ -33,12 +33,11 @@ class DaemonBackupRepository extends DaemonRepository
* Tells the remote Daemon to begin generating a backup for the server.
*
* @param \Pterodactyl\Models\Backup $backup
* @param string|null $presignedUrl
* @return \Psr\Http\Message\ResponseInterface
*
* @throws \Pterodactyl\Exceptions\Http\Connection\DaemonConnectionException
*/
public function backup(Backup $backup, string $presignedUrl = null): ResponseInterface
public function backup(Backup $backup): ResponseInterface
{
Assert::isInstanceOf($this->server, Server::class);
@@ -50,7 +49,6 @@ class DaemonBackupRepository extends DaemonRepository
'adapter' => $this->adapter ?? config('backups.default'),
'uuid' => $backup->uuid,
'ignored_files' => $backup->ignored_files,
'presigned_url' => $presignedUrl,
],
]
);

View file

@@ -7,7 +7,6 @@ use Carbon\CarbonImmutable;
use Webmozart\Assert\Assert;
use Pterodactyl\Models\Backup;
use Pterodactyl\Models\Server;
use League\Flysystem\AwsS3v3\AwsS3Adapter;
use Illuminate\Database\ConnectionInterface;
use Pterodactyl\Extensions\Backups\BackupManager;
use Pterodactyl\Repositories\Eloquent\BackupRepository;
@@ -122,42 +121,11 @@ class InitiateBackupService
'disk' => $this->backupManager->getDefaultAdapter(),
], true, true);
$url = $this->getS3PresignedUrl(sprintf('%s/%s.tar.gz', $server->uuid, $backup->uuid));
$this->daemonBackupRepository->setServer($server)
->setBackupAdapter($this->backupManager->getDefaultAdapter())
->backup($backup, $url);
->backup($backup);
return $backup;
});
}
/**
* Generates a presigned URL for the wings daemon to upload the completed archive
* to. We use a 30 minute expiration on these URLs to avoid issues with large backups
* that may take some time to complete.
*
* @param string $path
* @return string|null
*/
protected function getS3PresignedUrl(string $path)
{
$adapter = $this->backupManager->adapter();
if (! $adapter instanceof AwsS3Adapter) {
return null;
}
$client = $adapter->getClient();
$request = $client->createPresignedRequest(
$client->getCommand('PutObject', [
'Bucket' => $adapter->getBucket(),
'Key' => $path,
'ContentType' => 'application/x-gzip',
]),
CarbonImmutable::now()->addMinutes(30)
);
return $request->getUri()->__toString();
}
}

View file

@@ -29,7 +29,7 @@ export default ({ backup, className }: Props) => {
items: data.items.map(b => b.uuid !== backup.uuid ? b : ({
...b,
isSuccessful: parsed.is_successful || true,
checksum: parsed.checksum || '',
checksum: (parsed.checksum_type || '') + ':' + (parsed.checksum || ''),
bytes: parsed.file_size || 0,
completedAt: new Date(),
})),

View file

@@ -18,5 +18,6 @@ Route::group(['prefix' => '/servers/{uuid}'], function () {
});
Route::group(['prefix' => '/backups'], function () {
Route::get('/{backup}', 'Backups\BackupRemoteUploadController');
Route::post('/{backup}', 'Backups\BackupStatusController');
});