123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566 |
- i<?php
- use Aws\S3\S3Client;
- use Aws\Exception\AwsException;
- use Aws\S3\ObjectUploader;
- use Aws\S3\MultipartUploader;
- use Aws\Exception\MultipartUploadException;
- require_once(dirname(__FILE__).'/3rdparty/autoload.php');
- $CONFIG = dirname(__FILE__).'/storage.config.php';
- include($CONFIG);
- echo "\nconnect to S3...\n";
- $bucket_name = $CONFIG['objectstore']['arguments']['bucket'];
- $s3Client = new S3Client([
- 'version' => 'latest',
- 'endpoint' => $CONFIG['objectstore']['arguments']['hostname'],
- 'region' => $CONFIG['objectstore']['arguments']['region'],
- 'credentials' => [
- 'key' => $CONFIG['objectstore']['arguments']['key'],
- 'secret' => $CONFIG['objectstore']['arguments']['secret'],
- ],
- 'use_path_style_endpoint' => $CONFIG['objectstore']['arguments']['use_path_style']
- ]);
- $bucket = 'nextcloud';
- $file_Path = '/storage/nextcloud/test_pedro.gpkg';
- $files = [$file_Path];
- $key = basename($file_Path);
- echo "\nPHP: copy file : ".$file_Path."\n";
- // Using stream instead of file path
- $source = fopen($file_Path, 'rb');
- $uploader = new ObjectUploader(
- $s3Client,
- $bucket,
- $key,
- $source,
- 'private',
- [ 'concurrency' => 5, 'part_size' => 1536*1024*1024 ]
- );
- $start_time = microtime(true);
- do {
- try {
- $result = $uploader->upload();
- if ($result["@metadata"]["statusCode"] == '200') {
- print('File successfully uploaded to ' . $result["ObjectURL"]);
- }
- } catch (MultipartUploadException $e) {
- rewind($source);
- $uploader = new MultipartUploader($s3Client, $source, [
- 'state' => $e->getState(),
- 'acl' => 'public-read',
- ]);
- }
- } while (!isset($result));
- fclose($source);
- $end_time = microtime(true);
- $execution_time = ($end_time - $start_time);
- echo "\nExecution time of script = ".$execution_time." sec\n";
|