'latest', 'endpoint' => $CONFIG['objectstore']['arguments']['hostname'].':'.$CONFIG['objectstore']['arguments']['port'], 'region' => $CONFIG['objectstore']['arguments']['region'], 'credentials' => [ 'key' => $CONFIG['objectstore']['arguments']['key'], 'secret' => $CONFIG['objectstore']['arguments']['secret'], ], 'use_path_style_endpoint' => $CONFIG['objectstore']['arguments']['use_path_style'] ]); echo "...succeed\n"; /* $buckets = $s3->listBuckets([]); echo $buckets; echo "\n"; try { foreach ($buckets['Buckets'] as $bucket){ echo "{$bucket['Name']}\t{$bucket['CreationDate']}\n"; } } catch (S3Exception $e) { echo $e->getMessage(); echo "\n"; } echo "\n". "\nread files in S3...\n"; echo "The contents of your bucket $bucket_name are: \n\n"; $objects = S3list($s3, $bucket_name); echo 'S3list:'.print_r($objects); $result_s3 = S3put($s3, $bucket, [ 'SourceFile' => './nextcloud_25.tar.gz', ]); echo 'S3put:'.$result_s3; */ $bucket = 'nextcloud'; $file_Path = './nextcloud_25.tar.gz'; $key = basename($file_Path); echo "\nCopy file : ".$file_Path."\n"; $start_time = microtime(true); try{ $result = $s3->putObject([ 'Bucket' => $bucket, 'Key' => $key, 'SourceFile' => $file_Path, 'ACL' => 'private', ]); } catch (S3Exception $e) { echo $e->getMessage() . "\n"; } $end_time = microtime(true); $execution_time = ($end_time - $start_time); echo " Execution time of script = ".$execution_time." 
sec\n"; echo 'S3put:'.$result_s3; // NOTE(review): $result_s3 is only assigned inside the commented-out demo above — this echoes an undefined variable; confirm intent

//#########################################################################################
/**
 * List every object in an S3 bucket, following listObjectsV2 pagination.
 *
 * @param \Aws\S3\S3Client $s3           Configured S3 client.
 * @param string           $bucket       Bucket name to list.
 * @param int              $maxIteration Safety cap on the number of pages fetched.
 *
 * @return array|string Array of object metadata maps (each entry has a 'Key'),
 *                      or an 'ERROR: ...' string on failure (legacy contract,
 *                      kept for backward compatibility with existing callers).
 */
function S3list($s3, $bucket, $maxIteration = 10000000)
{
    $objects = [];
    try {
        $iteration = 0;
        $params = ['Bucket' => $bucket];
        do {
            // FIX: the original never forwarded a pagination token, so every
            // iteration re-fetched the first page and accumulated duplicates
            // (and 'Marker' is a v1 listObjects parameter anyway).
            // listObjectsV2 paginates via ContinuationToken/NextContinuationToken.
            $result = $s3->listObjectsV2($params);
            if (rand(0, 100) > 75) {
                echo '.'; // coarse, randomised progress indicator for long listings
            }
            if ($result->get('Contents')) {
                $objects = array_merge($objects, $result->get('Contents'));
            }
            $params['ContinuationToken'] = $result->get('NextContinuationToken');
        } while ($result->get('IsTruncated') && ++$iteration < $maxIteration);
        if ($result->get('IsTruncated')) {
            echo "\n".'WARNING: The number of keys greater than '.count($objects).' (the first part is loaded)';
        }
        return $objects;
    } catch (S3Exception $e) {
        return 'ERROR: Cannot retrieve objects: '.$e->getMessage();
    }
}

//#########################################################################################
/**
 * Upload a local file to S3.
 *
 * Accepts either a putObject-style parameter array or a bare file path.
 * Missing Bucket/Key/ACL entries are filled in from $bucket, the file's
 * basename, and 'private' respectively. When $GLOBALS['MULTIPART_THRESHOLD']
 * (in MiB) is set and the file is larger, a MultipartUploader is used.
 *
 * @param \Aws\S3\S3Client $s3     Configured S3 client.
 * @param string           $bucket Default bucket when $vars carries none.
 * @param array|string     $vars   putObject parameters, or a plain file path.
 *
 * @return string 'OK: ObjectURL:...' on success, 'ERROR: ...' otherwise
 *                (legacy string contract, preserved for existing callers).
 */
function S3put($s3, $bucket, $vars = array())
{
    // Shorthand: a bare string is treated as ['SourceFile' => $path].
    if (is_string($vars)) {
        if (file_exists($vars)) {
            $vars = array('SourceFile' => $vars);
        } else {
            return 'ERROR: S3put($cms, $bucket, $vars)';
        }
    }
    if (empty($vars['Bucket'])) {
        $vars['Bucket'] = $bucket;
    }
    if (empty($vars['Key']) && !empty($vars['SourceFile'])) {
        // FIX: default the object key to the file's basename; the original
        // used the full local path (e.g. './nextcloud_25.tar.gz'), producing
        // awkward keys and disagreeing with the main script, which already
        // uses basename() for its own uploads.
        $vars['Key'] = basename($vars['SourceFile']);
    }
    if (empty($vars['ACL'])) {
        $vars['ACL'] = 'private';
    }
    if (empty($vars['Bucket'])) {
        return 'ERROR: no Bucket';
    }
    if (empty($vars['Key'])) {
        return 'ERROR: no Key';
    }
    // FIX: guard against an unset SourceFile — the original dereferenced it
    // unconditionally, raising a notice when only Key/Body were supplied.
    if (empty($vars['SourceFile']) || !file_exists($vars['SourceFile'])) {
        return 'ERROR: file \''.($vars['SourceFile'] ?? '').'\' does not exist';
    }
    try {
        if (isset($GLOBALS['MULTIPART_THRESHOLD'])
            && filesize($vars['SourceFile']) > $GLOBALS['MULTIPART_THRESHOLD'] * 1024 * 1024) {
            // FIX: MultipartUploader takes its own lower-case config keys
            // ('bucket', 'key', 'acl'); the original passed the putObject-style
            // $vars ('Bucket'/'Key'), leaving the uploader without a target.
            $uploader = new MultipartUploader($s3, $vars['SourceFile'], [
                'bucket' => $vars['Bucket'],
                'key'    => $vars['Key'],
                'acl'    => $vars['ACL'],
            ]);
            $result = $uploader->upload();
        } else {
            if (filesize($vars['SourceFile']) > 2 * 1024 * 1024 * 1024) {
                // FIX: typos in the user-facing warning ("larger then",
                // "enabeling") and the stray newline the collapsed source
                // embedded mid-sentence.
                echo "\n".'WARNING: file \''.$vars['SourceFile'].'\' is larger than 2 Gb, consider enabling \'MultipartUploader\'';
            }
            // FIX: removed leftover print_r($vars) debug output before upload.
            $result = $s3->putObject($vars);
        }
        if (!empty($result['ObjectURL'])) {
            return 'OK: '.'ObjectURL:'.$result['ObjectURL'];
        } else {
            return 'ERROR: '.$vars['Key'].' was not uploaded';
        }
    } catch (MultipartUploadException | S3Exception | Exception $e) {
        return 'ERROR: ' . $e->getMessage();
    }
}