<?php
/**
 * Documentation: http://docs.amazonwebservices.com/aws-sdk-php-2/latest/class-Aws.S3.S3Client.html
 */
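/**
 * Pro S3 destination: renders the wizard settings page, saves its fields, and
 * syncs the job's files to the bucket (upload new/changed, delete removed).
 */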
class BackWPup_Pro_Destination_S3 extends BackWPup_Destination_S3 {


	/**
	 * Render the wizard settings page for the S3 destination.
	 *
	 * @param array $job_settings
	 */
	public function wizard_page( array $job_settings ) {
		?>
		<table class="form-table">
			<tr>
				<td>
					<fieldset>
						<label for="s3region"><?php esc_html_e( 'Select a S3 service:', 'backwpup' ); ?><br />
						<select name="s3region" id="s3region" title="<?php esc_html_e( 'Amazon S3 Region', 'backwpup' ); ?>">
							<option value="us-east-1" <?php selected( 'us-east-1', $job_settings[ 's3region' ], TRUE ) ?>><?php esc_html_e( 'Amazon S3: US Standard', 'backwpup' ); ?></option>
							<option value="us-west-1" <?php selected( 'us-west-1', $job_settings[ 's3region' ], TRUE ) ?>><?php esc_html_e( 'Amazon S3: US West (Northern California)', 'backwpup' ); ?></option>
							<option value="us-west-2" <?php selected( 'us-west-2', $job_settings[ 's3region' ], TRUE ) ?>><?php esc_html_e( 'Amazon S3: US West (Oregon)', 'backwpup' ); ?></option>
							<option value="eu-west-1" <?php selected( 'eu-west-1', $job_settings[ 's3region' ], TRUE ) ?>><?php esc_html_e( 'Amazon S3: EU (Ireland)', 'backwpup' ); ?></option>
							<option value="eu-central-1" <?php selected( 'eu-central-1', $job_settings[ 's3region' ], TRUE ) ?>><?php esc_html_e( 'Amazon S3: EU (Germany)', 'backwpup' ); ?></option>
							<option value="ap-south-1" <?php selected( 'ap-south-1', $job_settings[ 's3region' ], TRUE ) ?>><?php esc_html_e( 'Amazon S3: Asia Pacific (Mumbai)', 'backwpup' ); ?></option>
							<option value="ap-northeast-1" <?php selected( 'ap-northeast-1', $job_settings[ 's3region' ], TRUE ) ?>><?php esc_html_e( 'Amazon S3: Asia Pacific (Tokyo)', 'backwpup' ); ?></option>
							<option value="ap-northeast-2" <?php selected( 'ap-northeast-2', $job_settings[ 's3region' ], TRUE ) ?>><?php esc_html_e( 'Amazon S3: Asia Pacific (Seoul)', 'backwpup' ); ?></option>
							<option value="ap-southeast-1" <?php selected( 'ap-southeast-1', $job_settings[ 's3region' ], TRUE ) ?>><?php esc_html_e( 'Amazon S3: Asia Pacific (Singapore)', 'backwpup' ); ?></option>
							<option value="ap-southeast-2" <?php selected( 'ap-southeast-2', $job_settings[ 's3region' ], TRUE ) ?>><?php esc_html_e( 'Amazon S3: Asia Pacific (Sydney)', 'backwpup' ); ?></option>
							<option value="sa-east-1" <?php selected( 'sa-east-1', $job_settings[ 's3region' ], TRUE ) ?>><?php esc_html_e( 'Amazon S3: South America (Sao Paulo)', 'backwpup' ); ?></option>
							<option value="cn-north-1" <?php selected( 'cn-north-1', $job_settings[ 's3region' ], TRUE ) ?>><?php esc_html_e( 'Amazon S3: China (Beijing)', 'backwpup' ); ?></option>
							<option value="google-storage" <?php selected( 'google-storage', $job_settings[ 's3region' ], TRUE ) ?>><?php esc_html_e( 'Google Storage: EU', 'backwpup' ); ?></option>
							<option value="google-storage-us" <?php selected( 'google-storage-us', $job_settings[ 's3region' ], TRUE ) ?>><?php esc_html_e( 'Google Storage: USA', 'backwpup' ); ?></option>
							<option value="google-storage-asia" <?php selected( 'google-storage-asia', $job_settings[ 's3region' ], TRUE ) ?>><?php esc_html_e( 'Google Storage: Asia', 'backwpup' ); ?></option>
							<option value="dreamhost" <?php selected( 'dreamhost', $job_settings[ 's3region' ], TRUE ) ?>><?php esc_html_e( 'Dream Host Cloud Storage', 'backwpup' ); ?></option>
							<option value="greenqloud" <?php selected( 'greenqloud', $job_settings[ 's3region' ], TRUE ) ?>><?php esc_html_e( 'GreenQloud Storage Qloud', 'backwpup' ); ?></option>
						</select></label><br/>
						<label for="s3base_url"><?php esc_html_e( 'or set an S3 Server URL:', 'backwpup' ); ?>
							<input id="s3base_url" name="s3base_url" type="text"
								   value="<?php echo esc_attr( $job_settings[ 's3base_url' ] ); ?>" class="large-text" autocomplete="off" /></label><br/>
						<label for="s3accesskey"><strong><?php esc_html_e( 'Access Key:', 'backwpup' ); ?></strong>
							<input id="s3accesskey" name="s3accesskey" type="text"
								   value="<?php echo esc_attr( $job_settings[ 's3accesskey' ] );?>" class="large-text" autocomplete="off" /></label><br/>
						<label for="s3secretkey"><strong><?php esc_html_e( 'Secret Key:', 'backwpup' ); ?></strong><br/>
							<input id="s3secretkey" name="s3secretkey" type="password"
								   value="<?php echo esc_attr( BackWPup_Encryption::decrypt( $job_settings[ 's3secretkey' ] ) );?>" class="large-text" autocomplete="off" /></label><br/>
						<label for="s3bucketselected"><strong><?php esc_html_e( 'Bucket:', 'backwpup' ); ?></strong><br/>
							<input id="s3bucketselected" name="s3bucketselected" type="hidden" value="<?php echo esc_attr( $job_settings[ 's3bucket' ] ); ?>" />
							<?php
							if ( $job_settings[ 's3accesskey' ] && $job_settings[ 's3secretkey' ] ) {
								$this->edit_ajax( array(
									's3accesskey'      => $job_settings[ 's3accesskey' ],
									's3secretkey'      => BackWPup_Encryption::decrypt( $job_settings[ 's3secretkey' ] ),
									's3bucketselected' => $job_settings[ 's3bucket' ],
									's3base_url'       => $job_settings[ 's3base_url' ],
									's3region'         => $job_settings[ 's3region' ],
								) );
							}
							?></label>

						&nbsp;&nbsp;&nbsp;<label for="s3newbucket"><?php esc_html_e('New Bucket:', 'backwpup'); ?><input id="s3newbucket" name="s3newbucket" type="text" value="" class="small-text" autocomplete="off" /></label><br/>
						<br/>
						<label for="ids3dir"><strong><?php esc_html_e( 'Folder in bucket:', 'backwpup' ); ?></strong><br/>
							<input name="s3dir" id="ids3dir" type="text" value="<?php echo esc_attr( $job_settings[ 's3dir' ] ); ?>"  class="large-text" /></label><br/>

						<?php
						if ( $job_settings[ 'backuptype' ] === 'archive' ) {
							?>
							<label id="ids3maxbackups">
								<input
									name="s3maxbackups"
									id="ids3maxbackups"
									type="number"
									min="0"
									step="1"
									value="<?php echo esc_attr( $job_settings[ 's3maxbackups' ] );?>"
									class="small-text"
								/>
								<?php esc_html_e( 'Number of files to keep in folder.', 'backwpup' ); ?></label>
							<br/>
						<?php } else { ?>
							<label for="ids3syncnodelete"><input class="checkbox" value="1"
																 type="checkbox" <?php checked(  $job_settings[ 's3syncnodelete' ], TRUE ); ?>
																 name="s3syncnodelete" id="ids3syncnodelete" /> <?php esc_html_e( 'Do not delete files while syncing to destination!', 'backwpup' ); ?></label>
							<br/>
						<?php } ?>
					</fieldset>
				</td>
			</tr>
		</table>
	<?php
	}

	/**
	 * Save the wizard form fields into the job settings.
	 *
	 * @param array $job_settings
	 *
	 * @return array
	 */
	public function wizard_save( array $job_settings ) {

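		// The wizard exposes only the basic options; server-side encryption and
		// storage class are reset to their defaults here.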
		$job_settings[ 's3ssencrypt' ]    = '';
		$job_settings[ 's3storageclass' ] = '';
		$job_settings[ 's3accesskey' ] = isset( $_POST[ 's3accesskey' ] ) ? sanitize_text_field( $_POST[ 's3accesskey' ] ) : '';
		$job_settings[ 's3secretkey' ] = isset( $_POST[ 's3secretkey' ] ) ? BackWPup_Encryption::encrypt( (string) $_POST[ 's3secretkey' ] ) : '';
		$job_settings[ 's3base_url' ]  = isset( $_POST[ 's3base_url' ] ) ? esc_url_raw( $_POST[ 's3base_url' ] ) : '';
		$job_settings[ 's3region' ]    = isset( $_POST[ 's3region' ] ) ? sanitize_text_field( $_POST[ 's3region' ] ) : '';
		$job_settings[ 's3bucket' ]    = isset( $_POST[ 's3bucket' ] ) ? sanitize_text_field( $_POST[ 's3bucket' ] ) : '';

		// Google Cloud Storage's S3-compatible endpoint does not support AWS-style
		// multipart uploads, so disable them for all Google Storage regions.
		$job_settings[ 's3multipart' ] = strpos( $job_settings[ 's3region' ], 'google-storage' ) !== 0;

		// Normalize the bucket folder: forward slashes, single trailing slash,
		// no leading slash.
		$s3dir = isset( $_POST[ 's3dir' ] ) ? sanitize_text_field( $_POST[ 's3dir' ] ) : '';
		$s3dir = trailingslashit( str_replace( '//', '/', str_replace( '\\', '/', trim( $s3dir ) ) ) );
		$job_settings[ 's3dir' ] = ltrim( $s3dir, '/' );

		if ( isset( $_POST[ 's3maxbackups' ] ) ) {
			$job_settings[ 's3maxbackups' ] = absint( $_POST[ 's3maxbackups' ] );
		}
		// The checkbox is absent from $_POST when unchecked, so it cannot be guarded
		// with isset(); recompute the flag whenever the sync UI was shown.
		if ( $job_settings[ 'backuptype' ] !== 'archive' ) {
			$job_settings[ 's3syncnodelete' ] = ! empty( $_POST[ 's3syncnodelete' ] );
		}

		// Create the requested new bucket, if any.
		if ( ! empty( $_POST[ 's3newbucket' ] ) ) {
			try {
				$s3 = Aws\S3\S3Client::factory( array(
					'key'                       => $job_settings[ 's3accesskey' ],
					'secret'                    => BackWPup_Encryption::decrypt( $job_settings[ 's3secretkey' ] ),
					'region'                    => $job_settings[ 's3region' ],
					'base_url'                  => $this->get_s3_base_url( $job_settings[ 's3region' ], $job_settings[ 's3base_url' ] ),
					'scheme'                    => 'https',
					'ssl.certificate_authority' => BackWPup::get_plugin_data( 'cacert' ),
				) );
				// Map the Google Storage pseudo-regions to their location constraints;
				// real AWS regions pass through unchanged.
				if ( $job_settings[ 's3region' ] === 'google-storage' ) {
					$region = 'EU';
				} elseif ( $job_settings[ 's3region' ] === 'google-storage-us' ) {
					$region = 'US';
				} elseif ( $job_settings[ 's3region' ] === 'google-storage-asia' ) {
					$region = 'ASIA';
				} else {
					$region = $job_settings[ 's3region' ];
				}

				if ( $s3->isValidBucketName( $_POST[ 's3newbucket' ] ) ) {
					$create_bucket_args = array( 'Bucket' => sanitize_text_field( $_POST[ 's3newbucket' ] ) );
					// us-east-1 is the S3 default location and must not be sent as an
					// explicit LocationConstraint.
					if ( $region !== 'us-east-1' ) {
						$create_bucket_args[ 'LocationConstraint' ] = $region;
					}
					$s3->createBucket( $create_bucket_args );
					$s3->waitUntil( 'bucket_exists', array( 'Bucket' => sanitize_text_field( $_POST[ 's3newbucket' ] ) ) );
					BackWPup_Admin::message( sprintf( __( 'Bucket %1$s created in %2$s.', 'backwpup' ), sanitize_text_field( $_POST[ 's3newbucket' ] ), $region ) );
				} else {
					BackWPup_Admin::message( sprintf( __( '%s is not a valid bucket name.', 'backwpup' ), sanitize_text_field( $_POST[ 's3newbucket' ] ) ), TRUE );
				}
			}
			catch ( Aws\S3\Exception\S3Exception $e ) {
				BackWPup_Admin::message( $e->getMessage(), TRUE );
			}
			$job_settings[ 's3bucket' ] = sanitize_text_field( $_POST[ 's3newbucket' ] );
		}

		return $job_settings;
	}


	/**
	 * Sync the job's files to the S3 destination (file sync backup type).
	 *
	 * @param BackWPup_Job $job_object
	 *
	 * @return bool TRUE when the step is done or has failed fatally, FALSE to retry.
	 */
	public function job_run_sync( BackWPup_Job $job_object ) {

		$job_object->substeps_todo = $job_object->count_folder + count( $job_object->additional_files_to_backup ) + 2;

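		// STEP_TRY is bumped on each (re)start of this step; when it differs from
		// SAVE_STEP_TRY this is a fresh attempt, so reset the sub-step bookkeeping.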
		if ( $job_object->steps_data[ $job_object->step_working ]['SAVE_STEP_TRY'] != $job_object->steps_data[ $job_object->step_working ][ 'STEP_TRY' ] ) {
			$job_object->log( sprintf( __( '%d. Trying to sync files to S3 Service&#160;&hellip;', 'backwpup' ), $job_object->steps_data[ $job_object->step_working ][ 'STEP_TRY' ] ), E_USER_NOTICE );
			$job_object->steps_data[ $job_object->step_working ][ 'on_sub_step' ] = 'get_files';
			$job_object->steps_data[ $job_object->step_working ][ 'on_file' ] = 0;
			$job_object->steps_data[ $job_object->step_working ][ 'on_folder' ] = 0;
			$job_object->steps_data[ $job_object->step_working ][ 'dest_files' ] = array();
		}

		try {
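			// (Re)build the S3 client on every pass; the secret key is stored
			// encrypted and is only decrypted in memory for the API calls.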
			$s3 = Aws\S3\S3Client::factory( array(
				'key'                       => $job_object->job[ 's3accesskey' ],
				'secret'                    => BackWPup_Encryption::decrypt( $job_object->job[ 's3secretkey' ] ),
				'region'                    => $job_object->job[ 's3region' ],
				'base_url'                  => $this->get_s3_base_url( $job_object->job[ 's3region' ], $job_object->job[ 's3base_url' ] ),
				'scheme'                    => 'https',
				'ssl.certificate_authority' => BackWPup::get_plugin_data( 'cacert' ),
			) );

			if ( $job_object->steps_data[ $job_object->step_working ]['SAVE_STEP_TRY'] != $job_object->steps_data[ $job_object->step_working ][ 'STEP_TRY' ] ) {

				if ( $s3->doesBucketExist( $job_object->job[ 's3bucket' ] ) ) {
					$bucketregion = $s3->getBucketLocation( array( 'Bucket' => $job_object->job[ 's3bucket' ] ) );
					$job_object->log( sprintf( __( 'Connected to S3 Bucket "%1$s" in %2$s', 'backwpup' ), $job_object->job[ 's3bucket' ], $bucketregion->get( 'Location' ) ), E_USER_NOTICE );
				}
				else {
					$job_object->log( sprintf( __( 'S3 Bucket "%s" does not exist!', 'backwpup' ), $job_object->job[ 's3bucket' ] ), E_USER_ERROR );

					return TRUE;
				}

			}

			// get files from S3
			if ( $job_object->steps_data[ $job_object->step_working ][ 'on_sub_step' ] == 'get_files' ) {

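				// Build a key => size map of the objects already under the bucket prefix,
				// 1000 per request; the saved marker lets a restarted job resume the listing.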
				if ( ! isset( $job_object->steps_data[ $job_object->step_working ][ 'file_list_results' ] ) || $job_object->steps_data[ $job_object->step_working ][ 'file_list_results' ] == 1000 ) {
					if ( ! isset( $job_object->steps_data[ $job_object->step_working ][ 'file_list_results' ] ) ) {
						$job_object->log( __( 'Retrieving file list from S3.', 'backwpup'  ), E_USER_NOTICE );
						$job_object->steps_data[ $job_object->step_working ][ 'file_list_results' ] = 0;
						$args		= array(
							'Bucket' => $job_object->job[ 's3bucket' ],
							'Prefix' => (string) $job_object->job[ 's3dir' ],
							'MaxKeys' => 1000
						);

						$objects = $s3->getIterator( 'ListObjects', $args );
						if ( is_object( $objects ) ) {
							foreach ( $objects as $object ) {
								$job_object->steps_data[ $job_object->step_working ][ 'dest_files' ][ utf8_encode( $object[ 'Key' ] ) ] = $object[ 'Size' ];
								$job_object->steps_data[ $job_object->step_working ][ 'file_list_marker' ] = $object[ 'Key' ];
								$job_object->steps_data[ $job_object->step_working ][ 'file_list_results' ] ++;
							}
						}
						$job_object->do_restart_time();
					}

					while ( $job_object->steps_data[ $job_object->step_working ][ 'file_list_results' ] == 1000 ) {
						$job_object->steps_data[ $job_object->step_working ][ 'file_list_results' ] = 0;

						$args			= array(
							'Bucket' => $job_object->job[ 's3bucket' ],
							'Prefix' => (string) $job_object->job[ 's3dir' ],
							'Marker' =>  $job_object->steps_data[ $job_object->step_working ][ 'file_list_marker' ],
							'MaxKeys' => 1000
						);

						$objects = $s3->getIterator( 'ListObjects', $args );
						if ( is_object( $objects ) ) {
							foreach ( $objects as $object ) {
								$job_object->steps_data[ $job_object->step_working ][ 'dest_files' ][ utf8_encode( $object[ 'Key' ] ) ] = $object[ 'Size' ];
								$job_object->steps_data[ $job_object->step_working ][ 'file_list_marker' ] = $object[ 'Key' ];
								$job_object->steps_data[ $job_object->step_working ][ 'file_list_results' ] ++;
							}
						}
						$job_object->do_restart_time();
					}
				}
				$job_object->substeps_done ++;
				$job_object->steps_data[ $job_object->step_working ][ 'on_sub_step' ] = 'sync_new_changed';
			}

			// Build the shared parameter set for the putObject() calls below.
			$create_args             = array();
			$create_args[ 'Bucket' ] = $job_object->job[ 's3bucket' ];
			$create_args[ 'ACL' ]    = 'private';
			// Server-side encryption, if configured for this job.
			if ( ! empty( $job_object->job[ 's3ssencrypt' ] ) ) {
				$create_args[ 'ServerSideEncryption' ] = $job_object->job[ 's3ssencrypt' ];
			}
			// Storage class, if configured for this job.
			if ( ! empty( $job_object->job[ 's3storageclass' ] ) ) {
				$create_args[ 'StorageClass' ] = $job_object->job[ 's3storageclass' ];
			}
			$create_args[ 'Metadata' ] = array( 'BackupTime' => date( 'Y-m-d H:i:s', $job_object->start_time ) );

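			// Change detection is size-based: a file is (re)uploaded when its key is
			// missing from dest_files or the recorded size differs from the local file.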
			if ( $job_object->steps_data[ $job_object->step_working ][ 'on_sub_step' ] == 'sync_new_changed' ) {
				// Walk folder by folder so the saved counters can resume mid-run.
				if ( $job_object->steps_data[ $job_object->step_working ][ 'SAVE_STEP_TRY' ] != $job_object->steps_data[ $job_object->step_working ][ 'STEP_TRY' ] ) {
					$job_object->log( __( 'Upload changed files to S3.', 'backwpup' ) );
				}

				$folders_to_backup = $job_object->get_folders_to_backup();
				for ( ; $job_object->steps_data[ $job_object->step_working ][ 'on_folder' ] < count( $folders_to_backup ); $job_object->steps_data[ $job_object->step_working ][ 'on_folder' ]++ ) {
					$files_in_folder = $job_object->get_files_in_folder( $folders_to_backup[ $job_object->steps_data[ $job_object->step_working ][ 'on_folder' ] ] );
					for( ; $job_object->steps_data[ $job_object->step_working ][ 'on_file' ] < count( $files_in_folder ); $job_object->steps_data[ $job_object->step_working ][ 'on_file' ]++ ) {
						$job_object->do_restart_time();
						// Compute the file's key on the destination.
						$dest_file_name = $job_object->job[ 's3dir' ] . ltrim( $job_object->get_destination_path_replacement( $files_in_folder[ $job_object->steps_data[ $job_object->step_working ][ 'on_file' ] ] ), '/' );
						// Upload when the file is missing on S3 or its size differs.
						if ( ! isset( $job_object->steps_data[ $job_object->step_working ][ 'dest_files' ][ utf8_encode( $dest_file_name ) ] ) || $job_object->steps_data[ $job_object->step_working ][ 'dest_files' ][ utf8_encode( $dest_file_name ) ] != filesize( $files_in_folder[ $job_object->steps_data[ $job_object->step_working ][ 'on_file' ] ] ) ) {
							$create_args[ 'Body' ] 	  		= fopen( $files_in_folder[ $job_object->steps_data[ $job_object->step_working ][ 'on_file' ] ], 'rb' );
							$create_args[ 'Key' ] 		 	= $dest_file_name;
							$create_args[ 'ContentType' ]	= $job_object->get_mime_type( $files_in_folder[ $job_object->steps_data[ $job_object->step_working ][ 'on_file' ] ] );
							$s3->putObject( $create_args );
							$job_object->log( sprintf( __( 'File %s uploaded to S3.', 'backwpup' ), $dest_file_name ) );
						}
						// Drop the key from dest_files; anything still listed there at the
						// end no longer exists locally.
						unset( $job_object->steps_data[ $job_object->step_working ][ 'dest_files' ][ utf8_encode( $dest_file_name ) ] );
					}
					$job_object->substeps_done ++;
					$job_object->steps_data[ $job_object->step_working ][ 'on_file' ] = 0;
				}
				$job_object->steps_data[ $job_object->step_working ][ 'on_sub_step' ] = 'sync_extra';
				$job_object->steps_data[ $job_object->step_working ][ 'on_file' ] = 0;
			}

			if ( $job_object->steps_data[ $job_object->step_working ][ 'on_sub_step' ] == 'sync_extra' ) {
				// Sync the job's additional files; they are uploaded flat into the
				// bucket folder under their base names.
				if ( ! empty( $job_object->additional_files_to_backup ) ) {
					for ( ; $job_object->steps_data[ $job_object->step_working ][ 'on_file' ] < count( $job_object->additional_files_to_backup ); $job_object->steps_data[ $job_object->step_working ][ 'on_file' ]++ ) {
						$job_object->do_restart_time();
						$file = $job_object->additional_files_to_backup[ $job_object->steps_data[ $job_object->step_working ][ 'on_file' ] ];
						if ( isset( $job_object->steps_data[ $job_object->step_working ][ 'dest_files' ][ utf8_encode( $job_object->job[ 's3dir' ] . basename( $file ) ) ] ) && filesize( $file ) == $job_object->steps_data[ $job_object->step_working ][ 'dest_files' ][ utf8_encode( $job_object->job[ 's3dir' ] . basename( $file ) ) ] ) {
							unset( $job_object->steps_data[ $job_object->step_working ][ 'dest_files' ][ utf8_encode( $job_object->job[ 's3dir' ] . basename( $file ) ) ] );
							$job_object->substeps_done ++;
							continue;
						}
						$create_args[ 'Body' ] 	  		= fopen( $file, 'rb' );
						$create_args[ 'Key' ] 		 	= $job_object->job[ 's3dir' ] . basename( $file );
						$create_args[ 'ContentType' ]	= $job_object->get_mime_type( $file );
						$s3->putObject( $create_args );
						$job_object->log( sprintf( __( 'Extra file %s uploaded to S3.', 'backwpup' ), basename( $file ) ) );
						unset( $job_object->steps_data[ $job_object->step_working ][ 'dest_files' ][ utf8_encode( $job_object->job[ 's3dir' ] . basename( $file ) ) ] );
						$job_object->substeps_done ++;
					}
				}
				$job_object->steps_data[ $job_object->step_working ][ 'on_file' ] = 0;
				$job_object->steps_data[ $job_object->step_working ][ 'on_sub_step' ] = 'sync_delete';
			}

			// Anything still listed in dest_files exists on S3 but no longer locally;
			// delete it unless the job is set to keep files on the destination.
			if ( $job_object->steps_data[ $job_object->step_working ][ 'on_sub_step' ] == 'sync_delete' && ! $job_object->job[ 's3syncnodelete' ] ) {
				if ( $job_object->steps_data[ $job_object->step_working ][ 'SAVE_STEP_TRY' ] != $job_object->steps_data[ $job_object->step_working ][ 'STEP_TRY' ] ) {
					$job_object->log( __( 'Delete nonexistent files on S3', 'backwpup' ) );
				}
				foreach( array_keys( $job_object->steps_data[ $job_object->step_working ][ 'dest_files' ] ) as $dest_file ) {
					$args = array(
						'Bucket' => $job_object->job[ 's3bucket' ],
						'Key' => utf8_decode( $dest_file )
					);
					$s3->deleteObject( $args );
					$job_object->log( sprintf( __( 'File %s deleted from S3.', 'backwpup' ), utf8_decode( $dest_file ) ) );
					unset( $job_object->steps_data[ $job_object->step_working ][ 'dest_files' ][ $dest_file ] );
					$job_object->do_restart_time();
				}
			}
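			// Final accounting sub-step: the second of the "+ 2" added to substeps_todo.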
			$job_object->substeps_done ++;

		}
		catch ( Exception $e ) {
			$job_object->log( sprintf( __( 'S3 Service API: %s', 'backwpup' ), $e->getMessage() ), E_USER_ERROR, $e->getFile(), $e->getLine() );

			return FALSE;
		}

		return TRUE;
	}


	/**
	 * Print the inline JavaScript that reloads the bucket list via AJAX
	 * whenever the credentials, base URL, or region change.
	 */
	public function wizard_inline_js() {
		?>
		<script type="text/javascript">
			jQuery(document).ready(function ($) {
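				// Fetch a refreshed bucket <select> from admin-ajax.php (action
				// "backwpup_dest_s3") and swap it into the form after the hidden field.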
				function awsgetbucket() {
					var data = {
						action: 'backwpup_dest_s3',
						s3accesskey: $('input[name="s3accesskey"]').val(),
						s3secretkey: $('input[name="s3secretkey"]').val(),
						s3bucketselected: $('input[name="s3bucketselected"]').val(),
						s3base_url: $('input[name="s3base_url"]').val(),
						s3region: $('#s3region').val(),
						_ajax_nonce: $('#backwpupajaxnonce').val()
					};
					$.post(ajaxurl, data, function (response) {
						$('#s3bucketerror').remove();
						$('#s3bucket').remove();
						$('#s3bucketselected').after(response);
					});
				}

				$('input[name="s3accesskey"]').change(function () {
					awsgetbucket();
				});
				$('input[name="s3secretkey"]').change(function () {
					awsgetbucket();
				});
				$('input[name="s3base_url"]').change(function () {
					awsgetbucket();
				});
				$('#s3region').change(function () {
					awsgetbucket();
				});
			});
		</script>
		<?php
	}

}