diff --git a/.gitignore b/.gitignore
index 4b2e89d..f122c3b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,4 +3,6 @@ AwsCredentials.properties
target/
*.iml
.gradle
-build/
\ No newline at end of file
+build/
+*.log
+out
diff --git a/README.md b/README.md
index a22480a..ec6bfae 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# AWS SDK for Java Sample Project
+# AWS SDK for Java 'Improved' Sample Project
A simple Java application illustrating usage of the AWS SDK for Java.
@@ -46,7 +46,7 @@ Maven:
Gradle:
- gradlew clean build run
+ gradlew clean build runS3Example runSQSExample
When you start making your own buckets, the S3 documentation provides a good overview
diff --git a/build.gradle b/build.gradle
index 7f10524..9329f8b 100644
--- a/build.gradle
+++ b/build.gradle
@@ -1,7 +1,7 @@
apply plugin: 'java'
-apply plugin: 'application'
-
-mainClassName="com.amazonaws.samples.S3Sample"
+apply plugin: 'idea'
+//apply plugin: 'eclipse'
+//apply plugin: 'application'
repositories {
mavenCentral()
@@ -9,4 +9,18 @@ repositories {
dependencies {
compile 'com.amazonaws:aws-java-sdk:1.9.6'
-}
\ No newline at end of file
+ compile 'com.amazonaws:amazon-sqs-java-extended-client-lib:1.0.1'
+ compile 'log4j:log4j:1.2.17'
+}
+
+task(runS3Example, dependsOn: 'classes', type: JavaExec) {
+ main = 'com.amazonaws.samples.S3Sample'
+ classpath = sourceSets.main.runtimeClasspath
+}
+
+task(runSQSExample, dependsOn: 'classes', type: JavaExec) {
+ main = 'com.amazonaws.samples.SQSExtendedClientExample'
+ classpath = sourceSets.main.runtimeClasspath
+}
+
+defaultTasks 'runS3Example', 'runSQSExample'
diff --git a/pom.xml b/pom.xml
index efc3c43..98e7d17 100644
--- a/pom.xml
+++ b/pom.xml
@@ -35,14 +35,24 @@
         <version>1.2.1</version>
         <executions>
           <execution>
+            <id>first-execution</id>
             <goals>
               <goal>java</goal>
             </goals>
+            <configuration>
+              <mainClass>com.amazonaws.samples.S3Sample</mainClass>
+            </configuration>
+          </execution>
+          <execution>
+            <id>second-execution</id>
+            <goals>
+              <goal>java</goal>
+            </goals>
+            <configuration>
+              <mainClass>com.amazonaws.samples.SQSExtendedClientExample</mainClass>
+            </configuration>
           </execution>
         </executions>
-        <configuration>
-          <mainClass>com.amazonaws.samples.S3Sample</mainClass>
-        </configuration>
diff --git a/src/main/java/com/amazonaws/samples/S3TestUtil.java b/src/main/java/com/amazonaws/samples/S3TestUtil.java
new file mode 100644
index 0000000..fadf1cd
--- /dev/null
+++ b/src/main/java/com/amazonaws/samples/S3TestUtil.java
@@ -0,0 +1,183 @@
+package com.amazonaws.samples;
+
+import com.amazonaws.AmazonServiceException;
+import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
+import com.amazonaws.regions.Region;
+import com.amazonaws.regions.Regions;
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.AmazonS3Client;
+import com.amazonaws.services.s3.model.*;
+import com.amazonaws.services.s3.transfer.TransferManager;
+import com.amazonaws.services.s3.transfer.Upload;
+
+import java.io.*;
+import java.util.Arrays;
+import java.util.Iterator;
+
+/**
+ * Requires that you have created a credentials file at ~/.aws/credentials.
+ */
+public class S3TestUtil
+{
+ private static final AmazonS3 s3;
+ private static final Region usWest2;
+ private static final DefaultAWSCredentialsProviderChain credentialProviderChain;
+ private static TransferManager tx;
+
+ static {
+ s3 = new AmazonS3Client();
+ usWest2 = Region.getRegion(Regions.US_WEST_2);
+ s3.setRegion(usWest2);
+ credentialProviderChain = new DefaultAWSCredentialsProviderChain();
+ }
+
+ public static AmazonS3 getS3() {
+ return s3;
+ }
+
+ public static void deleteEntireBucket(String bucket_name) {
+ System.out.println("Deleting S3 bucket: " + bucket_name);
+ try {
+ System.out.println(" - removing objects from bucket");
+ ObjectListing object_listing = s3.listObjects(bucket_name);
+ while (true) {
+ for (Iterator<?> iterator = object_listing.getObjectSummaries().iterator(); iterator.hasNext(); ) {
+ S3ObjectSummary summary = (S3ObjectSummary) iterator.next();
+ s3.deleteObject(bucket_name, summary.getKey());
+ }
+ if (object_listing.isTruncated()) {
+ object_listing = s3.listNextBatchOfObjects(object_listing);
+ } else {
+ break;
+ }
+ }
+
+ System.out.println(" - removing versions from bucket");
+ VersionListing version_listing = s3.listVersions(new ListVersionsRequest().withBucketName(bucket_name));
+ while (true) {
+ for (Iterator<?> iterator = version_listing.getVersionSummaries().iterator(); iterator.hasNext(); ) {
+ S3VersionSummary vs = (S3VersionSummary) iterator.next();
+ s3.deleteVersion(bucket_name, vs.getKey(), vs.getVersionId());
+ }
+ if (version_listing.isTruncated()) {
+ version_listing = s3.listNextBatchOfVersions(version_listing);
+ } else {
+ break;
+ }
+ }
+ s3.deleteBucket(bucket_name);
+ } catch (AmazonServiceException e) {
+ System.err.println(e.getErrorMessage());
+ System.exit(1);
+ }
+ System.out.println("Done removing bucket: " + bucket_name);
+ }
+
+ public static void copyEntireBucket(String src_bucket, String dest_bucket) {
+ System.out.println("Copying S3 bucket '" + src_bucket + "' to destination '" + dest_bucket + "' ...");
+ try {
+ System.out.println(" - copying objects from bucket");
+ ObjectListing object_listing = s3.listObjects(src_bucket);
+ while (true) {
+ for (Iterator<?> iterator = object_listing.getObjectSummaries().iterator(); iterator.hasNext(); ) {
+ S3ObjectSummary summary = (S3ObjectSummary) iterator.next();
+ s3.copyObject(src_bucket, summary.getKey(), dest_bucket, summary.getKey());
+ }
+ if (object_listing.isTruncated()) {
+ object_listing = s3.listNextBatchOfObjects(object_listing);
+ } else {
+ break;
+ }
+ }
+ //System.out.println(" - copying versions from bucket");
+ //VersionListing version_listing = s3.listVersions(new ListVersionsRequest().withBucketName(src_bucket));
+ //while (true) {
+ // for (Iterator<?> iterator = version_listing.getVersionSummaries().iterator(); iterator.hasNext(); ) {
+ // S3VersionSummary vs = (S3VersionSummary) iterator.next();
+ // String key = vs.getKey();
+ // String ver = vs.getVersionId();
+ // // how??
+ // }
+ // if (version_listing.isTruncated()) {
+ // version_listing = s3.listNextBatchOfVersions(version_listing);
+ // } else {
+ // break;
+ // }
+ //}
+ //s3.deleteBucket(src_bucket);
+ } catch (AmazonServiceException e) {
+ System.err.println(e.getErrorMessage());
+ System.exit(1);
+ }
+ System.out.println("Done copying bucket '" + src_bucket + "' to new bucket '" + dest_bucket + "'");
+ }
+
+ public static File createTmpFile() {
+ File tempTestFile = null;
+ try {
+ tempTestFile = File.createTempFile("aws-java-sdk-copy-test-", ".txt");
+ tempTestFile.deleteOnExit();
+
+ Writer writer = new OutputStreamWriter(new FileOutputStream(tempTestFile));
+ writer.write("abcdefghijklmnopqrstuvwxyz\n");
+ writer.write("01234567890112345678901234\n");
+ writer.write("!@#$%^&*()-=[]{};':',.<>/?\n");
+ writer.write("01234567890112345678901234\n");
+ writer.write("abcdefghijklmnopqrstuvwxyz\n");
+ writer.close();
+ } catch (IOException ioe) {
+ ioe.printStackTrace();
+ }
+ return tempTestFile;
+ }
+
+ public static void createExpiringBucket(String name) {
+ BucketLifecycleConfiguration.Rule bucketExpirationRule =
+ new BucketLifecycleConfiguration.Rule()
+ .withId("RULE: Delete bucket after 1 day")
+ .withExpirationInDays(1) // expiration date must be midnight GMT and is effectively GMT-8, or around 4pm
+ .withStatus(BucketLifecycleConfiguration.ENABLED.toString());
+
+ BucketLifecycleConfiguration configuration =
+ new BucketLifecycleConfiguration()
+ .withRules(Arrays.asList(bucketExpirationRule));
+
+ boolean bucketMissing = !bucketExists(name);
+
+ if (bucketMissing) {
+ System.out.println("Creating 1-day expiring bucket " + name + "\n");
+ s3.createBucket(name);
+ s3.setBucketLifecycleConfiguration(name, configuration);
+ }
+ }
+
+ public static boolean bucketExists(String name) {
+ for (Bucket bucket : s3.listBuckets()) {
+ if (bucket.getName().equals(name)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ public static void showAllBuckets() {
+ for (Bucket bucket : s3.listBuckets()) {
+ System.out.println("bucket: " + bucket.getName());
+ }
+ }
+
+ public static void uploadTmpFileToBucket(String bucketName, File fileKey) {
+ System.out.println("Uploading a file to bucket " + bucketName);
+ tx = new TransferManager(credentialProviderChain.getCredentials());
+
+ Upload myUpload = tx.upload(bucketName, fileKey.getName(), fileKey);
+
+ try {
+ myUpload.waitForCompletion();
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ if (tx != null) tx.shutdownNow();
+ }
+
+}
diff --git a/src/main/java/com/amazonaws/samples/S3TransferExample.java b/src/main/java/com/amazonaws/samples/S3TransferExample.java
new file mode 100644
index 0000000..3235930
--- /dev/null
+++ b/src/main/java/com/amazonaws/samples/S3TransferExample.java
@@ -0,0 +1,43 @@
+package com.amazonaws.samples;
+
+import java.io.*;
+import java.util.UUID;
+
+public class S3TransferExample
+{
+ private static File fileKey1;
+ private static File fileKey2;
+ private static String sourceBucket;
+ private static String destinationBucket;
+
+ public static void main(String[] args) {
+ createSourceAndDestinationBuckets("test-bucket-" + UUID.randomUUID());
+ fileKey1 = S3TestUtil.createTmpFile();
+ S3TestUtil.uploadTmpFileToBucket(sourceBucket, fileKey1);
+ fileKey2 = S3TestUtil.createTmpFile();
+ S3TestUtil.uploadTmpFileToBucket(sourceBucket, fileKey2);
+ copyBucketToNewLocation();
+ S3TestUtil.showAllBuckets();
+ //deleteTestBucketsNow();
+ }
+
+ private static void copyBucketToNewLocation() {
+ System.out.println("Copying bucket " + sourceBucket + " to new bucket " + destinationBucket);
+ //CopyObjectResult copyObjectResult = S3TestUtil.getS3().copyObject(sourceBucket, fileKey1.getName(), destinationBucket, fileKey1.getName());
+ S3TestUtil.copyEntireBucket(sourceBucket, destinationBucket);
+ }
+
+ public static void createSourceAndDestinationBuckets(String name)
+ {
+ sourceBucket = name;
+ destinationBucket = name + "-dest";
+ if (!S3TestUtil.bucketExists(sourceBucket)) S3TestUtil.createExpiringBucket(sourceBucket);
+ if (!S3TestUtil.bucketExists(destinationBucket)) S3TestUtil.createExpiringBucket(destinationBucket);
+ }
+
+ public static void deleteTestBucketsNow() {
+ S3TestUtil.deleteEntireBucket(sourceBucket);
+ S3TestUtil.deleteEntireBucket(destinationBucket);
+ }
+
+}
diff --git a/src/main/java/com/amazonaws/samples/SQSExtendedClientExample.java b/src/main/java/com/amazonaws/samples/SQSExtendedClientExample.java
new file mode 100644
index 0000000..7edc26a
--- /dev/null
+++ b/src/main/java/com/amazonaws/samples/SQSExtendedClientExample.java
@@ -0,0 +1,109 @@
+package com.amazonaws.samples;
+
+import com.amazon.sqs.javamessaging.AmazonSQSExtendedClient;
+import com.amazon.sqs.javamessaging.ExtendedClientConfiguration;
+import com.amazonaws.AmazonClientException;
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.auth.AWSStaticCredentialsProvider;
+import com.amazonaws.auth.profile.ProfileCredentialsProvider;
+import com.amazonaws.regions.Region;
+import com.amazonaws.regions.Regions;
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.AmazonS3ClientBuilder;
+import com.amazonaws.services.s3.model.*;
+import com.amazonaws.services.sqs.AmazonSQS;
+import com.amazonaws.services.sqs.AmazonSQSClient;
+import com.amazonaws.services.sqs.model.*;
+import org.apache.log4j.Logger;
+import org.joda.time.DateTime;
+import org.joda.time.format.DateTimeFormat;
+
+import java.util.*;
+
+public class SQSExtendedClientExample {
+
+ private static final Logger log = Logger.getLogger(SQSExtendedClientExample.class.getName());
+
+ private static final String s3BucketName = UUID.randomUUID() + "-" + DateTimeFormat.forPattern("yyMMdd-hhmmss").print(new DateTime());
+
+ public static void main(String[] args) {
+
+ AWSCredentials credentials = null;
+
+ try {
+ credentials = new ProfileCredentialsProvider("default").getCredentials();
+ } catch (Exception e) {
+ throw new AmazonClientException(
+ "Cannot load the AWS credentials from the expected AWS credential profiles file. "
+ + "Make sure that your credentials file is at the correct "
+ + "location (/home/$USER/.aws/credentials) and is in a valid format.", e);
+ }
+
+ AmazonS3 s3 = AmazonS3ClientBuilder
+ .standard()
+ .withRegion(Regions.US_WEST_2)
+ .withCredentials(new AWSStaticCredentialsProvider(credentials))
+ .build();
+
+ // Set the Amazon S3 bucket name, and set a lifecycle rule on the bucket to
+ // permanently delete objects a certain number of days after
+ // each object's creation date.
+ // Then create the bucket, and enable message objects to be stored in the bucket.
+ BucketLifecycleConfiguration.Rule expirationRule = new BucketLifecycleConfiguration.Rule();
+ expirationRule.withExpirationInDays(14).withStatus("Enabled");
+ BucketLifecycleConfiguration lifecycleConfig = new BucketLifecycleConfiguration().withRules(expirationRule);
+
+ s3.createBucket(s3BucketName);
+ s3.setBucketLifecycleConfiguration(s3BucketName, lifecycleConfig);
+ log.info("Bucket created and configured.");
+
+ // Set the SQS extended client configuration with large payload support enabled.
+ ExtendedClientConfiguration extendedClientConfig = new ExtendedClientConfiguration()
+ .withLargePayloadSupportEnabled(s3, s3BucketName);
+
+ AmazonSQS sqsExtended = new AmazonSQSExtendedClient(new AmazonSQSClient(credentials), extendedClientConfig);
+ Region sqsRegion = Region.getRegion(Regions.US_WEST_2);
+ sqsExtended.setRegion(sqsRegion);
+
+ // Create a long string of characters for the message object to be stored in the bucket.
+ int stringLength = 300000;
+ char[] chars = new char[stringLength];
+ Arrays.fill(chars, 'x');
+ String myLongString = new String(chars);
+
+ // Create a message queue for this example.
+ String queueName = "QueueName" + UUID.randomUUID().toString();
+ CreateQueueRequest createQueueRequest = new CreateQueueRequest(queueName);
+ String myQueueUrl = sqsExtended.createQueue(createQueueRequest).getQueueUrl();
+ log.info("Queue created.");
+
+ // Send the message.
+ SendMessageRequest myMessageRequest = new SendMessageRequest(myQueueUrl, myLongString);
+ sqsExtended.sendMessage(myMessageRequest);
+ log.info("Sent the message.");
+
+ // Receive messages, and then print general information about them.
+ ReceiveMessageRequest receiveMessageRequest = new ReceiveMessageRequest(myQueueUrl);
+ List<Message> messages = sqsExtended.receiveMessage(receiveMessageRequest).getMessages();
+
+ for (Message message : messages) {
+ log.info("\nMessage received:");
+ log.info(" ID: " + message.getMessageId());
+ log.info(" Receipt handle: " + message.getReceiptHandle());
+ log.info(" Message body (first 5 characters): " + message.getBody().substring(0, 5));
+ }
+
+ // Delete the message, the queue, and the bucket.
+ String messageReceiptHandle = messages.get(0).getReceiptHandle();
+ sqsExtended.deleteMessage(new DeleteMessageRequest(myQueueUrl, messageReceiptHandle));
+ log.info("Deleted the message.");
+
+ sqsExtended.deleteQueue(new DeleteQueueRequest(myQueueUrl));
+ log.info("Deleted the queue.");
+
+ S3TestUtil.deleteEntireBucket(s3BucketName);
+ log.info("Deleted the bucket.");
+
+ }
+
+}
diff --git a/src/main/resources/log4j.properties b/src/main/resources/log4j.properties
new file mode 100644
index 0000000..5ee5ab1
--- /dev/null
+++ b/src/main/resources/log4j.properties
@@ -0,0 +1,20 @@
+# Root logger option
+log4j.rootLogger=INFO, stdout, file
+
+log4j.logger.com.amazonaws.request=DEBUG
+
+# Redirect log messages to console
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
+
+# Redirect log messages to a log file, support file rolling.
+log4j.appender.file=org.apache.log4j.RollingFileAppender
+log4j.appender.file.File=aws.log
+log4j.appender.file.MaxFileSize=5MB
+log4j.appender.file.MaxBackupIndex=10
+log4j.appender.file.layout=org.apache.log4j.PatternLayout
+log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
+
+