sparkSubmitConfs;
-
-
- /**
- * Defines the EMR cluster's name
- *
- * @param clusterName the EMR cluster's name
- * @return an EMR cluster builder
- */
- public Builder clusterName(String clusterName) {
- this.sparkClusterName = clusterName;
- return this;
- }
-
- /**
- * Defines the EMR cluster's region
- * See https://docs.aws.amazon.com/general/latest/gr/rande.html
- *
- * @param region the EMR cluster's region
- * @return an EMR cluster builder
- */
- public Builder awsRegion(String region) {
- this.sparkAwsRegion = region;
- return this;
- }
-
- /**
- * Defines the EMR release version to be used in this cluster,
- * specified as a release label.
- * See https://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-4.2.0/emr-release-differences.html#emr-release-label
- *
- * @param releaseLabel the EMR release label
- * @return an EMR cluster builder
- */
- public Builder emrRelease(String releaseLabel) {
- this.sparkEmrRelease = releaseLabel;
- return this;
- }
-
- /**
- * Defines the IAM role to be assumed by the EMR service
- *
- * https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-service.html
- *
- * @param serviceRole the service role
- * @return an EMR cluster builder
- */
- public Builder emrServiceRole(String serviceRole) {
- this.sparkEmrServiceRole = serviceRole;
- return this;
- }
-
- /**
- * A list of configuration parameters to apply to EMR instances.
- *
- * @param configs the EMR configurations to apply to this cluster
- * @return an EMR cluster builder
- */
- public Builder emrConfigs(List<EmrConfig> configs) {
- this.sparkEmrConfigs = configs;
- return this;
- }
-
- /**
- * The ID of the EC2 subnet to be used for this Spark EMR service
- * See https://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_Subnets.html
- *
- * @param id the subnet ID
- * @return an EMR cluster builder
- */
- public Builder subnetId(String id) {
- this.sparkSubNetid = id;
- return this;
- }
-
- /**
- * The IDs of additional security groups this deployment should adopt, for both master and slave nodes
- *
- * @param securityGroups the additional security group IDs
- * @return an EMR cluster builder
- */
- public Builder securityGroupIDs(List<String> securityGroups) {
- this.sparkSecurityGroupIds = securityGroups;
- return this;
- }
-
- /**
- * The number of instances this deployment should comprise
- *
- * @param count the number of instances for this cluster
- * @return an EMR cluster builder
- */
- public Builder instanceCount(int count) {
- this.sparkInstanceCount = count;
- return this;
- }
-
- /**
- * The type of instance this cluster should consist of
- * See https://aws.amazon.com/ec2/instance-types/
- *
- * @param instanceType the type of instance for this cluster
- * @return an EMR cluster builder
- */
- public Builder instanceType(String instanceType) {
- this.sparkInstanceType = instanceType;
- return this;
- }
-
- /**
- * The optional bid value for this cluster's spot instances
- * see https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/how-spot-instances-work.html
- * Uses the on-demand market if empty.
- *
- * @param optBid the optional bid price for this cluster's instances
- * @return an EMR cluster builder
- */
- public Builder instanceBidPrice(Optional<Float> optBid) {
- this.sparkInstanceBidPrice = optBid;
- return this;
- }
-
- /**
- * The EC2 instance role that this cluster's instances should assume
- * see https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html
- *
- * @param role the intended instance role
- * @return an EMR cluster builder
- */
- public Builder instanceRole(String role) {
- this.sparkInstanceRole = role;
- return this;
- }
-
- /**
- * The S3 folder in which to find the application jar
- *
- * @param jarfolder the S3 folder in which to find a jar
- * @return an EMR cluster builder
- */
- public Builder s3JarFolder(String jarfolder) {
- this.sparkS3JarFolder = jarfolder;
- return this;
- }
-
- /**
- * The timeout duration for this Spark EMR cluster, in minutes
- *
- * @param timeoutMinutes the timeout duration, in minutes
- * @return an EMR cluster builder
- */
- public Builder sparkTimeOutDurationMinutes(int timeoutMinutes) {
- this.sparkTimeoutDurationMinutes = timeoutMinutes;
- return this;
- }
-
- /**
- * Creates an EMR Spark cluster deployment
- *
- * @return a SparkEMRClient
- */
- public SparkEMRClient build() {
- this.sparkEmrClientBuilder = AmazonElasticMapReduceClientBuilder.standard().withRegion(sparkAwsRegion);
- this.sparkS3ClientBuilder = AmazonS3ClientBuilder.standard().withRegion(sparkAwsRegion);
- // note this will be kept alive without steps, an arbitrary choice to avoid rapid test-teardown-restart cycles
- this.sparkJobFlowInstancesConfig = (new JobFlowInstancesConfig()).withKeepJobFlowAliveWhenNoSteps(true);
- if (this.sparkSubNetid != null)
- this.sparkJobFlowInstancesConfig = this.sparkJobFlowInstancesConfig.withEc2SubnetId(this.sparkSubNetid);
- if (!this.sparkSecurityGroupIds.isEmpty()) {
- this.sparkJobFlowInstancesConfig = this.sparkJobFlowInstancesConfig.withAdditionalMasterSecurityGroups(this.sparkSecurityGroupIds);
- this.sparkJobFlowInstancesConfig = this.sparkJobFlowInstancesConfig.withAdditionalSlaveSecurityGroups(this.sparkSecurityGroupIds);
- }
-
- InstanceGroupConfig masterConfig =
- (new InstanceGroupConfig()).withInstanceCount(1).withInstanceRole(InstanceRoleType.MASTER).withInstanceType(sparkInstanceType);
- if (sparkInstanceBidPrice.isPresent()) {
- masterConfig = masterConfig.withMarket(MarketType.SPOT).withBidPrice(sparkInstanceBidPrice.get().toString());
- } else {
- masterConfig = masterConfig.withMarket(MarketType.ON_DEMAND);
- }
-
- int slaveCount = sparkInstanceCount - 1;
- InstanceGroupConfig slaveConfig =
- (new InstanceGroupConfig()).withInstanceCount(slaveCount).withInstanceRole(InstanceRoleType.CORE).withInstanceType(sparkInstanceType);
- if (sparkInstanceBidPrice.isPresent()) {
- slaveConfig = slaveConfig.withMarket(MarketType.SPOT).withBidPrice(sparkInstanceBidPrice.get().toString());
- } else {
- slaveConfig = slaveConfig.withMarket(MarketType.ON_DEMAND);
- }
- if (slaveCount > 0) {
- this.sparkJobFlowInstancesConfig = this.sparkJobFlowInstancesConfig.withInstanceGroups(Arrays.asList(masterConfig, slaveConfig));
- } else {
- this.sparkJobFlowInstancesConfig = this.sparkJobFlowInstancesConfig.withInstanceGroups(masterConfig);
- }
-
- this.sparkRunJobFlowRequest = new RunJobFlowRequest();
- if (!sparkEmrConfigs.isEmpty()) {
- List<Configuration> emrConfigs = new ArrayList<>();
- for (EmrConfig config : sparkEmrConfigs) {
- emrConfigs.add(config.toAwsConfig());
- }
- this.sparkRunJobFlowRequest = this.sparkRunJobFlowRequest.withConfigurations(emrConfigs);
- }
- this.sparkRunJobFlowRequest =
- this.sparkRunJobFlowRequest.withName(sparkClusterName).withApplications((new Application()).withName("Spark"))
- .withReleaseLabel(sparkEmrRelease)
- .withServiceRole(sparkEmrServiceRole)
- .withJobFlowRole(sparkInstanceRole)
- .withInstances(this.sparkJobFlowInstancesConfig);
-
- return new SparkEMRClient(
- this.sparkClusterName,
- this.sparkAwsRegion,
- this.sparkEmrRelease,
- this.sparkEmrServiceRole,
- this.sparkEmrConfigs,
- this.sparkSubNetid,
- this.sparkSecurityGroupIds,
- this.sparkInstanceCount,
- this.sparkInstanceType,
- this.sparkInstanceBidPrice,
- this.sparkInstanceRole,
- this.sparkS3JarFolder,
- this.sparkTimeoutDurationMinutes,
- this.sparkEmrClientBuilder,
- this.sparkS3ClientBuilder,
- this.sparkJobFlowInstancesConfig,
- this.sparkRunJobFlowRequest,
- this.sparkS3PutObjectDecorator,
- this.sparkSubmitConfs
- );
- }
-
- }
-
-}
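For reference, a minimal usage sketch of the builder deleted above. All values are illustrative assumptions, and it presumes the builder's list-valued fields default to empty collections; per instanceBidPrice, an empty Optional selects the on-demand market:

    import java.util.Optional;

    SparkEMRClient client = new SparkEMRClient.Builder()
            .clusterName("my-spark-cluster")        // illustrative name
            .awsRegion("us-east-1")
            .emrRelease("emr-5.20.0")               // any valid EMR release label
            .emrServiceRole("EMR_DefaultRole")
            .instanceCount(3)                       // 1 master + 2 core instances
            .instanceType("m4.large")
            .instanceBidPrice(Optional.empty())     // empty => on-demand market
            .instanceRole("EMR_EC2_DefaultRole")
            .s3JarFolder("my-jar-bucket/folder")    // illustrative S3 location
            .sparkTimeOutDurationMinutes(90)
            .build();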
diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/java/org/deeplearning4j/aws/s3/BaseS3.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/java/org/deeplearning4j/aws/s3/BaseS3.java
deleted file mode 100755
index 13175505b..000000000
--- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/java/org/deeplearning4j/aws/s3/BaseS3.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*******************************************************************************
- * Copyright (c) 2015-2018 Skymind, Inc.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Apache License, Version 2.0 which is available at
- * https://www.apache.org/licenses/LICENSE-2.0.
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations
- * under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- ******************************************************************************/
-
-package org.deeplearning4j.aws.s3;
-
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.auth.PropertiesCredentials;
-import com.amazonaws.services.ec2.AmazonEC2;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-import com.amazonaws.services.s3.AmazonS3;
-import com.amazonaws.services.s3.AmazonS3Client;
-
-import java.io.File;
-import java.io.InputStream;
-
-
-/**
- * Discovers S3 credentials from system properties (passed in
- * via -D or system-wide) or from the environment.
- * If you invoke the JVM with -Dorg.deeplearning4j.aws.accessKey=YOUR_ACCESS_KEY
- * and -Dorg.deeplearning4j.aws.accessSecret=YOUR_SECRET_KEY,
- * the credentials are picked up from there; otherwise the following
- * environment variables are consulted:
- *
- * AWS_ACCESS_KEY
- * AWS_SECRET_KEY
- * @author Adam Gibson
- *
- */
-public abstract class BaseS3 {
-
-
- /**
- *
- */
- protected static final long serialVersionUID = -2280107690193651289L;
- protected String accessKey;
- protected String secretKey;
- protected AWSCredentials creds;
- public final static String ACCESS_KEY = "org.deeplearning4j.aws.accessKey";
- public final static String ACCESS_SECRET = "org.deeplearning4j.aws.accessSecret";
- public final static String AWS_ACCESS_KEY = "AWS_ACCESS_KEY"; //"AWS_ACCESS_KEY_ID";
- public final static String AWS_SECRET_KEY = "AWS_SECRET_KEY"; //"AWS_SECRET_ACCESS_KEY";
-
-
- protected void findCreds() {
- if (System.getProperty(ACCESS_KEY) != null && System.getProperty(ACCESS_SECRET) != null) {
- accessKey = System.getProperty(ACCESS_KEY);
- secretKey = System.getProperty(ACCESS_SECRET);
- }
-
- else if (System.getenv(AWS_ACCESS_KEY) != null && System.getenv(AWS_SECRET_KEY) != null) {
- accessKey = System.getenv(AWS_ACCESS_KEY);
- secretKey = System.getenv(AWS_SECRET_KEY);
- }
- }
-
- public BaseS3() {
- findCreds();
- if (accessKey != null && secretKey != null)
- creds = new BasicAWSCredentials(accessKey, secretKey);
- if (creds == null)
- throw new IllegalStateException("Unable to find ec2 credentials");
- }
-
- public BaseS3(File file) throws Exception {
- findCreds();
- if (accessKey != null && secretKey != null)
- creds = new BasicAWSCredentials(accessKey, secretKey);
- else
- creds = new PropertiesCredentials(file);
- }
-
- public BaseS3(InputStream is) throws Exception {
- findCreds();
- if (accessKey != null && secretKey != null)
- creds = new BasicAWSCredentials(accessKey, secretKey);
- else
- creds = new PropertiesCredentials(is);
- }
-
- public AWSCredentials getCreds() {
- return creds;
- }
-
- public void setCreds(AWSCredentials creds) {
- this.creds = creds;
- }
-
- public AmazonS3 getClient() {
- return new AmazonS3Client(creds);
- }
-
- public AmazonEC2 getEc2() {
-
- return new AmazonEC2Client(creds);
- }
-
-
-}
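A hedged sketch of how the credential discovery above was meant to be driven: system properties take precedence, then the AWS_ACCESS_KEY/AWS_SECRET_KEY environment variables. The key values are placeholders:

    // Placeholder values; any BaseS3 subclass picks these up via findCreds().
    System.setProperty(BaseS3.ACCESS_KEY, "YOUR_ACCESS_KEY");
    System.setProperty(BaseS3.ACCESS_SECRET, "YOUR_SECRET_KEY");
    S3Downloader downloader = new S3Downloader(); // BaseS3 subclass defined below
    AmazonS3 s3 = downloader.getClient();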
diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/java/org/deeplearning4j/aws/s3/reader/BaseS3DataSetIterator.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/java/org/deeplearning4j/aws/s3/reader/BaseS3DataSetIterator.java
deleted file mode 100755
index 5532f79d2..000000000
--- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/java/org/deeplearning4j/aws/s3/reader/BaseS3DataSetIterator.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*******************************************************************************
- * Copyright (c) 2015-2018 Skymind, Inc.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Apache License, Version 2.0 which is available at
- * https://www.apache.org/licenses/LICENSE-2.0.
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations
- * under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- ******************************************************************************/
-
-package org.deeplearning4j.aws.s3.reader;
-
-import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
-
-import java.io.InputStream;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Baseline S3 data set iterator
- * @author Adam Gibson
- *
- */
-public abstract class BaseS3DataSetIterator implements DataSetIterator {
-
- /**
- *
- */
- private static final long serialVersionUID = 885205002006822431L;
- private S3Downloader downloader;
- private List<String> buckets;
- private int currBucket;
- private Iterator<InputStream> currIterator;
-
- public BaseS3DataSetIterator() {
- downloader = new S3Downloader();
- buckets = downloader.buckets();
- currBucket = 0;
- currIterator = downloader.iterateBucket(buckets.get(currBucket));
- }
-
-
-
- public InputStream nextObject() {
- if (currIterator.hasNext())
- return currIterator.next();
- else if (currBucket < buckets.size() - 1) {
- currBucket++;
- currIterator = downloader.iterateBucket(buckets.get(currBucket));
- return currIterator.next();
- }
-
- return null;
- }
-
-
-
- @Override
- public boolean hasNext() {
- return currBucket < buckets.size() && currIterator.hasNext();
- }
-
-
-
- public String currBucket() {
- return buckets.get(currBucket);
- }
-
-
-
- public void nextBucket() {
- currBucket++;
- }
-
-
-
-}
diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/java/org/deeplearning4j/aws/s3/reader/BucketIterator.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/java/org/deeplearning4j/aws/s3/reader/BucketIterator.java
deleted file mode 100755
index 228fc5cfc..000000000
--- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/java/org/deeplearning4j/aws/s3/reader/BucketIterator.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*******************************************************************************
- * Copyright (c) 2015-2018 Skymind, Inc.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Apache License, Version 2.0 which is available at
- * https://www.apache.org/licenses/LICENSE-2.0.
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations
- * under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- ******************************************************************************/
-
-package org.deeplearning4j.aws.s3.reader;
-
-import com.amazonaws.services.s3.model.ObjectListing;
-import com.amazonaws.services.s3.model.S3ObjectSummary;
-
-import java.io.InputStream;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Iterator over individual S3 bucket
- * @author Adam Gibson
- *
- */
-public class BucketIterator implements Iterator<InputStream> {
-
- private S3Downloader s3;
- private String bucket;
- private ObjectListing currList;
- private List<S3ObjectSummary> currObjects;
- private int currObject;
-
-
-
- public BucketIterator(String bucket) {
- this(bucket, null);
-
- }
-
-
- public BucketIterator(String bucket, S3Downloader s3) {
- this.bucket = bucket;
-
- if (s3 == null)
- this.s3 = new S3Downloader();
- else
- this.s3 = s3;
- currList = this.s3.listObjects(bucket);
- currObjects = currList.getObjectSummaries();
-
- }
-
-
-
- @Override
- public boolean hasNext() {
- return currObject < currObjects.size();
- }
-
- @Override
- public InputStream next() {
- if (currObject < currObjects.size()) {
- InputStream ret = s3.objectForKey(bucket, currObjects.get(currObject).getKey());
- currObject++;
- return ret;
- } else if (currList.isTruncated()) {
- currList = s3.nextList(currList);
- currObjects = currList.getObjectSummaries();
- currObject = 0;
-
- InputStream ret = s3.objectForKey(bucket, currObjects.get(currObject).getKey());
-
- currObject++;
- return ret;
- }
-
-
- throw new IllegalStateException("Indeterminate state");
- }
-
- @Override
- public void remove() {
- throw new UnsupportedOperationException();
- }
-
-
-}
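For reference, a minimal usage sketch of the iterator deleted above (the bucket name is a placeholder); per the S3Downloader javadoc, closing each stream is the caller's responsibility:

    import java.io.InputStream;
    import java.util.Iterator;

    Iterator<InputStream> objects = new BucketIterator("my-bucket");
    while (objects.hasNext()) {
        try (InputStream in = objects.next()) {
            // consume the object's bytes here
        }
    }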
diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/java/org/deeplearning4j/aws/s3/reader/S3Downloader.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/java/org/deeplearning4j/aws/s3/reader/S3Downloader.java
deleted file mode 100755
index 980a3f3e9..000000000
--- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/java/org/deeplearning4j/aws/s3/reader/S3Downloader.java
+++ /dev/null
@@ -1,178 +0,0 @@
-/*******************************************************************************
- * Copyright (c) 2015-2018 Skymind, Inc.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Apache License, Version 2.0 which is available at
- * https://www.apache.org/licenses/LICENSE-2.0.
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations
- * under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- ******************************************************************************/
-
-package org.deeplearning4j.aws.s3.reader;
-
-import com.amazonaws.services.s3.AmazonS3;
-import com.amazonaws.services.s3.model.*;
-import com.amazonaws.services.s3.transfer.MultipleFileDownload;
-import com.amazonaws.services.s3.transfer.TransferManager;
-import org.apache.commons.io.IOUtils;
-import org.deeplearning4j.aws.s3.BaseS3;
-
-import java.io.*;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Downloads files from S3
- * @author Adam Gibson
- *
- */
-public class S3Downloader extends BaseS3 {
-
-
- /**
- * Return the keys for a bucket
- * @param bucket the bucket to get the keys for
- * @return the bucket's keys
- */
- public List<String> keysForBucket(String bucket) {
- AmazonS3 s3 = getClient();
- List<String> ret = new ArrayList<>();
- ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(bucket);
- ObjectListing objectListing;
-
- do {
- objectListing = s3.listObjects(listObjectsRequest);
- for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
- ret.add(objectSummary.getKey());
- }
- listObjectsRequest.setMarker(objectListing.getNextMarker());
- } while (objectListing.isTruncated());
-
- return ret;
- }
-
- /**
- * Returns the list of bucket names in S3
- * @return the list of bucket names
- */
- public List<String> buckets() {
- List<String> ret = new ArrayList<>();
- AmazonS3 s3 = getClient();
- List<Bucket> buckets = s3.listBuckets();
- for (Bucket b : buckets)
- ret.add(b.getName());
- return ret;
- }
-
- /**
- * Iterate over individual buckets.
- * Returns input streams to each object.
- * It is your responsibility to close the input streams
- * @param bucket the bucket to iterate over
- * @return an iterator over the objects in an s3 bucket
- */
- public Iterator<InputStream> iterateBucket(String bucket) {
- return new BucketIterator(bucket, this);
- }
-
- /**
- * Iterator-style pagination: one listing at a time
- * @param list the listing to get the next batch for
- * @return the next batch of objects, or null if
- * none are left
- */
- public ObjectListing nextList(ObjectListing list) {
- AmazonS3 s3 = getClient();
- if (list.isTruncated())
- return s3.listNextBatchOfObjects(list);
- return null;
- }
-
- /**
- * Simple way of retrieving the listings for a bucket
- * @param bucket the bucket to retrieve listings for
- * @return the object listing for this bucket
- */
- public ObjectListing listObjects(String bucket) {
- AmazonS3 s3 = getClient();
- ObjectListing list = s3.listObjects(bucket);
- return list;
- }
-
- /**
- * Paginates through a bucket's keys invoking the listener
- * at each key
- * @param bucket the bucket to iterate
- * @param listener the listener
- */
- public void paginate(String bucket, BucketKeyListener listener) {
- AmazonS3 s3 = getClient();
- ObjectListing list = s3.listObjects(bucket);
- for (S3ObjectSummary summary : list.getObjectSummaries()) {
- if (listener != null)
- listener.onKey(s3, bucket, summary.getKey());
- }
-
- while (list.isTruncated()) {
- list = s3.listNextBatchOfObjects(list);
- for (S3ObjectSummary summary : list.getObjectSummaries()) {
- if (listener != null)
- listener.onKey(s3, bucket, summary.getKey());
- }
- }
-
-
- }
-
-
- /**
- * Returns an input stream for the given bucket and key
- * @param bucket the bucket to retrieve from
- * @param key the key of the object
- * @return an input stream to the object
- */
- public InputStream objectForKey(String bucket, String key) {
- AmazonS3 s3 = getClient();
- S3Object obj = s3.getObject(bucket, key);
- InputStream is = obj.getObjectContent();
- return is;
- }
-
-
- public void download(String bucket, String key, File to) throws IOException {
- AmazonS3 s3 = getClient();
- S3Object obj = s3.getObject(bucket, key);
- InputStream is = obj.getObjectContent();
- BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(to));
- IOUtils.copy(is, bos);
- bos.close();
- is.close();
- obj.close();
- }
-
- public void download(String bucket, String key, OutputStream to) throws IOException {
- AmazonS3 s3 = getClient();
- S3Object obj = s3.getObject(bucket, key);
- InputStream is = obj.getObjectContent();
- BufferedOutputStream bos = new BufferedOutputStream(to);
-
- IOUtils.copy(is, bos);
- bos.close();
- is.close();
- obj.close();
- }
-
- public MultipleFileDownload downloadFolder(String bucketName, String keyPrefix, File folderPath) {
- TransferManager transfer = new TransferManager(getClient());
- return transfer.downloadDirectory(bucketName, keyPrefix, folderPath);
- }
-
-
-}
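A short, hedged sketch of the downloader deleted above; bucket, key, and target path are placeholders:

    import java.io.File;
    import java.io.IOException;

    void fetch() throws IOException {
        S3Downloader downloader = new S3Downloader(); // credentials resolved by BaseS3
        for (String key : downloader.keysForBucket("my-bucket"))
            System.out.println(key);
        downloader.download("my-bucket", "some/key", new File("/tmp/object.bin"));
    }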
diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/java/org/deeplearning4j/aws/s3/uploader/S3Uploader.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/java/org/deeplearning4j/aws/s3/uploader/S3Uploader.java
deleted file mode 100755
index eacc71386..000000000
--- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/java/org/deeplearning4j/aws/s3/uploader/S3Uploader.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/*******************************************************************************
- * Copyright (c) 2015-2018 Skymind, Inc.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Apache License, Version 2.0 which is available at
- * https://www.apache.org/licenses/LICENSE-2.0.
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations
- * under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- ******************************************************************************/
-
-package org.deeplearning4j.aws.s3.uploader;
-
-import com.amazonaws.services.s3.AmazonS3;
-import com.amazonaws.services.s3.AmazonS3Client;
-import com.amazonaws.services.s3.model.*;
-import com.amazonaws.services.s3.transfer.MultipleFileUpload;
-import com.amazonaws.services.s3.transfer.TransferManager;
-import org.deeplearning4j.aws.s3.BaseS3;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Uploads files to S3
- *
- * @see {@link BaseS3}
- * @author Adam Gibson
- *
- */
-public class S3Uploader extends BaseS3 {
-
-
- /**
- * Multi-part upload for big files
- * @param file the file to upload
- * @param bucketName the name of the bucket to upload to
- */
- public void multiPartUpload(File file, String bucketName) {
- AmazonS3 client = new AmazonS3Client(creds);
- bucketName = ensureValidBucketName(bucketName);
-
- List<Bucket> buckets = client.listBuckets();
- for (Bucket b : buckets)
- if (b.getName().equals(bucketName)) {
- doMultiPart(client, bucketName, file);
- return;
- }
-
- //bucket didn't exist: create it
- client.createBucket(bucketName);
- doMultiPart(client, bucketName, file);
- }
-
- /**
- * Upload the file to the bucket.
- * Will create the bucket if it hasn't already been created
- * @param file the file to upload
- * @param bucketName the name of the bucket
- */
- public void upload(File file, String bucketName) {
- AmazonS3 client = new AmazonS3Client(creds);
- bucketName = ensureValidBucketName(bucketName);
-
- List<Bucket> buckets = client.listBuckets();
- for (Bucket b : buckets)
- if (b.getName().equals(bucketName)) {
- client.putObject(bucketName, file.getName(), file);
- return;
- }
-
- //bucket didn't exist: create it
- client.createBucket(bucketName);
- client.putObject(bucketName, file.getName(), file);
-
- }
-
- private void doMultiPart(AmazonS3 s3Client, String bucketName, File file) {
- // Collect the part ETags; one is returned for each uploaded part.
- List<PartETag> partETags = new ArrayList<>();
-
- // Step 1: Initialize.
- InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(bucketName, file.getName());
- InitiateMultipartUploadResult initResponse = s3Client.initiateMultipartUpload(initRequest);
-
- long contentLength = file.length();
- long partSize = 5242880; // Set part size to 5 MB.
-
- try {
- // Step 2: Upload parts.
- long filePosition = 0;
- for (int i = 1; filePosition < contentLength; i++) {
- // Last part can be less than 5 MB. Adjust part size.
- partSize = Math.min(partSize, (contentLength - filePosition));
-
- // Create request to upload a part.
- UploadPartRequest uploadRequest = new UploadPartRequest().withBucketName(bucketName)
- .withKey(file.getName()).withUploadId(initResponse.getUploadId()).withPartNumber(i)
- .withFileOffset(filePosition).withFile(file).withPartSize(partSize);
-
- // Upload part and add response to our list.
- partETags.add(s3Client.uploadPart(uploadRequest).getPartETag());
-
- filePosition += partSize;
- }
-
- // Step 3: Complete.
- CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest(bucketName, file.getName(),
- initResponse.getUploadId(), partETags);
-
- s3Client.completeMultipartUpload(compRequest);
- } catch (Exception e) {
- s3Client.abortMultipartUpload(
- new AbortMultipartUploadRequest(bucketName, file.getName(), initResponse.getUploadId()));
- }
- }
-
- private String ensureValidBucketName(String bucketName) {
- String formatted = bucketName.replaceAll("\\s+", "_");
- if (formatted.length() > 62)
- formatted = formatted.substring(0, 62);
- formatted = formatted.replace(".", "d");
- formatted = formatted.toLowerCase();
- if (formatted.endsWith("-"))
- formatted = formatted.substring(0, formatted.length() - 1);
-
- return formatted;
- }
-
- public void upload(File file, String name, String bucketName) {
- AmazonS3 client = getClient();
- bucketName = ensureValidBucketName(bucketName);
- List<Bucket> buckets = client.listBuckets();
- // ObjectMetadata med = new ObjectMetadata();
- // med.setContentLength(fileLength);
- for (Bucket b : buckets)
- if (b.getName().equals(bucketName)) {
- //client.putObject(bucketName, name, is, med);
- client.putObject(new PutObjectRequest(bucketName, name, file));
- return;
- }
-
- //bucket didn't exist: create it
- client.createBucket(bucketName);
- //client.putObject(bucketName, name, is, med);
- client.putObject(new PutObjectRequest(bucketName, name, file));
- }
-
-
- public MultipleFileUpload uploadFolder(String bucketName, String keyPrefix, File folderPath,
- boolean includeSubDir) {
- TransferManager transfer = new TransferManager(getClient());
- return transfer.uploadDirectory(bucketName, keyPrefix, folderPath, includeSubDir);
- }
-
- public MultipleFileUpload uploadFileList(String bucketName, File folderPath, List<File> fileList,
- String keyPrefix) {
- TransferManager transfer = new TransferManager(getClient());
- return transfer.uploadFileList(bucketName, keyPrefix, folderPath, fileList);
- }
-
-
-}
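And a matching sketch for the uploader deleted above; the file path and bucket name are placeholders. multiPartUpload creates the bucket if it does not exist and uploads in 5 MB parts:

    import java.io.File;

    S3Uploader uploader = new S3Uploader(); // credentials resolved by BaseS3
    uploader.multiPartUpload(new File("/tmp/model.zip"), "my-model-bucket");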
diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/resources/provision-master.sh b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/resources/provision-master.sh
deleted file mode 100755
index 7f7285bb5..000000000
--- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-aws/src/main/resources/provision-master.sh
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/bin/bash
-
-################################################################################
-# Copyright (c) 2015-2018 Skymind, Inc.
-#
-# This program and the accompanying materials are made available under the
-# terms of the Apache License, Version 2.0 which is available at
-# https://www.apache.org/licenses/LICENSE-2.0.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-################################################################################
-
-sudo yum -y install blas java-1.7.0-openjdk-devel
-
-if [ ! -d "/opt/dl4j" ];
-then
- sudo mkdir /opt/dl4j
- sudo yum -y install git
- git clone https://github.com/agibsonccc/java-deeplearning
-
- wget http://www.trieuvan.com/apache/maven/maven-3/3.1.1/binaries/apache-maven-3.1.1-bin.tar.gz
- sudo mv apache-maven-3.1.1-bin.tar.gz /opt
- cd /opt && sudo tar xvf apache-maven-3.1.1-bin.tar.gz && sudo mv apache-maven-3.1.1 /opt/mvn
- cd /home/ec2-user/java-deeplearning/ && /opt/mvn/bin/mvn -DskipTests clean install
- echo "Printing distribution"
- ls /home/ec2-user/java-deeplearning/deeplearning4j-distribution/target
- echo "Before moving distribution"
- sudo mv deeplearning4j-distribution/target/deeplearning4j-dist.tar.gz /opt
- echo "Moving distribution to opt directory..."
- echo "Moving in to opt directory"
- cd /opt
-
- sudo tar xzvf deeplearning4j-dist.tar.gz
- #sudo mkdir /opt/dl4j
- echo "Moving jars in to /opt/dl4j/..."
- sudo mv *.jar /opt/dl4j
-fi
-
-
diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/pom.xml b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/pom.xml
index 97515cf5e..2c7a94de8 100644
--- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/pom.xml
+++ b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/pom.xml
@@ -86,6 +86,13 @@
     <artifactId>logback-classic</artifactId>
     <scope>test</scope>
 </dependency>
+
+<dependency>
+    <groupId>org.deeplearning4j</groupId>
+    <artifactId>deeplearning4j-common-tests</artifactId>
+    <version>${project.version}</version>
+    <scope>test</scope>
+</dependency>
diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/src/test/java/org/deeplearning4j/parallelism/parameterserver/BaseDL4JTest.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/src/test/java/org/deeplearning4j/parallelism/parameterserver/BaseDL4JTest.java
deleted file mode 100644
index 8e087cc2f..000000000
--- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/src/test/java/org/deeplearning4j/parallelism/parameterserver/BaseDL4JTest.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*******************************************************************************
- * Copyright (c) 2015-2018 Skymind, Inc.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Apache License, Version 2.0 which is available at
- * https://www.apache.org/licenses/LICENSE-2.0.
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations
- * under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- ******************************************************************************/
-
-package org.deeplearning4j.parallelism.parameterserver;
-
-import lombok.extern.slf4j.Slf4j;
-import org.bytedeco.javacpp.Pointer;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.rules.TestName;
-import org.nd4j.linalg.api.buffer.DataType;
-import org.nd4j.linalg.api.memory.MemoryWorkspace;
-import org.nd4j.linalg.api.ops.executioner.OpExecutioner;
-import org.nd4j.linalg.factory.Nd4j;
-import org.nd4j.linalg.profiler.ProfilerConfig;
-
-import java.lang.management.ManagementFactory;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-@Slf4j
-public class BaseDL4JTest {
-
- @Rule
- public TestName name = new TestName();
-
- protected long startTime;
- protected int threadCountBefore;
-
- /**
- * Override this to set the profiling mode for the tests defined in the child class
- */
- public OpExecutioner.ProfilingMode getProfilingMode(){
- return OpExecutioner.ProfilingMode.SCOPE_PANIC;
- }
-
- /**
- * Override this to set the datatype of the tests defined in the child class
- */
- public DataType getDataType(){
- return DataType.DOUBLE;
- }
-
- public DataType getDefaultFPDataType(){
- return getDataType();
- }
-
- @Before
- public void beforeTest(){
- log.info("{}.{}", getClass().getSimpleName(), name.getMethodName());
- Nd4j.getExecutioner().setProfilingMode(getProfilingMode());
- Nd4j.getExecutioner().setProfilingConfig(ProfilerConfig.builder().build());
- Nd4j.setDefaultDataTypes(getDataType(), getDefaultFPDataType());
- startTime = System.currentTimeMillis();
- threadCountBefore = ManagementFactory.getThreadMXBean().getThreadCount();
- }
-
- @After
- public void afterTest(){
- //Attempt to keep workspaces isolated between tests
- Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread();
- MemoryWorkspace currWS = Nd4j.getMemoryManager().getCurrentWorkspace();
- Nd4j.getMemoryManager().setCurrentWorkspace(null);
- if(currWS != null){
- //Not really safe to continue testing under this situation... other tests will likely fail with obscure
- // errors that are hard to track back to this
- log.error("Open workspace leaked from test! Exiting - {}, isOpen = {} - {}", currWS.getId(), currWS.isScopeActive(), currWS);
- System.exit(1);
- }
-
- StringBuilder sb = new StringBuilder();
- long maxPhys = Pointer.maxPhysicalBytes();
- long maxBytes = Pointer.maxBytes();
- long currPhys = Pointer.physicalBytes();
- long currBytes = Pointer.totalBytes();
-
- long jvmTotal = Runtime.getRuntime().totalMemory();
- long jvmMax = Runtime.getRuntime().maxMemory();
-
- int threadsAfter = ManagementFactory.getThreadMXBean().getThreadCount();
-
- long duration = System.currentTimeMillis() - startTime;
- sb.append(getClass().getSimpleName()).append(".").append(name.getMethodName())
- .append(": ").append(duration).append(" ms")
- .append(", threadCount: (").append(threadCountBefore).append("->").append(threadsAfter).append(")")
- .append(", jvmTotal=").append(jvmTotal)
- .append(", jvmMax=").append(jvmMax)
- .append(", totalBytes=").append(currBytes).append(", maxBytes=").append(maxBytes)
- .append(", currPhys=").append(currPhys).append(", maxPhys=").append(maxPhys);
-
- List<MemoryWorkspace> ws = Nd4j.getWorkspaceManager().getAllWorkspacesForCurrentThread();
- if(ws != null && ws.size() > 0){
- long currSize = 0;
- for(MemoryWorkspace w : ws){
- currSize += w.getCurrentSize();
- }
- if(currSize > 0){
- sb.append(", threadWSSize=").append(currSize)
- .append(" (").append(ws.size()).append(" WSs)");
- }
- }
-
-
- Properties p = Nd4j.getExecutioner().getEnvironmentInformation();
- Object o = p.get("cuda.devicesInformation");
- if(o instanceof List){
- List