Datavec bulk operation (#9075)

* 'Bulk operation can be used instead of iteration' inspection

Signed-off-by: Dariusz Zbyrad <dariusz.zbyrad@gmail.com>

* Redundant 'Collection.addAll()' call inspection

Signed-off-by: Dariusz Zbyrad <dariusz.zbyrad@gmail.com>
master
dariuszzbyrad 2020-08-28 00:59:14 +02:00 committed by GitHub
parent 8ff0aa8ddf
commit 6afec82181
8 changed files with 8 additions and 17 deletions
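
Both inspections named in the commit message collapse a construct-then-copy idiom into a single bulk call. The sketch below is not taken from the DataVec sources; it is a minimal standalone illustration using plain java.util collections and hypothetical variable names, assuming only the standard library.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class BulkCopyExample {
    public static void main(String[] args) {
        List<String> source = Arrays.asList("a", "b", "c");

        // Before: element-by-element copy (the "bulk operation can be used
        // instead of iteration" case).
        List<String> byLoop = new ArrayList<>();
        for (String value : source) {
            byLoop.add(value);
        }

        // After: one bulk call replaces the loop.
        List<String> byAddAll = new ArrayList<>();
        byAddAll.addAll(source);

        // The "redundant Collection.addAll() call" case: constructing an empty
        // list and immediately calling addAll() is equivalent to the copy constructor.
        List<String> byCopyConstructor = new ArrayList<>(source);

        System.out.println(byLoop.equals(byAddAll));            // true
        System.out.println(byAddAll.equals(byCopyConstructor)); // true
    }
}

Behaviourally the three lists are equal; the copy constructor is preferred mainly because it states the intent in one expression and lets ArrayList size its backing array from the source collection directly.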

@@ -128,9 +128,7 @@ public class CSVVariableSlidingWindowRecordReader extends CSVRecordReader implem
         }
         List<List<Writable>> sequence = new ArrayList<>();
-        for(List<Writable> line : queue) {
-            sequence.add(line);
-        }
+        sequence.addAll(queue);
         if(exhausted && queue.size()==1)
             queue.pollLast();

@@ -200,9 +200,8 @@ public class OverlappingTimeWindowFunction implements WindowFunction {
     public Schema transform(Schema inputSchema) {
         if (!addWindowStartTimeColumn && !addWindowEndTimeColumn)
             return inputSchema;
-        List<ColumnMetaData> newMeta = new ArrayList<>();
-        newMeta.addAll(inputSchema.getColumnMetaData());
+        List<ColumnMetaData> newMeta = new ArrayList<>(inputSchema.getColumnMetaData());
         if (addWindowStartTimeColumn) {
             newMeta.add(new TimeMetaData("windowStartTime"));

@@ -165,9 +165,8 @@ public class TimeWindowFunction implements WindowFunction {
     public Schema transform(Schema inputSchema) {
         if (!addWindowStartTimeColumn && !addWindowEndTimeColumn)
             return inputSchema;
-        List<ColumnMetaData> newMeta = new ArrayList<>();
-        newMeta.addAll(inputSchema.getColumnMetaData());
+        List<ColumnMetaData> newMeta = new ArrayList<>(inputSchema.getColumnMetaData());
         if (addWindowStartTimeColumn) {
             newMeta.add(new TimeMetaData("windowStartTime"));

@@ -53,8 +53,7 @@ public class AddConstantColumnTransform implements Transform {
     @Override
     public Schema transform(Schema inputSchema) {
-        List<ColumnMetaData> outMeta = new ArrayList<>();
-        outMeta.addAll(inputSchema.getColumnMetaData());
+        List<ColumnMetaData> outMeta = new ArrayList<>(inputSchema.getColumnMetaData());
         ColumnMetaData newColMeta = newColumnType.newColumnMetaData(newColumnName);
         outMeta.add(newColMeta);

@@ -75,8 +75,7 @@ public class ConcatenateStringColumns extends BaseTransform implements ColumnOp
             }
         }
-        List<ColumnMetaData> outMeta = new ArrayList<>();
-        outMeta.addAll(inputSchema.getColumnMetaData());
+        List<ColumnMetaData> outMeta = new ArrayList<>(inputSchema.getColumnMetaData());
         ColumnMetaData newColMeta = ColumnType.String.newColumnMetaData(newColumnName);
         outMeta.add(newColMeta);

@@ -279,8 +279,7 @@ public class TestTransforms extends BaseND4JTest {
         Assert.assertEquals(outputColumns, newSchema.getColumnNames());
         List<Writable> input = new ArrayList<>();
-        for (Writable value : COLUMN_VALUES)
-            input.add(value);
+        input.addAll(COLUMN_VALUES);
         transform.setInputSchema(schema);
         List<Writable> transformed = transform.map(input);

@@ -243,8 +243,7 @@ public class ArrowWritableRecordBatch extends AbstractWritableRecordBatch implem
     public List<List<Writable>> toArrayList() {
         List<List<Writable>> ret = new ArrayList<>();
         for(int i = 0; i < size(); i++) {
-            List<Writable> add = new ArrayList<>();
-            add.addAll(get(i));
+            List<Writable> add = new ArrayList<>(get(i));
             ret.add(add);
         }

@@ -40,8 +40,7 @@ public class LocalGroupToSequenceFunction implements Function<List<List<Writable
     public List<List<Writable>> apply(List<List<Writable>> lists) {
         List<List<Writable>> list = new ArrayList<>();
-        for (List<Writable> writables : lists)
-            list.add(writables);
+        list.addAll(lists);
         Collections.sort(list, comparator);