Datavec bulk operation (#9075)
* Bulk operation can be used instead of iteration inspection

Signed-off-by: Dariusz Zbyrad <dariusz.zbyrad@gmail.com>

* Redundant 'Collection.addAll()' call inspection

Signed-off-by: Dariusz Zbyrad <dariusz.zbyrad@gmail.com>

master
parent 8ff0aa8ddf
commit 6afec82181
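Both changes come from standard IDE inspections. The first one replaces an element-by-element copy loop with a single bulk Collection.addAll(...) call, as in the CSVVariableSlidingWindowRecordReader, TestTransforms, and LocalGroupToSequenceFunction hunks below. A minimal standalone sketch of that pattern, with illustrative class and variable names that are not taken from the DataVec sources:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Deque;
import java.util.LinkedList;
import java.util.List;

public class BulkAddSketch {
    public static void main(String[] args) {
        // Stand-in for the record reader's internal queue of parsed lines (illustrative only).
        Deque<String> queue = new LinkedList<>(Arrays.asList("row1", "row2", "row3"));

        // Before: copy the queue into the sequence one element at a time.
        List<String> viaLoop = new ArrayList<>();
        for (String line : queue) {
            viaLoop.add(line);
        }

        // After: one bulk call; addAll walks the source collection in its normal
        // iteration order, so the resulting list is identical.
        List<String> viaBulk = new ArrayList<>();
        viaBulk.addAll(queue);

        System.out.println(viaLoop.equals(viaBulk)); // true
    }
}

The source collection is left untouched either way; only the explicit loop disappears.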
@@ -128,9 +128,7 @@ public class CSVVariableSlidingWindowRecordReader extends CSVRecordReader implem
         }

         List<List<Writable>> sequence = new ArrayList<>();
-        for(List<Writable> line : queue) {
-            sequence.add(line);
-        }
+        sequence.addAll(queue);

         if(exhausted && queue.size()==1)
             queue.pollLast();
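Several of the following hunks (the WindowFunction, Transform, and ArrowWritableRecordBatch changes) apply the second inspection: a list that is created empty and immediately filled with addAll can be built directly with the ArrayList copy constructor. A small sketch of the equivalence, with plain strings standing in for the ColumnMetaData objects and illustrative names only:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class CopyConstructorSketch {
    public static void main(String[] args) {
        // Stand-in for something like inputSchema.getColumnMetaData().
        List<String> columnMeta = Arrays.asList("col1", "col2", "col3");

        // Before: empty list, then a redundant bulk add.
        List<String> before = new ArrayList<>();
        before.addAll(columnMeta);

        // After: the copy constructor performs the same copy in one expression
        // and sizes the backing array up front.
        List<String> after = new ArrayList<>(columnMeta);

        // Both copies are independent of the source list, so appending further
        // columns afterwards behaves exactly the same.
        after.add("windowStartTime");
        System.out.println(before + " " + after);
    }
}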
@@ -200,9 +200,8 @@ public class OverlappingTimeWindowFunction implements WindowFunction {
     public Schema transform(Schema inputSchema) {
         if (!addWindowStartTimeColumn && !addWindowEndTimeColumn)
             return inputSchema;

-        List<ColumnMetaData> newMeta = new ArrayList<>();
-        newMeta.addAll(inputSchema.getColumnMetaData());
+        List<ColumnMetaData> newMeta = new ArrayList<>(inputSchema.getColumnMetaData());

         if (addWindowStartTimeColumn) {
             newMeta.add(new TimeMetaData("windowStartTime"));
@@ -165,9 +165,8 @@ public class TimeWindowFunction implements WindowFunction {
     public Schema transform(Schema inputSchema) {
         if (!addWindowStartTimeColumn && !addWindowEndTimeColumn)
             return inputSchema;

-        List<ColumnMetaData> newMeta = new ArrayList<>();
-        newMeta.addAll(inputSchema.getColumnMetaData());
+        List<ColumnMetaData> newMeta = new ArrayList<>(inputSchema.getColumnMetaData());

         if (addWindowStartTimeColumn) {
             newMeta.add(new TimeMetaData("windowStartTime"));
@@ -53,8 +53,7 @@ public class AddConstantColumnTransform implements Transform {

     @Override
     public Schema transform(Schema inputSchema) {
-        List<ColumnMetaData> outMeta = new ArrayList<>();
-        outMeta.addAll(inputSchema.getColumnMetaData());
+        List<ColumnMetaData> outMeta = new ArrayList<>(inputSchema.getColumnMetaData());

         ColumnMetaData newColMeta = newColumnType.newColumnMetaData(newColumnName);
         outMeta.add(newColMeta);
@@ -75,8 +75,7 @@ public class ConcatenateStringColumns extends BaseTransform implements ColumnOp
             }
         }

-        List<ColumnMetaData> outMeta = new ArrayList<>();
-        outMeta.addAll(inputSchema.getColumnMetaData());
+        List<ColumnMetaData> outMeta = new ArrayList<>(inputSchema.getColumnMetaData());

         ColumnMetaData newColMeta = ColumnType.String.newColumnMetaData(newColumnName);
         outMeta.add(newColMeta);
@@ -279,8 +279,7 @@ public class TestTransforms extends BaseND4JTest {
         Assert.assertEquals(outputColumns, newSchema.getColumnNames());

         List<Writable> input = new ArrayList<>();
-        for (Writable value : COLUMN_VALUES)
-            input.add(value);
+        input.addAll(COLUMN_VALUES);

         transform.setInputSchema(schema);
         List<Writable> transformed = transform.map(input);
@@ -243,8 +243,7 @@ public class ArrowWritableRecordBatch extends AbstractWritableRecordBatch implem
     public List<List<Writable>> toArrayList() {
         List<List<Writable>> ret = new ArrayList<>();
         for(int i = 0; i < size(); i++) {
-            List<Writable> add = new ArrayList<>();
-            add.addAll(get(i));
+            List<Writable> add = new ArrayList<>(get(i));
             ret.add(add);
         }

@@ -40,8 +40,7 @@ public class LocalGroupToSequenceFunction implements Function<List<List<Writable
     public List<List<Writable>> apply(List<List<Writable>> lists) {

         List<List<Writable>> list = new ArrayList<>();
-        for (List<Writable> writables : lists)
-            list.add(writables);
+        list.addAll(lists);

         Collections.sort(list, comparator);
