2 changes: 1 addition & 1 deletion .gitignore
@@ -146,7 +146,7 @@ src/test/scripts/functions/pipelines/intermediates/classification/*

venv
venv/*

.venv
# resource optimization
scripts/resource/output
*.pem
99 changes: 76 additions & 23 deletions src/main/java/org/apache/sysds/api/PythonDMLScript.java
@@ -1,18 +1,18 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
@@ -24,9 +24,11 @@
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.sysds.api.jmlc.Connection;
import org.apache.sysds.common.Types.ValueType;

import org.apache.sysds.common.Types;
import org.apache.sysds.runtime.DMLRuntimeException;
import org.apache.sysds.runtime.frame.data.FrameBlock;
import org.apache.sysds.runtime.frame.data.columns.Array;
import org.apache.sysds.runtime.matrix.data.MatrixBlock;
import org.apache.sysds.runtime.util.CommonThreadPool;
import org.apache.sysds.runtime.util.UnixPipeUtils;
@@ -79,7 +81,7 @@ public static void main(String[] args) throws Exception {
* therefore use logging framework. and terminate program.
*/
LOG.info("failed startup", p4e);
System.exit(-1);
exitHandler.exit(-1);
}
catch(Exception e) {
throw new DMLException("Failed startup and maintaining Python gateway", e);
@@ -116,59 +118,59 @@ public void openPipes(String path, int num) throws IOException {
}
}

public MatrixBlock startReadingMbFromPipe(int id, int rlen, int clen, Types.ValueType type) throws IOException {
public MatrixBlock startReadingMbFromPipe(int id, int rlen, int clen, ValueType type) throws IOException {
long limit = (long) rlen * clen;
LOG.debug("trying to read matrix from "+id+" with "+rlen+" rows and "+clen+" columns. Total size: "+limit);
if(limit > Integer.MAX_VALUE)
throw new DMLRuntimeException("Dense NumPy array of size " + limit +
" cannot be converted to MatrixBlock");
MatrixBlock mb = new MatrixBlock(rlen, clen, false, -1);
MatrixBlock mb;
if(fromPython != null){
BufferedInputStream pipe = fromPython.get(id);
double[] denseBlock = new double[(int) limit];
UnixPipeUtils.readNumpyArrayInBatches(pipe, id, BATCH_SIZE, (int) limit, type, denseBlock, 0);
mb.init(denseBlock, rlen, clen);
long nnz = UnixPipeUtils.readNumpyArrayInBatches(pipe, id, BATCH_SIZE, (int) limit, type, denseBlock, 0);
mb = new MatrixBlock(rlen, clen, denseBlock);
mb.setNonZeros(nnz);
} else {
throw new DMLRuntimeException("FIFO Pipes are not initialized.");
}
mb.recomputeNonZeros();
mb.examSparsity();
LOG.debug("Reading from Python finished");
mb.examSparsity();
return mb;
}

public MatrixBlock startReadingMbFromPipes(int[] blockSizes, int rlen, int clen, Types.ValueType type) throws ExecutionException, InterruptedException {
public MatrixBlock startReadingMbFromPipes(int[] blockSizes, int rlen, int clen, ValueType type) throws ExecutionException, InterruptedException {
long limit = (long) rlen * clen;
if(limit > Integer.MAX_VALUE)
throw new DMLRuntimeException("Dense NumPy array of size " + limit +
" cannot be converted to MatrixBlock");
MatrixBlock mb = new MatrixBlock(rlen, clen, false, -1);
MatrixBlock mb = new MatrixBlock(rlen, clen, false, rlen*clen);
if(fromPython != null){
ExecutorService pool = CommonThreadPool.get();
double[] denseBlock = new double[(int) limit];
int offsetOut = 0;
List<Future<Void>> futures = new ArrayList<>();
List<Future<Long>> futures = new ArrayList<>();
for (int i = 0; i < blockSizes.length; i++) {
BufferedInputStream pipe = fromPython.get(i);
int id = i, blockSize = blockSizes[i], _offsetOut = offsetOut;
Callable<Void> task = () -> {
UnixPipeUtils.readNumpyArrayInBatches(pipe, id, BATCH_SIZE, blockSize, type, denseBlock, _offsetOut);
return null;
Callable<Long> task = () -> {
return UnixPipeUtils.readNumpyArrayInBatches(pipe, id, BATCH_SIZE, blockSize, type, denseBlock, _offsetOut);
};

futures.add(pool.submit(task));
offsetOut += blockSize;
}
// Wait for all tasks and propagate exceptions
for (Future<Void> f : futures) {
f.get();
// Wait for all tasks and propagate exceptions, sum up nonzeros
long nnz = 0;
for (Future<Long> f : futures) {
nnz += f.get();
}

mb.init(denseBlock, rlen, clen);
mb = new MatrixBlock(rlen, clen, denseBlock);
mb.setNonZeros(nnz);
} else {
throw new DMLRuntimeException("FIFO Pipes are not initialized.");
}
mb.recomputeNonZeros();
mb.examSparsity();
return mb;
}
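
The hunk above switches the reader tasks from Callable&lt;Void&gt; to Callable&lt;Long&gt; so each pipe reader reports the nonzeros it wrote; the caller then sums the per-block counts instead of recomputing nonzeros over the whole dense block afterwards. A minimal, self-contained sketch of that fan-out-and-sum pattern follows, with a stand-in reader lambda in place of UnixPipeUtils.readNumpyArrayInBatches (all names hypothetical):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class NnzSumSketch {
	public static void main(String[] args) throws Exception {
		double[] dense = new double[1000];       // shared destination buffer
		int[] blockSizes = {400, 300, 300};      // one block per (hypothetical) pipe
		ExecutorService pool = Executors.newFixedThreadPool(blockSizes.length);
		try {
			List<Future<Long>> futures = new ArrayList<>();
			int offset = 0;
			for (int i = 0; i < blockSizes.length; i++) {
				final int start = offset, len = blockSizes[i], id = i;
				// Each task fills its own disjoint slice and returns the nonzeros it wrote,
				// mirroring readNumpyArrayInBatches returning a long nnz per block.
				Callable<Long> task = () -> {
					long nnz = 0;
					for (int j = start; j < start + len; j++) {
						dense[j] = (id + j) % 3;  // stand-in for values read from a pipe
						if (dense[j] != 0)
							nnz++;
					}
					return nnz;
				};
				futures.add(pool.submit(task));
				offset += len;
			}
			long totalNnz = 0;
			for (Future<Long> f : futures)
				totalNnz += f.get();              // waits and propagates reader exceptions
			System.out.println("total nonzeros: " + totalNnz);
		} finally {
			pool.shutdown();
		}
	}
}

Writing disjoint slices of the shared array from separate threads is safe here because no two tasks touch the same indices, and the summed count is only read after every future has completed.
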
@@ -181,14 +183,51 @@ public void startWritingMbToPipe(int id, MatrixBlock mb) throws IOException {
LOG.debug("Trying to write matrix ["+baseDir + "-"+ id+"] with "+rlen+" rows and "+clen+" columns. Total size: "+numElem*8);

BufferedOutputStream out = toPython.get(id);
long bytes = UnixPipeUtils.writeNumpyArrayInBatches(out, id, BATCH_SIZE, numElem, Types.ValueType.FP64, mb);
long bytes = UnixPipeUtils.writeNumpyArrayInBatches(out, id, BATCH_SIZE, numElem, ValueType.FP64, mb);

LOG.debug("Writing of " + bytes +" Bytes to Python ["+baseDir + "-"+ id+"] finished");
} else {
throw new DMLRuntimeException("FIFO Pipes are not initialized.");
}
}

public void startReadingColFromPipe(int id, FrameBlock fb, int rows, int totalBytes, int col, ValueType type, boolean any) throws IOException {
if (fromPython == null) {
throw new DMLRuntimeException("FIFO Pipes are not initialized.");
}

BufferedInputStream pipe = fromPython.get(id);
LOG.debug("Start reading FrameBlock column from pipe #" + id + " with type " + type);

// Delegate to UnixPipeUtils
Array<?> arr = UnixPipeUtils.readFrameColumnFromPipe(pipe, id, rows, totalBytes, BATCH_SIZE, type);
// Set column into FrameBlock
fb.setColumn(col, arr);
ValueType[] schema = fb.getSchema();
// inplace update the schema for cases: int8 -> int32
schema[col] = arr.getValueType();

LOG.debug("Finished reading FrameBlock column from pipe #" + id);
}

public void startWritingColToPipe(int id, FrameBlock fb, int col) throws IOException {
if (toPython == null) {
throw new DMLRuntimeException("FIFO Pipes are not initialized.");
}

BufferedOutputStream pipe = toPython.get(id);
ValueType type = fb.getSchema()[col];
int rows = fb.getNumRows();
Array<?> array = fb.getColumn(col);

LOG.debug("Start writing FrameBlock column #" + col + " to pipe #" + id + " with type " + type + " and " + rows + " rows");

// Delegate to UnixPipeUtils
long bytes = UnixPipeUtils.writeFrameColumnToPipe(pipe, id, BATCH_SIZE, array, type);

LOG.debug("Finished writing FrameBlock column #" + col + " to pipe #" + id + ". Total bytes: " + bytes);
}

public void closePipes() throws IOException {
LOG.debug("Closing all pipes in Java");
for (BufferedInputStream pipe : fromPython.values())
@@ -198,6 +237,20 @@ public void closePipes() throws IOException {
LOG.debug("Closed all pipes in Java");
}

@FunctionalInterface
public interface ExitHandler {
void exit(int status);
}

private static volatile ExitHandler exitHandler = System::exit;

public static void setExitHandler(ExitHandler handler) {
exitHandler = handler == null ? System::exit : handler;
}

public static void resetExitHandler() {
exitHandler = System::exit;
}
protected static class DMLGateWayListener extends DefaultGatewayServerListener {
private static final Log LOG = LogFactory.getLog(DMLGateWayListener.class.getName());

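
The ExitHandler hook added above replaces the hard System.exit(-1) on Py4J startup failure, so embedding code and tests can observe the failure instead of having the JVM terminated. A hedged usage sketch; only setExitHandler and resetExitHandler come from this change, the surrounding helper is hypothetical:

import java.util.concurrent.atomic.AtomicInteger;
import org.apache.sysds.api.PythonDMLScript;

public class ExitHandlerSketch {
	// Hypothetical test helper: runs an action and reports the exit status it requested,
	// or Integer.MIN_VALUE if the handler was never invoked.
	static int capturedExitStatus(Runnable action) {
		AtomicInteger status = new AtomicInteger(Integer.MIN_VALUE);
		PythonDMLScript.setExitHandler(status::set);   // intercept instead of System.exit
		try {
			action.run();
		} finally {
			PythonDMLScript.resetExitHandler();        // restore System::exit afterwards
		}
		return status.get();
	}
}
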
@@ -174,7 +174,7 @@ public void reset(int size) {

@Override
public byte[] getAsByteArray() {
ByteBuffer floatBuffer = ByteBuffer.allocate(8 * _size);
ByteBuffer floatBuffer = ByteBuffer.allocate(4 * _size);
floatBuffer.order(ByteOrder.nativeOrder());
for(int i = 0; i < _size; i++)
floatBuffer.putFloat(_data[i]);
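
The one-line fix in the hunk above sizes the byte buffer at 4 bytes per element, since Java's float is 32-bit; the previous 8 * _size allocation left the second half of the returned array as zero padding. A standalone sketch of the corrected sizing, using a plain float[] rather than the actual column Array class:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class FloatBytesSketch {
	// Serialize a float column at exactly Float.BYTES (4) bytes per element,
	// matching the corrected ByteBuffer.allocate(4 * _size).
	static byte[] toNativeBytes(float[] data) {
		ByteBuffer buf = ByteBuffer.allocate(Float.BYTES * data.length);
		buf.order(ByteOrder.nativeOrder());
		for (float f : data)
			buf.putFloat(f);
		return buf.array();
	}
}
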
@@ -8,7 +8,7 @@
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -91,7 +91,7 @@ public void processInstruction(ExecutionContext ec) {
FrameBlock fin = ec.getFrameInput(input1.getName());
String spec = ec.getScalarInput(input2).getStringValue();
String[] colnames = fin.getColumnNames();

// execute block transform encode
MultiColumnEncoder encoder = EncoderFactory.createEncoder(spec, colnames, fin.getNumColumns(), null);
// TODO: Assign #threads in compiler and pass via the instruction string