Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Model Download Buttons #719

Merged
merged 26 commits into from
Mar 11, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
757e753
This creates the component which will populate the Download Tab with …
anfee1 Mar 2, 2021
3563314
Making a place for the download buttons.
anfee1 Mar 2, 2021
3dc1bf2
Adding the Model Download Handler allowing the backend to feed the li…
anfee1 Mar 2, 2021
89f48c4
Getting rid of some of the test code.
anfee1 Mar 2, 2021
a491d98
Improve Block usability (#712)
stu1130 Mar 2, 2021
5ca07f4
Removing unnecessary logging messages.
anfee1 Mar 3, 2021
eb8d51d
block factory init commit (#697)
lanking520 Mar 3, 2021
cb352ad
[DOCS] Fixing TrainingListener documentation (#718)
aksrajvanshi Mar 3, 2021
48cf663
Fix DJL serving flaky test for mac (#721)
frankfliu Mar 4, 2021
28a32ff
Fixing all of the nits.
anfee1 Mar 4, 2021
c9d28c8
Getting rid of unnecessary methods.
anfee1 Mar 4, 2021
a059417
update onnxruntime along with String tensor (#724)
lanking520 Mar 5, 2021
347eb07
Add profiler doc (#722)
stu1130 Mar 5, 2021
a363db7
Resolving some comments.
anfee1 Mar 5, 2021
9d55e6e
Using a better criterion in case multiple models have the same name.
anfee1 Mar 5, 2021
b4a1cc0
Fixing the Javadoc.
anfee1 Mar 5, 2021
a66e168
Configure verbose of mxnet extra libraries (#728)
zachgk Mar 8, 2021
5aa09a2
Added a TODO for using the artifact repo to get the base uri.
anfee1 Mar 8, 2021
f881e4d
paddlepaddle CN notebook (#730)
lanking520 Mar 8, 2021
a6a2232
add EI documentation (#733)
lanking520 Mar 9, 2021
a90129e
allow pytorch stream model loading (#729)
lanking520 Mar 9, 2021
c6aebe0
add NDList decode from inputStream (#734)
lanking520 Mar 9, 2021
8342d44
Remove memory scope and improve memory management (#695)
zachgk Mar 9, 2021
43e5891
Remove erroneous random forest application (#726)
zachgk Mar 9, 2021
2158e99
Minor fixes on duplicated code (#736)
lanking520 Mar 9, 2021
f29daf8
Trying to rebase to fix PR.
anfee1 Mar 10, 2021
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .github/workflows/docs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@ jobs:
uses: actions/setup-python@v1
with:
python-version: '3.x'
- name: Install CN fonts
run: sudo apt-get update && sudo apt-get install fonts-arphic-uming
- name: install Python Dependencies
run: pip3 install nbconvert==5.6.1 mkdocs mkdocs-exclude mknotebooks==0.4.1 mkdocs-material jupyter Pygments Markdown==3.2.2
- name: Install IJava kernel
Expand Down
11 changes: 1 addition & 10 deletions api/src/main/java/ai/djl/Application.java
Original file line number Diff line number Diff line change
Expand Up @@ -268,15 +268,6 @@ public interface Tabular {
* @see <a href="https://d2l.djl.ai/chapter_linear-networks/softmax-regression.html">The D2L
* chapter introducing this application</a>
*/
Application SOFTMAX_REGRESSION = new Application("tabular/linear_regression");

/**
* This is erroneous because random forest is a technique (not deep learning), not an
* application.
*
* <p>The actual application is likely to be in {@link Tabular}, especially {@link
* #SOFTMAX_REGRESSION}.
*/
Application RANDOM_FOREST = new Application("tabular/random_forest");
Application SOFTMAX_REGRESSION = new Application("tabular/softmax_regression");
}
}
16 changes: 16 additions & 0 deletions api/src/main/java/ai/djl/BaseModel.java
Original file line number Diff line number Diff line change
Expand Up @@ -267,6 +267,22 @@ public Path getModelPath() {
return modelDir;
}

/** {@inheritDoc} */
@Override
public String toString() {
StringBuilder sb = new StringBuilder(200);
sb.append("Model (\n\tName: ").append(modelName);
if (modelDir != null) {
sb.append("\n\tModel location: ").append(modelDir.toAbsolutePath());
}
sb.append("\n\tData Type: ").append(dataType);
for (Map.Entry<String, String> entry : properties.entrySet()) {
sb.append("\n\t").append(entry.getKey()).append(": ").append(entry.getValue());
}
sb.append("\n)");
return sb.toString();
}

/** {@inheritDoc} */
@SuppressWarnings("deprecation")
@Override
Expand Down
9 changes: 9 additions & 0 deletions api/src/main/java/ai/djl/engine/Engine.java
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import ai.djl.Device;
import ai.djl.Model;
import ai.djl.ndarray.NDManager;
import ai.djl.nn.SymbolBlock;
import ai.djl.training.GradientCollector;
import ai.djl.training.LocalParameterServer;
import ai.djl.training.ParameterServer;
Expand Down Expand Up @@ -190,6 +191,14 @@ public Device defaultDevice() {
return defaultDevice;
}

/**
* Construct an empty SymbolBlock for loading.
*
* @param manager the manager to manage parameters
* @return Empty {@link SymbolBlock} for static graph
*/
public abstract SymbolBlock newSymbolBlock(NDManager manager);

/**
* Constructs a new model.
*
Expand Down
4 changes: 2 additions & 2 deletions api/src/main/java/ai/djl/modality/nlp/Decoder.java
Original file line number Diff line number Diff line change
Expand Up @@ -64,8 +64,8 @@ public void initializeChildBlocks(NDManager manager, DataType dataType, Shape...

/** {@inheritDoc} */
@Override
public Shape[] getOutputShapes(NDManager manager, Shape[] inputShapes) {
return block.getOutputShapes(manager, inputShapes);
public Shape[] getOutputShapes(Shape[] inputShapes) {
return block.getOutputShapes(inputShapes);
}

/** {@inheritDoc} */
Expand Down
4 changes: 2 additions & 2 deletions api/src/main/java/ai/djl/modality/nlp/Encoder.java
Original file line number Diff line number Diff line change
Expand Up @@ -79,8 +79,8 @@ public void initializeChildBlocks(NDManager manager, DataType dataType, Shape...

/** {@inheritDoc} */
@Override
public Shape[] getOutputShapes(NDManager manager, Shape[] inputShapes) {
return block.getOutputShapes(manager, inputShapes);
public Shape[] getOutputShapes(Shape[] inputShapes) {
return block.getOutputShapes(inputShapes);
}

/** {@inheritDoc} */
Expand Down
9 changes: 4 additions & 5 deletions api/src/main/java/ai/djl/modality/nlp/EncoderDecoder.java
Original file line number Diff line number Diff line change
Expand Up @@ -97,19 +97,18 @@ public NDList forward(
* @param manager the NDManager to initialize the parameters
* @param dataType the datatype of the parameters
* @param inputShapes the shapes of the inputs to the block
* @return the shapes of the outputs of the block
*/
@Override
public Shape[] initialize(NDManager manager, DataType dataType, Shape... inputShapes) {
public void initialize(NDManager manager, DataType dataType, Shape... inputShapes) {
beforeInitialize(inputShapes);
encoder.initialize(manager, dataType, inputShapes[0]);
return decoder.initialize(manager, dataType, inputShapes[1]);
decoder.initialize(manager, dataType, inputShapes[1]);
}

/** {@inheritDoc} */
@Override
public Shape[] getOutputShapes(NDManager manager, Shape[] inputShapes) {
return decoder.getOutputShapes(manager, new Shape[] {inputShapes[1]});
public Shape[] getOutputShapes(Shape[] inputShapes) {
return decoder.getOutputShapes(new Shape[] {inputShapes[1]});
}

/** {@inheritDoc} */
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ public void initializeChildBlocks(NDManager manager, DataType dataType, Shape...

/** {@inheritDoc} */
@Override
public Shape[] getOutputShapes(NDManager manager, Shape[] inputShapes) {
return trainableWordEmbedding.getOutputShapes(manager, inputShapes);
public Shape[] getOutputShapes(Shape[] inputShapes) {
return trainableWordEmbedding.getOutputShapes(inputShapes);
}
}
32 changes: 29 additions & 3 deletions api/src/main/java/ai/djl/ndarray/BaseNDManager.java
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import ai.djl.Device;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.util.Pair;
import ai.djl.util.PairList;
import java.nio.Buffer;
import java.nio.file.Path;
Expand All @@ -34,12 +35,14 @@ public abstract class BaseNDManager implements NDManager {
protected String name;
protected Device device;
protected ConcurrentHashMap<String, AutoCloseable> resources;
protected ConcurrentHashMap<String, Pair<NDResource, NDManager>> tempResources;
protected AtomicBoolean closed = new AtomicBoolean(false);

protected BaseNDManager(NDManager parent, Device device) {
this.parent = parent;
this.device = Device.defaultIfNull(device, getEngine());
resources = new ConcurrentHashMap<>();
tempResources = new ConcurrentHashMap<>();
uid = UUID.randomUUID().toString();
}

Expand All @@ -49,6 +52,12 @@ public NDArray create(String data) {
throw new UnsupportedOperationException("Not supported!");
}

/** {@inheritDoc} */
@Override
public NDArray create(String[] data) {
throw new UnsupportedOperationException("Not supported!");
}

/** {@inheritDoc} */
@Override
public NDArray create(Shape shape, DataType dataType) {
Expand Down Expand Up @@ -197,7 +206,7 @@ public String toString() {

/** {@inheritDoc} */
@Override
public synchronized void attach(String resourceId, AutoCloseable resource) {
public synchronized void attachInternal(String resourceId, AutoCloseable resource) {
if (closed.get()) {
throw new IllegalStateException("NDManager has been closed already.");
}
Expand All @@ -206,7 +215,17 @@ public synchronized void attach(String resourceId, AutoCloseable resource) {

/** {@inheritDoc} */
@Override
public synchronized void detach(String resourceId) {
public void tempAttachInternal(
NDManager originalManager, String resourceId, NDResource resource) {
if (closed.get()) {
throw new IllegalStateException("NDManager has been closed already.");
}
tempResources.put(resourceId, new Pair<>(resource, originalManager));
}

/** {@inheritDoc} */
@Override
public synchronized void detachInternal(String resourceId) {
if (closed.get()) {
// This may happen in the middle of BaseNDManager.close()
return;
Expand Down Expand Up @@ -238,7 +257,14 @@ public synchronized void close() {
logger.error("Resource close failed.", e);
}
}
parent.detach(uid);
for (Pair<NDResource, NDManager> resource : tempResources.values()) {
try {
resource.getKey().attach(resource.getValue());
} catch (Exception e) {
logger.error("Temporary resource return failed.", e);
}
}
parent.detachInternal(uid);
resources.clear();
}
}
Expand Down
39 changes: 10 additions & 29 deletions api/src/main/java/ai/djl/ndarray/NDArray.java
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@
* href="https://github.com/awslabs/djl/blob/master/docs/development/memory_management.md">NDArray
* Memory Management Guide</a>
*/
public interface NDArray extends AutoCloseable {
public interface NDArray extends NDResource {

/**
* Decodes {@code NDArray} from bytes.
Expand All @@ -53,13 +53,6 @@ static NDArray decode(NDManager manager, byte[] byteArray) {
return manager.decode(byteArray);
}

/**
* Returns the {@link NDManager} used to create this {@code NDArray}.
*
* @return the {@link NDManager} used to create this {@code NDArray}
*/
NDManager getManager();

/**
* Returns the name of this {@code NDArray}.
*
Expand Down Expand Up @@ -146,27 +139,6 @@ default byte[] encode() {
return NDSerializer.encode(this);
}

/**
* Attaches this {@code NDArray} to the specified {@link NDManager}.
*
* <p>Attached resource will be closed when the {@link NDManager} is closed.
*
* @param manager the {@link NDManager} to be attached
* @return the original {@link NDManager}
*/
NDManager attach(NDManager manager);

/**
* Detaches the {@code NDArray} from current {@link NDManager}'s lifecycle.
*
* <p>The {@code NDArray} becomes un-managed, it is the user's responsibility to close the
* {@code NDArray}. Failure to close the resource might cause your machine to run out of native
* memory.
*
* @see NDManager
*/
void detach();

/**
* Moves this {@code NDArray} to a different {@link Device}.
*
Expand Down Expand Up @@ -371,6 +343,15 @@ default boolean[] toBooleanArray() {
return ret;
}

/**
* Converts this {@code NDArray} to a String array.
*
* <p>This method is only applicable to the String typed NDArray and not for printing purpose
*
* @return Array of Strings
*/
String[] toStringArray();

/**
* Converts this {@code NDArray} to a Number array based on its {@link DataType}.
*
Expand Down
8 changes: 7 additions & 1 deletion api/src/main/java/ai/djl/ndarray/NDArrayAdapter.java
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ default SparseFormat getSparseFormat() {

/** {@inheritDoc} */
@Override
default NDManager attach(NDManager manager) {
default void attach(NDManager manager) {
throw new UnsupportedOperationException(UNSUPPORTED_MSG);
}

Expand Down Expand Up @@ -135,6 +135,12 @@ default NDArray stopGradient() {
throw new UnsupportedOperationException(UNSUPPORTED_MSG);
}

/** {@inheritDoc} */
@Override
default String[] toStringArray() {
throw new UnsupportedOperationException(UNSUPPORTED_MSG);
}

/** {@inheritDoc} */
@Override
default ByteBuffer toByteBuffer() {
Expand Down
61 changes: 31 additions & 30 deletions api/src/main/java/ai/djl/ndarray/NDList.java
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,10 @@
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

/**
* An {@code NDList} represents a sequence of {@link NDArray}s with names.
Expand All @@ -34,7 +32,7 @@
*
* @see NDArray
*/
public class NDList extends ArrayList<NDArray> implements AutoCloseable {
public class NDList extends ArrayList<NDArray> implements NDResource {

private static final long serialVersionUID = 1L;

Expand Down Expand Up @@ -77,7 +75,18 @@ public NDList(Collection<NDArray> other) {
* @return {@code NDList}
*/
public static NDList decode(NDManager manager, byte[] byteArray) {
try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(byteArray))) {
return decode(manager, new ByteArrayInputStream(byteArray));
}

/**
* Decodes NDList from {@link InputStream}.
*
* @param manager manager assigned to {@link NDArray}
* @param is input stream contains the ndlist information
* @return {@code NDList}
*/
public static NDList decode(NDManager manager, InputStream is) {
try (DataInputStream dis = new DataInputStream(is)) {
int size = dis.readInt();
if (size < 0) {
throw new IllegalArgumentException("Invalid NDList size: " + size);
Expand Down Expand Up @@ -200,36 +209,28 @@ public NDList toDevice(Device device, boolean copy) {
return newNDList;
}

/**
* Attaches each ndarray in this list to the specified manager.
*
* @param manager the manager to attach the lists to
* @return a list of {@code NDManager} with which original NDArray are attached
* @see NDArray#attach(NDManager)
*/
public List<NDManager> attach(NDManager manager) {
return stream().map(array -> array.attach(manager)).collect(Collectors.toList());
/** {@inheritDoc} */
@Override
public NDManager getManager() {
return head().getManager();
}

/**
* Attaches each ndarray in this list to the specified manager.
*
* @param managers the list of managers to attach
* @return a list of {@code NDManager} with which original NDArray are attached
*/
public List<NDManager> attach(List<NDManager> managers) {
return IntStream.range(0, size())
.mapToObj(i -> get(i).attach(managers.get(i)))
.collect(Collectors.toList());
/** {@inheritDoc} */
@Override
public void attach(NDManager manager) {
stream().forEach(array -> array.attach(manager));
}

/**
* Detaches each ndarray in this list from their current managers.
*
* @see NDArray#detach()
*/
/** {@inheritDoc} */
@Override
public void tempAttach(NDManager manager) {
stream().forEach(array -> array.tempAttach(manager));
}

/** {@inheritDoc} */
@Override
public void detach() {
forEach(NDArray::detach);
stream().forEach(NDResource::detach);
}

/**
Expand Down
Loading