Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Modelmanager workerstatus async #510

Merged
merged 7 commits into from
Jan 12, 2021
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpUtil;
import io.netty.handler.codec.http.QueryStringDecoder;
import io.netty.handler.codec.http.multipart.DefaultHttpDataFactory;
Expand Down Expand Up @@ -64,7 +65,16 @@ protected void handleRequest(
throws ModelException {
switch (segments[1]) {
case "ping":
ModelManager.getInstance().workerStatus(ctx);
// TODO: Check if it's OK to send other 2xx errors to ALB for "Partial Healthy"
// and "Unhealthy"
ModelManager.getInstance()
.workerStatus(ctx)
.thenAccept(
response ->
NettyUtils.sendJsonResponse(
ctx,
new StatusResponse(response),
HttpResponseStatus.OK));
break;
case "invocations":
handleInvocations(ctx, req, decoder);
Expand Down
17 changes: 6 additions & 11 deletions serving/src/main/java/ai/djl/serving/wlm/ModelManager.java
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,8 @@
import ai.djl.repository.zoo.ZooModel;
import ai.djl.serving.http.BadRequestException;
import ai.djl.serving.http.DescribeModelResponse;
import ai.djl.serving.http.StatusResponse;
import ai.djl.serving.util.ConfigManager;
import ai.djl.serving.util.NettyUtils;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.HttpResponseStatus;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
Expand Down Expand Up @@ -248,12 +245,14 @@ public DescribeModelResponse describeModel(String modelName) throws ModelNotFoun
* Sends model server health status to client.
*
* @param ctx the client connection channel context
 * @return a CompletableFuture that completes with the health-status result after async execution
*/
public void workerStatus(final ChannelHandlerContext ctx) {
Runnable r =
public CompletableFuture<String> workerStatus(final ChannelHandlerContext ctx) {
return CompletableFuture.supplyAsync(
() -> {
String response = "Healthy";
int numWorking = 0;

int numScaled = 0;
for (Map.Entry<String, ModelInfo> m : models.entrySet()) {
numScaled += m.getValue().getMinWorkers();
Expand All @@ -266,11 +265,7 @@ public void workerStatus(final ChannelHandlerContext ctx) {
response = "Unhealthy";
}

// TODO: Check if it's OK to send other 2xx errors to ALB for "Partial Healthy"
// and "Unhealthy"
NettyUtils.sendJsonResponse(
ctx, new StatusResponse(response), HttpResponseStatus.OK);
};
wlm.scheduleAsync(r);
return response;
});
}
}