diff --git a/jetstream/tools/proxy_dev/base.Dockerfile b/jetstream/tools/proxy_dev/base.Dockerfile
index 5e4cd2e4..9162bcf0 100644
--- a/jetstream/tools/proxy_dev/base.Dockerfile
+++ b/jetstream/tools/proxy_dev/base.Dockerfile
@@ -22,9 +22,7 @@ RUN pip install setuptools==58 fastapi==0.103.2 uvicorn
 
 RUN pip install ./JetStream
 
-COPY inference_mlperf4.1 ./inference_mlperf4.1
 RUN apt -y update && apt-get -y install python3-dev && apt-get -y install build-essential
-RUN pip install ./inference_mlperf4.1/loadgen
 RUN pip install \
     transformers==4.31.0 \
     nltk==3.8.1 \
diff --git a/jetstream/tools/proxy_dev/dev.Dockerfile b/jetstream/tools/proxy_dev/dev.Dockerfile
index be7a36fc..25bf382e 100644
--- a/jetstream/tools/proxy_dev/dev.Dockerfile
+++ b/jetstream/tools/proxy_dev/dev.Dockerfile
@@ -11,7 +11,6 @@ ENV JAX_BACKEND_TARGET=grpc://localhost:38681
 # Copy all files from local workspace into docker container
 COPY JetStream ./JetStream
 COPY maxtext ./maxtext
-COPY inference_mlperf4.1 ./inference_mlperf4.1
 RUN pip install ./JetStream
 RUN pip install -r ./maxtext/requirements.txt
 
diff --git a/jetstream/tools/requester.py b/jetstream/tools/requester.py
index 30d7ac40..7ac0d55a 100644
--- a/jetstream/tools/requester.py
+++ b/jetstream/tools/requester.py
@@ -26,7 +26,7 @@
 
 _SERVER = flags.DEFINE_string("server", "0.0.0.0", "server address")
 _PORT = flags.DEFINE_string("port", "9000", "port to ping")
-_TEXT = flags.DEFINE_string("text", "Today is a good day", "The message")
+_TEXT = flags.DEFINE_string("text", "My dog is cute", "The message")
 _MAX_TOKENS = flags.DEFINE_integer(
     "max_tokens", 3, "Maximum number of output/decode tokens of a sequence"
 )