Merge branch 'master' into issue_561
maaquib authored Dec 11, 2020
2 parents 3bda56e + 355a8bb commit 86a05ea
Showing 8 changed files with 30 additions and 25 deletions.
2 changes: 2 additions & 0 deletions docs/torchserve_on_win_native.md
@@ -45,6 +45,8 @@ NOTE At present, wheels for windows are not available on PyPi. However following
- Install [Microsoft Visual C++ Redistributable for Visual Studio 2015, 2017 and 2019](https://support.microsoft.com/en-in/help/2977003/the-latest-supported-visual-c-downloads)

NOTE: ensure that you have restarted the system after installing the above Visual C++ components
- Ensure that `nvidia-smi.exe` is available on the `Path` environment variable. It is usually installed under `<your_install_drive>\Program Files\NVIDIA Corporation\NVSMI`,
e.g. `C:\Program Files\NVIDIA Corporation\NVSMI`; add this path to the `Path` environment variable
- Start 'Anaconda Powershell Prompt' (APP) as an Admin user, i.e. right-click on APP and select "Run as administrator", then run the following commands
- `git clone https://github.com/pytorch/serve.git`
- `pip install click`
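The Windows setup above depends on `nvidia-smi.exe` being reachable through `Path`. A minimal sketch for verifying that before running the install scripts, assuming only the Python standard library and the default NVIDIA driver install location mentioned above:

```python
import platform
import shutil

# Pick the platform-specific binary name, mirroring the table added to ts_scripts/utils.py.
binary = "nvidia-smi.exe" if platform.system() == "Windows" else "nvidia-smi"

location = shutil.which(binary)
if location:
    print(f"Found {binary} at {location}")
else:
    # Default driver install location on Windows (an assumption; adjust for your drive).
    print(f"{binary} not found on Path; on Windows, check "
          r"C:\Program Files\NVIDIA Corporation\NVSMI")
```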
2 changes: 1 addition & 1 deletion frontend/tools/gradle/formatter.gradle
@@ -45,7 +45,7 @@ class FormatterPlugin implements Plugin<Project> {
Project rootProject = project.getRootProject()
for (item in project.sourceSets) {
for (File file : item.getAllSource()) {
-if (!file.getName().endsWith(".java") || file.getAbsolutePath().contains("/grpc/")) {
+if (!file.getName().endsWith(".java") || file.getAbsolutePath().contains("/grpc/") || file.getAbsolutePath().contains("\\grpc\\")) {
continue
}

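The Groovy check above matches the literal separators "/grpc/" and "\\grpc\\" so that generated gRPC sources are skipped on both Linux and Windows. A separator-agnostic way to express the same test, sketched here in Python rather than the project's Groovy (the file paths in the usage lines are illustrative, not taken from the repository):

```python
from pathlib import PureWindowsPath

def should_skip(path: str) -> bool:
    """Skip non-Java files and anything under a 'grpc' directory."""
    # PureWindowsPath accepts both '/' and '\\' as separators, so the same
    # check covers Linux-style and Windows-style paths.
    p = PureWindowsPath(path)
    return p.suffix != ".java" or "grpc" in p.parts

print(should_skip("frontend/server/src/gen/grpc/Foo.java"))  # True: generated gRPC source
print(should_skip(r"frontend\server\src\main\Bar.java"))     # False: regular Java source
```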
12 changes: 6 additions & 6 deletions test/postman/https_test_collection.json
@@ -200,7 +200,7 @@
"method": "PUT",
"header": [],
"url": {
"raw": "https://{{hostname}}:{{sec-mgmt-port}}/models/squeezenet1_1?min_worker=5&max_worker=5&synchronous=true",
"raw": "https://{{hostname}}:{{sec-mgmt-port}}/models/squeezenet1_1?min_worker=1&max_worker=1&synchronous=true",
"protocol": "https",
"host": [
"{{hostname}}"
@@ -213,11 +213,11 @@
"query": [
{
"key": "min_worker",
"value": "5"
"value": "1"
},
{
"key": "max_worker",
"value": "5"
"value": "1"
},
{
"key": "synchronous",
@@ -248,7 +248,7 @@
"method": "PUT",
"header": [],
"url": {
"raw": "https://{{hostname}}:{{sec-mgmt-port}}/models/squeezenet1_1?min_worker=6&max_worker=6&synchronous=false",
"raw": "https://{{hostname}}:{{sec-mgmt-port}}/models/squeezenet1_1?min_worker=1&max_worker=1&synchronous=false",
"protocol": "https",
"host": [
"{{hostname}}"
@@ -261,11 +261,11 @@
"query": [
{
"key": "min_worker",
"value": "6"
"value": "1"
},
{
"key": "max_worker",
"value": "6"
"value": "1"
},
{
"key": "synchronous",
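These Postman requests exercise TorchServe's management API over HTTPS. A minimal sketch of the same scale-workers call in Python, assuming a local TorchServe with a self-signed certificate and a secure management port of 8444 (both assumptions; in the collection they come from the `{{hostname}}` and `{{sec-mgmt-port}}` environment variables):

```python
import requests

BASE = "https://localhost:8444"  # assumed endpoint; substitute your host and port

resp = requests.put(
    f"{BASE}/models/squeezenet1_1",
    params={"min_worker": 1, "max_worker": 1, "synchronous": "true"},
    verify=False,  # test setup uses a self-signed certificate
)
print(resp.status_code, resp.text)  # the synchronous scale call is expected to return 200
```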
24 changes: 12 additions & 12 deletions test/postman/management_data.json
@@ -67,7 +67,7 @@
"type": "scale",
"test": "Scale Invalid Mnist Handler Model with Min Workers - Synchronous",
"METHOD": "PUT",
"path": "models/mnist?min_worker=4&synchronous=true",
"path": "models/mnist?min_worker=1&synchronous=true",
"status_code": 500,
"grpc_status_code": 13
},
@@ -125,36 +125,36 @@
"type": "scale",
"test": "Scale Min Workers - Asynchronous",
"METHOD": "PUT",
"path": "models/squeezenet1_1?min_worker=3",
"path": "models/squeezenet1_1?min_worker=1",
"status_code": 202
},
{
"type": "scale",
"test": "Scale Min Workers - Synchronous",
"METHOD": "PUT",
"path": "models/squeezenet1_1?min_worker=4&synchronous=true",
"path": "models/squeezenet1_1?min_worker=1&synchronous=true",
"status_code": 200
},
{
"type": "scale",
"test": "Scale Min Workers for a Version",
"METHOD": "PUT",
"path": "models/squeezenet1_1/1.0?min_worker=5&synchronous=true",
"path": "models/squeezenet1_1/1.0?min_worker=1&synchronous=true",
"status_code": 200
},
{
"type": "scale",
"test": "Scale Min Workers for a Valid model but Invalid Version",
"METHOD": "PUT",
"path": "models/squeezenet1_1/0.0?min_worker=5&synchronous=true",
"path": "models/squeezenet1_1/0.0?min_worker=1&synchronous=true",
"status_code": 404,
"grpc_status_code": 5
},
{
"type": "scale",
"test": "Scale Min Workers with GPU",
"METHOD": "PUT",
"path": "models/squeezenet1_1?min_worker=6&number_gpu=1",
"path": "models/squeezenet1_1?min_worker=1&number_gpu=1",
"status_code": 202
},
{
@@ -339,13 +339,13 @@
"type": "scale",
"test": "Scale up Workers - Synchronous",
"METHOD": "PUT",
"path": "models/resnet-18?min_worker=5&max_worker=5&synchronous=true"
"path": "models/resnet-18?min_worker=1&max_worker=1&synchronous=true"
},
{
"type": "scale",
"test": "Scale up Workers - Asynchronous",
"METHOD": "PUT",
"path": "models/resnet-18?min_worker=6&max_worker=6&synchronous=false",
"path": "models/resnet-18?min_worker=1&max_worker=1&synchronous=false",
"status_code": 202
},
{
@@ -404,7 +404,7 @@
"type": "scale",
"test": "Scale Workers - Valid \"min_worker\" value, Invalid \"synchronous\"",
"METHOD": "PUT",
"path": "models/resnet-18?min_worker=2&synchronous=Nan",
"path": "models/resnet-18?min_worker=1&synchronous=Nan",
"status_code": 202
},
{
@@ -425,23 +425,23 @@
"type": "scale",
"test": "Update Worker for an invalid/non-existent model",
"METHOD": "PUT",
"path": "models/resnet181?min_worker=3",
"path": "models/resnet181?min_worker=1",
"status_code": 404,
"grpc_status_code": 5
},
{
"type": "scale",
"test": "Update Worker with Invalid Worker Count",
"METHOD": "PUT",
"path": "models/resnet-18?min_worker=10&max_worker=9",
"path": "models/resnet-18?min_worker=2&max_worker=1",
"status_code": 400,
"grpc_status_code": 3
},
{
"type": "scale",
"test": "Update Worker with Positive Worker Count",
"METHOD": "PUT",
"path": "models/resnet-18?min_worker=4",
"path": "models/resnet-18?min_worker=1",
"status_code": 202
},
{
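Each entry in management_data.json pairs a management-API request with the status code (and gRPC status code) the test expects. A sketch of how one such entry could be replayed against a running server, assuming the default management port 8081; the helper below is illustrative, not the repository's test runner:

```python
import requests

# One entry from management_data.json (illustrative copy of the case above).
case = {
    "type": "scale",
    "test": "Scale Min Workers - Synchronous",
    "METHOD": "PUT",
    "path": "models/squeezenet1_1?min_worker=1&synchronous=true",
    "status_code": 200,
}

def run_case(case, base="http://localhost:8081"):
    """Replay a single data-driven management-API case and check its status code."""
    resp = requests.request(case["METHOD"], f"{base}/{case['path']}")
    expected = case.get("status_code", 200)
    assert resp.status_code == expected, (resp.status_code, expected)
    return resp

# run_case(case)  # requires a running TorchServe instance with squeezenet1_1 registered
```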
6 changes: 2 additions & 4 deletions ts_scripts/install_dependencies.py
@@ -8,13 +8,11 @@
REPO_ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")
sys.path.append(REPO_ROOT)

-from ts_scripts.utils import check_python_version, is_gpu_instance
+from ts_scripts.utils import check_python_version


class Common():
def __init__(self):
-# Assumption is nvidia-smi is installed on systems with gpu
-self.is_gpu_instance = is_gpu_instance()
self.torch_stable_url = "https://download.pytorch.org/whl/torch_stable.html"
self.sudo_cmd = 'sudo '

@@ -72,7 +70,7 @@ def __init__(self):
self.sudo_cmd = ''

def install_torch_packages(self, cuda_version):
-if self.is_gpu_instance and cuda_version:
+if cuda_version and cuda_version != "latest":
os.system(f"pip install -U -r requirements/torch_{cuda_version}.txt -f {self.torch_stable_url}")
else:
os.system(f"pip install -U -r requirements/torch.txt -f {self.torch_stable_url}")
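With this change, the GPU requirements file is chosen from the cuda_version argument alone rather than from an nvidia-smi probe. A sketch of the pip command the method builds; the cu102 file name is an assumption based on the f-string pattern, so check the requirements/ directory for the versions actually shipped:

```python
torch_stable_url = "https://download.pytorch.org/whl/torch_stable.html"

def torch_install_cmd(cuda_version=None):
    """Mirror install_torch_packages' branching and return the pip command it would run."""
    if cuda_version and cuda_version != "latest":
        return f"pip install -U -r requirements/torch_{cuda_version}.txt -f {torch_stable_url}"
    return f"pip install -U -r requirements/torch.txt -f {torch_stable_url}"

print(torch_install_cmd("cu102"))  # hypothetical CUDA 10.2 requirements file
print(torch_install_cmd())         # CPU / default wheels
```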
2 changes: 1 addition & 1 deletion ts_scripts/torchserve_grpc_client.py
@@ -35,7 +35,7 @@ def infer(stub, model_name, model_input):
def register(stub, model_name):
params = {
'url': "https://torchserve.s3.amazonaws.com/mar_files/{}.mar".format(model_name),
-'initial_workers': 4,
+'initial_workers': 1,
'synchronous': True,
'model_name': model_name
}
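The gRPC client now registers each model with a single initial worker. For comparison, the equivalent call through the REST management API, sketched under the assumptions of the default management port 8081 and the same S3 .mar URL pattern used above:

```python
import requests

def register_model(model_name, base="http://localhost:8081"):
    """Register a model archive and wait for one worker to come up."""
    params = {
        "url": f"https://torchserve.s3.amazonaws.com/mar_files/{model_name}.mar",
        "initial_workers": 1,
        "synchronous": "true",
        "model_name": model_name,
    }
    resp = requests.post(f"{base}/models", params=params)
    resp.raise_for_status()
    return resp.json()

# register_model("squeezenet1_1")  # requires a running TorchServe instance
```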
1 change: 1 addition & 0 deletions ts_scripts/tsutils.py
@@ -1,5 +1,6 @@
import os
+import platform
import sys
import time
import requests

6 changes: 5 additions & 1 deletion ts_scripts/utils.py
@@ -1,9 +1,13 @@
import os
+import platform
import sys

+nvidia_smi_cmd = {'Windows': 'nvidia-smi.exe',
+                  'Darwin': 'nvidia-smi',
+                  'Linux': 'nvidia-smi'}

def is_gpu_instance():
return True if os.system("nvidia-smi") == 0 else False
return True if os.system(nvidia_smi_cmd[platform.system()]) == 0 else False


def is_conda_env():
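is_gpu_instance now shells out to the platform-specific nvidia-smi binary, so the check also works on native Windows. One side effect of os.system is that nvidia-smi's full output is printed during dependency installation; a quieter variant, offered here only as a sketch and not as the repository's code, suppresses it:

```python
import platform
import subprocess

NVIDIA_SMI = {'Windows': 'nvidia-smi.exe', 'Darwin': 'nvidia-smi', 'Linux': 'nvidia-smi'}

def is_gpu_instance_quiet():
    """Return True if nvidia-smi runs successfully, without echoing its output."""
    cmd = NVIDIA_SMI.get(platform.system(), 'nvidia-smi')
    try:
        result = subprocess.run([cmd], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        return result.returncode == 0
    except FileNotFoundError:
        # Binary not on PATH at all, so treat the machine as CPU-only.
        return False
```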
