
Commit 2372083

Merge pull request #8 from truefoundry/fix-deploy-model
fix: improve deploy-ml-model flow
2 parents (3c91a20 + 37b4f5d) · commit 2372083

3 files changed: +35 −14 lines


deploy-ml-model/app.py (+4 −2)

@@ -5,7 +5,8 @@
 
 model = joblib.load("iris_classifier.joblib")
 
-app = FastAPI()
+app = FastAPI(docs_url="/", root_path=os.getenv("TFY_SERVICE_ROOT_PATH", "/"))
+
 
 @app.post("/predict")
 def predict(
@@ -18,4 +19,5 @@ def predict(
         petal_width=petal_width,
     )
     prediction = int(model.predict(pd.DataFrame([data]))[0])
-    return {"prediction": prediction}
+    return {"prediction": prediction}
+
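For context, a minimal sketch of what the top of the updated app.py is expected to look like; the import lines are assumed here, since they sit above the hunk and are not shown in this diff:

import os

import joblib
import pandas as pd
from fastapi import FastAPI

# Load the trained iris classifier once at startup.
model = joblib.load("iris_classifier.joblib")

# docs_url="/" serves the interactive API docs at the service root, and
# root_path is read from TFY_SERVICE_ROOT_PATH so the app generates correct
# URLs when it runs behind a path-based proxy.
app = FastAPI(docs_url="/", root_path=os.getenv("TFY_SERVICE_ROOT_PATH", "/"))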

deploy-ml-model/deploy.py (+29 −10)

@@ -1,23 +1,42 @@
+import argparse
 import logging
-from servicefoundry import Build, PythonBuild, Service, Resources
+from servicefoundry import Build, PythonBuild, Service, Resources, Port
 
 logging.basicConfig(level=logging.INFO)
 
+parser = argparse.ArgumentParser()
+parser.add_argument("--name", required=True, type=str, help="Name of the application.")
+parser.add_argument(
+    "--workspace_fqn",
+    required=True,
+    type=str,
+    help="FQN of the workspace where application will be deployed.",
+)
+parser.add_argument(
+    "--host",
+    required=True,
+    type=str,
+    help="Host where the application will be available for access. Ex:- my-app.my-org.com",
+)
+args = parser.parse_args()
+
 image = Build(
     build_spec=PythonBuild(
         command="uvicorn app:app --port 8000 --host 0.0.0.0",
-        requirements_path="requirements.txt"
+        requirements_path="requirements.txt",
     )
 )
 
 service = Service(
-    name="ml-deploy",
+    name=args.name,
     image=image,
-    ports=[{"port": 8000}],
-    resources=Resources(memory_limit=1500, memory_request=1000),
-    env={
-        "UVICORN_WEB_CONCURRENCY": "1",
-        "ENVIRONMENT": "dev"
-    }
+    ports=[Port(port=8000, host=args.host)],
+    resources=Resources(
+        cpu_request=0.1,
+        cpu_limit=0.1,
+        memory_request=500,
+        memory_limit=500,
+    ),
+    env={"UVICORN_WEB_CONCURRENCY": "1", "ENVIRONMENT": "dev"},
 )
-service.deploy(workspace_fqn="YOUR_WORKSPACE_FQN")
+service.deploy(workspace_fqn=args.workspace_fqn)
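With these changes deploy.py no longer hard-codes the service name, workspace, or host; all three are read from the command line. A typical invocation would look something like this (the values below are illustrative placeholders, not part of this commit):

python deploy.py \
  --name ml-deploy \
  --workspace_fqn "YOUR_WORKSPACE_FQN" \
  --host my-app.my-org.com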

deploy-ml-model/requirements.txt (+2 −2)

@@ -1,5 +1,5 @@
 fastapi==0.81.0
 uvicorn==0.18.3
 scikit-learn==1.0.2
-joblib
-pandas
+joblib==1.3.2
+pandas==2.1.0
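Pinning joblib and pandas keeps the image build reproducible, since the PythonBuild step installs from this file via the requirements_path set in deploy.py. For local testing, the same environment can be recreated with, for example:

pip install -r deploy-ml-model/requirements.txt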
