diff --git a/Supporting-material/resize.sh b/Supporting-material/resize.sh
old mode 100644
new mode 100755
diff --git a/project-ml-microservice-kubernetes/.circleci/config.yml b/project-ml-microservice-kubernetes/.circleci/config.yml
new file mode 100644
index 000000000..e69de29bb
diff --git a/project-ml-microservice-kubernetes/Dockerfile b/project-ml-microservice-kubernetes/Dockerfile
index 8f70001b5..a6f575344 100644
--- a/project-ml-microservice-kubernetes/Dockerfile
+++ b/project-ml-microservice-kubernetes/Dockerfile
@@ -2,17 +2,20 @@ FROM python:3.7.3-stretch
## Step 1:
# Create a working directory
-
+WORKDIR /app
## Step 2:
# Copy source code to working directory
-
+COPY app.py /app/
+COPY model_data /app/model_data
+COPY requirements.txt /app/
## Step 3:
# Install packages from requirements.txt
# hadolint ignore=DL3013
-
+RUN pip install --upgrade pip &&\
+ pip install --trusted-host pypi.python.org -r requirements.txt
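+# (--trusted-host marks pypi.python.org, PyPI's legacy hostname, as trusted even without a valid TLS certificate)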
## Step 4:
# Expose port 80
-
+EXPOSE 80
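+# (EXPOSE only documents the port; it is actually published at run time, e.g. docker run -p 8000:80)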
## Step 5:
# Run app.py at container launch
-
+CMD ["python", "app.py"]
diff --git a/project-ml-microservice-kubernetes/app.py b/project-ml-microservice-kubernetes/app.py
index 7d583b72e..f33ed7e78 100644
--- a/project-ml-microservice-kubernetes/app.py
+++ b/project-ml-microservice-kubernetes/app.py
@@ -20,7 +20,7 @@ def scale(payload):
@app.route("/")
def home():
- html = f"
Sklearn Prediction Home
"
+ html = "Sklearn Prediction Home
"
return html.format(format)
@app.route("/predict", methods=['POST'])
@@ -61,11 +61,13 @@ def predict():
    # scale the input
    scaled_payload = scale(inference_payload)
    # get an output prediction from the pretrained model, clf
+    # load pretrained model as clf
+    clf = joblib.load("./model_data/boston_housing_prediction.joblib")
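+    # note: loading here re-reads the model file on every request; a
+    # module-level load would avoid the repeated disk read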
    prediction = list(clf.predict(scaled_payload))
    # TO DO: Log the output prediction value
    return jsonify({'prediction': prediction})

if __name__ == "__main__":
-    # load pretrained model as clf
-    clf = joblib.load("./model_data/boston_housing_prediction.joblib")
    app.run(host='0.0.0.0', port=80, debug=True)  # specify port=80
diff --git a/project-ml-microservice-kubernetes/minikube-linux-amd64 b/project-ml-microservice-kubernetes/minikube-linux-amd64
new file mode 100644
index 000000000..8d8ae9ef9
Binary files /dev/null and b/project-ml-microservice-kubernetes/minikube-linux-amd64 differ
diff --git a/project-ml-microservice-kubernetes/output_txt_files/docker_out.txt b/project-ml-microservice-kubernetes/output_txt_files/docker_out.txt
index 79b164d89..b6ec660af 100644
--- a/project-ml-microservice-kubernetes/output_txt_files/docker_out.txt
+++ b/project-ml-microservice-kubernetes/output_txt_files/docker_out.txt
@@ -1 +1,22 @@
-
\ No newline at end of file
+
+REPOSITORY TAG IMAGE ID CREATED SIZE
+api latest a05ce67946c3 7 seconds ago 1.26GB
+<none>              <none>              e15b07d66fc6        14 minutes ago      1.26GB
+ * Serving Flask app "app" (lazy loading)
+ * Environment: production
+ WARNING: Do not use the development server in a production environment.
+ Use a production WSGI server instead.
+ * Debug mode: on
+ * Running on http://0.0.0.0:80/ (Press CTRL+C to quit)
+ * Restarting with stat
+ * Debugger is active!
+ * Debugger PIN: 194-414-639
+[2023-11-19 11:38:58,322] INFO in app: JSON payload:
+{'CHAS': {'0': 0}, 'RM': {'0': 6.575}, 'TAX': {'0': 296.0}, 'PTRATIO': {'0': 15.3}, 'B': {'0': 396.9}, 'LSTAT': {'0': 4.98}}
+[2023-11-19 11:38:58,343] INFO in app: Inference payload DataFrame:
+ CHAS RM TAX PTRATIO B LSTAT
+0 0 6.575 296.0 15.3 396.9 4.98
+[2023-11-19 11:38:58,354] INFO in app: Scaling Payload:
+ CHAS RM TAX PTRATIO B LSTAT
+0 0 6.575 296.0 15.3 396.9 4.98
+172.17.0.1 - - [19/Nov/2023 11:38:58] "POST /predict HTTP/1.1" 200 -
\ No newline at end of file
diff --git a/project-ml-microservice-kubernetes/output_txt_files/kubernetes_out.txt b/project-ml-microservice-kubernetes/output_txt_files/kubernetes_out.txt
index a79241a75..654238c89 100644
--- a/project-ml-microservice-kubernetes/output_txt_files/kubernetes_out.txt
+++ b/project-ml-microservice-kubernetes/output_txt_files/kubernetes_out.txt
@@ -1 +1,33 @@
-
\ No newline at end of file
+
+pod/api-microservices created
+NAMESPACE NAME READY STATUS RESTARTS AGE
+default api-microservices 1/1 Running 0 12s
+kube-system coredns-66bff467f8-cbzpm 1/1 Running 0 19h
+kube-system coredns-66bff467f8-nppkt 1/1 Running 0 19h
+kube-system etcd-khaled-virtual-machine 1/1 Running 0 12m
+kube-system kube-apiserver-khaled-virtual-machine 1/1 Running 0 19h
+kube-system kube-controller-manager-khaled-virtual-machine 1/1 Running 1 19h
+kube-system kube-proxy-c7lr2 1/1 Running 0 19h
+kube-system kube-scheduler-khaled-virtual-machine 1/1 Running 1 19h
+kube-system storage-provisioner 1/1 Running 1 19h
+Forwarding from 127.0.0.1:8000 -> 80
+Forwarding from [::1]:8000 -> 80
+
+ * Serving Flask app "app" (lazy loading)
+ * Environment: production
+ WARNING: Do not use the development server in a production environment.
+ Use a production WSGI server instead.
+ * Debug mode: on
+ * Running on http://0.0.0.0:80/ (Press CTRL+C to quit)
+ * Restarting with stat
+ * Debugger is active!
+ * Debugger PIN: 161-774-587
+[2023-11-19 19:38:23,813] INFO in app: JSON payload:
+{'CHAS': {'0': 0}, 'RM': {'0': 6.575}, 'TAX': {'0': 296.0}, 'PTRATIO': {'0': 15.3}, 'B': {'0': 396.9}, 'LSTAT': {'0': 4.98}}
+[2023-11-19 19:38:23,864] INFO in app: Inference payload DataFrame:
+ CHAS RM TAX PTRATIO B LSTAT
+0 0 6.575 296.0 15.3 396.9 4.98
+[2023-11-19 19:38:23,872] INFO in app: Scaling Payload:
+ CHAS RM TAX PTRATIO B LSTAT
+0 0 6.575 296.0 15.3 396.9 4.98
+172.17.0.1 - - [19/Nov/2023 19:38:23] "POST /predict HTTP/1.1" 200 -
\ No newline at end of file
diff --git a/project-ml-microservice-kubernetes/run_docker.sh b/project-ml-microservice-kubernetes/run_docker.sh
index 65c3f832e..98b362c8d 100755
--- a/project-ml-microservice-kubernetes/run_docker.sh
+++ b/project-ml-microservice-kubernetes/run_docker.sh
@@ -4,9 +4,11 @@
# Step 1:
# Build image and add a descriptive tag
-
+docker build --tag=api .
# Step 2:
# List docker images
+docker image ls
# Step 3:
# Run flask app
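+# publish the container port: host port 8000 maps to container port 80 (the port EXPOSEd in the Dockerfile)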
+docker run -p 8000:80 api
\ No newline at end of file
diff --git a/project-ml-microservice-kubernetes/run_kubernetes.sh b/project-ml-microservice-kubernetes/run_kubernetes.sh
index b041b1082..9084d8b71 100755
--- a/project-ml-microservice-kubernetes/run_kubernetes.sh
+++ b/project-ml-microservice-kubernetes/run_kubernetes.sh
@@ -5,14 +5,18 @@
# Step 1:
# This is your Docker ID/path
# dockerpath=<>
+dockerpath=longtony/api-microservices:v1.0.0
# Step 2
# Run the Docker Hub container with kubernetes
-
+minikube start
+kubectl run api-microservices --image=$dockerpath
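+# (on recent kubectl versions, kubectl run creates a single pod named
+# api-microservices, which is the pod the port-forward below targets)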
# Step 3:
# List kubernetes pods
+kubectl get pods --all-namespaces
# Step 4:
# Forward the container port to a host
-
+kubectl port-forward pod/api-microservices 8000:80
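+# while the port-forward runs, the app answers on http://127.0.0.1:8000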
diff --git a/project-ml-microservice-kubernetes/setup_env.sh b/project-ml-microservice-kubernetes/setup_env.sh
new file mode 100755
index 000000000..2e914b746
--- /dev/null
+++ b/project-ml-microservice-kubernetes/setup_env.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+wget https://github.com/hadolint/hadolint/releases/download/v2.12.0/hadolint-Linux-x86_64
+sudo mv hadolint-Linux-x86_64 /usr/local/bin/hadolint
+sudo chmod +x /usr/local/bin/hadolint
+
+
+curl -LO https://storage.googleapis.com/minikube/releases/latest/minikube-linux-amd64
+sudo install minikube-linux-amd64 /usr/local/bin/minikube
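+
+# optional sanity check that both tools landed on the PATH
+hadolint --version
+minikube version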
diff --git a/project-ml-microservice-kubernetes/upload_docker.sh b/project-ml-microservice-kubernetes/upload_docker.sh
index 19baeafe4..25a4ed07b 100755
--- a/project-ml-microservice-kubernetes/upload_docker.sh
+++ b/project-ml-microservice-kubernetes/upload_docker.sh
@@ -6,10 +6,13 @@
# Step 1:
# Create dockerpath
# dockerpath=
-
+dockerpath=longtony/api-microservices:v1.0.0
+docker tag api $dockerpath
# Step 2:
# Authenticate & tag
echo "Docker ID and Image: $dockerpath"
+docker login
# Step 3:
# Push image to a docker repository
+docker push $dockerpath
\ No newline at end of file