From b404321643c7a045461923bd6c90232e06f05cf6 Mon Sep 17 00:00:00 2001
From: "cc02503@surrey.ac.uk" <cc02503@surrey.ac.uk>
Date: Sun, 14 Apr 2024 04:49:34 +0100
Subject: [PATCH] Added Dockerfiles to the user and product microservices so
 their images can be built. Created a docker-compose file that pulls the
 Kafka and Zookeeper images and runs both microservices on the same Docker
 network. Updated the config files to point the Kafka host at the kafka
 service instead of localhost.

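Build-and-run sketch (assumes Docker Compose v2 is available as "docker
compose"; the image tags below are the ones referenced in the compose file):

    docker build -t user-microservice:1.0 User_MicroService_Group3/
    docker build -t product-microservice:1.0 Product_MicroService_Group3/
    docker compose -f Kafka/docker-compose.yml up -d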
---
 Kafka/docker-compose.yml                      |  44 ++++++++++++++++++
 Product_MicroService_Group3/Dockerfile        |  23 +++++++++
 Product_MicroService_Group3/app/config.py     |   2 +-
 .../addReviewController.cpython-311.pyc       | Bin 3139 -> 3139 bytes
 .../getReviewsController.cpython-311.pyc      | Bin 0 -> 2399 bytes
 .../app/controllers/getReviewsController.py   |   3 +-
 .../app/publishers/kafkaPublishers.py         |   5 +-
 Product_MicroService_Group3/requirements.txt  |  15 +++++-
 User_MicroService_Group3/Dockerfile           |  23 +++++++++
 User_MicroService_Group3/app/config.py        |   4 +-
 .../fetchUsernameController.cpython-311.pyc   | Bin 1293 -> 1295 bytes
 .../showReviewsController.cpython-311.pyc     | Bin 2018 -> 2018 bytes
 .../controllers/fetchUsernameController.py    |   2 +
 .../app/controllers/getUsersController.py     |   5 +-
 .../app/controllers/showReviewsController.py  |   5 +-
 .../kafkaPublishers.cpython-311.pyc           | Bin 2334 -> 2334 bytes
 .../app/publishers/kafkaPublishers.py         |  11 +++--
 User_MicroService_Group3/requirements.txt     |  15 +++++-
 18 files changed, 140 insertions(+), 17 deletions(-)
 create mode 100644 Kafka/docker-compose.yml
 create mode 100644 Product_MicroService_Group3/Dockerfile
 create mode 100644 Product_MicroService_Group3/app/controllers/__pycache__/getReviewsController.cpython-311.pyc
 create mode 100644 User_MicroService_Group3/Dockerfile
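
Reviewer note (kept below the "---" separator so it is not part of the commit
message): a quick smoke test for the shared Docker network, assuming the
wurstmeister/kafka image keeps the Kafka CLI scripts on its PATH:

    docker exec kafka kafka-topics.sh --bootstrap-server kafka:9092 --list

Once the services have run their publishers, this should list the topics
referenced in this patch (e.g. product_updated_topic, profile_updated_topic).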

diff --git a/Kafka/docker-compose.yml b/Kafka/docker-compose.yml
new file mode 100644
index 00000000..ac67edd7
--- /dev/null
+++ b/Kafka/docker-compose.yml
@@ -0,0 +1,44 @@
+version: "3"
+
+services:
+  zookeeper:
+    image: wurstmeister/zookeeper
+    container_name: zookeeper
+    ports:
+      - "2182:2182"
+    networks:
+      - kafka_network
+
+  kafka:
+    image: wurstmeister/kafka
+    container_name: kafka
+    ports:
+      - "9092:9092"
+    environment:
+      KAFKA_ADVERTISED_HOST_NAME: kafka
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+    networks:
+      - kafka_network
+
+  user-microservice:
+    image: user-microservice:1.0
+    container_name: user-microservice
+    depends_on:
+      - kafka
+      - zookeeper
+    networks:
+      - kafka_network
+
+  product-microservice:
+    image: product-microservice:1.0
+    container_name: product-microservice
+    depends_on:
+      - kafka
+      - zookeeper
+    networks:
+      - kafka_network
+
+networks:
+  kafka_network:
+    driver: bridge
\ No newline at end of file
diff --git a/Product_MicroService_Group3/Dockerfile b/Product_MicroService_Group3/Dockerfile
new file mode 100644
index 00000000..e51666e8
--- /dev/null
+++ b/Product_MicroService_Group3/Dockerfile
@@ -0,0 +1,23 @@
+FROM python:3.11.4
+
+COPY requirements.txt /product_ms/
+
+COPY app /product_ms/
+
+WORKDIR /product_ms
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+
+RUN apt-get update && apt-get install -y \
+    unixodbc \
+    unixodbc-dev \
+    freetds-dev \
+    tdsodbc \
+    && rm -rf /var/lib/apt/lists/*
+
+# Set environment variables for ODBC configuration if needed
+ENV ODBCINI=/etc/odbc.ini
+ENV ODBCSYSINI=/etc
+
+CMD ["python", "index.py"]
\ No newline at end of file
diff --git a/Product_MicroService_Group3/app/config.py b/Product_MicroService_Group3/app/config.py
index 5ddee913..96b98372 100644
--- a/Product_MicroService_Group3/app/config.py
+++ b/Product_MicroService_Group3/app/config.py
@@ -6,4 +6,4 @@ SECRET_KEY = "Group3"
 
 PORT = 5001
 
-KAFKA_SERVER= "localhost:9092"
\ No newline at end of file
+KAFKA_SERVER= "kafka:9092"
\ No newline at end of file
diff --git a/Product_MicroService_Group3/app/controllers/__pycache__/addReviewController.cpython-311.pyc b/Product_MicroService_Group3/app/controllers/__pycache__/addReviewController.cpython-311.pyc
index 0e51d65e2ae6279d43717f3f1f61d3ea671e160c..a8ef2134af3385e3f89b7bc21c35f53302104a00 100644
GIT binary patch
delta 43
ycmX>saae+HIWI340}z<b6HIgF*~s^ng^_#nAC`I+M&r#joL)?f9Fvc8*8l+cKnuM9

delta 43
ycmX>saae+HIWI340}$|M{7>D>y^-%N3nSO&KP>euj7FPlIK7w{*(M+7t^oiO%nZW-

diff --git a/Product_MicroService_Group3/app/controllers/__pycache__/getReviewsController.cpython-311.pyc b/Product_MicroService_Group3/app/controllers/__pycache__/getReviewsController.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6c78e2bcbd74045dce7a609453092db0728bf0ef
GIT binary patch
literal 2399
zcma)7O>7fK6rQoY_WCyoguj$3+a##C362#Yg$N=+AXHQ&Q5}k~ELoe~ak9z&xU=ho
zSV@%w2UH=oH>_S7^+2iMfT~vQwe-*)*JxGHN|7Q}J#ce{kPxT7S^p#shxTc9X5M@A
z-psuB=G*V$@dyI4zUeAVaR~jzm?i}plb1ih<Q|fdj8&xJEY^HkpXSf{wLmtYaaj&C
zolgyFp=`*T`&C{GXTuoz5W#YQMDF_#dI(>gvQZM<j5WF4qs+;{dHg8UzFy{Uqx-Pz
zVQXnN4xYnu_)+9O2046n$|mIKZItbhV;Pi;zhJ^7=Y}t;B~qk{Zn>dF)6kW|svDx@
zvl1~aH^3xr$Rwtz7<v-B(RpI!DOpy?iV1A|W3jL#&QL=xNrbv#(0<}6%ln3lp41v{
z$=txqbY>Q;LK?9a4B3<#2lEC$^z3{2Ep+#g)jBppi><kg+pn~a)({=Fn#lmmMdrgJ
z*|+2cmG)WTrVPRQJsz+X-KE{*9}?zY!wSwIIUsY5G04VbXvz(hOhWUDOqmB3G%qYJ
zM<_L@8_-LtT0VZ$C<!_tvS1m)91&DwejXS_XS^uOf+*DOl0h0`Zu}503e|?(U_DHf
zXUu++=yD_Sc@6R*&Xc5%COqA)q~zyT^XynAySpH~`FUb2cb)M$QCpQ3a??7wM3rTd
zyJScujp&w{n*pt%i)!w=4hm{2)~a9>giMK2vYMMPO4KC64TCP_F38KGF2V697DQDg
z`aCh&O>U4mVU|b>x&gOS2zacgK7UP-sF5LbS&>No3N=c_lR2?i%t?lBQ3E`{v0CL}
zPPC+{;;I|jxfDGFcVRg+4d^z|t03w)cz3CitR@DW#K6Xt?NFkII=M-_72S7tyz=SO
zXulope-%OfsR#4bzEP)d)Q<K39qYA)lhxQMCw9t?o!ahx7g&2g%um;X=)loR!QOw&
zjvWK0>#*H($?mvp^Os-3mZ+Ri4M|iN4AUArn?5@NMcg%K7?x>Kv6zSOvbagDG6jnu
zd|o+kf)vV`d1(-nyiGE;Sk@b~Q^tD>c5_#;H&-J-nD(-6$B1<tMr(L;m;YUs{zrj!
z+jXB6-=*i;wHUQ&gKNG;R<|Ct@49DY2v)LE{_mAMtZY5my*_1>LH)?;ZhAs!0MU2W
z{cWcTule`d-2@z7_kZqZ_Vw8cUO~6~H_-}CMyA|OsbpG)1`oRRj!RY{f93`I2QbjG
z6pa`qtK4&~{`v%Xk(OYVBnkKey!F-h%E4Kqs7Pbw$c#uW#RAM6D~F~F1(R4~<wKLA
zqSk?cJ!YZtipP>sm#>2*KxlxeD61ZqlS^9Bbb~~f44Ke<o~(}4oW=HW!^P${ZqV>{
zal@ByNu<bjP6lWPGvNV7L|L>%w}TSNpfbQmLx73Ns!0H%2Vp{4RK2brCgz{eZJ@2h
z!7sHhH2Y}kr@)VqUp}m+C!F-ev-H)c>8pPLmvPdWYGT$&%x-*C^M$yf=RJMj41GQH
zfL4a8JtI!f$eoTYK5i#IsPcmjKWOuVz}{J?bU24kRQZ&{r))mOL~3Q)IeNOvk2?IQ
z&5tsXQt5S$3|INI!>8?Ld%YFyt|80~?R4G-??32jZ2`ONX|I85ez1bkJOWqy)pudw
z;&Jrkcw{2(e=^cJ!TW#XIhao+J6*1zislk!Pnfb8d64Yor4G|w8stPZQ#<fg$7ybf
z{ShfEUzgA4bx|Yvyc^#Ev;17q<p2sT;=vm2WzJdLzen^io3li`;m?8pl1j$u2(Sz=
z%-^Ad^JDxRowNU6dybNJd)r36c6(!J*Yj1lGPt==?HqPGhd1KeIPna3KgHelf#l=K
z$7gKZUB%}de6Hr}#*?^)SpSb7`OfdiQYd!h&UyR9c{@5@jgC9f@eRHf2xG|6+nsmO
Fe*w@4VAucv

literal 0
HcmV?d00001

diff --git a/Product_MicroService_Group3/app/controllers/getReviewsController.py b/Product_MicroService_Group3/app/controllers/getReviewsController.py
index 49b5f7c6..3ecdc45c 100644
--- a/Product_MicroService_Group3/app/controllers/getReviewsController.py
+++ b/Product_MicroService_Group3/app/controllers/getReviewsController.py
@@ -1,5 +1,6 @@
 from flask import Blueprint, jsonify, request, json, session
 from models.getReviews import get_reviews
+from config import KAFKA_SERVER
 
 from kafka import KafkaProducer
 
@@ -30,7 +31,7 @@ def get_review():
         return {"error" : "You need to be logged in to add a review"}
     
 
-producer = KafkaProducer(bootstrap_servers = "localhost:9092")
+producer = KafkaProducer(bootstrap_servers=KAFKA_SERVER)
 
 def send_review_message(reviews):
     metadata =producer.send("customer_reviews", json.dumps(reviews).encode("utf_8"))
diff --git a/Product_MicroService_Group3/app/publishers/kafkaPublishers.py b/Product_MicroService_Group3/app/publishers/kafkaPublishers.py
index 20018a03..d03504a6 100644
--- a/Product_MicroService_Group3/app/publishers/kafkaPublishers.py
+++ b/Product_MicroService_Group3/app/publishers/kafkaPublishers.py
@@ -1,15 +1,16 @@
 from kafka import KafkaProducer
 from kafka.admin import KafkaAdminClient, NewTopic
+from config import KAFKA_SERVER
 
 import json
 
-producer = KafkaProducer(bootstrap_servers="localhost:9092")
+producer = KafkaProducer(bootstrap_servers=KAFKA_SERVER)
 
 
 #Creates the topic
 def create_product_updated_topic():
     # Create KafkaAdminClient instance
-    admin_client = KafkaAdminClient(bootstrap_servers="localhost:9092")
+    admin_client = KafkaAdminClient(bootstrap_servers=KAFKA_SERVER)
 
     # Define the topic name and configuration
     topic_name = "product_updated_topic"
diff --git a/Product_MicroService_Group3/requirements.txt b/Product_MicroService_Group3/requirements.txt
index 6da6fc6e..aed8ace4 100644
--- a/Product_MicroService_Group3/requirements.txt
+++ b/Product_MicroService_Group3/requirements.txt
@@ -1,3 +1,14 @@
-Flask==3.0.0
+Flask==3.0.2
+Flask-Cors==4.0.0
 gunicorn==21.2.0
-boto3==1.29.0
\ No newline at end of file
+kafka-python==2.0.2
+pyodbc==5.1.0
+blinker==1.7.0
+click==8.1.7
+itsdangerous==2.1.2
+Jinja2==3.1.2
+Werkzeug==3.0.1
+boto3==1.34.71
+colorama==0.4.6
+MarkupSafe==2.1.3
+requests==2.31.0
\ No newline at end of file
diff --git a/User_MicroService_Group3/Dockerfile b/User_MicroService_Group3/Dockerfile
new file mode 100644
index 00000000..3e7d584f
--- /dev/null
+++ b/User_MicroService_Group3/Dockerfile
@@ -0,0 +1,23 @@
+FROM python:3.11.4
+
+COPY requirements.txt /user_ms/
+
+COPY app /user_ms/
+
+WORKDIR /user_ms
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+
+RUN apt-get update && apt-get install -y \
+    unixodbc \
+    unixodbc-dev \
+    freetds-dev \
+    tdsodbc \
+    && rm -rf /var/lib/apt/lists/*
+
+# Set environment variables for ODBC configuration if needed
+ENV ODBCINI=/etc/odbc.ini
+ENV ODBCSYSINI=/etc
+
+CMD ["python", "index.py"]
\ No newline at end of file
diff --git a/User_MicroService_Group3/app/config.py b/User_MicroService_Group3/app/config.py
index 358b5ce9..3217efec 100644
--- a/User_MicroService_Group3/app/config.py
+++ b/User_MicroService_Group3/app/config.py
@@ -4,4 +4,6 @@ import os
 DEBUG = True
 SECRET_KEY = "Group3"
 
-PORT = 5000
\ No newline at end of file
+PORT = 5000
+
+KAFKA_SERVER = "kafka:9092"
\ No newline at end of file
diff --git a/User_MicroService_Group3/app/controllers/__pycache__/fetchUsernameController.cpython-311.pyc b/User_MicroService_Group3/app/controllers/__pycache__/fetchUsernameController.cpython-311.pyc
index 993fb4379d90d57d8dccc87743e0bec879754e00..4cbe6aa45038b2bd7bda92392953d3e3b023f3a3 100644
GIT binary patch
delta 46
zcmeC>>gVEH&dbZi00bh71=AXsH}b_XGjeRsW#(jLoG`hZC5-h02LqGv<O?je0P7VB
AyZ`_I

delta 44
ycmeC@>gD2F&dbZi00hdu|5INvZRCq%W@Ou(%go8h*f+VHC5-t2qtN6_EVcmk#|y0h

diff --git a/User_MicroService_Group3/app/controllers/__pycache__/showReviewsController.cpython-311.pyc b/User_MicroService_Group3/app/controllers/__pycache__/showReviewsController.cpython-311.pyc
index e94a298c93ec474867d52280eef3e0341f88aca2..d12a61ce3d85861ca4a71b737865e07616283d1c 100644
GIT binary patch
delta 22
ccmaFF|A?P=IWI340}uo+6-?9L$a|F?07e%D^8f$<

delta 22
ccmaFF|A?P=IWI340}#AaWJr_P$a|F?07g><-2eap

diff --git a/User_MicroService_Group3/app/controllers/fetchUsernameController.py b/User_MicroService_Group3/app/controllers/fetchUsernameController.py
index 311efa0a..00380458 100644
--- a/User_MicroService_Group3/app/controllers/fetchUsernameController.py
+++ b/User_MicroService_Group3/app/controllers/fetchUsernameController.py
@@ -3,6 +3,8 @@ from models.fetchUsername import get_username
 
 fetch_username_bp = Blueprint("getUsername",__name__)
 
+# This endpoint is used by the product microservice: it calls here to fetch
+# a user's username when that user adds a product review.
 @fetch_username_bp.route("/user/getUsername", methods=["POST"])
 def username():
 
diff --git a/User_MicroService_Group3/app/controllers/getUsersController.py b/User_MicroService_Group3/app/controllers/getUsersController.py
index 8c51a05f..e2a9bd4c 100644
--- a/User_MicroService_Group3/app/controllers/getUsersController.py
+++ b/User_MicroService_Group3/app/controllers/getUsersController.py
@@ -1,10 +1,11 @@
 from flask import Blueprint, jsonify, request, json, session, redirect
+from config import KAFKA_SERVER
 
 from models.getUsers import fetch_user_info
 
 from kafka import KafkaConsumer
 
-consumer = KafkaConsumer("review_events", bootstrap_servers='localhost:9092')
+consumer = KafkaConsumer("review_events", bootstrap_servers=KAFKA_SERVER)
 
 def getusers(user_id):
 
@@ -14,5 +15,5 @@ def getusers(user_id):
 for message in consumer:
 
     event_data = json.loads(message.value.decode())
-    user_id = event_data['user_id']
+    user_id = event_data["user_id"]
     username = getusers(user_id)
\ No newline at end of file
diff --git a/User_MicroService_Group3/app/controllers/showReviewsController.py b/User_MicroService_Group3/app/controllers/showReviewsController.py
index 8ac832b3..924ede66 100644
--- a/User_MicroService_Group3/app/controllers/showReviewsController.py
+++ b/User_MicroService_Group3/app/controllers/showReviewsController.py
@@ -1,17 +1,18 @@
 from flask import Blueprint, jsonify, request, session
 from kafka import KafkaConsumer
 import json
+from config import KAFKA_SERVER
 
 show_reviews_bp = Blueprint("showReviews", __name__)
 
 # Kafka consumer configuration
 consumer_conf = {
-    "bootstrap_servers": "localhost:9092",
+    "bootstrap_servers": KAFKA_SERVER,
     "group_id": "show_reviews_group",  # Specify a unique group ID for this consumer
     "auto_offset_reset": "earliest"    # Start consuming from the beginning of the topic
 }
 
-# Function to consume reviews from Kafka
+# Function to consume reviews published by the product microservice
 def consume_reviews(num_reviews=1):
     consumer = KafkaConsumer("customer_reviews", **consumer_conf)
     reviews = []
diff --git a/User_MicroService_Group3/app/publishers/__pycache__/kafkaPublishers.cpython-311.pyc b/User_MicroService_Group3/app/publishers/__pycache__/kafkaPublishers.cpython-311.pyc
index 3847106fe73d1b4414eb89e2fd3d5401f6c225f7..f298ced069dad7e7a1242f27879c71831e59724d 100644
GIT binary patch
delta 59
wcmbOyG*5_kIWI340}#Y76HME_kynwOQDL(_`z1zM9bQJ!4-A+@kt9$x0GPcDasU7T

delta 59
wcmbOyG*5_kIWI340}x#K&YC8+kynwOQGT;N`z1zMEnY^^4-A+@kt9$x0G}ufhyVZp

diff --git a/User_MicroService_Group3/app/publishers/kafkaPublishers.py b/User_MicroService_Group3/app/publishers/kafkaPublishers.py
index 042a5c13..7d41460e 100644
--- a/User_MicroService_Group3/app/publishers/kafkaPublishers.py
+++ b/User_MicroService_Group3/app/publishers/kafkaPublishers.py
@@ -1,15 +1,17 @@
 from kafka import KafkaProducer
 from kafka.admin import KafkaAdminClient, NewTopic
 
-import json
+from config import KAFKA_SERVER
 
-producer = KafkaProducer(bootstrap_servers="localhost:9092")
+import json
 
+producer = KafkaProducer(bootstrap_servers=KAFKA_SERVER)
 
 
+#Creates the topic
 def create_profile_updated_topic():
     # Create KafkaAdminClient instance
-    admin_client = KafkaAdminClient(bootstrap_servers="localhost:9092")
+    admin_client = KafkaAdminClient(bootstrap_servers=KAFKA_SERVER)
 
     # Define the topic name and configuration
     topic_name = "profile_updated_topic"
@@ -27,7 +29,8 @@ def create_profile_updated_topic():
         admin_client.create_topics(new_topics=[new_topic], validate_only=False)
 
 
-
+# This function is called from updateProfileControllers.py.
+# The topic message is consumed by ProductMicroservice/Subsribers/updateUsernameSubscriber.py.
 def publish_username_updated_event(event_data):
     # Serialize the event data to JSON
     event_json = json.dumps(event_data)
diff --git a/User_MicroService_Group3/requirements.txt b/User_MicroService_Group3/requirements.txt
index 6da6fc6e..aed8ace4 100644
--- a/User_MicroService_Group3/requirements.txt
+++ b/User_MicroService_Group3/requirements.txt
@@ -1,3 +1,14 @@
-Flask==3.0.0
+Flask==3.0.2
+Flask-Cors==4.0.0
 gunicorn==21.2.0
-boto3==1.29.0
\ No newline at end of file
+kafka-python==2.0.2
+pyodbc==5.1.0
+blinker==1.7.0
+click==8.1.7
+itsdangerous==2.1.2
+Jinja2==3.1.2
+Werkzeug==3.0.1
+boto3==1.34.71
+colorama==0.4.6
+MarkupSafe==2.1.3
+requests==2.31.0
\ No newline at end of file
-- 
GitLab