From a030144a6a9db7b3912910c3b90b274cfad5cda7 Mon Sep 17 00:00:00 2001 From: Satwik Pattanaik Date: Mon, 4 Jul 2022 20:11:33 -0400 Subject: [PATCH 01/40] WIP --- .../tenant/blueprints/event_driven/ticket.py | 40 +++++++++++++++---- servers/tenant/controllers/baseController.py | 18 ++++++++- 2 files changed, 50 insertions(+), 8 deletions(-) diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index 53dcc75..4fb1066 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -16,14 +16,11 @@ ticket_bp = Blueprint("ticket_bp", __name__, url_prefix="ticket") - # TODO: USER BASED AUTH - ticket_controller = TicketController() pieces_controller = PieceController() - """ Route expects requests of format: @@ -77,6 +74,20 @@ def ticket_post(): # create ticket return "success" +@ticket_bp.route("/", methods=["GET"]) +@require_appkey +def ticket_get_all(): + + filters = request.args.get("filters") + limit = request.args.get("limit") + + data = ticket_controller._get(limit, filters) if limit else ticket_controller._get(filters) + + res = alchemyConverter(data) + response = json.dumps(res, cls=AlchemyEncoder) + + return response + """ Route expects requests of format: @@ -92,7 +103,7 @@ def ticket_post(): # create ticket """ -@ticket_bp.route("/", methods=["GET"]) +@ticket_bp.route("/date-range", methods=["GET"]) @require_appkey def ticket_get_range(): def validate_date_format(date_text): @@ -132,8 +143,23 @@ def validate_date_format(date_text): """ +@ticket_bp.route("/attribute/{attribute_name}", methods=["GET"]) +@require_appkey +def ticket_attribute_get(attribute_name): + + filters.extend({"ticket_id": ticket_id}) + + latest_ticket = ticket_controller._get_latest_event_objects( + number_of_res=number_of_res, filters=filters + ) + + res = alchemyConverter(latest_ticket) + response = json.dumps(res, cls=AlchemyEncoder) + + return response + -@ticket_bp.route("/{ticket_id}", methods=["GET"]) +@ticket_bp.route("/id/{ticket_id}", methods=["GET"]) @require_appkey def ticket_get(ticket_id): filters = request.args.get("filters") @@ -166,7 +192,7 @@ def ticket_get(ticket_id): """ -@ticket_bp.route("/{ticket_id}", methods=["GET"]) +@ticket_bp.route("/id/{ticket_id}", methods=["GET"]) @require_appkey def ticket_get_history(ticket_id): filters = request.args.get("filters") @@ -201,7 +227,7 @@ def ticket_get_history(ticket_id): """ -@ticket_bp.route("/{ticket_id}", methods=["POST"]) +@ticket_bp.route("/id/{ticket_id}", methods=["PUT"]) @require_appkey def ticket_update(ticket_id): diff --git a/servers/tenant/controllers/baseController.py b/servers/tenant/controllers/baseController.py index 2b735b5..164e949 100644 --- a/servers/tenant/controllers/baseController.py +++ b/servers/tenant/controllers/baseController.py @@ -85,7 +85,9 @@ def _delete(self, filters=[]): self.session.commit() - def _get(self, filters=[]): + def _get(self, filters): + if not filters: + filters = [] objects = ( self.session.query(self.model) @@ -95,6 +97,20 @@ def _get(self, filters=[]): ) return objects + + def _get(self, lim, filters): + if not filters: + filters = [] + + objects = ( + self.session.query(self.model) + .filter(*convert_dict_to_alchemy_filters(filters)) + .group_by(self.model.non_prim_identifying_column_name) + .order_by(self.model.timestamp) + .limit(lim) + ) + + return objects class BaseTimeSeriesController(BaseController): From 8fe99a881b91554f9d2afa2801472d26e0890597 Mon Sep 17 00:00:00 2001 From: 
Satwik Pattanaik Date: Mon, 4 Jul 2022 20:17:48 -0400 Subject: [PATCH 02/40] add .vscode to gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 9eccc7e..0bdadf7 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ *venv* __pycache__/ *.pyc +.vscode \ No newline at end of file From a563ba9005290e7e778fa13564272c14be8565b6 Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Tue, 5 Jul 2022 02:31:08 -0400 Subject: [PATCH 03/40] modifying db schema --- requirements.txt | 51 ---- .../tenant/app.Dockerfile | 0 .../tenant/blueprints/event_driven/ticket.py | 13 +- servers/tenant/blueprints/{ => simple}/pdf.py | 4 +- servers/tenant/blueprints/simple/users.py | 4 +- .../tenant/celery.Dockerfile | 0 servers/tenant/controllers/baseController.py | 45 ++-- .../tenant/controllers/consigneeController.py | 32 --- .../tenant/controllers/controllerMapper.py | 14 - servers/tenant/database/index_creation.sql | 11 - servers/tenant/database/table_creation.sql | 53 +--- .../tenant/docker-compose.yml | 0 .../tenant/kill-cluster.sh | 0 servers/tenant/models/models.py | 102 +------- servers/tenant/requirements.txt | 52 +++- servers/tenant/server.py | 2 +- .../tenant/start-cluster.sh | 0 servers/tenant/test/test.py | 245 +++--------------- 18 files changed, 135 insertions(+), 493 deletions(-) delete mode 100644 requirements.txt rename app.Dockerfile => servers/tenant/app.Dockerfile (100%) rename servers/tenant/blueprints/{ => simple}/pdf.py (92%) rename celery.Dockerfile => servers/tenant/celery.Dockerfile (100%) delete mode 100644 servers/tenant/controllers/consigneeController.py rename docker-compose.yml => servers/tenant/docker-compose.yml (100%) rename kill-cluster.sh => servers/tenant/kill-cluster.sh (100%) rename start-cluster.sh => servers/tenant/start-cluster.sh (100%) diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 2cceaa4..0000000 --- a/requirements.txt +++ /dev/null @@ -1,51 +0,0 @@ -amqp==5.0.9 -billiard==3.6.4.0 -celery==5.2.3 -cffi==1.15.0 -chardet==4.0.0 -click==8.0.3 -click-didyoumean==0.3.0 -click-plugins==1.1.1 -click-repl==0.2.0 -coloredlogs==15.0.1 -cryptography==36.0.1 -Deprecated==1.2.13 -Flask==2.0.2 -humanfriendly==10.0 -img2pdf==0.4.3 -importlib-resources==5.4.0 -itsdangerous==2.0.1 -Jinja2==3.0.3 -kombu==5.2.3 -lxml==4.7.1 -MarkupSafe==2.0.1 -multilingual-pdf2text==1.1.0 -numpy==1.22.0 -ocrmypdf==13.2.0 - -packaging==21.3 -pdf2image==1.16.0 -pdfminer.six==20211012 -pdfplumber==0.6.0 -pdftotext==2.2.2 -pikepdf==4.3.1 -Pillow==9.0.0 -pluggy==1.0.0 -prompt-toolkit==3.0.24 -pycparser==2.21 -pydantic==1.9.0 -pyparsing==3.0.6 -PyPDF2==1.26.0 -pytesseract==0.3.8 -pytz==2021.3 -redis==4.1.2 -reportlab==3.6.5 -six==1.16.0 -tqdm==4.62.3 -typing-extensions==4.0.1 -vine==5.0.0 -Wand==0.6.7 -wcwidth==0.2.5 -Werkzeug==2.0.2 -wrapt==1.13.3 -zipp==3.7.0 diff --git a/app.Dockerfile b/servers/tenant/app.Dockerfile similarity index 100% rename from app.Dockerfile rename to servers/tenant/app.Dockerfile diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index 4fb1066..45c7e28 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -1,5 +1,7 @@ import json import datetime + +from numpy import number from flask import request, jsonify, Blueprint import sys @@ -75,14 +77,15 @@ def ticket_post(): # create ticket @ticket_bp.route("/", methods=["GET"]) -@require_appkey +# @require_appkey def ticket_get_all(): 
- filters = request.args.get("filters") - limit = request.args.get("limit") - - data = ticket_controller._get(limit, filters) if limit else ticket_controller._get(filters) + filters = request.args.get("filters") or {} + limit = request.args.get("limit") or 1 + data = ticket_controller._get_latest_event_objects(filters, number_of_res=limit) + print("data------------------") + print(data) res = alchemyConverter(data) response = json.dumps(res, cls=AlchemyEncoder) diff --git a/servers/tenant/blueprints/pdf.py b/servers/tenant/blueprints/simple/pdf.py similarity index 92% rename from servers/tenant/blueprints/pdf.py rename to servers/tenant/blueprints/simple/pdf.py index f08068e..84361fa 100644 --- a/servers/tenant/blueprints/pdf.py +++ b/servers/tenant/blueprints/simple/pdf.py @@ -6,7 +6,7 @@ from celery_client import client, logger from controllers.pdfController import PDFController -pdf_bp = Blueprint("pdf_bp", __name__) +pdf_bp = Blueprint("pdf_bp", __name__, url_prefix="document") pdfcontroller = PDFController() @@ -27,7 +27,7 @@ def pdf_post(): if file and file.filename.split(".")[-1].lower() == "pdf": pdfcontroller.process_files() resp = jsonify({"message": "File successfully uploaded"}) - resp.status_code = 200 + resp.status_code = 202 return resp else: resp = jsonify({"message": "Allowed file types are pdf only"}) diff --git a/servers/tenant/blueprints/simple/users.py b/servers/tenant/blueprints/simple/users.py index 61be4ca..fef8d80 100644 --- a/servers/tenant/blueprints/simple/users.py +++ b/servers/tenant/blueprints/simple/users.py @@ -18,7 +18,6 @@ user_controller = UserController() - @user_bp.route("/", methods=["GET"]) @require_appkey def user_get(): # create ticket @@ -35,7 +34,7 @@ def user_post(): # create ticket return "success" -@user_bp.route("/modify", methods=["POST"]) +@user_bp.route("", methods=["PUT"]) @require_appkey def user_modify(): @@ -43,7 +42,6 @@ def user_modify(): update_dict = request.form["update_dict"] user_controller._modify(userId, **update_dict) - return "success" diff --git a/celery.Dockerfile b/servers/tenant/celery.Dockerfile similarity index 100% rename from celery.Dockerfile rename to servers/tenant/celery.Dockerfile diff --git a/servers/tenant/controllers/baseController.py b/servers/tenant/controllers/baseController.py index 164e949..130fb84 100644 --- a/servers/tenant/controllers/baseController.py +++ b/servers/tenant/controllers/baseController.py @@ -85,32 +85,19 @@ def _delete(self, filters=[]): self.session.commit() - def _get(self, filters): + def _get(self, model, filters, limit=500): if not filters: filters = [] - objects = ( - self.session.query(self.model) - .filter(*convert_dict_to_alchemy_filters(filters)) - .group_by(self.model.non_prim_identifying_column_name) - .order_by(self.model.timestamp) - ) + objects = self.session.query(self.model) \ + .filter(*convert_dict_to_alchemy_filters(model, filters)) \ + .group_by(self.model.non_prim_identifying_column_name) \ + .order_by(self.model.timestamp) \ + .limit(limit) + return objects - - def _get(self, lim, filters): - if not filters: - filters = [] - - objects = ( - self.session.query(self.model) - .filter(*convert_dict_to_alchemy_filters(filters)) - .group_by(self.model.non_prim_identifying_column_name) - .order_by(self.model.timestamp) - .limit(lim) - ) - return objects class BaseTimeSeriesController(BaseController): @@ -144,15 +131,21 @@ def _create_base_event(self, args_dict): def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): # get up to 'number_of_res' last 
event objects + # latest_objs = ( + # self.session.query(self.model) + # .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) + # .group_by(self.model.non_prim_identifying_column_name) + # .order_by(self.model.timestamp) + # .limit(number_of_res).all() + # ) latest_objs = ( - self.session.query(self.model) - .filters(*convert_dict_to_alchemy_filters(filters)) - .group_by(self.model.non_prim_identifying_column_name) - .order_by(self.model.timestamp) - .paginate(page, number_of_res) + self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) + .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) + .limit(number_of_res).all() ) - return latest_objs + # latest_objs = self.session.query(self.model, subquery).order_by(self.model.timestamp).all() + return latest_objs[0] def _get_latest_event_objects_from_start_date(self, start_datetime, filters={}): diff --git a/servers/tenant/controllers/consigneeController.py b/servers/tenant/controllers/consigneeController.py deleted file mode 100644 index 4080978..0000000 --- a/servers/tenant/controllers/consigneeController.py +++ /dev/null @@ -1,32 +0,0 @@ -from turtle import update -from baseController import BaseTimeSeriesController -import sys - -sys.path.insert(0, "..") # import parent folder - -from models.models import ConsigneeEvents, TicketEvents - - -ticket_controller = BaseTimeSeriesController(TicketEvents) - - -class ConsigneeController(BaseTimeSeriesController): - def __init__(self): - super().__init__(model=ConsigneeEvents) - self.model = ConsigneeEvents - - def _propagating_modify(self, ConsigneeEvents, ticketEventId, update_dict): - - """ - Definition: creates a new shippper event with the same shipperId - but creates a new ticketEvent with the updated ConsigneeEvents for - the specfic ticket which is given in context - """ - - new_shipper_event = self._modify_object(shipperEventId, update_dict) - - ticket_update_dict = {"shipperEventId": new_shipper_event.shipperEventId} - - ticket_controller._modify_object(ticketEventId, ticket_update_dict) - - return new_shipper_event diff --git a/servers/tenant/controllers/controllerMapper.py b/servers/tenant/controllers/controllerMapper.py index 02daf6a..5361867 100644 --- a/servers/tenant/controllers/controllerMapper.py +++ b/servers/tenant/controllers/controllerMapper.py @@ -18,25 +18,11 @@ def __init__(self): super().__init__(Customers) -class ShipperController(BaseNestedDependencyContoller): - def __init__(self): - super().__init__(ShipperEvents) - - -class ConsigneeController(BaseNestedDependencyContoller): - def __init__(self): - super().__init__(ConsigneeEvents) - - class TicketController(BaseTimeSeriesController): def __init__(self): super().__init__(TicketEvents) -class PieceController(BaseTimeSeriesController): - def __init__(self): - super().__init__(PieceEvents) - class GenericMilestoneController(BaseTimeSeriesController): def __init__(self): diff --git a/servers/tenant/database/index_creation.sql b/servers/tenant/database/index_creation.sql index a6ee3e0..98ae6df 100644 --- a/servers/tenant/database/index_creation.sql +++ b/servers/tenant/database/index_creation.sql @@ -1,11 +1,3 @@ -CREATE INDEX idx_shipperEvents_comp ON ShipperEvents(shipperId, timestamp); - -CREATE INDEX idx_shipperEvents_ts ON ShipperEvents(timestamp); - -CREATE INDEX idx_consigneeEvents_comp ON ConsigneeEvents(consigneeId, timestamp); - -CREATE INDEX idx_consigneeEvents_ts ON ConsigneeEvents(timestamp); - CREATE INDEX idx_ticketEvents_comp ON 
TicketEvents(ticketEventId, timestamp); CREATE INDEX idx_ticketEvents_ts ON TicketEvents(timestamp); @@ -22,6 +14,3 @@ CREATE INDEX idx_deliveryMilestones_comp ON DeliveryMilestones(milestoneId, time CREATE INDEX idx_deliveryMilestones_ts ON DeliveryMilestones(timestamp); -CREATE INDEX idx_PieceEvents_comp ON PieceEvents(piecesEventId, timestamp); - -CREATE INDEX idx_PieceEvents_ts ON PieceEvents(timestamp); \ No newline at end of file diff --git a/servers/tenant/database/table_creation.sql b/servers/tenant/database/table_creation.sql index 039c01c..17e4067 100644 --- a/servers/tenant/database/table_creation.sql +++ b/servers/tenant/database/table_creation.sql @@ -12,7 +12,7 @@ CREATE TYPE USERTYPE AS ENUM ( 'MANAGER', 'DISPATCH', 'CUSTOMER', - 'BROKER', + 'DRIVER', 'WORKER' ); @@ -34,32 +34,6 @@ CREATE TABLE IF NOT EXISTS Users ( PRIMARY KEY(userId) ); -CREATE TABLE IF NOT EXISTS ShipperEvents ( - shipperEventId INT, - shipperId INT NOT NULL, - timestamp INT NOT NULL, - userId INT NOT NULL, - companyName VARCHAR(256) NOT NULL, - address VARCHAR(256) NOT NULL, - postalCode VARCHAR(7) NOT NULL, - phoneNumber VARCHAR(15) NOT NULL, - PRIMARY KEY(shipperEventId), - CONSTRAINT fk_userId FOREIGN KEY (userId) REFERENCES Users(userId) -); - -CREATE TABLE IF NOT EXISTS ConsigneeEvents ( - consigneeEventId INT NOT NULL, - consigneeId INT NOT NULL, - timestamp INT NOT NULL, - userId INT NOT NULL, - company VARCHAR(256) NOT NULL, - name VARCHAR(256) NOT NULL, - address VARCHAR(256) NOT NULL, - postalCode VARCHAR(7) NOT NULL, - phoneNumber VARCHAR(15) NOT NULL, - PRIMARY KEY(consigneeEventId) NOT NULL, - CONSTRAINT fk_userId FOREIGN KEY (userId) REFERENCES Users(userId) -); CREATE TABLE IF NOT EXISTS TicketEvents ( ticketEventId INT, @@ -77,9 +51,18 @@ CREATE TABLE IF NOT EXISTS TicketEvents ( BOLNumber INT, specialServices VARCHAR(256), specialInstructions VARCHAR(256), + shipperCompany VARCHAR(256), + shipperName VARCHAR(256), + shipperAddress VARCHAR(256), + shipperPostalCode VARCHAR(256), + shipperPhoneNumber VARCHAR(256), + consigneeCompany VARCHAR(256), + consigneeName VARCHAR(256), + consigneeAddress VARCHAR(256), + consigneePostalCode VARCHAR(256), + consigneePhoneNumber VARCHAR(256), + pieces VARCHAR(256), PRIMARY KEY(ticketEventId), - CONSTRAINT fk_shipperId FOREIGN KEY (shipperEventId) REFERENCES ShipperEvents(shipperEventId), - CONSTRAINT fk_consigneeId FOREIGN KEY (consigneeEventId) REFERENCES ConsigneeEvents(consigneeEventId), CONSTRAINT fk_customerId FOREIGN KEY (customerId) REFERENCES Customers(customerId), CONSTRAINT fk_userId FOREIGN KEY (userId) REFERENCES Users(userId) ); @@ -128,16 +111,4 @@ CREATE TABLE IF NOT EXISTS DeliveryMilestones ( CONSTRAINT fk_ticketEventId FOREIGN KEY (ticketEventId) REFERENCES TicketEvents(ticketEventId), CONSTRAINT fk_customerId FOREIGN KEY (customerId) REFERENCES Customers(customerId), CONSTRAINT fk_userId FOREIGN KEY (userId) REFERENCES Users(userId) -); - -CREATE TABLE IF NOT EXISTS PieceEvents ( - piecesEventId INT, - piecesId INT, - timestamp INT, - ticketEventId INT, - userId INT, - pieceDescription VARCHAR(256), - PRIMARY KEY(piecesEventId), - CONSTRAINT fk_ticketId FOREIGN KEY (ticketEventId) REFERENCES TicketEvents(ticketEventId), - CONSTRAINT fk_userId FOREIGN KEY (userId) REFERENCES Users(userId) ); \ No newline at end of file diff --git a/docker-compose.yml b/servers/tenant/docker-compose.yml similarity index 100% rename from docker-compose.yml rename to servers/tenant/docker-compose.yml diff --git a/kill-cluster.sh 
b/servers/tenant/kill-cluster.sh similarity index 100% rename from kill-cluster.sh rename to servers/tenant/kill-cluster.sh diff --git a/servers/tenant/models/models.py b/servers/tenant/models/models.py index 9e06357..ee40464 100644 --- a/servers/tenant/models/models.py +++ b/servers/tenant/models/models.py @@ -62,51 +62,12 @@ def __repr__(self): return f"< Users:: userId: {self.userId}>" -class ShipperEvents(Base): - __tablename__ = "shipperevents" - non_prim_identifying_column_name = "shipperId" - shipperEventId = Column(Integer, primary_key=True, autoincrement=True) - shipperId = Column(Integer, nullable=False) - timestamp = Column(Integer, default=int(time.time())) - userId = Column(Integer, ForeignKey(Users.userId)) - companyName = Column(String, nullable=False) - address = Column(String, nullable=False) - postalCode = Column(String, nullable=False) - phoneNumber = Column(String, nullable=False, default=int(time.time())) - - user = relationship("Users") - - def __repr__(self): - return f"" - - -class ConsigneeEvents(Base): - __tablename__ = "consigneeevents" - non_prim_identifying_column_name = "consigneeId" - consigneeEventId = Column(Integer, primary_key=True, autoincrement=True) - consigneeId = Column(Integer, nullable=False) - timestamp = Column(Integer, default=int(time.time())) - userId = Column(Integer, ForeignKey(Users.userId)) - companyName = Column(String, nullable=False) - address = Column(String, nullable=False) - postalCode = Column(String, nullable=False) - phoneNumber = Column(String, nullable=False) - - user = relationship("Users") - - class TicketEvents(Base): __tablename__ = "ticketevents" non_prim_identifying_column_name = "ticketId" ticketEventId = Column(Integer, primary_key=True, autoincrement=True) ticketId = Column(Integer, nullable=False) timestamp = Column(Integer, default=int(time.time())) - shipperEventId = Column( - Integer, ForeignKey(ShipperEvents.shipperEventId), nullable=False - ) - consigneeEventId = Column( - Integer, ForeignKey(ConsigneeEvents.consigneeEventId), nullable=False - ) userId = Column(Integer, ForeignKey(Users.userId), nullable=False) customerId = Column(Integer, ForeignKey(Customers.customerId), nullable=False) barcodeNumber = Column(Integer, nullable=False) @@ -117,32 +78,19 @@ class TicketEvents(Base): BOLNumber = Column(Integer, nullable=False) specialServices = Column(String) specialInstructions = Column(String) - - shipperEvent = relationship("ShipperEvents") - consigneeEvent = relationship("ConsigneeEvents") + shipperCompany = Column(String, nullable=False) + shipperName = Column(String, nullable=False) + shipperAddress = Column(String, nullable=False) + shipperPostalCode = Column(String, nullable=False) + shipperPhoneNumber = Column(String, nullable=False) + consigneeCompany = Column(String, nullable=False) + consigneeName = Column(String, nullable=False) + consigneeAddress = Column(String, nullable=False) + consigneePostalCode = Column(String, nullable=False) + consigneePhoneNumber = Column(String, nullable=False) + pieces = Column(String, nullable=False) user = relationship("Users") customer = relationship("Customers") - pieces = relationship( - "PieceEvents", - # lazy="dynamic", - primaryjoin="TicketEvents.ticketEventId == PieceEvents.ticketEventId", - ) - - -class PieceEvents(Base): - __tablename__ = "piecesevents" - non_prim_identifying_column_name = "piecesId" - piecesEventId = Column(Integer, primary_key=True, autoincrement=True) - piecesId = Column(Integer, nullable=False) - timestamp = Column(Integer, 
default=int(time.time())) - ticketEventId = Column(Integer, ForeignKey(TicketEvents.ticketEventId)) - # customerId = Column(Integer, ForeignKey(Customers.customerId)) - userId = Column(Integer, ForeignKey(Users.userId)) - pieceDescription = Column(String) - - user = relationship("Users") # represents user which created / modified object - # customer = relationship("Customers") - ticketEvents = relationship("TicketEvents", viewonly=True) class GenericMilestones(Base): @@ -193,18 +141,6 @@ class DeliveryMilestones(Base): if __name__ == "__main__": - shipperId_timestamp_idx = Index( - "shipperId_timestamp_idx", ShipperEvents.shipperId, ShipperEvents.timestamp - ) - - INDEXES.append(shipperId_timestamp_idx) - - consigneeId_timestamp_idx = Index( - "consigneeId_timestamp_idx", ConsigneeEvents.consigneeId, ConsigneeEvents.timestamp - ) - - INDEXES.append(consigneeId_timestamp_idx) - ticketId_timestamp_idx = Index( "ticketId_timestamp_idx", TicketEvents.ticketId, TicketEvents.timestamp ) @@ -221,22 +157,6 @@ class DeliveryMilestones(Base): INDEXES.append(ticket_customerId_idx) - - ticket_shippperId_idx = Index("ticket_shippperId_idx", TicketEvents.shipperEventId) - - INDEXES.append(ticket_shippperId_idx) - - - ticket_consigneeId_idx = Index("ticket_consigneeId_idx", TicketEvents.consigneeEventId) - - INDEXES.append(ticket_consigneeId_idx) - - piecesId_timestamp_idx = Index( - "piecesId_timestamp_idx", PieceEvents.piecesId, PieceEvents.timestamp - ) - - INDEXES.append(piecesId_timestamp_idx) - gen_milestoneId_idx = Index("gen_milestoneId_idx", GenericMilestones.milestoneId) INDEXES.append(gen_milestoneId_idx) diff --git a/servers/tenant/requirements.txt b/servers/tenant/requirements.txt index fd12384..317033c 100644 --- a/servers/tenant/requirements.txt +++ b/servers/tenant/requirements.txt @@ -1,7 +1,55 @@ -sqlalchemy -psycopg2-binary +amqp==5.0.9 +billiard==3.6.4.0 celery==5.2.3 +cffi==1.15.0 +chardet==4.0.0 +click==8.0.3 +click-didyoumean==0.3.0 +click-plugins==1.1.1 +click-repl==0.2.0 +coloredlogs==15.0.1 +cryptography==36.0.1 +Deprecated==1.2.13 Flask==2.0.2 +humanfriendly==10.0 +img2pdf==0.4.3 +importlib-resources==5.4.0 +itsdangerous==2.0.1 +Jinja2==3.0.3 +kombu==5.2.3 +lxml==4.7.1 +MarkupSafe==2.0.1 +multilingual-pdf2text==1.1.0 +numpy==1.22.0 +ocrmypdf==13.2.0 +packaging==21.3 +pdf2image==1.16.0 +pdfminer.six==20211012 +pdfplumber==0.6.0 +pdftotext==2.2.2 +pikepdf==4.3.1 +Pillow==9.0.0 +pluggy==1.0.0 +prompt-toolkit==3.0.24 +pycparser==2.21 +pydantic==1.9.0 +pyparsing==3.0.6 +PyPDF2==1.26.0 +pytesseract==0.3.8 +pytz==2021.3 +redis==4.1.2 +reportlab==3.6.5 +six==1.16.0 +tqdm==4.62.3 +typing-extensions==4.0.1 +vine==5.0.0 +Wand==0.6.7 +wcwidth==0.2.5 +Werkzeug==2.0.2 +wrapt==1.13.3 +zipp==3.7.0 +sqlalchemy +psycopg2-binary Faker==13.7.0 flask-restplus==0.13.0 flask-sqlalchemy==2.4.4 diff --git a/servers/tenant/server.py b/servers/tenant/server.py index 5d456fe..14d4ad7 100644 --- a/servers/tenant/server.py +++ b/servers/tenant/server.py @@ -2,7 +2,7 @@ from blueprints.event_driven.ticket import ticket_bp from blueprints.simple.customers import customer_bp from blueprints.simple.users import user_bp -from blueprints.pdf import pdf_bp # TODO: Move this in seperate microservice +from servers.tenant.blueprints.simple.pdf import pdf_bp # TODO: Move this in seperate microservice # from models.__init__ import engine, Base # from models.models import INDEXES diff --git a/start-cluster.sh b/servers/tenant/start-cluster.sh similarity index 100% rename from start-cluster.sh rename to 
servers/tenant/start-cluster.sh diff --git a/servers/tenant/test/test.py b/servers/tenant/test/test.py index 41bd945..c565be9 100644 --- a/servers/tenant/test/test.py +++ b/servers/tenant/test/test.py @@ -18,10 +18,7 @@ from controllers.controllerMapper import ( UserController, CustomerController, - ShipperController, - ConsigneeController, TicketController, - PieceController, GenericMilestoneController, InventoryMilestoneController, DeliveryMilestoneController, @@ -34,7 +31,7 @@ app = Flask(__name__) with app.app_context(): - def generate_users(scale=100): + def generate_users(scale=5): user_controller = UserController() @@ -66,7 +63,7 @@ def generate_users(scale=100): return user_controller._create_bulk(args_arr) - def generate_customers(scale=100): + def generate_customers(scale=2): customer_controller = CustomerController() @@ -85,143 +82,9 @@ def generate_customers(scale=100): return customer_controller._create_bulk(args_arr) - def generate_shipper_events(scale=50, users=[]): - - shipper_events_controller = ShipperController() - - n = len( - session.query(ShipperEvents) - .filter(ShipperEvents.shipperEventId == ShipperEvents.shipperId) - .distinct() - .all() - ) - - if n < scale: - print(f"Generating {scale - n } Shippers") - - for _ in range(scale - n): - shipperId = random.randint(1, 2147483645) - userId = random.choice(users).userId - companyName = faker.company() - address = faker.address() - postalCode = faker.zipcode() - phoneNumber = faker.phone_number() - - obj = shipper_events_controller._create_base_event( - { - "shipperId": shipperId, - "userId": userId, - "companyName": companyName, - "address": address, - "postalCode": postalCode, - "phoneNumber": phoneNumber, - } - ) - - # created_ids.append(obj.shipperEventId) - - for i in range(random.randrange(10, 20)): - - userId = random.choice(users).userId - - if i % 4 == 0: - companyName = faker.company() - elif i % 4 == 1: - address = faker.address() - elif i % 4 == 2: - postalCode = faker.zipcode() - elif i % 4 == 3: - phoneNumber = faker.phone_number() - - # companyName = faker.company() - # address = faker.address() - # postalCode = faker.zipcode() - # phoneNumber = faker.phone_number() - - created_obj = shipper_events_controller._modify_latest_object( - getattr(obj, ShipperEvents.non_prim_identifying_column_name), - { - "userId": userId, - "companyName": companyName, - "address": address, - "postalCode": postalCode, - "phoneNumber": phoneNumber, - }, - ) - - print("Created Shipper") - # created_ids.append(created_obj.shipperEventId) - - def generate_consignee_events(scale=20, users=[]): - - shipper_events_controller = ConsigneeController() - - n = len( - session.query(ConsigneeEvents) - .filter(ConsigneeEvents.consigneeEventId == ConsigneeEvents.consigneeId) - .distinct() - .all() - ) - - if n < scale: - print(f"Generating {scale - n } Consignee") - - for _ in range(scale - n): - consigneeId = random.randint(1, 2147483645) - userId = random.choice(users).userId - companyName = faker.company() - address = faker.address() - postalCode = faker.zipcode() - phoneNumber = faker.phone_number() - - obj = shipper_events_controller._create_base_event( - { - "consigneeId": consigneeId, - "userId": userId, - "companyName": companyName, - "address": address, - "postalCode": postalCode, - "phoneNumber": phoneNumber, - } - ) - - # created_ids.append(obj.shipperEventId) - - for i in range(random.randrange(10, 20)): - - userId = random.choice(users).userId - - if i % 4 == 0: - companyName = faker.company() - elif i % 4 == 1: - address = 
faker.address() - elif i % 4 == 2: - postalCode = faker.zipcode() - elif i % 4 == 3: - phoneNumber = faker.phone_number() - - companyName = faker.company() - address = faker.address() - postalCode = faker.zipcode() - phoneNumber = faker.phone_number() - - created_obj = shipper_events_controller._modify_latest_object( - getattr(obj, ConsigneeEvents.non_prim_identifying_column_name), - { - "userId": userId, - "companyName": companyName, - "address": address, - "postalCode": postalCode, - "phoneNumber": phoneNumber, - }, - ) - - # created_ids.append(created_obj.shipperEventId) - - print("Created Consignee") def generate_ticket_events( - scale=20, shipperEvents=[], consigneeEvents=[], users=[], customers=[] + scale=20, users=[], customers=[] ): ticket_events_controller = TicketController() @@ -237,8 +100,6 @@ def generate_ticket_events( print(f"Generating {scale - n } Tickets") for _ in range(scale - n): - shipperEventId = random.choice(shipperEvents).shipperEventId - consigneeEventId = random.choice(consigneeEvents).consigneeEventId userId = random.choice(users).userId customerId = random.choice(customers).customerId barcodeNumber = random.randrange(100000000, 900000000) @@ -247,8 +108,19 @@ def generate_ticket_events( weight = random.randrange(100, 200) claimedNumberOfPieces = random.randrange(1, 5) BOLNumber = random.randrange(100000000, 900000000) - specialServices = "" - specialInstructions = "" + specialServices = faker.sentence() + specialInstructions = faker.sentence() + shipperCompany = faker.company() + shipperName = faker.name() + shipperAddress = faker.address() + shipperPostalCode = faker.zipcode() + shipperPhoneNumber = faker.phone_number() + consigneeCompany = faker.company() + consigneeName = faker.name() + consigneeAddress = faker.address() + consigneePostalCode = faker.zipcode() + consigneePhoneNumber = faker.phone_number() + pieces = faker.sentence() obj = ticket_events_controller._create_base_event( { @@ -264,6 +136,17 @@ def generate_ticket_events( "BOLNumber": BOLNumber, "specialServices": specialServices, "specialInstructions": specialInstructions, + "shipperCompany": shipperCompany, + "shipperName": shipperName, + "shipperAddress": shipperAddress, + "shipperPostalCode": shipperPostalCode, + "shipperPhoneNumber": shipperPhoneNumber, + "consigneeCompany": consigneeCompany, + "consigneeName": consigneeName, + "consigneeAddress": consigneeAddress, + "consigneePostalCode": consigneePostalCode, + "consigneePhoneNumber": consigneePhoneNumber, + "pieces": pieces } ) @@ -302,60 +185,8 @@ def generate_ticket_events( print("Created Ticket") - def generate_pieces_events(scale=20, ticketEvents=[], customers=[], users=[]): - - pieces_events_controller = PieceController() - - n = len( - session.query(PieceEvents) - .filter(PieceEvents.piecesEventId == PieceEvents.piecesId) - .distinct() - .all() - ) - - if n < scale: - print(f"Generating {scale - n } Pieces") - - for _ in range(scale - n): - - ticketEventId = random.choice(ticketEvents).ticketEventId - - for _ in range(1, 5): - piecesId = random.randint(1, 2147483645) - customerId = random.choice(customers).customerId - userId = random.choice(users).userId - pieceDescription = "" - obj = pieces_events_controller._create_base_event( - { - "piecesId": piecesId, - "ticketEventId": ticketEventId, - "customerId": customerId, - "userId": userId, - "pieceDescription": pieceDescription, - } - ) - - for i in range(random.randrange(1, 3)): - - ticketEventId = random.choice(ticketEvents).ticketEventId - customerId = 
random.choice(customers).customerId - userId = random.choice(users).userId - - created_obj = pieces_events_controller._modify_latest_object( - getattr(obj, PieceEvents.non_prim_identifying_column_name), - { - "piecesId": piecesId, - "ticketEventId": ticketEventId, - "customerId": customerId, - "userId": userId, - "pieceDescription": pieceDescription, - }, - ) - - print("Created Piece") - - def generate_generic_milestones_events(scale=200, ticket_map=[], users=[]): + def generate_generic_milestones_events(scale=50, ticket_map=[], users=[]): gen_milestone_controller = GenericMilestoneController() @@ -389,7 +220,7 @@ def generate_generic_milestones_events(scale=200, ticket_map=[], users=[]): print("Created Gen Milestone") - def generate_inventory_milestones_events(scale=200, ticket_map=[], users=[]): + def generate_inventory_milestones_events(scale=50, ticket_map=[], users=[]): gen_milestone_controller = InventoryMilestoneController() @@ -429,7 +260,7 @@ def generate_inventory_milestones_events(scale=200, ticket_map=[], users=[]): print("Created Inventory Milestone") - def generate_delivery_milestones_events(scale=200, ticket_map=[], users=[]): + def generate_delivery_milestones_events(scale=50, ticket_map=[], users=[]): gen_milestone_controller = DeliveryMilestoneController() @@ -479,17 +310,8 @@ def generate_delivery_milestones_events(scale=200, ticket_map=[], users=[]): # pprint(alchemyConverter(users[0])) - - generate_shipper_events(scale=10, users=users) - shipperEvents = session.query(ShipperEvents).limit(200).all() - - generate_consignee_events(scale=10, users=users) - consigneeEvents = session.query(ConsigneeEvents).limit(200).all() - generate_ticket_events( scale=20, - shipperEvents=shipperEvents, - consigneeEvents=consigneeEvents, users=users, customers=customers, ) @@ -500,13 +322,8 @@ def generate_delivery_milestones_events(scale=200, ticket_map=[], users=[]): # exit() - generate_pieces_events( - scale=20, ticketEvents=ticketEvents, customers=customers, users=users - ) - pieceEvents = session.query(PieceEvents).distinct().all() - - pprint(alchemyConverter(pieceEvents[0])) + pprint(alchemyConverter(ticketEvents[0])) exit() From a16799801311b3cf0126bbc8f9949f1bca519086 Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Tue, 5 Jul 2022 19:41:59 -0400 Subject: [PATCH 04/40] push --- servers/tenant/database/index_creation.sql | 1 + servers/tenant/database/table_creation.sql | 158 ++++++++++----------- servers/tenant/models/__init__.py | 4 +- servers/tenant/models/models.py | 61 ++++---- servers/tenant/test/test.py | 7 +- 5 files changed, 117 insertions(+), 114 deletions(-) diff --git a/servers/tenant/database/index_creation.sql b/servers/tenant/database/index_creation.sql index 98ae6df..4c55377 100644 --- a/servers/tenant/database/index_creation.sql +++ b/servers/tenant/database/index_creation.sql @@ -1,3 +1,4 @@ +-- SQLBook: Code CREATE INDEX idx_ticketEvents_comp ON TicketEvents(ticketEventId, timestamp); CREATE INDEX idx_ticketEvents_ts ON TicketEvents(timestamp); diff --git a/servers/tenant/database/table_creation.sql b/servers/tenant/database/table_creation.sql index 17e4067..1d3e34b 100644 --- a/servers/tenant/database/table_creation.sql +++ b/servers/tenant/database/table_creation.sql @@ -9,106 +9,106 @@ CREATE TYPE DELIVERY_TICKET_STATUS AS ENUM('DELIVERED', 'IN_TRANSIT'); CREATE TYPE GENERIC_TICKET_STATUS AS ENUM('INVENTORY', 'ASSIGNED', 'OUT_FOR_DELIVERY'); CREATE TYPE USERTYPE AS ENUM ( - 'MANAGER', - 'DISPATCH', - 'CUSTOMER', - 'DRIVER', - 'WORKER' + "MANAGER", + "DISPATCH", 
+ "CUSTOMER", + "DRIVER", + "WORKER" ); CREATE TABLE IF NOT EXISTS Customers ( - customerId INT, + "customerId" INT, name VARCHAR(50), - PRIMARY KEY(customerId) + PRIMARY KEY("customerId") ); CREATE TABLE IF NOT EXISTS Users ( - userId INT, - userType USERTYPE NOT NULL, - username VARCHAR(30) NOT NULL, - firstName VARCHAR(30) NOT NULL, - lastName VARCHAR (30) NOT NULL, - email VARCHAR(30) NOT NULL, - createdAt INT NOT NULL, - modifiedAt INT NOT NULL, - PRIMARY KEY(userId) + "userId" INT, + "userType" USERTYPE NOT NULL, + "username" VARCHAR(30) NOT NULL, + "firstName" VARCHAR(30) NOT NULL, + "lastName" VARCHAR (30) NOT NULL, + "email" VARCHAR(30) NOT NULL, + "createdAt" INT NOT NULL, + "modifiedAt" INT NOT NULL, + PRIMARY KEY("userId") ); CREATE TABLE IF NOT EXISTS TicketEvents ( - ticketEventId INT, - ticketId INT, - timestamp INT, - shipperEventId INT, - consigneeEventId INT, - userId INT, - customerId INT, - barcodeNumber INT, - houseReferenceNumber INT, - orderS3Link VARCHAR(50), - weight INT, - claimedNumberOfPieces INT, - BOLNumber INT, - specialServices VARCHAR(256), - specialInstructions VARCHAR(256), - shipperCompany VARCHAR(256), - shipperName VARCHAR(256), - shipperAddress VARCHAR(256), - shipperPostalCode VARCHAR(256), - shipperPhoneNumber VARCHAR(256), - consigneeCompany VARCHAR(256), - consigneeName VARCHAR(256), - consigneeAddress VARCHAR(256), - consigneePostalCode VARCHAR(256), - consigneePhoneNumber VARCHAR(256), - pieces VARCHAR(256), - PRIMARY KEY(ticketEventId), - CONSTRAINT fk_customerId FOREIGN KEY (customerId) REFERENCES Customers(customerId), - CONSTRAINT fk_userId FOREIGN KEY (userId) REFERENCES Users(userId) + "ticketEventId" INT, + "ticketId" INT, + "timestamp" INT, + "shipperEventId" INT, + "consigneeEventId" INT, + "userId" INT, + "customerId" INT, + "barcodeNumber" INT, + "houseReferenceNumber" INT, + "orderS3Link" VARCHAR(50), + "weight" INT, + "claimedNumberOfPieces" INT, + "BOLNumber" INT, + "specialServices" VARCHAR(256), + "specialInstructions" VARCHAR(256), + "shipperCompany" VARCHAR(256), + "shipperName" VARCHAR(256), + "shipperAddress" VARCHAR(256), + "shipperPostalCode" VARCHAR(256), + "shipperPhoneNumber" VARCHAR(256), + "consigneeCompany" VARCHAR(256), + "consigneeName" VARCHAR(256), + "consigneeAddress" VARCHAR(256), + "consigneePostalCode" VARCHAR(256), + "consigneePhoneNumber" VARCHAR(256), + "pieces" VARCHAR(256), + PRIMARY KEY("ticketEventId"), + CONSTRAINT "fk_customerId" FOREIGN KEY ("customerId") REFERENCES Customers("customerId"), + CONSTRAINT "fk_userId" FOREIGN KEY ("userId") REFERENCES Users("userId") ); CREATE TABLE IF NOT EXISTS GenericMilestones ( - milestoneId INT, + "milestoneId" INT, timestamp INT, - ticketEventId INT, - customerId INT, - userId INT, - ticketStatus GENERIC_TICKET_STATUS, - PRIMARY KEY(milestoneId), - CONSTRAINT fk_ticketEventId FOREIGN KEY (ticketEventId) REFERENCES TicketEvents(ticketEventId), - CONSTRAINT fk_customerId FOREIGN KEY (customerId) REFERENCES Customers(customerId), - CONSTRAINT fk_userId FOREIGN KEY (userId) REFERENCES Users(userId) + "ticketEventId" INT, + "customerId" INT, + "userId" INT, + "ticketStatus" GENERIC_TICKET_STATUS, + PRIMARY KEY("milestoneId"), + CONSTRAINT "fk_ticketEventId" FOREIGN KEY ("ticketEventId") REFERENCES TicketEvents("ticketEventId"), + CONSTRAINT "fk_customerId" FOREIGN KEY ("customerId") REFERENCES Customers("customerId"), + CONSTRAINT "fk_userId" FOREIGN KEY ("userId") REFERENCES Users("userId") ); CREATE TABLE IF NOT EXISTS InventoryMilestones ( - milestoneId INT, + 
"milestoneId" INT, timestamp INT, - ticketEventId INT, - customerId INT, - userId INT, - ticketStatus INVENTORY_TICKET_STATUS, - approvalStatus TICKET_APPROVAL_STATUS, - PRIMARY KEY(milestoneId), - CONSTRAINT fk_ticketEventId FOREIGN KEY (ticketEventId) REFERENCES TicketEvents(ticketEventId), - CONSTRAINT fk_customerId FOREIGN KEY (customerId) REFERENCES Customers(customerId), - CONSTRAINT fk_userId FOREIGN KEY (userId) REFERENCES Users(userId) + "ticketEventId" INT, + "customerId" INT, + "userId" INT, + "ticketStatus" INVENTORY_TICKET_STATUS, + "approvalStatus" TICKET_APPROVAL_STATUS, + PRIMARY KEY("milestoneId"), + CONSTRAINT "fk_ticketEventId" FOREIGN KEY ("ticketEventId") REFERENCES TicketEvents("ticketEventId"), + CONSTRAINT "fk_customerId" FOREIGN KEY ("customerId") REFERENCES Customers("customerId"), + CONSTRAINT "fk_userId" FOREIGN KEY ("userId") REFERENCES Users("userId") ); CREATE TABLE IF NOT EXISTS DeliveryMilestones ( - milestoneId INT, + "milestoneId" INT, timestamp INT, - ticketEventId INT, - customerId INT, - userId INT, - ticketStatus DELIVERY_TICKET_STATUS, - approvalStatus TICKET_APPROVAL_STATUS, - PODLink VARCHAR(50), - signatureLink VARCHAR(50), - picture1Link VARCHAR(50), - picture2Link VARCHAR(50), - picture3Link VARCHAR(50), - PRIMARY KEY(milestoneId), - CONSTRAINT fk_ticketEventId FOREIGN KEY (ticketEventId) REFERENCES TicketEvents(ticketEventId), - CONSTRAINT fk_customerId FOREIGN KEY (customerId) REFERENCES Customers(customerId), - CONSTRAINT fk_userId FOREIGN KEY (userId) REFERENCES Users(userId) + "ticketEventId" INT, + "customerId" INT, + "userId" INT, + "ticketStatus" DELIVERY_TICKET_STATUS, + "approvalStatus" TICKET_APPROVAL_STATUS, + "PODLink" VARCHAR(50), + "signatureLink" VARCHAR(50), + "picture1Link" VARCHAR(50), + "picture2Link" VARCHAR(50), + "picture3Link" VARCHAR(50), + PRIMARY KEY("milestoneId"), + CONSTRAINT "fk_ticketEventId" FOREIGN KEY ("ticketEventId") REFERENCES TicketEvents("ticketEventId"), + CONSTRAINT "fk_customerId" FOREIGN KEY ("customerId") REFERENCES Customers("customerId"), + CONSTRAINT "fk_userId" FOREIGN KEY ("userId") REFERENCES Users("userId") ); \ No newline at end of file diff --git a/servers/tenant/models/__init__.py b/servers/tenant/models/__init__.py index ce33c12..09d5b44 100644 --- a/servers/tenant/models/__init__.py +++ b/servers/tenant/models/__init__.py @@ -4,7 +4,7 @@ from sqlalchemy.orm import sessionmaker db_port = os.getenv("DB_PORT", "5432") -db_name = os.getenv("DB_NAME", "tenant_db") +db_name = os.getenv("DB_NAME", "tenant_database") db_username = os.getenv("DB_USERNAME", "postgres") db_password = os.getenv("DB_PASSWORD", "password") db_url = os.getenv("DB_URL", "ship-solver.ccxmktobiszx.ca-central-1.rds.amazonaws.com") @@ -15,6 +15,6 @@ Base = declarative_base() engine = create_engine(cnx_string) - +print("connecting to db....") Session = sessionmaker(bind=engine) session = Session() diff --git a/servers/tenant/models/models.py b/servers/tenant/models/models.py index ee40464..84ee19d 100644 --- a/servers/tenant/models/models.py +++ b/servers/tenant/models/models.py @@ -31,11 +31,11 @@ class Generic_Ticket_Status(enum): class UserType(enum): - manager = "MANAGER" - dispatch = "DISPATCH" - customer = "CUSTOMER" - brooker = "BROOKER" - worker = "WORKER" + manager = "manager" + dispatch = "dispatch" + customer = "customer" + driver = "driver" + worker = "worker" class Customers(Base): @@ -50,7 +50,7 @@ def __repr__(self): class Users(Base): __tablename__ = "users" userId = Column(Integer, primary_key=True, 
autoincrement=True) - userType = Column(Enum(UserType), nullable=False) + userType = Column(String, nullable=False) username = Column(String, nullable=False) firstName = Column(String, nullable=False) lastName = Column(String, nullable=False) @@ -78,16 +78,19 @@ class TicketEvents(Base): BOLNumber = Column(Integer, nullable=False) specialServices = Column(String) specialInstructions = Column(String) + # shipper shipperCompany = Column(String, nullable=False) shipperName = Column(String, nullable=False) shipperAddress = Column(String, nullable=False) shipperPostalCode = Column(String, nullable=False) shipperPhoneNumber = Column(String, nullable=False) + # consignee consigneeCompany = Column(String, nullable=False) consigneeName = Column(String, nullable=False) consigneeAddress = Column(String, nullable=False) consigneePostalCode = Column(String, nullable=False) consigneePhoneNumber = Column(String, nullable=False) + # pieces pieces = Column(String, nullable=False) user = relationship("Users") customer = relationship("Customers") @@ -140,40 +143,38 @@ class DeliveryMilestones(Base): user = relationship("Users") -if __name__ == "__main__": - ticketId_timestamp_idx = Index( - "ticketId_timestamp_idx", TicketEvents.ticketId, TicketEvents.timestamp - ) - - INDEXES.append(ticketId_timestamp_idx) +ticketId_timestamp_idx = Index( + "ticketId_timestamp_idx", TicketEvents.ticketId, TicketEvents.timestamp +) +INDEXES.append(ticketId_timestamp_idx) - ticket_userId_idx = Index("ticket_userId_idx", TicketEvents.userId) - INDEXES.append(ticket_userId_idx) +ticket_userId_idx = Index("ticket_userId_idx", TicketEvents.userId) +INDEXES.append(ticket_userId_idx) - ticket_customerId_idx = Index("ticket_customerId_idx", TicketEvents.customerId) +ticket_customerId_idx = Index("ticket_customerId_idx", TicketEvents.customerId) - INDEXES.append(ticket_customerId_idx) +INDEXES.append(ticket_customerId_idx) - gen_milestoneId_idx = Index("gen_milestoneId_idx", GenericMilestones.milestoneId) +gen_milestoneId_idx = Index("gen_milestoneId_idx", GenericMilestones.milestoneId) - INDEXES.append(gen_milestoneId_idx) +INDEXES.append(gen_milestoneId_idx) - inv_milestoneId_idx = Index("inv_milestoneId_idx", InventoryMilestones.milestoneId) +inv_milestoneId_idx = Index("inv_milestoneId_idx", InventoryMilestones.milestoneId) - INDEXES.append(inv_milestoneId_idx) +INDEXES.append(inv_milestoneId_idx) - del_milestoneId_idx = Index("del_milestoneId_idx", DeliveryMilestones.milestoneId) +del_milestoneId_idx = Index("del_milestoneId_idx", DeliveryMilestones.milestoneId) - INDEXES.append(del_milestoneId_idx) +INDEXES.append(del_milestoneId_idx) - print("Configuring DB ...") - Base.metadata.create_all(engine) - try: - # create indexes - for index in INDEXES: - index.create(bind=engine) - except: - pass +print("Configuring DB ...") +Base.metadata.create_all(engine) +try: + # create indexes + for index in INDEXES: + index.create(bind=engine) +except: + pass diff --git a/servers/tenant/test/test.py b/servers/tenant/test/test.py index c565be9..358750b 100644 --- a/servers/tenant/test/test.py +++ b/servers/tenant/test/test.py @@ -5,7 +5,7 @@ from faker import Faker import os from flask import Flask, jsonify - +import uuid; from sqlalchemy import create_engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker @@ -45,12 +45,13 @@ def generate_users(scale=5): firstName = faker.unique.first_name() lastName = faker.unique.last_name() - userType = random.choice([ut for ut in UserType]).value.lower() + 
userType = random.choice([ut for ut in UserType]).value username = firstName.lower()[0] + lastName.lower() email = f"{username}@faker.com" args_arr.append( { + "userId": random.randint(1, 1000000000), "userType": userType, "username": username, "firstName": firstName, @@ -300,7 +301,7 @@ def generate_delivery_milestones_events(scale=50, ticket_map=[], users=[]): print("Created Delivery Milestone") - generate_users(scale=10) + generate_users(scale=5) users = session.query(Users).all() # print(random.choice(users)) From 8c777934313591e9fe9429cedfdbfb0c0d0f4d92 Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Tue, 5 Jul 2022 20:07:35 -0400 Subject: [PATCH 05/40] fix schema --- .../tenant/blueprints/event_driven/ticket.py | 9 +-- servers/tenant/controllers/baseController.py | 13 ++-- servers/tenant/database/table_creation.sql | 11 +-- servers/tenant/test/test.py | 8 +-- servers/tenant/utils.py | 68 ++++++++++--------- 5 files changed, 54 insertions(+), 55 deletions(-) diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index 45c7e28..fd0f27a 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -8,8 +8,8 @@ sys.path.insert(0, "..") # import parent folder -from controllers.controllerMapper import PieceController, TicketController -from models.models import TicketEvents, PieceEvents +from controllers.controllerMapper import TicketController +from models.models import TicketEvents from utils import ( AlchemyEncoder, require_appkey, @@ -21,7 +21,6 @@ # TODO: USER BASED AUTH ticket_controller = TicketController() -pieces_controller = PieceController() """ Route expects requests of format: @@ -81,11 +80,9 @@ def ticket_post(): # create ticket def ticket_get_all(): filters = request.args.get("filters") or {} - limit = request.args.get("limit") or 1 + limit = request.args.get("limit") or 2 data = ticket_controller._get_latest_event_objects(filters, number_of_res=limit) - print("data------------------") - print(data) res = alchemyConverter(data) response = json.dumps(res, cls=AlchemyEncoder) diff --git a/servers/tenant/controllers/baseController.py b/servers/tenant/controllers/baseController.py index 130fb84..74fb3d5 100644 --- a/servers/tenant/controllers/baseController.py +++ b/servers/tenant/controllers/baseController.py @@ -138,14 +138,15 @@ def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): # .order_by(self.model.timestamp) # .limit(number_of_res).all() # ) - latest_objs = ( - self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) - .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) - .limit(number_of_res).all() - ) + latest_objs = self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) \ + .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) \ + .limit(number_of_res).all() + # latest_objs = self.session.query(self.model, subquery).order_by(self.model.timestamp).all() - return latest_objs[0] + print("LATEST_OBJS-------") + print(latest_objs) + return latest_objs def _get_latest_event_objects_from_start_date(self, start_datetime, filters={}): diff --git a/servers/tenant/database/table_creation.sql b/servers/tenant/database/table_creation.sql index 1d3e34b..35f9586 100644 --- a/servers/tenant/database/table_creation.sql +++ b/servers/tenant/database/table_creation.sql @@ -1,3 +1,4 @@ +-- SQLBook: Code declare @tablename varchar(50) set @tablename = "TENANT1"; CREATE 
TYPE INVENTORY_TICKET_STATUS AS ENUM('REENTRY', 'ENTRY'); @@ -9,11 +10,11 @@ CREATE TYPE DELIVERY_TICKET_STATUS AS ENUM('DELIVERED', 'IN_TRANSIT'); CREATE TYPE GENERIC_TICKET_STATUS AS ENUM('INVENTORY', 'ASSIGNED', 'OUT_FOR_DELIVERY'); CREATE TYPE USERTYPE AS ENUM ( - "MANAGER", - "DISPATCH", - "CUSTOMER", - "DRIVER", - "WORKER" + 'MANAGER', + 'DISPATCH', + 'CUSTOMER', + 'DRIVER', + 'WORKER' ); CREATE TABLE IF NOT EXISTS Customers ( diff --git a/servers/tenant/test/test.py b/servers/tenant/test/test.py index 358750b..5d1491f 100644 --- a/servers/tenant/test/test.py +++ b/servers/tenant/test/test.py @@ -45,7 +45,7 @@ def generate_users(scale=5): firstName = faker.unique.first_name() lastName = faker.unique.last_name() - userType = random.choice([ut for ut in UserType]).value + userType = random.choice([ut for ut in UserType]).value.lower() username = firstName.lower()[0] + lastName.lower() email = f"{username}@faker.com" @@ -125,8 +125,6 @@ def generate_ticket_events( obj = ticket_events_controller._create_base_event( { - "shipperEventId": shipperEventId, - "consigneeEventId": consigneeEventId, "userId": userId, "customerId": customerId, "barcodeNumber": barcodeNumber, @@ -154,8 +152,6 @@ def generate_ticket_events( for i in range(random.randrange(10, 20)): userId = random.choice(users).userId - shipperEventId = random.choice(shipperEvents).shipperEventId - consigneeEventId = random.choice(consigneeEvents).consigneeEventId userId = random.choice(users).userId customerId = random.choice(customers).customerId barcodeNumber = random.randrange(100000000, 900000000) @@ -169,8 +165,6 @@ def generate_ticket_events( getattr(obj, TicketEvents.non_prim_identifying_column_name), { "ticketId": obj.ticketId, - "shipperEventId": shipperEventId, - "consigneeEventId": consigneeEventId, "userId": userId, "customerId": customerId, "barcodeNumber": barcodeNumber, diff --git a/servers/tenant/utils.py b/servers/tenant/utils.py index bbf2d8f..814b6c7 100644 --- a/servers/tenant/utils.py +++ b/servers/tenant/utils.py @@ -15,38 +15,44 @@ def default(self, obj): # DFS function used to convert alchemy objects to JSON -def alchemyConverter(object, res={}, visited=set({})): - visited.add(str(object.__class__)) - for field in [ - x - for x in dir(object) - if not x.startswith("_") - and x not in set({"metadata", "non_prim_identifying_column_name", "registry"}) - ]: - - cls_name = str(object.__getattribute__(field).__class__) - - if "models.models." in cls_name: - if cls_name in visited: - continue - else: - visited.add(cls_name) - - res[field] = {} - alchemyConverter(getattr(object, field), res[field], visited=visited) - visited.remove(cls_name) - elif "InstrumentedList" in cls_name: - res[field] = [] - - for i, obj in enumerate(getattr(object, field)): - - res[field].append({}) - alchemyConverter(obj, res[field][i], visited=visited) +def alchemyConverter(object): + def single_convert(obj, res={}, visited=set({})): + visited.add(str(object.__class__)) + for field in [ + x + for x in dir(object) + if not x.startswith("_") + and x not in set({"metadata", "non_prim_identifying_column_name", "registry"}) + ]: + cls_name = str(obj.__getattribute__(field).__class__) + if "models.models." 
in cls_name: + if cls_name in visited: + continue + else: + visited.add(cls_name) + + res[field] = {} + single_convert(getattr(obj, field), res[field], visited=visited) + visited.remove(cls_name) + elif "InstrumentedList" in cls_name: + res[field] = [] + + for i, obj in enumerate(getattr(obj, field)): + + res[field].append({}) + single_convert(obj, res[field][i], visited=visited) - else: - res[field] = getattr(object, field) - - return res + else: + res[field] = getattr(obj, field) + + return res + + if type(object) == list: + res = [single_convert(obj) for obj in object] + return res + else: + return single_convert(object) + # converts fiters as a dictionary to alchemy interpretable results From 9bffad8d14b042ac455900fa4ba86bdf55392f3e Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Tue, 5 Jul 2022 22:04:54 -0400 Subject: [PATCH 06/40] get endpoints --- .../tenant/blueprints/event_driven/ticket.py | 157 +++++++----------- servers/tenant/controllers/baseController.py | 63 ++++--- servers/tenant/server.py | 2 +- servers/tenant/utils.py | 71 ++++---- 4 files changed, 137 insertions(+), 156 deletions(-) diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index fd0f27a..3bf23ff 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -1,5 +1,5 @@ import json -import datetime +from datetime import datetime from numpy import number from flask import request, jsonify, Blueprint @@ -66,60 +66,53 @@ def ticket_post(): # create ticket ticket_event = ticket_controller._create_base_event(ticket_dict) - pieces_args_array = ticket_dict["pieces"] + return {"success"} - for pieces_args in pieces_args_array: - pieces_args["ticketEventId"] = ticket_event.ticketEventId - pieces_controller._create_base_event(pieces_args) - - return "success" +# http://127.0.0.1:6767/api/ticket/?start=2022-01-01T00:00:00&end=2022-04-04T00:00:00&shipperName=Eric%20Shea +# curl http://127.0.0.1:6767/api/ticket/?shipperName +# # curl http://127.0.0.1:6767/api/ticket?key=a +# # curl http://127.0.0.1:6767/api/ticket/?start=2022-01-01T00:00:00Z&end=2022-04-04T00:00:00Z @ticket_bp.route("/", methods=["GET"]) # @require_appkey def ticket_get_all(): - filters = request.args.get("filters") or {} - limit = request.args.get("limit") or 2 - - data = ticket_controller._get_latest_event_objects(filters, number_of_res=limit) - res = alchemyConverter(data) - response = json.dumps(res, cls=AlchemyEncoder) - - return response - -""" -Route expects requests of format: - -{ - "datetime" : "value", - "filters" : { - "field1": "value1", - "field2": "value2", - .... 
- } -} - -""" - - -@ticket_bp.route("/date-range", methods=["GET"]) -@require_appkey -def ticket_get_range(): + filters = request.args or {} + sql_filters = dict(filters) + + if "start" in sql_filters: + del sql_filters["start"] + if "end" in sql_filters: + del sql_filters["end"] + if "limit" in sql_filters: + del sql_filters["limit"] + + if "limit" not in filters: + limit = 5 + else: + limit = filters["limit"] def validate_date_format(date_text): try: - datetime.datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%SZ") + return datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S") except ValueError: - raise ValueError("Incorrect data format, should be YYYY-MM-DD") - - dt = request.args.get("datetime") - validate_date_format(dt) - - filters = request.args.get("filters") - - data = ticket_controller._get_latest_event_objects_from_start_date( - dt, filters=filters - ) + raise ValueError("Incorrect data format, should be %Y-%m-%dT%H:%M:%S") + + if "start" in filters: + dt_start_str = filters["start"] + dt_start = validate_date_format(dt_start_str) + if "end" in filters: + dt_end_str= filters["end"] + dt_end = validate_date_format(dt_end_str) + data = ticket_controller._get_latest_event_objects_in_range( + dt_start, dt_end, filters=sql_filters, number_of_res=limit + ) + else: + data = ticket_controller._get_latest_event_objects_from_start_date( + dt_start, filters=sql_filters, number_of_res=limit + ) + else: + data = ticket_controller._get_latest_event_objects(sql_filters, number_of_res=limit) res = alchemyConverter(data) response = json.dumps(res, cls=AlchemyEncoder) @@ -127,61 +120,30 @@ def validate_date_format(date_text): return response -""" -Route expects requests of format: - -{ - "ticket_id" : "value", - "filters" : { - "field1": "value1", - "field2": "value2", - .... 
- }, - "number_of_res" : value, - -} - -""" - -@ticket_bp.route("/attribute/{attribute_name}", methods=["GET"]) -@require_appkey -def ticket_attribute_get(attribute_name): - - filters.extend({"ticket_id": ticket_id}) - - latest_ticket = ticket_controller._get_latest_event_objects( - number_of_res=number_of_res, filters=filters - ) - - res = alchemyConverter(latest_ticket) - response = json.dumps(res, cls=AlchemyEncoder) - - return response - - -@ticket_bp.route("/id/{ticket_id}", methods=["GET"]) -@require_appkey +@ticket_bp.route("/", methods=["GET"]) +# @require_appkey def ticket_get(ticket_id): - filters = request.args.get("filters") + filters = request.args.get("filters") or {} + number_of_res = request.args.get("number_of_res") - filters.extend({"ticket_id": ticket_id}) + filters["ticketId"] = ticket_id + latest_ticket = ticket_controller._get_latest_event_objects( number_of_res=number_of_res, filters=filters ) - res = alchemyConverter(latest_ticket) + res = alchemyConverter(latest_ticket[0]) response = json.dumps(res, cls=AlchemyEncoder) return response - """ Route expects requests of format: { - "ticket_id" : "value", + "datetime" : "value", "filters" : { "field1": "value1", "field2": "value2", @@ -192,20 +154,23 @@ def ticket_get(ticket_id): """ -@ticket_bp.route("/id/{ticket_id}", methods=["GET"]) -@require_appkey -def ticket_get_history(ticket_id): - filters = request.args.get("filters") - filters.extend({"ticket_id": ticket_id}) - latest_ticket = ticket_controller._get_latest_event_objects( - page=1, number_of_res=20, filters=filters - ) +# @ticket_bp.route("/attribute/{attribute_name}", methods=["GET"]) +# @require_appkey +# def ticket_attribute_get(attribute_name): + +# filters.extend({"ticket_id": ticket_id}) + +# latest_ticket = ticket_controller._get_latest_event_objects( +# number_of_res=number_of_res, filters=filters +# ) + +# res = alchemyConverter(latest_ticket) +# response = json.dumps(res, cls=AlchemyEncoder) + +# return response - res = alchemyConverter(latest_ticket) - response = json.dumps(res, cls=AlchemyEncoder) - return response """ @@ -227,7 +192,7 @@ def ticket_get_history(ticket_id): """ -@ticket_bp.route("/id/{ticket_id}", methods=["PUT"]) +@ticket_bp.route("/", methods=["PUT"]) @require_appkey def ticket_update(ticket_id): diff --git a/servers/tenant/controllers/baseController.py b/servers/tenant/controllers/baseController.py index 74fb3d5..9b529f4 100644 --- a/servers/tenant/controllers/baseController.py +++ b/servers/tenant/controllers/baseController.py @@ -6,7 +6,7 @@ from sqlalchemy.inspection import inspect from sqlalchemy.orm import sessionmaker import sys - +from datetime import datetime sys.path.insert(0, "..") # import parent folder from models.models import Users @@ -131,16 +131,18 @@ def _create_base_event(self, args_dict): def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): # get up to 'number_of_res' last event objects - # latest_objs = ( - # self.session.query(self.model) - # .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) - # .group_by(self.model.non_prim_identifying_column_name) - # .order_by(self.model.timestamp) - # .limit(number_of_res).all() - # ) - latest_objs = self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) \ - .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) \ - .limit(number_of_res).all() + latest_objs = ( + self.session.query(self.model) + .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) + 
.group_by(self.model.non_prim_identifying_column_name) + .order_by(self.model.timestamp) + .limit(number_of_res).all() + ) + + # print(*convert_dict_to_alchemy_filters(self.model, filters)) + # latest_objs = self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) \ + # .filter(*convert_dict_to_alchemy_filters(self.model, filters)) \ + # .limit(number_of_res).all() # latest_objs = self.session.query(self.model, subquery).order_by(self.model.timestamp).all() @@ -148,36 +150,45 @@ def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): print(latest_objs) return latest_objs - def _get_latest_event_objects_from_start_date(self, start_datetime, filters={}): + # def _get_latest_event_objects_from_start_date(self, start_datetime, filters={}): - starttime = int(time.mktime(start_datetime).timetuple()) + # starttime = int(time.mktime(start_datetime).timetuple()) - filters.append(self.model.timestamp >= starttime) + # filters.append(self.model.timestamp >= starttime) - latest_objs = ( - self.session.query(self.model) - .filter(*convert_dict_to_alchemy_filters(filters)) - .group_by(self.model.non_prim_identifying_column_name) - .order_by(self.model.timestamp) - ) + # latest_objs = ( + # self.session.query(self.model) + # .filter(*convert_dict_to_alchemy_filters(filters)) + # .group_by(self.model.non_prim_identifying_column_name) + # .order_by(self.model.timestamp) + # ) - return latest_objs + # return latest_objs - def _get_latest_event_objects_in_range(self, datetime1, datetime2, filters={}): + def _get_latest_event_objects_from_start_date(self, datetime1, filters={}, number_of_res=5): + return self._get_latest_event_objects_in_range(datetime1, datetime.now(), filters=filters, number_of_res=5) - assert datetime1 <= datetime2 + def _get_latest_event_objects_in_range(self, datetime1, datetime2, filters={}, number_of_res=5): + print("\n\n\nDATETIM1", datetime1, datetime2) + + assert datetime1 <= datetime2 time1 = int(time.mktime(datetime1.timetuple())) time2 = int(time.mktime(datetime2.timetuple())) - filters.append(self.model.timestamp >= time1) - filters.append(self.model.timestamp <= time2) + + session_filters = convert_dict_to_alchemy_filters(self.model, filters) + + session_filters.append(self.model.timestamp >= time1) + session_filters.append(self.model.timestamp <= time2) results = ( self.session.query(self.model) - .filter(*convert_dict_to_alchemy_filters(filters)) + .filter(*session_filters) + .limit(number_of_res) .all() ) + print("results" , results) return results diff --git a/servers/tenant/server.py b/servers/tenant/server.py index 14d4ad7..ca38cc0 100644 --- a/servers/tenant/server.py +++ b/servers/tenant/server.py @@ -41,4 +41,4 @@ def hello_world(): print("REGISTERING BLUEPRINT") app.register_blueprint(parent) - app.run(debug=False, host="0.0.0.0", port=6767) + app.run(debug=True, host="0.0.0.0", port=6767) diff --git a/servers/tenant/utils.py b/servers/tenant/utils.py index 814b6c7..1fc132e 100644 --- a/servers/tenant/utils.py +++ b/servers/tenant/utils.py @@ -14,45 +14,50 @@ def default(self, obj): return json.JSONEncoder.default(self, obj) -# DFS function used to convert alchemy objects to JSON -def alchemyConverter(object): - def single_convert(obj, res={}, visited=set({})): - visited.add(str(object.__class__)) - for field in [ - x - for x in dir(object) - if not x.startswith("_") - and x not in set({"metadata", "non_prim_identifying_column_name", "registry"}) - ]: - cls_name = str(obj.__getattribute__(field).__class__) - if 
"models.models." in cls_name: - if cls_name in visited: - continue - else: - visited.add(cls_name) - - res[field] = {} - single_convert(getattr(obj, field), res[field], visited=visited) - visited.remove(cls_name) - elif "InstrumentedList" in cls_name: - res[field] = [] - - for i, obj in enumerate(getattr(obj, field)): - - res[field].append({}) - single_convert(obj, res[field][i], visited=visited) +# DFS function used to convert alchemy objects to JSON +def alchemyConvertUtil(object, res={}, visited=set({})): + visited.add(str(object.__class__)) + for field in [ + x + for x in dir(object) + if not x.startswith("_") + and x not in set({"metadata", "non_prim_identifying_column_name", "registry"}) + ]: + + cls_name = str(object.__getattribute__(field).__class__) + + if "models.models." in cls_name: + if cls_name in visited: + continue else: - res[field] = getattr(obj, field) + visited.add(cls_name) + + res[field] = {} + alchemyConvertUtil(getattr(object, field), res[field], visited=visited) + visited.remove(cls_name) + elif "InstrumentedList" in cls_name: + res[field] = [] + + for i, obj in enumerate(getattr(object, field)): + + res[field].append({}) + alchemyConvertUtil(obj, res[field][i], visited=visited) - return res + else: + res[field] = getattr(object, field) + + return res - if type(object) == list: - res = [single_convert(obj) for obj in object] +def alchemyConverter(obj): + print("obj", obj) + if type(obj) == list: + res = [] + for ele in obj: + res.append(alchemyConvertUtil(ele)) return res else: - return single_convert(object) - + return alchemyConvertUtil(obj) # converts fiters as a dictionary to alchemy interpretable results From 70c8bc4fdcc9bf3f515bb6ef9a27265606e74b67 Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Wed, 6 Jul 2022 21:17:03 -0400 Subject: [PATCH 07/40] ALL tickets API done --- .../tenant/blueprints/event_driven/ticket.py | 55 ++++++++------ servers/tenant/controllers/baseController.py | 74 ++++++------------- servers/tenant/models/__init__.py | 2 +- servers/tenant/utils.py | 11 +-- 4 files changed, 61 insertions(+), 81 deletions(-) diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index 3bf23ff..480150b 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -74,29 +74,32 @@ def ticket_post(): # create ticket # # curl http://127.0.0.1:6767/api/ticket?key=a # # curl http://127.0.0.1:6767/api/ticket/?start=2022-01-01T00:00:00Z&end=2022-04-04T00:00:00Z -@ticket_bp.route("/", methods=["GET"]) -# @require_appkey -def ticket_get_all(): - filters = request.args or {} - sql_filters = dict(filters) - +def get_clean_filters_dict(immutable_args): + sql_filters = dict(immutable_args) if "start" in sql_filters: del sql_filters["start"] if "end" in sql_filters: del sql_filters["end"] if "limit" in sql_filters: del sql_filters["limit"] + return sql_filters + +def validate_date_format(date_text): + try: + return datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S") + except ValueError: + raise ValueError("Incorrect data format, should be %Y-%m-%dT%H:%M:%S") +@ticket_bp.route("/", methods=["GET"]) +# @require_appkey +def ticket_get_all(): + filters = request.args or {} + sql_filters = get_clean_filters_dict(filters) if "limit" not in filters: limit = 5 else: limit = filters["limit"] - def validate_date_format(date_text): - try: - return datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S") - except ValueError: - raise ValueError("Incorrect data format, should be 
%Y-%m-%dT%H:%M:%S") if "start" in filters: dt_start_str = filters["start"] @@ -113,9 +116,15 @@ def validate_date_format(date_text): ) else: data = ticket_controller._get_latest_event_objects(sql_filters, number_of_res=limit) - + res = alchemyConverter(data) - response = json.dumps(res, cls=AlchemyEncoder) + + print("\n\n\n\nRES POST AC ----------------------") + print(res) + response = json.dumps(res) + + print("\n\n\n\nRESULT RESPONSE ------------------" ) + print(response) return response @@ -124,18 +133,18 @@ def validate_date_format(date_text): # @require_appkey def ticket_get(ticket_id): filters = request.args.get("filters") or {} - - number_of_res = request.args.get("number_of_res") - - filters["ticketId"] = ticket_id - - - latest_ticket = ticket_controller._get_latest_event_objects( - number_of_res=number_of_res, filters=filters + + + sql_filters = get_clean_filters_dict(filters) + sql_filters["ticketId"] = ticket_id + dt_start = validate_date_format("1900-01-01T00:00:00") + dt_end = validate_date_format("2100-01-01T00:00:00") + data = ticket_controller._get_latest_event_objects_in_range( + dt_start, dt_end, filters=sql_filters ) - res = alchemyConverter(latest_ticket[0]) - response = json.dumps(res, cls=AlchemyEncoder) + res = alchemyConverter(data[0]) + response = json.dumps(res) return response diff --git a/servers/tenant/controllers/baseController.py b/servers/tenant/controllers/baseController.py index 9b529f4..afdc573 100644 --- a/servers/tenant/controllers/baseController.py +++ b/servers/tenant/controllers/baseController.py @@ -131,18 +131,19 @@ def _create_base_event(self, args_dict): def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): # get up to 'number_of_res' last event objects - latest_objs = ( - self.session.query(self.model) - .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) - .group_by(self.model.non_prim_identifying_column_name) - .order_by(self.model.timestamp) - .limit(number_of_res).all() - ) - - # print(*convert_dict_to_alchemy_filters(self.model, filters)) - # latest_objs = self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) \ - # .filter(*convert_dict_to_alchemy_filters(self.model, filters)) \ + # latest_objs = ( + # self.session.query(self.model) + # .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) + # .group_by(self.model.non_prim_identifying_column_name) + # .order_by(self.model.timestamp) # .limit(number_of_res).all() + # ) + + print(*convert_dict_to_alchemy_filters(self.model, filters)) + latest_objs = self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) \ + .filter(*convert_dict_to_alchemy_filters(self.model, filters)) \ + .order_by(self.model.timestamp) \ + .limit(1).all() # latest_objs = self.session.query(self.model, subquery).order_by(self.model.timestamp).all() @@ -170,8 +171,6 @@ def _get_latest_event_objects_from_start_date(self, datetime1, filters={}, numb def _get_latest_event_objects_in_range(self, datetime1, datetime2, filters={}, number_of_res=5): - print("\n\n\nDATETIM1", datetime1, datetime2) - assert datetime1 <= datetime2 time1 = int(time.mktime(datetime1.timetuple())) time2 = int(time.mktime(datetime2.timetuple())) @@ -181,47 +180,18 @@ def _get_latest_event_objects_in_range(self, datetime1, datetime2, filters={}, n session_filters.append(self.model.timestamp >= time1) session_filters.append(self.model.timestamp <= time2) - - results = ( - self.session.query(self.model) - .filter(*session_filters) - 
.limit(number_of_res) - .all() - ) - print("results" , results) - + + print("------------------------RUNNING TICKET GET QUERY----------------------------") + results = \ + self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) \ + .filter(*session_filters) \ + .order_by(self.model.non_prim_identifying_column_name, self.model.timestamp) \ + .limit(number_of_res).all() + print("----------complete-----------------") + for result in results: + print("TID " + str(result.ticketId)) return results - def _get_latest_event_objects_in_range_with_limit( - self, datetime1, datetime2, filters={}, max_number_of_results=None - ): - - assert datetime1 <= datetime2 - - time1 = int(time.mktime(datetime1.timetuple())) - time2 = int(time.mktime(datetime2.timetuple())) - - filters.append(self.model.timestamp >= time1) - filters.append(self.model.timestamp <= time2) - - if max_number_of_results is None: - latest_objs = ( - self.session.query(self.model) - .filter(*convert_dict_to_alchemy_filters(filters)) - .group_by(self.model.non_prim_identifying_column_name) - .order_by(self.model.timestamp) - ).all() - - elif isinstance(max_number_of_results, int): - latest_objs = ( - self.session.query(self.model) - .filter(*filters) - .group_by(self.model.non_prim_identifying_column_name) - .order_by(self.model.timestamp) - ).limit(max_number_of_results) - - return latest_objs - def _find_latest_prim_key_from_non_prim_identifying_column_val( self, non_prim_identifying_col_val ): diff --git a/servers/tenant/models/__init__.py b/servers/tenant/models/__init__.py index 09d5b44..5f179c7 100644 --- a/servers/tenant/models/__init__.py +++ b/servers/tenant/models/__init__.py @@ -14,7 +14,7 @@ Base = declarative_base() -engine = create_engine(cnx_string) +engine = create_engine(cnx_string, echo=True) print("connecting to db....") Session = sessionmaker(bind=engine) session = Session() diff --git a/servers/tenant/utils.py b/servers/tenant/utils.py index 1fc132e..bef921b 100644 --- a/servers/tenant/utils.py +++ b/servers/tenant/utils.py @@ -1,7 +1,7 @@ import os from flask import abort, request from functools import wraps - +import copy import json @@ -16,7 +16,7 @@ def default(self, obj): # DFS function used to convert alchemy objects to JSON -def alchemyConvertUtil(object, res={}, visited=set({})): +def alchemyConvertUtil(object, res, visited): visited.add(str(object.__class__)) for field in [ x @@ -50,14 +50,15 @@ def alchemyConvertUtil(object, res={}, visited=set({})): return res def alchemyConverter(obj): - print("obj", obj) if type(obj) == list: res = [] for ele in obj: - res.append(alchemyConvertUtil(ele)) + print("ALCHEMY DEBUG ---------------------------") + print("TID: " + str(ele.ticketId)) + res.append(alchemyConvertUtil(ele, {}, visited=set())) return res else: - return alchemyConvertUtil(obj) + return alchemyConvertUtil(obj, {}, visited=set()) # converts fiters as a dictionary to alchemy interpretable results From 7803197f5bb1b25e985b4f0d83c096ff666e0cab Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Wed, 6 Jul 2022 23:42:36 -0400 Subject: [PATCH 08/40] Fixing default date bug --- .../tenant/blueprints/event_driven/ticket.py | 39 ++++++++----------- servers/tenant/controllers/baseController.py | 2 +- servers/tenant/utils.py | 3 +- 3 files changed, 19 insertions(+), 25 deletions(-) diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index 480150b..4d31d64 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ 
b/servers/tenant/blueprints/event_driven/ticket.py @@ -1,5 +1,6 @@ import json from datetime import datetime +from wsgiref import validate from numpy import number from flask import request, jsonify, Blueprint @@ -91,31 +92,25 @@ def validate_date_format(date_text): except ValueError: raise ValueError("Incorrect data format, should be %Y-%m-%dT%H:%M:%S") +def default_start(): + dt_start = validate_date_format("1900-01-01T00:00:00") + return dt_start + +def default_end(): + dt_end = validate_date_format("2100-01-01T00:00:00") + return dt_end + @ticket_bp.route("/", methods=["GET"]) # @require_appkey def ticket_get_all(): filters = request.args or {} sql_filters = get_clean_filters_dict(filters) - if "limit" not in filters: - limit = 5 - else: - limit = filters["limit"] - - if "start" in filters: - dt_start_str = filters["start"] - dt_start = validate_date_format(dt_start_str) - if "end" in filters: - dt_end_str= filters["end"] - dt_end = validate_date_format(dt_end_str) - data = ticket_controller._get_latest_event_objects_in_range( - dt_start, dt_end, filters=sql_filters, number_of_res=limit - ) - else: - data = ticket_controller._get_latest_event_objects_from_start_date( - dt_start, filters=sql_filters, number_of_res=limit - ) - else: - data = ticket_controller._get_latest_event_objects(sql_filters, number_of_res=limit) + limit = 5 if "limit" not in filters else filters["limit"] + + dt_start = validate_date_format(filters["start"]) if "start" in filters else default_start() + dt_end = validate_date_format(filters["end"]) if "end" in filters else default_end() + + data = ticket_controller._get_latest_event_objects_in_range(dt_start, dt_end, sql_filters, number_of_res=limit) res = alchemyConverter(data) @@ -137,10 +132,8 @@ def ticket_get(ticket_id): sql_filters = get_clean_filters_dict(filters) sql_filters["ticketId"] = ticket_id - dt_start = validate_date_format("1900-01-01T00:00:00") - dt_end = validate_date_format("2100-01-01T00:00:00") data = ticket_controller._get_latest_event_objects_in_range( - dt_start, dt_end, filters=sql_filters + default_start(), default_end(), filters=sql_filters ) res = alchemyConverter(data[0]) diff --git a/servers/tenant/controllers/baseController.py b/servers/tenant/controllers/baseController.py index afdc573..1e5da0a 100644 --- a/servers/tenant/controllers/baseController.py +++ b/servers/tenant/controllers/baseController.py @@ -166,7 +166,7 @@ def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): # return latest_objs - def _get_latest_event_objects_from_start_date(self, datetime1, filters={}, number_of_res=5): + def _get_latest_event_objects_from_start_date(self, datetime1, filters, number_of_res=5): return self._get_latest_event_objects_in_range(datetime1, datetime.now(), filters=filters, number_of_res=5) diff --git a/servers/tenant/utils.py b/servers/tenant/utils.py index bef921b..ef7ee52 100644 --- a/servers/tenant/utils.py +++ b/servers/tenant/utils.py @@ -55,7 +55,8 @@ def alchemyConverter(obj): for ele in obj: print("ALCHEMY DEBUG ---------------------------") print("TID: " + str(ele.ticketId)) - res.append(alchemyConvertUtil(ele, {}, visited=set())) + json_res = alchemyConvertUtil(ele, {}, visited=set()) + res.append(json_res) return res else: return alchemyConvertUtil(obj, {}, visited=set()) From 03dd09929fecdca5daf92b7dcdd89419502fa299 Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Thu, 7 Jul 2022 16:08:52 -0400 Subject: [PATCH 09/40] Cors header --- servers/tenant/blueprints/event_driven/ticket.py | 3 +-- 
servers/tenant/config.py | 1 + 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index 4d31d64..f3f0995 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -75,7 +75,6 @@ def ticket_post(): # create ticket # # curl http://127.0.0.1:6767/api/ticket?key=a # # curl http://127.0.0.1:6767/api/ticket/?start=2022-01-01T00:00:00Z&end=2022-04-04T00:00:00Z - def get_clean_filters_dict(immutable_args): sql_filters = dict(immutable_args) if "start" in sql_filters: @@ -105,7 +104,7 @@ def default_end(): def ticket_get_all(): filters = request.args or {} sql_filters = get_clean_filters_dict(filters) - limit = 5 if "limit" not in filters else filters["limit"] + limit = 5000 if "limit" not in filters else filters["limit"] dt_start = validate_date_format(filters["start"]) if "start" in filters else default_start() dt_end = validate_date_format(filters["end"]) if "end" in filters else default_end() diff --git a/servers/tenant/config.py b/servers/tenant/config.py index 6790876..d4a0ff0 100644 --- a/servers/tenant/config.py +++ b/servers/tenant/config.py @@ -8,4 +8,5 @@ app.config["DEBUG"] = True app.config["MAX_CONTENT_LENGTH"] = 16 * 1024 * 1024 app.config["CELERY_BROKER_URL"] = CELERY_BROKER_URL +app.config['CORS_HEADERS'] = 'Content-Type' client.conf.update(app.config) From ec0560d77724bb3164dbfd56093acfa59062368e Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Thu, 7 Jul 2022 16:17:00 -0400 Subject: [PATCH 10/40] Cors header --- .../tenant/blueprints/event_driven/ticket.py | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index f3f0995..5641b9c 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -75,6 +75,11 @@ def ticket_post(): # create ticket # # curl http://127.0.0.1:6767/api/ticket?key=a # # curl http://127.0.0.1:6767/api/ticket/?start=2022-01-01T00:00:00Z&end=2022-04-04T00:00:00Z +def corsify(resp): + resp.headers['Access-Control-Allow-Origin'] = '*' + resp.headers['Access-Control-Allow-Headers'] = ['Origin', 'X-Requested-With', 'Content-Type', 'Accept'] + return json.dumps(resp) + def get_clean_filters_dict(immutable_args): sql_filters = dict(immutable_args) if "start" in sql_filters: @@ -113,14 +118,7 @@ def ticket_get_all(): res = alchemyConverter(data) - print("\n\n\n\nRES POST AC ----------------------") - print(res) - response = json.dumps(res) - - print("\n\n\n\nRESULT RESPONSE ------------------" ) - print(response) - - return response + return corsify(res) @ticket_bp.route("/", methods=["GET"]) @@ -136,9 +134,7 @@ def ticket_get(ticket_id): ) res = alchemyConverter(data[0]) - response = json.dumps(res) - - return response + return corsify(res) """ Route expects requests of format: From dbc2ec5b56662c9da81473f35c5bcf3ebc3e8847 Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Thu, 7 Jul 2022 16:20:28 -0400 Subject: [PATCH 11/40] Cors header --- servers/tenant/blueprints/event_driven/ticket.py | 5 +++-- servers/tenant/config.py | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index 5641b9c..c68de92 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ 
b/servers/tenant/blueprints/event_driven/ticket.py @@ -3,7 +3,7 @@ from wsgiref import validate from numpy import number -from flask import request, jsonify, Blueprint +from flask import make_response, request, jsonify, Blueprint import sys @@ -76,9 +76,10 @@ def ticket_post(): # create ticket # # curl http://127.0.0.1:6767/api/ticket/?start=2022-01-01T00:00:00Z&end=2022-04-04T00:00:00Z def corsify(resp): + resp = make_response(json.dumps(resp)) resp.headers['Access-Control-Allow-Origin'] = '*' resp.headers['Access-Control-Allow-Headers'] = ['Origin', 'X-Requested-With', 'Content-Type', 'Accept'] - return json.dumps(resp) + return resp def get_clean_filters_dict(immutable_args): sql_filters = dict(immutable_args) diff --git a/servers/tenant/config.py b/servers/tenant/config.py index d4a0ff0..b2ed244 100644 --- a/servers/tenant/config.py +++ b/servers/tenant/config.py @@ -1,4 +1,4 @@ -from flask import Flask +from flask import Flask, make_response from celery_client import client CELERY_BROKER_URL = "redis://redis:6379/0" From 2b80291714338e8c73881be3456a49037ea7edee Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Thu, 7 Jul 2022 17:27:04 -0400 Subject: [PATCH 12/40] message --- servers/tenant/blueprints/event_driven/ticket.py | 1 + 1 file changed, 1 insertion(+) diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index c68de92..188949d 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -79,6 +79,7 @@ def corsify(resp): resp = make_response(json.dumps(resp)) resp.headers['Access-Control-Allow-Origin'] = '*' resp.headers['Access-Control-Allow-Headers'] = ['Origin', 'X-Requested-With', 'Content-Type', 'Accept'] + trd return resp def get_clean_filters_dict(immutable_args): From 90019d70367c4544ee49d046378d118b63950c1b Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Thu, 7 Jul 2022 18:41:19 -0400 Subject: [PATCH 13/40] Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz --- .../tenant/blueprints/event_driven/shipper.py | 149 +----------------- .../tenant/blueprints/event_driven/ticket.py | 5 + servers/tenant/blueprints/simple/pdf.py | 43 ----- .../tenant/controllers/controllerMapper.py | 5 + servers/tenant/controllers/pdfController.py | 63 -------- servers/tenant/database/table_creation.sql | 28 +++- servers/tenant/models/models.py | 29 ++++ servers/tenant/requirements.txt | 1 + servers/tenant/server.py | 3 +- 9 files changed, 71 insertions(+), 255 deletions(-) delete mode 100644 servers/tenant/blueprints/simple/pdf.py delete mode 100644 servers/tenant/controllers/pdfController.py diff --git a/servers/tenant/blueprints/event_driven/shipper.py b/servers/tenant/blueprints/event_driven/shipper.py index 9d9c7b1..4137098 100644 --- a/servers/tenant/blueprints/event_driven/shipper.py +++ b/servers/tenant/blueprints/event_driven/shipper.py @@ -6,8 +6,8 @@ sys.path.insert(0, "..") # import parent folder -from controllers.controllerMapper import ShipperController, TicketController -from models.models import TicketEvents, ShipperEvents +from controllers.controllerMapper import PdfController +from models.models import TicketEvents from utils import ( AlchemyEncoder, require_appkey, @@ -19,147 +19,4 @@ # TODO: USER BASED AUTH - -shipper_controller = ShipperController() -ticket_controller = TicketController() - - -""" -Route expects requests of format: - -{ - "shipper" : { - "shipperEventId" : value, - "consigneeEventId" : value, - "userId" : value, - "phoneNumber" : value, - ... 
- } - -} - -""" - - -@shipper_bp.route("/", methods=["POST"]) -@require_appkey -def shipper_post(): # create ticket - - shipper_dict = request.args.get("shipper") - - # remove ticketId and ticketEventId if present - shipper_dict.pop(shipper_controller.primary_key, None) - shipper_dict.pop(TicketEvents.non_prim_identifying_column_name, None) - - shipper_event = shipper_controller._create_base_event(shipper_dict) - - return shipper_event - - -""" -Route expects requests of format: - -{ - "shipper_id" : "value", - "filters" : { - "field1": "value1", - "field2": "value2", - .... - }, - "number_of_res" : value, -} - -""" - - -@shipper_bp.route("/{ticket_id}", methods=["GET"]) -@require_appkey -def shipper_get(shipper_id): - filters = request.args.get("filters") - number_of_res = request.args.get("number_of_res") - - filters.extend({"shipper_id": shipper_id}) - - latest_shippers = shipper_controller._get_latest_event_objects( - number_of_res=number_of_res, filters=filters - ) - - res = alchemyConverter(latest_shippers) - response = json.dumps(res, cls=AlchemyEncoder) - - return response - - -""" -Route expects requests of format: - -{ - "ticket_id" : "value", - "filters" : { - "field1": "value1", - "field2": "value2", - .... - } -} - -""" - - -@shipper_bp.route("/{shipper_id}", methods=["GET"]) -@require_appkey -def shpiper_get_history(shipper_id): - filters = request.args.get("filters") - filters.extend({"shipper_id": shipper_id}) - - latest_ticket = shipper_controller._get_latest_event_objects( - page=1, number_of_res=20, filters=filters - ) - - res = alchemyConverter(latest_ticket) - response = json.dumps(res, cls=AlchemyEncoder) - - return response - - -""" -Route expects requests of format: - -{ - "update_dict" : { - "field1": "value1", - "field2": "value2", - ... - }, - "filters" : { - "field1": "value1", - "field2": "value2", - .... 
- } -} - -""" - - -@shipper_bp.route("/{shipper_id}", methods=["POST"]) -@require_appkey -def shipper_update(shipper_id): - - update_dict = request.form["update_dict"] - - # remove ticketId and ticketEventId if present - update_dict.pop(shipper_controller.primary_key, None) - update_dict.pop(TicketEvents.non_prim_identifying_column_name, None) - - filters = request.form["filters"] - filters.extend({"shipper_id": shipper_id}) - - updated_shipper = shipper_controller._modify_latest_object( - update_dict, filters=filters - ) - - ticket_controller._modify_latest_object(updated_shipper.shipper_id) - - res = alchemyConverter(updated_object) - response = json.dumps(res, cls=AlchemyEncoder) - - return response +ticket_controller = TicketController() \ No newline at end of file diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index 188949d..8bea9e7 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -4,6 +4,7 @@ from numpy import number from flask import make_response, request, jsonify, Blueprint +from flask_cors import cross_origin import sys @@ -56,6 +57,7 @@ @ticket_bp.route("/", methods=["POST"]) +@cross_origin(supports_credentials=True) @require_appkey def ticket_post(): # create ticket @@ -107,6 +109,7 @@ def default_end(): return dt_end @ticket_bp.route("/", methods=["GET"]) +@cross_origin(supports_credentials=True) # @require_appkey def ticket_get_all(): filters = request.args or {} @@ -124,6 +127,7 @@ def ticket_get_all(): @ticket_bp.route("/", methods=["GET"]) +@cross_origin(supports_credentials=True) # @require_appkey def ticket_get(ticket_id): filters = request.args.get("filters") or {} @@ -192,6 +196,7 @@ def ticket_get(ticket_id): @ticket_bp.route("/", methods=["PUT"]) +@cross_origin(supports_credentials=True) @require_appkey def ticket_update(ticket_id): diff --git a/servers/tenant/blueprints/simple/pdf.py b/servers/tenant/blueprints/simple/pdf.py deleted file mode 100644 index 84361fa..0000000 --- a/servers/tenant/blueprints/simple/pdf.py +++ /dev/null @@ -1,43 +0,0 @@ -import os -from flask import request, jsonify, Blueprint -import io -from uuid import uuid4 -import traceback -from celery_client import client, logger -from controllers.pdfController import PDFController - -pdf_bp = Blueprint("pdf_bp", __name__, url_prefix="document") - -pdfcontroller = PDFController() - - -@pdf_bp.route("", methods=["POST"]) -def pdf_post(): - if "file" not in request.files: - res = jsonify({"message": "No file part in the request"}) - res.status_code = 400 - return res - - file = request.files["file"] - - if file.filename == "": - res = jsonify({"message": "No file selected for uploading"}) - res.status_code = 400 - return res - if file and file.filename.split(".")[-1].lower() == "pdf": - pdfcontroller.process_files() - resp = jsonify({"message": "File successfully uploaded"}) - resp.status_code = 202 - return resp - else: - resp = jsonify({"message": "Allowed file types are pdf only"}) - resp.status_code = 400 - return resp - - -@pdf_bp.route("{pdf_id}", methods=["GET"]) -def pdf_get(): - res = jsonify({"message": "Please specify PDFId"}) - # TODO ... 
- res.status_code = 400 - return res diff --git a/servers/tenant/controllers/controllerMapper.py b/servers/tenant/controllers/controllerMapper.py index 5361867..25309c0 100644 --- a/servers/tenant/controllers/controllerMapper.py +++ b/servers/tenant/controllers/controllerMapper.py @@ -37,3 +37,8 @@ def __init__(self): class DeliveryMilestoneController(BaseTimeSeriesController): def __init__(self): super().__init__(DeliveryMilestones) + + +class UserController(DocumentController): + def __init__(self): + super().__init__(Documents) diff --git a/servers/tenant/controllers/pdfController.py b/servers/tenant/controllers/pdfController.py deleted file mode 100644 index 7dafff5..0000000 --- a/servers/tenant/controllers/pdfController.py +++ /dev/null @@ -1,63 +0,0 @@ -import os -from flask import request, jsonify, Blueprint -# from celery import group -# import PyPDF2 -import io -from uuid import uuid4 -import sys - -sys.path.insert(0, "../../") # import parent folder - -import extraction.app as ex -import traceback -from celery_client import client, logger - -FAILURE = -1 -SUCCESS = 0 -UPLOAD_FOLDER = "/opt/metadata-extraction/uploads" - - -class PDFController: - def process_files(self, file): - - tasks_to_run = self.fan_out(file) # split up tasks - - self.do_all_work(tasks_to_run) # run ocr pipeline for each task - result = tasks_to_run.apply_async() - return result - - def do_all_work(self, tasks_to_run): - result = tasks_to_run.apply_async() - return result - - def fan_out(self, file): - folder_uuid = uuid4() - with io.BytesIO(file.read()) as open_pdf_file: - read_pdf = PyPDF2.PdfFileReader(open_pdf_file) - num_pages = read_pdf.getNumPages() - folder = f"{UPLOAD_FOLDER}/{folder_uuid}" - os.mkdir(folder) - for i in range(num_pages): - output_pdf = PyPDF2.PdfFileWriter() - output_pdf.addPage(read_pdf.getPage(i)) - file_uuid = uuid4() - f_dir = f"{folder}/{file_uuid}" - os.mkdir(f_dir) - with open(f"{f_dir}/{file_uuid}.pdf", "wb") as f: - output_pdf.write(f) - file.close() - pdf_folders = os.listdir(folder) - return group( - [self.work.s(f"{folder}/{pdf_folder}") for pdf_folder in pdf_folders] - ) - - @client.task - def work(self, pdf_folder): - pdf_file = f"{pdf_folder}.pdf" - try: - doclist = ex.work(pdf_folder) - except Exception as e: - logger.info(f"file {pdf_folder}/{pdf_file} error. 
msg: {str(e)}") - logger.info(traceback.format_exc()) - return {"status": FAILURE, "folder": pdf_folder} - return {"status": SUCCESS, "folder": pdf_folder, "doclist": doclist} diff --git a/servers/tenant/database/table_creation.sql b/servers/tenant/database/table_creation.sql index 35f9586..f27a70c 100644 --- a/servers/tenant/database/table_creation.sql +++ b/servers/tenant/database/table_creation.sql @@ -35,13 +35,37 @@ CREATE TABLE IF NOT EXISTS Users ( PRIMARY KEY("userId") ); +CREATE TABLE IF NOT EXISTS Documents ( + "documentId" INT, + "timestamp" INT, + "userId" INT, + "customerName" INT, + "barcodeNumber" INT, + "houseReferenceNumber" INT, + "orderS3Link" VARCHAR(50), + "weight" INT, + "claimedNumberOfPieces" INT, + "BOLNumber" INT, + "specialServices" VARCHAR(256), + "specialInstructions" VARCHAR(256), + "shipperCompany" VARCHAR(256), + "shipperName" VARCHAR(256), + "shipperAddress" VARCHAR(256), + "shipperPostalCode" VARCHAR(256), + "shipperPhoneNumber" VARCHAR(256), + "consigneeCompany" VARCHAR(256), + "consigneeName" VARCHAR(256), + "consigneeAddress" VARCHAR(256), + "consigneePostalCode" VARCHAR(256), + "consigneePhoneNumber" VARCHAR(256), + "pieces" VARCHAR(256), + PRIMARY KEY("documentId") +); CREATE TABLE IF NOT EXISTS TicketEvents ( "ticketEventId" INT, "ticketId" INT, "timestamp" INT, - "shipperEventId" INT, - "consigneeEventId" INT, "userId" INT, "customerId" INT, "barcodeNumber" INT, diff --git a/servers/tenant/models/models.py b/servers/tenant/models/models.py index 84ee19d..22eb3af 100644 --- a/servers/tenant/models/models.py +++ b/servers/tenant/models/models.py @@ -61,7 +61,36 @@ class Users(Base): def __repr__(self): return f"< Users:: userId: {self.userId}>" +class Documents(Base): + __tablename__ = "documents" + documentId = Column(Integer, nullable=False) + timestamp = Column(Integer, default=int(time.time())) + userId = Column(Integer, ForeignKey(Users.userId), nullable=False) + customerId = Column(Integer, ForeignKey(Customers.customerId), nullable=False) + barcodeNumber = Column(Integer, nullable=False) + houseReferenceNumber = Column(Integer, nullable=False) + orderS3Link = Column(String, nullable=False) + weight = Column(Integer, nullable=False) + claimedNumberOfPieces = Column(Integer, nullable=False) + BOLNumber = Column(Integer, nullable=False) + specialServices = Column(String) + specialInstructions = Column(String) + # shipper + shipperCompany = Column(String, nullable=False) + shipperName = Column(String, nullable=False) + shipperAddress = Column(String, nullable=False) + shipperPostalCode = Column(String, nullable=False) + shipperPhoneNumber = Column(String, nullable=False) + # consignee + consigneeCompany = Column(String, nullable=False) + consigneeName = Column(String, nullable=False) + consigneeAddress = Column(String, nullable=False) + consigneePostalCode = Column(String, nullable=False) + consigneePhoneNumber = Column(String, nullable=False) + # pieces + pieces = Column(String, nullable=False) + class TicketEvents(Base): __tablename__ = "ticketevents" non_prim_identifying_column_name = "ticketId" diff --git a/servers/tenant/requirements.txt b/servers/tenant/requirements.txt index 317033c..970152f 100644 --- a/servers/tenant/requirements.txt +++ b/servers/tenant/requirements.txt @@ -22,6 +22,7 @@ MarkupSafe==2.0.1 multilingual-pdf2text==1.1.0 numpy==1.22.0 ocrmypdf==13.2.0 +Flask-Cors packaging==21.3 pdf2image==1.16.0 pdfminer.six==20211012 diff --git a/servers/tenant/server.py b/servers/tenant/server.py index ca38cc0..2a12e84 100644 --- 
a/servers/tenant/server.py +++ b/servers/tenant/server.py @@ -2,7 +2,8 @@ from blueprints.event_driven.ticket import ticket_bp from blueprints.simple.customers import customer_bp from blueprints.simple.users import user_bp -from servers.tenant.blueprints.simple.pdf import pdf_bp # TODO: Move this in seperate microservice +from flask_cors import cross_origin +from servers.tenant.blueprints.simple.document import pdf_bp # TODO: Move this in seperate microservice # from models.__init__ import engine, Base # from models.models import INDEXES From 419e917863ee8bcc57fe064a849329af77b6e991 Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Thu, 7 Jul 2022 19:04:55 -0400 Subject: [PATCH 14/40] Fix commit bugs for mergmerge --- .../tenant/blueprints/event_driven/pieces.py | 2 ++ .../tenant/blueprints/event_driven/shipper.py | 22 ------------------- .../tenant/blueprints/event_driven/ticket.py | 1 - .../tenant/controllers/controllerMapper.py | 2 +- servers/tenant/models/models.py | 8 +++---- 5 files changed, 7 insertions(+), 28 deletions(-) delete mode 100644 servers/tenant/blueprints/event_driven/shipper.py diff --git a/servers/tenant/blueprints/event_driven/pieces.py b/servers/tenant/blueprints/event_driven/pieces.py index 3be27d9..da90292 100644 --- a/servers/tenant/blueprints/event_driven/pieces.py +++ b/servers/tenant/blueprints/event_driven/pieces.py @@ -1,6 +1,7 @@ import json import datetime from flask import request, jsonify, Blueprint +from flask_cors import cross_origin import sys @@ -39,6 +40,7 @@ @pieces_bp.route("/{piece_id}", methods=["GET"]) +@cross_origin(supports_credentials=True) @require_appkey def pieces_get_history(piece_id): filters = request.args.get("filters") diff --git a/servers/tenant/blueprints/event_driven/shipper.py b/servers/tenant/blueprints/event_driven/shipper.py deleted file mode 100644 index 4137098..0000000 --- a/servers/tenant/blueprints/event_driven/shipper.py +++ /dev/null @@ -1,22 +0,0 @@ -import json -import datetime -from flask import request, jsonify, Blueprint - -import sys - -sys.path.insert(0, "..") # import parent folder - -from controllers.controllerMapper import PdfController -from models.models import TicketEvents -from utils import ( - AlchemyEncoder, - require_appkey, - alchemyConverter, -) - -shipper_bp = Blueprint("shipper_bp", __name__, url_prefix="shipper") - - -# TODO: USER BASED AUTH - -ticket_controller = TicketController() \ No newline at end of file diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index 8bea9e7..2060499 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -81,7 +81,6 @@ def corsify(resp): resp = make_response(json.dumps(resp)) resp.headers['Access-Control-Allow-Origin'] = '*' resp.headers['Access-Control-Allow-Headers'] = ['Origin', 'X-Requested-With', 'Content-Type', 'Accept'] - trd return resp def get_clean_filters_dict(immutable_args): diff --git a/servers/tenant/controllers/controllerMapper.py b/servers/tenant/controllers/controllerMapper.py index 25309c0..c5e1cba 100644 --- a/servers/tenant/controllers/controllerMapper.py +++ b/servers/tenant/controllers/controllerMapper.py @@ -39,6 +39,6 @@ def __init__(self): super().__init__(DeliveryMilestones) -class UserController(DocumentController): +class DocumentController(BaseController): def __init__(self): super().__init__(Documents) diff --git a/servers/tenant/models/models.py b/servers/tenant/models/models.py index 22eb3af..9f28aa0 100644 --- 
a/servers/tenant/models/models.py +++ b/servers/tenant/models/models.py @@ -63,10 +63,8 @@ def __repr__(self): class Documents(Base): __tablename__ = "documents" - documentId = Column(Integer, nullable=False) + documentId = Column(Integer, primary_key=True, nullable=False) timestamp = Column(Integer, default=int(time.time())) - userId = Column(Integer, ForeignKey(Users.userId), nullable=False) - customerId = Column(Integer, ForeignKey(Customers.customerId), nullable=False) barcodeNumber = Column(Integer, nullable=False) houseReferenceNumber = Column(Integer, nullable=False) orderS3Link = Column(String, nullable=False) @@ -89,8 +87,10 @@ class Documents(Base): consigneePhoneNumber = Column(String, nullable=False) # pieces pieces = Column(String, nullable=False) + customerName = Column(String, nullable=False) + + - class TicketEvents(Base): __tablename__ = "ticketevents" non_prim_identifying_column_name = "ticketId" From 32dee55d98864ba43414c8757ab4abe2e4881f66 Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Fri, 8 Jul 2022 04:30:04 -0400 Subject: [PATCH 15/40] Fixed celery pipeline --- extraction/app.py | 37 -- extraction/const.py | 45 -- extraction/extract.py | 351 ------------ extraction/ocr.py | 22 - extraction/requirements.txt | 51 -- servers/tenant/Pipfile | 22 - servers/tenant/Pipfile.lock | 501 ------------------ servers/tenant/app.Dockerfile | 8 - .../tenant/blueprints/event_driven/pieces.py | 56 -- servers/tenant/celery.Dockerfile | 8 - servers/tenant/celery_client.py | 48 ++ servers/tenant/docker-compose.yml | 52 -- servers/tenant/kill-cluster.sh | 1 - servers/tenant/models/__init__.py | 2 +- servers/tenant/requirements.txt | 60 --- servers/tenant/server.py | 23 +- servers/tenant/start-cluster.sh | 2 - 17 files changed, 53 insertions(+), 1236 deletions(-) delete mode 100644 extraction/app.py delete mode 100644 extraction/const.py delete mode 100644 extraction/extract.py delete mode 100644 extraction/ocr.py delete mode 100644 extraction/requirements.txt delete mode 100644 servers/tenant/Pipfile delete mode 100644 servers/tenant/Pipfile.lock delete mode 100644 servers/tenant/app.Dockerfile delete mode 100644 servers/tenant/blueprints/event_driven/pieces.py delete mode 100644 servers/tenant/celery.Dockerfile delete mode 100644 servers/tenant/docker-compose.yml delete mode 100644 servers/tenant/kill-cluster.sh delete mode 100644 servers/tenant/requirements.txt delete mode 100755 servers/tenant/start-cluster.sh diff --git a/extraction/app.py b/extraction/app.py deleted file mode 100644 index 4501c6b..0000000 --- a/extraction/app.py +++ /dev/null @@ -1,37 +0,0 @@ -import os -# from multilingual_pdf2text.pdf2text import PDF2Text -# from multilingual_pdf2text.models.document_model.document import Document -# import pdfplumber -# import extraction.extract as e -import json - - -def read_pdfplumber(file_name): - with pdfplumber.open(file_name) as pdf: - page = pdf.pages[0] - page = page.extract_text() - return page - - -def work(folder_path): - pdf_uuid = folder_path.split("/")[-1] - pdf_file = f"{folder_path}/{pdf_uuid}.pdf" - print(f"Working on {pdf_file}...") - pdf_document = Document( - document_path=pdf_file, - language='eng' - ) - pdf2text = PDF2Text(document=pdf_document) - content = pdf2text.extract() - - ml_page_text = list(content)[0]["text"] - pp_text = read_pdfplumber(pdf_file) - - extract_json = e.extract(ml_page_text, plumber_page=pp_text) - - with open(f"{folder_path}/{pdf_uuid}.json", "w") as f: - json.dump(extract_json, f, indent=2) - return extract_json - -if __name__ == 
'__main__': - work("uploads/bf0c396f-dcc6-4d3f-8d7c-9180d2f0a322/cedc5b27-2a94-4e17-ac48-65c13e065102") \ No newline at end of file diff --git a/extraction/const.py b/extraction/const.py deleted file mode 100644 index d477c86..0000000 --- a/extraction/const.py +++ /dev/null @@ -1,45 +0,0 @@ -CEVA = "CEVA" -NORTH_AMERICAN = "NORTH_AMERICAN" - -CEVA_NUM = "1-888-327-8247" - - -#doclist_keys - -HOUSE_REF = "house_ref" -BARCODE = "barcode" -FIRST_PARTY = "first_party" -NUM_PCS = "num_pcs" -PCS = "pcs" -WEIGHT = "weight" - -PKG = "pkg" -WT_LBS = "wt(lbs)" -COMMODITY_DESCRIPTION = "commodity_description" -DIMS_IN = "dims(in)" - -BOL_NUM = "bol_num" -SPECIAL_SERVICES = "special_services" -SPECIAL_INSTRUCTIONS = "special_instructions" - -COMPANY = "company" -NAME = "name" -ADDRESS = "address" -POSTAL_CODE = "postal_code" -PHONE_NUMBER = "phone_number" - -CONSIGNEE = "consignee" -SHIPPER = "shipper" - -CEVA_SHIPPER_FIELDS = [COMPANY, ADDRESS] -CEVA_CONSIGNEE_FIELDS = [NAME, ADDRESS] - -NORTH_AMERICAN_SHIPPER_FIELDS = [COMPANY, NAME, COMPANY, ADDRESS] -NORTH_AMERICAN_CONSIGNEE_FIELDS = [COMPANY, NAME, COMPANY, ADDRESS] -BARCODE_REGEX = "([A-Z][A-Z]\d{3}-\d{7})" -PCS_REGEX = "(\d+) +PCS" -LBS_REGEX = "(\d+) +[Ll]bs" -POSTAL_CODE_REGEX_BOTH = "[ABCEGHJ-NPRSTVXY][\dO][ABCEGHJ-NPRSTV-Z][ -]?[\dO][ABCEGHJ-NPRSTV-Z][\dO]$" -PHONE_NUMBER_REGEX = "((\+\d{1,2}\s)?\(?(905|807|705|647|613|519|416|343|289|226)\)?[\s.-]?\d{3}[\s.-]?\d{4})" - -PHONE_COLON_REGEX = "ne: (\d{10})" \ No newline at end of file diff --git a/extraction/extract.py b/extraction/extract.py deleted file mode 100644 index 491ffbd..0000000 --- a/extraction/extract.py +++ /dev/null @@ -1,351 +0,0 @@ -import os -import ocrmypdf -import time -import re -from extraction.const import * -import pdfplumber - -def ocr(file_path, save_path): - ocrmypdf.ocr(file_path, save_path) - -def read_pdf(file_name, page_num): - file_path = os.path.join("../text", file_name.split(".")[0], f"{page_num}.txt") - with open(file_path, "r") as f: - return f.read() - -def read_pdfplumber(file_name, page_num): - with pdfplumber.open("../data/NORTH_AMERICAN.pdf") as pdf: - page = pdf.pages[page_num-1] - page = page.extract_text() - return page - - -""" -CEVA - 1st party - Consignee info - Name - Addr - Postal Code - Phone Number - House/ref - Barcode - Lbs - # PCs - Shipper - Special Instructions -""" -def extract_ceva(page): - lines = page.splitlines() - ceva_list = {FIRST_PARTY: CEVA} - # barcode - matches = re.findall(BARCODE_REGEX, page) - if matches: - insert_in_dict(ceva_list, BARCODE, matches[0]) - # NUM PCS - matches = re.findall(PCS_REGEX, page) - if matches: - insert_in_dict(ceva_list, NUM_PCS, matches[0]) - # weight - matches = re.findall(LBS_REGEX, page) - if matches: - insert_in_dict(ceva_list, WEIGHT, f"{matches[0]} lbs") - # phone number - matches = re.findall(PHONE_NUMBER_REGEX, page) - consignee_phone_number = matches[0][0] if matches else "" - - for line_num, line in enumerate(lines): - # house ref # - if "house" in line.lower() or "ref #" in line.lower(): - insert_in_dict(ceva_list, HOUSE_REF, line.split(" ")[-1]) - # shipper - if line.lower().startswith('shipper') or line.lower().endswith('expéditeur'): - shipper = extract_info_ceva(lines, line_num) - insert_in_dict(ceva_list, SHIPPER, shipper) - - #consignee - if is_consignee(line): - consignee = extract_info_ceva(lines, line_num, is_shipper=False) - insert_in_dict(consignee, PHONE_NUMBER, consignee_phone_number) - insert_in_dict(ceva_list, CONSIGNEE, consignee) - if "instructions" in line.lower(): - 
special_instructions = extract_special(lines, line_num, ["reference"]) - insert_in_dict(ceva_list, SPECIAL_INSTRUCTIONS, special_instructions) - - return ceva_list - - -def is_consignee(line): - return line.lower().startswith('consignee') or line.lower().endswith('consignataire') - - -def extract_info_ceva(lines, starting_num, is_shipper=True): - - field_index = 0 - curr_field_entry = "" - shipper_dict = {} - FIELDS = CEVA_SHIPPER_FIELDS if is_shipper else CEVA_CONSIGNEE_FIELDS - for index in range(starting_num+1, len(lines)): - if not lines[index]: - continue - # name or company - if field_index == 0: - if starts_with_number(lines[index]): - field_index += 1 - shipper_dict[FIELDS[field_index-1]] = curr_field_entry.rstrip() - curr_field_entry = "" - else: - curr_field_entry += lines[index] + " " - - if FIELDS[field_index] == ADDRESS: - curr_field_entry += lines[index] + " " - if is_consignee(lines[index]) or re.findall(POSTAL_CODE_REGEX_BOTH, lines[index]): - shipper_dict[ADDRESS] = curr_field_entry.rstrip() - break - - for field in FIELDS: - if field not in shipper_dict: - shipper_dict[field] = "" - - postal_code = extract_postal_code(shipper_dict[ADDRESS]) - insert_in_dict(shipper_dict, POSTAL_CODE, postal_code) - - return shipper_dict - - -def starts_with_number(line): - return line.split(" ")[0].isnumeric() - - -def extract_special(lines, starting_num, keywords): - entry = "" - outer_break = False - for index in range(starting_num+1, len(lines)): - if not lines[index]: - continue - for keyword in keywords: - if keyword in lines[index].lower(): - outer_break = True - break - if outer_break: - break - entry += lines[index] + " " - - return entry.rstrip() - - -""" - -North American - 1st party - BOL # - Consignee information - # PCS - DIMS - Special Services - -""" - -def extract_north_american(page, page_2): - lines = page.splitlines() - north_american_list = {FIRST_PARTY: NORTH_AMERICAN} - - # phone number - matches = re.findall(PHONE_COLON_REGEX, page) - shipper_phone_number = matches[0] if matches else "" - consignee_phone_number = matches[1] if len(matches) > 1 else "" - - for line_num, line in enumerate(lines): - # ref # - if "ref#" in line.lower(): - ref_num = line.split(" ")[-1] - if "ref" not in ref_num.lower(): - insert_in_dict(north_american_list, HOUSE_REF, line.split(":")[-1].strip()) - # BOL # - if "bol" in line.lower(): - bol_num = line.split(" ")[-1] - if "bol" not in bol_num.lower(): - insert_in_dict(north_american_list, BOL_NUM, line.split(" ")[-1]) - # shipper - if "shipper" in line.lower(): - shipper = extract_info_north_american(lines, line_num) - insert_in_dict(shipper, PHONE_NUMBER, shipper_phone_number) - insert_in_dict(north_american_list, SHIPPER, shipper) - # consignee - if "consignee" in line.lower(): - consignee = extract_info_north_american(lines, line_num, is_shipper=False) - insert_in_dict(consignee, PHONE_NUMBER, consignee_phone_number) - insert_in_dict(north_american_list, CONSIGNEE, consignee) - #special services - if "services" in line.lower(): - special_services = extract_special(lines, line_num, ["question", "issue", "905-277-2000"]) - insert_in_dict(north_american_list, SPECIAL_SERVICES, special_services) - - lines = page_2.splitlines() - for line_num, line in enumerate(lines): - if "pkg" in line.lower() or "wt(lbs)" in line.lower(): - pcs = extract_pcs(lines, line_num) - insert_in_dict(north_american_list, PCS, pcs) - - - return north_american_list - - -def extract_pcs(lines, starting_num): - pcs = [] - num_pcs = 0 - weight = 0 - for index in 
range(starting_num+1, len(lines)): - if len(lines[index]) < 13: - _num_pcs, _weight = [float(x) for x in lines[index].split(" ")] - assert _num_pcs == num_pcs and _weight == weight - break - second_space = lines[index].find(" ", lines[index].find(" ") + 1) - dim_nums = [re.findall("\d+\.\d+", x)[0] for x in lines[index].split(" ")[-3:]] - pkg, wt = lines[index].split(" ")[:2] - num_pcs += 1 - weight += float(wt) - commodity_description = lines[index][second_space:].split(dim_nums[0])[0].lstrip().rstrip() - dims = ' x '.join(dim_nums) - pcs.append({PKG: pkg, WT_LBS: wt, COMMODITY_DESCRIPTION: commodity_description, DIMS_IN: dims}) - - return pcs - -def extract_info_north_american(lines, starting_num, is_shipper=True): - field_index = 0 - curr_field_entry = "" - shipper_dict = {} - FIELDS = NORTH_AMERICAN_SHIPPER_FIELDS if is_shipper else NORTH_AMERICAN_CONSIGNEE_FIELDS - company = False - company_1 = "" - name = "" - company_2 = "" - address = "" - for index in range(starting_num+1, len(lines)): - if not lines[index]: - continue - if field_index == 0: - if "contact" in lines[index].lower(): - company = True - name = lines[index].split(": ")[-1] - company_1 = curr_field_entry.rstrip() - curr_field_entry = "" - field_index += 2 - continue - curr_field_entry += lines[index] + " " - if company: - if starts_with_number(lines[index]): - company = False - field_index += 1 - company_2 = curr_field_entry.rstrip() - curr_field_entry = "" - else: - curr_field_entry += lines[index] + " " - - - if FIELDS[field_index] == ADDRESS: - curr_field_entry += lines[index] + " " - if is_consignee(lines[index]) or re.findall(POSTAL_CODE_REGEX_BOTH, lines[index]): - address = curr_field_entry.rstrip() - break - - if is_shipper: - shipper_dict[COMPANY] = company_2 - shipper_dict[ADDRESS] = address - else: - shipper_dict[COMPANY] = company_1 - shipper_dict[NAME] = name - shipper_dict[ADDRESS] = (company_2 + ", " if company_2 else "") + address - - for field in FIELDS: - if field not in shipper_dict: - shipper_dict[field] = "" - - postal_code = extract_postal_code(shipper_dict[ADDRESS]) - insert_in_dict(shipper_dict, POSTAL_CODE, postal_code) - return shipper_dict - -def generate_doclist(_list): - return { - FIRST_PARTY: _list[FIRST_PARTY] if FIRST_PARTY in _list else "", - HOUSE_REF: _list[HOUSE_REF] if HOUSE_REF in _list else "", - BARCODE: _list[BARCODE] if BARCODE in _list else "", - PCS: _list[PCS] if PCS in _list else [], - NUM_PCS: _list[NUM_PCS] if NUM_PCS in _list else "", - WEIGHT: _list[WEIGHT] if WEIGHT in _list else "", - BOL_NUM: _list[BOL_NUM] if BOL_NUM in _list else "", - SPECIAL_SERVICES: _list[SPECIAL_SERVICES] if SPECIAL_SERVICES in _list else "", - SPECIAL_INSTRUCTIONS: _list[SPECIAL_INSTRUCTIONS] if SPECIAL_INSTRUCTIONS in _list else "", - CONSIGNEE: { - COMPANY: _list[CONSIGNEE][COMPANY] if CONSIGNEE in _list and COMPANY in _list[CONSIGNEE] else "", - NAME: _list[CONSIGNEE][NAME] if CONSIGNEE in _list and NAME in _list[CONSIGNEE] else "", - ADDRESS: _list[CONSIGNEE][ADDRESS] if CONSIGNEE in _list and ADDRESS in _list[CONSIGNEE] else "", - POSTAL_CODE: _list[CONSIGNEE][POSTAL_CODE] if CONSIGNEE in _list and POSTAL_CODE in _list[CONSIGNEE] else "", - PHONE_NUMBER: _list[CONSIGNEE][PHONE_NUMBER] if CONSIGNEE in _list and PHONE_NUMBER in _list[CONSIGNEE] else "" - }, - SHIPPER: { - COMPANY: _list[SHIPPER][COMPANY] if SHIPPER in _list and COMPANY in _list[SHIPPER] else "", - NAME: _list[SHIPPER][NAME] if SHIPPER in _list and NAME in _list[SHIPPER] else "", - ADDRESS: _list[SHIPPER][ADDRESS] if 
SHIPPER in _list and ADDRESS in _list[SHIPPER] else "", - POSTAL_CODE: _list[SHIPPER][POSTAL_CODE] if SHIPPER in _list and POSTAL_CODE in _list[SHIPPER] else "", - PHONE_NUMBER: _list[SHIPPER][PHONE_NUMBER] if SHIPPER in _list and PHONE_NUMBER in _list[SHIPPER] else "" - } - } - - -def extract(page, plumber_page=None): - second_party = predict_second_party(page) - - if second_party == CEVA: - return extract_ceva(page) - elif second_party == NORTH_AMERICAN: - return extract_north_american(page, plumber_page) - - return {} - -def predict_second_party(page): - - if CEVA.lower() in page.lower() or CEVA_NUM in page: - return CEVA - - return NORTH_AMERICAN - - -def insert_in_dict(_dict, key, value): - if not key in _dict: - _dict[key] = value - - -def extract_postal_code(address): - matches = re.findall(f"({POSTAL_CODE_REGEX_BOTH})", address) - if not matches: - return "" - postal_code = matches[0] - - # correct Os to 0s - for i in [-3, -1, 1]: - if postal_code[i] == "O": - postal_code = list(postal_code) - postal_code[i] = "0" - postal_code = ''.join(postal_code) - return postal_code - - -if __name__ == "__main__": - start = time.time() - ceva = read_pdf("CEVA-ocr.pdf", 1) - ceva_list = extract(ceva) - ceva_doclist = generate_doclist(ceva_list) - print(ceva_doclist) - - print() - - north_american_1 = read_pdf("NORTH_AMERICAN.pdf", 1) - north_american_2 = read_pdfplumber("NORTH_AMERICAN.pdf", 1) - north_american_list = extract(north_american_1, plumber_page=north_american_2) - north_american_doclist = generate_doclist(north_american_list) - print(north_american_doclist) - - print(time.time()-start) - - diff --git a/extraction/ocr.py b/extraction/ocr.py deleted file mode 100644 index 5ea34c1..0000000 --- a/extraction/ocr.py +++ /dev/null @@ -1,22 +0,0 @@ - - -from multilingual_pdf2text.pdf2text import PDF2Text -from multilingual_pdf2text.models.document_model.document import Document -import logging -import time - -if __name__ == "__main__": - stat = time.time() - pdf_document = Document( - document_path="../data/NORTH_AMERICAN.pdf", - language='eng' - ) - pdf2text = PDF2Text(document=pdf_document) - content = pdf2text.extract() - print(time.time()-stat) - - for page in content: - with open(f"text/NORTH_AMERICAN/{page['page_number']}.txt", "w") as f: - f.write(page["text"]) - - diff --git a/extraction/requirements.txt b/extraction/requirements.txt deleted file mode 100644 index 520194d..0000000 --- a/extraction/requirements.txt +++ /dev/null @@ -1,51 +0,0 @@ -amqp==5.0.9 -billiard==3.6.4.0 -celery==5.2.3 -cffi==1.15.0 -chardet==4.0.0 -click==8.0.3 -click-didyoumean==0.3.0 -click-plugins==1.1.1 -click-repl==0.2.0 -coloredlogs==15.0.1 -cryptography==36.0.1 -Deprecated==1.2.13 -Flask==2.0.2 -humanfriendly==10.0 -img2pdf==0.4.3 -importlib-resources==5.4.0 -itsdangerous==2.0.1 -Jinja2==3.0.3 -kombu==5.2.3 -lxml==4.7.1 -MarkupSafe==2.0.1 -multilingual-pdf2text==1.1.0 -numpy==1.22.0 -ocrmypdf==13.2.0 -opencv-python==4.5.5.62 -packaging==21.3 -pdf2image==1.16.0 -pdfminer.six==20211012 -pdfplumber==0.6.0 -pdftotext==2.2.2 -pikepdf==4.3.1 -Pillow==9.0.0 -pluggy==1.0.0 -prompt-toolkit==3.0.24 -pycparser==2.21 -pydantic==1.9.0 -pyparsing==3.0.6 -PyPDF2==1.26.0 -pytesseract==0.3.8 -pytz==2021.3 -redis==4.1.2 -reportlab==3.6.5 -six==1.16.0 -tqdm==4.62.3 -typing-extensions==4.0.1 -vine==5.0.0 -Wand==0.6.7 -wcwidth==0.2.5 -Werkzeug==2.0.2 -wrapt==1.13.3 -zipp==3.7.0 diff --git a/servers/tenant/Pipfile b/servers/tenant/Pipfile deleted file mode 100644 index db9acab..0000000 --- a/servers/tenant/Pipfile +++ 
/dev/null @@ -1,22 +0,0 @@ -[[source]] -url = "https://pypi.org/simple" -verify_ssl = true -name = "pypi" - -[packages] -psycopg2-binary = "*" -celery = "==5.2.3" -flask-restplus = "==0.13.0" -flask-marshmallow = "==0.14.0" -marshmallow-sqlalchemy = "==0.24.1" -marshmallow = "==3.9.1" -python-dotenv = "==0.20.0" -SQLAlchemy = "*" -Flask = "==2.0.2" -Faker = "==13.7.0" -Flask-SQLAlchemy = "==2.4.4" - -[dev-packages] - -[requires] -python_version = "3.8" diff --git a/servers/tenant/Pipfile.lock b/servers/tenant/Pipfile.lock deleted file mode 100644 index a24e0f8..0000000 --- a/servers/tenant/Pipfile.lock +++ /dev/null @@ -1,501 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "6dff868c3d4497e3f717b753c5792b94a2f7cc606ce84194f3bb2bfd0fe58121" - }, - "pipfile-spec": 6, - "requires": { - "python_version": "3.8" - }, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "amqp": { - "hashes": [ - "sha256:2c1b13fecc0893e946c65cbd5f36427861cffa4ea2201d8f6fca22e2a373b5e2", - "sha256:6f0956d2c23d8fa6e7691934d8c3930eadb44972cbbd1a7ae3a520f735d43359" - ], - "markers": "python_version >= '3.6'", - "version": "==5.1.1" - }, - "aniso8601": { - "hashes": [ - "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f", - "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973" - ], - "version": "==9.0.1" - }, - "attrs": { - "hashes": [ - "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4", - "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==21.4.0" - }, - "billiard": { - "hashes": [ - "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547", - "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b" - ], - "version": "==3.6.4.0" - }, - "celery": { - "hashes": [ - "sha256:8aacd02fc23a02760686d63dde1eb0daa9f594e735e73ea8fb15c2ff15cb608c", - "sha256:e2cd41667ad97d4f6a2f4672d1c6a6ebada194c619253058b5f23704aaadaa82" - ], - "index": "pypi", - "version": "==5.2.3" - }, - "click": { - "hashes": [ - "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e", - "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48" - ], - "markers": "python_version >= '3.7'", - "version": "==8.1.3" - }, - "click-didyoumean": { - "hashes": [ - "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667", - "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035" - ], - "markers": "python_full_version >= '3.6.2' and python_full_version < '4.0.0'", - "version": "==0.3.0" - }, - "click-plugins": { - "hashes": [ - "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b", - "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8" - ], - "version": "==1.1.1" - }, - "click-repl": { - "hashes": [ - "sha256:94b3fbbc9406a236f176e0506524b2937e4b23b6f4c0c0b2a0a83f8a64e9194b", - "sha256:cd12f68d745bf6151210790540b4cb064c7b13e571bc64b6957d98d120dacfd8" - ], - "version": "==0.2.0" - }, - "faker": { - "hashes": [ - "sha256:0301ace8365d98f3d0bf6e9a40200c8548e845d3812402ae1daf589effe3fb01", - "sha256:b1903db92175d78051858128ada397c7dc76f376f6967975419da232b3ebd429" - ], - "index": "pypi", - "version": "==13.7.0" - }, - "flask": { - "hashes": [ - "sha256:7b2fb8e934ddd50731893bdcdb00fc8c0315916f9fcd50d22c7cc1a95ab634e2", - 
"sha256:cb90f62f1d8e4dc4621f52106613488b5ba826b2e1e10a33eac92f723093ab6a" - ], - "index": "pypi", - "version": "==2.0.2" - }, - "flask-marshmallow": { - "hashes": [ - "sha256:2adcd782b5a4a6c5ae3c96701f320d8ca6997995a52b2661093c56cc3ed24754", - "sha256:bd01a6372cbe50e36f205cfff0fc5dab0b7b662c4c8b2c4fc06a3151b2950950" - ], - "index": "pypi", - "version": "==0.14.0" - }, - "flask-restplus": { - "hashes": [ - "sha256:a15d251923a8feb09a5d805c2f4d188555910a42c64d58f7dd281b8cac095f1b", - "sha256:a66e442d0bca08f389fc3d07b4d808fc89961285d12fb8013f7cf15516fa9f5c" - ], - "index": "pypi", - "version": "==0.13.0" - }, - "flask-sqlalchemy": { - "hashes": [ - "sha256:05b31d2034dd3f2a685cbbae4cfc4ed906b2a733cff7964ada450fd5e462b84e", - "sha256:bfc7150eaf809b1c283879302f04c42791136060c6eeb12c0c6674fb1291fae5" - ], - "index": "pypi", - "version": "==2.4.4" - }, - "greenlet": { - "hashes": [ - "sha256:0051c6f1f27cb756ffc0ffbac7d2cd48cb0362ac1736871399a739b2885134d3", - "sha256:00e44c8afdbe5467e4f7b5851be223be68adb4272f44696ee71fe46b7036a711", - "sha256:013d61294b6cd8fe3242932c1c5e36e5d1db2c8afb58606c5a67efce62c1f5fd", - "sha256:049fe7579230e44daef03a259faa24511d10ebfa44f69411d99e6a184fe68073", - "sha256:14d4f3cd4e8b524ae9b8aa567858beed70c392fdec26dbdb0a8a418392e71708", - "sha256:166eac03e48784a6a6e0e5f041cfebb1ab400b394db188c48b3a84737f505b67", - "sha256:17ff94e7a83aa8671a25bf5b59326ec26da379ace2ebc4411d690d80a7fbcf23", - "sha256:1e12bdc622676ce47ae9abbf455c189e442afdde8818d9da983085df6312e7a1", - "sha256:21915eb821a6b3d9d8eefdaf57d6c345b970ad722f856cd71739493ce003ad08", - "sha256:288c6a76705dc54fba69fbcb59904ae4ad768b4c768839b8ca5fdadec6dd8cfd", - "sha256:2bde6792f313f4e918caabc46532aa64aa27a0db05d75b20edfc5c6f46479de2", - "sha256:32ca72bbc673adbcfecb935bb3fb1b74e663d10a4b241aaa2f5a75fe1d1f90aa", - "sha256:356b3576ad078c89a6107caa9c50cc14e98e3a6c4874a37c3e0273e4baf33de8", - "sha256:40b951f601af999a8bf2ce8c71e8aaa4e8c6f78ff8afae7b808aae2dc50d4c40", - "sha256:572e1787d1460da79590bf44304abbc0a2da944ea64ec549188fa84d89bba7ab", - "sha256:58df5c2a0e293bf665a51f8a100d3e9956febfbf1d9aaf8c0677cf70218910c6", - "sha256:64e6175c2e53195278d7388c454e0b30997573f3f4bd63697f88d855f7a6a1fc", - "sha256:7227b47e73dedaa513cdebb98469705ef0d66eb5a1250144468e9c3097d6b59b", - "sha256:7418b6bfc7fe3331541b84bb2141c9baf1ec7132a7ecd9f375912eca810e714e", - "sha256:7cbd7574ce8e138bda9df4efc6bf2ab8572c9aff640d8ecfece1b006b68da963", - "sha256:7ff61ff178250f9bb3cd89752df0f1dd0e27316a8bd1465351652b1b4a4cdfd3", - "sha256:833e1551925ed51e6b44c800e71e77dacd7e49181fdc9ac9a0bf3714d515785d", - "sha256:8639cadfda96737427330a094476d4c7a56ac03de7265622fcf4cfe57c8ae18d", - "sha256:8c5d5b35f789a030ebb95bff352f1d27a93d81069f2adb3182d99882e095cefe", - "sha256:8c790abda465726cfb8bb08bd4ca9a5d0a7bd77c7ac1ca1b839ad823b948ea28", - "sha256:8d2f1fb53a421b410751887eb4ff21386d119ef9cde3797bf5e7ed49fb51a3b3", - "sha256:903bbd302a2378f984aef528f76d4c9b1748f318fe1294961c072bdc7f2ffa3e", - "sha256:93f81b134a165cc17123626ab8da2e30c0455441d4ab5576eed73a64c025b25c", - "sha256:95e69877983ea39b7303570fa6760f81a3eec23d0e3ab2021b7144b94d06202d", - "sha256:9633b3034d3d901f0a46b7939f8c4d64427dfba6bbc5a36b1a67364cf148a1b0", - "sha256:97e5306482182170ade15c4b0d8386ded995a07d7cc2ca8f27958d34d6736497", - "sha256:9f3cba480d3deb69f6ee2c1825060177a22c7826431458c697df88e6aeb3caee", - "sha256:aa5b467f15e78b82257319aebc78dd2915e4c1436c3c0d1ad6f53e47ba6e2713", - "sha256:abb7a75ed8b968f3061327c433a0fbd17b729947b400747c334a9c29a9af6c58", - 
"sha256:aec52725173bd3a7b56fe91bc56eccb26fbdff1386ef123abb63c84c5b43b63a", - "sha256:b11548073a2213d950c3f671aa88e6f83cda6e2fb97a8b6317b1b5b33d850e06", - "sha256:b1692f7d6bc45e3200844be0dba153612103db241691088626a33ff1f24a0d88", - "sha256:b336501a05e13b616ef81ce329c0e09ac5ed8c732d9ba7e3e983fcc1a9e86965", - "sha256:b8c008de9d0daba7b6666aa5bbfdc23dcd78cafc33997c9b7741ff6353bafb7f", - "sha256:b92e29e58bef6d9cfd340c72b04d74c4b4e9f70c9fa7c78b674d1fec18896dc4", - "sha256:be5f425ff1f5f4b3c1e33ad64ab994eed12fc284a6ea71c5243fd564502ecbe5", - "sha256:dd0b1e9e891f69e7675ba5c92e28b90eaa045f6ab134ffe70b52e948aa175b3c", - "sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a", - "sha256:e6a36bb9474218c7a5b27ae476035497a6990e21d04c279884eb10d9b290f1b1", - "sha256:e859fcb4cbe93504ea18008d1df98dee4f7766db66c435e4882ab35cf70cac43", - "sha256:eb6ea6da4c787111adf40f697b4e58732ee0942b5d3bd8f435277643329ba627", - "sha256:ec8c433b3ab0419100bd45b47c9c8551248a5aee30ca5e9d399a0b57ac04651b", - "sha256:eff9d20417ff9dcb0d25e2defc2574d10b491bf2e693b4e491914738b7908168", - "sha256:f0214eb2a23b85528310dad848ad2ac58e735612929c8072f6093f3585fd342d", - "sha256:f276df9830dba7a333544bd41070e8175762a7ac20350786b322b714b0e654f5", - "sha256:f3acda1924472472ddd60c29e5b9db0cec629fbe3c5c5accb74d6d6d14773478", - "sha256:f70a9e237bb792c7cc7e44c531fd48f5897961701cdaa06cf22fc14965c496cf", - "sha256:f9d29ca8a77117315101425ec7ec2a47a22ccf59f5593378fc4077ac5b754fce", - "sha256:fa877ca7f6b48054f847b61d6fa7bed5cebb663ebc55e018fda12db09dcc664c", - "sha256:fdcec0b8399108577ec290f55551d926d9a1fa6cad45882093a7a07ac5ec147b" - ], - "markers": "python_version >= '3' and (platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32'))))))", - "version": "==1.1.2" - }, - "importlib-resources": { - "hashes": [ - "sha256:b6062987dfc51f0fcb809187cffbd60f35df7acb4589091f154214af6d0d49d3", - "sha256:e447dc01619b1e951286f3929be820029d48c75eb25d265c28b92a16548212b8" - ], - "markers": "python_version < '3.9'", - "version": "==5.7.1" - }, - "itsdangerous": { - "hashes": [ - "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44", - "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a" - ], - "markers": "python_version >= '3.7'", - "version": "==2.1.2" - }, - "jinja2": { - "hashes": [ - "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852", - "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61" - ], - "markers": "python_version >= '3.7'", - "version": "==3.1.2" - }, - "jsonschema": { - "hashes": [ - "sha256:71b5e39324422543546572954ce71c67728922c104902cb7ce252e522235b33f", - "sha256:7c6d882619340c3347a1bf7315e147e6d3dae439033ae6383d6acb908c101dfc" - ], - "markers": "python_version >= '3.7'", - "version": "==4.5.1" - }, - "kombu": { - "hashes": [ - "sha256:37cee3ee725f94ea8bb173eaab7c1760203ea53bbebae226328600f9d2799610", - "sha256:8b213b24293d3417bcf0d2f5537b7f756079e3ea232a8386dcc89a59fd2361a4" - ], - "markers": "python_version >= '3.7'", - "version": "==5.2.4" - }, - "markupsafe": { - "hashes": [ - "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003", - "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88", - "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5", - "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7", - 
"sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a", - "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603", - "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1", - "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135", - "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247", - "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6", - "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601", - "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77", - "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02", - "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e", - "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63", - "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f", - "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980", - "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b", - "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812", - "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff", - "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96", - "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1", - "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925", - "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a", - "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6", - "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e", - "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f", - "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4", - "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f", - "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3", - "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c", - "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a", - "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417", - "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a", - "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a", - "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37", - "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452", - "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933", - "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a", - "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7" - ], - "markers": "python_version >= '3.7'", - "version": "==2.1.1" - }, - "marshmallow": { - "hashes": [ - "sha256:73facc37462dfc0b27f571bdaffbef7709e19f7a616beb3802ea425b07843f4e", - "sha256:e26763201474b588d144dae9a32bdd945cd26a06c943bc746a6882e850475378" - ], - "index": "pypi", - "version": "==3.9.1" - }, - "marshmallow-sqlalchemy": { - "hashes": [ - "sha256:93f47b880ac7070f7b34c8ac0a71eeec3f8582a22e5c0330c1c436e3f5f99a37", - "sha256:d051cf013c075c43e1ee5c4b01f8fab6dd6b140dab6825be45875f674a0d289c" - ], - "index": "pypi", - "version": "==0.24.1" - }, - "prompt-toolkit": { - "hashes": [ - "sha256:62291dad495e665fca0bda814e342c69952086afb0f4094d0893d357e5c78752", - "sha256:bd640f60e8cecd74f0dc249713d433ace2ddc62b65ee07f96d358e0b152b6ea7" - ], - "markers": 
"python_full_version >= '3.6.2'", - "version": "==3.0.29" - }, - "psycopg2-binary": { - "hashes": [ - "sha256:01310cf4cf26db9aea5158c217caa92d291f0500051a6469ac52166e1a16f5b7", - "sha256:083a55275f09a62b8ca4902dd11f4b33075b743cf0d360419e2051a8a5d5ff76", - "sha256:090f3348c0ab2cceb6dfbe6bf721ef61262ddf518cd6cc6ecc7d334996d64efa", - "sha256:0a29729145aaaf1ad8bafe663131890e2111f13416b60e460dae0a96af5905c9", - "sha256:0c9d5450c566c80c396b7402895c4369a410cab5a82707b11aee1e624da7d004", - "sha256:10bb90fb4d523a2aa67773d4ff2b833ec00857f5912bafcfd5f5414e45280fb1", - "sha256:12b11322ea00ad8db8c46f18b7dfc47ae215e4df55b46c67a94b4effbaec7094", - "sha256:152f09f57417b831418304c7f30d727dc83a12761627bb826951692cc6491e57", - "sha256:15803fa813ea05bef089fa78835118b5434204f3a17cb9f1e5dbfd0b9deea5af", - "sha256:15c4e4cfa45f5a60599d9cec5f46cd7b1b29d86a6390ec23e8eebaae84e64554", - "sha256:183a517a3a63503f70f808b58bfbf962f23d73b6dccddae5aa56152ef2bcb232", - "sha256:1f14c8b0942714eb3c74e1e71700cbbcb415acbc311c730370e70c578a44a25c", - "sha256:1f6b813106a3abdf7b03640d36e24669234120c72e91d5cbaeb87c5f7c36c65b", - "sha256:280b0bb5cbfe8039205c7981cceb006156a675362a00fe29b16fbc264e242834", - "sha256:2d872e3c9d5d075a2e104540965a1cf898b52274a5923936e5bfddb58c59c7c2", - "sha256:2f9ffd643bc7349eeb664eba8864d9e01f057880f510e4681ba40a6532f93c71", - "sha256:3303f8807f342641851578ee7ed1f3efc9802d00a6f83c101d21c608cb864460", - "sha256:35168209c9d51b145e459e05c31a9eaeffa9a6b0fd61689b48e07464ffd1a83e", - "sha256:3a79d622f5206d695d7824cbf609a4f5b88ea6d6dab5f7c147fc6d333a8787e4", - "sha256:404224e5fef3b193f892abdbf8961ce20e0b6642886cfe1fe1923f41aaa75c9d", - "sha256:46f0e0a6b5fa5851bbd9ab1bc805eef362d3a230fbdfbc209f4a236d0a7a990d", - "sha256:47133f3f872faf28c1e87d4357220e809dfd3fa7c64295a4a148bcd1e6e34ec9", - "sha256:526ea0378246d9b080148f2d6681229f4b5964543c170dd10bf4faaab6e0d27f", - "sha256:53293533fcbb94c202b7c800a12c873cfe24599656b341f56e71dd2b557be063", - "sha256:539b28661b71da7c0e428692438efbcd048ca21ea81af618d845e06ebfd29478", - "sha256:57804fc02ca3ce0dbfbef35c4b3a4a774da66d66ea20f4bda601294ad2ea6092", - "sha256:63638d875be8c2784cfc952c9ac34e2b50e43f9f0a0660b65e2a87d656b3116c", - "sha256:6472a178e291b59e7f16ab49ec8b4f3bdada0a879c68d3817ff0963e722a82ce", - "sha256:68641a34023d306be959101b345732360fc2ea4938982309b786f7be1b43a4a1", - "sha256:6e82d38390a03da28c7985b394ec3f56873174e2c88130e6966cb1c946508e65", - "sha256:761df5313dc15da1502b21453642d7599d26be88bff659382f8f9747c7ebea4e", - "sha256:7af0dd86ddb2f8af5da57a976d27cd2cd15510518d582b478fbb2292428710b4", - "sha256:7b1e9b80afca7b7a386ef087db614faebbf8839b7f4db5eb107d0f1a53225029", - "sha256:874a52ecab70af13e899f7847b3e074eeb16ebac5615665db33bce8a1009cf33", - "sha256:887dd9aac71765ac0d0bac1d0d4b4f2c99d5f5c1382d8b770404f0f3d0ce8a39", - "sha256:8b344adbb9a862de0c635f4f0425b7958bf5a4b927c8594e6e8d261775796d53", - "sha256:8fc53f9af09426a61db9ba357865c77f26076d48669f2e1bb24d85a22fb52307", - "sha256:91920527dea30175cc02a1099f331aa8c1ba39bf8b7762b7b56cbf54bc5cce42", - "sha256:93cd1967a18aa0edd4b95b1dfd554cf15af657cb606280996d393dadc88c3c35", - "sha256:99485cab9ba0fa9b84f1f9e1fef106f44a46ef6afdeec8885e0b88d0772b49e8", - "sha256:9d29409b625a143649d03d0fd7b57e4b92e0ecad9726ba682244b73be91d2fdb", - "sha256:a29b3ca4ec9defec6d42bf5feb36bb5817ba3c0230dd83b4edf4bf02684cd0ae", - "sha256:a9e1f75f96ea388fbcef36c70640c4efbe4650658f3d6a2967b4cc70e907352e", - "sha256:accfe7e982411da3178ec690baaceaad3c278652998b2c45828aaac66cd8285f", - 
"sha256:adf20d9a67e0b6393eac162eb81fb10bc9130a80540f4df7e7355c2dd4af9fba", - "sha256:af9813db73395fb1fc211bac696faea4ca9ef53f32dc0cfa27e4e7cf766dcf24", - "sha256:b1c8068513f5b158cf7e29c43a77eb34b407db29aca749d3eb9293ee0d3103ca", - "sha256:bda845b664bb6c91446ca9609fc69f7db6c334ec5e4adc87571c34e4f47b7ddb", - "sha256:c381bda330ddf2fccbafab789d83ebc6c53db126e4383e73794c74eedce855ef", - "sha256:c3ae8e75eb7160851e59adc77b3a19a976e50622e44fd4fd47b8b18208189d42", - "sha256:d1c1b569ecafe3a69380a94e6ae09a4789bbb23666f3d3a08d06bbd2451f5ef1", - "sha256:def68d7c21984b0f8218e8a15d514f714d96904265164f75f8d3a70f9c295667", - "sha256:dffc08ca91c9ac09008870c9eb77b00a46b3378719584059c034b8945e26b272", - "sha256:e3699852e22aa68c10de06524a3721ade969abf382da95884e6a10ff798f9281", - "sha256:e847774f8ffd5b398a75bc1c18fbb56564cda3d629fe68fd81971fece2d3c67e", - "sha256:ffb7a888a047696e7f8240d649b43fb3644f14f0ee229077e7f6b9f9081635bd" - ], - "index": "pypi", - "version": "==2.9.3" - }, - "pyrsistent": { - "hashes": [ - "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c", - "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc", - "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e", - "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26", - "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec", - "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286", - "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045", - "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec", - "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8", - "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c", - "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca", - "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22", - "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a", - "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96", - "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc", - "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1", - "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07", - "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6", - "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b", - "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5", - "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6" - ], - "markers": "python_version >= '3.7'", - "version": "==0.18.1" - }, - "python-dateutil": { - "hashes": [ - "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", - "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", - "version": "==2.8.2" - }, - "python-dotenv": { - "hashes": [ - "sha256:b7e3b04a59693c42c36f9ab1cc2acc46fa5df8c78e178fc33a8d4cd05c8d498f", - "sha256:d92a187be61fe482e4fd675b6d52200e7be63a12b724abbf931a40ce4fa92938" - ], - "index": "pypi", - "version": "==0.20.0" - }, - "pytz": { - "hashes": [ - "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7", - "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c" - ], - "version": "==2022.1" - }, - "setuptools": { - "hashes": [ - 
"sha256:22c7348c6d2976a52632c67f7ab0cdf40147db7789f9aed18734643fe9cf3373", - "sha256:4ce92f1e1f8f01233ee9952c04f6b81d1e02939d6e1b488428154974a4d0783e" - ], - "markers": "python_version >= '3.6'", - "version": "==59.6.0" - }, - "six": { - "hashes": [ - "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", - "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", - "version": "==1.16.0" - }, - "sqlalchemy": { - "hashes": [ - "sha256:09c606d8238feae2f360b8742ffbe67741937eb0a05b57f536948d198a3def96", - "sha256:166a3887ec355f7d2f12738f7fa25dc8ac541867147a255f790f2f41f614cb44", - "sha256:16abf35af37a3d5af92725fc9ec507dd9e9183d261c2069b6606d60981ed1c6e", - "sha256:2e885548da361aa3f8a9433db4cfb335b2107e533bf314359ae3952821d84b3e", - "sha256:2ec89bf98cc6a0f5d1e28e3ad28e9be6f3b4bdbd521a4053c7ae8d5e1289a8a1", - "sha256:2ecac4db8c1aa4a269f5829df7e706639a24b780d2ac46b3e485cbbd27ec0028", - "sha256:316c7e5304dda3e3ad711569ac5d02698bbc71299b168ac56a7076b86259f7ea", - "sha256:5041474dcab7973baa91ec1f3112049a9dd4652898d6a95a6a895ff5c58beb6b", - "sha256:53d2d9ee93970c969bc4e3c78b1277d7129554642f6ffea039c282c7dc4577bc", - "sha256:5864a83bd345871ad9699ce466388f836db7572003d67d9392a71998092210e3", - "sha256:5c90ef955d429966d84326d772eb34333178737ebb669845f1d529eb00c75e72", - "sha256:5d50cb71c1dbed70646d521a0975fb0f92b7c3f84c61fa59e07be23a1aaeecfc", - "sha256:64678ac321d64a45901ef2e24725ec5e783f1f4a588305e196431447e7ace243", - "sha256:64d796e9af522162f7f2bf7a3c5531a0a550764c426782797bbeed809d0646c5", - "sha256:6cb4c4f57a20710cea277edf720d249d514e587f796b75785ad2c25e1c0fed26", - "sha256:6e1fe00ee85c768807f2a139b83469c1e52a9ffd58a6eb51aa7aeb524325ab18", - "sha256:6e859fa96605027bd50d8e966db1c4e1b03e7b3267abbc4b89ae658c99393c58", - "sha256:7a052bd9f53004f8993c624c452dfad8ec600f572dd0ed0445fbe64b22f5570e", - "sha256:81e53bd383c2c33de9d578bfcc243f559bd3801a0e57f2bcc9a943c790662e0c", - "sha256:83cf3077712be9f65c9aaa0b5bc47bc1a44789fd45053e2e3ecd59ff17c63fe9", - "sha256:8b20c4178ead9bc398be479428568ff31b6c296eb22e75776273781a6551973f", - "sha256:8d07fe2de0325d06e7e73281e9a9b5e259fbd7cbfbe398a0433cbb0082ad8fa7", - "sha256:a0ae3aa2e86a4613f2d4c49eb7da23da536e6ce80b2bfd60bbb2f55fc02b0b32", - "sha256:af2587ae11400157753115612d6c6ad255143efba791406ad8a0cbcccf2edcb3", - "sha256:b3db741beaa983d4cbf9087558620e7787106319f7e63a066990a70657dd6b35", - "sha256:be094460930087e50fd08297db9d7aadaed8408ad896baf758e9190c335632da", - "sha256:cb441ca461bf97d00877b607f132772644b623518b39ced54da433215adce691", - "sha256:ce20f5da141f8af26c123ebaa1b7771835ca6c161225ce728962a79054f528c3", - "sha256:d57ac32f8dc731fddeb6f5d1358b4ca5456e72594e664769f0a9163f13df2a31", - "sha256:dce3468bf1fc12374a1a732c9efd146ce034f91bb0482b602a9311cb6166a920", - "sha256:e12532c4d3f614678623da5d852f038ace1f01869b89f003ed6fe8c793f0c6a3", - "sha256:e74ce103b81c375c3853b436297952ef8d7863d801dcffb6728d01544e5191b5", - "sha256:f0394a3acfb8925db178f7728adb38c027ed7e303665b225906bfa8099dc1ce8", - "sha256:f522214f6749bc073262529c056f7dfd660f3b5ec4180c5354d985eb7219801e", - "sha256:fbf8c09fe9728168f8cc1b40c239eab10baf9c422c18be7f53213d70434dea43", - "sha256:fca8322e04b2dde722fcb0558682740eebd3bd239bea7a0d0febbc190e99dc15" - ], - "index": "pypi", - "version": "==1.4.36" - }, - "vine": { - "hashes": [ - "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30", - "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e" 
- ], - "markers": "python_version >= '3.6'", - "version": "==5.0.0" - }, - "wcwidth": { - "hashes": [ - "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784", - "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83" - ], - "version": "==0.2.5" - }, - "werkzeug": { - "hashes": [ - "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6", - "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255" - ], - "markers": "python_version >= '3.7'", - "version": "==2.1.2" - }, - "zipp": { - "hashes": [ - "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad", - "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099" - ], - "markers": "python_version < '3.10'", - "version": "==3.8.0" - } - }, - "develop": {} -} diff --git a/servers/tenant/app.Dockerfile b/servers/tenant/app.Dockerfile deleted file mode 100644 index 0427bd6..0000000 --- a/servers/tenant/app.Dockerfile +++ /dev/null @@ -1,8 +0,0 @@ -FROM python:3.9 -EXPOSE 5000 -RUN apt-get update && apt-get -y install qpdf poppler-utils && apt-get install -y build-essential libpoppler-cpp-dev pkg-config python-dev -COPY requirements.txt . -RUN pip3 install --upgrade pip -RUN pip3 install -r requirements.txt -WORKDIR /opt/metadata-extraction -ENV PYTHONPATH . \ No newline at end of file diff --git a/servers/tenant/blueprints/event_driven/pieces.py b/servers/tenant/blueprints/event_driven/pieces.py deleted file mode 100644 index da90292..0000000 --- a/servers/tenant/blueprints/event_driven/pieces.py +++ /dev/null @@ -1,56 +0,0 @@ -import json -import datetime -from flask import request, jsonify, Blueprint -from flask_cors import cross_origin - -import sys - -sys.path.insert(0, "..") # import parent folder - -from controllers.controllerMapper import PieceController -from models.models import TicketEvents, PieceEvents -from utils import ( - AlchemyEncoder, - require_appkey, - alchemyConverter, -) - -pieces_bp = Blueprint("pieces_bp", __name__, url_prefix="piece") - - -# TODO: USER BASED AUTH - - -pieces_controller = PieceController() - - -""" -Route expects requests of format: - -{ - "piece_id" : "value", - "filters" : { - "field1": "value1", - "field2": "value2", - .... - } -} - -""" - - -@pieces_bp.route("/{piece_id}", methods=["GET"]) -@cross_origin(supports_credentials=True) -@require_appkey -def pieces_get_history(piece_id): - filters = request.args.get("filters") - filters.extend({"piece_id": piece_id}) - - pieces = pieces_controller._get_latest_event_objects( - page=1, number_of_res=20, filters=filters - ) - - res = alchemyConverter(pieces) - response = json.dumps(res, cls=AlchemyEncoder) - - return response diff --git a/servers/tenant/celery.Dockerfile b/servers/tenant/celery.Dockerfile deleted file mode 100644 index d394f79..0000000 --- a/servers/tenant/celery.Dockerfile +++ /dev/null @@ -1,8 +0,0 @@ -FROM python:3.9 -RUN apt-get update && apt-get -y install qpdf poppler-utils && apt-get install -y build-essential libpoppler-cpp-dev pkg-config python-dev -RUN apt -y install tesseract-ocr && apt -y install libtesseract-dev -COPY requirements.txt . -RUN pip3 install --upgrade pip -RUN pip3 install -r requirements.txt -WORKDIR /opt/metadata-extraction/server -ENV PYTHONPATH .. 
\ No newline at end of file
diff --git a/servers/tenant/celery_client.py b/servers/tenant/celery_client.py
index 7b7bedc..2c90f45 100644
--- a/servers/tenant/celery_client.py
+++ b/servers/tenant/celery_client.py
@@ -1,6 +1,54 @@
 from celery import Celery
 from celery.utils.log import get_logger
+import os
+import io
+from uuid import uuid4
+import traceback
+# import tenant.controllers.DocumentController as document_controller
+import PyPDF2
+import extraction.app as ex
+from celery import group

 CELERY_BROKER_URL = 'redis://redis:6379/0'
 client = Celery(__name__, broker=CELERY_BROKER_URL)
 logger = get_logger(__name__)
+FAILURE = -1
+SUCCESS = 0
+UPLOAD_FOLDER = "/opt/metadata-extraction/uploads"
+
+
+def fan_out(file):
+    folder_uuid = uuid4()
+    with io.BytesIO(file.read()) as open_pdf_file:
+        read_pdf = PyPDF2.PdfFileReader(open_pdf_file)
+        num_pages = read_pdf.getNumPages()
+        folder = f"{UPLOAD_FOLDER}/{folder_uuid}"
+        os.mkdir(folder)
+        for i in range(num_pages):
+            output_pdf = PyPDF2.PdfFileWriter()
+            output_pdf.addPage(read_pdf.getPage(i))
+            file_uuid = uuid4()
+            f_dir = f"{folder}/{file_uuid}"
+            os.mkdir(f_dir)
+            with open(f"{f_dir}/{file_uuid}.pdf", "wb") as f:
+                output_pdf.write(f)
+    file.close()
+    pdf_folders = os.listdir(folder)
+    return group([work.s(f"{folder}/{pdf_folder}") for pdf_folder in pdf_folders])
+
+
+def do_all_work(tasks_to_run):
+    result = tasks_to_run.apply_async()
+    return result
+
+
+@client.task
+def work(pdf_folder):
+    pdf_file = f"{pdf_folder}.pdf"
+    try:
+        doclist = ex.work(pdf_folder)
+    except Exception as e:
+        logger.info(f"file {pdf_folder}/{pdf_file} error. msg: {str(e)}")
+        logger.info(traceback.format_exc())
+        return {"status": FAILURE, "folder": pdf_folder}
+    return {"status": SUCCESS, "folder": pdf_folder, "doclist": doclist}
\ No newline at end of file
diff --git a/servers/tenant/docker-compose.yml b/servers/tenant/docker-compose.yml
deleted file mode 100644
index 4823ffb..0000000
--- a/servers/tenant/docker-compose.yml
+++ /dev/null
@@ -1,52 +0,0 @@
-version: '3'
-services:
-  host-injector:
-    container_name: host_c
-    volumes:
-      - '/var/run/docker.sock:/tmp/docker.sock'
-      - '/etc/hosts:/tmp/hosts'
-    image: dvdarias/docker-hoster
-  redis:
-    hostname: redis.wlp.com
-    image: redis:latest
-    container_name: rd01
-    ports:
-      - "6379:6379"
-    volumes:
-      - "redis:/data"
-  worker:
-    hostname: celery.wlp.com
-    depends_on:
-      - "redis"
-    build:
-      context: .
-      dockerfile: celery.Dockerfile
-    volumes:
-      - .:/opt/metadata-extraction
-    command: celery -A __init__.client worker --loglevel=info -f celery.logs -Ofair -c 2
-    tty: true
-  app:
-    hostname: app.wlp.com
-    depends_on:
-      - "redis"
-    build:
-      context: .
-      dockerfile: app.Dockerfile
-    volumes:
-      - .:/opt/metadata-extraction
-    container_name: app01
-    ports:
-      - "5000:5000"
-    command: python3 server/__init__.py
-    tty: true
-  flower:
-    hostname: flower.wlp.com
-    image: mher/flower
-    container_name: flower01
-    environment:
-      - CELERY_BROKER_URL=redis://redis:6379/0
-      - FLOWER_PORT=8888
-    ports:
-      - "8888:8888"
-volumes:
-  redis:
\ No newline at end of file
diff --git a/servers/tenant/kill-cluster.sh b/servers/tenant/kill-cluster.sh
deleted file mode 100644
index 7e6a281..0000000
--- a/servers/tenant/kill-cluster.sh
+++ /dev/null
@@ -1 +0,0 @@
-docker-compose kill
\ No newline at end of file
diff --git a/servers/tenant/models/__init__.py b/servers/tenant/models/__init__.py
index 5f179c7..206e2a6 100644
--- a/servers/tenant/models/__init__.py
+++ b/servers/tenant/models/__init__.py
@@ -4,7 +4,7 @@ from sqlalchemy.orm import sessionmaker

 db_port = os.getenv("DB_PORT", "5432")
-db_name = os.getenv("DB_NAME", "tenant_database")
+db_name = os.getenv("DB_NAME", "tenant_db")
 db_username = os.getenv("DB_USERNAME", "postgres")
 db_password = os.getenv("DB_PASSWORD", "password")
 db_url = os.getenv("DB_URL", "ship-solver.ccxmktobiszx.ca-central-1.rds.amazonaws.com")
diff --git a/servers/tenant/requirements.txt b/servers/tenant/requirements.txt
deleted file mode 100644
index 970152f..0000000
--- a/servers/tenant/requirements.txt
+++ /dev/null
@@ -1,60 +0,0 @@
-amqp==5.0.9
-billiard==3.6.4.0
-celery==5.2.3
-cffi==1.15.0
-chardet==4.0.0
-click==8.0.3
-click-didyoumean==0.3.0
-click-plugins==1.1.1
-click-repl==0.2.0
-coloredlogs==15.0.1
-cryptography==36.0.1
-Deprecated==1.2.13
-Flask==2.0.2
-humanfriendly==10.0
-img2pdf==0.4.3
-importlib-resources==5.4.0
-itsdangerous==2.0.1
-Jinja2==3.0.3
-kombu==5.2.3
-lxml==4.7.1
-MarkupSafe==2.0.1
-multilingual-pdf2text==1.1.0
-numpy==1.22.0
-ocrmypdf==13.2.0
-Flask-Cors
-packaging==21.3
-pdf2image==1.16.0
-pdfminer.six==20211012
-pdfplumber==0.6.0
-pdftotext==2.2.2
-pikepdf==4.3.1
-Pillow==9.0.0
-pluggy==1.0.0
-prompt-toolkit==3.0.24
-pycparser==2.21
-pydantic==1.9.0
-pyparsing==3.0.6
-PyPDF2==1.26.0
-pytesseract==0.3.8
-pytz==2021.3
-redis==4.1.2
-reportlab==3.6.5
-six==1.16.0
-tqdm==4.62.3
-typing-extensions==4.0.1
-vine==5.0.0
-Wand==0.6.7
-wcwidth==0.2.5
-Werkzeug==2.0.2
-wrapt==1.13.3
-zipp==3.7.0
-sqlalchemy
-psycopg2-binary
-Faker==13.7.0
-flask-restplus==0.13.0
-flask-sqlalchemy==2.4.4
-flask-marshmallow==0.14.0
-marshmallow-sqlalchemy==0.24.1
-marshmallow==3.9.1
-python-dotenv==0.20.0
\ No newline at end of file
diff --git a/servers/tenant/server.py b/servers/tenant/server.py
index 2a12e84..36774ac 100644
--- a/servers/tenant/server.py
+++ b/servers/tenant/server.py
@@ -1,36 +1,21 @@
-from config import app
+from tenant.config import app
 from blueprints.event_driven.ticket import ticket_bp
 from blueprints.simple.customers import customer_bp
 from blueprints.simple.users import user_bp
 from flask_cors import cross_origin
-from servers.tenant.blueprints.simple.document import pdf_bp  # TODO: Move this in seperate microservice
-
-# from models.__init__ import engine, Base
-# from models.models import INDEXES
+from tenant.blueprints.simple.document import document_bp  # TODO: Move this in seperate microservice
 from flask import Blueprint
 from dotenv import load_dotenv

 load_dotenv(".env", override=True)

 parent = Blueprint("api", __name__, url_prefix="/api")
-parent.register_blueprint(pdf_bp)
+parent.register_blueprint(document_bp)
 parent.register_blueprint(ticket_bp)
 parent.register_blueprint(customer_bp)
 parent.register_blueprint(user_bp)

-# @app.before_first_request
-# def instantiate_database():  # creates tables and indexes from models if not instantiated
-#     try:
-#         # create indexes
-#         for index in INDEXES:
-#             index.create(bind=engine)
-#     except:
-#         pass
-
-#     # create all tables
-#     Base.metadata.create_all(engine)

 @app.route("/")
 def hello_world():
@@ -42,4 +27,4 @@ def hello_world():
     print("REGISTERING BLUEPRINT")
     app.register_blueprint(parent)

-    app.run(debug=True, host="0.0.0.0", port=6767)
+    app.run(debug=True, host="0.0.0.0", port=5000)
diff --git a/servers/tenant/start-cluster.sh b/servers/tenant/start-cluster.sh
deleted file mode 100755
index 46eee91..0000000
--- a/servers/tenant/start-cluster.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-mkdir uploads -p
-docker-compose up -d --build
\ No newline at end of file

From 0f596ce5d2d0cd67cb7e1e94717b6c55dd6bb370 Mon Sep 17 00:00:00 2001
From: Satwik Pattanaik
Date: Mon, 4 Jul 2022 20:11:33 -0400
Subject: [PATCH 16/40] WIP

---
 servers/tenant/controllers/baseController.py | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/servers/tenant/controllers/baseController.py b/servers/tenant/controllers/baseController.py
index 8a95ce9..463a64f 100644
--- a/servers/tenant/controllers/baseController.py
+++ b/servers/tenant/controllers/baseController.py
@@ -93,6 +93,20 @@ def _get(self, filters, limit=5000):
         )

         return objects
+
+    def _get(self, lim, filters):
+        if not filters:
+            filters = []
+
+        objects = (
+            self.session.query(self.model)
+            .filter(*convert_dict_to_alchemy_filters(filters))
+            .group_by(self.model.non_prim_identifying_column_name)
+            .order_by(self.model.timestamp)
+            .limit(lim)
+        )
+
+        return objects

     def _get_count(self, filters):
         if not filters:

From 057c206506e2f9e3bdc54f7aae448cbcc6f4c136 Mon Sep 17 00:00:00 2001
From: Dante Mazza
Date: Tue, 5 Jul 2022 02:31:08 -0400
Subject: [PATCH 17/40] modifying db schema

---
 .../tenant/blueprints/event_driven/ticket.py | 22 +++++
 servers/tenant/blueprints/simple/pdf.py      | 43 ++++++++++
 servers/tenant/blueprints/simple/users.py    |  4 +
 servers/tenant/controllers/baseController.py | 35 +++++---
 .../tenant/controllers/controllerMapper.py   |  8 ++
 servers/tenant/database/index_creation.sql   |  3 +
 servers/tenant/database/table_creation.sql   | 53 ++++++++++++
 servers/tenant/models/models.py              | 31 +++++++
 servers/tenant/requirements.txt              |  8 ++
 servers/tenant/server.py                     |  4 +
 servers/tenant/test/test.py                  | 86 +++++++++++++++++++
 11 files changed, 286 insertions(+), 11 deletions(-)
 create mode 100644 servers/tenant/blueprints/simple/pdf.py

diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py
index f919bcd..e1bdb3f 100644
--- a/servers/tenant/blueprints/event_driven/ticket.py
+++ b/servers/tenant/blueprints/event_driven/ticket.py
@@ -1,9 +1,16 @@
 import json
+<<<<<<< HEAD
 from datetime import datetime
 from wsgiref import validate

 from numpy import number
 from flask import make_response, request, jsonify, Blueprint
+=======
+import datetime
+
+from numpy import number
+from flask import request, jsonify, Blueprint
+>>>>>>> modifying db schema


 import sys
@@ -93,6 +100,7 @@ def ticket_post():  # create ticket
     ticket_dict["pieces"] = PIECES_SEPERATOR.join(ticket_dict["pieces"])

     ticket_event = ticket_controller._create_base_event(ticket_dict)
+<<<<<<< HEAD

     response = {"ticketId": ticket_event.ticketId}
     return make_response(json.dumps(response))
@@ -127,6 +135,20 @@ def get_clean_filters_dict(immutable_args):
     if "limit" in sql_filters:
         del sql_filters["limit"]

     return sql_filters
+=======
+@ticket_bp.route("/", methods=["GET"])
+# @require_appkey
+def ticket_get_all():
+
+    filters = request.args.get("filters") or {}
+    limit = request.args.get("limit") or 1
+
+    data = ticket_controller._get_latest_event_objects(filters, number_of_res=limit)
+    print("data------------------")
+    print(data)
+    res = alchemyConverter(data)
+    response = json.dumps(res, cls=AlchemyEncoder)
+>>>>>>> modifying db schema

 def validate_date_format(date_text):
diff --git a/servers/tenant/blueprints/simple/pdf.py b/servers/tenant/blueprints/simple/pdf.py
new file mode 100644
index 0000000..84361fa
--- /dev/null
+++ b/servers/tenant/blueprints/simple/pdf.py
@@ -0,0 +1,43 @@
+import os
+from flask import request, jsonify, Blueprint
+import io
+from uuid import uuid4
+import traceback
+from celery_client import client, logger
+from controllers.pdfController import PDFController
+
+pdf_bp = Blueprint("pdf_bp", __name__, url_prefix="document")
+
+pdfcontroller = PDFController()
+
+
+@pdf_bp.route("", methods=["POST"])
+def pdf_post():
+    if "file" not in request.files:
+        res = jsonify({"message": "No file part in the request"})
+        res.status_code = 400
+        return res
+
+    file = request.files["file"]
+
+    if file.filename == "":
+        res = jsonify({"message": "No file selected for uploading"})
+        res.status_code = 400
+        return res
+    if file and file.filename.split(".")[-1].lower() == "pdf":
+        pdfcontroller.process_files()
+        resp = jsonify({"message": "File successfully uploaded"})
+        resp.status_code = 202
+        return resp
+    else:
+        resp = jsonify({"message": "Allowed file types are pdf only"})
+        resp.status_code = 400
+        return resp
+
+
+@pdf_bp.route("{pdf_id}", methods=["GET"])
+def pdf_get():
+    res = jsonify({"message": "Please specify PDFId"})
+    # TODO ...
+ res.status_code = 400 + return res diff --git a/servers/tenant/blueprints/simple/users.py b/servers/tenant/blueprints/simple/users.py index 7bb1f44..1f73efe 100644 --- a/servers/tenant/blueprints/simple/users.py +++ b/servers/tenant/blueprints/simple/users.py @@ -32,7 +32,11 @@ def user_post(): # create ticket @user_bp.route("", methods=["PUT"]) +<<<<<<< HEAD @auth_required() +======= +@require_appkey +>>>>>>> modifying db schema def user_modify(): userId = request.form["userId"] diff --git a/servers/tenant/controllers/baseController.py b/servers/tenant/controllers/baseController.py index 463a64f..21837ba 100644 --- a/servers/tenant/controllers/baseController.py +++ b/servers/tenant/controllers/baseController.py @@ -81,6 +81,7 @@ def _delete(self, filters=[]): self.session.commit() +<<<<<<< HEAD def _get(self, filters, limit=5000): if not filters: filters = [] @@ -91,23 +92,22 @@ def _get(self, filters, limit=5000): .limit(limit) .all() ) - - return objects - - def _get(self, lim, filters): +======= + def _get(self, model, filters, limit=500): if not filters: filters = [] - objects = ( - self.session.query(self.model) - .filter(*convert_dict_to_alchemy_filters(filters)) - .group_by(self.model.non_prim_identifying_column_name) - .order_by(self.model.timestamp) - .limit(lim) - ) + objects = self.session.query(self.model) \ + .filter(*convert_dict_to_alchemy_filters(model, filters)) \ + .group_by(self.model.non_prim_identifying_column_name) \ + .order_by(self.model.timestamp) \ + .limit(limit) + +>>>>>>> modifying db schema return objects + def _get_count(self, filters): if not filters: filters = [] @@ -147,6 +147,7 @@ def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): # .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) # .group_by(self.model.non_prim_identifying_column_name) # .order_by(self.model.timestamp) +<<<<<<< HEAD # .limit(number_of_res) # .all() # ) @@ -165,6 +166,18 @@ def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): print("LATEST_OBJS-------") print(latest_objs) return latest_objs +======= + # .limit(number_of_res).all() + # ) + latest_objs = ( + self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) + .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) + .limit(number_of_res).all() + ) + + # latest_objs = self.session.query(self.model, subquery).order_by(self.model.timestamp).all() + return latest_objs[0] +>>>>>>> modifying db schema # def _get_latest_event_objects_from_start_date(self, start_datetime, filters={}): diff --git a/servers/tenant/controllers/controllerMapper.py b/servers/tenant/controllers/controllerMapper.py index 3797e09..4ce5a1f 100644 --- a/servers/tenant/controllers/controllerMapper.py +++ b/servers/tenant/controllers/controllerMapper.py @@ -20,6 +20,7 @@ def __init__(self): super().__init__(Customers) +<<<<<<< HEAD class TicketStatusController(BaseController): def __init__(self): super().__init__(TicketStatus) @@ -86,6 +87,13 @@ def convert_to_desc(self, milestones): class InventoryMilestonesController(MilestoneController): def __init__(self): super().__init__(InventoryMilestones) +======= +class TicketController(BaseTimeSeriesController): + def __init__(self): + super().__init__(TicketEvents) + + +>>>>>>> modifying db schema def convert_to_desc(self, milestones): string_milestones = [] diff --git a/servers/tenant/database/index_creation.sql b/servers/tenant/database/index_creation.sql index 4c55377..3014758 100644 --- 
a/servers/tenant/database/index_creation.sql +++ b/servers/tenant/database/index_creation.sql @@ -1,4 +1,7 @@ +<<<<<<< HEAD -- SQLBook: Code +======= +>>>>>>> modifying db schema CREATE INDEX idx_ticketEvents_comp ON TicketEvents(ticketEventId, timestamp); CREATE INDEX idx_ticketEvents_ts ON TicketEvents(timestamp); diff --git a/servers/tenant/database/table_creation.sql b/servers/tenant/database/table_creation.sql index f27a70c..d6ecb25 100644 --- a/servers/tenant/database/table_creation.sql +++ b/servers/tenant/database/table_creation.sql @@ -35,6 +35,7 @@ CREATE TABLE IF NOT EXISTS Users ( PRIMARY KEY("userId") ); +<<<<<<< HEAD CREATE TABLE IF NOT EXISTS Documents ( "documentId" INT, "timestamp" INT, @@ -90,6 +91,39 @@ CREATE TABLE IF NOT EXISTS TicketEvents ( PRIMARY KEY("ticketEventId"), CONSTRAINT "fk_customerId" FOREIGN KEY ("customerId") REFERENCES Customers("customerId"), CONSTRAINT "fk_userId" FOREIGN KEY ("userId") REFERENCES Users("userId") +======= + +CREATE TABLE IF NOT EXISTS TicketEvents ( + ticketEventId INT, + ticketId INT, + timestamp INT, + shipperEventId INT, + consigneeEventId INT, + userId INT, + customerId INT, + barcodeNumber INT, + houseReferenceNumber INT, + orderS3Link VARCHAR(50), + weight INT, + claimedNumberOfPieces INT, + BOLNumber INT, + specialServices VARCHAR(256), + specialInstructions VARCHAR(256), + shipperCompany VARCHAR(256), + shipperName VARCHAR(256), + shipperAddress VARCHAR(256), + shipperPostalCode VARCHAR(256), + shipperPhoneNumber VARCHAR(256), + consigneeCompany VARCHAR(256), + consigneeName VARCHAR(256), + consigneeAddress VARCHAR(256), + consigneePostalCode VARCHAR(256), + consigneePhoneNumber VARCHAR(256), + pieces VARCHAR(256), + PRIMARY KEY(ticketEventId), + CONSTRAINT fk_customerId FOREIGN KEY (customerId) REFERENCES Customers(customerId), + CONSTRAINT fk_userId FOREIGN KEY (userId) REFERENCES Users(userId) +>>>>>>> modifying db schema ); CREATE TABLE IF NOT EXISTS GenericMilestones ( @@ -120,6 +154,7 @@ CREATE TABLE IF NOT EXISTS InventoryMilestones ( ); CREATE TABLE IF NOT EXISTS DeliveryMilestones ( +<<<<<<< HEAD "milestoneId" INT, timestamp INT, "ticketEventId" INT, @@ -136,4 +171,22 @@ CREATE TABLE IF NOT EXISTS DeliveryMilestones ( CONSTRAINT "fk_ticketEventId" FOREIGN KEY ("ticketEventId") REFERENCES TicketEvents("ticketEventId"), CONSTRAINT "fk_customerId" FOREIGN KEY ("customerId") REFERENCES Customers("customerId"), CONSTRAINT "fk_userId" FOREIGN KEY ("userId") REFERENCES Users("userId") +======= + milestoneId INT, + timestamp INT, + ticketEventId INT, + customerId INT, + userId INT, + ticketStatus DELIVERY_TICKET_STATUS, + approvalStatus TICKET_APPROVAL_STATUS, + PODLink VARCHAR(50), + signatureLink VARCHAR(50), + picture1Link VARCHAR(50), + picture2Link VARCHAR(50), + picture3Link VARCHAR(50), + PRIMARY KEY(milestoneId), + CONSTRAINT fk_ticketEventId FOREIGN KEY (ticketEventId) REFERENCES TicketEvents(ticketEventId), + CONSTRAINT fk_customerId FOREIGN KEY (customerId) REFERENCES Customers(customerId), + CONSTRAINT fk_userId FOREIGN KEY (userId) REFERENCES Users(userId) +>>>>>>> modifying db schema ); \ No newline at end of file diff --git a/servers/tenant/models/models.py b/servers/tenant/models/models.py index 710dbf9..dfb796c 100644 --- a/servers/tenant/models/models.py +++ b/servers/tenant/models/models.py @@ -105,6 +105,7 @@ def __repr__(self): return f"< Users:: userId: {self.userId}>" +<<<<<<< HEAD class Documents(Base): __tablename__ = "documents" documentId = Column(Integer, primary_key=True, nullable=False) @@ 
-145,6 +146,8 @@ class TicketStatus(Base): user = relationship("Users") +======= +>>>>>>> modifying db schema class TicketEvents(Base): __tablename__ = "ticketevents" non_prim_identifying_column_name = "ticketId" @@ -152,10 +155,15 @@ class TicketEvents(Base): # TODO: forgein key ticketId = Column(Integer, ForeignKey(TicketStatus.ticketId)) timestamp = Column(Integer, default=int(time.time())) +<<<<<<< HEAD userId = Column(Integer, ForeignKey(Users.userId), nullable=False, index=True) customerId = Column( Integer, ForeignKey(Customers.customerId), nullable=False, index=True ) +======= + userId = Column(Integer, ForeignKey(Users.userId), nullable=False) + customerId = Column(Integer, ForeignKey(Customers.customerId), nullable=False) +>>>>>>> modifying db schema barcodeNumber = Column(Integer, nullable=False) houseReferenceNumber = Column(Integer, nullable=False) orderS3Link = Column(String, nullable=False) @@ -164,18 +172,25 @@ class TicketEvents(Base): BOLNumber = Column(Integer, nullable=False) specialServices = Column(String) specialInstructions = Column(String) +<<<<<<< HEAD # shipper +======= +>>>>>>> modifying db schema shipperCompany = Column(String, nullable=False) shipperName = Column(String, nullable=False) shipperAddress = Column(String, nullable=False) shipperPostalCode = Column(String, nullable=False) shipperPhoneNumber = Column(String, nullable=False) +<<<<<<< HEAD # consignee +======= +>>>>>>> modifying db schema consigneeCompany = Column(String, nullable=False) consigneeName = Column(String, nullable=False) consigneeAddress = Column(String, nullable=False) consigneePostalCode = Column(String, nullable=False) consigneePhoneNumber = Column(String, nullable=False) +<<<<<<< HEAD # pieces pieces = Column(String, nullable=False) isPickup = Column(Boolean, nullable=False) @@ -193,6 +208,11 @@ class CreationMilestones(Base): ticketId = Column( Integer, ForeignKey(TicketStatus.ticketId), nullable=False, index=True ) +======= + pieces = Column(String, nullable=False) + user = relationship("Users") + customer = relationship("Customers") +>>>>>>> modifying db schema newStatus = Column(Enum(Creation_Milestone_Status), nullable=False) @@ -229,6 +249,7 @@ class PickupMilestones(Base): class InventoryMilestones(Base): __tablename__ = "inventorymilestones" +<<<<<<< HEAD milestoneId = Column(Integer, primary_key=True, autoincrement=True) ticketId = Column( Integer, ForeignKey(TicketStatus.ticketId), nullable=False, index=True @@ -243,6 +264,12 @@ class InventoryMilestones(Base): timestamp = Column(Integer, nullable=False, default=int(time.time())) approvedByUser = relationship("Users") +======= +if __name__ == "__main__": + ticketId_timestamp_idx = Index( + "ticketId_timestamp_idx", TicketEvents.ticketId, TicketEvents.timestamp + ) +>>>>>>> modifying db schema class AssignmentMilestones(Base): @@ -270,6 +297,7 @@ class AssignmentMilestones(Base): class IncompleteDeliveryMilestones(Base): __tablename__ = "inconpletedeliverymilestones" +<<<<<<< HEAD milestoneId = Column(Integer, primary_key=True, autoincrement=True) ticketId = Column( Integer, ForeignKey(TicketStatus.ticketId), nullable=False, index=True @@ -320,6 +348,9 @@ class DeliveryMilestones(Base): timestamp = Column(Integer, nullable=False, default=int(time.time())) completingUser = relationship("Users") +======= + gen_milestoneId_idx = Index("gen_milestoneId_idx", GenericMilestones.milestoneId) +>>>>>>> modifying db schema ticketId_timestamp_idx = Index( diff --git a/servers/tenant/requirements.txt b/servers/tenant/requirements.txt 
index 86417b1..a36e9c6 100644 --- a/servers/tenant/requirements.txt +++ b/servers/tenant/requirements.txt @@ -23,7 +23,10 @@ multilingual-pdf2text==1.1.0 numpy==1.22.0 ocrmypdf==13.2.0 packaging==21.3 +<<<<<<< HEAD Flask-Cors +======= +>>>>>>> modifying db schema pdf2image==1.16.0 pdfminer.six==20211012 pdfplumber==0.6.0 @@ -49,11 +52,16 @@ wcwidth==0.2.5 Werkzeug==2.0.2 wrapt==1.13.3 zipp==3.7.0 +<<<<<<< HEAD aniso8601==9.0.1 attrs==21.4.0 certifi==2022.6.15 charset-normalizer==2.1.0 ecdsa==0.17.0 +======= +sqlalchemy +psycopg2-binary +>>>>>>> modifying db schema Faker==13.7.0 flask-cognito-auth==1.1.0 flask-marshmallow==0.14.0 diff --git a/servers/tenant/server.py b/servers/tenant/server.py index 87088d5..c4564e9 100644 --- a/servers/tenant/server.py +++ b/servers/tenant/server.py @@ -4,11 +4,15 @@ from blueprints.event_driven.ticket import ticket_bp from blueprints.simple.customers import customer_bp from blueprints.simple.users import user_bp +<<<<<<< HEAD from blueprints.simple.milestones import milestone_bp from blueprints.simple.driver import driver_bp from flask_cors import CORS from flask_cognito_lib import CognitoAuth +======= +from servers.tenant.blueprints.simple.pdf import pdf_bp # TODO: Move this in seperate microservice +>>>>>>> modifying db schema # from models.__init__ import engine, Base # from models.models import INDEXES diff --git a/servers/tenant/test/test.py b/servers/tenant/test/test.py index f89e5c9..b2c873f 100644 --- a/servers/tenant/test/test.py +++ b/servers/tenant/test/test.py @@ -20,6 +20,7 @@ UserController, CustomerController, TicketController, +<<<<<<< HEAD CreationMilestonesController, PickupMilestonesController, InventoryMilestonesController, @@ -33,6 +34,11 @@ Assignment_Milestone_Status, Delivery_Milestone_Status, Incomplete_Delivery_Milestone_Status, +======= + GenericMilestoneController, + InventoryMilestoneController, + DeliveryMilestoneController, +>>>>>>> modifying db schema ) from utils import alchemyConverter from utils import AlchemyEncoder @@ -43,6 +49,7 @@ app = Flask(__name__) with app.app_context(): +<<<<<<< HEAD # Controllers creationMilestonesController = CreationMilestonesController() pickupMilestonesController = PickupMilestonesController() @@ -87,6 +94,8 @@ UserType.worker: [], } +======= +>>>>>>> modifying db schema def generate_users(scale=5): user_controller = UserController() @@ -143,7 +152,14 @@ def generate_customers(scale=2): return customer_controller._create_bulk(args_arr) +<<<<<<< HEAD def generate_ticket_events(scale=400, users=[], customers=[]): +======= + + def generate_ticket_events( + scale=20, users=[], customers=[] + ): +>>>>>>> modifying db schema ticket_events_controller = TicketController() @@ -179,9 +195,12 @@ def generate_ticket_events(scale=400, users=[], customers=[]): consigneePostalCode = faker.zipcode() consigneePhoneNumber = faker.phone_number() pieces = faker.sentence() +<<<<<<< HEAD isPickup = False noSignatureRequired = False tailgateAuthorized = False +======= +>>>>>>> modifying db schema obj = ticket_events_controller._create_base_event( { @@ -205,11 +224,15 @@ def generate_ticket_events(scale=400, users=[], customers=[]): "consigneeAddress": consigneeAddress, "consigneePostalCode": consigneePostalCode, "consigneePhoneNumber": consigneePhoneNumber, +<<<<<<< HEAD "pieces": pieces, "isPickup": isPickup, "noSignatureRequired": noSignatureRequired, "tailgateAuthorized": tailgateAuthorized +======= + "pieces": pieces +>>>>>>> modifying db schema } ) @@ -244,6 +267,7 @@ def generate_ticket_events(scale=400, 
users=[], customers=[]): print("Created Ticket") +<<<<<<< HEAD def list_diff(li1, li2): return list(set(li1) - set(li2)) + list(set(li2) - set(li1)) @@ -457,10 +481,48 @@ def generate_milestone_events(old_tickets): # "approvalStatus": approvalStatus, # } # ) +======= + + def generate_generic_milestones_events(scale=50, ticket_map=[], users=[]): + + gen_milestone_controller = GenericMilestoneController() + + n = len(session.query(GenericMilestones).distinct().all()) + if n < scale: + print(f"Generating Gen Milestones for {scale - n } Tickets") + + for _ in range(scale - n): + + ticketId = random.choice([k for k in ticket_map]) + + for _ in range( + random.randint(4, 10) + ): # number of milestones per ticket + + milestoneId = random.randint(1, 2147483645) + userId = random.choice(users).userId + + ticketStatus = random.choice( + [e for e in Generic_Ticket_Status] + ).value.lower() + + obj = gen_milestone_controller._create( + { + "milestoneId": milestoneId, + "ticketEventId": random.choice(ticket_map[ticketId]), + "userId": userId, + "ticketStatus": ticketStatus, + } + ) +>>>>>>> modifying db schema # print("Created Inventory Milestone") +<<<<<<< HEAD # def generate_delivery_milestones_events(scale=50, ticket_map=[], users=[]): +======= + def generate_inventory_milestones_events(scale=50, ticket_map=[], users=[]): +>>>>>>> modifying db schema # gen_milestone_controller = DeliveryMilestoneController() @@ -473,9 +535,24 @@ def generate_milestone_events(old_tickets): # ticketId = random.choice([k for k in ticket_map]) +<<<<<<< HEAD # for _ in range( # random.randint(4, 10) # ): # number of milestones per ticket +======= + print("Created Inventory Milestone") + + def generate_delivery_milestones_events(scale=50, ticket_map=[], users=[]): + + gen_milestone_controller = DeliveryMilestoneController() + + n = len(session.query(DeliveryMilestones).distinct().all()) + + if n < scale: + print(f"Generating Delivery Milestones for {scale - n } Tickets") + + for _ in range(scale - n): +>>>>>>> modifying db schema # milestoneId = random.randint(1, 2147483645) # userId = random.choice(users).userId @@ -510,6 +587,7 @@ def generate_milestone_events(old_tickets): # pprint(alchemyConverter(users[0])) +<<<<<<< HEAD oldTickets = ( session.query(TicketEvents) .with_entities(TicketEvents.ticketId) @@ -520,6 +598,10 @@ def generate_milestone_events(old_tickets): generate_ticket_events( scale=500, +======= + generate_ticket_events( + scale=20, +>>>>>>> modifying db schema users=users, customers=customers, ) @@ -531,6 +613,10 @@ def generate_milestone_events(old_tickets): # exit() +<<<<<<< HEAD +======= + +>>>>>>> modifying db schema pprint(alchemyConverter(ticketEvents[0])) exit() From a6ac6ccb65bff28a2afa156c86b3b6262670b69d Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Tue, 5 Jul 2022 19:41:59 -0400 Subject: [PATCH 18/40] push --- servers/tenant/database/index_creation.sql | 4 ++ servers/tenant/database/table_creation.sql | 64 ++++++++++++++++++++-- servers/tenant/models/__init__.py | 6 +- servers/tenant/models/models.py | 50 +++++++++++++++++ servers/tenant/test/test.py | 29 ++++++++++ 5 files changed, 147 insertions(+), 6 deletions(-) diff --git a/servers/tenant/database/index_creation.sql b/servers/tenant/database/index_creation.sql index 3014758..489b6b2 100644 --- a/servers/tenant/database/index_creation.sql +++ b/servers/tenant/database/index_creation.sql @@ -1,7 +1,11 @@ <<<<<<< HEAD +<<<<<<< HEAD -- SQLBook: Code ======= >>>>>>> modifying db schema +======= +-- SQLBook: Code +>>>>>>> push CREATE INDEX 
idx_ticketEvents_comp ON TicketEvents(ticketEventId, timestamp); CREATE INDEX idx_ticketEvents_ts ON TicketEvents(timestamp); diff --git a/servers/tenant/database/table_creation.sql b/servers/tenant/database/table_creation.sql index d6ecb25..ac21b0d 100644 --- a/servers/tenant/database/table_creation.sql +++ b/servers/tenant/database/table_creation.sql @@ -10,11 +10,11 @@ CREATE TYPE DELIVERY_TICKET_STATUS AS ENUM('DELIVERED', 'IN_TRANSIT'); CREATE TYPE GENERIC_TICKET_STATUS AS ENUM('INVENTORY', 'ASSIGNED', 'OUT_FOR_DELIVERY'); CREATE TYPE USERTYPE AS ENUM ( - 'MANAGER', - 'DISPATCH', - 'CUSTOMER', - 'DRIVER', - 'WORKER' + "MANAGER", + "DISPATCH", + "CUSTOMER", + "DRIVER", + "WORKER" ); CREATE TABLE IF NOT EXISTS Customers ( @@ -33,6 +33,7 @@ CREATE TABLE IF NOT EXISTS Users ( "createdAt" INT NOT NULL, "modifiedAt" INT NOT NULL, PRIMARY KEY("userId") +<<<<<<< HEAD ); <<<<<<< HEAD @@ -61,6 +62,8 @@ CREATE TABLE IF NOT EXISTS Documents ( "consigneePhoneNumber" VARCHAR(256), "pieces" VARCHAR(256), PRIMARY KEY("documentId") +======= +>>>>>>> push ); CREATE TABLE IF NOT EXISTS TicketEvents ( @@ -94,6 +97,7 @@ CREATE TABLE IF NOT EXISTS TicketEvents ( ======= CREATE TABLE IF NOT EXISTS TicketEvents ( +<<<<<<< HEAD ticketEventId INT, ticketId INT, timestamp INT, @@ -124,6 +128,37 @@ CREATE TABLE IF NOT EXISTS TicketEvents ( CONSTRAINT fk_customerId FOREIGN KEY (customerId) REFERENCES Customers(customerId), CONSTRAINT fk_userId FOREIGN KEY (userId) REFERENCES Users(userId) >>>>>>> modifying db schema +======= + "ticketEventId" INT, + "ticketId" INT, + "timestamp" INT, + "shipperEventId" INT, + "consigneeEventId" INT, + "userId" INT, + "customerId" INT, + "barcodeNumber" INT, + "houseReferenceNumber" INT, + "orderS3Link" VARCHAR(50), + "weight" INT, + "claimedNumberOfPieces" INT, + "BOLNumber" INT, + "specialServices" VARCHAR(256), + "specialInstructions" VARCHAR(256), + "shipperCompany" VARCHAR(256), + "shipperName" VARCHAR(256), + "shipperAddress" VARCHAR(256), + "shipperPostalCode" VARCHAR(256), + "shipperPhoneNumber" VARCHAR(256), + "consigneeCompany" VARCHAR(256), + "consigneeName" VARCHAR(256), + "consigneeAddress" VARCHAR(256), + "consigneePostalCode" VARCHAR(256), + "consigneePhoneNumber" VARCHAR(256), + "pieces" VARCHAR(256), + PRIMARY KEY("ticketEventId"), + CONSTRAINT "fk_customerId" FOREIGN KEY ("customerId") REFERENCES Customers("customerId"), + CONSTRAINT "fk_userId" FOREIGN KEY ("userId") REFERENCES Users("userId") +>>>>>>> push ); CREATE TABLE IF NOT EXISTS GenericMilestones ( @@ -154,6 +189,7 @@ CREATE TABLE IF NOT EXISTS InventoryMilestones ( ); CREATE TABLE IF NOT EXISTS DeliveryMilestones ( +<<<<<<< HEAD <<<<<<< HEAD "milestoneId" INT, timestamp INT, @@ -189,4 +225,22 @@ CREATE TABLE IF NOT EXISTS DeliveryMilestones ( CONSTRAINT fk_customerId FOREIGN KEY (customerId) REFERENCES Customers(customerId), CONSTRAINT fk_userId FOREIGN KEY (userId) REFERENCES Users(userId) >>>>>>> modifying db schema +======= + "milestoneId" INT, + timestamp INT, + "ticketEventId" INT, + "customerId" INT, + "userId" INT, + "ticketStatus" DELIVERY_TICKET_STATUS, + "approvalStatus" TICKET_APPROVAL_STATUS, + "PODLink" VARCHAR(50), + "signatureLink" VARCHAR(50), + "picture1Link" VARCHAR(50), + "picture2Link" VARCHAR(50), + "picture3Link" VARCHAR(50), + PRIMARY KEY("milestoneId"), + CONSTRAINT "fk_ticketEventId" FOREIGN KEY ("ticketEventId") REFERENCES TicketEvents("ticketEventId"), + CONSTRAINT "fk_customerId" FOREIGN KEY ("customerId") REFERENCES Customers("customerId"), + CONSTRAINT "fk_userId" FOREIGN 
KEY ("userId") REFERENCES Users("userId") +>>>>>>> push ); \ No newline at end of file diff --git a/servers/tenant/models/__init__.py b/servers/tenant/models/__init__.py index 92914ed..38a9741 100644 --- a/servers/tenant/models/__init__.py +++ b/servers/tenant/models/__init__.py @@ -4,7 +4,7 @@ from sqlalchemy.orm import sessionmaker db_port = os.getenv("DB_PORT", "5432") -db_name = os.getenv("DB_NAME", "tenant_db") +db_name = os.getenv("DB_NAME", "tenant_database") db_username = os.getenv("DB_USERNAME", "postgres") db_password = os.getenv("DB_PASSWORD", "password") db_url = os.getenv("DB_URL", "ship-solver.ccxmktobiszx.ca-central-1.rds.amazonaws.com") @@ -14,7 +14,11 @@ Base = declarative_base() +<<<<<<< HEAD engine = create_engine(cnx_string, echo=False) +======= +engine = create_engine(cnx_string) +>>>>>>> push print("connecting to db....") Session = sessionmaker(bind=engine) session = Session() diff --git a/servers/tenant/models/models.py b/servers/tenant/models/models.py index dfb796c..c7b493f 100644 --- a/servers/tenant/models/models.py +++ b/servers/tenant/models/models.py @@ -172,25 +172,36 @@ class TicketEvents(Base): BOLNumber = Column(Integer, nullable=False) specialServices = Column(String) specialInstructions = Column(String) +<<<<<<< HEAD <<<<<<< HEAD # shipper ======= >>>>>>> modifying db schema +======= + # shipper +>>>>>>> push shipperCompany = Column(String, nullable=False) shipperName = Column(String, nullable=False) shipperAddress = Column(String, nullable=False) shipperPostalCode = Column(String, nullable=False) shipperPhoneNumber = Column(String, nullable=False) +<<<<<<< HEAD <<<<<<< HEAD # consignee ======= >>>>>>> modifying db schema +======= + # consignee +>>>>>>> push consigneeCompany = Column(String, nullable=False) consigneeName = Column(String, nullable=False) consigneeAddress = Column(String, nullable=False) consigneePostalCode = Column(String, nullable=False) consigneePhoneNumber = Column(String, nullable=False) <<<<<<< HEAD +<<<<<<< HEAD +======= +>>>>>>> push # pieces pieces = Column(String, nullable=False) isPickup = Column(Boolean, nullable=False) @@ -263,6 +274,7 @@ class InventoryMilestones(Base): newStatus = Column(Enum(Inventory_Milestone_Status), nullable=False) timestamp = Column(Integer, nullable=False, default=int(time.time())) +<<<<<<< HEAD approvedByUser = relationship("Users") ======= if __name__ == "__main__": @@ -318,8 +330,15 @@ class IncompleteDeliveryMilestones(Base): timestamp = Column(Integer, nullable=False, default=int(time.time())) assigneeUser = relationship("Users") +======= +ticketId_timestamp_idx = Index( + "ticketId_timestamp_idx", TicketEvents.ticketId, TicketEvents.timestamp +) +>>>>>>> push +INDEXES.append(ticketId_timestamp_idx) +<<<<<<< HEAD class DeliveryMilestones(Base): __tablename__ = "deliverymilestones" @@ -369,3 +388,34 @@ class DeliveryMilestones(Base): pass +======= + +ticket_userId_idx = Index("ticket_userId_idx", TicketEvents.userId) + +INDEXES.append(ticket_userId_idx) + +ticket_customerId_idx = Index("ticket_customerId_idx", TicketEvents.customerId) + +INDEXES.append(ticket_customerId_idx) + +gen_milestoneId_idx = Index("gen_milestoneId_idx", GenericMilestones.milestoneId) + +INDEXES.append(gen_milestoneId_idx) + +inv_milestoneId_idx = Index("inv_milestoneId_idx", InventoryMilestones.milestoneId) + +INDEXES.append(inv_milestoneId_idx) + +del_milestoneId_idx = Index("del_milestoneId_idx", DeliveryMilestones.milestoneId) + +INDEXES.append(del_milestoneId_idx) + +print("Configuring DB ...") 
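# A short note on the bootstrap below, assuming these models are all mapped on Base:
# Base.metadata.create_all(engine) creates any mapped tables that are missing (it
# checks for existing tables by default), and each Index.create(bind=engine) emits a
# CREATE INDEX for the secondary indexes collected in INDEXES. The bare except is
# presumably there to swallow "already exists" errors on repeat imports; a narrower
# way to get the same idempotent behaviour would be, for example,
#     index.create(bind=engine, checkfirst=True)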
+Base.metadata.create_all(engine) +try: + # create indexes + for index in INDEXES: + index.create(bind=engine) +except: + pass +>>>>>>> push diff --git a/servers/tenant/test/test.py b/servers/tenant/test/test.py index b2c873f..0585f9a 100644 --- a/servers/tenant/test/test.py +++ b/servers/tenant/test/test.py @@ -6,7 +6,11 @@ from faker import Faker import os from flask import Flask, jsonify +<<<<<<< HEAD import uuid +======= +import uuid; +>>>>>>> push from sqlalchemy import create_engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker @@ -110,8 +114,12 @@ def generate_users(scale=5): firstName = faker.unique.first_name() lastName = faker.unique.last_name() +<<<<<<< HEAD userType = random.choice([ut for ut in UserType]) userTypeValue = userType.value.lower() +======= + userType = random.choice([ut for ut in UserType]).value +>>>>>>> push username = firstName.lower()[0] + lastName.lower() email = f"{username}@faker.com" userId = random.randint(1, 1000000000) @@ -120,8 +128,13 @@ def generate_users(scale=5): args_arr.append( { +<<<<<<< HEAD "userId": userId, "userType": userTypeValue, +======= + "userId": random.randint(1, 1000000000), + "userType": userType, +>>>>>>> push "username": username, "firstName": firstName, "lastName": lastName, @@ -577,7 +590,23 @@ def generate_delivery_milestones_events(scale=50, ticket_map=[], users=[]): # print("Created Delivery Milestone") +<<<<<<< HEAD generate_users(scale=70) +======= + obj = gen_milestone_controller._create( + { + "milestoneId": milestoneId, + "ticketEventId": random.choice(ticket_map[ticketId]), + "userId": userId, + "ticketStatus": ticketStatus, + "approvalStatus": approvalStatus, + } + ) + + print("Created Delivery Milestone") + + generate_users(scale=5) +>>>>>>> push users = session.query(Users).all() # print(random.choice(users)) From 62ebb4315261cad0b71e3661176c22114efa5ada Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Tue, 5 Jul 2022 20:07:35 -0400 Subject: [PATCH 19/40] fix schema --- .../tenant/blueprints/event_driven/ticket.py | 12 ++++-- servers/tenant/controllers/baseController.py | 15 ++++--- servers/tenant/database/table_creation.sql | 10 ++--- servers/tenant/test/test.py | 35 ++++++++++++++++ servers/tenant/utils.py | 41 +++++++++++++++++++ 5 files changed, 100 insertions(+), 13 deletions(-) diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index e1bdb3f..bec4b9d 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -16,7 +16,11 @@ sys.path.insert(0, "..") # import parent folder +<<<<<<< HEAD from controllers.controllerMapper import TicketController, TicketStatusController +======= +from controllers.controllerMapper import TicketController +>>>>>>> fix schema from models.models import TicketEvents from utils import ( AlchemyEncoder, @@ -28,8 +32,12 @@ ticket_bp = Blueprint("ticket_bp", __name__, url_prefix="ticket") ticket_controller = TicketController() +<<<<<<< HEAD ticket_status_controller = TicketStatusController() PIECES_SEPERATOR = ",+-" +======= + +>>>>>>> fix schema """ Route expects requests of format: @@ -141,11 +149,9 @@ def get_clean_filters_dict(immutable_args): def ticket_get_all(): filters = request.args.get("filters") or {} - limit = request.args.get("limit") or 1 + limit = request.args.get("limit") or 2 data = ticket_controller._get_latest_event_objects(filters, number_of_res=limit) - print("data------------------") - print(data) 
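# The _get_latest_event_objects() call above is assumed to lean on Postgres
# DISTINCT ON (via Query.distinct(column)) to keep a single event row per ticket.
# The equivalent raw SQL would look roughly like:
#     SELECT DISTINCT ON ("ticketId") *
#     FROM ticketevents
#     ORDER BY "ticketId", timestamp DESC
#     LIMIT 5;
# DISTINCT ON keeps the first row per "ticketId" in that ordering, so the ORDER BY
# direction decides whether the earliest or the latest event is returned.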
res = alchemyConverter(data) response = json.dumps(res, cls=AlchemyEncoder) >>>>>>> modifying db schema diff --git a/servers/tenant/controllers/baseController.py b/servers/tenant/controllers/baseController.py index 21837ba..b4fdfcb 100644 --- a/servers/tenant/controllers/baseController.py +++ b/servers/tenant/controllers/baseController.py @@ -169,15 +169,20 @@ def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): ======= # .limit(number_of_res).all() # ) - latest_objs = ( - self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) - .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) - .limit(number_of_res).all() - ) + latest_objs = self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) \ + .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) \ + .limit(number_of_res).all() + # latest_objs = self.session.query(self.model, subquery).order_by(self.model.timestamp).all() +<<<<<<< HEAD return latest_objs[0] >>>>>>> modifying db schema +======= + print("LATEST_OBJS-------") + print(latest_objs) + return latest_objs +>>>>>>> fix schema # def _get_latest_event_objects_from_start_date(self, start_datetime, filters={}): diff --git a/servers/tenant/database/table_creation.sql b/servers/tenant/database/table_creation.sql index ac21b0d..7d186bd 100644 --- a/servers/tenant/database/table_creation.sql +++ b/servers/tenant/database/table_creation.sql @@ -10,11 +10,11 @@ CREATE TYPE DELIVERY_TICKET_STATUS AS ENUM('DELIVERED', 'IN_TRANSIT'); CREATE TYPE GENERIC_TICKET_STATUS AS ENUM('INVENTORY', 'ASSIGNED', 'OUT_FOR_DELIVERY'); CREATE TYPE USERTYPE AS ENUM ( - "MANAGER", - "DISPATCH", - "CUSTOMER", - "DRIVER", - "WORKER" + 'MANAGER', + 'DISPATCH', + 'CUSTOMER', + 'DRIVER', + 'WORKER' ); CREATE TABLE IF NOT EXISTS Customers ( diff --git a/servers/tenant/test/test.py b/servers/tenant/test/test.py index 0585f9a..1907731 100644 --- a/servers/tenant/test/test.py +++ b/servers/tenant/test/test.py @@ -114,12 +114,16 @@ def generate_users(scale=5): firstName = faker.unique.first_name() lastName = faker.unique.last_name() +<<<<<<< HEAD <<<<<<< HEAD userType = random.choice([ut for ut in UserType]) userTypeValue = userType.value.lower() ======= userType = random.choice([ut for ut in UserType]).value >>>>>>> push +======= + userType = random.choice([ut for ut in UserType]).value.lower() +>>>>>>> fix schema username = firstName.lower()[0] + lastName.lower() email = f"{username}@faker.com" userId = random.randint(1, 1000000000) @@ -249,6 +253,7 @@ def generate_ticket_events( } ) +<<<<<<< HEAD # for i in range(random.randrange(10, 20)): # userId = random.choice(users).userId @@ -277,6 +282,36 @@ def generate_ticket_events( # "specialInstructions": specialInstructions, # }, # ) +======= + for i in range(random.randrange(10, 20)): + + userId = random.choice(users).userId + userId = random.choice(users).userId + customerId = random.choice(customers).customerId + barcodeNumber = random.randrange(100000000, 900000000) + houseReferenceNumber = random.randrange(100000000, 900000000) + orderS3Link = "s3link" + weight = random.randrange(100, 200) + claimedNumberOfPieces = random.randrange(1, 5) + BOLNumber = random.randrange(100000000, 900000000) + + created_obj = ticket_events_controller._modify_latest_object( + getattr(obj, TicketEvents.non_prim_identifying_column_name), + { + "ticketId": obj.ticketId, + "userId": userId, + "customerId": customerId, + "barcodeNumber": barcodeNumber, + "houseReferenceNumber": 
houseReferenceNumber, + "orderS3Link": orderS3Link, + "weight": weight, + "claimedNumberOfPieces": claimedNumberOfPieces, + "BOLNumber": BOLNumber, + "specialServices": specialServices, + "specialInstructions": specialInstructions, + }, + ) +>>>>>>> fix schema print("Created Ticket") diff --git a/servers/tenant/utils.py b/servers/tenant/utils.py index 3429fb6..7202d00 100644 --- a/servers/tenant/utils.py +++ b/servers/tenant/utils.py @@ -16,6 +16,7 @@ def default(self, obj): # DFS function used to convert alchemy objects to JSON +<<<<<<< HEAD def alchemyConvertUtil(object, res, visited): visited.add(str(object.__class__)) for field in [ @@ -58,6 +59,46 @@ def alchemyConverter(obj): return res else: return alchemyConvertUtil(obj, {}, visited=set()) +======= +def alchemyConverter(object): + def single_convert(obj, res={}, visited=set({})): + visited.add(str(object.__class__)) + for field in [ + x + for x in dir(object) + if not x.startswith("_") + and x not in set({"metadata", "non_prim_identifying_column_name", "registry"}) + ]: + cls_name = str(obj.__getattribute__(field).__class__) + if "models.models." in cls_name: + if cls_name in visited: + continue + else: + visited.add(cls_name) + + res[field] = {} + single_convert(getattr(obj, field), res[field], visited=visited) + visited.remove(cls_name) + elif "InstrumentedList" in cls_name: + res[field] = [] + + for i, obj in enumerate(getattr(obj, field)): + + res[field].append({}) + single_convert(obj, res[field][i], visited=visited) + + else: + res[field] = getattr(obj, field) + + return res + + if type(object) == list: + res = [single_convert(obj) for obj in object] + return res + else: + return single_convert(object) + +>>>>>>> fix schema # converts fiters as a dictionary to alchemy interpretable results From 2bbbd1f66a84c19b939d934308268dec64dd726b Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Tue, 5 Jul 2022 22:04:54 -0400 Subject: [PATCH 20/40] get endpoints --- .../tenant/blueprints/event_driven/ticket.py | 120 ++++++++++++++++++ servers/tenant/controllers/baseController.py | 41 ++++++ servers/tenant/utils.py | 48 ++++++- 3 files changed, 205 insertions(+), 4 deletions(-) diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index bec4b9d..611c623 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -1,5 +1,6 @@ import json <<<<<<< HEAD +<<<<<<< HEAD from datetime import datetime from wsgiref import validate @@ -7,6 +8,9 @@ from flask import make_response, request, jsonify, Blueprint ======= import datetime +======= +from datetime import datetime +>>>>>>> get endpoints from numpy import number from flask import request, jsonify, Blueprint @@ -108,6 +112,7 @@ def ticket_post(): # create ticket ticket_dict["pieces"] = PIECES_SEPERATOR.join(ticket_dict["pieces"]) ticket_event = ticket_controller._create_base_event(ticket_dict) +<<<<<<< HEAD <<<<<<< HEAD response = {"ticketId": ticket_event.ticketId} return make_response(json.dumps(response)) @@ -125,6 +130,15 @@ def ticket_edit(ticket_id): # create ticket #join pieces into single string ticket_dict["pieces"] = PIECES_SEPERATOR.join(ticket_dict["pieces"]) ticket_event = ticket_controller._create_base_event(ticket_dict) +======= + return {"success"} + + +# http://127.0.0.1:6767/api/ticket/?start=2022-01-01T00:00:00&end=2022-04-04T00:00:00&shipperName=Eric%20Shea +# curl http://127.0.0.1:6767/api/ticket/?shipperName +# # curl http://127.0.0.1:6767/api/ticket?key=a 
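# # An illustrative (hypothetical) combined query against the same endpoint:
# # curl "http://127.0.0.1:6767/api/ticket/?start=2022-01-01T00:00:00&end=2022-04-04T00:00:00&shipperName=Eric%20Shea&limit=10"
# # start/end appear to be parsed with %Y-%m-%dT%H:%M:%S, limit caps the result count,
# # and any remaining query args are passed through as column filters.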
+# # curl http://127.0.0.1:6767/api/ticket/?start=2022-01-01T00:00:00Z&end=2022-04-04T00:00:00Z +>>>>>>> get endpoints response = {"ticketId": ticket_event.ticketId} return make_response(json.dumps(response)) @@ -148,6 +162,7 @@ def get_clean_filters_dict(immutable_args): # @require_appkey def ticket_get_all(): +<<<<<<< HEAD filters = request.args.get("filters") or {} limit = request.args.get("limit") or 2 @@ -191,6 +206,43 @@ def ticket_get_all(): data = ticket_controller._get_latest_event_objects_in_range( dt_start, dt_end, sql_filters, number_of_res=limit ) +======= + filters = request.args or {} + sql_filters = dict(filters) + + if "start" in sql_filters: + del sql_filters["start"] + if "end" in sql_filters: + del sql_filters["end"] + if "limit" in sql_filters: + del sql_filters["limit"] + + if "limit" not in filters: + limit = 5 + else: + limit = filters["limit"] + def validate_date_format(date_text): + try: + return datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S") + except ValueError: + raise ValueError("Incorrect data format, should be %Y-%m-%dT%H:%M:%S") + + if "start" in filters: + dt_start_str = filters["start"] + dt_start = validate_date_format(dt_start_str) + if "end" in filters: + dt_end_str= filters["end"] + dt_end = validate_date_format(dt_end_str) + data = ticket_controller._get_latest_event_objects_in_range( + dt_start, dt_end, filters=sql_filters, number_of_res=limit + ) + else: + data = ticket_controller._get_latest_event_objects_from_start_date( + dt_start, filters=sql_filters, number_of_res=limit + ) + else: + data = ticket_controller._get_latest_event_objects(sql_filters, number_of_res=limit) +>>>>>>> get endpoints res = alchemyConverter(data) for ticket in res: @@ -200,6 +252,7 @@ def ticket_get_all(): return make_response(json.dumps(res, cls=AlchemyEncoder)) +<<<<<<< HEAD def get_single(ticket_id): filters = request.args.get("filters") or {} @@ -217,7 +270,26 @@ def ticket_get(ticket_id): data = get_single(ticket_id) res = alchemyConverter(data) return make_response(json.dumps(res, cls=AlchemyEncoder)) +======= +@ticket_bp.route("/", methods=["GET"]) +# @require_appkey +def ticket_get(ticket_id): + filters = request.args.get("filters") or {} + + number_of_res = request.args.get("number_of_res") + + filters["ticketId"] = ticket_id + + latest_ticket = ticket_controller._get_latest_event_objects( + number_of_res=number_of_res, filters=filters + ) + + res = alchemyConverter(latest_ticket[0]) + response = json.dumps(res, cls=AlchemyEncoder) + + return response +>>>>>>> get endpoints """ Route expects requests of format: @@ -233,6 +305,29 @@ def ticket_get(ticket_id): """ +<<<<<<< HEAD +======= + + +# @ticket_bp.route("/attribute/{attribute_name}", methods=["GET"]) +# @require_appkey +# def ticket_attribute_get(attribute_name): + +# filters.extend({"ticket_id": ticket_id}) + +# latest_ticket = ticket_controller._get_latest_event_objects( +# number_of_res=number_of_res, filters=filters +# ) + +# res = alchemyConverter(latest_ticket) +# response = json.dumps(res, cls=AlchemyEncoder) + +# return response + + + + +>>>>>>> get endpoints """ Route expects requests of format: @@ -251,3 +346,28 @@ def ticket_get(ticket_id): """ +<<<<<<< HEAD +======= + +@ticket_bp.route("/", methods=["PUT"]) +@require_appkey +def ticket_update(ticket_id): + + update_dict = request.form["update_dict"] + + # remove ticketId and ticketEventId if present + update_dict.pop(ticket_controller.primary_key, None) + update_dict.pop(TicketEvents.non_prim_identifying_column_name, None) + + filters = 
request.form["filters"] + filters.extend({"ticket_id": ticket_id}) + + updated_object = ticket_controller._modify_latest_object( + update_dict, filters=filters + ) + + res = alchemyConverter(updated_object) + response = json.dumps(res, cls=AlchemyEncoder) + + return response +>>>>>>> get endpoints diff --git a/servers/tenant/controllers/baseController.py b/servers/tenant/controllers/baseController.py index b4fdfcb..1652147 100644 --- a/servers/tenant/controllers/baseController.py +++ b/servers/tenant/controllers/baseController.py @@ -7,7 +7,10 @@ from sqlalchemy.orm import sessionmaker import sys from datetime import datetime +<<<<<<< HEAD +======= +>>>>>>> get endpoints sys.path.insert(0, "..") # import parent folder from models.models import TicketStatus @@ -142,6 +145,7 @@ def _create_base_event(self, args_dict): def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): # get up to 'number_of_res' last event objects +<<<<<<< HEAD # latest_objs = ( # self.session.query(self.model) # .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) @@ -172,6 +176,20 @@ def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): latest_objs = self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) \ .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) \ .limit(number_of_res).all() +======= + latest_objs = ( + self.session.query(self.model) + .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) + .group_by(self.model.non_prim_identifying_column_name) + .order_by(self.model.timestamp) + .limit(number_of_res).all() + ) + + # print(*convert_dict_to_alchemy_filters(self.model, filters)) + # latest_objs = self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) \ + # .filter(*convert_dict_to_alchemy_filters(self.model, filters)) \ + # .limit(number_of_res).all() +>>>>>>> get endpoints # latest_objs = self.session.query(self.model, subquery).order_by(self.model.timestamp).all() @@ -199,6 +217,7 @@ def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): # return latest_objs +<<<<<<< HEAD def _get_latest_event_objects_from_start_date( self, datetime1, filters, number_of_res=5 ): @@ -209,10 +228,23 @@ def _get_latest_event_objects_from_start_date( def _get_latest_event_objects_in_range( self, datetime1, datetime2, filters={}, number_of_res=5 ): +======= + def _get_latest_event_objects_from_start_date(self, datetime1, filters={}, number_of_res=5): + return self._get_latest_event_objects_in_range(datetime1, datetime.now(), filters=filters, number_of_res=5) + + + def _get_latest_event_objects_in_range(self, datetime1, datetime2, filters={}, number_of_res=5): + print("\n\n\nDATETIM1", datetime1, datetime2) + +>>>>>>> get endpoints assert datetime1 <= datetime2 time1 = int(time.mktime(datetime1.timetuple())) time2 = int(time.mktime(datetime2.timetuple())) +<<<<<<< HEAD +======= + +>>>>>>> get endpoints session_filters = convert_dict_to_alchemy_filters(self.model, filters) session_filters.append(self.model.timestamp >= time1) @@ -223,6 +255,7 @@ def _get_latest_event_objects_in_range( ) results = ( self.session.query(self.model) +<<<<<<< HEAD .distinct(self.model.non_prim_identifying_column_name) .filter(*session_filters) .order_by(self.model.non_prim_identifying_column_name, self.model.timestamp) @@ -232,6 +265,14 @@ def _get_latest_event_objects_in_range( print("----------complete-----------------") for result in results: print("TID " + str(result.ticketId)) +======= + 
.filter(*session_filters) + .limit(number_of_res) + .all() + ) + print("results" , results) + +>>>>>>> get endpoints return results def _find_latest_prim_key_from_non_prim_identifying_column_val( diff --git a/servers/tenant/utils.py b/servers/tenant/utils.py index 7202d00..9d3442f 100644 --- a/servers/tenant/utils.py +++ b/servers/tenant/utils.py @@ -14,6 +14,7 @@ def default(self, obj): return json.JSONEncoder.default(self, obj) +<<<<<<< HEAD # DFS function used to convert alchemy objects to JSON <<<<<<< HEAD @@ -86,19 +87,58 @@ def single_convert(obj, res={}, visited=set({})): res[field].append({}) single_convert(obj, res[field][i], visited=visited) +======= +>>>>>>> get endpoints +# DFS function used to convert alchemy objects to JSON +def alchemyConvertUtil(object, res={}, visited=set({})): + visited.add(str(object.__class__)) + for field in [ + x + for x in dir(object) + if not x.startswith("_") + and x not in set({"metadata", "non_prim_identifying_column_name", "registry"}) + ]: + + cls_name = str(object.__getattribute__(field).__class__) + + if "models.models." in cls_name: + if cls_name in visited: + continue else: - res[field] = getattr(obj, field) + visited.add(cls_name) + + res[field] = {} + alchemyConvertUtil(getattr(object, field), res[field], visited=visited) + visited.remove(cls_name) + elif "InstrumentedList" in cls_name: + res[field] = [] + + for i, obj in enumerate(getattr(object, field)): - return res + res[field].append({}) + alchemyConvertUtil(obj, res[field][i], visited=visited) + + else: + res[field] = getattr(object, field) + + return res - if type(object) == list: - res = [single_convert(obj) for obj in object] +def alchemyConverter(obj): + print("obj", obj) + if type(obj) == list: + res = [] + for ele in obj: + res.append(alchemyConvertUtil(ele)) return res else: +<<<<<<< HEAD return single_convert(object) >>>>>>> fix schema +======= + return alchemyConvertUtil(obj) +>>>>>>> get endpoints # converts fiters as a dictionary to alchemy interpretable results From f47628126eb733f8cf50c73edfec5a3a015af6e7 Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Wed, 6 Jul 2022 21:17:03 -0400 Subject: [PATCH 21/40] ALL tickets API done --- .../tenant/blueprints/event_driven/ticket.py | 55 ++++++++++++++----- servers/tenant/controllers/baseController.py | 30 ++++++++++ servers/tenant/models/__init__.py | 4 ++ servers/tenant/utils.py | 11 +++- 4 files changed, 82 insertions(+), 18 deletions(-) diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index 611c623..b1ce71d 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -148,6 +148,7 @@ def ticket_edit(ticket_id): # create ticket # # curl http://127.0.0.1:6767/api/ticket?key=a # # curl http://127.0.0.1:6767/api/ticket/?start=2022-01-01T00:00:00Z&end=2022-04-04T00:00:00Z +<<<<<<< HEAD def get_clean_filters_dict(immutable_args): sql_filters = dict(immutable_args) if "start" in sql_filters: @@ -210,22 +211,34 @@ def ticket_get_all(): filters = request.args or {} sql_filters = dict(filters) +======= + +def get_clean_filters_dict(immutable_args): + sql_filters = dict(immutable_args) +>>>>>>> ALL tickets API done if "start" in sql_filters: del sql_filters["start"] if "end" in sql_filters: del sql_filters["end"] if "limit" in sql_filters: del sql_filters["limit"] + return sql_filters + +def validate_date_format(date_text): + try: + return datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S") + except ValueError: + 
raise ValueError("Incorrect data format, should be %Y-%m-%dT%H:%M:%S") +@ticket_bp.route("/", methods=["GET"]) +# @require_appkey +def ticket_get_all(): + filters = request.args or {} + sql_filters = get_clean_filters_dict(filters) if "limit" not in filters: limit = 5 else: limit = filters["limit"] - def validate_date_format(date_text): - try: - return datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S") - except ValueError: - raise ValueError("Incorrect data format, should be %Y-%m-%dT%H:%M:%S") if "start" in filters: dt_start_str = filters["start"] @@ -242,6 +255,7 @@ def validate_date_format(date_text): ) else: data = ticket_controller._get_latest_event_objects(sql_filters, number_of_res=limit) +<<<<<<< HEAD >>>>>>> get endpoints res = alchemyConverter(data) @@ -250,6 +264,17 @@ def validate_date_format(date_text): ticket["ticketStatus"]["currentStatus"] = ticket["ticketStatus"]["currentStatus"].value return make_response(json.dumps(res, cls=AlchemyEncoder)) +======= + + res = alchemyConverter(data) + + print("\n\n\n\nRES POST AC ----------------------") + print(res) + response = json.dumps(res) + + print("\n\n\n\nRESULT RESPONSE ------------------" ) + print(response) +>>>>>>> ALL tickets API done <<<<<<< HEAD @@ -275,18 +300,18 @@ def ticket_get(ticket_id): # @require_appkey def ticket_get(ticket_id): filters = request.args.get("filters") or {} - - number_of_res = request.args.get("number_of_res") - - filters["ticketId"] = ticket_id - - - latest_ticket = ticket_controller._get_latest_event_objects( - number_of_res=number_of_res, filters=filters + + + sql_filters = get_clean_filters_dict(filters) + sql_filters["ticketId"] = ticket_id + dt_start = validate_date_format("1900-01-01T00:00:00") + dt_end = validate_date_format("2100-01-01T00:00:00") + data = ticket_controller._get_latest_event_objects_in_range( + dt_start, dt_end, filters=sql_filters ) - res = alchemyConverter(latest_ticket[0]) - response = json.dumps(res, cls=AlchemyEncoder) + res = alchemyConverter(data[0]) + response = json.dumps(res) return response >>>>>>> get endpoints diff --git a/servers/tenant/controllers/baseController.py b/servers/tenant/controllers/baseController.py index 1652147..c257ce6 100644 --- a/servers/tenant/controllers/baseController.py +++ b/servers/tenant/controllers/baseController.py @@ -146,11 +146,15 @@ def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): # get up to 'number_of_res' last event objects <<<<<<< HEAD +<<<<<<< HEAD +======= +>>>>>>> ALL tickets API done # latest_objs = ( # self.session.query(self.model) # .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) # .group_by(self.model.non_prim_identifying_column_name) # .order_by(self.model.timestamp) +<<<<<<< HEAD <<<<<<< HEAD # .limit(number_of_res) # .all() @@ -190,6 +194,16 @@ def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): # .filter(*convert_dict_to_alchemy_filters(self.model, filters)) \ # .limit(number_of_res).all() >>>>>>> get endpoints +======= + # .limit(number_of_res).all() + # ) + + print(*convert_dict_to_alchemy_filters(self.model, filters)) + latest_objs = self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) \ + .filter(*convert_dict_to_alchemy_filters(self.model, filters)) \ + .order_by(self.model.timestamp) \ + .limit(1).all() +>>>>>>> ALL tickets API done # latest_objs = self.session.query(self.model, subquery).order_by(self.model.timestamp).all() @@ -234,9 +248,12 @@ def _get_latest_event_objects_from_start_date(self, datetime1, 
filters={}, numb def _get_latest_event_objects_in_range(self, datetime1, datetime2, filters={}, number_of_res=5): +<<<<<<< HEAD print("\n\n\nDATETIM1", datetime1, datetime2) >>>>>>> get endpoints +======= +>>>>>>> ALL tickets API done assert datetime1 <= datetime2 time1 = int(time.mktime(datetime1.timetuple())) time2 = int(time.mktime(datetime2.timetuple())) @@ -249,6 +266,7 @@ def _get_latest_event_objects_in_range(self, datetime1, datetime2, filters={}, n session_filters.append(self.model.timestamp >= time1) session_filters.append(self.model.timestamp <= time2) +<<<<<<< HEAD print( "------------------------RUNNING TICKET GET QUERY----------------------------" @@ -273,6 +291,18 @@ def _get_latest_event_objects_in_range(self, datetime1, datetime2, filters={}, n print("results" , results) >>>>>>> get endpoints +======= + + print("------------------------RUNNING TICKET GET QUERY----------------------------") + results = \ + self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) \ + .filter(*session_filters) \ + .order_by(self.model.non_prim_identifying_column_name, self.model.timestamp) \ + .limit(number_of_res).all() + print("----------complete-----------------") + for result in results: + print("TID " + str(result.ticketId)) +>>>>>>> ALL tickets API done return results def _find_latest_prim_key_from_non_prim_identifying_column_val( diff --git a/servers/tenant/models/__init__.py b/servers/tenant/models/__init__.py index 38a9741..d1dc5c5 100644 --- a/servers/tenant/models/__init__.py +++ b/servers/tenant/models/__init__.py @@ -15,10 +15,14 @@ Base = declarative_base() <<<<<<< HEAD +<<<<<<< HEAD engine = create_engine(cnx_string, echo=False) ======= engine = create_engine(cnx_string) >>>>>>> push +======= +engine = create_engine(cnx_string, echo=True) +>>>>>>> ALL tickets API done print("connecting to db....") Session = sessionmaker(bind=engine) session = Session() diff --git a/servers/tenant/utils.py b/servers/tenant/utils.py index 9d3442f..494fa5f 100644 --- a/servers/tenant/utils.py +++ b/servers/tenant/utils.py @@ -91,7 +91,7 @@ def single_convert(obj, res={}, visited=set({})): >>>>>>> get endpoints # DFS function used to convert alchemy objects to JSON -def alchemyConvertUtil(object, res={}, visited=set({})): +def alchemyConvertUtil(object, res, visited): visited.add(str(object.__class__)) for field in [ x @@ -125,13 +125,15 @@ def alchemyConvertUtil(object, res={}, visited=set({})): return res def alchemyConverter(obj): - print("obj", obj) if type(obj) == list: res = [] for ele in obj: - res.append(alchemyConvertUtil(ele)) + print("ALCHEMY DEBUG ---------------------------") + print("TID: " + str(ele.ticketId)) + res.append(alchemyConvertUtil(ele, {}, visited=set())) return res else: +<<<<<<< HEAD <<<<<<< HEAD return single_convert(object) @@ -139,6 +141,9 @@ def alchemyConverter(obj): ======= return alchemyConvertUtil(obj) >>>>>>> get endpoints +======= + return alchemyConvertUtil(obj, {}, visited=set()) +>>>>>>> ALL tickets API done # converts fiters as a dictionary to alchemy interpretable results From 57d16c440316ff7adf6660c79886f701579239ab Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Wed, 6 Jul 2022 23:42:36 -0400 Subject: [PATCH 22/40] Fixing default date bug --- .../tenant/blueprints/event_driven/ticket.py | 24 ++++++++++++++++--- servers/tenant/controllers/baseController.py | 4 ++++ servers/tenant/utils.py | 3 ++- 3 files changed, 27 insertions(+), 4 deletions(-) diff --git a/servers/tenant/blueprints/event_driven/ticket.py 
b/servers/tenant/blueprints/event_driven/ticket.py index b1ce71d..2cc5e93 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -3,6 +3,7 @@ <<<<<<< HEAD from datetime import datetime from wsgiref import validate +<<<<<<< HEAD from numpy import number from flask import make_response, request, jsonify, Blueprint @@ -11,6 +12,8 @@ ======= from datetime import datetime >>>>>>> get endpoints +======= +>>>>>>> Fixing default date bug from numpy import number from flask import request, jsonify, Blueprint @@ -230,11 +233,20 @@ def validate_date_format(date_text): except ValueError: raise ValueError("Incorrect data format, should be %Y-%m-%dT%H:%M:%S") +def default_start(): + dt_start = validate_date_format("1900-01-01T00:00:00") + return dt_start + +def default_end(): + dt_end = validate_date_format("2100-01-01T00:00:00") + return dt_end + @ticket_bp.route("/", methods=["GET"]) # @require_appkey def ticket_get_all(): filters = request.args or {} sql_filters = get_clean_filters_dict(filters) +<<<<<<< HEAD if "limit" not in filters: limit = 5 else: @@ -265,6 +277,14 @@ def ticket_get_all(): return make_response(json.dumps(res, cls=AlchemyEncoder)) ======= +======= + limit = 5 if "limit" not in filters else filters["limit"] + + dt_start = validate_date_format(filters["start"]) if "start" in filters else default_start() + dt_end = validate_date_format(filters["end"]) if "end" in filters else default_end() + + data = ticket_controller._get_latest_event_objects_in_range(dt_start, dt_end, sql_filters, number_of_res=limit) +>>>>>>> Fixing default date bug res = alchemyConverter(data) @@ -304,10 +324,8 @@ def ticket_get(ticket_id): sql_filters = get_clean_filters_dict(filters) sql_filters["ticketId"] = ticket_id - dt_start = validate_date_format("1900-01-01T00:00:00") - dt_end = validate_date_format("2100-01-01T00:00:00") data = ticket_controller._get_latest_event_objects_in_range( - dt_start, dt_end, filters=sql_filters + default_start(), default_end(), filters=sql_filters ) res = alchemyConverter(data[0]) diff --git a/servers/tenant/controllers/baseController.py b/servers/tenant/controllers/baseController.py index c257ce6..33a4272 100644 --- a/servers/tenant/controllers/baseController.py +++ b/servers/tenant/controllers/baseController.py @@ -231,6 +231,7 @@ def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): # return latest_objs +<<<<<<< HEAD <<<<<<< HEAD def _get_latest_event_objects_from_start_date( self, datetime1, filters, number_of_res=5 @@ -244,6 +245,9 @@ def _get_latest_event_objects_in_range( ): ======= def _get_latest_event_objects_from_start_date(self, datetime1, filters={}, number_of_res=5): +======= + def _get_latest_event_objects_from_start_date(self, datetime1, filters, number_of_res=5): +>>>>>>> Fixing default date bug return self._get_latest_event_objects_in_range(datetime1, datetime.now(), filters=filters, number_of_res=5) diff --git a/servers/tenant/utils.py b/servers/tenant/utils.py index 494fa5f..4f14e22 100644 --- a/servers/tenant/utils.py +++ b/servers/tenant/utils.py @@ -130,7 +130,8 @@ def alchemyConverter(obj): for ele in obj: print("ALCHEMY DEBUG ---------------------------") print("TID: " + str(ele.ticketId)) - res.append(alchemyConvertUtil(ele, {}, visited=set())) + json_res = alchemyConvertUtil(ele, {}, visited=set()) + res.append(json_res) return res else: <<<<<<< HEAD From c1e40db673541ed3f9873635b33abbdf9a638200 Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Thu, 7 Jul 2022 16:08:52 
-0400 Subject: [PATCH 23/40] Cors header --- servers/tenant/blueprints/event_driven/ticket.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index 2cc5e93..8493a72 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -192,6 +192,7 @@ def default_end(): dt_end = validate_date_format("2100-01-01T00:00:00") return dt_end +<<<<<<< HEAD @ticket_bp.route("/", methods=["GET"]) @auth_required() @@ -216,6 +217,8 @@ def ticket_get_all(): ======= +======= +>>>>>>> Cors header def get_clean_filters_dict(immutable_args): sql_filters = dict(immutable_args) >>>>>>> ALL tickets API done @@ -246,6 +249,7 @@ def default_end(): def ticket_get_all(): filters = request.args or {} sql_filters = get_clean_filters_dict(filters) +<<<<<<< HEAD <<<<<<< HEAD if "limit" not in filters: limit = 5 @@ -279,6 +283,9 @@ def ticket_get_all(): ======= ======= limit = 5 if "limit" not in filters else filters["limit"] +======= + limit = 5000 if "limit" not in filters else filters["limit"] +>>>>>>> Cors header dt_start = validate_date_format(filters["start"]) if "start" in filters else default_start() dt_end = validate_date_format(filters["end"]) if "end" in filters else default_end() From 3bc32a357b855e812714f3034853dd1c4f32abf6 Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Thu, 7 Jul 2022 16:17:00 -0400 Subject: [PATCH 24/40] Cors header --- servers/tenant/blueprints/event_driven/ticket.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index 8493a72..c04a6f7 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -152,6 +152,14 @@ def ticket_edit(ticket_id): # create ticket # # curl http://127.0.0.1:6767/api/ticket/?start=2022-01-01T00:00:00Z&end=2022-04-04T00:00:00Z <<<<<<< HEAD +<<<<<<< HEAD +======= +def corsify(resp): + resp.headers['Access-Control-Allow-Origin'] = '*' + resp.headers['Access-Control-Allow-Headers'] = ['Origin', 'X-Requested-With', 'Content-Type', 'Accept'] + return json.dumps(resp) + +>>>>>>> Cors header def get_clean_filters_dict(immutable_args): sql_filters = dict(immutable_args) if "start" in sql_filters: @@ -295,6 +303,7 @@ def ticket_get_all(): res = alchemyConverter(data) +<<<<<<< HEAD print("\n\n\n\nRES POST AC ----------------------") print(res) response = json.dumps(res) @@ -303,6 +312,9 @@ def ticket_get_all(): print(response) >>>>>>> ALL tickets API done +======= + return corsify(res) +>>>>>>> Cors header <<<<<<< HEAD def get_single(ticket_id): @@ -336,10 +348,14 @@ def ticket_get(ticket_id): ) res = alchemyConverter(data[0]) +<<<<<<< HEAD response = json.dumps(res) return response >>>>>>> get endpoints +======= + return corsify(res) +>>>>>>> Cors header """ Route expects requests of format: From f312dccb60246f3383ff15a55246a42f3a3dd6da Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Thu, 7 Jul 2022 16:20:28 -0400 Subject: [PATCH 25/40] Cors header --- servers/tenant/blueprints/event_driven/ticket.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index c04a6f7..5104923 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -16,8 +16,12 @@ >>>>>>> 
Fixing default date bug from numpy import number +<<<<<<< HEAD from flask import request, jsonify, Blueprint >>>>>>> modifying db schema +======= +from flask import make_response, request, jsonify, Blueprint +>>>>>>> Cors header import sys @@ -155,9 +159,10 @@ def ticket_edit(ticket_id): # create ticket <<<<<<< HEAD ======= def corsify(resp): + resp = make_response(json.dumps(resp)) resp.headers['Access-Control-Allow-Origin'] = '*' resp.headers['Access-Control-Allow-Headers'] = ['Origin', 'X-Requested-With', 'Content-Type', 'Accept'] - return json.dumps(resp) + return resp >>>>>>> Cors header def get_clean_filters_dict(immutable_args): From dc4d771fb28b8a3c042c20232ce3b88886cca08d Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Thu, 7 Jul 2022 17:27:04 -0400 Subject: [PATCH 26/40] message --- servers/tenant/blueprints/event_driven/ticket.py | 1 + 1 file changed, 1 insertion(+) diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index 5104923..9c2cc7b 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -162,6 +162,7 @@ def corsify(resp): resp = make_response(json.dumps(resp)) resp.headers['Access-Control-Allow-Origin'] = '*' resp.headers['Access-Control-Allow-Headers'] = ['Origin', 'X-Requested-With', 'Content-Type', 'Accept'] + trd return resp >>>>>>> Cors header From fde205ed5a271894874944de40e9b76b77839929 Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Thu, 7 Jul 2022 18:41:19 -0400 Subject: [PATCH 27/40] Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz --- .../tenant/blueprints/event_driven/shipper.py | 22 ++++++++++ .../tenant/blueprints/event_driven/ticket.py | 15 +++++++ servers/tenant/blueprints/simple/pdf.py | 43 ------------------- .../tenant/controllers/controllerMapper.py | 9 +++- servers/tenant/database/table_creation.sql | 9 ++++ servers/tenant/models/models.py | 32 ++++++++++++++ servers/tenant/requirements.txt | 1 + servers/tenant/server.py | 5 +++ 8 files changed, 92 insertions(+), 44 deletions(-) create mode 100644 servers/tenant/blueprints/event_driven/shipper.py delete mode 100644 servers/tenant/blueprints/simple/pdf.py diff --git a/servers/tenant/blueprints/event_driven/shipper.py b/servers/tenant/blueprints/event_driven/shipper.py new file mode 100644 index 0000000..4137098 --- /dev/null +++ b/servers/tenant/blueprints/event_driven/shipper.py @@ -0,0 +1,22 @@ +import json +import datetime +from flask import request, jsonify, Blueprint + +import sys + +sys.path.insert(0, "..") # import parent folder + +from controllers.controllerMapper import PdfController +from models.models import TicketEvents +from utils import ( + AlchemyEncoder, + require_appkey, + alchemyConverter, +) + +shipper_bp = Blueprint("shipper_bp", __name__, url_prefix="shipper") + + +# TODO: USER BASED AUTH + +ticket_controller = TicketController() \ No newline at end of file diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index 9c2cc7b..bc0b9c5 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -7,6 +7,7 @@ from numpy import number from flask import make_response, request, jsonify, Blueprint +<<<<<<< HEAD ======= import datetime ======= @@ -22,6 +23,9 @@ ======= from flask import make_response, request, jsonify, Blueprint >>>>>>> Cors header +======= +from flask_cors import cross_origin +>>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz import sys @@ 
-106,7 +110,12 @@ def ticket_get_all_with_status(status): # create ticket @ticket_bp.route("/", methods=["POST"]) +<<<<<<< HEAD @auth_required() +======= +@cross_origin(supports_credentials=True) +@require_appkey +>>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz def ticket_post(): # create ticket print("Creating ticket from the following JSON:") print(request.data) @@ -259,6 +268,7 @@ def default_end(): return dt_end @ticket_bp.route("/", methods=["GET"]) +@cross_origin(supports_credentials=True) # @require_appkey def ticket_get_all(): filters = request.args or {} @@ -335,6 +345,7 @@ def get_single(ticket_id): return data[0] if isinstance(data, list) else data @ticket_bp.route("/", methods=["GET"]) +<<<<<<< HEAD @auth_required() def ticket_get(ticket_id): data = get_single(ticket_id) @@ -342,6 +353,9 @@ def ticket_get(ticket_id): return make_response(json.dumps(res, cls=AlchemyEncoder)) ======= @ticket_bp.route("/", methods=["GET"]) +======= +@cross_origin(supports_credentials=True) +>>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz # @require_appkey def ticket_get(ticket_id): filters = request.args.get("filters") or {} @@ -422,6 +436,7 @@ def ticket_get(ticket_id): ======= @ticket_bp.route("/", methods=["PUT"]) +@cross_origin(supports_credentials=True) @require_appkey def ticket_update(ticket_id): diff --git a/servers/tenant/blueprints/simple/pdf.py b/servers/tenant/blueprints/simple/pdf.py deleted file mode 100644 index 84361fa..0000000 --- a/servers/tenant/blueprints/simple/pdf.py +++ /dev/null @@ -1,43 +0,0 @@ -import os -from flask import request, jsonify, Blueprint -import io -from uuid import uuid4 -import traceback -from celery_client import client, logger -from controllers.pdfController import PDFController - -pdf_bp = Blueprint("pdf_bp", __name__, url_prefix="document") - -pdfcontroller = PDFController() - - -@pdf_bp.route("", methods=["POST"]) -def pdf_post(): - if "file" not in request.files: - res = jsonify({"message": "No file part in the request"}) - res.status_code = 400 - return res - - file = request.files["file"] - - if file.filename == "": - res = jsonify({"message": "No file selected for uploading"}) - res.status_code = 400 - return res - if file and file.filename.split(".")[-1].lower() == "pdf": - pdfcontroller.process_files() - resp = jsonify({"message": "File successfully uploaded"}) - resp.status_code = 202 - return resp - else: - resp = jsonify({"message": "Allowed file types are pdf only"}) - resp.status_code = 400 - return resp - - -@pdf_bp.route("{pdf_id}", methods=["GET"]) -def pdf_get(): - res = jsonify({"message": "Please specify PDFId"}) - # TODO ... 
- res.status_code = 400 - return res diff --git a/servers/tenant/controllers/controllerMapper.py b/servers/tenant/controllers/controllerMapper.py index 4ce5a1f..8601a03 100644 --- a/servers/tenant/controllers/controllerMapper.py +++ b/servers/tenant/controllers/controllerMapper.py @@ -157,6 +157,7 @@ class DeliveryMilestonesController(MilestoneController): def __init__(self): super().__init__(DeliveryMilestones) +<<<<<<< HEAD def convert_to_desc(self, milestones): string_milestones = [] for milestone in milestones: @@ -228,4 +229,10 @@ def _create_base_event(self, args_dict): "approvedByUserId": args_dict["userId"], } ) - return obj \ No newline at end of file + return obj +======= + +class UserController(DocumentController): + def __init__(self): + super().__init__(Documents) +>>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz diff --git a/servers/tenant/database/table_creation.sql b/servers/tenant/database/table_creation.sql index 7d186bd..2e38aa9 100644 --- a/servers/tenant/database/table_creation.sql +++ b/servers/tenant/database/table_creation.sql @@ -37,6 +37,9 @@ CREATE TABLE IF NOT EXISTS Users ( ); <<<<<<< HEAD +<<<<<<< HEAD +======= +>>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz CREATE TABLE IF NOT EXISTS Documents ( "documentId" INT, "timestamp" INT, @@ -62,14 +65,18 @@ CREATE TABLE IF NOT EXISTS Documents ( "consigneePhoneNumber" VARCHAR(256), "pieces" VARCHAR(256), PRIMARY KEY("documentId") +<<<<<<< HEAD ======= >>>>>>> push +======= +>>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz ); CREATE TABLE IF NOT EXISTS TicketEvents ( "ticketEventId" INT, "ticketId" INT, "timestamp" INT, +<<<<<<< HEAD "userId" INT, "customerId" INT, "barcodeNumber" INT, @@ -134,6 +141,8 @@ CREATE TABLE IF NOT EXISTS TicketEvents ( "timestamp" INT, "shipperEventId" INT, "consigneeEventId" INT, +======= +>>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz "userId" INT, "customerId" INT, "barcodeNumber" INT, diff --git a/servers/tenant/models/models.py b/servers/tenant/models/models.py index c7b493f..00fb580 100644 --- a/servers/tenant/models/models.py +++ b/servers/tenant/models/models.py @@ -104,7 +104,36 @@ class Users(Base): def __repr__(self): return f"< Users:: userId: {self.userId}>" +class Documents(Base): + __tablename__ = "documents" + documentId = Column(Integer, nullable=False) + timestamp = Column(Integer, default=int(time.time())) + userId = Column(Integer, ForeignKey(Users.userId), nullable=False) + customerId = Column(Integer, ForeignKey(Customers.customerId), nullable=False) + barcodeNumber = Column(Integer, nullable=False) + houseReferenceNumber = Column(Integer, nullable=False) + orderS3Link = Column(String, nullable=False) + weight = Column(Integer, nullable=False) + claimedNumberOfPieces = Column(Integer, nullable=False) + BOLNumber = Column(Integer, nullable=False) + specialServices = Column(String) + specialInstructions = Column(String) + # shipper + shipperCompany = Column(String, nullable=False) + shipperName = Column(String, nullable=False) + shipperAddress = Column(String, nullable=False) + shipperPostalCode = Column(String, nullable=False) + shipperPhoneNumber = Column(String, nullable=False) + # consignee + consigneeCompany = Column(String, nullable=False) + consigneeName = Column(String, nullable=False) + consigneeAddress = Column(String, nullable=False) + consigneePostalCode = Column(String, nullable=False) + consigneePhoneNumber = Column(String, nullable=False) + # pieces + pieces = Column(String, nullable=False) +<<<<<<< HEAD <<<<<<< HEAD class Documents(Base): 
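# Context for the change below: SQLAlchemy's declarative mapper needs at least one
# primary-key column on every mapped class, so marking documentId as primary_key=True
# is presumably what allows Documents to be mapped at all. A minimal valid key column
# would look like, for example,
#     documentId = Column(Integer, primary_key=True, autoincrement=True)
# (autoincrement is shown only as an illustration, not taken from this patch).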
__tablename__ = "documents" @@ -148,6 +177,9 @@ class TicketStatus(Base): ======= >>>>>>> modifying db schema +======= + +>>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz class TicketEvents(Base): __tablename__ = "ticketevents" non_prim_identifying_column_name = "ticketId" diff --git a/servers/tenant/requirements.txt b/servers/tenant/requirements.txt index a36e9c6..a51c422 100644 --- a/servers/tenant/requirements.txt +++ b/servers/tenant/requirements.txt @@ -22,6 +22,7 @@ MarkupSafe==2.0.1 multilingual-pdf2text==1.1.0 numpy==1.22.0 ocrmypdf==13.2.0 +Flask-Cors packaging==21.3 <<<<<<< HEAD Flask-Cors diff --git a/servers/tenant/server.py b/servers/tenant/server.py index c4564e9..5ca329a 100644 --- a/servers/tenant/server.py +++ b/servers/tenant/server.py @@ -5,6 +5,7 @@ from blueprints.simple.customers import customer_bp from blueprints.simple.users import user_bp <<<<<<< HEAD +<<<<<<< HEAD from blueprints.simple.milestones import milestone_bp from blueprints.simple.driver import driver_bp @@ -13,6 +14,10 @@ ======= from servers.tenant.blueprints.simple.pdf import pdf_bp # TODO: Move this in seperate microservice >>>>>>> modifying db schema +======= +from flask_cors import cross_origin +from servers.tenant.blueprints.simple.document import pdf_bp # TODO: Move this in seperate microservice +>>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz # from models.__init__ import engine, Base # from models.models import INDEXES From 410b6634025525b41485b42699c9b7f186ce3db0 Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Thu, 7 Jul 2022 19:04:55 -0400 Subject: [PATCH 28/40] Fix commit bugs for mergmerge --- .../tenant/blueprints/event_driven/pieces.py | 6 +++++ .../tenant/blueprints/event_driven/shipper.py | 22 ------------------- .../tenant/blueprints/event_driven/ticket.py | 1 - .../tenant/controllers/controllerMapper.py | 2 +- servers/tenant/models/models.py | 10 ++++++--- 5 files changed, 14 insertions(+), 27 deletions(-) delete mode 100644 servers/tenant/blueprints/event_driven/shipper.py diff --git a/servers/tenant/blueprints/event_driven/pieces.py b/servers/tenant/blueprints/event_driven/pieces.py index 32bd159..e92bcc2 100644 --- a/servers/tenant/blueprints/event_driven/pieces.py +++ b/servers/tenant/blueprints/event_driven/pieces.py @@ -1,6 +1,7 @@ import json import datetime from flask import request, jsonify, Blueprint +from flask_cors import cross_origin import sys @@ -35,7 +36,12 @@ @pieces_bp.route("/{piece_id}", methods=["GET"]) +<<<<<<< HEAD @auth_required() +======= +@cross_origin(supports_credentials=True) +@require_appkey +>>>>>>> Fix commit bugs for mergmerge def pieces_get_history(piece_id): filters = request.args.get("filters") filters.extend({"piece_id": piece_id}) diff --git a/servers/tenant/blueprints/event_driven/shipper.py b/servers/tenant/blueprints/event_driven/shipper.py deleted file mode 100644 index 4137098..0000000 --- a/servers/tenant/blueprints/event_driven/shipper.py +++ /dev/null @@ -1,22 +0,0 @@ -import json -import datetime -from flask import request, jsonify, Blueprint - -import sys - -sys.path.insert(0, "..") # import parent folder - -from controllers.controllerMapper import PdfController -from models.models import TicketEvents -from utils import ( - AlchemyEncoder, - require_appkey, - alchemyConverter, -) - -shipper_bp = Blueprint("shipper_bp", __name__, url_prefix="shipper") - - -# TODO: USER BASED AUTH - -ticket_controller = TicketController() \ No newline at end of file diff --git a/servers/tenant/blueprints/event_driven/ticket.py 
b/servers/tenant/blueprints/event_driven/ticket.py index bc0b9c5..e89f3ce 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -171,7 +171,6 @@ def corsify(resp): resp = make_response(json.dumps(resp)) resp.headers['Access-Control-Allow-Origin'] = '*' resp.headers['Access-Control-Allow-Headers'] = ['Origin', 'X-Requested-With', 'Content-Type', 'Accept'] - trd return resp >>>>>>> Cors header diff --git a/servers/tenant/controllers/controllerMapper.py b/servers/tenant/controllers/controllerMapper.py index 8601a03..d000ba8 100644 --- a/servers/tenant/controllers/controllerMapper.py +++ b/servers/tenant/controllers/controllerMapper.py @@ -232,7 +232,7 @@ def _create_base_event(self, args_dict): return obj ======= -class UserController(DocumentController): +class DocumentController(BaseController): def __init__(self): super().__init__(Documents) >>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz diff --git a/servers/tenant/models/models.py b/servers/tenant/models/models.py index 00fb580..828241c 100644 --- a/servers/tenant/models/models.py +++ b/servers/tenant/models/models.py @@ -106,10 +106,8 @@ def __repr__(self): class Documents(Base): __tablename__ = "documents" - documentId = Column(Integer, nullable=False) + documentId = Column(Integer, primary_key=True, nullable=False) timestamp = Column(Integer, default=int(time.time())) - userId = Column(Integer, ForeignKey(Users.userId), nullable=False) - customerId = Column(Integer, ForeignKey(Customers.customerId), nullable=False) barcodeNumber = Column(Integer, nullable=False) houseReferenceNumber = Column(Integer, nullable=False) orderS3Link = Column(String, nullable=False) @@ -132,7 +130,11 @@ class Documents(Base): consigneePhoneNumber = Column(String, nullable=False) # pieces pieces = Column(String, nullable=False) + customerName = Column(String, nullable=False) + + +<<<<<<< HEAD <<<<<<< HEAD <<<<<<< HEAD class Documents(Base): @@ -180,6 +182,8 @@ class TicketStatus(Base): ======= >>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz +======= +>>>>>>> Fix commit bugs for mergmerge class TicketEvents(Base): __tablename__ = "ticketevents" non_prim_identifying_column_name = "ticketId" From 6ed5543a3a177ca0d1e097164cb51064484efd10 Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Fri, 8 Jul 2022 04:30:04 -0400 Subject: [PATCH 29/40] Fixed celery pipeline --- extraction/app.py | 37 --- extraction/const.py | 45 --- extraction/extract.py | 351 --------------------- extraction/ocr.py | 22 -- extraction/requirements.txt | 51 --- servers/tenant/Pipfile | 22 -- servers/tenant/Pipfile.lock | 501 ------------------------------ servers/tenant/app.Dockerfile | 8 - servers/tenant/celery.Dockerfile | 8 - servers/tenant/celery_client.py | 48 +++ servers/tenant/docker-compose.yml | 52 ---- servers/tenant/kill-cluster.sh | 1 - servers/tenant/models/__init__.py | 2 +- servers/tenant/server.py | 23 +- servers/tenant/start-cluster.sh | 2 - 15 files changed, 71 insertions(+), 1102 deletions(-) delete mode 100644 extraction/app.py delete mode 100644 extraction/const.py delete mode 100644 extraction/extract.py delete mode 100644 extraction/ocr.py delete mode 100644 extraction/requirements.txt delete mode 100644 servers/tenant/Pipfile delete mode 100644 servers/tenant/Pipfile.lock delete mode 100644 servers/tenant/app.Dockerfile delete mode 100644 servers/tenant/celery.Dockerfile delete mode 100644 servers/tenant/docker-compose.yml delete mode 100644 servers/tenant/kill-cluster.sh delete mode 100755 
servers/tenant/start-cluster.sh diff --git a/extraction/app.py b/extraction/app.py deleted file mode 100644 index 4501c6b..0000000 --- a/extraction/app.py +++ /dev/null @@ -1,37 +0,0 @@ -import os -# from multilingual_pdf2text.pdf2text import PDF2Text -# from multilingual_pdf2text.models.document_model.document import Document -# import pdfplumber -# import extraction.extract as e -import json - - -def read_pdfplumber(file_name): - with pdfplumber.open(file_name) as pdf: - page = pdf.pages[0] - page = page.extract_text() - return page - - -def work(folder_path): - pdf_uuid = folder_path.split("/")[-1] - pdf_file = f"{folder_path}/{pdf_uuid}.pdf" - print(f"Working on {pdf_file}...") - pdf_document = Document( - document_path=pdf_file, - language='eng' - ) - pdf2text = PDF2Text(document=pdf_document) - content = pdf2text.extract() - - ml_page_text = list(content)[0]["text"] - pp_text = read_pdfplumber(pdf_file) - - extract_json = e.extract(ml_page_text, plumber_page=pp_text) - - with open(f"{folder_path}/{pdf_uuid}.json", "w") as f: - json.dump(extract_json, f, indent=2) - return extract_json - -if __name__ == '__main__': - work("uploads/bf0c396f-dcc6-4d3f-8d7c-9180d2f0a322/cedc5b27-2a94-4e17-ac48-65c13e065102") \ No newline at end of file diff --git a/extraction/const.py b/extraction/const.py deleted file mode 100644 index d477c86..0000000 --- a/extraction/const.py +++ /dev/null @@ -1,45 +0,0 @@ -CEVA = "CEVA" -NORTH_AMERICAN = "NORTH_AMERICAN" - -CEVA_NUM = "1-888-327-8247" - - -#doclist_keys - -HOUSE_REF = "house_ref" -BARCODE = "barcode" -FIRST_PARTY = "first_party" -NUM_PCS = "num_pcs" -PCS = "pcs" -WEIGHT = "weight" - -PKG = "pkg" -WT_LBS = "wt(lbs)" -COMMODITY_DESCRIPTION = "commodity_description" -DIMS_IN = "dims(in)" - -BOL_NUM = "bol_num" -SPECIAL_SERVICES = "special_services" -SPECIAL_INSTRUCTIONS = "special_instructions" - -COMPANY = "company" -NAME = "name" -ADDRESS = "address" -POSTAL_CODE = "postal_code" -PHONE_NUMBER = "phone_number" - -CONSIGNEE = "consignee" -SHIPPER = "shipper" - -CEVA_SHIPPER_FIELDS = [COMPANY, ADDRESS] -CEVA_CONSIGNEE_FIELDS = [NAME, ADDRESS] - -NORTH_AMERICAN_SHIPPER_FIELDS = [COMPANY, NAME, COMPANY, ADDRESS] -NORTH_AMERICAN_CONSIGNEE_FIELDS = [COMPANY, NAME, COMPANY, ADDRESS] -BARCODE_REGEX = "([A-Z][A-Z]\d{3}-\d{7})" -PCS_REGEX = "(\d+) +PCS" -LBS_REGEX = "(\d+) +[Ll]bs" -POSTAL_CODE_REGEX_BOTH = "[ABCEGHJ-NPRSTVXY][\dO][ABCEGHJ-NPRSTV-Z][ -]?[\dO][ABCEGHJ-NPRSTV-Z][\dO]$" -PHONE_NUMBER_REGEX = "((\+\d{1,2}\s)?\(?(905|807|705|647|613|519|416|343|289|226)\)?[\s.-]?\d{3}[\s.-]?\d{4})" - -PHONE_COLON_REGEX = "ne: (\d{10})" \ No newline at end of file diff --git a/extraction/extract.py b/extraction/extract.py deleted file mode 100644 index 491ffbd..0000000 --- a/extraction/extract.py +++ /dev/null @@ -1,351 +0,0 @@ -import os -import ocrmypdf -import time -import re -from extraction.const import * -import pdfplumber - -def ocr(file_path, save_path): - ocrmypdf.ocr(file_path, save_path) - -def read_pdf(file_name, page_num): - file_path = os.path.join("../text", file_name.split(".")[0], f"{page_num}.txt") - with open(file_path, "r") as f: - return f.read() - -def read_pdfplumber(file_name, page_num): - with pdfplumber.open("../data/NORTH_AMERICAN.pdf") as pdf: - page = pdf.pages[page_num-1] - page = page.extract_text() - return page - - -""" -CEVA - 1st party - Consignee info - Name - Addr - Postal Code - Phone Number - House/ref - Barcode - Lbs - # PCs - Shipper - Special Instructions -""" -def extract_ceva(page): - lines = page.splitlines() - ceva_list = 
{FIRST_PARTY: CEVA} - # barcode - matches = re.findall(BARCODE_REGEX, page) - if matches: - insert_in_dict(ceva_list, BARCODE, matches[0]) - # NUM PCS - matches = re.findall(PCS_REGEX, page) - if matches: - insert_in_dict(ceva_list, NUM_PCS, matches[0]) - # weight - matches = re.findall(LBS_REGEX, page) - if matches: - insert_in_dict(ceva_list, WEIGHT, f"{matches[0]} lbs") - # phone number - matches = re.findall(PHONE_NUMBER_REGEX, page) - consignee_phone_number = matches[0][0] if matches else "" - - for line_num, line in enumerate(lines): - # house ref # - if "house" in line.lower() or "ref #" in line.lower(): - insert_in_dict(ceva_list, HOUSE_REF, line.split(" ")[-1]) - # shipper - if line.lower().startswith('shipper') or line.lower().endswith('expéditeur'): - shipper = extract_info_ceva(lines, line_num) - insert_in_dict(ceva_list, SHIPPER, shipper) - - #consignee - if is_consignee(line): - consignee = extract_info_ceva(lines, line_num, is_shipper=False) - insert_in_dict(consignee, PHONE_NUMBER, consignee_phone_number) - insert_in_dict(ceva_list, CONSIGNEE, consignee) - if "instructions" in line.lower(): - special_instructions = extract_special(lines, line_num, ["reference"]) - insert_in_dict(ceva_list, SPECIAL_INSTRUCTIONS, special_instructions) - - return ceva_list - - -def is_consignee(line): - return line.lower().startswith('consignee') or line.lower().endswith('consignataire') - - -def extract_info_ceva(lines, starting_num, is_shipper=True): - - field_index = 0 - curr_field_entry = "" - shipper_dict = {} - FIELDS = CEVA_SHIPPER_FIELDS if is_shipper else CEVA_CONSIGNEE_FIELDS - for index in range(starting_num+1, len(lines)): - if not lines[index]: - continue - # name or company - if field_index == 0: - if starts_with_number(lines[index]): - field_index += 1 - shipper_dict[FIELDS[field_index-1]] = curr_field_entry.rstrip() - curr_field_entry = "" - else: - curr_field_entry += lines[index] + " " - - if FIELDS[field_index] == ADDRESS: - curr_field_entry += lines[index] + " " - if is_consignee(lines[index]) or re.findall(POSTAL_CODE_REGEX_BOTH, lines[index]): - shipper_dict[ADDRESS] = curr_field_entry.rstrip() - break - - for field in FIELDS: - if field not in shipper_dict: - shipper_dict[field] = "" - - postal_code = extract_postal_code(shipper_dict[ADDRESS]) - insert_in_dict(shipper_dict, POSTAL_CODE, postal_code) - - return shipper_dict - - -def starts_with_number(line): - return line.split(" ")[0].isnumeric() - - -def extract_special(lines, starting_num, keywords): - entry = "" - outer_break = False - for index in range(starting_num+1, len(lines)): - if not lines[index]: - continue - for keyword in keywords: - if keyword in lines[index].lower(): - outer_break = True - break - if outer_break: - break - entry += lines[index] + " " - - return entry.rstrip() - - -""" - -North American - 1st party - BOL # - Consignee information - # PCS - DIMS - Special Services - -""" - -def extract_north_american(page, page_2): - lines = page.splitlines() - north_american_list = {FIRST_PARTY: NORTH_AMERICAN} - - # phone number - matches = re.findall(PHONE_COLON_REGEX, page) - shipper_phone_number = matches[0] if matches else "" - consignee_phone_number = matches[1] if len(matches) > 1 else "" - - for line_num, line in enumerate(lines): - # ref # - if "ref#" in line.lower(): - ref_num = line.split(" ")[-1] - if "ref" not in ref_num.lower(): - insert_in_dict(north_american_list, HOUSE_REF, line.split(":")[-1].strip()) - # BOL # - if "bol" in line.lower(): - bol_num = line.split(" ")[-1] - if "bol" not in 
bol_num.lower(): - insert_in_dict(north_american_list, BOL_NUM, line.split(" ")[-1]) - # shipper - if "shipper" in line.lower(): - shipper = extract_info_north_american(lines, line_num) - insert_in_dict(shipper, PHONE_NUMBER, shipper_phone_number) - insert_in_dict(north_american_list, SHIPPER, shipper) - # consignee - if "consignee" in line.lower(): - consignee = extract_info_north_american(lines, line_num, is_shipper=False) - insert_in_dict(consignee, PHONE_NUMBER, consignee_phone_number) - insert_in_dict(north_american_list, CONSIGNEE, consignee) - #special services - if "services" in line.lower(): - special_services = extract_special(lines, line_num, ["question", "issue", "905-277-2000"]) - insert_in_dict(north_american_list, SPECIAL_SERVICES, special_services) - - lines = page_2.splitlines() - for line_num, line in enumerate(lines): - if "pkg" in line.lower() or "wt(lbs)" in line.lower(): - pcs = extract_pcs(lines, line_num) - insert_in_dict(north_american_list, PCS, pcs) - - - return north_american_list - - -def extract_pcs(lines, starting_num): - pcs = [] - num_pcs = 0 - weight = 0 - for index in range(starting_num+1, len(lines)): - if len(lines[index]) < 13: - _num_pcs, _weight = [float(x) for x in lines[index].split(" ")] - assert _num_pcs == num_pcs and _weight == weight - break - second_space = lines[index].find(" ", lines[index].find(" ") + 1) - dim_nums = [re.findall("\d+\.\d+", x)[0] for x in lines[index].split(" ")[-3:]] - pkg, wt = lines[index].split(" ")[:2] - num_pcs += 1 - weight += float(wt) - commodity_description = lines[index][second_space:].split(dim_nums[0])[0].lstrip().rstrip() - dims = ' x '.join(dim_nums) - pcs.append({PKG: pkg, WT_LBS: wt, COMMODITY_DESCRIPTION: commodity_description, DIMS_IN: dims}) - - return pcs - -def extract_info_north_american(lines, starting_num, is_shipper=True): - field_index = 0 - curr_field_entry = "" - shipper_dict = {} - FIELDS = NORTH_AMERICAN_SHIPPER_FIELDS if is_shipper else NORTH_AMERICAN_CONSIGNEE_FIELDS - company = False - company_1 = "" - name = "" - company_2 = "" - address = "" - for index in range(starting_num+1, len(lines)): - if not lines[index]: - continue - if field_index == 0: - if "contact" in lines[index].lower(): - company = True - name = lines[index].split(": ")[-1] - company_1 = curr_field_entry.rstrip() - curr_field_entry = "" - field_index += 2 - continue - curr_field_entry += lines[index] + " " - if company: - if starts_with_number(lines[index]): - company = False - field_index += 1 - company_2 = curr_field_entry.rstrip() - curr_field_entry = "" - else: - curr_field_entry += lines[index] + " " - - - if FIELDS[field_index] == ADDRESS: - curr_field_entry += lines[index] + " " - if is_consignee(lines[index]) or re.findall(POSTAL_CODE_REGEX_BOTH, lines[index]): - address = curr_field_entry.rstrip() - break - - if is_shipper: - shipper_dict[COMPANY] = company_2 - shipper_dict[ADDRESS] = address - else: - shipper_dict[COMPANY] = company_1 - shipper_dict[NAME] = name - shipper_dict[ADDRESS] = (company_2 + ", " if company_2 else "") + address - - for field in FIELDS: - if field not in shipper_dict: - shipper_dict[field] = "" - - postal_code = extract_postal_code(shipper_dict[ADDRESS]) - insert_in_dict(shipper_dict, POSTAL_CODE, postal_code) - return shipper_dict - -def generate_doclist(_list): - return { - FIRST_PARTY: _list[FIRST_PARTY] if FIRST_PARTY in _list else "", - HOUSE_REF: _list[HOUSE_REF] if HOUSE_REF in _list else "", - BARCODE: _list[BARCODE] if BARCODE in _list else "", - PCS: _list[PCS] if PCS in _list 
else [], - NUM_PCS: _list[NUM_PCS] if NUM_PCS in _list else "", - WEIGHT: _list[WEIGHT] if WEIGHT in _list else "", - BOL_NUM: _list[BOL_NUM] if BOL_NUM in _list else "", - SPECIAL_SERVICES: _list[SPECIAL_SERVICES] if SPECIAL_SERVICES in _list else "", - SPECIAL_INSTRUCTIONS: _list[SPECIAL_INSTRUCTIONS] if SPECIAL_INSTRUCTIONS in _list else "", - CONSIGNEE: { - COMPANY: _list[CONSIGNEE][COMPANY] if CONSIGNEE in _list and COMPANY in _list[CONSIGNEE] else "", - NAME: _list[CONSIGNEE][NAME] if CONSIGNEE in _list and NAME in _list[CONSIGNEE] else "", - ADDRESS: _list[CONSIGNEE][ADDRESS] if CONSIGNEE in _list and ADDRESS in _list[CONSIGNEE] else "", - POSTAL_CODE: _list[CONSIGNEE][POSTAL_CODE] if CONSIGNEE in _list and POSTAL_CODE in _list[CONSIGNEE] else "", - PHONE_NUMBER: _list[CONSIGNEE][PHONE_NUMBER] if CONSIGNEE in _list and PHONE_NUMBER in _list[CONSIGNEE] else "" - }, - SHIPPER: { - COMPANY: _list[SHIPPER][COMPANY] if SHIPPER in _list and COMPANY in _list[SHIPPER] else "", - NAME: _list[SHIPPER][NAME] if SHIPPER in _list and NAME in _list[SHIPPER] else "", - ADDRESS: _list[SHIPPER][ADDRESS] if SHIPPER in _list and ADDRESS in _list[SHIPPER] else "", - POSTAL_CODE: _list[SHIPPER][POSTAL_CODE] if SHIPPER in _list and POSTAL_CODE in _list[SHIPPER] else "", - PHONE_NUMBER: _list[SHIPPER][PHONE_NUMBER] if SHIPPER in _list and PHONE_NUMBER in _list[SHIPPER] else "" - } - } - - -def extract(page, plumber_page=None): - second_party = predict_second_party(page) - - if second_party == CEVA: - return extract_ceva(page) - elif second_party == NORTH_AMERICAN: - return extract_north_american(page, plumber_page) - - return {} - -def predict_second_party(page): - - if CEVA.lower() in page.lower() or CEVA_NUM in page: - return CEVA - - return NORTH_AMERICAN - - -def insert_in_dict(_dict, key, value): - if not key in _dict: - _dict[key] = value - - -def extract_postal_code(address): - matches = re.findall(f"({POSTAL_CODE_REGEX_BOTH})", address) - if not matches: - return "" - postal_code = matches[0] - - # correct Os to 0s - for i in [-3, -1, 1]: - if postal_code[i] == "O": - postal_code = list(postal_code) - postal_code[i] = "0" - postal_code = ''.join(postal_code) - return postal_code - - -if __name__ == "__main__": - start = time.time() - ceva = read_pdf("CEVA-ocr.pdf", 1) - ceva_list = extract(ceva) - ceva_doclist = generate_doclist(ceva_list) - print(ceva_doclist) - - print() - - north_american_1 = read_pdf("NORTH_AMERICAN.pdf", 1) - north_american_2 = read_pdfplumber("NORTH_AMERICAN.pdf", 1) - north_american_list = extract(north_american_1, plumber_page=north_american_2) - north_american_doclist = generate_doclist(north_american_list) - print(north_american_doclist) - - print(time.time()-start) - - diff --git a/extraction/ocr.py b/extraction/ocr.py deleted file mode 100644 index 5ea34c1..0000000 --- a/extraction/ocr.py +++ /dev/null @@ -1,22 +0,0 @@ - - -from multilingual_pdf2text.pdf2text import PDF2Text -from multilingual_pdf2text.models.document_model.document import Document -import logging -import time - -if __name__ == "__main__": - stat = time.time() - pdf_document = Document( - document_path="../data/NORTH_AMERICAN.pdf", - language='eng' - ) - pdf2text = PDF2Text(document=pdf_document) - content = pdf2text.extract() - print(time.time()-stat) - - for page in content: - with open(f"text/NORTH_AMERICAN/{page['page_number']}.txt", "w") as f: - f.write(page["text"]) - - diff --git a/extraction/requirements.txt b/extraction/requirements.txt deleted file mode 100644 index 520194d..0000000 --- 
a/extraction/requirements.txt +++ /dev/null @@ -1,51 +0,0 @@ -amqp==5.0.9 -billiard==3.6.4.0 -celery==5.2.3 -cffi==1.15.0 -chardet==4.0.0 -click==8.0.3 -click-didyoumean==0.3.0 -click-plugins==1.1.1 -click-repl==0.2.0 -coloredlogs==15.0.1 -cryptography==36.0.1 -Deprecated==1.2.13 -Flask==2.0.2 -humanfriendly==10.0 -img2pdf==0.4.3 -importlib-resources==5.4.0 -itsdangerous==2.0.1 -Jinja2==3.0.3 -kombu==5.2.3 -lxml==4.7.1 -MarkupSafe==2.0.1 -multilingual-pdf2text==1.1.0 -numpy==1.22.0 -ocrmypdf==13.2.0 -opencv-python==4.5.5.62 -packaging==21.3 -pdf2image==1.16.0 -pdfminer.six==20211012 -pdfplumber==0.6.0 -pdftotext==2.2.2 -pikepdf==4.3.1 -Pillow==9.0.0 -pluggy==1.0.0 -prompt-toolkit==3.0.24 -pycparser==2.21 -pydantic==1.9.0 -pyparsing==3.0.6 -PyPDF2==1.26.0 -pytesseract==0.3.8 -pytz==2021.3 -redis==4.1.2 -reportlab==3.6.5 -six==1.16.0 -tqdm==4.62.3 -typing-extensions==4.0.1 -vine==5.0.0 -Wand==0.6.7 -wcwidth==0.2.5 -Werkzeug==2.0.2 -wrapt==1.13.3 -zipp==3.7.0 diff --git a/servers/tenant/Pipfile b/servers/tenant/Pipfile deleted file mode 100644 index db9acab..0000000 --- a/servers/tenant/Pipfile +++ /dev/null @@ -1,22 +0,0 @@ -[[source]] -url = "https://pypi.org/simple" -verify_ssl = true -name = "pypi" - -[packages] -psycopg2-binary = "*" -celery = "==5.2.3" -flask-restplus = "==0.13.0" -flask-marshmallow = "==0.14.0" -marshmallow-sqlalchemy = "==0.24.1" -marshmallow = "==3.9.1" -python-dotenv = "==0.20.0" -SQLAlchemy = "*" -Flask = "==2.0.2" -Faker = "==13.7.0" -Flask-SQLAlchemy = "==2.4.4" - -[dev-packages] - -[requires] -python_version = "3.8" diff --git a/servers/tenant/Pipfile.lock b/servers/tenant/Pipfile.lock deleted file mode 100644 index a24e0f8..0000000 --- a/servers/tenant/Pipfile.lock +++ /dev/null @@ -1,501 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "6dff868c3d4497e3f717b753c5792b94a2f7cc606ce84194f3bb2bfd0fe58121" - }, - "pipfile-spec": 6, - "requires": { - "python_version": "3.8" - }, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "amqp": { - "hashes": [ - "sha256:2c1b13fecc0893e946c65cbd5f36427861cffa4ea2201d8f6fca22e2a373b5e2", - "sha256:6f0956d2c23d8fa6e7691934d8c3930eadb44972cbbd1a7ae3a520f735d43359" - ], - "markers": "python_version >= '3.6'", - "version": "==5.1.1" - }, - "aniso8601": { - "hashes": [ - "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f", - "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973" - ], - "version": "==9.0.1" - }, - "attrs": { - "hashes": [ - "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4", - "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==21.4.0" - }, - "billiard": { - "hashes": [ - "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547", - "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b" - ], - "version": "==3.6.4.0" - }, - "celery": { - "hashes": [ - "sha256:8aacd02fc23a02760686d63dde1eb0daa9f594e735e73ea8fb15c2ff15cb608c", - "sha256:e2cd41667ad97d4f6a2f4672d1c6a6ebada194c619253058b5f23704aaadaa82" - ], - "index": "pypi", - "version": "==5.2.3" - }, - "click": { - "hashes": [ - "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e", - "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48" - ], - "markers": "python_version >= '3.7'", - "version": "==8.1.3" - }, - 
"click-didyoumean": { - "hashes": [ - "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667", - "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035" - ], - "markers": "python_full_version >= '3.6.2' and python_full_version < '4.0.0'", - "version": "==0.3.0" - }, - "click-plugins": { - "hashes": [ - "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b", - "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8" - ], - "version": "==1.1.1" - }, - "click-repl": { - "hashes": [ - "sha256:94b3fbbc9406a236f176e0506524b2937e4b23b6f4c0c0b2a0a83f8a64e9194b", - "sha256:cd12f68d745bf6151210790540b4cb064c7b13e571bc64b6957d98d120dacfd8" - ], - "version": "==0.2.0" - }, - "faker": { - "hashes": [ - "sha256:0301ace8365d98f3d0bf6e9a40200c8548e845d3812402ae1daf589effe3fb01", - "sha256:b1903db92175d78051858128ada397c7dc76f376f6967975419da232b3ebd429" - ], - "index": "pypi", - "version": "==13.7.0" - }, - "flask": { - "hashes": [ - "sha256:7b2fb8e934ddd50731893bdcdb00fc8c0315916f9fcd50d22c7cc1a95ab634e2", - "sha256:cb90f62f1d8e4dc4621f52106613488b5ba826b2e1e10a33eac92f723093ab6a" - ], - "index": "pypi", - "version": "==2.0.2" - }, - "flask-marshmallow": { - "hashes": [ - "sha256:2adcd782b5a4a6c5ae3c96701f320d8ca6997995a52b2661093c56cc3ed24754", - "sha256:bd01a6372cbe50e36f205cfff0fc5dab0b7b662c4c8b2c4fc06a3151b2950950" - ], - "index": "pypi", - "version": "==0.14.0" - }, - "flask-restplus": { - "hashes": [ - "sha256:a15d251923a8feb09a5d805c2f4d188555910a42c64d58f7dd281b8cac095f1b", - "sha256:a66e442d0bca08f389fc3d07b4d808fc89961285d12fb8013f7cf15516fa9f5c" - ], - "index": "pypi", - "version": "==0.13.0" - }, - "flask-sqlalchemy": { - "hashes": [ - "sha256:05b31d2034dd3f2a685cbbae4cfc4ed906b2a733cff7964ada450fd5e462b84e", - "sha256:bfc7150eaf809b1c283879302f04c42791136060c6eeb12c0c6674fb1291fae5" - ], - "index": "pypi", - "version": "==2.4.4" - }, - "greenlet": { - "hashes": [ - "sha256:0051c6f1f27cb756ffc0ffbac7d2cd48cb0362ac1736871399a739b2885134d3", - "sha256:00e44c8afdbe5467e4f7b5851be223be68adb4272f44696ee71fe46b7036a711", - "sha256:013d61294b6cd8fe3242932c1c5e36e5d1db2c8afb58606c5a67efce62c1f5fd", - "sha256:049fe7579230e44daef03a259faa24511d10ebfa44f69411d99e6a184fe68073", - "sha256:14d4f3cd4e8b524ae9b8aa567858beed70c392fdec26dbdb0a8a418392e71708", - "sha256:166eac03e48784a6a6e0e5f041cfebb1ab400b394db188c48b3a84737f505b67", - "sha256:17ff94e7a83aa8671a25bf5b59326ec26da379ace2ebc4411d690d80a7fbcf23", - "sha256:1e12bdc622676ce47ae9abbf455c189e442afdde8818d9da983085df6312e7a1", - "sha256:21915eb821a6b3d9d8eefdaf57d6c345b970ad722f856cd71739493ce003ad08", - "sha256:288c6a76705dc54fba69fbcb59904ae4ad768b4c768839b8ca5fdadec6dd8cfd", - "sha256:2bde6792f313f4e918caabc46532aa64aa27a0db05d75b20edfc5c6f46479de2", - "sha256:32ca72bbc673adbcfecb935bb3fb1b74e663d10a4b241aaa2f5a75fe1d1f90aa", - "sha256:356b3576ad078c89a6107caa9c50cc14e98e3a6c4874a37c3e0273e4baf33de8", - "sha256:40b951f601af999a8bf2ce8c71e8aaa4e8c6f78ff8afae7b808aae2dc50d4c40", - "sha256:572e1787d1460da79590bf44304abbc0a2da944ea64ec549188fa84d89bba7ab", - "sha256:58df5c2a0e293bf665a51f8a100d3e9956febfbf1d9aaf8c0677cf70218910c6", - "sha256:64e6175c2e53195278d7388c454e0b30997573f3f4bd63697f88d855f7a6a1fc", - "sha256:7227b47e73dedaa513cdebb98469705ef0d66eb5a1250144468e9c3097d6b59b", - "sha256:7418b6bfc7fe3331541b84bb2141c9baf1ec7132a7ecd9f375912eca810e714e", - "sha256:7cbd7574ce8e138bda9df4efc6bf2ab8572c9aff640d8ecfece1b006b68da963", - 
"sha256:7ff61ff178250f9bb3cd89752df0f1dd0e27316a8bd1465351652b1b4a4cdfd3", - "sha256:833e1551925ed51e6b44c800e71e77dacd7e49181fdc9ac9a0bf3714d515785d", - "sha256:8639cadfda96737427330a094476d4c7a56ac03de7265622fcf4cfe57c8ae18d", - "sha256:8c5d5b35f789a030ebb95bff352f1d27a93d81069f2adb3182d99882e095cefe", - "sha256:8c790abda465726cfb8bb08bd4ca9a5d0a7bd77c7ac1ca1b839ad823b948ea28", - "sha256:8d2f1fb53a421b410751887eb4ff21386d119ef9cde3797bf5e7ed49fb51a3b3", - "sha256:903bbd302a2378f984aef528f76d4c9b1748f318fe1294961c072bdc7f2ffa3e", - "sha256:93f81b134a165cc17123626ab8da2e30c0455441d4ab5576eed73a64c025b25c", - "sha256:95e69877983ea39b7303570fa6760f81a3eec23d0e3ab2021b7144b94d06202d", - "sha256:9633b3034d3d901f0a46b7939f8c4d64427dfba6bbc5a36b1a67364cf148a1b0", - "sha256:97e5306482182170ade15c4b0d8386ded995a07d7cc2ca8f27958d34d6736497", - "sha256:9f3cba480d3deb69f6ee2c1825060177a22c7826431458c697df88e6aeb3caee", - "sha256:aa5b467f15e78b82257319aebc78dd2915e4c1436c3c0d1ad6f53e47ba6e2713", - "sha256:abb7a75ed8b968f3061327c433a0fbd17b729947b400747c334a9c29a9af6c58", - "sha256:aec52725173bd3a7b56fe91bc56eccb26fbdff1386ef123abb63c84c5b43b63a", - "sha256:b11548073a2213d950c3f671aa88e6f83cda6e2fb97a8b6317b1b5b33d850e06", - "sha256:b1692f7d6bc45e3200844be0dba153612103db241691088626a33ff1f24a0d88", - "sha256:b336501a05e13b616ef81ce329c0e09ac5ed8c732d9ba7e3e983fcc1a9e86965", - "sha256:b8c008de9d0daba7b6666aa5bbfdc23dcd78cafc33997c9b7741ff6353bafb7f", - "sha256:b92e29e58bef6d9cfd340c72b04d74c4b4e9f70c9fa7c78b674d1fec18896dc4", - "sha256:be5f425ff1f5f4b3c1e33ad64ab994eed12fc284a6ea71c5243fd564502ecbe5", - "sha256:dd0b1e9e891f69e7675ba5c92e28b90eaa045f6ab134ffe70b52e948aa175b3c", - "sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a", - "sha256:e6a36bb9474218c7a5b27ae476035497a6990e21d04c279884eb10d9b290f1b1", - "sha256:e859fcb4cbe93504ea18008d1df98dee4f7766db66c435e4882ab35cf70cac43", - "sha256:eb6ea6da4c787111adf40f697b4e58732ee0942b5d3bd8f435277643329ba627", - "sha256:ec8c433b3ab0419100bd45b47c9c8551248a5aee30ca5e9d399a0b57ac04651b", - "sha256:eff9d20417ff9dcb0d25e2defc2574d10b491bf2e693b4e491914738b7908168", - "sha256:f0214eb2a23b85528310dad848ad2ac58e735612929c8072f6093f3585fd342d", - "sha256:f276df9830dba7a333544bd41070e8175762a7ac20350786b322b714b0e654f5", - "sha256:f3acda1924472472ddd60c29e5b9db0cec629fbe3c5c5accb74d6d6d14773478", - "sha256:f70a9e237bb792c7cc7e44c531fd48f5897961701cdaa06cf22fc14965c496cf", - "sha256:f9d29ca8a77117315101425ec7ec2a47a22ccf59f5593378fc4077ac5b754fce", - "sha256:fa877ca7f6b48054f847b61d6fa7bed5cebb663ebc55e018fda12db09dcc664c", - "sha256:fdcec0b8399108577ec290f55551d926d9a1fa6cad45882093a7a07ac5ec147b" - ], - "markers": "python_version >= '3' and (platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32'))))))", - "version": "==1.1.2" - }, - "importlib-resources": { - "hashes": [ - "sha256:b6062987dfc51f0fcb809187cffbd60f35df7acb4589091f154214af6d0d49d3", - "sha256:e447dc01619b1e951286f3929be820029d48c75eb25d265c28b92a16548212b8" - ], - "markers": "python_version < '3.9'", - "version": "==5.7.1" - }, - "itsdangerous": { - "hashes": [ - "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44", - "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a" - ], - "markers": "python_version >= '3.7'", - "version": "==2.1.2" - }, - "jinja2": { - "hashes": [ - 
"sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852", - "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61" - ], - "markers": "python_version >= '3.7'", - "version": "==3.1.2" - }, - "jsonschema": { - "hashes": [ - "sha256:71b5e39324422543546572954ce71c67728922c104902cb7ce252e522235b33f", - "sha256:7c6d882619340c3347a1bf7315e147e6d3dae439033ae6383d6acb908c101dfc" - ], - "markers": "python_version >= '3.7'", - "version": "==4.5.1" - }, - "kombu": { - "hashes": [ - "sha256:37cee3ee725f94ea8bb173eaab7c1760203ea53bbebae226328600f9d2799610", - "sha256:8b213b24293d3417bcf0d2f5537b7f756079e3ea232a8386dcc89a59fd2361a4" - ], - "markers": "python_version >= '3.7'", - "version": "==5.2.4" - }, - "markupsafe": { - "hashes": [ - "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003", - "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88", - "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5", - "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7", - "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a", - "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603", - "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1", - "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135", - "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247", - "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6", - "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601", - "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77", - "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02", - "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e", - "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63", - "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f", - "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980", - "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b", - "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812", - "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff", - "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96", - "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1", - "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925", - "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a", - "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6", - "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e", - "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f", - "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4", - "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f", - "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3", - "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c", - "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a", - "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417", - "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a", - "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a", - "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37", - 
"sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452", - "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933", - "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a", - "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7" - ], - "markers": "python_version >= '3.7'", - "version": "==2.1.1" - }, - "marshmallow": { - "hashes": [ - "sha256:73facc37462dfc0b27f571bdaffbef7709e19f7a616beb3802ea425b07843f4e", - "sha256:e26763201474b588d144dae9a32bdd945cd26a06c943bc746a6882e850475378" - ], - "index": "pypi", - "version": "==3.9.1" - }, - "marshmallow-sqlalchemy": { - "hashes": [ - "sha256:93f47b880ac7070f7b34c8ac0a71eeec3f8582a22e5c0330c1c436e3f5f99a37", - "sha256:d051cf013c075c43e1ee5c4b01f8fab6dd6b140dab6825be45875f674a0d289c" - ], - "index": "pypi", - "version": "==0.24.1" - }, - "prompt-toolkit": { - "hashes": [ - "sha256:62291dad495e665fca0bda814e342c69952086afb0f4094d0893d357e5c78752", - "sha256:bd640f60e8cecd74f0dc249713d433ace2ddc62b65ee07f96d358e0b152b6ea7" - ], - "markers": "python_full_version >= '3.6.2'", - "version": "==3.0.29" - }, - "psycopg2-binary": { - "hashes": [ - "sha256:01310cf4cf26db9aea5158c217caa92d291f0500051a6469ac52166e1a16f5b7", - "sha256:083a55275f09a62b8ca4902dd11f4b33075b743cf0d360419e2051a8a5d5ff76", - "sha256:090f3348c0ab2cceb6dfbe6bf721ef61262ddf518cd6cc6ecc7d334996d64efa", - "sha256:0a29729145aaaf1ad8bafe663131890e2111f13416b60e460dae0a96af5905c9", - "sha256:0c9d5450c566c80c396b7402895c4369a410cab5a82707b11aee1e624da7d004", - "sha256:10bb90fb4d523a2aa67773d4ff2b833ec00857f5912bafcfd5f5414e45280fb1", - "sha256:12b11322ea00ad8db8c46f18b7dfc47ae215e4df55b46c67a94b4effbaec7094", - "sha256:152f09f57417b831418304c7f30d727dc83a12761627bb826951692cc6491e57", - "sha256:15803fa813ea05bef089fa78835118b5434204f3a17cb9f1e5dbfd0b9deea5af", - "sha256:15c4e4cfa45f5a60599d9cec5f46cd7b1b29d86a6390ec23e8eebaae84e64554", - "sha256:183a517a3a63503f70f808b58bfbf962f23d73b6dccddae5aa56152ef2bcb232", - "sha256:1f14c8b0942714eb3c74e1e71700cbbcb415acbc311c730370e70c578a44a25c", - "sha256:1f6b813106a3abdf7b03640d36e24669234120c72e91d5cbaeb87c5f7c36c65b", - "sha256:280b0bb5cbfe8039205c7981cceb006156a675362a00fe29b16fbc264e242834", - "sha256:2d872e3c9d5d075a2e104540965a1cf898b52274a5923936e5bfddb58c59c7c2", - "sha256:2f9ffd643bc7349eeb664eba8864d9e01f057880f510e4681ba40a6532f93c71", - "sha256:3303f8807f342641851578ee7ed1f3efc9802d00a6f83c101d21c608cb864460", - "sha256:35168209c9d51b145e459e05c31a9eaeffa9a6b0fd61689b48e07464ffd1a83e", - "sha256:3a79d622f5206d695d7824cbf609a4f5b88ea6d6dab5f7c147fc6d333a8787e4", - "sha256:404224e5fef3b193f892abdbf8961ce20e0b6642886cfe1fe1923f41aaa75c9d", - "sha256:46f0e0a6b5fa5851bbd9ab1bc805eef362d3a230fbdfbc209f4a236d0a7a990d", - "sha256:47133f3f872faf28c1e87d4357220e809dfd3fa7c64295a4a148bcd1e6e34ec9", - "sha256:526ea0378246d9b080148f2d6681229f4b5964543c170dd10bf4faaab6e0d27f", - "sha256:53293533fcbb94c202b7c800a12c873cfe24599656b341f56e71dd2b557be063", - "sha256:539b28661b71da7c0e428692438efbcd048ca21ea81af618d845e06ebfd29478", - "sha256:57804fc02ca3ce0dbfbef35c4b3a4a774da66d66ea20f4bda601294ad2ea6092", - "sha256:63638d875be8c2784cfc952c9ac34e2b50e43f9f0a0660b65e2a87d656b3116c", - "sha256:6472a178e291b59e7f16ab49ec8b4f3bdada0a879c68d3817ff0963e722a82ce", - "sha256:68641a34023d306be959101b345732360fc2ea4938982309b786f7be1b43a4a1", - "sha256:6e82d38390a03da28c7985b394ec3f56873174e2c88130e6966cb1c946508e65", - 
"sha256:761df5313dc15da1502b21453642d7599d26be88bff659382f8f9747c7ebea4e", - "sha256:7af0dd86ddb2f8af5da57a976d27cd2cd15510518d582b478fbb2292428710b4", - "sha256:7b1e9b80afca7b7a386ef087db614faebbf8839b7f4db5eb107d0f1a53225029", - "sha256:874a52ecab70af13e899f7847b3e074eeb16ebac5615665db33bce8a1009cf33", - "sha256:887dd9aac71765ac0d0bac1d0d4b4f2c99d5f5c1382d8b770404f0f3d0ce8a39", - "sha256:8b344adbb9a862de0c635f4f0425b7958bf5a4b927c8594e6e8d261775796d53", - "sha256:8fc53f9af09426a61db9ba357865c77f26076d48669f2e1bb24d85a22fb52307", - "sha256:91920527dea30175cc02a1099f331aa8c1ba39bf8b7762b7b56cbf54bc5cce42", - "sha256:93cd1967a18aa0edd4b95b1dfd554cf15af657cb606280996d393dadc88c3c35", - "sha256:99485cab9ba0fa9b84f1f9e1fef106f44a46ef6afdeec8885e0b88d0772b49e8", - "sha256:9d29409b625a143649d03d0fd7b57e4b92e0ecad9726ba682244b73be91d2fdb", - "sha256:a29b3ca4ec9defec6d42bf5feb36bb5817ba3c0230dd83b4edf4bf02684cd0ae", - "sha256:a9e1f75f96ea388fbcef36c70640c4efbe4650658f3d6a2967b4cc70e907352e", - "sha256:accfe7e982411da3178ec690baaceaad3c278652998b2c45828aaac66cd8285f", - "sha256:adf20d9a67e0b6393eac162eb81fb10bc9130a80540f4df7e7355c2dd4af9fba", - "sha256:af9813db73395fb1fc211bac696faea4ca9ef53f32dc0cfa27e4e7cf766dcf24", - "sha256:b1c8068513f5b158cf7e29c43a77eb34b407db29aca749d3eb9293ee0d3103ca", - "sha256:bda845b664bb6c91446ca9609fc69f7db6c334ec5e4adc87571c34e4f47b7ddb", - "sha256:c381bda330ddf2fccbafab789d83ebc6c53db126e4383e73794c74eedce855ef", - "sha256:c3ae8e75eb7160851e59adc77b3a19a976e50622e44fd4fd47b8b18208189d42", - "sha256:d1c1b569ecafe3a69380a94e6ae09a4789bbb23666f3d3a08d06bbd2451f5ef1", - "sha256:def68d7c21984b0f8218e8a15d514f714d96904265164f75f8d3a70f9c295667", - "sha256:dffc08ca91c9ac09008870c9eb77b00a46b3378719584059c034b8945e26b272", - "sha256:e3699852e22aa68c10de06524a3721ade969abf382da95884e6a10ff798f9281", - "sha256:e847774f8ffd5b398a75bc1c18fbb56564cda3d629fe68fd81971fece2d3c67e", - "sha256:ffb7a888a047696e7f8240d649b43fb3644f14f0ee229077e7f6b9f9081635bd" - ], - "index": "pypi", - "version": "==2.9.3" - }, - "pyrsistent": { - "hashes": [ - "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c", - "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc", - "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e", - "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26", - "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec", - "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286", - "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045", - "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec", - "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8", - "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c", - "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca", - "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22", - "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a", - "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96", - "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc", - "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1", - "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07", - "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6", - "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b", - 
"sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5", - "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6" - ], - "markers": "python_version >= '3.7'", - "version": "==0.18.1" - }, - "python-dateutil": { - "hashes": [ - "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", - "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", - "version": "==2.8.2" - }, - "python-dotenv": { - "hashes": [ - "sha256:b7e3b04a59693c42c36f9ab1cc2acc46fa5df8c78e178fc33a8d4cd05c8d498f", - "sha256:d92a187be61fe482e4fd675b6d52200e7be63a12b724abbf931a40ce4fa92938" - ], - "index": "pypi", - "version": "==0.20.0" - }, - "pytz": { - "hashes": [ - "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7", - "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c" - ], - "version": "==2022.1" - }, - "setuptools": { - "hashes": [ - "sha256:22c7348c6d2976a52632c67f7ab0cdf40147db7789f9aed18734643fe9cf3373", - "sha256:4ce92f1e1f8f01233ee9952c04f6b81d1e02939d6e1b488428154974a4d0783e" - ], - "markers": "python_version >= '3.6'", - "version": "==59.6.0" - }, - "six": { - "hashes": [ - "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", - "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", - "version": "==1.16.0" - }, - "sqlalchemy": { - "hashes": [ - "sha256:09c606d8238feae2f360b8742ffbe67741937eb0a05b57f536948d198a3def96", - "sha256:166a3887ec355f7d2f12738f7fa25dc8ac541867147a255f790f2f41f614cb44", - "sha256:16abf35af37a3d5af92725fc9ec507dd9e9183d261c2069b6606d60981ed1c6e", - "sha256:2e885548da361aa3f8a9433db4cfb335b2107e533bf314359ae3952821d84b3e", - "sha256:2ec89bf98cc6a0f5d1e28e3ad28e9be6f3b4bdbd521a4053c7ae8d5e1289a8a1", - "sha256:2ecac4db8c1aa4a269f5829df7e706639a24b780d2ac46b3e485cbbd27ec0028", - "sha256:316c7e5304dda3e3ad711569ac5d02698bbc71299b168ac56a7076b86259f7ea", - "sha256:5041474dcab7973baa91ec1f3112049a9dd4652898d6a95a6a895ff5c58beb6b", - "sha256:53d2d9ee93970c969bc4e3c78b1277d7129554642f6ffea039c282c7dc4577bc", - "sha256:5864a83bd345871ad9699ce466388f836db7572003d67d9392a71998092210e3", - "sha256:5c90ef955d429966d84326d772eb34333178737ebb669845f1d529eb00c75e72", - "sha256:5d50cb71c1dbed70646d521a0975fb0f92b7c3f84c61fa59e07be23a1aaeecfc", - "sha256:64678ac321d64a45901ef2e24725ec5e783f1f4a588305e196431447e7ace243", - "sha256:64d796e9af522162f7f2bf7a3c5531a0a550764c426782797bbeed809d0646c5", - "sha256:6cb4c4f57a20710cea277edf720d249d514e587f796b75785ad2c25e1c0fed26", - "sha256:6e1fe00ee85c768807f2a139b83469c1e52a9ffd58a6eb51aa7aeb524325ab18", - "sha256:6e859fa96605027bd50d8e966db1c4e1b03e7b3267abbc4b89ae658c99393c58", - "sha256:7a052bd9f53004f8993c624c452dfad8ec600f572dd0ed0445fbe64b22f5570e", - "sha256:81e53bd383c2c33de9d578bfcc243f559bd3801a0e57f2bcc9a943c790662e0c", - "sha256:83cf3077712be9f65c9aaa0b5bc47bc1a44789fd45053e2e3ecd59ff17c63fe9", - "sha256:8b20c4178ead9bc398be479428568ff31b6c296eb22e75776273781a6551973f", - "sha256:8d07fe2de0325d06e7e73281e9a9b5e259fbd7cbfbe398a0433cbb0082ad8fa7", - "sha256:a0ae3aa2e86a4613f2d4c49eb7da23da536e6ce80b2bfd60bbb2f55fc02b0b32", - "sha256:af2587ae11400157753115612d6c6ad255143efba791406ad8a0cbcccf2edcb3", - "sha256:b3db741beaa983d4cbf9087558620e7787106319f7e63a066990a70657dd6b35", - 
"sha256:be094460930087e50fd08297db9d7aadaed8408ad896baf758e9190c335632da", - "sha256:cb441ca461bf97d00877b607f132772644b623518b39ced54da433215adce691", - "sha256:ce20f5da141f8af26c123ebaa1b7771835ca6c161225ce728962a79054f528c3", - "sha256:d57ac32f8dc731fddeb6f5d1358b4ca5456e72594e664769f0a9163f13df2a31", - "sha256:dce3468bf1fc12374a1a732c9efd146ce034f91bb0482b602a9311cb6166a920", - "sha256:e12532c4d3f614678623da5d852f038ace1f01869b89f003ed6fe8c793f0c6a3", - "sha256:e74ce103b81c375c3853b436297952ef8d7863d801dcffb6728d01544e5191b5", - "sha256:f0394a3acfb8925db178f7728adb38c027ed7e303665b225906bfa8099dc1ce8", - "sha256:f522214f6749bc073262529c056f7dfd660f3b5ec4180c5354d985eb7219801e", - "sha256:fbf8c09fe9728168f8cc1b40c239eab10baf9c422c18be7f53213d70434dea43", - "sha256:fca8322e04b2dde722fcb0558682740eebd3bd239bea7a0d0febbc190e99dc15" - ], - "index": "pypi", - "version": "==1.4.36" - }, - "vine": { - "hashes": [ - "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30", - "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e" - ], - "markers": "python_version >= '3.6'", - "version": "==5.0.0" - }, - "wcwidth": { - "hashes": [ - "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784", - "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83" - ], - "version": "==0.2.5" - }, - "werkzeug": { - "hashes": [ - "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6", - "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255" - ], - "markers": "python_version >= '3.7'", - "version": "==2.1.2" - }, - "zipp": { - "hashes": [ - "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad", - "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099" - ], - "markers": "python_version < '3.10'", - "version": "==3.8.0" - } - }, - "develop": {} -} diff --git a/servers/tenant/app.Dockerfile b/servers/tenant/app.Dockerfile deleted file mode 100644 index 0427bd6..0000000 --- a/servers/tenant/app.Dockerfile +++ /dev/null @@ -1,8 +0,0 @@ -FROM python:3.9 -EXPOSE 5000 -RUN apt-get update && apt-get -y install qpdf poppler-utils && apt-get install -y build-essential libpoppler-cpp-dev pkg-config python-dev -COPY requirements.txt . -RUN pip3 install --upgrade pip -RUN pip3 install -r requirements.txt -WORKDIR /opt/metadata-extraction -ENV PYTHONPATH . \ No newline at end of file diff --git a/servers/tenant/celery.Dockerfile b/servers/tenant/celery.Dockerfile deleted file mode 100644 index d394f79..0000000 --- a/servers/tenant/celery.Dockerfile +++ /dev/null @@ -1,8 +0,0 @@ -FROM python:3.9 -RUN apt-get update && apt-get -y install qpdf poppler-utils && apt-get install -y build-essential libpoppler-cpp-dev pkg-config python-dev -RUN apt -y install tesseract-ocr && apt -y install libtesseract-dev -COPY requirements.txt . -RUN pip3 install --upgrade pip -RUN pip3 install -r requirements.txt -WORKDIR /opt/metadata-extraction/server -ENV PYTHONPATH .. 
\ No newline at end of file diff --git a/servers/tenant/celery_client.py b/servers/tenant/celery_client.py index 7b7bedc..2c90f45 100644 --- a/servers/tenant/celery_client.py +++ b/servers/tenant/celery_client.py @@ -1,6 +1,54 @@ from celery import Celery from celery.utils.log import get_logger +import os +import io +from uuid import uuid4 +import traceback +# import tenant.controllers.DocumentController as document_controller +import PyPDF2 +import extraction.app as ex +from celery import group CELERY_BROKER_URL = 'redis://redis:6379/0' client = Celery(__name__, broker=CELERY_BROKER_URL) logger = get_logger(__name__) +FAILURE = -1 +SUCCESS = 0 +UPLOAD_FOLDER = "/opt/metadata-extraction/uploads" + + +def fan_out(file): + folder_uuid = uuid4() + with io.BytesIO(file.read()) as open_pdf_file: + read_pdf = PyPDF2.PdfFileReader(open_pdf_file) + num_pages = read_pdf.getNumPages() + folder = f"{UPLOAD_FOLDER}/{folder_uuid}" + os.mkdir(folder) + for i in range(num_pages): + output_pdf = PyPDF2.PdfFileWriter() + output_pdf.addPage(read_pdf.getPage(i)) + file_uuid = uuid4() + f_dir = f"{folder}/{file_uuid}" + os.mkdir(f_dir) + with open(f"{f_dir}/{file_uuid}.pdf", "wb") as f: + output_pdf.write(f) + file.close() + pdf_folders = os.listdir(folder) + return group([work.s(f"{folder}/{pdf_folder}") for pdf_folder in pdf_folders]) + + +def do_all_work(tasks_to_run): + result = tasks_to_run.apply_async() + return result + + +@client.task +def work(pdf_folder): + pdf_file = f"{pdf_folder}.pdf" + try: + doclist = ex.work(pdf_folder) + except Exception as e: + logger.info(f"file {pdf_folder}/{pdf_file} error. msg: {str(e)}") + logger.info(traceback.format_exc()) + return {"status": FAILURE, "folder": pdf_folder} + return {"status": SUCCESS, "folder": pdf_folder, "doclist": doclist} \ No newline at end of file diff --git a/servers/tenant/docker-compose.yml b/servers/tenant/docker-compose.yml deleted file mode 100644 index 4823ffb..0000000 --- a/servers/tenant/docker-compose.yml +++ /dev/null @@ -1,52 +0,0 @@ -version: '3' -services: - host-injector: - container_name: host_c - volumes: - - '/var/run/docker.sock:/tmp/docker.sock' - - '/etc/hosts:/tmp/hosts' - image: dvdarias/docker-hoster - redis: - hostname: redis.wlp.com - image: redis:latest - container_name: rd01 - ports: - - "6379:6379" - volumes: - - "redis:/data" - worker: - hostname: celery.wlp.com - depends_on: - - "redis" - build: - context: . - dockerfile: celery.Dockerfile - volumes: - - .:/opt/metadata-extraction - command: celery -A __init__.client worker --loglevel=info -f celery.logs -Ofair -c 2 - tty: true - app: - hostname: app.wlp.com - depends_on: - - "redis" - build: - context: . 
- dockerfile: app.Dockerfile - volumes: - - .:/opt/metadata-extraction - container_name: app01 - ports: - - "5000:5000" - command: python3 server/__init__.py - tty: true - flower: - hostname: flower.wlp.com - image: mher/flower - container_name: flower01 - environment: - - CELERY_BROKER_URL=redis://redis:6379/0 - - FLOWER_PORT=8888 - ports: - - "8888:8888" -volumes: - redis: \ No newline at end of file diff --git a/servers/tenant/kill-cluster.sh b/servers/tenant/kill-cluster.sh deleted file mode 100644 index 7e6a281..0000000 --- a/servers/tenant/kill-cluster.sh +++ /dev/null @@ -1 +0,0 @@ -docker-compose kill \ No newline at end of file diff --git a/servers/tenant/models/__init__.py b/servers/tenant/models/__init__.py index d1dc5c5..86cc19e 100644 --- a/servers/tenant/models/__init__.py +++ b/servers/tenant/models/__init__.py @@ -4,7 +4,7 @@ from sqlalchemy.orm import sessionmaker db_port = os.getenv("DB_PORT", "5432") -db_name = os.getenv("DB_NAME", "tenant_database") +db_name = os.getenv("DB_NAME", "tenant_db") db_username = os.getenv("DB_USERNAME", "postgres") db_password = os.getenv("DB_PASSWORD", "password") db_url = os.getenv("DB_URL", "ship-solver.ccxmktobiszx.ca-central-1.rds.amazonaws.com") diff --git a/servers/tenant/server.py b/servers/tenant/server.py index 5ca329a..b08bbeb 100644 --- a/servers/tenant/server.py +++ b/servers/tenant/server.py @@ -1,6 +1,10 @@ +<<<<<<< HEAD import os from flask import Flask, Blueprint, jsonify, session # from config import app +======= +from tenant.config import app +>>>>>>> Fixed celery pipeline from blueprints.event_driven.ticket import ticket_bp from blueprints.simple.customers import customer_bp from blueprints.simple.users import user_bp @@ -16,11 +20,16 @@ >>>>>>> modifying db schema ======= from flask_cors import cross_origin +<<<<<<< HEAD from servers.tenant.blueprints.simple.document import pdf_bp # TODO: Move this in seperate microservice >>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz # from models.__init__ import engine, Base # from models.models import INDEXES +======= +from tenant.blueprints.simple.document import document_bp # TODO: Move this in seperate microservice +from flask import Blueprint +>>>>>>> Fixed celery pipeline from dotenv import load_dotenv load_dotenv(".env", override=True) @@ -36,6 +45,10 @@ cors = CORS(app, resources={r"/api/*": {"origins": "*"}}) parent = Blueprint("api", __name__, url_prefix="/api") +<<<<<<< HEAD +======= +parent.register_blueprint(document_bp) +>>>>>>> Fixed celery pipeline parent.register_blueprint(ticket_bp) parent.register_blueprint(customer_bp) parent.register_blueprint(user_bp) @@ -43,10 +56,18 @@ parent.register_blueprint(driver_bp) +<<<<<<< HEAD +======= + +@app.route("/") +def hello_world(): + return "Server Started!" 
+ +>>>>>>> Fixed celery pipeline if __name__ == "__main__": print("REGISTERING BLUEPRINT") app.register_blueprint(parent) - app.run(debug=True, host="0.0.0.0", port=6767) + app.run(debug=True, host="0.0.0.0", port=5000) diff --git a/servers/tenant/start-cluster.sh b/servers/tenant/start-cluster.sh deleted file mode 100755 index 46eee91..0000000 --- a/servers/tenant/start-cluster.sh +++ /dev/null @@ -1,2 +0,0 @@ -mkdir uploads -p -docker-compose up -d --build \ No newline at end of file From dcdb007847bae696673e45afab6dfcb2f2b2c1c0 Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Thu, 21 Jul 2022 05:20:38 -0400 Subject: [PATCH 30/40] document api finished --- servers/app.Dockerfile | 8 + servers/celery.Dockerfile | 8 + servers/docker-compose.yml | 52 ++++ servers/kill-cluster.sh | 1 + servers/start-cluster.sh | 2 + .../tenant/blueprints/event_driven/pieces.py | 56 ---- .../tenant/blueprints/event_driven/ticket.py | 246 +----------------- servers/tenant/blueprints/simple/customers.py | 6 +- servers/tenant/blueprints/simple/document.py | 69 +++++ servers/tenant/blueprints/simple/driver.py | 4 +- .../tenant/blueprints/simple/milestones.py | 6 +- servers/tenant/blueprints/simple/users.py | 14 +- servers/tenant/celery_client.py | 50 +++- .../tenant/controllers/DocumentController.py | 26 ++ servers/tenant/controllers/baseController.py | 106 -------- .../tenant/controllers/controllerMapper.py | 19 +- servers/tenant/database/index_creation.sql | 7 - servers/tenant/database/table_creation.sql | 116 --------- servers/tenant/models/__init__.py | 8 - servers/tenant/models/models.py | 138 ++-------- servers/tenant/requirements.txt | 10 +- servers/tenant/server.py | 39 +-- servers/tenant/test/test.py | 150 ----------- servers/tenant/utils.py | 87 ------- 24 files changed, 253 insertions(+), 975 deletions(-) create mode 100644 servers/app.Dockerfile create mode 100644 servers/celery.Dockerfile create mode 100644 servers/docker-compose.yml create mode 100755 servers/kill-cluster.sh create mode 100755 servers/start-cluster.sh delete mode 100644 servers/tenant/blueprints/event_driven/pieces.py create mode 100644 servers/tenant/blueprints/simple/document.py create mode 100644 servers/tenant/controllers/DocumentController.py diff --git a/servers/app.Dockerfile b/servers/app.Dockerfile new file mode 100644 index 0000000..9497d24 --- /dev/null +++ b/servers/app.Dockerfile @@ -0,0 +1,8 @@ +FROM python:3.9 +EXPOSE 5000 +RUN apt-get update && apt-get -y install qpdf poppler-utils && apt-get install -y build-essential libpoppler-cpp-dev pkg-config python-dev +COPY requirements.txt . +RUN pip3 install --upgrade pip +RUN pip3 install -r requirements.txt +WORKDIR /opt/metadata-extraction +ENV PYTHONPATH . diff --git a/servers/celery.Dockerfile b/servers/celery.Dockerfile new file mode 100644 index 0000000..e214354 --- /dev/null +++ b/servers/celery.Dockerfile @@ -0,0 +1,8 @@ +FROM python:3.9 +RUN apt-get update && apt-get -y install qpdf poppler-utils && apt-get install -y build-essential libpoppler-cpp-dev pkg-config python-dev +RUN apt -y install tesseract-ocr && apt -y install libtesseract-dev +COPY requirements.txt . +RUN pip3 install --upgrade pip +RUN pip3 install -r requirements.txt +WORKDIR /opt/metadata-extraction/tenant +ENV PYTHONPATH .. 
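[Editor's aside — not part of the patch above or below.] The fan-out helpers added to servers/tenant/celery_client.py earlier in this series (fan_out, do_all_work, work) are evidently meant to be driven from a Flask route in the app container, with the worker container consuming the queued page-level extraction tasks over the redis broker defined in the compose file that follows. Below is a minimal sketch of such a route. It is illustrative only: the blueprint name, URL prefix, multipart field name, flat import path, and the idea of returning the celery group id for later polling are all assumptions, not the actual document blueprint added in this patch.

    import json

    from flask import Blueprint, request, make_response

    # Helpers added in celery_client.py earlier in this series; the flat import
    # path is an assumption and depends on how the tenant package is laid out
    # inside the app container.
    from celery_client import fan_out, do_all_work

    upload_bp = Blueprint("upload_bp", __name__, url_prefix="upload")


    @upload_bp.route("/", methods=["POST"])
    def upload_document():
        # Assumes the PDF arrives as a multipart form field named "file".
        file = request.files["file"]

        # fan_out() splits the PDF into one folder per page under UPLOAD_FOLDER
        # and returns a celery group of work.s(<page folder>) signatures.
        tasks = fan_out(file)

        # do_all_work() calls apply_async() on the group, queueing every
        # page-level extraction task onto the redis broker for the worker.
        result = do_all_work(tasks)

        # Hand the group id back so a client could later poll for the
        # per-page doclists produced by the work task.
        return make_response(json.dumps({"groupId": result.id}), 202)
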
diff --git a/servers/docker-compose.yml b/servers/docker-compose.yml new file mode 100644 index 0000000..1601a98 --- /dev/null +++ b/servers/docker-compose.yml @@ -0,0 +1,52 @@ +version: '3' +services: + host-injector: + container_name: host_c + volumes: + - '/var/run/docker.sock:/tmp/docker.sock' + - '/etc/hosts:/tmp/hosts' + image: dvdarias/docker-hoster + redis: + hostname: redis.wlp.com + image: redis:latest + container_name: rd01 + ports: + - "6379:6379" + volumes: + - "redis:/data" + worker: + hostname: celery.wlp.com + depends_on: + - "redis" + build: + context: . + dockerfile: celery.Dockerfile + volumes: + - .:/opt/metadata-extraction + command: celery -A config.client worker --loglevel=info -f celery.logs -Ofair -c 2 + tty: true + app: + hostname: app.wlp.com + depends_on: + - "redis" + build: + context: . + dockerfile: app.Dockerfile + volumes: + - .:/opt/metadata-extraction + container_name: app01 + ports: + - "6767:6767" + command: python3 tenant/server.py + tty: true + flower: + hostname: flower.wlp.com + image: mher/flower + container_name: flower01 + environment: + - CELERY_BROKER_URL=redis://redis:6379/0 + - FLOWER_PORT=8888 + ports: + - "8888:8888" +volumes: + redis: \ No newline at end of file diff --git a/servers/kill-cluster.sh b/servers/kill-cluster.sh new file mode 100755 index 0000000..7e6a281 --- /dev/null +++ b/servers/kill-cluster.sh @@ -0,0 +1 @@ +docker-compose kill \ No newline at end of file diff --git a/servers/start-cluster.sh b/servers/start-cluster.sh new file mode 100755 index 0000000..46eee91 --- /dev/null +++ b/servers/start-cluster.sh @@ -0,0 +1,2 @@ +mkdir uploads -p +docker-compose up -d --build \ No newline at end of file diff --git a/servers/tenant/blueprints/event_driven/pieces.py b/servers/tenant/blueprints/event_driven/pieces.py deleted file mode 100644 index e92bcc2..0000000 --- a/servers/tenant/blueprints/event_driven/pieces.py +++ /dev/null @@ -1,56 +0,0 @@ -import json -import datetime -from flask import request, jsonify, Blueprint -from flask_cors import cross_origin - -import sys - -sys.path.insert(0, "..") # import parent folder - -from controllers.controllerMapper import PieceController -from models.models import TicketEvents, PieceEvents -from utils import ( - AlchemyEncoder, - alchemyConverter, -) -from flask_cognito_lib.decorators import auth_required - -pieces_bp = Blueprint("pieces_bp", __name__, url_prefix="piece") - -pieces_controller = PieceController() - - -""" -Route expects requests of format: - -{ - "piece_id" : "value", - "filters" : { - "field1": "value1", - "field2": "value2", - .... 
- } -} - -""" - - -@pieces_bp.route("/{piece_id}", methods=["GET"]) -<<<<<<< HEAD -@auth_required() -======= -@cross_origin(supports_credentials=True) -@require_appkey ->>>>>>> Fix commit bugs for mergmerge -def pieces_get_history(piece_id): - filters = request.args.get("filters") - filters.extend({"piece_id": piece_id}) - - pieces = pieces_controller._get_latest_event_objects( - page=1, number_of_res=20, filters=filters - ) - - res = alchemyConverter(pieces) - response = json.dumps(res, cls=AlchemyEncoder) - - return response diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index e89f3ce..5302e0e 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -1,58 +1,28 @@ import json -<<<<<<< HEAD -<<<<<<< HEAD from datetime import datetime from wsgiref import validate -<<<<<<< HEAD from numpy import number from flask import make_response, request, jsonify, Blueprint -<<<<<<< HEAD -======= -import datetime -======= -from datetime import datetime ->>>>>>> get endpoints -======= ->>>>>>> Fixing default date bug - -from numpy import number -<<<<<<< HEAD -from flask import request, jsonify, Blueprint ->>>>>>> modifying db schema -======= -from flask import make_response, request, jsonify, Blueprint ->>>>>>> Cors header -======= -from flask_cors import cross_origin ->>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz import sys sys.path.insert(0, "..") # import parent folder -<<<<<<< HEAD from controllers.controllerMapper import TicketController, TicketStatusController -======= -from controllers.controllerMapper import TicketController ->>>>>>> fix schema from models.models import TicketEvents from utils import ( AlchemyEncoder, alchemyConverter, ) -from flask_cognito_lib.decorators import auth_required +# from flask_cognito_lib.decorators import auth_required ticket_bp = Blueprint("ticket_bp", __name__, url_prefix="ticket") ticket_controller = TicketController() -<<<<<<< HEAD ticket_status_controller = TicketStatusController() PIECES_SEPERATOR = ",+-" -======= - ->>>>>>> fix schema """ Route expects requests of format: @@ -86,7 +56,7 @@ @ticket_bp.route("/status/", methods=["GET"]) -@auth_required() +#@auth_required() def ticket_get_all_with_status(status): # create ticket limit = 5000 if "limit" not in request.args else request.args["limit"] @@ -110,12 +80,7 @@ def ticket_get_all_with_status(status): # create ticket @ticket_bp.route("/", methods=["POST"]) -<<<<<<< HEAD -@auth_required() -======= -@cross_origin(supports_credentials=True) -@require_appkey ->>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz +#@auth_required() def ticket_post(): # create ticket print("Creating ticket from the following JSON:") print(request.data) @@ -128,14 +93,12 @@ def ticket_post(): # create ticket ticket_dict["pieces"] = PIECES_SEPERATOR.join(ticket_dict["pieces"]) ticket_event = ticket_controller._create_base_event(ticket_dict) -<<<<<<< HEAD -<<<<<<< HEAD response = {"ticketId": ticket_event.ticketId} return make_response(json.dumps(response)) # TODO fix primary key issue, ticketeventID needs to be unique for edits @ticket_bp.route("/", methods=["POST"]) -@auth_required() +#@auth_required() def ticket_edit(ticket_id): # create ticket print("Creating ticket from the following JSON:") print(request.data) @@ -146,15 +109,6 @@ def ticket_edit(ticket_id): # create ticket #join pieces into single string ticket_dict["pieces"] = PIECES_SEPERATOR.join(ticket_dict["pieces"]) ticket_event = 
ticket_controller._create_base_event(ticket_dict) -======= - return {"success"} - - -# http://127.0.0.1:6767/api/ticket/?start=2022-01-01T00:00:00&end=2022-04-04T00:00:00&shipperName=Eric%20Shea -# curl http://127.0.0.1:6767/api/ticket/?shipperName -# # curl http://127.0.0.1:6767/api/ticket?key=a -# # curl http://127.0.0.1:6767/api/ticket/?start=2022-01-01T00:00:00Z&end=2022-04-04T00:00:00Z ->>>>>>> get endpoints response = {"ticketId": ticket_event.ticketId} return make_response(json.dumps(response)) @@ -164,16 +118,6 @@ def ticket_edit(ticket_id): # create ticket # # curl http://127.0.0.1:6767/api/ticket?key=a # # curl http://127.0.0.1:6767/api/ticket/?start=2022-01-01T00:00:00Z&end=2022-04-04T00:00:00Z -<<<<<<< HEAD -<<<<<<< HEAD -======= -def corsify(resp): - resp = make_response(json.dumps(resp)) - resp.headers['Access-Control-Allow-Origin'] = '*' - resp.headers['Access-Control-Allow-Headers'] = ['Origin', 'X-Requested-With', 'Content-Type', 'Accept'] - return resp - ->>>>>>> Cors header def get_clean_filters_dict(immutable_args): sql_filters = dict(immutable_args) if "start" in sql_filters: @@ -183,19 +127,6 @@ def get_clean_filters_dict(immutable_args): if "limit" in sql_filters: del sql_filters["limit"] return sql_filters -======= -@ticket_bp.route("/", methods=["GET"]) -# @require_appkey -def ticket_get_all(): - -<<<<<<< HEAD - filters = request.args.get("filters") or {} - limit = request.args.get("limit") or 2 - - data = ticket_controller._get_latest_event_objects(filters, number_of_res=limit) - res = alchemyConverter(data) - response = json.dumps(res, cls=AlchemyEncoder) ->>>>>>> modifying db schema def validate_date_format(date_text): @@ -214,10 +145,9 @@ def default_end(): dt_end = validate_date_format("2100-01-01T00:00:00") return dt_end -<<<<<<< HEAD @ticket_bp.route("/", methods=["GET"]) -@auth_required() +#@auth_required() def ticket_get_all(): filters = request.args or {} sql_filters = get_clean_filters_dict(filters) @@ -233,69 +163,6 @@ def ticket_get_all(): data = ticket_controller._get_latest_event_objects_in_range( dt_start, dt_end, sql_filters, number_of_res=limit ) -======= - filters = request.args or {} - sql_filters = dict(filters) - -======= - -======= ->>>>>>> Cors header -def get_clean_filters_dict(immutable_args): - sql_filters = dict(immutable_args) ->>>>>>> ALL tickets API done - if "start" in sql_filters: - del sql_filters["start"] - if "end" in sql_filters: - del sql_filters["end"] - if "limit" in sql_filters: - del sql_filters["limit"] - return sql_filters - -def validate_date_format(date_text): - try: - return datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S") - except ValueError: - raise ValueError("Incorrect data format, should be %Y-%m-%dT%H:%M:%S") - -def default_start(): - dt_start = validate_date_format("1900-01-01T00:00:00") - return dt_start - -def default_end(): - dt_end = validate_date_format("2100-01-01T00:00:00") - return dt_end - -@ticket_bp.route("/", methods=["GET"]) -@cross_origin(supports_credentials=True) -# @require_appkey -def ticket_get_all(): - filters = request.args or {} - sql_filters = get_clean_filters_dict(filters) -<<<<<<< HEAD -<<<<<<< HEAD - if "limit" not in filters: - limit = 5 - else: - limit = filters["limit"] - - if "start" in filters: - dt_start_str = filters["start"] - dt_start = validate_date_format(dt_start_str) - if "end" in filters: - dt_end_str= filters["end"] - dt_end = validate_date_format(dt_end_str) - data = ticket_controller._get_latest_event_objects_in_range( - dt_start, dt_end, filters=sql_filters, 
number_of_res=limit - ) - else: - data = ticket_controller._get_latest_event_objects_from_start_date( - dt_start, filters=sql_filters, number_of_res=limit - ) - else: - data = ticket_controller._get_latest_event_objects(sql_filters, number_of_res=limit) -<<<<<<< HEAD ->>>>>>> get endpoints res = alchemyConverter(data) for ticket in res: @@ -303,35 +170,8 @@ def ticket_get_all(): ticket["ticketStatus"]["currentStatus"] = ticket["ticketStatus"]["currentStatus"].value return make_response(json.dumps(res, cls=AlchemyEncoder)) -======= -======= - limit = 5 if "limit" not in filters else filters["limit"] -======= - limit = 5000 if "limit" not in filters else filters["limit"] ->>>>>>> Cors header - - dt_start = validate_date_format(filters["start"]) if "start" in filters else default_start() - dt_end = validate_date_format(filters["end"]) if "end" in filters else default_end() - - data = ticket_controller._get_latest_event_objects_in_range(dt_start, dt_end, sql_filters, number_of_res=limit) ->>>>>>> Fixing default date bug - - res = alchemyConverter(data) - -<<<<<<< HEAD - print("\n\n\n\nRES POST AC ----------------------") - print(res) - response = json.dumps(res) - - print("\n\n\n\nRESULT RESPONSE ------------------" ) - print(response) ->>>>>>> ALL tickets API done -======= - return corsify(res) ->>>>>>> Cors header -<<<<<<< HEAD def get_single(ticket_id): filters = request.args.get("filters") or {} @@ -344,37 +184,12 @@ def get_single(ticket_id): return data[0] if isinstance(data, list) else data @ticket_bp.route("/", methods=["GET"]) -<<<<<<< HEAD -@auth_required() +#@auth_required() def ticket_get(ticket_id): data = get_single(ticket_id) res = alchemyConverter(data) return make_response(json.dumps(res, cls=AlchemyEncoder)) -======= -@ticket_bp.route("/", methods=["GET"]) -======= -@cross_origin(supports_credentials=True) ->>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz -# @require_appkey -def ticket_get(ticket_id): - filters = request.args.get("filters") or {} - - - sql_filters = get_clean_filters_dict(filters) - sql_filters["ticketId"] = ticket_id - data = ticket_controller._get_latest_event_objects_in_range( - default_start(), default_end(), filters=sql_filters - ) - res = alchemyConverter(data[0]) -<<<<<<< HEAD - response = json.dumps(res) - - return response ->>>>>>> get endpoints -======= - return corsify(res) ->>>>>>> Cors header """ Route expects requests of format: @@ -390,29 +205,6 @@ def ticket_get(ticket_id): """ -<<<<<<< HEAD -======= - - -# @ticket_bp.route("/attribute/{attribute_name}", methods=["GET"]) -# @require_appkey -# def ticket_attribute_get(attribute_name): - -# filters.extend({"ticket_id": ticket_id}) - -# latest_ticket = ticket_controller._get_latest_event_objects( -# number_of_res=number_of_res, filters=filters -# ) - -# res = alchemyConverter(latest_ticket) -# response = json.dumps(res, cls=AlchemyEncoder) - -# return response - - - - ->>>>>>> get endpoints """ Route expects requests of format: @@ -431,29 +223,3 @@ def ticket_get(ticket_id): """ -<<<<<<< HEAD -======= - -@ticket_bp.route("/", methods=["PUT"]) -@cross_origin(supports_credentials=True) -@require_appkey -def ticket_update(ticket_id): - - update_dict = request.form["update_dict"] - - # remove ticketId and ticketEventId if present - update_dict.pop(ticket_controller.primary_key, None) - update_dict.pop(TicketEvents.non_prim_identifying_column_name, None) - - filters = request.form["filters"] - filters.extend({"ticket_id": ticket_id}) - - updated_object = ticket_controller._modify_latest_object( - 
update_dict, filters=filters - ) - - res = alchemyConverter(updated_object) - response = json.dumps(res, cls=AlchemyEncoder) - - return response ->>>>>>> get endpoints diff --git a/servers/tenant/blueprints/simple/customers.py b/servers/tenant/blueprints/simple/customers.py index b5108e9..01d92cf 100644 --- a/servers/tenant/blueprints/simple/customers.py +++ b/servers/tenant/blueprints/simple/customers.py @@ -7,7 +7,7 @@ from controllers.controllerMapper import CustomerController from models.models import Users -from flask_cognito_lib.decorators import auth_required +# from flask_cognito_lib.decorators import auth_required from utils import ( AlchemyEncoder, alchemyConverter @@ -20,14 +20,14 @@ @customer_bp.route("/", methods=["POST"]) -@auth_required() +#@auth_required() def customer_post(): # create ticket customer = customer_controller._create(json.loads(request.data)) response = {"customerId": customer.customerId} return make_response(json.dumps(response)) @customer_bp.route("/", methods=["GET"]) -@auth_required() +#@auth_required() def customer_get(): # create ticket limit = 5000 if "limit" not in request.args else request.args["limit"] if "limit" in request.args: diff --git a/servers/tenant/blueprints/simple/document.py b/servers/tenant/blueprints/simple/document.py new file mode 100644 index 0000000..c917420 --- /dev/null +++ b/servers/tenant/blueprints/simple/document.py @@ -0,0 +1,69 @@ +import os +from flask import request, jsonify, Blueprint +import io +from uuid import uuid4 +import traceback + +from celery_client import client, logger, fan_out, do_all_work +from tenant.models.models import DocumentStatus, Documents +from tenant.controllers.DocumentController import DocumentController, DocumentStatusController +import PyPDF2 +import extraction.app as ex +from celery import group +import json +from utils import ( + AlchemyEncoder, + alchemyConverter, +) + +document_bp = Blueprint("document_bp", __name__, url_prefix="document") + +FAILURE = -1 +SUCCESS = 0 +UPLOAD_FOLDER = "/opt/metadata-extraction/uploads" +document_status_controller = DocumentStatusController() +document_controller = DocumentController() +@document_bp.route("/", methods=["POST"]) +def document_post(): + if "file" not in request.files: + res = jsonify({"message": "No file part in the request"}) + res.status_code = 400 + return res + + file = request.files["file"] + + if file.filename == "": + res = jsonify({"message": "No file selected for uploading"}) + res.status_code = 400 + return res + if file and file.filename.split(".")[-1].lower() == "pdf": + document_status = document_status_controller._create({"numPages": 0}) + response = {"documentStatusId": document_status.documentStatusId} + resp = jsonify(response) + resp.status_code = 202 + tasks_to_run = fan_out(file, document_status.documentStatusId) # split up tasks + do_all_work(tasks_to_run) # run ocr pipeline for each task + document_status = document_status_controller._modify({"documentStatusId": document_status.documentStatusId}, {"numPages": len(tasks_to_run)}) + return resp + else: + resp = jsonify({"message": "Allowed file types are pdf only"}) + resp.status_code = 400 + return resp + + +@document_bp.route("/", methods=["GET"]) +def document_get(document_id): + filters = {"documentStatusId": document_id} + documents = document_controller._get(filters) + ds_entry = document_status_controller._get(filters) + ds_entry = alchemyConverter(ds_entry) + num_pages = ds_entry[0]["numPages"] + documents = alchemyConverter(documents) + if len(documents) == 
num_pages: + res = {"status": "COMPLETE", "progress": 100, "documents": documents} + else: + res = {"status": "PENDING", "progress": 100*len(documents) // num_pages, "documents": []} + res = jsonify(res) + res.status_code = 200 + return res + diff --git a/servers/tenant/blueprints/simple/driver.py b/servers/tenant/blueprints/simple/driver.py index 4f7ba1e..d59f4ca 100644 --- a/servers/tenant/blueprints/simple/driver.py +++ b/servers/tenant/blueprints/simple/driver.py @@ -7,7 +7,7 @@ from controllers.controllerMapper import UserController from models.models import UserType -from flask_cognito_lib.decorators import auth_required +# from flask_cognito_lib.decorators import auth_required from utils import ( AlchemyEncoder, alchemyConverter @@ -20,7 +20,7 @@ @driver_bp.route("/", methods=["GET"]) -@auth_required() +#@auth_required() def driver_get(): drivers = user_controller._get({'userType': UserType.driver.value}) diff --git a/servers/tenant/blueprints/simple/milestones.py b/servers/tenant/blueprints/simple/milestones.py index 8dad9f1..8035137 100644 --- a/servers/tenant/blueprints/simple/milestones.py +++ b/servers/tenant/blueprints/simple/milestones.py @@ -17,7 +17,7 @@ DeliveryMilestonesController, TicketStatusController, ) -from flask_cognito_lib.decorators import auth_required +# from flask_cognito_lib.decorators import auth_required from models.models import ( CreationMilestones, @@ -47,7 +47,7 @@ @milestone_bp.route("/", methods=["GET"]) -@auth_required() +#@auth_required() def milestone_get(ticket_id): # create ticket filters = { @@ -70,7 +70,7 @@ def milestone_get(ticket_id): # create ticket @milestone_bp.route("/", methods=["POST"]) -@auth_required() +#@auth_required() def milestone_post(milestone_type): # create ticket milestone_class = getattr(sys.modules[__name__], milestone_type) milestone_controller = class_to_cntrl_map[milestone_class] diff --git a/servers/tenant/blueprints/simple/users.py b/servers/tenant/blueprints/simple/users.py index 1f73efe..2d5b249 100644 --- a/servers/tenant/blueprints/simple/users.py +++ b/servers/tenant/blueprints/simple/users.py @@ -8,7 +8,7 @@ from controllers.controllerMapper import UserController from models.models import Users -from flask_cognito_lib.decorators import auth_required +# from flask_cognito_lib.decorators import auth_required user_bp = Blueprint("user_bp", __name__, url_prefix="user") @@ -16,14 +16,14 @@ user_controller = UserController() @user_bp.route("/", methods=["GET"]) -@auth_required() +#@auth_required() def user_get(): # create ticket user_controller._get(**request.form["user"]) return "success" @user_bp.route("/", methods=["POST"]) -@auth_required() +#@auth_required() def user_post(): # create ticket (request.get_json(force=True)['user']) @@ -32,11 +32,7 @@ def user_post(): # create ticket @user_bp.route("", methods=["PUT"]) -<<<<<<< HEAD -@auth_required() -======= -@require_appkey ->>>>>>> modifying db schema +#@auth_required() def user_modify(): userId = request.form["userId"] @@ -47,7 +43,7 @@ def user_modify(): @user_bp.route("/", methods=["DELETE"]) -@auth_required() +#@auth_required() def user_delete(): userId = request.args.get("userId") user_controller._delete(userId) diff --git a/servers/tenant/celery_client.py b/servers/tenant/celery_client.py index 2c90f45..87cabe9 100644 --- a/servers/tenant/celery_client.py +++ b/servers/tenant/celery_client.py @@ -4,20 +4,46 @@ import io from uuid import uuid4 import traceback -# import tenant.controllers.DocumentController as document_controller import PyPDF2 import 
extraction.app as ex +import extraction.extract as ext from celery import group +from tenant.controllers.DocumentController import DocumentController +import boto3 + + +# def get_file_s3(): +# s3_client = boto3.client('s3') +# TENANT = "test-tenant1" +# BUCKET = f"{TENANT}-bucket" +# OBJECT = 'signatures/cook-with-roommates-bonus-carbonara.jpg' + +# download_url = s3_client.generate_presigned_url( +# 'get_object', +# Params={'Bucket': BUCKET, 'Key': OBJECT, 'ResponseContentDisposition': 'attachment'}, +# ExpiresIn=600) + +# view_url = s3_client.generate_presigned_url( +# 'get_object', +# Params={'Bucket': BUCKET, 'Key': OBJECT}, +# ExpiresIn=600) +TENANT = "test-tenant1" +BUCKET = f"{TENANT}-bucket" +aws_access_key_id = os.getenv("aws_access_key_id") +aws_secret_access_key = os.getenv("aws_secret_access_key") +print(aws_secret_access_key, aws_access_key_id) CELERY_BROKER_URL = 'redis://redis:6379/0' client = Celery(__name__, broker=CELERY_BROKER_URL) logger = get_logger(__name__) FAILURE = -1 SUCCESS = 0 +PIECES_SEPERATOR = ",+-" UPLOAD_FOLDER = "/opt/metadata-extraction/uploads" +s3 = boto3.resource('s3', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key) +bucket = s3.Bucket(BUCKET) - -def fan_out(file): +def fan_out(file, documentStatusId): folder_uuid = uuid4() with io.BytesIO(file.read()) as open_pdf_file: read_pdf = PyPDF2.PdfFileReader(open_pdf_file) @@ -32,9 +58,11 @@ def fan_out(file): os.mkdir(f_dir) with open(f"{f_dir}/{file_uuid}.pdf", "wb") as f: output_pdf.write(f) + + bucket.upload_file(f"{f_dir}/{file_uuid}.pdf", f"documents/{folder_uuid}/{file_uuid}.pdf") file.close() pdf_folders = os.listdir(folder) - return group([work.s(f"{folder}/{pdf_folder}") for pdf_folder in pdf_folders]) + return group([work.s(f"{folder}/{pdf_folder}", documentStatusId) for pdf_folder in pdf_folders]) def do_all_work(tasks_to_run): @@ -43,12 +71,24 @@ def do_all_work(tasks_to_run): @client.task -def work(pdf_folder): +def work(pdf_folder, documentStatusId): + document_controller = DocumentController() pdf_file = f"{pdf_folder}.pdf" try: doclist = ex.work(pdf_folder) + doclist["orderS3Link"] = f"s3://{BUCKET}/documents/{pdf_file.replace(UPLOAD_FOLDER, '')}" + doclist["pieces"] = PIECES_SEPERATOR.join(doclist["pieces"]) + doclist["documentStatusId"] = documentStatusId + doclist["success"] = True + document_controller._create(doclist) except Exception as e: logger.info(f"file {pdf_folder}/{pdf_file} error. 
msg: {str(e)}") logger.info(traceback.format_exc()) + doclist = ext.generate_doclist({}) + doclist["orderS3Link"] = f"s3://{BUCKET}/documents/{pdf_file.replace(UPLOAD_FOLDER, '')}" + doclist["pieces"] = PIECES_SEPERATOR.join(doclist["pieces"]) + doclist["documentStatusId"] = documentStatusId + doclist["success"] = False + document_controller._create(doclist) return {"status": FAILURE, "folder": pdf_folder} return {"status": SUCCESS, "folder": pdf_folder, "doclist": doclist} \ No newline at end of file diff --git a/servers/tenant/controllers/DocumentController.py b/servers/tenant/controllers/DocumentController.py new file mode 100644 index 0000000..9c09fdf --- /dev/null +++ b/servers/tenant/controllers/DocumentController.py @@ -0,0 +1,26 @@ +import os +from flask import request, jsonify, Blueprint + +from celery import group +import PyPDF2 +import io +from uuid import uuid4 +import sys + +from tenant.controllers.baseController import BaseController + +sys.path.insert(0, "..") # import parent folder + +from models.models import DocumentStatus, Documents +from models.__init__ import session +from utils import convert_dict_to_alchemy_filters + + +class DocumentController(BaseController): + def __init__(self): + super().__init__(Documents) + + +class DocumentStatusController(BaseController): + def __init__(self): + super().__init__(DocumentStatus) \ No newline at end of file diff --git a/servers/tenant/controllers/baseController.py b/servers/tenant/controllers/baseController.py index 33a4272..7029c33 100644 --- a/servers/tenant/controllers/baseController.py +++ b/servers/tenant/controllers/baseController.py @@ -7,10 +7,7 @@ from sqlalchemy.orm import sessionmaker import sys from datetime import datetime -<<<<<<< HEAD -======= ->>>>>>> get endpoints sys.path.insert(0, "..") # import parent folder from models.models import TicketStatus @@ -84,7 +81,6 @@ def _delete(self, filters=[]): self.session.commit() -<<<<<<< HEAD def _get(self, filters, limit=5000): if not filters: filters = [] @@ -95,18 +91,6 @@ def _get(self, filters, limit=5000): .limit(limit) .all() ) -======= - def _get(self, model, filters, limit=500): - if not filters: - filters = [] - - objects = self.session.query(self.model) \ - .filter(*convert_dict_to_alchemy_filters(model, filters)) \ - .group_by(self.model.non_prim_identifying_column_name) \ - .order_by(self.model.timestamp) \ - .limit(limit) - ->>>>>>> modifying db schema return objects @@ -145,17 +129,11 @@ def _create_base_event(self, args_dict): def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): # get up to 'number_of_res' last event objects -<<<<<<< HEAD -<<<<<<< HEAD -======= ->>>>>>> ALL tickets API done # latest_objs = ( # self.session.query(self.model) # .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) # .group_by(self.model.non_prim_identifying_column_name) # .order_by(self.model.timestamp) -<<<<<<< HEAD -<<<<<<< HEAD # .limit(number_of_res) # .all() # ) @@ -174,47 +152,6 @@ def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): print("LATEST_OBJS-------") print(latest_objs) return latest_objs -======= - # .limit(number_of_res).all() - # ) - latest_objs = self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) \ - .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) \ - .limit(number_of_res).all() -======= - latest_objs = ( - self.session.query(self.model) - .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) - 
.group_by(self.model.non_prim_identifying_column_name) - .order_by(self.model.timestamp) - .limit(number_of_res).all() - ) - - # print(*convert_dict_to_alchemy_filters(self.model, filters)) - # latest_objs = self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) \ - # .filter(*convert_dict_to_alchemy_filters(self.model, filters)) \ - # .limit(number_of_res).all() ->>>>>>> get endpoints -======= - # .limit(number_of_res).all() - # ) - - print(*convert_dict_to_alchemy_filters(self.model, filters)) - latest_objs = self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) \ - .filter(*convert_dict_to_alchemy_filters(self.model, filters)) \ - .order_by(self.model.timestamp) \ - .limit(1).all() ->>>>>>> ALL tickets API done - - - # latest_objs = self.session.query(self.model, subquery).order_by(self.model.timestamp).all() -<<<<<<< HEAD - return latest_objs[0] ->>>>>>> modifying db schema -======= - print("LATEST_OBJS-------") - print(latest_objs) - return latest_objs ->>>>>>> fix schema # def _get_latest_event_objects_from_start_date(self, start_datetime, filters={}): @@ -231,8 +168,6 @@ def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): # return latest_objs -<<<<<<< HEAD -<<<<<<< HEAD def _get_latest_event_objects_from_start_date( self, datetime1, filters, number_of_res=5 ): @@ -243,41 +178,20 @@ def _get_latest_event_objects_from_start_date( def _get_latest_event_objects_in_range( self, datetime1, datetime2, filters={}, number_of_res=5 ): -======= - def _get_latest_event_objects_from_start_date(self, datetime1, filters={}, number_of_res=5): -======= - def _get_latest_event_objects_from_start_date(self, datetime1, filters, number_of_res=5): ->>>>>>> Fixing default date bug - return self._get_latest_event_objects_in_range(datetime1, datetime.now(), filters=filters, number_of_res=5) - - - def _get_latest_event_objects_in_range(self, datetime1, datetime2, filters={}, number_of_res=5): -<<<<<<< HEAD - print("\n\n\nDATETIM1", datetime1, datetime2) - ->>>>>>> get endpoints -======= ->>>>>>> ALL tickets API done assert datetime1 <= datetime2 time1 = int(time.mktime(datetime1.timetuple())) time2 = int(time.mktime(datetime2.timetuple())) -<<<<<<< HEAD -======= - ->>>>>>> get endpoints session_filters = convert_dict_to_alchemy_filters(self.model, filters) session_filters.append(self.model.timestamp >= time1) session_filters.append(self.model.timestamp <= time2) -<<<<<<< HEAD print( "------------------------RUNNING TICKET GET QUERY----------------------------" ) results = ( self.session.query(self.model) -<<<<<<< HEAD .distinct(self.model.non_prim_identifying_column_name) .filter(*session_filters) .order_by(self.model.non_prim_identifying_column_name, self.model.timestamp) @@ -287,26 +201,6 @@ def _get_latest_event_objects_in_range(self, datetime1, datetime2, filters={}, n print("----------complete-----------------") for result in results: print("TID " + str(result.ticketId)) -======= - .filter(*session_filters) - .limit(number_of_res) - .all() - ) - print("results" , results) - ->>>>>>> get endpoints -======= - - print("------------------------RUNNING TICKET GET QUERY----------------------------") - results = \ - self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) \ - .filter(*session_filters) \ - .order_by(self.model.non_prim_identifying_column_name, self.model.timestamp) \ - .limit(number_of_res).all() - print("----------complete-----------------") - for result in results: - print("TID " + 
str(result.ticketId)) ->>>>>>> ALL tickets API done return results def _find_latest_prim_key_from_non_prim_identifying_column_val( diff --git a/servers/tenant/controllers/controllerMapper.py b/servers/tenant/controllers/controllerMapper.py index d000ba8..454c4fe 100644 --- a/servers/tenant/controllers/controllerMapper.py +++ b/servers/tenant/controllers/controllerMapper.py @@ -1,12 +1,10 @@ from statistics import mode -from regex import D from controllers.baseController import ( BaseController, BaseTimeSeriesController, BaseNestedDependencyContoller, ) -import sys from models.models import * @@ -20,7 +18,6 @@ def __init__(self): super().__init__(Customers) -<<<<<<< HEAD class TicketStatusController(BaseController): def __init__(self): super().__init__(TicketStatus) @@ -87,13 +84,6 @@ def convert_to_desc(self, milestones): class InventoryMilestonesController(MilestoneController): def __init__(self): super().__init__(InventoryMilestones) -======= -class TicketController(BaseTimeSeriesController): - def __init__(self): - super().__init__(TicketEvents) - - ->>>>>>> modifying db schema def convert_to_desc(self, milestones): string_milestones = [] @@ -157,7 +147,6 @@ class DeliveryMilestonesController(MilestoneController): def __init__(self): super().__init__(DeliveryMilestones) -<<<<<<< HEAD def convert_to_desc(self, milestones): string_milestones = [] for milestone in milestones: @@ -229,10 +218,4 @@ def _create_base_event(self, args_dict): "approvedByUserId": args_dict["userId"], } ) - return obj -======= - -class DocumentController(BaseController): - def __init__(self): - super().__init__(Documents) ->>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz + return obj \ No newline at end of file diff --git a/servers/tenant/database/index_creation.sql b/servers/tenant/database/index_creation.sql index 489b6b2..4c55377 100644 --- a/servers/tenant/database/index_creation.sql +++ b/servers/tenant/database/index_creation.sql @@ -1,11 +1,4 @@ -<<<<<<< HEAD -<<<<<<< HEAD -- SQLBook: Code -======= ->>>>>>> modifying db schema -======= --- SQLBook: Code ->>>>>>> push CREATE INDEX idx_ticketEvents_comp ON TicketEvents(ticketEventId, timestamp); CREATE INDEX idx_ticketEvents_ts ON TicketEvents(timestamp); diff --git a/servers/tenant/database/table_creation.sql b/servers/tenant/database/table_creation.sql index 2e38aa9..f27a70c 100644 --- a/servers/tenant/database/table_creation.sql +++ b/servers/tenant/database/table_creation.sql @@ -33,13 +33,8 @@ CREATE TABLE IF NOT EXISTS Users ( "createdAt" INT NOT NULL, "modifiedAt" INT NOT NULL, PRIMARY KEY("userId") -<<<<<<< HEAD ); -<<<<<<< HEAD -<<<<<<< HEAD -======= ->>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz CREATE TABLE IF NOT EXISTS Documents ( "documentId" INT, "timestamp" INT, @@ -65,18 +60,12 @@ CREATE TABLE IF NOT EXISTS Documents ( "consigneePhoneNumber" VARCHAR(256), "pieces" VARCHAR(256), PRIMARY KEY("documentId") -<<<<<<< HEAD -======= ->>>>>>> push -======= ->>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz ); CREATE TABLE IF NOT EXISTS TicketEvents ( "ticketEventId" INT, "ticketId" INT, "timestamp" INT, -<<<<<<< HEAD "userId" INT, "customerId" INT, "barcodeNumber" INT, @@ -101,73 +90,6 @@ CREATE TABLE IF NOT EXISTS TicketEvents ( PRIMARY KEY("ticketEventId"), CONSTRAINT "fk_customerId" FOREIGN KEY ("customerId") REFERENCES Customers("customerId"), CONSTRAINT "fk_userId" FOREIGN KEY ("userId") REFERENCES Users("userId") -======= - -CREATE TABLE IF NOT EXISTS TicketEvents ( -<<<<<<< HEAD - ticketEventId INT, - ticketId INT, - timestamp INT, - 
shipperEventId INT, - consigneeEventId INT, - userId INT, - customerId INT, - barcodeNumber INT, - houseReferenceNumber INT, - orderS3Link VARCHAR(50), - weight INT, - claimedNumberOfPieces INT, - BOLNumber INT, - specialServices VARCHAR(256), - specialInstructions VARCHAR(256), - shipperCompany VARCHAR(256), - shipperName VARCHAR(256), - shipperAddress VARCHAR(256), - shipperPostalCode VARCHAR(256), - shipperPhoneNumber VARCHAR(256), - consigneeCompany VARCHAR(256), - consigneeName VARCHAR(256), - consigneeAddress VARCHAR(256), - consigneePostalCode VARCHAR(256), - consigneePhoneNumber VARCHAR(256), - pieces VARCHAR(256), - PRIMARY KEY(ticketEventId), - CONSTRAINT fk_customerId FOREIGN KEY (customerId) REFERENCES Customers(customerId), - CONSTRAINT fk_userId FOREIGN KEY (userId) REFERENCES Users(userId) ->>>>>>> modifying db schema -======= - "ticketEventId" INT, - "ticketId" INT, - "timestamp" INT, - "shipperEventId" INT, - "consigneeEventId" INT, -======= ->>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz - "userId" INT, - "customerId" INT, - "barcodeNumber" INT, - "houseReferenceNumber" INT, - "orderS3Link" VARCHAR(50), - "weight" INT, - "claimedNumberOfPieces" INT, - "BOLNumber" INT, - "specialServices" VARCHAR(256), - "specialInstructions" VARCHAR(256), - "shipperCompany" VARCHAR(256), - "shipperName" VARCHAR(256), - "shipperAddress" VARCHAR(256), - "shipperPostalCode" VARCHAR(256), - "shipperPhoneNumber" VARCHAR(256), - "consigneeCompany" VARCHAR(256), - "consigneeName" VARCHAR(256), - "consigneeAddress" VARCHAR(256), - "consigneePostalCode" VARCHAR(256), - "consigneePhoneNumber" VARCHAR(256), - "pieces" VARCHAR(256), - PRIMARY KEY("ticketEventId"), - CONSTRAINT "fk_customerId" FOREIGN KEY ("customerId") REFERENCES Customers("customerId"), - CONSTRAINT "fk_userId" FOREIGN KEY ("userId") REFERENCES Users("userId") ->>>>>>> push ); CREATE TABLE IF NOT EXISTS GenericMilestones ( @@ -198,43 +120,6 @@ CREATE TABLE IF NOT EXISTS InventoryMilestones ( ); CREATE TABLE IF NOT EXISTS DeliveryMilestones ( -<<<<<<< HEAD -<<<<<<< HEAD - "milestoneId" INT, - timestamp INT, - "ticketEventId" INT, - "customerId" INT, - "userId" INT, - "ticketStatus" DELIVERY_TICKET_STATUS, - "approvalStatus" TICKET_APPROVAL_STATUS, - "PODLink" VARCHAR(50), - "signatureLink" VARCHAR(50), - "picture1Link" VARCHAR(50), - "picture2Link" VARCHAR(50), - "picture3Link" VARCHAR(50), - PRIMARY KEY("milestoneId"), - CONSTRAINT "fk_ticketEventId" FOREIGN KEY ("ticketEventId") REFERENCES TicketEvents("ticketEventId"), - CONSTRAINT "fk_customerId" FOREIGN KEY ("customerId") REFERENCES Customers("customerId"), - CONSTRAINT "fk_userId" FOREIGN KEY ("userId") REFERENCES Users("userId") -======= - milestoneId INT, - timestamp INT, - ticketEventId INT, - customerId INT, - userId INT, - ticketStatus DELIVERY_TICKET_STATUS, - approvalStatus TICKET_APPROVAL_STATUS, - PODLink VARCHAR(50), - signatureLink VARCHAR(50), - picture1Link VARCHAR(50), - picture2Link VARCHAR(50), - picture3Link VARCHAR(50), - PRIMARY KEY(milestoneId), - CONSTRAINT fk_ticketEventId FOREIGN KEY (ticketEventId) REFERENCES TicketEvents(ticketEventId), - CONSTRAINT fk_customerId FOREIGN KEY (customerId) REFERENCES Customers(customerId), - CONSTRAINT fk_userId FOREIGN KEY (userId) REFERENCES Users(userId) ->>>>>>> modifying db schema -======= "milestoneId" INT, timestamp INT, "ticketEventId" INT, @@ -251,5 +136,4 @@ CREATE TABLE IF NOT EXISTS DeliveryMilestones ( CONSTRAINT "fk_ticketEventId" FOREIGN KEY ("ticketEventId") REFERENCES TicketEvents("ticketEventId"), 
CONSTRAINT "fk_customerId" FOREIGN KEY ("customerId") REFERENCES Customers("customerId"), CONSTRAINT "fk_userId" FOREIGN KEY ("userId") REFERENCES Users("userId") ->>>>>>> push ); \ No newline at end of file diff --git a/servers/tenant/models/__init__.py b/servers/tenant/models/__init__.py index 86cc19e..92914ed 100644 --- a/servers/tenant/models/__init__.py +++ b/servers/tenant/models/__init__.py @@ -14,15 +14,7 @@ Base = declarative_base() -<<<<<<< HEAD -<<<<<<< HEAD engine = create_engine(cnx_string, echo=False) -======= -engine = create_engine(cnx_string) ->>>>>>> push -======= -engine = create_engine(cnx_string, echo=True) ->>>>>>> ALL tickets API done print("connecting to db....") Session = sessionmaker(bind=engine) session = Session() diff --git a/servers/tenant/models/models.py b/servers/tenant/models/models.py index 828241c..19e35b6 100644 --- a/servers/tenant/models/models.py +++ b/servers/tenant/models/models.py @@ -104,49 +104,26 @@ class Users(Base): def __repr__(self): return f"< Users:: userId: {self.userId}>" -class Documents(Base): - __tablename__ = "documents" - documentId = Column(Integer, primary_key=True, nullable=False) - timestamp = Column(Integer, default=int(time.time())) - barcodeNumber = Column(Integer, nullable=False) - houseReferenceNumber = Column(Integer, nullable=False) - orderS3Link = Column(String, nullable=False) - weight = Column(Integer, nullable=False) - claimedNumberOfPieces = Column(Integer, nullable=False) - BOLNumber = Column(Integer, nullable=False) - specialServices = Column(String) - specialInstructions = Column(String) - # shipper - shipperCompany = Column(String, nullable=False) - shipperName = Column(String, nullable=False) - shipperAddress = Column(String, nullable=False) - shipperPostalCode = Column(String, nullable=False) - shipperPhoneNumber = Column(String, nullable=False) - # consignee - consigneeCompany = Column(String, nullable=False) - consigneeName = Column(String, nullable=False) - consigneeAddress = Column(String, nullable=False) - consigneePostalCode = Column(String, nullable=False) - consigneePhoneNumber = Column(String, nullable=False) - # pieces - pieces = Column(String, nullable=False) - customerName = Column(String, nullable=False) +class DocumentStatus(Base): + __tablename__ = "documentstatus" + documentStatusId = Column(Integer, primary_key=True, nullable=False, autoincrement=True) + status = Column(String, nullable=False, default="PENDING") + numPages = Column(Integer, nullable=False) -<<<<<<< HEAD -<<<<<<< HEAD -<<<<<<< HEAD class Documents(Base): __tablename__ = "documents" documentId = Column(Integer, primary_key=True, nullable=False) + documentStatusId = Column(Integer, nullable=False) + success = Column(Boolean, nullable=False) timestamp = Column(Integer, default=int(time.time())) - barcodeNumber = Column(Integer, nullable=False) - houseReferenceNumber = Column(Integer, nullable=False) + barcodeNumber = Column(String, nullable=False) + houseReferenceNumber = Column(String, nullable=False) orderS3Link = Column(String, nullable=False) - weight = Column(Integer, nullable=False) + weight = Column(String, nullable=False) claimedNumberOfPieces = Column(Integer, nullable=False) - BOLNumber = Column(Integer, nullable=False) + BOLNumber = Column(String, nullable=False) specialServices = Column(String) specialInstructions = Column(String) # shipper @@ -177,13 +154,6 @@ class TicketStatus(Base): user = relationship("Users") -======= ->>>>>>> modifying db schema -======= - ->>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz 
-======= ->>>>>>> Fix commit bugs for mergmerge class TicketEvents(Base): __tablename__ = "ticketevents" non_prim_identifying_column_name = "ticketId" @@ -191,53 +161,30 @@ class TicketEvents(Base): # TODO: forgein key ticketId = Column(Integer, ForeignKey(TicketStatus.ticketId)) timestamp = Column(Integer, default=int(time.time())) -<<<<<<< HEAD userId = Column(Integer, ForeignKey(Users.userId), nullable=False, index=True) customerId = Column( Integer, ForeignKey(Customers.customerId), nullable=False, index=True ) -======= - userId = Column(Integer, ForeignKey(Users.userId), nullable=False) - customerId = Column(Integer, ForeignKey(Customers.customerId), nullable=False) ->>>>>>> modifying db schema - barcodeNumber = Column(Integer, nullable=False) - houseReferenceNumber = Column(Integer, nullable=False) + barcodeNumber = Column(String, nullable=False) + houseReferenceNumber = Column(String, nullable=False) orderS3Link = Column(String, nullable=False) - weight = Column(Integer, nullable=False) + weight = Column(String, nullable=False) claimedNumberOfPieces = Column(Integer, nullable=False) - BOLNumber = Column(Integer, nullable=False) + BOLNumber = Column(String, nullable=False) specialServices = Column(String) specialInstructions = Column(String) -<<<<<<< HEAD -<<<<<<< HEAD # shipper -======= ->>>>>>> modifying db schema -======= - # shipper ->>>>>>> push shipperCompany = Column(String, nullable=False) shipperName = Column(String, nullable=False) shipperAddress = Column(String, nullable=False) shipperPostalCode = Column(String, nullable=False) shipperPhoneNumber = Column(String, nullable=False) -<<<<<<< HEAD -<<<<<<< HEAD # consignee -======= ->>>>>>> modifying db schema -======= - # consignee ->>>>>>> push consigneeCompany = Column(String, nullable=False) consigneeName = Column(String, nullable=False) consigneeAddress = Column(String, nullable=False) consigneePostalCode = Column(String, nullable=False) consigneePhoneNumber = Column(String, nullable=False) -<<<<<<< HEAD -<<<<<<< HEAD -======= ->>>>>>> push # pieces pieces = Column(String, nullable=False) isPickup = Column(Boolean, nullable=False) @@ -255,11 +202,6 @@ class CreationMilestones(Base): ticketId = Column( Integer, ForeignKey(TicketStatus.ticketId), nullable=False, index=True ) -======= - pieces = Column(String, nullable=False) - user = relationship("Users") - customer = relationship("Customers") ->>>>>>> modifying db schema newStatus = Column(Enum(Creation_Milestone_Status), nullable=False) @@ -296,7 +238,6 @@ class PickupMilestones(Base): class InventoryMilestones(Base): __tablename__ = "inventorymilestones" -<<<<<<< HEAD milestoneId = Column(Integer, primary_key=True, autoincrement=True) ticketId = Column( Integer, ForeignKey(TicketStatus.ticketId), nullable=False, index=True @@ -310,14 +251,7 @@ class InventoryMilestones(Base): newStatus = Column(Enum(Inventory_Milestone_Status), nullable=False) timestamp = Column(Integer, nullable=False, default=int(time.time())) -<<<<<<< HEAD approvedByUser = relationship("Users") -======= -if __name__ == "__main__": - ticketId_timestamp_idx = Index( - "ticketId_timestamp_idx", TicketEvents.ticketId, TicketEvents.timestamp - ) ->>>>>>> modifying db schema class AssignmentMilestones(Base): @@ -345,7 +279,6 @@ class AssignmentMilestones(Base): class IncompleteDeliveryMilestones(Base): __tablename__ = "inconpletedeliverymilestones" -<<<<<<< HEAD milestoneId = Column(Integer, primary_key=True, autoincrement=True) ticketId = Column( Integer, ForeignKey(TicketStatus.ticketId), nullable=False, 
index=True @@ -366,15 +299,8 @@ class IncompleteDeliveryMilestones(Base): timestamp = Column(Integer, nullable=False, default=int(time.time())) assigneeUser = relationship("Users") -======= -ticketId_timestamp_idx = Index( - "ticketId_timestamp_idx", TicketEvents.ticketId, TicketEvents.timestamp -) ->>>>>>> push -INDEXES.append(ticketId_timestamp_idx) -<<<<<<< HEAD class DeliveryMilestones(Base): __tablename__ = "deliverymilestones" @@ -403,9 +329,6 @@ class DeliveryMilestones(Base): timestamp = Column(Integer, nullable=False, default=int(time.time())) completingUser = relationship("Users") -======= - gen_milestoneId_idx = Index("gen_milestoneId_idx", GenericMilestones.milestoneId) ->>>>>>> modifying db schema ticketId_timestamp_idx = Index( @@ -424,34 +347,3 @@ class DeliveryMilestones(Base): pass -======= - -ticket_userId_idx = Index("ticket_userId_idx", TicketEvents.userId) - -INDEXES.append(ticket_userId_idx) - -ticket_customerId_idx = Index("ticket_customerId_idx", TicketEvents.customerId) - -INDEXES.append(ticket_customerId_idx) - -gen_milestoneId_idx = Index("gen_milestoneId_idx", GenericMilestones.milestoneId) - -INDEXES.append(gen_milestoneId_idx) - -inv_milestoneId_idx = Index("inv_milestoneId_idx", InventoryMilestones.milestoneId) - -INDEXES.append(inv_milestoneId_idx) - -del_milestoneId_idx = Index("del_milestoneId_idx", DeliveryMilestones.milestoneId) - -INDEXES.append(del_milestoneId_idx) - -print("Configuring DB ...") -Base.metadata.create_all(engine) -try: - # create indexes - for index in INDEXES: - index.create(bind=engine) -except: - pass ->>>>>>> push diff --git a/servers/tenant/requirements.txt b/servers/tenant/requirements.txt index a51c422..8799b7a 100644 --- a/servers/tenant/requirements.txt +++ b/servers/tenant/requirements.txt @@ -1,3 +1,4 @@ +boto3 amqp==5.0.9 billiard==3.6.4.0 celery==5.2.3 @@ -22,12 +23,8 @@ MarkupSafe==2.0.1 multilingual-pdf2text==1.1.0 numpy==1.22.0 ocrmypdf==13.2.0 -Flask-Cors packaging==21.3 -<<<<<<< HEAD Flask-Cors -======= ->>>>>>> modifying db schema pdf2image==1.16.0 pdfminer.six==20211012 pdfplumber==0.6.0 @@ -53,16 +50,11 @@ wcwidth==0.2.5 Werkzeug==2.0.2 wrapt==1.13.3 zipp==3.7.0 -<<<<<<< HEAD aniso8601==9.0.1 attrs==21.4.0 certifi==2022.6.15 charset-normalizer==2.1.0 ecdsa==0.17.0 -======= -sqlalchemy -psycopg2-binary ->>>>>>> modifying db schema Faker==13.7.0 flask-cognito-auth==1.1.0 flask-marshmallow==0.14.0 diff --git a/servers/tenant/server.py b/servers/tenant/server.py index b08bbeb..6c25924 100644 --- a/servers/tenant/server.py +++ b/servers/tenant/server.py @@ -1,35 +1,19 @@ -<<<<<<< HEAD import os from flask import Flask, Blueprint, jsonify, session # from config import app -======= -from tenant.config import app ->>>>>>> Fixed celery pipeline from blueprints.event_driven.ticket import ticket_bp from blueprints.simple.customers import customer_bp from blueprints.simple.users import user_bp -<<<<<<< HEAD -<<<<<<< HEAD from blueprints.simple.milestones import milestone_bp from blueprints.simple.driver import driver_bp +from blueprints.simple.document import document_bp + from flask_cors import CORS -from flask_cognito_lib import CognitoAuth -======= -from servers.tenant.blueprints.simple.pdf import pdf_bp # TODO: Move this in seperate microservice ->>>>>>> modifying db schema -======= -from flask_cors import cross_origin -<<<<<<< HEAD -from servers.tenant.blueprints.simple.document import pdf_bp # TODO: Move this in seperate microservice ->>>>>>> Stefan codeazzzzzzzzzzzzzzzzzzzzzzzzzzzz +# from flask_cognito_lib import 
CognitoAuth # from models.__init__ import engine, Base # from models.models import INDEXES -======= -from tenant.blueprints.simple.document import document_bp # TODO: Move this in seperate microservice -from flask import Blueprint ->>>>>>> Fixed celery pipeline from dotenv import load_dotenv load_dotenv(".env", override=True) @@ -41,33 +25,22 @@ app.config["AWS_COGNITO_USER_POOL_CLIENT_ID"] = os.environ["AWS_COGNITO_USER_POOL_CLIENT_ID"] app.config["AWS_COGNITO_DOMAIN"] = os.environ["AWS_COGNITO_DOMAIN"] -auth = CognitoAuth(app) +# auth = CognitoAuth(app) cors = CORS(app, resources={r"/api/*": {"origins": "*"}}) parent = Blueprint("api", __name__, url_prefix="/api") -<<<<<<< HEAD -======= -parent.register_blueprint(document_bp) ->>>>>>> Fixed celery pipeline parent.register_blueprint(ticket_bp) parent.register_blueprint(customer_bp) parent.register_blueprint(user_bp) parent.register_blueprint(milestone_bp) parent.register_blueprint(driver_bp) +parent.register_blueprint(document_bp) -<<<<<<< HEAD -======= - -@app.route("/") -def hello_world(): - return "Server Started!" - ->>>>>>> Fixed celery pipeline if __name__ == "__main__": print("REGISTERING BLUEPRINT") app.register_blueprint(parent) - app.run(debug=True, host="0.0.0.0", port=5000) + app.run(debug=True, host="0.0.0.0", port=6767) diff --git a/servers/tenant/test/test.py b/servers/tenant/test/test.py index 1907731..f89e5c9 100644 --- a/servers/tenant/test/test.py +++ b/servers/tenant/test/test.py @@ -6,11 +6,7 @@ from faker import Faker import os from flask import Flask, jsonify -<<<<<<< HEAD import uuid -======= -import uuid; ->>>>>>> push from sqlalchemy import create_engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker @@ -24,7 +20,6 @@ UserController, CustomerController, TicketController, -<<<<<<< HEAD CreationMilestonesController, PickupMilestonesController, InventoryMilestonesController, @@ -38,11 +33,6 @@ Assignment_Milestone_Status, Delivery_Milestone_Status, Incomplete_Delivery_Milestone_Status, -======= - GenericMilestoneController, - InventoryMilestoneController, - DeliveryMilestoneController, ->>>>>>> modifying db schema ) from utils import alchemyConverter from utils import AlchemyEncoder @@ -53,7 +43,6 @@ app = Flask(__name__) with app.app_context(): -<<<<<<< HEAD # Controllers creationMilestonesController = CreationMilestonesController() pickupMilestonesController = PickupMilestonesController() @@ -98,8 +87,6 @@ UserType.worker: [], } -======= ->>>>>>> modifying db schema def generate_users(scale=5): user_controller = UserController() @@ -114,16 +101,8 @@ def generate_users(scale=5): firstName = faker.unique.first_name() lastName = faker.unique.last_name() -<<<<<<< HEAD -<<<<<<< HEAD userType = random.choice([ut for ut in UserType]) userTypeValue = userType.value.lower() -======= - userType = random.choice([ut for ut in UserType]).value ->>>>>>> push -======= - userType = random.choice([ut for ut in UserType]).value.lower() ->>>>>>> fix schema username = firstName.lower()[0] + lastName.lower() email = f"{username}@faker.com" userId = random.randint(1, 1000000000) @@ -132,13 +111,8 @@ def generate_users(scale=5): args_arr.append( { -<<<<<<< HEAD "userId": userId, "userType": userTypeValue, -======= - "userId": random.randint(1, 1000000000), - "userType": userType, ->>>>>>> push "username": username, "firstName": firstName, "lastName": lastName, @@ -169,14 +143,7 @@ def generate_customers(scale=2): return customer_controller._create_bulk(args_arr) -<<<<<<< HEAD def 
generate_ticket_events(scale=400, users=[], customers=[]): -======= - - def generate_ticket_events( - scale=20, users=[], customers=[] - ): ->>>>>>> modifying db schema ticket_events_controller = TicketController() @@ -212,12 +179,9 @@ def generate_ticket_events( consigneePostalCode = faker.zipcode() consigneePhoneNumber = faker.phone_number() pieces = faker.sentence() -<<<<<<< HEAD isPickup = False noSignatureRequired = False tailgateAuthorized = False -======= ->>>>>>> modifying db schema obj = ticket_events_controller._create_base_event( { @@ -241,19 +205,14 @@ def generate_ticket_events( "consigneeAddress": consigneeAddress, "consigneePostalCode": consigneePostalCode, "consigneePhoneNumber": consigneePhoneNumber, -<<<<<<< HEAD "pieces": pieces, "isPickup": isPickup, "noSignatureRequired": noSignatureRequired, "tailgateAuthorized": tailgateAuthorized -======= - "pieces": pieces ->>>>>>> modifying db schema } ) -<<<<<<< HEAD # for i in range(random.randrange(10, 20)): # userId = random.choice(users).userId @@ -282,40 +241,9 @@ def generate_ticket_events( # "specialInstructions": specialInstructions, # }, # ) -======= - for i in range(random.randrange(10, 20)): - - userId = random.choice(users).userId - userId = random.choice(users).userId - customerId = random.choice(customers).customerId - barcodeNumber = random.randrange(100000000, 900000000) - houseReferenceNumber = random.randrange(100000000, 900000000) - orderS3Link = "s3link" - weight = random.randrange(100, 200) - claimedNumberOfPieces = random.randrange(1, 5) - BOLNumber = random.randrange(100000000, 900000000) - - created_obj = ticket_events_controller._modify_latest_object( - getattr(obj, TicketEvents.non_prim_identifying_column_name), - { - "ticketId": obj.ticketId, - "userId": userId, - "customerId": customerId, - "barcodeNumber": barcodeNumber, - "houseReferenceNumber": houseReferenceNumber, - "orderS3Link": orderS3Link, - "weight": weight, - "claimedNumberOfPieces": claimedNumberOfPieces, - "BOLNumber": BOLNumber, - "specialServices": specialServices, - "specialInstructions": specialInstructions, - }, - ) ->>>>>>> fix schema print("Created Ticket") -<<<<<<< HEAD def list_diff(li1, li2): return list(set(li1) - set(li2)) + list(set(li2) - set(li1)) @@ -529,48 +457,10 @@ def generate_milestone_events(old_tickets): # "approvalStatus": approvalStatus, # } # ) -======= - - def generate_generic_milestones_events(scale=50, ticket_map=[], users=[]): - - gen_milestone_controller = GenericMilestoneController() - - n = len(session.query(GenericMilestones).distinct().all()) - if n < scale: - print(f"Generating Gen Milestones for {scale - n } Tickets") - - for _ in range(scale - n): - - ticketId = random.choice([k for k in ticket_map]) - - for _ in range( - random.randint(4, 10) - ): # number of milestones per ticket - - milestoneId = random.randint(1, 2147483645) - userId = random.choice(users).userId - - ticketStatus = random.choice( - [e for e in Generic_Ticket_Status] - ).value.lower() - - obj = gen_milestone_controller._create( - { - "milestoneId": milestoneId, - "ticketEventId": random.choice(ticket_map[ticketId]), - "userId": userId, - "ticketStatus": ticketStatus, - } - ) ->>>>>>> modifying db schema # print("Created Inventory Milestone") -<<<<<<< HEAD # def generate_delivery_milestones_events(scale=50, ticket_map=[], users=[]): -======= - def generate_inventory_milestones_events(scale=50, ticket_map=[], users=[]): ->>>>>>> modifying db schema # gen_milestone_controller = DeliveryMilestoneController() @@ -583,24 +473,9 @@ def 
generate_inventory_milestones_events(scale=50, ticket_map=[], users=[]): # ticketId = random.choice([k for k in ticket_map]) -<<<<<<< HEAD # for _ in range( # random.randint(4, 10) # ): # number of milestones per ticket -======= - print("Created Inventory Milestone") - - def generate_delivery_milestones_events(scale=50, ticket_map=[], users=[]): - - gen_milestone_controller = DeliveryMilestoneController() - - n = len(session.query(DeliveryMilestones).distinct().all()) - - if n < scale: - print(f"Generating Delivery Milestones for {scale - n } Tickets") - - for _ in range(scale - n): ->>>>>>> modifying db schema # milestoneId = random.randint(1, 2147483645) # userId = random.choice(users).userId @@ -625,23 +500,7 @@ def generate_delivery_milestones_events(scale=50, ticket_map=[], users=[]): # print("Created Delivery Milestone") -<<<<<<< HEAD generate_users(scale=70) -======= - obj = gen_milestone_controller._create( - { - "milestoneId": milestoneId, - "ticketEventId": random.choice(ticket_map[ticketId]), - "userId": userId, - "ticketStatus": ticketStatus, - "approvalStatus": approvalStatus, - } - ) - - print("Created Delivery Milestone") - - generate_users(scale=5) ->>>>>>> push users = session.query(Users).all() # print(random.choice(users)) @@ -651,7 +510,6 @@ def generate_delivery_milestones_events(scale=50, ticket_map=[], users=[]): # pprint(alchemyConverter(users[0])) -<<<<<<< HEAD oldTickets = ( session.query(TicketEvents) .with_entities(TicketEvents.ticketId) @@ -662,10 +520,6 @@ def generate_delivery_milestones_events(scale=50, ticket_map=[], users=[]): generate_ticket_events( scale=500, -======= - generate_ticket_events( - scale=20, ->>>>>>> modifying db schema users=users, customers=customers, ) @@ -677,10 +531,6 @@ def generate_delivery_milestones_events(scale=50, ticket_map=[], users=[]): # exit() -<<<<<<< HEAD -======= - ->>>>>>> modifying db schema pprint(alchemyConverter(ticketEvents[0])) exit() diff --git a/servers/tenant/utils.py b/servers/tenant/utils.py index 4f14e22..3429fb6 100644 --- a/servers/tenant/utils.py +++ b/servers/tenant/utils.py @@ -14,10 +14,8 @@ def default(self, obj): return json.JSONEncoder.default(self, obj) -<<<<<<< HEAD # DFS function used to convert alchemy objects to JSON -<<<<<<< HEAD def alchemyConvertUtil(object, res, visited): visited.add(str(object.__class__)) for field in [ @@ -60,91 +58,6 @@ def alchemyConverter(obj): return res else: return alchemyConvertUtil(obj, {}, visited=set()) -======= -def alchemyConverter(object): - def single_convert(obj, res={}, visited=set({})): - visited.add(str(object.__class__)) - for field in [ - x - for x in dir(object) - if not x.startswith("_") - and x not in set({"metadata", "non_prim_identifying_column_name", "registry"}) - ]: - cls_name = str(obj.__getattribute__(field).__class__) - if "models.models." 
in cls_name: - if cls_name in visited: - continue - else: - visited.add(cls_name) - - res[field] = {} - single_convert(getattr(obj, field), res[field], visited=visited) - visited.remove(cls_name) - elif "InstrumentedList" in cls_name: - res[field] = [] - - for i, obj in enumerate(getattr(obj, field)): - - res[field].append({}) - single_convert(obj, res[field][i], visited=visited) -======= ->>>>>>> get endpoints - -# DFS function used to convert alchemy objects to JSON -def alchemyConvertUtil(object, res, visited): - visited.add(str(object.__class__)) - for field in [ - x - for x in dir(object) - if not x.startswith("_") - and x not in set({"metadata", "non_prim_identifying_column_name", "registry"}) - ]: - - cls_name = str(object.__getattribute__(field).__class__) - - if "models.models." in cls_name: - if cls_name in visited: - continue - else: - visited.add(cls_name) - - res[field] = {} - alchemyConvertUtil(getattr(object, field), res[field], visited=visited) - visited.remove(cls_name) - elif "InstrumentedList" in cls_name: - res[field] = [] - - for i, obj in enumerate(getattr(object, field)): - - res[field].append({}) - alchemyConvertUtil(obj, res[field][i], visited=visited) - - else: - res[field] = getattr(object, field) - - return res - -def alchemyConverter(obj): - if type(obj) == list: - res = [] - for ele in obj: - print("ALCHEMY DEBUG ---------------------------") - print("TID: " + str(ele.ticketId)) - json_res = alchemyConvertUtil(ele, {}, visited=set()) - res.append(json_res) - return res - else: -<<<<<<< HEAD -<<<<<<< HEAD - return single_convert(object) - ->>>>>>> fix schema -======= - return alchemyConvertUtil(obj) ->>>>>>> get endpoints -======= - return alchemyConvertUtil(obj, {}, visited=set()) ->>>>>>> ALL tickets API done # converts fiters as a dictionary to alchemy interpretable results From a9e7fb58f7cb62be9595394205dae3eb19e4183c Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Thu, 21 Jul 2022 19:03:05 -0400 Subject: [PATCH 31/40] s3 presigned links --- .gitignore | 4 - .../tenant/blueprints/event_driven/ticket.py | 146 +----------------- servers/tenant/blueprints/simple/users.py | 4 - servers/tenant/celery_client.py | 52 ++----- servers/tenant/controllers/baseController.py | 77 --------- .../tenant/controllers/controllerMapper.py | 17 -- servers/tenant/models/__init__.py | 4 - servers/tenant/models/models.py | 75 +-------- servers/tenant/server.py | 26 ---- servers/tenant/test/test.py | 142 ----------------- servers/tenant/utils.py | 5 - 11 files changed, 19 insertions(+), 533 deletions(-) diff --git a/.gitignore b/.gitignore index 4d35263..eb713bd 100644 --- a/.gitignore +++ b/.gitignore @@ -1,10 +1,6 @@ *venv* __pycache__/ *.pyc -<<<<<<< HEAD .vscode **/.env tmp -======= -.vscode ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index 04960f8..436aa63 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -4,27 +4,18 @@ from numpy import number from flask import make_response, request, jsonify, Blueprint -<<<<<<< HEAD -======= -from flask_cors import cross_origin ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 import sys sys.path.insert(0, "..") # import parent folder -<<<<<<< HEAD from controllers.controllerMapper import TicketController, TicketStatusController -======= -from controllers.controllerMapper import TicketController ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 from 
models.models import TicketEvents from utils import ( AlchemyEncoder, alchemyConverter, ) -<<<<<<< HEAD # from flask_cognito_lib.decorators import auth_required ticket_bp = Blueprint("ticket_bp", __name__, url_prefix="ticket") @@ -32,14 +23,6 @@ ticket_controller = TicketController() ticket_status_controller = TicketStatusController() PIECES_SEPERATOR = ",+-" -======= -ticket_bp = Blueprint("ticket_bp", __name__, url_prefix="ticket") - -# TODO: USER BASED AUTH - -ticket_controller = TicketController() - ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 """ Route expects requests of format: @@ -97,12 +80,7 @@ def ticket_get_all_with_status(status): # create ticket @ticket_bp.route("/", methods=["POST"]) -<<<<<<< HEAD #@auth_required() -======= -@cross_origin(supports_credentials=True) -@require_appkey ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 def ticket_post(): # create ticket print("Creating ticket from the following JSON:") print(request.data) @@ -132,7 +110,6 @@ def ticket_edit(ticket_id): # create ticket ticket_dict["pieces"] = PIECES_SEPERATOR.join(ticket_dict["pieces"]) ticket_event = ticket_controller._create_base_event(ticket_dict) -<<<<<<< HEAD response = {"ticketId": ticket_event.ticketId} return make_response(json.dumps(response)) @@ -150,15 +127,6 @@ def get_clean_filters_dict(immutable_args): if "limit" in sql_filters: del sql_filters["limit"] return sql_filters -======= - return {"success"} - - -# http://127.0.0.1:6767/api/ticket/?start=2022-01-01T00:00:00&end=2022-04-04T00:00:00&shipperName=Eric%20Shea -# curl http://127.0.0.1:6767/api/ticket/?shipperName -# # curl http://127.0.0.1:6767/api/ticket?key=a -# # curl http://127.0.0.1:6767/api/ticket/?start=2022-01-01T00:00:00Z&end=2022-04-04T00:00:00Z ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 def corsify(resp): resp = make_response(json.dumps(resp)) @@ -166,30 +134,12 @@ def corsify(resp): resp.headers['Access-Control-Allow-Headers'] = ['Origin', 'X-Requested-With', 'Content-Type', 'Accept'] return resp -<<<<<<< HEAD def validate_date_format(date_text): try: return datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S") except ValueError: raise ValueError("Incorrect data format, should be %Y-%m-%dT%H:%M:%S") -======= -def get_clean_filters_dict(immutable_args): - sql_filters = dict(immutable_args) - if "start" in sql_filters: - del sql_filters["start"] - if "end" in sql_filters: - del sql_filters["end"] - if "limit" in sql_filters: - del sql_filters["limit"] - return sql_filters - -def validate_date_format(date_text): - try: - return datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S") - except ValueError: - raise ValueError("Incorrect data format, should be %Y-%m-%dT%H:%M:%S") ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 def default_start(): dt_start = validate_date_format("1900-01-01T00:00:00") @@ -199,7 +149,6 @@ def default_end(): dt_end = validate_date_format("2100-01-01T00:00:00") return dt_end -<<<<<<< HEAD def default_end(): dt_end = validate_date_format("2100-01-01T00:00:00") return dt_end @@ -227,45 +176,9 @@ def ticket_get_all(): for ticket in res: ticket["pieces"] = ticket["pieces"].split(PIECES_SEPERATOR) ticket["ticketStatus"]["currentStatus"] = ticket["ticketStatus"]["currentStatus"].value -======= -@ticket_bp.route("/", methods=["GET"]) -@cross_origin(supports_credentials=True) -# @require_appkey -def ticket_get_all(): - filters = request.args or {} - sql_filters = get_clean_filters_dict(filters) - limit = 5000 if "limit" not in filters else filters["limit"] - - dt_start = 
validate_date_format(filters["start"]) if "start" in filters else default_start() - dt_end = validate_date_format(filters["end"]) if "end" in filters else default_end() - - data = ticket_controller._get_latest_event_objects_in_range(dt_start, dt_end, sql_filters, number_of_res=limit) - - res = alchemyConverter(data) - - return corsify(res) - - -@ticket_bp.route("/", methods=["GET"]) -@cross_origin(supports_credentials=True) -# @require_appkey -def ticket_get(ticket_id): - filters = request.args.get("filters") or {} - - - sql_filters = get_clean_filters_dict(filters) - sql_filters["ticketId"] = ticket_id - data = ticket_controller._get_latest_event_objects_in_range( - default_start(), default_end(), filters=sql_filters - ) - - res = alchemyConverter(data[0]) - return corsify(res) ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 return make_response(json.dumps(res, cls=AlchemyEncoder)) -<<<<<<< HEAD def get_single(ticket_id): filters = request.args.get("filters") or {} @@ -284,38 +197,10 @@ def ticket_get(ticket_id): data = get_single(ticket_id) res = alchemyConverter(data) return make_response(json.dumps(res, cls=AlchemyEncoder)) -======= -{ - "datetime" : "value", - "filters" : { - "field1": "value1", - "field2": "value2", - .... - } -} - -""" - - - -# @ticket_bp.route("/attribute/{attribute_name}", methods=["GET"]) -# @require_appkey -# def ticket_attribute_get(attribute_name): - -# filters.extend({"ticket_id": ticket_id}) - -# latest_ticket = ticket_controller._get_latest_event_objects( -# number_of_res=number_of_res, filters=filters -# ) - -# res = alchemyConverter(latest_ticket) -# response = json.dumps(res, cls=AlchemyEncoder) ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 # return response - -<<<<<<< HEAD +""" { "datetime" : "value", "filters" : { @@ -326,9 +211,6 @@ def ticket_get(ticket_id): } """ -======= - ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 """ Route expects requests of format: @@ -348,29 +230,3 @@ def ticket_get(ticket_id): """ -<<<<<<< HEAD -======= - -@ticket_bp.route("/", methods=["PUT"]) -@cross_origin(supports_credentials=True) -@require_appkey -def ticket_update(ticket_id): - - update_dict = request.form["update_dict"] - - # remove ticketId and ticketEventId if present - update_dict.pop(ticket_controller.primary_key, None) - update_dict.pop(TicketEvents.non_prim_identifying_column_name, None) - - filters = request.form["filters"] - filters.extend({"ticket_id": ticket_id}) - - updated_object = ticket_controller._modify_latest_object( - update_dict, filters=filters - ) - - res = alchemyConverter(updated_object) - response = json.dumps(res, cls=AlchemyEncoder) - - return response ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 diff --git a/servers/tenant/blueprints/simple/users.py b/servers/tenant/blueprints/simple/users.py index ebdf977..2d5b249 100644 --- a/servers/tenant/blueprints/simple/users.py +++ b/servers/tenant/blueprints/simple/users.py @@ -32,11 +32,7 @@ def user_post(): # create ticket @user_bp.route("", methods=["PUT"]) -<<<<<<< HEAD #@auth_required() -======= -@require_appkey ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 def user_modify(): userId = request.form["userId"] diff --git a/servers/tenant/celery_client.py b/servers/tenant/celery_client.py index 48acff9..de16300 100644 --- a/servers/tenant/celery_client.py +++ b/servers/tenant/celery_client.py @@ -4,20 +4,14 @@ import io from uuid import uuid4 import traceback -<<<<<<< HEAD import PyPDF2 import extraction.app as ex import extraction.extract as ext -======= -# import 
tenant.controllers.DocumentController as document_controller -import PyPDF2 -import extraction.app as ex ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 from celery import group from tenant.controllers.DocumentController import DocumentController import boto3 - +from botocore.client import Config # def get_file_s3(): # s3_client = boto3.client('s3') @@ -28,12 +22,14 @@ # download_url = s3_client.generate_presigned_url( # 'get_object', # Params={'Bucket': BUCKET, 'Key': OBJECT, 'ResponseContentDisposition': 'attachment'}, -# ExpiresIn=600) +# ExpiresIn=3600) # view_url = s3_client.generate_presigned_url( # 'get_object', # Params={'Bucket': BUCKET, 'Key': OBJECT}, -# ExpiresIn=600) +# ExpiresIn=3600) + + TENANT = "test-tenant1" BUCKET = f"{TENANT}-bucket" aws_access_key_id = os.getenv("aws_access_key_id") @@ -44,24 +40,19 @@ logger = get_logger(__name__) FAILURE = -1 SUCCESS = 0 -<<<<<<< HEAD PIECES_SEPERATOR = ",+-" UPLOAD_FOLDER = "/opt/metadata-extraction/uploads" -s3 = boto3.resource('s3', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key) +s3 = boto3.resource('s3', region_name='ca-central-1', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, config=Config(signature_version='s3v4')) bucket = s3.Bucket(BUCKET) +s3_client = boto3.client('s3', region_name='ca-central-1', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, config=Config(signature_version='s3v4')) def fan_out(file, documentStatusId): -======= -UPLOAD_FOLDER = "/opt/metadata-extraction/uploads" - - -def fan_out(file): ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 folder_uuid = uuid4() with io.BytesIO(file.read()) as open_pdf_file: read_pdf = PyPDF2.PdfFileReader(open_pdf_file) num_pages = read_pdf.getNumPages() folder = f"{UPLOAD_FOLDER}/{folder_uuid}" + os.mkdir(folder) for i in range(num_pages): output_pdf = PyPDF2.PdfFileWriter() @@ -71,17 +62,11 @@ def fan_out(file): os.mkdir(f_dir) with open(f"{f_dir}/{file_uuid}.pdf", "wb") as f: output_pdf.write(f) -<<<<<<< HEAD bucket.upload_file(f"{f_dir}/{file_uuid}.pdf", f"documents/{folder_uuid}/{file_uuid}.pdf") file.close() pdf_folders = os.listdir(folder) return group([work.s(f"{folder}/{pdf_folder}", documentStatusId) for pdf_folder in pdf_folders]) -======= - file.close() - pdf_folders = os.listdir(folder) - return group([work.s(f"{folder}/{pdf_folder}") for pdf_folder in pdf_folders]) ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 def do_all_work(tasks_to_run): @@ -90,13 +75,18 @@ def do_all_work(tasks_to_run): @client.task -<<<<<<< HEAD def work(pdf_folder, documentStatusId): document_controller = DocumentController() pdf_file = f"{pdf_folder}.pdf" + OBJECT = f"documents{pdf_file.replace(UPLOAD_FOLDER, '')}" + view_url = s3_client.generate_presigned_url( + 'get_object', + Params={'Bucket': BUCKET, 'Key': OBJECT}, + ExpiresIn=3600) try: doclist = ex.work(pdf_folder) - doclist["orderS3Link"] = f"s3://{BUCKET}/documents/{pdf_file.replace(UPLOAD_FOLDER, '')}" + doclist["orderS3Path"] = f"s3://{BUCKET}/{OBJECT}" + doclist["orderS3Link"] = view_url doclist["pieces"] = PIECES_SEPERATOR.join(doclist["pieces"]) doclist["documentStatusId"] = documentStatusId doclist["success"] = True @@ -105,19 +95,11 @@ def work(pdf_folder, documentStatusId): logger.info(f"file {pdf_folder}/{pdf_file} error. 
msg: {str(e)}") logger.info(traceback.format_exc()) doclist = ext.generate_doclist({}) - doclist["orderS3Link"] = f"s3://{BUCKET}/documents/{pdf_file.replace(UPLOAD_FOLDER, '')}" + doclist["orderS3Path"] = f"s3://{BUCKET}/{OBJECT}" + doclist["orderS3Link"] = view_url doclist["pieces"] = PIECES_SEPERATOR.join(doclist["pieces"]) doclist["documentStatusId"] = documentStatusId doclist["success"] = False document_controller._create(doclist) -======= -def work(pdf_folder): - pdf_file = f"{pdf_folder}.pdf" - try: - doclist = ex.work(pdf_folder) - except Exception as e: - logger.info(f"file {pdf_folder}/{pdf_file} error. msg: {str(e)}") - logger.info(traceback.format_exc()) ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 return {"status": FAILURE, "folder": pdf_folder} return {"status": SUCCESS, "folder": pdf_folder, "doclist": doclist} \ No newline at end of file diff --git a/servers/tenant/controllers/baseController.py b/servers/tenant/controllers/baseController.py index 4ee4877..a869177 100644 --- a/servers/tenant/controllers/baseController.py +++ b/servers/tenant/controllers/baseController.py @@ -7,10 +7,6 @@ from sqlalchemy.orm import sessionmaker import sys from datetime import datetime -<<<<<<< HEAD - -======= ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 sys.path.insert(0, "..") # import parent folder from models.models import TicketStatus @@ -84,7 +80,6 @@ def _delete(self, filters=[]): self.session.commit() -<<<<<<< HEAD def _get(self, filters, limit=5000): if not filters: filters = [] @@ -95,23 +90,10 @@ def _get(self, filters, limit=5000): .limit(limit) .all() ) -======= - def _get(self, model, filters, limit=500): - if not filters: - filters = [] - - objects = self.session.query(self.model) \ - .filter(*convert_dict_to_alchemy_filters(model, filters)) \ - .group_by(self.model.non_prim_identifying_column_name) \ - .order_by(self.model.timestamp) \ - .limit(limit) - ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 return objects -<<<<<<< HEAD def _get_count(self, filters): if not filters: filters = [] @@ -124,8 +106,6 @@ def _get_count(self, filters): return len(objects) -======= ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 class BaseTimeSeriesController(BaseController): def __init__(self, model): @@ -153,7 +133,6 @@ def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): # .filter_by(*convert_dict_to_alchemy_filters(self.model, filters)) # .group_by(self.model.non_prim_identifying_column_name) # .order_by(self.model.timestamp) -<<<<<<< HEAD # .limit(number_of_res) # .all() # ) @@ -167,23 +146,11 @@ def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}): .limit(1) .all() ) -======= - # .limit(number_of_res).all() - # ) - - print(*convert_dict_to_alchemy_filters(self.model, filters)) - latest_objs = self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) \ - .filter(*convert_dict_to_alchemy_filters(self.model, filters)) \ - .order_by(self.model.timestamp) \ - .limit(1).all() - ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 # latest_objs = self.session.query(self.model, subquery).order_by(self.model.timestamp).all() print("LATEST_OBJS-------") print(latest_objs) return latest_objs -<<<<<<< HEAD # def _get_latest_event_objects_from_start_date(self, start_datetime, filters={}): @@ -235,50 +202,6 @@ def _get_latest_event_objects_in_range( print("TID " + str(result.ticketId)) return results -======= - - # def _get_latest_event_objects_from_start_date(self, start_datetime, filters={}): - - # starttime = 
int(time.mktime(start_datetime).timetuple()) - - # filters.append(self.model.timestamp >= starttime) - - # latest_objs = ( - # self.session.query(self.model) - # .filter(*convert_dict_to_alchemy_filters(filters)) - # .group_by(self.model.non_prim_identifying_column_name) - # .order_by(self.model.timestamp) - # ) - - # return latest_objs - - def _get_latest_event_objects_from_start_date(self, datetime1, filters, number_of_res=5): - return self._get_latest_event_objects_in_range(datetime1, datetime.now(), filters=filters, number_of_res=5) - - - def _get_latest_event_objects_in_range(self, datetime1, datetime2, filters={}, number_of_res=5): - assert datetime1 <= datetime2 - time1 = int(time.mktime(datetime1.timetuple())) - time2 = int(time.mktime(datetime2.timetuple())) - - - session_filters = convert_dict_to_alchemy_filters(self.model, filters) - - session_filters.append(self.model.timestamp >= time1) - session_filters.append(self.model.timestamp <= time2) - - print("------------------------RUNNING TICKET GET QUERY----------------------------") - results = \ - self.session.query(self.model).distinct(self.model.non_prim_identifying_column_name) \ - .filter(*session_filters) \ - .order_by(self.model.non_prim_identifying_column_name, self.model.timestamp) \ - .limit(number_of_res).all() - print("----------complete-----------------") - for result in results: - print("TID " + str(result.ticketId)) - return results - ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 def _find_latest_prim_key_from_non_prim_identifying_column_val( self, non_prim_identifying_col_val ): diff --git a/servers/tenant/controllers/controllerMapper.py b/servers/tenant/controllers/controllerMapper.py index ed3db46..b18c373 100644 --- a/servers/tenant/controllers/controllerMapper.py +++ b/servers/tenant/controllers/controllerMapper.py @@ -18,7 +18,6 @@ def __init__(self): super().__init__(Customers) -<<<<<<< HEAD class TicketStatusController(BaseController): def __init__(self): super().__init__(TicketStatus) @@ -85,13 +84,6 @@ def convert_to_desc(self, milestones): class InventoryMilestonesController(MilestoneController): def __init__(self): super().__init__(InventoryMilestones) -======= -class TicketController(BaseTimeSeriesController): - def __init__(self): - super().__init__(TicketEvents) - - ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 def convert_to_desc(self, milestones): string_milestones = [] @@ -169,7 +161,6 @@ def convert_to_desc(self, milestones): class TicketController(BaseTimeSeriesController): def __init__(self): -<<<<<<< HEAD super().__init__(TicketEvents) self.ticket_status_controller = TicketStatusController() self.creation_milestone_controller = CreationMilestonesController() @@ -228,11 +219,3 @@ def _create_base_event(self, args_dict): } ) return obj -======= - super().__init__(DeliveryMilestones) - - -class DocumentController(BaseController): - def __init__(self): - super().__init__(Documents) ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 diff --git a/servers/tenant/models/__init__.py b/servers/tenant/models/__init__.py index b7719b9..92914ed 100644 --- a/servers/tenant/models/__init__.py +++ b/servers/tenant/models/__init__.py @@ -14,11 +14,7 @@ Base = declarative_base() -<<<<<<< HEAD engine = create_engine(cnx_string, echo=False) -======= -engine = create_engine(cnx_string, echo=True) ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 print("connecting to db....") Session = sessionmaker(bind=engine) session = Session() diff --git a/servers/tenant/models/models.py b/servers/tenant/models/models.py 
index 0c06264..8832845 100644 --- a/servers/tenant/models/models.py +++ b/servers/tenant/models/models.py @@ -104,7 +104,6 @@ class Users(Base): def __repr__(self): return f"< Users:: userId: {self.userId}>" -<<<<<<< HEAD class DocumentStatus(Base): __tablename__ = "documentstatus" @@ -122,27 +121,13 @@ class Documents(Base): barcodeNumber = Column(String, nullable=False) houseReferenceNumber = Column(String, nullable=False) orderS3Link = Column(String, nullable=False) + orderS3Path = Column(String, nullable=False) weight = Column(String, nullable=False) claimedNumberOfPieces = Column(Integer, nullable=False) BOLNumber = Column(String, nullable=False) specialServices = Column(String) specialInstructions = Column(String) # shipper -======= -class Documents(Base): - __tablename__ = "documents" - documentId = Column(Integer, primary_key=True, nullable=False) - timestamp = Column(Integer, default=int(time.time())) - barcodeNumber = Column(Integer, nullable=False) - houseReferenceNumber = Column(Integer, nullable=False) - orderS3Link = Column(String, nullable=False) - weight = Column(Integer, nullable=False) - claimedNumberOfPieces = Column(Integer, nullable=False) - BOLNumber = Column(Integer, nullable=False) - specialServices = Column(String) - specialInstructions = Column(String) - # shipper ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 shipperCompany = Column(String, nullable=False) shipperName = Column(String, nullable=False) shipperAddress = Column(String, nullable=False) @@ -156,7 +141,6 @@ class Documents(Base): consigneePhoneNumber = Column(String, nullable=False) # pieces pieces = Column(String, nullable=False) -<<<<<<< HEAD noSignatureRequired = Column(Boolean, nullable=False) tailgateAuthorized = Column(Boolean, nullable=False) customerName = Column(String, nullable=False) @@ -167,9 +151,6 @@ class TicketStatus(Base): ticketId = Column(Integer, primary_key=True, autoincrement=True) currentStatus = Column(Enum(Generic_Milestone_Status), nullable=False) assignedTo = Column(Integer, ForeignKey(Users.userId), nullable=True, index=True) -======= - customerName = Column(String, nullable=False) ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 @@ -180,30 +161,19 @@ class TicketEvents(Base): # TODO: forgein key ticketId = Column(Integer, ForeignKey(TicketStatus.ticketId)) timestamp = Column(Integer, default=int(time.time())) -<<<<<<< HEAD userId = Column(Integer, ForeignKey(Users.userId), nullable=False, index=True) customerId = Column( Integer, ForeignKey(Customers.customerId), nullable=False, index=True ) barcodeNumber = Column(String, nullable=False) houseReferenceNumber = Column(String, nullable=False) -======= - userId = Column(Integer, ForeignKey(Users.userId), nullable=False) - customerId = Column(Integer, ForeignKey(Customers.customerId), nullable=False) - barcodeNumber = Column(Integer, nullable=False) - houseReferenceNumber = Column(Integer, nullable=False) ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 orderS3Link = Column(String, nullable=False) weight = Column(String, nullable=False) claimedNumberOfPieces = Column(Integer, nullable=False) BOLNumber = Column(String, nullable=False) specialServices = Column(String) specialInstructions = Column(String) -<<<<<<< HEAD # shipper -======= - # shipper ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 shipperCompany = Column(String, nullable=False) shipperName = Column(String, nullable=False) shipperAddress = Column(String, nullable=False) @@ -217,7 +187,6 @@ class TicketEvents(Base): consigneePhoneNumber = Column(String, 
nullable=False) # pieces pieces = Column(String, nullable=False) -<<<<<<< HEAD isPickup = Column(Boolean, nullable=False) noSignatureRequired = Column(Boolean, nullable=False) tailgateAuthorized = Column(Boolean, nullable=False) @@ -233,10 +202,6 @@ class CreationMilestones(Base): ticketId = Column( Integer, ForeignKey(TicketStatus.ticketId), nullable=False, index=True ) -======= - user = relationship("Users") - customer = relationship("Customers") ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 newStatus = Column(Enum(Creation_Milestone_Status), nullable=False) @@ -273,7 +238,6 @@ class PickupMilestones(Base): class InventoryMilestones(Base): __tablename__ = "inventorymilestones" -<<<<<<< HEAD milestoneId = Column(Integer, primary_key=True, autoincrement=True) ticketId = Column( Integer, ForeignKey(TicketStatus.ticketId), nullable=False, index=True @@ -383,40 +347,3 @@ class DeliveryMilestones(Base): pass -======= -ticketId_timestamp_idx = Index( - "ticketId_timestamp_idx", TicketEvents.ticketId, TicketEvents.timestamp -) - -INDEXES.append(ticketId_timestamp_idx) - - -ticket_userId_idx = Index("ticket_userId_idx", TicketEvents.userId) - -INDEXES.append(ticket_userId_idx) - -ticket_customerId_idx = Index("ticket_customerId_idx", TicketEvents.customerId) - -INDEXES.append(ticket_customerId_idx) - -gen_milestoneId_idx = Index("gen_milestoneId_idx", GenericMilestones.milestoneId) - -INDEXES.append(gen_milestoneId_idx) - -inv_milestoneId_idx = Index("inv_milestoneId_idx", InventoryMilestones.milestoneId) - -INDEXES.append(inv_milestoneId_idx) - -del_milestoneId_idx = Index("del_milestoneId_idx", DeliveryMilestones.milestoneId) - -INDEXES.append(del_milestoneId_idx) - -print("Configuring DB ...") -Base.metadata.create_all(engine) -try: - # create indexes - for index in INDEXES: - index.create(bind=engine) -except: - pass ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 diff --git a/servers/tenant/server.py b/servers/tenant/server.py index 1e97a6d..6c25924 100644 --- a/servers/tenant/server.py +++ b/servers/tenant/server.py @@ -1,4 +1,3 @@ -<<<<<<< HEAD import os from flask import Flask, Blueprint, jsonify, session # from config import app @@ -15,15 +14,6 @@ # from models.__init__ import engine, Base # from models.models import INDEXES -======= -from tenant.config import app -from blueprints.event_driven.ticket import ticket_bp -from blueprints.simple.customers import customer_bp -from blueprints.simple.users import user_bp -from flask_cors import cross_origin -from tenant.blueprints.simple.document import document_bp # TODO: Move this in seperate microservice -from flask import Blueprint ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 from dotenv import load_dotenv load_dotenv(".env", override=True) @@ -39,10 +29,6 @@ cors = CORS(app, resources={r"/api/*": {"origins": "*"}}) parent = Blueprint("api", __name__, url_prefix="/api") -<<<<<<< HEAD -======= -parent.register_blueprint(document_bp) ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 parent.register_blueprint(ticket_bp) parent.register_blueprint(customer_bp) parent.register_blueprint(user_bp) @@ -51,22 +37,10 @@ parent.register_blueprint(document_bp) -<<<<<<< HEAD -======= - -@app.route("/") -def hello_world(): - return "Server Started!" 
- ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 if __name__ == "__main__": print("REGISTERING BLUEPRINT") app.register_blueprint(parent) -<<<<<<< HEAD app.run(debug=True, host="0.0.0.0", port=6767) -======= - app.run(debug=True, host="0.0.0.0", port=5000) ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 diff --git a/servers/tenant/test/test.py b/servers/tenant/test/test.py index c3c50c7..f89e5c9 100644 --- a/servers/tenant/test/test.py +++ b/servers/tenant/test/test.py @@ -6,11 +6,7 @@ from faker import Faker import os from flask import Flask, jsonify -<<<<<<< HEAD import uuid -======= -import uuid; ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 from sqlalchemy import create_engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker @@ -24,7 +20,6 @@ UserController, CustomerController, TicketController, -<<<<<<< HEAD CreationMilestonesController, PickupMilestonesController, InventoryMilestonesController, @@ -38,11 +33,6 @@ Assignment_Milestone_Status, Delivery_Milestone_Status, Incomplete_Delivery_Milestone_Status, -======= - GenericMilestoneController, - InventoryMilestoneController, - DeliveryMilestoneController, ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 ) from utils import alchemyConverter from utils import AlchemyEncoder @@ -53,7 +43,6 @@ app = Flask(__name__) with app.app_context(): -<<<<<<< HEAD # Controllers creationMilestonesController = CreationMilestonesController() pickupMilestonesController = PickupMilestonesController() @@ -98,8 +87,6 @@ UserType.worker: [], } -======= ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 def generate_users(scale=5): user_controller = UserController() @@ -124,13 +111,8 @@ def generate_users(scale=5): args_arr.append( { -<<<<<<< HEAD "userId": userId, "userType": userTypeValue, -======= - "userId": random.randint(1, 1000000000), - "userType": userType, ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 "username": username, "firstName": firstName, "lastName": lastName, @@ -161,14 +143,7 @@ def generate_customers(scale=2): return customer_controller._create_bulk(args_arr) -<<<<<<< HEAD def generate_ticket_events(scale=400, users=[], customers=[]): -======= - - def generate_ticket_events( - scale=20, users=[], customers=[] - ): ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 ticket_events_controller = TicketController() @@ -204,12 +179,9 @@ def generate_ticket_events( consigneePostalCode = faker.zipcode() consigneePhoneNumber = faker.phone_number() pieces = faker.sentence() -<<<<<<< HEAD isPickup = False noSignatureRequired = False tailgateAuthorized = False -======= ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 obj = ticket_events_controller._create_base_event( { @@ -233,7 +205,6 @@ def generate_ticket_events( "consigneeAddress": consigneeAddress, "consigneePostalCode": consigneePostalCode, "consigneePhoneNumber": consigneePhoneNumber, -<<<<<<< HEAD "pieces": pieces, "isPickup": isPickup, "noSignatureRequired": noSignatureRequired, @@ -486,83 +457,10 @@ def generate_milestone_events(old_tickets): # "approvalStatus": approvalStatus, # } # ) -======= - "pieces": pieces - } - ) - - for i in range(random.randrange(10, 20)): - - userId = random.choice(users).userId - userId = random.choice(users).userId - customerId = random.choice(customers).customerId - barcodeNumber = random.randrange(100000000, 900000000) - houseReferenceNumber = random.randrange(100000000, 900000000) - orderS3Link = "s3link" - weight = random.randrange(100, 200) - claimedNumberOfPieces = random.randrange(1, 5) - 
BOLNumber = random.randrange(100000000, 900000000) - - created_obj = ticket_events_controller._modify_latest_object( - getattr(obj, TicketEvents.non_prim_identifying_column_name), - { - "ticketId": obj.ticketId, - "userId": userId, - "customerId": customerId, - "barcodeNumber": barcodeNumber, - "houseReferenceNumber": houseReferenceNumber, - "orderS3Link": orderS3Link, - "weight": weight, - "claimedNumberOfPieces": claimedNumberOfPieces, - "BOLNumber": BOLNumber, - "specialServices": specialServices, - "specialInstructions": specialInstructions, - }, - ) - - print("Created Ticket") - - - def generate_generic_milestones_events(scale=50, ticket_map=[], users=[]): - - gen_milestone_controller = GenericMilestoneController() - - n = len(session.query(GenericMilestones).distinct().all()) - if n < scale: - print(f"Generating Gen Milestones for {scale - n } Tickets") - - for _ in range(scale - n): - - ticketId = random.choice([k for k in ticket_map]) - - for _ in range( - random.randint(4, 10) - ): # number of milestones per ticket - - milestoneId = random.randint(1, 2147483645) - userId = random.choice(users).userId - - ticketStatus = random.choice( - [e for e in Generic_Ticket_Status] - ).value.lower() - - obj = gen_milestone_controller._create( - { - "milestoneId": milestoneId, - "ticketEventId": random.choice(ticket_map[ticketId]), - "userId": userId, - "ticketStatus": ticketStatus, - } - ) ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 # print("Created Inventory Milestone") -<<<<<<< HEAD # def generate_delivery_milestones_events(scale=50, ticket_map=[], users=[]): -======= - def generate_inventory_milestones_events(scale=50, ticket_map=[], users=[]): ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 # gen_milestone_controller = DeliveryMilestoneController() @@ -575,24 +473,9 @@ def generate_inventory_milestones_events(scale=50, ticket_map=[], users=[]): # ticketId = random.choice([k for k in ticket_map]) -<<<<<<< HEAD # for _ in range( # random.randint(4, 10) # ): # number of milestones per ticket -======= - print("Created Inventory Milestone") - - def generate_delivery_milestones_events(scale=50, ticket_map=[], users=[]): - - gen_milestone_controller = DeliveryMilestoneController() - - n = len(session.query(DeliveryMilestones).distinct().all()) - - if n < scale: - print(f"Generating Delivery Milestones for {scale - n } Tickets") - - for _ in range(scale - n): ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 # milestoneId = random.randint(1, 2147483645) # userId = random.choice(users).userId @@ -617,23 +500,7 @@ def generate_delivery_milestones_events(scale=50, ticket_map=[], users=[]): # print("Created Delivery Milestone") -<<<<<<< HEAD generate_users(scale=70) -======= - obj = gen_milestone_controller._create( - { - "milestoneId": milestoneId, - "ticketEventId": random.choice(ticket_map[ticketId]), - "userId": userId, - "ticketStatus": ticketStatus, - "approvalStatus": approvalStatus, - } - ) - - print("Created Delivery Milestone") - - generate_users(scale=5) ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 users = session.query(Users).all() # print(random.choice(users)) @@ -643,7 +510,6 @@ def generate_delivery_milestones_events(scale=50, ticket_map=[], users=[]): # pprint(alchemyConverter(users[0])) -<<<<<<< HEAD oldTickets = ( session.query(TicketEvents) .with_entities(TicketEvents.ticketId) @@ -654,10 +520,6 @@ def generate_delivery_milestones_events(scale=50, ticket_map=[], users=[]): generate_ticket_events( scale=500, -======= - generate_ticket_events( - scale=20, ->>>>>>> 
32dee55d98864ba43414c8757ab4abe2e4881f66 users=users, customers=customers, ) @@ -669,10 +531,6 @@ def generate_delivery_milestones_events(scale=50, ticket_map=[], users=[]): # exit() -<<<<<<< HEAD -======= - ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 pprint(alchemyConverter(ticketEvents[0])) exit() diff --git a/servers/tenant/utils.py b/servers/tenant/utils.py index 4f5f045..3429fb6 100644 --- a/servers/tenant/utils.py +++ b/servers/tenant/utils.py @@ -53,11 +53,6 @@ def alchemyConverter(obj): if type(obj) == list: res = [] for ele in obj: -<<<<<<< HEAD -======= - print("ALCHEMY DEBUG ---------------------------") - print("TID: " + str(ele.ticketId)) ->>>>>>> 32dee55d98864ba43414c8757ab4abe2e4881f66 json_res = alchemyConvertUtil(ele, {}, visited=set()) res.append(json_res) return res From 9aa601b27ce4efe7698dc0da6ddd8416c4781e95 Mon Sep 17 00:00:00 2001 From: Satwik Pattanaik Date: Thu, 21 Jul 2022 23:29:25 -0400 Subject: [PATCH 32/40] enable cognito --- servers/tenant/blueprints/event_driven/ticket.py | 12 ++++++------ servers/tenant/blueprints/simple/customers.py | 6 +++--- servers/tenant/blueprints/simple/document.py | 5 ++++- servers/tenant/blueprints/simple/driver.py | 4 ++-- servers/tenant/blueprints/simple/milestones.py | 6 +++--- servers/tenant/blueprints/simple/users.py | 10 +++++----- 6 files changed, 23 insertions(+), 20 deletions(-) diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py index 436aa63..de16ddc 100644 --- a/servers/tenant/blueprints/event_driven/ticket.py +++ b/servers/tenant/blueprints/event_driven/ticket.py @@ -16,7 +16,7 @@ alchemyConverter, ) -# from flask_cognito_lib.decorators import auth_required +from flask_cognito_lib.decorators import auth_required ticket_bp = Blueprint("ticket_bp", __name__, url_prefix="ticket") @@ -56,7 +56,7 @@ @ticket_bp.route("/status/", methods=["GET"]) -#@auth_required() +@auth_required() def ticket_get_all_with_status(status): # create ticket limit = 5000 if "limit" not in request.args else request.args["limit"] @@ -80,7 +80,7 @@ def ticket_get_all_with_status(status): # create ticket @ticket_bp.route("/", methods=["POST"]) -#@auth_required() +@auth_required() def ticket_post(): # create ticket print("Creating ticket from the following JSON:") print(request.data) @@ -98,7 +98,7 @@ def ticket_post(): # create ticket # TODO fix primary key issue, ticketeventID needs to be unique for edits @ticket_bp.route("/", methods=["POST"]) -#@auth_required() +@auth_required() def ticket_edit(ticket_id): # create ticket print("Creating ticket from the following JSON:") print(request.data) @@ -155,7 +155,7 @@ def default_end(): @ticket_bp.route("/", methods=["GET"]) -#@auth_required() +@auth_required() def ticket_get_all(): filters = request.args or {} sql_filters = get_clean_filters_dict(filters) @@ -192,7 +192,7 @@ def get_single(ticket_id): return data[0] if isinstance(data, list) else data @ticket_bp.route("/", methods=["GET"]) -#@auth_required() +@auth_required() def ticket_get(ticket_id): data = get_single(ticket_id) res = alchemyConverter(data) diff --git a/servers/tenant/blueprints/simple/customers.py b/servers/tenant/blueprints/simple/customers.py index 01d92cf..b5108e9 100644 --- a/servers/tenant/blueprints/simple/customers.py +++ b/servers/tenant/blueprints/simple/customers.py @@ -7,7 +7,7 @@ from controllers.controllerMapper import CustomerController from models.models import Users -# from flask_cognito_lib.decorators import auth_required +from 
flask_cognito_lib.decorators import auth_required from utils import ( AlchemyEncoder, alchemyConverter @@ -20,14 +20,14 @@ @customer_bp.route("/", methods=["POST"]) -#@auth_required() +@auth_required() def customer_post(): # create ticket customer = customer_controller._create(json.loads(request.data)) response = {"customerId": customer.customerId} return make_response(json.dumps(response)) @customer_bp.route("/", methods=["GET"]) -#@auth_required() +@auth_required() def customer_get(): # create ticket limit = 5000 if "limit" not in request.args else request.args["limit"] if "limit" in request.args: diff --git a/servers/tenant/blueprints/simple/document.py b/servers/tenant/blueprints/simple/document.py index c917420..9e5c983 100644 --- a/servers/tenant/blueprints/simple/document.py +++ b/servers/tenant/blueprints/simple/document.py @@ -10,7 +10,8 @@ import PyPDF2 import extraction.app as ex from celery import group -import json +import json +from flask_cognito_lib.decorators import auth_required from utils import ( AlchemyEncoder, alchemyConverter, @@ -24,6 +25,7 @@ document_status_controller = DocumentStatusController() document_controller = DocumentController() @document_bp.route("/", methods=["POST"]) +@auth_required() def document_post(): if "file" not in request.files: res = jsonify({"message": "No file part in the request"}) @@ -52,6 +54,7 @@ def document_post(): @document_bp.route("/", methods=["GET"]) +@auth_required() def document_get(document_id): filters = {"documentStatusId": document_id} documents = document_controller._get(filters) diff --git a/servers/tenant/blueprints/simple/driver.py b/servers/tenant/blueprints/simple/driver.py index d59f4ca..4f7ba1e 100644 --- a/servers/tenant/blueprints/simple/driver.py +++ b/servers/tenant/blueprints/simple/driver.py @@ -7,7 +7,7 @@ from controllers.controllerMapper import UserController from models.models import UserType -# from flask_cognito_lib.decorators import auth_required +from flask_cognito_lib.decorators import auth_required from utils import ( AlchemyEncoder, alchemyConverter @@ -20,7 +20,7 @@ @driver_bp.route("/", methods=["GET"]) -#@auth_required() +@auth_required() def driver_get(): drivers = user_controller._get({'userType': UserType.driver.value}) diff --git a/servers/tenant/blueprints/simple/milestones.py b/servers/tenant/blueprints/simple/milestones.py index 8035137..8dad9f1 100644 --- a/servers/tenant/blueprints/simple/milestones.py +++ b/servers/tenant/blueprints/simple/milestones.py @@ -17,7 +17,7 @@ DeliveryMilestonesController, TicketStatusController, ) -# from flask_cognito_lib.decorators import auth_required +from flask_cognito_lib.decorators import auth_required from models.models import ( CreationMilestones, @@ -47,7 +47,7 @@ @milestone_bp.route("/", methods=["GET"]) -#@auth_required() +@auth_required() def milestone_get(ticket_id): # create ticket filters = { @@ -70,7 +70,7 @@ def milestone_get(ticket_id): # create ticket @milestone_bp.route("/", methods=["POST"]) -#@auth_required() +@auth_required() def milestone_post(milestone_type): # create ticket milestone_class = getattr(sys.modules[__name__], milestone_type) milestone_controller = class_to_cntrl_map[milestone_class] diff --git a/servers/tenant/blueprints/simple/users.py b/servers/tenant/blueprints/simple/users.py index 2d5b249..7bb1f44 100644 --- a/servers/tenant/blueprints/simple/users.py +++ b/servers/tenant/blueprints/simple/users.py @@ -8,7 +8,7 @@ from controllers.controllerMapper import UserController from models.models import Users -# from 
flask_cognito_lib.decorators import auth_required +from flask_cognito_lib.decorators import auth_required user_bp = Blueprint("user_bp", __name__, url_prefix="user") @@ -16,14 +16,14 @@ user_controller = UserController() @user_bp.route("/", methods=["GET"]) -#@auth_required() +@auth_required() def user_get(): # create ticket user_controller._get(**request.form["user"]) return "success" @user_bp.route("/", methods=["POST"]) -#@auth_required() +@auth_required() def user_post(): # create ticket (request.get_json(force=True)['user']) @@ -32,7 +32,7 @@ def user_post(): # create ticket @user_bp.route("", methods=["PUT"]) -#@auth_required() +@auth_required() def user_modify(): userId = request.form["userId"] @@ -43,7 +43,7 @@ def user_modify(): @user_bp.route("/", methods=["DELETE"]) -#@auth_required() +@auth_required() def user_delete(): userId = request.args.get("userId") user_controller._delete(userId) From 2ecc006ae9226f4f0a057bffbde393ee64b8be8d Mon Sep 17 00:00:00 2001 From: Satwik Pattanaik Date: Thu, 21 Jul 2022 23:51:28 -0400 Subject: [PATCH 33/40] docker changes for flask-cognito-lib --- servers/app.Dockerfile | 9 +++++++++ servers/celery.Dockerfile | 9 +++++++++ 2 files changed, 18 insertions(+) diff --git a/servers/app.Dockerfile b/servers/app.Dockerfile index 9497d24..bc6a280 100644 --- a/servers/app.Dockerfile +++ b/servers/app.Dockerfile @@ -4,5 +4,14 @@ RUN apt-get update && apt-get -y install qpdf poppler-utils && apt-get install - COPY requirements.txt . RUN pip3 install --upgrade pip RUN pip3 install -r requirements.txt +RUN mkdir /root/.ssh/ +# -------------- Manual Setup Required -------------------- # +# Must get Deploy Key and drop it in the server/tenant dir # +COPY flask-cognito-lib_deploy /root/.ssh/flask-cognito-lib_deploy +# --------------------------------------------------------- # +RUN touch /root/.ssh/known_hosts +RUN ssh-keyscan github.com >> /root/.ssh/known_hosts +RUN git - C /root clone git@github.com:ShipSolver/flask-cognito-lib.git +RUN pip3 install -e /root/flask-cognito-lib WORKDIR /opt/metadata-extraction ENV PYTHONPATH . diff --git a/servers/celery.Dockerfile b/servers/celery.Dockerfile index e214354..a971a00 100644 --- a/servers/celery.Dockerfile +++ b/servers/celery.Dockerfile @@ -4,5 +4,14 @@ RUN apt -y install tesseract-ocr && apt -y install libtesseract-dev COPY requirements.txt . RUN pip3 install --upgrade pip RUN pip3 install -r requirements.txt +RUN mkdir /root/.ssh/ +# -------------- Manual Setup Required -------------------- # +# Must get Deploy Key and drop it in the server/tenant dir # +COPY flask-cognito-lib_deploy /root/.ssh/flask-cognito-lib_deploy +# --------------------------------------------------------- # +RUN touch /root/.ssh/known_hosts +RUN ssh-keyscan github.com >> /root/.ssh/known_hosts +RUN git - C /root clone git@github.com:ShipSolver/flask-cognito-lib.git +RUN pip3 install -e /root/flask-cognito-lib WORKDIR /opt/metadata-extraction/tenant ENV PYTHONPATH .. From e0e035fa7fd0b8c449c8106a59691bb9ba2c75f1 Mon Sep 17 00:00:00 2001 From: Satwik Pattanaik Date: Thu, 21 Jul 2022 23:51:28 -0400 Subject: [PATCH 34/40] docker changes for flask-cognito-lib --- servers/app.Dockerfile | 9 +++++++++ servers/celery.Dockerfile | 9 +++++++++ 2 files changed, 18 insertions(+) diff --git a/servers/app.Dockerfile b/servers/app.Dockerfile index 9497d24..64ae7ea 100644 --- a/servers/app.Dockerfile +++ b/servers/app.Dockerfile @@ -4,5 +4,14 @@ RUN apt-get update && apt-get -y install qpdf poppler-utils && apt-get install - COPY requirements.txt . 
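# A rough sketch (not part of this patch; paths and the key name are assumptions)
# of the private-repo install these images are after, shown as comments so the
# hunk around it stays readable. Git's directory flag is written -C with no
# space, and ssh refuses private keys that are group/world readable:
#   COPY flask-cognito-lib_deploy /root/.ssh/flask-cognito-lib_deploy
#   RUN chmod 600 /root/.ssh/flask-cognito-lib_deploy && \
#       ssh-keyscan github.com >> /root/.ssh/known_hosts && \
#       GIT_SSH_COMMAND="ssh -i /root/.ssh/flask-cognito-lib_deploy" \
#       git -C /root clone git@github.com:ShipSolver/flask-cognito-lib.git && \
#       pip3 install -e /root/flask-cognito-lib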
RUN pip3 install --upgrade pip RUN pip3 install -r requirements.txt +RUN mkdir /root/.ssh/ +# ---------------- Manual Setup Required ------------------ # +# Must get Deploy Key and drop it into the server dir # +COPY flask-cognito-lib_deploy /root/.ssh/flask-cognito-lib_deploy +# --------------------------------------------------------- # +RUN touch /root/.ssh/known_hosts +RUN ssh-keyscan github.com >> /root/.ssh/known_hosts +RUN git - C /root clone git@github.com:ShipSolver/flask-cognito-lib.git +RUN pip3 install -e /root/flask-cognito-lib WORKDIR /opt/metadata-extraction ENV PYTHONPATH . diff --git a/servers/celery.Dockerfile b/servers/celery.Dockerfile index e214354..42bcb24 100644 --- a/servers/celery.Dockerfile +++ b/servers/celery.Dockerfile @@ -4,5 +4,14 @@ RUN apt -y install tesseract-ocr && apt -y install libtesseract-dev COPY requirements.txt . RUN pip3 install --upgrade pip RUN pip3 install -r requirements.txt +RUN mkdir /root/.ssh/ +# ---------------- Manual Setup Required ------------------ # +# Must get Deploy Key and drop it into the server dir # +COPY flask-cognito-lib_deploy /root/.ssh/flask-cognito-lib_deploy +# --------------------------------------------------------- # +RUN touch /root/.ssh/known_hosts +RUN ssh-keyscan github.com >> /root/.ssh/known_hosts +RUN git - C /root clone git@github.com:ShipSolver/flask-cognito-lib.git +RUN pip3 install -e /root/flask-cognito-lib WORKDIR /opt/metadata-extraction/tenant ENV PYTHONPATH .. From 400662b53aad87d819c863d2af21aa5207f09edb Mon Sep 17 00:00:00 2001 From: Satwik Date: Fri, 22 Jul 2022 14:32:52 +0000 Subject: [PATCH 35/40] docker file changes --- servers/app.Dockerfile | 15 +++++---------- servers/celery.Dockerfile | 12 +++--------- servers/docker-compose.yml | 2 +- 3 files changed, 9 insertions(+), 20 deletions(-) diff --git a/servers/app.Dockerfile b/servers/app.Dockerfile index 64ae7ea..b2ab061 100644 --- a/servers/app.Dockerfile +++ b/servers/app.Dockerfile @@ -1,17 +1,12 @@ FROM python:3.9 -EXPOSE 5000 RUN apt-get update && apt-get -y install qpdf poppler-utils && apt-get install -y build-essential libpoppler-cpp-dev pkg-config python-dev -COPY requirements.txt . +RUN apt -y install libpq-dev +COPY tenant/requirements.txt . RUN pip3 install --upgrade pip RUN pip3 install -r requirements.txt -RUN mkdir /root/.ssh/ -# ---------------- Manual Setup Required ------------------ # -# Must get Deploy Key and drop it into the server dir # -COPY flask-cognito-lib_deploy /root/.ssh/flask-cognito-lib_deploy -# --------------------------------------------------------- # -RUN touch /root/.ssh/known_hosts -RUN ssh-keyscan github.com >> /root/.ssh/known_hosts -RUN git - C /root clone git@github.com:ShipSolver/flask-cognito-lib.git +run pip3 install psycopg2 +RUN git -C /root clone https://github.com/ShipSolver/flask-cognito-lib.git RUN pip3 install -e /root/flask-cognito-lib WORKDIR /opt/metadata-extraction ENV PYTHONPATH . +EXPOSE 6767 diff --git a/servers/celery.Dockerfile b/servers/celery.Dockerfile index 42bcb24..f91441c 100644 --- a/servers/celery.Dockerfile +++ b/servers/celery.Dockerfile @@ -1,17 +1,11 @@ FROM python:3.9 RUN apt-get update && apt-get -y install qpdf poppler-utils && apt-get install -y build-essential libpoppler-cpp-dev pkg-config python-dev RUN apt -y install tesseract-ocr && apt -y install libtesseract-dev -COPY requirements.txt . +COPY tenant/requirements.txt . 
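# A note on the Postgres driver added a few lines below (a sketch, not part of
# this patch): psycopg2 compiles against libpq, which is what the libpq-dev apt
# package above is for; if a compiler-free build is preferred, the prebuilt
# wheel works as well:
#   RUN pip3 install psycopg2-binary
# The celery image in this same patch already takes that route.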
RUN pip3 install --upgrade pip RUN pip3 install -r requirements.txt -RUN mkdir /root/.ssh/ -# ---------------- Manual Setup Required ------------------ # -# Must get Deploy Key and drop it into the server dir # -COPY flask-cognito-lib_deploy /root/.ssh/flask-cognito-lib_deploy -# --------------------------------------------------------- # -RUN touch /root/.ssh/known_hosts -RUN ssh-keyscan github.com >> /root/.ssh/known_hosts -RUN git - C /root clone git@github.com:ShipSolver/flask-cognito-lib.git +run pip3 install psycopg2-binary +RUN git -C /root clone https://github.com/ShipSolver/flask-cognito-lib.git RUN pip3 install -e /root/flask-cognito-lib WORKDIR /opt/metadata-extraction/tenant ENV PYTHONPATH .. diff --git a/servers/docker-compose.yml b/servers/docker-compose.yml index 1601a98..28e186d 100644 --- a/servers/docker-compose.yml +++ b/servers/docker-compose.yml @@ -36,7 +36,7 @@ services: - .:/opt/metadata-extraction container_name: app01 ports: - - "6767:6767" + - "6768:6767" command: python3 tenant/server.py tty: true flower: From a17a30bf62d39361b850293f58beeebfae08c0f4 Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Fri, 22 Jul 2022 10:58:19 -0400 Subject: [PATCH 36/40] Dockerization complete --- servers/docker-compose.yml | 2 +- servers/tenant/server.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/servers/docker-compose.yml b/servers/docker-compose.yml index 28e186d..1601a98 100644 --- a/servers/docker-compose.yml +++ b/servers/docker-compose.yml @@ -36,7 +36,7 @@ services: - .:/opt/metadata-extraction container_name: app01 ports: - - "6768:6767" + - "6767:6767" command: python3 tenant/server.py tty: true flower: diff --git a/servers/tenant/server.py b/servers/tenant/server.py index 6c25924..f4e56a9 100644 --- a/servers/tenant/server.py +++ b/servers/tenant/server.py @@ -10,7 +10,7 @@ from flask_cors import CORS -# from flask_cognito_lib import CognitoAuth +from flask_cognito_lib import CognitoAuth # from models.__init__ import engine, Base # from models.models import INDEXES @@ -25,7 +25,7 @@ app.config["AWS_COGNITO_USER_POOL_CLIENT_ID"] = os.environ["AWS_COGNITO_USER_POOL_CLIENT_ID"] app.config["AWS_COGNITO_DOMAIN"] = os.environ["AWS_COGNITO_DOMAIN"] -# auth = CognitoAuth(app) +auth = CognitoAuth(app) cors = CORS(app, resources={r"/api/*": {"origins": "*"}}) parent = Blueprint("api", __name__, url_prefix="/api") From 39d08f8a2676b5ca95e47efbb1ddac08624087c6 Mon Sep 17 00:00:00 2001 From: Dante Mazza Date: Fri, 22 Jul 2022 11:07:56 -0400 Subject: [PATCH 37/40] Extraction --- servers/extraction/app.py | 40 ++++ servers/extraction/const.py | 48 ++++ servers/extraction/extract.py | 349 ++++++++++++++++++++++++++++ servers/extraction/ocr.py | 22 ++ servers/extraction/requirements.txt | 51 ++++ 5 files changed, 510 insertions(+) create mode 100644 servers/extraction/app.py create mode 100644 servers/extraction/const.py create mode 100644 servers/extraction/extract.py create mode 100644 servers/extraction/ocr.py create mode 100644 servers/extraction/requirements.txt diff --git a/servers/extraction/app.py b/servers/extraction/app.py new file mode 100644 index 0000000..93e50c5 --- /dev/null +++ b/servers/extraction/app.py @@ -0,0 +1,40 @@ +import os +from multilingual_pdf2text.pdf2text import PDF2Text +from multilingual_pdf2text.models.document_model.document import Document +import pdfplumber +import extraction.extract as e +import json +from celery.utils.log import get_logger + +logger = get_logger(__name__) + +def read_pdfplumber(file_name): + with 
pdfplumber.open(file_name) as pdf: + page = pdf.pages[0] + page = page.extract_text() + return page + + +def work(folder_path): + pdf_uuid = folder_path.split("/")[-1] + pdf_file = f"{folder_path}/{pdf_uuid}.pdf" + print(f"Working on {pdf_file}...") + pdf_document = Document( + document_path=pdf_file, + language='eng' + ) + pdf2text = PDF2Text(document=pdf_document) + content = pdf2text.extract() + + ml_page_text = list(content)[0]["text"] + pp_text = read_pdfplumber(pdf_file) + for i in range(14): + logger.info("WE HERE----------------") + extract_json = e.generate_doclist(e.extract(ml_page_text, plumber_page=pp_text)) + + with open(f"{folder_path}/{pdf_uuid}.json", "w") as f: + json.dump(extract_json, f, indent=2) + return extract_json + +if __name__ == '__main__': + work("uploads/bf0c396f-dcc6-4d3f-8d7c-9180d2f0a322/cedc5b27-2a94-4e17-ac48-65c13e065102") \ No newline at end of file diff --git a/servers/extraction/const.py b/servers/extraction/const.py new file mode 100644 index 0000000..ceb86da --- /dev/null +++ b/servers/extraction/const.py @@ -0,0 +1,48 @@ +CEVA = "CEVA" +NORTH_AMERICAN = "NORTH_AMERICAN" + +CEVA_NUM = "1-888-327-8247" + + +#doclist_keys + +BARCODE = "barcodeNumber" +HOUSE_REF = "houseReferenceNumber" +WEIGHT = "weight" +NUM_PCS = "claimedNumberOfPieces" +BOL_NUM = "BOLNumber" +SPECIAL_SERVICES = "specialServices" +SPECIAL_INSTRUCTIONS = "specialInstructions" +CONSIGNEE = "consignee" +SHIPPER = "shipper" +COMPANY = "Company" +NAME = "Name" +ADDRESS = "Address" +POSTAL_CODE = "PostalCode" +PHONE_NUMBER = "PhoneNumber" + +NO_SIGNATURE_REQUIRED = "noSignatureRequired" +TAILGATE_AUTHORIZED = "tailgateAuthorized" + +FIRST_PARTY = "customerName" + +PCS = "pieces" + +PKG = "pkg" +WT_LBS = "weight" +COMMODITY_DESCRIPTION = "commodity_description" +DIMS_IN = "dims(in)" + + +CEVA_SHIPPER_FIELDS = [COMPANY, ADDRESS] +CEVA_CONSIGNEE_FIELDS = [NAME, ADDRESS] + +NORTH_AMERICAN_SHIPPER_FIELDS = [COMPANY, NAME, COMPANY, ADDRESS] +NORTH_AMERICAN_CONSIGNEE_FIELDS = [COMPANY, NAME, COMPANY, ADDRESS] +BARCODE_REGEX = "([A-Z][A-Z]\d{3}-\d{7})" +PCS_REGEX = "(\d+) +PCS" +LBS_REGEX = "(\d+) +[Ll]bs" +POSTAL_CODE_REGEX_BOTH = "[ABCEGHJ-NPRSTVXY][\dO][ABCEGHJ-NPRSTV-Z][ -]?[\dO][ABCEGHJ-NPRSTV-Z][\dO]$" +PHONE_NUMBER_REGEX = "((\+\d{1,2}\s)?\(?(905|807|705|647|613|519|416|343|289|226)\)?[\s.-]?\d{3}[\s.-]?\d{4})" + +PHONE_COLON_REGEX = "ne: (\d{10})" \ No newline at end of file diff --git a/servers/extraction/extract.py b/servers/extraction/extract.py new file mode 100644 index 0000000..beb2473 --- /dev/null +++ b/servers/extraction/extract.py @@ -0,0 +1,349 @@ +import os +import ocrmypdf +import time +import re +from extraction.const import * +import pdfplumber + +def ocr(file_path, save_path): + ocrmypdf.ocr(file_path, save_path) + +def read_pdf(file_name, page_num): + file_path = os.path.join("../text", file_name.split(".")[0], f"{page_num}.txt") + with open(file_path, "r") as f: + return f.read() + +def read_pdfplumber(file_name, page_num): + with pdfplumber.open("../data/NORTH_AMERICAN.pdf") as pdf: + page = pdf.pages[page_num-1] + page = page.extract_text() + return page + + +""" +CEVA + 1st party + Consignee info + Name + Addr + Postal Code + Phone Number + House/ref + Barcode + Lbs + # PCs + Shipper + Special Instructions +""" +def extract_ceva(page): + lines = page.splitlines() + ceva_list = {FIRST_PARTY: CEVA} + # barcode + matches = re.findall(BARCODE_REGEX, page) + if matches: + insert_in_dict(ceva_list, BARCODE, matches[0]) + # NUM PCS + matches = re.findall(PCS_REGEX, page) + if 
matches: + insert_in_dict(ceva_list, NUM_PCS, matches[0]) + # weight + matches = re.findall(LBS_REGEX, page) + if matches: + insert_in_dict(ceva_list, WEIGHT, f"{matches[0]} lbs") + # phone number + matches = re.findall(PHONE_NUMBER_REGEX, page) + consignee_phone_number = matches[0][0] if matches else "" + + for line_num, line in enumerate(lines): + # house ref # + if "house" in line.lower() or "ref #" in line.lower(): + insert_in_dict(ceva_list, HOUSE_REF, line.split(" ")[-1]) + # shipper + if line.lower().startswith('shipper') or line.lower().endswith('expéditeur'): + shipper = extract_info_ceva(lines, line_num) + insert_in_dict(ceva_list, SHIPPER, shipper) + + #consignee + if is_consignee(line): + consignee = extract_info_ceva(lines, line_num, is_shipper=False) + insert_in_dict(consignee, PHONE_NUMBER, consignee_phone_number) + insert_in_dict(ceva_list, CONSIGNEE, consignee) + if "instructions" in line.lower(): + special_instructions = extract_special(lines, line_num, ["reference"]) + insert_in_dict(ceva_list, SPECIAL_INSTRUCTIONS, special_instructions) + + return ceva_list + + +def is_consignee(line): + return line.lower().startswith('consignee') or line.lower().endswith('consignataire') + + +def extract_info_ceva(lines, starting_num, is_shipper=True): + + field_index = 0 + curr_field_entry = "" + shipper_dict = {} + FIELDS = CEVA_SHIPPER_FIELDS if is_shipper else CEVA_CONSIGNEE_FIELDS + for index in range(starting_num+1, len(lines)): + if not lines[index]: + continue + # name or company + if field_index == 0: + if starts_with_number(lines[index]): + field_index += 1 + shipper_dict[FIELDS[field_index-1]] = curr_field_entry.rstrip() + curr_field_entry = "" + else: + curr_field_entry += lines[index] + " " + + if FIELDS[field_index] == ADDRESS: + curr_field_entry += lines[index] + " " + if is_consignee(lines[index]) or re.findall(POSTAL_CODE_REGEX_BOTH, lines[index]): + shipper_dict[ADDRESS] = curr_field_entry.rstrip() + break + + for field in FIELDS: + if field not in shipper_dict: + shipper_dict[field] = "" + + postal_code = extract_postal_code(shipper_dict[ADDRESS]) + insert_in_dict(shipper_dict, POSTAL_CODE, postal_code) + + return shipper_dict + + +def starts_with_number(line): + return line.split(" ")[0].isnumeric() + + +def extract_special(lines, starting_num, keywords): + entry = "" + outer_break = False + for index in range(starting_num+1, len(lines)): + if not lines[index]: + continue + for keyword in keywords: + if keyword in lines[index].lower(): + outer_break = True + break + if outer_break: + break + entry += lines[index] + " " + + return entry.rstrip() + + +""" + +North American + 1st party + BOL # + Consignee information + # PCS + DIMS + Special Services + +""" + +def extract_north_american(page, page_2): + lines = page.splitlines() + north_american_list = {FIRST_PARTY: NORTH_AMERICAN} + + # phone number + matches = re.findall(PHONE_COLON_REGEX, page) + shipper_phone_number = matches[0] if matches else "" + consignee_phone_number = matches[1] if len(matches) > 1 else "" + + for line_num, line in enumerate(lines): + # ref # + if "ref#" in line.lower(): + ref_num = line.split(" ")[-1] + if "ref" not in ref_num.lower(): + insert_in_dict(north_american_list, HOUSE_REF, line.split(":")[-1].strip()) + # BOL # + if "bol" in line.lower(): + bol_num = line.split(" ")[-1] + if "bol" not in bol_num.lower(): + insert_in_dict(north_american_list, BOL_NUM, line.split(" ")[-1]) + # shipper + if "shipper" in line.lower(): + shipper = extract_info_north_american(lines, line_num) + 
insert_in_dict(shipper, PHONE_NUMBER, shipper_phone_number) + insert_in_dict(north_american_list, SHIPPER, shipper) + # consignee + if "consignee" in line.lower(): + consignee = extract_info_north_american(lines, line_num, is_shipper=False) + insert_in_dict(consignee, PHONE_NUMBER, consignee_phone_number) + insert_in_dict(north_american_list, CONSIGNEE, consignee) + #special services + if "services" in line.lower(): + special_services = extract_special(lines, line_num, ["question", "issue", "905-277-2000"]) + insert_in_dict(north_american_list, SPECIAL_SERVICES, special_services) + + lines = page_2.splitlines() + for line_num, line in enumerate(lines): + if "pkg" in line.lower() or "wt(lbs)" in line.lower(): + pcs = extract_pcs(lines, line_num) + insert_in_dict(north_american_list, PCS, pcs) + + + return north_american_list + + +def extract_pcs(lines, starting_num): + pcs = [] + num_pcs = 0 + weight = 0 + for index in range(starting_num+1, len(lines)): + if len(lines[index]) < 13: + _num_pcs, _weight = [float(x) for x in lines[index].split(" ")] + assert _num_pcs == num_pcs and _weight == weight + break + second_space = lines[index].find(" ", lines[index].find(" ") + 1) + dim_nums = [re.findall("\d+\.\d+", x)[0] for x in lines[index].split(" ")[-3:]] + pkg, wt = lines[index].split(" ")[:2] + num_pcs += 1 + weight += float(wt) + commodity_description = lines[index][second_space:].split(dim_nums[0])[0].lstrip().rstrip() + dims = ' x '.join(dim_nums) + pcs.append({PKG: pkg, WT_LBS: wt, COMMODITY_DESCRIPTION: commodity_description, DIMS_IN: dims}) + + return pcs + +def extract_info_north_american(lines, starting_num, is_shipper=True): + field_index = 0 + curr_field_entry = "" + shipper_dict = {} + FIELDS = NORTH_AMERICAN_SHIPPER_FIELDS if is_shipper else NORTH_AMERICAN_CONSIGNEE_FIELDS + company = False + company_1 = "" + name = "" + company_2 = "" + address = "" + for index in range(starting_num+1, len(lines)): + if not lines[index]: + continue + if field_index == 0: + if "contact" in lines[index].lower(): + company = True + name = lines[index].split(": ")[-1] + company_1 = curr_field_entry.rstrip() + curr_field_entry = "" + field_index += 2 + continue + curr_field_entry += lines[index] + " " + if company: + if starts_with_number(lines[index]): + company = False + field_index += 1 + company_2 = curr_field_entry.rstrip() + curr_field_entry = "" + else: + curr_field_entry += lines[index] + " " + + + if FIELDS[field_index] == ADDRESS: + curr_field_entry += lines[index] + " " + if is_consignee(lines[index]) or re.findall(POSTAL_CODE_REGEX_BOTH, lines[index]): + address = curr_field_entry.rstrip() + break + + if is_shipper: + shipper_dict[COMPANY] = company_2 + shipper_dict[ADDRESS] = address + else: + shipper_dict[COMPANY] = company_1 + shipper_dict[NAME] = name + shipper_dict[ADDRESS] = (company_2 + ", " if company_2 else "") + address + + for field in FIELDS: + if field not in shipper_dict: + shipper_dict[field] = "" + + postal_code = extract_postal_code(shipper_dict[ADDRESS]) + insert_in_dict(shipper_dict, POSTAL_CODE, postal_code) + return shipper_dict + +def generate_doclist(_list): + return { + FIRST_PARTY: _list[FIRST_PARTY] if FIRST_PARTY in _list else "", + HOUSE_REF: _list[HOUSE_REF] if HOUSE_REF in _list else "", + BARCODE: _list[BARCODE] if BARCODE in _list else "", + PCS: _list[PCS] if PCS in _list else [], + NUM_PCS: _list[NUM_PCS] if NUM_PCS in _list else 0, + WEIGHT: _list[WEIGHT] if WEIGHT in _list else "", + BOL_NUM: _list[BOL_NUM] if BOL_NUM in _list else "", + 
SPECIAL_SERVICES: _list[SPECIAL_SERVICES] if SPECIAL_SERVICES in _list else "", + SPECIAL_INSTRUCTIONS: _list[SPECIAL_INSTRUCTIONS] if SPECIAL_INSTRUCTIONS in _list else "", + CONSIGNEE+COMPANY: _list[CONSIGNEE][COMPANY] if CONSIGNEE in _list and COMPANY in _list[CONSIGNEE] else "", + CONSIGNEE+NAME: _list[CONSIGNEE][NAME] if CONSIGNEE in _list and NAME in _list[CONSIGNEE] else "", + CONSIGNEE+ADDRESS: _list[CONSIGNEE][ADDRESS] if CONSIGNEE in _list and ADDRESS in _list[CONSIGNEE] else "", + CONSIGNEE+POSTAL_CODE: _list[CONSIGNEE][POSTAL_CODE] if CONSIGNEE in _list and POSTAL_CODE in _list[CONSIGNEE] else "", + CONSIGNEE+PHONE_NUMBER: _list[CONSIGNEE][PHONE_NUMBER] if CONSIGNEE in _list and PHONE_NUMBER in _list[CONSIGNEE] else "", + SHIPPER+COMPANY: _list[SHIPPER][COMPANY] if SHIPPER in _list and COMPANY in _list[SHIPPER] else "", + SHIPPER+NAME: _list[SHIPPER][NAME] if SHIPPER in _list and NAME in _list[SHIPPER] else "", + SHIPPER+ADDRESS: _list[SHIPPER][ADDRESS] if SHIPPER in _list and ADDRESS in _list[SHIPPER] else "", + SHIPPER+POSTAL_CODE: _list[SHIPPER][POSTAL_CODE] if SHIPPER in _list and POSTAL_CODE in _list[SHIPPER] else "", + SHIPPER+PHONE_NUMBER: _list[SHIPPER][PHONE_NUMBER] if SHIPPER in _list and PHONE_NUMBER in _list[SHIPPER] else "", + NO_SIGNATURE_REQUIRED: _list[NO_SIGNATURE_REQUIRED] if NO_SIGNATURE_REQUIRED in _list else False, + TAILGATE_AUTHORIZED: _list[TAILGATE_AUTHORIZED] if TAILGATE_AUTHORIZED in _list else False + } + + +def extract(page, plumber_page=None): + second_party = predict_second_party(page) + + if second_party == CEVA: + return extract_ceva(page) + elif second_party == NORTH_AMERICAN: + return extract_north_american(page, plumber_page) + + return {} + +def predict_second_party(page): + + if CEVA.lower() in page.lower() or CEVA_NUM in page: + return CEVA + + return NORTH_AMERICAN + + +def insert_in_dict(_dict, key, value): + if not key in _dict: + _dict[key] = value + + +def extract_postal_code(address): + matches = re.findall(f"({POSTAL_CODE_REGEX_BOTH})", address) + if not matches: + return "" + postal_code = matches[0] + + # correct Os to 0s + for i in [-3, -1, 1]: + if postal_code[i] == "O": + postal_code = list(postal_code) + postal_code[i] = "0" + postal_code = ''.join(postal_code) + return postal_code + + +if __name__ == "__main__": + start = time.time() + ceva = read_pdf("CEVA-ocr.pdf", 1) + ceva_list = extract(ceva) + ceva_doclist = generate_doclist(ceva_list) + print(ceva_doclist) + + print() + + north_american_1 = read_pdf("NORTH_AMERICAN.pdf", 1) + north_american_2 = read_pdfplumber("NORTH_AMERICAN.pdf", 1) + north_american_list = extract(north_american_1, plumber_page=north_american_2) + north_american_doclist = generate_doclist(north_american_list) + print(north_american_doclist) + + print(time.time()-start) + + diff --git a/servers/extraction/ocr.py b/servers/extraction/ocr.py new file mode 100644 index 0000000..5ea34c1 --- /dev/null +++ b/servers/extraction/ocr.py @@ -0,0 +1,22 @@ + + +from multilingual_pdf2text.pdf2text import PDF2Text +from multilingual_pdf2text.models.document_model.document import Document +import logging +import time + +if __name__ == "__main__": + stat = time.time() + pdf_document = Document( + document_path="../data/NORTH_AMERICAN.pdf", + language='eng' + ) + pdf2text = PDF2Text(document=pdf_document) + content = pdf2text.extract() + print(time.time()-stat) + + for page in content: + with open(f"text/NORTH_AMERICAN/{page['page_number']}.txt", "w") as f: + f.write(page["text"]) + + diff --git 
diff --git a/servers/extraction/requirements.txt b/servers/extraction/requirements.txt
new file mode 100644
index 0000000..520194d
--- /dev/null
+++ b/servers/extraction/requirements.txt
@@ -0,0 +1,51 @@
+amqp==5.0.9
+billiard==3.6.4.0
+celery==5.2.3
+cffi==1.15.0
+chardet==4.0.0
+click==8.0.3
+click-didyoumean==0.3.0
+click-plugins==1.1.1
+click-repl==0.2.0
+coloredlogs==15.0.1
+cryptography==36.0.1
+Deprecated==1.2.13
+Flask==2.0.2
+humanfriendly==10.0
+img2pdf==0.4.3
+importlib-resources==5.4.0
+itsdangerous==2.0.1
+Jinja2==3.0.3
+kombu==5.2.3
+lxml==4.7.1
+MarkupSafe==2.0.1
+multilingual-pdf2text==1.1.0
+numpy==1.22.0
+ocrmypdf==13.2.0
+opencv-python==4.5.5.62
+packaging==21.3
+pdf2image==1.16.0
+pdfminer.six==20211012
+pdfplumber==0.6.0
+pdftotext==2.2.2
+pikepdf==4.3.1
+Pillow==9.0.0
+pluggy==1.0.0
+prompt-toolkit==3.0.24
+pycparser==2.21
+pydantic==1.9.0
+pyparsing==3.0.6
+PyPDF2==1.26.0
+pytesseract==0.3.8
+pytz==2021.3
+redis==4.1.2
+reportlab==3.6.5
+six==1.16.0
+tqdm==4.62.3
+typing-extensions==4.0.1
+vine==5.0.0
+Wand==0.6.7
+wcwidth==0.2.5
+Werkzeug==2.0.2
+wrapt==1.13.3
+zipp==3.7.0
From efa0367890c295125f6f89529683623e77f9b4c9 Mon Sep 17 00:00:00 2001
From: Dante Mazza
Date: Fri, 22 Jul 2022 12:34:31 -0400
Subject: [PATCH 38/40] satwik

---
 servers/tenant/blueprints/event_driven/ticket.py | 5 ++---
 servers/tenant/controllers/baseController.py | 6 ------
 2 files changed, 2 insertions(+), 9 deletions(-)

diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py
index de16ddc..c5402f9 100644
--- a/servers/tenant/blueprints/event_driven/ticket.py
+++ b/servers/tenant/blueprints/event_driven/ticket.py
@@ -56,7 +56,7 @@


 @ticket_bp.route("/status/<status>", methods=["GET"])
-@auth_required()
+# @auth_required()
 def ticket_get_all_with_status(status): # create ticket

     limit = 5000 if "limit" not in request.args else request.args["limit"]
@@ -72,10 +72,9 @@ def ticket_get_all_with_status(status): # create ticket
         ticket = get_single(ticketId)
         if ticket:
             tickets.append(ticket)
-    tickets = alchemyConverter(data)

     res = {"tickets": tickets, "count": num_tickets}
-
+    print(res)
     return make_response(json.dumps(res, cls=AlchemyEncoder))


diff --git a/servers/tenant/controllers/baseController.py b/servers/tenant/controllers/baseController.py
index a869177..af8cbc3 100644
--- a/servers/tenant/controllers/baseController.py
+++ b/servers/tenant/controllers/baseController.py
@@ -186,9 +186,6 @@ def _get_latest_event_objects_in_range(
         session_filters.append(self.model.timestamp >= time1)
         session_filters.append(self.model.timestamp <= time2)

-        print(
-            "------------------------RUNNING TICKET GET QUERY----------------------------"
-        )
         results = (
             self.session.query(self.model)
             .distinct(self.model.non_prim_identifying_column_name)
@@ -197,9 +194,6 @@ def _get_latest_event_objects_in_range(
             .limit(number_of_res)
             .all()
         )
-        print("----------complete-----------------")
-        for result in results:
-            print("TID " + str(result.ticketId))
         return results

     def _find_latest_prim_key_from_non_prim_identifying_column_val(
From ef635c0c76b609bd36c9f5084b9e22411aa60bd6 Mon Sep 17 00:00:00 2001
From: Dante Mazza
Date: Fri, 22 Jul 2022 14:55:10 -0400
Subject: [PATCH 39/40] document_api

---
 .../tenant/blueprints/event_driven/ticket.py | 52 ++++++++++++-------
 servers/tenant/blueprints/simple/document.py | 5 +-
 .../tenant/blueprints/simple/milestones.py | 4 +-
 servers/tenant/controllers/baseController.py | 7 ++-
 .../tenant/controllers/controllerMapper.py | 27 ++++
 5 files changed, 67 insertions(+), 28 deletions(-)

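Note: the first hunk below reroutes the ticket status listing through TicketStatusController._get_tickets_with_status. A rough client-side sketch of the resulting contract; the port comes from the Dockerfiles later in this series, while the /api prefix and the "delivered" status value are assumptions, not confirmed by these patches:

    import json
    from urllib.request import urlopen

    # GET /api/ticket/status/<status>?limit=N -> {"tickets": [...], "count": <total matching>}
    with urlopen("http://127.0.0.1:6767/api/ticket/status/delivered?limit=50") as resp:
        payload = json.loads(resp.read())
    print(payload["count"], len(payload["tickets"]))
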
diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py
index c5402f9..48928a4 100644
--- a/servers/tenant/blueprints/event_driven/ticket.py
+++ b/servers/tenant/blueprints/event_driven/ticket.py
@@ -55,31 +55,43 @@
 """


+# @ticket_bp.route("/status/<status>", methods=["GET"])
+# # @auth_required()
+# def ticket_get_all_with_status(status): # create ticket
+
+#     limit = 5000 if "limit" not in request.args else request.args["limit"]
+#     sql_filters = get_clean_filters_dict(request.args)
+#     sql_filters["currentStatus"] = status
+#     data = ticket_status_controller._get(sql_filters, limit=limit)
+#     num_tickets = ticket_status_controller._get_count(sql_filters)
+
+#     data = alchemyConverter(data)
+#     ticketIds = [x["ticketId"] for x in data]
+#     tickets = []
+#     for ticketId in ticketIds:
+#         ticket = get_single(ticketId)
+#         if ticket:
+#             tickets.append(ticket)
+#     tickets = alchemyConverter(data)
+
+#     res = {"tickets": tickets, "count": num_tickets}
+#     print(res)
+#     return make_response(json.dumps(res, cls=AlchemyEncoder))
 @ticket_bp.route("/status/<status>", methods=["GET"])
 # @auth_required()
 def ticket_get_all_with_status(status): # create ticket

     limit = 5000 if "limit" not in request.args else request.args["limit"]
-    sql_filters = get_clean_filters_dict(request.args)
-    sql_filters["currentStatus"] = status
-    data = ticket_status_controller._get(sql_filters, limit=limit)
-    num_tickets = ticket_status_controller._get_count(sql_filters)
-
-    data = alchemyConverter(data)
-    ticketIds = [x["ticketId"] for x in data]
-    tickets = []
-    for ticketId in ticketIds:
-        ticket = get_single(ticketId)
-        if ticket:
-            tickets.append(ticket)
-
-    res = {"tickets": tickets, "count": num_tickets}
-    print(res)
-    return make_response(json.dumps(res, cls=AlchemyEncoder))
+    ticket_sql_filters = get_clean_filters_dict(request.args)
+    tickets = ticket_status_controller._get_tickets_with_status(status, ticket_sql_filters, limit)
+    num_tickets = ticket_status_controller._get_count(ticket_sql_filters)
+
+    res = {"tickets": alchemyConverter(tickets), "count": num_tickets}
+    return make_response(json.dumps(res, cls=AlchemyEncoder))


 @ticket_bp.route("/", methods=["POST"])
-@auth_required()
+# @auth_required()
 def ticket_post(): # create ticket
     print("Creating ticket from the following JSON:")
     print(request.data)
@@ -97,7 +109,7 @@ def ticket_post(): # create ticket

 # TODO fix primary key issue, ticketeventID needs to be unique for edits
 @ticket_bp.route("/<ticket_id>", methods=["POST"])
-@auth_required()
+# @auth_required()
 def ticket_edit(ticket_id): # create ticket
     print("Creating ticket from the following JSON:")
     print(request.data)
@@ -154,7 +166,7 @@ def default_end():


 @ticket_bp.route("/", methods=["GET"])
-@auth_required()
+# @auth_required()
 def ticket_get_all():
     filters = request.args or {}
     sql_filters = get_clean_filters_dict(filters)
@@ -170,7 +182,7 @@ def ticket_get_all():
     data = ticket_controller._get_latest_event_objects_in_range(
         dt_start, dt_end, sql_filters, number_of_res=limit
     )
-
+    print(data)
     res = alchemyConverter(data)
     for ticket in res:
         ticket["pieces"] = ticket["pieces"].split(PIECES_SEPERATOR)
diff --git a/servers/tenant/blueprints/simple/document.py b/servers/tenant/blueprints/simple/document.py
index 9e5c983..5836b86 100644
--- a/servers/tenant/blueprints/simple/document.py
+++ b/servers/tenant/blueprints/simple/document.py
@@ -24,8 +24,9 @@ UPLOAD_FOLDER = "/opt/metadata-extraction/uploads"
 document_status_controller = DocumentStatusController()
 document_controller = DocumentController()
+

 @document_bp.route("/", methods=["POST"])
-@auth_required()
+# @auth_required()
 def document_post():
     if "file" not in request.files:
         res = jsonify({"message": "No file part in the request"})
@@ -54,7 +55,7 @@ def document_post():


 @document_bp.route("/<document_id>", methods=["GET"])
-@auth_required()
+# @auth_required()
 def document_get(document_id):
     filters = {"documentStatusId": document_id}
     documents = document_controller._get(filters)
diff --git a/servers/tenant/blueprints/simple/milestones.py b/servers/tenant/blueprints/simple/milestones.py
index 8dad9f1..64e57f4 100644
--- a/servers/tenant/blueprints/simple/milestones.py
+++ b/servers/tenant/blueprints/simple/milestones.py
@@ -47,7 +47,7 @@


 @milestone_bp.route("/<ticket_id>", methods=["GET"])
-@auth_required()
+# @auth_required()
 def milestone_get(ticket_id): # create ticket
     filters = {
@@ -70,7 +70,7 @@ def milestone_get(ticket_id): # create ticket


 @milestone_bp.route("/<milestone_type>", methods=["POST"])
-@auth_required()
+# @auth_required()
 def milestone_post(milestone_type): # create ticket
     milestone_class = getattr(sys.modules[__name__], milestone_type)
     milestone_controller = class_to_cntrl_map[milestone_class]
diff --git a/servers/tenant/controllers/baseController.py b/servers/tenant/controllers/baseController.py
index af8cbc3..0cfa2e2 100644
--- a/servers/tenant/controllers/baseController.py
+++ b/servers/tenant/controllers/baseController.py
@@ -137,19 +137,18 @@ def _get_latest_event_objects(self, page=1, number_of_res=1, filters={}):
         #     .all()
         # )

-        print(*convert_dict_to_alchemy_filters(self.model, filters))
         latest_objs = (
             self.session.query(self.model)
             .distinct(self.model.non_prim_identifying_column_name)
             .filter(*convert_dict_to_alchemy_filters(self.model, filters))
-            .order_by(self.model.timestamp)
+            .order_by(self.model.non_prim_identifying_column_name, self.model.timestamp)
             .limit(1)
             .all()
         )
         # latest_objs = self.session.query(self.model, subquery).order_by(self.model.timestamp).all()
-        print("LATEST_OBJS-------")
-        print(latest_objs)
+        # print("LATEST_OBJS-------")
+        # print(latest_objs)
         return latest_objs

     # def _get_latest_event_objects_from_start_date(self, start_datetime, filters={}):
diff --git a/servers/tenant/controllers/controllerMapper.py b/servers/tenant/controllers/controllerMapper.py
index b18c373..2024fcc 100644
--- a/servers/tenant/controllers/controllerMapper.py
+++ b/servers/tenant/controllers/controllerMapper.py
@@ -21,6 +21,33 @@ def __init__(self):
 class TicketStatusController(BaseController):
     def __init__(self):
         super().__init__(TicketStatus)
+        self.ticket_controller = BaseTimeSeriesController(TicketEvents)
+
+    def _get_tickets_with_status(self, status, filters: dict, limit):
+        tickets = []
+
+        ticketIds = (
+            self.session.query(TicketStatus.ticketId)
+            .filter(TicketStatus.currentStatus == status)
+            .limit(limit)
+            .all()
+        )
+
+        print(filters, ticketIds)
+        for i, tid_tup in enumerate(ticketIds):
+
+            filters_cpy = filters.copy()
+            filters_cpy["ticketId"] = tid_tup[0]
+            print(filters_cpy)
+
+            ticket = self.ticket_controller._get_latest_event_objects(filters=filters_cpy)
+            tickets.append(ticket[0])
+
+            if i == limit:
+                break
+
+
+        return tickets


 class MilestoneController(BaseController):
From b32a29bd04d0f4a6e059dbc58ba1bd96a0d99d7b Mon Sep 17 00:00:00 2001
From: Dante Mazza
Date: Sun, 29 Jan 2023 22:35:59 -0500
Subject: [PATCH 40/40] idek lol

---
 postgres/docker-compose.yml | 2 --
 servers/app.Dockerfile | 6 ++++++
 servers/celery.Dockerfile | 6 ++++++
 servers/tenant/blueprints/event_driven/ticket.py | 4 +++-
 servers/tenant/controllers/baseController.py | 2 +-
 5 files changed, 16 insertions(+), 4 deletions(-)

diff --git a/postgres/docker-compose.yml b/postgres/docker-compose.yml
index 41d3013..2a6c2b6 100644
--- a/postgres/docker-compose.yml
+++ b/postgres/docker-compose.yml
@@ -5,8 +5,6 @@ services:
     image: postgres:latest
     ports:
       - 5432:5432
-    env_file:
-      - env/postgres.env
     environment:
       POSTGRES_DB: tenant_db
       POSTGRES_HOST_AUTH_METHOD: trust
diff --git a/servers/app.Dockerfile b/servers/app.Dockerfile
index c2efb35..03cecd4 100644
--- a/servers/app.Dockerfile
+++ b/servers/app.Dockerfile
@@ -10,3 +10,9 @@ RUN pip3 install -e /root/flask-cognito-lib
 WORKDIR /opt/metadata-extraction
 ENV PYTHONPATH .
 EXPOSE 6767
+ENV aws_secret_access_key Mwi2Sq90taDAkUZwtiEuLHvTXZLzXDQZExPh53R4
+ENV aws_access_key_id AKIASPMMHOET3PNSICG4
+ENV AWS_REGION="us-east-1"
+ENV AWS_COGNITO_USER_POOL_ID="us-east-1_6AUY6LKPZ"
+ENV AWS_COGNITO_USER_POOL_CLIENT_ID="2vukbtukva3u0oh29lf32ghmkp"
+ENV AWS_COGNITO_DOMAIN="https://shipsolver-dev.auth.us-east-1.amazoncognito.com"
\ No newline at end of file
diff --git a/servers/celery.Dockerfile b/servers/celery.Dockerfile
index b12be3a..78c3e35 100644
--- a/servers/celery.Dockerfile
+++ b/servers/celery.Dockerfile
@@ -9,3 +9,9 @@ RUN git -C /root clone https://github.com/ShipSolver/flask-cognito-lib.git
 RUN pip3 install -e /root/flask-cognito-lib
 WORKDIR /opt/metadata-extraction/tenant
 ENV PYTHONPATH ..
+ENV aws_secret_access_key Mwi2Sq90taDAkUZwtiEuLHvTXZLzXDQZExPh53R4
+ENV aws_access_key_id AKIASPMMHOET3PNSICG4
+ENV AWS_REGION="us-east-1"
+ENV AWS_COGNITO_USER_POOL_ID="us-east-1_6AUY6LKPZ"
+ENV AWS_COGNITO_USER_POOL_CLIENT_ID="2vukbtukva3u0oh29lf32ghmkp"
+ENV AWS_COGNITO_DOMAIN="https://shipsolver-dev.auth.us-east-1.amazoncognito.com"
\ No newline at end of file
diff --git a/servers/tenant/blueprints/event_driven/ticket.py b/servers/tenant/blueprints/event_driven/ticket.py
index 48928a4..450671a 100644
--- a/servers/tenant/blueprints/event_driven/ticket.py
+++ b/servers/tenant/blueprints/event_driven/ticket.py
@@ -2,7 +2,6 @@
 from datetime import datetime
 from wsgiref import validate

-from numpy import number
 from flask import make_response, request, jsonify, Blueprint
 import sys

@@ -90,6 +89,9 @@ def ticket_get_all_with_status(status): # create ticket
     return make_response(json.dumps(res, cls=AlchemyEncoder))


+"""
+127.0.0.1:6767:/api/ticket/
+"""
 @ticket_bp.route("/", methods=["POST"])
 # @auth_required()
 def ticket_post(): # create ticket
diff --git a/servers/tenant/controllers/baseController.py b/servers/tenant/controllers/baseController.py
index 0cfa2e2..d7f7223 100644
--- a/servers/tenant/controllers/baseController.py
+++ b/servers/tenant/controllers/baseController.py
@@ -177,7 +177,7 @@ def _get_latest_event_objects_in_range(
         self, datetime1, datetime2, filters={}, number_of_res=5
     ):
         assert datetime1 <= datetime2
-        time1 = int(time.mktime(datetime1.timetuple()))
+        time1 = max(0, int(time.mktime(datetime1.timetuple())))
         time2 = int(time.mktime(datetime2.timetuple()))

         session_filters = convert_dict_to_alchemy_filters(self.model, filters)
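Note: the final hunk clamps the lower bound of the event window at the Unix epoch before it is compared against the model's timestamp column. A small standalone sketch of that conversion; the window_bounds helper is illustrative and does not exist in the codebase:

    import time
    from datetime import datetime

    def window_bounds(dt_start, dt_end):
        # epoch seconds for both ends of the range; the lower bound never goes negative,
        # mirroring the max(0, ...) guard added above
        assert dt_start <= dt_end
        return (max(0, int(time.mktime(dt_start.timetuple()))),
                int(time.mktime(dt_end.timetuple())))

    print(window_bounds(datetime(2022, 7, 1), datetime(2022, 7, 22)))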