From 7668e1cbd45db9b466e5f2330d00d12ea05d276c Mon Sep 17 00:00:00 2001 From: benne238 Date: Thu, 1 Apr 2021 15:22:29 -0400 Subject: [PATCH 01/43] update ecnqueue from the front end repository --- webqueue2_api/ECNQueue.py | 173 ++++++++++++-------------------------- 1 file changed, 55 insertions(+), 118 deletions(-) diff --git a/webqueue2_api/ECNQueue.py b/webqueue2_api/ECNQueue.py index 17d9c5a..b1ce16f 100644 --- a/webqueue2_api/ECNQueue.py +++ b/webqueue2_api/ECNQueue.py @@ -1,31 +1,22 @@ """A library for interacting with Purdue ECN's ticketing system. - This library allows interacting with queue Items (called Items) and collections of items (called Queues). - Example: # Create a single Item (ce100) >>> item = Item("ce", 100) # Get the sender's email address from an Item >>> item = Item("ce", 100) >>> item.userEmail - # Create an entire Queue (ce) >>> queue = Queue("ce") # Get the number of items in a Queue >>> queue = Queue("ce") >>> numItems = len(queue) - # Get all queues (and their items) >>> queues = getQueues() - Attributes: queueDirectory: The directory to load queues from. queuesToIgnore: Queues that will not be loaded when running getQueues() - -Raises: - # TODO: Add description(s) of when a ValueError is raised. - ValueError: [description] """ #------------------------------------------------------------------------------# @@ -47,10 +38,11 @@ #------------------------------------------------------------------------------# # The directory where queue items are -currentFilePath = __file__ -currentFileDirectory = os.path.dirname(currentFilePath) -currentFileDirectoryParent = os.path.dirname(currentFileDirectory) -queueDirectory = os.path.join(currentFileDirectoryParent, "q-snapshot") +#currentFilePath = __file__ +#currentFileDirectory = os.path.dirname(currentFilePath) +#currentFileDirectoryParent = os.path.dirname(currentFileDirectory) +#queueDirectory = os.path.join(currentFileDirectoryParent, "q-snapshot") +queueDirectory = "/home/pier/e/benne238/webqueue2/q-snapshot" # Queues to not load in getQueues() queuesToIgnore = ["archives", "drafts", "inbox", "coral"] @@ -62,15 +54,14 @@ #------------------------------------------------------------------------------# def isValidItemName(name: str) -> bool: - """Returns true if file name is a valid item name - + """Returns true if file name is a valid item name. + A file name is true if it contains between 1 and 3 integer numbers allowing for + any integer between 0 and 999. Example: isValidItemName("21") -> true isValidItemName("twentyone") -> false - Args: name (str): The name to test. - Returns: bool: Name is valid item name. """ @@ -83,16 +74,20 @@ def isValidItemName(name: str) -> bool: # Classes #------------------------------------------------------------------------------# class Item: - """A single issue. - + """A chronological representation of an interaction with a user. Example: # Create an Item (ce100) - >>> item = Item("ce", 100) - + >>> item = Item("ce", 100, headersOnly=false) + # Create an Item without parsing its contents (ce100) + >>> item = Item("ce", 100, headersOnly=True) + Args: + queue (str): The name of the Item's queue. + number (int): The number of the Item. + headersOnly (bool, optional): Whether or not to parse headers only. Defaults to True. Attributes: lastUpdated: An ISO 8601 formatted time string showing the last time the file was updated according to the filesystem. headers: A list of dictionaries containing header keys and values. - content: A list of section dictionaries. 
+ content: A list of section dictionaries (only included if headersOnly is False). isLocked: A boolean showing whether or not a lockfile for the item is present. userEmail: The email address of the person who this item is from. userName: The real name of the person who this item is from. @@ -104,21 +99,21 @@ class Item: department: The most recent department for this item. dateReceived: The date this item was created. jsonData: A JSON serializable representation of the Item. + Raises: + ValueError: When the number passed to the constructor cannot be parsed. """ - def __init__(self, queue: str, number: int) -> None: + def __init__(self, queue: str, number: int, headersOnly: bool = False) -> None: self.queue = queue try: self.number = int(number) except ValueError: - raise ValueError(" Could not convert \"" + - number + "\" to an integer") - + raise ValueError(f'Could not convert "{number}" to an integer') self.__path = "/".join([queueDirectory, self.queue, str(self.number)]) self.lastUpdated = self.__getLastUpdated() self.__rawItem = self.__getRawItem() self.headers = self.__parseHeaders() - self.content = self.__parseSections() + if not headersOnly: self.content = self.__parseSections() self.isLocked = self.__isLocked() self.userEmail = self.__parseFromData(data="userEmail") self.userName = self.__parseFromData(data="userName") @@ -129,35 +124,17 @@ def __init__(self, queue: str, number: int) -> None: self.priority = self.__getMostRecentHeaderByType("Priority") self.department = self.__getMostRecentHeaderByType("Department") self.building = self.__getMostRecentHeaderByType("Building") - self.dateReceived = self.__getFormattedDate( - self.__getMostRecentHeaderByType("Date")) + self.dateReceived = self.__getFormattedDate(self.__getMostRecentHeaderByType("Date")) + self.jsonData = {} - # TODO: Autopopulate jsonData w/ __dir__() command. Exclude `^_` and `jsonData`. - self.jsonData = { - "queue": self.queue, - "number": self.number, - "lastUpdated": self.lastUpdated, - "headers": self.headers, - "content": self.content, - "isLocked": self.isLocked, - "userEmail": self.userEmail, - "userName": self.userName, - "userAlias": self.userAlias, - "assignedTo": self.assignedTo, - "subject": self.subject, - "status": self.status, - "priority": self.priority, - "department": self.department, - "building": self.building, - "dateReceived": self.dateReceived - } + for attribute in self.__dir__(): + if "_" not in attribute and attribute != "toJson" and attribute != "jsonData": + self.jsonData[attribute] = self.__getattribute__(attribute) def __getLastUpdated(self) -> str: """Returns last modified time of item reported by the filesystem in mm-dd-yy hh:mm am/pm format. - Example: 07-23-20 10:34 AM - Returns: str: last modified time of item reported by the filesystem in mm-dd-yy hh:mm am/pm format. """ @@ -169,7 +146,6 @@ def __getLastUpdated(self) -> str: def __getRawItem(self) -> list: """Returns a list of all lines in the item file - Returns: list: List of all the lines in the item file """ @@ -178,12 +154,10 @@ def __getRawItem(self) -> list: def __getHeaderBoundary(self) -> int: """Returns the 0 based line number where the Item headers stop. - Example: The header end would be on line 13 12: X-ECN-Queue-Original-URL: 13: 14: I need help. - Returns: int: line number where the Item headers end """ @@ -194,7 +168,6 @@ def __getHeaderBoundary(self) -> int: def __parseHeaders(self) -> list: """Returns a list containing dictionaries of header type and data. Removes queue prefixes and whitespace. 
- Examples: "[ce] QStatus: Dont Delete\\nFrom: Justin Campbell \\n" becomes @@ -202,7 +175,6 @@ def __parseHeaders(self) -> list: {"QStatus": "Don't Delete"}, {"From": "Justin Campbell "} ] - Returns: list: Header dicts """ @@ -412,7 +384,6 @@ def __parseSections(self) -> list: def __directoryParsing(self, directoryStartLine: int) -> dict: """Returns a dictionary with directory information - Example: Name: Nestor Fabian Rodriguez Buitrago Login: rodri563 @@ -426,10 +397,8 @@ def __directoryParsing(self, directoryStartLine: int) -> dict: User ECNDB: http://eng.purdue.edu/jump/2e8399a Host ECNDB: http://eng.purdue.edu/jump/2e83999 Subject: Autocad installation - Args: directoryStartLine (int): line number within the item that the directory starts on - Returns: dict: dictionary that splits each line within the directory into a key and a value """ @@ -524,15 +493,12 @@ def __directoryParsing(self, directoryStartLine: int) -> dict: def __assignmentParsing(self, contentStart: int) -> list: """Returns a list with assignment information dictionaries - Example: Assigned-To: campb303 Assigned-To-Updated-Time: Tue, 23 Jun 2020 13:27:00 EDT Assigned-To-Updated-By: campb303 - Args: contentStart (int): line number where the content starts - Returns: list: [ {"type": "assignment", @@ -586,15 +552,12 @@ def __assignmentParsing(self, contentStart: int) -> list: def __initialMessageParsing(self, content: list) -> dict: """Returns a dictionary with initial message information - Example: \n Testtest\n \n - Args: content (list): content of the initial message - Returns: dict: "type": "initial_message", @@ -664,7 +627,6 @@ def __initialMessageParsing(self, content: list) -> dict: def __editParsing(self, content: list, lineNum: int) -> dict: """Returns a dictionary with edit information - Example: *** Edited by: campb303 at: 06/23/20 13:27:56 ***\n \n @@ -672,11 +634,9 @@ def __editParsing(self, content: list, lineNum: int) -> dict: \n \n \n - Args: content (list): content of an edit lineNum (int): line number of an edit within an item - Returns: dict: a dictionary with these keys, "type": "edi", @@ -723,7 +683,6 @@ def __editParsing(self, content: list, lineNum: int) -> dict: def __replyToParsing(self, content: list, lineNum: int) -> dict: """Returns a dictionary with reply to user information - Example: *** Replied by: campb303 at: 06/23/20 13:28:18 ***\n \n @@ -732,11 +691,9 @@ def __replyToParsing(self, content: list, lineNum: int) -> dict: Justin\n ECN\n \n - Args: content (list): content of a reply to user lineNum (int): line number of a reply to user in an item - Returns: dict: a dictionary with these keys, "type": "reply_to_user", @@ -780,15 +737,12 @@ def __replyToParsing(self, content: list, lineNum: int) -> dict: def __statusParsing(self, content: list, lineNum: int) -> dict: """Returns a dictionary with status information - Example: *** Status updated by: campb303 at: 6/23/2020 13:26:55 ***\n Dont Delete\n - Args: content (list): The content of a status update lineNum (int): The line number of a status update in an item - Returns: dict: a dictionary with these keys, "type": "status", @@ -835,7 +789,6 @@ def __statusParsing(self, content: list, lineNum: int) -> dict: def __userReplyParsing(self, replyContent: list, lineNumber: int) -> dict: """Returns a dictionary with user reply information - Example: === Additional information supplied by user ===\n \n @@ -852,7 +805,6 @@ def __userReplyParsing(self, replyContent: list, lineNumber: int) -> dict: Args: replyContent (list): The entire section of a 
reply-from-user lineNumber (int): The line number of the begining of a reply-from-user section within and item - Returns: dict: a dictionary with these keys, "type": "reply_from_user", @@ -986,7 +938,6 @@ def __userReplyParsing(self, replyContent: list, lineNumber: int) -> dict: def __getFormattedSectionContent(self, sectionContent: list) -> list: """Returns a list with message content that is stripped of unnecessary newlines and begining delimiters - Example: *** Edited by: mph at: 02/21/20 10:27:16 ***\n \n @@ -997,10 +948,8 @@ def __getFormattedSectionContent(self, sectionContent: list) -> list: \n \n \n - Args: sectionContent (list): The section content of a parsed section - Returns: list: the section content of a parsed section without any delimiters and unnecessary newlines """ @@ -1036,15 +985,12 @@ def __getFormattedSectionContent(self, sectionContent: list) -> list: def __errorParsing(self, line: str, lineNum: int, expectedSyntax: str) -> dict: """Returns a dictionary with error parse information when a line is malformed - Example: "*** Status updated by: ewhile at: 5/7/2020 10:59:11 *** sharing between\n" - Args: line (str): line of that threw error lineNum (int): line number in the item that threw error expectedSyntax (str): a message stating the syntax the line should follow - Returns: dict: a dictionary with these keys, "type": "parse_error", @@ -1080,13 +1026,10 @@ def __errorParsing(self, line: str, lineNum: int, expectedSyntax: str) -> dict: def __getSortedSections(self, sectionsList: list) -> list: """Sorts the sections chronologically by datetime - Example: [example] need to do - Args: sections (list): the list of sections to be sorted - Returns: list: a list of sections sorted by datetime """ @@ -1119,13 +1062,10 @@ def __getSortedSections(self, sectionsList: list) -> list: def __isLocked(self) -> Union[str, bool]: """Returns a string info about the lock if true and a bool False if false - Example: A file is locked "CE 100 is locked by campb303 using qvi" - Example: a file is not locked False - Returns: Union[str, bool]: String with info about lock if true, bool False if false """ @@ -1142,18 +1082,14 @@ def __isLocked(self) -> Union[str, bool]: def __getMostRecentHeaderByType(self, headerType: str) -> str: """Return the data of most recent header of the given type. If no header of that type exists, return an empty string. - Example: Requesting a Status header that does exist __getMostRecentHeaderByType("Status") becomes "Waiting for Reply" - Example: Requesting a Status header that doesn't exist __getMostRecentHeaderByType("Status") becomes "" - Args: headerType (str): Type of header to return. - Returns: str: data of most recent header of the given type or empty string. """ @@ -1165,14 +1101,11 @@ def __getMostRecentHeaderByType(self, headerType: str) -> str: def __parseFromData(self, data: str) -> str: """Parse From header and return requested data. Returns empty string if requested data is unavailable. - Examples: From data is "From: Campbell, Justin " __parseFromData(data="userName") returns "Campbell, Justin" __parseFromData(data="userEmail") returns "campb303@purdue.edu" - Args: data (str): The data desired; can be "userName" or "userEmail". - Returns: str: userName, userEmail or empty string. """ @@ -1190,13 +1123,10 @@ def __parseFromData(self, data: str) -> str: def __getUserAlias(self) -> str: """Returns user's Career Account alias if present. If Career Account alias isn't present, returns empty string. 
- Example: Email from campb303@purdue.edu userAlias = "campb303" - Example: Email from spam@spammer.net userAlias = "" - Returns: str: User's Career Account alias if present or empty string """ @@ -1219,7 +1149,6 @@ def __getFormattedDate(self, date: str) -> str: """Returns the date/time formatted as RFC 8601 YYYY-MM-DDTHH:MM:SS+00:00. Returns empty string if the string argument passed to the function is not a datetime. See: https://en.wikipedia.org/wiki/ISO_8601 - Returns: str: Properly formatted date/time recieved or empty string. """ @@ -1236,7 +1165,6 @@ def __getFormattedDate(self, date: str) -> str: def toJson(self) -> dict: """Returns a JSON safe representation of the item. - Returns: dict: JSON safe representation of the item. """ @@ -1245,24 +1173,28 @@ def toJson(self) -> dict: def __repr__(self) -> str: return self.queue + str(self.number) -# TODO: Make Queue iterable using __iter__. See: https://thispointer.com/python-how-to-make-a-class-iterable-create-iterator-class-for-it/ class Queue: - """A collection of items. - + """A collection of Items. Example: # Create a queue (ce) >>> queue = Queue("ce") - + # Create a queue without parsing item contents (ce) + >>> queue = Queue("ce", headersOnly=False) + Args: + queue (str): The name of the queue. + headersOnly (bool, optional): Whether or not to parse headers only. Defaults to True. Attributes: name: The name of the queue. items: A list of Items in the queue. jsonData: A JSON serializable representation of the Queue. """ - def __init__(self, name: str) -> None: + def __init__(self, name: str, headersOnly: bool = True) -> None: self.name = name + self.headersOnly = headersOnly self.__directory = queueDirectory + "/" + self.name + "/" self.items = self.__getItems() + self._index = 0 self.jsonData = { "name": self.name, @@ -1271,7 +1203,6 @@ def __init__(self, name: str) -> None: def __getItems(self) -> list: """Returns a list of items for this Queue - Returns: list: a list of items for this Queue """ @@ -1283,16 +1214,14 @@ def __getItems(self) -> list: isFile = True if os.path.isfile(itemPath) else False if isFile and isValidItemName(item): - items.append(Item(self.name, item)) + items.append(Item(self.name, item, headersOnly=self.headersOnly)) return items def toJson(self) -> dict: """Return JSON safe representation of the Queue - The JSON representation of every item in the Queue is added to the Queue's JSON data then the Queue's JSON data is returned. - Returns: dict: JSON safe representation of the Queue """ @@ -1309,12 +1238,17 @@ def __len__(self) -> int: def __repr__(self) -> str: return f'{self.name}_queue' + # Implements the interable interface requirements by passing direct references + # to the item list's interable values. + def __iter__(self) -> list: + return iter(self.items) + def __next__(self) -> int: + return iter(self.items).__next__() + def getValidQueues() -> list: """Returns a list of queues on the filesystem excluding ignored queues. - Example: ["bidc", "me", "ce"] - Returns: list: Valid queues """ @@ -1332,7 +1266,6 @@ def getValidQueues() -> list: def getQueueCounts() -> list: """Returns a list of dictionaries with the number of items in each queue. - Example: [ { @@ -1344,14 +1277,13 @@ def getQueueCounts() -> list: number_of_items: 3 } ] - Returns: list: Dictionaries with the number of items in each queue. 
""" queueInfo = [] for queue in getValidQueues(): possibleItems = os.listdir(queueDirectory + "/" + queue) - validItems = [isValidItemName for file in possibleItems] + validItems = [file for file in possibleItems if isValidItemName(file)] queueInfo.append( {"name": queue, "number_of_items": len(validItems)} ) # Sorts list of queue info alphabetically @@ -1359,16 +1291,21 @@ def getQueueCounts() -> list: return sortedQueueInfo - -def loadQueues() -> list: +def loadAllQueues(headersOnly: bool = True) -> list: """Return a list of Queues for each queue. - + Example: + # Load all Queues without parsing Item content + >>> loadAllQueues(); + Load all Queues and parsing Item content + >>> loadAllQueues(headersOnly=False) + Args: + headersOnly (bool, optional): Whether or not to parse headers only. Defaults to True. Returns: - list: list of Queues for each queue. + list: List of Queues for each queue. """ queues = [] for queue in getValidQueues(): - queues.append(Queue(queue)) + queues.append(Queue(queue, headersOnly=headersOnly)) return queues \ No newline at end of file From 647cc7987b785374844254036bc6defbd1d37a78 Mon Sep 17 00:00:00 2001 From: benne238 Date: Thu, 1 Apr 2021 17:23:21 -0400 Subject: [PATCH 02/43] creation of basic directory schema with proper __init__.py scripts in each subpackage --- webqueue2_api/ECNQueue/__init__.py | 13 +++++++++++++ webqueue2_api/ECNQueue/parser/__init__.py | 0 webqueue2_api/api/__init__.py | 1 + webqueue2_api/api/__main__.py | 0 webqueue2_api/api/resources/__init__.py | 1 + 5 files changed, 15 insertions(+) create mode 100644 webqueue2_api/ECNQueue/__init__.py create mode 100644 webqueue2_api/ECNQueue/parser/__init__.py create mode 100644 webqueue2_api/api/__init__.py create mode 100644 webqueue2_api/api/__main__.py create mode 100644 webqueue2_api/api/resources/__init__.py diff --git a/webqueue2_api/ECNQueue/__init__.py b/webqueue2_api/ECNQueue/__init__.py new file mode 100644 index 0000000..8eec7ca --- /dev/null +++ b/webqueue2_api/ECNQueue/__init__.py @@ -0,0 +1,13 @@ +from . import Item, Queue, utils, parser +from os import environ +import dotenv + +dotenv.load_dotenv("ECNQueue.cfg") + +queue_directory = "/home/pier/e/queue/Mail" +if environ.get("QUEUE_DIRECTORY"): + queue_directory = environ.get("QUEUE_DIRECTORY") + +queues_to_ignore = ["archives", "drafts", "inbox", "coral"] +if environ.get("QUEUES_TO_IGNORE"): + queues_to_ignore = [queue for queue in environ.get("QUEUES_TO_IGNORE").split(",") if queue in utils.getValidQueues()] \ No newline at end of file diff --git a/webqueue2_api/ECNQueue/parser/__init__.py b/webqueue2_api/ECNQueue/parser/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/webqueue2_api/api/__init__.py b/webqueue2_api/api/__init__.py new file mode 100644 index 0000000..6e2347d --- /dev/null +++ b/webqueue2_api/api/__init__.py @@ -0,0 +1 @@ +from . import auth \ No newline at end of file diff --git a/webqueue2_api/api/__main__.py b/webqueue2_api/api/__main__.py new file mode 100644 index 0000000..e69de29 diff --git a/webqueue2_api/api/resources/__init__.py b/webqueue2_api/api/resources/__init__.py new file mode 100644 index 0000000..b19862f --- /dev/null +++ b/webqueue2_api/api/resources/__init__.py @@ -0,0 +1 @@ +from . 
import item, login, queue, queue_list, queue, refresh_access_token \ No newline at end of file From 29b2fa3bd8d1ec67eef7b810531c903cf907a75c Mon Sep 17 00:00:00 2001 From: benne238 Date: Thu, 1 Apr 2021 21:44:33 -0400 Subject: [PATCH 03/43] absolute import statements in the __init__ files for the backend --- webqueue2_api/ECNQueue/__init__.py | 2 +- webqueue2_api/__init__.py | 2 +- webqueue2_api/api/__init__.py | 2 +- webqueue2_api/api/resources/__init__.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/webqueue2_api/ECNQueue/__init__.py b/webqueue2_api/ECNQueue/__init__.py index 8eec7ca..891f5f2 100644 --- a/webqueue2_api/ECNQueue/__init__.py +++ b/webqueue2_api/ECNQueue/__init__.py @@ -1,4 +1,4 @@ -from . import Item, Queue, utils, parser +from webqueue2_api.ECNQueue import Item, Queue, utils, parser from os import environ import dotenv diff --git a/webqueue2_api/__init__.py b/webqueue2_api/__init__.py index 3bec1e8..f9edf5f 100644 --- a/webqueue2_api/__init__.py +++ b/webqueue2_api/__init__.py @@ -1 +1 @@ -from . import api, ECNQueue \ No newline at end of file +from webqueue2_api import api, ECNQueue \ No newline at end of file diff --git a/webqueue2_api/api/__init__.py b/webqueue2_api/api/__init__.py index 6e2347d..e16230a 100644 --- a/webqueue2_api/api/__init__.py +++ b/webqueue2_api/api/__init__.py @@ -1 +1 @@ -from . import auth \ No newline at end of file +from webqueue2_api.api import auth \ No newline at end of file diff --git a/webqueue2_api/api/resources/__init__.py b/webqueue2_api/api/resources/__init__.py index b19862f..06b5a8e 100644 --- a/webqueue2_api/api/resources/__init__.py +++ b/webqueue2_api/api/resources/__init__.py @@ -1 +1 @@ -from . import item, login, queue, queue_list, queue, refresh_access_token \ No newline at end of file +from webqueue2_api.api.resources import item, login, queue, queue_list, queue, refresh_access_token \ No newline at end of file From 18c2742d354cb0c2f3f6811445ef31a88af0f7f8 Mon Sep 17 00:00:00 2001 From: benne238 Date: Fri, 2 Apr 2021 16:57:03 -0400 Subject: [PATCH 04/43] removed ECNQueue.py from the repo --- webqueue2_api/ECNQueue.py | 1311 ------------------------------------- 1 file changed, 1311 deletions(-) delete mode 100644 webqueue2_api/ECNQueue.py diff --git a/webqueue2_api/ECNQueue.py b/webqueue2_api/ECNQueue.py deleted file mode 100644 index b1ce16f..0000000 --- a/webqueue2_api/ECNQueue.py +++ /dev/null @@ -1,1311 +0,0 @@ -"""A library for interacting with Purdue ECN's ticketing system. -This library allows interacting with queue Items (called Items) and collections -of items (called Queues). -Example: - # Create a single Item (ce100) - >>> item = Item("ce", 100) - # Get the sender's email address from an Item - >>> item = Item("ce", 100) - >>> item.userEmail - # Create an entire Queue (ce) - >>> queue = Queue("ce") - # Get the number of items in a Queue - >>> queue = Queue("ce") - >>> numItems = len(queue) - # Get all queues (and their items) - >>> queues = getQueues() -Attributes: - queueDirectory: The directory to load queues from. 
- queuesToIgnore: Queues that will not be loaded when running getQueues() -""" - -#------------------------------------------------------------------------------# -# Imports -#------------------------------------------------------------------------------# -import os -import time -import email -import re -import datetime -from dateutil.parser import parse -from dateutil import tz -from typing import Union -import json - - -#------------------------------------------------------------------------------# -# Configuration -#------------------------------------------------------------------------------# - -# The directory where queue items are -#currentFilePath = __file__ -#currentFileDirectory = os.path.dirname(currentFilePath) -#currentFileDirectoryParent = os.path.dirname(currentFileDirectory) -#queueDirectory = os.path.join(currentFileDirectoryParent, "q-snapshot") -queueDirectory = "/home/pier/e/benne238/webqueue2/q-snapshot" - -# Queues to not load in getQueues() -queuesToIgnore = ["archives", "drafts", "inbox", "coral"] - - - -#------------------------------------------------------------------------------# -# Utilities -#------------------------------------------------------------------------------# - -def isValidItemName(name: str) -> bool: - """Returns true if file name is a valid item name. - A file name is true if it contains between 1 and 3 integer numbers allowing for - any integer between 0 and 999. - Example: - isValidItemName("21") -> true - isValidItemName("twentyone") -> false - Args: - name (str): The name to test. - Returns: - bool: Name is valid item name. - """ - itemPattern = re.compile("^[0123456789]{1,3}$") - return True if itemPattern.match(name) else False - - - -#------------------------------------------------------------------------------# -# Classes -#------------------------------------------------------------------------------# -class Item: - """A chronological representation of an interaction with a user. - Example: - # Create an Item (ce100) - >>> item = Item("ce", 100, headersOnly=false) - # Create an Item without parsing its contents (ce100) - >>> item = Item("ce", 100, headersOnly=True) - Args: - queue (str): The name of the Item's queue. - number (int): The number of the Item. - headersOnly (bool, optional): Whether or not to parse headers only. Defaults to True. - Attributes: - lastUpdated: An ISO 8601 formatted time string showing the last time the file was updated according to the filesystem. - headers: A list of dictionaries containing header keys and values. - content: A list of section dictionaries (only included if headersOnly is False). - isLocked: A boolean showing whether or not a lockfile for the item is present. - userEmail: The email address of the person who this item is from. - userName: The real name of the person who this item is from. - userAlias: The Purdue career account alias of the person this item is from. - assignedTo: The Purdue career account alias of the person this item is assigned to - subject: The subject of the original message for this item. - status: The most recent status update for the item. - priority: The most recent priority for this item. - department: The most recent department for this item. - dateReceived: The date this item was created. - jsonData: A JSON serializable representation of the Item. - Raises: - ValueError: When the number passed to the constructor cannot be parsed. 
- """ - - def __init__(self, queue: str, number: int, headersOnly: bool = False) -> None: - self.queue = queue - try: - self.number = int(number) - except ValueError: - raise ValueError(f'Could not convert "{number}" to an integer') - self.__path = "/".join([queueDirectory, self.queue, str(self.number)]) - self.lastUpdated = self.__getLastUpdated() - self.__rawItem = self.__getRawItem() - self.headers = self.__parseHeaders() - if not headersOnly: self.content = self.__parseSections() - self.isLocked = self.__isLocked() - self.userEmail = self.__parseFromData(data="userEmail") - self.userName = self.__parseFromData(data="userName") - self.userAlias = self.__getUserAlias() - self.assignedTo = self.__getMostRecentHeaderByType("Assigned-To") - self.subject = self.__getMostRecentHeaderByType("Subject") - self.status = self.__getMostRecentHeaderByType("Status") - self.priority = self.__getMostRecentHeaderByType("Priority") - self.department = self.__getMostRecentHeaderByType("Department") - self.building = self.__getMostRecentHeaderByType("Building") - self.dateReceived = self.__getFormattedDate(self.__getMostRecentHeaderByType("Date")) - self.jsonData = {} - - for attribute in self.__dir__(): - if "_" not in attribute and attribute != "toJson" and attribute != "jsonData": - self.jsonData[attribute] = self.__getattribute__(attribute) - - def __getLastUpdated(self) -> str: - """Returns last modified time of item reported by the filesystem in mm-dd-yy hh:mm am/pm format. - Example: - 07-23-20 10:34 AM - Returns: - str: last modified time of item reported by the filesystem in mm-dd-yy hh:mm am/pm format. - """ - # TODO: Simplify this code block by allowing __getFormattedDate to accept milliseconds since the epoch. - unixTime = os.path.getmtime(self.__path) - formattedTime = time.strftime( - '%m-%d-%y %I:%M %p', time.localtime(unixTime)) - return self.__getFormattedDate(formattedTime) - - def __getRawItem(self) -> list: - """Returns a list of all lines in the item file - Returns: - list: List of all the lines in the item file - """ - with open(self.__path, errors="replace") as file: - return file.readlines() - - def __getHeaderBoundary(self) -> int: - """Returns the 0 based line number where the Item headers stop. - Example: The header end would be on line 13 - 12: X-ECN-Queue-Original-URL: - 13: - 14: I need help. - Returns: - int: line number where the Item headers end - """ - for lineNumber, line in enumerate(self.__rawItem): - if line == "\n": - return lineNumber - - def __parseHeaders(self) -> list: - """Returns a list containing dictionaries of header type and data. - Removes queue prefixes and whitespace. 
- Examples: - "[ce] QStatus: Dont Delete\\nFrom: Justin Campbell \\n" - becomes - [ - {"QStatus": "Don't Delete"}, - {"From": "Justin Campbell "} - ] - Returns: - list: Header dicts - """ - headerString = "" - - # Remove '[queue] ' prefixes: - # Example: - # [ce] QTime-Updated-By: campb303 becomes - # QTime-Updated-By: campb303 - queuePrefixPattern = re.compile(r"\[.*?\] {1}") - for lineNumber in range(self.__getHeaderBoundary()): - line = self.__rawItem[lineNumber] - lineHasQueuePrefix = queuePrefixPattern.match(line) - - if lineHasQueuePrefix: - queuePrefix = line[lineHasQueuePrefix.regs[0] - [0]: lineHasQueuePrefix.regs[0][1]] - line = line.replace(queuePrefix, "") - - headerString += line - - # message = email.message_from_string(headerString + "".join(self.__getContent())) - message = email.message_from_string(headerString) - - headers = [] - dateHeaders=[ - "QStatus-Updated-Time", - "Status-Updated-Time", - "Edited-Time", - "QTime-Updated-Time", - "Merged-Time", - "Time-Updated-Time", - "Replied-Time", - "Assigned-To-Updated-Time", - "QAssigned-To-Updated-Time", - "Date", - "Sent" - ] - - for key in message.keys(): - headers.append({"type": key, "content": self.__getFormattedDate(message[key]) if key in dateHeaders else message[key]}) - - return headers - - # TODO: Implement attachment parsing - - def __parseSections(self) -> list: - # List of all item events - sections = [] - - contentStart = self.__getHeaderBoundary() + 1 - contentEnd = len(self.__rawItem) - 1 - - # List of assignments for the item - assignementLsit = self.__assignmentParsing(contentStart) - - # Appends each assignment individually to sections - for assignment in assignementLsit: - sections.append(assignment) - - # Checks for empty content within an item and returns and - if contentEnd <= contentStart: - blankInitialMessage = self.__initialMessageParsing([""]) - sections.append(blankInitialMessage) - return sections - - # Checks for Directory Identifiers - if self.__rawItem[contentStart] == "\n" and self.__rawItem[contentStart + 1].startswith("\t"): - - directoryStartLine = contentStart + 1 - - # Parses the directory information and returns a dictionary of directory values - directoryInfo = self.__directoryParsing(directoryStartLine) - - # Appends Directory Information into the sections array - sections.append(directoryInfo) - - # Sets the initial message start to the next line after all directory lines and newlines - contentStart = contentStart + len(directoryInfo) + 1 - - # The start line, type, and end line for item events - sectionBoundaries = [] - - # Delimiter info - delimiters = [ - {"name": "edit", "pattern": "*** Edited"}, - {"name": "status", "pattern": "*** Status"}, - {"name": "replyToUser", "pattern": "*** Replied"}, - {"name": "replyFromUser", "pattern": "=== "}, - ] - - # Signifies that there is an initial message to parse - initialMessageSection = True - - # Parses the entire contents of the message, stores everything before any delimiter as the initial message - # and the line number of any delimiters as well as the type - for lineNumber in range(contentStart, contentEnd + 1): - - line = self.__rawItem[lineNumber] - - # Looks for a starting delimiter and explicity excludes the reply-from-user ending delimiter - if (line.startswith("*** Edited by: ") or - line.startswith("*** Replied by: ") or - line.startswith("*** Status updated by: ") or - line == "=== Additional information supplied by user ===\n" and not - line == "===============================================\n" - ): - - # Sets the delimiter 
type based on the pattern within the delimiters list - for delimiter in delimiters: - - if line.startswith(delimiter["pattern"]): - sectionBoundaries.append( - {"start": lineNumber, "type": delimiter["name"]}) - break - - # If a starting delimiter was encountered, then there is no initial message - if initialMessageSection: - initialMessageSection = False - - elif initialMessageSection == True: - # Delimiter not encountered yet, so append initial message starting line as the current lin number - sectionBoundaries.append( - {"start": lineNumber, "type": "initial_message"}) - initialMessageSection = False - - # Used to set the end line of the last delimiter - sectionBoundaries.append({"start": contentEnd + 1}) - - # Sets the end of the section boundary to the begining of the next section boundary - for boundaryIndex in range(0, len(sectionBoundaries) - 1): - - sectionBoundaries[boundaryIndex]["end"] = sectionBoundaries[boundaryIndex + 1]["start"] - - # Remove End of File boundary since the line number has been assigned to the last delimiter - del sectionBoundaries[-1] - - # Parses through all the boundaries in section boundaries - for boundary in sectionBoundaries: - - # Sets line to the first line of the boundary (which is always the delimiter) - line = self.__rawItem[boundary["start"]] - - # Returns all of the lines within the current section - sectionContent = self.__rawItem[boundary["start"]: boundary["end"]] - - # Appends an initial message dictionary to sections - if boundary["type"] == "initial_message": - initialMessageDictionary = self.__initialMessageParsing( - sectionContent) - sections.append(initialMessageDictionary) - - elif boundary["type"] == "edit": - # Returns a dictionary with edit information - editInfo = self.__editParsing( - sectionContent, boundary["start"]) - - # Checks for a parse error and appends it, returning the sections list which stops the parsing - if editInfo["type"] == "parse_error": - sections.append(editInfo) - return self.__getSortedSections(sections) - - # Appends the edit dictionary to sections - sections.append(editInfo) - - elif boundary["type"] == "replyToUser": - # Returns a dictionary with reply-to information - replyToInfo = self.__replyToParsing( - sectionContent, boundary["start"]) - - # Checks for a parse error and appends it, returning the sections list which stops the parsing - if replyToInfo["type"] == "parse_error": - sections.append(replyToInfo) - return self.__getSortedSections(sections) - - # Appends the reply-to to sections - sections.append(replyToInfo) - - elif boundary["type"] == "status": - # Returns a dictionary with status information - statusInfo = self.__statusParsing( - sectionContent, boundary["start"]) - - if statusInfo["type"] == "parse_error": - sections.append(statusInfo) - return self.__getSortedSections(sections) - - # Appends the status to sections - sections.append(statusInfo) - - elif boundary["type"] == "replyFromUser": - # Returns a dictionary with userReply information - replyFromInfo = self.__userReplyParsing( - sectionContent, boundary["start"]) - - if replyFromInfo["type"] == "parse_error": - sections.append(replyFromInfo) - return self.__getSortedSections(sections) - - # Appends the replyFrom to sections - sections.append(replyFromInfo) - - sortedSections = self.__getSortedSections(sections) - - return sortedSections - # return sections - - def __directoryParsing(self, directoryStartLine: int) -> dict: - """Returns a dictionary with directory information - Example: - Name: Nestor Fabian Rodriguez Buitrago - 
Login: rodri563 - Computer: ce-205-38 (128.46.205.67) - Location: HAMP G230 - Email: rodri563@purdue.edu - Phone: 7654766893 - Office: HAMP G230 - UNIX Dir: /home/bridge/b/rodri563 - Zero Dir: U=\\bridge.ecn.purdue.edu\rodri563 - User ECNDB: http://eng.purdue.edu/jump/2e8399a - Host ECNDB: http://eng.purdue.edu/jump/2e83999 - Subject: Autocad installation - Args: - directoryStartLine (int): line number within the item that the directory starts on - Returns: - dict: dictionary that splits each line within the directory into a key and a value - """ - directoryInformation = {"type": "directory_information"} - - directoryPossibleKeys = [ - "Name", - "Login", - "Computer", - "Location", - "Email", - "Phone", - "Office", - "UNIX Dir", - "Zero Dir", - "User ECNDB", - "Host ECNDB", - "Subject" - ] - # Executies until the directory start line is greater than the directory ending line - while True: - - # Returns the line number at directory start line - info = self.__rawItem[directoryStartLine] - - # Breaks the loop if it encountrs a newline, signifying the end of the directory information - if info == "\n": - - break - - else: - - # Removes white including space, newlines, and tabs from the directory info line - strippedInfo = info.strip() - - # Attempts to find ": " but will accept ":", denoting a blank entry for a directory item - if ": " in strippedInfo: - - # Seperates the directory info line into two variables, the first variable being the key, the second being the value - # swt1 - key, value = strippedInfo.split(": ", 1) - - if key in directoryPossibleKeys: - # Adds the key value pair to the directory info dictionary - directoryInformation[key] = value - else: - # Casts the list type on to a dictionary - dictionaryList = list(directoryInformation) - # Length of dictionary list - lenDictionaryList = len(dictionaryList) - # The last key appended to the directory dictionary - lastKeyAppended = dictionaryList[lenDictionaryList - 1] - - directoryInformation[lastKeyAppended] = directoryInformation[lastKeyAppended] + \ - " " + strippedInfo - - elif ":" in strippedInfo: - - # Seperates the directory info line into two variables, the first variable being the key, the second being the value - key, value = strippedInfo.split(":", 1) - - if key in directoryPossibleKeys: - # Adds the key value pair to the directory info dictionary - directoryInformation[key] = value - else: - # Casts the list type on to a dictionary - dictionaryList = list(directoryInformation) - # Length of dictionary list - lenDictionaryList = len(dictionaryList) - # The last key appended to the directory dictionary - lastKeyAppended = dictionaryList[lenDictionaryList - 1] - - directoryInformation[lastKeyAppended] = directoryInformation[lastKeyAppended] + \ - " " + strippedInfo - - # Signifies that this line belongs to the most previous line - elif ": " not in strippedInfo and ":" not in strippedInfo: - # Casts the list type on to a dictionary - dictionaryList = list(directoryInformation) - # Length of dictionary list - lenDictionaryList = len(dictionaryList) - # The last key appended to the directory dictionary - lastKeyAppended = dictionaryList[lenDictionaryList - 1] - - directoryInformation[lastKeyAppended] = directoryInformation[lastKeyAppended] + \ - " " + strippedInfo - # Counter to denote the end of the directory - directoryStartLine = directoryStartLine + 1 - - # Returns the directory information dictionary - return directoryInformation - - def __assignmentParsing(self, contentStart: int) -> list: - """Returns a list with 
assignment information dictionaries - Example: - Assigned-To: campb303 - Assigned-To-Updated-Time: Tue, 23 Jun 2020 13:27:00 EDT - Assigned-To-Updated-By: campb303 - Args: - contentStart (int): line number where the content starts - Returns: - list: [ - {"type": "assignment", - "datetime": datetime of the assignment, - "by": user who initiated the assignment, - "to": user who was assigned - }, - ] - """ - assignmentList = [] - - # Assignment Information - assignedBy = "" - assignedDateTime = "" - assignedTo = "" - - # Parses the header looking for assignment delimeters and stores info into their respective variables - for headerContent in range(0, contentStart): - - line = self.__rawItem[headerContent] - - # Gets who the Item was assigned to - if line.startswith("Assigned-To: "): - - assignedTo = ( - re.search("(?<=Assigned-To: )(.*)", line)).group() - - # Gets the date the Item was assigned - elif line.startswith("Assigned-To-Updated-Time: "): - - dateFromLine = ( - re.search("(?<=Assigned-To-Updated-Time: )(.*)", line)).group() - - assignedDateTime = self.__getFormattedDate(dateFromLine) - - # Gets who assigned the Item - elif line.startswith("Assigned-To-Updated-By: "): - - assignedBy = ( - re.search("(?<=Assigned-To-Updated-By: )(.*)", line)).group() - - # Appends the assignment to the sections list - assignmentList.append( - {"type": "assignment", - "datetime": assignedDateTime, - "by": assignedBy, - "to": assignedTo} - ) - - return assignmentList - - def __initialMessageParsing(self, content: list) -> dict: - """Returns a dictionary with initial message information - Example: - \n - Testtest\n - \n - Args: - content (list): content of the initial message - Returns: - dict: - "type": "initial_message", - "datetime": datetime the initial message was sent, - "from_name": from_name, - "from_email": user_email, - "to": [{email, name}], - "cc": [{email, name}], - "subject": initial message subject - "content": content of the initial message - """ - initialMessageDictionary = {} - - initialMessageDictionary["type"] = "initial_message" - - # Gets the initial message date from the header - rawMessageDateStr = self.__getMostRecentHeaderByType("Date") - - # Sets datetime in the intialMessage dictionary to UTC formatted date - initialMessageDictionary["datetime"] = self.__getFormattedDate( - rawMessageDateStr) - - initialMessageDictionary["from_name"] = self.__parseFromData( - data="userName") - - initialMessageDictionary["from_email"] = self.__parseFromData( - data="userEmail") - - # Stores list of dictionaries for the recipients of the initial message - initialMessageDictionary["to"] = [] - - # Parses the header looking for recipients of the initial message and stores it in a list of tuples - rawMessageRecipientsList = email.utils.getaddresses( - [self.__getMostRecentHeaderByType("To")]) - - # Parses the CC list and stores the cc recipient information in a list of dictionaries - for recipients in rawMessageRecipientsList: - - initialMessageDictionary["to"].append( - {"name": recipients[0], - "email": recipients[1]} - ) - - # Stores list of dictionaries for CC information - initialMessageDictionary["cc"] = [] - - # Parses the header looking for CC recipients of the initial message and stores it in a list of tuples - rawMessageCCList = email.utils.getaddresses( - [self.__getMostRecentHeaderByType("CC")]) - - # Parses the CC list and stores the cc recipient information in a list of dictionaries - for ccRecipients in rawMessageCCList: - - initialMessageDictionary["cc"].append( - {"name": 
ccRecipients[0], - "email": ccRecipients[1]} - ) - - initialMessageDictionary["subject"] = self.__getMostRecentHeaderByType( - "Subject") - - # Removes unecessary newlines from the begining and the end of the initial message - initialMessageDictionary["content"] = self.__getFormattedSectionContent( - content) - - return initialMessageDictionary - - def __editParsing(self, content: list, lineNum: int) -> dict: - """Returns a dictionary with edit information - Example: - *** Edited by: campb303 at: 06/23/20 13:27:56 ***\n - \n - This be an edit my boy\n - \n - \n - \n - Args: - content (list): content of an edit - lineNum (int): line number of an edit within an item - Returns: - dict: a dictionary with these keys, - "type": "edi", - "by": initiator of the edit, - "datetime": datetime of the edit, - "content": content of the edit - """ - - # Edit Info dictionary - editInfo = {} - - for count, line in enumerate(content): - if line == "===============================================\n": - errorMessage = "Reply-from-user ending delimter encountered without Reply-from-user starting delimter" - return self.__errorParsing(line, lineNum + count + 1, errorMessage) - - editInfo["type"] = "edit" - - delimiterLine = content[0] - # Parses for the author of the edit, which is located between the "*** Edited by: " and " at:" substrings - try: - editInfo["by"] = ( - re.search("(?<=\*{3} Edited by: )(.*)(?= at:)", delimiterLine)).group() - except: - errorMessage = "*** Edited by: [username] at: [date and time] ***\n" - return self.__errorParsing(delimiterLine, lineNum, errorMessage) - - try: - # Parses for the date and time of the edit, which is located between the " at: " and "***\n" substrings - dateTimeString = ( - re.search("(?<= at: )(.*)(?= \*\*\*\n)", delimiterLine)).group() - except: - # Returns an error message if there is no space after "at:" - errorMessage = "*** Edited by: [username] at: [date and time] ***\n" - return self.__errorParsing(delimiterLine, lineNum, errorMessage) - - # Attempts to format the date and time into utc format - editInfo["datetime"] = self.__getFormattedDate(dateTimeString) - - # Remove the delimiter String and unecessary newlines - editInfo["content"] = self.__getFormattedSectionContent(content) - - return editInfo - - def __replyToParsing(self, content: list, lineNum: int) -> dict: - """Returns a dictionary with reply to user information - Example: - *** Replied by: campb303 at: 06/23/20 13:28:18 ***\n - \n - This be a reply my son\n - \n - Justin\n - ECN\n - \n - Args: - content (list): content of a reply to user - lineNum (int): line number of a reply to user in an item - Returns: - dict: a dictionary with these keys, - "type": "reply_to_user", - "by": initiator of the reply to user, - "datetime": datetime of the reply to user, - "content": content of the reply to user - """ - replyInfo = {} - - replyInfo["type"] = "reply_to_user" - - delimiterLine = content[0] - - for count, line in enumerate(content): - if line == "===============================================\n": - errorMessage = "Reply-from-user ending delimter encountered without Reply-from-user starting delimter" - return self.__errorParsing(line, lineNum + count + 1, errorMessage) - - try: - # Parses for the author of the reply, which is located between the "*** Replied by: " and " at:" substrings - replyInfo["by"] = ( - re.search("(?<=\*{3} Replied by: )(.*)(?= at:)", delimiterLine)).group() - except: - errorMessage = "*** Replied by: [username] at: [date and time] ***\n" - return 
self.__errorParsing(delimiterLine, lineNum, errorMessage) - - # Parses for the date and time of the reply, which is located between the " at: " and "***\n" substrings - try: - dateTimeString = ( - re.search("(?<= at: )(.*)(?= \*\*\*\n)", delimiterLine)).group() - except: - errorMessage = "*** Replied by: [username] at: [date and time] ***\n" - return self.__errorParsing(delimiterLine, lineNum, errorMessage) - - # Formats date to UTC - replyInfo["datetime"] = self.__getFormattedDate(dateTimeString) - - replyInfo["content"] = self.__getFormattedSectionContent(content) - - return replyInfo - - def __statusParsing(self, content: list, lineNum: int) -> dict: - """Returns a dictionary with status information - Example: - *** Status updated by: campb303 at: 6/23/2020 13:26:55 ***\n - Dont Delete\n - Args: - content (list): The content of a status update - lineNum (int): The line number of a status update in an item - Returns: - dict: a dictionary with these keys, - "type": "status", - "by": initiator of the status update, - "datetime": datetime of the status update, - "content": content of the status update - """ - statusInfo = {} - - statusInfo["type"] = "status" - - delimiterLine = content[0] - - for count, line in enumerate(content): - if line == "===============================================\n": - errorMessage = "Reply-from-user ending delimter encountered without Reply-from-user starting delimter" - return self.__errorParsing(line, lineNum + count + 1, errorMessage) - - # Parses for the author of the status change, which is located between the "*** Status updated by: " and " at:" substrings - try: - statusInfo["by"] = ( - re.search("(?<=\*{3} Status updated by: )(.*)(?= at:)", delimiterLine)).group() - except: - errorMessage = "*** Status updated by: [username] at: [date and time] ***\n" - - return self.__errorParsing(delimiterLine, lineNum, errorMessage) - - # Parses for the date and time of the status change, which is located between the " at: " and "***\n" substrings - try: - dateTimeString = re.search( - "(?<= at: )(.*)(?= \*\*\*\n)", delimiterLine).group() - except: - errorMessage = "*** Status updated by: [username] at: [date and time] ***\n" - - return self.__errorParsing(delimiterLine, lineNum, errorMessage) - - # Formats the date to UTC - statusInfo["datetime"] = self.__getFormattedDate(dateTimeString) - - # Remove the delimiter String and unecessary newlines - statusInfo["content"] = self.__getFormattedSectionContent(content) - - return statusInfo - - def __userReplyParsing(self, replyContent: list, lineNumber: int) -> dict: - """Returns a dictionary with user reply information - Example: - === Additional information supplied by user ===\n - \n - Subject: Re: Beepboop\n - From: Justin Campbell \n - Date: Tue, 23 Jun 2020 13:30:45 -0400\n - X-ECN-Queue-Original-Path: /home/pier/e/queue/Attachments/inbox/2020-06-23/212-original.txt\n - X-ECN-Queue-Original-URL: https://engineering.purdue.edu/webqueue/Attachments/inbox/2020-06-23/212-original.txt\n - \n - Huzzah!\n - \n - ===============================================\n - \n - Args: - replyContent (list): The entire section of a reply-from-user - lineNumber (int): The line number of the begining of a reply-from-user section within and item - Returns: - dict: a dictionary with these keys, - "type": "reply_from_user", - "from_name": name of the user that sent the reply, - "from_email": email of the user that sent the reply, - "subject": subject of the reply, - "datetime": the datetime of the reply, - "cc": [ - {"name": name of the 
carbon copied recipient, - "email": email of the carbon copied recipient - }, - ] - "content": content of the reply - "headers": [ - {"type": headerType, - "content": content - }, - ] - """ - replyFromInfo = {} - - replyFromInfo["type"] = "reply_from_user" - - replyFromHeaders = [] - newLineCounter = 0 - endingDelimiterCount = 0 - - # Delimiter information line numbers to remove from reply from user - linesToRemove = [] - - # Parses the section content looking for any line that starts with a metadata, also tracks the line - # number with the enumerate function - for lineNum, line in enumerate(replyContent): - - if endingDelimiterCount == 0 and lineNum == len(replyContent) - 1: - errorMessage = "Did not encounter a reply-from-user ending delimiter" - return self.__errorParsing(line, lineNumber + lineNum + 1, errorMessage) - - if newLineCounter == 1 and line != "\n": - - try: - # Append header information for each headr line - headerType, content = line.split(": ", 1) - replyFromHeaders.append( - {"type": headerType, - "content": content - } - ) - except: - lenReplyFromHeaders = len(replyFromHeaders) - if lenReplyFromHeaders == 0: - errorMessage = ("Expected reply-from-user header information:\n" + - "=== Additional information supplied by user ===\n" + - "\n" + - "[Header Type]: [Header Value]\n" + - "\n" - ) - return self.__errorParsing(line, lineNumber + lineNum + 1, errorMessage) - - else: - replyFromHeaders[lenReplyFromHeaders - - 1]["content"] = replyFromHeaders[lenReplyFromHeaders - 1]["content"] + " " + line - - linesToRemove.append(lineNum) - # Checks for a newline and breaks for loop on second occurance of a newline - if line == "\n": - newLineCounter = newLineCounter + 1 - - if newLineCounter == 2 and "datetime" not in replyFromInfo.keys(): - errorMessage = "Expected \"Date: [datetime]\" in the header info" - return self.__errorParsing(line, lineNumber + lineNum + 1, errorMessage) - - elif line == "===============================================\n": - endingDelimiterCount = endingDelimiterCount + 1 - - elif line.startswith("From: ") and newLineCounter == 1: - # Returns a list of one tuples with a name stored in the first index of the tuple and an email stored in the second index of the tuple - emailList = email.utils.getaddresses([line]) - replyFromInfo["from_name"] = emailList[0][0] - replyFromInfo["from_email"] = emailList[0][1] - - elif line.startswith("Subject: ") and newLineCounter == 1: - # Matches everything after "Subject: " - try: - subjectStr = ( - re.search("(?<=Subject: )(.*)", line)).group() - except: - errorMessage = "Expeted syntax of \"Subject: [subject]\"" - return self.__errorParsing(line, lineNumber + lineNum + 1, errorMessage) - - # Formatts the date to UTC - replyFromInfo["subject"] = subjectStr - - elif line.startswith("Date: ") and newLineCounter == 1: - # Matches everything after "Date: " - try: - dateStr = (re.search("(?<=Date: )(.*)", line)).group() - except: - errorMessage = "\"Date: [datetime]\"" - return self.__errorParsing(line, lineNumber + lineNum + 1, errorMessage) - - # Formatts the date to UTC - replyFromInfo["datetime"] = self.__getFormattedDate(dateStr) - - elif line.startswith("Cc: ") and newLineCounter == 1: - - replyFromInfo["cc"] = [] - - # Returns a list of tuples with email information - recipientsList = email.utils.getaddresses([line]) - - # Parses through the cc tuple list - for cc in recipientsList: - # Stores the cc information in a dictionary and appends it to the ccRecipientsList - replyFromInfo["cc"].append( - {"name": cc[0], - 
"email": cc[1]} - ) - - # Deletes reduntant lines from the message content in reverse order - for lineNum in sorted(linesToRemove, reverse=True): - replyContent.pop(lineNum) - - # Strips any unnecessary newlines or any delimiters frm the message content - replyFromInfo["content"] = self.__getFormattedSectionContent( - replyContent) - - replyFromInfo["headers"] = replyFromHeaders - - return replyFromInfo - - def __getFormattedSectionContent(self, sectionContent: list) -> list: - """Returns a list with message content that is stripped of unnecessary newlines and begining delimiters - Example: - *** Edited by: mph at: 02/21/20 10:27:16 ***\n - \n - Still need to rename machines - but the networking issue now seems to \n - be resolved via another ticket.\n - \n - \n - \n - \n - \n - Args: - sectionContent (list): The section content of a parsed section - Returns: - list: the section content of a parsed section without any delimiters and unnecessary newlines - """ - # Continually removes the first line of sectionContent if it is a newline or delimiter in each iteration - while len(sectionContent) > 1: - if (sectionContent[0] == "\n" or - sectionContent[0].startswith("*** Edited by: ") or - sectionContent[0].startswith("*** Replied by: ") or - sectionContent[0].startswith("*** Status updated by: ") or - sectionContent[0] == "=== Additional information supplied by user ===\n" or - sectionContent[0] == "===============================================\n" - ): - sectionContent.pop(0) - else: - # Breaks the loop if the first line isn't a newline or delimiter - break - - # Continually removes the last line of sectionContent if it is a newline or delimiter in each iteration - while len(sectionContent) > 1: - # Initializes the Length of sectionContent each iteration of the loop - sectionContentLength = len(sectionContent) - - if (sectionContent[sectionContentLength - 1] == "\n" or - sectionContent[sectionContentLength - - 1] == "===============================================\n" - ): - sectionContent.pop(sectionContentLength - 1) - else: - # Breaks the loop if the last line isn't a newline or delimiter - break - - return sectionContent - - def __errorParsing(self, line: str, lineNum: int, expectedSyntax: str) -> dict: - """Returns a dictionary with error parse information when a line is malformed - Example: - "*** Status updated by: ewhile at: 5/7/2020 10:59:11 *** sharing between\n" - Args: - line (str): line of that threw error - lineNum (int): line number in the item that threw error - expectedSyntax (str): a message stating the syntax the line should follow - Returns: - dict: a dictionary with these keys, - "type": "parse_error", - "datetime": time the error was encountered, - "file_path": path of the item with erroneos line, - "expected": expectedSyntax, - "got": line, - "line_num": lineNum - """ - errorDictionary = {} - - # Type - errorDictionary["type"] = "parse_error" - - # Dateime of the parse error - errorDictionary["datetime"] = self.__getFormattedDate( - str(datetime.datetime.now())) - - # Item filepath - errorDictionary["file_path"] = self.__path - - # Expected value - errorDictionary["expected"] = expectedSyntax - - # line that threw error - errorDictionary["got"] = line - - # line number that threw error - errorDictionary["line_num"] = lineNum - - # returns the error dictionary - return errorDictionary - - def __getSortedSections(self, sectionsList: list) -> list: - """Sorts the sections chronologically by datetime - Example: - [example] need to do - Args: - sections (list): the list of 
sections to be sorted - Returns: - list: a list of sections sorted by datetime - """ - sectionsLength = len(sectionsList) - sortedSections = [] - oldestSection = {} - - while len(sortedSections) < sectionsLength: - - for iteration, currentSection in enumerate(sectionsList): - - if currentSection["type"] == "directory_information": - sortedSections.append(currentSection) - sectionsList.remove(currentSection) - break - - if iteration == 0: - oldestSection = currentSection - - #datetime.datetime.strptime(date_time_str, '%Y-%m-%d %H:%M:%S.%f') - - elif parse(currentSection["datetime"]) < parse(oldestSection["datetime"]): - oldestSection = currentSection - - if iteration == len(sectionsList) - 1: - sortedSections.append(oldestSection) - sectionsList.remove(oldestSection) - - return sortedSections - - def __isLocked(self) -> Union[str, bool]: - """Returns a string info about the lock if true and a bool False if false - Example: A file is locked - "CE 100 is locked by campb303 using qvi" - Example: a file is not locked - False - Returns: - Union[str, bool]: String with info about lock if true, bool False if false - """ - lockFile = self.__path + ".lck" - if os.path.exists(lockFile): - with open(lockFile) as file: - lockInfo = file.readline().split(" ") - lockedBy = lockInfo[4] - lockedUsing = lockInfo[1] - return "{queue} {number} is locked by {lockedBy} using {lockedUsing}".format(queue=self.queue, number=self.number, lockedBy=lockedBy, lockedUsing=lockedUsing) - else: - return False - - def __getMostRecentHeaderByType(self, headerType: str) -> str: - """Return the data of most recent header of the given type. - If no header of that type exists, return an empty string. - Example: Requesting a Status header that does exist - __getMostRecentHeaderByType("Status") - becomes "Waiting for Reply" - Example: Requesting a Status header that doesn't exist - __getMostRecentHeaderByType("Status") - becomes "" - Args: - headerType (str): Type of header to return. - Returns: - str: data of most recent header of the given type or empty string. - """ - for header in self.headers: - if header["type"] == headerType: - return header["content"] - return "" - - def __parseFromData(self, data: str) -> str: - """Parse From header and return requested data. - Returns empty string if requested data is unavailable. - Examples: From data is "From: Campbell, Justin " - __parseFromData(data="userName") returns "Campbell, Justin" - __parseFromData(data="userEmail") returns "campb303@purdue.edu" - Args: - data (str): The data desired; can be "userName" or "userEmail". - Returns: - str: userName, userEmail or empty string. - """ - fromHeader = self.__getMostRecentHeaderByType("From") - userName, userEmail = email.utils.parseaddr(fromHeader) - - if data == "userName": - return userName - elif data == "userEmail": - return userEmail - else: - raise ValueError( - "data='" + str(data) + "' is not a valid option. data must be \"userName\" or \"userEmail\".") - - def __getUserAlias(self) -> str: - """Returns user's Career Account alias if present. - If Career Account alias isn't present, returns empty string. 
- Example: Email from campb303@purdue.edu - userAlias = "campb303" - Example: Email from spam@spammer.net - userAlias = "" - Returns: - str: User's Career Account alias if present or empty string - """ - - - try: - emailUser, emailDomain = self.userEmail.split("@") - - # Returns an error parse if the self.useremail doesn't contain exactally one "@" symbol - except ValueError: - # Parses through the self.headers list to find the "From" header and its line number - for lineNum, header in enumerate(self.headers): - if header["type"] == "From": - headerString = header["type"] + ": " + header["content"] - return self.__errorParsing(headerString, lineNum + 1, "Expected valid email Address") - - return emailUser if emailDomain.endswith("purdue.edu") else "" - - def __getFormattedDate(self, date: str) -> str: - """Returns the date/time formatted as RFC 8601 YYYY-MM-DDTHH:MM:SS+00:00. - Returns empty string if the string argument passed to the function is not a datetime. - See: https://en.wikipedia.org/wiki/ISO_8601 - Returns: - str: Properly formatted date/time recieved or empty string. - """ - try: - # This date is never meant to be used. The default attribute is just to set timezone. - parsedDate = parse(date, default=datetime.datetime( - 1970, 1, 1, tzinfo=tz.gettz('EDT'))) - except: - return "" - - parsedDateString = parsedDate.strftime("%Y-%m-%dT%H:%M:%S%z") - - return parsedDateString - - def toJson(self) -> dict: - """Returns a JSON safe representation of the item. - Returns: - dict: JSON safe representation of the item. - """ - return self.jsonData - - def __repr__(self) -> str: - return self.queue + str(self.number) - -class Queue: - """A collection of Items. - Example: - # Create a queue (ce) - >>> queue = Queue("ce") - # Create a queue without parsing item contents (ce) - >>> queue = Queue("ce", headersOnly=False) - Args: - queue (str): The name of the queue. - headersOnly (bool, optional): Whether or not to parse headers only. Defaults to True. - Attributes: - name: The name of the queue. - items: A list of Items in the queue. - jsonData: A JSON serializable representation of the Queue. - """ - - def __init__(self, name: str, headersOnly: bool = True) -> None: - self.name = name - self.headersOnly = headersOnly - self.__directory = queueDirectory + "/" + self.name + "/" - self.items = self.__getItems() - self._index = 0 - - self.jsonData = { - "name": self.name, - "length": len(self) - } - - def __getItems(self) -> list: - """Returns a list of items for this Queue - Returns: - list: a list of items for this Queue - """ - items = [] - - for item in os.listdir(self.__directory): - itemPath = self.__directory + "/" + item - - isFile = True if os.path.isfile(itemPath) else False - - if isFile and isValidItemName(item): - items.append(Item(self.name, item, headersOnly=self.headersOnly)) - - return items - - def toJson(self) -> dict: - """Return JSON safe representation of the Queue - The JSON representation of every item in the Queue is added to the - Queue's JSON data then the Queue's JSON data is returned. - Returns: - dict: JSON safe representation of the Queue - """ - items = [] - for item in self.items: - items.append(item.toJson()) - self.jsonData["items"] = items - - return self.jsonData - - def __len__(self) -> int: - return len(self.items) - - def __repr__(self) -> str: - return f'{self.name}_queue' - - # Implements the interable interface requirements by passing direct references - # to the item list's interable values. 
- def __iter__(self) -> list: - return iter(self.items) - def __next__(self) -> int: - return iter(self.items).__next__() - -def getValidQueues() -> list: - """Returns a list of queues on the filesystem excluding ignored queues. - Example: - ["bidc", "me", "ce"] - Returns: - list: Valid queues - """ - queues = [] - - for file in os.listdir(queueDirectory): - currentFile = queueDirectory + "/" + file - isDirectory = os.path.isdir(currentFile) - isValid = file not in queuesToIgnore - - if isDirectory and isValid: - queues.append(file) - - return queues - -def getQueueCounts() -> list: - """Returns a list of dictionaries with the number of items in each queue. - Example: - [ - { - name: "me", - number_of_items: 42 - }, - { - name: "bidc", - number_of_items: 3 - } - ] - Returns: - list: Dictionaries with the number of items in each queue. - """ - queueInfo = [] - for queue in getValidQueues(): - possibleItems = os.listdir(queueDirectory + "/" + queue) - validItems = [file for file in possibleItems if isValidItemName(file)] - queueInfo.append( {"name": queue, "number_of_items": len(validItems)} ) - - # Sorts list of queue info alphabetically - sortedQueueInfo = sorted(queueInfo, key = lambda queueInfoList: queueInfoList['name']) - - return sortedQueueInfo - -def loadAllQueues(headersOnly: bool = True) -> list: - """Return a list of Queues for each queue. - Example: - # Load all Queues without parsing Item content - >>> loadAllQueues(); - Load all Queues and parsing Item content - >>> loadAllQueues(headersOnly=False) - Args: - headersOnly (bool, optional): Whether or not to parse headers only. Defaults to True. - Returns: - list: List of Queues for each queue. - """ - queues = [] - - for queue in getValidQueues(): - queues.append(Queue(queue, headersOnly=headersOnly)) - - return queues \ No newline at end of file From e7e648d5af0fa0f56a5bd7e2ff199c14d36a38b0 Mon Sep 17 00:00:00 2001 From: benne238 Date: Fri, 2 Apr 2021 16:58:47 -0400 Subject: [PATCH 05/43] modified __init__.py import statement in the ECNQueue subpackage --- webqueue2_api/ECNQueue/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webqueue2_api/ECNQueue/__init__.py b/webqueue2_api/ECNQueue/__init__.py index 891f5f2..767bf72 100644 --- a/webqueue2_api/ECNQueue/__init__.py +++ b/webqueue2_api/ECNQueue/__init__.py @@ -1,4 +1,4 @@ -from webqueue2_api.ECNQueue import Item, Queue, utils, parser +from webqueue2_api.ECNQueue import utils from os import environ import dotenv From 5cd589f03adea81ab5484747286ca0f91cb38304 Mon Sep 17 00:00:00 2001 From: benne238 Date: Fri, 2 Apr 2021 16:59:34 -0400 Subject: [PATCH 06/43] created Item.py with the Item class in the ECNQueue sub package --- webqueue2_api/ECNQueue/Item.py | 72 ++++++++++++++++++++++++++++++++++ 1 file changed, 72 insertions(+) create mode 100644 webqueue2_api/ECNQueue/Item.py diff --git a/webqueue2_api/ECNQueue/Item.py b/webqueue2_api/ECNQueue/Item.py new file mode 100644 index 0000000..0730659 --- /dev/null +++ b/webqueue2_api/ECNQueue/Item.py @@ -0,0 +1,72 @@ +from webqueue2_api.ECNQueue.__init__ import queue_directory, queues_to_ignore +from webqueue2_api.ECNQueue.parser import parser +import os +queueDirectory = queue_directory + +class Item: + """A chronological representation of an interaction with a user. + Example: + # Create an Item (ce100) + >>> item = Item("ce", 100, headersOnly=false) + # Create an Item without parsing its contents (ce100) + >>> item = Item("ce", 100, headersOnly=True) + Args: + queue (str): The name of the Item's queue. 
+ number (int): The number of the Item. + headersOnly (bool, optional): Whether or not to parse headers only. Defaults to True. + Attributes: + lastUpdated: An ISO 8601 formatted time string showing the last time the file was updated according to the filesystem. + headers: A list of dictionaries containing header keys and values. + content: A list of section dictionaries (only included if headersOnly is False). + isLocked: A boolean showing whether or not a lockfile for the item is present. + userEmail: The email address of the person who this item is from. + userName: The real name of the person who this item is from. + userAlias: The Purdue career account alias of the person this item is from. + assignedTo: The Purdue career account alias of the person this item is assigned to + subject: The subject of the original message for this item. + status: The most recent status update for the item. + priority: The most recent priority for this item. + department: The most recent department for this item. + dateReceived: The date this item was created. + jsonData: A JSON serializable representation of the Item. + Raises: + ValueError: When the number passed to the constructor cannot be parsed. + """ + + def __init__(self, queue: str, number: int, headersOnly: bool = False) -> None: + self.queue = queue + try: + self.number = int(number) + except ValueError: + raise ValueError(f'Could not convert "{number}" to an integer') + self.__path = "/".join([queueDirectory, self.queue, str(self.number)]) + self.lastUpdated = parser.getLastUpdated(self.__path) + self.__rawItem = parser.getRawItem(self.__path) + self.headers = parser.parseHeaders(self.__rawItem) + if not headersOnly: self.content = parser.parseSections(self.headers, self.__rawItem) + self.isLocked = parser.isLocked(self.__path, self.queue, self.number) + self.userEmail = parser.parseFromData(self.headers, data="userEmail") + self.userName = parser.parseFromData(self.headers, data="userName") + self.userAlias = parser.getUserAlias(self.headers, self.userEmail) + self.assignedTo = parser.getMostRecentHeaderByType(self.headers, "Assigned-To") + self.subject = parser.getMostRecentHeaderByType(self.headers, "Subject") + self.status = parser.getMostRecentHeaderByType(self.headers, "Status") + self.priority = parser.getMostRecentHeaderByType(self.headers, "Priority") + self.department = parser.getMostRecentHeaderByType(self.headers, "Department") + self.building = parser.getMostRecentHeaderByType(self.headers, "Building") + self.dateReceived = parser.getFormattedDate(parser.getMostRecentHeaderByType(self.headers, "Date")) + self.jsonData = {} + + for attribute in self.__dir__(): + if "_" not in attribute and attribute != "toJson" and attribute != "jsonData": + self.jsonData[attribute] = self.__getattribute__(attribute) + + def toJson(self) -> dict: + """Returns a JSON safe representation of the item. + Returns: + dict: JSON safe representation of the item. 
+ """ + return self.jsonData + + def __repr__(self) -> str: + return self.queue + str(self.number) \ No newline at end of file From f97082cde73e4c8c882c18b0e1fed70001ee4b14 Mon Sep 17 00:00:00 2001 From: benne238 Date: Fri, 2 Apr 2021 17:00:07 -0400 Subject: [PATCH 07/43] create Queue module with the Queue class in the ECNQueue subpackage --- webqueue2_api/ECNQueue/Queue.py | 61 +++++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) create mode 100644 webqueue2_api/ECNQueue/Queue.py diff --git a/webqueue2_api/ECNQueue/Queue.py b/webqueue2_api/ECNQueue/Queue.py new file mode 100644 index 0000000..36decb9 --- /dev/null +++ b/webqueue2_api/ECNQueue/Queue.py @@ -0,0 +1,61 @@ +from webqueue2_api.ECNQueue import utils, Item +from webqueue2_api.ECNQueue.parser import parser +from webqueue2_api.ECNQueue.__init__ import queue_directory +from os import listdir, path + +queueDirectory = queue_directory + +class Queue: + """A collection of Items. + Example: + # Create a queue (ce) + >>> queue = Queue("ce") + # Create a queue without parsing item contents (ce) + >>> queue = Queue("ce", headersOnly=False) + Args: + queue (str): The name of the queue. + headersOnly (bool, optional): Whether or not to parse headers only. Defaults to True. + Attributes: + name: The name of the queue. + items: A list of Items in the queue. + jsonData: A JSON serializable representation of the Queue. + """ + + def __init__(self, name: str, headersOnly: bool = True) -> None: + self.name = name + self.headersOnly = headersOnly + self.__directory = queueDirectory + "/" + self.name + "/" + self.items = parser.getItems(self.name, self.headersOnly) + self._index = 0 + + self.jsonData = { + "name": self.name, + "length": len(self) + } + + def toJson(self) -> dict: + """Return JSON safe representation of the Queue + The JSON representation of every item in the Queue is added to the + Queue's JSON data then the Queue's JSON data is returned. + Returns: + dict: JSON safe representation of the Queue + """ + items = [] + for item in self.items: + items.append(item.toJson()) + self.jsonData["items"] = items + + return self.jsonData + + def __len__(self) -> int: + return len(self.items) + + def __repr__(self) -> str: + return f'{self.name}_queue' + + # Implements the interable interface requirements by passing direct references + # to the item list's interable values. 
+ def __iter__(self) -> list: + return iter(self.items) + def __next__(self) -> int: + return iter(self.items).__next__() \ No newline at end of file From d4662b38855762cc2cb43136c04b9608ca5e9896 Mon Sep 17 00:00:00 2001 From: benne238 Date: Fri, 2 Apr 2021 17:00:39 -0400 Subject: [PATCH 08/43] creation of the __init__.py for the parser subpackage of the ECNQueue subpackage --- webqueue2_api/ECNQueue/parser/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/webqueue2_api/ECNQueue/parser/__init__.py b/webqueue2_api/ECNQueue/parser/__init__.py index e69de29..5f6950d 100644 --- a/webqueue2_api/ECNQueue/parser/__init__.py +++ b/webqueue2_api/ECNQueue/parser/__init__.py @@ -0,0 +1 @@ +from webqueue2_api.ECNQueue.parser import parser \ No newline at end of file From c70cb00bc8dab9e5cfee1bcf0a7a8b4a592bc6e8 Mon Sep 17 00:00:00 2001 From: benne238 Date: Fri, 2 Apr 2021 17:01:46 -0400 Subject: [PATCH 09/43] creation of the parser.py in the parser subpackage --- webqueue2_api/ECNQueue/parser/parser.py | 1058 +++++++++++++++++++++++ 1 file changed, 1058 insertions(+) create mode 100644 webqueue2_api/ECNQueue/parser/parser.py diff --git a/webqueue2_api/ECNQueue/parser/parser.py b/webqueue2_api/ECNQueue/parser/parser.py new file mode 100644 index 0000000..7fcd7ab --- /dev/null +++ b/webqueue2_api/ECNQueue/parser/parser.py @@ -0,0 +1,1058 @@ +from webqueue2_api.ECNQueue import Item, Queue, utils +from webqueue2_api.ECNQueue.__init__ import queue_directory, queues_to_ignore +import os +from time import strftime, localtime +from re import compile, search +from dateutil.parser import parse +from dateutil import tz +from typing import Union +import email, datetime + +queueDirectory = queue_directory + +def getItems(name, headersOnly) -> list: + """Returns a list of items for this Queue + Returns: + list: a list of items for this Queue + """ + items = [] + + for item in os.listdir(queueDirectory): + itemPath = queueDirectory + "/" + item + + isFile = True if os.path.isfile(itemPath) else False + + if isFile and utils.isValidItemName(item): + items.append(Item.Item(name, item, headersOnly=headersOnly)) + + return items + +def getLastUpdated(path) -> str: + """Returns last modified time of item reported by the filesystem in mm-dd-yy hh:mm am/pm format. + Example: + 07-23-20 10:34 AM + Returns: + str: last modified time of item reported by the filesystem in mm-dd-yy hh:mm am/pm format. + """ + # TODO: Simplify this code block by allowing __getFormattedDate to accept milliseconds since the epoch. + unixTime = os.path.getmtime(path) + formattedTime = strftime( + '%m-%d-%y %I:%M %p', localtime(unixTime)) + return getFormattedDate(formattedTime) + +def getRawItem(path) -> list: + """Returns a list of all lines in the item file + Returns: + list: List of all the lines in the item file + """ + with open(path, errors="replace") as file: + return file.readlines() + +def getHeaderBoundary(rawItem) -> int: + """Returns the 0 based line number where the Item headers stop. + Example: The header end would be on line 13 + 12: X-ECN-Queue-Original-URL: + 13: + 14: I need help. + Returns: + int: line number where the Item headers end + """ + for lineNumber, line in enumerate(rawItem): + if line == "\n": + return lineNumber + +def parseHeaders(rawItem) -> list: + """Returns a list containing dictionaries of header type and data. + Removes queue prefixes and whitespace. 
+ Examples: + "[ce] QStatus: Dont Delete\\nFrom: Justin Campbell \\n" + becomes + [ + {"QStatus": "Don't Delete"}, + {"From": "Justin Campbell "} + ] + Returns: + list: Header dicts + """ + headerString = "" + + # Remove '[queue] ' prefixes: + # Example: + # [ce] QTime-Updated-By: campb303 becomes + # QTime-Updated-By: campb303 + queuePrefixPattern = compile(r"\[.*?\] {1}") + for lineNumber in range(getHeaderBoundary(rawItem)): + line = rawItem[lineNumber] + lineHasQueuePrefix = queuePrefixPattern.match(line) + + if lineHasQueuePrefix: + queuePrefix = line[lineHasQueuePrefix.regs[0] + [0]: lineHasQueuePrefix.regs[0][1]] + line = line.replace(queuePrefix, "") + + headerString += line + + # message = email.message_from_string(headerString + "".join(self.__getContent())) + message = email.message_from_string(headerString) + + headers = [] + dateHeaders=[ + "QStatus-Updated-Time", + "Status-Updated-Time", + "Edited-Time", + "QTime-Updated-Time", + "Merged-Time", + "Time-Updated-Time", + "Replied-Time", + "Assigned-To-Updated-Time", + "QAssigned-To-Updated-Time", + "Date", + "Sent" + ] + + for key in message.keys(): + headers.append({"type": key, "content": getFormattedDate(message[key]) if key in dateHeaders else message[key]}) + + return headers + +# TODO: Implement attachment parsing + +def parseSections(headers, rawItem) -> list: + # List of all item events + sections = [] + + contentStart = getHeaderBoundary(rawItem) + 1 + contentEnd = len(rawItem) - 1 + + # List of assignments for the item + assignementLsit = assignmentParsing(rawItem, contentStart) + + # Appends each assignment individually to sections + for assignment in assignementLsit: + sections.append(assignment) + + # Checks for empty content within an item and returns and + if contentEnd <= contentStart: + blankInitialMessage = initialMessageParsing(rawItem, [""]) + sections.append(blankInitialMessage) + return sections + + # Checks for Directory Identifiers + if rawItem[contentStart] == "\n" and rawItem[contentStart + 1].startswith("\t"): + + directoryStartLine = contentStart + 1 + + # Parses the directory information and returns a dictionary of directory values + directoryInfo = directoryParsing(rawItem, directoryStartLine) + + # Appends Directory Information into the sections array + sections.append(directoryInfo) + + # Sets the initial message start to the next line after all directory lines and newlines + contentStart = contentStart + len(directoryInfo) + 1 + + # The start line, type, and end line for item events + sectionBoundaries = [] + + # Delimiter info + delimiters = [ + {"name": "edit", "pattern": "*** Edited"}, + {"name": "status", "pattern": "*** Status"}, + {"name": "replyToUser", "pattern": "*** Replied"}, + {"name": "replyFromUser", "pattern": "=== "}, + ] + + # Signifies that there is an initial message to parse + initialMessageSection = True + + # Parses the entire contents of the message, stores everything before any delimiter as the initial message + # and the line number of any delimiters as well as the type + for lineNumber in range(contentStart, contentEnd + 1): + + line = rawItem[lineNumber] + + # Looks for a starting delimiter and explicity excludes the reply-from-user ending delimiter + if (line.startswith("*** Edited by: ") or + line.startswith("*** Replied by: ") or + line.startswith("*** Status updated by: ") or + line == "=== Additional information supplied by user ===\n" and not + line == "===============================================\n" + ): + + # Sets the delimiter type based on the pattern 
within the delimiters list + for delimiter in delimiters: + + if line.startswith(delimiter["pattern"]): + sectionBoundaries.append( + {"start": lineNumber, "type": delimiter["name"]}) + break + + # If a starting delimiter was encountered, then there is no initial message + if initialMessageSection: + initialMessageSection = False + + elif initialMessageSection == True: + # Delimiter not encountered yet, so append initial message starting line as the current lin number + sectionBoundaries.append( + {"start": lineNumber, "type": "initial_message"}) + initialMessageSection = False + + # Used to set the end line of the last delimiter + sectionBoundaries.append({"start": contentEnd + 1}) + + # Sets the end of the section boundary to the begining of the next section boundary + for boundaryIndex in range(0, len(sectionBoundaries) - 1): + + sectionBoundaries[boundaryIndex]["end"] = sectionBoundaries[boundaryIndex + 1]["start"] + + # Remove End of File boundary since the line number has been assigned to the last delimiter + del sectionBoundaries[-1] + + # Parses through all the boundaries in section boundaries + for boundary in sectionBoundaries: + + # Sets line to the first line of the boundary (which is always the delimiter) + line = rawItem[boundary["start"]] + + # Returns all of the lines within the current section + sectionContent = rawItem[boundary["start"]: boundary["end"]] + + # Appends an initial message dictionary to sections + if boundary["type"] == "initial_message": + initialMessageDictionary = initialMessageParsing(headers, + sectionContent) + sections.append(initialMessageDictionary) + + elif boundary["type"] == "edit": + # Returns a dictionary with edit information + editInfo = editParsing( + sectionContent, boundary["start"]) + + # Checks for a parse error and appends it, returning the sections list which stops the parsing + if editInfo["type"] == "parse_error": + sections.append(editInfo) + return getSortedSections(sections) + + # Appends the edit dictionary to sections + sections.append(editInfo) + + elif boundary["type"] == "replyToUser": + # Returns a dictionary with reply-to information + replyToInfo = replyToParsing( + sectionContent, boundary["start"]) + + # Checks for a parse error and appends it, returning the sections list which stops the parsing + if replyToInfo["type"] == "parse_error": + sections.append(replyToInfo) + return getSortedSections(sections) + + # Appends the reply-to to sections + sections.append(replyToInfo) + + elif boundary["type"] == "status": + # Returns a dictionary with status information + statusInfo = statusParsing( + sectionContent, boundary["start"]) + + if statusInfo["type"] == "parse_error": + sections.append(statusInfo) + return getSortedSections(sections) + + # Appends the status to sections + sections.append(statusInfo) + + elif boundary["type"] == "replyFromUser": + # Returns a dictionary with userReply information + replyFromInfo = userReplyParsing( + sectionContent, boundary["start"]) + + if replyFromInfo["type"] == "parse_error": + sections.append(replyFromInfo) + return getSortedSections(sections) + + # Appends the replyFrom to sections + sections.append(replyFromInfo) + + sortedSections = getSortedSections(sections) + + return sortedSections + # return sections + +def directoryParsing(rawItem: str, directoryStartLine: int) -> dict: + """Returns a dictionary with directory information + Example: + Name: Nestor Fabian Rodriguez Buitrago + Login: rodri563 + Computer: ce-205-38 (128.46.205.67) + Location: HAMP G230 + Email: 
rodri563@purdue.edu + Phone: 7654766893 + Office: HAMP G230 + UNIX Dir: /home/bridge/b/rodri563 + Zero Dir: U=\\bridge.ecn.purdue.edu\rodri563 + User ECNDB: http://eng.purdue.edu/jump/2e8399a + Host ECNDB: http://eng.purdue.edu/jump/2e83999 + Subject: Autocad installation + Args: + directoryStartLine (int): line number within the item that the directory starts on + Returns: + dict: dictionary that splits each line within the directory into a key and a value + """ + directoryInformation = {"type": "directory_information"} + + directoryPossibleKeys = [ + "Name", + "Login", + "Computer", + "Location", + "Email", + "Phone", + "Office", + "UNIX Dir", + "Zero Dir", + "User ECNDB", + "Host ECNDB", + "Subject" + ] + # Executies until the directory start line is greater than the directory ending line + while True: + + # Returns the line number at directory start line + info = rawItem[directoryStartLine] + + # Breaks the loop if it encountrs a newline, signifying the end of the directory information + if info == "\n": + + break + + else: + + # Removes white including space, newlines, and tabs from the directory info line + strippedInfo = info.strip() + + # Attempts to find ": " but will accept ":", denoting a blank entry for a directory item + if ": " in strippedInfo: + + # Seperates the directory info line into two variables, the first variable being the key, the second being the value + # swt1 + key, value = strippedInfo.split(": ", 1) + + if key in directoryPossibleKeys: + # Adds the key value pair to the directory info dictionary + directoryInformation[key] = value + else: + # Casts the list type on to a dictionary + dictionaryList = list(directoryInformation) + # Length of dictionary list + lenDictionaryList = len(dictionaryList) + # The last key appended to the directory dictionary + lastKeyAppended = dictionaryList[lenDictionaryList - 1] + + directoryInformation[lastKeyAppended] = directoryInformation[lastKeyAppended] + \ + " " + strippedInfo + + elif ":" in strippedInfo: + + # Seperates the directory info line into two variables, the first variable being the key, the second being the value + key, value = strippedInfo.split(":", 1) + + if key in directoryPossibleKeys: + # Adds the key value pair to the directory info dictionary + directoryInformation[key] = value + else: + # Casts the list type on to a dictionary + dictionaryList = list(directoryInformation) + # Length of dictionary list + lenDictionaryList = len(dictionaryList) + # The last key appended to the directory dictionary + lastKeyAppended = dictionaryList[lenDictionaryList - 1] + + directoryInformation[lastKeyAppended] = directoryInformation[lastKeyAppended] + \ + " " + strippedInfo + + # Signifies that this line belongs to the most previous line + elif ": " not in strippedInfo and ":" not in strippedInfo: + # Casts the list type on to a dictionary + dictionaryList = list(directoryInformation) + # Length of dictionary list + lenDictionaryList = len(dictionaryList) + # The last key appended to the directory dictionary + lastKeyAppended = dictionaryList[lenDictionaryList - 1] + + directoryInformation[lastKeyAppended] = directoryInformation[lastKeyAppended] + \ + " " + strippedInfo + # Counter to denote the end of the directory + directoryStartLine = directoryStartLine + 1 + + # Returns the directory information dictionary + return directoryInformation + +def assignmentParsing(rawItem: str, contentStart: int) -> list: + """Returns a list with assignment information dictionaries + Example: + Assigned-To: campb303 + 
Assigned-To-Updated-Time: Tue, 23 Jun 2020 13:27:00 EDT + Assigned-To-Updated-By: campb303 + Args: + contentStart (int): line number where the content starts + Returns: + list: [ + {"type": "assignment", + "datetime": datetime of the assignment, + "by": user who initiated the assignment, + "to": user who was assigned + }, + ] + """ + assignmentList = [] + + # Assignment Information + assignedBy = "" + assignedDateTime = "" + assignedTo = "" + + # Parses the header looking for assignment delimeters and stores info into their respective variables + for headerContent in range(0, contentStart): + + line = rawItem[headerContent] + + # Gets who the Item was assigned to + if line.startswith("Assigned-To: "): + + assignedTo = ( + search("(?<=Assigned-To: )(.*)", line)).group() + + # Gets the date the Item was assigned + elif line.startswith("Assigned-To-Updated-Time: "): + + dateFromLine = ( + search("(?<=Assigned-To-Updated-Time: )(.*)", line)).group() + + assignedDateTime = getFormattedDate(dateFromLine) + + # Gets who assigned the Item + elif line.startswith("Assigned-To-Updated-By: "): + + assignedBy = ( + search("(?<=Assigned-To-Updated-By: )(.*)", line)).group() + + # Appends the assignment to the sections list + assignmentList.append( + {"type": "assignment", + "datetime": assignedDateTime, + "by": assignedBy, + "to": assignedTo} + ) + + return assignmentList + +def initialMessageParsing(headers: list, content: list) -> dict: + """Returns a dictionary with initial message information + Example: + \n + Testtest\n + \n + Args: + content (list): content of the initial message + Returns: + dict: + "type": "initial_message", + "datetime": datetime the initial message was sent, + "from_name": from_name, + "from_email": user_email, + "to": [{email, name}], + "cc": [{email, name}], + "subject": initial message subject + "content": content of the initial message + """ + initialMessageDictionary = {} + + initialMessageDictionary["type"] = "initial_message" + + # Gets the initial message date from the header + rawMessageDateStr = getMostRecentHeaderByType(headers, "Date") + + # Sets datetime in the intialMessage dictionary to UTC formatted date + initialMessageDictionary["datetime"] = getFormattedDate( + rawMessageDateStr) + + initialMessageDictionary["from_name"] = parseFromData(headers, data="userName") + + initialMessageDictionary["from_email"] = parseFromData(headers, data="userEmail") + + # Stores list of dictionaries for the recipients of the initial message + initialMessageDictionary["to"] = [] + + # Parses the header looking for recipients of the initial message and stores it in a list of tuples + rawMessageRecipientsList = email.utils.getaddresses( + [getMostRecentHeaderByType(headers, "To")]) + + # Parses the CC list and stores the cc recipient information in a list of dictionaries + for recipients in rawMessageRecipientsList: + + initialMessageDictionary["to"].append( + {"name": recipients[0], + "email": recipients[1]} + ) + + # Stores list of dictionaries for CC information + initialMessageDictionary["cc"] = [] + + # Parses the header looking for CC recipients of the initial message and stores it in a list of tuples + rawMessageCCList = email.utils.getaddresses( + [getMostRecentHeaderByType(headers, "CC")]) + + # Parses the CC list and stores the cc recipient information in a list of dictionaries + for ccRecipients in rawMessageCCList: + + initialMessageDictionary["cc"].append( + {"name": ccRecipients[0], + "email": ccRecipients[1]} + ) + + initialMessageDictionary["subject"] = 
getMostRecentHeaderByType(headers, + "Subject") + + # Removes unecessary newlines from the begining and the end of the initial message + initialMessageDictionary["content"] = getFormattedSectionContent( + content) + + return initialMessageDictionary + +def editParsing(content: list, lineNum: int) -> dict: + """Returns a dictionary with edit information + Example: + *** Edited by: campb303 at: 06/23/20 13:27:56 ***\n + \n + This be an edit my boy\n + \n + \n + \n + Args: + content (list): content of an edit + lineNum (int): line number of an edit within an item + Returns: + dict: a dictionary with these keys, + "type": "edi", + "by": initiator of the edit, + "datetime": datetime of the edit, + "content": content of the edit + """ + + # Edit Info dictionary + editInfo = {} + + for count, line in enumerate(content): + if line == "===============================================\n": + errorMessage = "Reply-from-user ending delimter encountered without Reply-from-user starting delimter" + return errorParsing(line, lineNum + count + 1, errorMessage) + + editInfo["type"] = "edit" + + delimiterLine = content[0] + # Parses for the author of the edit, which is located between the "*** Edited by: " and " at:" substrings + try: + editInfo["by"] = ( + search("(?<=\*{3} Edited by: )(.*)(?= at:)", delimiterLine)).group() + except: + errorMessage = "*** Edited by: [username] at: [date and time] ***\n" + return errorParsing(delimiterLine, lineNum, errorMessage) + + try: + # Parses for the date and time of the edit, which is located between the " at: " and "***\n" substrings + dateTimeString = ( + search("(?<= at: )(.*)(?= \*\*\*\n)", delimiterLine)).group() + except: + # Returns an error message if there is no space after "at:" + errorMessage = "*** Edited by: [username] at: [date and time] ***\n" + return errorParsing(delimiterLine, lineNum, errorMessage) + + # Attempts to format the date and time into utc format + editInfo["datetime"] = getFormattedDate(dateTimeString) + + # Remove the delimiter String and unecessary newlines + editInfo["content"] = getFormattedSectionContent(content) + + return editInfo + +def replyToParsing(content: list, lineNum: int) -> dict: + """Returns a dictionary with reply to user information + Example: + *** Replied by: campb303 at: 06/23/20 13:28:18 ***\n + \n + This be a reply my son\n + \n + Justin\n + ECN\n + \n + Args: + content (list): content of a reply to user + lineNum (int): line number of a reply to user in an item + Returns: + dict: a dictionary with these keys, + "type": "reply_to_user", + "by": initiator of the reply to user, + "datetime": datetime of the reply to user, + "content": content of the reply to user + """ + replyInfo = {} + + replyInfo["type"] = "reply_to_user" + + delimiterLine = content[0] + + for count, line in enumerate(content): + if line == "===============================================\n": + errorMessage = "Reply-from-user ending delimter encountered without Reply-from-user starting delimter" + return errorParsing(line, lineNum + count + 1, errorMessage) + + try: + # Parses for the author of the reply, which is located between the "*** Replied by: " and " at:" substrings + replyInfo["by"] = ( + search("(?<=\*{3} Replied by: )(.*)(?= at:)", delimiterLine)).group() + except: + errorMessage = "*** Replied by: [username] at: [date and time] ***\n" + return errorParsing(delimiterLine, lineNum, errorMessage) + + # Parses for the date and time of the reply, which is located between the " at: " and "***\n" substrings + try: + dateTimeString = ( + 
search("(?<= at: )(.*)(?= \*\*\*\n)", delimiterLine)).group() + except: + errorMessage = "*** Replied by: [username] at: [date and time] ***\n" + return errorParsing(delimiterLine, lineNum, errorMessage) + + # Formats date to UTC + replyInfo["datetime"] = getFormattedDate(dateTimeString) + + replyInfo["content"] = getFormattedSectionContent(content) + + return replyInfo + +def statusParsing(content: list, lineNum: int) -> dict: + """Returns a dictionary with status information + Example: + *** Status updated by: campb303 at: 6/23/2020 13:26:55 ***\n + Dont Delete\n + Args: + content (list): The content of a status update + lineNum (int): The line number of a status update in an item + Returns: + dict: a dictionary with these keys, + "type": "status", + "by": initiator of the status update, + "datetime": datetime of the status update, + "content": content of the status update + """ + statusInfo = {} + + statusInfo["type"] = "status" + + delimiterLine = content[0] + + for count, line in enumerate(content): + if line == "===============================================\n": + errorMessage = "Reply-from-user ending delimter encountered without Reply-from-user starting delimter" + return errorParsing(line, lineNum + count + 1, errorMessage) + + # Parses for the author of the status change, which is located between the "*** Status updated by: " and " at:" substrings + try: + statusInfo["by"] = ( + search("(?<=\*{3} Status updated by: )(.*)(?= at:)", delimiterLine)).group() + except: + errorMessage = "*** Status updated by: [username] at: [date and time] ***\n" + + return errorParsing(delimiterLine, lineNum, errorMessage) + + # Parses for the date and time of the status change, which is located between the " at: " and "***\n" substrings + try: + dateTimeString = search( + "(?<= at: )(.*)(?= \*\*\*\n)", delimiterLine).group() + except: + errorMessage = "*** Status updated by: [username] at: [date and time] ***\n" + + return errorParsing(delimiterLine, lineNum, errorMessage) + + # Formats the date to UTC + statusInfo["datetime"] = getFormattedDate(dateTimeString) + + # Remove the delimiter String and unecessary newlines + statusInfo["content"] = getFormattedSectionContent(content) + + return statusInfo + +def userReplyParsing(replyContent: list, lineNumber: int) -> dict: + """Returns a dictionary with user reply information + Example: + === Additional information supplied by user ===\n + \n + Subject: Re: Beepboop\n + From: Justin Campbell \n + Date: Tue, 23 Jun 2020 13:30:45 -0400\n + X-ECN-Queue-Original-Path: /home/pier/e/queue/Attachments/inbox/2020-06-23/212-original.txt\n + X-ECN-Queue-Original-URL: https://engineering.purdue.edu/webqueue/Attachments/inbox/2020-06-23/212-original.txt\n + \n + Huzzah!\n + \n + ===============================================\n + \n + Args: + replyContent (list): The entire section of a reply-from-user + lineNumber (int): The line number of the begining of a reply-from-user section within and item + Returns: + dict: a dictionary with these keys, + "type": "reply_from_user", + "from_name": name of the user that sent the reply, + "from_email": email of the user that sent the reply, + "subject": subject of the reply, + "datetime": the datetime of the reply, + "cc": [ + {"name": name of the carbon copied recipient, + "email": email of the carbon copied recipient + }, + ] + "content": content of the reply + "headers": [ + {"type": headerType, + "content": content + }, + ] + """ + replyFromInfo = {} + + replyFromInfo["type"] = "reply_from_user" + + replyFromHeaders = [] 
+ newLineCounter = 0 + endingDelimiterCount = 0 + + # Delimiter information line numbers to remove from reply from user + linesToRemove = [] + + # Parses the section content looking for any line that starts with a metadata, also tracks the line + # number with the enumerate function + for lineNum, line in enumerate(replyContent): + + if endingDelimiterCount == 0 and lineNum == len(replyContent) - 1: + errorMessage = "Did not encounter a reply-from-user ending delimiter" + return errorParsing(line, lineNumber + lineNum + 1, errorMessage) + + if newLineCounter == 1 and line != "\n": + + try: + # Append header information for each headr line + headerType, content = line.split(": ", 1) + replyFromHeaders.append( + {"type": headerType, + "content": content + } + ) + except: + lenReplyFromHeaders = len(replyFromHeaders) + if lenReplyFromHeaders == 0: + errorMessage = ("Expected reply-from-user header information:\n" + + "=== Additional information supplied by user ===\n" + + "\n" + + "[Header Type]: [Header Value]\n" + + "\n" + ) + return errorParsing(line, lineNumber + lineNum + 1, errorMessage) + + else: + replyFromHeaders[lenReplyFromHeaders - + 1]["content"] = replyFromHeaders[lenReplyFromHeaders - 1]["content"] + " " + line + + linesToRemove.append(lineNum) + # Checks for a newline and breaks for loop on second occurance of a newline + if line == "\n": + newLineCounter = newLineCounter + 1 + + if newLineCounter == 2 and "datetime" not in replyFromInfo.keys(): + errorMessage = "Expected \"Date: [datetime]\" in the header info" + return errorParsing(line, lineNumber + lineNum + 1, errorMessage) + + elif line == "===============================================\n": + endingDelimiterCount = endingDelimiterCount + 1 + + elif line.startswith("From: ") and newLineCounter == 1: + # Returns a list of one tuples with a name stored in the first index of the tuple and an email stored in the second index of the tuple + emailList = email.utils.getaddresses([line]) + replyFromInfo["from_name"] = emailList[0][0] + replyFromInfo["from_email"] = emailList[0][1] + + elif line.startswith("Subject: ") and newLineCounter == 1: + # Matches everything after "Subject: " + try: + subjectStr = ( + search("(?<=Subject: )(.*)", line)).group() + except: + errorMessage = "Expeted syntax of \"Subject: [subject]\"" + return errorParsing(line, lineNumber + lineNum + 1, errorMessage) + + # Formatts the date to UTC + replyFromInfo["subject"] = subjectStr + + elif line.startswith("Date: ") and newLineCounter == 1: + # Matches everything after "Date: " + try: + dateStr = (search("(?<=Date: )(.*)", line)).group() + except: + errorMessage = "\"Date: [datetime]\"" + return errorParsing(line, lineNumber + lineNum + 1, errorMessage) + + # Formatts the date to UTC + replyFromInfo["datetime"] = getFormattedDate(dateStr) + + elif line.startswith("Cc: ") and newLineCounter == 1: + + replyFromInfo["cc"] = [] + + # Returns a list of tuples with email information + recipientsList = email.utils.getaddresses([line]) + + # Parses through the cc tuple list + for cc in recipientsList: + # Stores the cc information in a dictionary and appends it to the ccRecipientsList + replyFromInfo["cc"].append( + {"name": cc[0], + "email": cc[1]} + ) + + # Deletes reduntant lines from the message content in reverse order + for lineNum in sorted(linesToRemove, reverse=True): + replyContent.pop(lineNum) + + # Strips any unnecessary newlines or any delimiters frm the message content + replyFromInfo["content"] = getFormattedSectionContent( + replyContent) + + 
replyFromInfo["headers"] = replyFromHeaders + + return replyFromInfo + +def getFormattedSectionContent(sectionContent: list) -> list: + """Returns a list with message content that is stripped of unnecessary newlines and begining delimiters + Example: + *** Edited by: mph at: 02/21/20 10:27:16 ***\n + \n + Still need to rename machines - but the networking issue now seems to \n + be resolved via another ticket.\n + \n + \n + \n + \n + \n + Args: + sectionContent (list): The section content of a parsed section + Returns: + list: the section content of a parsed section without any delimiters and unnecessary newlines + """ + # Continually removes the first line of sectionContent if it is a newline or delimiter in each iteration + while len(sectionContent) > 1: + if (sectionContent[0] == "\n" or + sectionContent[0].startswith("*** Edited by: ") or + sectionContent[0].startswith("*** Replied by: ") or + sectionContent[0].startswith("*** Status updated by: ") or + sectionContent[0] == "=== Additional information supplied by user ===\n" or + sectionContent[0] == "===============================================\n" + ): + sectionContent.pop(0) + else: + # Breaks the loop if the first line isn't a newline or delimiter + break + + # Continually removes the last line of sectionContent if it is a newline or delimiter in each iteration + while len(sectionContent) > 1: + # Initializes the Length of sectionContent each iteration of the loop + sectionContentLength = len(sectionContent) + + if (sectionContent[sectionContentLength - 1] == "\n" or + sectionContent[sectionContentLength - + 1] == "===============================================\n" + ): + sectionContent.pop(sectionContentLength - 1) + else: + # Breaks the loop if the last line isn't a newline or delimiter + break + + return sectionContent + +def errorParsing(line: str, lineNum: int, expectedSyntax: str) -> dict: + """Returns a dictionary with error parse information when a line is malformed + Example: + "*** Status updated by: ewhile at: 5/7/2020 10:59:11 *** sharing between\n" + Args: + line (str): line of that threw error + lineNum (int): line number in the item that threw error + expectedSyntax (str): a message stating the syntax the line should follow + Returns: + dict: a dictionary with these keys, + "type": "parse_error", + "datetime": time the error was encountered, + "file_path": path of the item with erroneos line, + "expected": expectedSyntax, + "got": line, + "line_num": lineNum + """ + errorDictionary = {} + + # Type + errorDictionary["type"] = "parse_error" + + # Dateime of the parse error + errorDictionary["datetime"] = getFormattedDate( + str(datetime.datetime.now())) + + # Item filepath + errorDictionary["file_path"] = os.path + + # Expected value + errorDictionary["expected"] = expectedSyntax + + # line that threw error + errorDictionary["got"] = line + + # line number that threw error + errorDictionary["line_num"] = lineNum + + # returns the error dictionary + return errorDictionary + +def getSortedSections(sectionsList: list) -> list: + """Sorts the sections chronologically by datetime + Example: + [example] need to do + Args: + sections (list): the list of sections to be sorted + Returns: + list: a list of sections sorted by datetime + """ + sectionsLength = len(sectionsList) + sortedSections = [] + oldestSection = {} + + while len(sortedSections) < sectionsLength: + + for iteration, currentSection in enumerate(sectionsList): + + if currentSection["type"] == "directory_information": + sortedSections.append(currentSection) + 
sectionsList.remove(currentSection) + break + + if iteration == 0: + oldestSection = currentSection + + #datetime.datetime.strptime(date_time_str, '%Y-%m-%d %H:%M:%S.%f') + + elif parse(currentSection["datetime"]) < parse(oldestSection["datetime"]): + oldestSection = currentSection + + if iteration == len(sectionsList) - 1: + sortedSections.append(oldestSection) + sectionsList.remove(oldestSection) + + return sortedSections + +def isLocked(path, queue: str, number: int) -> Union[str, bool]: + """Returns a string info about the lock if true and a bool False if false + Example: A file is locked + "CE 100 is locked by campb303 using qvi" + Example: a file is not locked + False + Returns: + Union[str, bool]: String with info about lock if true, bool False if false + """ + lockFile = path + ".lck" + if os.path.exists(lockFile): + with open(lockFile) as file: + lockInfo = file.readline().split(" ") + lockedBy = lockInfo[4] + lockedUsing = lockInfo[1] + return "{queue} {number} is locked by {lockedBy} using {lockedUsing}".format(queue=queue, number=number, lockedBy=lockedBy, lockedUsing=lockedUsing) + else: + return False + +def getMostRecentHeaderByType(headers: list, headerType: str) -> str: + """Return the data of most recent header of the given type. + If no header of that type exists, return an empty string. + Example: Requesting a Status header that does exist + __getMostRecentHeaderByType("Status") + becomes "Waiting for Reply" + Example: Requesting a Status header that doesn't exist + __getMostRecentHeaderByType("Status") + becomes "" + Args: + headerType (str): Type of header to return. + Returns: + str: data of most recent header of the given type or empty string. + """ + for header in headers: + if header["type"] == headerType: + return header["content"] + return "" + +def parseFromData(headers: list, data: str) -> str: + """Parse From header and return requested data. + Returns empty string if requested data is unavailable. + Examples: From data is "From: Campbell, Justin " + __parseFromData(data="userName") returns "Campbell, Justin" + __parseFromData(data="userEmail") returns "campb303@purdue.edu" + Args: + data (str): The data desired; can be "userName" or "userEmail". + Returns: + str: userName, userEmail or empty string. + """ + fromHeader = getMostRecentHeaderByType(headers, "From") + userName, userEmail = email.utils.parseaddr(fromHeader) + + if data == "userName": + return userName + elif data == "userEmail": + return userEmail + else: + raise ValueError( + "data='" + str(data) + "' is not a valid option. data must be \"userName\" or \"userEmail\".") + +def getUserAlias(headers: list, userEmail: str) -> str: + """Returns user's Career Account alias if present. + If Career Account alias isn't present, returns empty string. 
+ Example: Email from campb303@purdue.edu + userAlias = "campb303" + Example: Email from spam@spammer.net + userAlias = "" + Returns: + str: User's Career Account alias if present or empty string + """ + + + try: + emailUser, emailDomain = userEmail.split("@") + + # Returns an error parse if the self.useremail doesn't contain exactally one "@" symbol + except ValueError: + # Parses through the self.headers list to find the "From" header and its line number + for lineNum, header in enumerate(headers): + if header["type"] == "From": + headerString = header["type"] + ": " + header["content"] + return errorParsing(headerString, lineNum + 1, "Expected valid email Address") + + return emailUser if emailDomain.endswith("purdue.edu") else "" + +def getFormattedDate(date: str) -> str: + """Returns the date/time formatted as RFC 8601 YYYY-MM-DDTHH:MM:SS+00:00. + Returns empty string if the string argument passed to the function is not a datetime. + See: https://en.wikipedia.org/wiki/ISO_8601 + Returns: + str: Properly formatted date/time recieved or empty string. + """ + try: + # This date is never meant to be used. The default attribute is just to set timezone. + parsedDate = parse(date, default=datetime.datetime( + 1970, 1, 1, tzinfo=tz.gettz('EDT'))) + except: + return "" + + parsedDateString = parsedDate.strftime("%Y-%m-%dT%H:%M:%S%z") + + return parsedDateString From 66b337cbfa2da37a7b0eae797fa917c50939382a Mon Sep 17 00:00:00 2001 From: benne238 Date: Fri, 2 Apr 2021 17:02:06 -0400 Subject: [PATCH 10/43] creation of the utils.py module in the ECNQueue subpackage --- webqueue2_api/ECNQueue/utils.py | 87 +++++++++++++++++++++++++++++++++ 1 file changed, 87 insertions(+) create mode 100644 webqueue2_api/ECNQueue/utils.py diff --git a/webqueue2_api/ECNQueue/utils.py b/webqueue2_api/ECNQueue/utils.py new file mode 100644 index 0000000..fabb85c --- /dev/null +++ b/webqueue2_api/ECNQueue/utils.py @@ -0,0 +1,87 @@ +from os import path, listdir +from re import compile +from webqueue2_api.ECNQueue import Queue +from webqueue2_api.ECNQueue.__init__ import queue_directory, queues_to_ignore + +queueDirectory = queue_directory +queuesToIgnore = queues_to_ignore + +def getValidQueues() -> list: + """Returns a list of queues on the filesystem excluding ignored queues. + Example: + ["bidc", "me", "ce"] + Returns: + list: Valid queues + """ + queues = [] + + for file in listdir(queueDirectory): + currentFile = queueDirectory + "/" + file + isDirectory = path.isdir(currentFile) + isValid = file not in queuesToIgnore + + if isDirectory and isValid: + queues.append(file) + + return queues + +def getQueueCounts() -> list: + """Returns a list of dictionaries with the number of items in each queue. + Example: + [ + { + name: "me", + number_of_items: 42 + }, + { + name: "bidc", + number_of_items: 3 + } + ] + Returns: + list: Dictionaries with the number of items in each queue. + """ + queueInfo = [] + for queue in getValidQueues(): + possibleItems = listdir(queueDirectory + "/" + queue) + validItems = [file for file in possibleItems if isValidItemName(file)] + queueInfo.append( {"name": queue, "number_of_items": len(validItems)} ) + + # Sorts list of queue info alphabetically + sortedQueueInfo = sorted(queueInfo, key = lambda queueInfoList: queueInfoList['name']) + + return sortedQueueInfo + +def isValidItemName(name: str) -> bool: + """Returns true if file name is a valid item name. + A file name is true if it contains between 1 and 3 integer numbers allowing for + any integer between 0 and 999. 
+ Example: + isValidItemName("21") -> true + isValidItemName("twentyone") -> false + Args: + name (str): The name to test. + Returns: + bool: Name is valid item name. + """ + itemPattern = compile("^[0123456789]{1,3}$") + return True if itemPattern.match(name) else False + +def loadAllQueues(headersOnly: bool = True) -> list: + """Return a list of Queues for each queue. + Example: + # Load all Queues without parsing Item content + >>> loadAllQueues(); + Load all Queues and parsing Item content + >>> loadAllQueues(headersOnly=False) + Args: + headersOnly (bool, optional): Whether or not to parse headers only. Defaults to True. + Returns: + list: List of Queues for each queue. + """ + queues = [] + + for queue in getValidQueues(): + queues.append(Queue.Queue(queue, headersOnly=headersOnly)) + + return queues \ No newline at end of file From d3f1c6adb0c4b3e03699fc1e551ea8cf4615b17a Mon Sep 17 00:00:00 2001 From: benne238 Date: Fri, 2 Apr 2021 17:18:05 -0400 Subject: [PATCH 11/43] fixed broken import in api.py --- webqueue2_api/api.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/webqueue2_api/api.py b/webqueue2_api/api.py index 0edcd61..9682366 100644 --- a/webqueue2_api/api.py +++ b/webqueue2_api/api.py @@ -10,7 +10,7 @@ from ldap.filter import escape_filter_chars # pylint says this is an error but it works so ¯\_(ツ)_/¯ from ldap import INVALID_CREDENTIALS as LDAP_INVALID_CREDENTIALS -from . import ECNQueue +from webqueue2_api import ECNQueue # Load envrionment variables for ./.env dotenv.load_dotenv() @@ -189,7 +189,7 @@ def get(self, queue: str, number: int) -> tuple: Returns: tuple: Item as JSON and HTTP response code. """ - return (ECNQueue.Item(queue, number).toJson(), 200) + return (ECNQueue.Item.Item(queue, number).toJson(), 200) class Queue(Resource): @jwt_required @@ -209,7 +209,7 @@ def get(self, queues: str) -> tuple: queue_list = [] for queue in queues_requested: - queue_list.append(ECNQueue.Queue(queue).toJson()) + queue_list.append(ECNQueue.Queue.Queue(queue).toJson()) return (queue_list, 200) @@ -236,7 +236,7 @@ def get(self) -> tuple: Returns: tuple: Queues and item counts as JSON and HTTP response code. 
""" - return (ECNQueue.getQueueCounts(), 200) + return (ECNQueue.utils.getQueueCounts(), 200) From 36695c8e978726b2d6fd8a843453afe1db8437e8 Mon Sep 17 00:00:00 2001 From: benne238 Date: Sun, 4 Apr 2021 22:27:33 -0400 Subject: [PATCH 12/43] added logger subpackage to webqueue2-api package --- webqueue2_api/logger/__init__.py | 44 ++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 webqueue2_api/logger/__init__.py diff --git a/webqueue2_api/logger/__init__.py b/webqueue2_api/logger/__init__.py new file mode 100644 index 0000000..3f2e5c6 --- /dev/null +++ b/webqueue2_api/logger/__init__.py @@ -0,0 +1,44 @@ +import logging +import configparser +import os + +logger_name = __name__ +logger = logging.getLogger(logger_name) +logger.setLevel(logging.DEBUG) + +# See Formatting Details: https://docs.python.org/3/library/logging.html#logrecord-attributes +# Example: Jan 28 2021 12:19:28 venv-manager : [INFO] Message +log_message_format = "%(asctime)s %(name)s : [%(levelname)s] %(message)s" +# See Time Formatting Details: https://docs.python.org/3.6/library/time.html#time.strftime +# Example: Jan 28 2021 12:19:28 +log_time_format = "%b %d %Y %H:%M:%S" +log_formatter = logging.Formatter(log_message_format, log_time_format) + +# Configure output to stdout +stream_handler = logging.StreamHandler() +stream_handler.setFormatter(log_formatter) +stream_handler.setLevel(logging.INFO) +logger.addHandler(stream_handler) + +# Configure out to logfile +config = configparser.ConfigParser() +configFile = 'webqueue2-api.cfg' +config.read(configFile) +if "Logger" not in config.sections(): + logger.debug(f"Logger section not definied in {configFile}") + +elif config.has_option("Logger", "LOGGER_OUT_FILE"): + if config["Logger"]["LOGGER_OUT_FILE"] == "": + logger.debug("LOGGER_OUT_FILE variable empty, not setting logger file path") + + elif not os.path.isdir(config["Logger"]["LOGGER_OUT_FILE"]): + logger.debug("LOGGER_OUT_FILE doesn't point to existing directory, not setting logger file path") + + else: + log_file_path = config["Logger"]["LOGGER_OUT_FILE"] + file_handler = logging.FileHandler(log_file_path) + logger.debug(f"Logger") + file_handler.setFormatter(log_formatter) + logger.addHandler(file_handler) +else: + logger.debug("LOGGER_OUT_FILE not defined in logger section, not setting logger path") \ No newline at end of file From ba6b0b03ef4c9c03e4ae40bf0eb4ee1f94d4a051 Mon Sep 17 00:00:00 2001 From: benne238 Date: Sun, 4 Apr 2021 22:28:58 -0400 Subject: [PATCH 13/43] changed relative import statements for the ENCQueue subpackage --- webqueue2_api/ECNQueue/Item.py | 6 +++--- webqueue2_api/ECNQueue/Queue.py | 6 +++--- webqueue2_api/ECNQueue/utils.py | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/webqueue2_api/ECNQueue/Item.py b/webqueue2_api/ECNQueue/Item.py index 0730659..010f55a 100644 --- a/webqueue2_api/ECNQueue/Item.py +++ b/webqueue2_api/ECNQueue/Item.py @@ -1,5 +1,5 @@ -from webqueue2_api.ECNQueue.__init__ import queue_directory, queues_to_ignore -from webqueue2_api.ECNQueue.parser import parser +from .__init__ import queue_directory, queues_to_ignore +from .parser import parser import os queueDirectory = queue_directory @@ -60,7 +60,7 @@ def __init__(self, queue: str, number: int, headersOnly: bool = False) -> None: for attribute in self.__dir__(): if "_" not in attribute and attribute != "toJson" and attribute != "jsonData": self.jsonData[attribute] = self.__getattribute__(attribute) - + def toJson(self) -> dict: """Returns a JSON safe representation 
of the item. Returns: diff --git a/webqueue2_api/ECNQueue/Queue.py b/webqueue2_api/ECNQueue/Queue.py index 36decb9..4b4689e 100644 --- a/webqueue2_api/ECNQueue/Queue.py +++ b/webqueue2_api/ECNQueue/Queue.py @@ -1,6 +1,6 @@ -from webqueue2_api.ECNQueue import utils, Item -from webqueue2_api.ECNQueue.parser import parser -from webqueue2_api.ECNQueue.__init__ import queue_directory +from . import utils, Item +from .parser import parser +from .__init__ import queue_directory from os import listdir, path queueDirectory = queue_directory diff --git a/webqueue2_api/ECNQueue/utils.py b/webqueue2_api/ECNQueue/utils.py index fabb85c..c427d0c 100644 --- a/webqueue2_api/ECNQueue/utils.py +++ b/webqueue2_api/ECNQueue/utils.py @@ -1,7 +1,7 @@ from os import path, listdir from re import compile -from webqueue2_api.ECNQueue import Queue -from webqueue2_api.ECNQueue.__init__ import queue_directory, queues_to_ignore +from . import Queue +from .__init__ import queue_directory, queues_to_ignore queueDirectory = queue_directory queuesToIgnore = queues_to_ignore From 5012826b614a8172b4c0899555e63ca2d1240d5a Mon Sep 17 00:00:00 2001 From: benne238 Date: Sun, 4 Apr 2021 22:29:18 -0400 Subject: [PATCH 14/43] added logging to the ECNQueue __init__ module --- webqueue2_api/ECNQueue/__init__.py | 35 +++++++++++++++++++++--------- 1 file changed, 25 insertions(+), 10 deletions(-) diff --git a/webqueue2_api/ECNQueue/__init__.py b/webqueue2_api/ECNQueue/__init__.py index 767bf72..bd127b4 100644 --- a/webqueue2_api/ECNQueue/__init__.py +++ b/webqueue2_api/ECNQueue/__init__.py @@ -1,13 +1,28 @@ -from webqueue2_api.ECNQueue import utils -from os import environ -import dotenv +from . import utils +import configparser +from ..logger.__init__ import logger -dotenv.load_dotenv("ECNQueue.cfg") +logger.name = __name__ -queue_directory = "/home/pier/e/queue/Mail" -if environ.get("QUEUE_DIRECTORY"): - queue_directory = environ.get("QUEUE_DIRECTORY") +config = configparser.ConfigParser() +config_file = "webqueue2-api.cfg" +config.read(config_file) -queues_to_ignore = ["archives", "drafts", "inbox", "coral"] -if environ.get("QUEUES_TO_IGNORE"): - queues_to_ignore = [queue for queue in environ.get("QUEUES_TO_IGNORE").split(",") if queue in utils.getValidQueues()] \ No newline at end of file +if "ECNQueue" not in config.sections(): + logger.debug(f"ECNQueue section not included in the {config_file} configuration file") + queue_directory = "/home/pier/e/queue/Mail" + logger.debug(f"queue_directory set to {queue_directory}") + queues_to_ignore = ["archives", "drafts", "inbox", "coral"] + logger.debug(f"queues_to_ignore set to {queues_to_ignore}") + +if config.has_option('ECNQueue', 'QUEUE_DIRECTORY'): + queue_directory = config['ECNQueue']['QUEUE_DIRECTORY'] + logger.debug(f"QUEUE_DIRECTORY included in config file, setting to {queue_directory}") +else: + logger.warning(f"QUEUE_DIRECTORY var not included in config file, defaulting to {queue_directory}") + +if config.has_option('ECNQueue', 'QUEUES_TO_IGNORE'): + queues_to_ignore = config['ECNQueue']['QUEUES_TO_IGNORE'] + logger.debug(f"QUEUES_TO_IGNORE included in config file, setting to {queues_to_ignore}") +else: + logger.warning(f"QUEUES_TO_IGNORE var not included in config file, defaulting to {queues_to_ignore}") \ No newline at end of file From cb85b06986f3ce3ef9f5b5c4cb568e2d511e0963 Mon Sep 17 00:00:00 2001 From: benne238 Date: Sun, 4 Apr 2021 22:29:49 -0400 Subject: [PATCH 15/43] modified relative import statments in the parser submodule of ECNQueue --- 
webqueue2_api/ECNQueue/parser/__init__.py | 2 +- webqueue2_api/ECNQueue/parser/parser.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/webqueue2_api/ECNQueue/parser/__init__.py b/webqueue2_api/ECNQueue/parser/__init__.py index 5f6950d..84ce1c5 100644 --- a/webqueue2_api/ECNQueue/parser/__init__.py +++ b/webqueue2_api/ECNQueue/parser/__init__.py @@ -1 +1 @@ -from webqueue2_api.ECNQueue.parser import parser \ No newline at end of file +from . import parser \ No newline at end of file diff --git a/webqueue2_api/ECNQueue/parser/parser.py b/webqueue2_api/ECNQueue/parser/parser.py index 7fcd7ab..4f951d7 100644 --- a/webqueue2_api/ECNQueue/parser/parser.py +++ b/webqueue2_api/ECNQueue/parser/parser.py @@ -1,5 +1,5 @@ -from webqueue2_api.ECNQueue import Item, Queue, utils -from webqueue2_api.ECNQueue.__init__ import queue_directory, queues_to_ignore +from .. import Item, Queue, utils +from ..__init__ import queue_directory, queues_to_ignore import os from time import strftime, localtime from re import compile, search From a5d04e159e929cff68ec206ba34f5c1b3ce234b5 Mon Sep 17 00:00:00 2001 From: benne238 Date: Mon, 12 Apr 2021 12:35:07 -0400 Subject: [PATCH 16/43] removal of Path import from the logger __init__.py --- webqueue2_api/logger/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/webqueue2_api/logger/__init__.py b/webqueue2_api/logger/__init__.py index 3f2e5c6..9137297 100644 --- a/webqueue2_api/logger/__init__.py +++ b/webqueue2_api/logger/__init__.py @@ -1,6 +1,7 @@ import logging import configparser import os +from pathlib import Path logger_name = __name__ logger = logging.getLogger(logger_name) @@ -35,7 +36,7 @@ logger.debug("LOGGER_OUT_FILE doesn't point to existing directory, not setting logger file path") else: - log_file_path = config["Logger"]["LOGGER_OUT_FILE"] + log_file_path = Path(config["Logger"]["LOGGER_OUT_FILE"], __name__ + ".log") file_handler = logging.FileHandler(log_file_path) logger.debug(f"Logger") file_handler.setFormatter(log_formatter) From 228ee1e82a05c24566faa2d61d10ff4c950f8e47 Mon Sep 17 00:00:00 2001 From: benne238 Date: Mon, 12 Apr 2021 12:35:50 -0400 Subject: [PATCH 17/43] fixed broken file paths in parser --- webqueue2_api/ECNQueue/parser/parser.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/webqueue2_api/ECNQueue/parser/parser.py b/webqueue2_api/ECNQueue/parser/parser.py index 4f951d7..da0811a 100644 --- a/webqueue2_api/ECNQueue/parser/parser.py +++ b/webqueue2_api/ECNQueue/parser/parser.py @@ -1,4 +1,4 @@ -from .. 
import Item, Queue, utils +from webqueue2_api.ECNQueue import Item, Queue, utils from ..__init__ import queue_directory, queues_to_ignore import os from time import strftime, localtime @@ -17,8 +17,8 @@ def getItems(name, headersOnly) -> list: """ items = [] - for item in os.listdir(queueDirectory): - itemPath = queueDirectory + "/" + item + for item in os.listdir(queueDirectory + "/" + name + "/"): + itemPath = queueDirectory + "/" + name + "/" + item isFile = True if os.path.isfile(itemPath) else False @@ -117,10 +117,13 @@ def parseHeaders(rawItem) -> list: # TODO: Implement attachment parsing -def parseSections(headers, rawItem) -> list: +def parseSections(headers, rawItem, filePath) -> list: # List of all item events sections = [] + global file_path + file_path = filePath + contentStart = getHeaderBoundary(rawItem) + 1 contentEnd = len(rawItem) - 1 @@ -904,7 +907,7 @@ def errorParsing(line: str, lineNum: int, expectedSyntax: str) -> dict: str(datetime.datetime.now())) # Item filepath - errorDictionary["file_path"] = os.path + errorDictionary["file_path"] = file_path # Expected value errorDictionary["expected"] = expectedSyntax From db88fd40b1809a3dcc9c1975541d0cb93f546c3e Mon Sep 17 00:00:00 2001 From: benne238 Date: Mon, 12 Apr 2021 12:37:33 -0400 Subject: [PATCH 18/43] modified parsing argument in Item --- webqueue2_api/ECNQueue/Item.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/webqueue2_api/ECNQueue/Item.py b/webqueue2_api/ECNQueue/Item.py index 010f55a..4d9af93 100644 --- a/webqueue2_api/ECNQueue/Item.py +++ b/webqueue2_api/ECNQueue/Item.py @@ -43,7 +43,7 @@ def __init__(self, queue: str, number: int, headersOnly: bool = False) -> None: self.lastUpdated = parser.getLastUpdated(self.__path) self.__rawItem = parser.getRawItem(self.__path) self.headers = parser.parseHeaders(self.__rawItem) - if not headersOnly: self.content = parser.parseSections(self.headers, self.__rawItem) + if not headersOnly: self.content = parser.parseSections(self.headers, self.__rawItem, self.__path) self.isLocked = parser.isLocked(self.__path, self.queue, self.number) self.userEmail = parser.parseFromData(self.headers, data="userEmail") self.userName = parser.parseFromData(self.headers, data="userName") @@ -56,7 +56,7 @@ def __init__(self, queue: str, number: int, headersOnly: bool = False) -> None: self.building = parser.getMostRecentHeaderByType(self.headers, "Building") self.dateReceived = parser.getFormattedDate(parser.getMostRecentHeaderByType(self.headers, "Date")) self.jsonData = {} - + for attribute in self.__dir__(): if "_" not in attribute and attribute != "toJson" and attribute != "jsonData": self.jsonData[attribute] = self.__getattribute__(attribute) From 60cced2fd5abfb75ce056208f03c03ad515b9e5e Mon Sep 17 00:00:00 2001 From: benne238 Date: Mon, 12 Apr 2021 22:23:01 -0400 Subject: [PATCH 19/43] fixed init import statement in parser --- webqueue2_api/ECNQueue/parser/parser.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/webqueue2_api/ECNQueue/parser/parser.py b/webqueue2_api/ECNQueue/parser/parser.py index da0811a..e0be3c7 100644 --- a/webqueue2_api/ECNQueue/parser/parser.py +++ b/webqueue2_api/ECNQueue/parser/parser.py @@ -1,5 +1,4 @@ -from webqueue2_api.ECNQueue import Item, Queue, utils -from ..__init__ import queue_directory, queues_to_ignore +from ...ECNQueue import Item, utils, queue_directory, queues_to_ignore import os from time import strftime, localtime from re import compile, search From 05d46a2986ccb2981985663c68dd23662360a17c Mon Sep 
17 00:00:00 2001 From: benne238 Date: Mon, 12 Apr 2021 22:24:59 -0400 Subject: [PATCH 20/43] fixed __init__ import in utils within ECNQueue --- webqueue2_api/ECNQueue/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webqueue2_api/ECNQueue/utils.py b/webqueue2_api/ECNQueue/utils.py index c427d0c..a5eefc9 100644 --- a/webqueue2_api/ECNQueue/utils.py +++ b/webqueue2_api/ECNQueue/utils.py @@ -1,7 +1,7 @@ from os import path, listdir from re import compile from . import Queue -from .__init__ import queue_directory, queues_to_ignore +from . import queue_directory, queues_to_ignore queueDirectory = queue_directory queuesToIgnore = queues_to_ignore From 88841306e602d9f91dd6f42575b22cbb639e67fa Mon Sep 17 00:00:00 2001 From: benne238 Date: Mon, 12 Apr 2021 22:25:16 -0400 Subject: [PATCH 21/43] fixed __init__ import in Queue within ECNQueue --- webqueue2_api/ECNQueue/Queue.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webqueue2_api/ECNQueue/Queue.py b/webqueue2_api/ECNQueue/Queue.py index 4b4689e..63ce0a3 100644 --- a/webqueue2_api/ECNQueue/Queue.py +++ b/webqueue2_api/ECNQueue/Queue.py @@ -1,6 +1,6 @@ from . import utils, Item from .parser import parser -from .__init__ import queue_directory +from . import queue_directory from os import listdir, path queueDirectory = queue_directory From 53491184820bbec41f5a0716dff071227c383f72 Mon Sep 17 00:00:00 2001 From: benne238 Date: Mon, 12 Apr 2021 22:25:29 -0400 Subject: [PATCH 22/43] fixed __init__ import in item within ECNQueue --- webqueue2_api/ECNQueue/Item.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webqueue2_api/ECNQueue/Item.py b/webqueue2_api/ECNQueue/Item.py index 4d9af93..87115cf 100644 --- a/webqueue2_api/ECNQueue/Item.py +++ b/webqueue2_api/ECNQueue/Item.py @@ -1,4 +1,4 @@ -from .__init__ import queue_directory, queues_to_ignore +from . import queue_directory, queues_to_ignore from .parser import parser import os queueDirectory = queue_directory From c0fa057526c995814cec711d86beba3626ea71b6 Mon Sep 17 00:00:00 2001 From: benne238 Date: Mon, 12 Apr 2021 22:26:02 -0400 Subject: [PATCH 23/43] fixed __init__ import in ECNQueue within ECNQueue --- webqueue2_api/ECNQueue/__init__.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/webqueue2_api/ECNQueue/__init__.py b/webqueue2_api/ECNQueue/__init__.py index bd127b4..2a9b46e 100644 --- a/webqueue2_api/ECNQueue/__init__.py +++ b/webqueue2_api/ECNQueue/__init__.py @@ -1,6 +1,8 @@ -from . 
import utils import configparser -from ..logger.__init__ import logger +from ..logger import logger + +queue_directory = "/home/pier/e/queue/Mail" +queues_to_ignore = [] logger.name = __name__ @@ -25,4 +27,4 @@ queues_to_ignore = config['ECNQueue']['QUEUES_TO_IGNORE'] logger.debug(f"QUEUES_TO_IGNORE included in config file, setting to {queues_to_ignore}") else: - logger.warning(f"QUEUES_TO_IGNORE var not included in config file, defaulting to {queues_to_ignore}") \ No newline at end of file + logger.warning(f"QUEUES_TO_IGNORE var not included in config file, defaulting to {queues_to_ignore}") \ No newline at end of file From fae1303595dbaa0f68c487f5e4de793b917b0266 Mon Sep 17 00:00:00 2001 From: benne238 Date: Mon, 12 Apr 2021 22:28:41 -0400 Subject: [PATCH 24/43] breakup of the api script in the backend into different modules and subpackages --- webqueue2_api/__init__.py | 2 +- webqueue2_api/api.py | 250 ------------------ webqueue2_api/api/__init__.py | 60 ++++- webqueue2_api/api/__main__.py | 4 + webqueue2_api/api/auth.py | 58 ++++ webqueue2_api/api/resources/__init__.py | 2 +- webqueue2_api/api/resources/item.py | 37 +++ webqueue2_api/api/resources/login.py | 46 ++++ webqueue2_api/api/resources/queue.py | 29 ++ webqueue2_api/api/resources/queue_list.py | 25 ++ .../api/resources/refresh_access_token.py | 9 + 11 files changed, 269 insertions(+), 253 deletions(-) delete mode 100644 webqueue2_api/api.py create mode 100644 webqueue2_api/api/auth.py create mode 100644 webqueue2_api/api/resources/item.py create mode 100644 webqueue2_api/api/resources/login.py create mode 100644 webqueue2_api/api/resources/queue.py create mode 100644 webqueue2_api/api/resources/queue_list.py create mode 100644 webqueue2_api/api/resources/refresh_access_token.py diff --git a/webqueue2_api/__init__.py b/webqueue2_api/__init__.py index f9edf5f..49daa1d 100644 --- a/webqueue2_api/__init__.py +++ b/webqueue2_api/__init__.py @@ -1 +1 @@ -from webqueue2_api import api, ECNQueue \ No newline at end of file +from . 
import api, ECNQueue, logger \ No newline at end of file diff --git a/webqueue2_api/api.py b/webqueue2_api/api.py deleted file mode 100644 index 9682366..0000000 --- a/webqueue2_api/api.py +++ /dev/null @@ -1,250 +0,0 @@ -from flask import Flask, request, after_this_request -from flask_restful import Api, Resource -from flask_jwt_extended import ( - JWTManager, create_access_token, create_refresh_token, - jwt_required, get_jwt_identity, jwt_refresh_token_required, - set_refresh_cookies, unset_refresh_cookies -) -import os, dotenv -from easyad import EasyAD -from ldap.filter import escape_filter_chars -# pylint says this is an error but it works so ¯\_(ツ)_/¯ -from ldap import INVALID_CREDENTIALS as LDAP_INVALID_CREDENTIALS -from webqueue2_api import ECNQueue - -# Load envrionment variables for ./.env -dotenv.load_dotenv() - -# Create Flask App -app = Flask(__name__) - -# Create API Interface -api = Api(app) - - -################################################################################ -# Configure Flask-JWT-Extended -################################################################################ - -# Set JWT secret key and create JWT manager -app.config["JWT_SECRET_KEY"] = os.environ.get("JWT_SECRET_KEY") -# Set identity claim field key to sub for JWT RFC complience -# Flask-JWT-Extended uses 'identity' by default for compatibility reasons -app.config["JWT_IDENTITY_CLAIM"] = "sub" -# Set the key for error messages generated by Flask-JWT-Extended -app.config["JWT_ERROR_MESSAGE_KEY"] = "message" - -# Look for JWTs in headers (for access) then cookies (for refresh) -app.config["JWT_TOKEN_LOCATION"] = ["headers", "cookies"] -# Restrict cookies to HTTPS in prod, allow HTTP in dev -app.config["JWT_COOKIE_SECURE"] = False if os.environ.get("ENVIRONMENT") == "dev" else True -# Restrict cookies using SameSite=strict flag -app.config["JWT_COOKIE_SAMESITE"] = "strict" -# Set the cookie key for CRSF validation string -# This is the default value. Adding it for easy reference -app.config["JWT_REFRESH_CSRF_HEADER_NAME"] = "X-CSRF-TOKEN" - -tokenManager = JWTManager(app) - - - -def user_is_valid(username: str, password: str) -> bool: - """Checks if user is valid and in webqueue2 login group. - - Args: - username (str): Career account username. - password (str): Career account passphrase. - - Returns: - bool: True if user is valid, otherwise False. 
- """ - - # Check for empty arguments - if (username == "" or password == ""): - return False - - # Check for adm account - if username.endswith("adm"): - return False; - - # Initialize EasyAD - config = { - "AD_SERVER": "boilerad.purdue.edu", - "AD_DOMAIN": "boilerad.purdue.edu" - } - ad = EasyAD(config) - - # Prepare search critiera for Active Directory - credentials = { - "username": escape_filter_chars(username), - "password": password - } - attributes = [ 'cn', "memberOf" ] - filter_string = f'(&(objectClass=user)(|(sAMAccountName={username})))' - - # Do user search - try: - user = ad.search(credentials=credentials, attributes=attributes, filter_string=filter_string)[0] - except LDAP_INVALID_CREDENTIALS: - return False - - # Isolate group names - # Example: - # 'CN=00000227-ECNStuds,OU=BoilerADGroups,DC=BoilerAD,DC=Purdue,DC=edu' becomes - # `00000227-ECNStuds` - user_groups = [ group.split(',')[0].split('=')[1] for group in user["memberOf"] ] - - # Check group membership - webqueue_login_group = "00000227-ECN-webqueue" - if webqueue_login_group not in user_groups: - return False - - return True - - - -class Login(Resource): - def post(self) -> tuple: - """Validates username/password and returns both access and refresh tokens. - - Return Codes: - 200 (OK): On success. - 401 (Unauthroized): When username or password are incorrect. - 422 (Unprocessable Entitiy): When the username or password can't be parsed. - - Example: - curl -X POST - -H "Content-Type: application/json" - -d '{"username": "bob", "password": "super_secret"}' - - { "access_token": fjr09hfp09h932jp9ruj3.3r8ihf8h0w8hr08ifhj804h8i.8h48ith08ity409hip0t4 } - - Returns: - tuple: Response containing tokens and HTTP response code. - """ - if not request.is_json: - return ({ "message": "JSON missing from request body"}, 422) - - data = request.json - - fields_to_check = ["username", "password"] - for field in fields_to_check: - if field not in data.keys(): - return ({ "message": f"{field} missing from request body"}, 422) - - if not user_is_valid(data["username"], data["password"]): - return ({ "message": "Username or password is invalid"}, 401) - - access_token = create_access_token(data["username"]) - refresh_token = create_refresh_token(data["username"]) - - # This decorator is needed because Flask-RESTful's 'resourceful routing` - # doesn't allow for direct modification to the Flask response object. - # See: https://flask-restful.readthedocs.io/en/latest/quickstart.html#resourceful-routing - @after_this_request - def _does_this_work(response): - set_refresh_cookies(response, refresh_token) - return response - - return ({ "access_token": access_token }, 200) - -class RefreshAccessToken(Resource): - @jwt_refresh_token_required - def post(self): - username = get_jwt_identity() - access_token = create_access_token(username) - return ({"access_token": access_token}, 200) - -class Item(Resource): - @jwt_required - def get(self, queue: str, number: int) -> tuple: - """Returns the JSON representation of the item requested. - - Return Codes: - 200 (OK): On success. 
- - Example: - /api/ce/100 returns: - { - "lastUpdated": "07-23-20 10:11 PM", - "headers": [...], - "content": [...], - "isLocked": "ce 100 is locked by knewell using qvi", - "userEmail": "campb303@purdue.edu", - "userName": "Justin Campbell", - "userAlias": "campb303", - "assignedTo": "campb303", - "subject": "Beepboop", - "status": "Dont Delete", - "priority": "", - "deparment": "", - "building": "", - "dateReceived": "Tue, 23 Jun 2020 13:25:51 -0400" - } - - Args: - queue (str): The queue of the item requested. - item (int): The number of the item requested. - - Returns: - tuple: Item as JSON and HTTP response code. - """ - return (ECNQueue.Item.Item(queue, number).toJson(), 200) - -class Queue(Resource): - @jwt_required - def get(self, queues: str) -> tuple: - """Returns the JSON representation of the queue requested. - - Return Codes: - 200 (OK): On success. - - Args: - queues (str): Plus (+) deliminited list of queues. - - Returns: - tuple: Queues as JSON and HTTP response code. - """ - queues_requested = queues.split("+") - - queue_list = [] - for queue in queues_requested: - queue_list.append(ECNQueue.Queue.Queue(queue).toJson()) - - return (queue_list, 200) - -class QueueList(Resource): - @jwt_required - def get(self) -> tuple: - """Returns a list of dictionaries with the number of items in each queue. - - Return Codes: - 200 (OK): On success. - - Example: - [ - { - name: "me", - number_of_items: 42 - }, - { - name: "bidc", - number_of_items: 3 - } - ] - - Returns: - tuple: Queues and item counts as JSON and HTTP response code. - """ - return (ECNQueue.utils.getQueueCounts(), 200) - - - -api.add_resource(Login, "/login") -api.add_resource(RefreshAccessToken, "/tokens/refresh") -api.add_resource(Item, "/api//") -api.add_resource(Queue, "/api/") -api.add_resource(QueueList, "/api/get_queues") - -if __name__ == "__main__": - app.run() diff --git a/webqueue2_api/api/__init__.py b/webqueue2_api/api/__init__.py index e16230a..c7b60cb 100644 --- a/webqueue2_api/api/__init__.py +++ b/webqueue2_api/api/__init__.py @@ -1 +1,59 @@ -from webqueue2_api.api import auth \ No newline at end of file +from . import resources +from ..logger import logger +import os +from flask import Flask, request, after_this_request +from flask_restful import Api +from flask_jwt_extended import JWTManager +import configparser +from pathlib import Path + +config = configparser.ConfigParser() +config_file = os.path.realpath("webqueue2-api.cfg") +config.read(config_file) + +logger.name = __name__ + +# Create Flask App +app = Flask(__name__) + +# Create API Interface +api = Api(app) + +# Set JWT secret key and create JWT manager +app.config["JWT_SECRET_KEY"] = os.urandom(16) +# Set identity claim field key to sub for JWT RFC complience +# Flask-JWT-Extended uses 'identity' by default for compatibility reasons +app.config["JWT_IDENTITY_CLAIM"] = "sub" +# Set the key for error messages generated by Flask-JWT-Extended +app.config["JWT_ERROR_MESSAGE_KEY"] = "message" + +# Look for JWTs in headers (for access) then cookies (for refresh) +app.config["JWT_TOKEN_LOCATION"] = ["headers", "cookies"] +# Restrict cookies to HTTPS in prod, allow HTTP in dev +app.config["JWT_COOKIE_SECURE"] = True +# Restrict cookies using SameSite=strict flag +app.config["JWT_COOKIE_SAMESITE"] = "strict" +# Set the cookie key for CRSF validation string +# This is the default value. 
Adding it for easy reference +app.config["JWT_REFRESH_CSRF_HEADER_NAME"] = "X-CSRF-TOKEN" + +tokenManager = JWTManager(app) + +if config.has_section("webqueue2_api") and config.has_option("webqueue2_api", "ENVIRONMENT"): + app.config["JWT_COOKIE_SECURE"] = False if config["webqueue2_api"]["ENVIRONMENT"] == "dev" else True + +if config.has_section("webqueue2_api") and config["webqueue2_api"]["JWT_SECRET_KEY"] == "": + logger.warning(f"JWT_SECRET_KEY wasn't defined, defaulting to 16 character random alpha numeric string") + +elif config.has_section("webqueue2_api") and config.has_option("webqueue2_api", "JWT_SECRET_KEY") and len(config["webqueue2_api"]["JWT_SECRET_KEY"]) < 16: + logger.warning(f"The JWT_SECRET_KEY is less than 16 characters.") + +elif config.has_option("webqueue2_api", "JWT_SECRET_KEY") : + logger.debug(f"JWT_SECRET_KEY was defined, setting to {config['webqueue2_api']['JWT_SECRET_KEY']}") + app.config["JWT_SECRET_KEY"] = config["webqueue2_api"]["JWT_SECRET_KEY"] + +api.add_resource(resources.login.Login, "/api/login") +api.add_resource(resources.refresh_access_token.RefreshAccessToken, "/api/tokens/refresh") +api.add_resource(resources.item.Item, "/api/data//") +api.add_resource(resources.queue.Queue, "/api/data/") +api.add_resource(resources.queue_list.QueueList, "/api/data/get_queues") \ No newline at end of file diff --git a/webqueue2_api/api/__main__.py b/webqueue2_api/api/__main__.py index e69de29..26db45c 100644 --- a/webqueue2_api/api/__main__.py +++ b/webqueue2_api/api/__main__.py @@ -0,0 +1,4 @@ +from webqueue2_api.api import app + +app.run() +print() \ No newline at end of file diff --git a/webqueue2_api/api/auth.py b/webqueue2_api/api/auth.py new file mode 100644 index 0000000..3b0d082 --- /dev/null +++ b/webqueue2_api/api/auth.py @@ -0,0 +1,58 @@ +from easyad import EasyAD +from ldap.filter import escape_filter_chars +# pylint says this is an error but it works so ¯\_(ツ)_/¯ +from ldap import INVALID_CREDENTIALS as LDAP_INVALID_CREDENTIALS + +def user_is_valid(username: str, password: str) -> bool: + """Checks if user is valid and in webqueue2 login group. + + Args: + username (str): Career account username. + password (str): Career account passphrase. + + Returns: + bool: True if user is valid, otherwise False. 
+ """ + + # Check for empty arguments + if (username == "" or password == ""): + return False + + # Check for adm account + if username.endswith("adm"): + return False + + # Initialize EasyAD + config = { + "AD_SERVER": "boilerad.purdue.edu", + "AD_DOMAIN": "boilerad.purdue.edu" + } + ad = EasyAD(config) + + # Prepare search critiera for Active Directory + credentials = { + "username": escape_filter_chars(username), + "password": password + } + attributes = [ 'cn', "memberOf" ] + filter_string = f'(&(objectClass=user)(|(sAMAccountName={username})))' + + # Do user search + try: + user = ad.search(credentials=credentials, attributes=attributes, filter_string=filter_string)[0] + except LDAP_INVALID_CREDENTIALS: + return False + + # Isolate group names + # Example: + # 'CN=00000227-ECNStuds,OU=BoilerADGroups,DC=BoilerAD,DC=Purdue,DC=edu' becomes + # `00000227-ECNStuds` + user_groups = [ group.split(',')[0].split('=')[1] for group in user["memberOf"] ] + + # Check group membership + webqueue_login_group = "00000227-ECN-webqueue" + if webqueue_login_group not in user_groups: + return False + + return True + diff --git a/webqueue2_api/api/resources/__init__.py b/webqueue2_api/api/resources/__init__.py index 06b5a8e..b19862f 100644 --- a/webqueue2_api/api/resources/__init__.py +++ b/webqueue2_api/api/resources/__init__.py @@ -1 +1 @@ -from webqueue2_api.api.resources import item, login, queue, queue_list, queue, refresh_access_token \ No newline at end of file +from . import item, login, queue, queue_list, queue, refresh_access_token \ No newline at end of file diff --git a/webqueue2_api/api/resources/item.py b/webqueue2_api/api/resources/item.py new file mode 100644 index 0000000..4ece5cd --- /dev/null +++ b/webqueue2_api/api/resources/item.py @@ -0,0 +1,37 @@ +from flask import request +from flask_restful import Resource +from flask_jwt_extended import jwt_required +from ...ECNQueue.Item import Item as Items + +class Item(Resource): + @jwt_required + def get(self, queue: str, number: int) -> tuple: + """Returns the JSON representation of the item requested. + Return Codes: + 200 (OK): On success. + Example: + { + "lastUpdated": "07-23-20 10:11 PM", + "headers": [...], + "content": [...], + "isLocked": "ce 100 is locked by knewell using qvi", + "userEmail": "campb303@purdue.edu", + "userName": "Justin Campbell", + "userAlias": "campb303", + "assignedTo": "campb303", + "subject": "Beepboop", + "status": "Dont Delete", + "priority": "", + "deparment": "", + "building": "", + "dateReceived": "Tue, 23 Jun 2020 13:25:51 -0400" + } + Args: + queue (str): The queue of the item requested. + item (int): The number of the item requested. + Returns: + tuple: Item as JSON and HTTP response code. + """ + + headersOnly = True if request.args.get("headersOnly") == "True" else False + return Items(queue, number, headersOnly=headersOnly).toJson() \ No newline at end of file diff --git a/webqueue2_api/api/resources/login.py b/webqueue2_api/api/resources/login.py new file mode 100644 index 0000000..ea3d2e8 --- /dev/null +++ b/webqueue2_api/api/resources/login.py @@ -0,0 +1,46 @@ +from flask_restful import Resource +from flask import Flask, request, after_this_request +from ..auth import user_is_valid +from flask_jwt_extended import create_access_token, create_refresh_token, set_refresh_cookies + + +class Login(Resource): + def post(self) -> tuple: + """Validates username/password and returns both access and refresh tokens. + Return Codes: + 200 (OK): On success. 
+ 401 (Unauthroized): When username or password are incorrect. + 422 (Unprocessable Entitiy): When the username or password can't be parsed. + Example: + curl -X POST + -H "Content-Type: application/json" + -d '{"username": "bob", "password": "super_secret"}' + { "access_token": fjr09hfp09h932jp9ruj3.3r8ihf8h0w8hr08ifhj804h8i.8h48ith08ity409hip0t4 } + Returns: + tuple: Response containing tokens and HTTP response code. + """ + if not request.is_json: + return ({ "message": "JSON missing from request body"}, 422) + + data = request.json + + fields_to_check = ["username", "password"] + for field in fields_to_check: + if field not in data.keys(): + return ({ "message": f"{field} missing from request body"}, 422) + + if not user_is_valid(data["username"], data["password"]): + return ({ "message": "Username or password is invalid"}, 401) + + access_token = create_access_token(data["username"]) + refresh_token = create_refresh_token(data["username"]) + + # This decorator is needed because Flask-RESTful's 'resourceful routing` + # doesn't allow for direct modification to the Flask response object. + # See: https://flask-restful.readthedocs.io/en/latest/quickstart.html#resourceful-routing + @after_this_request + def _does_this_work(response): + set_refresh_cookies(response, refresh_token) + return response + + return ({ "access_token": access_token }, 200) diff --git a/webqueue2_api/api/resources/queue.py b/webqueue2_api/api/resources/queue.py new file mode 100644 index 0000000..74d7546 --- /dev/null +++ b/webqueue2_api/api/resources/queue.py @@ -0,0 +1,29 @@ +from flask import request +from flask_restful import Resource +from flask_jwt_extended import jwt_required +import webqueue2_api.ECNQueue.Queue as Queues + +class Queue(Resource): + @jwt_required + def get(self, queues: str) -> tuple: + """Returns the JSON representation of the queue requested. + Example: + { + "name": ce, + "items": [...] + } + + Return Codes: + 200 (OK): On success. + Args: + queues (str): Plus (+) deliminited list of queues. + Returns: + tuple: Queues as JSON and HTTP response code. + """ + headersOnly = False if request.args.get("headersOnly") == "False" else True + queues_requested = queues.split("+") + + queue_list = [] + for queue in queues_requested: + queue_list.append(Queues.Queue(queue, headersOnly=headersOnly).toJson()) + return (queue_list, 200) \ No newline at end of file diff --git a/webqueue2_api/api/resources/queue_list.py b/webqueue2_api/api/resources/queue_list.py new file mode 100644 index 0000000..643243c --- /dev/null +++ b/webqueue2_api/api/resources/queue_list.py @@ -0,0 +1,25 @@ +from flask_restful import Resource +from flask_jwt_extended import jwt_required +from ...ECNQueue import utils + +class QueueList(Resource): + @jwt_required + def get(self) -> tuple: + """Returns a list of dictionaries with the number of items in each queue. + Return Codes: + 200 (OK): On success. + Example: + [ + { + name: "me", + number_of_items: 42 + }, + { + name: "bidc", + number_of_items: 3 + } + ] + Returns: + tuple: Queues and item counts as JSON and HTTP response code. 
+ """ + return (utils.getQueueCounts(), 200) diff --git a/webqueue2_api/api/resources/refresh_access_token.py b/webqueue2_api/api/resources/refresh_access_token.py new file mode 100644 index 0000000..6d695ea --- /dev/null +++ b/webqueue2_api/api/resources/refresh_access_token.py @@ -0,0 +1,9 @@ +from flask_restful import Resource +from flask_jwt_extended import get_jwt_identity, jwt_refresh_token_required, create_access_token + +class RefreshAccessToken(Resource): + @jwt_refresh_token_required + def post(self): + username = get_jwt_identity() + access_token = create_access_token(username) + return ({"access_token": access_token}, 200) \ No newline at end of file From 873d53c58d5b30f67b7b3a0aa2071d68fdb8525c Mon Sep 17 00:00:00 2001 From: benne238 Date: Mon, 12 Apr 2021 23:54:46 -0400 Subject: [PATCH 25/43] added/modified logging in api __init__ to account for bad inputs from the config file --- webqueue2_api/ECNQueue/__init__.py | 42 ++++++++++++++++++++---------- 1 file changed, 28 insertions(+), 14 deletions(-) diff --git a/webqueue2_api/ECNQueue/__init__.py b/webqueue2_api/ECNQueue/__init__.py index 2a9b46e..40d9436 100644 --- a/webqueue2_api/ECNQueue/__init__.py +++ b/webqueue2_api/ECNQueue/__init__.py @@ -1,8 +1,10 @@ import configparser +import os +import json from ..logger import logger queue_directory = "/home/pier/e/queue/Mail" -queues_to_ignore = [] +queues_to_ignore = ["archives", "drafts", "inbox", "coral"] logger.name = __name__ @@ -11,20 +13,32 @@ config.read(config_file) if "ECNQueue" not in config.sections(): - logger.debug(f"ECNQueue section not included in the {config_file} configuration file") - queue_directory = "/home/pier/e/queue/Mail" - logger.debug(f"queue_directory set to {queue_directory}") - queues_to_ignore = ["archives", "drafts", "inbox", "coral"] - logger.debug(f"queues_to_ignore set to {queues_to_ignore}") + logger.warning(f"ECNQueue section not included in the {config_file} configuration file") + logger.warning(f"queue_directory set to {queue_directory}") + logger.warning(f"queues_to_ignore set to {queues_to_ignore}") if config.has_option('ECNQueue', 'QUEUE_DIRECTORY'): - queue_directory = config['ECNQueue']['QUEUE_DIRECTORY'] - logger.debug(f"QUEUE_DIRECTORY included in config file, setting to {queue_directory}") -else: - logger.warning(f"QUEUE_DIRECTORY var not included in config file, defaulting to {queue_directory}") + newQueueDir = config['ECNQueue']['QUEUE_DIRECTORY'] + logger.debug(f"QUEUE_DIRECTORY variable included under ECNQueue section in config file") + if os.path.exists(newQueueDir): + queue_directory = newQueueDir + logger.debug(f"QUEUE_DIRECTORY in config file valid, setting Queue Directory to {queue_directory}") + else: + logger.warning(f"QUEUE_DIRECTORY in config file invalid directory location, defaulting to {queue_directory}") + +elif "ECNQueue" in config.sections(): + logger.warning(f"QUEUE_DIRECTORY var not included under ECNQueue section in config file, defaulting to {queue_directory}") if config.has_option('ECNQueue', 'QUEUES_TO_IGNORE'): - queues_to_ignore = config['ECNQueue']['QUEUES_TO_IGNORE'] - logger.debug(f"QUEUES_TO_IGNORE included in config file, setting to {queues_to_ignore}") -else: - logger.warning(f"QUEUES_TO_IGNORE var not included in config file, defaulting to {queues_to_ignore}") \ No newline at end of file + newQueuesIgnore = config['ECNQueue']['QUEUES_TO_IGNORE'] + logger.debug(f"QUEUES_TO_IGNORE variable included under ECNQueue section in config file") + try: + queues_to_ignore = json.loads(newQueuesIgnore) + 
logger.debug(f"QUEUES_TO_IGNORE in config file is a valid list, setting to {queues_to_ignore}") + except: + logger.warning(f"QUEUES_TO_IGNORE in config file not a valid list, defaulting to {queues_to_ignore}") + + #queues_to_ignore = config['ECNQueue']['QUEUES_TO_IGNORE'] + +elif "ECNQueue" in config.sections(): + logger.warning(f"QUEUES_TO_IGNORE var not included under ECNQueue section in config file, defaulting to {queues_to_ignore}") From 71512f9888eb275118bf5cc9fb8fc223b256d1a1 Mon Sep 17 00:00:00 2001 From: benne238 Date: Tue, 13 Apr 2021 00:17:09 -0400 Subject: [PATCH 26/43] modified logging statements in api, ECNQueue, and logger __init__ files --- webqueue2_api/ECNQueue/__init__.py | 18 ++++++++---------- webqueue2_api/api/__init__.py | 5 +++-- webqueue2_api/logger/__init__.py | 1 + 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/webqueue2_api/ECNQueue/__init__.py b/webqueue2_api/ECNQueue/__init__.py index 40d9436..9d54f6f 100644 --- a/webqueue2_api/ECNQueue/__init__.py +++ b/webqueue2_api/ECNQueue/__init__.py @@ -3,28 +3,28 @@ import json from ..logger import logger +logger.name = __name__ + queue_directory = "/home/pier/e/queue/Mail" queues_to_ignore = ["archives", "drafts", "inbox", "coral"] -logger.name = __name__ - config = configparser.ConfigParser() config_file = "webqueue2-api.cfg" config.read(config_file) if "ECNQueue" not in config.sections(): - logger.warning(f"ECNQueue section not included in the {config_file} configuration file") - logger.warning(f"queue_directory set to {queue_directory}") - logger.warning(f"queues_to_ignore set to {queues_to_ignore}") + logger.warning(f"ECNQueue section not included in the '{config_file}' configuration file") + logger.warning(f"queue_directory set to '{queue_directory}'") + logger.warning(f"queues_to_ignore set to '{queues_to_ignore}'") if config.has_option('ECNQueue', 'QUEUE_DIRECTORY'): newQueueDir = config['ECNQueue']['QUEUE_DIRECTORY'] logger.debug(f"QUEUE_DIRECTORY variable included under ECNQueue section in config file") if os.path.exists(newQueueDir): queue_directory = newQueueDir - logger.debug(f"QUEUE_DIRECTORY in config file valid, setting Queue Directory to {queue_directory}") + logger.debug(f"QUEUE_DIRECTORY in config file valid, setting Queue Directory to '{queue_directory}'") else: - logger.warning(f"QUEUE_DIRECTORY in config file invalid directory location, defaulting to {queue_directory}") + logger.warning(f"QUEUE_DIRECTORY in config file invalid directory location, defaulting to '{queue_directory}'") elif "ECNQueue" in config.sections(): logger.warning(f"QUEUE_DIRECTORY var not included under ECNQueue section in config file, defaulting to {queue_directory}") @@ -37,8 +37,6 @@ logger.debug(f"QUEUES_TO_IGNORE in config file is a valid list, setting to {queues_to_ignore}") except: logger.warning(f"QUEUES_TO_IGNORE in config file not a valid list, defaulting to {queues_to_ignore}") - - #queues_to_ignore = config['ECNQueue']['QUEUES_TO_IGNORE'] elif "ECNQueue" in config.sections(): - logger.warning(f"QUEUES_TO_IGNORE var not included under ECNQueue section in config file, defaulting to {queues_to_ignore}") + logger.warning(f"QUEUES_TO_IGNORE var not included under ECNQueue section in config file, defaulting to '{queues_to_ignore}'") diff --git a/webqueue2_api/api/__init__.py b/webqueue2_api/api/__init__.py index c7b60cb..89b78db 100644 --- a/webqueue2_api/api/__init__.py +++ b/webqueue2_api/api/__init__.py @@ -41,15 +41,16 @@ if config.has_section("webqueue2_api") and config.has_option("webqueue2_api", 
"ENVIRONMENT"): app.config["JWT_COOKIE_SECURE"] = False if config["webqueue2_api"]["ENVIRONMENT"] == "dev" else True + logger.debug(f"JWT_COOKIE_SECURE set to {app.config['JWT_COOKIE_SECURE']}") if config.has_section("webqueue2_api") and config["webqueue2_api"]["JWT_SECRET_KEY"] == "": logger.warning(f"JWT_SECRET_KEY wasn't defined, defaulting to 16 character random alpha numeric string") elif config.has_section("webqueue2_api") and config.has_option("webqueue2_api", "JWT_SECRET_KEY") and len(config["webqueue2_api"]["JWT_SECRET_KEY"]) < 16: - logger.warning(f"The JWT_SECRET_KEY is less than 16 characters.") + logger.warning(f"The JWT_SECRET_KEY is less than 16 characters, this is not recomended") elif config.has_option("webqueue2_api", "JWT_SECRET_KEY") : - logger.debug(f"JWT_SECRET_KEY was defined, setting to {config['webqueue2_api']['JWT_SECRET_KEY']}") + logger.debug(f"JWT_SECRET_KEY was defined, setting to specified value") app.config["JWT_SECRET_KEY"] = config["webqueue2_api"]["JWT_SECRET_KEY"] api.add_resource(resources.login.Login, "/api/login") diff --git a/webqueue2_api/logger/__init__.py b/webqueue2_api/logger/__init__.py index 9137297..d5b4e94 100644 --- a/webqueue2_api/logger/__init__.py +++ b/webqueue2_api/logger/__init__.py @@ -41,5 +41,6 @@ logger.debug(f"Logger") file_handler.setFormatter(log_formatter) logger.addHandler(file_handler) + logger.debug(f"LOGGER_OUT_FILE included under Logger section in config file is valid, setting to '{log_file_path}'") else: logger.debug("LOGGER_OUT_FILE not defined in logger section, not setting logger path") \ No newline at end of file From 2ad404f5ee417be51da3dcf1ea0c523e728734a3 Mon Sep 17 00:00:00 2001 From: benne238 Date: Tue, 13 Apr 2021 16:36:19 -0400 Subject: [PATCH 27/43] added logging to __main__ script in api --- webqueue2_api/api/__main__.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/webqueue2_api/api/__main__.py b/webqueue2_api/api/__main__.py index 26db45c..be9b23c 100644 --- a/webqueue2_api/api/__main__.py +++ b/webqueue2_api/api/__main__.py @@ -1,4 +1,12 @@ from webqueue2_api.api import app +from ..logger import logger -app.run() -print() \ No newline at end of file +logger.name = __name__ + +logger.debug('Starting api') + +try: + app.run() + logger.debug(f"Stopping api") +except Exception as e: + logger.error(f"Failed to start the api: {e}") From cd8e0ba55fa59999352c60d8115cbdd91b7b5361 Mon Sep 17 00:00:00 2001 From: benne238 Date: Tue, 13 Apr 2021 17:17:29 -0400 Subject: [PATCH 28/43] added logging for the item resource in the api resource subpackage --- webqueue2_api/api/resources/item.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/webqueue2_api/api/resources/item.py b/webqueue2_api/api/resources/item.py index 4ece5cd..4c2b42f 100644 --- a/webqueue2_api/api/resources/item.py +++ b/webqueue2_api/api/resources/item.py @@ -2,6 +2,7 @@ from flask_restful import Resource from flask_jwt_extended import jwt_required from ...ECNQueue.Item import Item as Items +from ...logger import logger class Item(Resource): @jwt_required @@ -32,6 +33,16 @@ def get(self, queue: str, number: int) -> tuple: Returns: tuple: Item as JSON and HTTP response code. 
""" - + logger.name = __name__ headersOnly = True if request.args.get("headersOnly") == "True" else False - return Items(queue, number, headersOnly=headersOnly).toJson() \ No newline at end of file + + logger.debug(f"Attempting to get item '{queue} {number}' with headersOnly set to {headersOnly}") + try: + fethedItem = Items(queue, number, headersOnly=headersOnly).toJson() + logger.debug(f"Succesfully got item '{queue} {number}' with headersOnly set to {headersOnly}") + return fethedItem + except FileNotFoundError as e: + logger.warning(f"Item '{queue} {number}' does not exist: {e}") + logger.warning(f"Returning '{None}'") + return None + \ No newline at end of file From eac9b105e9ae2d1b7ce08cda9c2c2be150fb19ed Mon Sep 17 00:00:00 2001 From: benne238 Date: Tue, 13 Apr 2021 17:24:54 -0400 Subject: [PATCH 29/43] added logging to login script in the resources subpackage of the api --- webqueue2_api/api/resources/login.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/webqueue2_api/api/resources/login.py b/webqueue2_api/api/resources/login.py index ea3d2e8..92e2b7a 100644 --- a/webqueue2_api/api/resources/login.py +++ b/webqueue2_api/api/resources/login.py @@ -2,7 +2,9 @@ from flask import Flask, request, after_this_request from ..auth import user_is_valid from flask_jwt_extended import create_access_token, create_refresh_token, set_refresh_cookies +from ...logger import logger +logger.name = __name__ class Login(Resource): def post(self) -> tuple: @@ -20,6 +22,7 @@ def post(self) -> tuple: tuple: Response containing tokens and HTTP response code. """ if not request.is_json: + logger.warning(f"JSON missing from request body") return ({ "message": "JSON missing from request body"}, 422) data = request.json @@ -27,13 +30,20 @@ def post(self) -> tuple: fields_to_check = ["username", "password"] for field in fields_to_check: if field not in data.keys(): + logger.warning(f"{field} missing from request body") return ({ "message": f"{field} missing from request body"}, 422) if not user_is_valid(data["username"], data["password"]): + logger.warning(f"Username and or password invalid") return ({ "message": "Username or password is invalid"}, 401) + logger.debug(f"Username and password valid") + access_token = create_access_token(data["username"]) + logger.debug(f"Successfully created access token") + refresh_token = create_refresh_token(data["username"]) + logger.debug(f"Successfully created refresh token") # This decorator is needed because Flask-RESTful's 'resourceful routing` # doesn't allow for direct modification to the Flask response object. @@ -43,4 +53,5 @@ def _does_this_work(response): set_refresh_cookies(response, refresh_token) return response + logger.debug(f"Returning access token") return ({ "access_token": access_token }, 200) From 64b214fd4e77e0185ca0b0ee64be0b5b65b34205 Mon Sep 17 00:00:00 2001 From: benne238 Date: Tue, 13 Apr 2021 17:51:46 -0400 Subject: [PATCH 30/43] modified resource.login logging statements --- webqueue2_api/api/resources/login.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/webqueue2_api/api/resources/login.py b/webqueue2_api/api/resources/login.py index 92e2b7a..83f1791 100644 --- a/webqueue2_api/api/resources/login.py +++ b/webqueue2_api/api/resources/login.py @@ -22,7 +22,7 @@ def post(self) -> tuple: tuple: Response containing tokens and HTTP response code. 
""" if not request.is_json: - logger.warning(f"JSON missing from request body") + logger.warning(f"JSON missing from request body, returning 422 error") return ({ "message": "JSON missing from request body"}, 422) data = request.json @@ -30,11 +30,11 @@ def post(self) -> tuple: fields_to_check = ["username", "password"] for field in fields_to_check: if field not in data.keys(): - logger.warning(f"{field} missing from request body") + logger.warning(f"{field} missing from request body, returning 422 error") return ({ "message": f"{field} missing from request body"}, 422) if not user_is_valid(data["username"], data["password"]): - logger.warning(f"Username and or password invalid") + logger.warning(f"Username and or password invalid, returning 401 error") return ({ "message": "Username or password is invalid"}, 401) logger.debug(f"Username and password valid") From 2e57630ca8c305cb30498234abeaf156886b2d48 Mon Sep 17 00:00:00 2001 From: benne238 Date: Tue, 13 Apr 2021 17:59:42 -0400 Subject: [PATCH 31/43] added logging to queue_list module in resources --- webqueue2_api/api/resources/queue_list.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/webqueue2_api/api/resources/queue_list.py b/webqueue2_api/api/resources/queue_list.py index 643243c..a7a230d 100644 --- a/webqueue2_api/api/resources/queue_list.py +++ b/webqueue2_api/api/resources/queue_list.py @@ -1,6 +1,9 @@ from flask_restful import Resource from flask_jwt_extended import jwt_required from ...ECNQueue import utils +from ...logger import logger + +logger.name = __name__ class QueueList(Resource): @jwt_required @@ -22,4 +25,14 @@ def get(self) -> tuple: Returns: tuple: Queues and item counts as JSON and HTTP response code. """ - return (utils.getQueueCounts(), 200) + try: + logger.debug("Attempting to get queue counts") + queueCount = utils.getQueueCounts() + logger.debug("Successfully got queue counts") + except Exception as e: + logger.warning(f"Unable to get queue counts: {e}") + logger.warning(f"Returning 404 error") + return(None, 404) + + logger.debug("Returning queue counts") + return (queueCount, 200) From 3aaeba492161c4dc0e4277c07b216e9e3548d796 Mon Sep 17 00:00:00 2001 From: benne238 Date: Tue, 13 Apr 2021 18:14:06 -0400 Subject: [PATCH 32/43] modified item resource logging in the api --- webqueue2_api/api/resources/item.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/webqueue2_api/api/resources/item.py b/webqueue2_api/api/resources/item.py index 4c2b42f..fff8196 100644 --- a/webqueue2_api/api/resources/item.py +++ b/webqueue2_api/api/resources/item.py @@ -40,9 +40,10 @@ def get(self, queue: str, number: int) -> tuple: try: fethedItem = Items(queue, number, headersOnly=headersOnly).toJson() logger.debug(f"Succesfully got item '{queue} {number}' with headersOnly set to {headersOnly}") - return fethedItem except FileNotFoundError as e: logger.warning(f"Item '{queue} {number}' does not exist: {e}") logger.warning(f"Returning '{None}'") return None - \ No newline at end of file + + logger.debug(f"Returning json representation of item '{queue} {number}'") + return fethedItem \ No newline at end of file From 00d32aba15eb12a81692b66db56313b5be1c1303 Mon Sep 17 00:00:00 2001 From: benne238 Date: Tue, 13 Apr 2021 18:33:41 -0400 Subject: [PATCH 33/43] added logging to queue resource in the api sub package --- webqueue2_api/api/resources/queue.py | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/webqueue2_api/api/resources/queue.py 
b/webqueue2_api/api/resources/queue.py index 74d7546..48d0789 100644 --- a/webqueue2_api/api/resources/queue.py +++ b/webqueue2_api/api/resources/queue.py @@ -2,6 +2,9 @@ from flask_restful import Resource from flask_jwt_extended import jwt_required import webqueue2_api.ECNQueue.Queue as Queues +from ...logger import logger + +logger.name = __name__ class Queue(Resource): @jwt_required @@ -23,7 +26,24 @@ def get(self, queues: str) -> tuple: headersOnly = False if request.args.get("headersOnly") == "False" else True queues_requested = queues.split("+") + logger.debug(f"Requested queues: {queues_requested}") + logger.debug(f"Headers only set to: {headersOnly}") + queue_list = [] + valid_queues = [] + for queue in queues_requested: - queue_list.append(Queues.Queue(queue, headersOnly=headersOnly).toJson()) + try: + queue_list.append(Queues.Queue(queue, headersOnly=headersOnly).toJson()) + valid_queues.append(queue) + except FileNotFoundError as e: + logger.warning(f"'{queue}' does not exist: {e}") + logger.warning(f"Skipping to next queue") + continue + except Exception as e: + logger.warning(f"Unable to get {queue}: {e}") + logger.warning(f"Moving to next queue") + continue + + logger.debug(f"Returning requested queues: {valid_queues} with headers only set to: {headersOnly}") return (queue_list, 200) \ No newline at end of file From ae479147a487a1379e259bef11b71dceb6cd0770 Mon Sep 17 00:00:00 2001 From: benne238 Date: Tue, 13 Apr 2021 19:00:49 -0400 Subject: [PATCH 34/43] moved logger logger.name declaration to the inside of the class declaration in the login, queue_list and queue resources --- webqueue2_api/api/resources/login.py | 5 +++-- webqueue2_api/api/resources/queue.py | 4 ++-- webqueue2_api/api/resources/queue_list.py | 5 +++-- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/webqueue2_api/api/resources/login.py b/webqueue2_api/api/resources/login.py index 83f1791..951a1a5 100644 --- a/webqueue2_api/api/resources/login.py +++ b/webqueue2_api/api/resources/login.py @@ -4,8 +4,6 @@ from flask_jwt_extended import create_access_token, create_refresh_token, set_refresh_cookies from ...logger import logger -logger.name = __name__ - class Login(Resource): def post(self) -> tuple: """Validates username/password and returns both access and refresh tokens. @@ -21,6 +19,9 @@ def post(self) -> tuple: Returns: tuple: Response containing tokens and HTTP response code. """ + + logger.name = __name__ + if not request.is_json: logger.warning(f"JSON missing from request body, returning 422 error") return ({ "message": "JSON missing from request body"}, 422) diff --git a/webqueue2_api/api/resources/queue.py b/webqueue2_api/api/resources/queue.py index 48d0789..6a0be76 100644 --- a/webqueue2_api/api/resources/queue.py +++ b/webqueue2_api/api/resources/queue.py @@ -4,8 +4,6 @@ import webqueue2_api.ECNQueue.Queue as Queues from ...logger import logger -logger.name = __name__ - class Queue(Resource): @jwt_required def get(self, queues: str) -> tuple: @@ -23,6 +21,8 @@ def get(self, queues: str) -> tuple: Returns: tuple: Queues as JSON and HTTP response code. 
""" + logger.name = __name__ + headersOnly = False if request.args.get("headersOnly") == "False" else True queues_requested = queues.split("+") diff --git a/webqueue2_api/api/resources/queue_list.py b/webqueue2_api/api/resources/queue_list.py index a7a230d..c5354c2 100644 --- a/webqueue2_api/api/resources/queue_list.py +++ b/webqueue2_api/api/resources/queue_list.py @@ -3,8 +3,6 @@ from ...ECNQueue import utils from ...logger import logger -logger.name = __name__ - class QueueList(Resource): @jwt_required def get(self) -> tuple: @@ -25,6 +23,9 @@ def get(self) -> tuple: Returns: tuple: Queues and item counts as JSON and HTTP response code. """ + + logger.name = __name__ + try: logger.debug("Attempting to get queue counts") queueCount = utils.getQueueCounts() From cf04289c5a6c75bdd14fa4213f19227a10ff92b8 Mon Sep 17 00:00:00 2001 From: benne238 Date: Tue, 13 Apr 2021 21:34:00 -0400 Subject: [PATCH 35/43] added logging to refresh_access_token module in resources --- webqueue2_api/api/resources/refresh_access_token.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/webqueue2_api/api/resources/refresh_access_token.py b/webqueue2_api/api/resources/refresh_access_token.py index 6d695ea..aa6aa11 100644 --- a/webqueue2_api/api/resources/refresh_access_token.py +++ b/webqueue2_api/api/resources/refresh_access_token.py @@ -1,9 +1,18 @@ from flask_restful import Resource from flask_jwt_extended import get_jwt_identity, jwt_refresh_token_required, create_access_token +from ...logger import logger class RefreshAccessToken(Resource): @jwt_refresh_token_required def post(self): + logger.name = __name__ username = get_jwt_identity() - access_token = create_access_token(username) + logger.debug("Creating refresh token") + try: + access_token = create_access_token(username) + except Exception as e: + logger.warning(f"Unable to create refresh token {e}") + return(None, 401) + + logger.debug("Returning access token") return ({"access_token": access_token}, 200) \ No newline at end of file From 9c088c1e2f1fbacee2a6d1743da86d67dd92eda7 Mon Sep 17 00:00:00 2001 From: benne238 Date: Tue, 13 Apr 2021 21:43:59 -0400 Subject: [PATCH 36/43] edited refresh_access token log messages --- webqueue2_api/api/resources/refresh_access_token.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webqueue2_api/api/resources/refresh_access_token.py b/webqueue2_api/api/resources/refresh_access_token.py index aa6aa11..6765684 100644 --- a/webqueue2_api/api/resources/refresh_access_token.py +++ b/webqueue2_api/api/resources/refresh_access_token.py @@ -14,5 +14,5 @@ def post(self): logger.warning(f"Unable to create refresh token {e}") return(None, 401) - logger.debug("Returning access token") + logger.debug("Returning refresh token") return ({"access_token": access_token}, 200) \ No newline at end of file From 911f41f070d1b387d0bfe16cce5c1e93446742da Mon Sep 17 00:00:00 2001 From: benne238 Date: Tue, 13 Apr 2021 21:56:18 -0400 Subject: [PATCH 37/43] make main script as a part of the webqueue2_api parent package instead of just the api --- webqueue2_api/{api => }/__main__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) rename webqueue2_api/{api => }/__main__.py (73%) diff --git a/webqueue2_api/api/__main__.py b/webqueue2_api/__main__.py similarity index 73% rename from webqueue2_api/api/__main__.py rename to webqueue2_api/__main__.py index be9b23c..cf6ca26 100644 --- a/webqueue2_api/api/__main__.py +++ b/webqueue2_api/__main__.py @@ -1,7 +1,7 @@ from webqueue2_api.api import app 
-from ..logger import logger +from webqueue2_api.logger import logger -logger.name = __name__ +logger.name = "webqueue2_api" logger.debug('Starting api') From 5cb439338cd64d4ff7645de2d1691ee912337641 Mon Sep 17 00:00:00 2001 From: benne238 Date: Wed, 14 Apr 2021 15:13:31 -0400 Subject: [PATCH 38/43] implemented argparse when calling the webqueue2_api package --- webqueue2_api/ECNQueue/__init__.py | 11 +++--- webqueue2_api/__init__.py | 2 +- webqueue2_api/__main__.py | 62 ++++++++++++++++++++++++++---- webqueue2_api/api/__init__.py | 7 +++- webqueue2_api/global_configs.py | 2 + 5 files changed, 68 insertions(+), 16 deletions(-) create mode 100644 webqueue2_api/global_configs.py diff --git a/webqueue2_api/ECNQueue/__init__.py b/webqueue2_api/ECNQueue/__init__.py index 9d54f6f..208c7c7 100644 --- a/webqueue2_api/ECNQueue/__init__.py +++ b/webqueue2_api/ECNQueue/__init__.py @@ -2,6 +2,7 @@ import os import json from ..logger import logger +from .. import global_configs logger.name = __name__ @@ -9,13 +10,13 @@ queues_to_ignore = ["archives", "drafts", "inbox", "coral"] config = configparser.ConfigParser() -config_file = "webqueue2-api.cfg" -config.read(config_file) +if global_configs.CONFIG_LOCATION is not None: + config.read(global_configs.CONFIG_LOCATION) if "ECNQueue" not in config.sections(): - logger.warning(f"ECNQueue section not included in the '{config_file}' configuration file") + logger.warning(f"ECNQueue section not included in the '{global_configs.CONFIG_LOCATION}' configuration file") logger.warning(f"queue_directory set to '{queue_directory}'") - logger.warning(f"queues_to_ignore set to '{queues_to_ignore}'") + logger.warning(f"queues_to_ignore set to {queues_to_ignore}") if config.has_option('ECNQueue', 'QUEUE_DIRECTORY'): newQueueDir = config['ECNQueue']['QUEUE_DIRECTORY'] @@ -39,4 +40,4 @@ logger.warning(f"QUEUES_TO_IGNORE in config file not a valid list, defaulting to {queues_to_ignore}") elif "ECNQueue" in config.sections(): - logger.warning(f"QUEUES_TO_IGNORE var not included under ECNQueue section in config file, defaulting to '{queues_to_ignore}'") + logger.warning(f"QUEUES_TO_IGNORE var not included under ECNQueue section in config file, defaulting to {queues_to_ignore}") diff --git a/webqueue2_api/__init__.py b/webqueue2_api/__init__.py index 49daa1d..d4f283e 100644 --- a/webqueue2_api/__init__.py +++ b/webqueue2_api/__init__.py @@ -1 +1 @@ -from . import api, ECNQueue, logger \ No newline at end of file +#from . import api, ECNQueue, logger \ No newline at end of file diff --git a/webqueue2_api/__main__.py b/webqueue2_api/__main__.py index cf6ca26..af07d3f 100644 --- a/webqueue2_api/__main__.py +++ b/webqueue2_api/__main__.py @@ -1,12 +1,58 @@ -from webqueue2_api.api import app +#from webqueue2_api.api import app +#import webqueue2_api.ECNQueue from webqueue2_api.logger import logger +import argparse, os +from . 
import global_configs -logger.name = "webqueue2_api" +def getArguments() -> argparse.Namespace: + parser = argparse.ArgumentParser(description='Argument Parser') + apiActions = parser.add_subparsers(dest="api_action", help="api actions") + apiActions.required = True -logger.debug('Starting api') + startApi = apiActions.add_parser("start-api") + stopApi = apiActions.add_parser("stop-api") + restartApi = apiActions.add_parser("restart-api") + + startApi.add_argument( + "-c", + "--config-dir", + help="directory cotaining the webqueue2_api.cfg file, defaults to current directory", + default=os.path.realpath(os.path.dirname("webqueue2_api.cfg")) + ) + startApi.add_argument( + "-v", + "--verbose", + help="toggles verbose mode", + default=False, + action='store_true' + ) -try: - app.run() - logger.debug(f"Stopping api") -except Exception as e: - logger.error(f"Failed to start the api: {e}") + return parser.parse_args() + +def validateArguments(arguments: argparse.Namespace) -> bool: + if arguments.api_action == "start-api" and not os.path.exists(os.path.join(arguments.config_dir, "webqueue2-api.cfg")): + logger.warning(f"'{arguments.config_dir}' does not contain 'webqueue2-api.cfg' file") + logger.warning(f"Setting default values for webqueue2_api configurations") + elif arguments.api_action == "start-api": + logger.debug(f"Setting configuration file location to '{os.path.join(arguments.config_dir, 'webqueue2-api.cfg')}'") + global_configs.CONFIG_LOCATION = os.path.join(arguments.config_dir, "webqueue2-api.cfg") + + return True + +def startApi() -> None: + try: + from webqueue2_api.api import app + app.run() + logger.debug(f"Stopping api") + except Exception as e: + logger.error(f"Failed to start the api: {e}") + +def main() -> None: + logger.name = "webqueue2_api" + arguments = getArguments() + validateArguments(arguments) + logger.debug('Starting api') + startApi() + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/webqueue2_api/api/__init__.py b/webqueue2_api/api/__init__.py index 89b78db..ea9b3cd 100644 --- a/webqueue2_api/api/__init__.py +++ b/webqueue2_api/api/__init__.py @@ -1,5 +1,6 @@ from . import resources from ..logger import logger +from .. 
import global_configs import os from flask import Flask, request, after_this_request from flask_restful import Api @@ -8,8 +9,9 @@ from pathlib import Path config = configparser.ConfigParser() -config_file = os.path.realpath("webqueue2-api.cfg") -config.read(config_file) + +if global_configs.CONFIG_LOCATION is not None: + config.read(global_configs.CONFIG_LOCATION) logger.name = __name__ @@ -53,6 +55,7 @@ logger.debug(f"JWT_SECRET_KEY was defined, setting to specified value") app.config["JWT_SECRET_KEY"] = config["webqueue2_api"]["JWT_SECRET_KEY"] + api.add_resource(resources.login.Login, "/api/login") api.add_resource(resources.refresh_access_token.RefreshAccessToken, "/api/tokens/refresh") api.add_resource(resources.item.Item, "/api/data//") diff --git a/webqueue2_api/global_configs.py b/webqueue2_api/global_configs.py new file mode 100644 index 0000000..5a4d85d --- /dev/null +++ b/webqueue2_api/global_configs.py @@ -0,0 +1,2 @@ +CONFIG_LOCATION = None +VERBOSE = None \ No newline at end of file From ae7790438b2f4cfa06957db9d19e74fe098dc70d Mon Sep 17 00:00:00 2001 From: benne238 Date: Wed, 14 Apr 2021 15:20:00 -0400 Subject: [PATCH 39/43] added fix for running api with wrapper script --- webqueue2_api/ECNQueue/__init__.py | 2 ++ webqueue2_api/api/__init__.py | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/webqueue2_api/ECNQueue/__init__.py b/webqueue2_api/ECNQueue/__init__.py index 208c7c7..ddd9fc9 100644 --- a/webqueue2_api/ECNQueue/__init__.py +++ b/webqueue2_api/ECNQueue/__init__.py @@ -12,6 +12,8 @@ config = configparser.ConfigParser() if global_configs.CONFIG_LOCATION is not None: config.read(global_configs.CONFIG_LOCATION) +elif os.path.exists(os.path.realpath("webqueue2-api.cfg")): + config.read("webqueue2-api.cfg") if "ECNQueue" not in config.sections(): logger.warning(f"ECNQueue section not included in the '{global_configs.CONFIG_LOCATION}' configuration file") diff --git a/webqueue2_api/api/__init__.py b/webqueue2_api/api/__init__.py index ea9b3cd..18e811f 100644 --- a/webqueue2_api/api/__init__.py +++ b/webqueue2_api/api/__init__.py @@ -12,7 +12,9 @@ if global_configs.CONFIG_LOCATION is not None: config.read(global_configs.CONFIG_LOCATION) - +elif os.path.exists(os.path.realpath("webqueue2-api.cfg")): + config.read("webqueue2-api.cfg") + logger.name = __name__ # Create Flask App From 5e57b2e135e61203dd5687a4bb9403db1758cf75 Mon Sep 17 00:00:00 2001 From: benne238 Date: Tue, 20 Apr 2021 01:47:08 -0400 Subject: [PATCH 40/43] Inititial version of a functioning(ish) api with gunicorn --- webqueue2_api/ECNQueue/__init__.py | 89 +++++++----- webqueue2_api/__main__.py | 218 ++++++++++++++++++++++++++--- webqueue2_api/api/__init__.py | 62 ++++---- webqueue2_api/global_configs.py | 9 +- webqueue2_api/logger/__init__.py | 33 ++--- 5 files changed, 313 insertions(+), 98 deletions(-) diff --git a/webqueue2_api/ECNQueue/__init__.py b/webqueue2_api/ECNQueue/__init__.py index ddd9fc9..8f71a48 100644 --- a/webqueue2_api/ECNQueue/__init__.py +++ b/webqueue2_api/ECNQueue/__init__.py @@ -4,42 +4,63 @@ from ..logger import logger from .. 
import global_configs -logger.name = __name__ +queue_directory = global_configs.QUEUE_DIRECTORY +queues_to_ignore = global_configs.QUEUES_TO_IGNORE + +default_queue_directory = "/home/pier/e/queue/Mail" +default_queues_to_ignore = ["archives", "drafts", "inbox", "coral"] + +def readConfigs() -> None: + global queue_directory + global queues_to_ignore + global default_queue_directory + global default_queues_to_ignore + + if "ECNQueue" not in config.sections(): + logger.warning(f"ECNQueue section not included in the '{global_configs.CONFIG_LOCATION}' configuration file") + logger.warning(f"queue_directory set to '{queue_directory}'") + logger.warning(f"queues_to_ignore set to {queues_to_ignore}") + return + + if config.has_option('ECNQueue', 'QUEUE_DIRECTORY'): + newQueueDir = config['ECNQueue']['QUEUE_DIRECTORY'] + logger.debug(f"QUEUE_DIRECTORY variable included under ECNQueue section in config file") + if os.path.exists(newQueueDir) and not queue_directory: + queue_directory = newQueueDir + logger.debug(f"QUEUE_DIRECTORY in config file valid, setting Queue Directory to '{queue_directory}'") + elif not os.path.exists(newQueueDir): + logger.warning(f"QUEUE_DIRECTORY in config file invalid directory location, defaulting to '{default_queue_directory}'") -queue_directory = "/home/pier/e/queue/Mail" -queues_to_ignore = ["archives", "drafts", "inbox", "coral"] + elif "ECNQueue" in config.sections(): + logger.warning(f"QUEUE_DIRECTORY var not included under ECNQueue section in config file, defaulting to {default_queue_directory}") + + if config.has_option('ECNQueue', 'QUEUES_TO_IGNORE') and not queues_to_ignore: + newQueuesIgnore = config['ECNQueue']['QUEUES_TO_IGNORE'] + logger.debug(f"QUEUES_TO_IGNORE variable included under ECNQueue section in config file") + try: + queues_to_ignore = json.loads(newQueuesIgnore) + logger.debug(f"QUEUES_TO_IGNORE in config file is a valid list, setting to {queues_to_ignore}") + except: + logger.warning(f"QUEUES_TO_IGNORE in config file not a valid list, defaulting to {default_queues_to_ignore}") + + elif "ECNQueue" in config.sections() and not queues_to_ignore: + logger.warning(f"QUEUES_TO_IGNORE var not included under ECNQueue section in config file, defaulting to {default_queues_to_ignore}") + + +logger.name = __name__ config = configparser.ConfigParser() -if global_configs.CONFIG_LOCATION is not None: +if global_configs.CONFIG_LOCATION and not(queue_directory and queues_to_ignore): config.read(global_configs.CONFIG_LOCATION) -elif os.path.exists(os.path.realpath("webqueue2-api.cfg")): + readConfigs() +elif os.path.exists(os.path.realpath("webqueue2-api.cfg")) and not(queue_directory and queues_to_ignore): + logger.debug(f"No specified config file location, defaulting to {os.path.realpath('webqueue2-api.cfg')}") config.read("webqueue2-api.cfg") - -if "ECNQueue" not in config.sections(): - logger.warning(f"ECNQueue section not included in the '{global_configs.CONFIG_LOCATION}' configuration file") - logger.warning(f"queue_directory set to '{queue_directory}'") - logger.warning(f"queues_to_ignore set to {queues_to_ignore}") - -if config.has_option('ECNQueue', 'QUEUE_DIRECTORY'): - newQueueDir = config['ECNQueue']['QUEUE_DIRECTORY'] - logger.debug(f"QUEUE_DIRECTORY variable included under ECNQueue section in config file") - if os.path.exists(newQueueDir): - queue_directory = newQueueDir - logger.debug(f"QUEUE_DIRECTORY in config file valid, setting Queue Directory to '{queue_directory}'") - else: - logger.warning(f"QUEUE_DIRECTORY in config file invalid 
directory location, defaulting to '{queue_directory}'") - -elif "ECNQueue" in config.sections(): - logger.warning(f"QUEUE_DIRECTORY var not included under ECNQueue section in config file, defaulting to {queue_directory}") - -if config.has_option('ECNQueue', 'QUEUES_TO_IGNORE'): - newQueuesIgnore = config['ECNQueue']['QUEUES_TO_IGNORE'] - logger.debug(f"QUEUES_TO_IGNORE variable included under ECNQueue section in config file") - try: - queues_to_ignore = json.loads(newQueuesIgnore) - logger.debug(f"QUEUES_TO_IGNORE in config file is a valid list, setting to {queues_to_ignore}") - except: - logger.warning(f"QUEUES_TO_IGNORE in config file not a valid list, defaulting to {queues_to_ignore}") - -elif "ECNQueue" in config.sections(): - logger.warning(f"QUEUES_TO_IGNORE var not included under ECNQueue section in config file, defaulting to {queues_to_ignore}") + readConfigs() +else: + if not queue_directory: + logger.warning(f"queue_directory not defined, defaulting to '/home/pier/e/queue/Mail'") + queue_directory = "/home/pier/e/queue/Mail" + if not queues_to_ignore: + logger.warning(f"queues_to_ignore not defined, defaulting to ['archives', 'drafts', 'inbox', 'coral']") + queues_to_ignore = ["archives", "drafts", "inbox", "coral"] diff --git a/webqueue2_api/__main__.py b/webqueue2_api/__main__.py index af07d3f..3f4e452 100644 --- a/webqueue2_api/__main__.py +++ b/webqueue2_api/__main__.py @@ -1,8 +1,15 @@ #from webqueue2_api.api import app #import webqueue2_api.ECNQueue -from webqueue2_api.logger import logger import argparse, os -from . import global_configs +from webqueue2_api import global_configs +from webqueue2_api import logger +import logging +import subprocess +from typing import Union +#from webqueue2_api.api import app + +logger.logger.name = "webqueue2_api" +logFilePath = "-" def getArguments() -> argparse.Namespace: parser = argparse.ArgumentParser(description='Argument Parser') @@ -13,46 +20,215 @@ def getArguments() -> argparse.Namespace: stopApi = apiActions.add_parser("stop-api") restartApi = apiActions.add_parser("restart-api") - startApi.add_argument( + parser.add_argument( "-c", - "--config-dir", + "--config-file", help="directory cotaining the webqueue2_api.cfg file, defaults to current directory", - default=os.path.realpath(os.path.dirname("webqueue2_api.cfg")) + default=None ) - startApi.add_argument( + + parser.add_argument( "-v", "--verbose", help="toggles verbose mode", - default=False, action='store_true' ) + parser.add_argument( + "-l", + "--log-file", + help="Specifies directory for the output log (defaults to /tmp)", + default=None + ) + parser.add_argument( + "-j", + "--jwt-secret-key", + help="The JWT secret key (defaults to a random 16 alpha-numeric string)", + default=None + ) + parser.add_argument( + "-e", + "--environment", + choices=["dev", "prod"], + help="The environment the api should be started in (dev, prod)", + default=None + ) + parser.add_argument( + "-i", + "--queues-to-ignore", + nargs="+", + help="List of queues to ignore (must include atleast one queue with this flag", + default=None + ) + parser.add_argument( + "-d", + "--queue-dir", + help="The location of the live queue", + default=None + ) + return parser.parse_args() def validateArguments(arguments: argparse.Namespace) -> bool: - if arguments.api_action == "start-api" and not os.path.exists(os.path.join(arguments.config_dir, "webqueue2-api.cfg")): - logger.warning(f"'{arguments.config_dir}' does not contain 'webqueue2-api.cfg' file") - logger.warning(f"Setting default values for webqueue2_api 
configurations") - elif arguments.api_action == "start-api": - logger.debug(f"Setting configuration file location to '{os.path.join(arguments.config_dir, 'webqueue2-api.cfg')}'") - global_configs.CONFIG_LOCATION = os.path.join(arguments.config_dir, "webqueue2-api.cfg") + apiAction = arguments.api_action + if apiAction == "stop-api": return True + + if arguments.config_file and os.path.isdir(os.path.dirname(os.path.realpath(arguments.config_file))) and not os.path.isdir(arguments.config_file): + global_configs.CONFIG_LOCATION = arguments.config_file + elif arguments.config_file: + logger.logger.warning(f"'{os.path.dirname(os.path.realpath(arguments.config_file))}' is not a valid file path for the configuration file") + + if arguments.log_file and os.path.isdir(os.path.dirname(os.path.realpath(arguments.log_file))) and not os.path.isdir(arguments.log_file): + for handler in logger.logger.handlers: + if handler.get_name == "File": logger.logger.removeHandler(handler) + log_file_path = arguments.log_file + file_handler = logging.FileHandler(log_file_path) + file_handler.setFormatter(logger.log_formatter) + file_handler.set_name("File") + logger.logger.addHandler(file_handler) + logger.logger.debug(f"Setting logging to output to '{log_file_path}'") + global logFilePath + logFilePath = log_file_path + elif arguments.log_file: + logger.logger.warning(f"'{os.path.dirname(os.path.realpath(arguments.log_file))}' is not a valid file path for logger output") + + if arguments.jwt_secret_key: + print("+++++++++++++++++++++++++++++++++++++") + logger.logger.debug(f"Setting the JWT_SECRET_KEY to {arguments.jwt_secret_key}") + global_configs.JWT_SECRET_KEY = arguments.jwt_secret_key + + if arguments.queue_dir and os.path.isdir(arguments.queue_dir): + logger.logger.debug(f"Setting Queue Directory to '{arguments.queue_dir}'") + elif arguments.queue_dir and not os.path.isdir(arguments.queue_dir): + logger.logger.warning(f"'{arguments.queue_dir}' is not a valid queue directory") + + if arguments.queues_to_ignore: + global_configs.QUEUES_TO_IGNORE = arguments.queues_to_ignore + logger.logger.debug(f"Setting QUEUES_TO_IGNORE to {arguments.queues_to_ignore}") + + if arguments.environment: + environ = False if arguments.environment == "dev" else True + global_configs.ENVIRONMENT = environ + logger.logger.debug(f"Setting ENVIRONMENT to {environ}") + return True -def startApi() -> None: +def startApi(arguments) -> None: + print("88888888888") + from .api import app + print(app.root_path) + print("hi") try: - from webqueue2_api.api import app - app.run() - logger.debug(f"Stopping api") + logger.logger.debug('Starting api') + subprocess.Popen( + [ + os.path.join(os.environ['VIRTUAL_ENV'], "bin/gunicorn"), + "-b", "0.0.0.0:5000", + "--access-logfile", logFilePath, + "--error-logfile", logFilePath, + "--daemon", + "webqueue2_api.api:app" + ] + ) + #stdout, stderr = process.communicate() + #cd api/ && venv/bin/gunicorn -b 127.0.0.1:5000 api:app + #app.run() + #logger.logger.debug(f"API has been stopped") + #return except Exception as e: - logger.error(f"Failed to start the api: {e}") + logger.logger.error(f"Failed to start the api: {e}") + +def stopApi() -> None: + #user = str(os.system("whoami")) + logger.logger.debug("Stopping the api") + #process = subprocess.Popen(["kill", "-9", "24158"]) + #os.system("kill -9 23670") + getGunicornProcs = subprocess.Popen( + ["pgrep", "-f", "gunicorn"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE + ) + + gunicornProcs = 
getGunicornProcs.communicate()[0].decode("utf-8").strip().split('\n') + if gunicornProcs == ['']: + logger.logger.debug(f"No gunicorn process found, exiting") + return + + for proc in gunicornProcs: + logger.logger.debug(f"Killing process {proc}") + killGunicornProcs = subprocess.Popen( + ["kill", "-9", f"{proc}"], + #stdin=gunicornProcs, + #stdout=subprocess.PIPE, + #stderr=subprocess.PIPE, + ) + #stdout, stderr = killGunicornProcs.communicate(0) + #print(stdout) + #print(stderr) + #os.system("kill " + str(proc)) + #killGunicornProcs.kill() + logger.logger.debug("All gunicorn processes killed, exiting") + return + #os.system("pgrep -u $(whoami) -f gunicorn -d ' ' | xargs kill -9") + #stuff = run_logged_subprocess(command=["pgrep", "-u", user, "-f", "gunicorn", "-d", "' '", "|", "xargs", "kill", "-9"], shell=False) + +def run_logged_subprocess(command: Union[str, list], timeout: int = 60, shell: bool = True) -> tuple: + """Executes a shell command using subprocess with logging. + stderr is redirected to stdout and stdout is pipped to logger. + If the subprocess raises an exception, the exception is logged as critical. + Example: + Running a successful command: + run_logged_subprocess(command=["git", "commit", "-m", "'Commit message.'"]) + Returns: (0, "") + Running an unsuccessful shell command with a 20 second timeout: + run_logged_subprocess(command="cd test/", timeout=20, shell=True) + Returns: (1, "cd: test: No such file or directory\n") + Args: + command (Union): The command to run. If shell=False, pass a list with the first item being the command and the subsequent items being arguments. If shell=True, pass a string as you would type it into a shell. + timeout (int): The number of seconds to wait for a program before killing it. Defaults to 60. + Returns: + tuple: With the first value being the return code and second being the combined stdout+stderr + """ + logger.logger.debug(f"Entering subprocess for '{command}'") + with subprocess.Popen(command,\ + stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=shell, universal_newlines=True)\ + as logged_shell_process: + + subprocess_log_prefix = f"(PID: {logged_shell_process.pid})" + + try: + # Convert combined stdout and stderr stream to list of strings + process_output_stream, _ = logged_shell_process.communicate(timeout=timeout) + process_output_lines = process_output_stream.split("\n") + # Remove last entry in process_output_lines because it is always empty + process_output_lines.pop(-1) + + for line in process_output_lines: + logger.logger.debug(f"{subprocess_log_prefix}: {line}") + except Exception as exception: + logger.logger.critical(str(exception)) + else: + if logged_shell_process.returncode != 0: + logger.logger.debug(f"Something went wrong. 
'{command}' exited with return code {logged_shell_process.returncode}") + elif logged_shell_process.returncode == 0: + logger.logger.debug(f"Subprocess for '{command}' completed successfuly") + finally: + logger.logger.debug(f"Exiting subprocess for '{command}'") + return (logged_shell_process.returncode, process_output_stream) def main() -> None: - logger.name = "webqueue2_api" arguments = getArguments() + if arguments.verbose == True: + for handler in logger.logger.handlers: + if handler.get_name() == "Terminal": handler.setLevel(logging.DEBUG) + validateArguments(arguments) - logger.debug('Starting api') - startApi() + if arguments.api_action == "start-api": startApi(arguments) + if arguments.api_action == "stop-api": stopApi() + if arguments.api_action == "restart-api": + stopApi() + startApi(arguments) if __name__ == "__main__": main() \ No newline at end of file diff --git a/webqueue2_api/api/__init__.py b/webqueue2_api/api/__init__.py index 18e811f..e5f508b 100644 --- a/webqueue2_api/api/__init__.py +++ b/webqueue2_api/api/__init__.py @@ -9,12 +9,6 @@ from pathlib import Path config = configparser.ConfigParser() - -if global_configs.CONFIG_LOCATION is not None: - config.read(global_configs.CONFIG_LOCATION) -elif os.path.exists(os.path.realpath("webqueue2-api.cfg")): - config.read("webqueue2-api.cfg") - logger.name = __name__ # Create Flask App @@ -23,8 +17,43 @@ # Create API Interface api = Api(app) +def readConfigs(): + if config.has_section("webqueue2_api") and config.has_option("webqueue2_api", "ENVIRONMENT"): + app.config["JWT_COOKIE_SECURE"] = False if config["webqueue2_api"]["ENVIRONMENT"] == "dev" else True + logger.debug(f"JWT_COOKIE_SECURE set to {app.config['JWT_COOKIE_SECURE']}") + + if config.has_section("webqueue2_api") and config["webqueue2_api"]["JWT_SECRET_KEY"] == "": + logger.warning(f"JWT_SECRET_KEY wasn't defined, defaulting to 16 character random alpha numeric string") + + elif config.has_section("webqueue2_api") and config.has_option("webqueue2_api", "JWT_SECRET_KEY") and len(config["webqueue2_api"]["JWT_SECRET_KEY"]) < 16: + logger.warning(f"The JWT_SECRET_KEY is less than 16 characters, this is not recomended") + + elif config.has_option("webqueue2_api", "JWT_SECRET_KEY") : + logger.debug(f"JWT_SECRET_KEY was defined, setting to specified value") + app.config["JWT_SECRET_KEY"] = config["webqueue2_api"]["JWT_SECRET_KEY"] + +app.config["JWT_SECRET_KEY"] = global_configs.JWT_SECRET_KEY +print(app.config["JWT_SECRET_KEY"]) +app.config["JWT_COOKIE_SECURE"] = global_configs.ENVIRONMENT + +if global_configs.CONFIG_LOCATION and not (app.config["JWT_SECRET_KEY"] and app.config['JWT_COOKIE_SECURE']): + config.read(global_configs.CONFIG_LOCATION) + readConfigs() +elif os.path.exists(os.path.realpath("webqueue2-api.cfg")) and not (app.config["JWT_SECRET_KEY"] and app.config["JWT_COOKIE_SECURE"]): + logger.debug(f"No specified config file location, defaulting to {os.path.realpath('webqueue2-api.cfg')}") + config.read("webqueue2-api.cfg") + readConfigs() +else: + if not global_configs.JWT_SECRET_KEY: + logger.warning(f"JWT_SECRET_KEY not defined, defaulting to random 16 character string") + app.config["JWT_SECRET_KEY"] = os.urandom(16) + if not global_configs.ENVIRONMENT: + logger.warning(f"JWT_COOKIE_SECURE or ENVIRONMENT not defined, defaulting to True") + app.config["JWT_COOKIE_SECURE"] = False + + # Set JWT secret key and create JWT manager -app.config["JWT_SECRET_KEY"] = os.urandom(16) +#app.config["JWT_SECRET_KEY"] = os.urandom(16) # Set identity claim field key to 
sub for JWT RFC complience # Flask-JWT-Extended uses 'identity' by default for compatibility reasons app.config["JWT_IDENTITY_CLAIM"] = "sub" @@ -34,7 +63,7 @@ # Look for JWTs in headers (for access) then cookies (for refresh) app.config["JWT_TOKEN_LOCATION"] = ["headers", "cookies"] # Restrict cookies to HTTPS in prod, allow HTTP in dev -app.config["JWT_COOKIE_SECURE"] = True +#app.config["JWT_COOKIE_SECURE"] = True # Restrict cookies using SameSite=strict flag app.config["JWT_COOKIE_SAMESITE"] = "strict" # Set the cookie key for CRSF validation string @@ -43,23 +72,8 @@ tokenManager = JWTManager(app) -if config.has_section("webqueue2_api") and config.has_option("webqueue2_api", "ENVIRONMENT"): - app.config["JWT_COOKIE_SECURE"] = False if config["webqueue2_api"]["ENVIRONMENT"] == "dev" else True - logger.debug(f"JWT_COOKIE_SECURE set to {app.config['JWT_COOKIE_SECURE']}") - -if config.has_section("webqueue2_api") and config["webqueue2_api"]["JWT_SECRET_KEY"] == "": - logger.warning(f"JWT_SECRET_KEY wasn't defined, defaulting to 16 character random alpha numeric string") - -elif config.has_section("webqueue2_api") and config.has_option("webqueue2_api", "JWT_SECRET_KEY") and len(config["webqueue2_api"]["JWT_SECRET_KEY"]) < 16: - logger.warning(f"The JWT_SECRET_KEY is less than 16 characters, this is not recomended") - -elif config.has_option("webqueue2_api", "JWT_SECRET_KEY") : - logger.debug(f"JWT_SECRET_KEY was defined, setting to specified value") - app.config["JWT_SECRET_KEY"] = config["webqueue2_api"]["JWT_SECRET_KEY"] - - api.add_resource(resources.login.Login, "/api/login") api.add_resource(resources.refresh_access_token.RefreshAccessToken, "/api/tokens/refresh") api.add_resource(resources.item.Item, "/api/data//") api.add_resource(resources.queue.Queue, "/api/data/") -api.add_resource(resources.queue_list.QueueList, "/api/data/get_queues") \ No newline at end of file +api.add_resource(resources.queue_list.QueueList, "/api/data/get_queues") diff --git a/webqueue2_api/global_configs.py b/webqueue2_api/global_configs.py index 5a4d85d..cb01e10 100644 --- a/webqueue2_api/global_configs.py +++ b/webqueue2_api/global_configs.py @@ -1,2 +1,9 @@ CONFIG_LOCATION = None -VERBOSE = None \ No newline at end of file + +# API Configurations +JWT_SECRET_KEY = None +ENVIRONMENT = None + +# ECNQueue Configurations +QUEUES_TO_IGNORE = [] +QUEUE_DIRECTORY = None \ No newline at end of file diff --git a/webqueue2_api/logger/__init__.py b/webqueue2_api/logger/__init__.py index d5b4e94..5453907 100644 --- a/webqueue2_api/logger/__init__.py +++ b/webqueue2_api/logger/__init__.py @@ -1,7 +1,7 @@ import logging import configparser import os -from pathlib import Path +import sys logger_name = __name__ logger = logging.getLogger(logger_name) @@ -19,28 +19,25 @@ stream_handler = logging.StreamHandler() stream_handler.setFormatter(log_formatter) stream_handler.setLevel(logging.INFO) +stream_handler.set_name("Terminal") + logger.addHandler(stream_handler) -# Configure out to logfile config = configparser.ConfigParser() -configFile = 'webqueue2-api.cfg' -config.read(configFile) + +# default configuration file +configFile = "webqueue2-api.cfg" +if os.path.exists(os.path.realpath(configFile)): + config.read(configFile) + if "Logger" not in config.sections(): logger.debug(f"Logger section not definied in {configFile}") - elif config.has_option("Logger", "LOGGER_OUT_FILE"): - if config["Logger"]["LOGGER_OUT_FILE"] == "": - logger.debug("LOGGER_OUT_FILE variable empty, not setting logger file path") - - elif not 
os.path.isdir(config["Logger"]["LOGGER_OUT_FILE"]): - logger.debug("LOGGER_OUT_FILE doesn't point to existing directory, not setting logger file path") - - else: - log_file_path = Path(config["Logger"]["LOGGER_OUT_FILE"], __name__ + ".log") - file_handler = logging.FileHandler(log_file_path) - logger.debug(f"Logger") - file_handler.setFormatter(log_formatter) - logger.addHandler(file_handler) - logger.debug(f"LOGGER_OUT_FILE included under Logger section in config file is valid, setting to '{log_file_path}'") + log_file_path = config["Logger"]["LOGGER_OUT_FILE"] + file_handler = logging.FileHandler(log_file_path) + file_handler.setFormatter(log_formatter) + file_handler.set_name("File") + logger.addHandler(file_handler) + logger.debug(f"LOGGER_OUT_FILE included under Logger section in config file is valid, setting to '{log_file_path}'") else: logger.debug("LOGGER_OUT_FILE not defined in logger section, not setting logger path") \ No newline at end of file From 5bf968d6e38dfd67e099b7ff1c200d906275d30d Mon Sep 17 00:00:00 2001 From: benne238 Date: Thu, 22 Apr 2021 17:59:29 -0400 Subject: [PATCH 41/43] gunicorn working with command line arguments --- webqueue2_api/__main__.py | 81 +++++++---------------------------- webqueue2_api/api/__init__.py | 1 - webqueue2_api/start.py | 54 +++++++++++++++++++++++ 3 files changed, 70 insertions(+), 66 deletions(-) create mode 100644 webqueue2_api/start.py diff --git a/webqueue2_api/__main__.py b/webqueue2_api/__main__.py index 3f4e452..738e7d3 100644 --- a/webqueue2_api/__main__.py +++ b/webqueue2_api/__main__.py @@ -93,7 +93,7 @@ def validateArguments(arguments: argparse.Namespace) -> bool: logger.logger.warning(f"'{os.path.dirname(os.path.realpath(arguments.log_file))}' is not a valid file path for logger output") if arguments.jwt_secret_key: - print("+++++++++++++++++++++++++++++++++++++") + #print("+++++++++++++++++++++++++++++++++++++") logger.logger.debug(f"Setting the JWT_SECRET_KEY to {arguments.jwt_secret_key}") global_configs.JWT_SECRET_KEY = arguments.jwt_secret_key @@ -115,24 +115,19 @@ def validateArguments(arguments: argparse.Namespace) -> bool: return True def startApi(arguments) -> None: - print("88888888888") - from .api import app - print(app.root_path) - print("hi") try: logger.logger.debug('Starting api') - subprocess.Popen( + start = subprocess.Popen( [ os.path.join(os.environ['VIRTUAL_ENV'], "bin/gunicorn"), "-b", "0.0.0.0:5000", "--access-logfile", logFilePath, "--error-logfile", logFilePath, "--daemon", - "webqueue2_api.api:app" + f"webqueue2_api.start:startApi({arguments})" ] ) - #stdout, stderr = process.communicate() - #cd api/ && venv/bin/gunicorn -b 127.0.0.1:5000 api:app + stdout, stderr = start.communicate() #app.run() #logger.logger.debug(f"API has been stopped") #return @@ -140,10 +135,7 @@ def startApi(arguments) -> None: logger.logger.error(f"Failed to start the api: {e}") def stopApi() -> None: - #user = str(os.system("whoami")) logger.logger.debug("Stopping the api") - #process = subprocess.Popen(["kill", "-9", "24158"]) - #os.system("kill -9 23670") getGunicornProcs = subprocess.Popen( ["pgrep", "-f", "gunicorn"], stdout=subprocess.PIPE, @@ -163,7 +155,7 @@ def stopApi() -> None: #stdout=subprocess.PIPE, #stderr=subprocess.PIPE, ) - #stdout, stderr = killGunicornProcs.communicate(0) + # stdout, stderr = killGunicornProcs.communicate(0) #print(stdout) #print(stderr) #os.system("kill " + str(proc)) @@ -173,62 +165,21 @@ def stopApi() -> None: #os.system("pgrep -u $(whoami) -f gunicorn -d ' ' | xargs kill -9") #stuff = 
run_logged_subprocess(command=["pgrep", "-u", user, "-f", "gunicorn", "-d", "' '", "|", "xargs", "kill", "-9"], shell=False) -def run_logged_subprocess(command: Union[str, list], timeout: int = 60, shell: bool = True) -> tuple: - """Executes a shell command using subprocess with logging. - stderr is redirected to stdout and stdout is pipped to logger. - If the subprocess raises an exception, the exception is logged as critical. - Example: - Running a successful command: - run_logged_subprocess(command=["git", "commit", "-m", "'Commit message.'"]) - Returns: (0, "") - Running an unsuccessful shell command with a 20 second timeout: - run_logged_subprocess(command="cd test/", timeout=20, shell=True) - Returns: (1, "cd: test: No such file or directory\n") - Args: - command (Union): The command to run. If shell=False, pass a list with the first item being the command and the subsequent items being arguments. If shell=True, pass a string as you would type it into a shell. - timeout (int): The number of seconds to wait for a program before killing it. Defaults to 60. - Returns: - tuple: With the first value being the return code and second being the combined stdout+stderr - """ - logger.logger.debug(f"Entering subprocess for '{command}'") - with subprocess.Popen(command,\ - stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=shell, universal_newlines=True)\ - as logged_shell_process: - - subprocess_log_prefix = f"(PID: {logged_shell_process.pid})" - - try: - # Convert combined stdout and stderr stream to list of strings - process_output_stream, _ = logged_shell_process.communicate(timeout=timeout) - process_output_lines = process_output_stream.split("\n") - # Remove last entry in process_output_lines because it is always empty - process_output_lines.pop(-1) - - for line in process_output_lines: - logger.logger.debug(f"{subprocess_log_prefix}: {line}") - except Exception as exception: - logger.logger.critical(str(exception)) - else: - if logged_shell_process.returncode != 0: - logger.logger.debug(f"Something went wrong. 
'{command}' exited with return code {logged_shell_process.returncode}") - elif logged_shell_process.returncode == 0: - logger.logger.debug(f"Subprocess for '{command}' completed successfuly") - finally: - logger.logger.debug(f"Exiting subprocess for '{command}'") - return (logged_shell_process.returncode, process_output_stream) def main() -> None: - arguments = getArguments() - if arguments.verbose == True: - for handler in logger.logger.handlers: - if handler.get_name() == "Terminal": handler.setLevel(logging.DEBUG) + arguments = getArguments()._get_kwargs() + argumentsDict = {} + for argument in arguments: + argumentsDict[argument[0]] = argument[1] - validateArguments(arguments) - if arguments.api_action == "start-api": startApi(arguments) - if arguments.api_action == "stop-api": stopApi() - if arguments.api_action == "restart-api": + #print(argumentsDict) + + #validateArguments(arguments) + if argumentsDict["api_action"] == "start-api": startApi(argumentsDict) + if argumentsDict["api_action"] == "stop-api": stopApi() + if argumentsDict["api_action"] == "restart-api": stopApi() - startApi(arguments) + startApi(argumentsDict) if __name__ == "__main__": main() \ No newline at end of file diff --git a/webqueue2_api/api/__init__.py b/webqueue2_api/api/__init__.py index e5f508b..a287412 100644 --- a/webqueue2_api/api/__init__.py +++ b/webqueue2_api/api/__init__.py @@ -33,7 +33,6 @@ def readConfigs(): app.config["JWT_SECRET_KEY"] = config["webqueue2_api"]["JWT_SECRET_KEY"] app.config["JWT_SECRET_KEY"] = global_configs.JWT_SECRET_KEY -print(app.config["JWT_SECRET_KEY"]) app.config["JWT_COOKIE_SECURE"] = global_configs.ENVIRONMENT if global_configs.CONFIG_LOCATION and not (app.config["JWT_SECRET_KEY"] and app.config['JWT_COOKIE_SECURE']): diff --git a/webqueue2_api/start.py b/webqueue2_api/start.py new file mode 100644 index 0000000..bc46b31 --- /dev/null +++ b/webqueue2_api/start.py @@ -0,0 +1,54 @@ +from . import global_configs +from . import logger +import argparse, logging, os +#print("00000000000000000000000") +#print(global_configs.JWT_SECRET_KEY) +#from . 
import global_configs + +def validateArguments(arguments: dict) -> bool: + if arguments["config_file"] and os.path.isdir(os.path.dirname(os.path.realpath(arguments["config_file"]))) and not os.path.isdir(arguments["config_file"]): + global_configs.CONFIG_LOCATION = arguments["config_file"] + elif arguments["config_file"]: + logger.logger.warning(f"'{os.path.dirname(os.path.realpath(arguments['config_file']))}' is not a valid file path for the configuration file") + + if arguments["log_file"] and os.path.isdir(os.path.dirname(os.path.realpath(arguments["log_file"]))) and not os.path.isdir(arguments["log_file"]): + for handler in logger.logger.handlers: + if handler.get_name == "File": logger.logger.removeHandler(handler) + log_file_path = arguments["log_file"] + file_handler = logging.FileHandler(log_file_path) + file_handler.setFormatter(logger.log_formatter) + file_handler.set_name("File") + logger.logger.addHandler(file_handler) + logger.logger.debug(f"Setting logging to output to '{log_file_path}'") + #global logFilePath + #logFilePath = log_file_path + elif arguments["log_file"]: + logger.logger.warning(f"'{os.path.dirname(os.path.realpath(arguments['log_file']))}' is not a valid file path for logger output") + + if arguments["jwt_secret_key"]: + logger.logger.debug(f"Setting the JWT_SECRET_KEY to {arguments['jwt_secret_key']}") + global_configs.JWT_SECRET_KEY = arguments["jwt_secret_key"] + + if arguments["queue_dir"] and os.path.isdir(arguments["queue_dir"]): + logger.logger.debug(f"Setting Queue Directory to '{arguments['queue_dir']}'") + elif arguments["queue_dir"] and not os.path.isdir(arguments["queue_dir"]): + logger.logger.warning(f"'{arguments['queue_dir']}' is not a valid queue directory") + + if arguments["queues_to_ignore"]: + global_configs.QUEUES_TO_IGNORE = arguments["queues_to_ignore"] + logger.logger.debug(f"Setting QUEUES_TO_IGNORE to {arguments['queues_to_ignore']}") + + if arguments["environment"]: + environ = False if arguments["environment"] == "dev" else True + global_configs.ENVIRONMENT = environ + logger.logger.debug(f"Setting ENVIRONMENT to {environ}") + + return True + +def startApi(arguments: dict): + validateArguments(arguments) + from webqueue2_api.api import app + #app.config["JWT_SECRET_KEY"] = global_configs.JWT_SECRET_KEY + #app.config["JWT_COOKIE_SECURE"] = global_configs.ENVIRONMENT + return app + From 45e37bfff33e5b2a1117eafa21ae0340548a844f Mon Sep 17 00:00:00 2001 From: benne238 Date: Thu, 29 Apr 2021 09:33:28 -0400 Subject: [PATCH 42/43] Readability and functionality update --- webqueue2_api/ECNQueue/__init__.py | 68 ++--------- webqueue2_api/__main__.py | 105 +++++------------ webqueue2_api/api/__init__.py | 61 ++++------ webqueue2_api/global_configs.py | 10 +- webqueue2_api/logger/__init__.py | 36 +++--- webqueue2_api/start.py | 54 +-------- webqueue2_api/validate_arguments.py | 175 ++++++++++++++++++++++++++++ 7 files changed, 261 insertions(+), 248 deletions(-) create mode 100644 webqueue2_api/validate_arguments.py diff --git a/webqueue2_api/ECNQueue/__init__.py b/webqueue2_api/ECNQueue/__init__.py index 8f71a48..fa222b4 100644 --- a/webqueue2_api/ECNQueue/__init__.py +++ b/webqueue2_api/ECNQueue/__init__.py @@ -1,66 +1,18 @@ import configparser -import os -import json from ..logger import logger from .. 
import global_configs -queue_directory = global_configs.QUEUE_DIRECTORY -queues_to_ignore = global_configs.QUEUES_TO_IGNORE +queue_directory = "/home/pier/e/queue/Mail" +queues_to_ignore = ["archives", "drafts", "inbox", "coral"] -default_queue_directory = "/home/pier/e/queue/Mail" -default_queues_to_ignore = ["archives", "drafts", "inbox", "coral"] +logger.name = __name__ -def readConfigs() -> None: +def applyQueueDirectory(dir: str) -> None: global queue_directory - global queues_to_ignore - global default_queue_directory - global default_queues_to_ignore - - if "ECNQueue" not in config.sections(): - logger.warning(f"ECNQueue section not included in the '{global_configs.CONFIG_LOCATION}' configuration file") - logger.warning(f"queue_directory set to '{queue_directory}'") - logger.warning(f"queues_to_ignore set to {queues_to_ignore}") - return - - if config.has_option('ECNQueue', 'QUEUE_DIRECTORY'): - newQueueDir = config['ECNQueue']['QUEUE_DIRECTORY'] - logger.debug(f"QUEUE_DIRECTORY variable included under ECNQueue section in config file") - if os.path.exists(newQueueDir) and not queue_directory: - queue_directory = newQueueDir - logger.debug(f"QUEUE_DIRECTORY in config file valid, setting Queue Directory to '{queue_directory}'") - elif not os.path.exists(newQueueDir): - logger.warning(f"QUEUE_DIRECTORY in config file invalid directory location, defaulting to '{default_queue_directory}'") - - elif "ECNQueue" in config.sections(): - logger.warning(f"QUEUE_DIRECTORY var not included under ECNQueue section in config file, defaulting to {default_queue_directory}") + queue_directory = dir + return - if config.has_option('ECNQueue', 'QUEUES_TO_IGNORE') and not queues_to_ignore: - newQueuesIgnore = config['ECNQueue']['QUEUES_TO_IGNORE'] - logger.debug(f"QUEUES_TO_IGNORE variable included under ECNQueue section in config file") - try: - queues_to_ignore = json.loads(newQueuesIgnore) - logger.debug(f"QUEUES_TO_IGNORE in config file is a valid list, setting to {queues_to_ignore}") - except: - logger.warning(f"QUEUES_TO_IGNORE in config file not a valid list, defaulting to {default_queues_to_ignore}") - - elif "ECNQueue" in config.sections() and not queues_to_ignore: - logger.warning(f"QUEUES_TO_IGNORE var not included under ECNQueue section in config file, defaulting to {default_queues_to_ignore}") - - -logger.name = __name__ - -config = configparser.ConfigParser() -if global_configs.CONFIG_LOCATION and not(queue_directory and queues_to_ignore): - config.read(global_configs.CONFIG_LOCATION) - readConfigs() -elif os.path.exists(os.path.realpath("webqueue2-api.cfg")) and not(queue_directory and queues_to_ignore): - logger.debug(f"No specified config file location, defaulting to {os.path.realpath('webqueue2-api.cfg')}") - config.read("webqueue2-api.cfg") - readConfigs() -else: - if not queue_directory: - logger.warning(f"queue_directory not defined, defaulting to '/home/pier/e/queue/Mail'") - queue_directory = "/home/pier/e/queue/Mail" - if not queues_to_ignore: - logger.warning(f"queues_to_ignore not defined, defaulting to ['archives', 'drafts', 'inbox', 'coral']") - queues_to_ignore = ["archives", "drafts", "inbox", "coral"] +def applyQueuesToIgnore(ignore: list) -> None: + global queues_to_ignore + queues_to_ignore = ignore + return \ No newline at end of file diff --git a/webqueue2_api/__main__.py b/webqueue2_api/__main__.py index 738e7d3..b1f5741 100644 --- a/webqueue2_api/__main__.py +++ b/webqueue2_api/__main__.py @@ -9,7 +9,6 @@ #from webqueue2_api.api import app logger.logger.name = 
"webqueue2_api" -logFilePath = "-" def getArguments() -> argparse.Namespace: parser = argparse.ArgumentParser(description='Argument Parser') @@ -58,7 +57,7 @@ def getArguments() -> argparse.Namespace: "--queues-to-ignore", nargs="+", help="List of queues to ignore (must include atleast one queue with this flag", - default=None + default=[] ) parser.add_argument( "-d", @@ -69,68 +68,27 @@ def getArguments() -> argparse.Namespace: return parser.parse_args() -def validateArguments(arguments: argparse.Namespace) -> bool: - apiAction = arguments.api_action - if apiAction == "stop-api": return True - - if arguments.config_file and os.path.isdir(os.path.dirname(os.path.realpath(arguments.config_file))) and not os.path.isdir(arguments.config_file): - global_configs.CONFIG_LOCATION = arguments.config_file - elif arguments.config_file: - logger.logger.warning(f"'{os.path.dirname(os.path.realpath(arguments.config_file))}' is not a valid file path for the configuration file") - - if arguments.log_file and os.path.isdir(os.path.dirname(os.path.realpath(arguments.log_file))) and not os.path.isdir(arguments.log_file): - for handler in logger.logger.handlers: - if handler.get_name == "File": logger.logger.removeHandler(handler) - log_file_path = arguments.log_file - file_handler = logging.FileHandler(log_file_path) - file_handler.setFormatter(logger.log_formatter) - file_handler.set_name("File") - logger.logger.addHandler(file_handler) - logger.logger.debug(f"Setting logging to output to '{log_file_path}'") - global logFilePath - logFilePath = log_file_path - elif arguments.log_file: - logger.logger.warning(f"'{os.path.dirname(os.path.realpath(arguments.log_file))}' is not a valid file path for logger output") - - if arguments.jwt_secret_key: - #print("+++++++++++++++++++++++++++++++++++++") - logger.logger.debug(f"Setting the JWT_SECRET_KEY to {arguments.jwt_secret_key}") - global_configs.JWT_SECRET_KEY = arguments.jwt_secret_key - - - if arguments.queue_dir and os.path.isdir(arguments.queue_dir): - logger.logger.debug(f"Setting Queue Directory to '{arguments.queue_dir}'") - elif arguments.queue_dir and not os.path.isdir(arguments.queue_dir): - logger.logger.warning(f"'{arguments.queue_dir}' is not a valid queue directory") - - if arguments.queues_to_ignore: - global_configs.QUEUES_TO_IGNORE = arguments.queues_to_ignore - logger.logger.debug(f"Setting QUEUES_TO_IGNORE to {arguments.queues_to_ignore}") - - if arguments.environment: - environ = False if arguments.environment == "dev" else True - global_configs.ENVIRONMENT = environ - logger.logger.debug(f"Setting ENVIRONMENT to {environ}") +def startApi(config_file=None, + log_file=None, + jwt_secret_key=None, + environment=None, + queue_dir=None, + queues_to_ignore=[], + verbose=False, + **kwargs +) -> None: - return True - -def startApi(arguments) -> None: try: logger.logger.debug('Starting api') - start = subprocess.Popen( + subprocess.Popen( [ os.path.join(os.environ['VIRTUAL_ENV'], "bin/gunicorn"), "-b", "0.0.0.0:5000", - "--access-logfile", logFilePath, - "--error-logfile", logFilePath, - "--daemon", - f"webqueue2_api.start:startApi({arguments})" + #"--access-logfile", logger.log_file_output, + #"--error-logfile", logger.log_file_output, + f"webqueue2_api.start:startApi({locals()})" ] ) - stdout, stderr = start.communicate() - #app.run() - #logger.logger.debug(f"API has been stopped") - #return except Exception as e: logger.logger.error(f"Failed to start the api: {e}") @@ -149,37 +107,30 @@ def stopApi() -> None: for proc in gunicornProcs: 
logger.logger.debug(f"Killing process {proc}") - killGunicornProcs = subprocess.Popen( - ["kill", "-9", f"{proc}"], - #stdin=gunicornProcs, - #stdout=subprocess.PIPE, - #stderr=subprocess.PIPE, + subprocess.Popen( + ["kill", "-9", f"{proc}"] ) - # stdout, stderr = killGunicornProcs.communicate(0) - #print(stdout) - #print(stderr) - #os.system("kill " + str(proc)) - #killGunicornProcs.kill() logger.logger.debug("All gunicorn processes killed, exiting") return - #os.system("pgrep -u $(whoami) -f gunicorn -d ' ' | xargs kill -9") - #stuff = run_logged_subprocess(command=["pgrep", "-u", user, "-f", "gunicorn", "-d", "' '", "|", "xargs", "kill", "-9"], shell=False) - -def main() -> None: - arguments = getArguments()._get_kwargs() +def convertArguments(arguments: argparse.Namespace) -> dict: + argumentsTuple = arguments._get_kwargs() argumentsDict = {} - for argument in arguments: + for argument in argumentsTuple: argumentsDict[argument[0]] = argument[1] - #print(argumentsDict) - - #validateArguments(arguments) - if argumentsDict["api_action"] == "start-api": startApi(argumentsDict) + return argumentsDict + +def main() -> None: + argumentsDict = convertArguments(getArguments()) + + if argumentsDict["verbose"] == True: logger.addVerbosity() + + if argumentsDict["api_action"] == "start-api": startApi(**argumentsDict) if argumentsDict["api_action"] == "stop-api": stopApi() if argumentsDict["api_action"] == "restart-api": stopApi() - startApi(argumentsDict) + startApi(**argumentsDict) if __name__ == "__main__": main() \ No newline at end of file diff --git a/webqueue2_api/api/__init__.py b/webqueue2_api/api/__init__.py index a287412..0f6ce1d 100644 --- a/webqueue2_api/api/__init__.py +++ b/webqueue2_api/api/__init__.py @@ -1,14 +1,11 @@ from . import resources from ..logger import logger -from .. 
import global_configs -import os from flask import Flask, request, after_this_request from flask_restful import Api from flask_jwt_extended import JWTManager -import configparser -from pathlib import Path +import random +import string -config = configparser.ConfigParser() logger.name = __name__ # Create Flask App @@ -17,42 +14,23 @@ # Create API Interface api = Api(app) -def readConfigs(): - if config.has_section("webqueue2_api") and config.has_option("webqueue2_api", "ENVIRONMENT"): - app.config["JWT_COOKIE_SECURE"] = False if config["webqueue2_api"]["ENVIRONMENT"] == "dev" else True - logger.debug(f"JWT_COOKIE_SECURE set to {app.config['JWT_COOKIE_SECURE']}") - - if config.has_section("webqueue2_api") and config["webqueue2_api"]["JWT_SECRET_KEY"] == "": - logger.warning(f"JWT_SECRET_KEY wasn't defined, defaulting to 16 character random alpha numeric string") - - elif config.has_section("webqueue2_api") and config.has_option("webqueue2_api", "JWT_SECRET_KEY") and len(config["webqueue2_api"]["JWT_SECRET_KEY"]) < 16: - logger.warning(f"The JWT_SECRET_KEY is less than 16 characters, this is not recomended") - - elif config.has_option("webqueue2_api", "JWT_SECRET_KEY") : - logger.debug(f"JWT_SECRET_KEY was defined, setting to specified value") - app.config["JWT_SECRET_KEY"] = config["webqueue2_api"]["JWT_SECRET_KEY"] - -app.config["JWT_SECRET_KEY"] = global_configs.JWT_SECRET_KEY -app.config["JWT_COOKIE_SECURE"] = global_configs.ENVIRONMENT - -if global_configs.CONFIG_LOCATION and not (app.config["JWT_SECRET_KEY"] and app.config['JWT_COOKIE_SECURE']): - config.read(global_configs.CONFIG_LOCATION) - readConfigs() -elif os.path.exists(os.path.realpath("webqueue2-api.cfg")) and not (app.config["JWT_SECRET_KEY"] and app.config["JWT_COOKIE_SECURE"]): - logger.debug(f"No specified config file location, defaulting to {os.path.realpath('webqueue2-api.cfg')}") - config.read("webqueue2-api.cfg") - readConfigs() -else: - if not global_configs.JWT_SECRET_KEY: - logger.warning(f"JWT_SECRET_KEY not defined, defaulting to random 16 character string") - app.config["JWT_SECRET_KEY"] = os.urandom(16) - if not global_configs.ENVIRONMENT: - logger.warning(f"JWT_COOKIE_SECURE or ENVIRONMENT not defined, defaulting to True") - app.config["JWT_COOKIE_SECURE"] = False - - +def applyJWTSecretKey(secretKey: str) -> None: + global app + app.config['JWT_SECRET_KEY'] = secretKey + return + +def applyEnviornment(envrionment: bool)-> None: + global app + app.config['JWT_COOKIE_SECURE'] = envrionment + return + +def randomStringGenerator(size=16) -> str: + randomString = '' + randomString = randomString.join(random.choice(string.ascii_letters) for i in range(size)) + return randomString + # Set JWT secret key and create JWT manager -#app.config["JWT_SECRET_KEY"] = os.urandom(16) +app.config["JWT_SECRET_KEY"] = randomStringGenerator(size=16) # Set identity claim field key to sub for JWT RFC complience # Flask-JWT-Extended uses 'identity' by default for compatibility reasons app.config["JWT_IDENTITY_CLAIM"] = "sub" @@ -62,7 +40,7 @@ def readConfigs(): # Look for JWTs in headers (for access) then cookies (for refresh) app.config["JWT_TOKEN_LOCATION"] = ["headers", "cookies"] # Restrict cookies to HTTPS in prod, allow HTTP in dev -#app.config["JWT_COOKIE_SECURE"] = True +app.config["JWT_COOKIE_SECURE"] = True # Restrict cookies using SameSite=strict flag app.config["JWT_COOKIE_SAMESITE"] = "strict" # Set the cookie key for CRSF validation string @@ -76,3 +54,4 @@ def readConfigs(): api.add_resource(resources.item.Item, 
"/api/data//") api.add_resource(resources.queue.Queue, "/api/data/") api.add_resource(resources.queue_list.QueueList, "/api/data/get_queues") + diff --git a/webqueue2_api/global_configs.py b/webqueue2_api/global_configs.py index cb01e10..0f0dad9 100644 --- a/webqueue2_api/global_configs.py +++ b/webqueue2_api/global_configs.py @@ -1,9 +1 @@ -CONFIG_LOCATION = None - -# API Configurations -JWT_SECRET_KEY = None -ENVIRONMENT = None - -# ECNQueue Configurations -QUEUES_TO_IGNORE = [] -QUEUE_DIRECTORY = None \ No newline at end of file +CONFIG_LOCATION = None \ No newline at end of file diff --git a/webqueue2_api/logger/__init__.py b/webqueue2_api/logger/__init__.py index 5453907..5847ae4 100644 --- a/webqueue2_api/logger/__init__.py +++ b/webqueue2_api/logger/__init__.py @@ -2,11 +2,14 @@ import configparser import os import sys +from .. import global_configs logger_name = __name__ logger = logging.getLogger(logger_name) logger.setLevel(logging.DEBUG) +log_file_output = None + # See Formatting Details: https://docs.python.org/3/library/logging.html#logrecord-attributes # Example: Jan 28 2021 12:19:28 venv-manager : [INFO] Message log_message_format = "%(asctime)s %(name)s : [%(levelname)s] %(message)s" @@ -19,25 +22,28 @@ stream_handler = logging.StreamHandler() stream_handler.setFormatter(log_formatter) stream_handler.setLevel(logging.INFO) -stream_handler.set_name("Terminal") +stream_handler.set_name("std_out") logger.addHandler(stream_handler) config = configparser.ConfigParser() -# default configuration file -configFile = "webqueue2-api.cfg" -if os.path.exists(os.path.realpath(configFile)): - config.read(configFile) - -if "Logger" not in config.sections(): - logger.debug(f"Logger section not definied in {configFile}") -elif config.has_option("Logger", "LOGGER_OUT_FILE"): - log_file_path = config["Logger"]["LOGGER_OUT_FILE"] - file_handler = logging.FileHandler(log_file_path) +def loggerOutput(path: str) -> None: + file_handler = logging.FileHandler(path) file_handler.setFormatter(log_formatter) - file_handler.set_name("File") + file_handler.set_name("out_file") logger.addHandler(file_handler) - logger.debug(f"LOGGER_OUT_FILE included under Logger section in config file is valid, setting to '{log_file_path}'") -else: - logger.debug("LOGGER_OUT_FILE not defined in logger section, not setting logger path") \ No newline at end of file + + global log_file_output + log_file_output = path + + return + +def addVerbosity() -> None: + global logger + + for handler in logger.handlers: + if handler.get_name() == "std_out": + handler.setLevel(logging.DEBUG) + + return diff --git a/webqueue2_api/start.py b/webqueue2_api/start.py index bc46b31..58cf9eb 100644 --- a/webqueue2_api/start.py +++ b/webqueue2_api/start.py @@ -1,54 +1,12 @@ -from . import global_configs from . import logger +from . import validate_arguments +from .api import app import argparse, logging, os -#print("00000000000000000000000") -#print(global_configs.JWT_SECRET_KEY) -#from . 
import global_configs -def validateArguments(arguments: dict) -> bool: - if arguments["config_file"] and os.path.isdir(os.path.dirname(os.path.realpath(arguments["config_file"]))) and not os.path.isdir(arguments["config_file"]): - global_configs.CONFIG_LOCATION = arguments["config_file"] - elif arguments["config_file"]: - logger.logger.warning(f"'{os.path.dirname(os.path.realpath(arguments['config_file']))}' is not a valid file path for the configuration file") +logger.logger.name = __name__ - if arguments["log_file"] and os.path.isdir(os.path.dirname(os.path.realpath(arguments["log_file"]))) and not os.path.isdir(arguments["log_file"]): - for handler in logger.logger.handlers: - if handler.get_name == "File": logger.logger.removeHandler(handler) - log_file_path = arguments["log_file"] - file_handler = logging.FileHandler(log_file_path) - file_handler.setFormatter(logger.log_formatter) - file_handler.set_name("File") - logger.logger.addHandler(file_handler) - logger.logger.debug(f"Setting logging to output to '{log_file_path}'") - #global logFilePath - #logFilePath = log_file_path - elif arguments["log_file"]: - logger.logger.warning(f"'{os.path.dirname(os.path.realpath(arguments['log_file']))}' is not a valid file path for logger output") - - if arguments["jwt_secret_key"]: - logger.logger.debug(f"Setting the JWT_SECRET_KEY to {arguments['jwt_secret_key']}") - global_configs.JWT_SECRET_KEY = arguments["jwt_secret_key"] - - if arguments["queue_dir"] and os.path.isdir(arguments["queue_dir"]): - logger.logger.debug(f"Setting Queue Directory to '{arguments['queue_dir']}'") - elif arguments["queue_dir"] and not os.path.isdir(arguments["queue_dir"]): - logger.logger.warning(f"'{arguments['queue_dir']}' is not a valid queue directory") - - if arguments["queues_to_ignore"]: - global_configs.QUEUES_TO_IGNORE = arguments["queues_to_ignore"] - logger.logger.debug(f"Setting QUEUES_TO_IGNORE to {arguments['queues_to_ignore']}") - - if arguments["environment"]: - environ = False if arguments["environment"] == "dev" else True - global_configs.ENVIRONMENT = environ - logger.logger.debug(f"Setting ENVIRONMENT to {environ}") - - return True - -def startApi(arguments: dict): - validateArguments(arguments) - from webqueue2_api.api import app - #app.config["JWT_SECRET_KEY"] = global_configs.JWT_SECRET_KEY - #app.config["JWT_COOKIE_SECURE"] = global_configs.ENVIRONMENT +def startApi(arguments) -> app: + if arguments["verbose"]: logger.addVerbosity() + validate_arguments.validateArguments(**arguments) return app diff --git a/webqueue2_api/validate_arguments.py b/webqueue2_api/validate_arguments.py new file mode 100644 index 0000000..0ede503 --- /dev/null +++ b/webqueue2_api/validate_arguments.py @@ -0,0 +1,175 @@ +import configparser, os, json +from . import logger +from . import global_configs +from . 
import api, ECNQueue + +logger.logger.name = __name__ +config = configparser.ConfigParser() + +userGivenConfigFile = None + +def validateArguments( + config_file=None, + log_file=None, + jwt_secret_key=None, + environment=None, + queue_dir=None, + queues_to_ignore=[], + **kwargs +) -> None: + + validateConfigFile(config_file) + validateLogFile(log_file) + validateJWTSecretKey(jwt_secret_key) + validateEnvironment(environment) + validateQueueDir(queue_dir) + validateQueuesToIgnore(queues_to_ignore) + +def validateLogFile(path: str) -> str: + if path is None: + logger.logger.debug(f"Log file output not included in command line arguments") + if not validSectionConfig("Logger", "LOGGER_OUT_FILE", logger.log_file_output): + return + + configLogFile = config["Logger"]["LOGGER_OUT_FILE"] + if not os.path.exists(os.path.dirname(configLogFile)): + logger.logger.warning(f"LOGGER_OUT_FILE found under the Logger section in the '{global_configs.CONFIG_LOCATION}' file is not a valid path name, defaulting to '{logger.log_file_output}'") + return + + logger.logger.debug(f"LOGGER_OUT_FILE is valid, setting logger to output to '{configLogFile}'") + logger.loggerOutput(configLogFile) + return + + if not os.path.exists(os.path.dirname(path)): + logger.logger.warning(f"'{path}' is not a valid log file output path, defaulting to '{logger.log_file_output}'") + return + + logger.logger.debug(f"Setting logger to output to '{path}'") + logger.loggerOutput(path) + return + +def validateConfigFile(path: str) -> None: + if path is None: + logger.logger.debug(f"Config file not included in command line arguments") + validateRelativeConfigFile() + return + + if not os.path.exists(path): + logger.logger.warning(f"Config file '{path}' does not exist") + validateRelativeConfigFile() + return + + logger.logger.debug(f"Config file found at '{path}', arguments will be pulled from here if not provided in the command line") + global_configs.CONFIG_LOCATION = path + config.read(global_configs.CONFIG_LOCATION) + return + +def validateRelativeConfigFile() -> None: + configFile = os.path.realpath("webqueue2-api.cfg") + if not os.path.exists(configFile): + logger.logger.warning(f"No config file was found at '{configFile}', default values will be applied if not provided in the command line") + return + + logger.logger.debug(f"Config File found at '{configFile}', values will be pulled from here if they were not provided in the comamnd line") + global_configs.CONFIG_LOCATION = configFile + config.read(global_configs.CONFIG_LOCATION) + return + +def validateJWTSecretKey(secretKey: str) -> None: + if secretKey is None: + logger.logger.debug(f"JWT_SECRET_KEY not included in command line arguments") + if not validSectionConfig("webqueue2_api", "JWT_SECRET_KEY", api.app.config["JWT_SECRET_KEY"]): + return + + if len(config["webqueue2_api"]["JWT_SECRET_KEY"]) < 16: + logger.logger.warning(f"The JWT_SECRET_KEY is less than 16 characters, this is not recommended.") + + api.applyJWTSecretKey(config["webqueue2_api"]["JWT_SECRET_KEY"]) + logger.logger.debug(f"Setting the JWT_SECRET_KEY to {api.app.config['JWT_SECRET_KEY']}") + return + + if len(secretKey) < 16: + logger.logger.warning(f"The provided JWT_SECRET_KEY is less than 16 characters, this is not recomended") + + api.applyJWTSecretKey(secretKey) + logger.logger.debug(f"Setting the JWT_SECRET_KEY to {secretKey}") + return + +def validSectionConfig(section: str, option: str, default: str) -> bool: + if not(config.has_section(section) and config.has_option(section, option)): + 
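        # The requested section/option pair is absent from the parsed config file:
        # warn, report the default value that will be kept, and signal the caller
        # to fall back to that default.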
logger.logger.warning(f"{option} not found under the {section} in the '{global_configs.CONFIG_LOCATION}' file, defaulting to {default}") + return False + + if config.has_section(section) and config[section][option] == "": + logger.logger.warning(f"The {option} under {section} in the {global_configs.CONFIG_LOCATION} file does not have a value, defaulting to {default}") + return False + + logger.logger.debug(f"{option} found under the {section} section in the {global_configs.CONFIG_LOCATION} file") + + return True + +def validateEnvironment(environment: bool) -> None: + if environment is None: + logger.logger.debug(f"Environment not included in command line arguments") + if not validSectionConfig("webqueue2_api", "ENVIRONMENT", api.app.config["JWT_COOKIE_SECURE"]): + return + + if config["webqueue2_api"]["ENVIRONMENT"] != "dev": + logger.logger.debug(f"Environment set to 'prod', setting JET_COOKIE_SECURE to True") + api.applyEnviornment(True) + return + + logger.logger.debug(f"Environment set to 'dev', setting JET_COOKIE_SECURE to False") + api.applyEnviornment(False) + return + + if environment != "dev": + logger.logger.debug(f"Environment set to 'prod', setting JET_COOKIE_SECURE to True") + api.applyEnviornment(True) + return + + logger.logger.debug(f"Environment set to 'dev', setting JET_COOKIE_SECURE to False") + api.applyEnviornment(False) + return + +def validateQueuesToIgnore(queuesToIgnore: list) -> None: + if len(queuesToIgnore) == 0: + logger.logger.debug(f"QUEUES_TO_IGNORE not included in commamnd line arguments") + if not validSectionConfig("ECNQueue", "QUEUES_TO_IGNORE", ECNQueue.queues_to_ignore): + return + + try: + configQueuesToIgnore = json.loads(config["ECNQueue"]["QUEUES_TO_IGNORE"]) + except: + logger.logger.warning(f"QUEUES_TO_IGNORE in config file not a valid list, defaulting to {ECNQueue.queues_to_ignore}") + return + + ECNQueue.applyQueuesToIgnore(configQueuesToIgnore) + logger.logger.debug(f"QUEUES_TO_IGNORE in config file is a valid list, setting to {configQueuesToIgnore}") + return + + logger.logger.debug(f"QUEUES_TO_IGNORE set to {queuesToIgnore}") + ECNQueue.applyQueuesToIgnore(queuesToIgnore) + return + +def validateQueueDir(queueDir: str) -> None: + if queueDir is None: + logger.logger.debug(f"QUEUE_DIRECTORY not included in commamnd line arguments") + if not validSectionConfig("ECNQueue", "QUEUE_DIRECTORY", ECNQueue.queue_directory): + return + + if not os.path.isdir(config["ECNQueue"]["QUEUE_DIRECTORY"]): + logger.logger.warning(f"QUEUE_DIRECTORY found under the ECNQUEUE section in the '{global_configs.CONFIG_LOCATION}' file is not a valid directory, defaulting to '{ECNQueue.queue_directory}'") + return + + logger.logger.debug(f"QUEUE_DIRECTORY is valid, setting new queue directory to '{config['ECNQueue']['QUEUE_DIRECTORY']}'") + ECNQueue.applyQueueDirectory(config["ECNQueue"]["QUEUE_DIRECTORY"]) + return + + if not os.path.isdir(queueDir): + logger.logger.warning(f"'{queueDir}' is not a valid directory, setting the queue directory to the default value: '{ECNQueue.queue_directory}' ") + return + + logger.logger.debug(f"QUEUE_DIRECTORY is valid, setting new queuedirectory to '{queueDir}'") + ECNQueue.applyQueueDirectory(queueDir) + return \ No newline at end of file From 61eb186cc08038cd4bbc6104f0a905e298d00445 Mon Sep 17 00:00:00 2001 From: benne238 Date: Fri, 7 May 2021 15:15:05 -0400 Subject: [PATCH 43/43] Tree functionality added to a new utils subpackage in the webqueue2_api package --- webqueue2_api/__init__.py | 3 +- webqueue2_api/__main__.py | 1 - 
webqueue2_api/logger/__init__.py | 1 - webqueue2_api/utils/__init__.py | 1 + webqueue2_api/utils/tree.py | 94 +++++++++++++++++++++++++++++ webqueue2_api/validate_arguments.py | 4 +- 6 files changed, 98 insertions(+), 6 deletions(-) create mode 100644 webqueue2_api/utils/__init__.py create mode 100644 webqueue2_api/utils/tree.py diff --git a/webqueue2_api/__init__.py b/webqueue2_api/__init__.py index d4f283e..45c706c 100644 --- a/webqueue2_api/__init__.py +++ b/webqueue2_api/__init__.py @@ -1 +1,2 @@ -#from . import api, ECNQueue, logger \ No newline at end of file +#from . import api, ECNQueue, logger +from . import utils \ No newline at end of file diff --git a/webqueue2_api/__main__.py b/webqueue2_api/__main__.py index b1f5741..a212eb6 100644 --- a/webqueue2_api/__main__.py +++ b/webqueue2_api/__main__.py @@ -1,7 +1,6 @@ #from webqueue2_api.api import app #import webqueue2_api.ECNQueue import argparse, os -from webqueue2_api import global_configs from webqueue2_api import logger import logging import subprocess diff --git a/webqueue2_api/logger/__init__.py b/webqueue2_api/logger/__init__.py index 5847ae4..5383221 100644 --- a/webqueue2_api/logger/__init__.py +++ b/webqueue2_api/logger/__init__.py @@ -2,7 +2,6 @@ import configparser import os import sys -from .. import global_configs logger_name = __name__ logger = logging.getLogger(logger_name) diff --git a/webqueue2_api/utils/__init__.py b/webqueue2_api/utils/__init__.py new file mode 100644 index 0000000..571e40e --- /dev/null +++ b/webqueue2_api/utils/__init__.py @@ -0,0 +1 @@ +from . import tree \ No newline at end of file diff --git a/webqueue2_api/utils/tree.py b/webqueue2_api/utils/tree.py new file mode 100644 index 0000000..5a8157a --- /dev/null +++ b/webqueue2_api/utils/tree.py @@ -0,0 +1,94 @@ +#https://stackoverflow.com/questions/9727673/list-directory-tree-structure-in-python + +from pathlib import Path +import os +import time + +class DisplayablePath(object): + display_filename_prefix_middle = '├──' + display_filename_prefix_last = '└──' + display_parent_prefix_middle = ' ' + display_parent_prefix_last = '│ ' + + def __init__(self, path, parent_path, is_last): + self.path = Path(str(path)) + self.parent = parent_path + self.is_last = is_last + if self.parent: + self.depth = self.parent.depth + 1 + else: + self.depth = 0 + + @classmethod + def make_tree(cls, root, parent=None, is_last=False, criteria=None): + root = Path(str(root)) + criteria = criteria or cls._default_criteria + + displayable_root = cls(root, parent, is_last) + yield displayable_root + + children = sorted(list(path + for path in root.iterdir() + if criteria(path)), + key=lambda s: str(s).lower()) + count = 1 + for path in children: + is_last = count == len(children) + if path.is_dir(): + yield from cls.make_tree(path, + parent=displayable_root, + is_last=is_last, + criteria=criteria) + else: + yield cls(path, displayable_root, is_last) + count += 1 + + @classmethod + def _default_criteria(cls, path): + return True + + @property + def displayname(self): + if self.path.is_dir(): + return self.path.name + '/' + return self.path.name + + def displayable(self): + if self.parent is None: + return self.displayname + + _filename_prefix = (self.display_filename_prefix_last + if self.is_last + else self.display_filename_prefix_middle) + + parts = ['{!s} {!s}'.format(_filename_prefix, + self.displayname)] + + parent = self.parent + while parent and parent.parent is not None: + parts.append(self.display_parent_prefix_middle + if parent.is_last + else 
self.display_parent_prefix_last) + parent = parent.parent + + return ''.join(reversed(parts)) + +def displayTree(givenPath: str = None) -> None: + if givenPath is None: + print("No path was given") + return + + givenPath = os.path.expanduser(givenPath) + print(givenPath) + time.sleep(10) + try: + paths = DisplayablePath.make_tree(Path(givenPath)) + for path in paths: + print(path.displayable()) + + return + except Exception as e: + print(f"'{givenPath}' is not a valid path.") + print(e) + return + \ No newline at end of file diff --git a/webqueue2_api/validate_arguments.py b/webqueue2_api/validate_arguments.py index 0ede503..2794bfd 100644 --- a/webqueue2_api/validate_arguments.py +++ b/webqueue2_api/validate_arguments.py @@ -1,7 +1,5 @@ import configparser, os, json -from . import logger -from . import global_configs -from . import api, ECNQueue +from . import api, ECNQueue, logger, global_configs logger.logger.name = __name__ config = configparser.ConfigParser()
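
The sketch below shows one way the pieces added in these patches fit together: a webqueue2-api.cfg file supplying the [Logger], [webqueue2_api], and [ECNQueue] sections that validate_arguments.py reads, an argparse-style dictionary passed to validateArguments(), and a call to the new utils.tree.displayTree() helper. It is a minimal sketch, assuming the package and its dependencies are importable; every path, secret key, and queue name in it is a hypothetical placeholder rather than a value taken from the repository.

# Minimal usage sketch (hypothetical values throughout).
import os

from webqueue2_api import validate_arguments
from webqueue2_api.utils import tree

# A sample config file. QUEUES_TO_IGNORE must be a JSON list because
# validateQueuesToIgnore() parses it with json.loads().
SAMPLE_CONFIG = """[Logger]
LOGGER_OUT_FILE = /tmp/webqueue2-api.log

[webqueue2_api]
JWT_SECRET_KEY = replace-with-a-key-of-at-least-16-chars
ENVIRONMENT = prod

[ECNQueue]
QUEUES_TO_IGNORE = ["testqueue1", "testqueue2"]
QUEUE_DIRECTORY = /tmp/q-snapshot
"""

os.makedirs("/tmp/q-snapshot", exist_ok=True)
with open("/tmp/webqueue2-api.cfg", "w") as config_file:
    config_file.write(SAMPLE_CONFIG)

# Shaped like the argparse output that __main__.py forwards to startApi().
# Explicit command line values win; None (or an empty list) falls back to the
# config file, and missing config options fall back to the package defaults.
arguments = {
    "config_file": "/tmp/webqueue2-api.cfg",
    "log_file": None,
    "jwt_secret_key": None,
    "environment": "dev",
    "queue_dir": None,
    "queues_to_ignore": [],
}
validate_arguments.validateArguments(**arguments)

# Print the queue directory as a tree using the new utils subpackage.
# (The committed helper pauses for ten seconds before printing.)
tree.displayTree("/tmp/q-snapshot")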