Compare commits


No commits in common. "main" and "v.1.1.0" have entirely different histories.

7 changed files with 12 additions and 55 deletions


@@ -38,6 +38,4 @@ jobs:
       - run: pyinstaller --noconfirm --onefile --console ${{ gitea.workspace }}/inex.py
       - uses: softprops/action-gh-release@v2
         with:
-          files: |
-            ${{ gitea.workspace }}/dist/inex.exe
-            ${{ gitea.workspace }}/config.toml.example
+          files: ${{ gitea.workspace }}/dist/inex.exe

inex.py

@@ -13,10 +13,7 @@ import inexSqlquery
 class Inex:
     def __init__(self):
-        """Initilize config, calls functions from inexConnect.py, inexLogging.py
-        inexDataModel.py, inexDataProcessing.py, inexEncoder.py and inexSqlquery.py
-        Main logic of the program. Requires a config.toml in the same directory it's
-        being run from."""
+        """Initilize config, calls functions from inex-connect.py and inex-logging.py"""
         # assign libraries
         self.db = pyodbc
         self.il = logging
@@ -28,7 +25,6 @@ class Inex:
         self.e = inexEncoder.Encoder
         self.sq = inexSqlquery
-        # Check if local config file exists.
         if self.os.path.exists('./config.toml'):
             config_file_path = './config.toml'
             with open(config_file_path, 'rb') as c:
@@ -75,20 +71,18 @@ class Inex:
         # create the connection to the database
         self.cursor = self.ic.inexSql.connectDatabase(self, self.db, self.dbDriver, self.dbServer, self.dbDatabase, self.dbUser, self.dbPassword)
-        # Query the database
         self.data = self.ic.inexSql.databaseQuery(self, self.cursor, self.sq.sqlQuerymodel.queryData(self.queryOverride,self.dbQuery, self.queryDaystopull))
-        # Modify the data to meet EFC requirements
         self.modifiedData = processData(self.data, dataTemplate, prd_ext_tenant_name=self.prdExttenantname,product_name=self.productName,\
             prd_ext_tenant_id=self.platformConfig["tenant_id"])
-        # Push data to EFC. Check for local Auth token -> Authenticate if needed -> push data
         if self.pushToplatform:
             inexConnect.fortraEFC.__init__(self)
-        # TODO: move this to its own function
         if self.useLog:
             self.il.warning(f"Writing to '{self.outputFile}'.")
-        # Write data to json
        if self.writeJsonfile:
             with open(self.outputFile, "w") as f:
                 self.j.dump(self.modifiedData, f, indent = 2, cls=self.e)
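Both sides of the diff keep the check for a local ./config.toml opened in binary mode. For reference, a minimal standalone sketch of that loading pattern, assuming the stdlib tomllib parser; the parser actually imported and the real key names are not visible in this diff:

# Minimal sketch of the config-loading pattern shown above (assumes Python 3.11+ tomllib;
# the real key names live in config.toml.example).
import os
import tomllib

config_file_path = './config.toml'
if os.path.exists(config_file_path):
    with open(config_file_path, 'rb') as c:  # tomllib requires a binary file handle
        config = tomllib.load(c)
else:
    config = {}  # hypothetical fallback; the real script may look elsewhere or exit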


@@ -20,7 +20,6 @@ class inexSql:
         return cursor
     def databaseQuery(self, cursor, query, args=()):
-        """Use the database connection to send a query."""
         if self.useLog:
             self.il.debug(f"Query:")
             self.il.debug(query)
@@ -44,10 +43,7 @@ class inexSql:
         return r
 class fortraEFC:
-    """Class to connect to fortra EFC. It will authenticate and push rest payloads.
-    Writes a .token file to the same directory script was run in."""
     def __init__(self):
-        """This is the logic for how authentication is handled"""
         # Check if .token file is present
         if fortraEFC.readToken(self) == 1:
             # Get fresh token. First run.
@@ -61,8 +57,6 @@ class fortraEFC:
             fortraEFC.pushPayload(self)
     def readToken(self):
-        """Looks locally for a .token file. Returns a numeral code
-        for logic in the init method."""
         if self.os.path.exists(self.tokenFilepath):
             with open(self.tokenFilepath, 'rb') as t:
                 self.tokenData = self.j.load(t)
@@ -72,7 +66,6 @@ class fortraEFC:
             return 1
     def getToken(self):
-        """Gets a token from fortra idp."""
         self.tokenData = self.r.post(self.platformConfig["idp"], data={"grant_type":"client_credentials",\
             "client_id": self.platformConfig["client_id"],\
             "client_secret": self.platformConfig["secret"],})
@@ -80,14 +73,12 @@ class fortraEFC:
         self.il.debug(f'getToken {self.tokenData["access_token"]}')
     def writeToken(self):
-        """Writes a token to a local file named '.token'."""
         fortraEFC.getToken(self)
         with open(self.tokenFilepath, "w") as f:
             self.j.dump(self.tokenData, f, indent = 2)
         self.il.debug(f'writeToken {self.tokenData["access_token"]}')
     def pushPayload(self):
-        """Sends data to fortra EFC. Requires a token from the idp."""
         self.il.debug(f'pushPayload {self.tokenData["access_token"]}')
         url = f'{self.platformConfig["efc_url"]}/api/v1/unity/data/{self.platformConfig["tenant_id"]}/machine_event'
         pushPayloadResponse = self.r.post(url, headers={'Authorization': f'Bearer {self.tokenData["access_token"]}'},\
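The docstrings removed above describe the fortraEFC flow: look for a cached .token file, fetch a token from the Fortra IdP with the client_credentials grant when none is found, write it back to disk, then push the payload with a Bearer header. A minimal standalone sketch of that flow, assuming the requests library; the helper names and the json= payload argument are illustrative, and token refresh on expiry is left out:

# Sketch of the token-caching flow described by the removed docstrings.
# platformConfig keys mirror the ones visible in the hunks above; no error handling.
import json
import os
import requests

def get_cached_token(token_path, platform_config):
    if os.path.exists(token_path):
        with open(token_path, 'rb') as t:
            return json.load(t)  # reuse the token written by a previous run
    token_data = requests.post(platform_config["idp"], data={
        "grant_type": "client_credentials",
        "client_id": platform_config["client_id"],
        "client_secret": platform_config["secret"],
    }).json()
    with open(token_path, "w") as f:
        json.dump(token_data, f, indent=2)  # cache for the next run
    return token_data

def push_payload(platform_config, token_data, payload):
    url = f'{platform_config["efc_url"]}/api/v1/unity/data/{platform_config["tenant_id"]}/machine_event'
    return requests.post(url, headers={'Authorization': f'Bearer {token_data["access_token"]}'}, json=payload)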


@@ -1,8 +1,4 @@
 def dataTemplate(transactionType,**kwargs):
-    """Created templates for use. This function forms json data into an
-    appropriate model for EFC. It returnes the appropriate template based
-    on the transaction type passed into the function. The logic to process
-    this is at the bottom of the function."""
     upload = {
         "bytes" : kwargs.get('bytes'),
         "dst_endpoint": {

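The removed docstring says dataTemplate shapes the row data into the EFC model and returns the template matching the transaction type passed in. A reduced sketch of that dispatch; only bytes and dst_endpoint appear in the hunk, so the download template and the nested field name are hypothetical:

# Illustrative only: the real templates carry many more EFC fields.
def data_template(transaction_type, **kwargs):
    upload = {
        "bytes": kwargs.get('bytes'),
        "dst_endpoint": {"ip": kwargs.get('dst_ip')},  # nested block as in the hunk; field name assumed
    }
    download = {"bytes": kwargs.get('bytes')}  # hypothetical sibling template
    templates = {"file_uploaded": upload, "file_downloaded": download}
    return templates.get(transaction_type, {})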

@@ -1,8 +1,7 @@
 def processData(data, template, **kwargs):
     """Translates data from sql query to the appropriate place in the respective template.
     Accepts data, which is the sql query output, the template function, and finally
-    additional data to insert into the template. Uses other functions to further
-    process row data."""
+    additional data to insert into the template."""
     processedData = []
     transactionLoginid = []
@@ -18,7 +17,6 @@ def processData(data, template, **kwargs):
             continue
         userType = identifyUserType(row.get('user_type'))
-        userHome = parseHomefolder(row.get('Actor'),row.get('VirtualFolderName'))
         try:
             processedData.append(template(identifyUtypecommand,\
                 prd_ext_tenant_name=kwargs.get('prd_ext_tenant_name'),\
@@ -47,7 +45,7 @@ def processData(data, template, **kwargs):
                 duration=row.get('TransferTime'),\
                 user_type=userType,\
                 user_name=row.get('Actor'),\
-                user_home_directory=userHome,\
+                user_home_directory=row.get('VirtualFolderName'),\
                 utype=identifyUtypecommand))
         except UnboundLocalError:
             print(f'Problem row GUID:{row.get("TransactionGUID")} ::: TransactionObject:{row.get("TransactionObject")} Command: {row.get("Command")}')
@@ -82,7 +80,7 @@ def processData(data, template, **kwargs):
                 user_uid=row.get('TransactionID'),\
                 user_type=userType,\
                 user_name=row.get('Actor'),\
-                user_home_directory=userHome,\
+                user_home_directory=row.get('PhysicalFolderName'),\
                 utype=identifyUtypetransactionObject\
                 ))
             transactionLoginid.append(row.get('TransactionGUID'))
@@ -102,20 +100,6 @@ def identifyUserType(obj):
     else:
         return None
-def parseHomefolder(user, virtualfolder):
-    """Extract users home folder using the username. Will not work on edge cases
-    such as when a users home folder does not have the user name. When that occurs
-    it is impossible to know based on the arm data what the home folder is.
-    This function is an assumption so it may return the incorrect home folder.
-    This function finds the user name and takes the path from the left of the folder
-    as the home folder. There are cases where this may not be accurate."""
-    if user:
-        userSplit = f'/{user}/'
-        if virtualfolder:
-            if userSplit in virtualfolder:
-                home = virtualfolder.split(userSplit)[0] + userSplit
-    return home if home else None
 def identifyUtype(obj):
     """Process Type of transaction based on string that passed in.
     Return transaction type."""
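The deleted parseHomefolder infers a home directory by locating '/{user}/' in the virtual path and keeping everything up to and including that segment. A short worked trace with the values used in test.py further down:

# Worked trace of the deleted heuristic.
user = 'a'
virtualfolder = '/Usr/a/asdf/asf/asdfas/asdfasdf/'
userSplit = f'/{user}/'                               # '/a/'
home = virtualfolder.split(userSplit)[0] + userSplit  # '/Usr' + '/a/' -> '/Usr/a/'
print(home)

As the removed docstring warns, this is only a best-effort guess; and as written, a path that never contains the user segment leaves home unbound, so the final return raises UnboundLocalError.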


@@ -1,6 +1,6 @@
 class sqlQuerymodel:
     def queryData(overRideflag, configQuery, daysTopull):
-        """Embedded query data. Data is slightly modified to change the amount of days to pull."""
+        """Embedded query data"""
         q ="""DECLARE @stopTime DATETIME2
         SET @stopTime=DATEADD(DAY, -30, GETDATE())
         SELECT p.ProtocolCommandID, t.Time_stamp, p.RemoteIP, p.RemotePort, p.LocalIP, p.LocalPort, p.Protocol, p.SiteName, p.Command, p.FileName, p.PhysicalFolderName, p.VirtualFolderName, p.FileSize, p.TransferTime, p.BytesTransferred, p.Description, p.ResultID, t.TransactionID, p.Actor, t.TransactionObject, t.NodeName, t.TransactionGUID, a.Protocol user_type
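The replaced docstring notes that the embedded query is slightly modified to change the number of days to pull. How that happens is not shown in this hunk; a hedged sketch of one way queryData could honor the override flag and the configured day count, purely an assumption:

# Assumed behaviour, not the repository's actual implementation.
def query_data(override_flag, config_query, days_to_pull):
    q = """DECLARE @stopTime DATETIME2
    SET @stopTime=DATEADD(DAY, -30, GETDATE())
    SELECT ..."""  # embedded query, truncated here
    if override_flag:
        return config_query  # operator-supplied query wins
    return q.replace("DATEADD(DAY, -30", f"DATEADD(DAY, -{days_to_pull}")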

test.py

@@ -15,6 +15,8 @@ def builddict(keys,*args,**kwargs):
         dict[key] = kwargs.get(key)
     print(dict)
+testfolder = '/Usr/a/asdf/asf'
+user = 'a'
 def identifyUtype(obj):
     """Process Type of transaction based on string that passed in.
@@ -35,14 +37,6 @@ def identifyUtype(obj):
     else:
         return "other"
-transactionType = 'file_uploaded'
-testfolder = '/Usr/a/asdf/asf/asdfas/asdfasdf/'
-user = 'a'
-def parsehomefolder(user, virtualfolder):
-    userSplit = f'/{user}/'
-    home = virtualfolder.split(userSplit)[0] + userSplit
-    print(home)
-    return home
-a = parsehomefolder(user, testfolder)
+print(transactionType.split("_")[1].rstrip("d").rstrip("e"))
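The print in the last hunk derives the short transaction verb from the event name by splitting on the underscore and stripping the past-tense suffix; a quick trace:

transactionType = 'file_uploaded'
step = transactionType.split("_")[1]  # 'uploaded'
step = step.rstrip("d")               # 'uploade' (rstrip drops every trailing 'd')
step = step.rstrip("e")               # 'upload'
print(step)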