9 Commits

Author SHA1 Message Date
dbd11e02a0 Merge pull request 'dev-create-rest-client' (#7) from dev-create-rest-client into main
Reviewed-on: #7
2024-07-30 12:58:24 -05:00
2b3a1a7078 adjusted utype handling 2024-07-30 12:46:19 -05:00
045467783f moved encoder to separate file and created .token refresh logic 2024-07-29 17:30:11 -05:00
1800aafd72 updated saving tokens 2024-07-29 14:04:35 -05:00
11b312a1e8 updated client 2024-07-27 18:24:15 -05:00
1f255ec33a Added push payload 2024-07-25 22:22:08 -05:00
573cd651be automated release idea 2024-07-25 16:09:09 -05:00
c030fc69a2 updating build.yaml
Some checks failed
Build / build-windows-binary (push) Failing after 0s
Build / build-linux-binary (push) Failing after 45s
2024-07-25 13:30:21 -05:00
702d93b9da Updated query and mapped the proper data to the template
All checks were successful
Build Inex Executable / linux (push) Successful in 1m20s
2024-07-24 13:00:24 -05:00
9 changed files with 234 additions and 46 deletions

View File

@ -1,10 +1,10 @@
name: Build Inex Executable name: Build
run-name: Deploy to ${{ inputs.deploy_target }} by @${{ gitea.actor }}
on: [push] on: push
jobs: jobs:
Build: build-linux-binary:
runs-on: ubuntu-22.04 runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: actions/setup-python@v5 - uses: actions/setup-python@v5
@ -15,7 +15,23 @@ jobs:
- run: apt-get install unixodbc -y - run: apt-get install unixodbc -y
- run: pip install -r requirements.txt - run: pip install -r requirements.txt
- run: pyinstaller --noconfirm --onefile --console ${{ gitea.workspace }}/inex.py - run: pyinstaller --noconfirm --onefile --console ${{ gitea.workspace }}/inex.py
- uses: actions/upload-artifact@v3 - uses: softprops/action-gh-release@v2
- name: Release
uses: softprops/action-gh-release@v2
if: startsWith(gitea.ref, 'refs/tags/')
with: with:
name: Inex files: ${{ gitea.workspace }}/dist/inex
path: ${{ gitea.workspace }}/dist/inex build-windows-binary:
runs-on: windows
permissions:
contents: write # release changes require contents write
steps:
- uses: actions/checkout@v4
- name: Upload Release Asset
env:
GITHUB_TOKEN: ${{ secrets.GITEA_TOKEN }}
run: pip install -r requirements.txt
run: pyinstaller --noconfirm --onefile --console ${{ gitea.workspace }}/inex.py
run: gh release upload <release_tag> <a_file>

5
.gitignore vendored
View File

@ -1,4 +1,5 @@
*config.toml *.toml
*.json *.json
__pycache__/ __pycache__/
*.log *.log
*.token

View File

@ -1,13 +1,75 @@
[fortraPlatform]
selectedPlatform = "dev"
[fortraPlatform.dev]
idp = "https://foundation.foundation-dev.cloudops.fortradev.com/idp/realms/products/protocol/openid-connect/token"
efc_url = "https://efc.efc-dev.cloudops.fortradev.com"
tenant_id = ""
client_id = "eft-event-generator-confidential"
secret = ""
[fortraPlatform.stage]
idp = "https://foundation.foundation-stage.cloudops.fortradev.com/idp/realms/products/protocol/openid-connect/token"
efc_url = "https://efc.efc-stage.cloudops.fortradev.com"
tenant_id = ""
client_id = "eft-event-generator-confidential"
secret = ""
[fortraPlatform.prod]
idp = "https://foundation.foundation-prod.cloudops.fortradev.com/idp/realms/products/protocol/openid-connect/token"
efc_url = "https://efc.efc-prod.cloudops.fortradev.com"
tenant_id = ""
client_id = "eft-event-generator-confidential"
secret = ""
[database] [database]
driver = "ODBC Driver 18 for SQL Server" driver = "ODBC Driver 18 for SQL Server"
server = "192.168.x.x" server = "192.168.x.x"
database = "EFTDB" database = "EFTDB"
user = "a" user = "a"
password = "a" password = "a"
query = "SELECT [Id],[Version] FROM [EFTDB].[dbo].[tbl_Schema_Version]" query = """DECLARE @stopTime DATETIME2
SET @stopTime = DATEADD(DAY, -30, GETDATE())
SELECT p.[ProtocolCommandID]
,t.[Time_stamp]
,p.[RemoteIP]
,p.[RemotePort]
,p.[LocalIP]
,p.[LocalPort]
,p.[Protocol]
,p.[SiteName]
,p.[Command]
,p.[CommandParameters]
,p.[FileName]
,p.[VirtualFolderName]
,p.[PhysicalFolderName]
,p.[IsInternal]
,p.[FileSize]
,p.[TransferTime]
,p.[BytesTransferred]
,p.[ResultID]
,t.[TransactionID]
,p.[Description]
,p.[Actor]
,t.ParentTransactionID
,t.TransactionObject
,t.NodeName
,t.TransactionGUID
,a.Protocol user_type
FROM [EFTDB].[dbo].[tbl_Transactions] t Full JOIN tbl_ProtocolCommands p ON (t.TransactionID = p.TransactionID) Full join tbl_Authentications a ON (t.TransactionID = a.TransactionID)
WHERE p.Time_stamp > @stopTime"""
[immutables]
prd_instance_id = 1
product_guid = "asdf"
product_name = "EFT"
product_version = "8.1.0.9"
[output] [output]
pushToplatform = true
dumpTojson = true
filename ="./data.json" filename ="./data.json"
token = "./.token"
[logging] [logging]
use_log = true use_log = true

View File

@ -4,8 +4,28 @@ server = "192.168.x.x"
database = "EFTDB" database = "EFTDB"
user = "a" user = "a"
password = "a" password = "a"
query = "SELECT [Id],[Version] FROM [EFTDB].[dbo].[tbl_Schema_Version]" query = """SELECT TOP (1) [ProtocolCommandID]
,[Time_stamp]
,[RemoteIP]
,[RemotePort]
,[LocalIP]
,[LocalPort]
,[Protocol]
,[SiteName]
,[Command]
,[CommandParameters]
,[FileName]
,[VirtualFolderName]
,[PhysicalFolderName]
,[IsInternal]
,[FileSize]
,[TransferTime]
,[BytesTransferred]
,[ResultID]
,[TransactionID]
,[Description]
,[Actor]
FROM [EFTDB].[dbo].[tbl_ProtocolCommands]"""
[output] [output]
filename ="./data.json" filename ="./data.json"

60
inex.py
View File

@ -2,28 +2,35 @@ import pyodbc
import os import os
import logging import logging
import datetime import datetime
from tomllib import load import tomllib
from inexLogging import inexLog from inexLogging import inexLog
import inexConnect import inexConnect
from inexDataModel import dataTemplate from inexDataModel import dataTemplate
from inexDataProcessing import processData from inexDataProcessing import processData
import json import json
import decimal import decimal
import requests
import inexEncoder
class Inex: class Inex:
def __init__(self): def __init__(self):
"""Initilize config, calls functions from inex-connect.py and inex-logging.py""" """Initilize config, calls functions from inex-connect.py and inex-logging.py"""
if os.path.exists('./config.toml'):
config_file_path = './config.toml'
with open(config_file_path, 'rb') as c:
self.config = load(c)
# assign libraries # assign libraries
self.db = pyodbc self.db = pyodbc
self.tm = datetime self.tm = datetime
self.il = logging self.il = logging
self.ic = inexConnect self.ic = inexConnect
self.r = requests
self.tl = tomllib
self.os = os
self.j = json
self.e = inexEncoder.Encoder
if self.os.path.exists('./config.toml'):
config_file_path = './config.toml'
with open(config_file_path, 'rb') as c:
self.config = self.tl.load(c)
# set config # set config
self.dbDriver = self.config["database"]["driver"] self.dbDriver = self.config["database"]["driver"]
self.dbServer = self.config["database"]["server"] self.dbServer = self.config["database"]["server"]
@ -35,7 +42,23 @@ class Inex:
self.useLog = self.config["logging"]["useLog"] self.useLog = self.config["logging"]["useLog"]
self.logPath = self.config["logging"]["logPath"] self.logPath = self.config["logging"]["logPath"]
self.logLevel = self.config["logging"]["logLevel"] self.logLevel = self.config["logging"]["logLevel"]
self.prdInstanceID = self.config["immutables"]["prd_instance_id"]
self.productGUID = self.config["immutables"]["product_guid"]
self.productName = self.config["immutables"]["product_name"]
self.productVersion = self.config["immutables"]["product_version"]
self.tokenFilepath = self.config["output"]["token"]
self.selectedPlatform = self.config["fortraPlatform"]["selectedPlatform"]
self.writeJsonfile = self.config["output"]["dumpTojson"]
self.pushToplatform = self.config["output"]["pushToplatform"]
if "dev" in self.selectedPlatform.lower():
self.platformConfig = self.config["fortraPlatform"]["dev"]
if "stag" in self.selectedPlatform.lower():
self.platformConfig = self.config["fortraPlatform"]["stage"]
if "prod" in self.selectedPlatform.lower():
self.platformConfig = self.config["fortraPlatform"]["prod"]
# print(self.platformConfig)
#Setup logging #Setup logging
inexLog(self) inexLog(self)
@ -43,26 +66,19 @@ class Inex:
self.cursor = self.ic.connectDatabase(self, self.db, self.dbDriver, self.dbServer, self.dbDatabase, self.dbUser, self.dbPassword) self.cursor = self.ic.connectDatabase(self, self.db, self.dbDriver, self.dbServer, self.dbDatabase, self.dbUser, self.dbPassword)
self.data = self.ic.databaseQuery(self, self.cursor, self.dbQuery) self.data = self.ic.databaseQuery(self, self.cursor, self.dbQuery)
# print(f"returned data: {self.data}")
self.modifiedData = processData(self.data, dataTemplate)
print(self.modifiedData) self.modifiedData = processData(self.data, dataTemplate, prd_instance_id=self.prdInstanceID,\
product_guid=self.productGUID,product_name=self.productName,product_version=self.productVersion)
if self.pushToplatform:
inexConnect.fortraEFC.pushPayload(self)
# TODO: move this to its own function # TODO: move this to its own function
if self.useLog: if self.useLog:
self.il.warning(f"Writing to '{self.outputFile}'.") self.il.warning(f"Writing to '{self.outputFile}'.")
if self.writeJsonfile:
with open(self.outputFile, "w") as f: with open(self.outputFile, "w") as f:
json.dump(self.modifiedData, f, indent = 2, cls=Encoder) self.j.dump(self.modifiedData, f, indent = 2, cls=self.e)
# TODO: Move this class to it's own file
class Encoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
return int(o)
if isinstance(o, datetime.datetime):
return str(o)
return super().default(o)
# Run # Run
if __name__== "__main__": if __name__== "__main__":

View File

@ -39,5 +39,35 @@ def databaseQuery(self, cursor, query, args=()):
cur.connection.close() cur.connection.close()
if self.useLog: if self.useLog:
self.il.debug(f"Database connection closed") self.il.debug(f"Database connection closed")
# return (r[0] if r else None) if one else r return r
return r
class fortraEFC:
    """Minimal client for pushing event payloads to the Fortra EFC platform.

    The methods are written as unbound helpers: each receives the Inex
    instance as ``self`` and works through its attributes (``r`` = requests,
    ``j`` = json, ``os``, ``platformConfig``, ``tokenFilepath``,
    ``modifiedData``, ``e`` = the custom JSON encoder).
    """

    def getToken(self):
        """Fetch an OAuth token via the client-credentials grant.

        Stores the *decoded* token payload (a dict) on ``self.tokenData``.
        """
        response = self.r.post(self.platformConfig["idp"], data={
            "grant_type": "client_credentials",
            "client_id": self.platformConfig["client_id"],
            "client_secret": self.platformConfig["secret"],
        })
        # Surface IdP failures immediately instead of caching an error body.
        response.raise_for_status()
        # BUG FIX: the original stored the Response object itself, so the
        # readToken -> writeToken path left pushPayload subscripting a
        # Response (TypeError). Store the parsed dict instead.
        self.tokenData = response.json()

    def writeToken(self):
        """Fetch a fresh token and persist it to ``self.tokenFilepath``."""
        fortraEFC.getToken(self)
        with open(self.tokenFilepath, "w") as f:
            self.j.dump(self.tokenData, f, indent=2)

    def readToken(self):
        """Load the cached token from disk, fetching a new one if absent."""
        if self.os.path.exists(self.tokenFilepath):
            with open(self.tokenFilepath, 'rb') as t:
                self.tokenData = self.j.load(t)
        else:
            fortraEFC.writeToken(self)

    def pushPayload(self, _retry=True):
        """POST ``self.modifiedData`` to the EFC machine_event endpoint.

        Returns the HTTP status code. On a 401 (stale cached token) the
        token is refreshed and the request retried exactly once.
        """
        fortraEFC.readToken(self)
        url = f'{self.platformConfig["efc_url"]}/api/v1/unity/data/{self.platformConfig["tenant_id"]}/machine_event'
        # BUG FIX: the original passed json=json.dumps(...), which
        # double-encodes (the server receives a JSON *string*, not an
        # object). Serialize once with the custom encoder and send raw.
        body = self.j.dumps(self.modifiedData, cls=self.e)
        pushPayloadResponse = self.r.post(
            url,
            headers={
                'Authorization': f'bearer {self.tokenData["access_token"]}',
                'Content-Type': 'application/json',
            },
            data=body,
        )
        # BUG FIX: requests never raises HTTPError unless raise_for_status()
        # is called, and `"401" in errh` on an exception object is itself a
        # TypeError — the old retry branch could never run. Check the status
        # code directly, and bound the retry to one attempt.
        if pushPayloadResponse.status_code == 401 and _retry:
            fortraEFC.writeToken(self)
            return fortraEFC.pushPayload(self, _retry=False)
        return pushPayloadResponse.status_code

View File

@ -1,4 +1,4 @@
def processData(data, template): def processData(data, template, **kwargs):
processedData = [] processedData = []
for row in data: for row in data:
# print(f'Row: {row}') # print(f'Row: {row}')
@ -11,10 +11,10 @@ def processData(data, template):
file_name=row.get('FileName'),\ file_name=row.get('FileName'),\
guid=row.get('TransactionGUID'),\ guid=row.get('TransactionGUID'),\
ref_id=row.get('ProtocolCommandID'),\ ref_id=row.get('ProtocolCommandID'),\
prd_instance_id=row.get(''),\ prd_instance_id=kwargs.get('prd_instance_id'),\
product_guid=row.get(''),\ product_guid=kwargs.get('product_guid'),\
product_name=row.get(''),\ product_name=kwargs.get('product_name'),\
product_version=row.get(''),\ product_version=kwargs.get('product_version'),\
node_name=row.get('NodeName'),\ node_name=row.get('NodeName'),\
src_endpoint_port=row.get('RemotePort'),\ src_endpoint_port=row.get('RemotePort'),\
src_endpoint_ip=row.get('RemoteIP'),\ src_endpoint_ip=row.get('RemoteIP'),\
@ -25,8 +25,33 @@ def processData(data, template):
bytes_out=row.get('BytesTransferred'),\ bytes_out=row.get('BytesTransferred'),\
transfer_time=row.get('TransferTime'),\ transfer_time=row.get('TransferTime'),\
time=row.get('Time_stamp'),\ time=row.get('Time_stamp'),\
user_type=row.get(''),\ user_type=identifyUserType(row.get('user_type')),\
user_domain=row.get('SiteName'),\ user_domain=row.get('SiteName'),\
user_name=row.get('Actor'),\ user_name=row.get('Actor'),\
utype=row.get('Command'))) utype=identifyUtype(row.get('TransactionObject'))))
return processedData return processedData
def identifyUserType(obj):
    """Map a raw authentication-protocol field to a coarse account category.

    Returns "Administrator" when the value contains "Admin", "User" for any
    other truthy value, and None for empty/None input.
    """
    if not obj:
        return None
    return "Administrator" if "Admin" in obj else "User"
# Mapping from event-type name to the TransactionObject values that produce
# it. Hoisted to module level so the sets are not rebuilt on every call.
# TODO(review): populate with the real TransactionObject values — until then
# every lookup falls through to None, exactly as the original empty lists did.
_UTYPE_BY_OBJECT = {
    "user_logged_on": (),
    "file_deleted": (),
    "file_uploaded": (),
    "file_downloaded": (),
}


def identifyUtype(obj):
    """Return the event-type name whose value set contains *obj*.

    Returns None when *obj* matches no known TransactionObject value
    (currently always, since the value sets are unpopulated placeholders).
    """
    for utype, objects in _UTYPE_BY_OBJECT.items():
        if obj in objects:
            return utype
    return None

11
inexEncoder.py Normal file
View File

@ -0,0 +1,11 @@
import json
import decimal
import datetime
class Encoder(json.JSONEncoder):
    """JSON encoder for the non-JSON-native types returned by the database.

    Decimal values become plain ints; datetimes become epoch milliseconds.
    Anything else defers to the base class, which raises TypeError.
    """

    # (type, converter) pairs tried in order by default().
    _CONVERTERS = (
        (decimal.Decimal, int),
        (datetime.datetime, lambda dt: int(dt.timestamp() * 1000)),
    )

    def default(self, o):
        for cls, convert in self._CONVERTERS:
            if isinstance(o, cls):
                return convert(o)
        return super().default(o)

View File

@ -1,5 +1,12 @@
import datetime
def connectDatabase(driver, server, database, user, password): def connectDatabase(driver, server, database, user, password):
connectionString = f'DRIVER={{{driver}}};SERVER={server};DATABASE={database};UID={user};PWD={password};TrustServerCertificate=yes' connectionString = f'DRIVER={{{driver}}};SERVER={server};DATABASE={database};UID={user};PWD={password};TrustServerCertificate=yes'
print(connectionString) print(connectionString)
a = connectDatabase("ODBC Driver 18 for SQL Server","b","c","d","e") # a = connectDatabase("ODBC Driver 18 for SQL Server","b","c","d","e")
def converttimestamp(t):
    """Return datetime *t* as an epoch timestamp in milliseconds.

    BUG FIX: the original only printed the value and returned None, which
    made the module-level ``a = converttimestamp(...)`` assignment useless.
    """
    return int(t.timestamp() * 1000)
a = converttimestamp(datetime.datetime(2024, 7, 23, 14, 26, 38, 214000))