15 Commits

Author SHA1 Message Date
9bfca58c7d fix(Authentication): BUG #20 - Authentication isnt working with existing logic 2024-08-22 13:49:42 -05:00
c56c9ac208 Fixed auth bug 2024-08-22 13:49:42 -05:00
69b520e97c Updating function docs 2024-08-15 13:23:25 -05:00
82c8a5bb4a Merge pull request 'dev-add-daysTopull' (#19) from dev-add-daysTopull into main
Reviewed-on: #19
2024-08-14 15:21:00 -05:00
8e8312e853 Updated example 2024-08-14 15:19:37 -05:00
267616f0e4 Updated daysTopull value 2024-08-14 15:05:27 -05:00
de3a3fd03d This addresses #17 2024-08-14 15:00:53 -05:00
8dc6efe40e Updated documentation 2024-08-11 01:31:34 -05:00
d978ad57fe Updating documentation 2024-08-11 00:48:34 -05:00
5d6427698d Merge pull request 'dev-make-query-performant' (#16) from dev-make-query-performant into main
All checks were successful
Build / build-windows-binary (push) Successful in 34s
Build / build-linux-binary (push) Successful in 1m13s
Reviewed-on: #16
2024-08-10 02:22:13 -05:00
8384d714f9 fix: #14 now correctly handles 'other' transactions 2024-08-10 02:20:26 -05:00
42880bb334 made query more performance and allowed embedded override 2024-08-10 00:49:53 -05:00
494edd98ee Merge pull request 'dev-update-data-model' (#13) from dev-update-data-model into main
Reviewed-on: #13
2024-08-10 00:03:54 -05:00
ebe10f80ba Updated data models for download/upload, delete and login 2024-08-10 00:01:58 -05:00
7e5a8a2603 Updated data model 2024-08-06 18:29:55 -05:00
9 changed files with 499 additions and 367 deletions

121
README.md
View File

@ -1,34 +1,93 @@
# Inex # Inex
Information Exchange - Pull data from EFT ARM then dump it to a json file ### Information Exchange - Ingest ARM data into EFC
This application is designed to pull data from a Microsoft SQL database, reformat it, and then send that data via the web to the Event Fusion Center. It's designed to be performant. The embedded query doesn't return unnecessary data and data is written to memory before being pushed into EFC. Initially you may have to pull a large amount of days so the system is caught up, but the sweet spot would be to pull a set of data every day.
# Data Map: ## Build from source
## tbl_ProtocolCommands You will need to install git and clone the repository:
- ProtocolCommandID
- Time_stamp `git clone https://git.jonb.io/jblu/Inex.git`
- RemoteIP
- RemotePort Alternatively you can download the files from [the releases page](https://git.jonb.io/jblu/Inex/releases).
- LocalIP
- LocalPort Then follow OS specific instructions. These are taken directly from the steps used in automation to build the executables found in the releases page.
- Protocol ##### Windows
- SiteName
- Command You can find the requirements.txt in the repository.
- CommandParameters
- FileName `python -m pip install -r requirements.txt`
- VirtualFolderName
- PhysicalFolderName `pyinstaller --noconfirm --onefile --console path/to/inex.py`
- IsInternal
- FileSize ##### Linux
- TransferTime Update Linux Distro. This is Ubuntu specific. Please check the package management tool in your specific version.
- BytesTransferred
- ResultID `apt-get update`
- TransactionID
- Description You need unixodbc or else pyodbc cannot install.
- Actor
## tbl_Transactions `apt-get install unixodbc -y`
- TransactionID
- ParentTransactionID You can find the requirements.txt in the repository.
- TransactionObject
- TransactionGUID `pip install -r requirements.txt`
- NodeName
- time_stamp `pyinstaller --noconfirm --onefile --console path/to/inex.py`
## Setup
You will need a *config.toml* file in the same directory where *inex.py* or the inex executable is. It's recommended to rename the existing config.toml.example file to save some time and avoid omitting any data.
> If the application doesn't find the *config.toml* at the time of execution, it will not continue.
#### Config.toml
| Table | Purpose |
|-|-|
| fortraPlatform | Fortra Specific data |
| database | MSSQL Configuration |
| immutables | Data that must be included but is not provided by the source database |
| output | If and where to write files|
| logging | Set if, where and level of logging |
The following settings are not obvious as to how they affect the application.
> Note the #comments
```
[fortraPlatform]
selectedPlatform = "dev" # This will modify which environment the data is pushed to. The tenant_id and secret must be manually modified.
[database]
overrideEmbeddedquery = true # Choose if embedded query should be overridden.
daysTopull = 30 # This setting is only related to the embedded query. Please note this will not affect query provided in config.toml
driver = "ODBC Driver 18 for SQL Server" # Select which windows driver should be used. This one is recommended.
[output]
pushToplatform = false # if true, send data to the Fortra platform. Setting this to false is useful in dry-runs.
dumpTojson = true # if true, write data to json file
token = "./.token" # Recommended to leave as-is
```
## Usage
### Windows
##### Run inex.py
1. Download [source](https://git.jonb.io/jblu/Inex/releases)
2. `cd C:\path\to\Inex`
3. `python inex.py`
OR
##### Run inex.exe
1. Download [source](https://git.jonb.io/jblu/Inex/releases)
2. `cd C:\path\to\Inex.exe`
3. CMD `.\inex.exe`
### Linux
##### Run inex.py
1. Download [source](https://git.jonb.io/jblu/Inex/releases)
2. `cd /path/to/Inex`
3. `python ./inex.py`
OR
1. Download [source](https://git.jonb.io/jblu/Inex/releases)
2. `cd /path/to/Inex`
3. RUN `./inex`

View File

@ -1,6 +1,33 @@
[fortraPlatform] [fortraPlatform]
selectedPlatform = "dev" selectedPlatform = "dev"
[output]
pushToplatform = false
dumpTojson = true
filename ="./data.json"
token = "./.token"
[logging]
useLog = true
logLevel = "debug"
logPath = "./inex.log"
[database]
overrideEmbeddedquery = false
daysTopull = 20
driver = "ODBC Driver 18 for SQL Server"
server = "192.168.x.x"
database = "EFTDB"
user = "a"
password = "a"
query = """DECLARE @stopTime DATETIME2
SET @stopTime=DATEADD(DAY, -30, GETDATE())
SELECT p.ProtocolCommandID, t.Time_stamp, p.RemoteIP, p.RemotePort, p.LocalIP, p.LocalPort, p.Protocol, p.SiteName, p.Command, p.FileName, p.PhysicalFolderName, p.VirtualFolderName, p.FileSize, p.TransferTime, p.BytesTransferred, p.Description, p.ResultID, t.TransactionID, p.Actor, t.TransactionObject, t.NodeName, t.TransactionGUID, a.Protocol user_type
FROM tbl_Transactions t
Full JOIN tbl_ProtocolCommands p ON(t.TransactionID=p.TransactionID)
Full JOIN tbl_Authentications a ON(t.TransactionID=a.TransactionID)
WHERE p.Time_stamp>@stopTime AND p.Command IS NOT NULL"""
[fortraPlatform.dev] [fortraPlatform.dev]
idp = "https://foundation.foundation-dev.cloudops.fortradev.com/idp/realms/products/protocol/openid-connect/token" idp = "https://foundation.foundation-dev.cloudops.fortradev.com/idp/realms/products/protocol/openid-connect/token"
efc_url = "https://efc.efc-dev.cloudops.fortradev.com" efc_url = "https://efc.efc-dev.cloudops.fortradev.com"
@ -22,56 +49,6 @@ tenant_id = ""
client_id = "eft-event-generator-confidential" client_id = "eft-event-generator-confidential"
secret = "" secret = ""
[database] [immutables]
driver = "ODBC Driver 18 for SQL Server" product_name = "GlobalScape EFT"
server = "192.168.x.x" prd_ext_tenant_name = "GlobalScape EFT"
database = "EFTDB"
user = "a"
password = "a"
query = """DECLARE @stopTime DATETIME2
SET @stopTime = DATEADD(DAY, -30, GETDATE())
SELECT p.[ProtocolCommandID]
,t.[Time_stamp]
,p.[RemoteIP]
,p.[RemotePort]
,p.[LocalIP]
,p.[LocalPort]
,p.[Protocol]
,p.[SiteName]
,p.[Command]
,p.[CommandParameters]
,p.[FileName]
,p.[VirtualFolderName]
,p.[PhysicalFolderName]
,p.[IsInternal]
,p.[FileSize]
,p.[TransferTime]
,p.[BytesTransferred]
,p.[ResultID]
,t.[TransactionID]
,p.[Description]
,p.[Actor]
,t.ParentTransactionID
,t.TransactionObject
,t.NodeName
,t.TransactionGUID
,a.Protocol user_type
FROM [EFTDB].[dbo].[tbl_Transactions] t Full JOIN tbl_ProtocolCommands p ON (t.TransactionID = p.TransactionID) Full join tbl_Authentications a ON (t.TransactionID = a.TransactionID)
WHERE p.Time_stamp > @stopTime"""
[immutables]
prd_instance_id = 1
product_guid = "asdf"
product_name = "EFT"
product_version ="8.1.0.9"
[output]
pushToplatform = true
dumpTojson = true
filename ="./data.json"
token = "./.token"
[logging]
use_log = true
logLevel = "debug"
logPath = "./inex.log"

57
inex.py
View File

@ -10,6 +10,7 @@ import json
import requests import requests
import inexEncoder import inexEncoder
import inexSqlquery import inexSqlquery
class Inex: class Inex:
def __init__(self): def __init__(self):
"""Initilize config, calls functions from inex-connect.py and inex-logging.py""" """Initilize config, calls functions from inex-connect.py and inex-logging.py"""
@ -30,24 +31,32 @@ class Inex:
self.config = self.tl.load(c) self.config = self.tl.load(c)
# set config # set config
self.dbDriver = self.config["database"]["driver"] try:
self.dbServer = self.config["database"]["server"] if self.config:
self.dbDatabase = self.config["database"]["database"] self.dbDriver = self.config["database"]["driver"]
self.dbUser = self.config["database"]["user"] self.dbServer = self.config["database"]["server"]
self.dbPassword = self.config["database"]["password"] self.dbDatabase = self.config["database"]["database"]
self.dbQuery = self.config["database"]["query"] self.dbUser = self.config["database"]["user"]
self.outputFile = self.config["output"]["filename"] self.dbPassword = self.config["database"]["password"]
self.useLog = self.config["logging"]["useLog"] self.dbQuery = self.config["database"]["query"]
self.logPath = self.config["logging"]["logPath"] self.outputFile = self.config["output"]["filename"]
self.logLevel = self.config["logging"]["logLevel"] self.useLog = self.config["logging"]["useLog"]
self.prdInstanceID = self.config["immutables"]["prd_instance_id"] self.logPath = self.config["logging"]["logPath"]
self.productGUID = self.config["immutables"]["product_guid"] self.logLevel = self.config["logging"]["logLevel"]
self.productName = self.config["immutables"]["product_name"] self.prdExttenantname = self.config["immutables"]["prd_ext_tenant_name"]
self.productVersion = self.config["immutables"]["product_version"] self.productName = self.config["immutables"]["product_name"]
self.tokenFilepath = self.config["output"]["token"] self.tokenFilepath = self.config["output"]["token"]
self.selectedPlatform = self.config["fortraPlatform"]["selectedPlatform"] self.selectedPlatform = self.config["fortraPlatform"]["selectedPlatform"]
self.writeJsonfile = self.config["output"]["dumpTojson"] self.writeJsonfile = self.config["output"]["dumpTojson"]
self.pushToplatform = self.config["output"]["pushToplatform"] self.pushToplatform = self.config["output"]["pushToplatform"]
self.queryOverride = self.config["database"]["overrideEmbeddedquery"]
self.queryDaystopull = self.config["database"]["daysTopull"]
except Exception as e:
print("No config.toml or possibly missing settings in the file. Please use config.toml.example file and configure appropriately")
self.il.error(e)
print(e)
exit(1)
if "dev" in self.selectedPlatform.lower(): if "dev" in self.selectedPlatform.lower():
self.platformConfig = self.config["fortraPlatform"]["dev"] self.platformConfig = self.config["fortraPlatform"]["dev"]
@ -55,7 +64,6 @@ class Inex:
self.platformConfig = self.config["fortraPlatform"]["stage"] self.platformConfig = self.config["fortraPlatform"]["stage"]
if "prod" in self.selectedPlatform.lower(): if "prod" in self.selectedPlatform.lower():
self.platformConfig = self.config["fortraPlatform"]["prod"] self.platformConfig = self.config["fortraPlatform"]["prod"]
# print(self.platformConfig)
#Setup logging #Setup logging
inexLog(self) inexLog(self)
@ -63,15 +71,14 @@ class Inex:
# create the connection to the database # create the connection to the database
self.cursor = self.ic.inexSql.connectDatabase(self, self.db, self.dbDriver, self.dbServer, self.dbDatabase, self.dbUser, self.dbPassword) self.cursor = self.ic.inexSql.connectDatabase(self, self.db, self.dbDriver, self.dbServer, self.dbDatabase, self.dbUser, self.dbPassword)
# self.data = self.ic.inexSql.databaseQuery(self, self.cursor, self.dbQuery)
self.data = self.ic.inexSql.databaseQuery(self, self.cursor, self.sq.sqlQuerymodel.queryData()) self.data = self.ic.inexSql.databaseQuery(self, self.cursor, self.sq.sqlQuerymodel.queryData(self.queryOverride,self.dbQuery, self.queryDaystopull))
self.modifiedData = processData(self.data, dataTemplate, prd_ext_tenant_name=self.prdExttenantname,product_name=self.productName,\
prd_ext_tenant_id=self.platformConfig["tenant_id"])
self.modifiedData = processData(self.data, dataTemplate, prd_instance_id=self.prdInstanceID,\
product_guid=self.productGUID,product_name=self.productName,product_version=self.productVersion)
if self.pushToplatform: if self.pushToplatform:
inexConnect.fortraEFC.pushPayload(self) inexConnect.fortraEFC.__init__(self)
# TODO: move this to its own function # TODO: move this to its own function
if self.useLog: if self.useLog:

View File

@ -43,32 +43,46 @@ class inexSql:
return r return r
class fortraEFC: class fortraEFC:
def getToken(self): def __init__(self):
self.tokenData = self.r.post(self.platformConfig["idp"], data={"grant_type":"client_credentials",\ # Check if .token file is present
"client_id": self.platformConfig["client_id"],\ if fortraEFC.readToken(self) == 1:
"client_secret": self.platformConfig["secret"],}) # Get fresh token. First run.
def writeToken(self): fortraEFC.getToken(self)
fortraEFC.getToken(self) fortraEFC.writeToken(self)
with open(self.tokenFilepath, "w") as f: # Push data with token
self.j.dump(self.tokenData.json(), f, indent = 2) self.pushPayloadresponse = fortraEFC.pushPayload(self)
if self.pushPayloadresponse == 401:
fortraEFC.getToken(self)
fortraEFC.writeToken(self)
fortraEFC.pushPayload(self)
def readToken(self): def readToken(self):
if self.os.path.exists(self.tokenFilepath): if self.os.path.exists(self.tokenFilepath):
with open(self.tokenFilepath, 'rb') as t: with open(self.tokenFilepath, 'rb') as t:
self.tokenData = self.j.load(t) self.tokenData = self.j.load(t)
# print(self.tokenData["access_token"]) self.il.debug(f'readToken {self.tokenData["access_token"]}')
return 0
else: else:
fortraEFC.writeToken(self) return 1
def getToken(self):
self.tokenData = self.r.post(self.platformConfig["idp"], data={"grant_type":"client_credentials",\
"client_id": self.platformConfig["client_id"],\
"client_secret": self.platformConfig["secret"],})
self.tokenData = self.tokenData.json()
self.il.debug(f'getToken {self.tokenData["access_token"]}')
def writeToken(self):
fortraEFC.getToken(self)
with open(self.tokenFilepath, "w") as f:
self.j.dump(self.tokenData, f, indent = 2)
self.il.debug(f'writeToken {self.tokenData["access_token"]}')
def pushPayload(self): def pushPayload(self):
fortraEFC.readToken(self) self.il.debug(f'pushPayload {self.tokenData["access_token"]}')
try: url = f'{self.platformConfig["efc_url"]}/api/v1/unity/data/{self.platformConfig["tenant_id"]}/machine_event'
url = f'{self.platformConfig["efc_url"]}/api/v1/unity/data/{self.platformConfig["tenant_id"]}/machine_event' pushPayloadResponse = self.r.post(url, headers={'Authorization': f'Bearer {self.tokenData["access_token"]}'},\
pushPayloadResponse = self.r.post(url, headers={'Authorization': f'bearer {self.tokenData["access_token"]}'},\ data=self.j.dumps(self.modifiedData, cls=self.e))
json=self.j.dumps(self.modifiedData,indent = 2, cls=self.e)) self.il.debug(pushPayloadResponse.status_code)
return pushPayloadResponse.status_code self.il.debug(pushPayloadResponse.text)
except self.r.exceptions.HTTPError as errh: return pushPayloadResponse.status_code
print ("Http Error:",errh)
if "401" in errh:
fortraEFC.writeToken(self)
fortraEFC.pushPayload(self)

View File

@ -1,211 +1,191 @@
def dataTemplate(**kwargs): def dataTemplate(transactionType,**kwargs):
"""Expects the following keyword arguments: upload = {
status,status_detail,status_code,file_size,file_path,file_virtual_path,file_name, "bytes" : kwargs.get('bytes'),
guid,ref_id,prd_instance_id,product_guid,product_name,product_version,node_name, "dst_endpoint": {
src_endpoint_port,src_endpoint_ip,dst_endpoint_port,dst_endpoint_ip,dst_endpoint_type, "port": kwargs.get('dst_endpoint_port'),
session_uid,bytes_out,transfer_time,time,user_type,user_domain,user_name and utype. "ip": kwargs.get('dst_endpoint_ip'),
""" "type": kwargs.get('dst_endpoint_type')
template ={ },
"status": kwargs.get('status'), "duration": kwargs.get('duration'),
"status_detail": kwargs.get('status_detail'), "file": {
"status_code": kwargs.get('status_code'), "created_time": kwargs.get('file_created_time'),
"file": { "uid": kwargs.get('file_uid'),
"size": kwargs.get('file_size'), "size": kwargs.get('file_size'),
"path": kwargs.get('file_path'), "name": kwargs.get('file_name'),
"virtual_path": kwargs.get('file_virtual_path'), "path": kwargs.get('file_path')
"name": kwargs.get('file_name') },
}, "guid": kwargs.get('file_uid'),
"guid": kwargs.get('guid'), "node_name": kwargs.get('node_name'),
"ref_id": kwargs.get('ref_id'), "prd_ext_tenant_id": kwargs.get('prd_ext_tenant_id'),
"prd_instance_id": kwargs.get('prd_instance_id'), "product_name": kwargs.get('product_name'),
"product_guid": kwargs.get('product_guid'), "prd_ext_tenant_name": kwargs.get('prd_ext_tenant_name'),
"product_name": kwargs.get('product_name'), "classifications": [{
"product_version": kwargs.get('product_version'), "ref_id": f"globalscape:{kwargs.get('guid')}",
"node_name":kwargs.get('node_name'), "time": kwargs.get('time'),
"src_endpoint": { }],
"port": kwargs.get('src_endpoint_port'), "session": {
"ip": kwargs.get('src_endpoint_ip') "created_time": kwargs.get('time'),
}, "uid": kwargs.get('session_uid')
"dst_endpoint": { },
"src_endpoint": {
"port": kwargs.get('src_endpoint_port'),
"ip": kwargs.get('src_endpoint_ip'),
"type": kwargs.get('src_endpoint_type')
},
"tenant": kwargs.get('prd_ext_tenant_id'),
"tenant_name":"GlobalScape",
"time": kwargs.get('time'),
"status_code": kwargs.get('status_code'),
"status_detail": kwargs.get('status_detail'),
"user": {
"home_directory": kwargs.get('user_home_directory'),
"uuid": kwargs.get('guid'),
"uid": kwargs.get('user_uid'),
"type": kwargs.get('user_type'),
"name": kwargs.get('user_name')
},
"utype": kwargs.get('utype')
}
download = {
"bytes" : kwargs.get('bytes'),
"dst_endpoint": {
"port": kwargs.get('dst_endpoint_port'),
"ip": kwargs.get('dst_endpoint_ip'),
"type": kwargs.get('dst_endpoint_type')
},
"duration": kwargs.get('duration'),
"file": {
"uid": kwargs.get('file_uid'),
"size": kwargs.get('file_size'),
"name": kwargs.get('file_name'),
"path": kwargs.get('file_path')
},
"guid": kwargs.get('file_uid'),
"node_name": kwargs.get('node_name'),
"prd_ext_tenant_id": kwargs.get('prd_ext_tenant_id'),
"product_name": kwargs.get('product_name'),
"prd_ext_tenant_name": kwargs.get('prd_ext_tenant_name'),
"classifications": [{
"ref_id": f"globalscape:{kwargs.get('guid')}",
"time": kwargs.get('time'),
}],
"session": {
"created_time": kwargs.get('time'),
"uid": kwargs.get('session_uid')
},
"src_endpoint": {
"port": kwargs.get('src_endpoint_port'),
"ip": kwargs.get('src_endpoint_ip'),
"type": kwargs.get('src_endpoint_type')
},
"tenant": kwargs.get('prd_ext_tenant_id'),
"tenant_name":"GlobalScape",
"time": kwargs.get('time'),
"status_code": kwargs.get('status_code'),
"status_detail": kwargs.get('status_detail'),
"user": {
"home_directory": kwargs.get('user_home_directory'),
"uuid": kwargs.get('guid'),
"uid": kwargs.get('user_uid'),
"type": kwargs.get('user_type'),
"name": kwargs.get('user_name')
},
"utype": kwargs.get('utype')
}
fileDeleted = {
"file": {
"size": kwargs.get('file_size'),
"name": kwargs.get('file_name'),
"path": kwargs.get('file_path'),
"uid": kwargs.get('file_uid'),
},
"guid": f'deleted:{kwargs.get("guid")}',
"node_name": kwargs.get('node_name'),
"classifications": [{
"ref_id": f"globalscape:{kwargs.get('guid')}",
"time": kwargs.get('time'),
}],
"prd_ext_tenant_name": kwargs.get("prd_ext_tenant_name"),
"prd_ext_tenant_id": kwargs.get('prd_ext_tenant_id'),
"product_name": kwargs.get("product_name"),
"session": {
"created_time": kwargs.get('time'),
"uid": kwargs.get('session_uid')
},
"src_endpoint": {
"port": kwargs.get('src_endpoint_port'),
"ip": kwargs.get('src_endpoint_ip'),
"type": kwargs.get('src_endpoint_type')
},
"tenant": kwargs.get('prd_ext_tenant_id'),
"tenant_name":"GlobalScape",
"dst_endpoint": {
"port": kwargs.get('dst_endpoint_port'),
"ip": kwargs.get('dst_endpoint_ip'),
"type": kwargs.get('dst_endpoint_type')
},
"time": kwargs.get('time'),
"status_code": kwargs.get('status_code'),
"status_detail": kwargs.get('status_detail'),
"user": {
"home_directory": kwargs.get('user_home_directory'),
"uuid": kwargs.get('user_session_uid'),
"uid": kwargs.get('user_uid'),
"type": kwargs.get('user_type'),
"name": kwargs.get('user_name')
},
"utype": kwargs.get('utype')
}
logon ={
"classifications": [{
"ref_id": f"globalscape:{kwargs.get('guid')}",
"time": kwargs.get('time'),
}],
"dst_endpoint": {
"port": kwargs.get('dst_endpoint_port'), "port": kwargs.get('dst_endpoint_port'),
"ip": kwargs.get('dst_endpoint_ip'), "ip": kwargs.get('dst_endpoint_ip'),
"type": kwargs.get('dst_endpoint_type') "type": kwargs.get('dst_endpoint_type')
}, },
"session": { "guid": kwargs.get('guid'),
"uid": kwargs.get('session_uid') "node_name": kwargs.get('node_name'),
"tenant": kwargs.get('prd_ext_tenant_id'),
"tenant_name":"GlobalScape",
"prd_ext_tenant_id": kwargs.get('prd_ext_tenant_id'),
"product_name": kwargs.get("product_name"),
"prd_ext_tenant_name": kwargs.get('prd_ext_tenant_name'),
"status_code": kwargs.get('status_code'),
"status_detail": kwargs.get('status_detail'),
"src_endpoint": {
"port": kwargs.get('src_endpoint_port'),
"ip": kwargs.get('src_endpoint_ip'),
"type": kwargs.get('src_endpoint_type')
}, },
"bytes_out" : kwargs.get('bytes_out'),
"transfer_time" : kwargs.get('transfer_time'),
"time": kwargs.get('time'), "time": kwargs.get('time'),
"user": { "user": {
"home_directory": kwargs.get('user_home_directory'),
"uuid": kwargs.get('user_session_uid'),
"uid": kwargs.get('user_uid'),
"type": kwargs.get('user_type'), "type": kwargs.get('user_type'),
"domain": kwargs.get('user_domain'),
"name": kwargs.get('user_name') "name": kwargs.get('user_name')
}, },
"session": {
"created_time": kwargs.get('time'),
"uid": kwargs.get('session_uid')
},
"utype": kwargs.get('utype') "utype": kwargs.get('utype')
} }
return template if transactionType == "file_uploaded":
template = upload
FileUploaded = { if transactionType == "file_downloaded":
"bytes" : 2490, template = download
"dst_endpoint": { if transactionType == "file_deleted":
"port": 22, template = fileDeleted
"ip": "10.91.160.77", if transactionType == "user_logged_on":
"type": "SFTP" template = logon
}, if transactionType == "other":
"duration": 200, template = {}
"file": {
"created_time": 1722485724000,
"size": 2490,
"name": "Case9.vbs",
"path": "\\\\10.255.255.9\\shared\\HASite\\InetPub\\EFTRoot\\MySite\\Usr\\Ivan //<PhysicalFolderName>"
},
"guid": "48D9C7A3-2DC6-11EF-AA59-00155D641204",
"node_name":"PERF01-S2019-77",
"prd_ext_tenant_id": "e71851c2-593f-4f49-9c07-91727b1be94b",
"product_name": "GlobalScape EFT",
"prd_ext_tenant_name": "GlobalScape EFT",
"classifications": [{
"ref_id": "globalscape:48D9C7A3-2DC6-11EF-AA59-00155D641204",
"time":1722485724000,
}],
"session": {
"created_time":1722485724000,
"uid": "3615136"
},
"src_endpoint": {
"port": 58868,
"ip": "10.91.160.45",
"type":"SFTP"
},
"tenant": "e71851c2-593f-4f49-9c07-91727b1be94b",
"tenant_name":"GlobalScape",
"time":1722485724000,
"status_code":226,
"status_detail":"Upload Successful",
"user": {
"home_directory": "/Usr/Ivan/",
"uuid":"48D9C7A3-2DC6-11EF-AA59-00155D641204",
"uid": "3978403",
"type": "User",
"name": "Ivan"
},
"utype": "file_uploaded"
}
FileDownloaded = {
"bytes" : 4891,
"dst_endpoint": {
"port": 443,
"ip": "10.91.160.77",
"type": "HTTPS"
},
"duration": 200,
"file": {
"created_time": 1722518124000,
"size": 4891,
"name": "FileDownload1.exe",
"path": "\\\\10.255.255.9\\shared\\HASite\\InetPub\\EFTRoot\\MySite\\Usr\\Ivan //<PhysicalFolderName>"
},
"guid": "48D9C7A3-2DC6-11EF-AA59-00155D641205",
"node_name":"PERF01-S2019-77",
"prd_ext_tenant_id": "e71851c2-593f-4f49-9c07-91727b1be94b",
"product_name": "GlobalScape EFT",
"prd_ext_tenant_name": "GlobalScape EFT",
"classifications": [{
"ref_id": "globalscape:48D9C7A3-2DC6-11EF-AA59-00155D641205",
"time":1722518124000,
}],
"session": {
"created_time":1722518124000,
"uid": "3615137"
},
"src_endpoint": {
"port": 443,
"ip": "10.91.160.45",
"type":"HTTPS"
},
"tenant": "e71851c2-593f-4f49-9c07-91727b1be94b",
"tenant_name":"GlobalScape",
"time":1722518124000,
"status_code":226,
"status_detail":"Download Successful",
"user": {
"home_directory": "/Usr/Ivan/",
"uuid":"48D9C7A3-2DC6-11EF-AA59-00155D641205",
"uid": "3978404",
"type": "User",
"name": "Ivan"
},
"utype": "file_downloaded"}
FileDeleted = {
"file": {
"size": 304673,
"path": "\\\\10.255.255.9\\shared\\HASite\\InetPub\\EFTRoot\\MySite\\Usr\\Ivan",
"name": "DeleteME.txt"
},
"guid": "48D9C7A3-2DC6-11EF-AA59-00155D641207",
"classifications": [{
"ref_id": "globalscape:48D9C7A3-2DC6-11EF-AA59-00155D641207",
"time":1722515664000,
}],
"prd_ext_tenant_name": "Globalscape EFT",
"prd_ext_tenant_id": "e71851c2-593f-4f49-9c07-91727b1be94b",
"product_name": "Globalscape EFT",
"session": {
"created_time":1722515664000,
"uid": "3615138"
},
"src_endpoint": {
"port": 443,
"ip": "10.91.160.45",
"type":"HTTPS"
},
"dst_endpoint": {
"port": 443,
"ip": "10.91.160.77",
"type": "HTTPS"
},
"time": 1722515664000,
"user": {
"home_directory": "/Usr/Ivan/",
"uuid":"48D9C7A3-2DC6-11EF-AA59-00155D641207",
"uid": "3978406",
"type": "User",
"name": "Ivan"
},
"utype": "file_deleted"
}
Logon ={
"classifications": [{
"ref_id": "globalscape:48D9C7A3-2DC6-11EF-AA59-00155D641206",
"time": 1722510924000,
}], return template
"dst_endpoint": {
"port": 443,
"ip": "10.91.160.77",
"type": "HTTPS"
},
"guid": "48D9C7A3-2DC6-11EF-AA59-00155D641206",
"prd_ext_tenant_id": "e71851c2-593f-4f49-9c07-91727b1be94b",
"product_name": "GlobalScape EFT",
"prd_ext_tenant_name": "GlobalScape EFT",
"src_endpoint": {
"port": 443,
"ip": "10.91.160.45",
"type":"HTTPS"
},
"time": 1722510924000,
"user": {
"home_directory": "/Usr/Ivan/",
"uuid":"48D9C7A3-2DC6-11EF-AA59-00155D641206",
"uid": "3978405",
"type": "User",
"name": "Ivan"
},
"utype": "user_logged_on"
}

View File

@ -1,37 +1,97 @@
def processData(data, template, **kwargs): def processData(data, template, **kwargs):
"""Translates data from sql query to the appropriate place in the respective template.
Accepts data, which is the sql query output, the template function, and finally
additional data to insert into the template."""
processedData = [] processedData = []
transactionLoginid = []
for row in data: for row in data:
# print(f'Row: {row}') # print(f'Row: {row}')
processedData.append(template(status=row.get(''),\ # must set variables for the different templates and do logic based on that. Do not call identifyUtype many times
status_detail=row.get(''),\ identifyUtypecommand = identifyUtype(row.get('Command'))
status_code=row.get('ResultID'),\
file_size=row.get('FileSize'),\ if identifyUtypecommand == "other":
file_path=row.get('PhysicalFolderName'),\ continue
file_virtual_path=row.get('VirtualFolderName'),\
file_name=row.get('FileName'),\ if row.get('Command') == None:
guid=row.get('TransactionGUID'),\ continue
ref_id=row.get('ProtocolCommandID'),\
prd_instance_id=kwargs.get('prd_instance_id'),\ userType = identifyUserType(row.get('user_type'))
product_guid=kwargs.get('product_guid'),\ try:
product_name=kwargs.get('product_name'),\ processedData.append(template(identifyUtypecommand,\
product_version=kwargs.get('product_version'),\ prd_ext_tenant_name=kwargs.get('prd_ext_tenant_name'),\
node_name=row.get('NodeName'),\ user_uid=row.get('TransactionID'),\
src_endpoint_port=row.get('RemotePort'),\ status_detail=row.get('Description'),\
src_endpoint_ip=row.get('RemoteIP'),\ prd_ext_tenant_id=kwargs.get('prd_ext_tenant_id'),\
dst_endpoint_port=row.get('LocalPort'),\ status_code=row.get('ResultID'),\
dst_endpoint_ip=row.get('LocalIP'),\ file_created_time=row.get('Time_stamp'),\
dst_endpoint_type=row.get('Protocol'),\ file_size=row.get('FileSize'),\
session_uid=row.get('TransactionID'),\ file_uid=row.get('ProtocolCommandID'),\
bytes_out=row.get('BytesTransferred'),\ file_path=row.get('PhysicalFolderName'),\
transfer_time=row.get('TransferTime'),\ file_name=row.get('FileName'),\
time=row.get('Time_stamp'),\ guid=row.get('TransactionGUID'),\
user_type=identifyUserType(row.get('user_type')),\ product_name=kwargs.get('product_name'),\
user_domain=row.get('SiteName'),\ node_name=row.get('NodeName'),\
user_name=row.get('Actor'),\ session_uid=row.get('TransactionID'),\
utype=identifyUtype(row.get('TransactionObject')))) src_endpoint_type=row.get('Protocol'),\
src_endpoint_port=row.get('RemotePort'),\
src_endpoint_ip=row.get('RemoteIP'),\
dst_endpoint_port=row.get('LocalPort'),\
dst_endpoint_ip=row.get('LocalIP'),\
dst_endpoint_type=row.get('Protocol'),\
user_session_uid=row.get('TransactionID'),\
bytes=row.get('BytesTransferred'),\
time=row.get('Time_stamp'),\
duration=row.get('TransferTime'),\
user_type=userType,\
user_name=row.get('Actor'),\
user_home_directory=row.get('VirtualFolderName'),\
utype=identifyUtypecommand))
except UnboundLocalError:
print(f'Problem row GUID:{row.get("TransactionGUID")} ::: TransactionObject:{row.get("TransactionObject")} Command: {row.get("Command")}')
continue
identifyUtypetransactionObject = identifyUtype(row.get('TransactionObject'))
if identifyUtypetransactionObject == "other":
continue
if row.get('TransactionGUID') not in transactionLoginid:
try:
processedData.append(template(identifyUtypetransactionObject,\
prd_ext_tenant_id=kwargs.get('prd_ext_tenant_id'),\
prd_ext_tenant_name=kwargs.get('prd_ext_tenant_name'),\
status_detail=row.get('Description'),\
guid=row.get('TransactionGUID'),\
status_code=row.get('ResultID'),\
node_name=row.get('NodeName'),\
prd_instance_id=kwargs.get('prd_instance_id'),\
product_name=kwargs.get('product_name'),\
src_endpoint_type=row.get('Protocol'),\
src_endpoint_port=row.get('RemotePort'),\
src_endpoint_ip=row.get('RemoteIP'),\
dst_endpoint_port=row.get('LocalPort'),\
dst_endpoint_ip=row.get('LocalIP'),\
dst_endpoint_type=row.get('Protocol'),\
session_uid=row.get('TransactionID'),\
transfer_time=row.get('TransferTime'),\
time=row.get('Time_stamp'),\
user_session_uid=row.get('TransactionID'),\
user_uid=row.get('TransactionID'),\
user_type=userType,\
user_name=row.get('Actor'),\
user_home_directory=row.get('PhysicalFolderName'),\
utype=identifyUtypetransactionObject\
))
transactionLoginid.append(row.get('TransactionGUID'))
except UnboundLocalError:
print(f'Problem row GUID:{row.get("TransactionGUID")} ::: TransactionObject:{row.get("TransactionObject")} Command: {row.get("Command")}')
continue
return processedData return processedData
def identifyUserType(obj): def identifyUserType(obj):
"""Check string if it has Admin-> return Administrator else return User."""
if obj: if obj:
if "Admin" in obj: if "Admin" in obj:
return "Administrator" return "Administrator"
@ -39,19 +99,22 @@ def identifyUserType(obj):
return "User" return "User"
else: else:
return None return None
def identifyUtype(obj): def identifyUtype(obj):
user_logged_on = [] """Process Type of transaction based on string that passed in.
file_deleted = [] Return transaction type."""
file_uploaded = [] user_logged_on = ['AUTH']
file_downloaded = [] file_deleted = ["dele"]
file_uploaded = ["created"]
file_downloaded = ["sent"]
if obj in user_logged_on: if obj in user_logged_on:
return "user_logged_on" return "user_logged_on"
if obj in file_deleted: elif obj in file_deleted:
return "file_deleted" return "file_deleted"
if obj in file_uploaded: elif obj in file_uploaded:
return "file_uploaded" return "file_uploaded"
if obj in file_downloaded: elif obj in file_downloaded:
return "file_downloaded" return "file_downloaded"
else: else:
return None return "other"

View File

@ -3,6 +3,8 @@ import decimal
import datetime import datetime
class Encoder(json.JSONEncoder): class Encoder(json.JSONEncoder):
"""Encoder uses json.JSONEncoder and checks for instances of decimal and datetime.
Changes decimal.Decimal to int and datetime.datetime to unix timestamp with miliseconds."""
def default(self, o): def default(self, o):
if isinstance(o, decimal.Decimal): if isinstance(o, decimal.Decimal):
return int(o) return int(o)

View File

@ -1,11 +1,11 @@
class sqlQuerymodel: class sqlQuerymodel:
def queryData(): def queryData(overRideflag, configQuery, daysTopull):
"""Embedded query data""" """Embedded query data"""
q ="""DECLARE @stopTime DATETIME2 q ="""DECLARE @stopTime DATETIME2
SET @stopTime=DATEADD(DAY, -30, GETDATE()) SET @stopTime=DATEADD(DAY, -30, GETDATE())
SELECT p.ProtocolCommandID, t.Time_stamp, p.RemoteIP, p.RemotePort, p.LocalIP, p.LocalPort, p.Protocol, p.SiteName, p.Command, p.CommandParameters, p.FileName, p.VirtualFolderName, p.PhysicalFolderName, p.IsInternal, p.FileSize, p.TransferTime, p.BytesTransferred, p.ResultID, t.TransactionID, p.Description, p.Actor, t.ParentTransactionID, t.TransactionObject, t.NodeName, t.TransactionGUID, a.Protocol user_type SELECT p.ProtocolCommandID, t.Time_stamp, p.RemoteIP, p.RemotePort, p.LocalIP, p.LocalPort, p.Protocol, p.SiteName, p.Command, p.FileName, p.PhysicalFolderName, p.VirtualFolderName, p.FileSize, p.TransferTime, p.BytesTransferred, p.Description, p.ResultID, t.TransactionID, p.Actor, t.TransactionObject, t.NodeName, t.TransactionGUID, a.Protocol user_type
FROM tbl_Transactions t FROM tbl_Transactions t
Full JOIN tbl_ProtocolCommands p ON(t.TransactionID=p.TransactionID) Full JOIN tbl_ProtocolCommands p ON(t.TransactionID=p.TransactionID)
Full join tbl_Authentications a ON(t.TransactionID=a.TransactionID) Full join tbl_Authentications a ON(t.TransactionID=a.TransactionID)
WHERE p.Time_stamp>@stopTime""" WHERE p.Time_stamp>@stopTime AND p.Command IS NOT NULL""".replace("30", str(daysTopull))
return q return configQuery if overRideflag else q

32
test.py
View File

@ -9,4 +9,34 @@ def connectDatabase(driver, server, database, user, password):
def converttimestamp(t): def converttimestamp(t):
print(int(t.timestamp()* 1000)) print(int(t.timestamp()* 1000))
a = converttimestamp(datetime.datetime(2024, 7, 23, 14, 26, 38, 214000)) def builddict(keys,*args,**kwargs):
dict = {}
for key in keys:
dict[key] = kwargs.get(key)
print(dict)
testfolder = '/Usr/a/asdf/asf'
user = 'a'
def identifyUtype(obj):
"""Process Type of transaction based on string that passed in.
Return transaction type."""
user_logged_on = ['AUTH']
file_deleted = ["dele"]
file_uploaded = ["created"]
file_downloaded = ["sent"]
if obj in user_logged_on:
return "user_logged_on"
if obj in file_deleted:
return "file_deleted"
if obj in file_uploaded:
return "file_uploaded"
if obj in file_downloaded:
return "file_downloaded"
else:
return "other"
transactionType = 'file_uploaded'
print(transactionType.split("_")[1].rstrip("d").rstrip("e"))