Compare commits
9 Commits
- abd669c3bb
- 487d883297
- 69b520e97c
- 82c8a5bb4a
- 8e8312e853
- 267616f0e4
- de3a3fd03d
- 8dc6efe40e
- d978ad57fe
README.md — 121 changed lines
@@ -1,34 +1,93 @@

# Inex

Information Exchange - Pull data from EFT ARM then dump it to a json file

### Information Exchange - Ingest ARM data into EFC

This application pulls data from a Microsoft SQL database, reformats it, and sends it over the web to the Event Fusion Center (EFC). It is designed to be performant: the embedded query doesn't return unnecessary data, and results are held in memory before being pushed into EFC. Initially you may have to pull a large number of days so the system catches up, but the sweet spot is to pull one day's worth of data every day.
# Data Map:

## tbl_ProtocolCommands

- ProtocolCommandID
- Time_stamp
- RemoteIP
- RemotePort
- LocalIP
- LocalPort
- Protocol
- SiteName
- Command
- CommandParameters
- FileName
- VirtualFolderName
- PhysicalFolderName
- IsInternal
- FileSize
- TransferTime
- BytesTransferred
- ResultID
- TransactionID
- Description
- Actor

## tbl_Transactions

- TransactionID
- ParentTransactionID
- TransactionObject
- TransactionGUID
- NodeName
- time_stamp

## Build from source

You will need to install git and clone the repository:

`git clone https://git.jonb.io/jblu/Inex.git`

Alternatively, you can download the files from [the releases page](https://git.jonb.io/jblu/Inex/releases).

Then follow the OS-specific instructions. These are taken directly from the automation steps used to build the executables found on the releases page.

##### Windows

You can find requirements.txt in the repository.

`python -m pip install -r requirements.txt`

`pyinstaller --noconfirm --onefile --console path/to/inex.py`

##### Linux

Update your Linux distribution. The commands below are Ubuntu specific; check the package management tool of your distribution.

`apt-get update`

pyodbc cannot be installed without unixodbc:

`apt-get install unixodbc -y`

You can find requirements.txt in the repository.

`pip install -r requirements.txt`

`pyinstaller --noconfirm --onefile --console path/to/inex.py`

## Setup

You will need a *config.toml* file in the same directory as *inex.py* or the inex executable. It's recommended to rename the existing config.toml.example file to save some time and avoid omitting any settings.

> If the application doesn't find *config.toml* at the time of execution, it will not continue.

#### Config.toml

| Table | Purpose |
|-|-|
| fortraPlatform | Fortra-specific data |
| database | MSSQL configuration |
| immutables | Data that must be included but is not provided by the source database |
| output | If and where to write files |
| logging | Set if, where, and at what level to log |

The effect of the following settings is not obvious.

> Note the #comments
```toml
[fortraPlatform]
selectedPlatform = "dev" # Selects which environment the data is pushed to. The tenant_id and secret must be set manually.

[database]
overrideEmbeddedquery = true # Choose whether the embedded query should be overridden by the query in config.toml.
daysTopull = 30 # Only applies to the embedded query; it does not affect a query provided in config.toml.
driver = "ODBC Driver 18 for SQL Server" # Select which ODBC driver should be used. This one is recommended.

[output]
pushToplatform = false # If true, send data to the Fortra platform. Setting this to false is useful for dry runs.
dumpTojson = true # If true, write data to a json file.
token = "./.token" # Recommended to leave as-is.
```
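A minimal sketch of how the application might read this file at startup; this is a hedged illustration assuming Python 3.11+'s `tomllib`, and the project may load it differently:

```python
import sys
import tomllib  # Python 3.11+; older interpreters can use the third-party "tomli" package


def load_config(path="./config.toml"):
    """Read config.toml from the working directory and stop if it is missing."""
    try:
        with open(path, "rb") as f:  # tomllib requires binary mode
            return tomllib.load(f)
    except FileNotFoundError:
        print("No config.toml. Please use the example file and configure appropriately")
        sys.exit(1)


config = load_config()
print(config["output"]["dumpTojson"], config["output"]["pushToplatform"])
```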
## Usage

### Windows

##### Run inex.py

1. Download [source](https://git.jonb.io/jblu/Inex/releases)
2. `cd C:\path\to\Inex`
3. `python inex.py`

OR

##### Run inex.exe

1. Download [the executable from the releases page](https://git.jonb.io/jblu/Inex/releases)
2. `cd C:\path\to\Inex.exe`
3. In CMD: `.\inex.exe`

### Linux

##### Run inex.py

1. Download [source](https://git.jonb.io/jblu/Inex/releases)
2. `cd /path/to/Inex`
3. `python ./inex.py`

OR

1. Download [the executable from the releases page](https://git.jonb.io/jblu/Inex/releases)
2. `cd /path/to/Inex`
3. Run `./inex`
@@ -1,6 +1,33 @@
[fortraPlatform]
selectedPlatform = "dev"

[output]
pushToplatform = false
dumpTojson = true
filename ="./data.json"
token = "./.token"

[logging]
useLog = true
logLevel = "debug"
logPath = "./inex.log"

[database]
overrideEmbeddedquery = false
daysTopull = 20
driver = "ODBC Driver 18 for SQL Server"
server = "192.168.x.x"
database = "EFTDB"
user = "a"
password = "a"
query = """DECLARE @stopTime DATETIME2
SET @stopTime=DATEADD(DAY, -30, GETDATE())
SELECT p.ProtocolCommandID, t.Time_stamp, p.RemoteIP, p.RemotePort, p.LocalIP, p.LocalPort, p.Protocol, p.SiteName, p.Command, p.FileName, p.PhysicalFolderName, p.VirtualFolderName, p.FileSize, p.TransferTime, p.BytesTransferred, p.Description, p.ResultID, t.TransactionID, p.Actor, t.TransactionObject, t.NodeName, t.TransactionGUID, a.Protocol user_type
FROM tbl_Transactions t
Full JOIN tbl_ProtocolCommands p ON(t.TransactionID=p.TransactionID)
Full JOIN tbl_Authentications a ON(t.TransactionID=a.TransactionID)
WHERE p.Time_stamp>@stopTime AND p.Command IS NOT NULL"""

[fortraPlatform.dev]
idp = "https://foundation.foundation-dev.cloudops.fortradev.com/idp/realms/products/protocol/openid-connect/token"
efc_url = "https://efc.efc-dev.cloudops.fortradev.com"
@@ -22,34 +49,6 @@ tenant_id = ""
client_id = "eft-event-generator-confidential"
secret = ""

[database]
overrideEmbeddedquery = false
driver = "ODBC Driver 18 for SQL Server"
server = "192.168.x.x"
database = "EFTDB"
user = "a"
password = "a"
query = """DECLARE @stopTime DATETIME2
SET @stopTime=DATEADD(DAY, -30, GETDATE())
SELECT p.ProtocolCommandID, t.Time_stamp, p.RemoteIP, p.RemotePort, p.LocalIP, p.LocalPort, p.Protocol, p.SiteName, p.Command, p.FileName, p.VirtualFolderName, p.FileSize, p.TransferTime, p.BytesTransferred, p.Description, p.ResultID, t.TransactionID, p.Actor, t.TransactionObject, t.NodeName, t.TransactionGUID, a.Protocol user_type
FROM tbl_Transactions t
Full JOIN tbl_ProtocolCommands p ON(t.TransactionID=p.TransactionID)
Full JOIN tbl_Authentications a ON(t.TransactionID=a.TransactionID)
WHERE p.Time_stamp>@stopTime AND p.Command IS NOT NULL"""

[immutables]
prd_instance_id = 1
product_guid = "asdf"
product_name = "EFT"
product_version ="8.1.0.9"

[output]
pushToplatform = true
dumpTojson = true
filename ="./data.json"
token = "./.token"

[logging]
use_log = true
logLevel = "debug"
logPath = "./inex.log"
[immutables]
product_name = "GlobalScape EFT"
prd_ext_tenant_name = "GlobalScape EFT"
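The [database] table in this example file (presumably config.toml.example) supplies the ODBC connection parameters. A hedged sketch of how such settings typically map to a pyodbc connection; the repository's own connectDatabase may build the string differently:

```python
import pyodbc  # needs the unixodbc system package on Linux, per the build notes above


def connect(cfg):
    """Build an ODBC connection from the [database] table of config.toml."""
    conn = pyodbc.connect(
        f'DRIVER={{{cfg["driver"]}}};'
        f'SERVER={cfg["server"]};DATABASE={cfg["database"]};'
        f'UID={cfg["user"]};PWD={cfg["password"]};'
        "TrustServerCertificate=yes;"  # assumption: lab setups often use self-signed certs
    )
    return conn.cursor()
```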
inex.py — 22 changed lines
@@ -43,17 +43,19 @@ class Inex:
self.useLog = self.config["logging"]["useLog"]
self.logPath = self.config["logging"]["logPath"]
self.logLevel = self.config["logging"]["logLevel"]
self.prdInstanceID = self.config["immutables"]["prd_instance_id"]
self.productGUID = self.config["immutables"]["product_guid"]
self.prdExttenantname = self.config["immutables"]["prd_ext_tenant_name"]
self.productName = self.config["immutables"]["product_name"]
self.productVersion = self.config["immutables"]["product_version"]
self.tokenFilepath = self.config["output"]["token"]
self.selectedPlatform = self.config["fortraPlatform"]["selectedPlatform"]
self.writeJsonfile = self.config["output"]["dumpTojson"]
self.pushToplatform = self.config["output"]["pushToplatform"]
self.queryOverride = self.config["database"]["overrideEmbeddedquery"]
except:
    print("No config.toml. Please use example file and configure appropriately")
self.queryDaystopull = self.config["database"]["daysTopull"]
except Exception as e:
    print("No config.toml or possibly missing settings in the file. Please use config.toml.example file and configure appropriately")
    self.il.error(e)
    print(e)
    exit(1)

if "dev" in self.selectedPlatform.lower():
@@ -70,13 +72,13 @@ class Inex:
self.cursor = self.ic.inexSql.connectDatabase(self, self.db, self.dbDriver, self.dbServer, self.dbDatabase, self.dbUser, self.dbPassword)

self.data = self.ic.inexSql.databaseQuery(self, self.cursor, self.sq.sqlQuerymodel.queryData(self.queryOverride, self.dbQuery))
self.data = self.ic.inexSql.databaseQuery(self, self.cursor, self.sq.sqlQuerymodel.queryData(self.queryOverride, self.dbQuery, self.queryDaystopull))

self.modifiedData = processData(self.data, dataTemplate, prd_ext_tenant_name=self.prdExttenantname, product_name=self.productName,\
    prd_ext_tenant_id=self.platformConfig["tenant_id"])

self.modifiedData = processData(self.data, dataTemplate, prd_instance_id=self.prdInstanceID,\
    product_guid=self.productGUID, product_name=self.productName, product_version=self.productVersion)

if self.pushToplatform:
    inexConnect.fortraEFC.pushPayload(self)
    inexConnect.fortraEFC.__init__(self)

# TODO: move this to its own function
if self.useLog:
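The constructor changes above wire the whole run together: connect, query, transform, then push and/or dump. A hedged, condensed sketch of that flow; the callables here are illustrative stand-ins for inexSql.connectDatabase, sqlQuerymodel.queryData, processData, and fortraEFC, not the project's actual API:

```python
# Illustrative only: condensed view of the flow in Inex.__init__.
def run(config, connect, run_query, process_data, push_payload, dump_json):
    db = config["database"]
    cursor = connect(db["driver"], db["server"], db["database"], db["user"], db["password"])
    rows = run_query(cursor, db["overrideEmbeddedquery"], db["query"], db["daysTopull"])
    events = process_data(rows)  # map SQL rows into EFC event templates
    if config["output"]["pushToplatform"]:
        push_payload(events)      # POST to the Event Fusion Center
    if config["output"]["dumpTojson"]:
        dump_json(events, config["output"]["filename"])
```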
@@ -43,32 +43,46 @@ class inexSql:
        return r

class fortraEFC:
    def getToken(self):
        self.tokenData = self.r.post(self.platformConfig["idp"], data={"grant_type":"client_credentials",\
            "client_id": self.platformConfig["client_id"],\
            "client_secret": self.platformConfig["secret"],})
    def writeToken(self):
        fortraEFC.getToken(self)
        with open(self.tokenFilepath, "w") as f:
            self.j.dump(self.tokenData.json(), f, indent = 2)
    def __init__(self):
        # Check if .token file is present
        if fortraEFC.readToken(self) == 1:
            # Get fresh token. First run.
            fortraEFC.getToken(self)
            fortraEFC.writeToken(self)
        # Push data with token
        self.pushPayloadresponse = fortraEFC.pushPayload(self)
        if self.pushPayloadresponse == 401:
            fortraEFC.getToken(self)
            fortraEFC.writeToken(self)
            fortraEFC.pushPayload(self)

    def readToken(self):
        if self.os.path.exists(self.tokenFilepath):
            with open(self.tokenFilepath, 'rb') as t:
                self.tokenData = self.j.load(t)
            # print(self.tokenData["access_token"])
            self.il.debug(f'readToken {self.tokenData["access_token"]}')
            return 0
        else:
            fortraEFC.writeToken(self)
            return 1

    def getToken(self):
        self.tokenData = self.r.post(self.platformConfig["idp"], data={"grant_type":"client_credentials",\
            "client_id": self.platformConfig["client_id"],\
            "client_secret": self.platformConfig["secret"],})
        self.tokenData = self.tokenData.json()
        self.il.debug(f'getToken {self.tokenData["access_token"]}')

    def writeToken(self):
        fortraEFC.getToken(self)
        with open(self.tokenFilepath, "w") as f:
            self.j.dump(self.tokenData, f, indent = 2)
        self.il.debug(f'writeToken {self.tokenData["access_token"]}')

    def pushPayload(self):
        fortraEFC.readToken(self)
        try:
            url = f'{self.platformConfig["efc_url"]}/api/v1/unity/data/{self.platformConfig["tenant_id"]}/machine_event'
            pushPayloadResponse = self.r.post(url, headers={'Authorization': f'bearer {self.tokenData["access_token"]}'},\
                json=self.j.dumps(self.modifiedData, indent = 2, cls=self.e))
            return pushPayloadResponse.status_code
        except self.r.exceptions.HTTPError as errh:
            print ("Http Error:",errh)
            if "401" in errh:
                fortraEFC.writeToken(self)
                fortraEFC.pushPayload(self)
        self.il.debug(f'pushPayload {self.tokenData["access_token"]}')
        url = f'{self.platformConfig["efc_url"]}/api/v1/unity/data/{self.platformConfig["tenant_id"]}/machine_event'
        pushPayloadResponse = self.r.post(url, headers={'Authorization': f'Bearer {self.tokenData["access_token"]}'},\
            data=self.j.dumps(self.modifiedData, cls=self.e))
        self.il.debug(pushPayloadResponse.status_code)
        self.il.debug(pushPayloadResponse.text)
        return pushPayloadResponse.status_code
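The fortraEFC changes above amount to a small token cache: reuse ./.token if present, fetch and persist a fresh client-credentials token otherwise, and retry once after a 401. A hedged standalone sketch of that pattern with requests; endpoint and field names follow the config shown earlier, and this is not the module's exact code:

```python
import json
import os
import requests


def get_token(cfg):
    # Client-credentials grant against the configured identity provider.
    resp = requests.post(cfg["idp"], data={
        "grant_type": "client_credentials",
        "client_id": cfg["client_id"],
        "client_secret": cfg["secret"],
    })
    return resp.json()


def push_events(cfg, token_path, events):
    # Reuse a cached token if one exists, otherwise fetch and cache a new one.
    if os.path.exists(token_path):
        with open(token_path) as f:
            token = json.load(f)
    else:
        token = get_token(cfg)
        with open(token_path, "w") as f:
            json.dump(token, f, indent=2)

    url = f'{cfg["efc_url"]}/api/v1/unity/data/{cfg["tenant_id"]}/machine_event'
    headers = {"Authorization": f'Bearer {token["access_token"]}'}
    r = requests.post(url, headers=headers, data=json.dumps(events))
    if r.status_code == 401:  # cached token expired: refresh once and retry
        token = get_token(cfg)
        with open(token_path, "w") as f:
            json.dump(token, f, indent=2)
        headers = {"Authorization": f'Bearer {token["access_token"]}'}
        r = requests.post(url, headers=headers, data=json.dumps(events))
    return r.status_code
```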
inexDataModel.py — 110 changed lines
@@ -1,6 +1,6 @@
def dataTemplate(transactionType,**kwargs):
    uploadDownload = {
        "bytes" : kwargs.get('bytes_out'),
    upload = {
        "bytes" : kwargs.get('bytes'),
        "dst_endpoint": {
            "port": kwargs.get('dst_endpoint_port'),
            "ip": kwargs.get('dst_endpoint_ip'),
@@ -8,16 +8,17 @@ def dataTemplate(transactionType,**kwargs):
        },
        "duration": kwargs.get('duration'),
        "file": {
            "created_time": kwargs.get('time'),
            "created_time": kwargs.get('file_created_time'),
            "uid": kwargs.get('file_uid'),
            "size": kwargs.get('file_size'),
            "name": kwargs.get('file_name'),
            "path": kwargs.get('file_path')
        },
        "guid": kwargs.get('guid'),
        "guid": kwargs.get('file_uid'),
        "node_name": kwargs.get('node_name'),
        "prd_ext_tenant_id": kwargs.get('tenant'),
        "product_name": "GlobalScape EFT",
        "prd_ext_tenant_name": "GlobalScape EFT",
        "prd_ext_tenant_id": kwargs.get('prd_ext_tenant_id'),
        "product_name": kwargs.get('product_name'),
        "prd_ext_tenant_name": kwargs.get('prd_ext_tenant_name'),
        "classifications": [{
            "ref_id": f"globalscape:{kwargs.get('guid')}",
            "time": kwargs.get('time'),
@@ -31,15 +32,61 @@ def dataTemplate(transactionType,**kwargs):
            "ip": kwargs.get('src_endpoint_ip'),
            "type": kwargs.get('src_endpoint_type')
        },
        "tenant": kwargs.get('tenant'),
        "tenant": kwargs.get('prd_ext_tenant_id'),
        "tenant_name":"GlobalScape",
        "time": kwargs.get('time'),
        "status_code": kwargs.get('status_code'),
        "status_detail": kwargs.get('description'),
        "status_detail": kwargs.get('status_detail'),
        "user": {
            "home_directory": kwargs.get('user_home_directory'),
            "uuid": kwargs.get('guid'),
            "uid": kwargs.get('uid'),
            "uid": kwargs.get('user_uid'),
            "type": kwargs.get('user_type'),
            "name": kwargs.get('user_name')
        },
        "utype": kwargs.get('utype')
    }
    download = {
        "bytes" : kwargs.get('bytes'),
        "dst_endpoint": {
            "port": kwargs.get('dst_endpoint_port'),
            "ip": kwargs.get('dst_endpoint_ip'),
            "type": kwargs.get('dst_endpoint_type')
        },
        "duration": kwargs.get('duration'),
        "file": {
            "uid": kwargs.get('file_uid'),
            "size": kwargs.get('file_size'),
            "name": kwargs.get('file_name'),
            "path": kwargs.get('file_path')
        },
        "guid": kwargs.get('file_uid'),
        "node_name": kwargs.get('node_name'),
        "prd_ext_tenant_id": kwargs.get('prd_ext_tenant_id'),
        "product_name": kwargs.get('product_name'),
        "prd_ext_tenant_name": kwargs.get('prd_ext_tenant_name'),
        "classifications": [{
            "ref_id": f"globalscape:{kwargs.get('guid')}",
            "time": kwargs.get('time'),
        }],
        "session": {
            "created_time": kwargs.get('time'),
            "uid": kwargs.get('session_uid')
        },
        "src_endpoint": {
            "port": kwargs.get('src_endpoint_port'),
            "ip": kwargs.get('src_endpoint_ip'),
            "type": kwargs.get('src_endpoint_type')
        },
        "tenant": kwargs.get('prd_ext_tenant_id'),
        "tenant_name":"GlobalScape",
        "time": kwargs.get('time'),
        "status_code": kwargs.get('status_code'),
        "status_detail": kwargs.get('status_detail'),
        "user": {
            "home_directory": kwargs.get('user_home_directory'),
            "uuid": kwargs.get('guid'),
            "uid": kwargs.get('user_uid'),
            "type": kwargs.get('user_type'),
            "name": kwargs.get('user_name')
        },
@@ -50,16 +97,18 @@ def dataTemplate(transactionType,**kwargs):
        "file": {
            "size": kwargs.get('file_size'),
            "name": kwargs.get('file_name'),
            "path": kwargs.get('file_path')
            "path": kwargs.get('file_path'),
            "uid": kwargs.get('file_uid'),
        },
        "guid": kwargs.get('guid'),
        "guid": f'deleted:{kwargs.get("guid")}',
        "node_name": kwargs.get('node_name'),
        "classifications": [{
            "ref_id": f"globalscape:{kwargs.get('guid')}",
            "time": kwargs.get('time'),
        }],
        "prd_ext_tenant_name": "Globalscape EFT",
        "prd_ext_tenant_id": kwargs.get('tenant'),
        "product_name": "Globalscape EFT",
        "prd_ext_tenant_name": kwargs.get("prd_ext_tenant_name"),
        "prd_ext_tenant_id": kwargs.get('prd_ext_tenant_id'),
        "product_name": kwargs.get("product_name"),
        "session": {
            "created_time": kwargs.get('time'),
            "uid": kwargs.get('session_uid')
@@ -69,16 +118,20 @@ def dataTemplate(transactionType,**kwargs):
            "ip": kwargs.get('src_endpoint_ip'),
            "type": kwargs.get('src_endpoint_type')
        },
        "tenant": kwargs.get('prd_ext_tenant_id'),
        "tenant_name":"GlobalScape",
        "dst_endpoint": {
            "port": kwargs.get('dst_endpoint_port'),
            "ip": kwargs.get('dst_endpoint_ip'),
            "type": kwargs.get('dst_endpoint_type')
        },
        "time": kwargs.get('time'),
        "status_code": kwargs.get('status_code'),
        "status_detail": kwargs.get('status_detail'),
        "user": {
            "home_directory": kwargs.get('user_home_directory'),
            "uuid": kwargs.get('guid'),
            "uid": kwargs.get('uid'),
            "uuid": kwargs.get('user_session_uid'),
            "uid": kwargs.get('user_uid'),
            "type": kwargs.get('user_type'),
            "name": kwargs.get('user_name')
        },
@@ -96,9 +149,14 @@ def dataTemplate(transactionType,**kwargs):
            "type": kwargs.get('dst_endpoint_type')
        },
        "guid": kwargs.get('guid'),
        "prd_ext_tenant_id": kwargs.get('tenant'),
        "product_name": "GlobalScape EFT",
        "prd_ext_tenant_name": "GlobalScape EFT",
        "node_name": kwargs.get('node_name'),
        "tenant": kwargs.get('prd_ext_tenant_id'),
        "tenant_name":"GlobalScape",
        "prd_ext_tenant_id": kwargs.get('prd_ext_tenant_id'),
        "product_name": kwargs.get("product_name"),
        "prd_ext_tenant_name": kwargs.get('prd_ext_tenant_name'),
        "status_code": kwargs.get('status_code'),
        "status_detail": kwargs.get('status_detail'),
        "src_endpoint": {
            "port": kwargs.get('src_endpoint_port'),
            "ip": kwargs.get('src_endpoint_ip'),
@@ -107,18 +165,22 @@ def dataTemplate(transactionType,**kwargs):
        "time": kwargs.get('time'),
        "user": {
            "home_directory": kwargs.get('user_home_directory'),
            "uuid": kwargs.get('guid'),
            "uid": kwargs.get('uid'),
            "uuid": kwargs.get('user_session_uid'),
            "uid": kwargs.get('user_uid'),
            "type": kwargs.get('user_type'),
            "name": kwargs.get('user_name')
        },
        "session": {
            "created_time": kwargs.get('time'),
            "uid": kwargs.get('session_uid')
        },
        "utype": kwargs.get('utype')
    }

    if transactionType == "file_uploaded":
        template = uploadDownload
        template = upload
    if transactionType == "file_downloaded":
        template = uploadDownload
        template = download
    if transactionType == "file_deleted":
        template = fileDeleted
    if transactionType == "user_logged_on":
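dataTemplate now keeps a separate template dict per transaction type and fills it from keyword arguments, presumably returning the selected dict. A hedged usage sketch with illustrative values:

```python
# Hypothetical call: build a "file_uploaded" event from a handful of row values,
# assuming dataTemplate returns the selected template dict.
event = dataTemplate(
    "file_uploaded",
    bytes=2048,
    file_name="report.csv",
    file_size=2048,
    time="2024-01-01T00:00:00",
    user_name="Actor",
    product_name="EFT",
    prd_ext_tenant_name="GlobalScape EFT",
)
# Keys that were not passed simply come back as None thanks to kwargs.get().
print(event["file"]["name"], event["user"]["name"])
```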
@@ -1,56 +1,72 @@
def processData(data, template, **kwargs):
    """Translates data from sql query to the appropriate place in the respective template.
    Accepts data, which is the sql query output, the template function, and finally
    additional data to insert into the template."""
    processedData = []
    transactionLoginid = []

    for row in data:
        # print(f'Row: {row}')
        # must set variables for the different templates and do logic based on that. Do not call identifyUtype many times
        identifyUtypecommand = identifyUtype(row.get('Command'))

        if identifyUtypecommand == "other":
            continue

        if row.get('Command') == None:
            continue

        userType = identifyUserType(row.get('user_type'))
        try:
            processedData.append(template(identifyUtype(row.get('Command')),\
                prd_ext_tenant_id='',\
            processedData.append(template(identifyUtypecommand,\
                prd_ext_tenant_name=kwargs.get('prd_ext_tenant_name'),\
                user_uid=row.get('TransactionID'),\
                status_detail=row.get('Description'),\
                prd_ext_tenant_id=kwargs.get('prd_ext_tenant_id'),\
                status_code=row.get('ResultID'),\
                file_created_time=row.get('Time_stamp'),\
                file_size=row.get('FileSize'),\
                file_uid=row.get('ProtocolCommandID'),\
                file_path=row.get('PhysicalFolderName'),\
                file_virtual_path=row.get('VirtualFolderName'),\
                file_name=row.get('FileName'),\
                guid=row.get('TransactionGUID'),\
                ref_id=row.get('ProtocolCommandID'),\
                prd_instance_id=kwargs.get('prd_instance_id'),\
                product_guid=kwargs.get('product_guid'),\
                product_name=kwargs.get('product_name'),\
                product_version=kwargs.get('product_version'),\
                node_name=row.get('NodeName'),\
                session_uid=row.get('TransactionID'),\
                src_endpoint_type=row.get('Protocol'),\
                src_endpoint_port=row.get('RemotePort'),\
                src_endpoint_ip=row.get('RemoteIP'),\
                dst_endpoint_port=row.get('LocalPort'),\
                dst_endpoint_ip=row.get('LocalIP'),\
                dst_endpoint_type=row.get('Protocol'),\
                session_uid=row.get('TransactionID'),\
                bytes_out=row.get('BytesTransferred'),\
                duration=row.get('TransferTime'),\
                user_session_uid=row.get('TransactionID'),\
                bytes=row.get('BytesTransferred'),\
                time=row.get('Time_stamp'),\
                user_type=identifyUserType(row.get('user_type')),\
                user_domain=row.get('SiteName'),\
                duration=row.get('TransferTime'),\
                user_type=userType,\
                user_name=row.get('Actor'),\
                user_home_directory=row.get('VirtualFolderName'),\
                description=row.get('Description'),\
                utype=identifyUtype(row.get('Command'))))
                utype=identifyUtypecommand))
        except UnboundLocalError:
            print(f'Problem row GUID:{row.get("TransactionGUID")} ::: TransactionObject:{row.get("TransactionObject")} Command: {row.get("Command")}')
            continue

        identifyUtypetransactionObject = identifyUtype(row.get('TransactionObject'))

        if identifyUtypetransactionObject == "other":
            continue

        if row.get('TransactionGUID') not in transactionLoginid:
            try:
                processedData.append(template(identifyUtype(row.get('TransactionObject')),\
                processedData.append(template(identifyUtypetransactionObject,\
                    prd_ext_tenant_id=kwargs.get('prd_ext_tenant_id'),\
                    prd_ext_tenant_name=kwargs.get('prd_ext_tenant_name'),\
                    status_detail=row.get('Description'),\
                    guid=row.get('TransactionGUID'),\
                    status_code=row.get('ResultID'),\
                    node_name=row.get('NodeName'),\
                    prd_instance_id=kwargs.get('prd_instance_id'),\
                    product_guid=kwargs.get('product_guid'),\
                    product_name=kwargs.get('product_name'),\
                    product_version=kwargs.get('product_version'),\
                    src_endpoint_type=row.get('Protocol'),\
                    src_endpoint_port=row.get('RemotePort'),\
                    src_endpoint_ip=row.get('RemoteIP'),\
@@ -58,14 +74,14 @@ def processData(data, template, **kwargs):
                    dst_endpoint_ip=row.get('LocalIP'),\
                    dst_endpoint_type=row.get('Protocol'),\
                    session_uid=row.get('TransactionID'),\
                    bytes_out=row.get('BytesTransferred'),\
                    transfer_time=row.get('TransferTime'),\
                    time=row.get('Time_stamp'),\
                    user_type=identifyUserType(row.get('user_type')),\
                    user_domain=row.get('SiteName'),\
                    user_session_uid=row.get('TransactionID'),\
                    user_uid=row.get('TransactionID'),\
                    user_type=userType,\
                    user_name=row.get('Actor'),\
                    user_home_directory=row.get('VirtualFolderName'),\
                    utype=identifyUtype(row.get('TransactionObject'))\
                    user_home_directory=row.get('PhysicalFolderName'),\
                    utype=identifyUtypetransactionObject\
                    ))
                transactionLoginid.append(row.get('TransactionGUID'))
            except UnboundLocalError:
@@ -75,6 +91,7 @@ def processData(data, template, **kwargs):
    return processedData

def identifyUserType(obj):
    """Check string if it has Admin-> return Administrator else return User."""
    if obj:
        if "Admin" in obj:
            return "Administrator"
@@ -82,7 +99,10 @@ def identifyUserType(obj):
            return "User"
    else:
        return None

def identifyUtype(obj):
    """Process Type of transaction based on string that passed in.
    Return transaction type."""
    user_logged_on = ['AUTH']
    file_deleted = ["dele"]
    file_uploaded = ["created"]
@@ -90,11 +110,11 @@ def identifyUtype(obj):

    if obj in user_logged_on:
        return "user_logged_on"
    if obj in file_deleted:
    elif obj in file_deleted:
        return "file_deleted"
    if obj in file_uploaded:
    elif obj in file_uploaded:
        return "file_uploaded"
    if obj in file_downloaded:
    elif obj in file_downloaded:
        return "file_downloaded"
    else:
        return "other"
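identifyUtype maps raw EFT command strings to event types, and identifyUserType distinguishes admins from users. A small hedged illustration of the mapping; the command strings 'AUTH', 'dele', 'created', and 'sent' come from the lists above, while 'MKD' and 'WebUser' are made-up inputs:

```python
# Assuming the two helpers above are importable, this is what the mapping yields.
for command in ["AUTH", "dele", "created", "sent", "MKD"]:
    print(command, "->", identifyUtype(command))
# AUTH -> user_logged_on, dele -> file_deleted, created -> file_uploaded,
# sent -> file_downloaded, anything else -> other

print(identifyUserType("Admin"), identifyUserType("WebUser"), identifyUserType(None))
# Administrator, User, None
```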
@@ -3,6 +3,8 @@ import decimal
import datetime

class Encoder(json.JSONEncoder):
    """Encoder uses json.JSONEncoder and checks for instances of decimal and datetime.
    Changes decimal.Decimal to int and datetime.datetime to a unix timestamp with milliseconds."""
    def default(self, o):
        if isinstance(o, decimal.Decimal):
            return int(o)
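The hunk only shows the Decimal branch. Based on the docstring, a hedged sketch of the full encoder and how it is used with json.dumps; the datetime branch here is an assumption drawn from the docstring, not the repository's exact code:

```python
import datetime
import decimal
import json


class Encoder(json.JSONEncoder):
    """Serialize the SQL row types that the stock JSONEncoder rejects."""
    def default(self, o):
        if isinstance(o, decimal.Decimal):
            return int(o)
        if isinstance(o, datetime.datetime):
            # Assumption from the docstring: unix timestamp in milliseconds.
            return int(o.timestamp() * 1000)
        return super().default(o)


row = {"FileSize": decimal.Decimal("2048"), "Time_stamp": datetime.datetime(2024, 1, 1)}
print(json.dumps(row, cls=Encoder))  # FileSize becomes an int, Time_stamp a millisecond timestamp
```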
@@ -1,11 +1,11 @@
class sqlQuerymodel:
    def queryData(overRideflag, configQuery):
    def queryData(overRideflag, configQuery, daysTopull):
        """Embedded query data"""
        q ="""DECLARE @stopTime DATETIME2
SET @stopTime=DATEADD(DAY, -30, GETDATE())
SELECT p.ProtocolCommandID, t.Time_stamp, p.RemoteIP, p.RemotePort, p.LocalIP, p.LocalPort, p.Protocol, p.SiteName, p.Command, p.FileName, p.VirtualFolderName, p.FileSize, p.TransferTime, p.BytesTransferred, p.Description, p.ResultID, t.TransactionID, p.Actor, t.TransactionObject, t.NodeName, t.TransactionGUID, a.Protocol user_type
SELECT p.ProtocolCommandID, t.Time_stamp, p.RemoteIP, p.RemotePort, p.LocalIP, p.LocalPort, p.Protocol, p.SiteName, p.Command, p.FileName, p.PhysicalFolderName, p.VirtualFolderName, p.FileSize, p.TransferTime, p.BytesTransferred, p.Description, p.ResultID, t.TransactionID, p.Actor, t.TransactionObject, t.NodeName, t.TransactionGUID, a.Protocol user_type
FROM tbl_Transactions t
Full JOIN tbl_ProtocolCommands p ON(t.TransactionID=p.TransactionID)
Full join tbl_Authentications a ON(t.TransactionID=a.TransactionID)
WHERE p.Time_stamp>@stopTime AND p.Command IS NOT NULL"""
WHERE p.Time_stamp>@stopTime AND p.Command IS NOT NULL""".replace("30", str(daysTopull))
        return configQuery if overRideflag else q
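For clarity, the new daysTopull argument works by string-substituting the literal "30" in the embedded query, while a configured query (when overrideEmbeddedquery is true) is returned untouched. A hedged illustration of the resulting behavior:

```python
# Illustrative calls, assuming sqlQuerymodel from the diff above is importable.
embedded = sqlQuerymodel.queryData(False, "SELECT 1", 7)
print("DATEADD(DAY, -7, GETDATE())" in embedded)  # True: the -30 lookback became -7

override = sqlQuerymodel.queryData(True, "SELECT 1", 7)
print(override)  # "SELECT 1" - daysTopull has no effect when the config query is used
```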
test.py — 23 changed lines
@@ -18,4 +18,25 @@ def builddict(keys,*args,**kwargs):
testfolder = '/Usr/a/asdf/asf'
user = 'a'

print(testfolder.split(f"/{user}/"))
def identifyUtype(obj):
    """Process Type of transaction based on string that passed in.
    Return transaction type."""
    user_logged_on = ['AUTH']
    file_deleted = ["dele"]
    file_uploaded = ["created"]
    file_downloaded = ["sent"]

    if obj in user_logged_on:
        return "user_logged_on"
    if obj in file_deleted:
        return "file_deleted"
    if obj in file_uploaded:
        return "file_uploaded"
    if obj in file_downloaded:
        return "file_downloaded"
    else:
        return "other"

transactionType = 'file_uploaded'

print(transactionType.split("_")[1].rstrip("d").rstrip("e"))
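A quick note on that last print: a hedged check of what the split/rstrip trick yields for each transaction type shows it only produces a clean verb for the upload and download cases:

```python
# Reproducing the expression from test.py for each event type.
for t in ["file_uploaded", "file_downloaded", "file_deleted", "user_logged_on"]:
    print(t, "->", t.split("_")[1].rstrip("d").rstrip("e"))
# file_uploaded -> upload, file_downloaded -> download,
# file_deleted -> delet, user_logged_on -> logg
```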