Compare commits


53 Commits
Basic ... main

Author SHA1 Message Date
4d9e136166 Updated bug
All checks were successful
Build / build-windows-binary (push) Successful in 30s
Build / build-linux-binary (push) Successful in 48s
2024-08-26 14:25:47 -05:00
603bc13683 feat(doc strings) #18 Add documentation to all functions in code
All checks were successful
Build / build-windows-binary (push) Successful in 33s
Build / build-linux-binary (push) Successful in 1m52s
2024-08-26 14:21:29 -05:00
394c276131 fix(home directory) #15 home directory not correct 2024-08-26 13:50:35 -05:00
cb96b00e8d feat(config) #10 add config.toml to releases 2024-08-26 13:25:20 -05:00
abd669c3bb Merge pull request 'fix(authentication): BUG #20 - Authentication isnt working with existing logic & updated model' (#21) from fix-first-time-auth-error into main
All checks were successful
Build / build-windows-binary (push) Successful in 34s
Build / build-linux-binary (push) Successful in 1m23s
Reviewed-on: #21
2024-08-22 14:07:43 -05:00
487d883297 fix(authentication): BUG #20 - Authentication isnt working with existing logic & updated model 2024-08-22 14:04:00 -05:00
69b520e97c Updating function docs 2024-08-15 13:23:25 -05:00
82c8a5bb4a Merge pull request 'dev-add-daysTopull' (#19) from dev-add-daysTopull into main
Reviewed-on: #19
2024-08-14 15:21:00 -05:00
8e8312e853 Updated example 2024-08-14 15:19:37 -05:00
267616f0e4 Updated daysTopull value 2024-08-14 15:05:27 -05:00
de3a3fd03d This addresses #17 2024-08-14 15:00:53 -05:00
8dc6efe40e Updated documentation 2024-08-11 01:31:34 -05:00
d978ad57fe Updating documentation 2024-08-11 00:48:34 -05:00
5d6427698d Merge pull request 'dev-make-query-performant' (#16) from dev-make-query-performant into main
All checks were successful
Build / build-windows-binary (push) Successful in 34s
Build / build-linux-binary (push) Successful in 1m13s
Reviewed-on: #16
2024-08-10 02:22:13 -05:00
8384d714f9 fix: #14 now correctly handles 'other' transactions 2024-08-10 02:20:26 -05:00
42880bb334 made query more performance and allowed embedded override 2024-08-10 00:49:53 -05:00
494edd98ee Merge pull request 'dev-update-data-model' (#13) from dev-update-data-model into main
Reviewed-on: #13
2024-08-10 00:03:54 -05:00
ebe10f80ba Updated data models for download/upload, delete and login 2024-08-10 00:01:58 -05:00
7e5a8a2603 Updated data model 2024-08-06 18:29:55 -05:00
5c37b2fca2 Updated build.yaml
All checks were successful
Build / build-windows-binary (push) Successful in 30s
Build / build-linux-binary (push) Successful in 40s
2024-08-05 11:30:43 -05:00
cd9c6d535e Merge pull request 'dev-generalize-db-queries' (#11) from dev-generalize-db-queries into main
All checks were successful
Build / build-windows-binary (push) Successful in 33s
Build / build-linux-binary (push) Successful in 1m9s
Reviewed-on: #11
2024-08-05 11:27:02 -05:00
643aaa946e generalized db query and embedded it.
All checks were successful
Build / build-windows-binary (push) Successful in 33s
Build / build-linux-binary (push) Successful in 1m13s
2024-08-02 20:57:20 -05:00
cc8adbebad embedded queries
All checks were successful
Build / build-windows-binary (push) Successful in 29s
Build / build-linux-binary (push) Successful in 1m10s
2024-08-01 11:02:37 -05:00
c7b4e8ef85 Merge branch 'main' of https://git.jonb.io/jblu/Inex
All checks were successful
Build / build-windows-binary (push) Successful in 29s
Build / build-linux-binary (push) Successful in 37s
2024-07-31 20:58:32 -05:00
09d9b78e15 changed license 2024-07-31 20:55:49 -05:00
de17d1d45b removing executable from code path and moving it to releases.
All checks were successful
Build / build-windows-binary (push) Successful in 29s
Build / build-linux-binary (push) Successful in 1m11s
2024-07-31 12:45:02 -05:00
f99801fa2b #v.0.0.3 test
All checks were successful
Build / build-windows-binary (push) Successful in 34s
Build / build-linux-binary (push) Successful in 42s
2024-07-30 19:47:41 -05:00
b076be8333 test tag
All checks were successful
Build / build-windows-binary (push) Successful in 34s
2024-07-30 19:40:12 -05:00
5cae8105d0 find the directory
All checks were successful
Build / build-windows-binary (push) Successful in 32s
2024-07-30 18:04:02 -05:00
17155120ab updated pyinstaller
All checks were successful
Build / build-windows-binary (push) Successful in 32s
2024-07-30 17:54:39 -05:00
7c65c0e357 changed pyinstaller to not be a module
Some checks failed
Build / build-windows-binary (push) Failing after 15s
2024-07-30 17:53:32 -05:00
8fbd10d9df updated
Some checks failed
Build / build-windows-binary (push) Failing after 51s
2024-07-30 17:51:21 -05:00
cfa23470a8 changed actions/setup-python to v4
Some checks failed
Build / build-windows-binary (push) Failing after 1m22s
2024-07-30 17:45:33 -05:00
73acbb6646 fixed syntax
Some checks failed
Build / build-windows-binary (push) Failing after 15s
2024-07-30 17:41:07 -05:00
82ad280aa8 updated python version 2024-07-30 17:39:12 -05:00
0174336f39 added python to build
Some checks failed
Build / build-windows-binary (push) Failing after 41s
2024-07-30 17:36:38 -05:00
165ba4533c updated job
Some checks failed
Build / build-windows-binary (push) Failing after 12s
2024-07-30 17:33:21 -05:00
822fb79fd4 test
Some checks failed
Build / build-windows-binary (push) Failing after 12s
2024-07-30 15:56:49 -05:00
9ca8631abb test
Some checks failed
Build / build-windows-binary (push) Failing after 2s
2024-07-30 14:16:33 -05:00
6615637d07 test
Some checks failed
Build / build-windows-binary (push) Has been cancelled
2024-07-30 13:51:01 -05:00
c4bd226b7e test
Some checks failed
Build / build-windows-binary (push) Failing after 11s
2024-07-30 13:31:27 -05:00
6f6e38fe13 Merge pull request 'dev-automate-workflow' (#8) from dev-automate-workflow into main
Reviewed-on: #8
2024-07-30 13:28:40 -05:00
b2ce0f26a8 updaged build yaml 2024-07-30 13:27:21 -05:00
a622253b55 updated build 2024-07-30 13:13:16 -05:00
dbd11e02a0 Merge pull request 'dev-create-rest-client' (#7) from dev-create-rest-client into main
Reviewed-on: #7
2024-07-30 12:58:24 -05:00
2b3a1a7078 adjusted utype handling 2024-07-30 12:46:19 -05:00
045467783f moved encoder to seperate file and created .token refresh logic 2024-07-29 17:30:11 -05:00
1800aafd72 updated saving tokens 2024-07-29 14:04:35 -05:00
11b312a1e8 updated client 2024-07-27 18:24:15 -05:00
1f255ec33a Added push payload 2024-07-25 22:22:08 -05:00
573cd651be automated release idea 2024-07-25 16:09:09 -05:00
c030fc69a2 updating build.yaml
Some checks failed
Build / build-windows-binary (push) Failing after 0s
Build / build-linux-binary (push) Failing after 45s
2024-07-25 13:30:21 -05:00
702d93b9da Updated query and mapped the proper data to the template
All checks were successful
Build Inex Executable / linux (push) Successful in 1m20s
2024-07-24 13:00:24 -05:00
14 changed files with 714 additions and 207 deletions

build.yaml

@@ -1,21 +1,43 @@
name: Build Inex Executable
run-name: Deploy to ${{ inputs.deploy_target }} by @${{ gitea.actor }}
on: [push]
name: Build
on:
push:
tags:
- '*'
jobs:
Build:
runs-on: ubuntu-22.04
build-linux-binary:
runs-on: ubuntu-latest
permissions:
contents: write # release changes require contents write
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.12.4'
cache: 'pip' # caching pip dependencies
- run: lsb_release -a
- run: apt-get update
- run: apt-get install unixodbc -y
- run: pip install -r requirements.txt
- run: pyinstaller --noconfirm --onefile --console ${{ gitea.workspace }}/inex.py
- uses: actions/upload-artifact@v3
- uses: softprops/action-gh-release@v2
with:
name: Inex
path: ${{ gitea.workspace }}/dist/inex
files: ${{ gitea.workspace }}/dist/inex
build-windows-binary:
runs-on: windows
permissions:
contents: write # release changes require contents write
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: '3.12.4'
cache: 'pip' # caching pip dependencies
- run: python -m pip install -r requirements.txt
- run: pyinstaller --noconfirm --onefile --console ${{ gitea.workspace }}/inex.py
- uses: softprops/action-gh-release@v2
with:
files: |
${{ gitea.workspace }}/dist/inex.exe
${{ gitea.workspace }}/config.toml.example
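Note that the rewritten workflow above now triggers only on tag pushes (`on: push: tags: '*'`) instead of on every push, so a release build is kicked off by pushing a tag, e.g. `git tag v0.0.4 && git push origin v0.0.4` (the tag name here is illustrative).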

3
.gitignore vendored

@@ -1,4 +1,5 @@
*config.toml
*.toml
*.json
__pycache__/
*.log
*.token

LICENSE

@@ -209,7 +209,7 @@ If you develop a new program, and you want it to be of the greatest possible use
To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the “copyright” line and a pointer to where the full notice is found.
Inex
Copyright (C) 2024 jblu
Copyright (C) 2024 Jonathan Branan
This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
@@ -221,7 +221,7 @@ Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode:
Inex Copyright (C) 2024 jblu
Inex Copyright (C) 2024 Jonathan Branan
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details.

121
README.md

@@ -1,34 +1,93 @@
# Inex
Information Exchange - Pull data from EFT ARM then dump it to a json file
### Information Exchange - Ingest ARM data into EFC
This application is designed to pull data from a Microsoft SQL database, reformat it, and then send that data via the web to the Event Fusion Center. It is built to be performant: the embedded query returns no unnecessary data, and results are written to memory before being pushed into EFC. Initially you may have to pull a large number of days so the system is caught up, but the sweet spot is to pull one set of data every day.
# Data Map:
## tbl_ProtocolCommands
- ProtocolCommandID
- Time_stamp
- RemoteIP
- RemotePort
- LocalIP
- LocalPort
- Protocol
- SiteName
- Command
- CommandParameters
- FileName
- VirtualFolderName
- PhysicalFolderName
- IsInternal
- FileSize
- TransferTime
- BytesTransferred
- ResultID
- TransactionID
- Description
- Actor
## tbl_Transactions
- TransactionID
- ParentTransactionID
- TransactionObject
- TransactionGUID
- NodeName
- time_stamp
## Build from source
You will need to install git and clone the repository:
`git clone https://git.jonb.io/jblu/Inex.git`
Alternatively, you can download the files from [the releases page](https://git.jonb.io/jblu/Inex/releases).
Then follow the OS-specific instructions below. These are taken directly from the automation steps used to build the executables found on the releases page.
##### Windows
You can find the requirements.txt in the repository.
`python -m pip install -r requirements.txt`
`pyinstaller --noconfirm --onefile --console path/to/inex.py`
##### Linux
Update your Linux distribution first. The following is Ubuntu-specific; please check the package management tool in your specific distribution.
`apt-get update`
You need unixodbc, otherwise pyodbc cannot be installed.
`apt-get install unixodbc -y`
You can find the requirements.txt in the repository.
`pip install -r requirements.txt`
`pyinstaller --noconfirm --onefile --console path/to/inex.py`
## Setup
You will need a *config.toml* file in the same directory as *inex.py* or the inex executable. It's recommended to rename the existing config.toml.example file to save some time and avoid omitting any data.
> If the application doesn't find the *config.toml* at the time of execution, it will not continue.
#### Config.toml
| Table | Purpose |
|-|-|
| fortraPlatform | Fortra Specific data |
| database | MSSQL Configuration |
| immutables | Data that must be included but is not provided by the source database |
| output | If and where to write files|
| logging | Set if, where and level of logging |
It is not obvious how the following settings affect the application.
> Note the #comments
```
[fortraPlatform]
selectedPlatform = "dev" # Selects which environment the data is pushed to. The tenant_id and secret must be set manually.
[database]
overrideEmbeddedquery = true # Choose whether the embedded query should be overridden by the query defined in this file.
daysTopull = 30 # Applies only to the embedded query; it does not affect a query provided in config.toml.
driver = "ODBC Driver 18 for SQL Server" # Which ODBC driver to use. This one is recommended.
[output]
pushToplatform = false # If true, send data to the Fortra platform. Setting this to false is useful for dry runs.
dumpTojson = true # If true, write data to a json file.
token = "./.token" # Recommended to leave as-is
```
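For illustration, the sketch below shows roughly how `selectedPlatform` is resolved against the `[fortraPlatform.*]` tables at runtime (a minimal sketch of the logic in inex.py, not the full implementation):
```
import tomllib  # Python 3.11+ standard library

with open("./config.toml", "rb") as c:
    config = tomllib.load(c)

selected = config["fortraPlatform"]["selectedPlatform"].lower()
if "dev" in selected:
    platform = config["fortraPlatform"]["dev"]
elif "stag" in selected:
    platform = config["fortraPlatform"]["stage"]
elif "prod" in selected:
    platform = config["fortraPlatform"]["prod"]

print(platform["efc_url"])  # "dev" -> https://efc.efc-dev.cloudops.fortradev.com
```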
## Usage
### Windows
##### Run inex.py
1. Download [source](https://git.jonb.io/jblu/Inex/releases)
2. `cd C:\path\to\Inex`
3. `python inex.py`
OR
##### Run inex.exe
1. Download the [executable](https://git.jonb.io/jblu/Inex/releases)
2. `cd C:\path\to\Inex`
3. In CMD, run `.\inex.exe`
### Linux
##### Run inex.py
1. Download [source](https://git.jonb.io/jblu/Inex/releases)
2. `cd /path/to/Inex`
3. `python ./inex.py`
OR
1. Download the [executable](https://git.jonb.io/jblu/Inex/releases)
2. `cd /path/to/Inex`
3. Run `./inex`

config.toml.example

@@ -1,15 +1,54 @@
[fortraPlatform]
selectedPlatform = "dev"
[output]
pushToplatform = false
dumpTojson = true
filename ="./data.json"
token = "./.token"
[logging]
useLog = true
logLevel = "debug"
logPath = "./inex.log"
[database]
overrideEmbeddedquery = false
daysTopull = 20
driver = "ODBC Driver 18 for SQL Server"
server = "192.168.x.x"
database = "EFTDB"
user = "a"
password = "a"
query = "SELECT [Id],[Version] FROM [EFTDB].[dbo].[tbl_Schema_Version]"
query = """DECLARE @stopTime DATETIME2
SET @stopTime=DATEADD(DAY, -30, GETDATE())
SELECT p.ProtocolCommandID, t.Time_stamp, p.RemoteIP, p.RemotePort, p.LocalIP, p.LocalPort, p.Protocol, p.SiteName, p.Command, p.FileName, p.PhysicalFolderName, p.VirtualFolderName, p.FileSize, p.TransferTime, p.BytesTransferred, p.Description, p.ResultID, t.TransactionID, p.Actor, t.TransactionObject, t.NodeName, t.TransactionGUID, a.Protocol user_type
FROM tbl_Transactions t
Full JOIN tbl_ProtocolCommands p ON(t.TransactionID=p.TransactionID)
Full JOIN tbl_Authentications a ON(t.TransactionID=a.TransactionID)
WHERE p.Time_stamp>@stopTime AND p.Command IS NOT NULL"""
[output]
filename ="./data.json"
[fortraPlatform.dev]
idp = "https://foundation.foundation-dev.cloudops.fortradev.com/idp/realms/products/protocol/openid-connect/token"
efc_url = "https://efc.efc-dev.cloudops.fortradev.com"
tenant_id = ""
client_id = "eft-event-generator-confidential"
secret = ""
[logging]
use_log = true
logLevel = "debug"
logPath = "./inex.log"
[fortraPlatform.stage]
idp = "https://foundation.foundation-stage.cloudops.fortradev.com/idp/realms/products/protocol/openid-connect/token"
efc_url = "https://efc.efc-stage.cloudops.fortradev.com"
tenant_id = ""
client_id = "eft-event-generator-confidential"
secret = ""
[fortraPlatform.prod]
idp ="https://foundation.foundation-prod.cloudops.fortradev.com/idp/realms/products/protocol/openid-connect/token"
efc_url = "https://efc.efc-prod.cloudops.fortradev.com"
tenant_id = ""
client_id = "eft-event-generator-confidential"
secret = ""
[immutables]
product_name = "GlobalScape EFT"
prd_ext_tenant_name = "GlobalScape EFT"

config.toml (deleted)

@@ -1,15 +0,0 @@
[database]
driver = "ODBC Driver 18 for SQL Server"
server = "192.168.x.x"
database = "EFTDB"
user = "a"
password = "a"
query = "SELECT [Id],[Version] FROM [EFTDB].[dbo].[tbl_Schema_Version]"
[output]
filename ="./data.json"
[logging]
use_log = true
logLevel = "debug"
logPath = "./inex.log"

Binary file not shown.

83
inex.py

@@ -1,30 +1,42 @@
import pyodbc
import os
import logging
import datetime
from tomllib import load
import tomllib
from inexLogging import inexLog
import inexConnect
from inexDataModel import dataTemplate
from inexDataProcessing import processData
import json
import decimal
import requests
import inexEncoder
import inexSqlquery
class Inex:
def __init__(self):
"""Initilize config, calls functions from inex-connect.py and inex-logging.py"""
if os.path.exists('./config.toml'):
config_file_path = './config.toml'
with open(config_file_path, 'rb') as c:
self.config = load(c)
"""Initilize config, calls functions from inexConnect.py, inexLogging.py
inexDataModel.py, inexDataProcessing.py, inexEncoder.py and inexSqlquery.py
Main logic of the program. Requires a config.toml in the same directory it's
being run from."""
# assign libraries
self.db = pyodbc
self.tm = datetime
self.il = logging
self.ic = inexConnect
self.r = requests
self.tl = tomllib
self.os = os
self.j = json
self.e = inexEncoder.Encoder
self.sq = inexSqlquery
# Check if local config file exists.
if self.os.path.exists('./config.toml'):
config_file_path = './config.toml'
with open(config_file_path, 'rb') as c:
self.config = self.tl.load(c)
# set config
try:
if self.config:
self.dbDriver = self.config["database"]["driver"]
self.dbServer = self.config["database"]["server"]
self.dbDatabase = self.config["database"]["database"]
@@ -35,34 +47,51 @@ class Inex:
self.useLog = self.config["logging"]["useLog"]
self.logPath = self.config["logging"]["logPath"]
self.logLevel = self.config["logging"]["logLevel"]
self.prdExttenantname = self.config["immutables"]["prd_ext_tenant_name"]
self.productName = self.config["immutables"]["product_name"]
self.tokenFilepath = self.config["output"]["token"]
self.selectedPlatform = self.config["fortraPlatform"]["selectedPlatform"]
self.writeJsonfile = self.config["output"]["dumpTojson"]
self.pushToplatform = self.config["output"]["pushToplatform"]
self.queryOverride = self.config["database"]["overrideEmbeddedquery"]
self.queryDaystopull = self.config["database"]["daysTopull"]
except Exception as e:
print("No config.toml or possibly missing settings in the file. Please use config.toml.example file and configure appropriately")
self.il.error(e)
print(e)
exit(1)
if "dev" in self.selectedPlatform.lower():
self.platformConfig = self.config["fortraPlatform"]["dev"]
if "stag" in self.selectedPlatform.lower():
self.platformConfig = self.config["fortraPlatform"]["stage"]
if "prod" in self.selectedPlatform.lower():
self.platformConfig = self.config["fortraPlatform"]["prod"]
#Setup logging
inexLog(self)
# create the connection to the database
self.cursor = self.ic.connectDatabase(self, self.db, self.dbDriver, self.dbServer, self.dbDatabase, self.dbUser, self.dbPassword)
self.cursor = self.ic.inexSql.connectDatabase(self, self.db, self.dbDriver, self.dbServer, self.dbDatabase, self.dbUser, self.dbPassword)
self.data = self.ic.databaseQuery(self, self.cursor, self.dbQuery)
# print(f"returned data: {self.data}")
self.modifiedData = processData(self.data, dataTemplate)
# Query the database
self.data = self.ic.inexSql.databaseQuery(self, self.cursor, self.sq.sqlQuerymodel.queryData(self.queryOverride,self.dbQuery, self.queryDaystopull))
print(self.modifiedData)
# Modify the data to meet EFC requirements
self.modifiedData = processData(self.data, dataTemplate, prd_ext_tenant_name=self.prdExttenantname,product_name=self.productName,\
prd_ext_tenant_id=self.platformConfig["tenant_id"])
# Push data to EFC. Check for local Auth token -> Authenticate if needed -> push data
if self.pushToplatform:
inexConnect.fortraEFC.__init__(self)
# TODO: move this to its own function
if self.useLog:
self.il.warning(f"Writing to '{self.outputFile}'.")
# Write data to json
if self.writeJsonfile:
with open(self.outputFile, "w") as f:
json.dump(self.modifiedData, f, indent = 2, cls=Encoder)
# TODO: Move this class to it's own file
class Encoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
return int(o)
if isinstance(o, datetime.datetime):
return str(o)
return super().default(o)
self.j.dump(self.modifiedData, f, indent = 2, cls=self.e)
# Run
if __name__== "__main__":

inexConnect.py

@@ -1,3 +1,4 @@
class inexSql:
def connectDatabase(self, lib, driver, server, database, user, password):
"""Connects to the database. Requires a windows driver to do so.
Typically there is one installed by default"""
@@ -19,6 +20,7 @@ def connectDatabase(self, lib, driver, server, database, user, password):
return cursor
def databaseQuery(self, cursor, query, args=()):
"""Use the database connection to send a query."""
if self.useLog:
self.il.debug(f"Query:")
self.il.debug(query)
@@ -39,5 +41,57 @@ def databaseQuery(self, cursor, query, args=()):
cur.connection.close()
if self.useLog:
self.il.debug(f"Database connection closed")
# return (r[0] if r else None) if one else r
return r
class fortraEFC:
"""Class to connect to fortra EFC. It will authenticate and push rest payloads.
Writes a .token file to the same directory script was run in."""
def __init__(self):
"""This is the logic for how authentication is handled"""
# Check if .token file is present
if fortraEFC.readToken(self) == 1:
# Get fresh token. First run.
fortraEFC.getToken(self)
fortraEFC.writeToken(self)
# Push data with token
self.pushPayloadresponse = fortraEFC.pushPayload(self)
if self.pushPayloadresponse == 401:
fortraEFC.getToken(self)
fortraEFC.writeToken(self)
fortraEFC.pushPayload(self)
def readToken(self):
"""Looks locally for a .token file. Returns a numeral code
for logic in the init method."""
if self.os.path.exists(self.tokenFilepath):
with open(self.tokenFilepath, 'rb') as t:
self.tokenData = self.j.load(t)
self.il.debug(f'readToken {self.tokenData["access_token"]}')
return 0
else:
return 1
def getToken(self):
"""Gets a token from fortra idp."""
self.tokenData = self.r.post(self.platformConfig["idp"], data={"grant_type":"client_credentials",\
"client_id": self.platformConfig["client_id"],\
"client_secret": self.platformConfig["secret"],})
self.tokenData = self.tokenData.json()
self.il.debug(f'getToken {self.tokenData["access_token"]}')
def writeToken(self):
"""Writes a token to a local file named '.token'."""
fortraEFC.getToken(self)
with open(self.tokenFilepath, "w") as f:
self.j.dump(self.tokenData, f, indent = 2)
self.il.debug(f'writeToken {self.tokenData["access_token"]}')
def pushPayload(self):
"""Sends data to fortra EFC. Requires a token from the idp."""
self.il.debug(f'pushPayload {self.tokenData["access_token"]}')
url = f'{self.platformConfig["efc_url"]}/api/v1/unity/data/{self.platformConfig["tenant_id"]}/machine_event'
pushPayloadResponse = self.r.post(url, headers={'Authorization': f'Bearer {self.tokenData["access_token"]}'},\
data=self.j.dumps(self.modifiedData, cls=self.e))
self.il.debug(pushPayloadResponse.status_code)
self.il.debug(pushPayloadResponse.text)
return pushPayloadResponse.status_code
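The flow in fortraEFC.__init__ amounts to a cache-then-retry pattern: reuse the saved token if one exists, and refresh it once on a 401. Below is a condensed sketch of that pattern (the standalone function names are hypothetical; the requests calls mirror getToken/pushPayload above):
```
import json, os, requests

def get_token(idp_url, client_id, secret):
    # Client-credentials grant against the IdP token endpoint
    r = requests.post(idp_url, data={"grant_type": "client_credentials",
                                     "client_id": client_id,
                                     "client_secret": secret})
    return r.json()

def push_with_retry(url, token_file, idp_url, client_id, secret, payload):
    # Reuse the cached token when present; otherwise fetch and cache one
    if os.path.exists(token_file):
        with open(token_file) as t:
            token = json.load(t)
    else:
        token = get_token(idp_url, client_id, secret)
        with open(token_file, "w") as f:
            json.dump(token, f, indent=2)
    resp = requests.post(url,
                         headers={"Authorization": f"Bearer {token['access_token']}"},
                         data=json.dumps(payload))
    if resp.status_code == 401:  # stale token: refresh once and retry
        token = get_token(idp_url, client_id, secret)
        with open(token_file, "w") as f:
            json.dump(token, f, indent=2)
        resp = requests.post(url,
                             headers={"Authorization": f"Bearer {token['access_token']}"},
                             data=json.dumps(payload))
    return resp.status_code
```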

inexDataModel.py

@@ -1,48 +1,195 @@
def dataTemplate(**kwargs):
"""Expects the following keyword arguments:
status,status_detail,status_code,file_size,file_path,file_virtual_path,file_name,
guid,ref_id,prd_instance_id,product_guid,product_name,product_version,node_name,
src_endpoint_port,src_endpoint_ip,dst_endpoint_port,dst_endpoint_ip,dst_endpoint_type,
session_uid,bytes_out,transfer_time,time,user_type,user_domain,user_name and utype.
"""
template ={
"status": kwargs.get('status'),
"status_detail": kwargs.get('status_detail'),
"status_code": kwargs.get('status_code'),
"file": {
"size": kwargs.get('file_size'),
"path": kwargs.get('file_path'),
"virtual_path": kwargs.get('file_virtual_path'),
"name": kwargs.get('file_name')
},
"guid": kwargs.get('guid'),
"ref_id": kwargs.get('ref_id'),
"prd_instance_id": kwargs.get('prd_instance_id'),
"product_guid": kwargs.get('product_guid'),
"product_name": kwargs.get('product_name'),
"product_version": kwargs.get('product_version'),
"node_name":kwargs.get('node_name'),
"src_endpoint": {
"port": kwargs.get('src_endpoint_port'),
"ip": kwargs.get('src_endpoint_ip')
},
def dataTemplate(transactionType,**kwargs):
"""Created templates for use. This function forms json data into an
appropriate model for EFC. It returnes the appropriate template based
on the transaction type passed into the function. The logic to process
this is at the bottom of the function."""
upload = {
"bytes" : kwargs.get('bytes'),
"dst_endpoint": {
"port": kwargs.get('dst_endpoint_port'),
"ip": kwargs.get('dst_endpoint_ip'),
"type": kwargs.get('dst_endpoint_type')
},
"duration": kwargs.get('duration'),
"file": {
"created_time": kwargs.get('file_created_time'),
"uid": kwargs.get('file_uid'),
"size": kwargs.get('file_size'),
"name": kwargs.get('file_name'),
"path": kwargs.get('file_path')
},
"guid": kwargs.get('file_uid'),
"node_name": kwargs.get('node_name'),
"prd_ext_tenant_id": kwargs.get('prd_ext_tenant_id'),
"product_name": kwargs.get('product_name'),
"prd_ext_tenant_name": kwargs.get('prd_ext_tenant_name'),
"classifications": [{
"ref_id": f"globalscape:{kwargs.get('guid')}",
"time": kwargs.get('time'),
}],
"session": {
"created_time": kwargs.get('time'),
"uid": kwargs.get('session_uid')
},
"bytes_out" : kwargs.get('bytes_out'),
"transfer_time" : kwargs.get('transfer_time'),
"src_endpoint": {
"port": kwargs.get('src_endpoint_port'),
"ip": kwargs.get('src_endpoint_ip'),
"type": kwargs.get('src_endpoint_type')
},
"tenant": kwargs.get('prd_ext_tenant_id'),
"tenant_name":"GlobalScape",
"time": kwargs.get('time'),
"status_code": kwargs.get('status_code'),
"status_detail": kwargs.get('status_detail'),
"user": {
"home_directory": kwargs.get('user_home_directory'),
"uuid": kwargs.get('guid'),
"uid": kwargs.get('user_uid'),
"type": kwargs.get('user_type'),
"name": kwargs.get('user_name')
},
"utype": kwargs.get('utype')
}
download = {
"bytes" : kwargs.get('bytes'),
"dst_endpoint": {
"port": kwargs.get('dst_endpoint_port'),
"ip": kwargs.get('dst_endpoint_ip'),
"type": kwargs.get('dst_endpoint_type')
},
"duration": kwargs.get('duration'),
"file": {
"uid": kwargs.get('file_uid'),
"size": kwargs.get('file_size'),
"name": kwargs.get('file_name'),
"path": kwargs.get('file_path')
},
"guid": kwargs.get('file_uid'),
"node_name": kwargs.get('node_name'),
"prd_ext_tenant_id": kwargs.get('prd_ext_tenant_id'),
"product_name": kwargs.get('product_name'),
"prd_ext_tenant_name": kwargs.get('prd_ext_tenant_name'),
"classifications": [{
"ref_id": f"globalscape:{kwargs.get('guid')}",
"time": kwargs.get('time'),
}],
"session": {
"created_time": kwargs.get('time'),
"uid": kwargs.get('session_uid')
},
"src_endpoint": {
"port": kwargs.get('src_endpoint_port'),
"ip": kwargs.get('src_endpoint_ip'),
"type": kwargs.get('src_endpoint_type')
},
"tenant": kwargs.get('prd_ext_tenant_id'),
"tenant_name":"GlobalScape",
"time": kwargs.get('time'),
"status_code": kwargs.get('status_code'),
"status_detail": kwargs.get('status_detail'),
"user": {
"home_directory": kwargs.get('user_home_directory'),
"uuid": kwargs.get('guid'),
"uid": kwargs.get('user_uid'),
"type": kwargs.get('user_type'),
"domain": kwargs.get('user_domain'),
"name": kwargs.get('user_name')
},
"utype": kwargs.get('utype')
}
fileDeleted = {
"file": {
"size": kwargs.get('file_size'),
"name": kwargs.get('file_name'),
"path": kwargs.get('file_path'),
"uid": kwargs.get('file_uid'),
},
"guid": f'deleted:{kwargs.get("guid")}',
"node_name": kwargs.get('node_name'),
"classifications": [{
"ref_id": f"globalscape:{kwargs.get('guid')}",
"time": kwargs.get('time'),
}],
"prd_ext_tenant_name": kwargs.get("prd_ext_tenant_name"),
"prd_ext_tenant_id": kwargs.get('prd_ext_tenant_id'),
"product_name": kwargs.get("product_name"),
"session": {
"created_time": kwargs.get('time'),
"uid": kwargs.get('session_uid')
},
"src_endpoint": {
"port": kwargs.get('src_endpoint_port'),
"ip": kwargs.get('src_endpoint_ip'),
"type": kwargs.get('src_endpoint_type')
},
"tenant": kwargs.get('prd_ext_tenant_id'),
"tenant_name":"GlobalScape",
"dst_endpoint": {
"port": kwargs.get('dst_endpoint_port'),
"ip": kwargs.get('dst_endpoint_ip'),
"type": kwargs.get('dst_endpoint_type')
},
"time": kwargs.get('time'),
"status_code": kwargs.get('status_code'),
"status_detail": kwargs.get('status_detail'),
"user": {
"home_directory": kwargs.get('user_home_directory'),
"uuid": kwargs.get('user_session_uid'),
"uid": kwargs.get('user_uid'),
"type": kwargs.get('user_type'),
"name": kwargs.get('user_name')
},
"utype": kwargs.get('utype')
}
logon ={
"classifications": [{
"ref_id": f"globalscape:{kwargs.get('guid')}",
"time": kwargs.get('time'),
}],
"dst_endpoint": {
"port": kwargs.get('dst_endpoint_port'),
"ip": kwargs.get('dst_endpoint_ip'),
"type": kwargs.get('dst_endpoint_type')
},
"guid": kwargs.get('guid'),
"node_name": kwargs.get('node_name'),
"tenant": kwargs.get('prd_ext_tenant_id'),
"tenant_name":"GlobalScape",
"prd_ext_tenant_id": kwargs.get('prd_ext_tenant_id'),
"product_name": kwargs.get("product_name"),
"prd_ext_tenant_name": kwargs.get('prd_ext_tenant_name'),
"status_code": kwargs.get('status_code'),
"status_detail": kwargs.get('status_detail'),
"src_endpoint": {
"port": kwargs.get('src_endpoint_port'),
"ip": kwargs.get('src_endpoint_ip'),
"type": kwargs.get('src_endpoint_type')
},
"time": kwargs.get('time'),
"user": {
"home_directory": kwargs.get('user_home_directory'),
"uuid": kwargs.get('user_session_uid'),
"uid": kwargs.get('user_uid'),
"type": kwargs.get('user_type'),
"name": kwargs.get('user_name')
},
"session": {
"created_time": kwargs.get('time'),
"uid": kwargs.get('session_uid')
},
"utype": kwargs.get('utype')
}
if transactionType == "file_uploaded":
template = upload
if transactionType == "file_downloaded":
template = download
if transactionType == "file_deleted":
template = fileDeleted
if transactionType == "user_logged_on":
template = logon
if transactionType == "other":
template = {}
return template
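A quick illustration of the selector (values are made up; any keyword not passed simply comes back as None):
```
event = dataTemplate("file_uploaded",
                     bytes=1024,
                     file_name="report.csv",
                     node_name="EFT-01",
                     time="2024-08-26 14:25:47")
event["file"]["name"]   # 'report.csv'
event["utype"]          # None -- only set when passed in
dataTemplate("other")   # {} -- unmapped transaction types get an empty template
```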

inexDataProcessing.py

@@ -1,32 +1,136 @@
def processData(data, template):
def processData(data, template, **kwargs):
"""Translates data from sql query to the appropriate place in the respective template.
Accepts data, which is the sql query output, the template function, and finally
additional data to insert into the template. Uses other functions to further
process row data."""
processedData = []
transactionLoginid = []
for row in data:
# print(f'Row: {row}')
processedData.append(template(status=row.get(''),\
status_detail=row.get(''),\
# Set variables for the different templates up front and branch on them; avoid calling identifyUtype repeatedly
identifyUtypecommand = identifyUtype(row.get('Command'))
if identifyUtypecommand == "other":
continue
if row.get('Command') == None:
continue
userType = identifyUserType(row.get('user_type'))
userHome = parseHomefolder(row.get('Actor'),row.get('VirtualFolderName'))
try:
processedData.append(template(identifyUtypecommand,\
prd_ext_tenant_name=kwargs.get('prd_ext_tenant_name'),\
user_uid=row.get('TransactionID'),\
status_detail=row.get('Description'),\
prd_ext_tenant_id=kwargs.get('prd_ext_tenant_id'),\
status_code=row.get('ResultID'),\
file_created_time=row.get('Time_stamp'),\
file_size=row.get('FileSize'),\
file_uid=row.get('ProtocolCommandID'),\
file_path=row.get('PhysicalFolderName'),\
file_virtual_path=row.get('VirtualFolderName'),\
file_name=row.get('FileName'),\
guid=row.get('TransactionGUID'),\
ref_id=row.get('ProtocolCommandID'),\
prd_instance_id=row.get(''),\
product_guid=row.get(''),\
product_name=row.get(''),\
product_version=row.get(''),\
product_name=kwargs.get('product_name'),\
node_name=row.get('NodeName'),\
session_uid=row.get('TransactionID'),\
src_endpoint_type=row.get('Protocol'),\
src_endpoint_port=row.get('RemotePort'),\
src_endpoint_ip=row.get('RemoteIP'),\
dst_endpoint_port=row.get('LocalPort'),\
dst_endpoint_ip=row.get('LocalIP'),\
dst_endpoint_type=row.get('Protocol'),\
user_session_uid=row.get('TransactionID'),\
bytes=row.get('BytesTransferred'),\
time=row.get('Time_stamp'),\
duration=row.get('TransferTime'),\
user_type=userType,\
user_name=row.get('Actor'),\
user_home_directory=userHome,\
utype=identifyUtypecommand))
except UnboundLocalError:
print(f'Problem row GUID:{row.get("TransactionGUID")} ::: TransactionObject:{row.get("TransactionObject")} Command: {row.get("Command")}')
continue
identifyUtypetransactionObject = identifyUtype(row.get('TransactionObject'))
if identifyUtypetransactionObject == "other":
continue
if row.get('TransactionGUID') not in transactionLoginid:
try:
processedData.append(template(identifyUtypetransactionObject,\
prd_ext_tenant_id=kwargs.get('prd_ext_tenant_id'),\
prd_ext_tenant_name=kwargs.get('prd_ext_tenant_name'),\
status_detail=row.get('Description'),\
guid=row.get('TransactionGUID'),\
status_code=row.get('ResultID'),\
node_name=row.get('NodeName'),\
prd_instance_id=kwargs.get('prd_instance_id'),\
product_name=kwargs.get('product_name'),\
src_endpoint_type=row.get('Protocol'),\
src_endpoint_port=row.get('RemotePort'),\
src_endpoint_ip=row.get('RemoteIP'),\
dst_endpoint_port=row.get('LocalPort'),\
dst_endpoint_ip=row.get('LocalIP'),\
dst_endpoint_type=row.get('Protocol'),\
session_uid=row.get('TransactionID'),\
bytes_out=row.get('BytesTransferred'),\
transfer_time=row.get('TransferTime'),\
time=row.get('Time_stamp'),\
user_type=row.get(''),\
user_domain=row.get('SiteName'),\
user_session_uid=row.get('TransactionID'),\
user_uid=row.get('TransactionID'),\
user_type=userType,\
user_name=row.get('Actor'),\
utype=row.get('Command')))
user_home_directory=userHome,\
utype=identifyUtypetransactionObject\
))
transactionLoginid.append(row.get('TransactionGUID'))
except UnboundLocalError:
print(f'Problem row GUID:{row.get("TransactionGUID")} ::: TransactionObject:{row.get("TransactionObject")} Command: {row.get("Command")}')
continue
return processedData
def identifyUserType(obj):
"""Check string if it has Admin-> return Administrator else return User."""
if obj:
if "Admin" in obj:
return "Administrator"
else:
return "User"
else:
return None
def parseHomefolder(user, virtualfolder):
"""Extract users home folder using the username. Will not work on edge cases
such as when a users home folder does not have the user name. When that occurs
it is impossible to know based on the arm data what the home folder is.
This function is an assumption so it may return the incorrect home folder.
This function finds the user name and takes the path from the left of the folder
as the home folder. There are cases where this may not be accurate."""
if user:
userSplit = f'/{user}/'
if virtualfolder:
if userSplit in virtualfolder:
home = virtualfolder.split(userSplit)[0] + userSplit
return home if home else None
def identifyUtype(obj):
"""Process Type of transaction based on string that passed in.
Return transaction type."""
user_logged_on = ['AUTH']
file_deleted = ["dele"]
file_uploaded = ["created"]
file_downloaded = ["sent"]
if obj in user_logged_on:
return "user_logged_on"
elif obj in file_deleted:
return "file_deleted"
elif obj in file_uploaded:
return "file_uploaded"
elif obj in file_downloaded:
return "file_downloaded"
else:
return "other"

13
inexEncoder.py Normal file

@@ -0,0 +1,13 @@
import json
import decimal
import datetime
class Encoder(json.JSONEncoder):
"""Encoder uses json.JSONEncoder and checks for instances of decimal and datetime.
Changes decimal.Decimal to int and datetime.datetime to unix timestamp with miliseconds."""
def default(self, o):
if isinstance(o, decimal.Decimal):
return int(o)
if isinstance(o, datetime.datetime):
return int(o.timestamp() * 1000)
return super().default(o)
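A quick check of the encoder's behavior (illustrative values):
```
import json, decimal, datetime
from inexEncoder import Encoder

payload = {"size": decimal.Decimal("2048"),
           "time": datetime.datetime(2024, 8, 26, 14, 25, 47)}
print(json.dumps(payload, cls=Encoder))
# -> {"size": 2048, "time": <epoch milliseconds; exact value depends on local timezone>}
```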

11
inexSqlquery.py Normal file

@@ -0,0 +1,11 @@
class sqlQuerymodel:
def queryData(overRideflag, configQuery, daysTopull):
"""Embedded query data. Data is slightly modified to change the amount of days to pull."""
q ="""DECLARE @stopTime DATETIME2
SET @stopTime=DATEADD(DAY, -30, GETDATE())
SELECT p.ProtocolCommandID, t.Time_stamp, p.RemoteIP, p.RemotePort, p.LocalIP, p.LocalPort, p.Protocol, p.SiteName, p.Command, p.FileName, p.PhysicalFolderName, p.VirtualFolderName, p.FileSize, p.TransferTime, p.BytesTransferred, p.Description, p.ResultID, t.TransactionID, p.Actor, t.TransactionObject, t.NodeName, t.TransactionGUID, a.Protocol user_type
FROM tbl_Transactions t
Full JOIN tbl_ProtocolCommands p ON(t.TransactionID=p.TransactionID)
Full join tbl_Authentications a ON(t.TransactionID=a.TransactionID)
WHERE p.Time_stamp>@stopTime AND p.Command IS NOT NULL""".replace("30", str(daysTopull))
return configQuery if overRideflag else q
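Usage comes down to the override flag (note that .replace swaps the literal 30 in the embedded query for the configured day count):
```
from inexSqlquery import sqlQuerymodel

q = sqlQuerymodel.queryData(False, "SELECT 1", 7)  # embedded query, 7-day window
assert "DATEADD(DAY, -7" in q
q = sqlQuerymodel.queryData(True, "SELECT 1", 7)   # override: config query returned as-is
assert q == "SELECT 1"
```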

45
test.py

@@ -1,5 +1,48 @@
import datetime
def connectDatabase(driver, server, database, user, password):
connectionString = f'DRIVER={{{driver}}};SERVER={server};DATABASE={database};UID={user};PWD={password};TrustServerCertificate=yes'
print(connectionString)
a = connectDatabase("ODBC Driver 18 for SQL Server","b","c","d","e")
# a = connectDatabase("ODBC Driver 18 for SQL Server","b","c","d","e")
def converttimestamp(t):
print(int(t.timestamp()* 1000))
def builddict(keys,*args,**kwargs):
dict = {}
for key in keys:
dict[key] = kwargs.get(key)
print(dict)
def identifyUtype(obj):
"""Process Type of transaction based on string that passed in.
Return transaction type."""
user_logged_on = ['AUTH']
file_deleted = ["dele"]
file_uploaded = ["created"]
file_downloaded = ["sent"]
if obj in user_logged_on:
return "user_logged_on"
if obj in file_deleted:
return "file_deleted"
if obj in file_uploaded:
return "file_uploaded"
if obj in file_downloaded:
return "file_downloaded"
else:
return "other"
testfolder = '/Usr/a/asdf/asf/asdfas/asdfasdf/'
user = 'a'
def parsehomefolder(user, virtualfolder):
userSplit = f'/{user}/'
home = virtualfolder.split(userSplit)[0] + userSplit
print(home)
return home
a = parsehomefolder(user, testfolder)