Updated output data model

parent 3c9469147c
commit 40dd852cd1
Deleted Gitea Actions demo workflow (19 lines):

@@ -1,19 +0,0 @@
-name: Gitea Actions Demo
-run-name: ${{ gitea.actor }} is testing out Gitea Actions 🚀
-on: [push]
-
-jobs:
-  Explore-Gitea-Actions:
-    runs-on: ubuntu-latest
-    steps:
-      - run: echo "🎉 The job was automatically triggered by a ${{ gitea.event_name }} event."
-      - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by Gitea!"
-      - run: echo "🔎 The name of your branch is ${{ gitea.ref }} and your repository is ${{ gitea.repository }}."
-      - name: Check out repository code
-        uses: actions/checkout@v4
-      - run: echo "💡 The ${{ gitea.repository }} repository has been cloned to the runner."
-      - run: echo "🖥️ The workflow is now ready to test your code on the runner."
-      - name: List files in the repository
-        run: |
-          ls ${{ gitea.workspace }}
-      - run: echo "🍏 This job's status is ${{ job.status }}."
inex.py (8 changes)
@@ -6,6 +6,8 @@ import datetime
 from tomllib import load
 from inexLogging import inexLog
 import inexConnect
+from inexDataModel import dataTemplate
+from inexDataProcessing import processData
 import json
 import decimal

@@ -43,15 +45,17 @@ class Inex:
         self.cursor = self.ic.connectDatabase(self, self.db, self.dbDriver, self.dbServer, self.dbDatabase, self.dbUser, self.dbPassword)

         self.data = self.ic.databaseQuery(self, self.cursor, self.dbQuery)
-        # print(f"returned data: {self.data}")
+        self.modifiedData = processData(self.data, dataTemplate)

+        # print(self.data)
+        print(self.modifiedData)

         # TODO: move this to its own function
         if self.useLog:
             self.il.warning(f"Writing to '{self.outputFile}'.")

         with open(self.outputFile, "w") as f:
-            json.dump(self.data, f, cls=Encoder)
+            json.dump(self.modifiedData, f, indent = 2, cls=Encoder)

 # TODO: Move this class to it's own file
 class Encoder(json.JSONEncoder):
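The diff cuts off before the body of Encoder, which inex.py hands to json.dump via cls=Encoder. Since inex.py imports decimal and datetime, a subclass along these lines is a plausible reading; treat it as a sketch under that assumption, not the repository's actual code.

import datetime
import decimal
import json

# Assumed sketch: convert Decimal and datetime values into JSON-friendly types.
class Encoder(json.JSONEncoder):
    def default(self, o):
        if isinstance(o, decimal.Decimal):
            return float(o)
        if isinstance(o, (datetime.datetime, datetime.date)):
            return o.isoformat()
        return super().default(o)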
inexConnect.py

@@ -18,7 +18,7 @@ def connectDatabase(self, lib, driver, server, database, user, password):

     return cursor

-def databaseQuery(self, cursor, query, args=(), one=False):
+def databaseQuery(self, cursor, query, args=()):
     if self.useLog:
         self.il.debug(f"Query:")
         self.il.debug(query)

@@ -38,5 +38,6 @@ def databaseQuery(self, cursor, query, args=(), one=False):

     cur.connection.close()
     if self.useLog:
-        self.il.debug(f"Databse connection closed")
-    return (r[0] if r else None) if one else r
+        self.il.debug(f"Database connection closed")
+    # return (r[0] if r else None) if one else r
+    return r
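processData (below) reads each row with row.get(...), so databaseQuery evidently returns rows as dicts keyed by column name. Its body lies outside this diff; the helper below is only an illustrative sketch of the usual DB-API pattern for building such dicts, and the name rows_as_dicts is made up, not part of the project.

# Illustrative only: map DB-API cursor rows to dicts so callers can use row.get(...).
def rows_as_dicts(cursor):
    columns = [col[0] for col in cursor.description]  # column names from the executed query
    return [dict(zip(columns, row)) for row in cursor.fetchall()]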
inexDataModel.py (new file, 48 lines)
@@ -0,0 +1,48 @@
def dataTemplate(**kwargs):
    """Expects the following keyword arguments:
    status, status_detail, status_code, file_size, file_path, file_virtual_path, file_name,
    guid, ref_id, prd_instance_id, product_guid, product_name, product_version, node_name,
    src_endpoint_port, src_endpoint_ip, dst_endpoint_port, dst_endpoint_ip, dst_endpoint_type,
    session_uid, bytes_out, transfer_time, time, user_type, user_domain, user_name and utype.
    """
    template = {
        "status": kwargs.get('status'),
        "status_detail": kwargs.get('status_detail'),
        "status_code": kwargs.get('status_code'),
        "file": {
            "size": kwargs.get('file_size'),
            "path": kwargs.get('file_path'),
            "virtual_path": kwargs.get('file_virtual_path'),
            "name": kwargs.get('file_name')
        },
        "guid": kwargs.get('guid'),
        "ref_id": kwargs.get('ref_id'),
        "prd_instance_id": kwargs.get('prd_instance_id'),
        "product_guid": kwargs.get('product_guid'),
        "product_name": kwargs.get('product_name'),
        "product_version": kwargs.get('product_version'),
        "node_name": kwargs.get('node_name'),
        "src_endpoint": {
            "port": kwargs.get('src_endpoint_port'),
            "ip": kwargs.get('src_endpoint_ip')
        },
        "dst_endpoint": {
            "port": kwargs.get('dst_endpoint_port'),
            "ip": kwargs.get('dst_endpoint_ip'),
            "type": kwargs.get('dst_endpoint_type')
        },
        "session": {
            "uid": kwargs.get('session_uid')
        },
        "bytes_out": kwargs.get('bytes_out'),
        "transfer_time": kwargs.get('transfer_time'),
        "time": kwargs.get('time'),
        "user": {
            "type": kwargs.get('user_type'),
            "domain": kwargs.get('user_domain'),
            "name": kwargs.get('user_name')
        },
        "utype": kwargs.get('utype')
    }

    return template
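For reference, a small usage sketch of dataTemplate: every value shown is invented for illustration, and any keyword left out simply comes back as None because of kwargs.get.

from inexDataModel import dataTemplate

# Hypothetical values for illustration; omitted keys default to None.
event = dataTemplate(
    status="success",
    file_name="report.csv",
    file_size=2048,
    src_endpoint_ip="10.0.0.5",
    src_endpoint_port=52100,
    user_name="jdoe",
    utype="STOR",
)
print(event["file"]["name"], event["user"]["name"], event["status_detail"])  # report.csv jdoe None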
inexDataProcessing.py (new file, 32 lines)
@@ -0,0 +1,32 @@
def processData(data, template):
    processedData = []
    for row in data:
        # print(f'Row: {row}')
        processedData.append(template(status=row.get(''),\
            status_detail=row.get(''),\
            status_code=row.get('ResultID'),\
            file_size=row.get('FileSize'),\
            file_path=row.get('PhysicalFolderName'),\
            file_virtual_path=row.get('VirtualFolderName'),\
            file_name=row.get('FileName'),\
            guid=row.get(''),\
            ref_id=row.get('ProtocolCommandID'),\
            prd_instance_id=row.get(''),\
            product_guid=row.get(''),\
            product_name=row.get(''),\
            product_version=row.get(''),\
            node_name=row.get(''),\
            src_endpoint_port=row.get('RemotePort'),\
            src_endpoint_ip=row.get('RemoteIP'),\
            dst_endpoint_port=row.get('LocalPort'),\
            dst_endpoint_ip=row.get('LocalIP'),\
            dst_endpoint_type=row.get('Protocol'),\
            session_uid=row.get(''),\
            bytes_out=row.get('BytesTransferred'),\
            transfer_time=row.get('TransferTime'),\
            time=row.get('Time_stamp'),\
            user_type=row.get(''),\
            user_domain=row.get('SiteName'),\
            user_name=row.get('Actor'),\
            utype=row.get('Command')))
    return processedData
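Putting the two new modules together with the inex.py change above: rows shaped like the database columns referenced here go through processData with dataTemplate and are then serialized to JSON. The sample row below is invented for illustration.

import json

from inexDataModel import dataTemplate
from inexDataProcessing import processData

# One invented row using the column names referenced above (ResultID, FileName, ...).
rows = [{
    "ResultID": 0,
    "FileSize": 1024,
    "FileName": "upload.bin",
    "RemoteIP": "192.0.2.10",
    "RemotePort": 51234,
    "LocalIP": "198.51.100.7",
    "LocalPort": 21,
    "Protocol": "FTP",
    "BytesTransferred": 1024,
    "TransferTime": 42,
    "Time_stamp": "2024-01-01T12:00:00",
    "SiteName": "example-site",
    "Actor": "jdoe",
    "Command": "STOR",
}]

print(json.dumps(processData(rows, dataTemplate), indent=2))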