Graph lookup block
All checks were successful
Build and Push Docker Image / test (push) Successful in 10s
Build and Push Docker Image / build_and_push (push) Successful in 20s

This commit is contained in:
admin user 2025-03-12 16:14:28 +00:00
parent 4ca7b2486f
commit 6d94849e2d
8 changed files with 260 additions and 23 deletions

View File

@ -1 +1,3 @@
**Hello world!!!**
# MLG Lookup
Lookup Graph API

View File

@ -1,21 +1,70 @@
@flowx_block
def example_function(request: dict) -> dict:
import logging
from pre_processing import process_record
from processing import processing
from post_processing import post_processing, post_processing_duplicate
# Processing logic here...
# Configure logging
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s [%(levelname)s] %(name)s - %(message)s",
)
logger = logging.getLogger(__name__)
return {
"meta_info": [
{
"name": "created_date",
"type": "string",
"value": "2024-11-05"
}
],
"fields": [
{
"name": "",
"type": "",
"value": ""
}
]
# API URLs
# base_url = "http://localhost:8080/api/v1/clusters"
# In-cluster Kubernetes service DNS name for the MLG graph API; the
# commented-out localhost URL above is the local-development equivalent.
base_url = "http://centurion-mlgraph.default.svc.cluster.local:8080/api/v1/clusters"
# Name of the target cluster definition on the graph service.
cluster_name = "cluster_deviceid_email_fuzzydevice_direct_new"
# POST endpoint for ingesting a record; GET endpoint for fetching one record by id.
url_post = f"{base_url}/{cluster_name}/records"
url_get = f"{base_url}/{cluster_name}/record"
def __main__(
    application_key: str,
    application_timestamp: str,
    deviceid: str,
    fuzzydeviceid: str,
    application_email_address: str,
    hd_score_m1: float
) -> dict:
    """Ingest one application record into the MLG cluster graph and return
    its post-processed view.

    The record is first POSTed to the cluster ingest endpoint
    (pre-processing). If the service flags it as a duplicate, the stored
    record is fetched and run through the duplicate post-processing path;
    otherwise the pre-processing output is post-processed directly.

    Args:
        application_key: Unique identifier for the application.
        application_timestamp: Timestamp of the application.
        deviceid: Device identifier used in the application.
        fuzzydeviceid: Anonymized/hashed device identifier.
        application_email_address: Email address on the application.
        hd_score_m1: HD fraud score M1 value.

    Returns:
        dict with ``application_key``, ``hd_score_m1`` and
        ``connected_application_keys``, or ``{"error": ...}`` when
        pre-processing fails.
    """
    data = {
        "application_key": application_key,
        "application_timestamp": application_timestamp,
        "deviceid": deviceid,
        "fuzzydeviceid": fuzzydeviceid,
        "application_email_address": application_email_address,
        "hd_score_m1": hd_score_m1,
    }

    # Step 1: Pre-process the record (POST to the cluster ingest endpoint).
    pre_process_output = process_record(url_post, data)
    if not pre_process_output or "error" in pre_process_output:
        logger.error("Pre-processing failed.")
        return {"error": "Pre-processing failed"}

    # The service reports duplication as the string "true"/"false";
    # normalise case and whitespace before comparing.
    is_duplicate = str(pre_process_output.get("is_duplicate", "false")).strip().lower() == "true"
    logger.info("is_duplicate=%s", is_duplicate)  # was a bare debug print()

    # Step 2: Run the appropriate processing path.
    if is_duplicate:
        process_output = processing(url_get, pre_process_output)
        post_process_output = post_processing_duplicate(process_output, url_get)
    else:
        post_process_output = post_processing(pre_process_output, url_get)

    # Conditionally override the keys if they are missing, None, or the
    # string "null". (The original only checked None, contradicting the
    # stated intent of also covering the string "null".)
    current_app_key = post_process_output.get("application_key")
    if current_app_key is None or current_app_key == "null":
        post_process_output["application_key"] = application_key
    current_hd_score = post_process_output.get("hd_score_m1")
    if current_hd_score is None or current_hd_score == "null":
        post_process_output["hd_score_m1"] = hd_score_m1

    logger.info("Post Processed Record")
    logger.info("%s", post_process_output)  # lazy %-formatting, not an eager f-string
    return post_process_output

77
post_processing.py Normal file
View File

@ -0,0 +1,77 @@
import requests
import logging
# Configure logging
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s [%(levelname)s] %(name)s - %(message)s",
)
logger = logging.getLogger(__name__)
def fetch_data(url, record_id):
    """Fetches data for a given record_id from the API."""
    headers = {"Content-Type": "application/json"}
    try:
        resp = requests.get(f"{url}/{record_id}", headers=headers)
        # Anything other than HTTP 200 is treated as a miss.
        if resp.status_code != 200:
            logger.error(f"Failed to fetch {record_id}: {resp.status_code}")
            return None
        return resp.json()
    except requests.RequestException as e:
        logger.error(f"Error fetching {record_id}: {e}")
        return None
def post_processing_duplicate(data, url):
    """Extract application_key / hd_score_m1 for a duplicate record.

    Fetches the main record identified by ``data['record_id']`` plus every
    record listed in ``data['connected_records']``, and collects the
    non-empty application_key of each connected record.

    Args:
        data: Dict containing ``record_id`` and ``connected_records``.
        url: Base GET endpoint; the record id is appended as a path segment.

    Returns:
        dict with ``application_key``, ``hd_score_m1`` and
        ``connected_application_keys``.
    """
    record_id = data.get('record_id')
    connected_records = data.get('connected_records', [])
    # Fetch main record data
    record_data = fetch_data(url, record_id)
    application_key = None
    hd_score_m1 = None
    if record_data:
        application_key = record_data['data'].get('application_key')
        hd_score_m1 = record_data['data'].get('hd_score_m1')
    # Fetch application_key for connected records.
    # Fetch each record exactly once: the original comprehension called
    # fetch_data() up to three times per record, tripling API traffic and
    # risking a crash if a repeat fetch returned None.
    connected_keys = []
    for rec_id in connected_records:
        rec_data = fetch_data(url, rec_id)
        if rec_data:
            key = rec_data['data'].get('application_key')
            if key:
                connected_keys.append(key)
    return {
        "application_key": application_key,
        "hd_score_m1": hd_score_m1,
        "connected_application_keys": connected_keys
    }
def post_processing(data, url):
    """Extract application_key / hd_score_m1 for a non-duplicate record.

    Fetches the main record identified by ``data['hd_key']`` plus every
    record in ``data['matches']`` (the hd_key itself is included in the
    connected set), and collects the non-empty application_key of each.

    Args:
        data: Dict containing ``hd_key`` and optionally ``matches``.
        url: Base GET endpoint; the record id is appended as a path segment.

    Returns:
        dict with ``application_key``, ``hd_score_m1`` and
        ``connected_application_keys``.
    """
    record_id = data.get('hd_key')
    connected_records = data.get('matches', []) + [record_id]  # Matches + hd_key
    # Fetch main record data
    record_data = fetch_data(url, record_id)
    application_key = None
    hd_score_m1 = None
    if record_data:
        application_key = record_data['data'].get('application_key')
        hd_score_m1 = record_data['data'].get('hd_score_m1')
    # Fetch application_key for connected records.
    # Fetch each record exactly once: the original comprehension called
    # fetch_data() up to three times per record, tripling API traffic and
    # risking a crash if a repeat fetch returned None.
    connected_keys = []
    for rec_id in connected_records:
        rec_data = fetch_data(url, rec_id)
        if rec_data:
            key = rec_data['data'].get('application_key')
            if key:
                connected_keys.append(key)
    return {
        "application_key": application_key,
        "hd_score_m1": hd_score_m1,
        "connected_application_keys": connected_keys
    }

28
pre_processing.py Normal file
View File

@ -0,0 +1,28 @@
import requests
import logging
# Configure logging
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s [%(levelname)s] %(name)s - %(message)s",
)
logger = logging.getLogger(__name__)
def process_record(url, record):
    """POST a single record to the cluster ingest endpoint.

    Args:
        url: Full URL of the records POST endpoint.
        record: Dict payload; sent as ``{"record": record}``.

    Returns:
        Parsed JSON response on HTTP 200, otherwise ``{"error": <message>}``.
    """
    # Read the key with .get() so a record lacking 'application_key' cannot
    # raise KeyError while logging — in the original, that KeyError inside
    # the except handler would have masked the real failure.
    record_key = record.get('application_key')
    try:
        response = requests.post(
            url,
            json={"record": record},
            headers={"Content-Type": "application/json"}
        )
        if response.status_code == 200:
            output = response.json()
            # Lazy %-args: the message is only formatted if INFO is enabled.
            logger.info("Pre Processed record: %s - Response: %s", record_key, output)
            return output
        logger.error(
            "Failed to process record %s. Status: %s, Response: %s",
            record_key, response.status_code, response.text
        )
        return {"error": response.text}
    except Exception as e:
        # Broad catch is deliberate: this is a best-effort boundary that
        # must always return a dict rather than propagate.
        logger.error("Error processing record %s: %s", record_key, e)
        return {"error": str(e)}

28
processing.py Normal file
View File

@ -0,0 +1,28 @@
import requests
import logging
# Configure logging
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s [%(levelname)s] %(name)s - %(message)s",
)
logger = logging.getLogger(__name__)
def processing(url, data):
    """Fetch the stored record identified by data['hd_key'] from the API.

    Returns the parsed JSON body on HTTP 200, otherwise {"error": ...}.
    """
    record_id = data.get('hd_key')
    try:
        resp = requests.get(
            f"{url}/{record_id}",
            headers={"Content-Type": "application/json"}
        )
        # Non-200 responses are surfaced as an error dict, not raised.
        if resp.status_code != 200:
            logger.error(f"Failed to process record {record_id}. Status: {resp.status_code}, Response: {resp.text}")
            return {"error": resp.text}
        payload = resp.json()
        logger.info(f"Processed record: {record_id} - Response: {payload}")
        return payload
    except Exception as e:
        logger.error(f"Error processing record {record_id}: {str(e)}")
        return {"error": str(e)}

View File

@ -1 +1,32 @@
{}
{
"$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"properties": {
"application_key": {
"type": ["string", "null"],
"description": "Unique identifier for the application."
},
"application_timestamp": {
"type": ["string", "null"],
"description": "Timestamp of the application."
},
"deviceid": {
"type": ["string", "null"],
"description": "Unique identifier for the device used in the application."
},
"fuzzydeviceid": {
"type": ["string", "null"],
"description": "Anonymized or hashed identifier for the device."
},
"application_email_address": {
"type": ["string", "null"],
"description": "Email address associated with the application."
},
"hd_score_m1": {
"type": ["number", "null"],
"description": "HD Score M1 value associated with the application."
}
},
"required": []
}

View File

@ -1 +1 @@
{}
requests == 2.32.3

View File

@ -1 +1,23 @@
{}
{
"$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"properties": {
"application_key": {
"type": ["string", "null"],
"description": "Unique identifier for the application."
},
"hd_score_m1": {
"type": ["number", "null"],
"description": "HD fraud score M1."
},
"connected_application_keys": {
"type": "array",
"items": {
"type": ["string", "null"]
},
"description": "List of connected application keys associated with this application."
}
},
"required": []
}