Upload files to "/"
All checks were successful
Build and Push Docker Image / test (push) Successful in 11s
Build and Push Docker Image / build_and_push (push) Successful in 24s

admin user 2025-07-11 15:10:24 +00:00
parent a0e78fac57
commit 88b296a25a
2 changed files with 45 additions and 48 deletions

View File

@@ -2,8 +2,7 @@ import logging
 import json
 import os
 from pre_processing import process_record
-from processing import processing
-from post_processing import post_processing, post_processing_duplicate
+from post_processing import post_processing
 
 # Configure logging
 logging.basicConfig(
@@ -44,6 +43,7 @@ def __main__(
     cluster_name = get_cluster_name(namespace)
     url_post = f"{base_url}/{cluster_name}/records"
     url_get = f"{base_url}/{cluster_name}/record"
+    url_get_for_graph = f"{base_url}/{cluster_name}/graph"
 
     data = {
         "application_key": application_key,
@@ -65,15 +65,15 @@ def __main__(
     # post_process_output = post_processing(process_output, url_get)
     # is_duplicate = pre_process_output.get("record_id") in pre_process_output.get("connected_records", [])
     is_duplicate = str(pre_process_output.get("is_duplicate", "false")).strip().lower() == "true"
-    print(is_duplicate)
+    # print(is_duplicate)
 
     # Run the appropriate processing function
-    if is_duplicate:
-        process_output = processing(url_get, pre_process_output)
-        post_process_output = post_processing_duplicate(process_output, url_get)
-    else:
-        post_process_output = post_processing(pre_process_output, url_get)
+    # if is_duplicate:
+    #     process_output = processing(url_get, pre_process_output)
+    #     post_process_output = post_processing_duplicate(process_output, url_get)
+    # else:
+    #     post_process_output = post_processing(pre_process_output, url_get)
+    post_process_output = post_processing(pre_process_output, url_get, url_get_for_graph)
 
     # Conditionally override the keys if they are missing or None (or the string "null")
     current_app_key = post_process_output.get("application_key")
     if current_app_key is None:
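
For orientation, here is a minimal sketch of the updated call flow in this entrypoint after the change. The base_url and cluster_name values and the pre_process_output payload are illustrative assumptions, not taken from the repository:

from post_processing import post_processing

# Illustrative values only; the real ones come from the environment/namespace.
base_url = "http://record-service"
cluster_name = "demo-cluster"

url_get = f"{base_url}/{cluster_name}/record"
url_get_for_graph = f"{base_url}/{cluster_name}/graph"  # new graph endpoint

# Assumed shape of the pre-processing output consumed by the new path.
pre_process_output = {"hd_key": "rec-1", "cluster_id": "c-42"}

# The duplicate/non-duplicate branch is commented out above: every record
# now takes the single graph-aware post-processing path.
post_process_output = post_processing(pre_process_output, url_get, url_get_for_graph)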

View File

@@ -22,53 +22,50 @@ def fetch_data(url, record_id):
         logger.error(f"Error fetching {record_id}: {e}")
         return None
 
-def post_processing_duplicate(data, url):
-    """Main function to fetch and extract required fields."""
-    record_id = data.get('record_id')
-    connected_records = data.get('connected_records', [])
+def fetch_all_nodes_data(url, cluster_id):
+    try:
+        response = requests.get(f"{url}/{cluster_id}", headers={"Content-Type": "application/json"})
+        if response.status_code == 200:
+            output = response.json()
+            return output
+        else:
+            logger.error(f"Failed to fetch {cluster_id}: {response.status_code}")
+            return None
+    except requests.RequestException as e:
+        logger.error(f"Error fetching {cluster_id}: {e}")
+        return None
-    # Fetch main record data
-    record_data = fetch_data(url, record_id)
-    application_key = None
-    hd_score_m1 = None
-    if record_data:
-        application_key = record_data['data'].get('application_key')
-        hd_score_m1 = record_data['data'].get('hd_score_m1')
-    # Fetch application_key for connected records
-    connected_keys = [
-        fetch_data(url, rec_id)['data'].get('application_key')
-        for rec_id in connected_records
-        if fetch_data(url, rec_id) and fetch_data(url, rec_id)['data'].get('application_key')
-    ]
-    return {
-        "application_key": application_key,
-        "hd_score_m1": hd_score_m1,
-        "connected_application_keys": connected_keys
-    }
 
-def post_processing(data, url):
+def post_processing(data, url, url_get_for_graph):
     """Main function to fetch and extract required fields."""
     record_id = data.get('hd_key')
-    connected_records = data.get('matches', []) + [record_id]  # Matches + hd_key
+    # connected_records = data.get('matches', []) + [record_id]  # Matches + hd_key
+    cluster_id = data.get('cluster_id')
 
     # Fetch main record data
-    record_data = fetch_data(url, record_id)
     application_key = None
     hd_score_m1 = None
-    if record_data:
+    if record_id:
+        record_data = fetch_data(url, record_id)
+        if record_data and 'data' in record_data:
             application_key = record_data['data'].get('application_key')
             hd_score_m1 = record_data['data'].get('hd_score_m1')
 
     # Fetch application_key for connected records
-    connected_keys = [
-        fetch_data(url, rec_id)['data'].get('application_key')
-        for rec_id in connected_records
-        if fetch_data(url, rec_id) and fetch_data(url, rec_id)['data'].get('application_key')
-    ]
+    connected_keys = []
+    if cluster_id:
+        response_obj = fetch_all_nodes_data(url_get_for_graph, cluster_id)
+        if response_obj and "nodes" in response_obj:
+            # Extract and filter application_keys
+            all_keys = [
+                node.get('data', {}).get('application_key')
+                for node in response_obj['nodes']
+                if node.get('data') and node['data'].get('application_key') is not None
+            ]
+            # Exclude the current application_key and limit to 500 entries
+            connected_keys = [
+                key for key in all_keys if key != application_key
+            ][:500]
 
     return {
         "application_key": application_key,