#22 (Merged)
merged 2 commits on May 24, 2024
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -6,6 +6,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
[markdownlint](https://dlaa.me/markdownlint/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [1.1.1] - 2024-05-24

### Changed in 1.1.1

- Simplify search output

## [1.1.0] - 2023-12-21

### Changed in 1.1.0
47 changes: 12 additions & 35 deletions Python/Tasks/Searching/Search5kFutures.py
@@ -6,10 +6,11 @@
import os
import sys
import time
from collections import Counter

from senzing import (
G2BadInputException,
G2Engine,
G2EngineFlags,
G2Exception,
G2RetryableException,
G2UnrecoverableException,
@@ -26,7 +27,11 @@ def mock_logger(level, exception, error_rec=None):

def search_record(engine, rec_to_search):
search_response = bytearray()
engine.searchByAttributes(rec_to_search, search_response)
engine.searchByAttributes(
rec_to_search,
search_response,
G2EngineFlags.G2_SEARCH_BY_ATTRIBUTES_MINIMAL_ALL,
)
return search_response


@@ -52,43 +57,15 @@ def record_stats(success, error, prev_time):


def search_results(result, record, out_file):
response_dict = json.loads(result.decode())
response_str = result.decode()
response_dict = json.loads(response_str)
response_entities = response_dict.get("RESOLVED_ENTITIES", None)

out_file.write("-" * 100 + "\n")
if response_entities:
results_str = []
results_count = Counter(
k
for entity in response_entities
for k in entity.keys()
if k.startswith("MATCH_INFO")
)
results_str.append(f'\n{results_count["MATCH_INFO"]} results for {record}')

for idx, entity in enumerate(response_entities, start=1):
results_str.append(f"\n Result {idx}")
results_str.append(
"\n Entity ID: "
f" {entity['ENTITY']['RESOLVED_ENTITY']['ENTITY_ID']}"
)
results_str.append(
"\n Entity name: "
f" {entity['ENTITY']['RESOLVED_ENTITY']['ENTITY_NAME']}"
)
results_str.append(
f'\n Match key: {entity["MATCH_INFO"]["MATCH_KEY"]}'
)
results_str.append("\n Records summary: ")
for record_summary in entity["ENTITY"]["RESOLVED_ENTITY"]["RECORD_SUMMARY"]:
results_str.append(
f'{record_summary["DATA_SOURCE"]}: {record_summary["RECORD_COUNT"]}'
+ " "
)
results_str.append("\n")

out_file.write("".join(results_str))
out_file.write(f"Result for {record.rstrip()}:\n\n{response_str}\n\n")
else:
out_file.write(f"\nNo result for {record}\n")
out_file.write(f"No result for {record}\n\n")


def futures_search(engine, input_file, output_file):
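For reference, a minimal sketch of the search call these examples now make. It assumes an already initialized `G2Engine` instance; the `minimal_search` helper name and the attributes in the usage note are placeholders for illustration, not part of this diff.

```python
import json

from senzing import G2Engine, G2EngineFlags


def minimal_search(engine: G2Engine, attributes: dict) -> dict:
    """Search with the minimal-output flag, as the updated examples do."""
    response = bytearray()
    engine.searchByAttributes(
        json.dumps(attributes),
        response,
        G2EngineFlags.G2_SEARCH_BY_ATTRIBUTES_MINIMAL_ALL,
    )
    # The engine fills the bytearray with a JSON document
    return json.loads(response.decode())


# Usage, assuming `engine` was initialized elsewhere with the Senzing configuration:
#   entities = minimal_search(engine, {"NAME_FULL": "Robert Smith"})
#   print(json.dumps(entities, indent=2))
```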
60 changes: 16 additions & 44 deletions Python/Tasks/Searching/SearchRecords.py
@@ -3,10 +3,11 @@
import json
import os
import sys
from collections import Counter

from senzing import (
G2BadInputException,
G2Engine,
G2EngineFlags,
G2Exception,
G2RetryableException,
G2UnrecoverableException,
@@ -42,59 +43,30 @@ def mock_logger(level, exception, error_rec=None):
def searcher(engine):
for rec_to_search in search_records:
try:
rec_str = json.dumps(rec_to_search)
search_response = bytearray()
engine.searchByAttributes(json.dumps(rec_to_search), search_response)
engine.searchByAttributes(
rec_str,
search_response,
G2EngineFlags.G2_SEARCH_BY_ATTRIBUTES_MINIMAL_ALL,
)
except (G2BadInputException, json.JSONDecodeError) as err:
mock_logger("ERROR", err, rec_to_search)
mock_logger("ERROR", err, rec_str)
except G2RetryableException as err:
mock_logger("WARN", err, rec_to_search)
mock_logger("WARN", err, rec_str)
except (G2UnrecoverableException, G2Exception) as err:
mock_logger("CRITICAL", err, rec_to_search)
mock_logger("CRITICAL", err, rec_str)
raise
else:
response_dict = json.loads(search_response.decode())
response_str = search_response.decode()
response_dict = json.loads(response_str)
response_entities = response_dict.get("RESOLVED_ENTITIES", None)

print("-" * 100)
if response_entities:
results_str = []
results_count = Counter(
k
for entity in response_entities
for k in entity.keys()
if k.startswith("MATCH_INFO")
)
results_str.append(
f'\n{results_count["MATCH_INFO"]} results for'
f" {json.dumps(rec_to_search)}\n"
)

for idx, result in enumerate(response_entities, start=1):
results_str.append(f"\n Result {idx}")
results_str.append(
"\n Entity ID: "
f" {result['ENTITY']['RESOLVED_ENTITY']['ENTITY_ID']}"
)
results_str.append(
"\n Entity name: "
f" {result['ENTITY']['RESOLVED_ENTITY']['ENTITY_NAME']}"
)
results_str.append(
f'\n Match key: {result["MATCH_INFO"]["MATCH_KEY"]}'
)
results_str.append("\n Records summary: ")
for record_summary in result["ENTITY"]["RESOLVED_ENTITY"][
"RECORD_SUMMARY"
]:
results_str.append(
f'{record_summary["DATA_SOURCE"]}:'
f' {record_summary["RECORD_COUNT"]}'
+ " "
)
results_str.append("\n")

print("".join(results_str))
print(f"Result for {rec_str}:\n\n{response_str}\n")
else:
print(f"\nNo result for {json.dumps(rec_to_search)}\n")
print(f"No result for {rec_str}\n")


try:
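The interactive example applies the same simplification: rather than counting matches and formatting a per-entity summary, the response is decoded once and printed verbatim. A minimal sketch of that output branch, assuming a raw `bytearray` response from `searchByAttributes` (the sample record and empty response below are made up):

```python
import json


def print_search_result(rec_str, search_response):
    # Mirrors the simplified output: print the raw JSON document when
    # anything resolved, otherwise report that there was no result.
    response_str = search_response.decode()
    response_dict = json.loads(response_str)

    print("-" * 100)
    if response_dict.get("RESOLVED_ENTITIES"):
        print(f"Result for {rec_str}:\n\n{response_str}\n")
    else:
        print(f"No result for {rec_str}\n")


# Hypothetical empty response exercises the "No result" branch.
print_search_result('{"NAME_FULL": "Jane Doe"}', bytearray(b"{}"))
```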