New network panel #560

Open · wants to merge 7 commits into master
Empty file added pacs_connection/__init__.py
Empty file.
1 change: 1 addition & 0 deletions pacs_connection/config.json
@@ -0,0 +1 @@
{"configured_pacs": [{"IP ADDRESS": "DicomServer.co.uk", "PORT": "104", "AE TITLE": "MEDTECH", "Description": "Public Server", "Retrievel Protocol": "DICOM", "Preferred Transfer Syntax": "Implicit VR Little Endian"}, {"IP ADDRESS": "127.0.0.6", "PORT": "4242", "AE TITLE": "Orthanc6", "Description": "Testing Server", "Retrievel Protocol": "DICOM", "Preferred Transfer Syntax": "Implicit VR Little Endian"}, {"IP ADDRESS": "127.0.0.7", "PORT": "4242", "AE TITLE": "Orthanc7-Testing", "Description": "Testing Server", "Retrievel Protocol": "DICOM", "Preferred Transfer Syntax": "Implicit VR Little Endian"}, {"IP ADDRESS": "127.0.3.9", "PORT": "4247", "AE TITLE": "Orthanc23", "Description": "Testing Server", "Retrievel Protocol": "DICOM", "Preferred Transfer Syntax": "Implicit VR Little Endian"}, {"IP ADDRESS": "127.0.0.4", "PORT": "3287", "AE TITLE": "Best-Test", "Description": "Testing Purpose", "Retrievel Protocol": "DICOM", "Preferred Transfer Syntax": "Implicit VR Little Endian"}, {"IP ADDRESS": "127.1.0.7", "PORT": "4248", "AE TITLE": "Orthanc21-Testing", "Description": "Testing Server", "Retrievel Protocol": "DICOM", "Preferred Transfer Syntax": "Implicit VR Little Endian"}, {"IP ADDRESS": "127.0.5.6", "PORT": "7890", "AE TITLE": "Orthanc45", "Description": "Testing Server", "Retrievel Protocol": "DICOM", "Preferred Transfer Syntax": "Implicit VR Little Endian"}], "menu_options": {"advanced_settings": ["Query Timeout"], "Query Timeout": ["5 sec", "10 sec", "15 sec", "20 sec", "25 sec", "30 sec"]}, "default_settings": {"Query Timeout": "20 sec"}}
12 changes: 12 additions & 0 deletions pacs_connection/constants.py
@@ -0,0 +1,12 @@

COLS = ["PatientName", "PatientID", "StudyInstanceUID", "SeriesInstanceUID", "StudyDate", "StudyTime", "AccessionNumber", "Modality", "PatientBirthDate", "PatientSex", "PatientAge", "IssuerOfPatientID", "Retrieve AE Title", "StudyDescription"]
INV_PORT = 5050
INV_AET = 'INVESALIUS'
INV_HOST = 'localhost'
READ_MAPPER = {
    'Patient ID': 'PatientID',
    'Patient Name': 'PatientName',
    'StudyInstanceUID': 'StudyInstanceUID',
}
CONFIG_FILE = 'pacs_connection/config.json'  # forward slashes work on all platforms
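As an aside, a small hypothetical helper showing how READ_MAPPER could translate UI column labels into DICOM keywords:

from pacs_connection.constants import READ_MAPPER

def to_dicom_keyword(ui_label: str) -> str:
    # Map a UI label such as 'Patient ID' to its DICOM keyword, falling back to the label itself.
    return READ_MAPPER.get(ui_label, ui_label)

print(to_dicom_keyword('Patient Name'))  # -> PatientName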

Empty file.
32 changes: 32 additions & 0 deletions pacs_connection/dicom_client/cecho.py
@@ -0,0 +1,32 @@
from dataclasses import dataclass, field
from pynetdicom import AE
from pynetdicom.sop_class import Verification
from pynetdicom.association import Association

@dataclass
class CEcho:
ip_address: str
port: int = 4242
ae: AE = field(default_factory=AE)
assoc: Association = field(init=False)

    def __post_init__(self):
        # Request the Verification SOP class and open the association up front.
        self.ae.add_requested_context(Verification)
        self.assoc = self.ae.associate(self.ip_address, self.port)

def verify(self) -> bool:
if self.assoc.is_established:
status = self.assoc.send_c_echo()

if status:
print('C-ECHO request status: 0x{0:04x}'.format(status.Status))
self.assoc.release()
return True
else:
print('Connection timed out, was aborted or received invalid response')
self.assoc.release()
return False
else:
print('Association rejected, aborted or never connected')
return False
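For reviewers, a quick usage sketch of CEcho; the address and port are placeholders for whatever test PACS is available:

# C-ECHO is the DICOM 'ping': it only checks that the peer accepts an association.
echo = CEcho('127.0.0.1', 4242)
print('PACS reachable' if echo.verify() else 'PACS unreachable')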
167 changes: 167 additions & 0 deletions pacs_connection/dicom_client/cfind.py
@@ -0,0 +1,167 @@
from dataclasses import dataclass, field
from typing import Any
from pydicom.dataset import Dataset


from pynetdicom import AE
from pynetdicom.sop_class import PatientRootQueryRetrieveInformationModelFind

from pacs_connection.constants import COLS



def date_formatter(s):
    # DICOM DA values arrive as YYYYMMDD; render them as YYYY-MM-DD.
    year = s[0:4]
    month = s[4:6]
    day = s[6:8]
    return f"{year}-{month}-{day}"

def time_formatter(s):
    # DICOM TM values arrive as HHMMSS(.ffffff); render them as zero-padded HH:MM:SS.
    try:
        hour = int(s[0:2])
        minute = int(s[2:4])
        second = int(s[4:6])
        return f"{hour:02d}:{minute:02d}:{second:02d}"
    except (ValueError, TypeError) as e:
        print("ERROR:", e)
        return s

def serializer(obj):
    # obj is a list of dicts; normalise every Date/Time field in place.
    for dict_item in obj:
        for key, value in dict_item.items():
            if 'Date' in key:
                dict_item[key] = date_formatter(value)
            elif 'Time' in key:
                dict_item[key] = time_formatter(value)
    return obj

@dataclass
class CFind:
"""
    TODO: Search via patient name/ID or accession number and fetch details such as
    patient ID, patient name, study date, and date of birth.
"""
host: str
port: int = 4242
ae: AE = field(default_factory=AE)
mapper: dict[str, Any] = field(init=False)

def __post_init__(self):
self.mapper = {
"PATIENT": self.create_patient_identifier,
"STUDY": self.create_study_identifier,
"SERIES": self.create_series_identifier,
}

    def make_request(self, **kwargs) -> list:
        self.ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        self.ae.acse_timeout = 180  # set the 3-minute timeout before associating
        self.assoc = self.ae.associate(self.host, self.port)
        final_result = []
        if self.assoc.is_established:
            final_result = self.execute_search(**kwargs)
            self.assoc.release()
        return final_result

def create_patient_identifier(self, dataset : Dataset, **kwargs) -> Dataset:
dataset.PatientName = kwargs.get('PatientName', '*')
dataset.PatientID = kwargs.get('PatientID', '*')
dataset.PatientBirthDate = kwargs.get('PatientBirthDate', '19000101-99991231')
return dataset

def create_study_identifier(self, dataset: Dataset, **kwargs) -> Dataset:
dataset = self.create_patient_identifier(dataset, **kwargs)
dataset.StudyInstanceUID = kwargs.get('StudyInstanceUID', '*')
dataset.StudyDate = kwargs.get('StudyDate', '19000101-99991231')
dataset.AccessionNumber = kwargs.get('AccessionNumber', '*')
return dataset

def create_series_identifier(self, dataset: Dataset, **kwargs) -> Dataset:
dataset = self.create_study_identifier(dataset, **kwargs)
dataset.SeriesInstanceUID = kwargs.get('SeriesInstanceUID', '*')
dataset.Modality = kwargs.get('Modality', '*')
return dataset

    def create_identifier(self, dataset: Dataset = None, **kwargs) -> Dataset:
        if dataset is None:
            dataset = Dataset()
        qr_lvl = kwargs.get('QueryRetrieveLevel', 'PATIENT')
        dataset.QueryRetrieveLevel = qr_lvl
        return self.mapper[qr_lvl](dataset, **kwargs)


    def get_user_input(self, **kwargs) -> dict:
        return dict(kwargs)


    def decode_response(self, identifier: Dataset) -> dict:
        if not identifier:
            return {}
        d = {}
        for tag in COLS:
            if tag in identifier:
                try:
                    d[tag] = identifier.get(tag)
                except Exception as e:
                    print(e)
        return d

    def execute_search(self, **kwargs) -> list:
        dataset = Dataset()
        kwargs['QueryRetrieveLevel'] = 'PATIENT'
        patient_output = self.send_cfind(dataset, **kwargs)  # List[Dict]
        final_result = []
        for p_op in patient_output:
            n_op = p_op.copy()
            new_dataset = Dataset()
            nkwargs = kwargs.copy()
            nkwargs['QueryRetrieveLevel'] = 'STUDY'
            if n_op.get('PatientID'):
                nkwargs['PatientID'] = n_op['PatientID']
            elif n_op.get('PatientName'):
                nkwargs['PatientName'] = n_op['PatientName']

            study_output = self.send_cfind(new_dataset, **nkwargs)
            for s_op in study_output:
                f_op = n_op | s_op
                # append non-empty study hits, skipping consecutive duplicates
                if s_op and (not final_result or final_result[-1] != f_op):
                    final_result.append(f_op)
        return serializer(final_result)


    def send_cfind(self, dataset: Dataset = None, **kwargs) -> list:
        if dataset is None:
            dataset = Dataset()  # avoid a shared mutable default argument
        identifier = self.create_identifier(dataset, **kwargs)
        responses = None
        retries = 0
        while retries < 5:
            try:
                responses = self.assoc.send_c_find(identifier, PatientRootQueryRetrieveInformationModelFind)
                break
            except RuntimeError:
                retries += 1
                self.assoc = self.ae.associate(self.host, self.port)
        if responses is None:
            return []

        output = []
        for status, res_identifier in responses:
            if status and res_identifier:
                res = self.decode_response(res_identifier)
                if res:
                    output.append(res)
            else:
                print('Connection timed out, was aborted or received invalid response')
        return output

if __name__ == '__main__':
host = ['DicomServer.co.uk', '184.73.255.26']
port = [104, 11112]
x = 0
cfind = CFind(host[x], port[x])
cfind.make_request()
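A hedged sketch of a filtered query against this class; the server address and patient name are illustrative:

cfind = CFind('127.0.0.1', 4242)
# make_request opens the association, then walks PATIENT results down to STUDY level.
results = cfind.make_request(PatientName='DOE^JOHN')
for row in results:
    print(row.get('PatientID'), row.get('StudyDate'), row.get('StudyDescription'))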
165 changes: 165 additions & 0 deletions pacs_connection/dicom_client/cstore.py
@@ -0,0 +1,165 @@
import time
import csv
import os
from dataclasses import dataclass, field
import concurrent.futures
from concurrent.futures import ThreadPoolExecutor
from pydicom.dataset import Dataset
from pydicom import dcmread
from pynetdicom import AE, StoragePresentationContexts


@dataclass
class CStore:
"""
Used to Upload DICOM Files to Remote PACS Server
TODO: Use Chunking Concept to Upload Large Files
TODO: Need to handle uploading large number of files
TODO: Add Custom Compressor Handler for JPEG Files/DICOM Files before Upload
TODO: Add FPS Custom Handler for MP4 Files
"""

host: str
port: int = 4242
ae: AE = field(default_factory=AE)

def __post_init__(self) -> None:
self.ae.requested_contexts = StoragePresentationContexts

    def send_c_store(self, path: str) -> bool:
        ds = dcmread(path)
        print(f"Patient name is: {ds.PatientName}, Study Description is: {ds.StudyDescription}, File Path is: {path}")
        # use a local association so concurrent uploads do not race on shared state
        assoc = self.ae.associate(self.host, self.port)
        success = False
        if assoc.is_established:
            status = assoc.send_c_store(ds)
            if status:
                status_str = '0x{0:04x}'.format(status.Status)
                if status_str != '0x0000':
                    print(f"File {path} was not uploaded successfully")
                    print(f"Error Cause is: {self.status_mapper(status_str)}")
                else:
                    success = True
            else:
                print('Connection timed out, was aborted or received invalid response')
            assoc.release()
        else:
            print('Association rejected, aborted or never connected')
        return success

@staticmethod
def status_mapper(status_code: str) -> str:
status_messages = {
'0x0000': 'Success',
'0x0001': 'Unrecognized Operation',
'0x0106': 'Duplicate SOP Instance',
'0x0122': 'Missing Attribute Value',
}
return status_messages.get(status_code, 'Unknown error')

    def upload_full_study(self, folder_path: str) -> bool:
        dummy_name = os.path.basename(folder_path).replace(" ", "_")
        failed = False
        os.makedirs('pacs_connection/upload_results', exist_ok=True)  # ensure the results directory exists
        with open(f'pacs_connection/upload_results/result_{dummy_name}.csv', mode='w', newline='') as csv_file:
            writer = csv.writer(csv_file)
            writer.writerow(['File Path', 'Status'])
            for file_name in os.listdir(folder_path):
                # build the full path and attempt the C-STORE
                full_path = os.path.join(folder_path, file_name)
                success = self.send_c_store(full_path)
                writer.writerow([full_path, 'Success' if success else 'Failed'])
                if not success:
                    failed = True
                    print('Connection timed out, was aborted or received invalid response')
        return not failed

    def upload_full_study_thread(self, folder_path: str) -> bool:
        dummy_name = os.path.basename(folder_path).replace(" ", "_")
        failed = False
        os.makedirs('pacs_connection/upload_results', exist_ok=True)
        with open(f'pacs_connection/upload_results/result_{dummy_name}.csv', mode='w', newline='') as csv_file:
            writer = csv.writer(csv_file)
            writer.writerow(['File Path', 'Status'])
            full_files_path = [os.path.join(folder_path, file_name)
                               for file_name in os.listdir(folder_path)]
            with ThreadPoolExecutor(max_workers=10) as executor:
                future_to_obj = {executor.submit(self.send_c_store, file_path): file_path
                                 for file_path in full_files_path}
                for future in concurrent.futures.as_completed(future_to_obj):
                    obj = future_to_obj[future]
                    try:
                        success = future.result()
                        writer.writerow([obj, 'Success' if success else 'Failed'])
                        if not success:
                            failed = True
                            print('Connection timed out, was aborted or received invalid response')
                    except Exception as exc:
                        print(f"{obj} generated an exception: {exc}")
                        failed = True
        # return True on full success, matching upload_full_study
        return not failed

def handle_failed_request(self, report_path: str) -> bool:
print('failed_request report_path', report_path)
failed = False
updated_rows = []
with open(report_path, 'r') as csv_file:
reader = csv.reader(csv_file)
            for row in reader:
if not row:
continue
file_path = row[0]
if file_path == 'File Path':
continue
# skip the file if it's already success
if row[1] == 'Success':
updated_rows.append(row)
continue
print(f"Uploading file: {file_path}")
success = self.send_c_store(file_path)

# If the upload was successful, update the status in the CSV file
if success:
row[1] = 'Success'
else:
row[1] = 'Failed'
failed = True
updated_rows.append(row)

# Write the updated row to the CSV file
with open(report_path, 'w', newline='') as csv_file:
fieldnames = ['File Path', 'Status']
writer = csv.DictWriter(
csv_file, delimiter=',', fieldnames=fieldnames)
writer.writeheader()
for row in updated_rows:
writer.writerow({'File Path': row[0], 'Status': row[1]})
return not failed

    def upload(self, path: str, folder=True) -> bool:
        if folder:
            dummy_name = os.path.basename(path).replace(" ", "_")
            report_file = f"pacs_connection/upload_results/result_{dummy_name}.csv"
            if not self.upload_full_study(path):
                count = 0
                while count < 2:
                    if not self.handle_failed_request(report_file):
                        count += 1
                        print(f"Retrying failed request {count} time(s)")
                    else:
                        return True
                return False
            return True  # the initial pass uploaded every file
        else:
            return self.send_c_store(path)
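Finally, a usage sketch for CStore; the server details and study folder are placeholders:

store = CStore('127.0.0.1', 4242)
# Upload a whole study folder; failed files are retried from the CSV report.
ok = store.upload('/data/dicom/study_001', folder=True)
print('Upload complete' if ok else 'Some files failed to upload')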