Post code clean-up, before attempt to upgrade controls to FigureWidgets
@@ -9,10 +9,10 @@ from typing import List
from openpyxl import load_workbook, Workbook
from pathlib import Path
from backend.db.models import *
from backend.validators import PydSubmission, PydReagent, RSLNamer, PydSample, PydEquipment, PydTips
from backend.validators import PydSubmission, RSLNamer
import logging, re
from collections import OrderedDict
from tools import check_not_nan, convert_nans_to_nones, is_missing, check_key_or_attr
from tools import check_not_nan, is_missing, check_key_or_attr

logger = logging.getLogger(f"submissions.{__name__}")

@@ -483,17 +483,21 @@ class SampleParser(object):
lookup_samples[ii] = {}
else:
logger.warning(f"Match for {psample['id']} not direct, running search.")
for jj, lsample in enumerate(lookup_samples):
try:
check = lsample[merge_on_id] == psample['id']
except KeyError:
check = False
if check:
new = lsample | psample
lookup_samples[jj] = {}
break
else:
new = psample
# for jj, lsample in enumerate(lookup_samples):
# try:
# check = lsample[merge_on_id] == psample['id']
# except KeyError:
# check = False
# if check:
# new = lsample | psample
# lookup_samples[jj] = {}
# break
# else:
# new = psample
jj, new = next(((jj, lsample) for jj, lsample in enumerate(lookup_samples) if lsample[merge_on_id] == psample['id']), (-1, psample))
logger.debug(f"Assigning from index {jj} - {new}")
if jj >= 0:
lookup_samples[jj] = {}
if not check_key_or_attr(key='submitter_id', interest=new, check_none=True):
new['submitter_id'] = psample['id']
new = self.sub_object.parse_samples(new)
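Editor's note: the new line in this hunk folds the commented-out search loop into a single next() call over a generator expression, with (-1, psample) as the fallback when no lookup sample matches. A minimal standalone sketch of that idiom, on hypothetical data (the sample dicts and key name below are illustrative, not taken from the repository):

# next() with a generator expression and a default: return the first match, or the fallback.
lookup_samples = [{"submitter_id": "A1"}, {}, {"submitter_id": "B2"}]  # hypothetical lookup rows
psample = {"id": "B2", "well": "A3"}                                   # hypothetical parsed sample
merge_on_id = "submitter_id"

jj, found = next(
    ((jj, lsample) for jj, lsample in enumerate(lookup_samples)
     if lsample.get(merge_on_id) == psample["id"]),  # .get() tolerates already-blanked {} entries
    (-1, psample),                                   # default when nothing matches
)
if jj >= 0:
    lookup_samples[jj] = {}  # blank the consumed lookup entry, as the parser does above

The sketch uses lsample.get(merge_on_id) rather than the direct lsample[merge_on_id] indexing in the diff: the direct lookup raises KeyError for entries that lack the merge key, which the removed try/except branch used to swallow.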
@@ -546,7 +550,7 @@ class EquipmentParser(object):
logger.error(f"Error getting asset number for {input}: {e}")
return input

def parse_equipment(self) -> List[dict]:
def parse_equipment(self) -> Generator[dict, None, None]:
"""
Scrapes equipment from xl sheet

@@ -554,7 +558,6 @@ class EquipmentParser(object):
List[dict]: list of equipment
"""
# logger.debug(f"Equipment parser going into parsing: {pformat(self.__dict__)}")
output = []
# logger.debug(f"Sheets: {sheets}")
for sheet in self.xl.sheetnames:
ws = self.xl[sheet]
@@ -579,14 +582,11 @@ class EquipmentParser(object):
eq = Equipment.query(name=asset)
process = ws.cell(row=v['process']['row'], column=v['process']['column']).value
try:
# output.append(
yield dict(name=eq.name, processes=[process], role=k, asset_number=eq.asset_number,
nickname=eq.nickname)
except AttributeError:
logger.error(f"Unable to add {eq} to list.")
# logger.debug(f"Here is the output so far: {pformat(output)}")
# return output



class TipParser(object):
"""
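Editor's note: together with the signature change to Generator[dict, None, None] earlier in this commit, parse_equipment now yields each equipment dict instead of appending to output. A small sketch of the same list-to-generator refactor, using hypothetical names and data, and how a caller that still wants a list consumes it:

from typing import Generator

def parse_rows_list(rows: list[dict]) -> list[dict]:
    # old shape: accumulate into a list and return it
    output = []
    for row in rows:
        output.append({"name": row["name"], "role": row["role"]})
    return output

def parse_rows_gen(rows: list[dict]) -> Generator[dict, None, None]:
    # new shape: yield each item; the body only runs when the caller iterates
    for row in rows:
        yield {"name": row["name"], "role": row["role"]}

rows = [{"name": "Biomek i7", "role": "liquid handler"}]  # hypothetical equipment row
equipment = list(parse_rows_gen(rows))                    # callers that still need a list just wrap it

One behavioural note: because the generator is lazy, the AttributeError handling in the hunk above only fires once the caller actually iterates the result.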
@@ -623,7 +623,6 @@ class TipParser(object):
List[dict]: list of equipment
"""
# logger.debug(f"Equipment parser going into parsing: {pformat(self.__dict__)}")
output = []
# logger.debug(f"Sheets: {sheets}")
for sheet in self.xl.sheetnames:
ws = self.xl[sheet]
@@ -647,22 +646,20 @@ class TipParser(object):
# logger.debug(f"asset: {asset}")
eq = Tips.query(lot=lot, name=asset, limit=1)
try:
# output.append(
yield dict(name=eq.name, role=k, lot=lot)
except AttributeError:
logger.error(f"Unable to add {eq} to PydTips list.")
# logger.debug(f"Here is the output so far: {pformat(output)}")
# return output



class PCRParser(object):
"""Object to pull data from Design and Analysis PCR export file."""

def __init__(self, filepath: Path | None = None, submission: BasicSubmission | None = None) -> None:
"""
Args:
filepath (Path | None, optional): file to parse. Defaults to None.
"""
Args:
filepath (Path | None, optional): file to parse. Defaults to None.
submission (BasicSubmission | None, optional): Submission parsed data to be added to.
"""
# logger.debug(f'Parsing {filepath.__str__()}')
if filepath is None:
logger.error('No filepath given.')
@@ -709,73 +706,3 @@ class PCRParser(object):
# logger.debug(f"PCR: {pformat(pcr)}")
return pcr


class EDSParser(object):
expand_device = {"QS7PRO": "QuantStudio tm 7 Pro System"}
expand_block = {"BLOCK_96W_01ML": "96-Well 0.1-mL Block"}

def __init__(self, filepath: str | Path | None = None):
logger.info(f"\n\nParsing {filepath.__str__()}\n\n")
match filepath:
case Path():
self.filepath = filepath
case str():
self.filepath = Path(filepath)
case _:
logger.error(f"No filepath given.")
raise ValueError("No filepath given.")
self.eds = ZipFile(self.filepath)
self.analysis_settings = json.loads(self.eds.read("primary/analysis_setting.json").decode("utf-8"))
self.analysis_results = json.loads(self.eds.read("primary/analysis_setting.json").decode("utf-8"))
self.presence_absence_results = json.loads(
self.eds.read("extensions/am.pa/presence_absence_result.json").decode("utf-8"))
self.presence_absence_settings = json.loads(
self.eds.read("extensions/am.pa/presence_absence_setting.json").decode("utf-8"))
self.run_summary = json.loads(self.eds.read("run/run_summary.json").decode("utf-8"))
self.run_method = json.loads(self.eds.read("setup/run_method.json").decode("utf-8"))
self.plate_setup = json.loads(self.eds.read("setup/plate_setup.json").decode("utf-8"))
self.eds_summary = json.loads(self.eds.read("summary.json").decode("utf-8"))

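Editor's note: the EDSParser __init__ in this (removed) hunk treats the .eds export as a zip archive and loads individual JSON members from it. One apparent copy-over: analysis_results is loaded from the same primary/analysis_setting.json member as analysis_settings; the intended member name is not recoverable from this diff, so it is left as-is. A minimal self-contained sketch of the read pattern, with a hypothetical archive path:

import json
from pathlib import Path
from zipfile import ZipFile

eds_path = Path("example_export.eds")  # hypothetical file; the export is a zip archive
with ZipFile(eds_path) as eds:
    # ZipFile.read() returns bytes, so decode before handing it to json.loads
    run_summary = json.loads(eds.read("run/run_summary.json").decode("utf-8"))
    members = eds.namelist()  # useful for checking which JSON members an export actually contains

Unlike __init__ above, the sketch uses a context manager, which closes the archive after the reads; the parser keeps self.eds open for later access.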
def parse_DA_date_format(self, value: int) -> datetime:
value = value / 1000
return datetime.utcfromtimestamp(value)

def get_run_time(self, start: datetime, end: datetime) -> Tuple[str, str, str]:
delta = end - start
minutes, seconds = divmod(delta.seconds, 60)
duration = f"{minutes} minutes {seconds} seconds"
start_time = start.strftime("%Y-%m-%d %I:%M:%S %p %Z")
end_time = end.strftime("%Y-%m-%d %I:%M:%S %p %Z")
return start_time, end_time, duration

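Editor's note: parse_DA_date_format and get_run_time convert Design & Analysis millisecond epoch timestamps to datetimes and format the run window. A standalone sketch of the same arithmetic on made-up timestamp values; note that datetime.utcfromtimestamp returns a naive datetime, so the %Z directive above renders as an empty string, and it is deprecated from Python 3.12 in favour of a timezone-aware call, as shown here:

from datetime import datetime, timezone

start_ms, end_ms = 1700000000000, 1700003600000  # hypothetical millisecond epoch values

# millisecond epoch -> aware datetime (same arithmetic as value / 1000 + utcfromtimestamp above)
start = datetime.fromtimestamp(start_ms / 1000, tz=timezone.utc)
end = datetime.fromtimestamp(end_ms / 1000, tz=timezone.utc)

delta = end - start
minutes, seconds = divmod(delta.seconds, 60)     # delta.seconds excludes whole days
duration = f"{minutes} minutes {seconds} seconds"
start_time = start.strftime("%Y-%m-%d %I:%M:%S %p %Z")  # %Z now prints UTC because the datetime is aware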
def parse_summary(self):
summary = dict()
summary['file_name'] = self.filepath.absolute().__str__()
summary['comment'] = self.eds_summary['description']
summary['operator'] = self.run_summary['operator']
summary['barcode'] = self.plate_setup['plateBarcode']
try:
summary['instrument_type'] = self.__class__.expand_device[self.eds_summary['instrumentType']]
except KeyError:
summary['instrument_type'] = self.eds_summary['instrumentType']
try:
summary['block_type'] = self.__class__.expand_block[self.plate_setup['blockType']]
except KeyError:
summary['block_type'] = self.plate_setup['blockType']
summary['instrument_name'] = self.run_summary['instrumentName']
summary['instrument_serial_number'] = self.run_summary['instrumentSerialNumber']
summary['heated_cover_serial_number'] = self.run_summary['coverSerialNumber']
summary['block_serial_number'] = self.run_summary['blockSerialNumber']
run_start = self.parse_DA_date_format(self.run_summary['startTime'])
run_end = self.parse_DA_date_format(self.run_summary['endTime'])
summary['run_start_date/time'], summary['run_end_date/time'], summary['run_duration'] = \
self.get_run_time(run_start, run_end)
summary['sample_volume'] = self.run_method['sampleVolume']
summary['cover_temperature'] = self.run_method['coverTemperature']
summary['passive_reference'] = self.plate_setup['passiveReference']
summary['pcr_stage/step_number'] = f"Stage {self.analysis_settings['cqAnalysisStageNumber']} Step {self.analysis_settings['cqAnalysisStepNumber']}"
summary['quantification_cycle_method'] = self.analysis_results['cqAlgorithmType']
summary['analysis_date/time'] = self.parse_DA_date_format(self.eds_summary['analysis']['primary']['analysisTime'])
summary['software_name_and_version'] = "Design & Analysis Software v2.8.0"
summary['plugin_name_and_version'] = "Primary Analysis v1.8.1, Presence Absence v2.4.0"
return summary

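Editor's note: parse_summary maps instrument and block codes through the expand_device/expand_block class dicts and falls back to the raw code on KeyError. That pattern is equivalent to dict.get() with the code as its own default; a small self-contained sketch (the code value is hypothetical, the mapping is copied from the class above):

expand_device = {"QS7PRO": "QuantStudio tm 7 Pro System"}  # copied from the class above
code = "QS7PRO"                                            # hypothetical instrumentType value

# try/except KeyError fallback, as written in parse_summary above
try:
    instrument_type = expand_device[code]
except KeyError:
    instrument_type = code

# equivalent one-liner: the raw code is its own default
instrument_type = expand_device.get(code, code)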