Bug fixes: comment out noisy debug logging; add SubmissionEquipmentAssociation.query and use it in PydEquipment.to_sql to avoid creating duplicate equipment associations; guard assoc.save() against AttributeError; include the last row/column when scanning e-gel results; record "NA" instead of a 1970-01-01 sentinel date for missing reagent expiries.

lwark
2024-08-15 09:30:43 -05:00
parent 1a55b52f31
commit 6c0795e92e
7 changed files with 62 additions and 38 deletions


@@ -11,7 +11,7 @@ from sqlalchemy.ext.associationproxy import association_proxy
from datetime import date
import logging, re
from tools import check_authorization, setup_lookup, Report, Result
from typing import List, Literal, Generator
from typing import List, Literal, Generator, Any
from pandas import ExcelFile
from pathlib import Path
from . import Base, BaseClass, Organization
@@ -261,7 +261,7 @@ class KitType(BaseClass):
base_dict['reagent roles'] = []
base_dict['equipment roles'] = []
for k, v in self.construct_xl_map_for_use(submission_type=submission_type):
logger.debug(f"Value: {v}")
# logger.debug(f"Value: {v}")
try:
assoc = [item for item in self.kit_reagentrole_associations if item.reagent_role.name == k][0]
except IndexError as e:
@@ -275,10 +275,10 @@ class KitType(BaseClass):
except IndexError:
continue
for kk, vv in assoc.to_export_dict(kit_type=self).items():
logger.debug(f"{kk}:{vv}")
# logger.debug(f"{kk}:{vv}")
v[kk] = vv
base_dict['equipment roles'].append(v)
logger.debug(f"KT returning {base_dict}")
# logger.debug(f"KT returning {base_dict}")
return base_dict
@@ -1492,6 +1492,16 @@ class SubmissionEquipmentAssociation(BaseClass):
processes=[process], role=self.role, nickname=self.equipment.nickname)
return output
@classmethod
@setup_lookup
def query(cls, equipment_id:int, submission_id:int, role:str, limit:int=0, **kwargs) -> Any | List[Any]:
query: Query = cls.__database_session__.query(cls)
query = query.filter(cls.equipment_id==equipment_id)
query = query.filter(cls.submission_id==submission_id)
query = query.filter(cls.role==role)
return cls.execute_query(query=query, limit=limit, **kwargs)
class SubmissionTypeEquipmentRoleAssociation(BaseClass):
"""


@@ -295,8 +295,10 @@ class BasicSubmission(BaseClass):
if k == 'info':
continue
if not any([item['role'] == k for item in reagents]):
# expiry = date(year=1970, month=1, day=1)
expiry = "NA"
reagents.append(
dict(role=k, name="Not Applicable", lot="NA", expiry=date(year=1970, month=1, day=1),
dict(role=k, name="Not Applicable", lot="NA", expiry=expiry,
missing=True))
except Exception as e:
logger.error(f"We got an error retrieving reagents: {e}")
@@ -507,10 +509,10 @@ class BasicSubmission(BaseClass):
sample, _ = sample.to_sql(submission=self)
return
case "reagents":
logger.debug(f"Reagents coming into SQL: {value}")
# logger.debug(f"Reagents coming into SQL: {value}")
field_value = [reagent['value'].to_sql()[0] if isinstance(reagent, dict) else reagent.to_sql()[0] for
reagent in value]
logger.debug(f"Reagents coming out of SQL: {field_value}")
# logger.debug(f"Reagents coming out of SQL: {field_value}")
case "submission_type":
field_value = SubmissionType.query(name=value)
case "sample_count":
@@ -521,7 +523,7 @@ class BasicSubmission(BaseClass):
case "ctx" | "csv" | "filepath" | "equipment":
return
case item if item in self.jsons():
logger.debug(f"Setting JSON attribute.")
# logger.debug(f"Setting JSON attribute.")
existing = self.__getattribute__(key)
if value is None or value in ['', 'null']:
logger.error(f"No value given, not setting.")
@@ -617,7 +619,7 @@ class BasicSubmission(BaseClass):
Args:
original (bool, optional): Is this the first save. Defaults to True.
"""
logger.debug("Saving submission.")
# logger.debug("Saving submission.")
if original:
self.uploaded_by = getuser()
super().save()
@@ -1177,9 +1179,12 @@ class BasicSubmission(BaseClass):
# logger.debug(f"Processing: {equip}")
_, assoc = equip.toSQL(submission=self)
# logger.debug(f"Appending SubmissionEquipmentAssociation: {assoc}")
assoc.save()
try:
assoc.save()
except AttributeError as e:
logger.error(f"Couldn't save association with {equip} due to {e}")
if equip.tips:
logger.debug("We have tips in this equipment")
# logger.debug("We have tips in this equipment")
for tips in equip.tips:
tassoc = tips.to_sql(submission=self)
tassoc.save()
@@ -1308,7 +1313,7 @@ class BacterialCulture(BasicSubmission):
idx = df[df[0] == sample.well]
if idx.empty:
new = f"{sample.well[0]}{sample.well[1:].zfill(2)}"
logger.debug(f"Checking: {new}")
# logger.debug(f"Checking: {new}")
idx = df[df[0] == new]
# logger.debug(f"Here is the row: {idx}")
row = idx.index.to_list()[0]
@@ -1366,7 +1371,7 @@ class Wastewater(BasicSubmission):
dict: Updated sample dictionary
"""
input_dict = super().custom_info_parser(input_dict)
logger.debug(f"Input dict: {pformat(input_dict)}")
# logger.debug(f"Input dict: {pformat(input_dict)}")
if xl is not None:
try:
input_dict['csv'] = xl["Copy to import file"]
@@ -1636,12 +1641,14 @@ class WastewaterArtic(BasicSubmission):
input_dict = super().custom_info_parser(input_dict)
egel_section = custom_fields['egel_results']
ws = xl[egel_section['sheet']]
data = [ws.cell(row=ii, column=jj) for jj in range(egel_section['start_column'], egel_section['end_column']) for
ii in range(egel_section['start_row'], egel_section['end_row'])]
data = [ws.cell(row=ii, column=jj) for jj in range(egel_section['start_column'], egel_section['end_column']+1) for
ii in range(egel_section['start_row'], egel_section['end_row']+1)]
data = [cell for cell in data if cell.value is not None and "NTC" in cell.value]
# logger.debug(f"Got gel control map: {data}")
input_dict['gel_controls'] = [
dict(sample_id=cell.value, location=f"{row_map[cell.row - 9]}{str(cell.column - 14).zfill(2)}") for cell in
data]
# logger.debug(f"Got gel control info: {input_dict['gel_controls']}")
# NOTE: Get source plate information
source_plates_section = custom_fields['source_plates']
ws = xl[source_plates_section['sheet']]
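The +1 on the column and row bounds above matters because Python's range() stops before its end value, so the last gel row and column were previously skipped. A quick check with hypothetical bounds:

    # Hypothetical bounds for illustration.
    start_row, end_row = 9, 12
    list(range(start_row, end_row))      # [9, 10, 11]      -- last row missed
    list(range(start_row, end_row + 1))  # [9, 10, 11, 12]  -- last row included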
@@ -1854,10 +1861,10 @@ class WastewaterArtic(BasicSubmission):
"""
input_excel = super().custom_info_writer(input_excel, info, backup)
if isinstance(info, types.GeneratorType):
logger.debug(f"Unpacking info generator.")
# logger.debug(f"Unpacking info generator.")
info = {k: v for k, v in info}
logger.debug(f"Info:\n{pformat(info)}")
logger.debug(f"Custom fields:\n{pformat(custom_fields)}")
# logger.debug(f"Info:\n{pformat(info)}")
# logger.debug(f"Custom fields:\n{pformat(custom_fields)}")
# NOTE: check for source plate information
if check_key_or_attr(key='source_plates', interest=info, check_none=True):
source_plates_section = custom_fields['source_plates']
@@ -2016,7 +2023,7 @@ class WastewaterArtic(BasicSubmission):
img = zipped.read(input_dict['gel_image_path'])
with tempfile.TemporaryFile(mode="wb", suffix=".jpg", delete=False) as tmp:
tmp.write(img)
logger.debug(f"Tempfile: {tmp.name}")
# logger.debug(f"Tempfile: {tmp.name}")
img = InlineImage(tpl_obj, image_descriptor=tmp.name, width=Inches(5.5)) #, width=5.5)#, height=400)
input_dict['gel_image'] = img
return input_dict


@@ -209,7 +209,7 @@ class InfoParser(object):
"""
dicto = {}
# NOTE: This loop parses generic info
logger.debug(f"Map: {self.map}")
# logger.debug(f"Map: {self.map}")
for sheet in self.xl.sheetnames:
ws = self.xl[sheet]
relevant = []
@@ -590,7 +590,7 @@ class EquipmentParser(object):
Returns:
List[dict]: list of equipment
"""
logger.debug(f"Equipment parser going into parsing: {pformat(self.__dict__)}")
# logger.debug(f"Equipment parser going into parsing: {pformat(self.__dict__)}")
output = []
# logger.debug(f"Sheets: {sheets}")
for sheet in self.xl.sheetnames:
@@ -600,17 +600,17 @@ class EquipmentParser(object):
except (TypeError, KeyError) as e:
logger.error(f"Error creating relevant equipment list: {e}")
continue
logger.debug(f"Relevant equipment: {pformat(relevant)}")
# logger.debug(f"Relevant equipment: {pformat(relevant)}")
previous_asset = ""
for k, v in relevant.items():
logger.debug(f"Checking: {v}")
# logger.debug(f"Checking: {v}")
asset = ws.cell(v['name']['row'], v['name']['column']).value
if not check_not_nan(asset):
asset = previous_asset
else:
previous_asset = asset
asset = self.get_asset_number(input=asset)
logger.debug(f"asset: {asset}")
# logger.debug(f"asset: {asset}")
eq = Equipment.query(asset_number=asset)
if eq is None:
eq = Equipment.query(name=asset)
@@ -621,7 +621,7 @@ class EquipmentParser(object):
nickname=eq.nickname))
except AttributeError:
logger.error(f"Unable to add {eq} to list.")
logger.debug(f"Here is the output so far: {pformat(output)}")
# logger.debug(f"Here is the output so far: {pformat(output)}")
return output
@@ -668,7 +668,7 @@ class TipParser(object):
except (TypeError, KeyError) as e:
logger.error(f"Error creating relevant equipment list: {e}")
continue
logger.debug(f"Relevant equipment: {pformat(relevant)}")
# logger.debug(f"Relevant equipment: {pformat(relevant)}")
previous_asset = ""
for k, v in relevant.items():
asset = ws.cell(v['name']['row'], v['name']['column']).value
@@ -680,14 +680,14 @@ class TipParser(object):
asset = previous_asset
else:
previous_asset = asset
logger.debug(f"asset: {asset}")
# logger.debug(f"asset: {asset}")
eq = Tips.query(lot=lot, name=asset, limit=1)
try:
output.append(
dict(name=eq.name, role=k, lot=lot))
except AttributeError:
logger.error(f"Unable to add {eq} to PydTips list.")
logger.debug(f"Here is the output so far: {pformat(output)}")
# logger.debug(f"Here is the output so far: {pformat(output)}")
return output


@@ -103,11 +103,11 @@ class RSLNamer(object):
regex (str): string to construct pattern
filename (str): string to be parsed
"""
logger.debug(f"Input string to be parsed: {filename}")
logger.info(f"Input string to be parsed: {filename}")
if regex is None:
regex = BasicSubmission.construct_regex()
else:
logger.debug(f"Incoming regex: {regex}")
# logger.debug(f"Incoming regex: {regex}")
try:
regex = re.compile(rf'{regex}', re.IGNORECASE | re.VERBOSE)
except re.error as e:


@@ -334,15 +334,20 @@ class PydEquipment(BaseModel, extra='ignore'):
Tuple[Equipment, SubmissionEquipmentAssociation]: SQL objects
"""
if isinstance(submission, str):
logger.info(f"Got string, querying {submission}")
submission = BasicSubmission.query(rsl_number=submission)
equipment = Equipment.query(asset_number=self.asset_number)
if equipment is None:
logger.error("No equipment found. Returning None.")
return
if submission is not None:
# NOTE: Need to make sure the same association is not added to the submission
assoc = SubmissionEquipmentAssociation.query(equipment_id=equipment.id, submission_id=submission.id,
try:
assoc = SubmissionEquipmentAssociation.query(equipment_id=equipment.id, submission_id=submission.id,
role=self.role, limit=1)
except TypeError as e:
logger.error(f"Couldn't get association due to {e}, returning...")
return equipment, None
if assoc is None:
assoc = SubmissionEquipmentAssociation(submission=submission, equipment=equipment)
process = Process.query(name=self.processes[0])
@@ -351,8 +356,10 @@ class PydEquipment(BaseModel, extra='ignore'):
assoc.process = process
assoc.role = self.role
else:
logger.warning(f"Found already existing association: {assoc}")
assoc = None
else:
logger.warning(f"No submission found")
assoc = None
return equipment, assoc
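Taken together, the guarded lookup and the two else branches mean to_sql should only hand back a new association on the first conversion; converting the same equipment for the same submission again, or converting without a resolvable submission, returns (equipment, None). A rough sketch of the expected calling pattern (the objects are assumed to exist already):

    # Illustrative only: 'pyd' is a PydEquipment, 'sub' an existing BasicSubmission.
    equipment, assoc = pyd.to_sql(submission=sub)   # first call: fresh association
    if assoc is not None:
        assoc.save()
    equipment, assoc = pyd.to_sql(submission=sub)   # repeat call: existing link found, assoc is None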
@@ -608,7 +615,7 @@ class PydSubmission(BaseModel, extra='allow'):
@field_validator("contact")
@classmethod
def get_contact_from_org(cls, value, values):
logger.debug(f"Checking on value: {value}")
# logger.debug(f"Checking on value: {value}")
match value:
case dict():
if isinstance(value['value'], tuple):
@@ -621,7 +628,7 @@ class PydSubmission(BaseModel, extra='allow'):
if check is None:
org = Organization.query(name=values.data['submitting_lab']['value'])
contact = org.contacts[0].name
logger.debug(f"Pulled: {contact}")
# logger.debug(f"Pulled: {contact}")
if isinstance(contact, tuple):
contact = contact[0]
return dict(value=contact, missing=True)
@@ -758,7 +765,7 @@ class PydSubmission(BaseModel, extra='allow'):
for tips in self.tips:
if tips is None:
continue
logger.debug(f"Converting tips: {tips} to sql.")
# logger.debug(f"Converting tips: {tips} to sql.")
try:
association = tips.to_sql(submission=instance)
except AttributeError: