Addition of Equipment and SubmissionType creation.
@@ -10,6 +10,7 @@ import logging
from tools import check_authorization, setup_lookup, query_return, Report, Result, Settings
from typing import List
from pandas import ExcelFile
from pathlib import Path
from . import Base, BaseClass, Organization

logger = logging.getLogger(f'submissions.{__name__}')
@@ -55,7 +56,7 @@ class KitType(BaseClass):
    def __repr__(self) -> str:
        return f"<KitType({self.name})>"

    def get_reagents(self, required:bool=False, submission_type:str|SubmissionType|None=None) -> list:
    def get_reagents(self, required:bool=False, submission_type:str|SubmissionType|None=None) -> List[ReagentType]:
        """
        Return ReagentTypes linked to kit through KitTypeReagentTypeAssociation.

@@ -242,6 +243,10 @@ class ReagentType(BaseClass):
            case _:
                pass
        return query_return(query=query, limit=limit)

    def to_pydantic(self):
        from backend.validators.pydant import PydReagent
        return PydReagent(lot=None, type=self.name, name=self.name, expiry=date.today())

class KitTypeReagentTypeAssociation(BaseClass):
    """
@@ -583,6 +588,14 @@ class SubmissionType(BaseClass):

    kit_types = association_proxy("submissiontype_kit_associations", "kit_type") #: Proxy of kittype association

    submissiontype_equipment_associations = relationship(
        "SubmissionTypeEquipmentAssociation",
        back_populates="submission_type",
        cascade="all, delete-orphan"
    )

    equipment = association_proxy("submissiontype_equipment_associations", "equipment")

    def __repr__(self) -> str:
        return f"<SubmissionType({self.name})>"

@@ -595,6 +608,35 @@ class SubmissionType(BaseClass):
        """
        return ExcelFile(self.template_file).sheet_names

    def set_template_file(self, filepath:Path|str):
        if isinstance(filepath, str):
            filepath = Path(filepath)
        with open(filepath, "rb") as f:
            data = f.read()
        self.template_file = data
        self.save()

    def get_equipment(self) -> list:
        from backend.validators.pydant import PydEquipmentPool
        # if static:
        #     return [item.equipment.to_pydantic() for item in self.submissiontype_equipment_associations if item.static==1]
        # else:
        preliminary1 = [item.equipment.to_pydantic(static=item.static) for item in self.submissiontype_equipment_associations]# if item.static==0]
        preliminary2 = [item.equipment.to_pydantic(static=item.static) for item in self.submissiontype_equipment_associations]# if item.static==0]
        output = []
        pools = list(set([item.pool_name for item in preliminary1 if item.pool_name != None]))
        for pool in pools:
            c_ = []
            for item in preliminary1:
                if item.pool_name == pool:
                    c_.append(item)
                    preliminary2.remove(item)
            if len(c_) > 0:
                output.append(PydEquipmentPool(name=pool, equipment=c_))
        for item in preliminary2:
            output.append(item)
        return output

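As an aside, get_equipment collapses any equipment sharing a pool_name into a single PydEquipmentPool and passes unpooled items through untouched. A minimal standalone sketch of that grouping behaviour, using stand-in dataclasses rather than the project's PydEquipment/PydEquipmentPool models (instrument names are invented):

from dataclasses import dataclass

@dataclass
class StubEquipment:          # stand-in for PydEquipment
    name: str
    pool_name: str | None

@dataclass
class StubPool:               # stand-in for PydEquipmentPool
    name: str
    equipment: list

def group_by_pool(items):
    # same idea as get_equipment: pooled items are bundled, loose items pass through
    leftovers = list(items)
    output = []
    for pool in {i.pool_name for i in items if i.pool_name is not None}:
        members = [i for i in items if i.pool_name == pool]
        for m in members:
            leftovers.remove(m)
        output.append(StubPool(name=pool, equipment=members))
    output.extend(leftovers)
    return output

# two pooled liquid handlers collapse into one pool; the unpooled instrument stays loose
group_by_pool([StubEquipment("BioMek-1", "liquid_handlers"),
               StubEquipment("BioMek-2", "liquid_handlers"),
               StubEquipment("QuantStudio", None)])
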
    @classmethod
    @setup_lookup
    def query(cls,
@@ -772,4 +814,145 @@ class SubmissionReagentAssociation(BaseClass):
        # limit = query.count()
        return query_return(query=query, limit=limit)

    def to_sub_dict(self, extraction_kit):
        output = self.reagent.to_sub_dict(extraction_kit)
        output['comments'] = self.comments
        return output

class Equipment(BaseClass):

    # Currently abstract until ready to implement
    # __abstract__ = True

    __tablename__ = "_equipment"

    id = Column(INTEGER, primary_key=True)
    name = Column(String(64))
    nickname = Column(String(64))
    asset_number = Column(String(16))
    pool_name = Column(String(16))

    equipment_submission_associations = relationship(
        "SubmissionEquipmentAssociation",
        back_populates="equipment",
        cascade="all, delete-orphan",
    )

    submissions = association_proxy("equipment_submission_associations", "submission")

    equipment_submissiontype_associations = relationship(
        "SubmissionTypeEquipmentAssociation",
        back_populates="equipment",
        cascade="all, delete-orphan",
    )

    submission_types = association_proxy("equipment_submissiontype_associations", "submission_type")

    def __repr__(self):
        return f"<Equipment({self.name})>"

    @classmethod
    @setup_lookup
    def query(cls,
              name:str|None=None,
              nickname:str|None=None,
              asset_number:str|None=None,
              limit:int=0
              ) -> Equipment|List[Equipment]:
        query = cls.__database_session__.query(cls)
        match name:
            case str():
                query = query.filter(cls.name==name)
                limit = 1
            case _:
                pass
        match nickname:
            case str():
                query = query.filter(cls.nickname==nickname)
                limit = 1
            case _:
                pass
        match asset_number:
            case str():
                query = query.filter(cls.asset_number==asset_number)
                limit = 1
            case _:
                pass
        return query_return(query=query, limit=limit)

    def to_pydantic(self, static):
        from backend.validators.pydant import PydEquipment
        return PydEquipment(static=static, **self.__dict__)

    def save(self):
        self.__database_session__.add(self)
        self.__database_session__.commit()

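A hedged usage sketch for the new lookup, assuming a configured __database_session__ and an existing row; the asset number here is invented:

# passing a string narrows the filter and forces limit=1, so per the annotation a single Equipment comes back
robot = Equipment.query(asset_number="EQ-0001")
# static is handed straight through to the PydEquipment constructor
pyd = robot.to_pydantic(static=1)
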
class SubmissionEquipmentAssociation(BaseClass):

    # Currently abstract until ready to implement
    # __abstract__ = True

    __tablename__ = "_equipment_submissions"

    equipment_id = Column(INTEGER, ForeignKey("_equipment.id"), primary_key=True) #: id of associated equipment
    submission_id = Column(INTEGER, ForeignKey("_submissions.id"), primary_key=True) #: id of associated submission
    process = Column(String(64)) #: name of the process run on this equipment
    start_time = Column(TIMESTAMP)
    end_time = Column(TIMESTAMP)
    comments = Column(String(1024))

    submission = relationship("BasicSubmission", back_populates="submission_equipment_associations") #: associated submission

    equipment = relationship(Equipment, back_populates="equipment_submission_associations") #: associated equipment

    def __init__(self, submission, equipment):
        self.submission = submission
        self.equipment = equipment

    def to_sub_dict(self) -> dict:
        output = dict(name=self.equipment.name, asset_number=self.equipment.asset_number, comment=self.comments)
        return output

    def save(self):
        self.__database_session__.add(self)
        self.__database_session__.commit()

class SubmissionTypeEquipmentAssociation(BaseClass):

    # __abstract__ = True

    __tablename__ = "_submissiontype_equipment"

    equipment_id = Column(INTEGER, ForeignKey("_equipment.id"), primary_key=True) #: id of associated equipment
    submissiontype_id = Column(INTEGER, ForeignKey("_submission_types.id"), primary_key=True) #: id of associated submission type
    uses = Column(JSON) #: locations of equipment on the submission type excel sheet.
    static = Column(INTEGER, default=1) #: if 1 this piece of equipment will always be used, otherwise it must be selected from a list

    submission_type = relationship(SubmissionType, back_populates="submissiontype_equipment_associations") #: associated submission type

    equipment = relationship(Equipment, back_populates="equipment_submissiontype_associations") #: associated equipment

    @validates('static')
    def validate_static(self, key, value):
        """
        Ensures only 1 & 0 are used in 'static'

        Args:
            key (str): name of attribute
            value (_type_): value of attribute

        Raises:
            ValueError: Raised if bad value given

        Returns:
            _type_: value
        """
        if not 0 <= value < 2:
            raise ValueError(f'Invalid static value {value}. Must be 0 or 1.')
        return value

    def save(self):
        self.__database_session__.add(self)
        self.__database_session__.commit()


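A hedged sketch of attaching a piece of equipment to a submission type, assuming existing SubmissionType and Equipment instances (st, eq) and an active session; the uses payload is an invented example of the location JSON:

assoc = SubmissionTypeEquipmentAssociation(submission_type=st, equipment=eq, static=1)
assoc.uses = {"sheet": "Equipment", "row": 5, "column": 2}  # invented location payload
assoc.save()
st.equipment  # the association proxy on SubmissionType now yields eq
# the @validates hook rejects anything outside 0/1, e.g. static=2 raises ValueError
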
@@ -5,7 +5,7 @@ from __future__ import annotations
from getpass import getuser
import math, json, logging, uuid, tempfile, re, yaml
from pprint import pformat
from . import Reagent, SubmissionType, KitType, Organization
from . import Reagent, SubmissionType, KitType, Organization, Equipment, SubmissionEquipmentAssociation
from sqlalchemy import Column, String, TIMESTAMP, INTEGER, ForeignKey, JSON, FLOAT, case
from sqlalchemy.orm import relationship, validates, Query
from json.decoder import JSONDecodeError
@@ -69,6 +69,13 @@ class BasicSubmission(BaseClass):
    # to "keyword" attribute
    reagents = association_proxy("submission_reagent_associations", "reagent") #: Association proxy to SubmissionReagentAssociation.reagent

    submission_equipment_associations = relationship(
        "SubmissionEquipmentAssociation",
        back_populates="submission",
        cascade="all, delete-orphan"
    )
    equipment = association_proxy("submission_equipment_associations", "equipment")

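A hedged sketch of recording equipment against a submission through the new association, assuming existing BasicSubmission and Equipment instances (sub, eq); the process name is invented:

assoc = SubmissionEquipmentAssociation(submission=sub, equipment=eq)
assoc.process = "Nucleic acid extraction"  # invented process name
assoc.save()
sub.equipment  # the association proxy now yields eq
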
    # Allows for subclassing into ex. BacterialCulture, Wastewater, etc.
    __mapper_args__ = {
        "polymorphic_identity": "Basic Submission",
@@ -124,7 +131,7 @@ class BasicSubmission(BaseClass):
        # Updated 2023-09 to use the extraction kit to pull reagents.
        if full_data:
            try:
                reagents = [item.to_sub_dict(extraction_kit=self.extraction_kit) for item in self.reagents]
                reagents = [item.to_sub_dict(extraction_kit=self.extraction_kit) for item in self.submission_reagent_associations]
            except Exception as e:
                logger.error(f"We got an error retrieving reagents: {e}")
                reagents = None
@@ -138,6 +145,13 @@ class BasicSubmission(BaseClass):
            except Exception as e:
                logger.error(f"Error setting comment: {self.comment}")
                comments = None
            try:
                equipment = [item.to_sub_dict() for item in self.submission_equipment_associations]
                if len(equipment) == 0:
                    equipment = None
            except Exception as e:
                logger.error(f"Error setting equipment: {self.equipment}")
                equipment = None
        output = {
            "id": self.id,
            "Plate Number": self.rsl_plate_num,
@@ -153,7 +167,8 @@ class BasicSubmission(BaseClass):
            "reagents": reagents,
            "samples": samples,
            "extraction_info": ext_info,
            "comment": comments
            "comment": comments,
            "equipment": equipment
        }
        return output

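For illustration, given SubmissionEquipmentAssociation.to_sub_dict above, the new "equipment" entry carries a list of small dicts shaped roughly like this (values invented):

"equipment": [{"name": "BioMek-1", "asset_number": "EQ-0001", "comment": None}]
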
@@ -447,7 +462,7 @@ class BasicSubmission(BaseClass):
        logger.debug(f"Got {len(subs)} submissions.")
        df = pd.DataFrame.from_records(subs)
        # Exclude sub information
        for item in ['controls', 'extraction_info', 'pcr_info', 'comment', 'comments', 'samples', 'reagents']:
        for item in ['controls', 'extraction_info', 'pcr_info', 'comment', 'comments', 'samples', 'reagents', 'equipment']:
            try:
                df = df.drop(item, axis=1)
            except:
@@ -520,7 +535,7 @@ class BasicSubmission(BaseClass):
            _type_: _description_
        """
        # assoc = SubmissionSampleAssociation.query(submission=self, sample=sample, limit=1)
        assoc = [item.sample for item in self.submission_sample_associations if item.sample==sample][0]
        assoc = [item for item in self.submission_sample_associations if item.sample==sample][0]
        for k,v in input_dict.items():
            try:
                setattr(assoc, k, v)
@@ -750,7 +765,8 @@ class BasicSubmission(BaseClass):
            code = 1
            msg = "This submission already exists.\nWould you like to overwrite?"
        return instance, code, msg



# Below are the custom submission types

class BacterialCulture(BasicSubmission):
@@ -877,6 +893,12 @@ class BacterialCulture(BasicSubmission):
        template += "_{{ submitting_lab }}_{{ submitter_plate_num }}"
        return template

    @classmethod
    def parse_info(cls, input_dict: dict, xl: pd.ExcelFile | None = None) -> dict:
        input_dict = super().parse_info(input_dict, xl)
        input_dict['submitted_date']['missing'] = True
        return input_dict

class Wastewater(BasicSubmission):
    """
    derivative submission type from BasicSubmission
@@ -1009,7 +1031,8 @@ class Wastewater(BasicSubmission):
        Returns:
            str: String for regex construction
        """
        return "(?P<Wastewater>RSL(?:-|_)?WW(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789]|$)R?\d?)?)"
        # return "(?P<Wastewater>RSL(?:-|_)?WW(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\s]|$)R?\d?)?)"
        return "(?P<Wastewater>RSL(?:-|_)?WW(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\s]|$)?R?\d?)?)"

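To illustrate the tightened pattern, a quick check against a plausible plate identifier (the identifier itself is invented):

import re
pattern = r"(?P<Wastewater>RSL(?:-|_)?WW(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\s]|$)?R?\d?)?)"
re.search(pattern, "RSL-WW-2023-10-04 resubmitted").group("Wastewater")  # -> 'RSL-WW-2023-10-04'
# the added \s in the character class keeps the match from running into trailing text
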
class WastewaterArtic(BasicSubmission):
    """
@@ -1416,7 +1439,9 @@ class BasicSample(BaseClass):
        return instance

    def save(self):
        raise AttributeError(f"Save not implemented for {self.__class__}")
        # raise AttributeError(f"Save not implemented for {self.__class__}")
        self.__database_session__.add(self)
        self.__database_session__.commit()

    def delete(self):
        raise AttributeError(f"Delete not implemented for {self.__class__}")
@@ -1735,4 +1760,3 @@ class WastewaterAssociation(SubmissionSampleAssociation):
    pcr_results = Column(JSON) #: imported PCR status from QuantStudio

    __mapper_args__ = {"polymorphic_identity": "Wastewater Association", "polymorphic_load": "inline"}


@@ -13,7 +13,7 @@ import logging, re
from collections import OrderedDict
from datetime import date
from dateutil.parser import parse, ParserError
from tools import check_not_nan, convert_nans_to_nones, Settings
from tools import check_not_nan, convert_nans_to_nones, Settings, is_missing

logger = logging.getLogger(f"submissions.{__name__}")

@@ -186,23 +186,15 @@ class InfoParser(object):
                value = df.iat[relevant[item]['row']-1, relevant[item]['column']-1]
                match item:
                    case "submission_type":
                        value, missing = is_missing(value)
                        value = value.title()
                    case _:
                        pass
                        value, missing = is_missing(value)
                logger.debug(f"Setting {item} on {sheet} to {value}")
                if check_not_nan(value):
                    if value != "None":
                        try:
                            dicto[item] = dict(value=value, missing=False)
                        except (KeyError, IndexError):
                            continue
                    else:
                        try:
                            dicto[item] = dict(value=value, missing=True)
                        except (KeyError, IndexError):
                            continue
                else:
                    dicto[item] = dict(value=convert_nans_to_nones(value), missing=True)
                try:
                    dicto[item] = dict(value=value, missing=missing)
                except (KeyError, IndexError):
                    continue
        return self.custom_parser(input_dict=dicto, xl=self.xl)

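The NaN branching above is now delegated to tools.is_missing, whose implementation is not shown in this diff. A rough stand-in for the contract the parser appears to rely on (a normalised value plus a missing flag), for illustration only:

def is_missing_stand_in(value):
    # illustration only -- not the project's tools.is_missing
    if value is None or (isinstance(value, float) and value != value):  # NaN check
        return None, True
    if value == "None":
        return value, True
    return value, False
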
class ReagentParser(object):
@@ -293,7 +285,9 @@ class SampleParser(object):
        self.xl = xl
        self.submission_type = submission_type
        sample_info_map = self.fetch_sample_info_map(submission_type=submission_type)
        logger.debug(f"sample_info_map: {sample_info_map}")
        self.plate_map = self.construct_plate_map(plate_map_location=sample_info_map['plate_map'])
        logger.debug(f"plate_map: {self.plate_map}")
        self.lookup_table = self.construct_lookup_table(lookup_table_location=sample_info_map['lookup_table'])
        if "plates" in sample_info_map:
            self.plates = sample_info_map['plates']
@@ -332,10 +326,12 @@ class SampleParser(object):
        Returns:
            pd.DataFrame: Plate map grid
        """
        logger.debug(f"Plate map location: {plate_map_location}")
        df = self.xl.parse(plate_map_location['sheet'], header=None, dtype=object)
        df = df.iloc[plate_map_location['start_row']-1:plate_map_location['end_row'], plate_map_location['start_column']-1:plate_map_location['end_column']]
        df = pd.DataFrame(df.values[1:], columns=df.iloc[0])
        df = df.set_index(df.columns[0])
        logger.debug(f"Vanilla platemap: {df}")
        # custom_mapper = get_polymorphic_subclass(models.BasicSubmission, self.submission_type)
        custom_mapper = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type)
        df = custom_mapper.custom_platemap(self.xl, df)
@@ -440,6 +436,7 @@ class SampleParser(object):
        """
        result = None
        new_samples = []
        logger.debug(f"Starting samples: {pformat(self.samples)}")
        for ii, sample in enumerate(self.samples):
            # try:
            # if sample['submitter_id'] in [check_sample['sample'].submitter_id for check_sample in new_samples]:

@@ -127,9 +127,10 @@ class PydReagent(BaseModel):
                    reagent.name = value
                case "comment":
                    continue
        assoc = SubmissionReagentAssociation(reagent=reagent, submission=submission)
        assoc.comments = self.comment
        reagent.reagent_submission_associations.append(assoc)
        if submission != None:
            assoc = SubmissionReagentAssociation(reagent=reagent, submission=submission)
            assoc.comments = self.comment
            reagent.reagent_submission_associations.append(assoc)
        # add end-of-life extension from reagent type to expiry date
        # NOTE: this will now be done only in the reporting phase to account for potential changes in end-of-life extensions
        return reagent, report
@@ -199,7 +200,8 @@ class PydSample(BaseModel, extra='allow'):
                                                               row=row, column=column)
            try:
                instance.sample_submission_associations.append(association)
            except IntegrityError:
            except IntegrityError as e:
                logger.error(f"Could not attach submission sample association due to: {e}")
                instance.metadata.session.rollback()
        return instance, report

@@ -420,13 +422,18 @@ class PydSubmission(BaseModel, extra='allow'):
            if isinstance(value, dict):
                value = value['value']
            logger.debug(f"Setting {key} to {value}")
            try:
                instance.set_attribute(key=key, value=value)
            except AttributeError as e:
                logger.debug(f"Could not set attribute: {key} to {value} due to: \n\n {e}")
                continue
            except KeyError:
                continue
            match key:
                case "samples":
                    for sample in self.samples:
                        sample, _ = sample.toSQL(submission=instance)
                case _:
                    try:
                        instance.set_attribute(key=key, value=value)
                    except AttributeError as e:
                        logger.debug(f"Could not set attribute: {key} to {value} due to: \n\n {e}")
                        continue
                    except KeyError:
                        continue
        try:
            logger.debug(f"Calculating costs for procedure...")
            instance.calculate_base_cost()
@@ -735,4 +742,35 @@ class PydKit(BaseModel):
            [item.toSQL(instance) for item in self.reagent_types]
        return instance, report

class PydEquipment(BaseModel, extra='ignore'):

    name: str
    nickname: str|None
    asset_number: str
    pool_name: str|None
    static: bool|int

    @field_validator("static")
    @classmethod
    def to_boolean(cls, value):
        match value:
            case int():
                if value == 0:
                    return False
                else:
                    return True
            case _:
                return value

    def toForm(self, parent):
        from frontend.widgets.equipment_usage import EquipmentCheckBox
        return EquipmentCheckBox(parent=parent, equipment=self)

class PydEquipmentPool(BaseModel):

    name: str
    equipment: List[PydEquipment]

    def toForm(self, parent):
        from frontend.widgets.equipment_usage import PoolComboBox
        return PoolComboBox(parent=parent, pool=self)
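
A hedged sketch of the static validator in action; the field values are invented:

from backend.validators.pydant import PydEquipment
eq = PydEquipment(name="BioMek-1", nickname=None, asset_number="EQ-0001", pool_name=None, static=1)
eq.static  # -> True, the field_validator coerces an integer 0/1 into a bool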