Increased robustness of form parsers.
@@ -76,10 +76,10 @@ def store_object(ctx:Settings, object) -> dict|None:
dbs.merge(object)
try:
dbs.commit()
except (sqlite3.IntegrityError, sqlalchemy.exc.IntegrityError) as e:
except (SQLIntegrityError, AlcIntegrityError) as e:
logger.debug(f"Hit an integrity error : {e}")
dbs.rollback()
return {"message":f"This object {object} already exists, so we can't add it.", "status":"Critical"}
return {"message":f"This object {object} already exists, so we can't add it.\n{e}", "status":"Critical"}
except (SQLOperationalError, AlcOperationalError):
logger.error(f"Hit an operational error: {e}")
dbs.rollback()

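The new exception names suggest the sqlite3 and SQLAlchemy errors are now imported under aliases so both layers can be caught in one clause. A minimal sketch of what the updated store_object appears to do, assuming those aliases and a session passed in as dbs (the ctx plumbing from the real function is omitted):

import logging
from sqlite3 import IntegrityError as SQLIntegrityError, OperationalError as SQLOperationalError
from sqlalchemy.exc import IntegrityError as AlcIntegrityError, OperationalError as AlcOperationalError

logger = logging.getLogger("submissions.example")

def store_object_sketch(dbs, object) -> dict | None:
    # Merge the object into the session, then try to commit; roll back on either
    # the raw sqlite3 exception or its SQLAlchemy-wrapped equivalent.
    dbs.merge(object)
    try:
        dbs.commit()
    except (SQLIntegrityError, AlcIntegrityError) as e:
        logger.debug(f"Hit an integrity error : {e}")
        dbs.rollback()
        return {"message": f"This object {object} already exists, so we can't add it.\n{e}", "status": "Critical"}
    except (SQLOperationalError, AlcOperationalError) as e:
        # Binding e here is assumed; the operational clause in the diff omits "as e"
        # even though the logged message interpolates it.
        logger.error(f"Hit an operational error: {e}")
        dbs.rollback()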
@@ -10,6 +10,7 @@ from datetime import date, timedelta
from dateutil.parser import parse
from typing import Tuple
from sqlalchemy.exc import IntegrityError, SAWarning
from . import store_object

logger = logging.getLogger(f"submissions.{__name__}")

@@ -157,7 +158,7 @@ def construct_samples(ctx:Settings, instance:models.BasicSubmission, samples:Lis
models.BasicSubmission: Updated submission object.
"""
for sample in samples:
sample_instance = lookup_samples(ctx=ctx, submitter_id=sample['sample'].submitter_id)
sample_instance = lookup_samples(ctx=ctx, submitter_id=str(sample['sample'].submitter_id))
if sample_instance == None:
sample_instance = sample['sample']
else:
@@ -174,7 +175,7 @@ def construct_samples(ctx:Settings, instance:models.BasicSubmission, samples:Lis
try:
assoc = getattr(models, f"{sample_query}Association")
except AttributeError as e:
logger.error(f"Couldn't get type specific association. Getting generic.")
logger.error(f"Couldn't get type specific association using {sample_instance.sample_type.replace('Sample', '').strip()}. Getting generic.")
assoc = models.SubmissionSampleAssociation
assoc = assoc(submission=instance, sample=sample_instance, row=sample['row'], column=sample['column'])
instance.submission_sample_associations.append(assoc)
@@ -189,7 +190,7 @@ def construct_samples(ctx:Settings, instance:models.BasicSubmission, samples:Lis
continue
return instance

def construct_kit_from_yaml(ctx:Settings, exp:dict) -> dict:
def construct_kit_from_yaml(ctx:Settings, kit_dict:dict) -> dict:
"""
Create and store a new kit in the database based on a .yml file
TODO: split into create and store functions
@@ -206,36 +207,33 @@ def construct_kit_from_yaml(ctx:Settings, exp:dict) -> dict:
if not check_is_power_user(ctx=ctx):
logger.debug(f"{getuser()} does not have permission to add kits.")
return {'code':1, 'message':"This user does not have permission to add kits.", "status":"warning"}
# iterate through keys in dict
for type in exp:
# A submission type may use multiple kits.
for kt in exp[type]['kits']:
logger.debug(f"Looking up submission type: {type}")
# submission_type = lookup_submissiontype_by_name(ctx=ctx, type_name=type)
submission_type = lookup_submission_type(ctx=ctx, name=type)
logger.debug(f"Looked up submission type: {submission_type}")
kit = models.KitType(name=kt)
kt_st_assoc = models.SubmissionTypeKitTypeAssociation(kit_type=kit, submission_type=submission_type)
kt_st_assoc.constant_cost = exp[type]["kits"][kt]["constant_cost"]
kt_st_assoc.mutable_cost_column = exp[type]["kits"][kt]["mutable_cost_column"]
kt_st_assoc.mutable_cost_sample = exp[type]["kits"][kt]["mutable_cost_sample"]
kit.kit_submissiontype_associations.append(kt_st_assoc)
# A kit contains multiple reagent types.
for r in exp[type]['kits'][kt]['reagenttypes']:
# check if reagent type already exists.
r = massage_common_reagents(r)
look_up = ctx.database_session.query(models.ReagentType).filter(models.ReagentType.name==r).first()
if look_up == None:
rt = models.ReagentType(name=r.strip(), eol_ext=timedelta(30*exp[type]['kits'][kt]['reagenttypes'][r]['eol_ext']), last_used="")
else:
rt = look_up
assoc = models.KitTypeReagentTypeAssociation(kit_type=kit, reagent_type=rt, uses={})
ctx.database_session.add(rt)
kit.kit_reagenttype_associations.append(assoc)
logger.debug(f"Kit construction reagent type: {rt.__dict__}")
logger.debug(f"Kit construction kit: {kit.__dict__}")
ctx.database_session.add(kit)
ctx.database_session.commit()
submission_type = lookup_submission_type(ctx=ctx, name=kit_dict['used_for'])
logger.debug(f"Looked up submission type: {kit_dict['used_for']} and got {submission_type}")
kit = models.KitType(name=kit_dict["kit_name"])
kt_st_assoc = models.SubmissionTypeKitTypeAssociation(kit_type=kit, submission_type=submission_type)
for k,v in kit_dict.items():
if k not in ["reagent_types", "kit_name", "used_for"]:
kt_st_assoc.set_attrib(k, v)
kit.kit_submissiontype_associations.append(kt_st_assoc)
# A kit contains multiple reagent types.
for r in kit_dict['reagent_types']:
# check if reagent type already exists.
logger.debug(f"Constructing reagent type: {r}")
rtname = massage_common_reagents(r['rtname'])
# look_up = ctx.database_session.query(models.ReagentType).filter(models.ReagentType.name==rtname).first()
look_up = lookup_reagent_types(name=rtname)
if look_up == None:
rt = models.ReagentType(name=rtname.strip(), eol_ext=timedelta(30*r['eol']))
else:
rt = look_up
uses = {kit_dict['used_for']:{k:v for k,v in r.items() if k not in ['eol']}}
assoc = models.KitTypeReagentTypeAssociation(kit_type=kit, reagent_type=rt, uses=uses)
# ctx.database_session.add(rt)
store_object(ctx=ctx, object=rt)
kit.kit_reagenttype_associations.append(assoc)
logger.debug(f"Kit construction reagent type: {rt.__dict__}")
logger.debug(f"Kit construction kit: {kit.__dict__}")
store_object(ctx=ctx, object=kit)
return {'code':0, 'message':'Kit has been added', 'status': 'information'}

def construct_org_from_yaml(ctx:Settings, org:dict) -> dict:

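For reference, the rewritten function reads a flat dictionary rather than the old nested per-submission-type YAML layout. A hypothetical kit_dict showing the keys the new code touches (used_for, kit_name, reagent_types with rtname and eol); the cost fields are assumed to be the extra keys copied onto the association via set_attrib, and all values here are invented:

kit_dict = {
    "kit_name": "Example Extraction Kit",      # becomes models.KitType(name=...)
    "used_for": "Wastewater",                  # looked up as the submission type
    "constant_cost": 25.0,                     # extra keys are copied onto the
    "mutable_cost_column": 1.5,                # SubmissionTypeKitTypeAssociation
    "mutable_cost_sample": 0.3,                # via kt_st_assoc.set_attrib(k, v)
    "reagent_types": [
        {"rtname": "Lysis Buffer", "eol": 12},  # eol feeds timedelta(30 * r['eol'])
        {"rtname": "Wash Buffer", "eol": 6},
    ],
}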
@@ -209,7 +209,11 @@ def lookup_submissions(ctx:Settings,
match rsl_number:
case str():
logger.debug(f"Looking up BasicSubmission with rsl number: {rsl_number}")
rsl_number = RSLNamer(ctx=ctx, instr=rsl_number).parsed_name
try:
rsl_number = RSLNamer(ctx=ctx, instr=rsl_number).parsed_name
except AttributeError as e:
logger.error(f"No parsed name found, returning None.")
return None
# query = query.filter(models.BasicSubmission.rsl_plate_num==rsl_number)
query = query.filter(model.rsl_plate_num==rsl_number)
limit = 1
@@ -306,6 +310,7 @@ def lookup_controls(ctx:Settings,
control_type:models.ControlType|str|None=None,
start_date:date|str|int|None=None,
end_date:date|str|int|None=None,
control_name:str|None=None,
limit:int=0
) -> models.Control|List[models.Control]:
query = setup_lookup(ctx=ctx, locals=locals()).query(models.Control)
@@ -343,6 +348,12 @@ def lookup_controls(ctx:Settings,
end_date = parse(end_date).strftime("%Y-%m-%d")
logger.debug(f"Looking up BasicSubmissions from start date: {start_date} and end date: {end_date}")
query = query.filter(models.Control.submitted_date.between(start_date, end_date))
match control_name:
case str():
query = query.filter(models.Control.name.startswith(control_name))
limit = 1
case _:
pass
return query_return(query=query, limit=limit)

def lookup_control_types(ctx:Settings, limit:int=0) -> models.ControlType|List[models.ControlType]:

@@ -236,3 +236,24 @@ def update_subsampassoc_with_pcr(ctx:Settings, submission:models.BasicSubmission
result = store_object(ctx=ctx, object=assoc)
return result

def get_polymorphic_subclass(base:object, polymorphic_identity:str|None=None):
"""
Retrieves any subclasses of given base class whose polymorphic identity matches the string input.

Args:
base (object): Base (parent) class
polymorphic_identity (str | None): Name of subclass of interest. (Defaults to None)

Returns:
_type_: Subclass, or parent class on failure.
"""
if polymorphic_identity == None:
return base
else:
try:
return [item for item in base.__subclasses__() if item.__mapper_args__['polymorphic_identity']==polymorphic_identity][0]
except Exception as e:
logger.error(f"Could not get polymorph {polymorphic_identity} of {base} due to {e}")
return base

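A self-contained sketch of how the new helper resolves a subclass. The toy classes stand in for the real SQLAlchemy models, and the narrowed except clause is illustrative (the committed version catches Exception):

import logging

logger = logging.getLogger("submissions.example")

class BasicThing:
    pass

class WastewaterThing(BasicThing):
    __mapper_args__ = {"polymorphic_identity": "Wastewater"}

def get_polymorphic_subclass(base: object, polymorphic_identity: str | None = None):
    # Return the subclass whose polymorphic_identity matches, else fall back to the base class.
    if polymorphic_identity is None:
        return base
    try:
        return [item for item in base.__subclasses__()
                if item.__mapper_args__['polymorphic_identity'] == polymorphic_identity][0]
    except (IndexError, KeyError) as e:
        logger.error(f"Could not get polymorph {polymorphic_identity} of {base} due to {e}")
        return base

assert get_polymorphic_subclass(BasicThing, "Wastewater") is WastewaterThing
assert get_polymorphic_subclass(BasicThing, "Missing") is BasicThing
assert get_polymorphic_subclass(BasicThing) is BasicThing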
@@ -2,11 +2,11 @@
Contains all models for sqlalchemy
'''
from typing import Any
from sqlalchemy.orm import declarative_base
from sqlalchemy.orm import declarative_base, DeclarativeMeta
import logging
from pprint import pformat

Base = declarative_base()
Base: DeclarativeMeta = declarative_base()
metadata = Base.metadata

logger = logging.getLogger(f"submissions.{__name__}")

@@ -332,4 +332,7 @@ class SubmissionTypeKitTypeAssociation(Base):
self.constant_cost = 0.00

def __repr__(self) -> str:
return f"<SubmissionTypeKitTypeAssociation({self.submission_type.name})"
return f"<SubmissionTypeKitTypeAssociation({self.submission_type.name})"

def set_attrib(self, name, value):
self.__setattr__(name, value)
@@ -13,6 +13,9 @@ from sqlalchemy.ext.associationproxy import association_proxy
import uuid
from pandas import Timestamp
from dateutil.parser import parse
import re
import pandas as pd
from tools import row_map

logger = logging.getLogger(f"submissions.{__name__}")

@@ -43,6 +46,7 @@ class BasicSubmission(Base):
run_cost = Column(FLOAT(2)) #: total cost of running the plate. Set from constant and mutable kit costs at time of creation.
uploaded_by = Column(String(32)) #: user name of person who submitted the submission to the database.
comment = Column(JSON)
submission_category = Column(String(64))

submission_sample_associations = relationship(
"SubmissionSampleAssociation",
@@ -83,7 +87,7 @@ class BasicSubmission(Base):
dict: dictionary used in submissions summary and details
"""
# get lab from nested organization object
logger.debug(f"Converting {self.rsl_plate_num} to dict...")
# logger.debug(f"Converting {self.rsl_plate_num} to dict...")
try:
sub_lab = self.submitting_lab.name
except AttributeError:
@@ -125,6 +129,7 @@ class BasicSubmission(Base):
"id": self.id,
"Plate Number": self.rsl_plate_num,
"Submission Type": self.submission_type_name,
"Submission Category": self.submission_category,
"Submitter Plate Number": self.submitter_plate_num,
"Submitted Date": self.submitted_date.strftime("%Y-%m-%d"),
"Submitting Lab": sub_lab,
@@ -232,6 +237,34 @@ class BasicSubmission(Base):
else:
continue
return output_list

@classmethod
def parse_info(cls, input_dict:dict, xl:pd.ExcelFile|None=None) -> dict:
"""
Update submission dictionary with type specific information

Args:
input_dict (dict): Input sample dictionary

Returns:
dict: Updated sample dictionary
"""
logger.debug(f"Calling {cls.__name__} info parser.")
return input_dict

@classmethod
def parse_samples(cls, input_dict:dict) -> dict:
"""
Update sample dictionary with type specific information

Args:
input_dict (dict): Input sample dictionary

Returns:
dict: Updated sample dictionary
"""
logger.debug(f"Called {cls.__name__} sample parser")
return input_dict

# Below are the custom submission types

@@ -252,7 +285,7 @@ class BacterialCulture(BasicSubmission):
output = super().to_dict(full_data=full_data)
if full_data:
output['controls'] = [item.to_sub_dict() for item in self.controls]
return output
return output

class Wastewater(BasicSubmission):
"""
@@ -278,6 +311,23 @@ class Wastewater(BasicSubmission):
output['Technician'] = f"Enr: {self.technician}, Ext: {self.ext_technician}, PCR: {self.pcr_technician}"
return output

@classmethod
def parse_info(cls, input_dict:dict, xl:pd.ExcelFile|None=None) -> dict:
"""
Update submission dictionary with type specific information. Extends parent

Args:
input_dict (dict): Input sample dictionary

Returns:
dict: Updated sample dictionary
"""
input_dict = super().parse_info(input_dict)
if xl != None:
input_dict['csv'] = xl.parse("Copy to import file")
return input_dict


class WastewaterArtic(BasicSubmission):
"""
derivative submission type for artic wastewater
@@ -303,6 +353,25 @@ class WastewaterArtic(BasicSubmission):
except Exception as e:
logger.error(f"Calculation error: {e}")

@classmethod
def parse_samples(cls, input_dict: dict) -> dict:
"""
Update sample dictionary with type specific information. Extends parent.

Args:
input_dict (dict): Input sample dictionary

Returns:
dict: Updated sample dictionary
"""
input_dict = super().parse_samples(input_dict)
input_dict['sample_type'] = "Wastewater Sample"
# Because generate_sample_object needs the submitter_id and the artic has the "({origin well})"
# at the end, this has to be done here. No moving to sqlalchemy object :(
input_dict['submitter_id'] = re.sub(r"\s\(.+\)$", "", str(input_dict['submitter_id'])).strip()
return input_dict

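A quick illustration of the submitter_id clean-up above, with made-up sample IDs: the trailing " (origin well)" suffix that artic sample names carry is stripped before the database lookup, and IDs without the suffix pass through unchanged.

import re

for raw in ["EN1234567 (A1)", "EN1234567"]:
    # Same substitution as the parser: drop a trailing " (...)" group, then strip whitespace.
    print(re.sub(r"\s\(.+\)$", "", str(raw)).strip())
# both lines print "EN1234567"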
class BasicSample(Base):
"""
Base of basic sample which polymorphs into BCSample and WWSample
@@ -364,26 +433,31 @@ class BasicSample(Base):
Returns:
dict: 'well' and sample submitter_id as 'name'
"""
row_map = {1:"A", 2:"B", 3:"C", 4:"D", 5:"E", 6:"F", 7:"G", 8:"H"}
self.assoc = [item for item in self.sample_submission_associations if item.submission.rsl_plate_num==submission_rsl][0]

assoc = [item for item in self.sample_submission_associations if item.submission.rsl_plate_num==submission_rsl][0]
sample = {}
try:
sample['well'] = f"{row_map[self.assoc.row]}{self.assoc.column}"
sample['well'] = f"{row_map[assoc.row]}{assoc.column}"
except KeyError as e:
logger.error(f"Unable to find row {self.assoc.row} in row_map.")
logger.error(f"Unable to find row {assoc.row} in row_map.")
sample['well'] = None
sample['name'] = self.submitter_id
return sample

def to_hitpick(self, submission_rsl:str|None=None) -> dict|None:
"""
Outputs a dictionary of locations
Outputs a dictionary usable for html plate maps.

Returns:
dict: dictionary of sample id, row and column in elution plate
"""
# Since there is no PCR, a negligible result is necessary.
return dict(name=self.submitter_id, positive=False)
assoc = [item for item in self.sample_submission_associations if item.submission.rsl_plate_num==submission_rsl][0]
tooltip_text = f"""
Sample name: {self.submitter_id}<br>
Well: {row_map[assoc.row]}{assoc.column}
"""
return dict(name=self.submitter_id, positive=False, tooltip=tooltip_text)

class WastewaterSample(BasicSample):
"""
@@ -445,42 +519,24 @@ class WastewaterSample(BasicSample):
value = self.submitter_id
super().set_attribute(name, value)


def to_sub_dict(self, submission_rsl:str) -> dict:
"""
Gui friendly dictionary. Extends parent method.
This version will include PCR status.

Args:
submission_rsl (str): RSL plate number (passed down from the submission.to_dict() function)

Returns:
dict: Alphanumeric well id and sample name
"""
# Get the relevant submission association for this sample
sample = super().to_sub_dict(submission_rsl=submission_rsl)
# check if PCR data exists.
try:
check = self.assoc.ct_n1 != None and self.assoc.ct_n2 != None
except AttributeError as e:
check = False
if check:
sample['name'] = f"{self.submitter_id}\n\t- ct N1: {'{:.2f}'.format(self.assoc.ct_n1)} ({self.assoc.n1_status})\n\t- ct N2: {'{:.2f}'.format(self.assoc.ct_n2)} ({self.assoc.n2_status})"
return sample

def to_hitpick(self, submission_rsl:str) -> dict|None:
"""
Outputs a dictionary of locations if sample is positive
Outputs a dictionary usable for html plate maps. Extends parent method.

Returns:
dict: dictionary of sample id, row and column in elution plate
"""
sample = super().to_hitpick(submission_rsl=submission_rsl)
assoc = [item for item in self.sample_submission_associations if item.submission.rsl_plate_num==submission_rsl][0]
# if either n1 or n2 is positive, include this sample
try:
sample['positive'] = any(["positive" in item for item in [self.assoc.n1_status, self.assoc.n2_status]])
sample['positive'] = any(["positive" in item for item in [assoc.n1_status, assoc.n2_status]])
except (TypeError, AttributeError) as e:
logger.error(f"Couldn't check positives for {self.rsl_number}. Looks like there isn't PCR data.")
try:
sample['tooltip'] += f"<br>- ct N1: {'{:.2f}'.format(assoc.ct_n1)} ({assoc.n1_status})<br>- ct N2: {'{:.2f}'.format(assoc.ct_n2)} ({assoc.n2_status})"
except (TypeError, AttributeError) as e:
logger.error(f"Couldn't set tooltip for {self.rsl_number}. Looks like there isn't PCR data.")
return sample

class BacterialCultureSample(BasicSample):

@@ -6,7 +6,7 @@ import pprint
from typing import List
import pandas as pd
from pathlib import Path
from backend.db import models, lookup_kit_types, lookup_submission_type, lookup_samples
from backend.db import models, lookup_kit_types, lookup_submission_type, lookup_samples, get_polymorphic_subclass
from backend.pydant import PydSubmission, PydReagent
import logging
from collections import OrderedDict
@@ -91,12 +91,11 @@ class SheetParser(object):
Pulls basic information from the excel sheet
"""
info = InfoParser(ctx=self.ctx, xl=self.xl, submission_type=self.sub['submission_type']['value']).parse_info()
parser_query = f"parse_{self.sub['submission_type']['value'].replace(' ', '_').lower()}"
try:
custom_parser = getattr(self, parser_query)
info = custom_parser(info)
except AttributeError:
logger.error(f"Couldn't find submission parser: {parser_query}")
# parser_query = f"parse_{self.sub['submission_type']['value'].replace(' ', '_').lower()}"
# custom_parser = getattr(self, parser_query)

# except AttributeError:
# logger.error(f"Couldn't find submission parser: {parser_query}")
for k,v in info.items():
match k:
case "sample":
@@ -120,41 +119,41 @@ class SheetParser(object):
"""
self.sample_result, self.sub['samples'] = SampleParser(ctx=self.ctx, xl=self.xl, submission_type=self.sub['submission_type']['value']).parse_samples()

def parse_bacterial_culture(self, input_dict) -> dict:
"""
Update submission dictionary with type specific information
# def parse_bacterial_culture(self, input_dict) -> dict:
# """
# Update submission dictionary with type specific information

Args:
input_dict (dict): Input sample dictionary
# Args:
# input_dict (dict): Input sample dictionary

Returns:
dict: Updated sample dictionary
"""
return input_dict
# Returns:
# dict: Updated sample dictionary
# """
# return input_dict

def parse_wastewater(self, input_dict) -> dict:
"""
Update submission dictionary with type specific information
# def parse_wastewater(self, input_dict) -> dict:
# """
# Update submission dictionary with type specific information

Args:
input_dict (dict): Input sample dictionary
# Args:
# input_dict (dict): Input sample dictionary

Returns:
dict: Updated sample dictionary
"""
return input_dict
# Returns:
# dict: Updated sample dictionary
# """
# return input_dict

def parse_wastewater_artic(self, input_dict:dict) -> dict:
"""
Update submission dictionary with type specific information
# def parse_wastewater_artic(self, input_dict:dict) -> dict:
# """
# Update submission dictionary with type specific information

Args:
input_dict (dict): Input sample dictionary
# Args:
# input_dict (dict): Input sample dictionary

Returns:
dict: Updated sample dictionary
"""
return input_dict
# Returns:
# dict: Updated sample dictionary
# """
# return input_dict


def import_kit_validation_check(self):
@@ -206,6 +205,7 @@ class InfoParser(object):
self.map = self.fetch_submission_info_map(submission_type=submission_type)
self.xl = xl
logger.debug(f"Info map for InfoParser: {pprint.pformat(self.map)}")


def fetch_submission_info_map(self, submission_type:str|dict) -> dict:
"""
@@ -223,6 +223,8 @@ class InfoParser(object):
# submission_type = lookup_submissiontype_by_name(ctx=self.ctx, type_name=submission_type['value'])
submission_type = lookup_submission_type(ctx=self.ctx, name=submission_type['value'])
info_map = submission_type.info_map
# Get the parse_info method from the submission type specified
self.custom_parser = get_polymorphic_subclass(models.BasicSubmission, submission_type.name).parse_info
return info_map

def parse_info(self) -> dict:
@@ -263,7 +265,13 @@ class InfoParser(object):
continue
else:
dicto[item] = dict(value=convert_nans_to_nones(value), parsed=False)
return dicto
try:
check = dicto['submission_category'] not in ["", None]
except KeyError:
check = False
return self.custom_parser(input_dict=dicto, xl=self.xl)


class ReagentParser(object):

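Taken together, the InfoParser changes above (and the matching SampleParser change below) replace the old getattr(self, f"parse_...") lookups with classmethod hooks on the submission-type models, resolved through get_polymorphic_subclass. A stripped-down sketch of that dispatch; class names mirror the diff, but the bodies are stand-ins rather than the real parsing logic:

import logging

logger = logging.getLogger("submissions.example")

class BasicSubmission:
    @classmethod
    def parse_info(cls, input_dict: dict, xl=None) -> dict:
        logger.debug(f"Calling {cls.__name__} info parser.")
        return input_dict

class Wastewater(BasicSubmission):
    __mapper_args__ = {"polymorphic_identity": "Wastewater"}

    @classmethod
    def parse_info(cls, input_dict: dict, xl=None) -> dict:
        # Stand-in for the real override, which reads the "Copy to import file"
        # sheet out of the submission workbook when xl is provided.
        input_dict = super().parse_info(input_dict)
        input_dict["csv"] = None
        return input_dict

def get_polymorphic_subclass(base, polymorphic_identity=None):
    # Simplified copy of the helper added earlier in this commit.
    if polymorphic_identity is None:
        return base
    matches = [sub for sub in base.__subclasses__()
               if sub.__mapper_args__.get("polymorphic_identity") == polymorphic_identity]
    return matches[0] if matches else base

custom_parser = get_polymorphic_subclass(BasicSubmission, "Wastewater").parse_info
print(custom_parser(input_dict={"submitter_plate_num": "ABC-123"}))
# -> {'submitter_plate_num': 'ABC-123', 'csv': None}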
@@ -351,6 +359,7 @@ class SampleParser(object):
submission_type = lookup_submission_type(ctx=self.ctx, name=submission_type)
logger.debug(f"info_map: {pprint.pformat(submission_type.info_map)}")
sample_info_map = submission_type.info_map['samples']
self.custom_parser = get_polymorphic_subclass(models.BasicSubmission, submission_type.name).parse_samples
return sample_info_map

def construct_plate_map(self, plate_map_location:dict) -> pd.DataFrame:
@@ -473,12 +482,12 @@ class SampleParser(object):
except KeyError:
translated_dict[k] = convert_nans_to_nones(v)
translated_dict['sample_type'] = f"{self.submission_type} Sample"
parser_query = f"parse_{translated_dict['sample_type'].replace(' ', '_').lower()}"
try:
custom_parser = getattr(self, parser_query)
translated_dict = custom_parser(translated_dict)
except AttributeError:
logger.error(f"Couldn't get custom parser: {parser_query}")
# parser_query = f"parse_{translated_dict['sample_type'].replace(' ', '_').lower()}"
# try:
# custom_parser = getattr(self, parser_query)
translated_dict = self.custom_parser(translated_dict)
# except AttributeError:
# logger.error(f"Couldn't get custom parser: {parser_query}")
if generate:
new_samples.append(self.generate_sample_object(translated_dict))
else:
@@ -502,7 +511,7 @@ class SampleParser(object):
logger.error(f"Could not find the model {query}. Using generic.")
database_obj = models.BasicSample
logger.debug(f"Searching database for {input_dict['submitter_id']}...")
instance = lookup_samples(ctx=self.ctx, submitter_id=input_dict['submitter_id'])
instance = lookup_samples(ctx=self.ctx, submitter_id=str(input_dict['submitter_id']))
if instance == None:
logger.debug(f"Couldn't find sample {input_dict['submitter_id']}. Creating new sample.")
instance = database_obj()
@@ -516,63 +525,63 @@ class SampleParser(object):
return dict(sample=instance, row=input_dict['row'], column=input_dict['column'])


def parse_bacterial_culture_sample(self, input_dict:dict) -> dict:
"""
Update sample dictionary with bacterial culture specific information
# def parse_bacterial_culture_sample(self, input_dict:dict) -> dict:
# """
# Update sample dictionary with bacterial culture specific information

Args:
input_dict (dict): Input sample dictionary
# Args:
# input_dict (dict): Input sample dictionary

Returns:
dict: Updated sample dictionary
"""
logger.debug("Called bacterial culture sample parser")
return input_dict
# Returns:
# dict: Updated sample dictionary
# """
# logger.debug("Called bacterial culture sample parser")
# return input_dict

def parse_wastewater_sample(self, input_dict:dict) -> dict:
"""
Update sample dictionary with wastewater specific information
# def parse_wastewater_sample(self, input_dict:dict) -> dict:
# """
# Update sample dictionary with wastewater specific information

Args:
input_dict (dict): Input sample dictionary
# Args:
# input_dict (dict): Input sample dictionary

Returns:
dict: Updated sample dictionary
"""
logger.debug(f"Called wastewater sample parser")
return input_dict
# Returns:
# dict: Updated sample dictionary
# """
# logger.debug(f"Called wastewater sample parser")
# return input_dict

def parse_wastewater_artic_sample(self, input_dict:dict) -> dict:
"""
Update sample dictionary with artic specific information
# def parse_wastewater_artic_sample(self, input_dict:dict) -> dict:
# """
# Update sample dictionary with artic specific information

Args:
input_dict (dict): Input sample dictionary
# Args:
# input_dict (dict): Input sample dictionary

Returns:
dict: Updated sample dictionary
"""
logger.debug("Called wastewater artic sample parser")
input_dict['sample_type'] = "Wastewater Sample"
# Because generate_sample_object needs the submitter_id and the artic has the "({origin well})"
# at the end, this has to be done here. No moving to sqlalchemy object :(
input_dict['submitter_id'] = re.sub(r"\s\(.+\)$", "", str(input_dict['submitter_id'])).strip()
return input_dict
# Returns:
# dict: Updated sample dictionary
# """
# logger.debug("Called wastewater artic sample parser")
# input_dict['sample_type'] = "Wastewater Sample"
# # Because generate_sample_object needs the submitter_id and the artic has the "({origin well})"
# # at the end, this has to be done here. No moving to sqlalchemy object :(
# input_dict['submitter_id'] = re.sub(r"\s\(.+\)$", "", str(input_dict['submitter_id'])).strip()
# return input_dict

def parse_first_strand_sample(self, input_dict:dict) -> dict:
"""
Update sample dictionary with first strand specific information
# def parse_first_strand_sample(self, input_dict:dict) -> dict:
# """
# Update sample dictionary with first strand specific information

Args:
input_dict (dict): Input sample dictionary
# Args:
# input_dict (dict): Input sample dictionary

Returns:
dict: Updated sample dictionary
"""
logger.debug("Called first strand sample parser")
input_dict['well'] = re.search(r"\s\((.*)\)$", input_dict['submitter_id']).groups()[0]
input_dict['submitter_id'] = re.sub(r"\s\(.*\)$", "", str(input_dict['submitter_id'])).strip()
return input_dict
# Returns:
# dict: Updated sample dictionary
# """
# logger.debug("Called first strand sample parser")
# input_dict['well'] = re.search(r"\s\((.*)\)$", input_dict['submitter_id']).groups()[0]
# input_dict['submitter_id'] = re.sub(r"\s\(.*\)$", "", str(input_dict['submitter_id'])).strip()
# return input_dict

def grab_plates(self) -> List[str]:
"""

@@ -12,8 +12,6 @@ logger = logging.getLogger(f"submissions.{__name__}")

env = jinja_template_loading()

logger = logging.getLogger(f"submissions.{__name__}")

def make_report_xlsx(records:list[dict]) -> Tuple[DataFrame, DataFrame]:
"""
create the dataframe for a report

@@ -86,6 +86,7 @@ class PydSubmission(BaseModel, extra=Extra.allow):
sample_count: dict|None
extraction_kit: dict|None
technician: dict|None
submission_category: dict|None = Field(default=dict(value=None, parsed=False), validate_default=True)
reagents: List[dict] = []
samples: List[Any]

@@ -205,3 +206,11 @@ class PydSubmission(BaseModel, extra=Extra.allow):
return dict(value=value, parsed=True)
else:
return dict(value=RSLNamer(ctx=values.data['ctx'], instr=values.data['filepath'].__str__()).submission_type.title(), parsed=False)

@field_validator("submission_category")
@classmethod
def rescue_category(cls, value, values):
if value['value'] not in ["Research", "Diagnostic", "Surveillance"]:
value['value'] = values.data['submission_type']['value']
return value

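A minimal, runnable sketch of the new submission_category rescue, assuming pydantic v2 and trimming the model down to the two fields the validator touches (everything else about PydSubmission is omitted here):

from pydantic import BaseModel, Field, field_validator

class PydSubmissionSketch(BaseModel):
    submission_type: dict
    submission_category: dict | None = Field(default=dict(value=None, parsed=False), validate_default=True)

    @field_validator("submission_category")
    @classmethod
    def rescue_category(cls, value, values):
        # Fall back to the submission type when the parsed category is unrecognised.
        if value['value'] not in ["Research", "Diagnostic", "Surveillance"]:
            value['value'] = values.data['submission_type']['value']
        return value

print(PydSubmissionSketch(submission_type=dict(value="Wastewater", parsed=True)).submission_category)
# -> {'value': 'Wastewater', 'parsed': False}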