Post code clean-up, before attempting to upgrade controls to FigureWidgets
@@ -96,7 +96,7 @@ class BaseClass(Base):
|
||||
output = {}
|
||||
for k, v in dicto.items():
|
||||
if len(args) > 0 and k not in args:
|
||||
# logger.debug(f"Don't want {k}")
|
||||
# logger.debug(f"{k} not selected as being of interest.")
|
||||
continue
|
||||
else:
|
||||
output[k] = v
|
||||
|
||||
@@ -3,14 +3,12 @@ All control related models.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from pprint import pformat
|
||||
|
||||
from PyQt6.QtWidgets import QWidget, QCheckBox, QLabel
|
||||
from pandas import DataFrame
|
||||
from sqlalchemy import Column, String, TIMESTAMP, JSON, INTEGER, ForeignKey, case, FLOAT
|
||||
from sqlalchemy.orm import relationship, Query, validates
|
||||
import logging, re
|
||||
from operator import itemgetter
|
||||
|
||||
from . import BaseClass
|
||||
from tools import setup_lookup, report_result, Result, Report, Settings, get_unique_values_in_df_column, super_splitter
|
||||
from datetime import date, datetime, timedelta
|
||||
@@ -73,7 +71,6 @@ class ControlType(BaseClass):
|
||||
if not self.instances:
|
||||
return
|
||||
jsoner = getattr(self.instances[0], mode)
|
||||
# logger.debug(f"JSON retrieved: {jsoner.keys()}")
|
||||
try:
|
||||
# NOTE: Pick genera (all should have same subtypes)
|
||||
genera = list(jsoner.keys())[0]
|
||||
@@ -81,10 +78,15 @@ class ControlType(BaseClass):
|
||||
return []
|
||||
# NOTE: remove items that don't have relevant data
|
||||
subtypes = [item for item in jsoner[genera] if "_hashes" not in item and "_ratio" not in item]
|
||||
# logger.debug(f"subtypes out: {pformat(subtypes)}")
|
||||
return subtypes
|
||||
|
||||
def get_instance_class(self):
|
||||
def get_instance_class(self) -> Control:
|
||||
"""
|
||||
Retrieves the Control class associated with this controltype
|
||||
|
||||
Returns:
|
||||
Control: Associated Control class
|
||||
"""
|
||||
return Control.find_polymorphic_subclass(polymorphic_identity=self.name)
|
||||
|
||||
@classmethod
|
||||
@@ -93,7 +95,7 @@ class ControlType(BaseClass):
|
||||
Gets list of Control types if they have targets
|
||||
|
||||
Returns:
|
||||
List[ControlType]: Control types that have targets
|
||||
Generator[str, None, None]: Control types that have targets
|
||||
"""
|
||||
ct = cls.query(name=control_type).targets
|
||||
return (item for item in ct.keys() if ct[item])
|
||||
@@ -106,7 +108,6 @@ class ControlType(BaseClass):
|
||||
Returns:
|
||||
Pattern: Constructed pattern
|
||||
"""
|
||||
# strings = list(set([item.name.split("-")[0] for item in cls.get_positive_control_types()]))
|
||||
strings = list(set([super_splitter(item, "-", 0) for item in cls.get_positive_control_types(control_type)]))
|
||||
return re.compile(rf"(^{'|^'.join(strings)})-.*", flags=re.IGNORECASE)
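A rough sketch of what the pattern constructed above looks like, using made-up control-type names (illustrative only, not from this commit):

    # Hypothetical illustration of the regex built in the method above.
    import re
    strings = ["EN", "MCS"]  # e.g. prefixes split off names like "EN-NOS", "MCS-SSTI"
    pattern = re.compile(rf"(^{'|^'.join(strings)})-.*", flags=re.IGNORECASE)
    assert pattern.match("EN-NOS-230101")            # prefix followed by "-" matches
    assert pattern.match("mcs-ssti-1") is not None   # IGNORECASE makes matching case-insensitive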
|
||||
|
||||
@@ -144,7 +145,7 @@ class Control(BaseClass):
|
||||
|
||||
@classmethod
|
||||
def find_polymorphic_subclass(cls, polymorphic_identity: str | ControlType | None = None,
|
||||
attrs: dict | None = None):
|
||||
attrs: dict | None = None) -> Control:
|
||||
"""
|
||||
Find subclass based on polymorphic identity or relevant attributes.
|
||||
|
||||
@@ -153,7 +154,7 @@ class Control(BaseClass):
|
||||
attrs (dict | None, optional): Attributes of the relevant class. Defaults to None.
|
||||
|
||||
Returns:
|
||||
_type_: Subclass of interest.
|
||||
Control: Subclass of interest.
|
||||
"""
|
||||
if isinstance(polymorphic_identity, dict):
|
||||
# logger.debug(f"Controlling for dict value")
|
||||
@@ -184,7 +185,8 @@ class Control(BaseClass):
|
||||
@classmethod
|
||||
def make_parent_buttons(cls, parent: QWidget) -> None:
|
||||
"""
|
||||
|
||||
Super that will make buttons in a CustomFigure. Made to be overridden.
|
||||
|
||||
Args:
|
||||
parent (QWidget): chart holding widget to add buttons to.
|
||||
|
||||
@@ -196,13 +198,11 @@ class Control(BaseClass):
|
||||
@classmethod
|
||||
def make_chart(cls, parent, chart_settings: dict, ctx):
|
||||
"""
|
||||
Dummy operation to be overridden by child classes.
|
||||
|
||||
Args:
|
||||
chart_settings (dict): settings passed down from chart widget
|
||||
ctx (Settings): settings passed down from gui
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
return None
|
||||
|
||||
@@ -220,7 +220,13 @@ class PCRControl(Control):
|
||||
polymorphic_load="inline",
|
||||
inherit_condition=(id == Control.id))
|
||||
|
||||
def to_sub_dict(self):
|
||||
def to_sub_dict(self) -> dict:
|
||||
"""
|
||||
Creates dictionary of fields for this object
|
||||
|
||||
Returns:
|
||||
dict: Output dict of name, ct, subtype, target, reagent_lot and submitted_date
|
||||
"""
|
||||
return dict(name=self.name, ct=self.ct, subtype=self.subtype, target=self.target, reagent_lot=self.reagent_lot,
|
||||
submitted_date=self.submitted_date.date())
|
||||
|
||||
@@ -237,15 +243,16 @@ class PCRControl(Control):
|
||||
Lookup control objects in the database based on a number of parameters.
|
||||
|
||||
Args:
|
||||
sub_type (models.ControlType | str | None, optional): Control archetype. Defaults to None.
|
||||
sub_type (str | None, optional): Control archetype. Defaults to None.
|
||||
start_date (date | str | int | None, optional): Beginning date to search by. Defaults to 2023-01-01 if end_date not None.
|
||||
end_date (date | str | int | None, optional): End date to search by. Defaults to today if start_date not None.
|
||||
control_name (str | None, optional): Name of control. Defaults to None.
|
||||
limit (int, optional): Maximum number of results to return (0 = all). Defaults to 0.
|
||||
|
||||
Returns:
|
||||
models.Control|List[models.Control]: Control object of interest.
|
||||
PCRControl|List[PCRControl]: Control object of interest.
|
||||
"""
|
||||
from backend.db import SubmissionType
|
||||
query: Query = cls.__database_session__.query(cls)
|
||||
# NOTE: by date range
|
||||
if start_date is not None and end_date is None:
|
||||
@@ -282,7 +289,12 @@ class PCRControl(Control):
|
||||
match sub_type:
|
||||
case str():
|
||||
from backend import BasicSubmission, SubmissionType
|
||||
# logger.debug(f"Lookup controls by SubmissionType str: {sub_type}")
|
||||
query = query.join(BasicSubmission).join(SubmissionType).filter(SubmissionType.name == sub_type)
|
||||
case SubmissionType():
|
||||
from backend import BasicSubmission
|
||||
# logger.debug(f"Lookup controls by SubmissionType: {sub_type}")
|
||||
query = query.join(BasicSubmission).filter(BasicSubmission.submission_type_name==sub_type.name)
|
||||
case _:
|
||||
pass
|
||||
match control_name:
|
||||
@@ -295,7 +307,18 @@ class PCRControl(Control):
|
||||
return cls.execute_query(query=query, limit=limit)
|
||||
|
||||
@classmethod
|
||||
def make_chart(cls, parent, chart_settings: dict, ctx):
|
||||
def make_chart(cls, parent, chart_settings: dict, ctx: Settings) -> Tuple[Report, "PCRFigure"]:
|
||||
"""
|
||||
Creates a PCRFigure. Overrides parent
|
||||
|
||||
Args:
|
||||
parent (__type__): Widget to contain the chart.
|
||||
chart_settings (dict): settings passed down from chart widget
|
||||
ctx (Settings): settings passed down from gui. Not used here.
|
||||
|
||||
Returns:
|
||||
Tuple[Report, "PCRFigure"]: Report of status and resulting figure.
|
||||
"""
|
||||
from frontend.visualizations.pcr_charts import PCRFigure
|
||||
parent.mode_typer.clear()
|
||||
parent.mode_typer.setEnabled(False)
|
||||
@@ -308,7 +331,7 @@ class PCRControl(Control):
|
||||
df = df[df.ct > 0.0]
|
||||
except AttributeError:
|
||||
df = df
|
||||
fig = PCRFigure(df=df, modes=None)
|
||||
fig = PCRFigure(df=df, modes=[])
|
||||
return report, fig
|
||||
|
||||
|
||||
@@ -324,16 +347,26 @@ class IridaControl(Control):
|
||||
sample = relationship("BacterialCultureSample", back_populates="control") #: This control's submission sample
|
||||
sample_id = Column(INTEGER,
|
||||
ForeignKey("_basicsample.id", ondelete="SET NULL", name="cont_BCS_id")) #: sample id key
|
||||
# submission_id = Column(INTEGER, ForeignKey("_basicsubmission.id")) #: parent submission id
|
||||
# submission = relationship("BacterialCulture", back_populates="controls",
|
||||
# foreign_keys=[submission_id]) #: parent submission
|
||||
|
||||
|
||||
__mapper_args__ = dict(polymorphic_identity="Irida Control",
|
||||
polymorphic_load="inline",
|
||||
inherit_condition=(id == Control.id))
|
||||
|
||||
@validates("sub_type")
|
||||
def enforce_subtype_literals(self, key: str, value: str):
|
||||
def enforce_subtype_literals(self, key: str, value: str) -> str:
|
||||
"""
|
||||
Validates the sub_type field against a list of acceptable values
|
||||
|
||||
Args:
|
||||
key (str): Field name
|
||||
value (str): Field Value
|
||||
|
||||
Raises:
|
||||
KeyError: Raised if value is not in the acceptable list.
|
||||
|
||||
Returns:
|
||||
str: Validated string.
|
||||
"""
|
||||
acceptables = ['ATCC49226', 'ATCC49619', 'EN-NOS', "EN-SSTI", "MCS-NOS", "MCS-SSTI", "SN-NOS", "SN-SSTI"]
|
||||
if value.upper() not in acceptables:
|
||||
raise KeyError(f"Sub-type must be in {acceptables}")
|
||||
@@ -346,7 +379,6 @@ class IridaControl(Control):
|
||||
Returns:
|
||||
dict: output dictionary containing: Name, Type, Targets, Top Kraken results
|
||||
"""
|
||||
# logger.debug("loading json string into dict")
|
||||
try:
|
||||
kraken = self.kraken
|
||||
except TypeError:
|
||||
@@ -405,8 +437,6 @@ class IridaControl(Control):
|
||||
k.strip("*") not in self.controltype.targets[control_sub_type])
|
||||
on_tar['Off-target'] = {f"{mode}_ratio": off_tar}
|
||||
data = on_tar
|
||||
# logger.debug(pformat(data))
|
||||
# logger.debug(f"Length of data: {len(data)}")
|
||||
# logger.debug("dict keys are genera of bacteria, e.g. 'Streptococcus'")
|
||||
for genus in data:
|
||||
_dict = dict(
|
||||
@@ -416,7 +446,6 @@ class IridaControl(Control):
|
||||
target='Target' if genus.strip("*") in self.controltype.targets[control_sub_type] else "Off-target"
|
||||
)
|
||||
# logger.debug("get Target or Off-target of genus")
|
||||
# logger.debug("set 'contains_hashes', etc for genus")
|
||||
for key in data[genus]:
|
||||
_dict[key] = data[genus][key]
|
||||
yield _dict
|
||||
@@ -462,12 +491,7 @@ class IridaControl(Control):
|
||||
query: Query = cls.__database_session__.query(cls)
|
||||
# NOTE: by control type
|
||||
match sub_type:
|
||||
# case ControlType():
|
||||
# # logger.debug(f"Looking up control by control type: {sub_type}")
|
||||
# query = query.filter(cls.controltype == sub_type)
|
||||
case str():
|
||||
# logger.debug(f"Looking up control by control type: {sub_type}")
|
||||
# query = query.join(ControlType).filter(ControlType.name == sub_type)
|
||||
query = query.filter(cls.sub_type == sub_type)
|
||||
case _:
|
||||
pass
|
||||
@@ -519,8 +543,6 @@ class IridaControl(Control):
|
||||
Args:
|
||||
parent (QWidget): chart holding widget to add buttons to.
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
super().make_parent_buttons(parent=parent)
|
||||
rows = parent.layout.rowCount()
|
||||
@@ -536,6 +558,17 @@ class IridaControl(Control):
|
||||
@classmethod
|
||||
@report_result
|
||||
def make_chart(cls, chart_settings: dict, parent, ctx) -> Tuple[Report, "IridaFigure" | None]:
|
||||
"""
|
||||
Creates an IridaFigure. Overrides parent
|
||||
|
||||
Args:
|
||||
parent (__type__): Widget to contain the chart.
|
||||
chart_settings (dict): settings passed down from chart widget
|
||||
ctx (Settings): settings passed down from gui.
|
||||
|
||||
Returns:
|
||||
Tuple[Report, "IridaFigure"]: Report of status and resulting figure.
|
||||
"""
|
||||
from frontend.visualizations import IridaFigure
|
||||
try:
|
||||
checker = parent.findChild(QCheckBox, name="irida_check")
|
||||
@@ -574,8 +607,6 @@ class IridaControl(Control):
|
||||
# NOTE: send dataframe to chart maker
|
||||
df, modes = cls.prep_df(ctx=ctx, df=df)
|
||||
# logger.debug(f"prepped df: \n {df}")
|
||||
# assert modes
|
||||
# logger.debug(f"modes: {modes}")
|
||||
fig = IridaFigure(df=df, ytitle=title, modes=modes, parent=parent,
|
||||
months=chart_settings['months'])
|
||||
return report, fig
|
||||
@@ -604,7 +635,6 @@ class IridaControl(Control):
|
||||
else:
|
||||
safe.append(column)
|
||||
if "percent" in column:
|
||||
# count_col = [item for item in df.columns if "count" in item][0]
|
||||
try:
|
||||
count_col = next(item for item in df.columns if "count" in item)
|
||||
except StopIteration:
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
All kit and reagent related models
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import datetime
|
||||
import json
|
||||
from pprint import pformat
|
||||
@@ -152,7 +151,7 @@ class KitType(BaseClass):
|
||||
submission_type (str | SubmissionType | None, optional): Submission type to narrow results. Defaults to None.
|
||||
|
||||
Returns:
|
||||
List[ReagentRole]: List of reagents linked to this kit.
|
||||
Generator[ReagentRole, None, None]: List of reagents linked to this kit.
|
||||
"""
|
||||
match submission_type:
|
||||
case SubmissionType():
|
||||
@@ -173,7 +172,7 @@ class KitType(BaseClass):
|
||||
return (item.reagent_role for item in relevant_associations)
|
||||
|
||||
# TODO: Move to BasicSubmission?
|
||||
def construct_xl_map_for_use(self, submission_type: str | SubmissionType) -> Generator[(str, str)]:
|
||||
def construct_xl_map_for_use(self, submission_type: str | SubmissionType) -> Generator[(str, str), None, None]:
|
||||
"""
|
||||
Creates map of locations in Excel workbook for a SubmissionType
|
||||
|
||||
@@ -181,9 +180,8 @@ class KitType(BaseClass):
|
||||
submission_type (str | SubmissionType): SubmissionType.name
|
||||
|
||||
Returns:
|
||||
dict: Dictionary containing information locations.
|
||||
Generator[(str, str), None, None]: Tuple containing information locations.
|
||||
"""
|
||||
# info_map = {}
|
||||
# NOTE: Account for submission_type variable type.
|
||||
match submission_type:
|
||||
case str():
|
||||
@@ -221,7 +219,7 @@ class KitType(BaseClass):
|
||||
limit (int, optional): Maximum number of results to return (0 = all). Defaults to 0.
|
||||
|
||||
Returns:
|
||||
models.KitType|List[models.KitType]: KitType(s) of interest.
|
||||
KitType|List[KitType]: KitType(s) of interest.
|
||||
"""
|
||||
query: Query = cls.__database_session__.query(cls)
|
||||
match used_for:
|
||||
@@ -257,7 +255,16 @@ class KitType(BaseClass):
|
||||
def save(self):
|
||||
super().save()
|
||||
|
||||
def to_export_dict(self, submission_type: SubmissionType):
|
||||
def to_export_dict(self, submission_type: SubmissionType) -> dict:
|
||||
"""
|
||||
Creates dictionary for exporting to yml used in new SubmissionType Construction
|
||||
|
||||
Args:
|
||||
submission_type (SubmissionType): SubmissionType of interest.
|
||||
|
||||
Returns:
|
||||
dict: Dictionary containing relevant info for SubmissionType construction
|
||||
"""
|
||||
base_dict = dict(name=self.name)
|
||||
base_dict['reagent roles'] = []
|
||||
base_dict['equipment roles'] = []
|
||||
@@ -382,7 +389,13 @@ class ReagentRole(BaseClass):
|
||||
from backend.validators.pydant import PydReagent
|
||||
return PydReagent(lot=None, role=self.name, name=self.name, expiry=date.today())
|
||||
|
||||
def to_export_dict(self):
|
||||
def to_export_dict(self) -> dict:
|
||||
"""
|
||||
Creates dictionary for exporting to yml used in new SubmissionType Construction
|
||||
|
||||
Returns:
|
||||
dict: Dictionary containing relevant info for SubmissionType construction
|
||||
"""
|
||||
return dict(role=self.name, extension_of_life=self.eol_ext.days)
|
||||
|
||||
@check_authorization
|
||||
@@ -664,7 +677,7 @@ class SubmissionType(BaseClass):
|
||||
"SubmissionTypeTipRoleAssociation",
|
||||
back_populates="submission_type",
|
||||
cascade="all, delete-orphan"
|
||||
)
|
||||
) #: Association of tiproles
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""
|
||||
@@ -738,12 +751,12 @@ class SubmissionType(BaseClass):
|
||||
"""
|
||||
return self.sample_map
|
||||
|
||||
def construct_equipment_map(self) -> Generator[str, dict]:
|
||||
def construct_equipment_map(self) -> Generator[(str, dict), None, None]:
|
||||
"""
|
||||
Constructs map of equipment to excel cells.
|
||||
|
||||
Returns:
|
||||
dict: Map equipment locations in excel sheet
|
||||
Generator[(str, dict), None, None]: Map equipment locations in excel sheet
|
||||
"""
|
||||
# logger.debug("Iterating through equipment roles")
|
||||
for item in self.submissiontype_equipmentrole_associations:
|
||||
@@ -752,12 +765,12 @@ class SubmissionType(BaseClass):
|
||||
emap = {}
|
||||
yield item.equipment_role.name, emap
|
||||
|
||||
def construct_tips_map(self) -> Generator[str, dict]:
|
||||
def construct_tips_map(self) -> Generator[(str, dict), None, None]:
|
||||
"""
|
||||
Constructs map of tips to excel cells.
|
||||
|
||||
Returns:
|
||||
dict: Tip locations in the excel sheet.
|
||||
Generator[(str, dict), None, None]: Tip locations in the excel sheet.
|
||||
"""
|
||||
for item in self.submissiontype_tiprole_associations:
|
||||
tmap = item.uses
|
||||
@@ -770,7 +783,7 @@ class SubmissionType(BaseClass):
|
||||
Returns PydEquipmentRole of all equipment associated with this SubmissionType
|
||||
|
||||
Returns:
|
||||
List[PydEquipmentRole]: List of equipment roles
|
||||
Generator['PydEquipmentRole', None, None]: List of equipment roles
|
||||
"""
|
||||
return (item.to_pydantic(submission_type=self, extraction_kit=extraction_kit) for item in self.equipment)
|
||||
|
||||
@@ -846,6 +859,12 @@ class SubmissionType(BaseClass):
|
||||
return cls.execute_query(query=query, limit=limit)
|
||||
|
||||
def to_export_dict(self):
|
||||
"""
|
||||
Creates dictionary for exporting to yml used in new SubmissionType Construction
|
||||
|
||||
Returns:
|
||||
dict: Dictionary containing relevant info for SubmissionType construction
|
||||
"""
|
||||
base_dict = dict(name=self.name)
|
||||
base_dict['info'] = self.construct_info_map(mode='export')
|
||||
base_dict['defaults'] = self.defaults
|
||||
@@ -862,7 +881,20 @@ class SubmissionType(BaseClass):
|
||||
|
||||
@classmethod
|
||||
@check_authorization
|
||||
def import_from_json(cls, filepath: Path | str):
|
||||
def import_from_json(cls, filepath: Path | str) -> SubmissionType:
|
||||
"""
|
||||
Creates a new SubmissionType from a yml file
|
||||
|
||||
Args:
|
||||
filepath (Path | str): Input yml file.
|
||||
|
||||
Raises:
|
||||
Exception: Raised if filetype is not a yml or json
|
||||
|
||||
Returns:
|
||||
SubmissionType: Created SubmissionType
|
||||
"""
|
||||
full = True
|
||||
yaml.add_constructor("!regex", yaml_regex_creator)
|
||||
if isinstance(filepath, str):
|
||||
filepath = Path(filepath)
|
||||
@@ -874,70 +906,76 @@ class SubmissionType(BaseClass):
|
||||
else:
|
||||
raise Exception(f"Filetype {filepath.suffix} not supported.")
|
||||
logger.debug(pformat(import_dict))
|
||||
submission_type = cls.query(name=import_dict['name'])
|
||||
if submission_type:
|
||||
return submission_type
|
||||
submission_type = cls()
|
||||
submission_type.name = import_dict['name']
|
||||
submission_type.info_map = import_dict['info']
|
||||
submission_type.sample_map = import_dict['samples']
|
||||
submission_type.defaults = import_dict['defaults']
|
||||
for kit in import_dict['kits']:
|
||||
new_kit = KitType.query(name=kit['kit_type']['name'])
|
||||
if not new_kit:
|
||||
new_kit = KitType(name=kit['kit_type']['name'])
|
||||
for role in kit['kit_type']['reagent roles']:
|
||||
new_role = ReagentRole.query(name=role['role'])
|
||||
if new_role:
|
||||
check = input(f"Found existing role: {new_role.name}. Use this? [Y/n]: ")
|
||||
if check.lower() == "n":
|
||||
new_role = None
|
||||
else:
|
||||
pass
|
||||
if not new_role:
|
||||
eol = datetime.timedelta(role['extension_of_life'])
|
||||
new_role = ReagentRole(name=role['role'], eol_ext=eol)
|
||||
uses = dict(expiry=role['expiry'], lot=role['lot'], name=role['name'], sheet=role['sheet'])
|
||||
ktrr_assoc = KitTypeReagentRoleAssociation(kit_type=new_kit, reagent_role=new_role, uses=uses)
|
||||
ktrr_assoc.submission_type = submission_type
|
||||
ktrr_assoc.required = role['required']
|
||||
ktst_assoc = SubmissionTypeKitTypeAssociation(
|
||||
kit_type=new_kit,
|
||||
submission_type=submission_type,
|
||||
mutable_cost_sample=kit['mutable_cost_sample'],
|
||||
mutable_cost_column=kit['mutable_cost_column'],
|
||||
constant_cost=kit['constant_cost']
|
||||
)
|
||||
for role in kit['kit_type']['equipment roles']:
|
||||
new_role = EquipmentRole.query(name=role['role'])
|
||||
if new_role:
|
||||
check = input(f"Found existing role: {new_role.name}. Use this? [Y/n]: ")
|
||||
if check.lower() == "n":
|
||||
new_role = None
|
||||
else:
|
||||
pass
|
||||
if not new_role:
|
||||
new_role = EquipmentRole(name=role['role'])
|
||||
for equipment in Equipment.assign_equipment(equipment_role=new_role):
|
||||
new_role.instances.append(equipment)
|
||||
ster_assoc = SubmissionTypeEquipmentRoleAssociation(submission_type=submission_type,
|
||||
equipment_role=new_role)
|
||||
try:
|
||||
uses = dict(name=role['name'], process=role['process'], sheet=role['sheet'], static=role['static'])
|
||||
except KeyError:
|
||||
uses = None
|
||||
ster_assoc.uses = uses
|
||||
for process in role['processes']:
|
||||
new_process = Process.query(name=process)
|
||||
if not new_process:
|
||||
new_process = Process(name=process)
|
||||
new_process.submission_types.append(submission_type)
|
||||
new_process.kit_types.append(new_kit)
|
||||
new_process.equipment_roles.append(new_role)
|
||||
if 'orgs' in import_dict.keys():
|
||||
logger.info("Found Organizations to be imported.")
|
||||
Organization.import_from_yml(filepath=filepath)
|
||||
return submission_type
|
||||
try:
|
||||
submission_type = cls.query(name=import_dict['name'])
|
||||
except KeyError:
|
||||
logger.error(f"Submission type has no name")
|
||||
submission_type = None
|
||||
full = False
|
||||
if full:
|
||||
if submission_type:
|
||||
return submission_type
|
||||
submission_type = cls()
|
||||
submission_type.name = import_dict['name']
|
||||
submission_type.info_map = import_dict['info']
|
||||
submission_type.sample_map = import_dict['samples']
|
||||
submission_type.defaults = import_dict['defaults']
|
||||
for kit in import_dict['kits']:
|
||||
new_kit = KitType.query(name=kit['kit_type']['name'])
|
||||
if not new_kit:
|
||||
new_kit = KitType(name=kit['kit_type']['name'])
|
||||
for role in kit['kit_type']['reagent roles']:
|
||||
new_role = ReagentRole.query(name=role['role'])
|
||||
if new_role:
|
||||
check = input(f"Found existing role: {new_role.name}. Use this? [Y/n]: ")
|
||||
if check.lower() == "n":
|
||||
new_role = None
|
||||
else:
|
||||
pass
|
||||
if not new_role:
|
||||
eol = datetime.timedelta(role['extension_of_life'])
|
||||
new_role = ReagentRole(name=role['role'], eol_ext=eol)
|
||||
uses = dict(expiry=role['expiry'], lot=role['lot'], name=role['name'], sheet=role['sheet'])
|
||||
ktrr_assoc = KitTypeReagentRoleAssociation(kit_type=new_kit, reagent_role=new_role, uses=uses)
|
||||
ktrr_assoc.submission_type = submission_type
|
||||
ktrr_assoc.required = role['required']
|
||||
ktst_assoc = SubmissionTypeKitTypeAssociation(
|
||||
kit_type=new_kit,
|
||||
submission_type=submission_type,
|
||||
mutable_cost_sample=kit['mutable_cost_sample'],
|
||||
mutable_cost_column=kit['mutable_cost_column'],
|
||||
constant_cost=kit['constant_cost']
|
||||
)
|
||||
for role in kit['kit_type']['equipment roles']:
|
||||
new_role = EquipmentRole.query(name=role['role'])
|
||||
if new_role:
|
||||
check = input(f"Found existing role: {new_role.name}. Use this? [Y/n]: ")
|
||||
if check.lower() == "n":
|
||||
new_role = None
|
||||
else:
|
||||
pass
|
||||
if not new_role:
|
||||
new_role = EquipmentRole(name=role['role'])
|
||||
for equipment in Equipment.assign_equipment(equipment_role=new_role):
|
||||
new_role.instances.append(equipment)
|
||||
ster_assoc = SubmissionTypeEquipmentRoleAssociation(submission_type=submission_type,
|
||||
equipment_role=new_role)
|
||||
try:
|
||||
uses = dict(name=role['name'], process=role['process'], sheet=role['sheet'], static=role['static'])
|
||||
except KeyError:
|
||||
uses = None
|
||||
ster_assoc.uses = uses
|
||||
for process in role['processes']:
|
||||
new_process = Process.query(name=process)
|
||||
if not new_process:
|
||||
new_process = Process(name=process)
|
||||
new_process.submission_types.append(submission_type)
|
||||
new_process.kit_types.append(new_kit)
|
||||
new_process.equipment_roles.append(new_role)
|
||||
if 'orgs' in import_dict.keys():
|
||||
logger.info("Found Organizations to be imported.")
|
||||
Organization.import_from_yml(filepath=filepath)
|
||||
return submission_type
|
||||
|
||||
|
||||
class SubmissionTypeKitTypeAssociation(BaseClass):
|
||||
@@ -1574,10 +1612,7 @@ class EquipmentRole(BaseClass):
|
||||
"""
|
||||
return dict(role=self.name,
|
||||
processes=self.get_processes(submission_type=submission_type, extraction_kit=kit_type))
|
||||
# base_dict['role'] = self.name
|
||||
# base_dict['processes'] = self.get_processes(submission_type=submission_type, extraction_kit=kit_type)
|
||||
# return base_dict
|
||||
|
||||
|
||||
|
||||
class SubmissionEquipmentAssociation(BaseClass):
|
||||
"""
|
||||
@@ -1598,7 +1633,7 @@ class SubmissionEquipmentAssociation(BaseClass):
|
||||
|
||||
equipment = relationship(Equipment, back_populates="equipment_submission_associations") #: associated equipment
|
||||
|
||||
def __repr__(self):
|
||||
def __repr__(self) -> str:
|
||||
return f"<SubmissionEquipmentAssociation({self.submission.rsl_plate_num} & {self.equipment.name})>"
|
||||
|
||||
def __init__(self, submission, equipment, role: str = "None"):
|
||||
@@ -1699,9 +1734,18 @@ class SubmissionTypeEquipmentRoleAssociation(BaseClass):
|
||||
def save(self):
|
||||
super().save()
|
||||
|
||||
def to_export_dict(self, kit_type: KitType):
|
||||
def to_export_dict(self, extraction_kit: KitType) -> dict:
|
||||
"""
|
||||
Creates dictionary for exporting to yml used in new SubmissionType Construction
|
||||
|
||||
Args:
|
||||
extraction_kit (KitType): KitType of interest.
|
||||
|
||||
Returns:
|
||||
dict: Dictionary containing relevant info for SubmissionType construction
|
||||
"""
|
||||
base_dict = {k: v for k, v in self.equipment_role.to_export_dict(submission_type=self.submission_type,
|
||||
kit_type=kit_type).items()}
|
||||
kit_type=extraction_kit).items()}
|
||||
base_dict['static'] = self.static
|
||||
return base_dict
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
All client organization related models.
|
||||
'''
|
||||
from __future__ import annotations
|
||||
|
||||
import json, yaml, logging
|
||||
from pathlib import Path
|
||||
from pprint import pformat
|
||||
@@ -118,8 +117,6 @@ class Organization(BaseClass):
|
||||
cont.__setattr__(k, v)
|
||||
organ.contacts.append(cont)
|
||||
organ.save()
|
||||
# logger.debug(pformat(organ.__dict__))
|
||||
|
||||
|
||||
|
||||
class Contact(BaseClass):
|
||||
@@ -136,10 +133,6 @@ class Contact(BaseClass):
|
||||
submissions = relationship("BasicSubmission", back_populates="contact") #: submissions this contact has submitted
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""
|
||||
Returns:
|
||||
str: Representation of this Contact
|
||||
"""
|
||||
return f"<Contact({self.name})>"
|
||||
|
||||
@classmethod
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
Models for the main submission and sample types.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import types
|
||||
from copy import deepcopy
|
||||
@@ -125,10 +124,6 @@ class BasicSubmission(BaseClass):
|
||||
}
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""
|
||||
Returns:
|
||||
str: Representation of this BasicSubmission
|
||||
"""
|
||||
submission_type = self.submission_type or "Basic"
|
||||
return f"<{submission_type}Submission({self.rsl_plate_num})>"
|
||||
|
||||
@@ -184,10 +179,8 @@ class BasicSubmission(BaseClass):
|
||||
# NOTE: Fields not placed in ui form to be moved to pydantic
|
||||
form_recover=recover
|
||||
)
|
||||
# logger.debug(dicto['singles'])
|
||||
# NOTE: Singles tells the query which fields to set limit to 1
|
||||
dicto['singles'] = parent_defs['singles']
|
||||
# logger.debug(dicto['singles'])
|
||||
# NOTE: Grab mode_sub_type specific info.
|
||||
output = {}
|
||||
for k, v in dicto.items():
|
||||
@@ -233,7 +226,7 @@ class BasicSubmission(BaseClass):
|
||||
sub_type (str | SubmissionType, Optional): Identity of the submission type to retrieve. Defaults to None.
|
||||
|
||||
Returns:
|
||||
SubmissionType: SubmissionType with name equal to this polymorphic identity
|
||||
SubmissionType: SubmissionType with name equal to sub_type, or to this polymorphic identity if sub_type is None.
|
||||
"""
|
||||
# logger.debug(f"Running search for {sub_type}")
|
||||
if isinstance(sub_type, dict):
|
||||
@@ -274,20 +267,6 @@ class BasicSubmission(BaseClass):
|
||||
"""
|
||||
return cls.get_submission_type(submission_type).construct_sample_map()
|
||||
|
||||
@classmethod
|
||||
def finalize_details(cls, input_dict: dict) -> dict:
|
||||
"""
|
||||
Make final adjustments to the details dictionary before display.
|
||||
|
||||
Args:
|
||||
input_dict (dict): Incoming dictionary.
|
||||
|
||||
Returns:
|
||||
dict: Final details dictionary.
|
||||
"""
|
||||
del input_dict['id']
|
||||
return input_dict
|
||||
|
||||
def generate_associations(self, name: str, extra: str | None = None):
|
||||
try:
|
||||
field = self.__getattribute__(name)
|
||||
@@ -362,25 +341,10 @@ class BasicSubmission(BaseClass):
|
||||
dict(role=k, name="Not Applicable", lot="NA", expiry=expiry,
|
||||
missing=True))
|
||||
# logger.debug(f"Running samples.")
|
||||
# samples = self.adjust_to_dict_samples(backup=backup)
|
||||
samples = self.generate_associations(name="submission_sample_associations")
|
||||
# logger.debug("Running equipment")
|
||||
equipment = self.generate_associations(name="submission_equipment_associations")
|
||||
# try:
|
||||
# equipment = [item.to_sub_dict() for item in self.submission_equipment_associations]
|
||||
# if not equipment:
|
||||
# equipment = None
|
||||
# except Exception as e:
|
||||
# logger.error(f"Error setting equipment: {e}")
|
||||
# equipment = None
|
||||
tips = self.generate_associations(name="submission_tips_associations")
|
||||
# try:
|
||||
# tips = [item.to_sub_dict() for item in self.submission_tips_associations]
|
||||
# if not tips:
|
||||
# tips = None
|
||||
# except Exception as e:
|
||||
# logger.error(f"Error setting tips: {e}")
|
||||
# tips = None
|
||||
cost_centre = self.cost_centre
|
||||
custom = self.custom
|
||||
else:
|
||||
@@ -428,7 +392,6 @@ class BasicSubmission(BaseClass):
|
||||
Returns:
|
||||
int: Number of unique columns.
|
||||
"""
|
||||
# logger.debug(f"Here's the samples: {self.samples}")
|
||||
columns = set([assoc.column for assoc in self.submission_sample_associations])
|
||||
# logger.debug(f"Here are the columns for {self.rsl_plate_num}: {columns}")
|
||||
return len(columns)
|
||||
@@ -513,6 +476,7 @@ class BasicSubmission(BaseClass):
|
||||
Convert all submissions to dataframe
|
||||
|
||||
Args:
|
||||
page_size (int, optional): Number of items to include in query result. Defaults to 250.
|
||||
page (int, optional): Limits the number of submissions to a page size. Defaults to 1.
|
||||
chronologic (bool, optional): Sort submissions in chronologic order. Defaults to True.
|
||||
submission_type (str | None, optional): Filter by SubmissionType. Defaults to None.
|
||||
@@ -537,11 +501,6 @@ class BasicSubmission(BaseClass):
|
||||
'signed_by', 'artic_date', 'gel_barcode', 'gel_date', 'ngs_date', 'contact_phone', 'contact',
|
||||
'tips', 'gel_image_path', 'custom']
|
||||
df = df.loc[:, ~df.columns.isin(exclude)]
|
||||
# for item in excluded:
|
||||
# try:
|
||||
# df = df.drop(item, axis=1)
|
||||
# except:
|
||||
# logger.warning(f"Couldn't drop '{item}' column from submissionsheet df.")
|
||||
if chronologic:
|
||||
try:
|
||||
df.sort_values(by="id", axis=0, inplace=True, ascending=False)
|
||||
@@ -611,6 +570,7 @@ class BasicSubmission(BaseClass):
|
||||
if value is not None:
|
||||
existing.append(value)
|
||||
self.__setattr__(key, existing)
|
||||
# NOTE: Make sure this gets updated by telling SQLAlchemy it's been modified.
|
||||
flag_modified(self, key)
|
||||
return
|
||||
case _:
|
||||
@@ -645,7 +605,7 @@ class BasicSubmission(BaseClass):
|
||||
for k, v in input_dict.items():
|
||||
try:
|
||||
setattr(assoc, k, v)
|
||||
# NOTE: for some reason I don't think assoc.__setattr__(k, v) doesn't work here.
|
||||
# NOTE: for some reason I don't think assoc.__setattr__(k, v) works here.
|
||||
except AttributeError:
|
||||
logger.error(f"Can't set {k} to {v}")
|
||||
result = assoc.save()
|
||||
@@ -703,7 +663,16 @@ class BasicSubmission(BaseClass):
|
||||
return super().save()
|
||||
|
||||
@classmethod
|
||||
def get_regex(cls, submission_type: SubmissionType | str | None = None):
|
||||
def get_regex(cls, submission_type: SubmissionType | str | None = None) -> str:
|
||||
"""
|
||||
Gets the regex string for identifying a certain class of submission.
|
||||
|
||||
Args:
|
||||
submission_type (SubmissionType | str | None, optional): submission type of interest. Defaults to None.
|
||||
|
||||
Returns:
|
||||
str: Regex string for identifying this class of submission.
|
||||
"""
|
||||
# logger.debug(f"Attempting to get regex for {cls.__mapper_args__['polymorphic_identity']}")
|
||||
logger.debug(f"Attempting to get regex for {submission_type}")
|
||||
try:
|
||||
@@ -755,11 +724,8 @@ class BasicSubmission(BaseClass):
|
||||
f"Could not get polymorph {polymorphic_identity} of {cls} due to {e}, falling back to BasicSubmission")
|
||||
case _:
|
||||
pass
|
||||
# if attrs is None or len(attrs) == 0:
|
||||
# logger.info(f"Recruiting: {cls}")
|
||||
# return model
|
||||
if attrs and any([not hasattr(cls, attr) for attr in attrs.keys()]):
|
||||
# looks for first model that has all included kwargs
|
||||
# NOTE: looks for first model that has all included kwargs
|
||||
try:
|
||||
model = next(subclass for subclass in cls.__subclasses__() if
|
||||
all([hasattr(subclass, attr) for attr in attrs.keys()]))
|
||||
@@ -797,7 +763,6 @@ class BasicSubmission(BaseClass):
|
||||
input_dict['custom'][k] = ws.cell(row=v['read']['row'], column=v['read']['column']).value
|
||||
case "range":
|
||||
ws = xl[v['sheet']]
|
||||
# input_dict['custom'][k] = []
|
||||
if v['start_row'] != v['end_row']:
|
||||
v['end_row'] = v['end_row'] + 1
|
||||
rows = range(v['start_row'], v['end_row'])
|
||||
@@ -806,10 +771,6 @@ class BasicSubmission(BaseClass):
|
||||
columns = range(v['start_column'], v['end_column'])
|
||||
input_dict['custom'][k] = [dict(value=ws.cell(row=row, column=column).value, row=row, column=column)
|
||||
for row in rows for column in columns]
|
||||
# for ii in range(v['start_row'], v['end_row']):
|
||||
# for jj in range(v['start_column'], v['end_column'] + 1):
|
||||
# input_dict['custom'][k].append(
|
||||
# dict(value=ws.cell(row=ii, column=jj).value, row=ii, column=jj))
|
||||
return input_dict
|
||||
|
||||
@classmethod
|
||||
@@ -952,7 +913,7 @@ class BasicSubmission(BaseClass):
|
||||
rsl_plate_num (str): RSL plate number of interest
|
||||
|
||||
Returns:
|
||||
list: _description_
|
||||
Generator[dict, None, None]: Updated samples
|
||||
"""
|
||||
# logger.debug(f"Hello from {cls.__mapper_args__['polymorphic_identity']} PCR parser!")
|
||||
pcr_sample_map = cls.get_submission_type().sample_map['pcr_samples']
|
||||
@@ -968,7 +929,17 @@ class BasicSubmission(BaseClass):
|
||||
yield sample
|
||||
|
||||
@classmethod
|
||||
def parse_pcr_controls(cls, xl: Workbook, rsl_plate_num: str) -> list:
|
||||
def parse_pcr_controls(cls, xl: Workbook, rsl_plate_num: str) -> Generator[dict, None, None]:
|
||||
"""
|
||||
Custom parsing of pcr controls from Design & Analysis Software export.
|
||||
|
||||
Args:
|
||||
xl (Workbook): D&A export file
|
||||
rsl_plate_num (str): Plate number of the submission to be joined.
|
||||
|
||||
Yields:
|
||||
Generator[dict, None, None]: Dictionaries of row values.
|
||||
"""
|
||||
location_map = cls.get_submission_type().sample_map['pcr_controls']
|
||||
submission = cls.query(rsl_plate_num=rsl_plate_num)
|
||||
name_column = 1
|
||||
@@ -981,12 +952,16 @@ class BasicSubmission(BaseClass):
|
||||
logger.debug(f"Pulling from row {iii}, column {item['ct_column']}")
|
||||
subtype, target = item['name'].split("-")
|
||||
ct = worksheet.cell(row=iii, column=item['ct_column']).value
|
||||
# NOTE: Kind of a stop gap solution to find control reagents.
|
||||
if subtype == "PC":
|
||||
ctrl = next((assoc.reagent for assoc in submission.submission_reagent_associations
|
||||
if any(["positive control" in item.name.lower() for item in assoc.reagent.role])), None)
|
||||
if
|
||||
any(["positive control" in item.name.lower() for item in assoc.reagent.role])),
|
||||
None)
|
||||
elif subtype == "NC":
|
||||
ctrl = next((assoc.reagent for assoc in submission.submission_reagent_associations
|
||||
if any(["molecular grade water" in item.name.lower() for item in assoc.reagent.role])), None)
|
||||
if any(["molecular grade water" in item.name.lower() for item in
|
||||
assoc.reagent.role])), None)
|
||||
try:
|
||||
ct = float(ct)
|
||||
except ValueError:
|
||||
@@ -1124,8 +1099,6 @@ class BasicSubmission(BaseClass):
|
||||
case _:
|
||||
# logger.debug(f"Lookup BasicSubmission by parsed str end_date {end_date}")
|
||||
end_date = parse(end_date).strftime("%Y-%m-%d")
|
||||
# logger.debug(f"Looking up BasicSubmissions from start date: {start_date} and end date: {end_date}")
|
||||
# logger.debug(f"Start date {start_date} == End date {end_date}: {start_date == end_date}")
|
||||
# logger.debug(f"Compensating for same date by using time")
|
||||
if start_date == end_date:
|
||||
start_date = datetime.strptime(start_date, "%Y-%m-%d").strftime("%Y-%m-%d %H:%M:%S.%f")
|
||||
@@ -1336,7 +1309,7 @@ class BasicSubmission(BaseClass):
|
||||
|
||||
def backup(self, obj=None, fname: Path | None = None, full_backup: bool = False):
|
||||
"""
|
||||
Exports xlsx and yml info files for this instance.
|
||||
Exports xlsx info files for this instance.
|
||||
|
||||
Args:
|
||||
obj (_type_, optional): _description_. Defaults to None.
|
||||
@@ -1352,13 +1325,6 @@ class BasicSubmission(BaseClass):
|
||||
if fname.name == "":
|
||||
# logger.debug(f"export cancelled.")
|
||||
return
|
||||
# if full_backup:
|
||||
# backup = self.to_dict(full_data=True)
|
||||
# try:
|
||||
# with open(self.__backup_path__.joinpath(fname.with_suffix(".yml")), "w") as f:
|
||||
# yaml.dump(backup, f)
|
||||
# except KeyError as e:
|
||||
# logger.error(f"Problem saving yml backup file: {e}")
|
||||
writer = pyd.to_writer()
|
||||
writer.xl.save(filename=fname.with_suffix(".xlsx"))
|
||||
|
||||
@@ -1436,6 +1402,17 @@ class BacterialCulture(BasicSubmission):
|
||||
|
||||
@classmethod
|
||||
def custom_info_parser(cls, input_dict: dict, xl: Workbook | None = None, custom_fields: dict = {}) -> dict:
|
||||
"""
|
||||
Performs class specific info parsing before info parsing is finalized.
|
||||
|
||||
Args:
|
||||
input_dict (dict): Generic input info
|
||||
xl (Workbook | None, optional): Original xl workbook. Defaults to None.
|
||||
custom_fields (dict, optional): Map of custom fields to be parsed. Defaults to {}.
|
||||
|
||||
Returns:
|
||||
dict: Updated info dictionary.
|
||||
"""
|
||||
input_dict = super().custom_info_parser(input_dict=input_dict, xl=xl, custom_fields=custom_fields)
|
||||
# logger.debug(f"\n\nInfo dictionary:\n\n{pformat(input_dict)}\n\n")
|
||||
return input_dict
|
||||
@@ -1476,12 +1453,30 @@ class Wastewater(BasicSubmission):
|
||||
output["pcr_technician"] = self.technician
|
||||
else:
|
||||
output['pcr_technician'] = self.pcr_technician
|
||||
############### Updated from finalize_details - testing 2024-10-17 ################
|
||||
if full_data:
|
||||
output['samples'] = [sample for sample in output['samples']]
|
||||
dummy_samples = []
|
||||
for item in output['samples']:
|
||||
# logger.debug(f"Sample dict: {item}")
|
||||
thing = deepcopy(item)
|
||||
try:
|
||||
thing['row'] = thing['source_row']
|
||||
thing['column'] = thing['source_column']
|
||||
except KeyError:
|
||||
logger.error(f"No row or column for sample: {item['submitter_id']}")
|
||||
continue
|
||||
thing['tooltip'] = f"Sample Name: {thing['name']}\nWell: {thing['sample_location']}"
|
||||
dummy_samples.append(thing)
|
||||
output['origin_plate'] = self.__class__.make_plate_map(sample_list=dummy_samples, plate_rows=4,
|
||||
plate_columns=6)
|
||||
###############################
|
||||
return output
|
||||
|
||||
@classmethod
|
||||
def custom_info_parser(cls, input_dict: dict, xl: Workbook | None = None, custom_fields: dict = {}) -> dict:
|
||||
"""
|
||||
Update submission dictionary with type specific information. Extends parent
|
||||
Update submission dictionary with class specific information. Extends parent
|
||||
|
||||
Args:
|
||||
input_dict (dict): Input sample dictionary
|
||||
@@ -1489,7 +1484,7 @@ class Wastewater(BasicSubmission):
|
||||
custom_fields: Dictionary of locations, ranges, etc to be used by this function
|
||||
|
||||
Returns:
|
||||
dict: Updated sample dictionary
|
||||
dict: Updated info dictionary
|
||||
"""
|
||||
input_dict = super().custom_info_parser(input_dict)
|
||||
# logger.debug(f"Input dict: {pformat(input_dict)}")
|
||||
@@ -1513,10 +1508,18 @@ class Wastewater(BasicSubmission):
|
||||
@classmethod
|
||||
def parse_pcr(cls, xl: Workbook, rsl_plate_num: str) -> Generator[dict, None, None]:
|
||||
"""
|
||||
Parse specific to wastewater samples.
|
||||
Perform parsing of PCR info. Since most of our PCR outputs are the same format, this should work for most.
|
||||
|
||||
Args:
|
||||
xl (Workbook): PCR info form
|
||||
rsl_plate_num (str): RSL plate number of interest
|
||||
|
||||
Returns:
|
||||
Generator[dict, None, None]: Updated samples
|
||||
"""
|
||||
samples = [item for item in super().parse_pcr(xl=xl, rsl_plate_num=rsl_plate_num)]
|
||||
# logger.debug(f'Samples from parent pcr parser: {pformat(samples)}')
|
||||
# NOTE: Due to having to run through samples in for loop we need to convert to list.
|
||||
output = []
|
||||
for sample in samples:
|
||||
# NOTE: remove '-{target}' from controls
|
||||
@@ -1542,20 +1545,23 @@ class Wastewater(BasicSubmission):
|
||||
del sample['assessment']
|
||||
except KeyError:
|
||||
pass
|
||||
# yield sample
|
||||
output.append(sample)
|
||||
# NOTE: And then yield back out to keep fidelity with the parent method.
|
||||
for sample in output:
|
||||
yield sample
|
||||
|
||||
# @classmethod
|
||||
# def parse_pcr_controls(cls, xl: Workbook, location_map: list) -> list:
|
||||
|
||||
@classmethod
|
||||
def enforce_name(cls, instr: str, data: dict | None = {}) -> str:
|
||||
"""
|
||||
Extends parent
|
||||
"""
|
||||
Custom naming method for this class. Extends parent.
|
||||
|
||||
Args:
|
||||
instr (str): Initial name.
|
||||
data (dict | None, optional): Additional parameters for name. Defaults to None.
|
||||
|
||||
Returns:
|
||||
str: Updated name.
|
||||
"""
|
||||
try:
|
||||
# NOTE: Deal with PCR file.
|
||||
instr = re.sub(r"PCR(-|_)", "", instr)
|
||||
@@ -1567,27 +1573,18 @@ class Wastewater(BasicSubmission):
|
||||
@classmethod
|
||||
def adjust_autofill_samples(cls, samples: List[Any]) -> List[Any]:
|
||||
"""
|
||||
Extends parent
|
||||
Makes adjustments to samples before writing to excel. Extends parent.
|
||||
|
||||
Args:
|
||||
samples (List[Any]): List of Samples
|
||||
|
||||
Returns:
|
||||
List[Any]: Updated list of samples
|
||||
"""
|
||||
samples = super().adjust_autofill_samples(samples)
|
||||
samples = [item for item in samples if not item.submitter_id.startswith("EN")]
|
||||
return samples
|
||||
|
||||
# @classmethod
|
||||
# def custom_sample_autofill_row(cls, sample, worksheet: Worksheet) -> int:
|
||||
# """
|
||||
# Extends parent
|
||||
# """
|
||||
# # logger.debug(f"Checking {sample.well}")
|
||||
# # logger.debug(f"here's the worksheet: {worksheet}")
|
||||
# row = super().custom_sample_autofill_row(sample, worksheet)
|
||||
# df = pd.DataFrame(list(worksheet.values))
|
||||
# # logger.debug(f"Here's the dataframe: {df}")
|
||||
# idx = df[df[1] == sample.sample_location]
|
||||
# # logger.debug(f"Here is the row: {idx}")
|
||||
# row = idx.index.to_list()[0]
|
||||
# return row + 1
|
||||
|
||||
@classmethod
|
||||
def get_details_template(cls, base_dict: dict) -> Tuple[dict, Template]:
|
||||
"""
|
||||
@@ -1603,35 +1600,6 @@ class Wastewater(BasicSubmission):
|
||||
base_dict['excluded'] += ['origin_plate']
|
||||
return base_dict, template
|
||||
|
||||
@classmethod
|
||||
def finalize_details(cls, input_dict: dict) -> dict:
|
||||
"""
|
||||
Makes changes to information before display
|
||||
|
||||
Args:
|
||||
input_dict (dict): Input information
|
||||
|
||||
Returns:
|
||||
dict: Updated information
|
||||
"""
|
||||
input_dict = super().finalize_details(input_dict)
|
||||
# NOTE: Currently this is preserving the generator items, can we come up with a better way?
|
||||
input_dict['samples'] = [sample for sample in input_dict['samples']]
|
||||
dummy_samples = []
|
||||
for item in input_dict['samples']:
|
||||
# logger.debug(f"Sample dict: {item}")
|
||||
thing = deepcopy(item)
|
||||
try:
|
||||
thing['row'] = thing['source_row']
|
||||
thing['column'] = thing['source_column']
|
||||
except KeyError:
|
||||
logger.error(f"No row or column for sample: {item['submitter_id']}")
|
||||
continue
|
||||
thing['tooltip'] = f"Sample Name: {thing['name']}\nWell: {thing['sample_location']}"
|
||||
dummy_samples.append(thing)
|
||||
input_dict['origin_plate'] = cls.make_plate_map(sample_list=dummy_samples, plate_rows=4, plate_columns=6)
|
||||
return input_dict
|
||||
|
||||
def custom_context_events(self) -> dict:
|
||||
"""
|
||||
Sets context events for main widget
|
||||
@@ -1646,7 +1614,7 @@ class Wastewater(BasicSubmission):
|
||||
@report_result
|
||||
def link_pcr(self, obj):
|
||||
"""
|
||||
Adds PCR info to this submission
|
||||
PYQT6 function to add PCR info to this submission
|
||||
|
||||
Args:
|
||||
obj (_type_): Parent widget
|
||||
@@ -1733,7 +1701,7 @@ class WastewaterArtic(BasicSubmission):
|
||||
@classmethod
|
||||
def custom_info_parser(cls, input_dict: dict, xl: Workbook | None = None, custom_fields: dict = {}) -> dict:
|
||||
"""
|
||||
Update submission dictionary with type specific information
|
||||
Update submission dictionary with class specific information
|
||||
|
||||
Args:
|
||||
input_dict (dict): Input sample dictionary
|
||||
@@ -1763,10 +1731,8 @@ class WastewaterArtic(BasicSubmission):
|
||||
return None
|
||||
|
||||
input_dict = super().custom_info_parser(input_dict)
|
||||
|
||||
input_dict['submission_type'] = dict(value="Wastewater Artic", missing=False)
|
||||
|
||||
logger.debug(f"Custom fields: {custom_fields}")
|
||||
# logger.debug(f"Custom fields: {custom_fields}")
|
||||
egel_section = custom_fields['egel_controls']
|
||||
ws = xl[egel_section['sheet']]
|
||||
# NOTE: Here we should be scraping the control results.
|
||||
@@ -1788,7 +1754,7 @@ class WastewaterArtic(BasicSubmission):
|
||||
ii in
|
||||
range(source_plates_section['start_row'], source_plates_section['end_row'] + 1)]
|
||||
for datum in data:
|
||||
logger.debug(f"Datum: {datum}")
|
||||
# logger.debug(f"Datum: {datum}")
|
||||
if datum['plate'] in ["None", None, ""]:
|
||||
continue
|
||||
else:
|
||||
@@ -1843,7 +1809,14 @@ class WastewaterArtic(BasicSubmission):
|
||||
@classmethod
|
||||
def enforce_name(cls, instr: str, data: dict = {}) -> str:
|
||||
"""
|
||||
Extends parent
|
||||
Custom naming method for this class. Extends parent.
|
||||
|
||||
Args:
|
||||
instr (str): Initial name.
|
||||
data (dict | None, optional): Additional parameters for name. Defaults to None.
|
||||
|
||||
Returns:
|
||||
str: Updated name.
|
||||
"""
|
||||
try:
|
||||
# NOTE: Deal with PCR file.
|
||||
@@ -1873,7 +1846,7 @@ class WastewaterArtic(BasicSubmission):
|
||||
dict: Updated sample dictionary
|
||||
"""
|
||||
input_dict = super().parse_samples(input_dict)
|
||||
logger.debug(f"WWA input dict: {pformat(input_dict)}")
|
||||
# logger.debug(f"WWA input dict: {pformat(input_dict)}")
|
||||
input_dict['sample_type'] = "Wastewater Sample"
|
||||
# NOTE: Stop gap solution because WW is sloppy with their naming schemes
|
||||
try:
|
||||
@@ -1952,14 +1925,14 @@ class WastewaterArtic(BasicSubmission):
|
||||
@classmethod
|
||||
def pbs_adapter(cls, input_str):
|
||||
"""
|
||||
Stopgap solution because WW names their controls different
|
||||
Stopgap solution because WW names their controls different
|
||||
|
||||
Args:
|
||||
input_str (str): input name
|
||||
Args:
|
||||
input_str (str): input name
|
||||
|
||||
Returns:
|
||||
str: output name
|
||||
"""
|
||||
Returns:
|
||||
str: output name
|
||||
"""
|
||||
# logger.debug(f"input string raw: {input_str}")
|
||||
# NOTE: Remove letters.
|
||||
processed = input_str.replace("RSL", "")
|
||||
@@ -2155,7 +2128,7 @@ class WastewaterArtic(BasicSubmission):
|
||||
|
||||
def gel_box(self, obj):
|
||||
"""
|
||||
Creates widget to perform gel viewing operations
|
||||
Creates PYQT6 widget to perform gel viewing operations
|
||||
|
||||
Args:
|
||||
obj (_type_): parent widget
|
||||
@@ -2221,7 +2194,7 @@ class BasicSample(BaseClass):
|
||||
submissions = association_proxy("sample_submission_associations", "submission") #: proxy of associated submissions
|
||||
|
||||
@validates('submitter_id')
|
||||
def create_id(self, key: str, value: str):
|
||||
def create_id(self, key: str, value: str) -> str:
|
||||
"""
|
||||
Creates a random string as a submitter id.
|
||||
|
||||
@@ -2330,7 +2303,7 @@ class BasicSample(BaseClass):
|
||||
|
||||
@classmethod
|
||||
def parse_sample(cls, input_dict: dict) -> dict:
|
||||
f"""
|
||||
"""
|
||||
Custom sample parser
|
||||
|
||||
Args:
|
||||
@@ -2413,7 +2386,7 @@ class BasicSample(BaseClass):
|
||||
ValueError: Raised if unallowed key is given.
|
||||
|
||||
Returns:
|
||||
_type_: _description_
|
||||
BasicSample: Instance of BasicSample
|
||||
"""
|
||||
disallowed = ["id"]
|
||||
if kwargs == {}:
|
||||
@@ -2434,7 +2407,7 @@ class BasicSample(BaseClass):
|
||||
**kwargs
|
||||
) -> List[BasicSample]:
|
||||
"""
|
||||
Allows for fuzzy search of samples. (Experimental)
|
||||
Allows for fuzzy search of samples.
|
||||
|
||||
Args:
|
||||
sample_type (str | BasicSample | None, optional): Type of sample. Defaults to None.
|
||||
@@ -2764,9 +2737,13 @@ class SubmissionSampleAssociation(BaseClass):
|
||||
Returns:
|
||||
int: incremented id
|
||||
"""
|
||||
|
||||
if cls.__name__ == "SubmissionSampleAssociation":
|
||||
model = cls
|
||||
else:
|
||||
model = next((base for base in cls.__bases__ if base.__name__ == "SubmissionSampleAssociation"),
|
||||
SubmissionSampleAssociation)
|
||||
try:
|
||||
return max([item.id for item in cls.query()]) + 1
|
||||
return max([item.id for item in model.query()]) + 1
|
||||
except ValueError as e:
|
||||
logger.error(f"Problem incrementing id: {e}")
|
||||
return 1
|
||||
@@ -2980,26 +2957,6 @@ class WastewaterAssociation(SubmissionSampleAssociation):
|
||||
logger.error(f"Couldn't set tooltip for {self.sample.rsl_number}. Looks like there isn't PCR data.")
|
||||
return sample
|
||||
|
||||
@classmethod
|
||||
def autoincrement_id_local(cls) -> int:
|
||||
"""
|
||||
Increments the association id automatically. Overrides parent
|
||||
|
||||
Returns:
|
||||
int: incremented id
|
||||
"""
|
||||
try:
|
||||
parent = next((base for base in cls.__bases__ if base.__name__ == "SubmissionSampleAssociation"),
|
||||
SubmissionSampleAssociation)
|
||||
return max([item.id for item in parent.query()]) + 1
|
||||
except StopIteration as e:
|
||||
logger.error(f"Problem incrementing id: {e}")
|
||||
return 1
|
||||
|
||||
@classmethod
|
||||
def autoincrement_id(cls) -> int:
|
||||
return super().autoincrement_id()
|
||||
|
||||
|
||||
class WastewaterArticAssociation(SubmissionSampleAssociation):
|
||||
"""
|
||||
@@ -3030,19 +2987,3 @@ class WastewaterArticAssociation(SubmissionSampleAssociation):
|
||||
sample['source_plate_number'] = self.source_plate_number
|
||||
sample['source_well'] = self.source_well
|
||||
return sample
|
||||
|
||||
@classmethod
|
||||
def autoincrement_id(cls) -> int:
|
||||
"""
|
||||
Increments the association id automatically. Overrides parent
|
||||
|
||||
Returns:
|
||||
int: incremented id
|
||||
"""
|
||||
try:
|
||||
parent = next((base for base in cls.__bases__ if base.__name__ == "SubmissionSampleAssociation"),
|
||||
SubmissionSampleAssociation)
|
||||
return max([item.id for item in parent.query()]) + 1
|
||||
except StopIteration as e:
|
||||
logger.error(f"Problem incrementing id: {e}")
|
||||
return 1
|
||||
|
||||
@@ -9,10 +9,10 @@ from typing import List
|
||||
from openpyxl import load_workbook, Workbook
|
||||
from pathlib import Path
|
||||
from backend.db.models import *
|
||||
from backend.validators import PydSubmission, PydReagent, RSLNamer, PydSample, PydEquipment, PydTips
|
||||
from backend.validators import PydSubmission, RSLNamer
|
||||
import logging, re
|
||||
from collections import OrderedDict
|
||||
from tools import check_not_nan, convert_nans_to_nones, is_missing, check_key_or_attr
|
||||
from tools import check_not_nan, is_missing, check_key_or_attr
|
||||
|
||||
logger = logging.getLogger(f"submissions.{__name__}")
|
||||
|
||||
@@ -483,17 +483,21 @@ class SampleParser(object):
|
||||
lookup_samples[ii] = {}
|
||||
else:
|
||||
logger.warning(f"Match for {psample['id']} not direct, running search.")
|
||||
for jj, lsample in enumerate(lookup_samples):
|
||||
try:
|
||||
check = lsample[merge_on_id] == psample['id']
|
||||
except KeyError:
|
||||
check = False
|
||||
if check:
|
||||
new = lsample | psample
|
||||
lookup_samples[jj] = {}
|
||||
break
|
||||
else:
|
||||
new = psample
|
||||
# for jj, lsample in enumerate(lookup_samples):
|
||||
# try:
|
||||
# check = lsample[merge_on_id] == psample['id']
|
||||
# except KeyError:
|
||||
# check = False
|
||||
# if check:
|
||||
# new = lsample | psample
|
||||
# lookup_samples[jj] = {}
|
||||
# break
|
||||
# else:
|
||||
# new = psample
|
||||
jj, new = next(((jj, lsample) for jj, lsample in enumerate(lookup_samples) if lsample[merge_on_id] == psample['id']), (-1, psample))
|
||||
logger.debug(f"Assigning from index {jj} - {new}")
|
||||
if jj >= 0:
|
||||
lookup_samples[jj] = {}
|
||||
if not check_key_or_attr(key='submitter_id', interest=new, check_none=True):
|
||||
new['submitter_id'] = psample['id']
|
||||
new = self.sub_object.parse_samples(new)
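A minimal sketch of the find-or-fallback idiom used above; the sample data and the "id" merge key are made up for illustration:

    # Hypothetical data; merge_on_id is assumed to be "id" here.
    lookup_samples = [{"id": "A1", "row": 1}, {"id": "B2", "row": 2}]
    psample = {"id": "B2", "ct": 21.5}
    jj, new = next(((jj, lsample) for jj, lsample in enumerate(lookup_samples)
                    if lsample["id"] == psample["id"]),
                   (-1, psample))
    # jj == 1 and new is the matching lookup dict; (-1, psample) means no match was found.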
|
||||
@@ -546,7 +550,7 @@ class EquipmentParser(object):
logger.error(f"Error getting asset number for {input}: {e}")
return input

def parse_equipment(self) -> List[dict]:
def parse_equipment(self) -> Generator[dict, None, None]:
"""
Scrapes equipment from xl sheet

@@ -554,7 +558,6 @@ class EquipmentParser(object):
List[dict]: list of equipment
"""
# logger.debug(f"Equipment parser going into parsing: {pformat(self.__dict__)}")
output = []
# logger.debug(f"Sheets: {sheets}")
for sheet in self.xl.sheetnames:
ws = self.xl[sheet]
@@ -579,14 +582,11 @@ class EquipmentParser(object):
eq = Equipment.query(name=asset)
process = ws.cell(row=v['process']['row'], column=v['process']['column']).value
try:
# output.append(
yield dict(name=eq.name, processes=[process], role=k, asset_number=eq.asset_number,
nickname=eq.nickname)
except AttributeError:
logger.error(f"Unable to add {eq} to list.")
# logger.debug(f"Here is the output so far: {pformat(output)}")
# return output



class TipParser(object):
"""
@@ -623,7 +623,6 @@ class TipParser(object):
List[dict]: list of equipment
"""
# logger.debug(f"Equipment parser going into parsing: {pformat(self.__dict__)}")
output = []
# logger.debug(f"Sheets: {sheets}")
for sheet in self.xl.sheetnames:
ws = self.xl[sheet]
@@ -647,22 +646,20 @@ class TipParser(object):
# logger.debug(f"asset: {asset}")
eq = Tips.query(lot=lot, name=asset, limit=1)
try:
# output.append(
yield dict(name=eq.name, role=k, lot=lot)
except AttributeError:
logger.error(f"Unable to add {eq} to PydTips list.")
# logger.debug(f"Here is the output so far: {pformat(output)}")
# return output



class PCRParser(object):
"""Object to pull data from Design and Analysis PCR export file."""

def __init__(self, filepath: Path | None = None, submission: BasicSubmission | None = None) -> None:
"""
Args:
filepath (Path | None, optional): file to parse. Defaults to None.
"""
Args:
filepath (Path | None, optional): file to parse. Defaults to None.
submission (BasicSubmission | None, optional): Submission parsed data to be added to.
"""
# logger.debug(f'Parsing {filepath.__str__()}')
if filepath is None:
logger.error('No filepath given.')
@@ -709,73 +706,3 @@ class PCRParser(object):
# logger.debug(f"PCR: {pformat(pcr)}")
return pcr


class EDSParser(object):
expand_device = {"QS7PRO": "QuantStudio tm 7 Pro System"}
expand_block = {"BLOCK_96W_01ML": "96-Well 0.1-mL Block"}

def __init__(self, filepath: str | Path | None = None):
logger.info(f"\n\nParsing {filepath.__str__()}\n\n")
match filepath:
case Path():
self.filepath = filepath
case str():
self.filepath = Path(filepath)
case _:
logger.error(f"No filepath given.")
raise ValueError("No filepath given.")
self.eds = ZipFile(self.filepath)
self.analysis_settings = json.loads(self.eds.read("primary/analysis_setting.json").decode("utf-8"))
self.analysis_results = json.loads(self.eds.read("primary/analysis_setting.json").decode("utf-8"))
self.presence_absence_results = json.loads(
self.eds.read("extensions/am.pa/presence_absence_result.json").decode("utf-8"))
self.presence_absence_settings = json.loads(
self.eds.read("extensions/am.pa/presence_absence_setting.json").decode("utf-8"))
self.run_summary = json.loads(self.eds.read("run/run_summary.json").decode("utf-8"))
self.run_method = json.loads(self.eds.read("setup/run_method.json").decode("utf-8"))
self.plate_setup = json.loads(self.eds.read("setup/plate_setup.json").decode("utf-8"))
self.eds_summary = json.loads(self.eds.read("summary.json").decode("utf-8"))

def parse_DA_date_format(self, value: int) -> datetime:
value = value / 1000
return datetime.utcfromtimestamp(value)

def get_run_time(self, start: datetime, end: datetime) -> Tuple[str, str, str]:
delta = end - start
minutes, seconds = divmod(delta.seconds, 60)
duration = f"{minutes} minutes {seconds} seconds"
start_time = start.strftime("%Y-%m-%d %I:%M:%S %p %Z")
end_time = end.strftime("%Y-%m-%d %I:%M:%S %p %Z")
return start_time, end_time, duration

def parse_summary(self):
summary = dict()
summary['file_name'] = self.filepath.absolute().__str__()
summary['comment'] = self.eds_summary['description']
summary['operator'] = self.run_summary['operator']
summary['barcode'] = self.plate_setup['plateBarcode']
try:
summary['instrument_type'] = self.__class__.expand_device[self.eds_summary['instrumentType']]
except KeyError:
summary['instrument_type'] = self.eds_summary['instrumentType']
try:
summary['block_type'] = self.__class__.expand_block[self.plate_setup['blockType']]
except KeyError:
summary['block_type'] = self.plate_setup['blockType']
summary['instrument_name'] = self.run_summary['instrumentName']
summary['instrument_serial_number'] = self.run_summary['instrumentSerialNumber']
summary['heated_cover_serial_number'] = self.run_summary['coverSerialNumber']
summary['block_serial_number'] = self.run_summary['blockSerialNumber']
run_start = self.parse_DA_date_format(self.run_summary['startTime'])
run_end = self.parse_DA_date_format(self.run_summary['endTime'])
summary['run_start_date/time'], summary['run_end_date/time'], summary['run_duration'] = \
self.get_run_time(run_start, run_end)
summary['sample_volume'] = self.run_method['sampleVolume']
summary['cover_temperature'] = self.run_method['coverTemperature']
summary['passive_reference'] = self.plate_setup['passiveReference']
summary['pcr_stage/step_number'] = f"Stage {self.analysis_settings['cqAnalysisStageNumber']} Step {self.analysis_settings['cqAnalysisStepNumber']}"
summary['quantification_cycle_method'] = self.analysis_results['cqAlgorithmType']
summary['analysis_date/time'] = self.parse_DA_date_format(self.eds_summary['analysis']['primary']['analysisTime'])
summary['software_name_and_version'] = "Design & Analysis Software v2.8.0"
summary['plugin_name_and_version'] = "Primary Analysis v1.8.1, Presence Absence v2.4.0"
return summary

@@ -7,8 +7,7 @@ from pathlib import Path
from datetime import date
from typing import Tuple
from backend.db.models import BasicSubmission
from tools import jinja_template_loading, get_first_blank_df_row, \
row_map
from tools import jinja_template_loading, get_first_blank_df_row, row_map
from PyQt6.QtWidgets import QWidget
from openpyxl.worksheet.worksheet import Worksheet

@@ -65,7 +64,7 @@ class ReportMaker(object):
old_lab = ""
output = []
# logger.debug(f"Report DataFrame: {df}")
for ii, row in enumerate(df.iterrows()):
for row in df.iterrows():
# logger.debug(f"Row {ii}: {row}")
lab = row[0][0]
# logger.debug(type(row))
@@ -111,7 +110,7 @@ class ReportMaker(object):

def fix_up_xl(self):
"""
Handles formatting of xl file.
Handles formatting of xl file, mediocrely.
"""
# logger.debug(f"Updating worksheet")
worksheet: Worksheet = self.writer.sheets['Report']

@@ -4,7 +4,7 @@ contains writer objects for pushing values to submission sheet templates.
import logging
from copy import copy
from pprint import pformat
from typing import List, Generator
from typing import List, Generator, Tuple
from openpyxl import load_workbook, Workbook
from backend.db.models import SubmissionType, KitType, BasicSubmission
from backend.validators.pydant import PydSubmission
@@ -19,13 +19,13 @@ class SheetWriter(object):
object to manage data placement into excel file
"""

def __init__(self, submission: PydSubmission, missing_only: bool = False):
def __init__(self, submission: PydSubmission):
"""
Args:
submission (PydSubmission): Object containing submission information.
missing_only (bool, optional): Whether to only fill in missing values. Defaults to False.
"""
self.sub = OrderedDict(submission.improved_dict())
# NOTE: Set values from pydantic object.
for k, v in self.sub.items():
match k:
case 'filepath':
@@ -41,18 +41,16 @@ class SheetWriter(object):
else:
self.sub[k] = v
# logger.debug(f"\n\nWriting to {submission.filepath.__str__()}\n\n")
if self.filepath.stem.startswith("tmp"):
template = self.submission_type.template_file
workbook = load_workbook(BytesIO(template))
missing_only = False
else:
try:
workbook = load_workbook(self.filepath)
except Exception as e:
logger.error(f"Couldn't open workbook due to {e}")
template = self.submission_type.template_file
workbook = load_workbook(BytesIO(template))
missing_only = False
# if self.filepath.stem.startswith("tmp"):
# template = self.submission_type.template_file
# workbook = load_workbook(BytesIO(template))
# else:
# try:
# workbook = load_workbook(self.filepath)
# except Exception as e:
# logger.error(f"Couldn't open workbook due to {e}")
template = self.submission_type.template_file
workbook = load_workbook(BytesIO(template))
# self.workbook = workbook
self.xl = workbook
self.write_info()
@@ -130,7 +128,7 @@ class InfoWriter(object):
self.info = self.reconcile_map(info_dict, self.info_map)
# logger.debug(pformat(self.info))

def reconcile_map(self, info_dict: dict, info_map: dict) -> dict:
def reconcile_map(self, info_dict: dict, info_map: dict) -> Generator[(Tuple[str, dict]), None, None]:
"""
Merge info with its locations

@@ -246,7 +244,7 @@ class ReagentWriter(object):
"""
for reagent in self.reagents:
sheet = self.xl[reagent['sheet']]
for k, v in reagent.items():
for v in reagent.values():
if not isinstance(v, dict):
continue
# logger.debug(
@@ -440,7 +438,6 @@ class TipWriter(object):
if tips_list is None:
return
for ii, tips in enumerate(tips_list, start=1):
# mp_info = tips_map[tips['role']]
mp_info = tips_map[tips.role]
# logger.debug(f"{tips['role']} map: {mp_info}")
placeholder = {}

@@ -23,15 +23,15 @@ class RSLNamer(object):
# NOTE: Preferred method is path retrieval, but might also need validation for just string.
filename = Path(filename) if Path(filename).exists() else filename
self.submission_type = sub_type
if self.submission_type is None:
if not self.submission_type:
# logger.debug("Creating submission type because none exists")
self.submission_type = self.retrieve_submission_type(filename=filename)
logger.info(f"got submission type: {self.submission_type}")
if self.submission_type is not None:
if self.submission_type:
# logger.debug("Retrieving BasicSubmission subclass")
self.sub_object = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type)
self.parsed_name = self.retrieve_rsl_number(filename=filename, regex=self.sub_object.get_regex(submission_type=sub_type))
if data is None:
if not data:
data = dict(submission_type=self.submission_type)
if "submission_type" not in data.keys():
data['submission_type'] = self.submission_type
@@ -45,6 +45,9 @@ class RSLNamer(object):
Args:
filename (str | Path): filename

Raises:
TypeError: Raised if unsupported variable type for filename given.

Returns:
str: parsed submission type
"""
@@ -84,6 +87,7 @@ class RSLNamer(object):
case str():
submission_type = st_from_str(filename=filename)
case _:
raise TypeError(f"Unsupported filename type: {type(filename)}.")
submission_type = None
try:
check = submission_type is None

@@ -157,7 +157,6 @@ class PydReagent(BaseModel):
if submission is not None and reagent not in submission.reagents:
assoc = SubmissionReagentAssociation(reagent=reagent, submission=submission)
assoc.comments = self.comment
# reagent.reagent_submission_associations.append(assoc)
else:
assoc = None
report.add_result(Result(owner=__name__, code=0, msg="New reagent created.", status="Information"))
@@ -165,7 +164,6 @@ class PydReagent(BaseModel):
if submission is not None and reagent not in submission.reagents:
assoc = SubmissionReagentAssociation(reagent=reagent, submission=submission)
assoc.comments = self.comment
# reagent.reagent_submission_associations.append(assoc)
else:
assoc = None
# add end-of-life extension from reagent type to expiry date
@@ -187,12 +185,10 @@ class PydSample(BaseModel, extra='allow'):
# logger.debug(f"Data for pydsample: {data}")
model = BasicSample.find_polymorphic_subclass(polymorphic_identity=data.sample_type)
for k, v in data.model_extra.items():
# print(k, v)
if k in model.timestamps():
if isinstance(v, str):
v = datetime.strptime(v, "%Y-%m-%d")
data.__setattr__(k, v)
# print(dir(data))
# logger.debug(f"Data coming out of validation: {pformat(data)}")
return data

@@ -329,8 +325,6 @@ class PydEquipment(BaseModel, extra='ignore'):
value = convert_nans_to_nones(value)
if not value:
value = ['']
# if len(value) == 0:
# value = ['']
try:
value = [item.strip() for item in value]
except AttributeError:
@@ -416,7 +410,6 @@ class PydSubmission(BaseModel, extra='allow'):
@field_validator("tips", mode="before")
@classmethod
def expand_tips(cls, value):
# print(f"\n{type(value)}\n")
if isinstance(value, dict):
value = value['value']
if isinstance(value, Generator):
@@ -594,7 +587,6 @@ class PydSubmission(BaseModel, extra='allow'):
value = value['value'].title()
return dict(value=value, missing=False)
else:
# return dict(value=RSLNamer(instr=values.data['filepath'].__str__()).submission_type.title(), missing=True)
return dict(value=RSLNamer.retrieve_submission_type(filename=values.data['filepath']).title(), missing=True)

@field_validator("submission_category", mode="before")
@@ -688,7 +680,6 @@ class PydSubmission(BaseModel, extra='allow'):
def __init__(self, run_custom: bool = False, **data):
super().__init__(**data)
# NOTE: this could also be done with default_factory
logger.debug(data)
self.submission_object = BasicSubmission.find_polymorphic_subclass(
polymorphic_identity=self.submission_type['value'])
self.namer = RSLNamer(self.rsl_plate_num['value'], sub_type=self.submission_type['value'])
@@ -729,7 +720,6 @@ class PydSubmission(BaseModel, extra='allow'):
output.append(dummy)
self.samples = output

# TODO: Return samples, reagents, etc to dictionaries as well.
def improved_dict(self, dictionaries: bool = True) -> dict:
"""
Adds model_extra to fields.
@@ -786,7 +776,6 @@ class PydSubmission(BaseModel, extra='allow'):
Returns:
Tuple[BasicSubmission, Result]: BasicSubmission instance, result object
"""
# self.__dict__.update(self.model_extra)
report = Report()
dicto = self.improved_dict()
instance, result = BasicSubmission.query_or_create(submission_type=self.submission_type['value'],
@@ -817,7 +806,6 @@ class PydSubmission(BaseModel, extra='allow'):
# logger.debug(f"Association: {assoc}")
if assoc is not None: # and assoc not in instance.submission_reagent_associations:
instance.submission_reagent_associations.append(assoc)
# instance.reagents.append(reagent)
case "samples":
for sample in self.samples:
sample, associations, _ = sample.toSQL(submission=instance)

@@ -4,6 +4,7 @@ Contains all operations for creating charts, graphs and visual effects.
from PyQt6.QtWidgets import QWidget
import plotly
from plotly.graph_objects import Figure
from plotly.graph_objs import FigureWidget
import pandas as pd
from frontend.widgets.functions import select_save_file

@@ -35,7 +36,6 @@ class CustomFigure(Figure):
output = select_save_file(obj=parent, default_name=group_name, extension="xlsx")
self.df.to_excel(output.absolute().__str__(), engine="openpyxl", index=False)


def to_html(self) -> str:
"""
Creates final html code from plotly

@@ -3,14 +3,13 @@ Functions for constructing irida controls graphs using plotly.
"""
from datetime import date
from pprint import pformat
import plotly
from typing import Generator
import plotly.express as px
import pandas as pd
from PyQt6.QtWidgets import QWidget
from . import CustomFigure
import logging
from tools import get_unique_values_in_df_column, divide_chunks
from frontend.widgets.functions import select_save_file

logger = logging.getLogger(f"submissions.{__name__}")

@@ -67,7 +66,6 @@ class IridaFigure(CustomFigure):
)
bar.update_traces(visible=ii == 0)
self.add_traces(bar.data)
# return generic_figure_markers(modes=modes, ytitle=ytitle)

def generic_figure_markers(self, modes: list = [], ytitle: str | None = None, months: int = 6):
"""
@@ -83,7 +81,7 @@ class IridaFigure(CustomFigure):
"""
if modes:
ytitle = modes[0]
# Creating visibles list for each mode.
# logger.debug("Creating visibles list for each mode.")
self.update_layout(
xaxis_title="Submitted Date (* - Date parsed from fastq file creation date)",
yaxis_title=ytitle,
@@ -100,7 +98,6 @@ class IridaFigure(CustomFigure):
)
]
)

self.update_xaxes(
rangeslider_visible=True,
rangeselector=dict(
@@ -109,7 +106,16 @@ class IridaFigure(CustomFigure):
)
assert isinstance(self, CustomFigure)

def make_plotly_buttons(self, months: int = 6):
def make_plotly_buttons(self, months: int = 6) -> Generator[dict, None, None]:
"""
Creates html buttons to zoom in on date areas

Args:
months (int, optional): Number of months of data given. Defaults to 6.

Yields:
Generator[dict, None, None]: Button details.
"""
rng = [1]
if months > 2:
rng += [iii for iii in range(3, months, 3)]
@@ -121,7 +127,7 @@ class IridaFigure(CustomFigure):
for button in buttons:
yield button

def make_pyqt_buttons(self, modes: list) -> list:
def make_pyqt_buttons(self, modes: list) -> Generator[dict, None, None]:
"""
Creates list of buttons with one for each mode to be used in showing/hiding mode traces.

@@ -130,7 +136,7 @@ class IridaFigure(CustomFigure):
fig_len (int): number of traces in the figure

Returns:
list: list of buttons.
Generator[dict, None, None]: list of buttons.
"""
fig_len = len(self.data)
if len(modes) > 1:

@@ -1,20 +1,21 @@
"""
Functions for constructing irida controls graphs using plotly.
"""
from datetime import date
from pprint import pformat
import plotly

from plotly.graph_objs import FigureWidget, Scatter

from . import CustomFigure
import plotly.express as px
import pandas as pd
from PyQt6.QtWidgets import QWidget
from plotly.graph_objects import Figure
import logging
from tools import get_unique_values_in_df_column, divide_chunks
from frontend.widgets.functions import select_save_file

logger = logging.getLogger(f"submissions.{__name__}")

# NOTE: For click events try (haven't got working yet) ipywidgets >=7.0.0 required for figurewidgets:
# https://plotly.com/python/click-events/


class PCRFigure(CustomFigure):

@@ -23,13 +24,20 @@ class PCRFigure(CustomFigure):
super().__init__(df=df, modes=modes)
logger.debug(f"DF: {self.df}")
self.construct_chart(df=df)
# self.generic_figure_markers(modes=modes, ytitle=ytitle, months=months)

def hello(self):
print("hello")

def construct_chart(self, df: pd.DataFrame):
logger.debug(f"PCR df: {df}")
logger.debug(f"PCR df:\n {df}")
try:
scatter = px.scatter(data_frame=df, x='submitted_date', y="ct", hover_data=["name", "target", "ct", "reagent_lot"], color='target')
express = px.scatter(data_frame=df, x='submitted_date', y="ct",
hover_data=["name", "target", "ct", "reagent_lot"],
color="target")
except ValueError:
scatter = px.scatter()
express = px.scatter()
scatter = FigureWidget([datum for datum in express.data])
self.add_traces(scatter.data)
self.update_traces(marker={'size': 15})



@@ -1,17 +1,14 @@
'''
"""
Contains all custom generated PyQT6 derivative widgets.
'''
"""

# from .app import App
from .functions import *
from .misc import *
from .pop_ups import *
from .submission_table import *
from .submission_widget import *
from .controls_chart import *
from .kit_creator import *
from .submission_details import *
from .equipment_usage import *
from .gel_checker import *
from .submission_type_creator import *
from .app import App

@@ -9,7 +9,6 @@ from PyQt6.QtWidgets import (
)
from PyQt6.QtGui import QAction
from pathlib import Path

from markdown import markdown
from __init__ import project_path
from tools import check_if_app, Settings, Report, jinja_template_loading, check_authorization, page_size
@@ -21,8 +20,6 @@ import logging, webbrowser, sys, shutil
from .submission_table import SubmissionsSheet
from .submission_widget import SubmissionFormContainer
from .controls_chart import ControlsViewer
from .kit_creator import KitAdder
from .submission_type_creator import SubmissionTypeAdder, SubmissionType
from .sample_search import SearchBox
from .summary import Summary

@@ -72,7 +69,6 @@ class App(QMainWindow):
fileMenu = menuBar.addMenu("&File")
# NOTE: Creating menus using a title
methodsMenu = menuBar.addMenu("&Methods")
# reportMenu = menuBar.addMenu("&Reports")
maintenanceMenu = menuBar.addMenu("&Monthly")
helpMenu = menuBar.addMenu("&Help")
helpMenu.addAction(self.helpAction)
@@ -83,7 +79,6 @@ class App(QMainWindow):
fileMenu.addAction(self.yamlImportAction)
methodsMenu.addAction(self.searchLog)
methodsMenu.addAction(self.searchSample)
# reportMenu.addAction(self.generateReportAction)
maintenanceMenu.addAction(self.joinExtractionAction)
maintenanceMenu.addAction(self.joinPCRAction)

@@ -105,7 +100,6 @@ class App(QMainWindow):
# logger.debug(f"Creating actions...")
self.importAction = QAction("&Import Submission", self)
self.addReagentAction = QAction("Add Reagent", self)
# self.generateReportAction = QAction("Make Report", self)
self.addKitAction = QAction("Import Kit", self)
self.addOrgAction = QAction("Import Org", self)
self.joinExtractionAction = QAction("Link Extraction Logs")
@@ -125,7 +119,6 @@ class App(QMainWindow):
# logger.debug(f"Connecting actions...")
self.importAction.triggered.connect(self.table_widget.formwidget.importSubmission)
self.addReagentAction.triggered.connect(self.table_widget.formwidget.add_reagent)
# self.generateReportAction.triggered.connect(self.table_widget.sub_wid.generate_report)
self.joinExtractionAction.triggered.connect(self.table_widget.sub_wid.link_extractions)
self.joinPCRAction.triggered.connect(self.table_widget.sub_wid.link_pcr)
self.helpAction.triggered.connect(self.showAbout)
@@ -233,6 +226,7 @@ class App(QMainWindow):
ap.exec()
st = SubmissionType.import_from_json(filepath=fname)
if st:
# NOTE: Do not delete the print statement below.
print(pformat(st.to_export_dict()))
choice = input("Save the above submission type? [y/N]: ")
if choice.lower() == "y":
@@ -262,7 +256,6 @@ class AddSubForm(QWidget):
self.tabs.addTab(self.tab2, "Irida Controls")
self.tabs.addTab(self.tab3, "PCR Controls")
self.tabs.addTab(self.tab4, "Cost Report")
# self.tabs.addTab(self.tab4, "Add Kit")
# NOTE: Create submission adder form
self.formwidget = SubmissionFormContainer(self)
self.formlayout = QVBoxLayout(self)
@@ -294,16 +287,10 @@ class AddSubForm(QWidget):
self.pcr_viewer = ControlsViewer(self, archetype="PCR Control")
self.tab3.layout.addWidget(self.pcr_viewer)
self.tab3.setLayout(self.tab3.layout)
# NOTE: create custom widget to add new tabs
# ST_adder = SubmissionTypeAdder(self)
summary_report = Summary(self)
self.tab4.layout = QVBoxLayout(self)
self.tab4.layout.addWidget(summary_report)
self.tab4.setLayout(self.tab4.layout)
# kit_adder = KitAdder(self)
# self.tab4.layout = QVBoxLayout(self)
# self.tab4.layout.addWidget(kit_adder)
# self.tab4.setLayout(self.tab4.layout)
# NOTE: add tabs to main widget
self.layout.addWidget(self.tabs)
self.setLayout(self.layout)

@@ -1,23 +1,17 @@
"""
Handles display of control charts
"""
import re
import sys
from datetime import timedelta, date
from datetime import date
from pprint import pformat
from typing import Tuple
from PyQt6.QtWebEngineWidgets import QWebEngineView
from PyQt6.QtWidgets import (
QWidget, QVBoxLayout, QComboBox, QHBoxLayout,
QDateEdit, QLabel, QSizePolicy, QPushButton, QGridLayout
QWidget, QComboBox, QPushButton, QGridLayout
)
from PyQt6.QtCore import QSignalBlocker
from backend.db import ControlType, IridaControl
from PyQt6.QtCore import QDate, QSize
import logging
from pandas import DataFrame
from tools import Report, Result, get_unique_values_in_df_column, Settings, report_result
from frontend.visualizations import IridaFigure, PCRFigure, CustomFigure
from tools import Report, report_result
from frontend.visualizations import CustomFigure
from .misc import StartEndDatePicker

logger = logging.getLogger(f"submissions.{__name__}")
@@ -70,19 +64,12 @@ class ControlsViewer(QWidget):
self.save_button.pressed.connect(self.save_chart_function)
self.export_button.pressed.connect(self.save_data_function)


def save_chart_function(self):
self.fig.save_figure(parent=self)

def save_data_function(self):
self.fig.save_data(parent=self)

# def controls_getter(self):
# """
# Lookup controls from database and send to chartmaker
# """
# self.controls_getter_function()

@report_result
def controls_getter_function(self, *args, **kwargs):
"""
@@ -128,7 +115,18 @@ class ControlsViewer(QWidget):
self.chart_maker_function()
return report

def diff_month(self, d1: date, d2: date):
@classmethod
def diff_month(self, d1: date, d2: date) -> float:
"""
Gets the number of months difference between two different dates

Args:
d1 (date): Start date.
d2 (date): End date.

Returns:
float: Number of months difference
"""
return abs((d1.year - d2.year) * 12 + d1.month - d2.month)

@report_result
@@ -169,164 +167,3 @@ class ControlsViewer(QWidget):
# logger.debug("Figure updated... I hope.")
return report

# def convert_data_list_to_df(self, input_df: list[dict]) -> DataFrame:
# """
# Convert list of control records to dataframe
#
# Args:
# ctx (dict): settings passed from gui
# input_df (list[dict]): list of dictionaries containing records
# mode_sub_type (str | None, optional): sub_type of submission type. Defaults to None.
#
# Returns:
# DataFrame: dataframe of controls
# """
#
# df = DataFrame.from_records(input_df)
# safe = ['name', 'submitted_date', 'genus', 'target']
# for column in df.columns:
# if column not in safe:
# if self.mode_sub_type is not None and column != self.mode_sub_type:
# continue
# else:
# safe.append(column)
# if "percent" in column:
# # count_col = [item for item in df.columns if "count" in item][0]
# try:
# count_col = next(item for item in df.columns if "count" in item)
# except StopIteration:
# continue
# # NOTE: The actual percentage from kraken was off due to exclusion of NaN, recalculating.
# df[column] = 100 * df[count_col] / df.groupby('name')[count_col].transform('sum')
# df = df[[c for c in df.columns if c in safe]]
# # NOTE: move date of sample submitted on same date as previous ahead one.
# df = self.displace_date(df=df)
# # NOTE: ad hoc method to make data labels more accurate.
# df = self.df_column_renamer(df=df)
# return df
#
# def df_column_renamer(self, df: DataFrame) -> DataFrame:
# """
# Ad hoc function I created to clarify some fields
#
# Args:
# df (DataFrame): input dataframe
#
# Returns:
# DataFrame: dataframe with 'clarified' column names
# """
# df = df[df.columns.drop(list(df.filter(regex='_hashes')))]
# return df.rename(columns={
# "contains_ratio": "contains_shared_hashes_ratio",
# "matches_ratio": "matches_shared_hashes_ratio",
# "kraken_count": "kraken2_read_count_(top_50)",
# "kraken_percent": "kraken2_read_percent_(top_50)"
# })
#
# def displace_date(self, df: DataFrame) -> DataFrame:
# """
# This function serves to split samples that were submitted on the same date by incrementing dates.
# It will shift the date forward by one day if it is the same day as an existing date in a list.
#
# Args:
# df (DataFrame): input dataframe composed of control records
#
# Returns:
# DataFrame: output dataframe with dates incremented.
# """
# # logger.debug(f"Unique items: {df['name'].unique()}")
# # NOTE: get submitted dates for each control
# dict_list = [dict(name=item, date=df[df.name == item].iloc[0]['submitted_date']) for item in
# sorted(df['name'].unique())]
# previous_dates = set()
# # for _, item in enumerate(dict_list):
# for item in dict_list:
# df, previous_dates = self.check_date(df=df, item=item, previous_dates=previous_dates)
# return df
#
# def check_date(self, df: DataFrame, item: dict, previous_dates: set) -> Tuple[DataFrame, list]:
# """
# Checks if an items date is already present in df and adjusts df accordingly
#
# Args:
# df (DataFrame): input dataframe
# item (dict): control for checking
# previous_dates (list): list of dates found in previous controls
#
# Returns:
# Tuple[DataFrame, list]: Output dataframe and appended list of previous dates
# """
# try:
# check = item['date'] in previous_dates
# except IndexError:
# check = False
# previous_dates.add(item['date'])
# if check:
# # logger.debug(f"We found one! Increment date!\n\t{item['date']} to {item['date'] + timedelta(days=1)}")
# # NOTE: get df locations where name == item name
# mask = df['name'] == item['name']
# # NOTE: increment date in dataframe
# df.loc[mask, 'submitted_date'] = df.loc[mask, 'submitted_date'].apply(lambda x: x + timedelta(days=1))
# item['date'] += timedelta(days=1)
# passed = False
# else:
# passed = True
# # logger.debug(f"\n\tCurrent date: {item['date']}\n\tPrevious dates:{previous_dates}")
# # logger.debug(f"DF: {type(df)}, previous_dates: {type(previous_dates)}")
# # NOTE: if run didn't lead to changed date, return values
# if passed:
# # logger.debug(f"Date check passed, returning.")
# return df, previous_dates
# # NOTE: if date was changed, rerun with new date
# else:
# logger.warning(f"Date check failed, running recursion")
# df, previous_dates = self.check_date(df, item, previous_dates)
# return df, previous_dates
#
# def prep_df(self, ctx: Settings, df: DataFrame) -> Tuple[DataFrame, list]:
# """
# Constructs figures based on parsed pandas dataframe.
#
# Args:
# ctx (Settings): settings passed down from gui
# df (pd.DataFrame): input dataframe
# ytitle (str | None, optional): title for the y-axis. Defaults to None.
#
# Returns:
# Figure: Plotly figure
# """
# # NOTE: converts starred genera to normal and splits off list of starred
# if df.empty:
# return None
# df['genus'] = df['genus'].replace({'\*': ''}, regex=True).replace({"NaN": "Unknown"})
# df['genera'] = [item[-1] if item and item[-1] == "*" else "" for item in df['genus'].to_list()]
# # NOTE: remove original runs, using reruns if applicable
# df = self.drop_reruns_from_df(ctx=ctx, df=df)
# # NOTE: sort by and exclude from
# sorts = ['submitted_date', "target", "genus"]
# exclude = ['name', 'genera']
# modes = [item for item in df.columns if item not in sorts and item not in exclude]
# # NOTE: Set descending for any columns that have "{mode}" in the header.
# ascending = [False if item == "target" else True for item in sorts]
# df = df.sort_values(by=sorts, ascending=ascending)
# # logger.debug(df[df.isna().any(axis=1)])
# # NOTE: actual chart construction is done by
# return df, modes
#
# def drop_reruns_from_df(self, ctx: Settings, df: DataFrame) -> DataFrame:
# """
# Removes semi-duplicates from dataframe after finding sequencing repeats.
#
# Args:
# settings (dict): settings passed from gui
# df (DataFrame): initial dataframe
#
# Returns:
# DataFrame: dataframe with originals removed in favour of repeats.
# """
# if 'rerun_regex' in ctx:
# sample_names = get_unique_values_in_df_column(df, column_name="name")
# rerun_regex = re.compile(fr"{ctx.rerun_regex}")
# exclude = [re.sub(rerun_regex, "", sample) for sample in sample_names if rerun_regex.search(sample)]
# df = df[df.name not in exclude]
# return df

@@ -8,7 +8,7 @@ from PyQt6.QtWidgets import (QDialog, QComboBox, QCheckBox,
from backend.db.models import Equipment, BasicSubmission, Process
from backend.validators.pydant import PydEquipment, PydEquipmentRole, PydTips
import logging
from typing import List, Generator
from typing import Generator

logger = logging.getLogger(f"submissions.{__name__}")

@@ -50,7 +50,7 @@ class EquipmentUsage(QDialog):
Pull info from all RoleComboBox widgets

Returns:
List[PydEquipment]: All equipment pulled from widgets
Generator[PydEquipment, None, None]: All equipment pulled from widgets
"""
for widget in self.findChildren(QWidget):
match widget:

@@ -4,7 +4,6 @@ functions used by all windows in the application's frontend
from pathlib import Path
import logging
from PyQt6.QtWidgets import QMainWindow, QFileDialog
from tools import Result

logger = logging.getLogger(f"submissions.{__name__}")


@@ -1,232 +0,0 @@
from PyQt6.QtWidgets import (
QWidget, QVBoxLayout, QScrollArea,
QGridLayout, QPushButton, QLabel,
QLineEdit, QComboBox, QDoubleSpinBox,
QSpinBox, QDateEdit
)
from sqlalchemy import FLOAT, INTEGER
from backend.db import SubmissionTypeKitTypeAssociation, SubmissionType, ReagentRole
from backend.validators import PydReagentRole, PydKit
import logging
from pprint import pformat
from tools import Report
from typing import Tuple

logger = logging.getLogger(f"submissions.{__name__}")


class KitAdder(QWidget):
"""
dialog to get information to add kit
"""
def __init__(self, parent) -> None:
super().__init__(parent)
self.report = Report()
self.app = parent.parent
main_box = QVBoxLayout(self)
scroll = QScrollArea(self)
main_box.addWidget(scroll)
scroll.setWidgetResizable(True)
scrollContent = QWidget(scroll)
self.grid = QGridLayout()
scrollContent.setLayout(self.grid)
# NOTE: insert submit button at top
self.submit_btn = QPushButton("Submit")
self.grid.addWidget(self.submit_btn,0,0,1,1)
self.grid.addWidget(QLabel("Kit Name:"),2,0)
# NOTE: widget to get kit name
kit_name = QLineEdit()
kit_name.setObjectName("kit_name")
self.grid.addWidget(kit_name,2,1)
self.grid.addWidget(QLabel("Used For Submission Type:"),3,0)
# NOTE: widget to get uses of kit
used_for = QComboBox()
used_for.setObjectName("used_for")
# NOTE: Insert all existing sample types
used_for.addItems([item.name for item in SubmissionType.query()])
used_for.setEditable(True)
self.grid.addWidget(used_for,3,1)
# NOTE: Get all fields in SubmissionTypeKitTypeAssociation
self.columns = [item for item in SubmissionTypeKitTypeAssociation.__table__.columns if len(item.foreign_keys) == 0]
for iii, column in enumerate(self.columns):
idx = iii + 4
# NOTE: convert field name to human readable.
field_name = column.name.replace("_", " ").title()
self.grid.addWidget(QLabel(field_name),idx,0)
match column.type:
case FLOAT():
add_widget = QDoubleSpinBox()
add_widget.setMinimum(0)
add_widget.setMaximum(9999)
case INTEGER():
add_widget = QSpinBox()
add_widget.setMinimum(0)
add_widget.setMaximum(9999)
case _:
add_widget = QLineEdit()
add_widget.setObjectName(column.name)
self.grid.addWidget(add_widget, idx,1)
self.add_RT_btn = QPushButton("Add Reagent Type")
self.grid.addWidget(self.add_RT_btn)
self.add_RT_btn.clicked.connect(self.add_RT)
self.submit_btn.clicked.connect(self.submit)
scroll.setWidget(scrollContent)
self.ignore = [None, "", "qt_spinbox_lineedit", "qt_scrollarea_viewport", "qt_scrollarea_hcontainer",
"qt_scrollarea_vcontainer", "submit_btn"
]

def add_RT(self) -> None:
"""
insert new reagent type row
"""
# NOTE: get bottommost row
maxrow = self.grid.rowCount()
reg_form = ReagentRoleForm(parent=self)
reg_form.setObjectName(f"ReagentForm_{maxrow}")
self.grid.addWidget(reg_form, maxrow,0,1,4)

def submit(self) -> None:
"""
send kit to database
"""
report = Report()
# NOTE: get form info
info, reagents = self.parse_form()
info = {k:v for k,v in info.items() if k in [column.name for column in self.columns] + ['kit_name', 'used_for']}
# logger.debug(f"kit info: {pformat(info)}")
# logger.debug(f"kit reagents: {pformat(reagents)}")
info['reagent_roles'] = reagents
# logger.debug(pformat(info))
# NOTE: send to kit constructor
kit = PydKit(name=info['kit_name'])
for reagent in info['reagent_roles']:
uses = {
info['used_for']:
{'sheet':reagent['sheet'],
'name':reagent['name'],
'lot':reagent['lot'],
'expiry':reagent['expiry']
}}
kit.reagent_roles.append(PydReagentRole(name=reagent['rtname'], eol_ext=reagent['eol'], uses=uses))
# logger.debug(f"Output pyd object: {kit.__dict__}")
sqlobj, result = kit.toSQL(self.ctx)
report.add_result(result=result)
sqlobj.save()

self.__init__(self.parent())

def parse_form(self) -> Tuple[dict, list]:
"""
Pulls reagent and general info from form

Returns:
Tuple[dict, list]: dict=info, list=reagents
"""
# logger.debug(f"Hello from {self.__class__} parser!")
info = {}
reagents = []
widgets = [widget for widget in self.findChildren(QWidget) if widget.objectName() not in self.ignore and not isinstance(widget.parent(), ReagentRoleForm)]
for widget in widgets:
# logger.debug(f"Parsed widget: {widget.objectName()} of type {type(widget)} with parent {widget.parent()}")
match widget:
case ReagentRoleForm():
reagents.append(widget.parse_form())
case QLineEdit():
info[widget.objectName()] = widget.text()
case QComboBox():
info[widget.objectName()] = widget.currentText()
case QDateEdit():
info[widget.objectName()] = widget.date().toPyDate()
return info, reagents


class ReagentRoleForm(QWidget):
"""
custom widget to add information about a new reagenttype
"""
def __init__(self, parent) -> None:
super().__init__(parent)
grid = QGridLayout()
self.setLayout(grid)
grid.addWidget(QLabel("Reagent Type Name"),0,0)
# Widget to get reagent info
self.reagent_getter = QComboBox()
self.reagent_getter.setObjectName("rtname")
# lookup all reagent type names from db
lookup = ReagentRole.query()
# logger.debug(f"Looked up ReagentType names: {lookup}")
self.reagent_getter.addItems([item.name for item in lookup])
self.reagent_getter.setEditable(True)
grid.addWidget(self.reagent_getter,0,1)
grid.addWidget(QLabel("Extension of Life (months):"),0,2)
# NOTE: widget to get extension of life
self.eol = QSpinBox()
self.eol.setObjectName('eol')
self.eol.setMinimum(0)
grid.addWidget(self.eol, 0,3)
grid.addWidget(QLabel("Excel Location Sheet Name:"),1,0)
self.location_sheet_name = QLineEdit()
self.location_sheet_name.setObjectName("sheet")
self.location_sheet_name.setText("e.g. 'Reagent Info'")
grid.addWidget(self.location_sheet_name, 1,1)
for iii, item in enumerate(["Name", "Lot", "Expiry"]):
idx = iii + 2
grid.addWidget(QLabel(f"{item} Row:"), idx, 0)
row = QSpinBox()
row.setFixedWidth(50)
row.setObjectName(f'{item.lower()}_row')
row.setMinimum(0)
grid.addWidget(row, idx, 1)
grid.addWidget(QLabel(f"{item} Column:"), idx, 2)
col = QSpinBox()
col.setFixedWidth(50)
col.setObjectName(f'{item.lower()}_column')
col.setMinimum(0)
grid.addWidget(col, idx, 3)
self.setFixedHeight(175)
max_row = grid.rowCount()
self.r_button = QPushButton("Remove")
self.r_button.clicked.connect(self.remove)
grid.addWidget(self.r_button,max_row,0,1,1)
self.ignore = [None, "", "qt_spinbox_lineedit", "qt_scrollarea_viewport", "qt_scrollarea_hcontainer",
"qt_scrollarea_vcontainer", "submit_btn", "eol", "sheet", "rtname"
]

def remove(self):
"""
Destroys this row of reagenttype from the form
"""
self.setParent(None)
self.destroy()

def parse_form(self) -> dict:
"""
Pulls ReagentType info from the form.

Returns:
dict: _description_
"""
# logger.debug(f"Hello from {self.__class__} parser!")
info = {}
info['eol'] = self.eol.value()
info['sheet'] = self.location_sheet_name.text()
info['rtname'] = self.reagent_getter.currentText()
widgets = [widget for widget in self.findChildren(QWidget) if widget.objectName() not in self.ignore]
for widget in widgets:
# logger.debug(f"Parsed widget: {widget.objectName()} of type {type(widget)} with parent {widget.parent()}")
match widget:
case QLineEdit():
info[widget.objectName()] = widget.text()
case QComboBox():
info[widget.objectName()] = widget.currentText()
case QDateEdit():
info[widget.objectName()] = widget.date().toPyDate()
case QSpinBox() | QDoubleSpinBox():
if "_" in widget.objectName():
key, sub_key = widget.objectName().split("_")
if key not in info.keys():
info[key] = {}
# logger.debug(f"Adding key {key}, {sub_key} and value {widget.value()} to {info}")
info[key][sub_key] = widget.value()
return info

@@ -3,7 +3,6 @@ Contains miscellaneous widgets for frontend functions
'''
import math
from datetime import date

from PyQt6.QtGui import QPageLayout, QPageSize, QStandardItem, QIcon
from PyQt6.QtWebEngineWidgets import QWebEngineView
from PyQt6.QtWidgets import (
@@ -51,7 +50,6 @@ class AddReagentForm(QDialog):
self.exp_input.setObjectName('expiry')
# NOTE: if expiry is not passed in from gui, use today
if expiry is None:
# self.exp_input.setDate(QDate.currentDate())
self.exp_input.setDate(QDate(1970, 1, 1))
else:
try:
@@ -244,4 +242,4 @@ class Pagifier(QWidget):
self.update_current_page()

def update_current_page(self):
self.current_page.setText(f"{self.page_anchor} of {self.page_max}")
self.current_page.setText(f"{self.page_anchor} of {self.page_max}")

@@ -9,7 +9,7 @@ from PyQt6.QtWebEngineWidgets import QWebEngineView
from tools import jinja_template_loading
import logging
from backend.db import models
from typing import Literal
from typing import Any, Literal

logger = logging.getLogger(f"submissions.{__name__}")

@@ -54,9 +54,8 @@ class AlertPop(QMessageBox):

class HTMLPop(QDialog):

def __init__(self, html: str, owner: str | None = None, title: str = "python"):
def __init__(self, html: str, title: str = "python"):
super().__init__()

self.webview = QWebEngineView(parent=self)
self.layout = QVBoxLayout()
self.setWindowTitle(title)

@@ -74,7 +74,6 @@ class SearchBox(QDialog):
# logger.debug(f"Running update_data with sample type: {self.type}")
fields = self.parse_form()
# logger.debug(f"Got fields: {fields}")
# sample_list_creator = self.type.fuzzy_search(sample_type=self.type, **fields)
sample_list_creator = self.type.fuzzy_search(**fields)
data = self.type.samples_to_df(sample_list=sample_list_creator)
# logger.debug(f"Data: {data}")

@@ -1,13 +1,11 @@
"""
Webview to show submission and sample details.
"""
from PyQt6.QtGui import QColor, QPageSize, QPageLayout
from PyQt6.QtPrintSupport import QPrinter
from PyQt6.QtWidgets import (QDialog, QPushButton, QVBoxLayout,
QDialogButtonBox, QTextEdit, QGridLayout)
from PyQt6.QtWebEngineWidgets import QWebEngineView
from PyQt6.QtWebChannel import QWebChannel
from PyQt6.QtCore import Qt, pyqtSlot, QMarginsF, QSize
from PyQt6.QtCore import Qt, pyqtSlot
from jinja2 import TemplateNotFound
from backend.db.models import BasicSubmission, BasicSample, Reagent, KitType
from tools import is_power_user, jinja_template_loading
@@ -41,7 +39,6 @@ class SubmissionDetails(QDialog):
self.webview.setMaximumWidth(900)
self.webview.loadFinished.connect(self.activate_export)
self.layout = QGridLayout()
# self.setFixedSize(900, 500)
# NOTE: button to export a pdf version
self.btn = QPushButton("Export PDF")
self.btn.setFixedWidth(775)
@@ -69,7 +66,6 @@ class SubmissionDetails(QDialog):
def back_function(self):
self.webview.back()

# @pyqtSlot(bool)
def activate_export(self):
title = self.webview.title()
self.setWindowTitle(title)
@@ -144,7 +140,6 @@ class SubmissionDetails(QDialog):
self.base_dict = submission.to_dict(full_data=True)
# logger.debug(f"Submission details data:\n{pformat({k:v for k,v in self.base_dict.items() if k == 'reagents'})}")
# NOTE: don't want id
self.base_dict = submission.finalize_details(self.base_dict)
# logger.debug(f"Creating barcode.")
# logger.debug(f"Making platemap...")
self.base_dict['platemap'] = submission.make_plate_map(sample_list=submission.hitpick_plate())

@@ -10,8 +10,6 @@ from backend.db.models import BasicSubmission
from tools import Report, Result, report_result
from .functions import select_open_file

# from .misc import ReportDatePicker

logger = logging.getLogger(f"submissions.{__name__}")


@@ -91,7 +89,7 @@ class SubmissionsSheet(QTableView):
"""
sets data in model
"""
self.data = BasicSubmission.submissions_to_df(page=page)
self.data = BasicSubmission.submissions_to_df(page=page, page_size=page_size)
try:
self.data['Id'] = self.data['Id'].apply(str)
self.data['Id'] = self.data['Id'].str.zfill(4)
@@ -101,7 +99,7 @@ class SubmissionsSheet(QTableView):
proxyModel.setSourceModel(pandasModel(self.data))
self.setModel(proxyModel)

def contextMenuEvent(self, event):
def contextMenuEvent(self):
"""
Creates actions for right click menu events.

@@ -157,7 +155,7 @@ class SubmissionsSheet(QTableView):
report = Report()
fname = select_open_file(self, file_extension="csv")
with open(fname.__str__(), 'r') as f:
# split csv on commas
# NOTE: split csv on commas
runs = [col.strip().split(",") for col in f.readlines()]
count = 0
for run in runs:

@@ -1,124 +0,0 @@
from PyQt6.QtCore import Qt
from PyQt6.QtWidgets import (
QWidget, QVBoxLayout, QScrollArea,
QGridLayout, QPushButton, QLabel,
QLineEdit, QSpinBox, QCheckBox
)
from sqlalchemy.orm.attributes import InstrumentedAttribute
from backend.db import SubmissionType, BasicSubmission
import logging
from tools import Report
from .functions import select_open_file

logger = logging.getLogger(f"submissions.{__name__}")


class SubmissionTypeAdder(QWidget):

def __init__(self, parent) -> None:
super().__init__(parent)
self.report = Report()
self.app = parent.parent()
self.template_path = ""
main_box = QVBoxLayout(self)
scroll = QScrollArea(self)
main_box.addWidget(scroll)
scroll.setWidgetResizable(True)
scrollContent = QWidget(scroll)
self.grid = QGridLayout()
scrollContent.setLayout(self.grid)
# NOTE: insert submit button at top
self.submit_btn = QPushButton("Submit")
self.grid.addWidget(self.submit_btn,0,0,1,1)
self.grid.addWidget(QLabel("Submission Type Name:"),2,0)
# NOTE: widget to get kit name
self.st_name = QLineEdit()
self.st_name.setObjectName("submission_type_name")
self.grid.addWidget(self.st_name,2,1,1,2)
self.grid.addWidget(QLabel("Template File"),3,0)
template_selector = QPushButton("Select")
self.grid.addWidget(template_selector,3,1)
self.template_label = QLabel("None")
self.grid.addWidget(self.template_label,3,2)
# NOTE: widget to get uses of kit
exclude = ['id', 'submitting_lab_id', 'extraction_kit_id', 'reagents_id', 'extraction_info', 'pcr_info', 'run_cost']
self.columns = {key:value for key, value in BasicSubmission.__dict__.items() if isinstance(value, InstrumentedAttribute)}
self.columns = {key:value for key, value in self.columns.items() if hasattr(value, "type") and key not in exclude}
for iii, key in enumerate(self.columns):
idx = iii + 4
self.grid.addWidget(InfoWidget(parent=self, key=key), idx,0,1,3)
scroll.setWidget(scrollContent)
self.submit_btn.clicked.connect(self.submit)
template_selector.clicked.connect(self.get_template_path)

def submit(self):
"""
Create SubmissionType and send to db
"""
info = self.parse_form()
ST = SubmissionType(name=self.st_name.text(), info_map=info)
try:
with open(self.template_path, "rb") as f:
ST.template_file = f.read()
except FileNotFoundError:
logger.error(f"Could not find template file: {self.template_path}")
ST.save()

def parse_form(self) -> dict:
"""
Pulls info from form

Returns:
dict: information from form
"""
widgets = [widget for widget in self.findChildren(QWidget) if isinstance(widget, InfoWidget)]
return {widget.objectName():widget.parse_form() for widget in widgets}

def get_template_path(self):
"""
Sets path for loading a submission form template
"""
self.template_path = select_open_file(obj=self, file_extension="xlsx")
self.template_label.setText(self.template_path.__str__())


class InfoWidget(QWidget):

def __init__(self, parent: QWidget, key) -> None:
super().__init__(parent)
grid = QGridLayout()
self.setLayout(grid)
self.active = QCheckBox()
self.active.setChecked(True)
grid.addWidget(self.active, 0,0,1,1)
grid.addWidget(QLabel(key.replace("_", " ").title()),0,1,1,4)
self.setObjectName(key)
grid.addWidget(QLabel("Sheet Names (comma seperated):"),1,0)
|
||||
self.sheet = QLineEdit()
self.sheet.setObjectName("sheets")
grid.addWidget(self.sheet, 1,1,1,3)
grid.addWidget(QLabel("Row:"),2,0,alignment=Qt.AlignmentFlag.AlignRight)
self.row = QSpinBox()
self.row.setObjectName("row")
grid.addWidget(self.row,2,1)
grid.addWidget(QLabel("Column:"),2,2,alignment=Qt.AlignmentFlag.AlignRight)
self.column = QSpinBox()
self.column.setObjectName("column")
grid.addWidget(self.column,2,3)

def parse_form(self) -> dict|None:
"""
Pulls info from the Info form.

Returns:
dict: sheets, row, column
"""
if self.active.isChecked():
return dict(
sheets = self.sheet.text().split(","),
row = self.row.value(),
column = self.column.value()
)
else:
return None

@@ -1,8 +1,6 @@
'''
Contains all submission related frontend functions
'''
import sys

from PyQt6.QtWidgets import (
QWidget, QPushButton, QVBoxLayout,
QComboBox, QDateEdit, QLineEdit, QLabel
@@ -11,7 +9,7 @@ from PyQt6.QtCore import pyqtSignal, Qt
from . import select_open_file, select_save_file
import logging, difflib
from pathlib import Path
from tools import Report, Result, check_not_nan, main_form_style, report_result, check_regex_match
from tools import Report, Result, check_not_nan, main_form_style, report_result
from backend.excel.parser import SheetParser
from backend.validators import PydSubmission, PydReagent
from backend.db import (
@@ -28,6 +26,9 @@ logger = logging.getLogger(f"submissions.{__name__}")


class MyQComboBox(QComboBox):
"""
Custom combobox that disables wheel events until focussed on.
"""
def __init__(self, scrollWidget=None, *args, **kwargs):
super(MyQComboBox, self).__init__(*args, **kwargs)
self.scrollWidget = scrollWidget
@@ -42,6 +43,9 @@ class MyQComboBox(QComboBox):


class MyQDateEdit(QDateEdit):
"""
Custom date editor that disables wheel events until focussed on.
"""
def __init__(self, scrollWidget=None, *args, **kwargs):
super(MyQDateEdit, self).__init__(*args, **kwargs)
self.scrollWidget = scrollWidget
@@ -340,8 +344,6 @@ class SubmissionFormWidget(QWidget):
        _, result = self.pyd.check_kit_integrity()
        report.add_result(result)
        if len(result.results) > 0:
            # self.app.report.add_result(report)
            # self.app.report_result()
            return
        # logger.debug(f"PYD before transformation into SQL:\n\n{self.pyd}\n\n")
        base_submission, result = self.pyd.to_sql()
@@ -370,14 +372,10 @@ class SubmissionFormWidget(QWidget):
                else:
                    self.app.ctx.database_session.rollback()
                    report.add_result(Result(msg="Overwrite cancelled", status="Information"))
                    # self.app.report.add_result(report)
                    # self.app.report_result()
                    return report
            # NOTE: code 2: No RSL plate number given
            case 2:
                report.add_result(result)
                # self.app.report.add_result(report)
                # self.app.report_result()
                return report
            case _:
                pass
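NOTE (editor's sketch): the hunk above sits inside a dispatch on the Result code that comes back from self.pyd.to_sql(). Only code 2 (no RSL plate number) and the default case are visible in this diff; the overwrite prompt under code 1 and the QMessageBox usage are inferred from the rollback branch and are illustrative, not taken from the commit.

    match result.code:
        case 1:
            # Hypothetical: the submission already exists, so confirm before overwriting.
            answer = QMessageBox.question(self, "Overwrite?", result.msg)
            if answer != QMessageBox.StandardButton.Yes:
                self.app.ctx.database_session.rollback()
                report.add_result(Result(msg="Overwrite cancelled", status="Information"))
                return report
        case 2:
            # NOTE: code 2: No RSL plate number given
            report.add_result(result)
            return report
        case _:
            pass
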
@@ -451,7 +449,6 @@ class SubmissionFormWidget(QWidget):
                info[item] = value
        for k, v in info.items():
            self.pyd.set_attribute(key=k, value=v)
        # NOTE: return submission
        report.add_result(report)
        return report

@@ -527,18 +524,18 @@ class SubmissionFormWidget(QWidget):
            match key:
                case 'submitting_lab':
                    add_widget = MyQComboBox(scrollWidget=parent)
                    # lookup organizations suitable for submitting_lab (ctx: self.InfoItem.SubmissionFormWidget.SubmissionFormContainer.AddSubForm )
                    # NOTE: lookup organizations suitable for submitting_lab (ctx: self.InfoItem.SubmissionFormWidget.SubmissionFormContainer.AddSubForm )
                    labs = [item.name for item in Organization.query()]
                    # try to set closest match to top of list
                    # NOTE: try to set closest match to top of list
                    try:
                        labs = difflib.get_close_matches(value, labs, len(labs), 0)
                    except (TypeError, ValueError):
                        pass
                    # set combobox values to lookedup values
                    # NOTE: set combobox values to lookedup values
                    add_widget.addItems(labs)
                    add_widget.setToolTip("Select submitting lab.")
                case 'extraction_kit':
                    # if extraction kit not available, all other values fail
                    # NOTE: if extraction kit not available, all other values fail
                    if not check_not_nan(value):
                        msg = AlertPop(message="Make sure to check your extraction kit in the excel sheet!",
                                       status="warning")
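NOTE (editor's example, not from this commit): the difflib call above keeps every lab but reorders the list so the closest match to the parsed value lands at the top of the combobox; passing cutoff=0 stops weak matches from being filtered out. The lab names below are made up for illustration.

    import difflib

    labs = ["Enteric Lab", "Environmental Lab", "Virology Lab"]
    parsed_value = "enterics"
    ordered = difflib.get_close_matches(parsed_value, labs, len(labs), 0)
    print(ordered)  # closest name first, least similar last
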
@@ -573,8 +570,6 @@ class SubmissionFormWidget(QWidget):
                    add_widget.addItems(cats)
                    add_widget.setToolTip("Enter submission category or select from list.")
                case _:
                    # if key in sub_obj.get_default_info("form_ignore", submission_type=submission_type):
                    #     return None
                    if key in sub_obj.timestamps():
                        add_widget = MyQDateEdit(calendarPopup=True, scrollWidget=parent)
                        # NOTE: sets submitted date based on date found in excel sheet
@@ -593,7 +588,6 @@ class SubmissionFormWidget(QWidget):
            if add_widget is not None:
                add_widget.setObjectName(key)
                add_widget.setParent(parent)
                # add_widget.setStyleSheet(main_form_style)
            return add_widget

    def update_missing(self):
@@ -649,7 +643,6 @@ class SubmissionFormWidget(QWidget):
            self.label = self.ReagentParsedLabel(reagent=reagent)
            layout.addWidget(self.label)
            self.lot = self.ReagentLot(scrollWidget=parent, reagent=reagent, extraction_kit=extraction_kit)
            # self.lot.setStyleSheet(main_form_style)
            layout.addWidget(self.lot)
            # NOTE: Remove spacing between reagents
            layout.setContentsMargins(0, 0, 0, 0)
@@ -738,8 +731,6 @@ class SubmissionFormWidget(QWidget):
            if check_not_nan(reagent.lot):
                relevant_reagents.insert(0, str(reagent.lot))
            else:
                # looked_up_rt = KitTypeReagentRoleAssociation.query(reagent_role=reagent.role,
                #                                                    kit_type=extraction_kit)
                try:
                    looked_up_reg = Reagent.query(lot_number=looked_up_rt.last_used)
                except AttributeError:
@@ -768,22 +759,4 @@ class SubmissionFormWidget(QWidget):
            self.setObjectName(f"lot_{reagent.role}")
            self.addItems(relevant_reagents)
            self.setToolTip(f"Enter lot number for the reagent used for {reagent.role}")
            # self.setStyleSheet(main_form_style)

        # def relevant_reagents(self, assoc: KitTypeReagentRoleAssociation):
        #     # logger.debug(f"Attempting lookup of reagents by type: {reagent.type}")
        #     lookup = Reagent.query(reagent_role=assoc.reagent_role)
        #     try:
        #         regex = assoc.uses['exclude_regex']
        #     except KeyError:
        #         regex = "^$"
        #     relevant_reagents = [item for item in lookup if
        #                          not check_regex_match(pattern=regex, check=str(item.lot))]
        #     for rel_reagent in relevant_reagents:
        #         # # NOTE: extract strings from any sets.
        #         # if isinstance(rel_reagent, set):
        #         #     for thing in rel_reagent:
        #         #         yield thing
        #         # elif isinstance(rel_reagent, str):
        #         #     yield rel_reagent
        #         yield rel_reagent

@@ -1,6 +1,6 @@
from PyQt6.QtCore import QSignalBlocker
from PyQt6.QtWebEngineWidgets import QWebEngineView
from PyQt6.QtWidgets import QWidget, QGridLayout, QPushButton, QComboBox, QLabel
from PyQt6.QtWidgets import QWidget, QGridLayout, QPushButton, QLabel
from backend.db import Organization
from backend.excel import ReportMaker
from tools import Report
@@ -34,7 +34,6 @@ class Summary(QWidget):
        for org in [org.name for org in Organization.query()]:
            self.org_select.addItem(org)
        self.org_select.model().itemChanged.connect(self.get_report)
        # self.org_select.itemChecked.connect(self.get_report)
        self.layout.addWidget(self.save_excel_button, 0, 2, 1, 1)
        self.layout.addWidget(self.save_pdf_button, 0, 3, 1, 1)
        self.layout.addWidget(self.webview, 2, 0, 1, 4)

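NOTE (editor's sketch, assumption): connecting to org_select.model().itemChanged works because QComboBox.addItem() populates the default QStandardItemModel; if those items are made checkable, toggling a checkbox emits itemChanged and re-runs get_report. The helper below is illustrative only, not taken from this commit.

    from PyQt6.QtCore import Qt
    from PyQt6.QtGui import QStandardItemModel
    from PyQt6.QtWidgets import QComboBox

    def make_orgs_checkable(org_select: QComboBox) -> None:
        model = org_select.model()
        if isinstance(model, QStandardItemModel):
            for row in range(model.rowCount()):
                item = model.item(row)
                # Checkable items emit itemChanged whenever the user toggles them.
                item.setCheckable(True)
                item.setCheckState(Qt.CheckState.Unchecked)
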
@@ -1,8 +0,0 @@
{
    "submission_type" : {

    },
    "kit_type": {

    }
}
@@ -153,7 +153,6 @@ def check_not_nan(cell_contents) -> bool:
    try:
        if cell_contents.lower() in exclude:
            cell_contents = np.nan
            # cell_contents = cell_contents.lower()
    except (TypeError, AttributeError):
        pass
    try:
@@ -314,7 +313,7 @@ class Settings(BaseSettings, extra="allow"):
            check = value.exists()
        except AttributeError:
            check = False
        if not check: # and values.data['database_schema'] == "sqlite":
        if not check:
            # print(f"No directory found, using Documents/submissions")
            value.mkdir(exist_ok=True)
        # print(f"Final return of directory_path: {value}")
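NOTE (editor's sketch): the hunk above appears to come from a validator on Settings.directory_path that falls back to creating the folder. A minimal pydantic-v2 style reconstruction follows; the validator name and import paths are assumptions, and only the exists()/mkdir() fallback is taken from the diff.

    from pathlib import Path

    from pydantic import field_validator
    from pydantic_settings import BaseSettings

    class SettingsSketch(BaseSettings, extra="allow"):
        directory_path: Path

        @field_validator("directory_path")
        @classmethod
        def ensure_directory_exists(cls, value: Path) -> Path:
            try:
                check = value.exists()
            except AttributeError:
                # Mirrors the diff: a non-Path value simply fails the check.
                check = False
            if not check:
                value.mkdir(exist_ok=True)
            return value
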
@@ -922,7 +921,6 @@ def check_authorization(func):
            return func(*args, **kwargs)
        else:
            logger.error(f"User {getpass.getuser()} is not authorized for this function.")
            # return dict(code=1, message="This user does not have permission for this function.", status="warning")
            report = Report()
            report.add_result(Result(owner=func.__str__(), code=1, msg="This user does not have permission for this function.", status="warning"))
            return report
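NOTE (editor's sketch): the hunk above is the unauthorised branch of check_authorization; the wrapper skeleton and the getpass-based allow-list below are reconstructed for illustration and are not taken verbatim from this commit.

    import getpass
    import logging
    from functools import wraps

    logger = logging.getLogger("submissions.tools")
    AUTHORIZED_USERS = {"alice", "bob"}  # hypothetical allow-list

    def check_authorization_sketch(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            if getpass.getuser() in AUTHORIZED_USERS:
                return func(*args, **kwargs)
            logger.error(f"User {getpass.getuser()} is not authorized for this function.")
            # The real decorator returns a Report carrying a warning Result here.
            return None
        return wrapper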