Removed logger.debug calls from proven functions.

lwark
2024-12-12 12:17:21 -06:00
parent b174eb1221
commit 67520cb784
32 changed files with 80 additions and 758 deletions

View File

@@ -1,6 +1,5 @@
import sys, os
from tools import ctx, setup_logger, check_if_app
from backend import scripts
# environment variable must be set to enable qtwebengine in network path
if check_if_app():
@@ -9,6 +8,7 @@ if check_if_app():
# setup custom logger
logger = setup_logger(verbosity=3)
from backend import scripts
from PyQt6.QtWidgets import QApplication
from frontend.widgets.app import App
@@ -25,6 +25,7 @@ def run_startup():
except AttributeError as e:
logger.error(f"Couldn't run startup script {script} due to {e}")
continue
logger.info(f"Running startup script: {func.__name__}")
func(ctx)
@@ -40,6 +41,7 @@ def run_teardown():
except AttributeError as e:
logger.error(f"Couldn't run teardown script {script} due to {e}")
continue
logger.info(f"Running teardown script: {func.__name__}")
func(ctx)
if __name__ == '__main__':
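
run_startup and run_teardown above share the same resolve-and-call loop; a self-contained sketch of that loop, with a SimpleNamespace standing in for the project's scripts module (names here are illustrative):

import logging
from types import SimpleNamespace

logger = logging.getLogger("submissions")
logging.basicConfig(level=logging.INFO)

def run_scripts(container, names, ctx):
    # Resolve each configured name on the container; log and skip anything missing.
    for name in names:
        try:
            func = getattr(container, name)
        except AttributeError as e:
            logger.error(f"Couldn't run script {name} due to {e}")
            continue
        logger.info(f"Running script: {func.__name__}")
        func(ctx)

def hello(ctx):
    print("Hello!")

# Stand-in for the project's `backend.scripts` module.
scripts = SimpleNamespace(hello=hello)
run_scripts(scripts, ["hello", "missing"], ctx=None)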

View File

@@ -18,12 +18,8 @@ def set_sqlite_pragma(dbapi_connection, connection_record):
connection_record (_type_): _description_
"""
cursor = dbapi_connection.cursor()
# print(ctx.database_schema)
if ctx.database_schema == "sqlite":
execution_phrase = "PRAGMA foreign_keys=ON"
# cursor.execute(execution_phrase)
# elif ctx.database_schema == "mssql+pyodbc":
# execution_phrase = "SET IDENTITY_INSERT dbo._wastewater ON;"
else:
print("Nothing to execute, returning")
cursor.close()
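
The pragma hook above is a trimmed-down variant of the standard SQLAlchemy recipe for turning on SQLite foreign-key enforcement; a minimal sketch (the engine URL is a placeholder):

from sqlalchemy import create_engine, event

engine = create_engine("sqlite:///example.db")

@event.listens_for(engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
    # SQLite leaves foreign key enforcement off by default; enable it per connection.
    cursor = dbapi_connection.cursor()
    cursor.execute("PRAGMA foreign_keys=ON")
    cursor.close()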
@@ -37,12 +33,9 @@ from .models import *
def update_log(mapper, connection, target):
# logger.debug("\n\nBefore update\n\n")
state = inspect(target)
# logger.debug(state)
object_name = state.object.truncated_name()
update = dict(user=getuser(), time=datetime.now(), object=object_name, changes=[])
# logger.debug(update)
for attr in state.attrs:
hist = attr.load_history()
if not hist.has_changes():
@@ -56,24 +49,19 @@ def update_log(mapper, connection, target):
continue
deleted = [str(item) for item in hist.deleted]
change = dict(field=attr.key, added=added, deleted=deleted)
# logger.debug(f"Adding: {pformat(change)}")
if added != deleted:
try:
update['changes'].append(change)
except Exception as e:
logger.error(f"Something went wrong adding attr: {attr.key}: {e}")
continue
# logger.debug(f"Adding to audit logs: {pformat(update)}")
if update['changes']:
# Note: must use execute as the session will be busy at this point.
# https://medium.com/@singh.surbhicse/creating-audit-table-to-log-insert-update-and-delete-changes-in-flask-sqlalchemy-f2ca53f7b02f
table = AuditLog.__table__
# logger.debug(f"Adding to {table}")
connection.execute(table.insert().values(**update))
# logger.debug("Here is where I would insert values, if I was able.")
else:
logger.info(f"No changes detected, not updating logs.")
# if ctx.logging_enabled:
event.listen(LogMixin, 'after_update', update_log, propagate=True)
event.listen(LogMixin, 'after_insert', update_log, propagate=True)
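
For reference, the update_log hook follows SQLAlchemy's attribute-history pattern end to end; a self-contained sketch of the same idea, with hypothetical Widget/AuditEntry models standing in for LogMixin/AuditLog:

from datetime import datetime
from getpass import getuser

from sqlalchemy import JSON, Column, DateTime, Integer, String, create_engine, event, inspect
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class AuditEntry(Base):
    # Hypothetical stand-in for AuditLog.
    __tablename__ = "audit"
    id = Column(Integer, primary_key=True)
    user = Column(String)
    time = Column(DateTime)
    object = Column(String)
    changes = Column(JSON)

class Widget(Base):
    # Hypothetical stand-in for a LogMixin model.
    __tablename__ = "widget"
    id = Column(Integer, primary_key=True)
    name = Column(String)

def audit_update(mapper, connection, target):
    # Collect per-attribute {field, added, deleted} records from the flush history.
    changes = []
    for attr in inspect(target).attrs:
        hist = attr.load_history()
        if not hist.has_changes():
            continue
        changes.append(dict(field=attr.key,
                            added=[str(x) for x in hist.added],
                            deleted=[str(x) for x in hist.deleted]))
    if changes:
        # The session is mid-flush here, so write through the low-level connection.
        connection.execute(AuditEntry.__table__.insert().values(
            user=getuser(), time=datetime.now(), object=repr(target), changes=changes))

event.listen(Widget, "after_update", audit_update)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    widget = Widget(name="old")
    session.add(widget)
    session.commit()
    widget.name = "new"
    session.commit()   # fires after_update and writes one audit row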

View File

@@ -3,7 +3,6 @@ Contains all models for sqlalchemy
"""
from __future__ import annotations
import sys, logging
from pandas import DataFrame
from sqlalchemy import Column, INTEGER, String, JSON
from sqlalchemy.orm import DeclarativeMeta, declarative_base, Query, Session
@@ -131,7 +130,6 @@ class BaseClass(Base):
search = name.title().replace(" ", "")
else:
search = name
logger.debug(f"Searching for subclass: {search}")
return next((item for item in cls.__subclasses__() if item.__name__ == search), cls)
@classmethod
@@ -146,9 +144,7 @@ class BaseClass(Base):
List[Any]: Results of sqlalchemy query.
"""
query: Query = cls.__database_session__.query(cls)
# logger.debug(f"Queried model. Now running searches in {kwargs}")
for k, v in kwargs.items():
# logger.debug(f"Running fuzzy search for attribute: {k} with value {v}")
# NOTE: Not sure why this is necessary, but it is.
search = f"%{v}%"
try:
@@ -200,9 +196,7 @@ class BaseClass(Base):
model = cls
if query is None:
query: Query = cls.__database_session__.query(model)
# logger.debug(f"Grabbing singles using {model.get_default_info}")
singles = model.get_default_info('singles')
# logger.info(f"Querying: {model}, with kwargs: {kwargs}")
for k, v in kwargs.items():
logger.info(f"Using key: {k} with value: {v}")
try:
@@ -227,7 +221,6 @@ class BaseClass(Base):
"""
Add the object to the database and commit
"""
# logger.debug(f"Saving object: {pformat(self.__dict__)}")
report = Report()
try:
self.__database_session__.add(self)

View File

@@ -2,7 +2,6 @@
Contains the audit log class and functions.
"""
from typing import List
from dateutil.parser import parse
from sqlalchemy.orm import declarative_base, DeclarativeMeta, Query
from . import BaseClass
@@ -48,32 +47,24 @@ class AuditLog(Base):
logger.warning(f"End date with no start date, using Jan 1, 2023")
start_date = session.query(cls, func.min(cls.time)).first()[1]
if start_date is not None:
# logger.debug(f"Querying with start date: {start_date} and end date: {end_date}")
match start_date:
case date():
# logger.debug(f"Lookup BasicSubmission by start_date({start_date})")
start_date = start_date.strftime("%Y-%m-%d")
case int():
# logger.debug(f"Lookup BasicSubmission by ordinal start_date {start_date}")
start_date = datetime.fromordinal(
datetime(1900, 1, 1).toordinal() + start_date - 2).date().strftime("%Y-%m-%d")
case _:
# logger.debug(f"Lookup BasicSubmission by parsed str start_date {start_date}")
start_date = parse(start_date).strftime("%Y-%m-%d")
match end_date:
case date() | datetime():
# logger.debug(f"Lookup BasicSubmission by end_date({end_date})")
end_date = end_date + timedelta(days=1)
end_date = end_date.strftime("%Y-%m-%d")
case int():
# logger.debug(f"Lookup BasicSubmission by ordinal end_date {end_date}")
end_date = datetime.fromordinal(datetime(1900, 1, 1).toordinal() + end_date - 2).date() + timedelta(days=1)
end_date = end_date.strftime("%Y-%m-%d")
case _:
# logger.debug(f"Lookup BasicSubmission by parsed str end_date {end_date}")
end_date = parse(end_date) + timedelta(days=1)
end_date = end_date.strftime("%Y-%m-%d")
# logger.debug(f"Compensating for same date by using time")
if start_date == end_date:
start_date = datetime.strptime(start_date, "%Y-%m-%d").strftime("%Y-%m-%d %H:%M:%S.%f")
query = query.filter(cls.time == start_date)
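
The same three-way date coercion recurs in the control lookups further down; a minimal sketch of that match/case dispatch (the helper name is illustrative):

from datetime import date, datetime
from dateutil.parser import parse

def normalize_date(value: date | int | str) -> str:
    # Accept a date/datetime, an Excel-style serial number, or a free-form string
    # and return a YYYY-MM-DD string suitable for a BETWEEN filter.
    match value:
        case datetime() | date():
            return value.strftime("%Y-%m-%d")
        case int():
            # Excel serial dates count from 1900-01-01, with the historical off-by-two adjustment.
            return datetime.fromordinal(
                datetime(1900, 1, 1).toordinal() + value - 2).strftime("%Y-%m-%d")
        case _:
            return parse(value).strftime("%Y-%m-%d")

print(normalize_date(date(2023, 5, 1)))   # 2023-05-01
print(normalize_date(45047))              # 2023-05-01
print(normalize_date("May 1, 2023"))      # 2023-05-01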

View File

@@ -171,11 +171,9 @@ class Control(BaseClass):
match submission_type:
case str():
from backend import BasicSubmission, SubmissionType
# logger.debug(f"Lookup controls by SubmissionType str: {submission_type}")
query = query.join(BasicSubmission).join(SubmissionType).filter(SubmissionType.name == submission_type)
case SubmissionType():
from backend import BasicSubmission
# logger.debug(f"Lookup controls by SubmissionType: {submission_type}")
query = query.join(BasicSubmission).filter(BasicSubmission.submission_type_name == submission_type.name)
case _:
pass
@@ -203,31 +201,23 @@ class Control(BaseClass):
if start_date is not None:
match start_date:
case date():
# logger.debug(f"Lookup control by start date({start_date})")
start_date = start_date.strftime("%Y-%m-%d")
case int():
# logger.debug(f"Lookup control by ordinal start date {start_date}")
start_date = datetime.fromordinal(
datetime(1900, 1, 1).toordinal() + start_date - 2).date().strftime("%Y-%m-%d")
case _:
# logger.debug(f"Lookup control with parsed start date {start_date}")
start_date = parse(start_date).strftime("%Y-%m-%d")
match end_date:
case date():
# logger.debug(f"Lookup control by end date({end_date})")
end_date = end_date.strftime("%Y-%m-%d")
case int():
# logger.debug(f"Lookup control by ordinal end date {end_date}")
end_date = datetime.fromordinal(datetime(1900, 1, 1).toordinal() + end_date - 2).date().strftime(
"%Y-%m-%d")
case _:
# logger.debug(f"Lookup control with parsed end date {end_date}")
end_date = parse(end_date).strftime("%Y-%m-%d")
# logger.debug(f"Looking up BasicSubmissions from start date: {start_date} and end date: {end_date}")
query = query.filter(cls.submitted_date.between(start_date, end_date))
match name:
case str():
# logger.debug(f"Lookup control by name {control_name}")
query = query.filter(cls.name.startswith(name))
limit = 1
case _:
@@ -273,7 +263,6 @@ class Control(BaseClass):
except StopIteration as e:
raise AttributeError(
f"Couldn't find existing class/subclass of {cls} with all attributes:\n{pformat(attrs.keys())}")
# logger.info(f"Recruiting model: {model}")
return model
@classmethod
@@ -343,7 +332,6 @@ class PCRControl(Control):
parent.mode_typer.clear()
parent.mode_typer.setEnabled(False)
report = Report()
# logger.debug(f"Chart settings: {pformat(chart_settings)}")
controls = cls.query(submission_type=chart_settings['sub_type'], start_date=chart_settings['start_date'],
end_date=chart_settings['end_date'])
data = [control.to_sub_dict() for control in controls]
@@ -411,21 +399,16 @@ class IridaControl(Control):
kraken = self.kraken
except TypeError:
kraken = {}
# logger.debug("calculating kraken count total to use in percentage")
kraken_cnt_total = sum([kraken[item]['kraken_count'] for item in kraken])
# logger.debug("Creating new kraken.")
new_kraken = [dict(name=item, kraken_count=kraken[item]['kraken_count'],
kraken_percent="{0:.0%}".format(kraken[item]['kraken_count'] / kraken_cnt_total),
target=item in self.controltype.targets)
for item in kraken]
# logger.debug(f"New kraken before sort: {new_kraken}")
new_kraken = sorted(new_kraken, key=itemgetter('kraken_count'), reverse=True)
# logger.debug("setting targets")
if self.controltype.targets:
targets = self.controltype.targets
else:
targets = ["None"]
# logger.debug("constructing output dictionary")
output = dict(
name=self.name,
type=self.controltype.name,
@@ -447,7 +430,6 @@ class IridaControl(Control):
Returns:
List[dict]: list of records
"""
# logger.debug("load json string for mode (i.e. contains, matches, kraken2)")
try:
data = self.__getattribute__(mode)
except TypeError:
@@ -460,12 +442,10 @@ class IridaControl(Control):
else:
if consolidate:
on_tar = {k: v for k, v in data.items() if k.strip("*") in self.controltype.targets[control_sub_type]}
# logger.debug(f"Consolidating off-targets to: {self.controltype.targets[control_sub_type]}")
off_tar = sum(v[f'{mode}_ratio'] for k, v in data.items() if
k.strip("*") not in self.controltype.targets[control_sub_type])
on_tar['Off-target'] = {f"{mode}_ratio": off_tar}
data = on_tar
# logger.debug("dict keys are genera of bacteria, e.g. 'Streptococcus'")
for genus in data:
_dict = dict(
name=self.name,
@@ -473,7 +453,6 @@ class IridaControl(Control):
genus=genus,
target='Target' if genus.strip("*") in self.controltype.targets[control_sub_type] else "Off-target"
)
# logger.debug("get Target or Off-target of genus")
for key in data[genus]:
_dict[key] = data[genus][key]
yield _dict
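
The consolidation branch above collapses every non-target genus into one bucket; a small sketch of that step (function name and sample ratios are illustrative):

def consolidate(data: dict, targets: list[str], mode: str = "kraken") -> dict:
    # Keep target genera as-is and roll everything else into a single
    # "Off-target" entry, mirroring the branch above.
    on_target = {k: v for k, v in data.items() if k.strip("*") in targets}
    off_target = sum(v[f"{mode}_ratio"] for k, v in data.items()
                     if k.strip("*") not in targets)
    on_target["Off-target"] = {f"{mode}_ratio": off_target}
    return on_target

sample = {"Streptococcus": {"kraken_ratio": 0.75},
          "Escherichia*": {"kraken_ratio": 0.125},
          "Bacillus": {"kraken_ratio": 0.0625}}
print(consolidate(sample, targets=["Streptococcus"]))
# {'Streptococcus': {'kraken_ratio': 0.75}, 'Off-target': {'kraken_ratio': 0.1875}}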
@@ -487,7 +466,6 @@ class IridaControl(Control):
List[str]: List of control mode names.
"""
try:
# logger.debug("Creating a list of JSON columns in _controls table")
cols = [item.name for item in list(cls.__table__.columns) if isinstance(item.type, JSON)]
except AttributeError as e:
logger.error(f"Failed to get available modes from db: {e}")
@@ -504,7 +482,6 @@ class IridaControl(Control):
"""
super().make_parent_buttons(parent=parent)
rows = parent.layout.rowCount() - 2
# logger.debug(f"Parent rows: {rows}")
checker = QCheckBox(parent)
checker.setChecked(True)
checker.setObjectName("irida_check")
@@ -539,10 +516,8 @@ class IridaControl(Control):
except AttributeError:
consolidate = False
report = Report()
# logger.debug(f"settings: {pformat(chart_settings)}")
controls = cls.query(subtype=chart_settings['sub_type'], start_date=chart_settings['start_date'],
end_date=chart_settings['end_date'])
# logger.debug(f"Controls found: {controls}")
if not controls:
report.add_result(Result(status="Critical", msg="No controls found in given date range."))
return report, None
@@ -552,19 +527,16 @@ class IridaControl(Control):
control in controls]
# NOTE: flatten data to one dimensional list
data = [item for sublist in data for item in sublist]
# logger.debug(f"Control objects going into df conversion: {pformat(data)}")
if not data:
report.add_result(Result(status="Critical", msg="No data found for controls in given date range."))
return report, None
df = cls.convert_data_list_to_df(input_df=data, sub_mode=chart_settings['sub_mode'])
# logger.debug(f"Chart df: \n {df}")
if chart_settings['sub_mode'] is None:
title = chart_settings['sub_mode']
else:
title = f"{chart_settings['mode']} - {chart_settings['sub_mode']}"
# NOTE: send dataframe to chart maker
df, modes = cls.prep_df(ctx=ctx, df=df)
# logger.debug(f"prepped df: \n {df}")
fig = IridaFigure(df=df, ytitle=title, modes=modes, parent=parent,
settings=chart_settings)
return report, fig
@@ -581,9 +553,7 @@ class IridaControl(Control):
Returns:
DataFrame: dataframe of controls
"""
# logger.debug(f"Subtype: {sub_mode}")
df = DataFrame.from_records(input_df)
# logger.debug(f"DF from records: {df}")
safe = ['name', 'submitted_date', 'genus', 'target']
for column in df.columns:
if column not in safe:
@@ -636,7 +606,6 @@ class IridaControl(Control):
Returns:
DataFrame: output dataframe with dates incremented.
"""
# logger.debug(f"Unique items: {df['name'].unique()}")
# NOTE: get submitted dates for each control
dict_list = [dict(name=item, date=df[df.name == item].iloc[0]['submitted_date']) for item in
sorted(df['name'].unique())]
@@ -664,7 +633,6 @@ class IridaControl(Control):
check = False
previous_dates.add(item['date'])
if check:
# logger.debug(f"We found one! Increment date!\n\t{item['date']} to {item['date'] + timedelta(days=1)}")
# NOTE: get df locations where name == item name
mask = df['name'] == item['name']
# NOTE: increment date in dataframe
@@ -673,15 +641,12 @@ class IridaControl(Control):
passed = False
else:
passed = True
# logger.debug(f"\n\tCurrent date: {item['date']}\n\tPrevious dates:{previous_dates}")
# logger.debug(f"DF: {type(df)}, previous_dates: {type(previous_dates)}")
# NOTE: if run didn't lead to changed date, return values
if passed:
# logger.debug(f"Date check passed, returning.")
return df, previous_dates
# NOTE: if date was changed, rerun with new date
else:
# logger.warning(f"Date check failed, running recursion")
logger.warning(f"Date check failed, running recursion")
df, previous_dates = cls.check_date(df, item, previous_dates)
return df, previous_dates
@@ -708,13 +673,10 @@ class IridaControl(Control):
# NOTE: sort by and exclude from
sorts = ['submitted_date', "target", "genus"]
exclude = ['name', 'genera']
# logger.debug(df.columns)
modes = [item for item in df.columns if item not in sorts and item not in exclude]
# logger.debug(f"Modes coming out: {modes}")
# NOTE: Set descending for any columns that have "{mode}" in the header.
ascending = [False if item == "target" else True for item in sorts]
df = df.sort_values(by=sorts, ascending=ascending)
# logger.debug(df[df.isna().any(axis=1)])
# NOTE: actual chart construction is done by
return df, modes
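
prep_df's column selection and sort come down to two list comprehensions and a sort_values call; a sketch with made-up control rows (column names mirror the ones above):

from pandas import DataFrame

df = DataFrame({
    "submitted_date": ["2024-01-01", "2024-01-01", "2024-01-02"],
    "target": ["Target", "Off-target", "Off-target"],
    "genus": ["Streptococcus", "Escherichia", "Bacillus"],
    "kraken_ratio": [0.8, 0.1, 0.1],
})
sorts = ["submitted_date", "target", "genus"]
exclude = ["name", "genera"]
# Anything that is neither a sort key nor excluded is treated as a plottable mode.
modes = [col for col in df.columns if col not in sorts and col not in exclude]
# Sort dates and genera ascending, but targets descending so "Target" rows lead.
ascending = [False if item == "target" else True for item in sorts]
df = df.sort_values(by=sorts, ascending=ascending)
print(modes)   # ['kraken_ratio']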

View File

@@ -17,7 +17,6 @@ from io import BytesIO
logger = logging.getLogger(f'submissions.{__name__}')
# logger.debug("Table for ReagentType/Reagent relations")
reagentroles_reagents = Table(
"_reagentroles_reagents",
Base.metadata,
@@ -26,7 +25,6 @@ reagentroles_reagents = Table(
extend_existing=True
)
# logger.debug("Table for EquipmentRole/Equipment relations")
equipmentroles_equipment = Table(
"_equipmentroles_equipment",
Base.metadata,
@@ -35,7 +33,6 @@ equipmentroles_equipment = Table(
extend_existing=True
)
# logger.debug("Table for Equipment/Process relations")
equipment_processes = Table(
"_equipment_processes",
Base.metadata,
@@ -44,7 +41,6 @@ equipment_processes = Table(
extend_existing=True
)
# logger.debug("Table for EquipmentRole/Process relations")
equipmentroles_processes = Table(
"_equipmentroles_processes",
Base.metadata,
@@ -53,7 +49,6 @@ equipmentroles_processes = Table(
extend_existing=True
)
# logger.debug("Table for SubmissionType/Process relations")
submissiontypes_processes = Table(
"_submissiontypes_processes",
Base.metadata,
@@ -62,7 +57,6 @@ submissiontypes_processes = Table(
extend_existing=True
)
# logger.debug("Table for KitType/Process relations")
kittypes_processes = Table(
"_kittypes_processes",
Base.metadata,
@@ -71,7 +65,6 @@ kittypes_processes = Table(
extend_existing=True
)
# logger.debug("Table for TipRole/Tips relations")
tiproles_tips = Table(
"_tiproles_tips",
Base.metadata,
@@ -80,7 +73,6 @@ tiproles_tips = Table(
extend_existing=True
)
# logger.debug("Table for Process/TipRole relations")
process_tiprole = Table(
"_process_tiprole",
Base.metadata,
@@ -89,7 +81,6 @@ process_tiprole = Table(
extend_existing=True
)
# logger.debug("Table for Equipment/Tips relations")
equipment_tips = Table(
"_equipment_tips",
Base.metadata,
@@ -116,7 +107,7 @@ class KitType(BaseClass):
cascade="all, delete-orphan",
)
# creator function: https://stackoverflow.com/questions/11091491/keyerror-when-adding-objects-to-sqlalchemy-association-object/11116291#11116291
# NOTE: creator function: https://stackoverflow.com/questions/11091491/keyerror-when-adding-objects-to-sqlalchemy-association-object/11116291#11116291
reagent_roles = association_proxy("kit_reagentrole_associations", "reagent_role",
creator=lambda RT: KitTypeReagentRoleAssociation(
reagent_role=RT)) #: Association proxy to KitTypeReagentRoleAssociation
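
The creator lambda flagged by the NOTE is the standard fix for the KeyError described in the linked answer: an object appended through the proxy has to be wrapped in the association object first. A stripped-down sketch, with hypothetical Kit/Role models in place of KitType/ReagentRole:

from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()

class KitRoleLink(Base):
    # Association object carrying extra per-link data (e.g. "required").
    __tablename__ = "kit_role_link"
    kit_id = Column(ForeignKey("kit.id"), primary_key=True)
    role_id = Column(ForeignKey("role.id"), primary_key=True)
    required = Column(Integer, default=1)
    role = relationship("Role")

class Role(Base):
    __tablename__ = "role"
    id = Column(Integer, primary_key=True)
    name = Column(String)

class Kit(Base):
    __tablename__ = "kit"
    id = Column(Integer, primary_key=True)
    name = Column(String)
    role_links = relationship(KitRoleLink, cascade="all, delete-orphan")
    # The creator wraps a bare Role in the association object so that
    # `kit.roles.append(role)` works without a KeyError.
    roles = association_proxy("role_links", "role",
                              creator=lambda r: KitRoleLink(role=r))

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    kit = Kit(name="extraction")
    kit.roles.append(Role(name="lysis buffer"))
    session.add(kit)
    session.commit()
    print([r.name for r in kit.roles])  # ['lysis buffer']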
@@ -152,18 +143,14 @@ class KitType(BaseClass):
"""
match submission_type:
case SubmissionType():
# logger.debug(f"Getting reagents by SubmissionType {submission_type}")
relevant_associations = [item for item in self.kit_reagentrole_associations if
item.submission_type == submission_type]
case str():
# logger.debug(f"Getting reagents by str {submission_type}")
relevant_associations = [item for item in self.kit_reagentrole_associations if
item.submission_type.name == submission_type]
case _:
# logger.debug(f"Getting reagents")
relevant_associations = [item for item in self.kit_reagentrole_associations]
if required:
# logger.debug(f"Filtering by required.")
return (item.reagent_role for item in relevant_associations if item.required == 1)
else:
return (item.reagent_role for item in relevant_associations)
@@ -181,18 +168,14 @@ class KitType(BaseClass):
# NOTE: Account for submission_type variable type.
match submission_type:
case str():
# logger.debug(f"Constructing xl map with str {submission_type}")
assocs = [item for item in self.kit_reagentrole_associations if
item.submission_type.name == submission_type]
case SubmissionType():
# logger.debug(f"Constructing xl map with SubmissionType {submission_type}")
assocs = [item for item in self.kit_reagentrole_associations if item.submission_type == submission_type]
case _:
raise ValueError(f"Wrong variable type: {type(submission_type)} used!")
# logger.debug("Get all KitTypeReagentTypeAssociation for SubmissionType")
for assoc in assocs:
try:
# logger.debug(f"Yielding: {assoc.reagent_role.name}, {assoc.uses}")
yield assoc.reagent_role.name, assoc.uses
except TypeError:
continue
@@ -220,27 +203,22 @@ class KitType(BaseClass):
query: Query = cls.__database_session__.query(cls)
match used_for:
case str():
# logger.debug(f"Looking up kit type by used_for str: {used_for}")
query = query.filter(cls.used_for.any(name=used_for))
case SubmissionType():
# logger.debug(f"Looking up kit type by used_for SubmissionType: {used_for}")
query = query.filter(cls.used_for.contains(used_for))
case _:
pass
match name:
case str():
# logger.debug(f"Looking up kit type by name str: {name}")
query = query.filter(cls.name == name)
limit = 1
case _:
pass
match id:
case int():
# logger.debug(f"Looking up kit type by id int: {id}")
query = query.filter(cls.id == id)
limit = 1
case str():
# logger.debug(f"Looking up kit type by id str: {id}")
query = query.filter(cls.id == int(id))
limit = 1
case _:
@@ -262,10 +240,7 @@ class KitType(BaseClass):
dict: Dictionary containing relevant info for SubmissionType construction
"""
base_dict = dict(name=self.name, reagent_roles=[], equipment_roles=[])
# base_dict['reagent roles'] = []
# base_dict['equipment roles'] = []
for k, v in self.construct_xl_map_for_use(submission_type=submission_type):
# logger.debug(f"Value: {v}")
try:
assoc = next(item for item in self.kit_reagentrole_associations if item.reagent_role.name == k)
except StopIteration as e:
@@ -280,10 +255,8 @@ class KitType(BaseClass):
except StopIteration:
continue
for kk, vv in assoc.to_export_dict(extraction_kit=self).items():
# logger.debug(f"{kk}:{vv}")
v[kk] = vv
base_dict['equipment_roles'].append(v)
# logger.debug(f"KT returning {base_dict}")
return base_dict
@@ -347,28 +320,19 @@ class ReagentRole(BaseClass):
else:
match kit_type:
case str():
# logger.debug(f"Lookup ReagentType by kittype str {kit_type}")
kit_type = KitType.query(name=kit_type)
case _:
pass
match reagent:
case str():
# logger.debug(f"Lookup ReagentType by reagent str {reagent}")
reagent = Reagent.query(lot=reagent)
case _:
pass
assert reagent.role
# logger.debug(f"Looking up reagent type for {type(kit_type)} {kit_type} and {type(reagent)} {reagent}")
# logger.debug(f"Kit reagent types: {kit_type.reagent_types}")
result = set(kit_type.reagent_roles).intersection(reagent.role)
# logger.debug(f"Result: {result}")
# try:
return next((item for item in result), None)
# except IndexError:
# return None
match name:
case str():
# logger.debug(f"Looking up reagent type by name str: {name}")
query = query.filter(cls.name == name)
limit = 1
case _:
@@ -457,7 +421,6 @@ class Reagent(BaseClass, LogMixin):
rtype = reagent_role.name.replace("_", " ")
except AttributeError:
rtype = "Unknown"
# logger.debug(f"Role for {self.name}: {rtype}")
# NOTE: Calculate expiry with EOL from ReagentType
try:
place_holder = self.expiry + reagent_role.eol_ext
@@ -493,14 +456,11 @@ class Reagent(BaseClass, LogMixin):
Report: Result of operation
"""
report = Report()
# logger.debug(f"Attempting update of last used reagent type at intersection of ({self}), ({kit})")
rt = ReagentRole.query(kit_type=kit, reagent=self, limit=1)
if rt is not None:
# logger.debug(f"got reagenttype {rt}")
assoc = KitTypeReagentRoleAssociation.query(kit_type=kit, reagent_role=rt)
if assoc is not None:
if assoc.last_used != self.lot:
# logger.debug(f"Updating {assoc} last used to {self.lot}")
assoc.last_used = self.lot
result = assoc.save()
report.add_result(result)
@@ -539,23 +499,19 @@ class Reagent(BaseClass, LogMixin):
pass
match role:
case str():
# logger.debug(f"Looking up reagents by reagent type str: {reagent_type}")
query = query.join(cls.role).filter(ReagentRole.name == role)
case ReagentRole():
# logger.debug(f"Looking up reagents by reagent type ReagentType: {reagent_type}")
query = query.filter(cls.role.contains(role))
case _:
pass
match name:
case str():
# logger.debug(f"Looking up reagent by name str: {name}")
# NOTE: Not limited due to multiple reagents having same name.
query = query.filter(cls.name == name)
case _:
pass
match lot:
case str():
# logger.debug(f"Looking up reagent by lot number str: {lot}")
query = query.filter(cls.lot == lot)
# NOTE: In this case limit number returned.
limit = 1
@@ -579,7 +535,6 @@ class Reagent(BaseClass, LogMixin):
case "expiry":
if isinstance(value, str):
field_value = datetime.strptime(value, "%Y-%m-%d")
# field_value.replace(tzinfo=timezone)
elif isinstance(value, date):
field_value = datetime.combine(value, datetime.min.time())
else:
@@ -589,7 +544,6 @@ class Reagent(BaseClass, LogMixin):
continue
case _:
field_value = value
# logger.debug(f"Setting reagent {key} to {field_value}")
self.__setattr__(key, field_value)
self.save()
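
The expiry branch above is the usual str/date/datetime coercion; a small sketch (helper name is illustrative), checking datetime before date because datetime is a subclass of date:

from datetime import date, datetime

def coerce_expiry(value):
    # Accept "YYYY-MM-DD" strings, bare dates, or datetimes and always return a datetime.
    if isinstance(value, str):
        return datetime.strptime(value, "%Y-%m-%d")
    if isinstance(value, datetime):
        return value
    if isinstance(value, date):
        return datetime.combine(value, datetime.min.time())
    raise TypeError(f"Unsupported expiry type: {type(value)}")

print(coerce_expiry("2025-06-30"))        # 2025-06-30 00:00:00
print(coerce_expiry(date(2025, 6, 30)))   # 2025-06-30 00:00:00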
@@ -634,25 +588,19 @@ class Discount(BaseClass):
query: Query = cls.__database_session__.query(cls)
match organization:
case Organization():
# logger.debug(f"Looking up discount with organization Organization: {organization}")
query = query.filter(cls.client == Organization)
case str():
# logger.debug(f"Looking up discount with organization str: {organization}")
query = query.join(Organization).filter(Organization.name == organization)
case int():
# logger.debug(f"Looking up discount with organization id: {organization}")
query = query.join(Organization).filter(Organization.id == organization)
case _:
pass
match kit_type:
case KitType():
# logger.debug(f"Looking up discount with kit type KitType: {kit_type}")
query = query.filter(cls.kit == kit_type)
case str():
# logger.debug(f"Looking up discount with kit type str: {kit_type}")
query = query.join(KitType).filter(KitType.name == kit_type)
case int():
# logger.debug(f"Looking up discount with kit type id: {kit_type}")
query = query.join(KitType).filter(KitType.id == kit_type)
case _:
pass
@@ -723,7 +671,6 @@ class SubmissionType(BaseClass):
return submission_type.template_file
def get_template_file_sheets(self) -> List[str]:
logger.debug(f"Submission type to get sheets for: {self.name}")
"""
Gets names of sheet in the stored blank form.
@@ -768,7 +715,6 @@ class SubmissionType(BaseClass):
dict: Map of locations
"""
info = {k: v for k, v in self.info_map.items() if k != "custom"}
logger.debug(f"Info map: {info}")
match mode:
case "read":
output = {k: v[mode] for k, v in info.items() if v[mode]}
@@ -844,11 +790,9 @@ class SubmissionType(BaseClass):
"""
match equipment_role:
case str():
# logger.debug(f"Getting processes for equipmentrole str {equipment_role}")
relevant = [item.get_all_processes(kit) for item in self.submissiontype_equipmentrole_associations if
item.equipment_role.name == equipment_role]
case EquipmentRole():
# logger.debug(f"Getting processes for equipmentrole EquipmentRole {equipment_role}")
relevant = [item.get_all_processes(kit) for item in self.submissiontype_equipmentrole_associations if
item.equipment_role == equipment_role]
case _:
@@ -886,14 +830,12 @@ class SubmissionType(BaseClass):
query: Query = cls.__database_session__.query(cls)
match name:
case str():
# logger.debug(f"Looking up submission type by name str: {name}")
query = query.filter(cls.name == name)
limit = 1
case _:
pass
match key:
case str():
# logger.debug(f"Looking up submission type by info-map key str: {key}")
query = query.filter(cls.info_map.op('->')(key) is not None)
case _:
pass
@@ -946,7 +888,6 @@ class SubmissionType(BaseClass):
import_dict = yaml.load(stream=f, Loader=yaml.Loader)
else:
raise Exception(f"Filetype {filepath.suffix} not supported.")
# logger.debug(pformat(import_dict))
try:
submission_type = cls.query(name=import_dict['name'])
except KeyError:
@@ -1076,23 +1017,17 @@ class SubmissionTypeKitTypeAssociation(BaseClass):
query: Query = cls.__database_session__.query(cls)
match submission_type:
case SubmissionType():
# logger.debug(f"Looking up {cls.__name__} by SubmissionType {submission_type}")
query = query.filter(cls.submission_type == submission_type)
case str():
# logger.debug(f"Looking up {cls.__name__} by name {submission_type}")
query = query.join(SubmissionType).filter(SubmissionType.name == submission_type)
case int():
# logger.debug(f"Looking up {cls.__name__} by id {submission_type}")
query = query.join(SubmissionType).filter(SubmissionType.id == submission_type)
match kit_type:
case KitType():
# logger.debug(f"Looking up {cls.__name__} by KitType {kit_type}")
query = query.filter(cls.kit_type == kit_type)
case str():
# logger.debug(f"Looking up {cls.__name__} by name {kit_type}")
query = query.join(KitType).filter(KitType.name == kit_type)
case int():
# logger.debug(f"Looking up {cls.__name__} by id {kit_type}")
query = query.join(KitType).filter(KitType.id == kit_type)
limit = query.count()
return cls.execute_query(query=query, limit=limit)
@@ -1107,7 +1042,6 @@ class SubmissionTypeKitTypeAssociation(BaseClass):
exclude = ['_sa_instance_state', 'submission_types_id', 'kits_id', 'submission_type', 'kit_type']
base_dict = {k: v for k, v in self.__dict__.items() if k not in exclude}
base_dict['kit_type'] = self.kit_type.to_export_dict(submission_type=self.submission_type)
# logger.debug(f"STKTA returning: {base_dict}")
return base_dict
@@ -1128,10 +1062,11 @@ class KitTypeReagentRoleAssociation(BaseClass):
kit_type = relationship(KitType,
back_populates="kit_reagentrole_associations") #: relationship to associated KitType
# reference to the "ReagentType" object
# NOTE: reference to the "ReagentType" object
reagent_role = relationship(ReagentRole,
back_populates="reagentrole_kit_associations") #: relationship to associated ReagentType
# NOTE: reference to the "SubmissionType" object
submission_type = relationship(SubmissionType,
back_populates="submissiontype_kit_rt_associations") #: relationship to associated SubmissionType
@@ -1203,19 +1138,15 @@ class KitTypeReagentRoleAssociation(BaseClass):
query: Query = cls.__database_session__.query(cls)
match kit_type:
case KitType():
# logger.debug(f"Lookup KitTypeReagentTypeAssociation by kit_type KitType {kit_type}")
query = query.filter(cls.kit_type == kit_type)
case str():
# logger.debug(f"Lookup KitTypeReagentTypeAssociation by kit_type str {kit_type}")
query = query.join(KitType).filter(KitType.name == kit_type)
case _:
pass
match reagent_role:
case ReagentRole():
# logger.debug(f"Lookup KitTypeReagentTypeAssociation by reagent_type ReagentType {reagent_type}")
query = query.filter(cls.reagent_role == reagent_role)
case str():
# logger.debug(f"Lookup KitTypeReagentTypeAssociation by reagent_type ReagentType {reagent_type}")
query = query.join(ReagentRole).filter(ReagentRole.name == reagent_role)
case _:
pass
@@ -1242,7 +1173,6 @@ class KitTypeReagentRoleAssociation(BaseClass):
Returns:
Generator: Generates of reagents.
"""
# logger.debug(f"Attempting lookup of reagents by type: {reagent.type}")
reagents = self.reagent_role.instances
try:
regex = self.uses['exclude_regex']
@@ -1309,7 +1239,6 @@ class SubmissionReagentAssociation(BaseClass):
query: Query = cls.__database_session__.query(cls)
match reagent:
case Reagent() | str():
# logger.debug(f"Lookup SubmissionReagentAssociation by reagent Reagent {reagent}")
if isinstance(reagent, str):
reagent = Reagent.query(lot=reagent)
query = query.filter(cls.reagent == reagent)
@@ -1319,10 +1248,8 @@ class SubmissionReagentAssociation(BaseClass):
case BasicSubmission() | str():
if isinstance(submission, str):
submission = BasicSubmission.query(rsl_plate_num=submission)
# logger.debug(f"Lookup SubmissionReagentAssociation by submission BasicSubmission {submission}")
query = query.filter(cls.submission == submission)
case int():
# logger.debug(f"Lookup SubmissionReagentAssociation by submission id {submission}")
submission = BasicSubmission.query(id=submission)
query = query.join(BasicSubmission).filter(BasicSubmission.id == submission)
case _:
@@ -1439,21 +1366,18 @@ class Equipment(BaseClass, LogMixin):
query = cls.__database_session__.query(cls)
match name:
case str():
# logger.debug(f"Lookup Equipment by name str {name}")
query = query.filter(cls.name == name)
limit = 1
case _:
pass
match nickname:
case str():
# logger.debug(f"Lookup Equipment by nickname str {nickname}")
query = query.filter(cls.nickname == nickname)
limit = 1
case _:
pass
match asset_number:
case str():
# logger.debug(f"Lookup Equipment by asset_number str {asset_number}")
query = query.filter(cls.asset_number == asset_number)
limit = 1
case _:
@@ -1569,11 +1493,9 @@ class EquipmentRole(BaseClass):
PydEquipmentRole: This EquipmentRole as PydEquipmentRole
"""
from backend.validators.pydant import PydEquipmentRole
# logger.debug("Creating list of PydEquipment in this role")
equipment = [item.to_pydantic(submission_type=submission_type, extraction_kit=extraction_kit) for item in
self.instances]
pyd_dict = self.to_dict()
# logger.debug("Creating list of Processes in this role")
pyd_dict['processes'] = self.get_processes(submission_type=submission_type, extraction_kit=extraction_kit)
return PydEquipmentRole(equipment=equipment, **pyd_dict)
@@ -1595,14 +1517,12 @@ class EquipmentRole(BaseClass):
query = cls.__database_session__.query(cls)
match id:
case int():
# logger.debug(f"Lookup EquipmentRole by id {id}")
query = query.filter(cls.id == id)
limit = 1
case _:
pass
match name:
case str():
# logger.debug(f"Lookup EquipmentRole by name str {name}")
query = query.filter(cls.name == name)
limit = 1
case _:
@@ -1622,7 +1542,6 @@ class EquipmentRole(BaseClass):
List[Process]: List of processes
"""
if isinstance(submission_type, str):
# logger.debug(f"Checking if str {submission_type} exists")
submission_type = SubmissionType.query(name=submission_type)
if isinstance(extraction_kit, str):
extraction_kit = KitType.query(name=extraction_kit)
@@ -1808,7 +1727,6 @@ class Process(BaseClass):
query = cls.__database_session__.query(cls)
match name:
case str():
# logger.debug(f"Lookup Process with name str {name}")
query = query.filter(cls.name == name)
limit = 1
case _:
@@ -1892,13 +1810,11 @@ class Tips(BaseClass, LogMixin):
query = cls.__database_session__.query(cls)
match name:
case str():
# logger.debug(f"Lookup Equipment by name str {name}")
query = query.filter(cls.name == name)
case _:
pass
match lot:
case str():
# logger.debug(f"Lookup Equipment by nickname str {nickname}")
query = query.filter(cls.lot == lot)
limit = 1
case _:

View File

@@ -65,7 +65,6 @@ class Organization(BaseClass):
pass
match name:
case str():
# logger.debug(f"Looking up organization with name starting with: {name}")
query = query.filter(cls.name.startswith(name))
limit = 1
case _:
@@ -159,21 +158,18 @@ class Contact(BaseClass):
query: Query = cls.__database_session__.query(cls)
match name:
case str():
# logger.debug(f"Looking up contact with name: {name}")
query = query.filter(cls.name == name.title())
limit = 1
case _:
pass
match email:
case str():
# logger.debug(f"Looking up contact with email: {name}")
query = query.filter(cls.email == email)
limit = 1
case _:
pass
match phone:
case str():
# logger.debug(f"Looking up contact with phone: {name}")
query = query.filter(cls.phone == phone)
limit = 1
case _:
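
Nearly every lookup touched by this commit follows the same shape: successive match/case arms that each narrow the query, with a string argument also capping the result at one row. A self-contained sketch of that shape, using a toy Contact model and a hypothetical query_contacts helper:

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Contact(Base):
    __tablename__ = "contact"
    id = Column(Integer, primary_key=True)
    name = Column(String)
    email = Column(String)

def query_contacts(session: Session, name: str | None = None,
                   email: str | None = None, limit: int = 0):
    # Each match arm narrows the query; a string argument also caps the
    # result at a single row, mirroring the lookups in this commit.
    query = session.query(Contact)
    match name:
        case str():
            query = query.filter(Contact.name == name.title())
            limit = 1
        case _:
            pass
    match email:
        case str():
            query = query.filter(Contact.email == email)
            limit = 1
        case _:
            pass
    return query.first() if limit == 1 else query.all()

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Contact(name="Ada Lovelace", email="ada@example.com"))
    session.commit()
    print(query_contacts(session, name="ada lovelace").email)  # ada@example.com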

File diff suppressed because it is too large.

View File

@@ -1,6 +1,6 @@
'''
"""
contains parser objects for pulling values from client generated submission sheets.
'''
"""
import logging
from copy import copy
from getpass import getuser
@@ -53,7 +53,6 @@ class SheetParser(object):
self.parse_samples()
self.parse_equipment()
self.parse_tips()
# logger.debug(f"Parser.sub after info scrape: {pformat(self.sub)}")
def parse_info(self):
"""
@@ -71,7 +70,6 @@ class SheetParser(object):
logger.info(
f"Checking for updated submission type: {self.submission_type.name} against new: {info['submission_type']['value']}")
if self.submission_type.name != info['submission_type']['value']:
# logger.debug(f"info submission type: {info}")
if check:
self.submission_type = SubmissionType.query(name=info['submission_type']['value'])
logger.info(f"Updated self.submission_type to {self.submission_type}. Rerunning parse.")
@@ -90,11 +88,9 @@ class SheetParser(object):
"""
if extraction_kit is None:
extraction_kit = self.sub['extraction_kit']
# logger.debug(f"Parsing reagents for {extraction_kit}")
parser = ReagentParser(xl=self.xl, submission_type=self.submission_type,
extraction_kit=extraction_kit)
self.sub['reagents'] = parser.parse_reagents()
# logger.debug(f"Reagents out of parser: {pformat(self.sub['reagents'])}")
def parse_samples(self):
"""
@@ -155,7 +151,6 @@ class InfoParser(object):
submission_type (str | SubmissionType): Type of submission expected (Wastewater, Bacterial Culture, etc.)
sub_object (BasicSubmission | None, optional): Submission object holding methods. Defaults to None.
"""
logger.info(f"\n\nHello from InfoParser!\n\n")
if isinstance(submission_type, str):
submission_type = SubmissionType.query(name=submission_type)
if sub_object is None:
@@ -164,7 +159,6 @@ class InfoParser(object):
self.sub_object = sub_object
self.map = self.fetch_submission_info_map()
self.xl = xl
# logger.debug(f"Info map for InfoParser: {pformat(self.map)}")
def fetch_submission_info_map(self) -> dict:
"""
@@ -174,7 +168,6 @@ class InfoParser(object):
dict: Location map of all info for this submission type
"""
self.submission_type = dict(value=self.submission_type_obj.name, missing=True)
# logger.debug(f"Looking up submission type: {self.submission_type['value']}")
info_map = self.sub_object.construct_info_map(submission_type=self.submission_type_obj, mode="read")
# NOTE: Get the parse_info method from the submission type specified
return info_map
@@ -188,7 +181,6 @@ class InfoParser(object):
"""
dicto = {}
# NOTE: This loop parses generic info
# logger.debug(f"Map: {self.map}")
for sheet in self.xl.sheetnames:
ws = self.xl[sheet]
relevant = []
@@ -197,11 +189,8 @@ class InfoParser(object):
if k == "custom":
continue
if isinstance(v, str):
logger.debug(f"Found string for {k}, setting value to {v}")
dicto[k] = dict(value=v, missing=False)
continue
# logger.debug(f"Looking for {k} in self.map")
# logger.debug(f"Locations: {v}")
for location in v:
try:
check = location['sheet'] == sheet
@@ -213,21 +202,18 @@ class InfoParser(object):
new = location
new['name'] = k
relevant.append(new)
# logger.debug(f"relevant map for {sheet}: {pformat(relevant)}")
# NOTE: make sure relevant is not an empty list.
if not relevant:
continue
for item in relevant:
# NOTE: Get cell contents at this location
value = ws.cell(row=item['row'], column=item['column']).value
# logger.debug(f"Value for {item['name']} = {value}")
match item['name']:
case "submission_type":
value, missing = is_missing(value)
value = value.title()
case "submitted_date":
value, missing = is_missing(value)
logger.debug(f"Parsed submitted date: {value}")
# NOTE: is field a JSON? Includes: Extraction info, PCR info, comment, custom
case thing if thing in self.sub_object.jsons():
value, missing = is_missing(value)
@@ -240,7 +226,6 @@ class InfoParser(object):
logger.error(f"New value for {item['name']}")
case _:
value, missing = is_missing(value)
# logger.debug(f"Setting {item} on {sheet} to {value}")
if item['name'] not in dicto.keys():
try:
dicto[item['name']] = dict(value=value, missing=missing)
@@ -264,7 +249,6 @@ class ReagentParser(object):
extraction_kit (str): Extraction kit used.
sub_object (BasicSubmission | None, optional): Submission object holding methods. Defaults to None.
"""
logger.info("\n\nHello from ReagentParser!\n\n")
if isinstance(submission_type, str):
submission_type = SubmissionType.query(name=submission_type)
self.submission_type_obj = submission_type
@@ -272,9 +256,7 @@ class ReagentParser(object):
if isinstance(extraction_kit, dict):
extraction_kit = extraction_kit['value']
self.kit_object = KitType.query(name=extraction_kit)
logger.debug(f"Got extraction kit object: {self.kit_object}")
self.map = self.fetch_kit_info_map(submission_type=submission_type)
logger.debug(f"Reagent Parser map: {self.map}")
self.xl = xl
@report_result
@@ -298,14 +280,11 @@ class ReagentParser(object):
del reagent_map['info']
except KeyError:
pass
# logger.debug(f"Reagent map: {pformat(reagent_map)}")
# NOTE: If reagent map is empty, maybe the wrong kit was given, check if there's only one kit for that submission type and use it if so.
if not reagent_map:
temp_kit_object = self.submission_type_obj.get_default_kit()
# logger.debug(f"Temp kit: {temp_kit_object}")
if temp_kit_object:
self.kit_object = temp_kit_object
# reagent_map = {k: v for k, v in self.kit_object.construct_xl_map_for_use(submission_type)}
logger.warning(f"Attempting to salvage with default kit {self.kit_object} and submission_type: {self.submission_type_obj}")
return self.fetch_kit_info_map(submission_type=self.submission_type_obj)
else:
@@ -331,18 +310,15 @@ class ReagentParser(object):
for sheet in self.xl.sheetnames:
ws = self.xl[sheet]
relevant = {k.strip(): v for k, v in self.map.items() if sheet in self.map[k]['sheet']}
# logger.debug(f"relevant map for {sheet}: {pformat(relevant)}")
if relevant == {}:
continue
for item in relevant:
# logger.debug(f"Attempting to scrape: {item}")
try:
reagent = relevant[item]
name = ws.cell(row=reagent['name']['row'], column=reagent['name']['column']).value
lot = ws.cell(row=reagent['lot']['row'], column=reagent['lot']['column']).value
expiry = ws.cell(row=reagent['expiry']['row'], column=reagent['expiry']['column']).value
if 'comment' in relevant[item].keys():
# logger.debug(f"looking for {relevant[item]} comment.")
comment = ws.cell(row=reagent['comment']['row'], column=reagent['comment']['column']).value
else:
comment = ""
@@ -353,10 +329,7 @@ class ReagentParser(object):
missing = False
else:
missing = True
# logger.debug(f"Got lot for {item}-{name}: {lot} as {type(lot)}")
lot = str(lot)
# logger.debug(
# f"Going into pydantic: name: {name}, lot: {lot}, expiry: {expiry}, type: {item.strip()}, comment: {comment}")
try:
check = name.lower() != "not applicable"
except AttributeError:
@@ -381,12 +354,10 @@ class SampleParser(object):
sample_map (dict | None, optional): Locations in database where samples are found. Defaults to None.
sub_object (BasicSubmission | None, optional): Submission object holding methods. Defaults to None.
"""
logger.info("\n\nHello from SampleParser!\n\n")
self.samples = []
self.xl = xl
if isinstance(submission_type, str):
submission_type = SubmissionType.query(name=submission_type)
# logger.debug(f"Sample parser is using submission type: {submission_type}")
self.submission_type = submission_type.name
self.submission_type_obj = submission_type
if sub_object is None:
@@ -395,7 +366,6 @@ class SampleParser(object):
sub_object = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type)
self.sub_object = sub_object
self.sample_info_map = self.fetch_sample_info_map(submission_type=submission_type, sample_map=sample_map)
# logger.debug(f"sample_info_map: {self.sample_info_map}")
self.plate_map_samples = self.parse_plate_map()
self.lookup_samples = self.parse_lookup_table()
@@ -409,11 +379,8 @@ class SampleParser(object):
Returns:
dict: Info locations.
"""
# logger.debug(f"Looking up submission type: {submission_type}")
self.sample_type = self.sub_object.get_default_info("sample_type", submission_type=submission_type)
self.samp_object = BasicSample.find_polymorphic_subclass(polymorphic_identity=self.sample_type)
# logger.debug(f"Got sample class: {self.samp_object.__name__}")
# logger.debug(f"info_map: {pformat(se)}")
if sample_map is None:
sample_info_map = self.sub_object.construct_sample_map(submission_type=self.submission_type_obj)
else:
@@ -432,9 +399,7 @@ class SampleParser(object):
ws = self.xl[smap['sheet']]
plate_map_samples = []
for ii, row in enumerate(range(smap['start_row'], smap['end_row'] + 1), start=1):
# logger.debug(f"Parsing row: {row}")
for jj, column in enumerate(range(smap['start_column'], smap['end_column'] + 1), start=1):
# logger.debug(f"Parsing column: {column}")
id = str(ws.cell(row=row, column=column).value)
if check_not_nan(id):
if id not in invalids:
@@ -442,10 +407,8 @@ class SampleParser(object):
sample_dict['sample_type'] = self.sample_type
plate_map_samples.append(sample_dict)
else:
# logger.error(f"Sample cell ({row}, {column}) has invalid value: {id}.")
pass
else:
# logger.error(f"Sample cell ({row}, {column}) has no info: {id}.")
pass
return plate_map_samples
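
parse_plate_map above walks a rectangular plate grid; a sketch of the same walk, with the project's check_not_nan swapped for a plain None/invalid check and the bounds set for a 96-well layout (all names and bounds here are illustrative):

from openpyxl import Workbook

def parse_plate_map(ws, start_row=2, end_row=9, start_col=2, end_col=13,
                    invalids=("None", "")):
    # Walk the 8x12 grid and keep wells that hold a real sample id,
    # recording their 1-based row/column within the plate.
    samples = []
    for ii, row in enumerate(range(start_row, end_row + 1), start=1):
        for jj, column in enumerate(range(start_col, end_col + 1), start=1):
            value = ws.cell(row=row, column=column).value
            if value is not None and str(value) not in invalids:
                samples.append(dict(submitter_id=str(value), row=ii, column=jj))
    return samples

wb = Workbook()
ws = wb.active
ws.cell(row=2, column=2, value="SAMPLE-001")
print(parse_plate_map(ws))   # [{'submitter_id': 'SAMPLE-001', 'row': 1, 'column': 1}]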
@@ -507,7 +470,6 @@ class SampleParser(object):
except (KeyError, IndexError):
check = False
if check:
# logger.debug(f"Direct match found for {psample['id']}")
new = lookup_samples[ii] | psample
lookup_samples[ii] = {}
else:
@@ -516,7 +478,6 @@ class SampleParser(object):
if merge_on_id in sample.keys()]
jj, new = next(((jj, lsample | psample) for jj, lsample in searchables
if lsample[merge_on_id] == psample['id']), (-1, psample))
# logger.debug(f"Assigning from index {jj} - {new}")
if jj >= 0:
lookup_samples[jj] = {}
if not check_key_or_attr(key='submitter_id', interest=new, check_none=True):
@@ -540,7 +501,6 @@ class EquipmentParser(object):
xl (Workbook): Openpyxl workbook from submitted excel file.
submission_type (str | SubmissionType): Type of submission expected (Wastewater, Bacterial Culture, etc.)
"""
logger.info("\n\nHello from EquipmentParser!\n\n")
if isinstance(submission_type, str):
submission_type = SubmissionType.query(name=submission_type)
self.submission_type = submission_type
@@ -567,7 +527,6 @@ class EquipmentParser(object):
str: asset number
"""
regex = Equipment.get_regex()
# logger.debug(f"Using equipment regex: {regex} on {input}")
try:
return regex.search(input).group().strip("-")
except AttributeError as e:
@@ -581,8 +540,6 @@ class EquipmentParser(object):
Returns:
List[dict]: list of equipment
"""
# logger.debug(f"Equipment parser going into parsing: {pformat(self.__dict__)}")
# logger.debug(f"Sheets: {sheets}")
for sheet in self.xl.sheetnames:
ws = self.xl[sheet]
try:
@@ -590,17 +547,14 @@ class EquipmentParser(object):
except (TypeError, KeyError) as e:
logger.error(f"Error creating relevant equipment list: {e}")
continue
# logger.debug(f"Relevant equipment: {pformat(relevant)}")
previous_asset = ""
for k, v in relevant.items():
# logger.debug(f"Checking: {v}")
asset = ws.cell(v['name']['row'], v['name']['column']).value
if not check_not_nan(asset):
asset = previous_asset
else:
previous_asset = asset
asset = self.get_asset_number(input=asset)
# logger.debug(f"asset: {asset}")
eq = Equipment.query(asset_number=asset)
if eq is None:
eq = Equipment.query(name=asset)
@@ -623,7 +577,6 @@ class TipParser(object):
xl (Workbook): Openpyxl workbook from submitted excel file.
submission_type (str | SubmissionType): Type of submission expected (Wastewater, Bacterial Culture, etc.)
"""
logger.info("\n\nHello from TipParser!\n\n")
if isinstance(submission_type, str):
submission_type = SubmissionType.query(name=submission_type)
self.submission_type = submission_type
@@ -646,8 +599,6 @@ class TipParser(object):
Returns:
List[dict]: list of equipment
"""
# logger.debug(f"Equipment parser going into parsing: {pformat(self.__dict__)}")
# logger.debug(f"Sheets: {sheets}")
for sheet in self.xl.sheetnames:
ws = self.xl[sheet]
try:
@@ -655,7 +606,6 @@ class TipParser(object):
except (TypeError, KeyError) as e:
logger.error(f"Error creating relevant equipment list: {e}")
continue
# logger.debug(f"Relevant equipment: {pformat(relevant)}")
previous_asset = ""
for k, v in relevant.items():
asset = ws.cell(v['name']['row'], v['name']['column']).value
@@ -667,7 +617,6 @@ class TipParser(object):
asset = previous_asset
else:
previous_asset = asset
# logger.debug(f"asset: {asset}")
eq = Tips.query(lot=lot, name=asset, limit=1)
try:
yield dict(name=eq.name, role=k, lot=lot)
@@ -684,7 +633,6 @@ class PCRParser(object):
filepath (Path | None, optional): file to parse. Defaults to None.
submission (BasicSubmission | None, optional): Submission parsed data to be added to.
"""
# logger.debug(f'Parsing {filepath.__str__()}')
if filepath is None:
logger.error('No filepath given.')
self.xl = None
@@ -727,5 +675,4 @@ class PCRParser(object):
value = row[1].value or ""
pcr[key] = value
pcr['imported_by'] = getuser()
# logger.debug(f"PCR: {pformat(pcr)}")
return pcr

View File

@@ -32,7 +32,6 @@ class ReportArchetype(object):
filename = filename.absolute()
self.writer = ExcelWriter(filename.with_suffix(".xlsx"), engine='openpyxl')
self.df.to_excel(self.writer, sheet_name=self.sheet_name)
# logger.debug(f"Writing report to: {filename}")
self.writer.close()
@@ -43,7 +42,6 @@ class ReportMaker(object):
self.end_date = end_date
# NOTE: Set page size to zero to override limiting query size.
self.subs = BasicSubmission.query(start_date=start_date, end_date=end_date, page_size=0)
# logger.debug(f"Number of subs returned: {len(self.subs)}")
if organizations is not None:
self.subs = [sub for sub in self.subs if sub.submitting_lab.name in organizations]
self.detailed_df, self.summary_df = self.make_report_xlsx()
@@ -65,10 +63,8 @@ class ReportMaker(object):
df2 = df.groupby(["submitting_lab", "extraction_kit"]).agg(
{'extraction_kit': 'count', 'cost': 'sum', 'sample_count': 'sum'})
df2 = df2.rename(columns={"extraction_kit": 'run_count'})
# logger.debug(f"Output daftaframe for xlsx: {df2.columns}")
df = df.drop('id', axis=1)
df = df.sort_values(['submitting_lab', "submitted_date"])
# logger.debug(f"Details dataframe:\n{df2}")
return df, df2
def make_report_html(self, df: DataFrame) -> str:
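
The summary frame built above is a groupby/agg over lab and kit; a minimal sketch of the same roll-up written with pandas named aggregation, using made-up rows:

from pandas import DataFrame

df = DataFrame({
    "submitting_lab": ["Lab A", "Lab A", "Lab B"],
    "extraction_kit": ["Kit 1", "Kit 1", "Kit 2"],
    "cost": [100.0, 150.0, 80.0],
    "sample_count": [8, 12, 6],
})
# One row per (lab, kit): count the runs, sum the costs and sample counts.
summary = df.groupby(["submitting_lab", "extraction_kit"]).agg(
    run_count=("cost", "count"),
    cost=("cost", "sum"),
    sample_count=("sample_count", "sum"),
)
print(summary)
# Lab A / Kit 1 -> run_count 2, cost 250.0, sample_count 20
# Lab B / Kit 2 -> run_count 1, cost  80.0, sample_count  6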
@@ -86,12 +82,8 @@ class ReportMaker(object):
"""
old_lab = ""
output = []
# logger.debug(f"Report DataFrame: {df}")
for row in df.iterrows():
# logger.debug(f"Row {ii}: {row}")
lab = row[0][0]
# logger.debug(f"Old lab: {old_lab}, Current lab: {lab}")
# logger.debug(f"Name: {row[0][1]}")
data = [item for item in row[1]]
kit = dict(name=row[0][1], cost=data[1], run_count=int(data[0]), sample_count=int(data[2]))
# NOTE: if this is the same lab as before add together
@@ -106,7 +98,6 @@ class ReportMaker(object):
total_runs=kit['run_count'])
output.append(adder)
old_lab = lab
# logger.debug(output)
dicto = {'start_date': self.start_date, 'end_date': self.end_date, 'labs': output}
temp = env.get_template('summary_report.html')
html = temp.render(input=dicto)
@@ -127,14 +118,12 @@ class ReportMaker(object):
self.summary_df.to_excel(self.writer, sheet_name="Report")
self.detailed_df.to_excel(self.writer, sheet_name="Details", index=False)
self.fix_up_xl()
# logger.debug(f"Writing report to: {filename}")
self.writer.close()
def fix_up_xl(self):
"""
Handles formatting of xl file, mediocrely.
"""
# logger.debug(f"Updating worksheet")
worksheet: Worksheet = self.writer.sheets['Report']
for idx, col in enumerate(self.summary_df, start=1): # NOTE: loop through all columns
series = self.summary_df[col]
@@ -149,7 +138,6 @@ class ReportMaker(object):
except ValueError as e:
logger.error(f"Couldn't resize column {col} due to {e}")
blank_row = get_first_blank_df_row(self.summary_df) + 1
# logger.debug(f"Blank row index = {blank_row}")
for col in range(3, 6):
col_letter = row_map[col]
worksheet.cell(row=blank_row, column=col, value=f"=SUM({col_letter}2:{col_letter}{str(blank_row - 1)})")
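
fix_up_xl's resizing measures each summary column and then drops a SUM row under the data; a compact openpyxl/pandas sketch (file name and frame contents are made up):

from openpyxl.utils import get_column_letter
from pandas import DataFrame, ExcelWriter

df = DataFrame({"submitting_lab": ["Lab A", "Lab B"], "cost": [1250.0, 980.5]})

with ExcelWriter("report.xlsx", engine="openpyxl") as writer:
    df.to_excel(writer, sheet_name="Report", index=False)
    worksheet = writer.sheets["Report"]
    # Size each column to its longest value (or header), plus a little padding.
    for idx, col in enumerate(df.columns, start=1):
        width = int(max(df[col].astype(str).map(len).max(), len(col))) + 2
        worksheet.column_dimensions[get_column_letter(idx)].width = width
    # Append a live SUM formula for the cost column under the data.
    total_row = len(df) + 2
    worksheet.cell(row=total_row, column=2, value=f"=SUM(B2:B{total_row - 1})")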

View File

@@ -3,7 +3,6 @@ contains writer objects for pushing values to submission sheet templates.
"""
import logging
from copy import copy
from datetime import date
from operator import itemgetter
from pprint import pformat
from typing import List, Generator, Tuple
@@ -111,7 +110,6 @@ class InfoWriter(object):
info_dict (dict): Dictionary of information to write.
sub_object (BasicSubmission | None, optional): Submission object containing methods. Defaults to None.
"""
logger.debug(f"Info_dict coming into InfoWriter: {pformat(info_dict)}")
if isinstance(submission_type, str):
submission_type = SubmissionType.query(name=submission_type)
if sub_object is None:
@@ -121,7 +119,6 @@ class InfoWriter(object):
self.xl = xl
self.info_map = submission_type.construct_info_map(mode='write')
self.info = self.reconcile_map(info_dict, self.info_map)
# logger.debug(pformat(self.info))
def reconcile_map(self, info_dict: dict, info_map: dict) -> Generator[(Tuple[str, dict]), None, None]:
"""
@@ -170,7 +167,6 @@ class InfoWriter(object):
logger.error(f"No locations for {k}, skipping")
continue
for loc in locations:
logger.debug(f"Writing {k} to {loc['sheet']}, row: {loc['row']}, column: {loc['column']}")
sheet = self.xl[loc['sheet']]
try:
sheet.cell(row=loc['row'], column=loc['column'], value=v['value'])
@@ -247,8 +243,6 @@ class ReagentWriter(object):
for v in reagent.values():
if not isinstance(v, dict):
continue
# logger.debug(
# f"Writing {reagent['type']} {k} to {reagent['sheet']}, row: {v['row']}, column: {v['column']}")
sheet.cell(row=v['row'], column=v['column'], value=v['value'])
return self.xl
@@ -288,7 +282,6 @@ class SampleWriter(object):
multiples = ['row', 'column', 'assoc_id', 'submission_rank']
for sample in sample_list:
sample = self.submission_type.get_submission_class().custom_sample_writer(sample)
logger.debug(f"Writing sample: {sample}")
for assoc in zip(sample['row'], sample['column'], sample['submission_rank']):
new = dict(row=assoc[0], column=assoc[1], submission_rank=assoc[2])
for k, v in sample.items():
@@ -369,9 +362,8 @@ class EquipmentWriter(object):
mp_info = equipment_map[equipment['role']]
except KeyError:
logger.error(f"No {equipment['role']} in {pformat(equipment_map)}")
# logger.debug(f"{equipment['role']} map: {mp_info}")
mp_info = None
placeholder = copy(equipment)
# if mp_info == {}:
if not mp_info:
for jj, (k, v) in enumerate(equipment.items(), start=1):
dicto = dict(value=v, row=ii, column=jj)
@@ -381,7 +373,6 @@ class EquipmentWriter(object):
try:
dicto = dict(value=v, row=mp_info[k]['row'], column=mp_info[k]['column'])
except KeyError as e:
# logger.error(f"Keyerror: {e}")
continue
placeholder[k] = dicto
if "asset_number" not in mp_info.keys():
@@ -400,17 +391,12 @@ class EquipmentWriter(object):
Workbook: Workbook with equipment written
"""
for equipment in self.equipment:
try:
sheet = self.xl[equipment['sheet']]
except KeyError:
if not equipment['sheet'] in self.xl.sheetnames:
self.xl.create_sheet("Equipment")
finally:
sheet = self.xl[equipment['sheet']]
for k, v in equipment.items():
if not isinstance(v, dict):
continue
# logger.debug(
# f"Writing {k}: {v['value']} to {equipment['sheet']}, row: {v['row']}, column: {v['column']}")
if isinstance(v['value'], list):
v['value'] = v['value'][0]
try:
@@ -455,7 +441,6 @@ class TipWriter(object):
return
for ii, tips in enumerate(tips_list, start=1):
mp_info = tips_map[tips.role]
# logger.debug(f"{tips['role']} map: {mp_info}")
placeholder = {}
if mp_info == {}:
for jj, (k, v) in enumerate(tips.__dict__.items(), start=1):
@@ -466,14 +451,12 @@ class TipWriter(object):
try:
dicto = dict(value=v, row=mp_info[k]['row'], column=mp_info[k]['column'])
except KeyError as e:
# logger.error(f"Keyerror: {e}")
continue
placeholder[k] = dicto
try:
placeholder['sheet'] = mp_info['sheet']
except KeyError:
placeholder['sheet'] = "Tips"
# logger.debug(f"Final output of {tips['role']} : {placeholder}")
yield placeholder
def write_tips(self) -> Workbook:
@@ -484,17 +467,12 @@ class TipWriter(object):
Workbook: Workbook with tips written
"""
for tips in self.tips:
try:
sheet = self.xl[tips['sheet']]
except KeyError:
if not tips['sheet'] in self.xl.sheetnames:
self.xl.create_sheet("Tips")
finally:
sheet = self.xl[tips['sheet']]
for k, v in tips.items():
if not isinstance(v, dict):
continue
# logger.debug(
# f"Writing {k}: {v['value']} to {equipment['sheet']}, row: {v['row']}, column: {v['column']}")
if isinstance(v['value'], list):
v['value'] = v['value'][0]
try:
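
The rewritten sheet handling in EquipmentWriter and TipWriter above checks sheetnames instead of catching KeyError; the same idea as a generic openpyxl helper (helper name hypothetical):

from openpyxl import Workbook

def get_or_create_sheet(wb: Workbook, name: str):
    # wb[name] raises KeyError for a missing sheet, so test sheetnames first
    # and create the sheet on demand.
    if name not in wb.sheetnames:
        wb.create_sheet(name)
    return wb[name]

wb = Workbook()
sheet = get_or_create_sheet(wb, "Equipment")
sheet.cell(row=1, column=1, value="asset_number")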

View File

@@ -1,7 +1,7 @@
from .irida import import_irida
def hello(ctx):
print("\n\nHello!\n\n")
print("\n\nHello! Welcome to Robotics Submission Tracker.\n\n")
def goodbye(ctx):
print("\n\nGoodbye\n\n")
print("\n\nGoodbye. Thank you for using Robotics Submission Tracker.\n\n")

View File

@@ -19,11 +19,10 @@ def import_irida(ctx:Settings):
existing_controls = [item.name for item in IridaControl.query()]
prm_list = ", ".join([f"'{thing}'" for thing in existing_controls])
ctrl_db_path = ctx.directory_path.joinpath("submissions_parser_output", "submissions.db")
# print(f"Incoming settings: {pformat(ctx)}")
try:
conn = sqlite3.connect(ctrl_db_path)
except AttributeError as e:
print(f"Error, could not import from irida due to {e}")
logger.error(f"Error, could not import from irida due to {e}")
return
sql = f"SELECT name, submitted_date, submission_id, contains, matches, kraken, subtype, refseq_version, " \
f"kraken2_version, kraken2_db_version, sample_id FROM _iridacontrol INNER JOIN _control on _control.id " \
@@ -32,8 +31,6 @@ def import_irida(ctx:Settings):
records = [dict(name=row[0], submitted_date=row[1], submission_id=row[2], contains=row[3], matches=row[4], kraken=row[5],
subtype=row[6], refseq_version=row[7], kraken2_version=row[8], kraken2_db_version=row[9],
sample_id=row[10]) for row in cursor]
# incoming_controls = set(item['name'] for item in records)
# relevant = list(incoming_controls - existing_controls)
for record in records:
instance = IridaControl.query(name=record['name'])
if instance:
@@ -52,5 +49,4 @@ def import_irida(ctx:Settings):
if sample:
instance.sample = sample
instance.submission = sample.submissions[0]
# pprint(instance.__dict__)
instance.save()
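
import_irida's record building reads straight from an external SQLite file; a sketch of the same read using sqlite3.Row so columns come back by name (the path argument and the trimmed query are placeholders):

import sqlite3

def fetch_control_records(db_path: str) -> list[dict]:
    # Open the external database and reshape each row as a dict keyed by column name.
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    try:
        cursor = conn.execute(
            "SELECT name, submitted_date, submission_id FROM _control")
        return [dict(row) for row in cursor]
    finally:
        conn.close()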

View File

@@ -24,11 +24,9 @@ class RSLNamer(object):
filename = Path(filename) if Path(filename).exists() else filename
self.submission_type = sub_type
if not self.submission_type:
# logger.debug("Creating submission type because none exists")
self.submission_type = self.retrieve_submission_type(filename=filename)
logger.info(f"got submission type: {self.submission_type}")
if self.submission_type:
# logger.debug("Retrieving BasicSubmission subclass")
self.sub_object = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type)
self.parsed_name = self.retrieve_rsl_number(filename=filename, regex=self.sub_object.get_regex(submission_type=sub_type))
if not data:
@@ -52,7 +50,6 @@ class RSLNamer(object):
str: parsed submission type
"""
def st_from_path(filename:Path) -> str:
# logger.info(f"Using path method for {filename}.")
if filename.exists():
wb = load_workbook(filename)
try:
@@ -73,12 +70,9 @@ class RSLNamer(object):
if filename.startswith("tmp"):
return "Bacterial Culture"
regex = BasicSubmission.construct_regex()
# logger.info(f"Using string method for {filename}.")
# logger.debug(f"Using regex: {regex}")
m = regex.search(filename)
try:
submission_type = m.lastgroup
# logger.debug(f"Got submission type: {submission_type}")
except AttributeError as e:
submission_type = None
logger.critical(f"No submission type found or submission type found!: {e}")
@@ -98,7 +92,6 @@ class RSLNamer(object):
if check:
if "pytest" in sys.modules:
raise ValueError("Submission Type came back as None.")
# logger.debug("Final option, ask the user for submission type")
from frontend.widgets import ObjectSelector
dlg = ObjectSelector(title="Couldn't parse submission type.",
message="Please select submission type from list below.", obj_type=SubmissionType)
@@ -116,21 +109,17 @@ class RSLNamer(object):
regex (str): string to construct pattern
filename (str): string to be parsed
"""
logger.info(f"Input string to be parsed: {filename}")
if regex is None:
regex = BasicSubmission.construct_regex()
else:
# logger.debug(f"Incoming regex: {regex}")
try:
regex = re.compile(rf'{regex}', re.IGNORECASE | re.VERBOSE)
except re.error as e:
regex = BasicSubmission.construct_regex()
logger.info(f"Using regex: {regex}")
match filename:
case Path():
m = regex.search(filename.stem)
case str():
# logger.debug(f"Using string method.")
m = regex.search(filename)
case _:
m = None
@@ -141,7 +130,6 @@ class RSLNamer(object):
parsed_name = None
else:
parsed_name = None
# logger.debug(f"Got parsed submission name: {parsed_name}")
return parsed_name
@classmethod
@@ -187,8 +175,6 @@ class RSLNamer(object):
Returns:
str: output file name.
"""
# logger.debug(f"Kwargs: {kwargs}")
# logger.debug(f"Template: {template}")
environment = jinja_template_loading()
template = environment.from_string(template)
return template.render(**kwargs)

View File

@@ -1,6 +1,6 @@
'''
"""
Contains pydantic models and accompanying validators
'''
"""
from __future__ import annotations
import uuid, re, logging, csv, sys
from pydantic import BaseModel, field_validator, Field, model_validator
@@ -123,18 +123,14 @@ class PydReagent(BaseModel):
Tuple[Reagent, Report]: Reagent instance and result of function
"""
report = Report()
# logger.debug("Adding extra fields.")
if self.model_extra is not None:
self.__dict__.update(self.model_extra)
# logger.debug(f"Reagent SQL constructor is looking up type: {self.type}, lot: {self.lot}")
reagent = Reagent.query(lot=self.lot, name=self.name)
# logger.debug(f"Result: {reagent}")
if reagent is None:
reagent = Reagent()
for key, value in self.__dict__.items():
if isinstance(value, dict):
value = value['value']
# logger.debug(f"Reagent info item for {key}: {value}")
# NOTE: set fields based on keys in dictionary
match key:
case "lot":
@@ -149,7 +145,6 @@ class PydReagent(BaseModel):
if isinstance(value, str):
value = date(year=1970, month=1, day=1)
value = datetime.combine(value, datetime.min.time())
logger.debug(f"Expiry date coming into sql: {value} with type {type(value)}")
reagent.expiry = value.replace(tzinfo=timezone)
case _:
try:
@@ -179,14 +174,12 @@ class PydSample(BaseModel, extra='allow'):
@model_validator(mode='after')
@classmethod
def validate_model(cls, data):
# logger.debug(f"Data for pydsample: {data}")
model = BasicSample.find_polymorphic_subclass(polymorphic_identity=data.sample_type)
for k, v in data.model_extra.items():
if k in model.timestamps():
if isinstance(v, str):
v = datetime.strptime(v, "%Y-%m-%d")
data.__setattr__(k, v)
# logger.debug(f"Data coming out of validation: {pformat(data)}")
return data
@field_validator("row", "column", "assoc_id", "submission_rank")
@@ -238,7 +231,6 @@ class PydSample(BaseModel, extra='allow'):
"""
report = None
self.__dict__.update(self.model_extra)
# logger.debug(f"Here is the incoming sample dict: \n{self.__dict__}")
instance = BasicSample.query_or_create(sample_type=self.sample_type, submitter_id=self.submitter_id)
for key, value in self.__dict__.items():
match key:
@@ -246,7 +238,6 @@ class PydSample(BaseModel, extra='allow'):
case "row" | "column":
continue
case _:
# logger.debug(f"Setting sample field {key} to {value}")
instance.__setattr__(key, value)
out_associations = []
if submission is not None:
@@ -254,15 +245,12 @@ class PydSample(BaseModel, extra='allow'):
submission = BasicSubmission.query(rsl_plate_num=submission)
assoc_type = submission.submission_type_name
for row, column, aid, submission_rank in zip(self.row, self.column, self.assoc_id, self.submission_rank):
# logger.debug(f"Looking up association with identity: ({submission.submission_type_name} Association)")
# logger.debug(f"Looking up association with identity: ({assoc_type} Association)")
association = SubmissionSampleAssociation.query_or_create(association_type=f"{assoc_type} Association",
submission=submission,
sample=instance,
row=row, column=column, id=aid,
submission_rank=submission_rank,
**self.model_extra)
# logger.debug(f"Using submission_sample_association: {association}")
try:
out_associations.append(association)
except IntegrityError as e:
@@ -332,7 +320,6 @@ class PydEquipment(BaseModel, extra='ignore'):
@field_validator('processes', mode='before')
@classmethod
def make_empty_list(cls, value):
# logger.debug(f"Pydantic value: {value}")
if isinstance(value, GeneratorType):
value = [item.name for item in value]
value = convert_nans_to_nones(value)
@@ -355,7 +342,6 @@ class PydEquipment(BaseModel, extra='ignore'):
Tuple[Equipment, SubmissionEquipmentAssociation]: SQL objects
"""
if isinstance(submission, str):
# logger.debug(f"Got string, querying {submission}")
submission = BasicSubmission.query(rsl_plate_num=submission)
equipment = Equipment.query(asset_number=self.asset_number)
if equipment is None:
@@ -403,7 +389,6 @@ class PydEquipment(BaseModel, extra='ignore'):
class PydSubmission(BaseModel, extra='allow'):
filepath: Path
submission_type: dict | None
# For defaults
submitter_plate_num: dict | None = Field(default=dict(value=None, missing=True), validate_default=True)
submitted_date: dict | None
rsl_plate_num: dict | None = Field(default=dict(value=None, missing=True), validate_default=True)
@@ -427,7 +412,6 @@ class PydSubmission(BaseModel, extra='allow'):
if isinstance(value, dict):
value = value['value']
if isinstance(value, Generator):
# logger.debug("We have a generator")
return [PydTips(**tips) for tips in value]
if not value:
return []
@@ -436,9 +420,7 @@ class PydSubmission(BaseModel, extra='allow'):
@field_validator('equipment', mode='before')
@classmethod
def convert_equipment_dict(cls, value):
# logger.debug(f"Equipment: {value}")
if isinstance(value, Generator):
logger.debug("We have a generator")
return [PydEquipment(**equipment) for equipment in value]
if isinstance(value, dict):
return value['value']
@@ -454,7 +436,6 @@ class PydSubmission(BaseModel, extra='allow'):
@field_validator("submitter_plate_num")
@classmethod
def enforce_with_uuid(cls, value):
# logger.debug(f"submitter_plate_num coming into pydantic: {value}")
if value['value'] in [None, "None"]:
return dict(value=uuid.uuid4().hex.upper(), missing=True)
else:
@@ -464,7 +445,6 @@ class PydSubmission(BaseModel, extra='allow'):
@field_validator("submitted_date", mode="before")
@classmethod
def rescue_date(cls, value):
# logger.debug(f"\n\nDate coming into pydantic: {value}\n\n")
try:
check = value['value'] is None
except TypeError:
@@ -509,7 +489,6 @@ class PydSubmission(BaseModel, extra='allow'):
@classmethod
def lookup_submitting_lab(cls, value):
if isinstance(value['value'], str):
# logger.debug(f"Looking up organization {value['value']}")
try:
value['value'] = Organization.query(name=value['value']).name
except AttributeError:
@@ -540,13 +519,11 @@ class PydSubmission(BaseModel, extra='allow'):
@field_validator("rsl_plate_num")
@classmethod
def rsl_from_file(cls, value, values):
# logger.debug(f"RSL-plate initial value: {value['value']} and other values: {values.data}")
sub_type = values.data['submission_type']['value']
if check_not_nan(value['value']):
value['value'] = value['value'].strip()
return value
else:
# logger.debug("Constructing plate sub_type.")
if "pytest" in sys.modules and sub_type.replace(" ", "") == "BasicSubmission":
output = "RSL-BS-Test001"
else:
@@ -623,7 +600,6 @@ class PydSubmission(BaseModel, extra='allow'):
@classmethod
def expand_reagents(cls, value):
if isinstance(value, Generator):
# logger.debug("We have a generator")
return [PydReagent(**reagent) for reagent in value]
return value
@@ -631,7 +607,6 @@ class PydSubmission(BaseModel, extra='allow'):
@classmethod
def expand_samples(cls, value):
if isinstance(value, Generator):
# logger.debug("We have a generator")[
return [PydSample(**sample) for sample in value]
return value
@@ -656,7 +631,6 @@ class PydSubmission(BaseModel, extra='allow'):
@field_validator("cost_centre")
@classmethod
def get_cost_centre(cls, value, values):
# logger.debug(f"Value coming in for cost_centre: {value}")
match value['value']:
case None:
from backend.db.models import Organization
@@ -671,7 +645,6 @@ class PydSubmission(BaseModel, extra='allow'):
@field_validator("contact")
@classmethod
def get_contact_from_org(cls, value, values):
# logger.debug(f"Checking on value: {value}")
match value:
case dict():
if isinstance(value['value'], tuple):
@@ -684,7 +657,6 @@ class PydSubmission(BaseModel, extra='allow'):
if check is None:
org = Organization.query(name=values.data['submitting_lab']['value'])
contact = org.contacts[0].name
# logger.debug(f"Pulled: {contact}")
if isinstance(contact, tuple):
contact = contact[0]
return dict(value=contact, missing=True)
@@ -692,7 +664,6 @@ class PydSubmission(BaseModel, extra='allow'):
return value
def __init__(self, run_custom: bool = False, **data):
logger.debug(f"{__name__} input data: {data}")
super().__init__(**data)
# NOTE: this could also be done with default_factory
self.submission_object = BasicSubmission.find_polymorphic_subclass(
@@ -755,13 +726,11 @@ class PydSubmission(BaseModel, extra='allow'):
except TypeError:
pass
else:
# logger.debug("Extracting 'value' from attributes")
output = {k: self.filter_field(k) for k in fields}
return output
def filter_field(self, key: str):
item = getattr(self, key)
# logger.debug(f"Attempting deconstruction of {key}: {item} with type {type(item)}")
match item:
case dict():
try:
@@ -793,13 +762,10 @@ class PydSubmission(BaseModel, extra='allow'):
"""
report = Report()
dicto = self.improved_dict()
# logger.warning(f"\n\nQuery or create: {self.submission_type['value']}, {self.rsl_plate_num['value']}")
instance, result = BasicSubmission.query_or_create(submission_type=self.submission_type['value'],
rsl_plate_num=self.rsl_plate_num['value'])
logger.debug(f"Result of query or create: {instance}")
report.add_result(result)
self.handle_duplicate_samples()
# logger.debug(f"Here's our list of duplicate removed samples: {self.samples}")
for key, value in dicto.items():
if isinstance(value, dict):
try:
@@ -811,18 +777,13 @@ class PydSubmission(BaseModel, extra='allow'):
continue
if value is None:
continue
# logger.debug(f"Setting {key} to {value}")
match key:
case "reagents":
for reagent in self.reagents:
logger.debug(f"Checking reagent {reagent.lot}")
reagent, _ = reagent.toSQL(submission=instance)
# logger.debug(f"Association: {assoc}")
case "samples":
for sample in self.samples:
sample, associations, _ = sample.toSQL(submission=instance)
# logger.debug(f"Sample SQL object to be added to submission: {sample.__dict__}")
# logger.debug(associations)
for assoc in associations:
if assoc is not None:
if assoc not in instance.submission_sample_associations:
@@ -830,19 +791,16 @@ class PydSubmission(BaseModel, extra='allow'):
else:
logger.warning(f"Sample association {assoc} is already present in {instance}")
case "equipment":
# logger.debug(f"Equipment: {pformat(self.equipment)}")
for equip in self.equipment:
if equip is None:
continue
equip, association = equip.toSQL(submission=instance)
if association is not None:
instance.submission_equipment_associations.append(association)
logger.debug(f"Equipment associations: {instance.submission_equipment_associations}")
case "tips":
for tips in self.tips:
if tips is None:
continue
# logger.debug(f"Converting tips: {tips} to sql.")
try:
association = tips.to_sql(submission=instance)
except AttributeError:
@@ -864,14 +822,11 @@ class PydSubmission(BaseModel, extra='allow'):
value = value
instance.set_attribute(key=key, value=value)
case item if item in instance.jsons():
# logger.debug(f"{item} is a json.")
try:
ii = value.items()
except AttributeError:
ii = {}
logger.debug(f"ii is {ii}, value is {value}")
for k, v in ii:
logger.debug(f"k is {k}, v is {v}")
if isinstance(v, datetime):
value[k] = v.strftime("%Y-%m-%d %H:%M:%S")
else:
@@ -893,21 +848,17 @@ class PydSubmission(BaseModel, extra='allow'):
else:
logger.warning(f"{key} already == {value} so no updating.")
try:
# logger.debug(f"Calculating costs for procedure...")
instance.calculate_base_cost()
except (TypeError, AttributeError) as e:
logger.debug(f"Looks like that kit doesn't have cost breakdown yet due to: {e}, using 0.")
logger.error(f"Looks like that kit doesn't have cost breakdown yet due to: {e}, using 0.")
try:
instance.run_cost = instance.extraction_kit.cost_per_run
except AttributeError:
instance.run_cost = 0
# logger.debug(f"Calculated base run cost of: {instance.run_cost}")
# NOTE: Apply any discounts that are applicable for client and kit.
try:
# logger.debug("Checking and applying discounts...")
discounts = [item.amount for item in
Discount.query(kit_type=instance.extraction_kit, organization=instance.submitting_lab)]
# logger.debug(f"We got discounts: {discounts}")
if len(discounts) > 0:
instance.run_cost = instance.run_cost - sum(discounts)
except Exception as e:
@@ -925,7 +876,6 @@ class PydSubmission(BaseModel, extra='allow'):
SubmissionFormWidget: Submission form widget
"""
from frontend.widgets.submission_widget import SubmissionFormWidget
# logger.debug(f"Disable: {disable}")
return SubmissionFormWidget(parent=parent, submission=self, disable=disable)
def to_writer(self) -> "SheetWriter":
@@ -946,10 +896,8 @@ class PydSubmission(BaseModel, extra='allow'):
str: Output filename
"""
template = self.submission_object.filename_template()
# logger.debug(f"Using template string: {template}")
render = self.namer.construct_export_name(template=template, **self.improved_dict(dictionaries=False)).replace(
"/", "")
# logger.debug(f"Template rendered as: {render}")
return render
# @report_result
@@ -964,26 +912,20 @@ class PydSubmission(BaseModel, extra='allow'):
Report: Result object containing a message and any missing components.
"""
report = Report()
# logger.debug(f"Extraction kit: {extraction_kit}. Is it a string? {isinstance(extraction_kit, str)}")
if isinstance(extraction_kit, str):
extraction_kit = dict(value=extraction_kit)
if extraction_kit is not None and extraction_kit != self.extraction_kit['value']:
self.extraction_kit['value'] = extraction_kit['value']
# logger.debug(f"Looking up {self.extraction_kit['value']}")
ext_kit = KitType.query(name=self.extraction_kit['value'])
ext_kit_rtypes = [item.to_pydantic() for item in
ext_kit.get_reagents(required=True, submission_type=self.submission_type['value'])]
# logger.debug(f"Kit reagents: {ext_kit_rtypes}")
# logger.debug(f"Submission reagents: {self.reagents}")
# NOTE: Exclude any reagenttype found in this pyd not expected in kit.
expected_check = [item.role for item in ext_kit_rtypes]
output_reagents = [rt for rt in self.reagents if rt.role in expected_check]
logger.debug(f"Already have these reagent types: {output_reagents}")
missing_check = [item.role for item in output_reagents]
missing_reagents = [rt for rt in ext_kit_rtypes if rt.role not in missing_check]
missing_reagents += [rt for rt in output_reagents if rt.missing]
output_reagents += [rt for rt in missing_reagents if rt not in output_reagents]
# logger.debug(f"Missing reagents types: {missing_reagents}")
# NOTE: if lists are equal return no problem
if len(missing_reagents) == 0:
result = None
@@ -1072,7 +1014,6 @@ class PydReagentRole(BaseModel):
instance: ReagentRole = ReagentRole.query(name=self.name)
if instance is None:
instance = ReagentRole(name=self.name, eol_ext=self.eol_ext)
# logger.debug(f"This is the reagent type instance: {instance.__dict__}")
try:
assoc = KitTypeReagentRoleAssociation.query(reagent_role=instance, kit_type=kit)
except StatementError:

View File

@@ -41,7 +41,6 @@ class CustomFigure(Figure):
"""
if modes:
ytitle = modes[0]
# logger.debug("Creating visibles list for each mode.")
self.update_layout(
xaxis_title="Submitted Date (* - Date parsed from fastq file creation date)",
yaxis_title=ytitle,
@@ -79,7 +78,6 @@ class CustomFigure(Figure):
rng = [1]
if months > 2:
rng += [iii for iii in range(3, months, 3)]
# logger.debug(f"Making buttons for months: {rng}")
buttons = [dict(count=iii, label=f"{iii}m", step="month", stepmode="backward") for iii in rng]
if months > date.today().month:
buttons += [dict(count=1, label="YTD", step="year", stepmode="todate")]
@@ -117,24 +115,6 @@ class CustomFigure(Figure):
{"yaxis.title.text": mode},
])
# def save_figure(self, group_name: str = "plotly_output", parent: QWidget | None = None):
# """
# Writes plotly figure to html file.
#
# Args:
# figs ():
# settings (dict): settings passed down from click
# fig (Figure): input figure object
# group_name (str): controltype
# """
#
# output = select_save_file(obj=parent, default_name=group_name, extension="png")
# self.write_image(output.absolute().__str__(), engine="kaleido")
#
# def save_data(self, group_name: str = "plotly_export", parent:QWidget|None=None):
# output = select_save_file(obj=parent, default_name=group_name, extension="xlsx")
# self.df.to_excel(output.absolute().__str__(), engine="openpyxl", index=False)
def to_html(self) -> str:
"""
Creates final html code from plotly

View File

@@ -3,13 +3,12 @@ Functions for constructing irida controls graphs using plotly.
"""
from datetime import date
from pprint import pformat
from typing import Generator
import plotly.express as px
import pandas as pd
from PyQt6.QtWidgets import QWidget
from . import CustomFigure
import logging
from tools import get_unique_values_in_df_column, divide_chunks
from tools import get_unique_values_in_df_column
logger = logging.getLogger(f"submissions.{__name__}")

View File

@@ -21,11 +21,9 @@ class PCRFigure(CustomFigure):
months = int(settings['months'])
except KeyError:
months = 6
# logger.debug(f"DF: {self.df}")
self.construct_chart(df=df)
def construct_chart(self, df: pd.DataFrame):
# logger.debug(f"PCR df:\n {df}")
try:
scatter = px.scatter(data_frame=df, x='submitted_date', y="ct",
hover_data=["name", "target", "ct", "reagent_lot"],

View File

@@ -23,7 +23,6 @@ class TurnaroundChart(CustomFigure):
months = int(settings['months'])
except KeyError:
months = 6
# logger.debug(f"DF: {self.df}")
self.construct_chart()
if threshold:
self.add_hline(y=threshold)
@@ -32,11 +31,9 @@ class TurnaroundChart(CustomFigure):
def construct_chart(self, df: pd.DataFrame | None = None):
if df:
self.df = df
# logger.debug(f"PCR df:\n {df}")
self.df = self.df[self.df.days.notnull()]
self.df = self.df.sort_values(['submitted_date', 'name'], ascending=[True, True]).reset_index(drop=True)
self.df = self.df.reset_index().rename(columns={"index": "idx"})
# logger.debug(f"DF: {self.df}")
try:
scatter = px.scatter(data_frame=self.df, x='idx', y="days",
hover_data=["name", "submitted_date", "completed_date", "days"],

View File

@@ -27,13 +27,11 @@ from .turnaround import TurnaroundTime
from .omni_search import SearchBox
logger = logging.getLogger(f'submissions.{__name__}')
# logger.info("Hello, I am a logger")
class App(QMainWindow):
def __init__(self, ctx: Settings = None):
# logger.debug(f"Initializing main window...")
super().__init__()
qInstallMessageHandler(lambda x, y, z: None)
self.ctx = ctx
@@ -68,7 +66,6 @@ class App(QMainWindow):
"""
adds items to menu bar
"""
# logger.debug(f"Creating menu bar...")
menuBar = self.menuBar()
fileMenu = menuBar.addMenu("&File")
editMenu = menuBar.addMenu("&Edit")
@@ -82,7 +79,6 @@ class App(QMainWindow):
fileMenu.addAction(self.importAction)
fileMenu.addAction(self.yamlExportAction)
fileMenu.addAction(self.yamlImportAction)
# methodsMenu.addAction(self.searchLog)
methodsMenu.addAction(self.searchSample)
maintenanceMenu.addAction(self.joinExtractionAction)
maintenanceMenu.addAction(self.joinPCRAction)
@@ -92,27 +88,20 @@ class App(QMainWindow):
"""
adds items to toolbar
"""
# logger.debug(f"Creating toolbar...")
toolbar = QToolBar("My main toolbar")
self.addToolBar(toolbar)
toolbar.addAction(self.addReagentAction)
# toolbar.addAction(self.addKitAction)
# toolbar.addAction(self.addOrgAction)
def _createActions(self):
"""
creates actions
"""
# logger.debug(f"Creating actions...")
self.importAction = QAction("&Import Submission", self)
self.addReagentAction = QAction("Add Reagent", self)
# self.addKitAction = QAction("Import Kit", self)
# self.addOrgAction = QAction("Import Org", self)
self.joinExtractionAction = QAction("Link Extraction Logs")
self.joinPCRAction = QAction("Link PCR Logs")
self.helpAction = QAction("&About", self)
self.docsAction = QAction("&Docs", self)
# self.searchLog = QAction("Search Log", self)
self.searchSample = QAction("Search Sample", self)
self.githubAction = QAction("Github", self)
self.yamlExportAction = QAction("Export Type Example", self)
@@ -123,14 +112,12 @@ class App(QMainWindow):
"""
connects menu and tool bar items to functions
"""
# logger.debug(f"Connecting actions...")
self.importAction.triggered.connect(self.table_widget.formwidget.importSubmission)
self.addReagentAction.triggered.connect(self.table_widget.formwidget.add_reagent)
self.joinExtractionAction.triggered.connect(self.table_widget.sub_wid.link_extractions)
self.joinPCRAction.triggered.connect(self.table_widget.sub_wid.link_pcr)
self.helpAction.triggered.connect(self.showAbout)
self.docsAction.triggered.connect(self.openDocs)
# self.searchLog.triggered.connect(self.runSearch)
self.searchSample.triggered.connect(self.runSampleSearch)
self.githubAction.triggered.connect(self.openGithub)
self.yamlExportAction.triggered.connect(self.export_ST_yaml)
@@ -145,7 +132,6 @@ class App(QMainWindow):
j_env = jinja_template_loading()
template = j_env.get_template("project.html")
html = template.render(info=self.ctx.package.__dict__)
# logger.debug(html)
about = HTMLPop(html=html, title="About")
about.exec()
@@ -157,7 +143,6 @@ class App(QMainWindow):
url = Path(sys._MEIPASS).joinpath("files", "docs", "index.html")
else:
url = Path("docs\\build\\index.html").absolute()
# logger.debug(f"Attempting to open {url}")
webbrowser.get('windows-default').open(f"file://{url.__str__()}")
def openGithub(self):
@@ -177,10 +162,6 @@ class App(QMainWindow):
instr = HTMLPop(html=html, title="Instructions")
instr.exec()
# def runSearch(self):
# dlg = LogParser(self)
# dlg.exec()
def runSampleSearch(self):
"""
Create a search for samples.
@@ -253,7 +234,6 @@ class App(QMainWindow):
class AddSubForm(QWidget):
def __init__(self, parent: QWidget):
# logger.debug(f"Initializating subform...")
super(QWidget, self).__init__(parent)
self.layout = QVBoxLayout(self)
# NOTE: Initialize tab screen

View File

@@ -6,7 +6,6 @@ from PyQt6.QtWidgets import (
QWidget, QComboBox, QPushButton
)
from PyQt6.QtCore import QSignalBlocker
from backend import ChartReportMaker
from backend.db import ControlType, IridaControl
import logging
@@ -21,12 +20,9 @@ class ControlsViewer(InfoPane):
def __init__(self, parent: QWidget, archetype: str) -> None:
super().__init__(parent)
logger.debug(f"Incoming Archetype: {archetype}")
self.archetype = ControlType.query(name=archetype)
if not self.archetype:
return
logger.debug(f"Archetype set as: {self.archetype}")
# logger.debug(f"\n\n{self.app}\n\n")
# NOTE: set tab2 layout
self.control_sub_typer = QComboBox()
# NOTE: fetch types of controls
@@ -54,12 +50,6 @@ class ControlsViewer(InfoPane):
self.save_button.pressed.connect(self.save_png)
self.export_button.pressed.connect(self.save_excel)
# def save_chart_function(self):
# self.fig.save_figure(parent=self)
#
# def save_data_function(self):
# self.fig.save_data(parent=self)
@report_result
def update_data(self, *args, **kwargs):
"""
@@ -71,20 +61,6 @@ class ControlsViewer(InfoPane):
self.mode_sub_typer.disconnect()
except TypeError:
pass
# NOTE: correct start date being more recent than end date and rerun
# if self.datepicker.start_date.date() > self.datepicker.end_date.date():
# threemonthsago = self.datepicker.end_date.date().addDays(-60)
# msg = f"Start date after end date is not allowed! Setting to {threemonthsago.toString()}."
# logger.warning(msg)
# # NOTE: block signal that will rerun controls getter and set start date Without triggering this function again
# with QSignalBlocker(self.datepicker.start_date) as blocker:
# self.datepicker.start_date.setDate(threemonthsago)
# self.update_data()
# report.add_result(Result(owner=self.__str__(), msg=msg, status="Warning"))
# return report
# # NOTE: convert to python useable date objects
# self.start_date = self.datepicker.start_date.date().toPyDate()
# self.end_date = self.datepicker.end_date.date().toPyDate()
self.con_sub_type = self.control_sub_typer.currentText()
self.mode = self.mode_typer.currentText()
self.mode_sub_typer.clear()
@@ -104,7 +80,6 @@ class ControlsViewer(InfoPane):
self.mode_sub_typer.clear()
self.mode_sub_typer.setEnabled(False)
self.chart_maker_function()
# return report
@report_result
def chart_maker_function(self, *args, **kwargs):
@@ -119,14 +94,11 @@ class ControlsViewer(InfoPane):
Tuple[QMainWindow, dict]: Collection of new main app window and result dict
"""
report = Report()
# logger.debug(f"Control getter context: \n\tControl type: {self.con_sub_type}\n\tMode: {self.mode}\n\tStart \
# Date: {self.start_date}\n\tEnd Date: {self.end_date}")
# NOTE: set the mode_sub_type for kraken
if self.mode_sub_typer.currentText() == "":
self.mode_sub_type = None
else:
self.mode_sub_type = self.mode_sub_typer.currentText()
logger.debug(f"Subtype: {self.mode_sub_type}")
months = self.diff_month(self.start_date, self.end_date)
# NOTE: query all controls using the type/start and end dates from the gui
chart_settings = dict(sub_type=self.con_sub_type, start_date=self.start_date, end_date=self.end_date,
@@ -136,14 +108,11 @@ class ControlsViewer(InfoPane):
self.report_obj = ChartReportMaker(df=self.fig.df, sheet_name=self.archetype.name)
if issubclass(self.fig.__class__, CustomFigure):
self.save_button.setEnabled(True)
# logger.debug(f"Updating figure...")
# NOTE: construct html for webview
try:
html = self.fig.to_html()
except AttributeError:
html = ""
# logger.debug(f"The length of html code is: {len(html)}")
self.webview.setHtml(html)
self.webview.update()
# logger.debug("Figure updated... I hope.")
return report

View File

@@ -21,9 +21,7 @@ class EquipmentUsage(QDialog):
self.setWindowTitle(f"Equipment Checklist - {submission.rsl_plate_num}")
self.used_equipment = self.submission.get_used_equipment()
self.kit = self.submission.extraction_kit
# logger.debug(f"Existing equipment: {self.used_equipment}")
self.opt_equipment = submission.submission_type.get_equipment()
# logger.debug(f"EquipmentRoles: {self.opt_equipment}")
self.layout = QVBoxLayout()
self.setLayout(self.layout)
self.populate_form()
@@ -38,7 +36,6 @@ class EquipmentUsage(QDialog):
self.buttonBox.rejected.connect(self.reject)
label = self.LabelRow(parent=self)
self.layout.addWidget(label)
# logger.debug("Creating widgets for equipment")
for eq in self.opt_equipment:
widg = eq.to_form(parent=self, used=self.used_equipment)
self.layout.addWidget(widg)
@@ -124,9 +121,7 @@ class RoleComboBox(QWidget):
Changes processes when equipment is changed
"""
equip = self.box.currentText()
# logger.debug(f"Updating equipment: {equip}")
equip2 = next((item for item in self.role.equipment if item.name == equip), self.role.equipment[0])
# logger.debug(f"Using: {equip2}")
with QSignalBlocker(self.process) as blocker:
self.process.clear()
self.process.addItems([item for item in equip2.processes if item in self.role.processes])
@@ -136,7 +131,6 @@ class RoleComboBox(QWidget):
Changes what tips are available when process is changed
"""
process = self.process.currentText().strip()
# logger.debug(f"Checking process: {process} for equipment {self.role.name}")
process = Process.query(name=process)
if process.tip_roles:
for iii, tip_role in enumerate(process.tip_roles):
@@ -144,7 +138,6 @@ class RoleComboBox(QWidget):
tip_choices = [item.name for item in tip_role.instances]
widget.setEditable(False)
widget.addItems(tip_choices)
# logger.debug(f"Tiprole: {tip_role.__dict__}")
widget.setObjectName(f"tips_{tip_role.name}")
widget.setMinimumWidth(200)
widget.setMaximumWidth(200)
@@ -169,7 +162,6 @@ class RoleComboBox(QWidget):
eq = Equipment.query(name=self.box.currentText())
tips = [PydTips(name=item.currentText(), role=item.objectName().lstrip("tips").lstrip("_")) for item in
self.findChildren(QComboBox) if item.objectName().startswith("tips")]
# logger.debug(tips)
try:
return PydEquipment(
name=eq.name,

View File

@@ -148,5 +148,4 @@ class ControlsForm(QWidget):
dicto['values'].append(dict(name=label[1], value=le.currentText()))
if label[0] not in [item['name'] for item in output]:
output.append(dicto)
# logger.debug(pformat(output))
return output, self.comment_field.toPlainText()

View File

@@ -18,7 +18,6 @@ class InfoPane(QWidget):
def __init__(self, parent: QWidget) -> None:
super().__init__(parent)
self.app = self.parent().parent()
# logger.debug(f"\n\n{self.app}\n\n")
self.report = Report()
self.datepicker = StartEndDatePicker(default_start=-180)
self.webview = QWebEngineView()

View File

@@ -1,6 +1,6 @@
'''
"""
Contains miscellaneous widgets for frontend functions
'''
"""
import math
from datetime import date
from PyQt6.QtGui import QPageLayout, QPageSize, QStandardItem, QIcon
@@ -8,7 +8,7 @@ from PyQt6.QtWebEngineWidgets import QWebEngineView
from PyQt6.QtWidgets import (
QLabel, QVBoxLayout,
QLineEdit, QComboBox, QDialog,
QDialogButtonBox, QDateEdit, QPushButton, QFormLayout, QWidget, QHBoxLayout, QSizePolicy
QDialogButtonBox, QDateEdit, QPushButton, QWidget, QHBoxLayout, QSizePolicy
)
from PyQt6.QtCore import Qt, QDate, QSize, QMarginsF
from tools import jinja_template_loading
@@ -66,7 +66,6 @@ class AddReagentForm(QDialog):
self.type_input.addItems([item.name for item in ReagentRole.query() if kit in item.kit_types])
else:
self.type_input.addItems([item.name for item in ReagentRole.query()])
# logger.debug(f"Trying to find index of {reagent_type}")
# NOTE: convert input to user-friendly string?
try:
reagent_role = reagent_role.replace("_", " ").title()
@@ -106,7 +105,6 @@ class AddReagentForm(QDialog):
"""
Updates reagent names form field with examples from reagent type
"""
# logger.debug(self.type_input.currentText())
self.name_input.clear()
lookup = Reagent.query(role=self.type_input.currentText())
self.name_input.addItems(list(set([item.name for item in lookup])))
@@ -145,7 +143,8 @@ def save_pdf(obj: QWebEngineView, filename: Path):
obj.page().printToPdf(filename.absolute().__str__(), page_layout)
# subclass
# NOTE: subclass
class CheckableComboBox(QComboBox):
# once there is a checkState set, it is rendered
# here we assume default Unchecked
@@ -162,7 +161,6 @@ class CheckableComboBox(QComboBox):
return item.checkState() == Qt.CheckState.Checked
def changed(self):
logger.debug("emitting updated")
self.updated.emit()

View File

@@ -1,6 +1,6 @@
'''
"""
Search box that performs fuzzy search for samples
'''
"""
from pprint import pformat
from typing import Tuple, Any, List
from pandas import DataFrame
@@ -39,7 +39,6 @@ class SearchBox(QDialog):
else:
self.sub_class = None
self.results = SearchResults(parent=self, object_type=self.object_type, extras=self.extras, **kwargs)
# logger.debug(f"results: {self.results}")
self.layout.addWidget(self.results, 5, 0)
self.setLayout(self.layout)
self.setWindowTitle(f"Search {self.object_type.__name__}")
@@ -51,7 +50,6 @@ class SearchBox(QDialog):
Changes form inputs based on sample type
"""
deletes = [item for item in self.findChildren(FieldSearch)]
# logger.debug(deletes)
for item in deletes:
item.setParent(None)
# NOTE: Handle any subclasses
@@ -62,7 +60,6 @@ class SearchBox(QDialog):
self.object_type = self.original_type
else:
self.object_type = self.original_type.find_regular_subclass(self.sub_class.currentText())
logger.debug(f"{self.object_type} searchables: {self.object_type.searchables}")
for iii, searchable in enumerate(self.object_type.searchables):
widget = FieldSearch(parent=self, label=searchable, field_name=searchable)
widget.setObjectName(searchable)
@@ -85,10 +82,9 @@ class SearchBox(QDialog):
Shows dataframe of relevant samples.
"""
fields = self.parse_form()
# logger.debug(f"Got fields: {fields}")
sample_list_creator = self.object_type.fuzzy_search(**fields)
data = self.object_type.results_to_df(objects=sample_list_creator)
# Setting results moved to here from __init__ 202411118
# NOTE: Setting results moved to here from __init__ 202411118
self.results.setData(df=data)
@@ -154,7 +150,6 @@ class SearchResults(QTableView):
def parse_row(self, x):
context = {item['name']: x.sibling(x.row(), item['column']).data() for item in self.columns_of_interest}
logger.debug(f"Context: {context}")
try:
object = self.object_type.query(**context)
except KeyError:

View File

@@ -9,7 +9,7 @@ from PyQt6.QtWebEngineWidgets import QWebEngineView
from tools import jinja_template_loading
import logging
from backend.db import models
from typing import Any, Literal
from typing import Literal
logger = logging.getLogger(f"submissions.{__name__}")

View File

@@ -45,7 +45,6 @@ class SubmissionDetails(QDialog):
self.btn.clicked.connect(self.save_pdf)
self.back = QPushButton("Back")
self.back.setFixedWidth(100)
# self.back.clicked.connect(self.back_function)
self.back.clicked.connect(self.webview.back)
self.layout.addWidget(self.back, 0, 0, 1, 1)
self.layout.addWidget(self.btn, 0, 1, 1, 9)
@@ -70,7 +69,6 @@ class SubmissionDetails(QDialog):
if "Submission" in title:
self.btn.setEnabled(True)
self.export_plate = title.split(" ")[-1]
# logger.debug(f"Updating export plate to: {self.export_plate}")
else:
self.btn.setEnabled(False)
try:
@@ -78,7 +76,6 @@ class SubmissionDetails(QDialog):
except IndexError as e:
check = title
if title == check:
# logger.debug("Disabling back button")
self.back.setEnabled(False)
else:
self.back.setEnabled(True)
@@ -91,7 +88,6 @@ class SubmissionDetails(QDialog):
Args:
sample (str): Submitter Id of the sample.
"""
# logger.debug(f"Details: {sample}")
if isinstance(sample, str):
sample = BasicSample.query(submitter_id=sample)
base_dict = sample.to_sub_dict(full_data=True)
@@ -114,7 +110,6 @@ class SubmissionDetails(QDialog):
base_dict = reagent.to_sub_dict(extraction_kit=self.kit, full_data=True)
env = jinja_template_loading()
temp_name = "reagent_details.html"
# logger.debug(f"Returning template: {temp_name}")
try:
template = env.get_template(temp_name)
except TemplateNotFound as e:
@@ -147,29 +142,23 @@ class SubmissionDetails(QDialog):
Args:
submission (str | BasicSubmission): Submission of interest.
"""
# logger.debug(f"Details for: {submission}")
if isinstance(submission, str):
submission = BasicSubmission.query(rsl_plate_num=submission)
self.rsl_plate_num = submission.rsl_plate_num
self.base_dict = submission.to_dict(full_data=True)
# logger.debug(f"Submission details data:\n{pformat({k:v for k,v in self.base_dict.items() if k == 'reagents'})}")
# NOTE: don't want id
# logger.debug(f"Creating barcode.")
# logger.debug(f"Making platemap...")
self.base_dict['platemap'] = submission.make_plate_map(sample_list=submission.hitpick_plate())
self.base_dict['excluded'] = submission.get_default_info("details_ignore")
self.base_dict, self.template = submission.get_details_template(base_dict=self.base_dict)
template_path = Path(self.template.environment.loader.__getattribute__("searchpath")[0])
with open(template_path.joinpath("css", "styles.css"), "r") as f:
css = f.read()
# logger.debug(f"Submission_details: {pformat(self.base_dict)}")
# logger.debug(f"User is power user: {is_power_user()}")
self.html = self.template.render(sub=self.base_dict, permission=is_power_user(), css=css)
self.webview.setHtml(self.html)
@pyqtSlot(str)
def sign_off(self, submission: str | BasicSubmission):
logger.debug(f"Signing off on {submission} - ({getuser()})")
logger.info(f"Signing off on {submission} - ({getuser()})")
if isinstance(submission, str):
submission = BasicSubmission.query(rsl_plate_num=submission)
submission.signed_by = getuser()
@@ -195,7 +184,6 @@ class SubmissionComment(QDialog):
super().__init__(parent)
try:
self.app = parent.parent().parent().parent().parent().parent().parent
# logger.debug(f"App: {self.app}")
except AttributeError:
pass
self.submission = submission
@@ -225,5 +213,4 @@ class SubmissionComment(QDialog):
return None
dt = datetime.strftime(datetime.now(), "%Y-%m-%d %H:%M:%S")
full_comment = {"name": commenter, "time": dt, "text": comment}
# logger.debug(f"Full comment: {full_comment}")
return full_comment

View File

@@ -1,6 +1,6 @@
'''
"""
Contains widgets specific to the submission summary and submission details.
'''
"""
import logging
from pprint import pformat
from PyQt6.QtWidgets import QTableView, QMenu
@@ -107,20 +107,16 @@ class SubmissionsSheet(QTableView):
Args:
event (_type_): the context menu event that triggered this menu
"""
# logger.debug(event.__dict__)
id = self.selectionModel().currentIndex()
id = id.sibling(id.row(), 0).data()
submission = BasicSubmission.query(id=id)
# logger.debug(f"Event submission: {submission}")
self.menu = QMenu(self)
self.con_actions = submission.custom_context_events()
# logger.debug(f"Menu options: {self.con_actions}")
for k in self.con_actions.keys():
# logger.debug(f"Adding {k}")
action = QAction(k, self)
action.triggered.connect(lambda _, action_name=k: self.triggered_action(action_name=action_name))
self.menu.addAction(action)
# add other required actions
# NOTE: add other required actions
self.menu.popup(QCursor.pos())
def triggered_action(self, action_name: str):
@@ -130,8 +126,6 @@ class SubmissionsSheet(QTableView):
Args:
action_name (str): name of the action from the menu
"""
# logger.debug(f"Action: {action_name}")
# logger.debug(f"Responding with {self.con_actions[action_name]}")
func = self.con_actions[action_name]
func(obj=self)
@@ -179,7 +173,6 @@ class SubmissionsSheet(QTableView):
if sub is None:
continue
try:
# logger.debug(f"Found submission: {sub.rsl_plate_num}")
count += 1
except AttributeError:
continue

View File

@@ -1,9 +1,9 @@
'''
"""
Contains all submission related frontend functions
'''
"""
from PyQt6.QtWidgets import (
QWidget, QPushButton, QVBoxLayout,
QComboBox, QDateEdit, QLineEdit, QLabel, QCheckBox, QBoxLayout, QHBoxLayout, QGridLayout
QComboBox, QDateEdit, QLineEdit, QLabel, QCheckBox, QHBoxLayout, QGridLayout
)
from PyQt6.QtCore import pyqtSignal, Qt, QSignalBlocker
from . import select_open_file, select_save_file
@@ -34,7 +34,6 @@ class MyQComboBox(QComboBox):
super(MyQComboBox, self).__init__(*args, **kwargs)
self.scrollWidget = scrollWidget
self.setFocusPolicy(Qt.FocusPolicy.StrongFocus)
logger.debug(f"Scrollwidget: {scrollWidget}")
def wheelEvent(self, *args, **kwargs):
if self.hasFocus():
@@ -61,14 +60,12 @@ class MyQDateEdit(QDateEdit):
class SubmissionFormContainer(QWidget):
# A signal carrying a path
# NOTE: A signal carrying a path
import_drag = pyqtSignal(Path)
def __init__(self, parent: QWidget) -> None:
# logger.debug(f"Setting form widget...")
super().__init__(parent)
self.app = self.parent().parent()
# logger.debug(f"App: {self.app}")
self.report = Report()
self.setStyleSheet('background-color: light grey;')
self.setAcceptDrops(True)
@@ -89,7 +86,6 @@ class SubmissionFormContainer(QWidget):
Sets filename when file dropped
"""
fname = Path([u.toLocalFile() for u in event.mimeData().urls()][0])
# logger.debug(f"App: {self.app}")
self.app.last_dir = fname.parent
self.import_drag.emit(fname)
@@ -127,7 +123,6 @@ class SubmissionFormContainer(QWidget):
# NOTE: set file dialog
if isinstance(fname, bool) or fname is None:
fname = select_open_file(self, file_extension="xlsx")
# logger.debug(f"Attempting to parse file: {fname}")
if not fname:
report.add_result(Result(msg=f"File {fname.__str__()} not found.", status="critical"))
return report
@@ -139,14 +134,10 @@ class SubmissionFormContainer(QWidget):
return
except AttributeError:
self.prsr = SheetParser(filepath=fname)
# logger.debug(f"Submission dictionary:\n{pformat(self.prsr.sub)}")
self.pyd = self.prsr.to_pydantic()
# logger.debug(f"Pydantic result: \n\n{pformat(self.pyd)}\n\n")
self.form = self.pyd.to_form(parent=self)
self.layout().addWidget(self.form)
return report
# logger.debug(f"Outgoing report: {self.report.results}")
# logger.debug(f"All attributes of submission container:\n{pformat(self.__dict__)}")
@report_result
def add_reagent(self, reagent_lot: str | None = None, reagent_role: str | None = None, expiry: date | None = None,
@@ -172,14 +163,12 @@ class SubmissionFormContainer(QWidget):
if dlg.exec():
# NOTE: extract form info
info = dlg.parse_form()
# logger.debug(f"Reagent info: {info}")
# NOTE: create reagent object
reagent = PydReagent(ctx=self.app.ctx, **info, missing=False)
# NOTE: send reagent to db
sqlobj, result = reagent.toSQL()
sqlobj.save()
report.add_result(result)
# logger.debug(f"Reagent: {reagent}, Report: {report}")
return reagent, report
@@ -189,7 +178,6 @@ class SubmissionFormWidget(QWidget):
def __init__(self, parent: QWidget, submission: PydSubmission, disable: list | None = None) -> None:
super().__init__(parent)
# logger.debug(f"Disable: {disable}")
if disable is None:
disable = []
self.app = parent.app
@@ -200,17 +188,13 @@ class SubmissionFormWidget(QWidget):
defaults = st.get_default_info("form_recover", "form_ignore", submission_type=self.pyd.submission_type['value'])
self.recover = defaults['form_recover']
self.ignore = defaults['form_ignore']
# logger.debug(f"Attempting to extend ignore list with {self.pyd.submission_type['value']}")
self.layout = QVBoxLayout()
for k in list(self.pyd.model_fields.keys()) + list(self.pyd.model_extra.keys()):
# logger.debug(f"Creating widget: {k}")
if k in self.ignore:
logger.warning(f"{k} in form_ignore {self.ignore}, not creating widget")
continue
try:
# logger.debug(f"Key: {k}, Disable: {disable}")
check = k in disable
# logger.debug(f"Check: {check}")
except TypeError:
check = False
try:
@@ -225,7 +209,6 @@ class SubmissionFormWidget(QWidget):
sub_obj=st, disable=check)
if add_widget is not None:
self.layout.addWidget(add_widget)
# if k == "extraction_kit":
if k in self.__class__.update_reagent_fields:
add_widget.input.currentTextChanged.connect(self.scrape_reagents)
self.disabler = self.DisableReagents(self)
@@ -236,15 +219,10 @@ class SubmissionFormWidget(QWidget):
self.scrape_reagents(self.extraction_kit)
def disable_reagents(self):
"""
Disables all ReagentFormWidgets in this form.
"""
for reagent in self.findChildren(self.ReagentFormWidget):
# if self.disabler.checkbox.isChecked():
# # reagent.setVisible(True)
# # with QSignalBlocker(self.disabler.checkbox) as b:
# reagent.flip_check()
# else:
# # reagent.setVisible(False)
# # with QSignalBlocker(self.disabler.checkbox) as b:
# reagent.check.setChecked(False)
reagent.flip_check(self.disabler.checkbox.isChecked())
@@ -263,7 +241,6 @@ class SubmissionFormWidget(QWidget):
Returns:
self.InfoItem: Form widget to hold name:value
"""
# logger.debug(f"Key: {key}, Disable: {disable}")
if isinstance(submission_type, str):
submission_type = SubmissionType.query(name=submission_type)
if key not in self.ignore:
@@ -276,7 +253,6 @@ class SubmissionFormWidget(QWidget):
case _:
widget = self.InfoItem(parent=self, key=key, value=value, submission_type=submission_type,
sub_obj=sub_obj)
# logger.debug(f"Setting widget enabled to: {not disable}")
if disable:
widget.input.setEnabled(False)
widget.input.setToolTip("Widget disabled to protect database integrity.")
@@ -298,24 +274,20 @@ class SubmissionFormWidget(QWidget):
"""
self.extraction_kit = args[0]
report = Report()
logger.debug(f"Extraction kit: {self.extraction_kit}")
# NOTE: Remove previous reagent widgets
try:
old_reagents = self.find_widgets()
except AttributeError:
logger.error(f"Couldn't find old reagents.")
old_reagents = []
# logger.debug(f"\n\nAttempting to clear: {old_reagents}\n\n")
for reagent in old_reagents:
if isinstance(reagent, self.ReagentFormWidget) or isinstance(reagent, QPushButton):
reagent.setParent(None)
reagents, integrity_report = self.pyd.check_kit_integrity(extraction_kit=self.extraction_kit)
# logger.debug(f"Got reagents: {pformat(reagents)}")
for reagent in reagents:
add_widget = self.ReagentFormWidget(parent=self, reagent=reagent, extraction_kit=self.extraction_kit)
self.layout.addWidget(add_widget)
report.add_result(integrity_report)
# logger.debug(f"Outgoing report: {report.results}")
if hasattr(self.pyd, "csv"):
export_csv_btn = QPushButton("Export CSV")
export_csv_btn.setObjectName("export_csv_btn")
@@ -326,6 +298,7 @@ class SubmissionFormWidget(QWidget):
self.layout.addWidget(submit_btn)
submit_btn.clicked.connect(self.submit_new_sample_function)
self.setLayout(self.layout)
self.disabler.checkbox.setChecked(True)
return report
def clear_form(self):
@@ -365,23 +338,16 @@ class SubmissionFormWidget(QWidget):
report = Report()
result = self.parse_form()
report.add_result(result)
# logger.debug(f"Submission: {pformat(self.pyd)}")
# logger.debug("Checking kit integrity...")
if self.disabler.checkbox.isChecked():
_, result = self.pyd.check_kit_integrity()
report.add_result(result)
if len(result.results) > 0:
return
# logger.debug(f"PYD before transformation into SQL:\n\n{self.pyd}\n\n")
base_submission, result = self.pyd.to_sql()
# logger.debug(f"SQL object: {pformat(base_submission.__dict__)}")
# logger.debug(f"Base submission: {base_submission.to_dict()}")
# NOTE: check output message for issues
# logger.debug(f"Result of to_sql: {result}")
try:
trigger = result.results[-1]
code = trigger.code
# logger.debug(f"Code from return: {code}")
except IndexError as e:
logger.error(result.results)
logger.error(f"Problem getting error code: {e}")
@@ -408,11 +374,8 @@ class SubmissionFormWidget(QWidget):
pass
# NOTE: add reagents to submission object
for reagent in base_submission.reagents:
# logger.debug(f"Updating: {reagent} with {reagent.lot}")
reagent.update_last_used(kit=base_submission.extraction_kit)
# logger.debug(f"Final reagents: {pformat(base_submission.reagents)}")
save_output = base_submission.save()
# logger.debug(f"Save output: {save_output}")
# NOTE: update summary sheet
self.app.table_widget.sub_wid.setData()
# NOTE: reset form
@@ -423,7 +386,6 @@ class SubmissionFormWidget(QWidget):
check = True
if check:
self.setParent(None)
# logger.debug(f"All attributes of obj: {pformat(self.__dict__)}")
return report
def export_csv_function(self, fname: Path | None = None):
@@ -454,7 +416,6 @@ class SubmissionFormWidget(QWidget):
info = {}
reagents = []
for widget in self.findChildren(QWidget):
# logger.debug(f"Parsed widget of type {type(widget)}")
match widget:
case self.ReagentFormWidget():
reagent, _ = widget.parse_form()
@@ -464,16 +425,10 @@ class SubmissionFormWidget(QWidget):
field, value = widget.parse_form()
if field is not None:
info[field] = value
# logger.debug(f"Info: {pformat(info)}")
logger.debug(f"Reagents going into pyd: {pformat(reagents)}")
self.pyd.reagents = reagents
logger.debug(f"Reagents after insertion in pyd: {pformat(self.pyd.reagents)}")
# logger.debug(f"Attrs not in info: {[k for k, v in self.__dict__.items() if k not in info.keys()]}")
for item in self.recover:
# logger.debug(f"Attempting to recover: {item}")
if hasattr(self, item):
value = getattr(self, item)
# logger.debug(f"Setting {item}")
info[item] = value
for k, v in info.items():
self.pyd.set_attribute(key=k, value=v)
@@ -551,9 +506,6 @@ class SubmissionFormWidget(QWidget):
except (TypeError, KeyError):
pass
obj = parent.parent().parent()
# logger.debug(f"Object: {obj}")
# logger.debug(f"Parent: {parent.parent()}")
# logger.debug(f"Creating widget for: {key}")
match key:
case 'submitting_lab':
add_widget = MyQComboBox(scrollWidget=parent)
@@ -567,7 +519,6 @@ class SubmissionFormWidget(QWidget):
looked_up_lab = Organization.query(name=value, limit=1)
except AttributeError:
looked_up_lab = None
# logger.debug(f"\n\nLooked up lab: {looked_up_lab}")
if looked_up_lab:
try:
labs.remove(str(looked_up_lab.name))
@@ -586,12 +537,9 @@ class SubmissionFormWidget(QWidget):
# NOTE: create combobox to hold looked up kits
add_widget = MyQComboBox(scrollWidget=parent)
# NOTE: lookup existing kits by 'submission_type' decided on by sheetparser
# logger.debug(f"Looking up kits used for {submission_type}")
uses = [item.name for item in submission_type.kit_types]
obj.uses = uses
# logger.debug(f"Kits received for {submission_type}: {uses}")
if check_not_nan(value):
# logger.debug(f"The extraction kit in parser was: {value}")
try:
uses.insert(0, uses.pop(uses.index(value)))
except ValueError:
@@ -626,7 +574,6 @@ class SubmissionFormWidget(QWidget):
else:
# NOTE: anything else gets added in as a line edit
add_widget = QLineEdit()
# logger.debug(f"Setting widget text to {str(value).replace('_', ' ')}")
add_widget.setText(str(value).replace("_", " "))
add_widget.setToolTip(f"Enter value for {key}")
if add_widget is not None:
@@ -725,7 +672,6 @@ class SubmissionFormWidget(QWidget):
if not self.lot.isEnabled():
return None, report
lot = self.lot.currentText()
# logger.debug(f"Using this lot for the reagent {self.reagent}: {lot}")
wanted_reagent = Reagent.query(lot=lot, role=self.reagent.role)
# NOTE: if reagent doesn't exist in database, offer to add it (uses App.add_reagent)
if wanted_reagent is None:
@@ -741,7 +687,6 @@ class SubmissionFormWidget(QWidget):
return wanted_reagent, report
else:
# NOTE: In this case we will have an empty reagent and the submission will fail kit integrity check
# logger.debug("Will not add reagent.")
report.add_result(Result(msg="Failed integrity check", status="Critical"))
return None, report
else:
@@ -791,7 +736,6 @@ class SubmissionFormWidget(QWidget):
looked_up_rt = KitTypeReagentRoleAssociation.query(reagent_role=reagent.role,
kit_type=extraction_kit)
relevant_reagents = [str(item.lot) for item in looked_up_rt.get_all_relevant_reagents()]
logger.debug(f"Relevant reagents for {reagent}: {relevant_reagents}")
# NOTE: if reagent in sheet is not found insert it into the front of relevant reagents so it shows
if str(reagent.lot) not in relevant_reagents:
if check_not_nan(reagent.lot):
@@ -803,7 +747,6 @@ class SubmissionFormWidget(QWidget):
looked_up_reg = None
if isinstance(looked_up_reg, list):
looked_up_reg = None
# logger.debug(f"Because there was no reagent listed for {reagent.lot}, we will insert the last lot used: {looked_up_reg}")
if looked_up_reg:
try:
relevant_reagents.remove(str(looked_up_reg.lot))
@@ -812,15 +755,11 @@ class SubmissionFormWidget(QWidget):
relevant_reagents.insert(0, str(looked_up_reg.lot))
else:
if len(relevant_reagents) > 1:
# logger.debug(f"Found {reagent.lot} in relevant reagents: {relevant_reagents}. Moving to front of list.")
idx = relevant_reagents.index(str(reagent.lot))
# logger.debug(f"The index we got for {reagent.lot} in {relevant_reagents} was {idx}")
moved_reag = relevant_reagents.pop(idx)
relevant_reagents.insert(0, moved_reag)
else:
# logger.debug(f"Found {reagent.lot} in relevant reagents: {relevant_reagents}. But no need to move due to short list.")
pass
logger.debug(f"New relevant reagents: {relevant_reagents}")
self.setObjectName(f"lot_{reagent.role}")
self.addItems(relevant_reagents)
self.setToolTip(f"Enter lot number for the reagent used for {reagent.role}")

View File

@@ -35,7 +35,6 @@ class Summary(InfoPane):
def update_data(self):
super().update_data()
orgs = [self.org_select.itemText(i) for i in range(self.org_select.count()) if self.org_select.itemChecked(i)]
# logger.debug(f"Getting report from {self.start_date} to {self.end_date} using {orgs}")
self.report_obj = ReportMaker(start_date=self.start_date, end_date=self.end_date, organizations=orgs)
self.webview.setHtml(self.report_obj.html)
if self.report_obj.subs:

View File

@@ -27,14 +27,14 @@ timezone = tz("America/Winnipeg")
logger = logging.getLogger(f"submissions.{__name__}")
logger.debug(f"Package dir: {project_path}")
logger.info(f"Package dir: {project_path}")
if platform.system() == "Windows":
os_config_dir = "AppData/local"
print(f"Got platform Windows, config_dir: {os_config_dir}")
logger.info(f"Got platform Windows, config_dir: {os_config_dir}")
else:
os_config_dir = ".config"
print(f"Got platform other, config_dir: {os_config_dir}")
logger.info(f"Got platform other, config_dir: {os_config_dir}")
main_aux_dir = Path.home().joinpath(f"{os_config_dir}/submissions")
@@ -184,7 +184,6 @@ def convert_nans_to_nones(input_str) -> str | None:
Returns:
str | None: the input value if it is not NaN, otherwise None
"""
# logger.debug(f"Input value of: {input_str}")
if check_not_nan(input_str):
return input_str
return None
@@ -512,7 +511,6 @@ def get_config(settings_path: Path | str | None = None) -> Settings:
Returns:
Settings: Pydantic settings object
"""
# logger.debug(f"Creating settings...")
if isinstance(settings_path, str):
settings_path = Path(settings_path)
@@ -566,7 +564,6 @@ def get_config(settings_path: Path | str | None = None) -> Settings:
default_settings = yaml.load(dset, Loader=yaml.Loader)
settings = Settings(**default_settings)
settings.save(settings_path=settings_path)
# logger.debug(f"Using {settings_path} for config file.")
with open(settings_path, "r") as stream:
settings = yaml.load(stream, Loader=yaml.Loader)
return Settings(**settings)
@@ -755,7 +752,6 @@ def setup_lookup(func):
raise ValueError("Could not sanitize dictionary in query. Make sure you parse it first.")
elif v is not None:
sanitized_kwargs[k] = v
# logger.debug(f"sanitized kwargs: {sanitized_kwargs}")
return func(*args, **sanitized_kwargs)
return wrapper
@@ -800,7 +796,6 @@ class Result(BaseModel, arbitrary_types_allowed=True):
logger.error(f"Exception origin: {origin}")
if "unique constraint failed:" in origin:
field = " ".join(origin.split(".")[1:]).replace("_", " ").upper()
# logger.debug(field)
value = f"{field} doesn't have a unique value.\nIt must be changed."
else:
value = f"Got unknown integrity error: {value}"
@@ -844,7 +839,6 @@ class Report(BaseModel):
except AttributeError:
logger.error(f"Problem adding result.")
case Report():
# logger.debug(f"Adding all results in report to new report")
for res in result.results:
logger.info(f"Adding {res} from {result} to results.")
self.results.append(res)
@@ -934,7 +928,7 @@ def check_authorization(func):
"""
def wrapper(*args, **kwargs):
logger.debug(f"Checking authorization")
logger.info(f"Checking authorization")
if is_power_user():
return func(*args, **kwargs)
else:
@@ -957,7 +951,7 @@ def report_result(func):
"""
def wrapper(*args, **kwargs):
logger.debug(f"Report result being called by {func.__name__}")
logger.info(f"Report result being called by {func.__name__}")
output = func(*args, **kwargs)
match output:
case Report():
@@ -970,14 +964,13 @@ def report_result(func):
case _:
report = None
return report
logger.debug(f"Got report: {report}")
logger.info(f"Got report: {report}")
try:
results = report.results
except AttributeError:
logger.error("No results available")
results = []
for iii, result in enumerate(results):
logger.debug(f"Result {iii}: {result}")
try:
dlg = result.report()
dlg.exec()
@@ -990,7 +983,6 @@ def report_result(func):
true_output = true_output[0]
else:
true_output = None
# logger.debug(f"Returning true output: {true_output}")
return true_output
return wrapper