diff --git a/src/submissions/__main__.py b/src/submissions/__main__.py index 39af6f8..9dde891 100644 --- a/src/submissions/__main__.py +++ b/src/submissions/__main__.py @@ -1,6 +1,5 @@ import sys, os from tools import ctx, setup_logger, check_if_app -from backend import scripts # environment variable must be set to enable qtwebengine in network path if check_if_app(): @@ -9,6 +8,7 @@ if check_if_app(): # setup custom logger logger = setup_logger(verbosity=3) +from backend import scripts from PyQt6.QtWidgets import QApplication from frontend.widgets.app import App @@ -25,6 +25,7 @@ def run_startup(): except AttributeError as e: logger.error(f"Couldn't run startup script {script} due to {e}") continue + logger.info(f"Running startup script: {func.__name__}") func(ctx) @@ -40,6 +41,7 @@ def run_teardown(): except AttributeError as e: logger.error(f"Couldn't run teardown script {script} due to {e}") continue + logger.info(f"Running teardown script: {func.__name__}") func(ctx) if __name__ == '__main__': diff --git a/src/submissions/backend/db/__init__.py b/src/submissions/backend/db/__init__.py index 93eb7a6..77c92b0 100644 --- a/src/submissions/backend/db/__init__.py +++ b/src/submissions/backend/db/__init__.py @@ -18,12 +18,8 @@ def set_sqlite_pragma(dbapi_connection, connection_record): connection_record (_type_): _description_ """ cursor = dbapi_connection.cursor() - # print(ctx.database_schema) if ctx.database_schema == "sqlite": execution_phrase = "PRAGMA foreign_keys=ON" - # cursor.execute(execution_phrase) - # elif ctx.database_schema == "mssql+pyodbc": - # execution_phrase = "SET IDENTITY_INSERT dbo._wastewater ON;" else: print("Nothing to execute, returning") cursor.close() @@ -37,12 +33,9 @@ from .models import * def update_log(mapper, connection, target): - # logger.debug("\n\nBefore update\n\n") state = inspect(target) - # logger.debug(state) object_name = state.object.truncated_name() update = dict(user=getuser(), time=datetime.now(), object=object_name, changes=[]) - # logger.debug(update) for attr in state.attrs: hist = attr.load_history() if not hist.has_changes(): @@ -56,24 +49,19 @@ def update_log(mapper, connection, target): continue deleted = [str(item) for item in hist.deleted] change = dict(field=attr.key, added=added, deleted=deleted) - # logger.debug(f"Adding: {pformat(change)}") if added != deleted: try: update['changes'].append(change) except Exception as e: logger.error(f"Something went wrong adding attr: {attr.key}: {e}") continue - # logger.debug(f"Adding to audit logs: {pformat(update)}") if update['changes']: # Note: must use execute as the session will be busy at this point. 
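For reference, the set_sqlite_pragma hook above follows SQLAlchemy's connect-event pattern; a minimal standalone sketch, with an illustrative engine URL and listener name, that executes the pragma on each new SQLite connection:

from sqlalchemy import create_engine, event

engine = create_engine("sqlite:///example.db")  # illustrative URL

@event.listens_for(engine, "connect")
def enable_foreign_keys(dbapi_connection, connection_record):
    # SQLite leaves foreign-key enforcement off by default; turn it on per connection.
    cursor = dbapi_connection.cursor()
    cursor.execute("PRAGMA foreign_keys=ON")
    cursor.close()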
# https://medium.com/@singh.surbhicse/creating-audit-table-to-log-insert-update-and-delete-changes-in-flask-sqlalchemy-f2ca53f7b02f table = AuditLog.__table__ - # logger.debug(f"Adding to {table}") connection.execute(table.insert().values(**update)) - # logger.debug("Here is where I would insert values, if I was able.") else: logger.info(f"No changes detected, not updating logs.") -# if ctx.logging_enabled: event.listen(LogMixin, 'after_update', update_log, propagate=True) event.listen(LogMixin, 'after_insert', update_log, propagate=True) diff --git a/src/submissions/backend/db/models/__init__.py b/src/submissions/backend/db/models/__init__.py index 46f9250..d52ee1b 100644 --- a/src/submissions/backend/db/models/__init__.py +++ b/src/submissions/backend/db/models/__init__.py @@ -3,7 +3,6 @@ Contains all models for sqlalchemy """ from __future__ import annotations import sys, logging - from pandas import DataFrame from sqlalchemy import Column, INTEGER, String, JSON from sqlalchemy.orm import DeclarativeMeta, declarative_base, Query, Session @@ -131,7 +130,6 @@ class BaseClass(Base): search = name.title().replace(" ", "") else: search = name - logger.debug(f"Searching for subclass: {search}") return next((item for item in cls.__subclasses__() if item.__name__ == search), cls) @classmethod @@ -146,9 +144,7 @@ class BaseClass(Base): List[Any]: Results of sqlalchemy query. """ query: Query = cls.__database_session__.query(cls) - # logger.debug(f"Queried model. Now running searches in {kwargs}") for k, v in kwargs.items(): - # logger.debug(f"Running fuzzy search for attribute: {k} with value {v}") # NOTE: Not sure why this is necessary, but it is. search = f"%{v}%" try: @@ -200,9 +196,7 @@ class BaseClass(Base): model = cls if query is None: query: Query = cls.__database_session__.query(model) - # logger.debug(f"Grabbing singles using {model.get_default_info}") singles = model.get_default_info('singles') - # logger.info(f"Querying: {model}, with kwargs: {kwargs}") for k, v in kwargs.items(): logger.info(f"Using key: {k} with value: {v}") try: @@ -227,7 +221,6 @@ class BaseClass(Base): """ Add the object to the database and commit """ - # logger.debug(f"Saving object: {pformat(self.__dict__)}") report = Report() try: self.__database_session__.add(self) diff --git a/src/submissions/backend/db/models/audit.py b/src/submissions/backend/db/models/audit.py index 477af90..59473cd 100644 --- a/src/submissions/backend/db/models/audit.py +++ b/src/submissions/backend/db/models/audit.py @@ -2,7 +2,6 @@ Contains the audit log class and functions. """ from typing import List - from dateutil.parser import parse from sqlalchemy.orm import declarative_base, DeclarativeMeta, Query from . 
import BaseClass @@ -48,32 +47,24 @@ class AuditLog(Base): logger.warning(f"End date with no start date, using Jan 1, 2023") start_date = session.query(cls, func.min(cls.time)).first()[1] if start_date is not None: - # logger.debug(f"Querying with start date: {start_date} and end date: {end_date}") match start_date: case date(): - # logger.debug(f"Lookup BasicSubmission by start_date({start_date})") start_date = start_date.strftime("%Y-%m-%d") case int(): - # logger.debug(f"Lookup BasicSubmission by ordinal start_date {start_date}") start_date = datetime.fromordinal( datetime(1900, 1, 1).toordinal() + start_date - 2).date().strftime("%Y-%m-%d") case _: - # logger.debug(f"Lookup BasicSubmission by parsed str start_date {start_date}") start_date = parse(start_date).strftime("%Y-%m-%d") match end_date: case date() | datetime(): - # logger.debug(f"Lookup BasicSubmission by end_date({end_date})") end_date = end_date + timedelta(days=1) end_date = end_date.strftime("%Y-%m-%d") case int(): - # logger.debug(f"Lookup BasicSubmission by ordinal end_date {end_date}") end_date = datetime.fromordinal(datetime(1900, 1, 1).toordinal() + end_date - 2).date() + timedelta(days=1) end_date = end_date.strftime("%Y-%m-%d") case _: - # logger.debug(f"Lookup BasicSubmission by parsed str end_date {end_date}") end_date = parse(end_date) + timedelta(days=1) end_date = end_date.strftime("%Y-%m-%d") - # logger.debug(f"Compensating for same date by using time") if start_date == end_date: start_date = datetime.strptime(start_date, "%Y-%m-%d").strftime("%Y-%m-%d %H:%M:%S.%f") query = query.filter(cls.time == start_date) diff --git a/src/submissions/backend/db/models/controls.py b/src/submissions/backend/db/models/controls.py index d603580..1420777 100644 --- a/src/submissions/backend/db/models/controls.py +++ b/src/submissions/backend/db/models/controls.py @@ -171,11 +171,9 @@ class Control(BaseClass): match submission_type: case str(): from backend import BasicSubmission, SubmissionType - # logger.debug(f"Lookup controls by SubmissionType str: {submission_type}") query = query.join(BasicSubmission).join(SubmissionType).filter(SubmissionType.name == submission_type) case SubmissionType(): from backend import BasicSubmission - # logger.debug(f"Lookup controls by SubmissionType: {submission_type}") query = query.join(BasicSubmission).filter(BasicSubmission.submission_type_name == submission_type.name) case _: pass @@ -203,31 +201,23 @@ class Control(BaseClass): if start_date is not None: match start_date: case date(): - # logger.debug(f"Lookup control by start date({start_date})") start_date = start_date.strftime("%Y-%m-%d") case int(): - # logger.debug(f"Lookup control by ordinal start date {start_date}") start_date = datetime.fromordinal( datetime(1900, 1, 1).toordinal() + start_date - 2).date().strftime("%Y-%m-%d") case _: - # logger.debug(f"Lookup control with parsed start date {start_date}") start_date = parse(start_date).strftime("%Y-%m-%d") match end_date: case date(): - # logger.debug(f"Lookup control by end date({end_date})") end_date = end_date.strftime("%Y-%m-%d") case int(): - # logger.debug(f"Lookup control by ordinal end date {end_date}") end_date = datetime.fromordinal(datetime(1900, 1, 1).toordinal() + end_date - 2).date().strftime( "%Y-%m-%d") case _: - # logger.debug(f"Lookup control with parsed end date {end_date}") end_date = parse(end_date).strftime("%Y-%m-%d") - # logger.debug(f"Looking up BasicSubmissions from start date: {start_date} and end date: {end_date}") query = 
query.filter(cls.submitted_date.between(start_date, end_date)) match name: case str(): - # logger.debug(f"Lookup control by name {control_name}") query = query.filter(cls.name.startswith(name)) limit = 1 case _: @@ -273,7 +263,6 @@ class Control(BaseClass): except StopIteration as e: raise AttributeError( f"Couldn't find existing class/subclass of {cls} with all attributes:\n{pformat(attrs.keys())}") - # logger.info(f"Recruiting model: {model}") return model @classmethod @@ -343,7 +332,6 @@ class PCRControl(Control): parent.mode_typer.clear() parent.mode_typer.setEnabled(False) report = Report() - # logger.debug(f"Chart settings: {pformat(chart_settings)}") controls = cls.query(submission_type=chart_settings['sub_type'], start_date=chart_settings['start_date'], end_date=chart_settings['end_date']) data = [control.to_sub_dict() for control in controls] @@ -411,21 +399,16 @@ class IridaControl(Control): kraken = self.kraken except TypeError: kraken = {} - # logger.debug("calculating kraken count total to use in percentage") kraken_cnt_total = sum([kraken[item]['kraken_count'] for item in kraken]) - # logger.debug("Creating new kraken.") new_kraken = [dict(name=item, kraken_count=kraken[item]['kraken_count'], kraken_percent="{0:.0%}".format(kraken[item]['kraken_count'] / kraken_cnt_total), target=item in self.controltype.targets) for item in kraken] - # logger.debug(f"New kraken before sort: {new_kraken}") new_kraken = sorted(new_kraken, key=itemgetter('kraken_count'), reverse=True) - # logger.debug("setting targets") if self.controltype.targets: targets = self.controltype.targets else: targets = ["None"] - # logger.debug("constructing output dictionary") output = dict( name=self.name, type=self.controltype.name, @@ -447,7 +430,6 @@ class IridaControl(Control): Returns: List[dict]: list of records """ - # logger.debug("load json string for mode (i.e. contains, matches, kraken2)") try: data = self.__getattribute__(mode) except TypeError: @@ -460,12 +442,10 @@ class IridaControl(Control): else: if consolidate: on_tar = {k: v for k, v in data.items() if k.strip("*") in self.controltype.targets[control_sub_type]} - # logger.debug(f"Consolidating off-targets to: {self.controltype.targets[control_sub_type]}") off_tar = sum(v[f'{mode}_ratio'] for k, v in data.items() if k.strip("*") not in self.controltype.targets[control_sub_type]) on_tar['Off-target'] = {f"{mode}_ratio": off_tar} data = on_tar - # logger.debug("dict keys are genera of bacteria, e.g. 'Streptococcus'") for genus in data: _dict = dict( name=self.name, @@ -473,7 +453,6 @@ class IridaControl(Control): genus=genus, target='Target' if genus.strip("*") in self.controltype.targets[control_sub_type] else "Off-target" ) - # logger.debug("get Target or Off-target of genus") for key in data[genus]: _dict[key] = data[genus][key] yield _dict @@ -487,7 +466,6 @@ class IridaControl(Control): List[str]: List of control mode names. 
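The same start/end-date coercion recurs across the query() methods in this diff (AuditLog, Control, and later BasicSubmission); pulled out on its own it looks roughly like the sketch below, where the function name and example inputs are illustrative:

from datetime import date, datetime
from dateutil.parser import parse

def normalize_query_date(value) -> str:
    # Accepts a date/datetime, an Excel-style serial int, or a parseable string,
    # and returns the "%Y-%m-%d" form the filters expect.
    match value:
        case datetime() | date():
            return value.strftime("%Y-%m-%d")
        case int():
            # Excel serial numbers count from 1900-01-01, hence the -2 adjustment.
            return datetime.fromordinal(
                datetime(1900, 1, 1).toordinal() + value - 2).date().strftime("%Y-%m-%d")
        case _:
            return parse(value).strftime("%Y-%m-%d")

print(normalize_query_date(45000))
print(normalize_query_date("May 5, 2024"))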
""" try: - # logger.debug("Creating a list of JSON columns in _controls table") cols = [item.name for item in list(cls.__table__.columns) if isinstance(item.type, JSON)] except AttributeError as e: logger.error(f"Failed to get available modes from db: {e}") @@ -504,7 +482,6 @@ class IridaControl(Control): """ super().make_parent_buttons(parent=parent) rows = parent.layout.rowCount() - 2 - # logger.debug(f"Parent rows: {rows}") checker = QCheckBox(parent) checker.setChecked(True) checker.setObjectName("irida_check") @@ -539,10 +516,8 @@ class IridaControl(Control): except AttributeError: consolidate = False report = Report() - # logger.debug(f"settings: {pformat(chart_settings)}") controls = cls.query(subtype=chart_settings['sub_type'], start_date=chart_settings['start_date'], end_date=chart_settings['end_date']) - # logger.debug(f"Controls found: {controls}") if not controls: report.add_result(Result(status="Critical", msg="No controls found in given date range.")) return report, None @@ -552,19 +527,16 @@ class IridaControl(Control): control in controls] # NOTE: flatten data to one dimensional list data = [item for sublist in data for item in sublist] - # logger.debug(f"Control objects going into df conversion: {pformat(data)}") if not data: report.add_result(Result(status="Critical", msg="No data found for controls in given date range.")) return report, None df = cls.convert_data_list_to_df(input_df=data, sub_mode=chart_settings['sub_mode']) - # logger.debug(f"Chart df: \n {df}") if chart_settings['sub_mode'] is None: title = chart_settings['sub_mode'] else: title = f"{chart_settings['mode']} - {chart_settings['sub_mode']}" # NOTE: send dataframe to chart maker df, modes = cls.prep_df(ctx=ctx, df=df) - # logger.debug(f"prepped df: \n {df}") fig = IridaFigure(df=df, ytitle=title, modes=modes, parent=parent, settings=chart_settings) return report, fig @@ -581,9 +553,7 @@ class IridaControl(Control): Returns: DataFrame: dataframe of controls """ - # logger.debug(f"Subtype: {sub_mode}") df = DataFrame.from_records(input_df) - # logger.debug(f"DF from records: {df}") safe = ['name', 'submitted_date', 'genus', 'target'] for column in df.columns: if column not in safe: @@ -636,7 +606,6 @@ class IridaControl(Control): Returns: DataFrame: output dataframe with dates incremented. """ - # logger.debug(f"Unique items: {df['name'].unique()}") # NOTE: get submitted dates for each control dict_list = [dict(name=item, date=df[df.name == item].iloc[0]['submitted_date']) for item in sorted(df['name'].unique())] @@ -664,7 +633,6 @@ class IridaControl(Control): check = False previous_dates.add(item['date']) if check: - # logger.debug(f"We found one! 
Increment date!\n\t{item['date']} to {item['date'] + timedelta(days=1)}") # NOTE: get df locations where name == item name mask = df['name'] == item['name'] # NOTE: increment date in dataframe @@ -673,15 +641,12 @@ class IridaControl(Control): passed = False else: passed = True - # logger.debug(f"\n\tCurrent date: {item['date']}\n\tPrevious dates:{previous_dates}") - # logger.debug(f"DF: {type(df)}, previous_dates: {type(previous_dates)}") # NOTE: if run didn't lead to changed date, return values if passed: - # logger.debug(f"Date check passed, returning.") return df, previous_dates # NOTE: if date was changed, rerun with new date else: - # logger.warning(f"Date check failed, running recursion") + logger.warning(f"Date check failed, running recursion") df, previous_dates = cls.check_date(df, item, previous_dates) return df, previous_dates @@ -708,13 +673,10 @@ class IridaControl(Control): # NOTE: sort by and exclude from sorts = ['submitted_date', "target", "genus"] exclude = ['name', 'genera'] - # logger.debug(df.columns) modes = [item for item in df.columns if item not in sorts and item not in exclude] - # logger.debug(f"Modes coming out: {modes}") # NOTE: Set descending for any columns that have "{mode}" in the header. ascending = [False if item == "target" else True for item in sorts] df = df.sort_values(by=sorts, ascending=ascending) - # logger.debug(df[df.isna().any(axis=1)]) # NOTE: actual chart construction is done by return df, modes diff --git a/src/submissions/backend/db/models/kits.py b/src/submissions/backend/db/models/kits.py index 0c45d4f..b01c490 100644 --- a/src/submissions/backend/db/models/kits.py +++ b/src/submissions/backend/db/models/kits.py @@ -17,7 +17,6 @@ from io import BytesIO logger = logging.getLogger(f'submissions.{__name__}') -# logger.debug("Table for ReagentType/Reagent relations") reagentroles_reagents = Table( "_reagentroles_reagents", Base.metadata, @@ -26,7 +25,6 @@ reagentroles_reagents = Table( extend_existing=True ) -# logger.debug("Table for EquipmentRole/Equipment relations") equipmentroles_equipment = Table( "_equipmentroles_equipment", Base.metadata, @@ -35,7 +33,6 @@ equipmentroles_equipment = Table( extend_existing=True ) -# logger.debug("Table for Equipment/Process relations") equipment_processes = Table( "_equipment_processes", Base.metadata, @@ -44,7 +41,6 @@ equipment_processes = Table( extend_existing=True ) -# logger.debug("Table for EquipmentRole/Process relations") equipmentroles_processes = Table( "_equipmentroles_processes", Base.metadata, @@ -53,7 +49,6 @@ equipmentroles_processes = Table( extend_existing=True ) -# logger.debug("Table for SubmissionType/Process relations") submissiontypes_processes = Table( "_submissiontypes_processes", Base.metadata, @@ -62,7 +57,6 @@ submissiontypes_processes = Table( extend_existing=True ) -# logger.debug("Table for KitType/Process relations") kittypes_processes = Table( "_kittypes_processes", Base.metadata, @@ -71,7 +65,6 @@ kittypes_processes = Table( extend_existing=True ) -# logger.debug("Table for TipRole/Tips relations") tiproles_tips = Table( "_tiproles_tips", Base.metadata, @@ -80,7 +73,6 @@ tiproles_tips = Table( extend_existing=True ) -# logger.debug("Table for Process/TipRole relations") process_tiprole = Table( "_process_tiprole", Base.metadata, @@ -89,7 +81,6 @@ process_tiprole = Table( extend_existing=True ) -# logger.debug("Table for Equipment/Tips relations") equipment_tips = Table( "_equipment_tips", Base.metadata, @@ -116,7 +107,7 @@ class KitType(BaseClass): cascade="all, 
delete-orphan", ) - # creator function: https://stackoverflow.com/questions/11091491/keyerror-when-adding-objects-to-sqlalchemy-association-object/11116291#11116291 + # NOTE: creator function: https://stackoverflow.com/questions/11091491/keyerror-when-adding-objects-to-sqlalchemy-association-object/11116291#11116291 reagent_roles = association_proxy("kit_reagentrole_associations", "reagent_role", creator=lambda RT: KitTypeReagentRoleAssociation( reagent_role=RT)) #: Association proxy to KitTypeReagentRoleAssociation @@ -152,18 +143,14 @@ class KitType(BaseClass): """ match submission_type: case SubmissionType(): - # logger.debug(f"Getting reagents by SubmissionType {submission_type}") relevant_associations = [item for item in self.kit_reagentrole_associations if item.submission_type == submission_type] case str(): - # logger.debug(f"Getting reagents by str {submission_type}") relevant_associations = [item for item in self.kit_reagentrole_associations if item.submission_type.name == submission_type] case _: - # logger.debug(f"Getting reagents") relevant_associations = [item for item in self.kit_reagentrole_associations] if required: - # logger.debug(f"Filtering by required.") return (item.reagent_role for item in relevant_associations if item.required == 1) else: return (item.reagent_role for item in relevant_associations) @@ -181,18 +168,14 @@ class KitType(BaseClass): # NOTE: Account for submission_type variable type. match submission_type: case str(): - # logger.debug(f"Constructing xl map with str {submission_type}") assocs = [item for item in self.kit_reagentrole_associations if item.submission_type.name == submission_type] case SubmissionType(): - # logger.debug(f"Constructing xl map with SubmissionType {submission_type}") assocs = [item for item in self.kit_reagentrole_associations if item.submission_type == submission_type] case _: raise ValueError(f"Wrong variable type: {type(submission_type)} used!") - # logger.debug("Get all KitTypeReagentTypeAssociation for SubmissionType") for assoc in assocs: try: - # logger.debug(f"Yielding: {assoc.reagent_role.name}, {assoc.uses}") yield assoc.reagent_role.name, assoc.uses except TypeError: continue @@ -220,27 +203,22 @@ class KitType(BaseClass): query: Query = cls.__database_session__.query(cls) match used_for: case str(): - # logger.debug(f"Looking up kit type by used_for str: {used_for}") query = query.filter(cls.used_for.any(name=used_for)) case SubmissionType(): - # logger.debug(f"Looking up kit type by used_for SubmissionType: {used_for}") query = query.filter(cls.used_for.contains(used_for)) case _: pass match name: case str(): - # logger.debug(f"Looking up kit type by name str: {name}") query = query.filter(cls.name == name) limit = 1 case _: pass match id: case int(): - # logger.debug(f"Looking up kit type by id int: {id}") query = query.filter(cls.id == id) limit = 1 case str(): - # logger.debug(f"Looking up kit type by id str: {id}") query = query.filter(cls.id == int(id)) limit = 1 case _: @@ -262,10 +240,7 @@ class KitType(BaseClass): dict: Dictionary containing relevant info for SubmissionType construction """ base_dict = dict(name=self.name, reagent_roles=[], equipment_roles=[]) - # base_dict['reagent roles'] = [] - # base_dict['equipment roles'] = [] for k, v in self.construct_xl_map_for_use(submission_type=submission_type): - # logger.debug(f"Value: {v}") try: assoc = next(item for item in self.kit_reagentrole_associations if item.reagent_role.name == k) except StopIteration as e: @@ -280,10 +255,8 @@ class 
KitType(BaseClass): except StopIteration: continue for kk, vv in assoc.to_export_dict(extraction_kit=self).items(): - # logger.debug(f"{kk}:{vv}") v[kk] = vv base_dict['equipment_roles'].append(v) - # logger.debug(f"KT returning {base_dict}") return base_dict @@ -347,28 +320,19 @@ class ReagentRole(BaseClass): else: match kit_type: case str(): - # logger.debug(f"Lookup ReagentType by kittype str {kit_type}") kit_type = KitType.query(name=kit_type) case _: pass match reagent: case str(): - # logger.debug(f"Lookup ReagentType by reagent str {reagent}") reagent = Reagent.query(lot=reagent) case _: pass assert reagent.role - # logger.debug(f"Looking up reagent type for {type(kit_type)} {kit_type} and {type(reagent)} {reagent}") - # logger.debug(f"Kit reagent types: {kit_type.reagent_types}") result = set(kit_type.reagent_roles).intersection(reagent.role) - # logger.debug(f"Result: {result}") - # try: return next((item for item in result), None) - # except IndexError: - # return None match name: case str(): - # logger.debug(f"Looking up reagent type by name str: {name}") query = query.filter(cls.name == name) limit = 1 case _: @@ -457,7 +421,6 @@ class Reagent(BaseClass, LogMixin): rtype = reagent_role.name.replace("_", " ") except AttributeError: rtype = "Unknown" - # logger.debug(f"Role for {self.name}: {rtype}") # NOTE: Calculate expiry with EOL from ReagentType try: place_holder = self.expiry + reagent_role.eol_ext @@ -493,14 +456,11 @@ class Reagent(BaseClass, LogMixin): Report: Result of operation """ report = Report() - # logger.debug(f"Attempting update of last used reagent type at intersection of ({self}), ({kit})") rt = ReagentRole.query(kit_type=kit, reagent=self, limit=1) if rt is not None: - # logger.debug(f"got reagenttype {rt}") assoc = KitTypeReagentRoleAssociation.query(kit_type=kit, reagent_role=rt) if assoc is not None: if assoc.last_used != self.lot: - # logger.debug(f"Updating {assoc} last used to {self.lot}") assoc.last_used = self.lot result = assoc.save() report.add_result(result) @@ -539,23 +499,19 @@ class Reagent(BaseClass, LogMixin): pass match role: case str(): - # logger.debug(f"Looking up reagents by reagent type str: {reagent_type}") query = query.join(cls.role).filter(ReagentRole.name == role) case ReagentRole(): - # logger.debug(f"Looking up reagents by reagent type ReagentType: {reagent_type}") query = query.filter(cls.role.contains(role)) case _: pass match name: case str(): - # logger.debug(f"Looking up reagent by name str: {name}") # NOTE: Not limited due to multiple reagents having same name. query = query.filter(cls.name == name) case _: pass match lot: case str(): - # logger.debug(f"Looking up reagent by lot number str: {lot}") query = query.filter(cls.lot == lot) # NOTE: In this case limit number returned. 
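The ReagentRole lookup above resolves the role shared by a kit and a reagent with a set intersection and a next() with a None default; stripped of the ORM it is just (role names invented):

def resolve_common_role(kit_roles, reagent_roles):
    common = set(kit_roles).intersection(reagent_roles)
    return next((item for item in common), None)

assert resolve_common_role(["Lysis Buffer", "Wash Buffer"], ["Wash Buffer"]) == "Wash Buffer"
assert resolve_common_role(["Lysis Buffer"], ["Elution Buffer"]) is None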
limit = 1 @@ -579,7 +535,6 @@ class Reagent(BaseClass, LogMixin): case "expiry": if isinstance(value, str): field_value = datetime.strptime(value, "%Y-%m-%d") - # field_value.replace(tzinfo=timezone) elif isinstance(value, date): field_value = datetime.combine(value, datetime.min.time()) else: @@ -589,7 +544,6 @@ class Reagent(BaseClass, LogMixin): continue case _: field_value = value - # logger.debug(f"Setting reagent {key} to {field_value}") self.__setattr__(key, field_value) self.save() @@ -634,25 +588,19 @@ class Discount(BaseClass): query: Query = cls.__database_session__.query(cls) match organization: case Organization(): - # logger.debug(f"Looking up discount with organization Organization: {organization}") query = query.filter(cls.client == Organization) case str(): - # logger.debug(f"Looking up discount with organization str: {organization}") query = query.join(Organization).filter(Organization.name == organization) case int(): - # logger.debug(f"Looking up discount with organization id: {organization}") query = query.join(Organization).filter(Organization.id == organization) case _: pass match kit_type: case KitType(): - # logger.debug(f"Looking up discount with kit type KitType: {kit_type}") query = query.filter(cls.kit == kit_type) case str(): - # logger.debug(f"Looking up discount with kit type str: {kit_type}") query = query.join(KitType).filter(KitType.name == kit_type) case int(): - # logger.debug(f"Looking up discount with kit type id: {kit_type}") query = query.join(KitType).filter(KitType.id == kit_type) case _: pass @@ -723,7 +671,6 @@ class SubmissionType(BaseClass): return submission_type.template_file def get_template_file_sheets(self) -> List[str]: - logger.debug(f"Submission type to get sheets for: {self.name}") """ Gets names of sheet in the stored blank form. 
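Reagent.set_attribute's expiry branch above accepts a string, a date, or something already usable; isolated below, with the datetime check added first (datetime subclasses date) and the pass-through fallback being an assumption:

from datetime import date, datetime

def coerce_expiry(value):
    if isinstance(value, str):
        return datetime.strptime(value, "%Y-%m-%d")
    if isinstance(value, datetime):
        return value
    if isinstance(value, date):
        return datetime.combine(value, datetime.min.time())
    return value  # assumed pass-through for anything else

print(coerce_expiry("2025-01-31"))
print(coerce_expiry(date(2025, 1, 31)))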
@@ -768,7 +715,6 @@ class SubmissionType(BaseClass): dict: Map of locations """ info = {k: v for k, v in self.info_map.items() if k != "custom"} - logger.debug(f"Info map: {info}") match mode: case "read": output = {k: v[mode] for k, v in info.items() if v[mode]} @@ -844,11 +790,9 @@ class SubmissionType(BaseClass): """ match equipment_role: case str(): - # logger.debug(f"Getting processes for equipmentrole str {equipment_role}") relevant = [item.get_all_processes(kit) for item in self.submissiontype_equipmentrole_associations if item.equipment_role.name == equipment_role] case EquipmentRole(): - # logger.debug(f"Getting processes for equipmentrole EquipmentRole {equipment_role}") relevant = [item.get_all_processes(kit) for item in self.submissiontype_equipmentrole_associations if item.equipment_role == equipment_role] case _: @@ -886,14 +830,12 @@ class SubmissionType(BaseClass): query: Query = cls.__database_session__.query(cls) match name: case str(): - # logger.debug(f"Looking up submission type by name str: {name}") query = query.filter(cls.name == name) limit = 1 case _: pass match key: case str(): - # logger.debug(f"Looking up submission type by info-map key str: {key}") query = query.filter(cls.info_map.op('->')(key) is not None) case _: pass @@ -946,7 +888,6 @@ class SubmissionType(BaseClass): import_dict = yaml.load(stream=f, Loader=yaml.Loader) else: raise Exception(f"Filetype {filepath.suffix} not supported.") - # logger.debug(pformat(import_dict)) try: submission_type = cls.query(name=import_dict['name']) except KeyError: @@ -1076,23 +1017,17 @@ class SubmissionTypeKitTypeAssociation(BaseClass): query: Query = cls.__database_session__.query(cls) match submission_type: case SubmissionType(): - # logger.debug(f"Looking up {cls.__name__} by SubmissionType {submission_type}") query = query.filter(cls.submission_type == submission_type) case str(): - # logger.debug(f"Looking up {cls.__name__} by name {submission_type}") query = query.join(SubmissionType).filter(SubmissionType.name == submission_type) case int(): - # logger.debug(f"Looking up {cls.__name__} by id {submission_type}") query = query.join(SubmissionType).filter(SubmissionType.id == submission_type) match kit_type: case KitType(): - # logger.debug(f"Looking up {cls.__name__} by KitType {kit_type}") query = query.filter(cls.kit_type == kit_type) case str(): - # logger.debug(f"Looking up {cls.__name__} by name {kit_type}") query = query.join(KitType).filter(KitType.name == kit_type) case int(): - # logger.debug(f"Looking up {cls.__name__} by id {kit_type}") query = query.join(KitType).filter(KitType.id == kit_type) limit = query.count() return cls.execute_query(query=query, limit=limit) @@ -1107,7 +1042,6 @@ class SubmissionTypeKitTypeAssociation(BaseClass): exclude = ['_sa_instance_state', 'submission_types_id', 'kits_id', 'submission_type', 'kit_type'] base_dict = {k: v for k, v in self.__dict__.items() if k not in exclude} base_dict['kit_type'] = self.kit_type.to_export_dict(submission_type=self.submission_type) - # logger.debug(f"STKTA returning: {base_dict}") return base_dict @@ -1128,10 +1062,11 @@ class KitTypeReagentRoleAssociation(BaseClass): kit_type = relationship(KitType, back_populates="kit_reagentrole_associations") #: relationship to associated KitType - # reference to the "ReagentType" object + # NOTE: reference to the "ReagentType" object reagent_role = relationship(ReagentRole, back_populates="reagentrole_kit_associations") #: relationship to associated ReagentType + # NOTE: reference to the 
"SubmissionType" object submission_type = relationship(SubmissionType, back_populates="submissiontype_kit_rt_associations") #: relationship to associated SubmissionType @@ -1203,19 +1138,15 @@ class KitTypeReagentRoleAssociation(BaseClass): query: Query = cls.__database_session__.query(cls) match kit_type: case KitType(): - # logger.debug(f"Lookup KitTypeReagentTypeAssociation by kit_type KitType {kit_type}") query = query.filter(cls.kit_type == kit_type) case str(): - # logger.debug(f"Lookup KitTypeReagentTypeAssociation by kit_type str {kit_type}") query = query.join(KitType).filter(KitType.name == kit_type) case _: pass match reagent_role: case ReagentRole(): - # logger.debug(f"Lookup KitTypeReagentTypeAssociation by reagent_type ReagentType {reagent_type}") query = query.filter(cls.reagent_role == reagent_role) case str(): - # logger.debug(f"Lookup KitTypeReagentTypeAssociation by reagent_type ReagentType {reagent_type}") query = query.join(ReagentRole).filter(ReagentRole.name == reagent_role) case _: pass @@ -1242,7 +1173,6 @@ class KitTypeReagentRoleAssociation(BaseClass): Returns: Generator: Generates of reagents. """ - # logger.debug(f"Attempting lookup of reagents by type: {reagent.type}") reagents = self.reagent_role.instances try: regex = self.uses['exclude_regex'] @@ -1309,7 +1239,6 @@ class SubmissionReagentAssociation(BaseClass): query: Query = cls.__database_session__.query(cls) match reagent: case Reagent() | str(): - # logger.debug(f"Lookup SubmissionReagentAssociation by reagent Reagent {reagent}") if isinstance(reagent, str): reagent = Reagent.query(lot=reagent) query = query.filter(cls.reagent == reagent) @@ -1319,10 +1248,8 @@ class SubmissionReagentAssociation(BaseClass): case BasicSubmission() | str(): if isinstance(submission, str): submission = BasicSubmission.query(rsl_plate_num=submission) - # logger.debug(f"Lookup SubmissionReagentAssociation by submission BasicSubmission {submission}") query = query.filter(cls.submission == submission) case int(): - # logger.debug(f"Lookup SubmissionReagentAssociation by submission id {submission}") submission = BasicSubmission.query(id=submission) query = query.join(BasicSubmission).filter(BasicSubmission.id == submission) case _: @@ -1439,21 +1366,18 @@ class Equipment(BaseClass, LogMixin): query = cls.__database_session__.query(cls) match name: case str(): - # logger.debug(f"Lookup Equipment by name str {name}") query = query.filter(cls.name == name) limit = 1 case _: pass match nickname: case str(): - # logger.debug(f"Lookup Equipment by nickname str {nickname}") query = query.filter(cls.nickname == nickname) limit = 1 case _: pass match asset_number: case str(): - # logger.debug(f"Lookup Equipment by asset_number str {asset_number}") query = query.filter(cls.asset_number == asset_number) limit = 1 case _: @@ -1569,11 +1493,9 @@ class EquipmentRole(BaseClass): PydEquipmentRole: This EquipmentRole as PydEquipmentRole """ from backend.validators.pydant import PydEquipmentRole - # logger.debug("Creating list of PydEquipment in this role") equipment = [item.to_pydantic(submission_type=submission_type, extraction_kit=extraction_kit) for item in self.instances] pyd_dict = self.to_dict() - # logger.debug("Creating list of Processes in this role") pyd_dict['processes'] = self.get_processes(submission_type=submission_type, extraction_kit=extraction_kit) return PydEquipmentRole(equipment=equipment, **pyd_dict) @@ -1595,14 +1517,12 @@ class EquipmentRole(BaseClass): query = cls.__database_session__.query(cls) match id: case int(): - # 
logger.debug(f"Lookup EquipmentRole by id {id}") query = query.filter(cls.id == id) limit = 1 case _: pass match name: case str(): - # logger.debug(f"Lookup EquipmentRole by name str {name}") query = query.filter(cls.name == name) limit = 1 case _: @@ -1622,7 +1542,6 @@ class EquipmentRole(BaseClass): List[Process]: List of processes """ if isinstance(submission_type, str): - # logger.debug(f"Checking if str {submission_type} exists") submission_type = SubmissionType.query(name=submission_type) if isinstance(extraction_kit, str): extraction_kit = KitType.query(name=extraction_kit) @@ -1808,7 +1727,6 @@ class Process(BaseClass): query = cls.__database_session__.query(cls) match name: case str(): - # logger.debug(f"Lookup Process with name str {name}") query = query.filter(cls.name == name) limit = 1 case _: @@ -1892,13 +1810,11 @@ class Tips(BaseClass, LogMixin): query = cls.__database_session__.query(cls) match name: case str(): - # logger.debug(f"Lookup Equipment by name str {name}") query = query.filter(cls.name == name) case _: pass match lot: case str(): - # logger.debug(f"Lookup Equipment by nickname str {nickname}") query = query.filter(cls.lot == lot) limit = 1 case _: diff --git a/src/submissions/backend/db/models/organizations.py b/src/submissions/backend/db/models/organizations.py index 0de7736..4f3f191 100644 --- a/src/submissions/backend/db/models/organizations.py +++ b/src/submissions/backend/db/models/organizations.py @@ -65,7 +65,6 @@ class Organization(BaseClass): pass match name: case str(): - # logger.debug(f"Looking up organization with name starting with: {name}") query = query.filter(cls.name.startswith(name)) limit = 1 case _: @@ -159,21 +158,18 @@ class Contact(BaseClass): query: Query = cls.__database_session__.query(cls) match name: case str(): - # logger.debug(f"Looking up contact with name: {name}") query = query.filter(cls.name == name.title()) limit = 1 case _: pass match email: case str(): - # logger.debug(f"Looking up contact with email: {name}") query = query.filter(cls.email == email) limit = 1 case _: pass match phone: case str(): - # logger.debug(f"Looking up contact with phone: {name}") query = query.filter(cls.phone == phone) limit = 1 case _: diff --git a/src/submissions/backend/db/models/submissions.py b/src/submissions/backend/db/models/submissions.py index 19e74ce..8841441 100644 --- a/src/submissions/backend/db/models/submissions.py +++ b/src/submissions/backend/db/models/submissions.py @@ -10,7 +10,7 @@ from tempfile import TemporaryDirectory, TemporaryFile from operator import itemgetter from pprint import pformat from . 
import BaseClass, Reagent, SubmissionType, KitType, Organization, Contact, LogMixin -from sqlalchemy import Column, String, TIMESTAMP, INTEGER, ForeignKey, JSON, FLOAT, case, event, inspect, func +from sqlalchemy import Column, String, TIMESTAMP, INTEGER, ForeignKey, JSON, FLOAT, case, func from sqlalchemy.orm import relationship, validates, Query from sqlalchemy.orm.attributes import flag_modified from sqlalchemy.ext.associationproxy import association_proxy @@ -20,7 +20,7 @@ from sqlite3 import OperationalError as SQLOperationalError, IntegrityError as S from openpyxl import Workbook from openpyxl.drawing.image import Image as OpenpyxlImage from tools import row_map, setup_lookup, jinja_template_loading, rreplace, row_keys, check_key_or_attr, Result, Report, \ - report_result, create_holidays_for_year, ctx + report_result, create_holidays_for_year from datetime import datetime, date, timedelta from typing import List, Any, Tuple, Literal, Generator from dateutil.parser import parse @@ -51,7 +51,6 @@ class BasicSubmission(BaseClass, LogMixin): submission_type_name = Column(String, ForeignKey("_submissiontype.name", ondelete="SET NULL", name="fk_BS_subtype_name")) #: name of joined submission type technician = Column(String(64)) #: initials of processing tech(s) - # Move this into custom types? reagents_id = Column(String, ForeignKey("_reagent.id", ondelete="SET NULL", name="fk_BS_reagents_id")) #: id of used reagents extraction_info = Column(JSON) #: unstructured output from the extraction table logger. @@ -122,7 +121,6 @@ class BasicSubmission(BaseClass, LogMixin): } def __repr__(self) -> str: - submission_type = self.submission_type or "Basic" return f"" @classmethod @@ -194,7 +192,6 @@ class BasicSubmission(BaseClass, LogMixin): output['submission_type'] = st.name for k, v in st.defaults.items(): if args and k not in args: - # logger.debug(f"Don't want {k}") continue else: match v: @@ -223,7 +220,6 @@ class BasicSubmission(BaseClass, LogMixin): Returns: SubmissionType: SubmissionType with name equal sub_type or this polymorphic identity if sub_type is None. 
""" - # logger.debug(f"Running search for {sub_type}") if isinstance(sub_type, dict): try: sub_type = sub_type['value'] @@ -267,7 +263,6 @@ class BasicSubmission(BaseClass, LogMixin): field = self.__getattribute__(name) except AttributeError: return None - # assert isinstance(field, list) for item in field: if extra: yield item.to_sub_dict(extra) @@ -286,7 +281,6 @@ class BasicSubmission(BaseClass, LogMixin): dict: dictionary used in submissions summary and details """ # NOTE: get lab from nested organization object - # logger.debug(f"Converting {self.rsl_plate_num} to dict...") try: sub_lab = self.submitting_lab.name except AttributeError: @@ -319,7 +313,6 @@ class BasicSubmission(BaseClass, LogMixin): if report: return output if full_data: - # logger.debug(f"Attempting reagents.") try: reagents = [item.to_sub_dict(extraction_kit=self.extraction_kit) for item in self.submission_reagent_associations] @@ -331,20 +324,16 @@ class BasicSubmission(BaseClass, LogMixin): if k == 'info': continue if not any([item['role'] == k for item in reagents]): - # expiry = date(year=1970, month=1, day=1) expiry = "NA" reagents.append( dict(role=k, name="Not Applicable", lot="NA", expiry=expiry, missing=True)) - # logger.debug(f"Running samples.") samples = self.generate_associations(name="submission_sample_associations") - # logger.debug("Running equipment") equipment = self.generate_associations(name="submission_equipment_associations") tips = self.generate_associations(name="submission_tips_associations") cost_centre = self.cost_centre custom = self.custom controls = [item.to_sub_dict() for item in self.controls] - else: reagents = None samples = None @@ -353,7 +342,6 @@ class BasicSubmission(BaseClass, LogMixin): cost_centre = None custom = None controls = None - # logger.debug("Getting comments") try: comments = self.comment except Exception as e: @@ -362,7 +350,6 @@ class BasicSubmission(BaseClass, LogMixin): try: contact = self.contact.name except AttributeError as e: - # logger.error(f"Problem setting contact: {e}") contact = "NA" try: contact_phone = self.contact.phone @@ -378,7 +365,6 @@ class BasicSubmission(BaseClass, LogMixin): output["tips"] = tips output["cost_centre"] = cost_centre output["signed_by"] = self.signed_by - # logger.debug(f"Setting contact to: {contact} of type: {type(contact)}") output["contact"] = contact output["contact_phone"] = contact_phone output["custom"] = custom @@ -394,7 +380,6 @@ class BasicSubmission(BaseClass, LogMixin): int: Number of unique columns. """ columns = set([assoc.column for assoc in self.submission_sample_associations]) - # logger.debug(f"Here are the columns for {self.rsl_plate_num}: {columns}") return len(columns) def calculate_base_cost(self): @@ -410,7 +395,6 @@ class BasicSubmission(BaseClass, LogMixin): assoc = next((item for item in self.extraction_kit.kit_submissiontype_associations if item.submission_type == self.submission_type), None) - # logger.debug(f"Came up with association: {assoc}") # NOTE: If every individual cost is 0 this is probably an old plate. 
if all(item == 0.0 for item in [assoc.constant_cost, assoc.mutable_cost_column, assoc.mutable_cost_sample]): try: @@ -486,15 +470,11 @@ class BasicSubmission(BaseClass, LogMixin): Returns: pd.DataFrame: Pandas Dataframe of all relevant submissions """ - # logger.debug(f"Querying Type: {submission_type}") - # logger.debug(f"Using limit: {limit}") # NOTE: use lookup function to create list of dicts subs = [item.to_dict() for item in cls.query(submission_type=submission_type, limit=limit, chronologic=chronologic, page=page, page_size=page_size)] - # logger.debug(f"Got {len(subs)} submissions.") df = pd.DataFrame.from_records(subs) - # logger.debug(f"Column names: {df.columns}") # NOTE: Exclude sub information exclude = ['controls', 'extraction_info', 'pcr_info', 'comment', 'comments', 'samples', 'reagents', 'equipment', 'gel_info', 'gel_image', 'dna_core_submission_number', 'gel_controls', @@ -521,25 +501,18 @@ class BasicSubmission(BaseClass, LogMixin): """ match key: case "extraction_kit": - # logger.debug(f"Looking up kit {value}") field_value = KitType.query(name=value) - # logger.debug(f"Got {field_value} for kit {value}") case "submitting_lab": - # logger.debug(f"Looking up organization: {value}") field_value = Organization.query(name=value) - # logger.debug(f"Got {field_value} for organization {value}") case "contact": field_value = Contact.query(name=value) case "samples": for sample in value: - # logger.debug(f"Parsing {sample} to sql.") sample, _ = sample.to_sql(submission=self) return case "reagents": - # logger.debug(f"Reagents coming into SQL: {value}") field_value = [reagent['value'].to_sql()[0] if isinstance(reagent, dict) else reagent.to_sql()[0] for reagent in value] - # logger.debug(f"Reagents coming out of SQL: {field_value}") case "submission_type": field_value = SubmissionType.query(name=value) case "sample_count": @@ -554,9 +527,8 @@ class BasicSubmission(BaseClass, LogMixin): case "custom" | "source_plates": existing = value case _: - # logger.debug(f"Setting JSON attribute.") existing = self.__getattribute__(key) - if value is None or value in ['', 'null']: + if value in ['', 'null', None]: logger.error(f"No value given, not setting.") return if existing is None: @@ -582,7 +554,6 @@ class BasicSubmission(BaseClass, LogMixin): # NOTE: insert into field current = self.__getattribute__(key) if field_value and current != field_value: - logger.debug(f"Updated value: {key}: {current} to {field_value}") try: self.__setattr__(key, field_value) except AttributeError as e: @@ -635,14 +606,11 @@ class BasicSubmission(BaseClass, LogMixin): """ from backend.validators import PydSubmission dicto = self.to_dict(full_data=True, backup=backup) - # logger.debug("To dict complete") new_dict = {} for key, value in dicto.items(): - # logger.debug(f"Checking {key}") missing = value in ['', 'None', None] match key: case "reagents": - # new_dict[key] = [PydReagent(**reagent) for reagent in value] field_value = [item.to_pydantic(extraction_kit=self.extraction_kit) for item in self.submission_reagent_associations] case "samples": field_value = [item.to_pydantic() for item in self.submission_sample_associations] @@ -661,10 +629,7 @@ class BasicSubmission(BaseClass, LogMixin): case "plate_number": key = 'rsl_plate_num' field_value = dict(value=self.rsl_plate_num, missing=missing) - # continue case "submitter_plate_number": - # new_dict['submitter_plate_num'] = dict(value=self.submitter_plate_num, missing=missing) - # continue key = "submitter_plate_num" field_value = 
dict(value=self.submitter_plate_num, missing=missing) case "id": @@ -673,15 +638,11 @@ class BasicSubmission(BaseClass, LogMixin): try: key = key.lower().replace(" ", "_") field_value = dict(value=self.__getattribute__(key), missing=missing) - # new_dict[key.lower().replace(" ", "_")] = dict(value=self.__getattribute__(key), missing=missing) except AttributeError: logger.error(f"{key} is not available in {self}") continue - logger.debug(f"Setting dict {key}") new_dict[key] = field_value - # logger.debug(f"{key} complete after {time()-start}") new_dict['filepath'] = Path(tempfile.TemporaryFile().name) - # logger.debug("Done converting fields.") return PydSubmission(**new_dict) def save(self, original: bool = True): @@ -691,7 +652,6 @@ class BasicSubmission(BaseClass, LogMixin): Args: original (bool, optional): Is this the first save. Defaults to True. """ - # logger.debug("Saving submission.") if original: self.uploaded_by = getuser() return super().save() @@ -707,15 +667,13 @@ class BasicSubmission(BaseClass, LogMixin): Returns: str: _description_ """ - # logger.debug(f"Attempting to get regex for {cls.__mapper_args__['polymorphic_identity']}") - logger.debug(f"Attempting to get regex for {submission_type}") try: return cls.get_submission_type(submission_type).defaults['regex'] except AttributeError as e: logger.error(f"Couldn't get submission type for {cls.__mapper_args__['polymorphic_identity']}") return "" - # Polymorphic functions + # NOTE: Polymorphic functions @classmethod def construct_regex(cls) -> re.Pattern: @@ -743,7 +701,6 @@ class BasicSubmission(BaseClass, LogMixin): Returns: _type_: Subclass of interest. """ - # logger.debug(f"Controlling for dict value") if isinstance(polymorphic_identity, dict): polymorphic_identity = polymorphic_identity['value'] if isinstance(polymorphic_identity, SubmissionType): @@ -766,10 +723,9 @@ class BasicSubmission(BaseClass, LogMixin): except StopIteration as e: raise AttributeError( f"Couldn't find existing class/subclass of {cls} with all attributes:\n{pformat(attrs.keys())}") - # logger.info(f"Recruiting model: {model}") return model - # Child class custom functions + # NOTE: Child class custom functions @classmethod def custom_info_parser(cls, input_dict: dict, xl: Workbook | None = None, custom_fields: dict = {}) -> dict: @@ -784,12 +740,8 @@ class BasicSubmission(BaseClass, LogMixin): Returns: dict: Updated sample dictionary """ - logger.info(f"Calling {cls.__mapper_args__['polymorphic_identity']} info parser.") - # logger.debug(f"Input dict: {input_dict}") - # logger.debug(f"Custom fields: {custom_fields}") input_dict['custom'] = {} for k, v in custom_fields.items(): - # logger.debug(f"Attempting custom parse of {k}: {v}") match v['type']: case "exempt": continue @@ -819,7 +771,6 @@ class BasicSubmission(BaseClass, LogMixin): Returns: dict: Updated sample dictionary """ - logger.info(f"Called {cls.__mapper_args__['polymorphic_identity']} sample parser") return input_dict @classmethod @@ -836,7 +787,6 @@ class BasicSubmission(BaseClass, LogMixin): Returns: dict: Updated parser product. 
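find_polymorphic_subclass recruits the subclass whose attributes cover the supplied kwargs; a rough standalone reading of that idea, where the hasattr() test stands in for the elided filter:

def find_subclass_with_attrs(base: type, attrs: dict) -> type:
    if not attrs:
        return base
    try:
        return next(sub for sub in base.__subclasses__()
                    if all(hasattr(sub, key) for key in attrs))
    except StopIteration:
        raise AttributeError(
            f"Couldn't find a subclass of {base.__name__} with all attributes: {list(attrs)}")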
""" - logger.info(f"Called {cls.__mapper_args__['polymorphic_identity']} finalizer") return pyd @classmethod @@ -854,9 +804,6 @@ class BasicSubmission(BaseClass, LogMixin): Returns: Workbook: Updated workbook """ - logger.info(f"Hello from {cls.__mapper_args__['polymorphic_identity']} autofill") - # logger.debug(f"Input dict: {info}") - # logger.debug(f"Custom fields: {custom_fields}") for k, v in custom_fields.items(): try: assert v['type'] in ['exempt', 'range', 'cell'] @@ -896,15 +843,11 @@ class BasicSubmission(BaseClass, LogMixin): Returns: str: Updated name. """ - # logger.info(f"Hello from {cls.__mapper_args__['polymorphic_identity']} Enforcer!") from backend.validators import RSLNamer - # logger.debug(f"instr coming into {cls}: {instr}") - logger.debug(f"data coming into {cls}: {data}") if "submission_type" not in data.keys(): data['submission_type'] = cls.__mapper_args__['polymorphic_identity'] data['abbreviation'] = cls.get_default_info("abbreviation", submission_type=data['submission_type']) if instr in [None, ""]: - # logger.debug("Sending to RSLNamer to make new plate name.") outstr = RSLNamer.construct_new_plate_name(data=data) else: outstr = instr @@ -923,12 +866,10 @@ class BasicSubmission(BaseClass, LogMixin): try: # NOTE: Grab plate number plate_number = re.search(r"(?:(-|_)\d)(?!\d)", outstr).group().strip("_").strip("-") - # logger.debug(f"Plate number is: {plate_number}") except AttributeError as e: plate_number = "1" # NOTE: insert dash between date and plate number outstr = re.sub(r"(\d{8})(-|_)?\d?(R\d?)?", rf"\1-{plate_number}\3", outstr) - # logger.debug(f"After addition of plate number the plate name is: {outstr}") try: # NOTE: grab repeat number repeat = re.search(r"-\dR(?P\d)?", outstr).groupdict()['repeat'] @@ -954,9 +895,7 @@ class BasicSubmission(BaseClass, LogMixin): Returns: Generator[dict, None, None]: Updated samples """ - # logger.debug(f"Hello from {cls.__mapper_args__['polymorphic_identity']} PCR parser!") pcr_sample_map = cls.get_submission_type().sample_map['pcr_samples'] - # logger.debug(f'sample map: {pcr_sample_map}') main_sheet = xl[pcr_sample_map['main_sheet']] fields = {k: v for k, v in pcr_sample_map.items() if k not in ['main_sheet', 'start_row']} for row in main_sheet.iter_rows(min_row=pcr_sample_map['start_row']): @@ -983,12 +922,10 @@ class BasicSubmission(BaseClass, LogMixin): submission = cls.query(rsl_plate_num=rsl_plate_num) name_column = 1 for item in location_map: - logger.debug(f"Looking for {item['name']}") worksheet = xl[item['sheet']] for iii, row in enumerate(worksheet.iter_rows(max_row=len(worksheet['A']), max_col=name_column), start=1): for cell in row: if cell.value == item['name']: - logger.debug(f"Pulling from row {iii}, column {item['ct_column']}") subtype, target = item['name'].split("-") ct = worksheet.cell(row=iii, column=item['ct_column']).value # NOTE: Kind of a stop gap solution to find control reagents. 
@@ -1039,7 +976,6 @@ class BasicSubmission(BaseClass, LogMixin): Returns: List[Any]: Updated list of samples """ - logger.info(f"Hello from {cls.__mapper_args__['polymorphic_identity']} sampler") return samples @classmethod @@ -1057,7 +993,6 @@ class BasicSubmission(BaseClass, LogMixin): base_dict['excluded'] += ['controls'] env = jinja_template_loading() temp_name = f"{cls.__name__.lower()}_details.html" - # logger.debug(f"Returning template: {temp_name}") try: template = env.get_template(temp_name) except TemplateNotFound as e: @@ -1065,7 +1000,7 @@ class BasicSubmission(BaseClass, LogMixin): template = env.get_template("basicsubmission_details.html") return base_dict, template - # Query functions + # NOTE: Query functions @classmethod @setup_lookup @@ -1099,14 +1034,11 @@ class BasicSubmission(BaseClass, LogMixin): Returns: models.BasicSubmission | List[models.BasicSubmission]: Submission(s) of interest """ - # logger.debug(f"Incoming kwargs: {kwargs}") # NOTE: if you go back to using 'model' change the appropriate cls to model in the query filters - # logger.debug(f"Page size: {page_size}") if submission_type is not None: model = cls.find_polymorphic_subclass(polymorphic_identity=submission_type) elif len(kwargs) > 0: # NOTE: find the subclass containing the relevant attributes - # logger.debug(f"Attributes for search: {kwargs}") model = cls.find_polymorphic_subclass(attrs=kwargs) else: model = cls @@ -1118,34 +1050,25 @@ class BasicSubmission(BaseClass, LogMixin): logger.warning(f"End date with no start date, using Jan 1, 2023") start_date = cls.__database_session__.query(cls, func.min(cls.submitted_date)).first()[1] if start_date is not None: - # logger.debug(f"Querying with start date: {start_date} and end date: {end_date}") match start_date: case date() | datetime(): - # logger.debug(f"Lookup BasicSubmission by start_date({start_date})") start_date = start_date.strftime("%Y-%m-%d") case int(): - # logger.debug(f"Lookup BasicSubmission by ordinal start_date {start_date}") start_date = datetime.fromordinal( datetime(1900, 1, 1).toordinal() + start_date - 2).date().strftime("%Y-%m-%d") case _: - # logger.debug(f"Lookup BasicSubmission by parsed str start_date {start_date}") start_date = parse(start_date).strftime("%Y-%m-%d") match end_date: case date() | datetime(): - # logger.debug(f"Lookup BasicSubmission by end_date({end_date})") end_date = end_date + timedelta(days=1) end_date = end_date.strftime("%Y-%m-%d") case int(): - # logger.debug(f"Lookup BasicSubmission by ordinal end_date {end_date}") - end_date = datetime.fromordinal(datetime(1900, 1, 1).toordinal() + end_date - 2).date() + timedelta( - days=1) - end_date = end_date.strftime( - "%Y-%m-%d") + end_date = datetime.fromordinal(datetime(1900, 1, 1).toordinal() + end_date - 2).date() \ + + timedelta(days=1) + end_date = end_date.strftime("%Y-%m-%d") case _: - # logger.debug(f"Lookup BasicSubmission by parsed str end_date {end_date}") end_date = parse(end_date) + timedelta(days=1) end_date = end_date.strftime("%Y-%m-%d") - # logger.debug(f"Compensating for same date by using time") if start_date == end_date: start_date = datetime.strptime(start_date, "%Y-%m-%d").strftime("%Y-%m-%d %H:%M:%S.%f") query = query.filter(model.submitted_date == start_date) @@ -1154,11 +1077,9 @@ class BasicSubmission(BaseClass, LogMixin): # NOTE: by reagent (for some reason) match reagent: case str(): - # logger.debug(f"Looking up BasicSubmission with reagent: {reagent}") query = query.join(model.submission_reagent_associations).filter( 
SubmissionSampleAssociation.reagent.lot == reagent) case Reagent(): - # logger.debug(f"Looking up BasicSubmission with reagent: {reagent}") query = query.join(model.submission_reagent_associations).join( SubmissionSampleAssociation.reagent).filter(Reagent.lot == reagent) case _: @@ -1167,7 +1088,6 @@ class BasicSubmission(BaseClass, LogMixin): match rsl_plate_num: case str(): query = query.filter(model.rsl_plate_num == rsl_plate_num) - # logger.debug(f"At this point the query gets: {query.all()}") limit = 1 case _: pass @@ -1179,18 +1099,15 @@ class BasicSubmission(BaseClass, LogMixin): # NOTE: by id (returns only a single value) match id: case int(): - # logger.debug(f"Looking up BasicSubmission with id: {id}") query = query.filter(model.id == id) limit = 1 case str(): - # logger.debug(f"Looking up BasicSubmission with id: {id}") query = query.filter(model.id == int(id)) limit = 1 case _: pass - # if chronologic: - # logger.debug("Attempting sort by date descending") query = query.order_by(cls.submitted_date.desc()) + # NOTE: Split query results into pages of size {page_size} if page_size > 0: query = query.limit(page_size) page = page - 1 @@ -1221,10 +1138,8 @@ class BasicSubmission(BaseClass, LogMixin): raise ValueError("Need to narrow down query or the first available instance will be returned.") sanitized_kwargs = {k: v for k, v in kwargs.items() if k not in disallowed} instance = cls.query(submission_type=submission_type, limit=1, **sanitized_kwargs) - # logger.debug(f"Retrieved instance: {instance}") if instance is None: used_class = cls.find_polymorphic_subclass(attrs=kwargs, polymorphic_identity=submission_type) - # instance = used_class(**kwargs) instance = used_class(**sanitized_kwargs) match submission_type: case str(): @@ -1242,7 +1157,7 @@ class BasicSubmission(BaseClass, LogMixin): report.add_result(Result(msg=msg, code=code)) return instance, report - # Custom context events for the ui + # NOTE: Custom context events for the ui def custom_context_events(self) -> dict: """ @@ -1267,7 +1182,6 @@ class BasicSubmission(BaseClass, LogMixin): e: SQLIntegrityError or SQLOperationalError if problem with commit. 
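query() finishes by paging its results; only the limit() call and the page - 1 shift are visible above, so the offset step and the default page size below are assumptions:

def paginate(query, page: int = 1, page_size: int = 250):
    if page_size > 0:
        query = query.limit(page_size)
        query = query.offset((page - 1) * page_size)
    return query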
""" from frontend.widgets.pop_ups import QuestionAsker - # logger.debug("Hello from delete") fname = self.__backup_path__.joinpath(f"{self.rsl_plate_num}-backup({date.today().strftime('%Y%m%d')})") msg = QuestionAsker(title="Delete?", message=f"Are you sure you want to delete {self.rsl_plate_num}?\n") if msg.exec(): @@ -1284,7 +1198,7 @@ class BasicSubmission(BaseClass, LogMixin): try: obj.setData() except AttributeError: - logger.debug("App will not refresh data at this time.") + logger.error("App will not refresh data at this time.") def show_details(self, obj): """ @@ -1293,7 +1207,6 @@ class BasicSubmission(BaseClass, LogMixin): Args: obj (Widget): Parent widget """ - # logger.debug("Hello from details") from frontend.widgets.submission_details import SubmissionDetails dlg = SubmissionDetails(parent=obj, sub=self) if dlg.exec(): @@ -1308,7 +1221,6 @@ class BasicSubmission(BaseClass, LogMixin): """ from frontend.widgets.submission_widget import SubmissionFormWidget for widget in obj.app.table_widget.formwidget.findChildren(SubmissionFormWidget): - # logger.debug(widget) widget.setParent(None) pyd = self.to_pydantic(backup=True) form = pyd.to_form(parent=obj, disable=['rsl_plate_num']) @@ -1328,7 +1240,6 @@ class BasicSubmission(BaseClass, LogMixin): if comment in ["", None]: return self.set_attribute(key='comment', value=comment) - # logger.debug(self.comment) self.save(original=False) def add_equipment(self, obj): @@ -1342,17 +1253,13 @@ class BasicSubmission(BaseClass, LogMixin): dlg = EquipmentUsage(parent=obj, submission=self) if dlg.exec(): equipment = dlg.parse_form() - # logger.debug(f"We've got equipment: {equipment}") for equip in equipment: - # logger.debug(f"Processing: {equip}") _, assoc = equip.toSQL(submission=self) - # logger.debug(f"Appending SubmissionEquipmentAssociation: {assoc}") try: assoc.save() except AttributeError as e: logger.error(f"Couldn't save association with {equip} due to {e}") if equip.tips: - # logger.debug("We have tips in this equipment") for tips in equip.tips: tassoc = tips.to_sql(submission=self) if tassoc not in self.submission_tips_associations: @@ -1371,14 +1278,11 @@ class BasicSubmission(BaseClass, LogMixin): fname (Path | None, optional): Filename of xlsx file. Defaults to None. full_backup (bool, optional): Whether or not to make yaml file. Defaults to False. """ - # logger.debug("Hello from backup.") pyd = self.to_pydantic(backup=True) if fname is None: from frontend.widgets.functions import select_save_file fname = select_save_file(default_name=pyd.construct_filename(), extension="xlsx", obj=obj) - # logger.debug(fname.name) if fname.name == "": - # logger.debug(f"export cancelled.") return writer = pyd.to_writer() writer.xl.save(filename=fname.with_suffix(".xlsx")) @@ -1411,7 +1315,7 @@ class BasicSubmission(BaseClass, LogMixin): return delta, delta <= tat -# Below are the custom submission types +# NOTE: Below are the custom submission types class BacterialCulture(BasicSubmission): """ @@ -1460,16 +1364,13 @@ class BacterialCulture(BasicSubmission): dict: Updated dictionary. """ from . 
import ControlType - # logger.debug(f"\n\nHello from BacterialCulture custom_validation") pyd = super().custom_validation(pyd) # NOTE: build regex for all control types that have targets regex = ControlType.build_positive_regex(control_type="Irida Control") - logger.debug(regex) # NOTE: search samples for match for sample in pyd.samples: matched = regex.match(sample.submitter_id) if bool(matched): - # logger.debug(f"Control match found: {sample.submitter_id}") new_lot = matched.group() try: pos_control_reg = \ @@ -1479,7 +1380,6 @@ class BacterialCulture(BasicSubmission): return pyd pos_control_reg.lot = new_lot pos_control_reg.missing = False - # logger.debug(f"Got positive control: {pos_control_reg}") return pyd @classmethod @@ -1496,7 +1396,6 @@ class BacterialCulture(BasicSubmission): dict: Updated info dictionary. """ input_dict = super().custom_info_parser(input_dict=input_dict, xl=xl, custom_fields=custom_fields) - # logger.debug(f"\n\nInfo dictionary:\n\n{pformat(input_dict)}\n\n") return input_dict @@ -1535,12 +1434,10 @@ class Wastewater(BasicSubmission): output["pcr_technician"] = self.technician else: output['pcr_technician'] = self.pcr_technician - ############### Updated from finalize_details - testing 2024-1017 ################ if full_data: output['samples'] = [sample for sample in output['samples']] dummy_samples = [] for item in output['samples']: - # logger.debug(f"Sample dict: {item}") thing = deepcopy(item) try: thing['row'] = thing['source_row'] @@ -1552,7 +1449,6 @@ class Wastewater(BasicSubmission): dummy_samples.append(thing) output['origin_plate'] = self.__class__.make_plate_map(sample_list=dummy_samples, plate_rows=4, plate_columns=6) - ############################### return output @classmethod @@ -1569,7 +1465,6 @@ class Wastewater(BasicSubmission): dict: Updated info dictionary """ input_dict = super().custom_info_parser(input_dict) - # logger.debug(f"Input dict: {pformat(input_dict)}") if xl is not None: try: input_dict['csv'] = xl["Copy to import file"] @@ -1600,7 +1495,6 @@ class Wastewater(BasicSubmission): Generator[dict, None, None]: Updated samples """ samples = [item for item in super().parse_pcr(xl=xl, rsl_plate_num=rsl_plate_num)] - # logger.debug(f'Samples from parent pcr parser: {pformat(samples)}') # NOTE: Due to having to run through samples in for loop we need to convert to list. 
output = [] for sample in samples: @@ -1714,17 +1608,13 @@ class Wastewater(BasicSubmission): pcr_samples = [sample for sample in parser.samples] pcr_controls = [control for control in parser.controls] self.save(original=False) - # logger.debug(f"Got {len(parser.samples)} samples to update!") - # logger.debug(f"Parser samples: {parser.samples}") for sample in self.samples: - # logger.debug(f"Running update on: {sample}") try: sample_dict = next(item for item in pcr_samples if item['sample'] == sample.rsl_number) except StopIteration: continue self.update_subsampassoc(sample=sample, input_dict=sample_dict) controltype = ControlType.query(name="PCR Control") - logger.debug(parser.pcr) submitted_date = datetime.strptime(" ".join(parser.pcr['run_start_date/time'].split(" ")[:-1]), "%Y-%m-%d %I:%M:%S %p") for control in pcr_controls: @@ -1770,8 +1660,6 @@ class WastewaterArtic(BasicSubmission): output['artic_technician'] = self.technician else: output['artic_technician'] = self.artic_technician - # logger.debug(full_data) - # logger.debug(output.keys()) output['gel_info'] = self.gel_info output['gel_image_path'] = self.gel_image output['dna_core_submission_number'] = self.dna_core_submission_number @@ -1802,9 +1690,7 @@ class WastewaterArtic(BasicSubmission): ws = wb[info_dict['sheet']] img_loader = SheetImageLoader(ws) for ii in range(info_dict['start_row'], info_dict['end_row'] + 1): - # logger.debug(f"Checking row: {ii}") for jj in range(info_dict['start_column'], info_dict['end_column'] + 1): - # logger.debug(f"Checking column: {jj}") cell_str = f"{row_map[jj]}{ii}" if img_loader.image_in(cell_str): try: @@ -1816,7 +1702,6 @@ class WastewaterArtic(BasicSubmission): input_dict = super().custom_info_parser(input_dict) input_dict['submission_type'] = dict(value="Wastewater Artic", missing=False) - # logger.debug(f"Custom fields: {custom_fields}") egel_section = custom_fields['egel_controls'] ws = xl[egel_section['sheet']] # NOTE: Here we should be scraping the control results. 
@@ -1824,12 +1709,9 @@ class WastewaterArtic(BasicSubmission): for ii in range(egel_section['start_row'], egel_section['end_row'] + 1)] data = [cell for cell in data if cell.value is not None and "NTC" in cell.value] - # logger.debug(f"Got gel control map: {data}") - # logger.debug(f"Checking against row_map: {row_map}") input_dict['gel_controls'] = [ dict(sample_id=cell.value, location=f"{row_map[cell.row - 9]}{str(cell.column - 14).zfill(2)}") for cell in data] - # logger.debug(f"Got gel control info: {input_dict['gel_controls']}") # NOTE: Get source plate information source_plates_section = custom_fields['source_plates'] ws = xl[source_plates_section['sheet']] @@ -1838,7 +1720,6 @@ class WastewaterArtic(BasicSubmission): ii in range(source_plates_section['start_row'], source_plates_section['end_row'] + 1)] for datum in data: - # logger.debug(f"Datum: {datum}") if datum['plate'] in ["None", None, ""]: continue else: @@ -1876,7 +1757,6 @@ class WastewaterArtic(BasicSubmission): datum['values'].append(d) data.append(datum) input_dict['gel_info'] = data - # logger.debug(f"Wastewater Artic custom info:\n\n{pformat(input_dict)}") egel_image_section = custom_fields['image_range'] img: Image = scrape_image(wb=xl, info_dict=egel_image_section) if img is not None: @@ -1907,13 +1787,11 @@ class WastewaterArtic(BasicSubmission): instr = re.sub(r"Artic", "", instr, flags=re.IGNORECASE) except (AttributeError, TypeError) as e: logger.error(f"Problem using regex: {e}") - # logger.debug(f"Before RSL addition: {instr}") try: instr = instr.replace("-", "") except AttributeError: instr = date.today().strftime("%Y%m%d") instr = re.sub(r"^(\d{6})", f"RSL-AR-\\1", instr) - # logger.debug(f"name coming out of Artic namer: {instr}") outstr = super().enforce_name(instr=instr, data=data) outstr = outstr.replace("RSLAR", "RSL-AR") return outstr @@ -1930,7 +1808,6 @@ class WastewaterArtic(BasicSubmission): dict: Updated sample dictionary """ input_dict = super().parse_samples(input_dict) - logger.debug(f"WWA input dict: {pformat(input_dict)}") input_dict['sample_type'] = "Wastewater Sample" # NOTE: Stop gap solution because WW is sloppy with their naming schemes try: @@ -1974,40 +1851,36 @@ class WastewaterArtic(BasicSubmission): Returns: str: output name """ - # logger.debug(f"input string raw: {input_str}") - # NOTE: Remove letters. processed = input_str.replace("RSL", "") + # NOTE: Remove anything in brackets at the end of string? processed = re.sub(r"\(.*\)$", "", processed).strip() + # NOTE: Remove letters that are not R. processed = re.sub(r"[A-QS-Z]+\d*", "", processed) # NOTE: Remove trailing '-' if any processed = processed.strip("-") - # logger.debug(f"Processed after stripping letters: {processed}") try: + # NOTE: get digit at the end of the string. en_num = re.search(r"\-\d{1}$", processed).group() processed = rreplace(processed, en_num, "") except AttributeError: en_num = "1" en_num = en_num.strip("-") - # logger.debug(f"Processed after en_num: {processed}") try: + # NOTE: Get last digit and maybe 'R' with another digit. 
plate_num = re.search(r"\-\d{1}R?\d?$", processed).group() processed = rreplace(processed, plate_num, "") except AttributeError: plate_num = "1" # NOTE: plate_num not currently used, but will keep incase it is in the future plate_num = plate_num.strip("-") - # logger.debug(f"Processed after plate-num: {processed}") day = re.search(r"\d{2}$", processed).group() processed = rreplace(processed, day, "") - # logger.debug(f"Processed after day: {processed}") month = re.search(r"\d{2}$", processed).group() processed = rreplace(processed, month, "") processed = processed.replace("--", "") - # logger.debug(f"Processed after month: {processed}") year = re.search(r'^(?:\d{2})?\d{2}', processed).group() year = f"20{year}" final_en_name = f"EN{en_num}-{year}{month}{day}" - # logger.debug(f"Final EN name: {final_en_name}") return final_en_name @classmethod @@ -2021,37 +1894,30 @@ class WastewaterArtic(BasicSubmission): Returns: str: output name """ - # logger.debug(f"input string raw: {input_str}") # NOTE: Remove letters. processed = input_str.replace("RSL", "") processed = re.sub(r"\(.*\)$", "", processed).strip() processed = re.sub(r"[A-QS-Z]+\d*", "", processed) # NOTE: Remove trailing '-' if any processed = processed.strip("-") - # logger.debug(f"Processed after stripping letters: {processed}") try: plate_num = re.search(r"\-\d{1}R?\d?$", processed).group() processed = rreplace(processed, plate_num, "") except AttributeError: plate_num = "1" plate_num = plate_num.strip("-") - # logger.debug(f"Plate num: {plate_num}") repeat_num = re.search(r"R(?P\d)?$", "PBS20240426-2R").groups()[0] if repeat_num is None and "R" in plate_num: repeat_num = "1" plate_num = re.sub(r"R", rf"R{repeat_num}", plate_num) - # logger.debug(f"Processed after plate-num: {processed}") day = re.search(r"\d{2}$", processed).group() processed = rreplace(processed, day, "") - # logger.debug(f"Processed after day: {processed}") month = re.search(r"\d{2}$", processed).group() processed = rreplace(processed, month, "") processed = processed.replace("--", "") - # logger.debug(f"Processed after month: {processed}") year = re.search(r'^(?:\d{2})?\d{2}', processed).group() year = f"20{year}" final_en_name = f"PBS{year}{month}{day}-{plate_num}" - # logger.debug(f"Final EN name: {final_en_name}") return final_en_name @classmethod @@ -2069,18 +1935,15 @@ class WastewaterArtic(BasicSubmission): dict: Updated parser product. 
""" input_dict = super().custom_validation(pyd) - # logger.debug(f"Incoming input_dict: {pformat(input_dict)}") exclude_plates = [None, "", "none", "na"] pyd.source_plates = [plate for plate in pyd.source_plates if plate['plate'].lower() not in exclude_plates] for sample in pyd.samples: - # logger.debug(f"Sample: {sample}") if re.search(r"^NTC", sample.submitter_id): if isinstance(pyd.rsl_plate_num, dict): placeholder = pyd.rsl_plate_num['value'] else: placeholder = pyd.rsl_plate_num sample.submitter_id = f"{sample.submitter_id}-WWG-{placeholder}" - # logger.debug(f"sample id: {sample.submitter_id}") return input_dict @classmethod @@ -2100,10 +1963,7 @@ class WastewaterArtic(BasicSubmission): """ input_excel = super().custom_info_writer(input_excel, info, backup) if isinstance(info, types.GeneratorType): - # logger.debug(f"Unpacking info generator.") info = {k: v for k, v in info} - # logger.debug(f"Info:\n{pformat(info)}") - # logger.debug(f"Custom fields:\n{pformat(custom_fields)}") # NOTE: check for source plate information if check_key_or_attr(key='source_plates', interest=info, check_none=True): source_plates_section = custom_fields['source_plates'] @@ -2111,9 +1971,7 @@ class WastewaterArtic(BasicSubmission): start_row = source_plates_section['start_row'] # NOTE: write source plates to First strand list for iii, plate in enumerate(info['source_plates']['value']): - # logger.debug(f"Plate: {plate}") row = start_row + iii - logger.debug(f"Writing {plate} to row {iii}") try: worksheet.cell(row=row, column=source_plates_section['plate_column'], value=plate['plate']) except TypeError: @@ -2128,23 +1986,17 @@ class WastewaterArtic(BasicSubmission): # NOTE: check for gel information if check_key_or_attr(key='gel_info', interest=info, check_none=True): egel_section = custom_fields['egel_info'] - # logger.debug(f"Gel info check passed.") # NOTE: print json field gel results to Egel results worksheet = input_excel[egel_section['sheet']] # TODO: Move all this into a seperate function? 
start_row = egel_section['start_row'] - 1 start_column = egel_section['start_column'] - 3 for row, ki in enumerate(info['gel_info']['value'], start=1): - # logger.debug(f"ki: {ki}") - # logger.debug(f"vi: {vi}") row = start_row + row worksheet.cell(row=row, column=start_column, value=ki['name']) for jjj, kj in enumerate(ki['values'], start=1): - # logger.debug(f"kj: {kj}") - # logger.debug(f"vj: {vj}") column = start_column + 2 + jjj worksheet.cell(row=start_row, column=column, value=kj['name']) - # logger.debug(f"Writing {kj['name']} with value {kj['value']} to row {row}, column {column}") try: worksheet.cell(row=row, column=column, value=kj['value']) except AttributeError: @@ -2153,7 +2005,6 @@ class WastewaterArtic(BasicSubmission): logger.warning("No gel info found.") if check_key_or_attr(key='gel_image_path', interest=info, check_none=True): worksheet = input_excel[egel_section['sheet']] - # logger.debug(f"We got an image: {info['gel_image']}") with ZipFile(cls.__directory_path__.joinpath("submission_imgs.zip")) as zipped: z = zipped.extract(info['gel_image_path']['value'], Path(TemporaryDirectory().name)) img = OpenpyxlImage(z) @@ -2167,7 +2018,6 @@ class WastewaterArtic(BasicSubmission): @classmethod def custom_sample_writer(self, sample: dict) -> dict: - logger.debug("Wastewater Artic custom sample writer") if sample['source_plate_number'] in [0, "0"]: sample['source_plate_number'] = "control" return sample @@ -2191,7 +2041,6 @@ class WastewaterArtic(BasicSubmission): headers = [item['name'] for item in base_dict['gel_info'][0]['values']] base_dict['headers'] = [''] * (4 - len(headers)) base_dict['headers'] += headers - # logger.debug(f"Gel info: {pformat(base_dict['headers'])}") if check_key_or_attr(key='gel_image_path', interest=base_dict, check_none=True): with ZipFile(cls.__directory_path__.joinpath("submission_imgs.zip")) as zipped: base_dict['gel_image'] = base64.b64encode(zipped.read(base_dict['gel_image_path'])).decode('utf-8') @@ -2247,7 +2096,6 @@ class WastewaterArtic(BasicSubmission): self.comment.append(com) else: self.comment = [com] - # logger.debug(pformat(self.gel_info)) with ZipFile(self.__directory_path__.joinpath("submission_imgs.zip"), 'a') as zipf: # NOTE: Add a file located at the source_path to the destination within the zip # file. 
It will overwrite existing files if the names collide, but it @@ -2256,7 +2104,7 @@ class WastewaterArtic(BasicSubmission): self.save() -# Sample Classes +# NOTE: Sample Classes class BasicSample(BaseClass, LogMixin): """ @@ -2336,7 +2184,6 @@ class BasicSample(BaseClass, LogMixin): Returns: dict: submitter id and sample type and linked submissions if full data """ - # logger.debug(f"Converting {self} to dict.") sample = dict( submitter_id=self.submitter_id, sample_type=self.sample_type @@ -2344,7 +2191,6 @@ class BasicSample(BaseClass, LogMixin): if full_data: sample['submissions'] = sorted([item.to_sub_dict() for item in self.sample_submission_associations], key=itemgetter('submitted_date')) - # logger.debug(f"Done converting {self} after {time()-start}") return sample def to_pydantic(self): @@ -2385,7 +2231,6 @@ class BasicSample(BaseClass, LogMixin): except Exception as e: logger.error(f"Could not get polymorph {polymorphic_identity} of {cls} due to {e}, using {cls}") model = cls - # logger.info(f"Recruiting model: {model}") return model else: model = cls @@ -2399,7 +2244,6 @@ class BasicSample(BaseClass, LogMixin): except StopIteration as e: raise AttributeError( f"Couldn't find existing class/subclass of {cls} with all attributes:\n{pformat(attrs.keys())}") - # logger.info(f"Recruiting model: {model}") return model @classmethod @@ -2413,7 +2257,6 @@ class BasicSample(BaseClass, LogMixin): Returns: dict: Updated parser results. """ - # logger.debug(f"Hello from {cls.__name__} sample parser!") return input_dict @classmethod @@ -2429,7 +2272,6 @@ class BasicSample(BaseClass, LogMixin): """ env = jinja_template_loading() temp_name = f"{cls.__name__.lower()}_details.html" - # logger.debug(f"Returning template: {temp_name}") try: template = env.get_template(temp_name) except TemplateNotFound as e: @@ -2463,11 +2305,9 @@ class BasicSample(BaseClass, LogMixin): model = sample_type case _: model = cls.find_polymorphic_subclass(attrs=kwargs) - # logger.debug(f"Length of kwargs: {len(kwargs)}") query: Query = cls.__database_session__.query(model) match submitter_id: case str(): - # logger.debug(f"Looking up {model} with submitter id: {submitter_id}") query = query.filter(model.submitter_id == submitter_id) limit = 1 case _: @@ -2494,12 +2334,10 @@ class BasicSample(BaseClass, LogMixin): raise ValueError("Need to narrow down query or the first available instance will be returned.") sanitized_kwargs = {k: v for k, v in kwargs.items() if k not in disallowed} instance = cls.query(sample_type=sample_type, limit=1, **kwargs) - # logger.debug(f"Retrieved instance: {instance}") if instance is None: used_class = cls.find_polymorphic_subclass(attrs=sanitized_kwargs, polymorphic_identity=sample_type) instance = used_class(**sanitized_kwargs) instance.sample_type = sample_type - # logger.debug(f"Creating instance: {instance}") return instance @classmethod @@ -2525,12 +2363,8 @@ class BasicSample(BaseClass, LogMixin): model = cls case _: model = cls.find_polymorphic_subclass(attrs=kwargs) - # logger.debug(f"Length of kwargs: {len(kwargs)}") - # logger.debug(f"Fuzzy search received sample type: {sample_type}") query: Query = cls.__database_session__.query(model) - # logger.debug(f"Queried model. 
Now running searches in {kwargs}") for k, v in kwargs.items(): - # logger.debug(f"Running fuzzy search for attribute: {k} with value {v}") search = f"%{v}%" try: attr = getattr(model, k) @@ -2583,7 +2417,6 @@ class BasicSample(BaseClass, LogMixin): Args: obj (_type_): parent widget """ - # logger.debug("Hello from details") from frontend.widgets.submission_details import SubmissionDetails dlg = SubmissionDetails(parent=obj, sub=self) if dlg.exec(): @@ -2629,7 +2462,6 @@ class WastewaterSample(BasicSample): output = {} for k, v in dicto.items(): if len(args) > 0 and k not in args: - # logger.debug(f"Don't want {k}") continue else: output[k] = v @@ -2668,7 +2500,6 @@ class WastewaterSample(BasicSample): dict: Updated parser results. """ output_dict = super().parse_sample(input_dict) - # logger.debug(f"Initial sample dict: {pformat(output_dict)}") disallowed = ["", None, "None"] try: check = output_dict['rsl_number'] in disallowed @@ -2721,11 +2552,10 @@ class BacterialCultureSample(BasicSample): if self.control is not None: sample['colour'] = [0, 128, 0] sample['tooltip'] = f"Control: {self.control.controltype.name} - {self.control.controltype.targets}" - # logger.debug(f"Done converting to {self} to dict after {time()-start}") return sample -# Submission to Sample Associations +# NOTE: Submission to Sample Associations class SubmissionSampleAssociation(BaseClass): """ @@ -2740,17 +2570,16 @@ class SubmissionSampleAssociation(BaseClass): column = Column(INTEGER, primary_key=True) #: column on the 96 well plate submission_rank = Column(INTEGER, nullable=False, default=0) #: Location in sample list - # reference to the Submission object + # NOTE: reference to the Submission object submission = relationship(BasicSubmission, back_populates="submission_sample_associations") #: associated submission - # reference to the Sample object + # NOTE: reference to the Sample object sample = relationship(BasicSample, back_populates="sample_submission_associations") #: associated sample base_sub_type = Column(String) #: string of mode_sub_type name - # Refers to the type of parent. - # Hooooooo boy, polymorphic association type, now we're getting into the weeds! + # NOTE: Refers to the type of parent. __mapper_args__ = { "polymorphic_identity": "Basic Association", "polymorphic_on": base_sub_type, @@ -2768,13 +2597,11 @@ class SubmissionSampleAssociation(BaseClass): self.id = id else: self.id = self.__class__.autoincrement_id() - # logger.debug(f"Looking at kwargs: {pformat(kwargs)}") for k, v in kwargs.items(): try: self.__setattr__(k, v) except AttributeError: logger.error(f"Couldn't set {k} to {v}") - # logger.debug(f"Using submission sample association id: {self.id}") def __repr__(self) -> str: try: @@ -2791,9 +2618,7 @@ class SubmissionSampleAssociation(BaseClass): dict: Updated dictionary with row, column and well updated """ # NOTE: Get associated sample info - # logger.debug(f"Running {self.__repr__()}") sample = self.sample.to_sub_dict() - # logger.debug("Sample conversion complete.") sample['name'] = self.sample.submitter_id sample['row'] = self.row sample['column'] = self.column @@ -2808,7 +2633,13 @@ class SubmissionSampleAssociation(BaseClass): sample['submission_rank'] = self.submission_rank return sample - def to_pydantic(self): + def to_pydantic(self) -> "PydSample": + """ + Creates a pydantic model for this sample. 
+ + Returns: + PydSample: Pydantic Model + """ from backend.validators import PydSample return PydSample(**self.to_sub_dict()) @@ -2821,7 +2652,6 @@ class SubmissionSampleAssociation(BaseClass): """ # NOTE: Since there is no PCR, negliable result is necessary. sample = self.to_sub_dict() - # logger.debug(f"Sample dict to hitpick: {sample}") env = jinja_template_loading() template = env.get_template("tooltip.html") tooltip_text = template.render(fields=sample) @@ -2880,7 +2710,6 @@ class SubmissionSampleAssociation(BaseClass): except Exception as e: logger.error(f"Could not get polymorph {polymorphic_identity} of {cls} due to {e}") model = cls - # logger.debug(f"Using SubmissionSampleAssociation subclass: {output}") return model @classmethod @@ -2913,19 +2742,15 @@ class SubmissionSampleAssociation(BaseClass): query: Query = cls.__database_session__.query(cls) match submission: case BasicSubmission(): - # logger.debug(f"Lookup SampleSubmissionAssociation with submission BasicSubmission {submission}") query = query.filter(cls.submission == submission) case str(): - # logger.debug(f"Lookup SampleSubmissionAssociation with submission str {submission}") query = query.join(BasicSubmission).filter(BasicSubmission.rsl_plate_num == submission) case _: pass match sample: case BasicSample(): - # logger.debug(f"Lookup SampleSubmissionAssociation with sample BasicSample {sample}") query = query.filter(cls.sample == sample) case str(): - # logger.debug(f"Lookup SampleSubmissionAssociation with sample str {sample}") query = query.join(BasicSample).filter(BasicSample.submitter_id == sample) case _: pass @@ -2935,12 +2760,10 @@ class SubmissionSampleAssociation(BaseClass): query = query.filter(cls.column == column) match exclude_submission_type: case str(): - # logger.debug(f"filter SampleSubmissionAssociation to exclude submission type {exclude_submission_type}") query = query.join(BasicSubmission).filter( BasicSubmission.submission_type_name != exclude_submission_type) case _: pass - # logger.debug(f"Query count: {query.count()}") if reverse and not chronologic: query = query.order_by(BasicSubmission.id.desc()) if chronologic: @@ -2969,8 +2792,7 @@ class SubmissionSampleAssociation(BaseClass): Returns: SubmissionSampleAssociation: Queried or new association. """ - # logger.debug(f"Attempting create or query with {kwargs}") - disallowed = ['id'] + # disallowed = ['id'] match submission: case BasicSubmission(): pass diff --git a/src/submissions/backend/excel/parser.py b/src/submissions/backend/excel/parser.py index 42a5999..b54f710 100644 --- a/src/submissions/backend/excel/parser.py +++ b/src/submissions/backend/excel/parser.py @@ -1,6 +1,6 @@ -''' +""" contains parser objects for pulling values from client generated submission sheets. -''' +""" import logging from copy import copy from getpass import getuser @@ -53,7 +53,6 @@ class SheetParser(object): self.parse_samples() self.parse_equipment() self.parse_tips() - # logger.debug(f"Parser.sub after info scrape: {pformat(self.sub)}") def parse_info(self): """ @@ -71,7 +70,6 @@ class SheetParser(object): logger.info( f"Checking for updated submission type: {self.submission_type.name} against new: {info['submission_type']['value']}") if self.submission_type.name != info['submission_type']['value']: - # logger.debug(f"info submission type: {info}") if check: self.submission_type = SubmissionType.query(name=info['submission_type']['value']) logger.info(f"Updated self.submission_type to {self.submission_type}. 
Rerunning parse.") @@ -90,11 +88,9 @@ class SheetParser(object): """ if extraction_kit is None: extraction_kit = self.sub['extraction_kit'] - # logger.debug(f"Parsing reagents for {extraction_kit}") parser = ReagentParser(xl=self.xl, submission_type=self.submission_type, extraction_kit=extraction_kit) self.sub['reagents'] = parser.parse_reagents() - # logger.debug(f"Reagents out of parser: {pformat(self.sub['reagents'])}") def parse_samples(self): """ @@ -155,7 +151,6 @@ class InfoParser(object): submission_type (str | SubmissionType): Type of submission expected (Wastewater, Bacterial Culture, etc.) sub_object (BasicSubmission | None, optional): Submission object holding methods. Defaults to None. """ - logger.info(f"\n\nHello from InfoParser!\n\n") if isinstance(submission_type, str): submission_type = SubmissionType.query(name=submission_type) if sub_object is None: @@ -164,7 +159,6 @@ class InfoParser(object): self.sub_object = sub_object self.map = self.fetch_submission_info_map() self.xl = xl - # logger.debug(f"Info map for InfoParser: {pformat(self.map)}") def fetch_submission_info_map(self) -> dict: """ @@ -174,7 +168,6 @@ class InfoParser(object): dict: Location map of all info for this submission type """ self.submission_type = dict(value=self.submission_type_obj.name, missing=True) - # logger.debug(f"Looking up submission type: {self.submission_type['value']}") info_map = self.sub_object.construct_info_map(submission_type=self.submission_type_obj, mode="read") # NOTE: Get the parse_info method from the submission type specified return info_map @@ -188,7 +181,6 @@ class InfoParser(object): """ dicto = {} # NOTE: This loop parses generic info - # logger.debug(f"Map: {self.map}") for sheet in self.xl.sheetnames: ws = self.xl[sheet] relevant = [] @@ -197,11 +189,8 @@ class InfoParser(object): if k == "custom": continue if isinstance(v, str): - logger.debug(f"Found string for {k}, setting value to {v}") dicto[k] = dict(value=v, missing=False) continue - # logger.debug(f"Looking for {k} in self.map") - # logger.debug(f"Locations: {v}") for location in v: try: check = location['sheet'] == sheet @@ -213,21 +202,18 @@ class InfoParser(object): new = location new['name'] = k relevant.append(new) - # logger.debug(f"relevant map for {sheet}: {pformat(relevant)}") # NOTE: make sure relevant is not an empty list. if not relevant: continue for item in relevant: # NOTE: Get cell contents at this location value = ws.cell(row=item['row'], column=item['column']).value - # logger.debug(f"Value for {item['name']} = {value}") match item['name']: case "submission_type": value, missing = is_missing(value) value = value.title() case "submitted_date": value, missing = is_missing(value) - logger.debug(f"Parsed submitted date: {value}") # NOTE: is field a JSON? Includes: Extraction info, PCR info, comment, custom case thing if thing in self.sub_object.jsons(): value, missing = is_missing(value) @@ -240,7 +226,6 @@ class InfoParser(object): logger.error(f"New value for {item['name']}") case _: value, missing = is_missing(value) - # logger.debug(f"Setting {item} on {sheet} to {value}") if item['name'] not in dicto.keys(): try: dicto[item['name']] = dict(value=value, missing=missing) @@ -264,7 +249,6 @@ class ReagentParser(object): extraction_kit (str): Extraction kit used. sub_object (BasicSubmission | None, optional): Submission object holding methods. Defaults to None. 
""" - logger.info("\n\nHello from ReagentParser!\n\n") if isinstance(submission_type, str): submission_type = SubmissionType.query(name=submission_type) self.submission_type_obj = submission_type @@ -272,9 +256,7 @@ class ReagentParser(object): if isinstance(extraction_kit, dict): extraction_kit = extraction_kit['value'] self.kit_object = KitType.query(name=extraction_kit) - logger.debug(f"Got extraction kit object: {self.kit_object}") self.map = self.fetch_kit_info_map(submission_type=submission_type) - logger.debug(f"Reagent Parser map: {self.map}") self.xl = xl @report_result @@ -298,14 +280,11 @@ class ReagentParser(object): del reagent_map['info'] except KeyError: pass - # logger.debug(f"Reagent map: {pformat(reagent_map)}") # NOTE: If reagent map is empty, maybe the wrong kit was given, check if there's only one kit for that submission type and use it if so. if not reagent_map: temp_kit_object = self.submission_type_obj.get_default_kit() - # logger.debug(f"Temp kit: {temp_kit_object}") if temp_kit_object: self.kit_object = temp_kit_object - # reagent_map = {k: v for k, v in self.kit_object.construct_xl_map_for_use(submission_type)} logger.warning(f"Attempting to salvage with default kit {self.kit_object} and submission_type: {self.submission_type_obj}") return self.fetch_kit_info_map(submission_type=self.submission_type_obj) else: @@ -331,18 +310,15 @@ class ReagentParser(object): for sheet in self.xl.sheetnames: ws = self.xl[sheet] relevant = {k.strip(): v for k, v in self.map.items() if sheet in self.map[k]['sheet']} - # logger.debug(f"relevant map for {sheet}: {pformat(relevant)}") if relevant == {}: continue for item in relevant: - # logger.debug(f"Attempting to scrape: {item}") try: reagent = relevant[item] name = ws.cell(row=reagent['name']['row'], column=reagent['name']['column']).value lot = ws.cell(row=reagent['lot']['row'], column=reagent['lot']['column']).value expiry = ws.cell(row=reagent['expiry']['row'], column=reagent['expiry']['column']).value if 'comment' in relevant[item].keys(): - # logger.debug(f"looking for {relevant[item]} comment.") comment = ws.cell(row=reagent['comment']['row'], column=reagent['comment']['column']).value else: comment = "" @@ -353,10 +329,7 @@ class ReagentParser(object): missing = False else: missing = True - # logger.debug(f"Got lot for {item}-{name}: {lot} as {type(lot)}") lot = str(lot) - # logger.debug( - # f"Going into pydantic: name: {name}, lot: {lot}, expiry: {expiry}, type: {item.strip()}, comment: {comment}") try: check = name.lower() != "not applicable" except AttributeError: @@ -381,12 +354,10 @@ class SampleParser(object): sample_map (dict | None, optional): Locations in database where samples are found. Defaults to None. sub_object (BasicSubmission | None, optional): Submission object holding methods. Defaults to None. 
""" - logger.info("\n\nHello from SampleParser!\n\n") self.samples = [] self.xl = xl if isinstance(submission_type, str): submission_type = SubmissionType.query(name=submission_type) - # logger.debug(f"Sample parser is using submission type: {submission_type}") self.submission_type = submission_type.name self.submission_type_obj = submission_type if sub_object is None: @@ -395,7 +366,6 @@ class SampleParser(object): sub_object = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type) self.sub_object = sub_object self.sample_info_map = self.fetch_sample_info_map(submission_type=submission_type, sample_map=sample_map) - # logger.debug(f"sample_info_map: {self.sample_info_map}") self.plate_map_samples = self.parse_plate_map() self.lookup_samples = self.parse_lookup_table() @@ -409,11 +379,8 @@ class SampleParser(object): Returns: dict: Info locations. """ - # logger.debug(f"Looking up submission type: {submission_type}") self.sample_type = self.sub_object.get_default_info("sample_type", submission_type=submission_type) self.samp_object = BasicSample.find_polymorphic_subclass(polymorphic_identity=self.sample_type) - # logger.debug(f"Got sample class: {self.samp_object.__name__}") - # logger.debug(f"info_map: {pformat(se)}") if sample_map is None: sample_info_map = self.sub_object.construct_sample_map(submission_type=self.submission_type_obj) else: @@ -432,9 +399,7 @@ class SampleParser(object): ws = self.xl[smap['sheet']] plate_map_samples = [] for ii, row in enumerate(range(smap['start_row'], smap['end_row'] + 1), start=1): - # logger.debug(f"Parsing row: {row}") for jj, column in enumerate(range(smap['start_column'], smap['end_column'] + 1), start=1): - # logger.debug(f"Parsing column: {column}") id = str(ws.cell(row=row, column=column).value) if check_not_nan(id): if id not in invalids: @@ -442,10 +407,8 @@ class SampleParser(object): sample_dict['sample_type'] = self.sample_type plate_map_samples.append(sample_dict) else: - # logger.error(f"Sample cell ({row}, {column}) has invalid value: {id}.") pass else: - # logger.error(f"Sample cell ({row}, {column}) has no info: {id}.") pass return plate_map_samples @@ -507,7 +470,6 @@ class SampleParser(object): except (KeyError, IndexError): check = False if check: - # logger.debug(f"Direct match found for {psample['id']}") new = lookup_samples[ii] | psample lookup_samples[ii] = {} else: @@ -516,7 +478,6 @@ class SampleParser(object): if merge_on_id in sample.keys()] jj, new = next(((jj, lsample | psample) for jj, lsample in searchables if lsample[merge_on_id] == psample['id']), (-1, psample)) - # logger.debug(f"Assigning from index {jj} - {new}") if jj >= 0: lookup_samples[jj] = {} if not check_key_or_attr(key='submitter_id', interest=new, check_none=True): @@ -540,7 +501,6 @@ class EquipmentParser(object): xl (Workbook): Openpyxl workbook from submitted excel file. submission_type (str | SubmissionType): Type of submission expected (Wastewater, Bacterial Culture, etc.) 
""" - logger.info("\n\nHello from EquipmentParser!\n\n") if isinstance(submission_type, str): submission_type = SubmissionType.query(name=submission_type) self.submission_type = submission_type @@ -567,7 +527,6 @@ class EquipmentParser(object): str: asset number """ regex = Equipment.get_regex() - # logger.debug(f"Using equipment regex: {regex} on {input}") try: return regex.search(input).group().strip("-") except AttributeError as e: @@ -581,8 +540,6 @@ class EquipmentParser(object): Returns: List[dict]: list of equipment """ - # logger.debug(f"Equipment parser going into parsing: {pformat(self.__dict__)}") - # logger.debug(f"Sheets: {sheets}") for sheet in self.xl.sheetnames: ws = self.xl[sheet] try: @@ -590,17 +547,14 @@ class EquipmentParser(object): except (TypeError, KeyError) as e: logger.error(f"Error creating relevant equipment list: {e}") continue - # logger.debug(f"Relevant equipment: {pformat(relevant)}") previous_asset = "" for k, v in relevant.items(): - # logger.debug(f"Checking: {v}") asset = ws.cell(v['name']['row'], v['name']['column']).value if not check_not_nan(asset): asset = previous_asset else: previous_asset = asset asset = self.get_asset_number(input=asset) - # logger.debug(f"asset: {asset}") eq = Equipment.query(asset_number=asset) if eq is None: eq = Equipment.query(name=asset) @@ -623,7 +577,6 @@ class TipParser(object): xl (Workbook): Openpyxl workbook from submitted excel file. submission_type (str | SubmissionType): Type of submission expected (Wastewater, Bacterial Culture, etc.) """ - logger.info("\n\nHello from TipParser!\n\n") if isinstance(submission_type, str): submission_type = SubmissionType.query(name=submission_type) self.submission_type = submission_type @@ -646,8 +599,6 @@ class TipParser(object): Returns: List[dict]: list of equipment """ - # logger.debug(f"Equipment parser going into parsing: {pformat(self.__dict__)}") - # logger.debug(f"Sheets: {sheets}") for sheet in self.xl.sheetnames: ws = self.xl[sheet] try: @@ -655,7 +606,6 @@ class TipParser(object): except (TypeError, KeyError) as e: logger.error(f"Error creating relevant equipment list: {e}") continue - # logger.debug(f"Relevant equipment: {pformat(relevant)}") previous_asset = "" for k, v in relevant.items(): asset = ws.cell(v['name']['row'], v['name']['column']).value @@ -667,7 +617,6 @@ class TipParser(object): asset = previous_asset else: previous_asset = asset - # logger.debug(f"asset: {asset}") eq = Tips.query(lot=lot, name=asset, limit=1) try: yield dict(name=eq.name, role=k, lot=lot) @@ -684,7 +633,6 @@ class PCRParser(object): filepath (Path | None, optional): file to parse. Defaults to None. submission (BasicSubmission | None, optional): Submission parsed data to be added to. 
""" - # logger.debug(f'Parsing {filepath.__str__()}') if filepath is None: logger.error('No filepath given.') self.xl = None @@ -727,5 +675,4 @@ class PCRParser(object): value = row[1].value or "" pcr[key] = value pcr['imported_by'] = getuser() - # logger.debug(f"PCR: {pformat(pcr)}") return pcr diff --git a/src/submissions/backend/excel/reports.py b/src/submissions/backend/excel/reports.py index f92d107..9e500a6 100644 --- a/src/submissions/backend/excel/reports.py +++ b/src/submissions/backend/excel/reports.py @@ -32,7 +32,6 @@ class ReportArchetype(object): filename = filename.absolute() self.writer = ExcelWriter(filename.with_suffix(".xlsx"), engine='openpyxl') self.df.to_excel(self.writer, sheet_name=self.sheet_name) - # logger.debug(f"Writing report to: {filename}") self.writer.close() @@ -43,7 +42,6 @@ class ReportMaker(object): self.end_date = end_date # NOTE: Set page size to zero to override limiting query size. self.subs = BasicSubmission.query(start_date=start_date, end_date=end_date, page_size=0) - # logger.debug(f"Number of subs returned: {len(self.subs)}") if organizations is not None: self.subs = [sub for sub in self.subs if sub.submitting_lab.name in organizations] self.detailed_df, self.summary_df = self.make_report_xlsx() @@ -65,10 +63,8 @@ class ReportMaker(object): df2 = df.groupby(["submitting_lab", "extraction_kit"]).agg( {'extraction_kit': 'count', 'cost': 'sum', 'sample_count': 'sum'}) df2 = df2.rename(columns={"extraction_kit": 'run_count'}) - # logger.debug(f"Output daftaframe for xlsx: {df2.columns}") df = df.drop('id', axis=1) df = df.sort_values(['submitting_lab', "submitted_date"]) - # logger.debug(f"Details dataframe:\n{df2}") return df, df2 def make_report_html(self, df: DataFrame) -> str: @@ -86,12 +82,8 @@ class ReportMaker(object): """ old_lab = "" output = [] - # logger.debug(f"Report DataFrame: {df}") for row in df.iterrows(): - # logger.debug(f"Row {ii}: {row}") lab = row[0][0] - # logger.debug(f"Old lab: {old_lab}, Current lab: {lab}") - # logger.debug(f"Name: {row[0][1]}") data = [item for item in row[1]] kit = dict(name=row[0][1], cost=data[1], run_count=int(data[0]), sample_count=int(data[2])) # NOTE: if this is the same lab as before add together @@ -106,7 +98,6 @@ class ReportMaker(object): total_runs=kit['run_count']) output.append(adder) old_lab = lab - # logger.debug(output) dicto = {'start_date': self.start_date, 'end_date': self.end_date, 'labs': output} temp = env.get_template('summary_report.html') html = temp.render(input=dicto) @@ -127,14 +118,12 @@ class ReportMaker(object): self.summary_df.to_excel(self.writer, sheet_name="Report") self.detailed_df.to_excel(self.writer, sheet_name="Details", index=False) self.fix_up_xl() - # logger.debug(f"Writing report to: {filename}") self.writer.close() def fix_up_xl(self): """ Handles formatting of xl file, mediocrely. 
""" - # logger.debug(f"Updating worksheet") worksheet: Worksheet = self.writer.sheets['Report'] for idx, col in enumerate(self.summary_df, start=1): # NOTE: loop through all columns series = self.summary_df[col] @@ -149,7 +138,6 @@ class ReportMaker(object): except ValueError as e: logger.error(f"Couldn't resize column {col} due to {e}") blank_row = get_first_blank_df_row(self.summary_df) + 1 - # logger.debug(f"Blank row index = {blank_row}") for col in range(3, 6): col_letter = row_map[col] worksheet.cell(row=blank_row, column=col, value=f"=SUM({col_letter}2:{col_letter}{str(blank_row - 1)})") diff --git a/src/submissions/backend/excel/writer.py b/src/submissions/backend/excel/writer.py index d41bcff..17ac7e6 100644 --- a/src/submissions/backend/excel/writer.py +++ b/src/submissions/backend/excel/writer.py @@ -3,7 +3,6 @@ contains writer objects for pushing values to submission sheet templates. """ import logging from copy import copy -from datetime import date from operator import itemgetter from pprint import pformat from typing import List, Generator, Tuple @@ -111,7 +110,6 @@ class InfoWriter(object): info_dict (dict): Dictionary of information to write. sub_object (BasicSubmission | None, optional): Submission object containing methods. Defaults to None. """ - logger.debug(f"Info_dict coming into InfoWriter: {pformat(info_dict)}") if isinstance(submission_type, str): submission_type = SubmissionType.query(name=submission_type) if sub_object is None: @@ -121,7 +119,6 @@ class InfoWriter(object): self.xl = xl self.info_map = submission_type.construct_info_map(mode='write') self.info = self.reconcile_map(info_dict, self.info_map) - # logger.debug(pformat(self.info)) def reconcile_map(self, info_dict: dict, info_map: dict) -> Generator[(Tuple[str, dict]), None, None]: """ @@ -170,7 +167,6 @@ class InfoWriter(object): logger.error(f"No locations for {k}, skipping") continue for loc in locations: - logger.debug(f"Writing {k} to {loc['sheet']}, row: {loc['row']}, column: {loc['column']}") sheet = self.xl[loc['sheet']] try: sheet.cell(row=loc['row'], column=loc['column'], value=v['value']) @@ -247,8 +243,6 @@ class ReagentWriter(object): for v in reagent.values(): if not isinstance(v, dict): continue - # logger.debug( - # f"Writing {reagent['type']} {k} to {reagent['sheet']}, row: {v['row']}, column: {v['column']}") sheet.cell(row=v['row'], column=v['column'], value=v['value']) return self.xl @@ -288,7 +282,6 @@ class SampleWriter(object): multiples = ['row', 'column', 'assoc_id', 'submission_rank'] for sample in sample_list: sample = self.submission_type.get_submission_class().custom_sample_writer(sample) - logger.debug(f"Writing sample: {sample}") for assoc in zip(sample['row'], sample['column'], sample['submission_rank']): new = dict(row=assoc[0], column=assoc[1], submission_rank=assoc[2]) for k, v in sample.items(): @@ -369,9 +362,8 @@ class EquipmentWriter(object): mp_info = equipment_map[equipment['role']] except KeyError: logger.error(f"No {equipment['role']} in {pformat(equipment_map)}") - # logger.debug(f"{equipment['role']} map: {mp_info}") + mp_info = None placeholder = copy(equipment) - # if mp_info == {}: if not mp_info: for jj, (k, v) in enumerate(equipment.items(), start=1): dicto = dict(value=v, row=ii, column=jj) @@ -381,7 +373,6 @@ class EquipmentWriter(object): try: dicto = dict(value=v, row=mp_info[k]['row'], column=mp_info[k]['column']) except KeyError as e: - # logger.error(f"Keyerror: {e}") continue placeholder[k] = dicto if "asset_number" not in mp_info.keys(): @@ 
-400,17 +391,12 @@ class EquipmentWriter(object):
             Workbook: Workbook with equipment written
         """
         for equipment in self.equipment:
-            try:
-                sheet = self.xl[equipment['sheet']]
-            except KeyError:
+            if equipment['sheet'] not in self.xl.sheetnames:
                 self.xl.create_sheet("Equipment")
-            finally:
-                sheet = self.xl[equipment['sheet']]
+            sheet = self.xl[equipment['sheet']]
             for k, v in equipment.items():
                 if not isinstance(v, dict):
                     continue
-                # logger.debug(
-                #     f"Writing {k}: {v['value']} to {equipment['sheet']}, row: {v['row']}, column: {v['column']}")
                 if isinstance(v['value'], list):
                     v['value'] = v['value'][0]
                 try:
@@ -455,7 +441,6 @@ class TipWriter(object):
             return
         for ii, tips in enumerate(tips_list, start=1):
             mp_info = tips_map[tips.role]
-            # logger.debug(f"{tips['role']} map: {mp_info}")
             placeholder = {}
             if mp_info == {}:
                 for jj, (k, v) in enumerate(tips.__dict__.items(), start=1):
@@ -466,14 +451,12 @@
                 try:
                     dicto = dict(value=v, row=mp_info[k]['row'], column=mp_info[k]['column'])
                 except KeyError as e:
-                    # logger.error(f"Keyerror: {e}")
                     continue
                 placeholder[k] = dicto
             try:
                 placeholder['sheet'] = mp_info['sheet']
             except KeyError:
                 placeholder['sheet'] = "Tips"
-            # logger.debug(f"Final output of {tips['role']} : {placeholder}")
             yield placeholder

     def write_tips(self) -> Workbook:
         """
@@ -484,17 +467,12 @@
             Workbook: Workbook with tips written
         """
         for tips in self.tips:
-            try:
-                sheet = self.xl[tips['sheet']]
-            except KeyError:
+            if tips['sheet'] not in self.xl.sheetnames:
                 self.xl.create_sheet("Tips")
-            finally:
-                sheet = self.xl[tips['sheet']]
+            sheet = self.xl[tips['sheet']]
             for k, v in tips.items():
                 if not isinstance(v, dict):
                     continue
-                # logger.debug(
-                #     f"Writing {k}: {v['value']} to {equipment['sheet']}, row: {v['row']}, column: {v['column']}")
                 if isinstance(v['value'], list):
                     v['value'] = v['value'][0]
                 try:
diff --git a/src/submissions/backend/scripts/__init__.py b/src/submissions/backend/scripts/__init__.py
index 89b4971..a6a3d8e 100644
--- a/src/submissions/backend/scripts/__init__.py
+++ b/src/submissions/backend/scripts/__init__.py
@@ -1,7 +1,7 @@
 from .irida import import_irida

 def hello(ctx):
-    print("\n\nHello!\n\n")
+    print("\n\nHello! Welcome to Robotics Submission Tracker.\n\n")

 def goodbye(ctx):
-    print("\n\nGoodbye\n\n")
+    print("\n\nGoodbye.
Thank you for using Robotics Submission Tracker.\n\n") diff --git a/src/submissions/backend/scripts/irida.py b/src/submissions/backend/scripts/irida.py index f555dac..78d9224 100644 --- a/src/submissions/backend/scripts/irida.py +++ b/src/submissions/backend/scripts/irida.py @@ -19,11 +19,10 @@ def import_irida(ctx:Settings): existing_controls = [item.name for item in IridaControl.query()] prm_list = ", ".join([f"'{thing}'" for thing in existing_controls]) ctrl_db_path = ctx.directory_path.joinpath("submissions_parser_output", "submissions.db") - # print(f"Incoming settings: {pformat(ctx)}") try: conn = sqlite3.connect(ctrl_db_path) except AttributeError as e: - print(f"Error, could not import from irida due to {e}") + logger.error(f"Error, could not import from irida due to {e}") return sql = f"SELECT name, submitted_date, submission_id, contains, matches, kraken, subtype, refseq_version, " \ f"kraken2_version, kraken2_db_version, sample_id FROM _iridacontrol INNER JOIN _control on _control.id " \ @@ -32,8 +31,6 @@ def import_irida(ctx:Settings): records = [dict(name=row[0], submitted_date=row[1], submission_id=row[2], contains=row[3], matches=row[4], kraken=row[5], subtype=row[6], refseq_version=row[7], kraken2_version=row[8], kraken2_db_version=row[9], sample_id=row[10]) for row in cursor] - # incoming_controls = set(item['name'] for item in records) - # relevant = list(incoming_controls - existing_controls) for record in records: instance = IridaControl.query(name=record['name']) if instance: @@ -52,5 +49,4 @@ def import_irida(ctx:Settings): if sample: instance.sample = sample instance.submission = sample.submissions[0] - # pprint(instance.__dict__) - instance.save() \ No newline at end of file + instance.save() diff --git a/src/submissions/backend/validators/__init__.py b/src/submissions/backend/validators/__init__.py index 19686b1..394a118 100644 --- a/src/submissions/backend/validators/__init__.py +++ b/src/submissions/backend/validators/__init__.py @@ -24,11 +24,9 @@ class RSLNamer(object): filename = Path(filename) if Path(filename).exists() else filename self.submission_type = sub_type if not self.submission_type: - # logger.debug("Creating submission type because none exists") self.submission_type = self.retrieve_submission_type(filename=filename) logger.info(f"got submission type: {self.submission_type}") if self.submission_type: - # logger.debug("Retrieving BasicSubmission subclass") self.sub_object = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type) self.parsed_name = self.retrieve_rsl_number(filename=filename, regex=self.sub_object.get_regex(submission_type=sub_type)) if not data: @@ -52,7 +50,6 @@ class RSLNamer(object): str: parsed submission type """ def st_from_path(filename:Path) -> str: - # logger.info(f"Using path method for {filename}.") if filename.exists(): wb = load_workbook(filename) try: @@ -73,12 +70,9 @@ class RSLNamer(object): if filename.startswith("tmp"): return "Bacterial Culture" regex = BasicSubmission.construct_regex() - # logger.info(f"Using string method for {filename}.") - # logger.debug(f"Using regex: {regex}") m = regex.search(filename) try: submission_type = m.lastgroup - # logger.debug(f"Got submission type: {submission_type}") except AttributeError as e: submission_type = None logger.critical(f"No submission type found or submission type found!: {e}") @@ -98,7 +92,6 @@ class RSLNamer(object): if check: if "pytest" in sys.modules: raise ValueError("Submission Type came back as None.") - # logger.debug("Final 
option, ask the user for submission type") from frontend.widgets import ObjectSelector dlg = ObjectSelector(title="Couldn't parse submission type.", message="Please select submission type from list below.", obj_type=SubmissionType) @@ -116,21 +109,17 @@ class RSLNamer(object): regex (str): string to construct pattern filename (str): string to be parsed """ - logger.info(f"Input string to be parsed: {filename}") if regex is None: regex = BasicSubmission.construct_regex() else: - # logger.debug(f"Incoming regex: {regex}") try: regex = re.compile(rf'{regex}', re.IGNORECASE | re.VERBOSE) except re.error as e: regex = BasicSubmission.construct_regex() - logger.info(f"Using regex: {regex}") match filename: case Path(): m = regex.search(filename.stem) case str(): - # logger.debug(f"Using string method.") m = regex.search(filename) case _: m = None @@ -141,7 +130,6 @@ class RSLNamer(object): parsed_name = None else: parsed_name = None - # logger.debug(f"Got parsed submission name: {parsed_name}") return parsed_name @classmethod @@ -187,8 +175,6 @@ class RSLNamer(object): Returns: str: output file name. """ - # logger.debug(f"Kwargs: {kwargs}") - # logger.debug(f"Template: {template}") environment = jinja_template_loading() template = environment.from_string(template) return template.render(**kwargs) diff --git a/src/submissions/backend/validators/pydant.py b/src/submissions/backend/validators/pydant.py index 7ffd234..69bf71f 100644 --- a/src/submissions/backend/validators/pydant.py +++ b/src/submissions/backend/validators/pydant.py @@ -1,6 +1,6 @@ -''' +""" Contains pydantic models and accompanying validators -''' +""" from __future__ import annotations import uuid, re, logging, csv, sys from pydantic import BaseModel, field_validator, Field, model_validator @@ -123,18 +123,14 @@ class PydReagent(BaseModel): Tuple[Reagent, Report]: Reagent instance and result of function """ report = Report() - # logger.debug("Adding extra fields.") if self.model_extra is not None: self.__dict__.update(self.model_extra) - # logger.debug(f"Reagent SQL constructor is looking up type: {self.type}, lot: {self.lot}") reagent = Reagent.query(lot=self.lot, name=self.name) - # logger.debug(f"Result: {reagent}") if reagent is None: reagent = Reagent() for key, value in self.__dict__.items(): if isinstance(value, dict): value = value['value'] - # logger.debug(f"Reagent info item for {key}: {value}") # NOTE: set fields based on keys in dictionary match key: case "lot": @@ -149,7 +145,6 @@ class PydReagent(BaseModel): if isinstance(value, str): value = date(year=1970, month=1, day=1) value = datetime.combine(value, datetime.min.time()) - logger.debug(f"Expiry date coming into sql: {value} with type {type(value)}") reagent.expiry = value.replace(tzinfo=timezone) case _: try: @@ -179,14 +174,12 @@ class PydSample(BaseModel, extra='allow'): @model_validator(mode='after') @classmethod def validate_model(cls, data): - # logger.debug(f"Data for pydsample: {data}") model = BasicSample.find_polymorphic_subclass(polymorphic_identity=data.sample_type) for k, v in data.model_extra.items(): if k in model.timestamps(): if isinstance(v, str): v = datetime.strptime(v, "%Y-%m-%d") data.__setattr__(k, v) - # logger.debug(f"Data coming out of validation: {pformat(data)}") return data @field_validator("row", "column", "assoc_id", "submission_rank") @@ -238,7 +231,6 @@ class PydSample(BaseModel, extra='allow'): """ report = None self.__dict__.update(self.model_extra) - # logger.debug(f"Here is the incoming sample dict: \n{self.__dict__}") 
instance = BasicSample.query_or_create(sample_type=self.sample_type, submitter_id=self.submitter_id) for key, value in self.__dict__.items(): match key: @@ -246,7 +238,6 @@ class PydSample(BaseModel, extra='allow'): case "row" | "column": continue case _: - # logger.debug(f"Setting sample field {key} to {value}") instance.__setattr__(key, value) out_associations = [] if submission is not None: @@ -254,15 +245,12 @@ class PydSample(BaseModel, extra='allow'): submission = BasicSubmission.query(rsl_plate_num=submission) assoc_type = submission.submission_type_name for row, column, aid, submission_rank in zip(self.row, self.column, self.assoc_id, self.submission_rank): - # logger.debug(f"Looking up association with identity: ({submission.submission_type_name} Association)") - # logger.debug(f"Looking up association with identity: ({assoc_type} Association)") association = SubmissionSampleAssociation.query_or_create(association_type=f"{assoc_type} Association", submission=submission, sample=instance, row=row, column=column, id=aid, submission_rank=submission_rank, **self.model_extra) - # logger.debug(f"Using submission_sample_association: {association}") try: out_associations.append(association) except IntegrityError as e: @@ -332,7 +320,6 @@ class PydEquipment(BaseModel, extra='ignore'): @field_validator('processes', mode='before') @classmethod def make_empty_list(cls, value): - # logger.debug(f"Pydantic value: {value}") if isinstance(value, GeneratorType): value = [item.name for item in value] value = convert_nans_to_nones(value) @@ -355,7 +342,6 @@ class PydEquipment(BaseModel, extra='ignore'): Tuple[Equipment, SubmissionEquipmentAssociation]: SQL objects """ if isinstance(submission, str): - # logger.debug(f"Got string, querying {submission}") submission = BasicSubmission.query(rsl_plate_num=submission) equipment = Equipment.query(asset_number=self.asset_number) if equipment is None: @@ -403,7 +389,6 @@ class PydEquipment(BaseModel, extra='ignore'): class PydSubmission(BaseModel, extra='allow'): filepath: Path submission_type: dict | None - # For defaults submitter_plate_num: dict | None = Field(default=dict(value=None, missing=True), validate_default=True) submitted_date: dict | None rsl_plate_num: dict | None = Field(default=dict(value=None, missing=True), validate_default=True) @@ -427,7 +412,6 @@ class PydSubmission(BaseModel, extra='allow'): if isinstance(value, dict): value = value['value'] if isinstance(value, Generator): - # logger.debug("We have a generator") return [PydTips(**tips) for tips in value] if not value: return [] @@ -436,9 +420,7 @@ class PydSubmission(BaseModel, extra='allow'): @field_validator('equipment', mode='before') @classmethod def convert_equipment_dict(cls, value): - # logger.debug(f"Equipment: {value}") if isinstance(value, Generator): - logger.debug("We have a generator") return [PydEquipment(**equipment) for equipment in value] if isinstance(value, dict): return value['value'] @@ -454,7 +436,6 @@ class PydSubmission(BaseModel, extra='allow'): @field_validator("submitter_plate_num") @classmethod def enforce_with_uuid(cls, value): - # logger.debug(f"submitter_plate_num coming into pydantic: {value}") if value['value'] in [None, "None"]: return dict(value=uuid.uuid4().hex.upper(), missing=True) else: @@ -464,7 +445,6 @@ class PydSubmission(BaseModel, extra='allow'): @field_validator("submitted_date", mode="before") @classmethod def rescue_date(cls, value): - # logger.debug(f"\n\nDate coming into pydantic: {value}\n\n") try: check = value['value'] is None 
except TypeError: @@ -509,7 +489,6 @@ class PydSubmission(BaseModel, extra='allow'): @classmethod def lookup_submitting_lab(cls, value): if isinstance(value['value'], str): - # logger.debug(f"Looking up organization {value['value']}") try: value['value'] = Organization.query(name=value['value']).name except AttributeError: @@ -540,13 +519,11 @@ class PydSubmission(BaseModel, extra='allow'): @field_validator("rsl_plate_num") @classmethod def rsl_from_file(cls, value, values): - # logger.debug(f"RSL-plate initial value: {value['value']} and other values: {values.data}") sub_type = values.data['submission_type']['value'] if check_not_nan(value['value']): value['value'] = value['value'].strip() return value else: - # logger.debug("Constructing plate sub_type.") if "pytest" in sys.modules and sub_type.replace(" ", "") == "BasicSubmission": output = "RSL-BS-Test001" else: @@ -623,7 +600,6 @@ class PydSubmission(BaseModel, extra='allow'): @classmethod def expand_reagents(cls, value): if isinstance(value, Generator): - # logger.debug("We have a generator") return [PydReagent(**reagent) for reagent in value] return value @@ -631,7 +607,6 @@ class PydSubmission(BaseModel, extra='allow'): @classmethod def expand_samples(cls, value): if isinstance(value, Generator): - # logger.debug("We have a generator")[ return [PydSample(**sample) for sample in value] return value @@ -656,7 +631,6 @@ class PydSubmission(BaseModel, extra='allow'): @field_validator("cost_centre") @classmethod def get_cost_centre(cls, value, values): - # logger.debug(f"Value coming in for cost_centre: {value}") match value['value']: case None: from backend.db.models import Organization @@ -671,7 +645,6 @@ class PydSubmission(BaseModel, extra='allow'): @field_validator("contact") @classmethod def get_contact_from_org(cls, value, values): - # logger.debug(f"Checking on value: {value}") match value: case dict(): if isinstance(value['value'], tuple): @@ -684,7 +657,6 @@ class PydSubmission(BaseModel, extra='allow'): if check is None: org = Organization.query(name=values.data['submitting_lab']['value']) contact = org.contacts[0].name - # logger.debug(f"Pulled: {contact}") if isinstance(contact, tuple): contact = contact[0] return dict(value=contact, missing=True) @@ -692,7 +664,6 @@ class PydSubmission(BaseModel, extra='allow'): return value def __init__(self, run_custom: bool = False, **data): - logger.debug(f"{__name__} input data: {data}") super().__init__(**data) # NOTE: this could also be done with default_factory self.submission_object = BasicSubmission.find_polymorphic_subclass( @@ -755,13 +726,11 @@ class PydSubmission(BaseModel, extra='allow'): except TypeError: pass else: - # logger.debug("Extracting 'value' from attributes") output = {k: self.filter_field(k) for k in fields} return output def filter_field(self, key: str): item = getattr(self, key) - # logger.debug(f"Attempting deconstruction of {key}: {item} with type {type(item)}") match item: case dict(): try: @@ -793,13 +762,10 @@ class PydSubmission(BaseModel, extra='allow'): """ report = Report() dicto = self.improved_dict() - # logger.warning(f"\n\nQuery or create: {self.submission_type['value']}, {self.rsl_plate_num['value']}") instance, result = BasicSubmission.query_or_create(submission_type=self.submission_type['value'], rsl_plate_num=self.rsl_plate_num['value']) - logger.debug(f"Result of query or create: {instance}") report.add_result(result) self.handle_duplicate_samples() - # logger.debug(f"Here's our list of duplicate removed samples: {self.samples}") for key, 
value in dicto.items(): if isinstance(value, dict): try: @@ -811,18 +777,13 @@ class PydSubmission(BaseModel, extra='allow'): continue if value is None: continue - # logger.debug(f"Setting {key} to {value}") match key: case "reagents": for reagent in self.reagents: - logger.debug(f"Checking reagent {reagent.lot}") reagent, _ = reagent.toSQL(submission=instance) - # logger.debug(f"Association: {assoc}") case "samples": for sample in self.samples: sample, associations, _ = sample.toSQL(submission=instance) - # logger.debug(f"Sample SQL object to be added to submission: {sample.__dict__}") - # logger.debug(associations) for assoc in associations: if assoc is not None: if assoc not in instance.submission_sample_associations: @@ -830,19 +791,16 @@ class PydSubmission(BaseModel, extra='allow'): else: logger.warning(f"Sample association {assoc} is already present in {instance}") case "equipment": - # logger.debug(f"Equipment: {pformat(self.equipment)}") for equip in self.equipment: if equip is None: continue equip, association = equip.toSQL(submission=instance) if association is not None: instance.submission_equipment_associations.append(association) - logger.debug(f"Equipment associations: {instance.submission_equipment_associations}") case "tips": for tips in self.tips: if tips is None: continue - # logger.debug(f"Converting tips: {tips} to sql.") try: association = tips.to_sql(submission=instance) except AttributeError: @@ -864,14 +822,11 @@ class PydSubmission(BaseModel, extra='allow'): value = value instance.set_attribute(key=key, value=value) case item if item in instance.jsons(): - # logger.debug(f"{item} is a json.") try: ii = value.items() except AttributeError: ii = {} - logger.debug(f"ii is {ii}, value is {value}") for k, v in ii: - logger.debug(f"k is {k}, v is {v}") if isinstance(v, datetime): value[k] = v.strftime("%Y-%m-%d %H:%M:%S") else: @@ -893,21 +848,17 @@ class PydSubmission(BaseModel, extra='allow'): else: logger.warning(f"{key} already == {value} so no updating.") try: - # logger.debug(f"Calculating costs for procedure...") instance.calculate_base_cost() except (TypeError, AttributeError) as e: - logger.debug(f"Looks like that kit doesn't have cost breakdown yet due to: {e}, using 0.") + logger.error(f"Looks like that kit doesn't have cost breakdown yet due to: {e}, using 0.") try: instance.run_cost = instance.extraction_kit.cost_per_run except AttributeError: instance.run_cost = 0 - # logger.debug(f"Calculated base run cost of: {instance.run_cost}") # NOTE: Apply any discounts that are applicable for client and kit. 
try: - # logger.debug("Checking and applying discounts...") discounts = [item.amount for item in Discount.query(kit_type=instance.extraction_kit, organization=instance.submitting_lab)] - # logger.debug(f"We got discounts: {discounts}") if len(discounts) > 0: instance.run_cost = instance.run_cost - sum(discounts) except Exception as e: @@ -925,7 +876,6 @@ class PydSubmission(BaseModel, extra='allow'): SubmissionFormWidget: Submission form widget """ from frontend.widgets.submission_widget import SubmissionFormWidget - # logger.debug(f"Disable: {disable}") return SubmissionFormWidget(parent=parent, submission=self, disable=disable) def to_writer(self) -> "SheetWriter": @@ -946,10 +896,8 @@ class PydSubmission(BaseModel, extra='allow'): str: Output filename """ template = self.submission_object.filename_template() - # logger.debug(f"Using template string: {template}") render = self.namer.construct_export_name(template=template, **self.improved_dict(dictionaries=False)).replace( "/", "") - # logger.debug(f"Template rendered as: {render}") return render # @report_result @@ -964,26 +912,20 @@ class PydSubmission(BaseModel, extra='allow'): Report: Result object containing a message and any missing components. """ report = Report() - # logger.debug(f"Extraction kit: {extraction_kit}. Is it a string? {isinstance(extraction_kit, str)}") if isinstance(extraction_kit, str): extraction_kit = dict(value=extraction_kit) if extraction_kit is not None and extraction_kit != self.extraction_kit['value']: self.extraction_kit['value'] = extraction_kit['value'] - # logger.debug(f"Looking up {self.extraction_kit['value']}") ext_kit = KitType.query(name=self.extraction_kit['value']) ext_kit_rtypes = [item.to_pydantic() for item in ext_kit.get_reagents(required=True, submission_type=self.submission_type['value'])] - # logger.debug(f"Kit reagents: {ext_kit_rtypes}") - # logger.debug(f"Submission reagents: {self.reagents}") # NOTE: Exclude any reagenttype found in this pyd not expected in kit. 
expected_check = [item.role for item in ext_kit_rtypes] output_reagents = [rt for rt in self.reagents if rt.role in expected_check] - logger.debug(f"Already have these reagent types: {output_reagents}") missing_check = [item.role for item in output_reagents] missing_reagents = [rt for rt in ext_kit_rtypes if rt.role not in missing_check] missing_reagents += [rt for rt in output_reagents if rt.missing] output_reagents += [rt for rt in missing_reagents if rt not in output_reagents] - # logger.debug(f"Missing reagents types: {missing_reagents}") # NOTE: if lists are equal return no problem if len(missing_reagents) == 0: result = None @@ -1072,7 +1014,6 @@ class PydReagentRole(BaseModel): instance: ReagentRole = ReagentRole.query(name=self.name) if instance is None: instance = ReagentRole(name=self.name, eol_ext=self.eol_ext) - # logger.debug(f"This is the reagent type instance: {instance.__dict__}") try: assoc = KitTypeReagentRoleAssociation.query(reagent_role=instance, kit_type=kit) except StatementError: diff --git a/src/submissions/frontend/visualizations/__init__.py b/src/submissions/frontend/visualizations/__init__.py index 1dccb7d..614828b 100644 --- a/src/submissions/frontend/visualizations/__init__.py +++ b/src/submissions/frontend/visualizations/__init__.py @@ -41,7 +41,6 @@ class CustomFigure(Figure): """ if modes: ytitle = modes[0] - # logger.debug("Creating visibles list for each mode.") self.update_layout( xaxis_title="Submitted Date (* - Date parsed from fastq file creation date)", yaxis_title=ytitle, @@ -79,7 +78,6 @@ class CustomFigure(Figure): rng = [1] if months > 2: rng += [iii for iii in range(3, months, 3)] - # logger.debug(f"Making buttons for months: {rng}") buttons = [dict(count=iii, label=f"{iii}m", step="month", stepmode="backward") for iii in rng] if months > date.today().month: buttons += [dict(count=1, label="YTD", step="year", stepmode="todate")] @@ -117,24 +115,6 @@ class CustomFigure(Figure): {"yaxis.title.text": mode}, ]) - # def save_figure(self, group_name: str = "plotly_output", parent: QWidget | None = None): - # """ - # Writes plotly figure to html file. - # - # Args: - # figs (): - # settings (dict): settings passed down from click - # fig (Figure): input figure object - # group_name (str): controltype - # """ - # - # output = select_save_file(obj=parent, default_name=group_name, extension="png") - # self.write_image(output.absolute().__str__(), engine="kaleido") - # - # def save_data(self, group_name: str = "plotly_export", parent:QWidget|None=None): - # output = select_save_file(obj=parent, default_name=group_name, extension="xlsx") - # self.df.to_excel(output.absolute().__str__(), engine="openpyxl", index=False) - def to_html(self) -> str: """ Creates final html code from plotly diff --git a/src/submissions/frontend/visualizations/irida_charts.py b/src/submissions/frontend/visualizations/irida_charts.py index 06e04ac..28592ef 100644 --- a/src/submissions/frontend/visualizations/irida_charts.py +++ b/src/submissions/frontend/visualizations/irida_charts.py @@ -3,13 +3,12 @@ Functions for constructing irida controls graphs using plotly. """ from datetime import date from pprint import pformat -from typing import Generator import plotly.express as px import pandas as pd from PyQt6.QtWidgets import QWidget from . 
import CustomFigure import logging -from tools import get_unique_values_in_df_column, divide_chunks +from tools import get_unique_values_in_df_column logger = logging.getLogger(f"submissions.{__name__}") diff --git a/src/submissions/frontend/visualizations/pcr_charts.py b/src/submissions/frontend/visualizations/pcr_charts.py index b5b8d44..6b2ce1f 100644 --- a/src/submissions/frontend/visualizations/pcr_charts.py +++ b/src/submissions/frontend/visualizations/pcr_charts.py @@ -21,11 +21,9 @@ class PCRFigure(CustomFigure): months = int(settings['months']) except KeyError: months = 6 - # logger.debug(f"DF: {self.df}") self.construct_chart(df=df) def construct_chart(self, df: pd.DataFrame): - # logger.debug(f"PCR df:\n {df}") try: scatter = px.scatter(data_frame=df, x='submitted_date', y="ct", hover_data=["name", "target", "ct", "reagent_lot"], diff --git a/src/submissions/frontend/visualizations/turnaround_chart.py b/src/submissions/frontend/visualizations/turnaround_chart.py index 32f170f..a3173b0 100644 --- a/src/submissions/frontend/visualizations/turnaround_chart.py +++ b/src/submissions/frontend/visualizations/turnaround_chart.py @@ -23,7 +23,6 @@ class TurnaroundChart(CustomFigure): months = int(settings['months']) except KeyError: months = 6 - # logger.debug(f"DF: {self.df}") self.construct_chart() if threshold: self.add_hline(y=threshold) @@ -32,11 +31,9 @@ class TurnaroundChart(CustomFigure): def construct_chart(self, df: pd.DataFrame | None = None): if df: self.df = df - # logger.debug(f"PCR df:\n {df}") self.df = self.df[self.df.days.notnull()] self.df = self.df.sort_values(['submitted_date', 'name'], ascending=[True, True]).reset_index(drop=True) self.df = self.df.reset_index().rename(columns={"index": "idx"}) - # logger.debug(f"DF: {self.df}") try: scatter = px.scatter(data_frame=self.df, x='idx', y="days", hover_data=["name", "submitted_date", "completed_date", "days"], diff --git a/src/submissions/frontend/widgets/app.py b/src/submissions/frontend/widgets/app.py index 39cb739..d552b11 100644 --- a/src/submissions/frontend/widgets/app.py +++ b/src/submissions/frontend/widgets/app.py @@ -27,13 +27,11 @@ from .turnaround import TurnaroundTime from .omni_search import SearchBox logger = logging.getLogger(f'submissions.{__name__}') -# logger.info("Hello, I am a logger") class App(QMainWindow): def __init__(self, ctx: Settings = None): - # logger.debug(f"Initializing main window...") super().__init__() qInstallMessageHandler(lambda x, y, z: None) self.ctx = ctx @@ -68,7 +66,6 @@ class App(QMainWindow): """ adds items to menu bar """ - # logger.debug(f"Creating menu bar...") menuBar = self.menuBar() fileMenu = menuBar.addMenu("&File") editMenu = menuBar.addMenu("&Edit") @@ -82,7 +79,6 @@ class App(QMainWindow): fileMenu.addAction(self.importAction) fileMenu.addAction(self.yamlExportAction) fileMenu.addAction(self.yamlImportAction) - # methodsMenu.addAction(self.searchLog) methodsMenu.addAction(self.searchSample) maintenanceMenu.addAction(self.joinExtractionAction) maintenanceMenu.addAction(self.joinPCRAction) @@ -92,27 +88,20 @@ class App(QMainWindow): """ adds items to toolbar """ - # logger.debug(f"Creating toolbar...") toolbar = QToolBar("My main toolbar") self.addToolBar(toolbar) toolbar.addAction(self.addReagentAction) - # toolbar.addAction(self.addKitAction) - # toolbar.addAction(self.addOrgAction) def _createActions(self): """ creates actions """ - # logger.debug(f"Creating actions...") self.importAction = QAction("&Import Submission", self) self.addReagentAction = QAction("Add 
Reagent", self) - # self.addKitAction = QAction("Import Kit", self) - # self.addOrgAction = QAction("Import Org", self) self.joinExtractionAction = QAction("Link Extraction Logs") self.joinPCRAction = QAction("Link PCR Logs") self.helpAction = QAction("&About", self) self.docsAction = QAction("&Docs", self) - # self.searchLog = QAction("Search Log", self) self.searchSample = QAction("Search Sample", self) self.githubAction = QAction("Github", self) self.yamlExportAction = QAction("Export Type Example", self) @@ -123,14 +112,12 @@ class App(QMainWindow): """ connect menu and tool bar item to functions """ - # logger.debug(f"Connecting actions...") self.importAction.triggered.connect(self.table_widget.formwidget.importSubmission) self.addReagentAction.triggered.connect(self.table_widget.formwidget.add_reagent) self.joinExtractionAction.triggered.connect(self.table_widget.sub_wid.link_extractions) self.joinPCRAction.triggered.connect(self.table_widget.sub_wid.link_pcr) self.helpAction.triggered.connect(self.showAbout) self.docsAction.triggered.connect(self.openDocs) - # self.searchLog.triggered.connect(self.runSearch) self.searchSample.triggered.connect(self.runSampleSearch) self.githubAction.triggered.connect(self.openGithub) self.yamlExportAction.triggered.connect(self.export_ST_yaml) @@ -145,7 +132,6 @@ class App(QMainWindow): j_env = jinja_template_loading() template = j_env.get_template("project.html") html = template.render(info=self.ctx.package.__dict__) - # logger.debug(html) about = HTMLPop(html=html, title="About") about.exec() @@ -157,7 +143,6 @@ class App(QMainWindow): url = Path(sys._MEIPASS).joinpath("files", "docs", "index.html") else: url = Path("docs\\build\\index.html").absolute() - # logger.debug(f"Attempting to open {url}") webbrowser.get('windows-default').open(f"file://{url.__str__()}") def openGithub(self): @@ -177,10 +162,6 @@ class App(QMainWindow): instr = HTMLPop(html=html, title="Instructions") instr.exec() - # def runSearch(self): - # dlg = LogParser(self) - # dlg.exec() - def runSampleSearch(self): """ Create a search for samples. 
@@ -253,7 +234,6 @@ class App(QMainWindow): class AddSubForm(QWidget): def __init__(self, parent: QWidget): - # logger.debug(f"Initializating subform...") super(QWidget, self).__init__(parent) self.layout = QVBoxLayout(self) # NOTE: Initialize tab screen diff --git a/src/submissions/frontend/widgets/controls_chart.py b/src/submissions/frontend/widgets/controls_chart.py index 20bd161..e7562d1 100644 --- a/src/submissions/frontend/widgets/controls_chart.py +++ b/src/submissions/frontend/widgets/controls_chart.py @@ -6,7 +6,6 @@ from PyQt6.QtWidgets import ( QWidget, QComboBox, QPushButton ) from PyQt6.QtCore import QSignalBlocker - from backend import ChartReportMaker from backend.db import ControlType, IridaControl import logging @@ -21,12 +20,9 @@ class ControlsViewer(InfoPane): def __init__(self, parent: QWidget, archetype: str) -> None: super().__init__(parent) - logger.debug(f"Incoming Archetype: {archetype}") self.archetype = ControlType.query(name=archetype) if not self.archetype: return - logger.debug(f"Archetype set as: {self.archetype}") - # logger.debug(f"\n\n{self.app}\n\n") # NOTE: set tab2 layout self.control_sub_typer = QComboBox() # NOTE: fetch types of controls @@ -54,12 +50,6 @@ class ControlsViewer(InfoPane): self.save_button.pressed.connect(self.save_png) self.export_button.pressed.connect(self.save_excel) - # def save_chart_function(self): - # self.fig.save_figure(parent=self) - # - # def save_data_function(self): - # self.fig.save_data(parent=self) - @report_result def update_data(self, *args, **kwargs): """ @@ -71,20 +61,6 @@ class ControlsViewer(InfoPane): self.mode_sub_typer.disconnect() except TypeError: pass - # NOTE: correct start date being more recent than end date and rerun - # if self.datepicker.start_date.date() > self.datepicker.end_date.date(): - # threemonthsago = self.datepicker.end_date.date().addDays(-60) - # msg = f"Start date after end date is not allowed! Setting to {threemonthsago.toString()}." 
- # logger.warning(msg) - # # NOTE: block signal that will rerun controls getter and set start date Without triggering this function again - # with QSignalBlocker(self.datepicker.start_date) as blocker: - # self.datepicker.start_date.setDate(threemonthsago) - # self.update_data() - # report.add_result(Result(owner=self.__str__(), msg=msg, status="Warning")) - # return report - # # NOTE: convert to python useable date objects - # self.start_date = self.datepicker.start_date.date().toPyDate() - # self.end_date = self.datepicker.end_date.date().toPyDate() self.con_sub_type = self.control_sub_typer.currentText() self.mode = self.mode_typer.currentText() self.mode_sub_typer.clear() @@ -104,7 +80,6 @@ class ControlsViewer(InfoPane): self.mode_sub_typer.clear() self.mode_sub_typer.setEnabled(False) self.chart_maker_function() - # return report @report_result def chart_maker_function(self, *args, **kwargs): @@ -119,14 +94,11 @@ class ControlsViewer(InfoPane): Tuple[QMainWindow, dict]: Collection of new main app window and result dict """ report = Report() - # logger.debug(f"Control getter context: \n\tControl type: {self.con_sub_type}\n\tMode: {self.mode}\n\tStart \ - # Date: {self.start_date}\n\tEnd Date: {self.end_date}") # NOTE: set the mode_sub_type for kraken if self.mode_sub_typer.currentText() == "": self.mode_sub_type = None else: self.mode_sub_type = self.mode_sub_typer.currentText() - logger.debug(f"Subtype: {self.mode_sub_type}") months = self.diff_month(self.start_date, self.end_date) # NOTE: query all controls using the type/start and end dates from the gui chart_settings = dict(sub_type=self.con_sub_type, start_date=self.start_date, end_date=self.end_date, @@ -136,14 +108,11 @@ class ControlsViewer(InfoPane): self.report_obj = ChartReportMaker(df=self.fig.df, sheet_name=self.archetype.name) if issubclass(self.fig.__class__, CustomFigure): self.save_button.setEnabled(True) - # logger.debug(f"Updating figure...") # NOTE: construct html for webview try: html = self.fig.to_html() except AttributeError: html = "" - # logger.debug(f"The length of html code is: {len(html)}") self.webview.setHtml(html) self.webview.update() - # logger.debug("Figure updated... 
I hope.") return report diff --git a/src/submissions/frontend/widgets/equipment_usage.py b/src/submissions/frontend/widgets/equipment_usage.py index adb0fa7..69393da 100644 --- a/src/submissions/frontend/widgets/equipment_usage.py +++ b/src/submissions/frontend/widgets/equipment_usage.py @@ -21,9 +21,7 @@ class EquipmentUsage(QDialog): self.setWindowTitle(f"Equipment Checklist - {submission.rsl_plate_num}") self.used_equipment = self.submission.get_used_equipment() self.kit = self.submission.extraction_kit - # logger.debug(f"Existing equipment: {self.used_equipment}") self.opt_equipment = submission.submission_type.get_equipment() - # logger.debug(f"EquipmentRoles: {self.opt_equipment}") self.layout = QVBoxLayout() self.setLayout(self.layout) self.populate_form() @@ -38,7 +36,6 @@ class EquipmentUsage(QDialog): self.buttonBox.rejected.connect(self.reject) label = self.LabelRow(parent=self) self.layout.addWidget(label) - # logger.debug("Creating widgets for equipment") for eq in self.opt_equipment: widg = eq.to_form(parent=self, used=self.used_equipment) self.layout.addWidget(widg) @@ -124,9 +121,7 @@ class RoleComboBox(QWidget): Changes processes when equipment is changed """ equip = self.box.currentText() - # logger.debug(f"Updating equipment: {equip}") equip2 = next((item for item in self.role.equipment if item.name == equip), self.role.equipment[0]) - # logger.debug(f"Using: {equip2}") with QSignalBlocker(self.process) as blocker: self.process.clear() self.process.addItems([item for item in equip2.processes if item in self.role.processes]) @@ -136,7 +131,6 @@ class RoleComboBox(QWidget): Changes what tips are available when process is changed """ process = self.process.currentText().strip() - # logger.debug(f"Checking process: {process} for equipment {self.role.name}") process = Process.query(name=process) if process.tip_roles: for iii, tip_role in enumerate(process.tip_roles): @@ -144,7 +138,6 @@ class RoleComboBox(QWidget): tip_choices = [item.name for item in tip_role.instances] widget.setEditable(False) widget.addItems(tip_choices) - # logger.debug(f"Tiprole: {tip_role.__dict__}") widget.setObjectName(f"tips_{tip_role.name}") widget.setMinimumWidth(200) widget.setMaximumWidth(200) @@ -169,7 +162,6 @@ class RoleComboBox(QWidget): eq = Equipment.query(name=self.box.currentText()) tips = [PydTips(name=item.currentText(), role=item.objectName().lstrip("tips").lstrip("_")) for item in self.findChildren(QComboBox) if item.objectName().startswith("tips")] - # logger.debug(tips) try: return PydEquipment( name=eq.name, diff --git a/src/submissions/frontend/widgets/gel_checker.py b/src/submissions/frontend/widgets/gel_checker.py index 7dc3a7c..5810aca 100644 --- a/src/submissions/frontend/widgets/gel_checker.py +++ b/src/submissions/frontend/widgets/gel_checker.py @@ -148,5 +148,4 @@ class ControlsForm(QWidget): dicto['values'].append(dict(name=label[1], value=le.currentText())) if label[0] not in [item['name'] for item in output]: output.append(dicto) - # logger.debug(pformat(output)) return output, self.comment_field.toPlainText() diff --git a/src/submissions/frontend/widgets/info_tab.py b/src/submissions/frontend/widgets/info_tab.py index e9010cb..37f6f82 100644 --- a/src/submissions/frontend/widgets/info_tab.py +++ b/src/submissions/frontend/widgets/info_tab.py @@ -18,7 +18,6 @@ class InfoPane(QWidget): def __init__(self, parent: QWidget) -> None: super().__init__(parent) self.app = self.parent().parent() - # logger.debug(f"\n\n{self.app}\n\n") self.report = Report() self.datepicker = 
StartEndDatePicker(default_start=-180) self.webview = QWebEngineView() @@ -75,4 +74,4 @@ class InfoPane(QWidget): fname = select_save_file(obj=self, default_name=f"Plotly {self.start_date.strftime('%Y%m%d')} - {self.end_date.strftime('%Y%m%d')}", extension="png") - self.fig.write_image(fname.absolute().__str__(), engine="kaleido") \ No newline at end of file + self.fig.write_image(fname.absolute().__str__(), engine="kaleido") diff --git a/src/submissions/frontend/widgets/misc.py b/src/submissions/frontend/widgets/misc.py index 938928e..51249f1 100644 --- a/src/submissions/frontend/widgets/misc.py +++ b/src/submissions/frontend/widgets/misc.py @@ -1,6 +1,6 @@ -''' +""" Contains miscellaneous widgets for frontend functions -''' +""" import math from datetime import date from PyQt6.QtGui import QPageLayout, QPageSize, QStandardItem, QIcon @@ -8,7 +8,7 @@ from PyQt6.QtWebEngineWidgets import QWebEngineView from PyQt6.QtWidgets import ( QLabel, QVBoxLayout, QLineEdit, QComboBox, QDialog, - QDialogButtonBox, QDateEdit, QPushButton, QFormLayout, QWidget, QHBoxLayout, QSizePolicy + QDialogButtonBox, QDateEdit, QPushButton, QWidget, QHBoxLayout, QSizePolicy ) from PyQt6.QtCore import Qt, QDate, QSize, QMarginsF from tools import jinja_template_loading @@ -66,7 +66,6 @@ class AddReagentForm(QDialog): self.type_input.addItems([item.name for item in ReagentRole.query() if kit in item.kit_types]) else: self.type_input.addItems([item.name for item in ReagentRole.query()]) - # logger.debug(f"Trying to find index of {reagent_type}") # NOTE: convert input to user-friendly string? try: reagent_role = reagent_role.replace("_", " ").title() @@ -106,7 +105,6 @@ class AddReagentForm(QDialog): """ Updates reagent names form field with examples from reagent type """ - # logger.debug(self.type_input.currentText()) self.name_input.clear() lookup = Reagent.query(role=self.type_input.currentText()) self.name_input.addItems(list(set([item.name for item in lookup]))) @@ -145,7 +143,8 @@ def save_pdf(obj: QWebEngineView, filename: Path): obj.page().printToPdf(filename.absolute().__str__(), page_layout) -# subclass +# NOTE: subclass + class CheckableComboBox(QComboBox): # once there is a checkState set, it is rendered # here we assume default Unchecked @@ -162,7 +161,6 @@ class CheckableComboBox(QComboBox): return item.checkState() == Qt.CheckState.Checked def changed(self): - logger.debug("emitting updated") self.updated.emit() diff --git a/src/submissions/frontend/widgets/omni_search.py b/src/submissions/frontend/widgets/omni_search.py index 2774f04..c7c0d1a 100644 --- a/src/submissions/frontend/widgets/omni_search.py +++ b/src/submissions/frontend/widgets/omni_search.py @@ -1,6 +1,6 @@ -''' +""" Search box that performs fuzzy search for samples -''' +""" from pprint import pformat from typing import Tuple, Any, List from pandas import DataFrame @@ -39,7 +39,6 @@ class SearchBox(QDialog): else: self.sub_class = None self.results = SearchResults(parent=self, object_type=self.object_type, extras=self.extras, **kwargs) - # logger.debug(f"results: {self.results}") self.layout.addWidget(self.results, 5, 0) self.setLayout(self.layout) self.setWindowTitle(f"Search {self.object_type.__name__}") @@ -51,7 +50,6 @@ class SearchBox(QDialog): Changes form inputs based on sample type """ deletes = [item for item in self.findChildren(FieldSearch)] - # logger.debug(deletes) for item in deletes: item.setParent(None) # NOTE: Handle any subclasses @@ -62,7 +60,6 @@ class SearchBox(QDialog): self.object_type = self.original_type else: 
self.object_type = self.original_type.find_regular_subclass(self.sub_class.currentText()) - logger.debug(f"{self.object_type} searchables: {self.object_type.searchables}") for iii, searchable in enumerate(self.object_type.searchables): widget = FieldSearch(parent=self, label=searchable, field_name=searchable) widget.setObjectName(searchable) @@ -85,10 +82,9 @@ class SearchBox(QDialog): Shows dataframe of relevant samples. """ fields = self.parse_form() - # logger.debug(f"Got fields: {fields}") sample_list_creator = self.object_type.fuzzy_search(**fields) data = self.object_type.results_to_df(objects=sample_list_creator) - # Setting results moved to here from __init__ 202411118 + # NOTE: Setting results moved to here from __init__ 202411118 self.results.setData(df=data) @@ -154,7 +150,6 @@ class SearchResults(QTableView): def parse_row(self, x): context = {item['name']: x.sibling(x.row(), item['column']).data() for item in self.columns_of_interest} - logger.debug(f"Context: {context}") try: object = self.object_type.query(**context) except KeyError: diff --git a/src/submissions/frontend/widgets/pop_ups.py b/src/submissions/frontend/widgets/pop_ups.py index 1ea4fb8..638d1c7 100644 --- a/src/submissions/frontend/widgets/pop_ups.py +++ b/src/submissions/frontend/widgets/pop_ups.py @@ -9,7 +9,7 @@ from PyQt6.QtWebEngineWidgets import QWebEngineView from tools import jinja_template_loading import logging from backend.db import models -from typing import Any, Literal +from typing import Literal logger = logging.getLogger(f"submissions.{__name__}") diff --git a/src/submissions/frontend/widgets/submission_details.py b/src/submissions/frontend/widgets/submission_details.py index 24474ac..737c9d1 100644 --- a/src/submissions/frontend/widgets/submission_details.py +++ b/src/submissions/frontend/widgets/submission_details.py @@ -45,7 +45,6 @@ class SubmissionDetails(QDialog): self.btn.clicked.connect(self.save_pdf) self.back = QPushButton("Back") self.back.setFixedWidth(100) - # self.back.clicked.connect(self.back_function) self.back.clicked.connect(self.webview.back) self.layout.addWidget(self.back, 0, 0, 1, 1) self.layout.addWidget(self.btn, 0, 1, 1, 9) @@ -70,7 +69,6 @@ class SubmissionDetails(QDialog): if "Submission" in title: self.btn.setEnabled(True) self.export_plate = title.split(" ")[-1] - # logger.debug(f"Updating export plate to: {self.export_plate}") else: self.btn.setEnabled(False) try: @@ -78,7 +76,6 @@ class SubmissionDetails(QDialog): except IndexError as e: check = title if title == check: - # logger.debug("Disabling back button") self.back.setEnabled(False) else: self.back.setEnabled(True) @@ -91,7 +88,6 @@ class SubmissionDetails(QDialog): Args: sample (str): Submitter Id of the sample. """ - # logger.debug(f"Details: {sample}") if isinstance(sample, str): sample = BasicSample.query(submitter_id=sample) base_dict = sample.to_sub_dict(full_data=True) @@ -114,7 +110,6 @@ class SubmissionDetails(QDialog): base_dict = reagent.to_sub_dict(extraction_kit=self.kit, full_data=True) env = jinja_template_loading() temp_name = "reagent_details.html" - # logger.debug(f"Returning template: {temp_name}") try: template = env.get_template(temp_name) except TemplateNotFound as e: @@ -147,29 +142,23 @@ class SubmissionDetails(QDialog): Args: submission (str | BasicSubmission): Submission of interest. 
""" - # logger.debug(f"Details for: {submission}") if isinstance(submission, str): submission = BasicSubmission.query(rsl_plate_num=submission) self.rsl_plate_num = submission.rsl_plate_num self.base_dict = submission.to_dict(full_data=True) - # logger.debug(f"Submission details data:\n{pformat({k:v for k,v in self.base_dict.items() if k == 'reagents'})}") # NOTE: don't want id - # logger.debug(f"Creating barcode.") - # logger.debug(f"Making platemap...") self.base_dict['platemap'] = submission.make_plate_map(sample_list=submission.hitpick_plate()) self.base_dict['excluded'] = submission.get_default_info("details_ignore") self.base_dict, self.template = submission.get_details_template(base_dict=self.base_dict) template_path = Path(self.template.environment.loader.__getattribute__("searchpath")[0]) with open(template_path.joinpath("css", "styles.css"), "r") as f: css = f.read() - # logger.debug(f"Submission_details: {pformat(self.base_dict)}") - # logger.debug(f"User is power user: {is_power_user()}") self.html = self.template.render(sub=self.base_dict, permission=is_power_user(), css=css) self.webview.setHtml(self.html) @pyqtSlot(str) def sign_off(self, submission: str | BasicSubmission): - logger.debug(f"Signing off on {submission} - ({getuser()})") + logger.info(f"Signing off on {submission} - ({getuser()})") if isinstance(submission, str): submission = BasicSubmission.query(rsl_plate_num=submission) submission.signed_by = getuser() @@ -195,7 +184,6 @@ class SubmissionComment(QDialog): super().__init__(parent) try: self.app = parent.parent().parent().parent().parent().parent().parent - # logger.debug(f"App: {self.app}") except AttributeError: pass self.submission = submission @@ -225,5 +213,4 @@ class SubmissionComment(QDialog): return None dt = datetime.strftime(datetime.now(), "%Y-%m-%d %H:%M:%S") full_comment = {"name": commenter, "time": dt, "text": comment} - # logger.debug(f"Full comment: {full_comment}") return full_comment diff --git a/src/submissions/frontend/widgets/submission_table.py b/src/submissions/frontend/widgets/submission_table.py index dfa9da5..f98cfb8 100644 --- a/src/submissions/frontend/widgets/submission_table.py +++ b/src/submissions/frontend/widgets/submission_table.py @@ -1,6 +1,6 @@ -''' +""" Contains widgets specific to the submission summary and submission details. 
-''' +""" import logging from pprint import pformat from PyQt6.QtWidgets import QTableView, QMenu @@ -107,20 +107,16 @@ class SubmissionsSheet(QTableView): Args: event (_type_): the item of interest """ - # logger.debug(event.__dict__) id = self.selectionModel().currentIndex() id = id.sibling(id.row(), 0).data() submission = BasicSubmission.query(id=id) - # logger.debug(f"Event submission: {submission}") self.menu = QMenu(self) self.con_actions = submission.custom_context_events() - # logger.debug(f"Menu options: {self.con_actions}") for k in self.con_actions.keys(): - # logger.debug(f"Adding {k}") action = QAction(k, self) action.triggered.connect(lambda _, action_name=k: self.triggered_action(action_name=action_name)) self.menu.addAction(action) - # add other required actions + # NOTE: add other required actions self.menu.popup(QCursor.pos()) def triggered_action(self, action_name: str): @@ -130,8 +126,6 @@ class SubmissionsSheet(QTableView): Args: action_name (str): name of the action from the menu """ - # logger.debug(f"Action: {action_name}") - # logger.debug(f"Responding with {self.con_actions[action_name]}") func = self.con_actions[action_name] func(obj=self) @@ -179,7 +173,6 @@ class SubmissionsSheet(QTableView): if sub is None: continue try: - # logger.debug(f"Found submission: {sub.rsl_plate_num}") count += 1 except AttributeError: continue diff --git a/src/submissions/frontend/widgets/submission_widget.py b/src/submissions/frontend/widgets/submission_widget.py index 0e4ec5b..a6dd38a 100644 --- a/src/submissions/frontend/widgets/submission_widget.py +++ b/src/submissions/frontend/widgets/submission_widget.py @@ -1,9 +1,9 @@ -''' +""" Contains all submission related frontend functions -''' +""" from PyQt6.QtWidgets import ( QWidget, QPushButton, QVBoxLayout, - QComboBox, QDateEdit, QLineEdit, QLabel, QCheckBox, QBoxLayout, QHBoxLayout, QGridLayout + QComboBox, QDateEdit, QLineEdit, QLabel, QCheckBox, QHBoxLayout, QGridLayout ) from PyQt6.QtCore import pyqtSignal, Qt, QSignalBlocker from . 
import select_open_file, select_save_file @@ -34,7 +34,6 @@ class MyQComboBox(QComboBox): super(MyQComboBox, self).__init__(*args, **kwargs) self.scrollWidget = scrollWidget self.setFocusPolicy(Qt.FocusPolicy.StrongFocus) - logger.debug(f"Scrollwidget: {scrollWidget}") def wheelEvent(self, *args, **kwargs): if self.hasFocus(): @@ -61,14 +60,12 @@ class MyQDateEdit(QDateEdit): class SubmissionFormContainer(QWidget): - # A signal carrying a path + # NOTE: A signal carrying a path import_drag = pyqtSignal(Path) def __init__(self, parent: QWidget) -> None: - # logger.debug(f"Setting form widget...") super().__init__(parent) self.app = self.parent().parent() - # logger.debug(f"App: {self.app}") self.report = Report() self.setStyleSheet('background-color: light grey;') self.setAcceptDrops(True) @@ -89,7 +86,6 @@ class SubmissionFormContainer(QWidget): Sets filename when file dropped """ fname = Path([u.toLocalFile() for u in event.mimeData().urls()][0]) - # logger.debug(f"App: {self.app}") self.app.last_dir = fname.parent self.import_drag.emit(fname) @@ -127,7 +123,6 @@ class SubmissionFormContainer(QWidget): # NOTE: set file dialog if isinstance(fname, bool) or fname is None: fname = select_open_file(self, file_extension="xlsx") - # logger.debug(f"Attempting to parse file: {fname}") if not fname: report.add_result(Result(msg=f"File {fname.__str__()} not found.", status="critical")) return report @@ -139,14 +134,10 @@ class SubmissionFormContainer(QWidget): return except AttributeError: self.prsr = SheetParser(filepath=fname) - # logger.debug(f"Submission dictionary:\n{pformat(self.prsr.sub)}") self.pyd = self.prsr.to_pydantic() - # logger.debug(f"Pydantic result: \n\n{pformat(self.pyd)}\n\n") self.form = self.pyd.to_form(parent=self) self.layout().addWidget(self.form) return report - # logger.debug(f"Outgoing report: {self.report.results}") - # logger.debug(f"All attributes of submission container:\n{pformat(self.__dict__)}") @report_result def add_reagent(self, reagent_lot: str | None = None, reagent_role: str | None = None, expiry: date | None = None, @@ -172,14 +163,12 @@ class SubmissionFormContainer(QWidget): if dlg.exec(): # NOTE: extract form info info = dlg.parse_form() - # logger.debug(f"Reagent info: {info}") # NOTE: create reagent object reagent = PydReagent(ctx=self.app.ctx, **info, missing=False) # NOTE: send reagent to db sqlobj, result = reagent.toSQL() sqlobj.save() report.add_result(result) - # logger.debug(f"Reagent: {reagent}, Report: {report}") return reagent, report @@ -189,7 +178,6 @@ class SubmissionFormWidget(QWidget): def __init__(self, parent: QWidget, submission: PydSubmission, disable: list | None = None) -> None: super().__init__(parent) - # logger.debug(f"Disable: {disable}") if disable is None: disable = [] self.app = parent.app @@ -200,17 +188,13 @@ class SubmissionFormWidget(QWidget): defaults = st.get_default_info("form_recover", "form_ignore", submission_type=self.pyd.submission_type['value']) self.recover = defaults['form_recover'] self.ignore = defaults['form_ignore'] - # logger.debug(f"Attempting to extend ignore list with {self.pyd.submission_type['value']}") self.layout = QVBoxLayout() for k in list(self.pyd.model_fields.keys()) + list(self.pyd.model_extra.keys()): - # logger.debug(f"Creating widget: {k}") if k in self.ignore: logger.warning(f"{k} in form_ignore {self.ignore}, not creating widget") continue try: - # logger.debug(f"Key: {k}, Disable: {disable}") check = k in disable - # logger.debug(f"Check: {check}") except TypeError: check = False try: 
@@ -225,7 +209,6 @@ class SubmissionFormWidget(QWidget): sub_obj=st, disable=check) if add_widget is not None: self.layout.addWidget(add_widget) - # if k == "extraction_kit": if k in self.__class__.update_reagent_fields: add_widget.input.currentTextChanged.connect(self.scrape_reagents) self.disabler = self.DisableReagents(self) @@ -236,15 +219,10 @@ class SubmissionFormWidget(QWidget): self.scrape_reagents(self.extraction_kit) def disable_reagents(self): + """ + Disables all ReagentFormWidgets in this form. + """ for reagent in self.findChildren(self.ReagentFormWidget): - # if self.disabler.checkbox.isChecked(): - # # reagent.setVisible(True) - # # with QSignalBlocker(self.disabler.checkbox) as b: - # reagent.flip_check() - # else: - # # reagent.setVisible(False) - # # with QSignalBlocker(self.disabler.checkbox) as b: - # reagent.check.setChecked(False) reagent.flip_check(self.disabler.checkbox.isChecked()) @@ -263,7 +241,6 @@ class SubmissionFormWidget(QWidget): Returns: self.InfoItem: Form widget to hold name:value """ - # logger.debug(f"Key: {key}, Disable: {disable}") if isinstance(submission_type, str): submission_type = SubmissionType.query(name=submission_type) if key not in self.ignore: @@ -276,7 +253,6 @@ class SubmissionFormWidget(QWidget): case _: widget = self.InfoItem(parent=self, key=key, value=value, submission_type=submission_type, sub_obj=sub_obj) - # logger.debug(f"Setting widget enabled to: {not disable}") if disable: widget.input.setEnabled(False) widget.input.setToolTip("Widget disabled to protect database integrity.") @@ -298,24 +274,20 @@ class SubmissionFormWidget(QWidget): """ self.extraction_kit = args[0] report = Report() - logger.debug(f"Extraction kit: {self.extraction_kit}") # NOTE: Remove previous reagent widgets try: old_reagents = self.find_widgets() except AttributeError: logger.error(f"Couldn't find old reagents.") old_reagents = [] - # logger.debug(f"\n\nAttempting to clear: {old_reagents}\n\n") for reagent in old_reagents: if isinstance(reagent, self.ReagentFormWidget) or isinstance(reagent, QPushButton): reagent.setParent(None) reagents, integrity_report = self.pyd.check_kit_integrity(extraction_kit=self.extraction_kit) - # logger.debug(f"Got reagents: {pformat(reagents)}") for reagent in reagents: add_widget = self.ReagentFormWidget(parent=self, reagent=reagent, extraction_kit=self.extraction_kit) self.layout.addWidget(add_widget) report.add_result(integrity_report) - # logger.debug(f"Outgoing report: {report.results}") if hasattr(self.pyd, "csv"): export_csv_btn = QPushButton("Export CSV") export_csv_btn.setObjectName("export_csv_btn") @@ -326,6 +298,7 @@ class SubmissionFormWidget(QWidget): self.layout.addWidget(submit_btn) submit_btn.clicked.connect(self.submit_new_sample_function) self.setLayout(self.layout) + self.disabler.checkbox.setChecked(True) return report def clear_form(self): @@ -365,23 +338,16 @@ class SubmissionFormWidget(QWidget): report = Report() result = self.parse_form() report.add_result(result) - # logger.debug(f"Submission: {pformat(self.pyd)}") - # logger.debug("Checking kit integrity...") if self.disabler.checkbox.isChecked(): _, result = self.pyd.check_kit_integrity() report.add_result(result) if len(result.results) > 0: return - # logger.debug(f"PYD before transformation into SQL:\n\n{self.pyd}\n\n") base_submission, result = self.pyd.to_sql() - # logger.debug(f"SQL object: {pformat(base_submission.__dict__)}") - # logger.debug(f"Base submission: {base_submission.to_dict()}") # NOTE: check output message for issues - 
logger.debug(f"Result of to_sql: {result}") try: trigger = result.results[-1] code = trigger.code - # logger.debug(f"Code from return: {code}") except IndexError as e: logger.error(result.results) logger.error(f"Problem getting error code: {e}") @@ -408,11 +374,8 @@ class SubmissionFormWidget(QWidget): pass # NOTE: add reagents to submission object for reagent in base_submission.reagents: - # logger.debug(f"Updating: {reagent} with {reagent.lot}") reagent.update_last_used(kit=base_submission.extraction_kit) - # logger.debug(f"Final reagents: {pformat(base_submission.reagents)}") save_output = base_submission.save() - # logger.debug(f"Save output: {save_output}") # NOTE: update summary sheet self.app.table_widget.sub_wid.setData() # NOTE: reset form @@ -423,7 +386,6 @@ class SubmissionFormWidget(QWidget): check = True if check: self.setParent(None) - # logger.debug(f"All attributes of obj: {pformat(self.__dict__)}") return report def export_csv_function(self, fname: Path | None = None): @@ -454,7 +416,6 @@ class SubmissionFormWidget(QWidget): info = {} reagents = [] for widget in self.findChildren(QWidget): - # logger.debug(f"Parsed widget of type {type(widget)}") match widget: case self.ReagentFormWidget(): reagent, _ = widget.parse_form() @@ -464,16 +425,10 @@ class SubmissionFormWidget(QWidget): field, value = widget.parse_form() if field is not None: info[field] = value - # logger.debug(f"Info: {pformat(info)}") - logger.debug(f"Reagents going into pyd: {pformat(reagents)}") self.pyd.reagents = reagents - logger.debug(f"Reagents after insertion in pyd: {pformat(self.pyd.reagents)}") - # logger.debug(f"Attrs not in info: {[k for k, v in self.__dict__.items() if k not in info.keys()]}") for item in self.recover: - # logger.debug(f"Attempting to recover: {item}") if hasattr(self, item): value = getattr(self, item) - # logger.debug(f"Setting {item}") info[item] = value for k, v in info.items(): self.pyd.set_attribute(key=k, value=v) @@ -551,9 +506,6 @@ class SubmissionFormWidget(QWidget): except (TypeError, KeyError): pass obj = parent.parent().parent() - # logger.debug(f"Object: {obj}") - # logger.debug(f"Parent: {parent.parent()}") - # logger.debug(f"Creating widget for: {key}") match key: case 'submitting_lab': add_widget = MyQComboBox(scrollWidget=parent) @@ -567,7 +519,6 @@ class SubmissionFormWidget(QWidget): looked_up_lab = Organization.query(name=value, limit=1) except AttributeError: looked_up_lab = None - # logger.debug(f"\n\nLooked up lab: {looked_up_lab}") if looked_up_lab: try: labs.remove(str(looked_up_lab.name)) @@ -586,12 +537,9 @@ class SubmissionFormWidget(QWidget): # NOTE: create combobox to hold looked up kits add_widget = MyQComboBox(scrollWidget=parent) # NOTE: lookup existing kits by 'submission_type' decided on by sheetparser - # logger.debug(f"Looking up kits used for {submission_type}") uses = [item.name for item in submission_type.kit_types] obj.uses = uses - # logger.debug(f"Kits received for {submission_type}: {uses}") if check_not_nan(value): - # logger.debug(f"The extraction kit in parser was: {value}") try: uses.insert(0, uses.pop(uses.index(value))) except ValueError: @@ -626,7 +574,6 @@ class SubmissionFormWidget(QWidget): else: # NOTE: anything else gets added in as a line edit add_widget = QLineEdit() - # logger.debug(f"Setting widget text to {str(value).replace('_', ' ')}") add_widget.setText(str(value).replace("_", " ")) add_widget.setToolTip(f"Enter value for {key}") if add_widget is not None: @@ -725,7 +672,6 @@ class SubmissionFormWidget(QWidget): if 
not self.lot.isEnabled(): return None, report lot = self.lot.currentText() - # logger.debug(f"Using this lot for the reagent {self.reagent}: {lot}") wanted_reagent = Reagent.query(lot=lot, role=self.reagent.role) # NOTE: if reagent doesn't exist in database, offer to add it (uses App.add_reagent) if wanted_reagent is None: @@ -741,7 +687,6 @@ class SubmissionFormWidget(QWidget): return wanted_reagent, report else: # NOTE: In this case we will have an empty reagent and the submission will fail kit integrity check - # logger.debug("Will not add reagent.") report.add_result(Result(msg="Failed integrity check", status="Critical")) return None, report else: @@ -791,7 +736,6 @@ class SubmissionFormWidget(QWidget): looked_up_rt = KitTypeReagentRoleAssociation.query(reagent_role=reagent.role, kit_type=extraction_kit) relevant_reagents = [str(item.lot) for item in looked_up_rt.get_all_relevant_reagents()] - logger.debug(f"Relevant reagents for {reagent}: {relevant_reagents}") # NOTE: if reagent in sheet is not found insert it into the front of relevant reagents so it shows if str(reagent.lot) not in relevant_reagents: if check_not_nan(reagent.lot): @@ -803,7 +747,6 @@ class SubmissionFormWidget(QWidget): looked_up_reg = None if isinstance(looked_up_reg, list): looked_up_reg = None - # logger.debug(f"Because there was no reagent listed for {reagent.lot}, we will insert the last lot used: {looked_up_reg}") if looked_up_reg: try: relevant_reagents.remove(str(looked_up_reg.lot)) @@ -812,15 +755,11 @@ class SubmissionFormWidget(QWidget): relevant_reagents.insert(0, str(looked_up_reg.lot)) else: if len(relevant_reagents) > 1: - # logger.debug(f"Found {reagent.lot} in relevant reagents: {relevant_reagents}. Moving to front of list.") idx = relevant_reagents.index(str(reagent.lot)) - # logger.debug(f"The index we got for {reagent.lot} in {relevant_reagents} was {idx}") moved_reag = relevant_reagents.pop(idx) relevant_reagents.insert(0, moved_reag) else: - # logger.debug(f"Found {reagent.lot} in relevant reagents: {relevant_reagents}. 
But no need to move due to short list.") pass - logger.debug(f"New relevant reagents: {relevant_reagents}") self.setObjectName(f"lot_{reagent.role}") self.addItems(relevant_reagents) self.setToolTip(f"Enter lot number for the reagent used for {reagent.role}") diff --git a/src/submissions/frontend/widgets/summary.py b/src/submissions/frontend/widgets/summary.py index a8d8f66..27e25a1 100644 --- a/src/submissions/frontend/widgets/summary.py +++ b/src/submissions/frontend/widgets/summary.py @@ -35,7 +35,6 @@ class Summary(InfoPane): def update_data(self): super().update_data() orgs = [self.org_select.itemText(i) for i in range(self.org_select.count()) if self.org_select.itemChecked(i)] - # logger.debug(f"Getting report from {self.start_date} to {self.end_date} using {orgs}") self.report_obj = ReportMaker(start_date=self.start_date, end_date=self.end_date, organizations=orgs) self.webview.setHtml(self.report_obj.html) if self.report_obj.subs: diff --git a/src/submissions/tools/__init__.py b/src/submissions/tools/__init__.py index a11f595..a17b5eb 100644 --- a/src/submissions/tools/__init__.py +++ b/src/submissions/tools/__init__.py @@ -27,14 +27,14 @@ timezone = tz("America/Winnipeg") logger = logging.getLogger(f"submissions.{__name__}") -logger.debug(f"Package dir: {project_path}") +logger.info(f"Package dir: {project_path}") if platform.system() == "Windows": os_config_dir = "AppData/local" - print(f"Got platform Windows, config_dir: {os_config_dir}") + logger.info(f"Got platform Windows, config_dir: {os_config_dir}") else: os_config_dir = ".config" - print(f"Got platform other, config_dir: {os_config_dir}") + logger.info(f"Got platform other, config_dir: {os_config_dir}") main_aux_dir = Path.home().joinpath(f"{os_config_dir}/submissions") @@ -184,7 +184,6 @@ def convert_nans_to_nones(input_str) -> str | None: Returns: str: _description_ """ - # logger.debug(f"Input value of: {input_str}") if check_not_nan(input_str): return input_str return None @@ -512,7 +511,6 @@ def get_config(settings_path: Path | str | None = None) -> Settings: Returns: Settings: Pydantic settings object """ - # logger.debug(f"Creating settings...") if isinstance(settings_path, str): settings_path = Path(settings_path) @@ -566,7 +564,6 @@ def get_config(settings_path: Path | str | None = None) -> Settings: default_settings = yaml.load(dset, Loader=yaml.Loader) settings = Settings(**default_settings) settings.save(settings_path=settings_path) - # logger.debug(f"Using {settings_path} for config file.") with open(settings_path, "r") as stream: settings = yaml.load(stream, Loader=yaml.Loader) return Settings(**settings) @@ -755,7 +752,6 @@ def setup_lookup(func): raise ValueError("Could not sanitize dictionary in query. Make sure you parse it first.") elif v is not None: sanitized_kwargs[k] = v - # logger.debug(f"sanitized kwargs: {sanitized_kwargs}") return func(*args, **sanitized_kwargs) return wrapper @@ -800,7 +796,6 @@ class Result(BaseModel, arbitrary_types_allowed=True): logger.error(f"Exception origin: {origin}") if "unique constraint failed:" in origin: field = " ".join(origin.split(".")[1:]).replace("_", " ").upper() - # logger.debug(field) value = f"{field} doesn't have a unique value.\nIt must be changed." 
else: value = f"Got unknown integrity error: {value}" @@ -844,7 +839,6 @@ class Report(BaseModel): except AttributeError: logger.error(f"Problem adding result.") case Report(): - # logger.debug(f"Adding all results in report to new report") for res in result.results: logger.info(f"Adding {res} from {result} to results.") self.results.append(res) @@ -934,7 +928,7 @@ def check_authorization(func): """ def wrapper(*args, **kwargs): - logger.debug(f"Checking authorization") + logger.info(f"Checking authorization") if is_power_user(): return func(*args, **kwargs) else: @@ -957,7 +951,7 @@ def report_result(func): """ def wrapper(*args, **kwargs): - logger.debug(f"Report result being called by {func.__name__}") + logger.info(f"Report result being called by {func.__name__}") output = func(*args, **kwargs) match output: case Report(): @@ -970,14 +964,13 @@ def report_result(func): case _: report = None return report - logger.debug(f"Got report: {report}") + logger.info(f"Got report: {report}") try: results = report.results except AttributeError: logger.error("No results available") results = [] for iii, result in enumerate(results): - logger.debug(f"Result {iii}: {result}") try: dlg = result.report() dlg.exec() @@ -990,7 +983,6 @@ def report_result(func): true_output = true_output[0] else: true_output = None - # logger.debug(f"Returning true output: {true_output}") return true_output return wrapper