Removed logger.debug calls for proven functions.
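For context, a minimal sketch of the logging convention the retained calls follow. It assumes the project's setup_logger returns a standard logging.Logger (as the calls kept in this diff suggest); the run_script wrapper below is hypothetical and only illustrates the split between INFO for milestones, ERROR for failures, and no per-step DEBUG chatter.

    # Hypothetical sketch only; setup_logger(verbosity=3) and the message strings
    # come from the diff below, the run_script() wrapper is not code from the repo.
    from backend import scripts
    from tools import ctx, setup_logger

    logger = setup_logger(verbosity=3)

    def run_script(script: str) -> None:
        try:
            func = getattr(scripts, script)  # assumed lookup; the failure path mirrors the diff
        except AttributeError as e:
            logger.error(f"Couldn't run startup script {script} due to {e}")
            return
        logger.info(f"Running startup script: {func.__name__}")
        func(ctx)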
@@ -1,6 +1,5 @@
 import sys, os
 from tools import ctx, setup_logger, check_if_app
-from backend import scripts

 # environment variable must be set to enable qtwebengine in network path
 if check_if_app():
@@ -9,6 +8,7 @@ if check_if_app():
 # setup custom logger
 logger = setup_logger(verbosity=3)

+from backend import scripts
 from PyQt6.QtWidgets import QApplication
 from frontend.widgets.app import App

@@ -25,6 +25,7 @@ def run_startup():
 except AttributeError as e:
 logger.error(f"Couldn't run startup script {script} due to {e}")
 continue
+logger.info(f"Running startup script: {func.__name__}")
 func(ctx)


@@ -40,6 +41,7 @@ def run_teardown():
 except AttributeError as e:
 logger.error(f"Couldn't run teardown script {script} due to {e}")
 continue
+logger.info(f"Running teardown script: {func.__name__}")
 func(ctx)

 if __name__ == '__main__':
@@ -18,12 +18,8 @@ def set_sqlite_pragma(dbapi_connection, connection_record):
 connection_record (_type_): _description_
 """
 cursor = dbapi_connection.cursor()
-# print(ctx.database_schema)
 if ctx.database_schema == "sqlite":
 execution_phrase = "PRAGMA foreign_keys=ON"
-# cursor.execute(execution_phrase)
-# elif ctx.database_schema == "mssql+pyodbc":
-# execution_phrase = "SET IDENTITY_INSERT dbo._wastewater ON;"
 else:
 print("Nothing to execute, returning")
 cursor.close()
@@ -37,12 +33,9 @@ from .models import *


 def update_log(mapper, connection, target):
-# logger.debug("\n\nBefore update\n\n")
 state = inspect(target)
-# logger.debug(state)
 object_name = state.object.truncated_name()
 update = dict(user=getuser(), time=datetime.now(), object=object_name, changes=[])
-# logger.debug(update)
 for attr in state.attrs:
 hist = attr.load_history()
 if not hist.has_changes():
@@ -56,24 +49,19 @@ def update_log(mapper, connection, target):
 continue
 deleted = [str(item) for item in hist.deleted]
 change = dict(field=attr.key, added=added, deleted=deleted)
-# logger.debug(f"Adding: {pformat(change)}")
 if added != deleted:
 try:
 update['changes'].append(change)
 except Exception as e:
 logger.error(f"Something went wrong adding attr: {attr.key}: {e}")
 continue
-# logger.debug(f"Adding to audit logs: {pformat(update)}")
 if update['changes']:
 # Note: must use execute as the session will be busy at this point.
 # https://medium.com/@singh.surbhicse/creating-audit-table-to-log-insert-update-and-delete-changes-in-flask-sqlalchemy-f2ca53f7b02f
 table = AuditLog.__table__
-# logger.debug(f"Adding to {table}")
 connection.execute(table.insert().values(**update))
-# logger.debug("Here is where I would insert values, if I was able.")
 else:
 logger.info(f"No changes detected, not updating logs.")

-# if ctx.logging_enabled:
 event.listen(LogMixin, 'after_update', update_log, propagate=True)
 event.listen(LogMixin, 'after_insert', update_log, propagate=True)
@@ -3,7 +3,6 @@ Contains all models for sqlalchemy
 """
 from __future__ import annotations
 import sys, logging
-
 from pandas import DataFrame
 from sqlalchemy import Column, INTEGER, String, JSON
 from sqlalchemy.orm import DeclarativeMeta, declarative_base, Query, Session
@@ -131,7 +130,6 @@ class BaseClass(Base):
 search = name.title().replace(" ", "")
 else:
 search = name
-logger.debug(f"Searching for subclass: {search}")
 return next((item for item in cls.__subclasses__() if item.__name__ == search), cls)

 @classmethod
@@ -146,9 +144,7 @@ class BaseClass(Base):
 List[Any]: Results of sqlalchemy query.
 """
 query: Query = cls.__database_session__.query(cls)
-# logger.debug(f"Queried model. Now running searches in {kwargs}")
 for k, v in kwargs.items():
-# logger.debug(f"Running fuzzy search for attribute: {k} with value {v}")
 # NOTE: Not sure why this is necessary, but it is.
 search = f"%{v}%"
 try:
@@ -200,9 +196,7 @@ class BaseClass(Base):
 model = cls
 if query is None:
 query: Query = cls.__database_session__.query(model)
-# logger.debug(f"Grabbing singles using {model.get_default_info}")
 singles = model.get_default_info('singles')
-# logger.info(f"Querying: {model}, with kwargs: {kwargs}")
 for k, v in kwargs.items():
 logger.info(f"Using key: {k} with value: {v}")
 try:
@@ -227,7 +221,6 @@ class BaseClass(Base):
 """
 Add the object to the database and commit
 """
-# logger.debug(f"Saving object: {pformat(self.__dict__)}")
 report = Report()
 try:
 self.__database_session__.add(self)
@@ -2,7 +2,6 @@
 Contains the audit log class and functions.
 """
 from typing import List
-
 from dateutil.parser import parse
 from sqlalchemy.orm import declarative_base, DeclarativeMeta, Query
 from . import BaseClass
@@ -48,32 +47,24 @@ class AuditLog(Base):
 logger.warning(f"End date with no start date, using Jan 1, 2023")
 start_date = session.query(cls, func.min(cls.time)).first()[1]
 if start_date is not None:
-# logger.debug(f"Querying with start date: {start_date} and end date: {end_date}")
 match start_date:
 case date():
-# logger.debug(f"Lookup BasicSubmission by start_date({start_date})")
 start_date = start_date.strftime("%Y-%m-%d")
 case int():
-# logger.debug(f"Lookup BasicSubmission by ordinal start_date {start_date}")
 start_date = datetime.fromordinal(
 datetime(1900, 1, 1).toordinal() + start_date - 2).date().strftime("%Y-%m-%d")
 case _:
-# logger.debug(f"Lookup BasicSubmission by parsed str start_date {start_date}")
 start_date = parse(start_date).strftime("%Y-%m-%d")
 match end_date:
 case date() | datetime():
-# logger.debug(f"Lookup BasicSubmission by end_date({end_date})")
 end_date = end_date + timedelta(days=1)
 end_date = end_date.strftime("%Y-%m-%d")
 case int():
-# logger.debug(f"Lookup BasicSubmission by ordinal end_date {end_date}")
 end_date = datetime.fromordinal(datetime(1900, 1, 1).toordinal() + end_date - 2).date() + timedelta(days=1)
 end_date = end_date.strftime("%Y-%m-%d")
 case _:
-# logger.debug(f"Lookup BasicSubmission by parsed str end_date {end_date}")
 end_date = parse(end_date) + timedelta(days=1)
 end_date = end_date.strftime("%Y-%m-%d")
-# logger.debug(f"Compensating for same date by using time")
 if start_date == end_date:
 start_date = datetime.strptime(start_date, "%Y-%m-%d").strftime("%Y-%m-%d %H:%M:%S.%f")
 query = query.filter(cls.time == start_date)
@@ -171,11 +171,9 @@ class Control(BaseClass):
 match submission_type:
 case str():
 from backend import BasicSubmission, SubmissionType
-# logger.debug(f"Lookup controls by SubmissionType str: {submission_type}")
 query = query.join(BasicSubmission).join(SubmissionType).filter(SubmissionType.name == submission_type)
 case SubmissionType():
 from backend import BasicSubmission
-# logger.debug(f"Lookup controls by SubmissionType: {submission_type}")
 query = query.join(BasicSubmission).filter(BasicSubmission.submission_type_name == submission_type.name)
 case _:
 pass
@@ -203,31 +201,23 @@ class Control(BaseClass):
 if start_date is not None:
 match start_date:
 case date():
-# logger.debug(f"Lookup control by start date({start_date})")
 start_date = start_date.strftime("%Y-%m-%d")
 case int():
-# logger.debug(f"Lookup control by ordinal start date {start_date}")
 start_date = datetime.fromordinal(
 datetime(1900, 1, 1).toordinal() + start_date - 2).date().strftime("%Y-%m-%d")
 case _:
-# logger.debug(f"Lookup control with parsed start date {start_date}")
 start_date = parse(start_date).strftime("%Y-%m-%d")
 match end_date:
 case date():
-# logger.debug(f"Lookup control by end date({end_date})")
 end_date = end_date.strftime("%Y-%m-%d")
 case int():
-# logger.debug(f"Lookup control by ordinal end date {end_date}")
 end_date = datetime.fromordinal(datetime(1900, 1, 1).toordinal() + end_date - 2).date().strftime(
 "%Y-%m-%d")
 case _:
-# logger.debug(f"Lookup control with parsed end date {end_date}")
 end_date = parse(end_date).strftime("%Y-%m-%d")
-# logger.debug(f"Looking up BasicSubmissions from start date: {start_date} and end date: {end_date}")
 query = query.filter(cls.submitted_date.between(start_date, end_date))
 match name:
 case str():
-# logger.debug(f"Lookup control by name {control_name}")
 query = query.filter(cls.name.startswith(name))
 limit = 1
 case _:
@@ -273,7 +263,6 @@ class Control(BaseClass):
 except StopIteration as e:
 raise AttributeError(
 f"Couldn't find existing class/subclass of {cls} with all attributes:\n{pformat(attrs.keys())}")
-# logger.info(f"Recruiting model: {model}")
 return model

 @classmethod
@@ -343,7 +332,6 @@ class PCRControl(Control):
 parent.mode_typer.clear()
 parent.mode_typer.setEnabled(False)
 report = Report()
-# logger.debug(f"Chart settings: {pformat(chart_settings)}")
 controls = cls.query(submission_type=chart_settings['sub_type'], start_date=chart_settings['start_date'],
 end_date=chart_settings['end_date'])
 data = [control.to_sub_dict() for control in controls]
@@ -411,21 +399,16 @@ class IridaControl(Control):
 kraken = self.kraken
 except TypeError:
 kraken = {}
-# logger.debug("calculating kraken count total to use in percentage")
 kraken_cnt_total = sum([kraken[item]['kraken_count'] for item in kraken])
-# logger.debug("Creating new kraken.")
 new_kraken = [dict(name=item, kraken_count=kraken[item]['kraken_count'],
 kraken_percent="{0:.0%}".format(kraken[item]['kraken_count'] / kraken_cnt_total),
 target=item in self.controltype.targets)
 for item in kraken]
-# logger.debug(f"New kraken before sort: {new_kraken}")
 new_kraken = sorted(new_kraken, key=itemgetter('kraken_count'), reverse=True)
-# logger.debug("setting targets")
 if self.controltype.targets:
 targets = self.controltype.targets
 else:
 targets = ["None"]
-# logger.debug("constructing output dictionary")
 output = dict(
 name=self.name,
 type=self.controltype.name,
@@ -447,7 +430,6 @@ class IridaControl(Control):
 Returns:
 List[dict]: list of records
 """
-# logger.debug("load json string for mode (i.e. contains, matches, kraken2)")
 try:
 data = self.__getattribute__(mode)
 except TypeError:
@@ -460,12 +442,10 @@ class IridaControl(Control):
 else:
 if consolidate:
 on_tar = {k: v for k, v in data.items() if k.strip("*") in self.controltype.targets[control_sub_type]}
-# logger.debug(f"Consolidating off-targets to: {self.controltype.targets[control_sub_type]}")
 off_tar = sum(v[f'{mode}_ratio'] for k, v in data.items() if
 k.strip("*") not in self.controltype.targets[control_sub_type])
 on_tar['Off-target'] = {f"{mode}_ratio": off_tar}
 data = on_tar
-# logger.debug("dict keys are genera of bacteria, e.g. 'Streptococcus'")
 for genus in data:
 _dict = dict(
 name=self.name,
@@ -473,7 +453,6 @@ class IridaControl(Control):
 genus=genus,
 target='Target' if genus.strip("*") in self.controltype.targets[control_sub_type] else "Off-target"
 )
-# logger.debug("get Target or Off-target of genus")
 for key in data[genus]:
 _dict[key] = data[genus][key]
 yield _dict
@@ -487,7 +466,6 @@ class IridaControl(Control):
 List[str]: List of control mode names.
 """
 try:
-# logger.debug("Creating a list of JSON columns in _controls table")
 cols = [item.name for item in list(cls.__table__.columns) if isinstance(item.type, JSON)]
 except AttributeError as e:
 logger.error(f"Failed to get available modes from db: {e}")
@@ -504,7 +482,6 @@ class IridaControl(Control):
 """
 super().make_parent_buttons(parent=parent)
 rows = parent.layout.rowCount() - 2
-# logger.debug(f"Parent rows: {rows}")
 checker = QCheckBox(parent)
 checker.setChecked(True)
 checker.setObjectName("irida_check")
@@ -539,10 +516,8 @@ class IridaControl(Control):
 except AttributeError:
 consolidate = False
 report = Report()
-# logger.debug(f"settings: {pformat(chart_settings)}")
 controls = cls.query(subtype=chart_settings['sub_type'], start_date=chart_settings['start_date'],
 end_date=chart_settings['end_date'])
-# logger.debug(f"Controls found: {controls}")
 if not controls:
 report.add_result(Result(status="Critical", msg="No controls found in given date range."))
 return report, None
@@ -552,19 +527,16 @@ class IridaControl(Control):
 control in controls]
 # NOTE: flatten data to one dimensional list
 data = [item for sublist in data for item in sublist]
-# logger.debug(f"Control objects going into df conversion: {pformat(data)}")
 if not data:
 report.add_result(Result(status="Critical", msg="No data found for controls in given date range."))
 return report, None
 df = cls.convert_data_list_to_df(input_df=data, sub_mode=chart_settings['sub_mode'])
-# logger.debug(f"Chart df: \n {df}")
 if chart_settings['sub_mode'] is None:
 title = chart_settings['sub_mode']
 else:
 title = f"{chart_settings['mode']} - {chart_settings['sub_mode']}"
 # NOTE: send dataframe to chart maker
 df, modes = cls.prep_df(ctx=ctx, df=df)
-# logger.debug(f"prepped df: \n {df}")
 fig = IridaFigure(df=df, ytitle=title, modes=modes, parent=parent,
 settings=chart_settings)
 return report, fig
@@ -581,9 +553,7 @@ class IridaControl(Control):
 Returns:
 DataFrame: dataframe of controls
 """
-# logger.debug(f"Subtype: {sub_mode}")
 df = DataFrame.from_records(input_df)
-# logger.debug(f"DF from records: {df}")
 safe = ['name', 'submitted_date', 'genus', 'target']
 for column in df.columns:
 if column not in safe:
@@ -636,7 +606,6 @@ class IridaControl(Control):
 Returns:
 DataFrame: output dataframe with dates incremented.
 """
-# logger.debug(f"Unique items: {df['name'].unique()}")
 # NOTE: get submitted dates for each control
 dict_list = [dict(name=item, date=df[df.name == item].iloc[0]['submitted_date']) for item in
 sorted(df['name'].unique())]
@@ -664,7 +633,6 @@ class IridaControl(Control):
 check = False
 previous_dates.add(item['date'])
 if check:
-# logger.debug(f"We found one! Increment date!\n\t{item['date']} to {item['date'] + timedelta(days=1)}")
 # NOTE: get df locations where name == item name
 mask = df['name'] == item['name']
 # NOTE: increment date in dataframe
@@ -673,15 +641,12 @@ class IridaControl(Control):
 passed = False
 else:
 passed = True
-# logger.debug(f"\n\tCurrent date: {item['date']}\n\tPrevious dates:{previous_dates}")
-# logger.debug(f"DF: {type(df)}, previous_dates: {type(previous_dates)}")
 # NOTE: if run didn't lead to changed date, return values
 if passed:
-# logger.debug(f"Date check passed, returning.")
 return df, previous_dates
 # NOTE: if date was changed, rerun with new date
 else:
-# logger.warning(f"Date check failed, running recursion")
+logger.warning(f"Date check failed, running recursion")
 df, previous_dates = cls.check_date(df, item, previous_dates)
 return df, previous_dates

@@ -708,13 +673,10 @@ class IridaControl(Control):
 # NOTE: sort by and exclude from
 sorts = ['submitted_date', "target", "genus"]
 exclude = ['name', 'genera']
-# logger.debug(df.columns)
 modes = [item for item in df.columns if item not in sorts and item not in exclude]
-# logger.debug(f"Modes coming out: {modes}")
 # NOTE: Set descending for any columns that have "{mode}" in the header.
 ascending = [False if item == "target" else True for item in sorts]
 df = df.sort_values(by=sorts, ascending=ascending)
-# logger.debug(df[df.isna().any(axis=1)])
 # NOTE: actual chart construction is done by
 return df, modes

@@ -17,7 +17,6 @@ from io import BytesIO

 logger = logging.getLogger(f'submissions.{__name__}')

-# logger.debug("Table for ReagentType/Reagent relations")
 reagentroles_reagents = Table(
 "_reagentroles_reagents",
 Base.metadata,
@@ -26,7 +25,6 @@ reagentroles_reagents = Table(
 extend_existing=True
 )

-# logger.debug("Table for EquipmentRole/Equipment relations")
 equipmentroles_equipment = Table(
 "_equipmentroles_equipment",
 Base.metadata,
@@ -35,7 +33,6 @@ equipmentroles_equipment = Table(
 extend_existing=True
 )

-# logger.debug("Table for Equipment/Process relations")
 equipment_processes = Table(
 "_equipment_processes",
 Base.metadata,
@@ -44,7 +41,6 @@ equipment_processes = Table(
 extend_existing=True
 )

-# logger.debug("Table for EquipmentRole/Process relations")
 equipmentroles_processes = Table(
 "_equipmentroles_processes",
 Base.metadata,
@@ -53,7 +49,6 @@ equipmentroles_processes = Table(
 extend_existing=True
 )

-# logger.debug("Table for SubmissionType/Process relations")
 submissiontypes_processes = Table(
 "_submissiontypes_processes",
 Base.metadata,
@@ -62,7 +57,6 @@ submissiontypes_processes = Table(
 extend_existing=True
 )

-# logger.debug("Table for KitType/Process relations")
 kittypes_processes = Table(
 "_kittypes_processes",
 Base.metadata,
@@ -71,7 +65,6 @@ kittypes_processes = Table(
 extend_existing=True
 )

-# logger.debug("Table for TipRole/Tips relations")
 tiproles_tips = Table(
 "_tiproles_tips",
 Base.metadata,
@@ -80,7 +73,6 @@ tiproles_tips = Table(
 extend_existing=True
 )

-# logger.debug("Table for Process/TipRole relations")
 process_tiprole = Table(
 "_process_tiprole",
 Base.metadata,
@@ -89,7 +81,6 @@ process_tiprole = Table(
 extend_existing=True
 )

-# logger.debug("Table for Equipment/Tips relations")
 equipment_tips = Table(
 "_equipment_tips",
 Base.metadata,
@@ -116,7 +107,7 @@ class KitType(BaseClass):
 cascade="all, delete-orphan",
 )

-# creator function: https://stackoverflow.com/questions/11091491/keyerror-when-adding-objects-to-sqlalchemy-association-object/11116291#11116291
+# NOTE: creator function: https://stackoverflow.com/questions/11091491/keyerror-when-adding-objects-to-sqlalchemy-association-object/11116291#11116291
 reagent_roles = association_proxy("kit_reagentrole_associations", "reagent_role",
 creator=lambda RT: KitTypeReagentRoleAssociation(
 reagent_role=RT)) #: Association proxy to KitTypeReagentRoleAssociation
@@ -152,18 +143,14 @@ class KitType(BaseClass):
 """
 match submission_type:
 case SubmissionType():
-# logger.debug(f"Getting reagents by SubmissionType {submission_type}")
 relevant_associations = [item for item in self.kit_reagentrole_associations if
 item.submission_type == submission_type]
 case str():
-# logger.debug(f"Getting reagents by str {submission_type}")
 relevant_associations = [item for item in self.kit_reagentrole_associations if
 item.submission_type.name == submission_type]
 case _:
-# logger.debug(f"Getting reagents")
 relevant_associations = [item for item in self.kit_reagentrole_associations]
 if required:
-# logger.debug(f"Filtering by required.")
 return (item.reagent_role for item in relevant_associations if item.required == 1)
 else:
 return (item.reagent_role for item in relevant_associations)
@@ -181,18 +168,14 @@ class KitType(BaseClass):
 # NOTE: Account for submission_type variable type.
 match submission_type:
 case str():
-# logger.debug(f"Constructing xl map with str {submission_type}")
 assocs = [item for item in self.kit_reagentrole_associations if
 item.submission_type.name == submission_type]
 case SubmissionType():
-# logger.debug(f"Constructing xl map with SubmissionType {submission_type}")
 assocs = [item for item in self.kit_reagentrole_associations if item.submission_type == submission_type]
 case _:
 raise ValueError(f"Wrong variable type: {type(submission_type)} used!")
-# logger.debug("Get all KitTypeReagentTypeAssociation for SubmissionType")
 for assoc in assocs:
 try:
-# logger.debug(f"Yielding: {assoc.reagent_role.name}, {assoc.uses}")
 yield assoc.reagent_role.name, assoc.uses
 except TypeError:
 continue
@@ -220,27 +203,22 @@ class KitType(BaseClass):
 query: Query = cls.__database_session__.query(cls)
 match used_for:
 case str():
-# logger.debug(f"Looking up kit type by used_for str: {used_for}")
 query = query.filter(cls.used_for.any(name=used_for))
 case SubmissionType():
-# logger.debug(f"Looking up kit type by used_for SubmissionType: {used_for}")
 query = query.filter(cls.used_for.contains(used_for))
 case _:
 pass
 match name:
 case str():
-# logger.debug(f"Looking up kit type by name str: {name}")
 query = query.filter(cls.name == name)
 limit = 1
 case _:
 pass
 match id:
 case int():
-# logger.debug(f"Looking up kit type by id int: {id}")
 query = query.filter(cls.id == id)
 limit = 1
 case str():
-# logger.debug(f"Looking up kit type by id str: {id}")
 query = query.filter(cls.id == int(id))
 limit = 1
 case _:
@@ -262,10 +240,7 @@ class KitType(BaseClass):
 dict: Dictionary containing relevant info for SubmissionType construction
 """
 base_dict = dict(name=self.name, reagent_roles=[], equipment_roles=[])
-# base_dict['reagent roles'] = []
-# base_dict['equipment roles'] = []
 for k, v in self.construct_xl_map_for_use(submission_type=submission_type):
-# logger.debug(f"Value: {v}")
 try:
 assoc = next(item for item in self.kit_reagentrole_associations if item.reagent_role.name == k)
 except StopIteration as e:
@@ -280,10 +255,8 @@ class KitType(BaseClass):
 except StopIteration:
 continue
 for kk, vv in assoc.to_export_dict(extraction_kit=self).items():
-# logger.debug(f"{kk}:{vv}")
 v[kk] = vv
 base_dict['equipment_roles'].append(v)
-# logger.debug(f"KT returning {base_dict}")
 return base_dict


@@ -347,28 +320,19 @@ class ReagentRole(BaseClass):
 else:
 match kit_type:
 case str():
-# logger.debug(f"Lookup ReagentType by kittype str {kit_type}")
 kit_type = KitType.query(name=kit_type)
 case _:
 pass
 match reagent:
 case str():
-# logger.debug(f"Lookup ReagentType by reagent str {reagent}")
 reagent = Reagent.query(lot=reagent)
 case _:
 pass
 assert reagent.role
-# logger.debug(f"Looking up reagent type for {type(kit_type)} {kit_type} and {type(reagent)} {reagent}")
-# logger.debug(f"Kit reagent types: {kit_type.reagent_types}")
 result = set(kit_type.reagent_roles).intersection(reagent.role)
-# logger.debug(f"Result: {result}")
-# try:
 return next((item for item in result), None)
-# except IndexError:
-# return None
 match name:
 case str():
-# logger.debug(f"Looking up reagent type by name str: {name}")
 query = query.filter(cls.name == name)
 limit = 1
 case _:
@@ -457,7 +421,6 @@ class Reagent(BaseClass, LogMixin):
 rtype = reagent_role.name.replace("_", " ")
 except AttributeError:
 rtype = "Unknown"
-# logger.debug(f"Role for {self.name}: {rtype}")
 # NOTE: Calculate expiry with EOL from ReagentType
 try:
 place_holder = self.expiry + reagent_role.eol_ext
@@ -493,14 +456,11 @@ class Reagent(BaseClass, LogMixin):
 Report: Result of operation
 """
 report = Report()
-# logger.debug(f"Attempting update of last used reagent type at intersection of ({self}), ({kit})")
 rt = ReagentRole.query(kit_type=kit, reagent=self, limit=1)
 if rt is not None:
-# logger.debug(f"got reagenttype {rt}")
 assoc = KitTypeReagentRoleAssociation.query(kit_type=kit, reagent_role=rt)
 if assoc is not None:
 if assoc.last_used != self.lot:
-# logger.debug(f"Updating {assoc} last used to {self.lot}")
 assoc.last_used = self.lot
 result = assoc.save()
 report.add_result(result)
@@ -539,23 +499,19 @@ class Reagent(BaseClass, LogMixin):
 pass
 match role:
 case str():
-# logger.debug(f"Looking up reagents by reagent type str: {reagent_type}")
 query = query.join(cls.role).filter(ReagentRole.name == role)
 case ReagentRole():
-# logger.debug(f"Looking up reagents by reagent type ReagentType: {reagent_type}")
 query = query.filter(cls.role.contains(role))
 case _:
 pass
 match name:
 case str():
-# logger.debug(f"Looking up reagent by name str: {name}")
 # NOTE: Not limited due to multiple reagents having same name.
 query = query.filter(cls.name == name)
 case _:
 pass
 match lot:
 case str():
-# logger.debug(f"Looking up reagent by lot number str: {lot}")
 query = query.filter(cls.lot == lot)
 # NOTE: In this case limit number returned.
 limit = 1
@@ -579,7 +535,6 @@ class Reagent(BaseClass, LogMixin):
 case "expiry":
 if isinstance(value, str):
 field_value = datetime.strptime(value, "%Y-%m-%d")
-# field_value.replace(tzinfo=timezone)
 elif isinstance(value, date):
 field_value = datetime.combine(value, datetime.min.time())
 else:
@@ -589,7 +544,6 @@ class Reagent(BaseClass, LogMixin):
 continue
 case _:
 field_value = value
-# logger.debug(f"Setting reagent {key} to {field_value}")
 self.__setattr__(key, field_value)
 self.save()

@@ -634,25 +588,19 @@ class Discount(BaseClass):
 query: Query = cls.__database_session__.query(cls)
 match organization:
 case Organization():
-# logger.debug(f"Looking up discount with organization Organization: {organization}")
 query = query.filter(cls.client == Organization)
 case str():
-# logger.debug(f"Looking up discount with organization str: {organization}")
 query = query.join(Organization).filter(Organization.name == organization)
 case int():
-# logger.debug(f"Looking up discount with organization id: {organization}")
 query = query.join(Organization).filter(Organization.id == organization)
 case _:
 pass
 match kit_type:
 case KitType():
-# logger.debug(f"Looking up discount with kit type KitType: {kit_type}")
 query = query.filter(cls.kit == kit_type)
 case str():
-# logger.debug(f"Looking up discount with kit type str: {kit_type}")
 query = query.join(KitType).filter(KitType.name == kit_type)
 case int():
-# logger.debug(f"Looking up discount with kit type id: {kit_type}")
 query = query.join(KitType).filter(KitType.id == kit_type)
 case _:
 pass
@@ -723,7 +671,6 @@ class SubmissionType(BaseClass):
 return submission_type.template_file

 def get_template_file_sheets(self) -> List[str]:
-logger.debug(f"Submission type to get sheets for: {self.name}")
 """
 Gets names of sheet in the stored blank form.

@@ -768,7 +715,6 @@ class SubmissionType(BaseClass):
 dict: Map of locations
 """
 info = {k: v for k, v in self.info_map.items() if k != "custom"}
-logger.debug(f"Info map: {info}")
 match mode:
 case "read":
 output = {k: v[mode] for k, v in info.items() if v[mode]}
@@ -844,11 +790,9 @@ class SubmissionType(BaseClass):
 """
 match equipment_role:
 case str():
-# logger.debug(f"Getting processes for equipmentrole str {equipment_role}")
 relevant = [item.get_all_processes(kit) for item in self.submissiontype_equipmentrole_associations if
 item.equipment_role.name == equipment_role]
 case EquipmentRole():
-# logger.debug(f"Getting processes for equipmentrole EquipmentRole {equipment_role}")
 relevant = [item.get_all_processes(kit) for item in self.submissiontype_equipmentrole_associations if
 item.equipment_role == equipment_role]
 case _:
@@ -886,14 +830,12 @@ class SubmissionType(BaseClass):
 query: Query = cls.__database_session__.query(cls)
 match name:
 case str():
-# logger.debug(f"Looking up submission type by name str: {name}")
 query = query.filter(cls.name == name)
 limit = 1
 case _:
 pass
 match key:
 case str():
-# logger.debug(f"Looking up submission type by info-map key str: {key}")
 query = query.filter(cls.info_map.op('->')(key) is not None)
 case _:
 pass
@@ -946,7 +888,6 @@ class SubmissionType(BaseClass):
 import_dict = yaml.load(stream=f, Loader=yaml.Loader)
 else:
 raise Exception(f"Filetype {filepath.suffix} not supported.")
-# logger.debug(pformat(import_dict))
 try:
 submission_type = cls.query(name=import_dict['name'])
 except KeyError:
@@ -1076,23 +1017,17 @@ class SubmissionTypeKitTypeAssociation(BaseClass):
 query: Query = cls.__database_session__.query(cls)
 match submission_type:
 case SubmissionType():
-# logger.debug(f"Looking up {cls.__name__} by SubmissionType {submission_type}")
 query = query.filter(cls.submission_type == submission_type)
 case str():
-# logger.debug(f"Looking up {cls.__name__} by name {submission_type}")
 query = query.join(SubmissionType).filter(SubmissionType.name == submission_type)
 case int():
-# logger.debug(f"Looking up {cls.__name__} by id {submission_type}")
 query = query.join(SubmissionType).filter(SubmissionType.id == submission_type)
 match kit_type:
 case KitType():
-# logger.debug(f"Looking up {cls.__name__} by KitType {kit_type}")
 query = query.filter(cls.kit_type == kit_type)
 case str():
-# logger.debug(f"Looking up {cls.__name__} by name {kit_type}")
 query = query.join(KitType).filter(KitType.name == kit_type)
 case int():
-# logger.debug(f"Looking up {cls.__name__} by id {kit_type}")
 query = query.join(KitType).filter(KitType.id == kit_type)
 limit = query.count()
 return cls.execute_query(query=query, limit=limit)
@@ -1107,7 +1042,6 @@ class SubmissionTypeKitTypeAssociation(BaseClass):
 exclude = ['_sa_instance_state', 'submission_types_id', 'kits_id', 'submission_type', 'kit_type']
 base_dict = {k: v for k, v in self.__dict__.items() if k not in exclude}
 base_dict['kit_type'] = self.kit_type.to_export_dict(submission_type=self.submission_type)
-# logger.debug(f"STKTA returning: {base_dict}")
 return base_dict


@@ -1128,10 +1062,11 @@ class KitTypeReagentRoleAssociation(BaseClass):
 kit_type = relationship(KitType,
 back_populates="kit_reagentrole_associations") #: relationship to associated KitType

-# reference to the "ReagentType" object
+# NOTE: reference to the "ReagentType" object
 reagent_role = relationship(ReagentRole,
 back_populates="reagentrole_kit_associations") #: relationship to associated ReagentType

+# NOTE: reference to the "SubmissionType" object
 submission_type = relationship(SubmissionType,
 back_populates="submissiontype_kit_rt_associations") #: relationship to associated SubmissionType

@@ -1203,19 +1138,15 @@ class KitTypeReagentRoleAssociation(BaseClass):
 query: Query = cls.__database_session__.query(cls)
 match kit_type:
 case KitType():
-# logger.debug(f"Lookup KitTypeReagentTypeAssociation by kit_type KitType {kit_type}")
 query = query.filter(cls.kit_type == kit_type)
 case str():
-# logger.debug(f"Lookup KitTypeReagentTypeAssociation by kit_type str {kit_type}")
 query = query.join(KitType).filter(KitType.name == kit_type)
 case _:
 pass
 match reagent_role:
 case ReagentRole():
-# logger.debug(f"Lookup KitTypeReagentTypeAssociation by reagent_type ReagentType {reagent_type}")
 query = query.filter(cls.reagent_role == reagent_role)
 case str():
-# logger.debug(f"Lookup KitTypeReagentTypeAssociation by reagent_type ReagentType {reagent_type}")
 query = query.join(ReagentRole).filter(ReagentRole.name == reagent_role)
 case _:
 pass
@@ -1242,7 +1173,6 @@ class KitTypeReagentRoleAssociation(BaseClass):
 Returns:
 Generator: Generates of reagents.
 """
-# logger.debug(f"Attempting lookup of reagents by type: {reagent.type}")
 reagents = self.reagent_role.instances
 try:
 regex = self.uses['exclude_regex']
@@ -1309,7 +1239,6 @@ class SubmissionReagentAssociation(BaseClass):
 query: Query = cls.__database_session__.query(cls)
 match reagent:
 case Reagent() | str():
-# logger.debug(f"Lookup SubmissionReagentAssociation by reagent Reagent {reagent}")
 if isinstance(reagent, str):
 reagent = Reagent.query(lot=reagent)
 query = query.filter(cls.reagent == reagent)
@@ -1319,10 +1248,8 @@ class SubmissionReagentAssociation(BaseClass):
 case BasicSubmission() | str():
 if isinstance(submission, str):
 submission = BasicSubmission.query(rsl_plate_num=submission)
-# logger.debug(f"Lookup SubmissionReagentAssociation by submission BasicSubmission {submission}")
 query = query.filter(cls.submission == submission)
 case int():
-# logger.debug(f"Lookup SubmissionReagentAssociation by submission id {submission}")
 submission = BasicSubmission.query(id=submission)
 query = query.join(BasicSubmission).filter(BasicSubmission.id == submission)
 case _:
@@ -1439,21 +1366,18 @@ class Equipment(BaseClass, LogMixin):
 query = cls.__database_session__.query(cls)
 match name:
 case str():
-# logger.debug(f"Lookup Equipment by name str {name}")
 query = query.filter(cls.name == name)
 limit = 1
 case _:
 pass
 match nickname:
 case str():
-# logger.debug(f"Lookup Equipment by nickname str {nickname}")
 query = query.filter(cls.nickname == nickname)
 limit = 1
 case _:
 pass
 match asset_number:
 case str():
-# logger.debug(f"Lookup Equipment by asset_number str {asset_number}")
 query = query.filter(cls.asset_number == asset_number)
 limit = 1
 case _:
@@ -1569,11 +1493,9 @@ class EquipmentRole(BaseClass):
 PydEquipmentRole: This EquipmentRole as PydEquipmentRole
 """
 from backend.validators.pydant import PydEquipmentRole
-# logger.debug("Creating list of PydEquipment in this role")
 equipment = [item.to_pydantic(submission_type=submission_type, extraction_kit=extraction_kit) for item in
 self.instances]
 pyd_dict = self.to_dict()
-# logger.debug("Creating list of Processes in this role")
 pyd_dict['processes'] = self.get_processes(submission_type=submission_type, extraction_kit=extraction_kit)
 return PydEquipmentRole(equipment=equipment, **pyd_dict)

@@ -1595,14 +1517,12 @@ class EquipmentRole(BaseClass):
 query = cls.__database_session__.query(cls)
 match id:
 case int():
-# logger.debug(f"Lookup EquipmentRole by id {id}")
 query = query.filter(cls.id == id)
 limit = 1
 case _:
 pass
 match name:
 case str():
-# logger.debug(f"Lookup EquipmentRole by name str {name}")
 query = query.filter(cls.name == name)
 limit = 1
 case _:
@@ -1622,7 +1542,6 @@ class EquipmentRole(BaseClass):
List[Process]: List of processes
|
List[Process]: List of processes
|
||||||
"""
|
"""
|
||||||
if isinstance(submission_type, str):
|
if isinstance(submission_type, str):
|
||||||
# logger.debug(f"Checking if str {submission_type} exists")
|
|
||||||
submission_type = SubmissionType.query(name=submission_type)
|
submission_type = SubmissionType.query(name=submission_type)
|
||||||
if isinstance(extraction_kit, str):
|
if isinstance(extraction_kit, str):
|
||||||
extraction_kit = KitType.query(name=extraction_kit)
|
extraction_kit = KitType.query(name=extraction_kit)
|
||||||
@@ -1808,7 +1727,6 @@ class Process(BaseClass):
|
|||||||
query = cls.__database_session__.query(cls)
|
query = cls.__database_session__.query(cls)
|
||||||
match name:
|
match name:
|
||||||
case str():
|
case str():
|
||||||
# logger.debug(f"Lookup Process with name str {name}")
|
|
||||||
query = query.filter(cls.name == name)
|
query = query.filter(cls.name == name)
|
||||||
limit = 1
|
limit = 1
|
||||||
case _:
|
case _:
|
||||||
@@ -1892,13 +1810,11 @@ class Tips(BaseClass, LogMixin):
|
|||||||
query = cls.__database_session__.query(cls)
|
query = cls.__database_session__.query(cls)
|
||||||
match name:
|
match name:
|
||||||
case str():
|
case str():
|
||||||
# logger.debug(f"Lookup Equipment by name str {name}")
|
|
||||||
query = query.filter(cls.name == name)
|
query = query.filter(cls.name == name)
|
||||||
case _:
|
case _:
|
||||||
pass
|
pass
|
||||||
match lot:
|
match lot:
|
||||||
case str():
|
case str():
|
||||||
# logger.debug(f"Lookup Equipment by nickname str {nickname}")
|
|
||||||
query = query.filter(cls.lot == lot)
|
query = query.filter(cls.lot == lot)
|
||||||
limit = 1
|
limit = 1
|
||||||
case _:
|
case _:
|
||||||
|
|||||||
@@ -65,7 +65,6 @@ class Organization(BaseClass):
|
|||||||
pass
|
pass
|
||||||
match name:
|
match name:
|
||||||
case str():
|
case str():
|
||||||
# logger.debug(f"Looking up organization with name starting with: {name}")
|
|
||||||
query = query.filter(cls.name.startswith(name))
|
query = query.filter(cls.name.startswith(name))
|
||||||
limit = 1
|
limit = 1
|
||||||
case _:
|
case _:
|
||||||
@@ -159,21 +158,18 @@ class Contact(BaseClass):
|
|||||||
query: Query = cls.__database_session__.query(cls)
|
query: Query = cls.__database_session__.query(cls)
|
||||||
match name:
|
match name:
|
||||||
case str():
|
case str():
|
||||||
# logger.debug(f"Looking up contact with name: {name}")
|
|
||||||
query = query.filter(cls.name == name.title())
|
query = query.filter(cls.name == name.title())
|
||||||
limit = 1
|
limit = 1
|
||||||
case _:
|
case _:
|
||||||
pass
|
pass
|
||||||
match email:
|
match email:
|
||||||
case str():
|
case str():
|
||||||
# logger.debug(f"Looking up contact with email: {name}")
|
|
||||||
query = query.filter(cls.email == email)
|
query = query.filter(cls.email == email)
|
||||||
limit = 1
|
limit = 1
|
||||||
case _:
|
case _:
|
||||||
pass
|
pass
|
||||||
match phone:
|
match phone:
|
||||||
case str():
|
case str():
|
||||||
# logger.debug(f"Looking up contact with phone: {name}")
|
|
||||||
query = query.filter(cls.phone == phone)
|
query = query.filter(cls.phone == phone)
|
||||||
limit = 1
|
limit = 1
|
||||||
case _:
|
case _:
|
||||||
|
|||||||
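Note on the pattern above: every query() classmethod in these hunks builds its lookup the same way — start from a session-level query, then use one match statement per keyword argument so a filter (and a tighter limit) is only added for the argument types that were actually supplied; that is why dropping the per-case logger.debug lines leaves the control flow unchanged. A condensed, self-contained sketch of the idiom against a stand-in model (the model, field names, and example value are assumptions, not the project's real schema):

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Equipment(Base):  # minimal stand-in for the project's Equipment model
        __tablename__ = "equipment"
        id = Column(Integer, primary_key=True)
        name = Column(String)
        asset_number = Column(String)

    def query_equipment(session: Session, name=None, asset_number=None, limit=0):
        query = session.query(Equipment)
        match name:
            case str():
                # Only narrow the query when a string was actually supplied.
                query = query.filter(Equipment.name == name)
                limit = 1
            case _:
                pass
        match asset_number:
            case str():
                query = query.filter(Equipment.asset_number == asset_number)
                limit = 1
            case _:
                pass
        return query.first() if limit == 1 else query.all()

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        print(query_equipment(session, name="Hypothetical-7500"))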
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
-'''
+"""
 contains parser objects for pulling values from client generated submission sheets.
-'''
+"""
 import logging
 from copy import copy
 from getpass import getuser
@@ -53,7 +53,6 @@ class SheetParser(object):
         self.parse_samples()
         self.parse_equipment()
         self.parse_tips()
-        # logger.debug(f"Parser.sub after info scrape: {pformat(self.sub)}")

     def parse_info(self):
         """
@@ -71,7 +70,6 @@ class SheetParser(object):
         logger.info(
             f"Checking for updated submission type: {self.submission_type.name} against new: {info['submission_type']['value']}")
         if self.submission_type.name != info['submission_type']['value']:
-            # logger.debug(f"info submission type: {info}")
             if check:
                 self.submission_type = SubmissionType.query(name=info['submission_type']['value'])
                 logger.info(f"Updated self.submission_type to {self.submission_type}. Rerunning parse.")
@@ -90,11 +88,9 @@ class SheetParser(object):
         """
         if extraction_kit is None:
             extraction_kit = self.sub['extraction_kit']
-        # logger.debug(f"Parsing reagents for {extraction_kit}")
         parser = ReagentParser(xl=self.xl, submission_type=self.submission_type,
                                extraction_kit=extraction_kit)
         self.sub['reagents'] = parser.parse_reagents()
-        # logger.debug(f"Reagents out of parser: {pformat(self.sub['reagents'])}")

     def parse_samples(self):
         """
@@ -155,7 +151,6 @@ class InfoParser(object):
             submission_type (str | SubmissionType): Type of submission expected (Wastewater, Bacterial Culture, etc.)
             sub_object (BasicSubmission | None, optional): Submission object holding methods. Defaults to None.
         """
-        logger.info(f"\n\nHello from InfoParser!\n\n")
         if isinstance(submission_type, str):
             submission_type = SubmissionType.query(name=submission_type)
         if sub_object is None:
@@ -164,7 +159,6 @@ class InfoParser(object):
         self.sub_object = sub_object
         self.map = self.fetch_submission_info_map()
         self.xl = xl
-        # logger.debug(f"Info map for InfoParser: {pformat(self.map)}")

     def fetch_submission_info_map(self) -> dict:
         """
@@ -174,7 +168,6 @@ class InfoParser(object):
             dict: Location map of all info for this submission type
         """
         self.submission_type = dict(value=self.submission_type_obj.name, missing=True)
-        # logger.debug(f"Looking up submission type: {self.submission_type['value']}")
         info_map = self.sub_object.construct_info_map(submission_type=self.submission_type_obj, mode="read")
         # NOTE: Get the parse_info method from the submission type specified
         return info_map
@@ -188,7 +181,6 @@ class InfoParser(object):
         """
         dicto = {}
         # NOTE: This loop parses generic info
-        # logger.debug(f"Map: {self.map}")
         for sheet in self.xl.sheetnames:
             ws = self.xl[sheet]
             relevant = []
@@ -197,11 +189,8 @@ class InfoParser(object):
                 if k == "custom":
                     continue
                 if isinstance(v, str):
-                    logger.debug(f"Found string for {k}, setting value to {v}")
                     dicto[k] = dict(value=v, missing=False)
                     continue
-                # logger.debug(f"Looking for {k} in self.map")
-                # logger.debug(f"Locations: {v}")
                 for location in v:
                     try:
                         check = location['sheet'] == sheet
@@ -213,21 +202,18 @@ class InfoParser(object):
                         new = location
                         new['name'] = k
                         relevant.append(new)
-            # logger.debug(f"relevant map for {sheet}: {pformat(relevant)}")
             # NOTE: make sure relevant is not an empty list.
             if not relevant:
                 continue
             for item in relevant:
                 # NOTE: Get cell contents at this location
                 value = ws.cell(row=item['row'], column=item['column']).value
-                # logger.debug(f"Value for {item['name']} = {value}")
                 match item['name']:
                     case "submission_type":
                         value, missing = is_missing(value)
                         value = value.title()
                     case "submitted_date":
                         value, missing = is_missing(value)
-                        logger.debug(f"Parsed submitted date: {value}")
                     # NOTE: is field a JSON? Includes: Extraction info, PCR info, comment, custom
                     case thing if thing in self.sub_object.jsons():
                         value, missing = is_missing(value)
@@ -240,7 +226,6 @@ class InfoParser(object):
                         logger.error(f"New value for {item['name']}")
                     case _:
                         value, missing = is_missing(value)
-                        # logger.debug(f"Setting {item} on {sheet} to {value}")
                 if item['name'] not in dicto.keys():
                     try:
                         dicto[item['name']] = dict(value=value, missing=missing)
@@ -264,7 +249,6 @@ class ReagentParser(object):
             extraction_kit (str): Extraction kit used.
             sub_object (BasicSubmission | None, optional): Submission object holding methods. Defaults to None.
         """
-        logger.info("\n\nHello from ReagentParser!\n\n")
         if isinstance(submission_type, str):
             submission_type = SubmissionType.query(name=submission_type)
         self.submission_type_obj = submission_type
@@ -272,9 +256,7 @@ class ReagentParser(object):
         if isinstance(extraction_kit, dict):
             extraction_kit = extraction_kit['value']
         self.kit_object = KitType.query(name=extraction_kit)
-        logger.debug(f"Got extraction kit object: {self.kit_object}")
         self.map = self.fetch_kit_info_map(submission_type=submission_type)
-        logger.debug(f"Reagent Parser map: {self.map}")
         self.xl = xl

     @report_result
@@ -298,14 +280,11 @@ class ReagentParser(object):
             del reagent_map['info']
         except KeyError:
             pass
-        # logger.debug(f"Reagent map: {pformat(reagent_map)}")
         # NOTE: If reagent map is empty, maybe the wrong kit was given, check if there's only one kit for that submission type and use it if so.
         if not reagent_map:
             temp_kit_object = self.submission_type_obj.get_default_kit()
-            # logger.debug(f"Temp kit: {temp_kit_object}")
             if temp_kit_object:
                 self.kit_object = temp_kit_object
-                # reagent_map = {k: v for k, v in self.kit_object.construct_xl_map_for_use(submission_type)}
                 logger.warning(f"Attempting to salvage with default kit {self.kit_object} and submission_type: {self.submission_type_obj}")
                 return self.fetch_kit_info_map(submission_type=self.submission_type_obj)
             else:
@@ -331,18 +310,15 @@ class ReagentParser(object):
         for sheet in self.xl.sheetnames:
             ws = self.xl[sheet]
             relevant = {k.strip(): v for k, v in self.map.items() if sheet in self.map[k]['sheet']}
-            # logger.debug(f"relevant map for {sheet}: {pformat(relevant)}")
             if relevant == {}:
                 continue
             for item in relevant:
-                # logger.debug(f"Attempting to scrape: {item}")
                 try:
                     reagent = relevant[item]
                     name = ws.cell(row=reagent['name']['row'], column=reagent['name']['column']).value
                     lot = ws.cell(row=reagent['lot']['row'], column=reagent['lot']['column']).value
                     expiry = ws.cell(row=reagent['expiry']['row'], column=reagent['expiry']['column']).value
                     if 'comment' in relevant[item].keys():
-                        # logger.debug(f"looking for {relevant[item]} comment.")
                         comment = ws.cell(row=reagent['comment']['row'], column=reagent['comment']['column']).value
                     else:
                         comment = ""
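For reference, parse_reagents drives every read through a per-reagent location map, so each field is a plain ws.cell(row=..., column=...) lookup against the submitted workbook. A stripped-down, runnable sketch of that access pattern (the coordinates, cell values, and reagent name below are invented for illustration):

    from openpyxl import Workbook

    wb = Workbook()
    ws = wb.active
    ws.cell(row=10, column=2, value="Lysis Buffer")   # name
    ws.cell(row=10, column=3, value="LOT1234")        # lot
    ws.cell(row=10, column=4, value="2026-01-01")     # expiry
    # Hypothetical location map describing where one reagent's fields live on the sheet.
    reagent = {"name": {"row": 10, "column": 2},
               "lot": {"row": 10, "column": 3},
               "expiry": {"row": 10, "column": 4}}
    name = ws.cell(row=reagent["name"]["row"], column=reagent["name"]["column"]).value
    lot = str(ws.cell(row=reagent["lot"]["row"], column=reagent["lot"]["column"]).value)
    expiry = ws.cell(row=reagent["expiry"]["row"], column=reagent["expiry"]["column"]).value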
@@ -353,10 +329,7 @@ class ReagentParser(object):
                     missing = False
                 else:
                     missing = True
-                # logger.debug(f"Got lot for {item}-{name}: {lot} as {type(lot)}")
                 lot = str(lot)
-                # logger.debug(
-                #     f"Going into pydantic: name: {name}, lot: {lot}, expiry: {expiry}, type: {item.strip()}, comment: {comment}")
                 try:
                     check = name.lower() != "not applicable"
                 except AttributeError:
@@ -381,12 +354,10 @@ class SampleParser(object):
             sample_map (dict | None, optional): Locations in database where samples are found. Defaults to None.
             sub_object (BasicSubmission | None, optional): Submission object holding methods. Defaults to None.
         """
-        logger.info("\n\nHello from SampleParser!\n\n")
         self.samples = []
         self.xl = xl
         if isinstance(submission_type, str):
             submission_type = SubmissionType.query(name=submission_type)
-        # logger.debug(f"Sample parser is using submission type: {submission_type}")
         self.submission_type = submission_type.name
         self.submission_type_obj = submission_type
         if sub_object is None:
@@ -395,7 +366,6 @@ class SampleParser(object):
             sub_object = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type)
         self.sub_object = sub_object
         self.sample_info_map = self.fetch_sample_info_map(submission_type=submission_type, sample_map=sample_map)
-        # logger.debug(f"sample_info_map: {self.sample_info_map}")
         self.plate_map_samples = self.parse_plate_map()
         self.lookup_samples = self.parse_lookup_table()

@@ -409,11 +379,8 @@ class SampleParser(object):
         Returns:
             dict: Info locations.
         """
-        # logger.debug(f"Looking up submission type: {submission_type}")
         self.sample_type = self.sub_object.get_default_info("sample_type", submission_type=submission_type)
         self.samp_object = BasicSample.find_polymorphic_subclass(polymorphic_identity=self.sample_type)
-        # logger.debug(f"Got sample class: {self.samp_object.__name__}")
-        # logger.debug(f"info_map: {pformat(se)}")
         if sample_map is None:
             sample_info_map = self.sub_object.construct_sample_map(submission_type=self.submission_type_obj)
         else:
@@ -432,9 +399,7 @@ class SampleParser(object):
         ws = self.xl[smap['sheet']]
         plate_map_samples = []
         for ii, row in enumerate(range(smap['start_row'], smap['end_row'] + 1), start=1):
-            # logger.debug(f"Parsing row: {row}")
             for jj, column in enumerate(range(smap['start_column'], smap['end_column'] + 1), start=1):
-                # logger.debug(f"Parsing column: {column}")
                 id = str(ws.cell(row=row, column=column).value)
                 if check_not_nan(id):
                     if id not in invalids:
@@ -442,10 +407,8 @@ class SampleParser(object):
                         sample_dict['sample_type'] = self.sample_type
                         plate_map_samples.append(sample_dict)
                     else:
-                        # logger.error(f"Sample cell ({row}, {column}) has invalid value: {id}.")
                         pass
                 else:
-                    # logger.error(f"Sample cell ({row}, {column}) has no info: {id}.")
                     pass
         return plate_map_samples

@@ -507,7 +470,6 @@ class SampleParser(object):
             except (KeyError, IndexError):
                 check = False
             if check:
-                # logger.debug(f"Direct match found for {psample['id']}")
                 new = lookup_samples[ii] | psample
                 lookup_samples[ii] = {}
             else:
@@ -516,7 +478,6 @@ class SampleParser(object):
                                if merge_on_id in sample.keys()]
                 jj, new = next(((jj, lsample | psample) for jj, lsample in searchables
                                 if lsample[merge_on_id] == psample['id']), (-1, psample))
-                # logger.debug(f"Assigning from index {jj} - {new}")
                 if jj >= 0:
                     lookup_samples[jj] = {}
             if not check_key_or_attr(key='submitter_id', interest=new, check_none=True):
@@ -540,7 +501,6 @@ class EquipmentParser(object):
             xl (Workbook): Openpyxl workbook from submitted excel file.
             submission_type (str | SubmissionType): Type of submission expected (Wastewater, Bacterial Culture, etc.)
         """
-        logger.info("\n\nHello from EquipmentParser!\n\n")
         if isinstance(submission_type, str):
             submission_type = SubmissionType.query(name=submission_type)
         self.submission_type = submission_type
@@ -567,7 +527,6 @@ class EquipmentParser(object):
             str: asset number
         """
         regex = Equipment.get_regex()
-        # logger.debug(f"Using equipment regex: {regex} on {input}")
         try:
             return regex.search(input).group().strip("-")
         except AttributeError as e:
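On get_asset_number above: re.Pattern.search returns None when nothing matches, so calling .group() straight away raises AttributeError, which the method catches rather than pre-checking the match object. A minimal sketch of the same idiom (the pattern and asset format are placeholders, not the project's Equipment.get_regex):

    import re

    regex = re.compile(r"ASSET-\d{4}")  # hypothetical asset-number format

    def get_asset_number(text: str) -> str | None:
        try:
            # .search() yields None on no match, so .group() raises AttributeError.
            return regex.search(text).group().strip("-")
        except AttributeError:
            return None

    print(get_asset_number("Extractor ASSET-0042"))  # -> "ASSET-0042"
    print(get_asset_number("no asset here"))         # -> None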
@@ -581,8 +540,6 @@ class EquipmentParser(object):
         Returns:
             List[dict]: list of equipment
         """
-        # logger.debug(f"Equipment parser going into parsing: {pformat(self.__dict__)}")
-        # logger.debug(f"Sheets: {sheets}")
         for sheet in self.xl.sheetnames:
             ws = self.xl[sheet]
             try:
@@ -590,17 +547,14 @@ class EquipmentParser(object):
             except (TypeError, KeyError) as e:
                 logger.error(f"Error creating relevant equipment list: {e}")
                 continue
-            # logger.debug(f"Relevant equipment: {pformat(relevant)}")
             previous_asset = ""
             for k, v in relevant.items():
-                # logger.debug(f"Checking: {v}")
                 asset = ws.cell(v['name']['row'], v['name']['column']).value
                 if not check_not_nan(asset):
                     asset = previous_asset
                 else:
                     previous_asset = asset
                 asset = self.get_asset_number(input=asset)
-                # logger.debug(f"asset: {asset}")
                 eq = Equipment.query(asset_number=asset)
                 if eq is None:
                     eq = Equipment.query(name=asset)
@@ -623,7 +577,6 @@ class TipParser(object):
             xl (Workbook): Openpyxl workbook from submitted excel file.
             submission_type (str | SubmissionType): Type of submission expected (Wastewater, Bacterial Culture, etc.)
         """
-        logger.info("\n\nHello from TipParser!\n\n")
         if isinstance(submission_type, str):
             submission_type = SubmissionType.query(name=submission_type)
         self.submission_type = submission_type
@@ -646,8 +599,6 @@ class TipParser(object):
         Returns:
             List[dict]: list of equipment
         """
-        # logger.debug(f"Equipment parser going into parsing: {pformat(self.__dict__)}")
-        # logger.debug(f"Sheets: {sheets}")
         for sheet in self.xl.sheetnames:
             ws = self.xl[sheet]
             try:
@@ -655,7 +606,6 @@ class TipParser(object):
             except (TypeError, KeyError) as e:
                 logger.error(f"Error creating relevant equipment list: {e}")
                 continue
-            # logger.debug(f"Relevant equipment: {pformat(relevant)}")
             previous_asset = ""
             for k, v in relevant.items():
                 asset = ws.cell(v['name']['row'], v['name']['column']).value
@@ -667,7 +617,6 @@ class TipParser(object):
                     asset = previous_asset
                 else:
                     previous_asset = asset
-                # logger.debug(f"asset: {asset}")
                 eq = Tips.query(lot=lot, name=asset, limit=1)
                 try:
                     yield dict(name=eq.name, role=k, lot=lot)
@@ -684,7 +633,6 @@ class PCRParser(object):
             filepath (Path | None, optional): file to parse. Defaults to None.
             submission (BasicSubmission | None, optional): Submission parsed data to be added to.
         """
-        # logger.debug(f'Parsing {filepath.__str__()}')
         if filepath is None:
             logger.error('No filepath given.')
             self.xl = None
@@ -727,5 +675,4 @@ class PCRParser(object):
             value = row[1].value or ""
             pcr[key] = value
         pcr['imported_by'] = getuser()
-        # logger.debug(f"PCR: {pformat(pcr)}")
         return pcr
@@ -32,7 +32,6 @@ class ReportArchetype(object):
         filename = filename.absolute()
         self.writer = ExcelWriter(filename.with_suffix(".xlsx"), engine='openpyxl')
         self.df.to_excel(self.writer, sheet_name=self.sheet_name)
-        # logger.debug(f"Writing report to: {filename}")
         self.writer.close()


@@ -43,7 +42,6 @@ class ReportMaker(object):
         self.end_date = end_date
         # NOTE: Set page size to zero to override limiting query size.
         self.subs = BasicSubmission.query(start_date=start_date, end_date=end_date, page_size=0)
-        # logger.debug(f"Number of subs returned: {len(self.subs)}")
         if organizations is not None:
             self.subs = [sub for sub in self.subs if sub.submitting_lab.name in organizations]
         self.detailed_df, self.summary_df = self.make_report_xlsx()
@@ -65,10 +63,8 @@ class ReportMaker(object):
         df2 = df.groupby(["submitting_lab", "extraction_kit"]).agg(
             {'extraction_kit': 'count', 'cost': 'sum', 'sample_count': 'sum'})
         df2 = df2.rename(columns={"extraction_kit": 'run_count'})
-        # logger.debug(f"Output daftaframe for xlsx: {df2.columns}")
         df = df.drop('id', axis=1)
         df = df.sort_values(['submitting_lab', "submitted_date"])
-        # logger.debug(f"Details dataframe:\n{df2}")
         return df, df2

     def make_report_html(self, df: DataFrame) -> str:
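The summary frame in make_report_xlsx is a pandas groupby/agg in which counting the extraction_kit column doubles as the run count before the column is renamed. A small sketch reaching the same summary with invented data; it uses size() for the run count, which is an equivalent reformulation rather than the project's exact call, and the column names follow the hunk above:

    import pandas as pd

    df = pd.DataFrame({
        "submitting_lab": ["Lab A", "Lab A", "Lab B"],
        "extraction_kit": ["Kit 1", "Kit 1", "Kit 2"],
        "cost": [10.0, 12.5, 8.0],
        "sample_count": [96, 48, 96],
    })
    grouped = df.groupby(["submitting_lab", "extraction_kit"])
    # Sum cost and sample_count per (lab, kit); count the rows as run_count.
    df2 = grouped.agg(cost=("cost", "sum"), sample_count=("sample_count", "sum"))
    df2["run_count"] = grouped.size()
    print(df2)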
@@ -86,12 +82,8 @@ class ReportMaker(object):
         """
         old_lab = ""
         output = []
-        # logger.debug(f"Report DataFrame: {df}")
         for row in df.iterrows():
-            # logger.debug(f"Row {ii}: {row}")
             lab = row[0][0]
-            # logger.debug(f"Old lab: {old_lab}, Current lab: {lab}")
-            # logger.debug(f"Name: {row[0][1]}")
             data = [item for item in row[1]]
             kit = dict(name=row[0][1], cost=data[1], run_count=int(data[0]), sample_count=int(data[2]))
             # NOTE: if this is the same lab as before add together
@@ -106,7 +98,6 @@ class ReportMaker(object):
                              total_runs=kit['run_count'])
             output.append(adder)
             old_lab = lab
-        # logger.debug(output)
         dicto = {'start_date': self.start_date, 'end_date': self.end_date, 'labs': output}
         temp = env.get_template('summary_report.html')
         html = temp.render(input=dicto)
@@ -127,14 +118,12 @@ class ReportMaker(object):
         self.summary_df.to_excel(self.writer, sheet_name="Report")
         self.detailed_df.to_excel(self.writer, sheet_name="Details", index=False)
         self.fix_up_xl()
-        # logger.debug(f"Writing report to: {filename}")
         self.writer.close()

     def fix_up_xl(self):
         """
         Handles formatting of xl file, mediocrely.
         """
-        # logger.debug(f"Updating worksheet")
         worksheet: Worksheet = self.writer.sheets['Report']
         for idx, col in enumerate(self.summary_df, start=1):  # NOTE: loop through all columns
             series = self.summary_df[col]
@@ -149,7 +138,6 @@ class ReportMaker(object):
             except ValueError as e:
                 logger.error(f"Couldn't resize column {col} due to {e}")
         blank_row = get_first_blank_df_row(self.summary_df) + 1
-        # logger.debug(f"Blank row index = {blank_row}")
         for col in range(3, 6):
             col_letter = row_map[col]
             worksheet.cell(row=blank_row, column=col, value=f"=SUM({col_letter}2:{col_letter}{str(blank_row - 1)})")
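fix_up_xl writes the totals row as literal Excel SUM formulas through openpyxl, with column letters coming from the project's row_map lookup. A tiny runnable sketch of writing such a formula under a column (the sheet layout, numbers, and output path are invented; openpyxl stores any string starting with "=" as a formula and Excel evaluates it on open):

    from openpyxl import Workbook

    wb = Workbook()
    ws = wb.active
    ws.append(["lab", "kit", "sample_count"])   # header row
    for value in (96, 48, 96):
        ws.append([None, None, value])          # data in column C, rows 2-4
    blank_row = ws.max_row + 1
    ws.cell(row=blank_row, column=3, value=f"=SUM(C2:C{blank_row - 1})")
    wb.save("totals_example.xlsx")              # hypothetical output path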
@@ -3,7 +3,6 @@ contains writer objects for pushing values to submission sheet templates.
 """
 import logging
 from copy import copy
-from datetime import date
 from operator import itemgetter
 from pprint import pformat
 from typing import List, Generator, Tuple
@@ -111,7 +110,6 @@ class InfoWriter(object):
             info_dict (dict): Dictionary of information to write.
             sub_object (BasicSubmission | None, optional): Submission object containing methods. Defaults to None.
         """
-        logger.debug(f"Info_dict coming into InfoWriter: {pformat(info_dict)}")
         if isinstance(submission_type, str):
             submission_type = SubmissionType.query(name=submission_type)
         if sub_object is None:
@@ -121,7 +119,6 @@ class InfoWriter(object):
         self.xl = xl
         self.info_map = submission_type.construct_info_map(mode='write')
         self.info = self.reconcile_map(info_dict, self.info_map)
-        # logger.debug(pformat(self.info))

     def reconcile_map(self, info_dict: dict, info_map: dict) -> Generator[(Tuple[str, dict]), None, None]:
         """
@@ -170,7 +167,6 @@ class InfoWriter(object):
                 logger.error(f"No locations for {k}, skipping")
                 continue
             for loc in locations:
-                logger.debug(f"Writing {k} to {loc['sheet']}, row: {loc['row']}, column: {loc['column']}")
                 sheet = self.xl[loc['sheet']]
                 try:
                     sheet.cell(row=loc['row'], column=loc['column'], value=v['value'])
@@ -247,8 +243,6 @@ class ReagentWriter(object):
             for v in reagent.values():
                 if not isinstance(v, dict):
                     continue
-                # logger.debug(
-                #     f"Writing {reagent['type']} {k} to {reagent['sheet']}, row: {v['row']}, column: {v['column']}")
                 sheet.cell(row=v['row'], column=v['column'], value=v['value'])
         return self.xl

@@ -288,7 +282,6 @@ class SampleWriter(object):
         multiples = ['row', 'column', 'assoc_id', 'submission_rank']
         for sample in sample_list:
             sample = self.submission_type.get_submission_class().custom_sample_writer(sample)
-            logger.debug(f"Writing sample: {sample}")
             for assoc in zip(sample['row'], sample['column'], sample['submission_rank']):
                 new = dict(row=assoc[0], column=assoc[1], submission_rank=assoc[2])
                 for k, v in sample.items():
@@ -369,9 +362,8 @@ class EquipmentWriter(object):
                 mp_info = equipment_map[equipment['role']]
             except KeyError:
                 logger.error(f"No {equipment['role']} in {pformat(equipment_map)}")
-            # logger.debug(f"{equipment['role']} map: {mp_info}")
+                mp_info = None
             placeholder = copy(equipment)
-            # if mp_info == {}:
             if not mp_info:
                 for jj, (k, v) in enumerate(equipment.items(), start=1):
                     dicto = dict(value=v, row=ii, column=jj)
@@ -381,7 +373,6 @@ class EquipmentWriter(object):
                     try:
                         dicto = dict(value=v, row=mp_info[k]['row'], column=mp_info[k]['column'])
                     except KeyError as e:
-                        # logger.error(f"Keyerror: {e}")
                         continue
                     placeholder[k] = dicto
                 if "asset_number" not in mp_info.keys():
@@ -400,17 +391,12 @@ class EquipmentWriter(object):
             Workbook: Workbook with equipment written
         """
         for equipment in self.equipment:
-            try:
-                sheet = self.xl[equipment['sheet']]
-            except KeyError:
+            if not equipment['sheet'] in self.xl.sheetnames:
                 self.xl.create_sheet("Equipment")
-            finally:
-                sheet = self.xl[equipment['sheet']]
+            sheet = self.xl[equipment['sheet']]
             for k, v in equipment.items():
                 if not isinstance(v, dict):
                     continue
-                # logger.debug(
-                #     f"Writing {k}: {v['value']} to {equipment['sheet']}, row: {v['row']}, column: {v['column']}")
                 if isinstance(v['value'], list):
                     v['value'] = v['value'][0]
                 try:
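The write_equipment hunk above swaps a try/except KeyError/finally sequence for an explicit membership test on Workbook.sheetnames before indexing; the same refactor is applied to TipWriter.write_tips below. One detail visible in the diff itself: the fallback still creates a sheet literally named "Equipment" while the subsequent lookup uses equipment['sheet'], so the two only line up when the map calls that sheet "Equipment". The idiom in isolation (the sheet name is an arbitrary example):

    from openpyxl import Workbook

    xl = Workbook()
    sheet_name = "Equipment"
    # Create the sheet only if it is missing, then index it unconditionally.
    if sheet_name not in xl.sheetnames:
        xl.create_sheet(sheet_name)
    sheet = xl[sheet_name]
    print(sheet.title)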
@@ -455,7 +441,6 @@ class TipWriter(object):
             return
         for ii, tips in enumerate(tips_list, start=1):
             mp_info = tips_map[tips.role]
-            # logger.debug(f"{tips['role']} map: {mp_info}")
             placeholder = {}
             if mp_info == {}:
                 for jj, (k, v) in enumerate(tips.__dict__.items(), start=1):
@@ -466,14 +451,12 @@ class TipWriter(object):
                     try:
                         dicto = dict(value=v, row=mp_info[k]['row'], column=mp_info[k]['column'])
                     except KeyError as e:
-                        # logger.error(f"Keyerror: {e}")
                         continue
                     placeholder[k] = dicto
             try:
                 placeholder['sheet'] = mp_info['sheet']
             except KeyError:
                 placeholder['sheet'] = "Tips"
-            # logger.debug(f"Final output of {tips['role']} : {placeholder}")
             yield placeholder

     def write_tips(self) -> Workbook:
@@ -484,17 +467,12 @@ class TipWriter(object):
             Workbook: Workbook with tips written
         """
         for tips in self.tips:
-            try:
-                sheet = self.xl[tips['sheet']]
-            except KeyError:
+            if not tips['sheet'] in self.xl.sheetnames:
                 self.xl.create_sheet("Tips")
-            finally:
-                sheet = self.xl[tips['sheet']]
+            sheet = self.xl[tips['sheet']]
             for k, v in tips.items():
                 if not isinstance(v, dict):
                     continue
-                # logger.debug(
-                #     f"Writing {k}: {v['value']} to {equipment['sheet']}, row: {v['row']}, column: {v['column']}")
                 if isinstance(v['value'], list):
                     v['value'] = v['value'][0]
                 try:
@@ -1,7 +1,7 @@
 from .irida import import_irida

 def hello(ctx):
-    print("\n\nHello!\n\n")
+    print("\n\nHello! Welcome to Robotics Submission Tracker.\n\n")

 def goodbye(ctx):
-    print("\n\nGoodbye\n\n")
+    print("\n\nGoodbye. Thank you for using Robotics Submission Tracker.\n\n")

@@ -19,11 +19,10 @@ def import_irida(ctx:Settings):
     existing_controls = [item.name for item in IridaControl.query()]
     prm_list = ", ".join([f"'{thing}'" for thing in existing_controls])
     ctrl_db_path = ctx.directory_path.joinpath("submissions_parser_output", "submissions.db")
-    # print(f"Incoming settings: {pformat(ctx)}")
     try:
         conn = sqlite3.connect(ctrl_db_path)
     except AttributeError as e:
-        print(f"Error, could not import from irida due to {e}")
+        logger.error(f"Error, could not import from irida due to {e}")
         return
     sql = f"SELECT name, submitted_date, submission_id, contains, matches, kraken, subtype, refseq_version, " \
           f"kraken2_version, kraken2_db_version, sample_id FROM _iridacontrol INNER JOIN _control on _control.id " \
@@ -32,8 +31,6 @@ def import_irida(ctx:Settings):
     records = [dict(name=row[0], submitted_date=row[1], submission_id=row[2], contains=row[3], matches=row[4], kraken=row[5],
                     subtype=row[6], refseq_version=row[7], kraken2_version=row[8], kraken2_db_version=row[9],
                     sample_id=row[10]) for row in cursor]
-    # incoming_controls = set(item['name'] for item in records)
-    # relevant = list(incoming_controls - existing_controls)
     for record in records:
         instance = IridaControl.query(name=record['name'])
         if instance:
@@ -52,5 +49,4 @@ def import_irida(ctx:Settings):
         if sample:
             instance.sample = sample
             instance.submission = sample.submissions[0]
-        # pprint(instance.__dict__)
         instance.save()
@@ -24,11 +24,9 @@ class RSLNamer(object):
         filename = Path(filename) if Path(filename).exists() else filename
         self.submission_type = sub_type
         if not self.submission_type:
-            # logger.debug("Creating submission type because none exists")
             self.submission_type = self.retrieve_submission_type(filename=filename)
         logger.info(f"got submission type: {self.submission_type}")
         if self.submission_type:
-            # logger.debug("Retrieving BasicSubmission subclass")
             self.sub_object = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type)
             self.parsed_name = self.retrieve_rsl_number(filename=filename, regex=self.sub_object.get_regex(submission_type=sub_type))
             if not data:
@@ -52,7 +50,6 @@ class RSLNamer(object):
             str: parsed submission type
         """
         def st_from_path(filename:Path) -> str:
-            # logger.info(f"Using path method for {filename}.")
             if filename.exists():
                 wb = load_workbook(filename)
                 try:
@@ -73,12 +70,9 @@ class RSLNamer(object):
             if filename.startswith("tmp"):
                 return "Bacterial Culture"
             regex = BasicSubmission.construct_regex()
-            # logger.info(f"Using string method for {filename}.")
-            # logger.debug(f"Using regex: {regex}")
             m = regex.search(filename)
             try:
                 submission_type = m.lastgroup
-                # logger.debug(f"Got submission type: {submission_type}")
             except AttributeError as e:
                 submission_type = None
                 logger.critical(f"No submission type found or submission type found!: {e}")
@@ -98,7 +92,6 @@ class RSLNamer(object):
         if check:
             if "pytest" in sys.modules:
                 raise ValueError("Submission Type came back as None.")
-            # logger.debug("Final option, ask the user for submission type")
             from frontend.widgets import ObjectSelector
             dlg = ObjectSelector(title="Couldn't parse submission type.",
                                  message="Please select submission type from list below.", obj_type=SubmissionType)
@@ -116,21 +109,17 @@ class RSLNamer(object):
             regex (str): string to construct pattern
             filename (str): string to be parsed
         """
-        logger.info(f"Input string to be parsed: {filename}")
         if regex is None:
             regex = BasicSubmission.construct_regex()
         else:
-            # logger.debug(f"Incoming regex: {regex}")
             try:
                 regex = re.compile(rf'{regex}', re.IGNORECASE | re.VERBOSE)
             except re.error as e:
                 regex = BasicSubmission.construct_regex()
-        logger.info(f"Using regex: {regex}")
         match filename:
             case Path():
                 m = regex.search(filename.stem)
             case str():
-                # logger.debug(f"Using string method.")
                 m = regex.search(filename)
             case _:
                 m = None
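retrieve_rsl_number compiles the caller-supplied pattern with re.IGNORECASE | re.VERBOSE, falls back to the class-wide regex on re.error, and then searches the stem for Path inputs versus the whole string otherwise. A compressed sketch of that flow (the pattern, group name, and filename are placeholders, not the project's real naming scheme):

    import re
    from pathlib import Path

    DEFAULT_REGEX = re.compile(r"(?P<rsl>RSL-\d{2}-\d{4})", re.IGNORECASE | re.VERBOSE)

    def compile_or_default(pattern: str | None) -> re.Pattern:
        if pattern is None:
            return DEFAULT_REGEX
        try:
            return re.compile(rf"{pattern}", re.IGNORECASE | re.VERBOSE)
        except re.error:
            return DEFAULT_REGEX    # unusable pattern: fall back to the default

    regex = compile_or_default(None)
    filename = Path("RSL-24-0001_plate.xlsx")
    # Path objects are searched on their stem, plain strings on the whole value.
    m = regex.search(filename.stem) if isinstance(filename, Path) else regex.search(str(filename))
    print(m.group() if m else None)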
@@ -141,7 +130,6 @@ class RSLNamer(object):
                 parsed_name = None
         else:
             parsed_name = None
-        # logger.debug(f"Got parsed submission name: {parsed_name}")
         return parsed_name

     @classmethod
@@ -187,8 +175,6 @@ class RSLNamer(object):
         Returns:
             str: output file name.
         """
-        # logger.debug(f"Kwargs: {kwargs}")
-        # logger.debug(f"Template: {template}")
         environment = jinja_template_loading()
         template = environment.from_string(template)
         return template.render(**kwargs)
@@ -1,6 +1,6 @@
-'''
+"""
 Contains pydantic models and accompanying validators
-'''
+"""
 from __future__ import annotations
 import uuid, re, logging, csv, sys
 from pydantic import BaseModel, field_validator, Field, model_validator
@@ -123,18 +123,14 @@ class PydReagent(BaseModel):
             Tuple[Reagent, Report]: Reagent instance and result of function
         """
         report = Report()
-        # logger.debug("Adding extra fields.")
         if self.model_extra is not None:
             self.__dict__.update(self.model_extra)
-        # logger.debug(f"Reagent SQL constructor is looking up type: {self.type}, lot: {self.lot}")
         reagent = Reagent.query(lot=self.lot, name=self.name)
-        # logger.debug(f"Result: {reagent}")
         if reagent is None:
             reagent = Reagent()
             for key, value in self.__dict__.items():
                 if isinstance(value, dict):
                     value = value['value']
-                # logger.debug(f"Reagent info item for {key}: {value}")
                 # NOTE: set fields based on keys in dictionary
                 match key:
                     case "lot":
@@ -149,7 +145,6 @@ class PydReagent(BaseModel):
                         if isinstance(value, str):
                             value = date(year=1970, month=1, day=1)
                         value = datetime.combine(value, datetime.min.time())
-                        logger.debug(f"Expiry date coming into sql: {value} with type {type(value)}")
                         reagent.expiry = value.replace(tzinfo=timezone)
                     case _:
                         try:
@@ -179,14 +174,12 @@ class PydSample(BaseModel, extra='allow'):
     @model_validator(mode='after')
     @classmethod
     def validate_model(cls, data):
-        # logger.debug(f"Data for pydsample: {data}")
         model = BasicSample.find_polymorphic_subclass(polymorphic_identity=data.sample_type)
         for k, v in data.model_extra.items():
             if k in model.timestamps():
                 if isinstance(v, str):
                     v = datetime.strptime(v, "%Y-%m-%d")
                 data.__setattr__(k, v)
-        # logger.debug(f"Data coming out of validation: {pformat(data)}")
         return data

     @field_validator("row", "column", "assoc_id", "submission_rank")
@@ -238,7 +231,6 @@ class PydSample(BaseModel, extra='allow'):
         """
         report = None
         self.__dict__.update(self.model_extra)
-        # logger.debug(f"Here is the incoming sample dict: \n{self.__dict__}")
         instance = BasicSample.query_or_create(sample_type=self.sample_type, submitter_id=self.submitter_id)
         for key, value in self.__dict__.items():
             match key:
@@ -246,7 +238,6 @@ class PydSample(BaseModel, extra='allow'):
                 case "row" | "column":
                     continue
                 case _:
-                    # logger.debug(f"Setting sample field {key} to {value}")
                     instance.__setattr__(key, value)
         out_associations = []
         if submission is not None:
@@ -254,15 +245,12 @@ class PydSample(BaseModel, extra='allow'):
                 submission = BasicSubmission.query(rsl_plate_num=submission)
             assoc_type = submission.submission_type_name
             for row, column, aid, submission_rank in zip(self.row, self.column, self.assoc_id, self.submission_rank):
-                # logger.debug(f"Looking up association with identity: ({submission.submission_type_name} Association)")
-                # logger.debug(f"Looking up association with identity: ({assoc_type} Association)")
                 association = SubmissionSampleAssociation.query_or_create(association_type=f"{assoc_type} Association",
                                                                           submission=submission,
                                                                           sample=instance,
                                                                           row=row, column=column, id=aid,
                                                                           submission_rank=submission_rank,
                                                                           **self.model_extra)
-                # logger.debug(f"Using submission_sample_association: {association}")
                 try:
                     out_associations.append(association)
                 except IntegrityError as e:
@@ -332,7 +320,6 @@ class PydEquipment(BaseModel, extra='ignore'):
     @field_validator('processes', mode='before')
     @classmethod
     def make_empty_list(cls, value):
-        # logger.debug(f"Pydantic value: {value}")
         if isinstance(value, GeneratorType):
             value = [item.name for item in value]
         value = convert_nans_to_nones(value)
@@ -355,7 +342,6 @@ class PydEquipment(BaseModel, extra='ignore'):
             Tuple[Equipment, SubmissionEquipmentAssociation]: SQL objects
         """
         if isinstance(submission, str):
-            # logger.debug(f"Got string, querying {submission}")
             submission = BasicSubmission.query(rsl_plate_num=submission)
         equipment = Equipment.query(asset_number=self.asset_number)
         if equipment is None:
@@ -403,7 +389,6 @@ class PydEquipment(BaseModel, extra='ignore'):
 class PydSubmission(BaseModel, extra='allow'):
     filepath: Path
     submission_type: dict | None
-    # For defaults
     submitter_plate_num: dict | None = Field(default=dict(value=None, missing=True), validate_default=True)
     submitted_date: dict | None
     rsl_plate_num: dict | None = Field(default=dict(value=None, missing=True), validate_default=True)
@@ -427,7 +412,6 @@ class PydSubmission(BaseModel, extra='allow'):
         if isinstance(value, dict):
             value = value['value']
         if isinstance(value, Generator):
-            # logger.debug("We have a generator")
             return [PydTips(**tips) for tips in value]
         if not value:
             return []
@@ -436,9 +420,7 @@ class PydSubmission(BaseModel, extra='allow'):
     @field_validator('equipment', mode='before')
     @classmethod
     def convert_equipment_dict(cls, value):
-        # logger.debug(f"Equipment: {value}")
         if isinstance(value, Generator):
-            logger.debug("We have a generator")
             return [PydEquipment(**equipment) for equipment in value]
         if isinstance(value, dict):
             return value['value']
@@ -454,7 +436,6 @@ class PydSubmission(BaseModel, extra='allow'):
     @field_validator("submitter_plate_num")
     @classmethod
     def enforce_with_uuid(cls, value):
-        # logger.debug(f"submitter_plate_num coming into pydantic: {value}")
         if value['value'] in [None, "None"]:
             return dict(value=uuid.uuid4().hex.upper(), missing=True)
         else:
@@ -464,7 +445,6 @@ class PydSubmission(BaseModel, extra='allow'):
     @field_validator("submitted_date", mode="before")
     @classmethod
     def rescue_date(cls, value):
-        # logger.debug(f"\n\nDate coming into pydantic: {value}\n\n")
         try:
             check = value['value'] is None
         except TypeError:
@@ -509,7 +489,6 @@ class PydSubmission(BaseModel, extra='allow'):
     @classmethod
     def lookup_submitting_lab(cls, value):
         if isinstance(value['value'], str):
-            # logger.debug(f"Looking up organization {value['value']}")
             try:
                 value['value'] = Organization.query(name=value['value']).name
             except AttributeError:
@@ -540,13 +519,11 @@ class PydSubmission(BaseModel, extra='allow'):
     @field_validator("rsl_plate_num")
     @classmethod
     def rsl_from_file(cls, value, values):
-        # logger.debug(f"RSL-plate initial value: {value['value']} and other values: {values.data}")
         sub_type = values.data['submission_type']['value']
         if check_not_nan(value['value']):
             value['value'] = value['value'].strip()
             return value
         else:
-            # logger.debug("Constructing plate sub_type.")
             if "pytest" in sys.modules and sub_type.replace(" ", "") == "BasicSubmission":
                 output = "RSL-BS-Test001"
             else:
@@ -623,7 +600,6 @@ class PydSubmission(BaseModel, extra='allow'):
     @classmethod
     def expand_reagents(cls, value):
         if isinstance(value, Generator):
-            # logger.debug("We have a generator")
             return [PydReagent(**reagent) for reagent in value]
         return value

@@ -631,7 +607,6 @@ class PydSubmission(BaseModel, extra='allow'):
     @classmethod
     def expand_samples(cls, value):
         if isinstance(value, Generator):
-            # logger.debug("We have a generator")
             return [PydSample(**sample) for sample in value]
         return value

@@ -656,7 +631,6 @@ class PydSubmission(BaseModel, extra='allow'):
     @field_validator("cost_centre")
     @classmethod
     def get_cost_centre(cls, value, values):
-        # logger.debug(f"Value coming in for cost_centre: {value}")
         match value['value']:
             case None:
                 from backend.db.models import Organization
@@ -671,7 +645,6 @@ class PydSubmission(BaseModel, extra='allow'):
     @field_validator("contact")
     @classmethod
     def get_contact_from_org(cls, value, values):
-        # logger.debug(f"Checking on value: {value}")
         match value:
             case dict():
                 if isinstance(value['value'], tuple):
@@ -684,7 +657,6 @@ class PydSubmission(BaseModel, extra='allow'):
                 if check is None:
                     org = Organization.query(name=values.data['submitting_lab']['value'])
                     contact = org.contacts[0].name
-                    # logger.debug(f"Pulled: {contact}")
                     if isinstance(contact, tuple):
                         contact = contact[0]
                     return dict(value=contact, missing=True)
@@ -692,7 +664,6 @@ class PydSubmission(BaseModel, extra='allow'):
         return value

     def __init__(self, run_custom: bool = False, **data):
-        logger.debug(f"{__name__} input data: {data}")
         super().__init__(**data)
         # NOTE: this could also be done with default_factory
         self.submission_object = BasicSubmission.find_polymorphic_subclass(
@@ -755,13 +726,11 @@ class PydSubmission(BaseModel, extra='allow'):
             except TypeError:
                 pass
         else:
-            # logger.debug("Extracting 'value' from attributes")
             output = {k: self.filter_field(k) for k in fields}
         return output

     def filter_field(self, key: str):
         item = getattr(self, key)
-        # logger.debug(f"Attempting deconstruction of {key}: {item} with type {type(item)}")
         match item:
             case dict():
                 try:
@@ -793,13 +762,10 @@ class PydSubmission(BaseModel, extra='allow'):
         """
         report = Report()
         dicto = self.improved_dict()
-        # logger.warning(f"\n\nQuery or create: {self.submission_type['value']}, {self.rsl_plate_num['value']}")
         instance, result = BasicSubmission.query_or_create(submission_type=self.submission_type['value'],
                                                            rsl_plate_num=self.rsl_plate_num['value'])
-        logger.debug(f"Result of query or create: {instance}")
         report.add_result(result)
         self.handle_duplicate_samples()
-        # logger.debug(f"Here's our list of duplicate removed samples: {self.samples}")
         for key, value in dicto.items():
             if isinstance(value, dict):
                 try:
@@ -811,18 +777,13 @@ class PydSubmission(BaseModel, extra='allow'):
                     continue
             if value is None:
                 continue
-            # logger.debug(f"Setting {key} to {value}")
             match key:
                 case "reagents":
                     for reagent in self.reagents:
-                        logger.debug(f"Checking reagent {reagent.lot}")
                         reagent, _ = reagent.toSQL(submission=instance)
-                        # logger.debug(f"Association: {assoc}")
                 case "samples":
                     for sample in self.samples:
                         sample, associations, _ = sample.toSQL(submission=instance)
-                        # logger.debug(f"Sample SQL object to be added to submission: {sample.__dict__}")
-                        # logger.debug(associations)
                         for assoc in associations:
                             if assoc is not None:
                                 if assoc not in instance.submission_sample_associations:
@@ -830,19 +791,16 @@ class PydSubmission(BaseModel, extra='allow'):
                                 else:
                                     logger.warning(f"Sample association {assoc} is already present in {instance}")
                 case "equipment":
-                    # logger.debug(f"Equipment: {pformat(self.equipment)}")
                     for equip in self.equipment:
                         if equip is None:
                             continue
                         equip, association = equip.toSQL(submission=instance)
                         if association is not None:
                             instance.submission_equipment_associations.append(association)
-                    logger.debug(f"Equipment associations: {instance.submission_equipment_associations}")
                 case "tips":
                     for tips in self.tips:
                         if tips is None:
                             continue
-                        # logger.debug(f"Converting tips: {tips} to sql.")
                         try:
                             association = tips.to_sql(submission=instance)
                         except AttributeError:
@@ -864,14 +822,11 @@ class PydSubmission(BaseModel, extra='allow'):
                         value = value
                     instance.set_attribute(key=key, value=value)
                 case item if item in instance.jsons():
-                    # logger.debug(f"{item} is a json.")
                     try:
                         ii = value.items()
                     except AttributeError:
                         ii = {}
-                    logger.debug(f"ii is {ii}, value is {value}")
                     for k, v in ii:
-                        logger.debug(f"k is {k}, v is {v}")
                         if isinstance(v, datetime):
                             value[k] = v.strftime("%Y-%m-%d %H:%M:%S")
                         else:
@@ -893,21 +848,17 @@ class PydSubmission(BaseModel, extra='allow'):
             else:
                 logger.warning(f"{key} already == {value} so no updating.")
         try:
-            # logger.debug(f"Calculating costs for procedure...")
             instance.calculate_base_cost()
         except (TypeError, AttributeError) as e:
-            logger.debug(f"Looks like that kit doesn't have cost breakdown yet due to: {e}, using 0.")
+            logger.error(f"Looks like that kit doesn't have cost breakdown yet due to: {e}, using 0.")
             try:
                 instance.run_cost = instance.extraction_kit.cost_per_run
             except AttributeError:
                 instance.run_cost = 0
-        # logger.debug(f"Calculated base run cost of: {instance.run_cost}")
         # NOTE: Apply any discounts that are applicable for client and kit.
         try:
-            # logger.debug("Checking and applying discounts...")
             discounts = [item.amount for item in
                          Discount.query(kit_type=instance.extraction_kit, organization=instance.submitting_lab)]
-            # logger.debug(f"We got discounts: {discounts}")
             if len(discounts) > 0:
                 instance.run_cost = instance.run_cost - sum(discounts)
         except Exception as e:
@@ -925,7 +876,6 @@ class PydSubmission(BaseModel, extra='allow'):
             SubmissionFormWidget: Submission form widget
         """
         from frontend.widgets.submission_widget import SubmissionFormWidget
-        # logger.debug(f"Disable: {disable}")
         return SubmissionFormWidget(parent=parent, submission=self, disable=disable)

     def to_writer(self) -> "SheetWriter":
@@ -946,10 +896,8 @@ class PydSubmission(BaseModel, extra='allow'):
             str: Output filename
         """
         template = self.submission_object.filename_template()
-        # logger.debug(f"Using template string: {template}")
         render = self.namer.construct_export_name(template=template, **self.improved_dict(dictionaries=False)).replace(
             "/", "")
-        # logger.debug(f"Template rendered as: {render}")
         return render

     # @report_result
@@ -964,26 +912,20 @@ class PydSubmission(BaseModel, extra='allow'):
             Report: Result object containing a message and any missing components.
         """
         report = Report()
-        # logger.debug(f"Extraction kit: {extraction_kit}. Is it a string? {isinstance(extraction_kit, str)}")
         if isinstance(extraction_kit, str):
             extraction_kit = dict(value=extraction_kit)
         if extraction_kit is not None and extraction_kit != self.extraction_kit['value']:
             self.extraction_kit['value'] = extraction_kit['value']
-        # logger.debug(f"Looking up {self.extraction_kit['value']}")
         ext_kit = KitType.query(name=self.extraction_kit['value'])
         ext_kit_rtypes = [item.to_pydantic() for item in
                           ext_kit.get_reagents(required=True, submission_type=self.submission_type['value'])]
-        # logger.debug(f"Kit reagents: {ext_kit_rtypes}")
-        # logger.debug(f"Submission reagents: {self.reagents}")
         # NOTE: Exclude any reagenttype found in this pyd not expected in kit.
         expected_check = [item.role for item in ext_kit_rtypes]
         output_reagents = [rt for rt in self.reagents if rt.role in expected_check]
-        logger.debug(f"Already have these reagent types: {output_reagents}")
         missing_check = [item.role for item in output_reagents]
         missing_reagents = [rt for rt in ext_kit_rtypes if rt.role not in missing_check]
         missing_reagents += [rt for rt in output_reagents if rt.missing]
         output_reagents += [rt for rt in missing_reagents if rt not in output_reagents]
-        # logger.debug(f"Missing reagents types: {missing_reagents}")
         # NOTE: if lists are equal return no problem
         if len(missing_reagents) == 0:
             result = None
@@ -1072,7 +1014,6 @@ class PydReagentRole(BaseModel):
         instance: ReagentRole = ReagentRole.query(name=self.name)
         if instance is None:
             instance = ReagentRole(name=self.name, eol_ext=self.eol_ext)
-        # logger.debug(f"This is the reagent type instance: {instance.__dict__}")
         try:
             assoc = KitTypeReagentRoleAssociation.query(reagent_role=instance, kit_type=kit)
         except StatementError:
@@ -41,7 +41,6 @@ class CustomFigure(Figure):
         """
         if modes:
             ytitle = modes[0]
-            # logger.debug("Creating visibles list for each mode.")
             self.update_layout(
                 xaxis_title="Submitted Date (* - Date parsed from fastq file creation date)",
                 yaxis_title=ytitle,
@@ -79,7 +78,6 @@ class CustomFigure(Figure):
         rng = [1]
         if months > 2:
             rng += [iii for iii in range(3, months, 3)]
-        # logger.debug(f"Making buttons for months: {rng}")
         buttons = [dict(count=iii, label=f"{iii}m", step="month", stepmode="backward") for iii in rng]
         if months > date.today().month:
             buttons += [dict(count=1, label="YTD", step="year", stepmode="todate")]
@@ -117,24 +115,6 @@ class CustomFigure(Figure):
                 {"yaxis.title.text": mode},
             ])

-    # def save_figure(self, group_name: str = "plotly_output", parent: QWidget | None = None):
-    #     """
-    #     Writes plotly figure to html file.
-    #
-    #     Args:
-    #         figs ():
-    #         settings (dict): settings passed down from click
-    #         fig (Figure): input figure object
-    #         group_name (str): controltype
-    #     """
-    #
-    #     output = select_save_file(obj=parent, default_name=group_name, extension="png")
-    #     self.write_image(output.absolute().__str__(), engine="kaleido")
-    #
-    # def save_data(self, group_name: str = "plotly_export", parent:QWidget|None=None):
-    #     output = select_save_file(obj=parent, default_name=group_name, extension="xlsx")
-    #     self.df.to_excel(output.absolute().__str__(), engine="openpyxl", index=False)

     def to_html(self) -> str:
         """
         Creates final html code from plotly
@@ -3,13 +3,12 @@ Functions for constructing irida controls graphs using plotly.
 """
 from datetime import date
 from pprint import pformat
-from typing import Generator
 import plotly.express as px
 import pandas as pd
 from PyQt6.QtWidgets import QWidget
 from . import CustomFigure
 import logging
-from tools import get_unique_values_in_df_column, divide_chunks
+from tools import get_unique_values_in_df_column

 logger = logging.getLogger(f"submissions.{__name__}")

@@ -21,11 +21,9 @@ class PCRFigure(CustomFigure):
             months = int(settings['months'])
         except KeyError:
             months = 6
-        # logger.debug(f"DF: {self.df}")
         self.construct_chart(df=df)

     def construct_chart(self, df: pd.DataFrame):
-        # logger.debug(f"PCR df:\n {df}")
         try:
             scatter = px.scatter(data_frame=df, x='submitted_date', y="ct",
                                  hover_data=["name", "target", "ct", "reagent_lot"],
@@ -23,7 +23,6 @@ class TurnaroundChart(CustomFigure):
             months = int(settings['months'])
         except KeyError:
             months = 6
-        # logger.debug(f"DF: {self.df}")
         self.construct_chart()
         if threshold:
             self.add_hline(y=threshold)
@@ -32,11 +31,9 @@ class TurnaroundChart(CustomFigure):
     def construct_chart(self, df: pd.DataFrame | None = None):
         if df:
             self.df = df
-        # logger.debug(f"PCR df:\n {df}")
         self.df = self.df[self.df.days.notnull()]
         self.df = self.df.sort_values(['submitted_date', 'name'], ascending=[True, True]).reset_index(drop=True)
         self.df = self.df.reset_index().rename(columns={"index": "idx"})
-        # logger.debug(f"DF: {self.df}")
         try:
             scatter = px.scatter(data_frame=self.df, x='idx', y="days",
                                  hover_data=["name", "submitted_date", "completed_date", "days"],
@@ -27,13 +27,11 @@ from .turnaround import TurnaroundTime
 from .omni_search import SearchBox

 logger = logging.getLogger(f'submissions.{__name__}')
-# logger.info("Hello, I am a logger")


 class App(QMainWindow):

     def __init__(self, ctx: Settings = None):
-        # logger.debug(f"Initializing main window...")
         super().__init__()
         qInstallMessageHandler(lambda x, y, z: None)
         self.ctx = ctx
@@ -68,7 +66,6 @@ class App(QMainWindow):
         """
         adds items to menu bar
         """
-        # logger.debug(f"Creating menu bar...")
         menuBar = self.menuBar()
         fileMenu = menuBar.addMenu("&File")
         editMenu = menuBar.addMenu("&Edit")
@@ -82,7 +79,6 @@ class App(QMainWindow):
         fileMenu.addAction(self.importAction)
         fileMenu.addAction(self.yamlExportAction)
         fileMenu.addAction(self.yamlImportAction)
-        # methodsMenu.addAction(self.searchLog)
         methodsMenu.addAction(self.searchSample)
         maintenanceMenu.addAction(self.joinExtractionAction)
         maintenanceMenu.addAction(self.joinPCRAction)
@@ -92,27 +88,20 @@ class App(QMainWindow):
         """
         adds items to toolbar
         """
-        # logger.debug(f"Creating toolbar...")
         toolbar = QToolBar("My main toolbar")
         self.addToolBar(toolbar)
         toolbar.addAction(self.addReagentAction)
-        # toolbar.addAction(self.addKitAction)
-        # toolbar.addAction(self.addOrgAction)

     def _createActions(self):
         """
         creates actions
         """
-        # logger.debug(f"Creating actions...")
         self.importAction = QAction("&Import Submission", self)
         self.addReagentAction = QAction("Add Reagent", self)
-        # self.addKitAction = QAction("Import Kit", self)
-        # self.addOrgAction = QAction("Import Org", self)
         self.joinExtractionAction = QAction("Link Extraction Logs")
         self.joinPCRAction = QAction("Link PCR Logs")
         self.helpAction = QAction("&About", self)
         self.docsAction = QAction("&Docs", self)
-        # self.searchLog = QAction("Search Log", self)
         self.searchSample = QAction("Search Sample", self)
         self.githubAction = QAction("Github", self)
         self.yamlExportAction = QAction("Export Type Example", self)
@@ -123,14 +112,12 @@ class App(QMainWindow):
         """
         connect menu and tool bar item to functions
         """
-        # logger.debug(f"Connecting actions...")
         self.importAction.triggered.connect(self.table_widget.formwidget.importSubmission)
         self.addReagentAction.triggered.connect(self.table_widget.formwidget.add_reagent)
         self.joinExtractionAction.triggered.connect(self.table_widget.sub_wid.link_extractions)
         self.joinPCRAction.triggered.connect(self.table_widget.sub_wid.link_pcr)
         self.helpAction.triggered.connect(self.showAbout)
         self.docsAction.triggered.connect(self.openDocs)
-        # self.searchLog.triggered.connect(self.runSearch)
         self.searchSample.triggered.connect(self.runSampleSearch)
         self.githubAction.triggered.connect(self.openGithub)
         self.yamlExportAction.triggered.connect(self.export_ST_yaml)
@@ -145,7 +132,6 @@ class App(QMainWindow):
         j_env = jinja_template_loading()
         template = j_env.get_template("project.html")
         html = template.render(info=self.ctx.package.__dict__)
-        # logger.debug(html)
         about = HTMLPop(html=html, title="About")
         about.exec()

@@ -157,7 +143,6 @@ class App(QMainWindow):
             url = Path(sys._MEIPASS).joinpath("files", "docs", "index.html")
         else:
             url = Path("docs\\build\\index.html").absolute()
-        # logger.debug(f"Attempting to open {url}")
         webbrowser.get('windows-default').open(f"file://{url.__str__()}")

     def openGithub(self):
@@ -177,10 +162,6 @@ class App(QMainWindow):
         instr = HTMLPop(html=html, title="Instructions")
         instr.exec()

-    # def runSearch(self):
-    #     dlg = LogParser(self)
-    #     dlg.exec()

     def runSampleSearch(self):
         """
         Create a search for samples.
@@ -253,7 +234,6 @@ class App(QMainWindow):
 class AddSubForm(QWidget):

     def __init__(self, parent: QWidget):
-        # logger.debug(f"Initializating subform...")
         super(QWidget, self).__init__(parent)
         self.layout = QVBoxLayout(self)
         # NOTE: Initialize tab screen
@@ -6,7 +6,6 @@ from PyQt6.QtWidgets import (
     QWidget, QComboBox, QPushButton
 )
 from PyQt6.QtCore import QSignalBlocker

 from backend import ChartReportMaker
 from backend.db import ControlType, IridaControl
 import logging
@@ -21,12 +20,9 @@ class ControlsViewer(InfoPane):

     def __init__(self, parent: QWidget, archetype: str) -> None:
         super().__init__(parent)
-        logger.debug(f"Incoming Archetype: {archetype}")
         self.archetype = ControlType.query(name=archetype)
         if not self.archetype:
             return
-        logger.debug(f"Archetype set as: {self.archetype}")
-        # logger.debug(f"\n\n{self.app}\n\n")
         # NOTE: set tab2 layout
         self.control_sub_typer = QComboBox()
         # NOTE: fetch types of controls
@@ -54,12 +50,6 @@ class ControlsViewer(InfoPane):
         self.save_button.pressed.connect(self.save_png)
         self.export_button.pressed.connect(self.save_excel)

-    # def save_chart_function(self):
-    #     self.fig.save_figure(parent=self)
-    #
-    # def save_data_function(self):
-    #     self.fig.save_data(parent=self)

     @report_result
     def update_data(self, *args, **kwargs):
         """
@@ -71,20 +61,6 @@ class ControlsViewer(InfoPane):
             self.mode_sub_typer.disconnect()
         except TypeError:
             pass
-        # NOTE: correct start date being more recent than end date and rerun
-        # if self.datepicker.start_date.date() > self.datepicker.end_date.date():
-        #     threemonthsago = self.datepicker.end_date.date().addDays(-60)
-        #     msg = f"Start date after end date is not allowed! Setting to {threemonthsago.toString()}."
-        #     logger.warning(msg)
-        #     # NOTE: block signal that will rerun controls getter and set start date Without triggering this function again
-        #     with QSignalBlocker(self.datepicker.start_date) as blocker:
-        #         self.datepicker.start_date.setDate(threemonthsago)
-        #     self.update_data()
-        #     report.add_result(Result(owner=self.__str__(), msg=msg, status="Warning"))
-        #     return report
-        # # NOTE: convert to python useable date objects
-        # self.start_date = self.datepicker.start_date.date().toPyDate()
-        # self.end_date = self.datepicker.end_date.date().toPyDate()
         self.con_sub_type = self.control_sub_typer.currentText()
         self.mode = self.mode_typer.currentText()
         self.mode_sub_typer.clear()
@@ -104,7 +80,6 @@ class ControlsViewer(InfoPane):
         self.mode_sub_typer.clear()
         self.mode_sub_typer.setEnabled(False)
         self.chart_maker_function()
-        # return report

     @report_result
     def chart_maker_function(self, *args, **kwargs):
@@ -119,14 +94,11 @@ class ControlsViewer(InfoPane):
             Tuple[QMainWindow, dict]: Collection of new main app window and result dict
         """
         report = Report()
-        # logger.debug(f"Control getter context: \n\tControl type: {self.con_sub_type}\n\tMode: {self.mode}\n\tStart \
-        # Date: {self.start_date}\n\tEnd Date: {self.end_date}")
         # NOTE: set the mode_sub_type for kraken
         if self.mode_sub_typer.currentText() == "":
             self.mode_sub_type = None
         else:
             self.mode_sub_type = self.mode_sub_typer.currentText()
-        logger.debug(f"Subtype: {self.mode_sub_type}")
         months = self.diff_month(self.start_date, self.end_date)
         # NOTE: query all controls using the type/start and end dates from the gui
         chart_settings = dict(sub_type=self.con_sub_type, start_date=self.start_date, end_date=self.end_date,
@@ -136,14 +108,11 @@ class ControlsViewer(InfoPane):
         self.report_obj = ChartReportMaker(df=self.fig.df, sheet_name=self.archetype.name)
         if issubclass(self.fig.__class__, CustomFigure):
             self.save_button.setEnabled(True)
-        # logger.debug(f"Updating figure...")
         # NOTE: construct html for webview
         try:
             html = self.fig.to_html()
         except AttributeError:
             html = ""
-        # logger.debug(f"The length of html code is: {len(html)}")
         self.webview.setHtml(html)
         self.webview.update()
-        # logger.debug("Figure updated... I hope.")
         return report
@@ -21,9 +21,7 @@ class EquipmentUsage(QDialog):
         self.setWindowTitle(f"Equipment Checklist - {submission.rsl_plate_num}")
         self.used_equipment = self.submission.get_used_equipment()
         self.kit = self.submission.extraction_kit
-        # logger.debug(f"Existing equipment: {self.used_equipment}")
         self.opt_equipment = submission.submission_type.get_equipment()
-        # logger.debug(f"EquipmentRoles: {self.opt_equipment}")
         self.layout = QVBoxLayout()
         self.setLayout(self.layout)
         self.populate_form()
@@ -38,7 +36,6 @@ class EquipmentUsage(QDialog):
         self.buttonBox.rejected.connect(self.reject)
         label = self.LabelRow(parent=self)
         self.layout.addWidget(label)
-        # logger.debug("Creating widgets for equipment")
         for eq in self.opt_equipment:
             widg = eq.to_form(parent=self, used=self.used_equipment)
             self.layout.addWidget(widg)
@@ -124,9 +121,7 @@ class RoleComboBox(QWidget):
         Changes processes when equipment is changed
         """
         equip = self.box.currentText()
-        # logger.debug(f"Updating equipment: {equip}")
         equip2 = next((item for item in self.role.equipment if item.name == equip), self.role.equipment[0])
-        # logger.debug(f"Using: {equip2}")
         with QSignalBlocker(self.process) as blocker:
             self.process.clear()
             self.process.addItems([item for item in equip2.processes if item in self.role.processes])
@@ -136,7 +131,6 @@ class RoleComboBox(QWidget):
         Changes what tips are available when process is changed
         """
         process = self.process.currentText().strip()
-        # logger.debug(f"Checking process: {process} for equipment {self.role.name}")
         process = Process.query(name=process)
         if process.tip_roles:
             for iii, tip_role in enumerate(process.tip_roles):
@@ -144,7 +138,6 @@ class RoleComboBox(QWidget):
                 tip_choices = [item.name for item in tip_role.instances]
                 widget.setEditable(False)
                 widget.addItems(tip_choices)
-                # logger.debug(f"Tiprole: {tip_role.__dict__}")
                 widget.setObjectName(f"tips_{tip_role.name}")
                 widget.setMinimumWidth(200)
                 widget.setMaximumWidth(200)
@@ -169,7 +162,6 @@ class RoleComboBox(QWidget):
         eq = Equipment.query(name=self.box.currentText())
         tips = [PydTips(name=item.currentText(), role=item.objectName().lstrip("tips").lstrip("_")) for item in
                 self.findChildren(QComboBox) if item.objectName().startswith("tips")]
-        # logger.debug(tips)
         try:
             return PydEquipment(
                 name=eq.name,
@@ -148,5 +148,4 @@ class ControlsForm(QWidget):
             dicto['values'].append(dict(name=label[1], value=le.currentText()))
             if label[0] not in [item['name'] for item in output]:
                 output.append(dicto)
-        # logger.debug(pformat(output))
         return output, self.comment_field.toPlainText()
@@ -18,7 +18,6 @@ class InfoPane(QWidget):
     def __init__(self, parent: QWidget) -> None:
         super().__init__(parent)
         self.app = self.parent().parent()
-        # logger.debug(f"\n\n{self.app}\n\n")
         self.report = Report()
         self.datepicker = StartEndDatePicker(default_start=-180)
         self.webview = QWebEngineView()
@@ -1,6 +1,6 @@
-'''
+"""
 Contains miscellaneous widgets for frontend functions
-'''
+"""
 import math
 from datetime import date
 from PyQt6.QtGui import QPageLayout, QPageSize, QStandardItem, QIcon
@@ -8,7 +8,7 @@ from PyQt6.QtWebEngineWidgets import QWebEngineView
 from PyQt6.QtWidgets import (
     QLabel, QVBoxLayout,
     QLineEdit, QComboBox, QDialog,
-    QDialogButtonBox, QDateEdit, QPushButton, QFormLayout, QWidget, QHBoxLayout, QSizePolicy
+    QDialogButtonBox, QDateEdit, QPushButton, QWidget, QHBoxLayout, QSizePolicy
 )
 from PyQt6.QtCore import Qt, QDate, QSize, QMarginsF
 from tools import jinja_template_loading
@@ -66,7 +66,6 @@ class AddReagentForm(QDialog):
             self.type_input.addItems([item.name for item in ReagentRole.query() if kit in item.kit_types])
         else:
             self.type_input.addItems([item.name for item in ReagentRole.query()])
-        # logger.debug(f"Trying to find index of {reagent_type}")
         # NOTE: convert input to user-friendly string?
         try:
             reagent_role = reagent_role.replace("_", " ").title()
@@ -106,7 +105,6 @@ class AddReagentForm(QDialog):
         """
         Updates reagent names form field with examples from reagent type
         """
-        # logger.debug(self.type_input.currentText())
         self.name_input.clear()
         lookup = Reagent.query(role=self.type_input.currentText())
         self.name_input.addItems(list(set([item.name for item in lookup])))
@@ -145,7 +143,8 @@ def save_pdf(obj: QWebEngineView, filename: Path):
     obj.page().printToPdf(filename.absolute().__str__(), page_layout)


-# subclass
+# NOTE: subclass

 class CheckableComboBox(QComboBox):
     # once there is a checkState set, it is rendered
     # here we assume default Unchecked
@@ -162,7 +161,6 @@ class CheckableComboBox(QComboBox):
         return item.checkState() == Qt.CheckState.Checked

     def changed(self):
-        logger.debug("emitting updated")
         self.updated.emit()

@@ -1,6 +1,6 @@
-'''
+"""
 Search box that performs fuzzy search for samples
-'''
+"""
 from pprint import pformat
 from typing import Tuple, Any, List
 from pandas import DataFrame
@@ -39,7 +39,6 @@ class SearchBox(QDialog):
         else:
             self.sub_class = None
         self.results = SearchResults(parent=self, object_type=self.object_type, extras=self.extras, **kwargs)
-        # logger.debug(f"results: {self.results}")
         self.layout.addWidget(self.results, 5, 0)
         self.setLayout(self.layout)
         self.setWindowTitle(f"Search {self.object_type.__name__}")
@@ -51,7 +50,6 @@ class SearchBox(QDialog):
         Changes form inputs based on sample type
         """
         deletes = [item for item in self.findChildren(FieldSearch)]
-        # logger.debug(deletes)
         for item in deletes:
             item.setParent(None)
         # NOTE: Handle any subclasses
@@ -62,7 +60,6 @@ class SearchBox(QDialog):
             self.object_type = self.original_type
         else:
             self.object_type = self.original_type.find_regular_subclass(self.sub_class.currentText())
-        logger.debug(f"{self.object_type} searchables: {self.object_type.searchables}")
         for iii, searchable in enumerate(self.object_type.searchables):
             widget = FieldSearch(parent=self, label=searchable, field_name=searchable)
             widget.setObjectName(searchable)
@@ -85,10 +82,9 @@ class SearchBox(QDialog):
         Shows dataframe of relevant samples.
         """
         fields = self.parse_form()
-        # logger.debug(f"Got fields: {fields}")
         sample_list_creator = self.object_type.fuzzy_search(**fields)
         data = self.object_type.results_to_df(objects=sample_list_creator)
-        # Setting results moved to here from __init__ 202411118
+        # NOTE: Setting results moved to here from __init__ 202411118
         self.results.setData(df=data)


@@ -154,7 +150,6 @@ class SearchResults(QTableView):

     def parse_row(self, x):
         context = {item['name']: x.sibling(x.row(), item['column']).data() for item in self.columns_of_interest}
-        logger.debug(f"Context: {context}")
         try:
             object = self.object_type.query(**context)
         except KeyError:
@@ -9,7 +9,7 @@ from PyQt6.QtWebEngineWidgets import QWebEngineView
|
|||||||
from tools import jinja_template_loading
|
from tools import jinja_template_loading
|
||||||
import logging
|
import logging
|
||||||
from backend.db import models
|
from backend.db import models
|
||||||
from typing import Any, Literal
|
from typing import Literal
|
||||||
|
|
||||||
logger = logging.getLogger(f"submissions.{__name__}")
|
logger = logging.getLogger(f"submissions.{__name__}")
|
||||||
|
|
||||||
|
|||||||
@@ -45,7 +45,6 @@ class SubmissionDetails(QDialog):
|
|||||||
self.btn.clicked.connect(self.save_pdf)
|
self.btn.clicked.connect(self.save_pdf)
|
||||||
self.back = QPushButton("Back")
|
self.back = QPushButton("Back")
|
||||||
self.back.setFixedWidth(100)
|
self.back.setFixedWidth(100)
|
||||||
# self.back.clicked.connect(self.back_function)
|
|
||||||
self.back.clicked.connect(self.webview.back)
|
self.back.clicked.connect(self.webview.back)
|
||||||
self.layout.addWidget(self.back, 0, 0, 1, 1)
|
self.layout.addWidget(self.back, 0, 0, 1, 1)
|
||||||
self.layout.addWidget(self.btn, 0, 1, 1, 9)
|
self.layout.addWidget(self.btn, 0, 1, 1, 9)
|
||||||
@@ -70,7 +69,6 @@ class SubmissionDetails(QDialog):
|
|||||||
if "Submission" in title:
|
if "Submission" in title:
|
||||||
self.btn.setEnabled(True)
|
self.btn.setEnabled(True)
|
||||||
self.export_plate = title.split(" ")[-1]
|
self.export_plate = title.split(" ")[-1]
|
||||||
# logger.debug(f"Updating export plate to: {self.export_plate}")
|
|
||||||
else:
|
else:
|
||||||
self.btn.setEnabled(False)
|
self.btn.setEnabled(False)
|
||||||
try:
|
try:
|
||||||
@@ -78,7 +76,6 @@ class SubmissionDetails(QDialog):
|
|||||||
except IndexError as e:
|
except IndexError as e:
|
||||||
check = title
|
check = title
|
||||||
if title == check:
|
if title == check:
|
||||||
# logger.debug("Disabling back button")
|
|
||||||
self.back.setEnabled(False)
|
self.back.setEnabled(False)
|
||||||
else:
|
else:
|
||||||
self.back.setEnabled(True)
|
self.back.setEnabled(True)
|
||||||
@@ -91,7 +88,6 @@ class SubmissionDetails(QDialog):
|
|||||||
Args:
|
Args:
|
||||||
sample (str): Submitter Id of the sample.
|
sample (str): Submitter Id of the sample.
|
||||||
"""
|
"""
|
||||||
# logger.debug(f"Details: {sample}")
|
|
||||||
if isinstance(sample, str):
|
if isinstance(sample, str):
|
||||||
sample = BasicSample.query(submitter_id=sample)
|
sample = BasicSample.query(submitter_id=sample)
|
||||||
base_dict = sample.to_sub_dict(full_data=True)
|
base_dict = sample.to_sub_dict(full_data=True)
|
||||||
@@ -114,7 +110,6 @@ class SubmissionDetails(QDialog):
|
|||||||
base_dict = reagent.to_sub_dict(extraction_kit=self.kit, full_data=True)
|
base_dict = reagent.to_sub_dict(extraction_kit=self.kit, full_data=True)
|
||||||
env = jinja_template_loading()
|
env = jinja_template_loading()
|
||||||
temp_name = "reagent_details.html"
|
temp_name = "reagent_details.html"
|
||||||
# logger.debug(f"Returning template: {temp_name}")
|
|
||||||
try:
|
try:
|
||||||
template = env.get_template(temp_name)
|
template = env.get_template(temp_name)
|
||||||
except TemplateNotFound as e:
|
except TemplateNotFound as e:
|
||||||
@@ -147,29 +142,23 @@ class SubmissionDetails(QDialog):
|
|||||||
Args:
|
Args:
|
||||||
submission (str | BasicSubmission): Submission of interest.
|
submission (str | BasicSubmission): Submission of interest.
|
||||||
"""
|
"""
|
||||||
# logger.debug(f"Details for: {submission}")
|
|
||||||
if isinstance(submission, str):
|
if isinstance(submission, str):
|
||||||
submission = BasicSubmission.query(rsl_plate_num=submission)
|
submission = BasicSubmission.query(rsl_plate_num=submission)
|
||||||
self.rsl_plate_num = submission.rsl_plate_num
|
self.rsl_plate_num = submission.rsl_plate_num
|
||||||
self.base_dict = submission.to_dict(full_data=True)
|
self.base_dict = submission.to_dict(full_data=True)
|
||||||
# logger.debug(f"Submission details data:\n{pformat({k:v for k,v in self.base_dict.items() if k == 'reagents'})}")
|
|
||||||
# NOTE: don't want id
|
# NOTE: don't want id
|
||||||
# logger.debug(f"Creating barcode.")
|
|
||||||
# logger.debug(f"Making platemap...")
|
|
||||||
self.base_dict['platemap'] = submission.make_plate_map(sample_list=submission.hitpick_plate())
|
self.base_dict['platemap'] = submission.make_plate_map(sample_list=submission.hitpick_plate())
|
||||||
self.base_dict['excluded'] = submission.get_default_info("details_ignore")
|
self.base_dict['excluded'] = submission.get_default_info("details_ignore")
|
||||||
self.base_dict, self.template = submission.get_details_template(base_dict=self.base_dict)
|
self.base_dict, self.template = submission.get_details_template(base_dict=self.base_dict)
|
||||||
template_path = Path(self.template.environment.loader.__getattribute__("searchpath")[0])
|
template_path = Path(self.template.environment.loader.__getattribute__("searchpath")[0])
|
||||||
with open(template_path.joinpath("css", "styles.css"), "r") as f:
|
with open(template_path.joinpath("css", "styles.css"), "r") as f:
|
||||||
css = f.read()
|
css = f.read()
|
||||||
# logger.debug(f"Submission_details: {pformat(self.base_dict)}")
|
|
||||||
# logger.debug(f"User is power user: {is_power_user()}")
|
|
||||||
self.html = self.template.render(sub=self.base_dict, permission=is_power_user(), css=css)
|
self.html = self.template.render(sub=self.base_dict, permission=is_power_user(), css=css)
|
||||||
self.webview.setHtml(self.html)
|
self.webview.setHtml(self.html)
|
||||||
|
|
||||||
@pyqtSlot(str)
|
@pyqtSlot(str)
|
||||||
def sign_off(self, submission: str | BasicSubmission):
|
def sign_off(self, submission: str | BasicSubmission):
|
||||||
logger.debug(f"Signing off on {submission} - ({getuser()})")
|
logger.info(f"Signing off on {submission} - ({getuser()})")
|
||||||
if isinstance(submission, str):
|
if isinstance(submission, str):
|
||||||
submission = BasicSubmission.query(rsl_plate_num=submission)
|
submission = BasicSubmission.query(rsl_plate_num=submission)
|
||||||
submission.signed_by = getuser()
|
submission.signed_by = getuser()
|
||||||
@@ -195,7 +184,6 @@ class SubmissionComment(QDialog):
|
|||||||
super().__init__(parent)
|
super().__init__(parent)
|
||||||
try:
|
try:
|
||||||
self.app = parent.parent().parent().parent().parent().parent().parent
|
self.app = parent.parent().parent().parent().parent().parent().parent
|
||||||
# logger.debug(f"App: {self.app}")
|
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
pass
|
pass
|
||||||
self.submission = submission
|
self.submission = submission
|
||||||
@@ -225,5 +213,4 @@ class SubmissionComment(QDialog):
|
|||||||
return None
|
return None
|
||||||
dt = datetime.strftime(datetime.now(), "%Y-%m-%d %H:%M:%S")
|
dt = datetime.strftime(datetime.now(), "%Y-%m-%d %H:%M:%S")
|
||||||
full_comment = {"name": commenter, "time": dt, "text": comment}
|
full_comment = {"name": commenter, "time": dt, "text": comment}
|
||||||
# logger.debug(f"Full comment: {full_comment}")
|
|
||||||
return full_comment
|
return full_comment
|
||||||
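The hunks above all follow the same convention: each module takes a child of the top-level "submissions" logger, commented-out logger.debug lines are deleted outright, and the few messages still worth keeping (such as the sign-off in SubmissionDetails.sign_off) are promoted to logger.info. A minimal sketch of that convention follows; the function name sign_off_sketch and its body are illustrative stand-ins, not code from this commit.

import logging
from getpass import getuser

# Same naming convention as the context lines above: a per-module child of the
# top-level "submissions" logger, so verbosity is configured in one place.
logger = logging.getLogger(f"submissions.{__name__}")

def sign_off_sketch(submission):
    # Promoted from debug to info in this commit: sign-offs are operationally
    # meaningful, so they stay visible at the default log level.
    logger.info(f"Signing off on {submission} - ({getuser()})")
    submission.signed_by = getuser()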
@@ -1,6 +1,6 @@
-'''
+"""
 Contains widgets specific to the submission summary and submission details.
-'''
+"""
 import logging
 from pprint import pformat
 from PyQt6.QtWidgets import QTableView, QMenu
@@ -107,20 +107,16 @@ class SubmissionsSheet(QTableView):
 Args:
 event (_type_): the item of interest
 """
-# logger.debug(event.__dict__)
 id = self.selectionModel().currentIndex()
 id = id.sibling(id.row(), 0).data()
 submission = BasicSubmission.query(id=id)
-# logger.debug(f"Event submission: {submission}")
 self.menu = QMenu(self)
 self.con_actions = submission.custom_context_events()
-# logger.debug(f"Menu options: {self.con_actions}")
 for k in self.con_actions.keys():
-# logger.debug(f"Adding {k}")
 action = QAction(k, self)
 action.triggered.connect(lambda _, action_name=k: self.triggered_action(action_name=action_name))
 self.menu.addAction(action)
-# add other required actions
+# NOTE: add other required actions
 self.menu.popup(QCursor.pos())

 def triggered_action(self, action_name: str):
@@ -130,8 +126,6 @@ class SubmissionsSheet(QTableView):
 Args:
 action_name (str): name of the action from the menu
 """
-# logger.debug(f"Action: {action_name}")
-# logger.debug(f"Responding with {self.con_actions[action_name]}")
 func = self.con_actions[action_name]
 func(obj=self)

@@ -179,7 +173,6 @@ class SubmissionsSheet(QTableView):
 if sub is None:
 continue
 try:
-# logger.debug(f"Found submission: {sub.rsl_plate_num}")
 count += 1
 except AttributeError:
 continue
@@ -1,9 +1,9 @@
-'''
+"""
 Contains all submission related frontend functions
-'''
+"""
 from PyQt6.QtWidgets import (
 QWidget, QPushButton, QVBoxLayout,
-QComboBox, QDateEdit, QLineEdit, QLabel, QCheckBox, QBoxLayout, QHBoxLayout, QGridLayout
+QComboBox, QDateEdit, QLineEdit, QLabel, QCheckBox, QHBoxLayout, QGridLayout
 )
 from PyQt6.QtCore import pyqtSignal, Qt, QSignalBlocker
 from . import select_open_file, select_save_file
@@ -34,7 +34,6 @@ class MyQComboBox(QComboBox):
 super(MyQComboBox, self).__init__(*args, **kwargs)
 self.scrollWidget = scrollWidget
 self.setFocusPolicy(Qt.FocusPolicy.StrongFocus)
-logger.debug(f"Scrollwidget: {scrollWidget}")

 def wheelEvent(self, *args, **kwargs):
 if self.hasFocus():
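The MyQComboBox hunk above drops the Scrollwidget debug line but keeps the focus-policy setup and the start of the wheelEvent override; the rest of that method lies outside the hunk. The following is only a sketch of the usual Qt pattern those context lines suggest, assuming the goal is to stop an accidental scroll from changing the selection. The class name and the event forwarding to scrollWidget are assumptions, not the project's code.

from PyQt6.QtCore import Qt
from PyQt6.QtWidgets import QComboBox

class NoScrollComboBox(QComboBox):
    """Sketch: ignore the mouse wheel unless the box has focus, so scrolling
    the surrounding form never changes the selection by accident."""

    def __init__(self, scrollWidget=None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.scrollWidget = scrollWidget
        # StrongFocus means the widget only gains focus by click or tab,
        # not by hovering during a scroll.
        self.setFocusPolicy(Qt.FocusPolicy.StrongFocus)

    def wheelEvent(self, event):
        if self.hasFocus():
            super().wheelEvent(event)  # focused: behave like a normal combobox
        elif self.scrollWidget is not None:
            self.scrollWidget.wheelEvent(event)  # unfocused: let the parent form scroll instead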
@@ -61,14 +60,12 @@ class MyQDateEdit(QDateEdit):


 class SubmissionFormContainer(QWidget):
-# A signal carrying a path
+# NOTE: A signal carrying a path
 import_drag = pyqtSignal(Path)

 def __init__(self, parent: QWidget) -> None:
-# logger.debug(f"Setting form widget...")
 super().__init__(parent)
 self.app = self.parent().parent()
-# logger.debug(f"App: {self.app}")
 self.report = Report()
 self.setStyleSheet('background-color: light grey;')
 self.setAcceptDrops(True)
@@ -89,7 +86,6 @@ class SubmissionFormContainer(QWidget):
 Sets filename when file dropped
 """
 fname = Path([u.toLocalFile() for u in event.mimeData().urls()][0])
-# logger.debug(f"App: {self.app}")
 self.app.last_dir = fname.parent
 self.import_drag.emit(fname)

@@ -127,7 +123,6 @@ class SubmissionFormContainer(QWidget):
 # NOTE: set file dialog
 if isinstance(fname, bool) or fname is None:
 fname = select_open_file(self, file_extension="xlsx")
-# logger.debug(f"Attempting to parse file: {fname}")
 if not fname:
 report.add_result(Result(msg=f"File {fname.__str__()} not found.", status="critical"))
 return report
@@ -139,14 +134,10 @@ class SubmissionFormContainer(QWidget):
 return
 except AttributeError:
 self.prsr = SheetParser(filepath=fname)
-# logger.debug(f"Submission dictionary:\n{pformat(self.prsr.sub)}")
 self.pyd = self.prsr.to_pydantic()
-# logger.debug(f"Pydantic result: \n\n{pformat(self.pyd)}\n\n")
 self.form = self.pyd.to_form(parent=self)
 self.layout().addWidget(self.form)
 return report
-# logger.debug(f"Outgoing report: {self.report.results}")
-# logger.debug(f"All attributes of submission container:\n{pformat(self.__dict__)}")

 @report_result
 def add_reagent(self, reagent_lot: str | None = None, reagent_role: str | None = None, expiry: date | None = None,
@@ -172,14 +163,12 @@ class SubmissionFormContainer(QWidget):
 if dlg.exec():
 # NOTE: extract form info
 info = dlg.parse_form()
-# logger.debug(f"Reagent info: {info}")
 # NOTE: create reagent object
 reagent = PydReagent(ctx=self.app.ctx, **info, missing=False)
 # NOTE: send reagent to db
 sqlobj, result = reagent.toSQL()
 sqlobj.save()
 report.add_result(result)
-# logger.debug(f"Reagent: {reagent}, Report: {report}")
 return reagent, report


@@ -189,7 +178,6 @@ class SubmissionFormWidget(QWidget):

 def __init__(self, parent: QWidget, submission: PydSubmission, disable: list | None = None) -> None:
 super().__init__(parent)
-# logger.debug(f"Disable: {disable}")
 if disable is None:
 disable = []
 self.app = parent.app
@@ -200,17 +188,13 @@ class SubmissionFormWidget(QWidget):
 defaults = st.get_default_info("form_recover", "form_ignore", submission_type=self.pyd.submission_type['value'])
 self.recover = defaults['form_recover']
 self.ignore = defaults['form_ignore']
-# logger.debug(f"Attempting to extend ignore list with {self.pyd.submission_type['value']}")
 self.layout = QVBoxLayout()
 for k in list(self.pyd.model_fields.keys()) + list(self.pyd.model_extra.keys()):
-# logger.debug(f"Creating widget: {k}")
 if k in self.ignore:
 logger.warning(f"{k} in form_ignore {self.ignore}, not creating widget")
 continue
 try:
-# logger.debug(f"Key: {k}, Disable: {disable}")
 check = k in disable
-# logger.debug(f"Check: {check}")
 except TypeError:
 check = False
 try:
@@ -225,7 +209,6 @@ class SubmissionFormWidget(QWidget):
 sub_obj=st, disable=check)
 if add_widget is not None:
 self.layout.addWidget(add_widget)
-# if k == "extraction_kit":
 if k in self.__class__.update_reagent_fields:
 add_widget.input.currentTextChanged.connect(self.scrape_reagents)
 self.disabler = self.DisableReagents(self)
@@ -236,15 +219,10 @@ class SubmissionFormWidget(QWidget):
 self.scrape_reagents(self.extraction_kit)

 def disable_reagents(self):
+"""
+Disables all ReagentFormWidgets in this form/
+"""
 for reagent in self.findChildren(self.ReagentFormWidget):
-# if self.disabler.checkbox.isChecked():
-# # reagent.setVisible(True)
-# # with QSignalBlocker(self.disabler.checkbox) as b:
-# reagent.flip_check()
-# else:
-# # reagent.setVisible(False)
-# # with QSignalBlocker(self.disabler.checkbox) as b:
-# reagent.check.setChecked(False)
 reagent.flip_check(self.disabler.checkbox.isChecked())


@@ -263,7 +241,6 @@ class SubmissionFormWidget(QWidget):
 Returns:
 self.InfoItem: Form widget to hold name:value
 """
-# logger.debug(f"Key: {key}, Disable: {disable}")
 if isinstance(submission_type, str):
 submission_type = SubmissionType.query(name=submission_type)
 if key not in self.ignore:
@@ -276,7 +253,6 @@ class SubmissionFormWidget(QWidget):
 case _:
 widget = self.InfoItem(parent=self, key=key, value=value, submission_type=submission_type,
 sub_obj=sub_obj)
-# logger.debug(f"Setting widget enabled to: {not disable}")
 if disable:
 widget.input.setEnabled(False)
 widget.input.setToolTip("Widget disabled to protect database integrity.")
@@ -298,24 +274,20 @@ class SubmissionFormWidget(QWidget):
 """
 self.extraction_kit = args[0]
 report = Report()
-logger.debug(f"Extraction kit: {self.extraction_kit}")
 # NOTE: Remove previous reagent widgets
 try:
 old_reagents = self.find_widgets()
 except AttributeError:
 logger.error(f"Couldn't find old reagents.")
 old_reagents = []
-# logger.debug(f"\n\nAttempting to clear: {old_reagents}\n\n")
 for reagent in old_reagents:
 if isinstance(reagent, self.ReagentFormWidget) or isinstance(reagent, QPushButton):
 reagent.setParent(None)
 reagents, integrity_report = self.pyd.check_kit_integrity(extraction_kit=self.extraction_kit)
-# logger.debug(f"Got reagents: {pformat(reagents)}")
 for reagent in reagents:
 add_widget = self.ReagentFormWidget(parent=self, reagent=reagent, extraction_kit=self.extraction_kit)
 self.layout.addWidget(add_widget)
 report.add_result(integrity_report)
-# logger.debug(f"Outgoing report: {report.results}")
 if hasattr(self.pyd, "csv"):
 export_csv_btn = QPushButton("Export CSV")
 export_csv_btn.setObjectName("export_csv_btn")
@@ -326,6 +298,7 @@ class SubmissionFormWidget(QWidget):
 self.layout.addWidget(submit_btn)
 submit_btn.clicked.connect(self.submit_new_sample_function)
 self.setLayout(self.layout)
+self.disabler.checkbox.setChecked(True)
 return report

 def clear_form(self):
@@ -365,23 +338,16 @@ class SubmissionFormWidget(QWidget):
 report = Report()
 result = self.parse_form()
 report.add_result(result)
-# logger.debug(f"Submission: {pformat(self.pyd)}")
-# logger.debug("Checking kit integrity...")
 if self.disabler.checkbox.isChecked():
 _, result = self.pyd.check_kit_integrity()
 report.add_result(result)
 if len(result.results) > 0:
 return
-# logger.debug(f"PYD before transformation into SQL:\n\n{self.pyd}\n\n")
 base_submission, result = self.pyd.to_sql()
-# logger.debug(f"SQL object: {pformat(base_submission.__dict__)}")
-# logger.debug(f"Base submission: {base_submission.to_dict()}")
 # NOTE: check output message for issues
-# logger.debug(f"Result of to_sql: {result}")
 try:
 trigger = result.results[-1]
 code = trigger.code
-# logger.debug(f"Code from return: {code}")
 except IndexError as e:
 logger.error(result.results)
 logger.error(f"Problem getting error code: {e}")
@@ -408,11 +374,8 @@ class SubmissionFormWidget(QWidget):
 pass
 # NOTE: add reagents to submission object
 for reagent in base_submission.reagents:
-# logger.debug(f"Updating: {reagent} with {reagent.lot}")
 reagent.update_last_used(kit=base_submission.extraction_kit)
-# logger.debug(f"Final reagents: {pformat(base_submission.reagents)}")
 save_output = base_submission.save()
-# logger.debug(f"Save output: {save_output}")
 # NOTE: update summary sheet
 self.app.table_widget.sub_wid.setData()
 # NOTE: reset form
@@ -423,7 +386,6 @@ class SubmissionFormWidget(QWidget):
 check = True
 if check:
 self.setParent(None)
-# logger.debug(f"All attributes of obj: {pformat(self.__dict__)}")
 return report

 def export_csv_function(self, fname: Path | None = None):
@@ -454,7 +416,6 @@ class SubmissionFormWidget(QWidget):
 info = {}
 reagents = []
 for widget in self.findChildren(QWidget):
-# logger.debug(f"Parsed widget of type {type(widget)}")
 match widget:
 case self.ReagentFormWidget():
 reagent, _ = widget.parse_form()
@@ -464,16 +425,10 @@ class SubmissionFormWidget(QWidget):
 field, value = widget.parse_form()
 if field is not None:
 info[field] = value
-# logger.debug(f"Info: {pformat(info)}")
-logger.debug(f"Reagents going into pyd: {pformat(reagents)}")
 self.pyd.reagents = reagents
-logger.debug(f"Reagents after insertion in pyd: {pformat(self.pyd.reagents)}")
-# logger.debug(f"Attrs not in info: {[k for k, v in self.__dict__.items() if k not in info.keys()]}")
 for item in self.recover:
-# logger.debug(f"Attempting to recover: {item}")
 if hasattr(self, item):
 value = getattr(self, item)
-# logger.debug(f"Setting {item}")
 info[item] = value
 for k, v in info.items():
 self.pyd.set_attribute(key=k, value=v)
@@ -551,9 +506,6 @@ class SubmissionFormWidget(QWidget):
 except (TypeError, KeyError):
 pass
 obj = parent.parent().parent()
-# logger.debug(f"Object: {obj}")
-# logger.debug(f"Parent: {parent.parent()}")
-# logger.debug(f"Creating widget for: {key}")
 match key:
 case 'submitting_lab':
 add_widget = MyQComboBox(scrollWidget=parent)
@@ -567,7 +519,6 @@ class SubmissionFormWidget(QWidget):
 looked_up_lab = Organization.query(name=value, limit=1)
 except AttributeError:
 looked_up_lab = None
-# logger.debug(f"\n\nLooked up lab: {looked_up_lab}")
 if looked_up_lab:
 try:
 labs.remove(str(looked_up_lab.name))
@@ -586,12 +537,9 @@ class SubmissionFormWidget(QWidget):
 # NOTE: create combobox to hold looked up kits
 add_widget = MyQComboBox(scrollWidget=parent)
 # NOTE: lookup existing kits by 'submission_type' decided on by sheetparser
-# logger.debug(f"Looking up kits used for {submission_type}")
 uses = [item.name for item in submission_type.kit_types]
 obj.uses = uses
-# logger.debug(f"Kits received for {submission_type}: {uses}")
 if check_not_nan(value):
-# logger.debug(f"The extraction kit in parser was: {value}")
 try:
 uses.insert(0, uses.pop(uses.index(value)))
 except ValueError:
@@ -626,7 +574,6 @@ class SubmissionFormWidget(QWidget):
 else:
 # NOTE: anything else gets added in as a line edit
 add_widget = QLineEdit()
-# logger.debug(f"Setting widget text to {str(value).replace('_', ' ')}")
 add_widget.setText(str(value).replace("_", " "))
 add_widget.setToolTip(f"Enter value for {key}")
 if add_widget is not None:
@@ -725,7 +672,6 @@ class SubmissionFormWidget(QWidget):
 if not self.lot.isEnabled():
 return None, report
 lot = self.lot.currentText()
-# logger.debug(f"Using this lot for the reagent {self.reagent}: {lot}")
 wanted_reagent = Reagent.query(lot=lot, role=self.reagent.role)
 # NOTE: if reagent doesn't exist in database, offer to add it (uses App.add_reagent)
 if wanted_reagent is None:
@@ -741,7 +687,6 @@ class SubmissionFormWidget(QWidget):
 return wanted_reagent, report
 else:
 # NOTE: In this case we will have an empty reagent and the submission will fail kit integrity check
-# logger.debug("Will not add reagent.")
 report.add_result(Result(msg="Failed integrity check", status="Critical"))
 return None, report
 else:
@@ -791,7 +736,6 @@ class SubmissionFormWidget(QWidget):
 looked_up_rt = KitTypeReagentRoleAssociation.query(reagent_role=reagent.role,
 kit_type=extraction_kit)
 relevant_reagents = [str(item.lot) for item in looked_up_rt.get_all_relevant_reagents()]
-logger.debug(f"Relevant reagents for {reagent}: {relevant_reagents}")
 # NOTE: if reagent in sheet is not found insert it into the front of relevant reagents so it shows
 if str(reagent.lot) not in relevant_reagents:
 if check_not_nan(reagent.lot):
@@ -803,7 +747,6 @@ class SubmissionFormWidget(QWidget):
 looked_up_reg = None
 if isinstance(looked_up_reg, list):
 looked_up_reg = None
-# logger.debug(f"Because there was no reagent listed for {reagent.lot}, we will insert the last lot used: {looked_up_reg}")
 if looked_up_reg:
 try:
 relevant_reagents.remove(str(looked_up_reg.lot))
@@ -812,15 +755,11 @@ class SubmissionFormWidget(QWidget):
 relevant_reagents.insert(0, str(looked_up_reg.lot))
 else:
 if len(relevant_reagents) > 1:
-# logger.debug(f"Found {reagent.lot} in relevant reagents: {relevant_reagents}. Moving to front of list.")
 idx = relevant_reagents.index(str(reagent.lot))
-# logger.debug(f"The index we got for {reagent.lot} in {relevant_reagents} was {idx}")
 moved_reag = relevant_reagents.pop(idx)
 relevant_reagents.insert(0, moved_reag)
 else:
-# logger.debug(f"Found {reagent.lot} in relevant reagents: {relevant_reagents}. But no need to move due to short list.")
 pass
-logger.debug(f"New relevant reagents: {relevant_reagents}")
 self.setObjectName(f"lot_{reagent.role}")
 self.addItems(relevant_reagents)
 self.setToolTip(f"Enter lot number for the reagent used for {reagent.role}")
@@ -35,7 +35,6 @@ class Summary(InfoPane):
 def update_data(self):
 super().update_data()
 orgs = [self.org_select.itemText(i) for i in range(self.org_select.count()) if self.org_select.itemChecked(i)]
-# logger.debug(f"Getting report from {self.start_date} to {self.end_date} using {orgs}")
 self.report_obj = ReportMaker(start_date=self.start_date, end_date=self.end_date, organizations=orgs)
 self.webview.setHtml(self.report_obj.html)
 if self.report_obj.subs:
@@ -27,14 +27,14 @@ timezone = tz("America/Winnipeg")

 logger = logging.getLogger(f"submissions.{__name__}")

-logger.debug(f"Package dir: {project_path}")
+logger.info(f"Package dir: {project_path}")

 if platform.system() == "Windows":
 os_config_dir = "AppData/local"
-print(f"Got platform Windows, config_dir: {os_config_dir}")
+logger.info(f"Got platform Windows, config_dir: {os_config_dir}")
 else:
 os_config_dir = ".config"
-print(f"Got platform other, config_dir: {os_config_dir}")
+logger.info(f"Got platform other, config_dir: {os_config_dir}")

 main_aux_dir = Path.home().joinpath(f"{os_config_dir}/submissions")

@@ -184,7 +184,6 @@ def convert_nans_to_nones(input_str) -> str | None:
 Returns:
 str: _description_
 """
-# logger.debug(f"Input value of: {input_str}")
 if check_not_nan(input_str):
 return input_str
 return None
@@ -512,7 +511,6 @@ def get_config(settings_path: Path | str | None = None) -> Settings:
 Returns:
 Settings: Pydantic settings object
 """
-# logger.debug(f"Creating settings...")
 if isinstance(settings_path, str):
 settings_path = Path(settings_path)

@@ -566,7 +564,6 @@ def get_config(settings_path: Path | str | None = None) -> Settings:
 default_settings = yaml.load(dset, Loader=yaml.Loader)
 settings = Settings(**default_settings)
 settings.save(settings_path=settings_path)
-# logger.debug(f"Using {settings_path} for config file.")
 with open(settings_path, "r") as stream:
 settings = yaml.load(stream, Loader=yaml.Loader)
 return Settings(**settings)
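The two get_config hunks above only trim debug lines; the surrounding context shows the mechanism itself: defaults are written out to settings_path, then the YAML file is read back and validated through the pydantic Settings model. A stripped-down sketch of that read-back step is shown below. The SettingsSketch fields are invented for illustration; the real Settings model is not part of this diff.

from pathlib import Path

import yaml
from pydantic import BaseModel


class SettingsSketch(BaseModel):
    """Stand-in for the project's Settings model; field names are hypothetical."""
    database_schema: str = "sqlite"
    verbosity: int = 3


def load_settings(settings_path: Path) -> SettingsSketch:
    # Mirrors the tail of get_config: read the YAML config and validate it by
    # constructing the pydantic model from the resulting dict.
    with open(settings_path, "r") as stream:
        raw = yaml.load(stream, Loader=yaml.Loader)
    return SettingsSketch(**raw)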
@@ -755,7 +752,6 @@ def setup_lookup(func):
 raise ValueError("Could not sanitize dictionary in query. Make sure you parse it first.")
 elif v is not None:
 sanitized_kwargs[k] = v
-# logger.debug(f"sanitized kwargs: {sanitized_kwargs}")
 return func(*args, **sanitized_kwargs)

 return wrapper
@@ -800,7 +796,6 @@ class Result(BaseModel, arbitrary_types_allowed=True):
 logger.error(f"Exception origin: {origin}")
 if "unique constraint failed:" in origin:
 field = " ".join(origin.split(".")[1:]).replace("_", " ").upper()
-# logger.debug(field)
 value = f"{field} doesn't have a unique value.\nIt must be changed."
 else:
 value = f"Got unknown integrity error: {value}"
@@ -844,7 +839,6 @@ class Report(BaseModel):
 except AttributeError:
 logger.error(f"Problem adding result.")
 case Report():
-# logger.debug(f"Adding all results in report to new report")
 for res in result.results:
 logger.info(f"Adding {res} from {result} to results.")
 self.results.append(res)
@@ -934,7 +928,7 @@ def check_authorization(func):
 """

 def wrapper(*args, **kwargs):
-logger.debug(f"Checking authorization")
+logger.info(f"Checking authorization")
 if is_power_user():
 return func(*args, **kwargs)
 else:
@@ -957,7 +951,7 @@ def report_result(func):

 """
 def wrapper(*args, **kwargs):
-logger.debug(f"Report result being called by {func.__name__}")
+logger.info(f"Report result being called by {func.__name__}")
 output = func(*args, **kwargs)
 match output:
 case Report():
@@ -970,14 +964,13 @@ def report_result(func):
 case _:
 report = None
 return report
-logger.debug(f"Got report: {report}")
+logger.info(f"Got report: {report}")
 try:
 results = report.results
 except AttributeError:
 logger.error("No results available")
 results = []
 for iii, result in enumerate(results):
-logger.debug(f"Result {iii}: {result}")
 try:
 dlg = result.report()
 dlg.exec()
@@ -990,7 +983,6 @@ def report_result(func):
 true_output = true_output[0]
 else:
 true_output = None
-# logger.debug(f"Returning true output: {true_output}")
 return true_output
 return wrapper
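The report_result hunks above leave the control flow untouched and only adjust log levels: the wrapper runs the decorated function, pulls a Report out of whatever it returned, pops a dialog per Result, and hands back the remaining output. A simplified, self-contained sketch of that flow follows. It uses stand-in _Report and _Result classes and a print-based dialog because the real Report, Result and Qt dialog classes live elsewhere in the codebase, and the return-value convention shown here is assumed, not confirmed by this diff.

import functools
import logging
from dataclasses import dataclass, field

logger = logging.getLogger(f"submissions.{__name__}")


@dataclass
class _Result:
    msg: str

    def report(self):
        # The real Result.report() builds a Qt dialog; a tiny object with an
        # exec() method keeps the sketch runnable without a GUI.
        class _Dlg:
            def __init__(self, msg):
                self.msg = msg

            def exec(self):
                print(self.msg)

        return _Dlg(self.msg)


@dataclass
class _Report:
    results: list = field(default_factory=list)


def report_result_sketch(func):
    """Assumed convention: the wrapped function returns a _Report, a
    (value, _Report) tuple, or anything else (treated as having no report)."""

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        output = func(*args, **kwargs)
        match output:
            case _Report():
                report, true_output = output, None
            case (value, _Report() as rep):
                report, true_output = rep, value
            case _:
                report, true_output = None, output
        logger.info(f"Got report: {report}")
        for result in getattr(report, "results", []):
            result.report().exec()  # show one dialog per collected result
        return true_output

    return wrapper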