Pre code cleanup
alembic/versions/25b86f5ac2d9_add_custom_info_column_to__.py (normal file, 56 lines)
@@ -0,0 +1,56 @@
"""Add custom info column to _basicsubmission

Revision ID: 25b86f5ac2d9
Revises: 0746f7e2c10e
Create Date: 2024-09-24 09:09:15.223556

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '25b86f5ac2d9'
down_revision = '0746f7e2c10e'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('_basicsubmission', schema=None) as batch_op:
        batch_op.add_column(sa.Column('custom', sa.JSON(), nullable=True))

    # with op.batch_alter_table('_process', schema=None) as batch_op:
    #     batch_op.create_unique_constraint(None, ['name'])
    #
    # with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
    #     batch_op.create_unique_constraint(None, ['id'])

    with op.batch_alter_table('_wastewaterarticassociation', schema=None) as batch_op:
        batch_op.alter_column('source_plate',
                              existing_type=sa.VARCHAR(length=16),
                              type_=sa.String(length=32),
                              existing_nullable=True)

    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('_wastewaterarticassociation', schema=None) as batch_op:
        batch_op.alter_column('source_plate',
                              existing_type=sa.String(length=32),
                              type_=sa.VARCHAR(length=16),
                              existing_nullable=True)

    # with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
    #     batch_op.drop_constraint(None, type_='unique')
    #
    # with op.batch_alter_table('_process', schema=None) as batch_op:
    #     batch_op.drop_constraint(None, type_='unique')

    with op.batch_alter_table('_basicsubmission', schema=None) as batch_op:
        batch_op.drop_column('custom')

    # ### end Alembic commands ###
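Note: for context, a minimal sketch of the model-side mapping this migration supports. This is an assumption for illustration only; the real BasicSubmission model lives elsewhere in the repo, declares many more columns, and may use a different base class.

from sqlalchemy import Column, Integer, JSON
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class BasicSubmission(Base):
    # Illustrative mapping only, not the project's actual model definition.
    __tablename__ = "_basicsubmission"
    id = Column(Integer, primary_key=True)
    # Nullable JSON column added by this migration; holds per-submission
    # custom fields parsed from the submission spreadsheet.
    custom = Column(JSON, nullable=True)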
@@ -14,7 +14,8 @@ from sqlalchemy.orm import relationship, validates, Query
from sqlalchemy.ext.associationproxy import association_proxy
from datetime import date
import logging, re
from tools import check_authorization, setup_lookup, Report, Result, jinja_template_loading, check_regex_match
from tools import check_authorization, setup_lookup, Report, Result, jinja_template_loading, check_regex_match, \
    yaml_regex_creator
from typing import List, Literal, Generator, Any
from pandas import ExcelFile
from pathlib import Path
@@ -422,7 +423,7 @@ class Reagent(BaseClass):
        else:
            return f"<Reagent({self.role.name}-{self.lot})>"

    def to_sub_dict(self, extraction_kit: KitType = None, full_data:bool=False) -> dict:
    def to_sub_dict(self, extraction_kit: KitType = None, full_data: bool = False) -> dict:
        """
        dictionary containing values necessary for gui

@@ -873,17 +874,16 @@ class SubmissionType(BaseClass):

    @classmethod
    @check_authorization
    def import_from_json(cls, filepath:Path|str):
    def import_from_json(cls, filepath: Path | str):
        yaml.add_constructor("!regex", yaml_regex_creator)
        if isinstance(filepath, str):
            filepath = Path(filepath)
        if not filepath.exists():
            logging.critical(f"Given file could not be found.")
            return None

        with open(filepath, "r") as f:
            if filepath.suffix == ".json":
                import_dict = json.load(fp=f)
            elif filepath.suffix == ".yml":
                import_dict = yaml.safe_load(stream=f)
                import_dict = yaml.load(stream=f, Loader=yaml.Loader)
            else:
                raise Exception(f"Filetype {filepath.suffix} not supported.")
        logger.debug(pformat(import_dict))
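Note on the loader change above: yaml.add_constructor with no explicit Loader registers the !regex tag only on PyYAML's non-safe loaders, which yaml.safe_load never consults, so the tag only resolves when the file is read with yaml.load(..., Loader=yaml.Loader). A minimal, self-contained sketch of the same pattern; the constructor body and the !regex payload here are illustrative, not the project's yaml_regex_creator.

import re
import yaml


def regex_constructor(loader, node):
    # Illustrative: turn the tagged scalar into a compiled pattern.
    value = loader.construct_scalar(node)
    return re.compile(value)


# Registered on the default (non-safe) loaders; SafeLoader is unaffected.
yaml.add_constructor("!regex", regex_constructor)

doc = r"plate: !regex 'RSL-\d{4}'"
print(yaml.load(doc, Loader=yaml.Loader))   # {'plate': re.compile('RSL-\\d{4}')}
try:
    yaml.safe_load(doc)
except yaml.YAMLError:
    print("safe_load cannot construct the custom tag")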
@@ -901,8 +901,14 @@ class SubmissionType(BaseClass):
            new_kit = KitType(name=kit['kit_type']['name'])
            for role in kit['kit_type']['reagent roles']:
                new_role = ReagentRole.query(name=role['role'])
                eol = datetime.timedelta(role['extension_of_life'])
                if new_role:
                    check = input(f"Found existing role: {new_role.name}. Use this? [Y/n]: ")
                    if check.lower() == "n":
                        new_role = None
                    else:
                        pass
                if not new_role:
                    eol = datetime.timedelta(role['extension_of_life'])
                    new_role = ReagentRole(name=role['role'], eol_ext=eol)
                uses = dict(expiry=role['expiry'], lot=role['lot'], name=role['name'], sheet=role['sheet'])
                ktrr_assoc = KitTypeReagentRoleAssociation(kit_type=new_kit, reagent_role=new_role, uses=uses)
@@ -917,9 +923,18 @@ class SubmissionType(BaseClass):
                )
            for role in kit['kit_type']['equipment roles']:
                new_role = EquipmentRole.query(name=role['role'])
                if new_role:
                    check = input(f"Found existing role: {new_role.name}. Use this? [Y/n]: ")
                    if check.lower() == "n":
                        new_role = None
                    else:
                        pass
                if not new_role:
                    new_role = EquipmentRole(name=role['role'])
                ster_assoc = SubmissionTypeEquipmentRoleAssociation(submission_type=submission_type, equipment_role=new_role)
                    for equipment in Equipment.assign_equipment(equipment_role=new_role):
                        new_role.instances.append(equipment)
                ster_assoc = SubmissionTypeEquipmentRoleAssociation(submission_type=submission_type,
                                                                    equipment_role=new_role)
                try:
                    uses = dict(name=role['name'], process=role['process'], sheet=role['sheet'], static=role['static'])
                except KeyError:
@@ -1160,6 +1175,7 @@ class KitTypeReagentRoleAssociation(BaseClass):
        for rel_reagent in relevant_reagents:
            yield rel_reagent


class SubmissionReagentAssociation(BaseClass):
    """
    table containing submission/reagent associations
@@ -1400,6 +1416,22 @@ class Equipment(BaseClass):
                                   (?P<Labcon>\d{4}-\d{3}-\d{3}-\d$)""",
                                   re.VERBOSE)

    @classmethod
    def assign_equipment(cls, equipment_role: EquipmentRole|str) -> List[Equipment]:
        if isinstance(equipment_role, str):
            equipment_role = EquipmentRole.query(name=equipment_role)
        equipment = cls.query()
        options = "\n".join([f"{ii}. {item.name}" for ii, item in enumerate(equipment)])
        choices = input(f"Enter equipment numbers to add to {equipment_role.name} (space seperated):\n{options}\n\n")
        output = []
        for choice in choices.split(" "):
            try:
                choice = int(choice)
            except (AttributeError, ValueError):
                continue
            output.append(equipment[choice])
        return output


class EquipmentRole(BaseClass):
    """
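Note: the new Equipment.assign_equipment helper is an interactive console prompt. A hypothetical usage sketch, assuming an active database session; "Extractor" is a role name taken from the YAML template further down, and the printed attribute is illustrative.

# Prompts on stdin for space-separated numbers, then returns the chosen
# Equipment instances so they can be attached to the role.
extractors = Equipment.assign_equipment(equipment_role="Extractor")
for instrument in extractors:
    print(instrument.name)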
@@ -167,10 +167,10 @@ class BasicSubmission(BaseClass):
        dicto = dict(
            details_ignore=['excluded', 'reagents', 'samples',
                            'extraction_info', 'comment', 'barcode',
                            'platemap', 'export_map', 'equipment', 'tips'],
                            'platemap', 'export_map', 'equipment', 'tips', 'custom'],
            # NOTE: Fields not placed in ui form
            form_ignore=['reagents', 'ctx', 'id', 'cost', 'extraction_info', 'signed_by', 'comment', 'namer',
                         'submission_object', "tips", 'contact_phone'] + recover,
                         'submission_object', "tips", 'contact_phone', 'custom'] + recover,
            # NOTE: Fields not placed in ui form to be moved to pydantic
            form_recover=recover
        )
@@ -347,12 +347,14 @@ class BasicSubmission(BaseClass):
                logger.error(f"Error setting tips: {e}")
                tips = None
            cost_centre = self.cost_centre
            custom = self.custom
        else:
            reagents = None
            samples = None
            equipment = None
            tips = None
            cost_centre = None
            custom = None
        # logger.debug("Getting comments")
        try:
            comments = self.comment
@@ -381,6 +383,7 @@ class BasicSubmission(BaseClass):
        # logger.debug(f"Setting contact to: {contact} of type: {type(contact)}")
        output["contact"] = contact
        output["contact_phone"] = contact_phone
        output["custom"] = custom
        return output

    def calculate_column_count(self) -> int:
@@ -549,26 +552,29 @@ class BasicSubmission(BaseClass):
            case "ctx" | "csv" | "filepath" | "equipment":
                return
            case item if item in self.jsons():
                # logger.debug(f"Setting JSON attribute.")
                existing = self.__getattribute__(key)
                if value is None or value in ['', 'null']:
                    logger.error(f"No value given, not setting.")
                    return
                if existing is None:
                    existing = []
                if value in existing:
                    logger.warning("Value already exists. Preventing duplicate addition.")
                    return
                else:
                    if isinstance(value, list):
                        existing += value
                    else:
                        if value is not None:
                            if key == "custom":
                                existing = value
                match value:
                    case list():
                        # logger.debug(f"Setting JSON attribute.")
                        existing = self.__getattribute__(key)
                        if value is None or value in ['', 'null']:
                            logger.error(f"No value given, not setting.")
                            return
                        if existing is None:
                            existing = []
                        if value in existing:
                            logger.warning("Value already exists. Preventing duplicate addition.")
                            return
                        else:
                            if isinstance(value, list):
                                existing += value
                            else:
                                existing.append(value)

                        if value is not None:
                            if key == "custom":
                                existing = value
                            else:
                                existing.append(value)
                    case _:
                        existing = value
                self.__setattr__(key, existing)
                flag_modified(self, key)
                return
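Note on the block above: a plain sa.JSON column is not mutation-tracked by SQLAlchemy, so in-place changes to the stored list or dict would otherwise go unnoticed at commit time; that is what the flag_modified call is for. A minimal standalone sketch of the behaviour, using an in-memory SQLite database and an illustrative model rather than the project's classes.

from sqlalchemy import JSON, Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base
from sqlalchemy.orm.attributes import flag_modified

Base = declarative_base()


class Submission(Base):
    __tablename__ = "submission"
    id = Column(Integer, primary_key=True)
    custom = Column(JSON, nullable=True)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    sub = Submission(custom={"plate": "A"})
    session.add(sub)
    session.commit()

    sub.custom["plate"] = "B"        # in-place change: not detected on its own
    flag_modified(sub, "custom")     # mark the attribute dirty so it is persisted
    session.commit()

    session.refresh(sub)
    print(sub.custom)                # {'plate': 'B'}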
@@ -749,9 +755,8 @@ class BasicSubmission(BaseClass):
        # logger.debug(f"Input dict: {input_dict}")
        # logger.debug(f"Custom fields: {custom_fields}")
        input_dict['custom'] = {}
        for k,v in custom_fields.items():
            logger.debug(f"Attempting custom parse of {k}: {v}")

        for k, v in custom_fields.items():
            # logger.debug(f"Attempting custom parse of {k}: {v}")
            match v['type']:
                case "exempt":
                    continue
@@ -766,8 +771,9 @@ class BasicSubmission(BaseClass):
                    if v['start_column'] != v['end_column']:
                        v['end_column'] = v['end_column'] + 1
                    for ii in range(v['start_row'], v['end_row']):
                        for jj in range(v['start_column'], v['end_column']+1):
                            input_dict['custom'][k].append(dict(value=ws.cell(row=ii, column=jj).value, row=ii, column=jj))
                        for jj in range(v['start_column'], v['end_column'] + 1):
                            input_dict['custom'][k].append(
                                dict(value=ws.cell(row=ii, column=jj).value, row=ii, column=jj))
        return input_dict

    @classmethod
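Note: after this parser runs, each 'range' custom field is stored as a list of per-cell records inside the submission's custom JSON. A hypothetical example of the resulting shape; the field name and cell values are invented for illustration.

# Hypothetical shape of input_dict['custom'] after one 'range' field is parsed.
parsed_custom = {
    "test range": [
        {"value": "NTC", "row": 1, "column": 1},
        {"value": "PC", "row": 1, "column": 2},
        # ... one record per cell in the configured row/column window
    ],
}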
@@ -819,7 +825,7 @@ class BasicSubmission(BaseClass):
        logger.info(f"Hello from {cls.__mapper_args__['polymorphic_identity']} autofill")
        logger.debug(f"Input dict: {info}")
        logger.debug(f"Custom fields: {custom_fields}")
        for k,v in custom_fields.items():
        for k, v in custom_fields.items():
            try:
                assert v['type'] in ['exempt', 'range', 'cell']
            except (AssertionError, KeyError):
@@ -1170,6 +1176,7 @@ class BasicSubmission(BaseClass):
            if "submitted_date" not in kwargs.keys():
                instance.submitted_date = date.today()
        else:
            logger.warning(f"Found existing instance: {instance}, asking to overwrite.")
            code = 1
            msg = "This submission already exists.\nWould you like to overwrite?"
            report.add_result(Result(msg=msg, code=code))
@@ -1659,7 +1666,8 @@ class Wastewater(BasicSubmission):
                continue
            copy = dict(submitter_id=sample['submitter_id'], row=row, column=column)
            well_24.append(copy)
        input_dict['origin_plate'] = [item for item in DocxWriter.create_plate_map(sample_list=well_24, rows=4, columns=6)]
        input_dict['origin_plate'] = [item for item in
                                      DocxWriter.create_plate_map(sample_list=well_24, rows=4, columns=6)]
        return input_dict


@@ -1728,14 +1736,22 @@ class WastewaterArtic(BasicSubmission):
            ws = wb[info_dict['sheet']]
            img_loader = SheetImageLoader(ws)
            for ii in range(info_dict['start_row'], info_dict['end_row'] + 1):
                logger.debug(f"Checking row: {ii}")
                # logger.debug(f"Checking row: {ii}")
                for jj in range(info_dict['start_column'], info_dict['end_column'] + 1):
                    # logger.debug(f"Checking column: {jj}")
                    cell_str = f"{row_map[jj]}{ii}"
                    if img_loader.image_in(cell_str):
                        return img_loader.get(cell_str)
                        try:
                            return img_loader.get(cell_str)
                        except ValueError as e:
                            logger.error(f"Could not open image from cell: {cell_str} due to {e}")
                            return None
            return None

        input_dict = super().custom_info_parser(input_dict)

        input_dict['submission_type'] = dict(value="Wastewater Artic", missing=False)

        logger.debug(f"Custom fields: {custom_fields}")
        egel_section = custom_fields['egel_controls']
        ws = xl[egel_section['sheet']]
@@ -65,14 +65,24 @@ class SheetParser(object):
        """
        parser = InfoParser(xl=self.xl, submission_type=self.submission_type, sub_object=self.sub_object)
        info = parser.parse_info()
        self.info_map = parser.map
        try:
            check = info['submission_type']['value'] not in [None, "None", "", " "]
        except KeyError:
            return
        logger.debug(f"Checking old submission type: {self.submission_type.name} against new: {info['submission_type']['value']}")
        if self.submission_type.name != info['submission_type']['value']:
            if info['submission_type']['value'] not in [None, "None", "", " "]:
            # logger.debug(f"info submission type: {info}")
            if check:
                self.submission_type = SubmissionType.query(name=info['submission_type']['value'])
                logger.debug(f"Updated self.submission_type to {self.submission_type}. Rerunning parse.")
                self.parse_info()
                return
        self.info_map = parser.map

            else:
                self.submission_type = RSLNamer.retrieve_submission_type(filename=self.filepath)
                self.parse_info()


        for k, v in info.items():
            match k:
                # NOTE: exclude samples.
@@ -86,7 +96,7 @@ class SheetParser(object):
                    # logger.debug(f"Updated self.submission_type to {self.submission_type}")
                case _:
                    self.sub[k] = v
                    print(f"\n\n {self.sub} \n\n")
                    # print(f"\n\n {self.sub} \n\n")


    def parse_reagents(self, extraction_kit: str | None = None):
@@ -729,6 +729,7 @@ class PydSubmission(BaseModel, extra='allow'):
        dicto = self.improved_dict()
        instance, result = BasicSubmission.query_or_create(submission_type=self.submission_type['value'],
                                                           rsl_plate_num=self.rsl_plate_num['value'])
        logger.debug(f"Result of query or create: {type(result)}")
        report.add_result(result)
        self.handle_duplicate_samples()
        # logger.debug(f"Here's our list of duplicate removed samples: {self.samples}")
@@ -1,7 +1,7 @@
"""
Constructs main application.
"""
import yaml
from pprint import pformat
from PyQt6.QtWidgets import (
    QTabWidget, QWidget, QVBoxLayout,
    QHBoxLayout, QScrollArea, QMainWindow,
@@ -12,17 +12,17 @@ from pathlib import Path

from markdown import markdown
from __init__ import project_path
from tools import check_if_app, Settings, Report, jinja_template_loading
from .functions import select_save_file
from tools import check_if_app, Settings, Report, jinja_template_loading, check_authorization
from .functions import select_save_file,select_open_file
from datetime import date
from .pop_ups import HTMLPop
from .pop_ups import HTMLPop, AlertPop
from .misc import LogParser
import logging, webbrowser, sys, shutil
from .submission_table import SubmissionsSheet
from .submission_widget import SubmissionFormContainer
from .controls_chart import ControlsViewer
from .kit_creator import KitAdder
from .submission_type_creator import SubmissionTypeAdder
from .submission_type_creator import SubmissionTypeAdder, SubmissionType
from .sample_search import SearchBox

logger = logging.getLogger(f'submissions.{__name__}')
@@ -76,7 +76,8 @@ class App(QMainWindow):
        helpMenu.addAction(self.docsAction)
        helpMenu.addAction(self.githubAction)
        fileMenu.addAction(self.importAction)
        fileMenu.addAction(self.yamlAction)
        fileMenu.addAction(self.yamlExportAction)
        fileMenu.addAction(self.yamlImportAction)
        methodsMenu.addAction(self.searchLog)
        methodsMenu.addAction(self.searchSample)
        reportMenu.addAction(self.generateReportAction)
@@ -111,7 +112,8 @@ class App(QMainWindow):
        self.searchLog = QAction("Search Log", self)
        self.searchSample = QAction("Search Sample", self)
        self.githubAction = QAction("Github", self)
        self.yamlAction = QAction("Export Type Template", self)
        self.yamlExportAction = QAction("Export Type Example", self)
        self.yamlImportAction = QAction("Import Type Template", self)

    def _connectActions(self):
        """
@@ -128,7 +130,8 @@ class App(QMainWindow):
        self.searchLog.triggered.connect(self.runSearch)
        self.searchSample.triggered.connect(self.runSampleSearch)
        self.githubAction.triggered.connect(self.openGithub)
        self.yamlAction.triggered.connect(self.export_ST_yaml)
        self.yamlExportAction.triggered.connect(self.export_ST_yaml)
        self.yamlImportAction.triggered.connect(self.import_ST_yaml)

    def showAbout(self):
        """
@@ -207,11 +210,31 @@ class App(QMainWindow):
            yaml_path = Path(sys._MEIPASS).joinpath("resources", "viral_culture.yml")
        else:
            yaml_path = project_path.joinpath("src", "submissions", "resources", "viral_culture.yml")
        with open(yaml_path, "r") as f:
            data = yaml.safe_load(f)
        # with open(yaml_path, "r") as f:
        #     data = yaml.safe_load(f)
        fname = select_save_file(obj=self, default_name="Submission Type Template.yml", extension="yml")
        with open(fname, "w") as f:
            yaml.safe_dump(data=data, stream=f)
        # with open(fname, "w") as f:
        #     yaml.safe_dump(data=data, stream=f)
        shutil.copyfile(yaml_path, fname)

    @check_authorization
    def import_ST_yaml(self, *args, **kwargs):
        fname = select_open_file(obj=self, file_extension="yml")
        if not fname:
            logger.info(f"Import cancelled.")
            return
        ap = AlertPop(message="This function will proceed in the debug window.", status="Warning", owner=self)
        ap.exec()
        st = SubmissionType.import_from_json(filepath=fname)
        if st:
            print(pformat(st.to_export_dict()))
            choice = input("Save the above submission type? [y/N]: ")
            if choice.lower() == "y":
                # st.save()
                pass
        else:
            logger.warning("Save of submission type cancelled.")


class AddSubForm(QWidget):

@@ -31,6 +31,11 @@ def select_open_file(obj: QMainWindow, file_extension: str | None = None) -> Pat
    else:
        fname = Path(
            QFileDialog.getOpenFileName(obj, 'Open file', home_dir, filter=f"{file_extension}(*.{file_extension})")[0])
    if not fname.exists():
        raise FileNotFoundError(f"File {fname.__str__()} could not be found.")
    if fname.__str__() == ".":
        logger.warning(f"No file selected, cancelling.")
        return
    obj.last_dir = fname.parent
    return fname
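Note on the select_open_file change above: when the Qt dialog is cancelled, getOpenFileName returns an empty string for the filename, and pathlib normalises Path("") to Path("."), which is why the new guard compares against "." and returns None instead of raising FileNotFoundError. A small illustrative check, pure pathlib, no Qt required.

from pathlib import Path

# A cancelled QFileDialog.getOpenFileName() yields "" for the filename;
# pathlib normalises the empty string to the current-directory marker.
cancelled = Path("")
print(cancelled)                # .
print(str(cancelled) == ".")    # True -> treat as "no file selected"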
@@ -1,6 +1,8 @@
'''
Contains all submission related frontend functions
'''
import sys

from PyQt6.QtWidgets import (
    QWidget, QPushButton, QVBoxLayout,
    QComboBox, QDateEdit, QLineEdit, QLabel
@@ -96,6 +98,7 @@ class SubmissionFormContainer(QWidget):
        self.import_submission_function(fname)
        return self.report

    @report_result
    def import_submission_function(self, fname: Path | None = None):
        """
        Import a new submission to the app window
@@ -119,7 +122,7 @@ class SubmissionFormContainer(QWidget):
        if isinstance(fname, bool) or fname is None:
            fname = select_open_file(self, file_extension="xlsx")
        # logger.debug(f"Attempting to parse file: {fname}")
        if not fname.exists():
        if not fname:
            report.add_result(Result(msg=f"File {fname.__str__()} not found.", status="critical"))
            return report
        # NOTE: create sheetparser using excel sheet and context from gui
@@ -347,9 +350,14 @@ class SubmissionFormWidget(QWidget):
        # logger.debug(f"SQL object: {pformat(base_submission.__dict__)}")
        # logger.debug(f"Base submission: {base_submission.to_dict()}")
        # NOTE: check output message for issues
        # logger.debug(f"Result of to_sql: {result}")
        try:
            code = report.results[-1].code
        except IndexError:
            trigger = result.results[-1]
            code = trigger.code
            # logger.debug(f"Code from return: {code}")
        except IndexError as e:
            logger.error(result.results)
            logger.error(f"Problem getting error code: {e}")
            code = 0
        match code:
            # NOTE: code 0: everything is fine.
@@ -357,7 +365,7 @@ class SubmissionFormWidget(QWidget):
                pass
            # NOTE: code 1: ask for overwrite
            case 1:
                dlg = QuestionAsker(title=f"Review {base_submission.rsl_plate_num}?", message=result.msg)
                dlg = QuestionAsker(title=f"Review {base_submission.rsl_plate_num}?", message=trigger.msg)
                if dlg.exec():
                    # NOTE: Do not add duplicate reagents.
                    pass
@@ -46,7 +46,35 @@ info:
      row: 6
      sheet: Sample List
    write: []
  custom: {}
  # The 'custom' field is currently under development and will allow parsing, storage and writing of new fields in
  # generic submission types.
  # Examples of the types of fields are given below.
  custom: {
    # The 'cell' type will read a single cell from the excel sheet and write it to the same cell as well as
    # any specified in the write list.
    # "test cell": {
    #   "read": {
    #     "column": 3,
    #     "row": 1,
    #     "sheet": "Plate Map"
    #   },
    #   "type": "cell",
    #   "write": [{
    #     "column": 8,
    #     "row": 8,
    #     "sheet": "Sample List"
    #   }]
    # },
    # The 'range' type will read a group of cells, store them as a list of values, row and column integers
    # and write to that range.
    # "test range": {
    #   "sheet": "First Strand",
    #   "start_row": 1,
    #   "end_row":9,
    #   "start_column": 1,
    #   "end_column": 5
    # }
  }
  extraction_kit:
    read:
      - column: 4
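Note: for reference, here is what such entries would look like once uncommented and loaded, written as the Python dict the parsers above receive. The names and coordinates are the template's own placeholders; the "type": "range" key is an assumption, since the parser matches on v['type'] but the commented example omits it.

# Hypothetical custom_fields dict as produced by loading the template above.
custom_fields = {
    "test cell": {
        "type": "cell",
        "read": {"sheet": "Plate Map", "row": 1, "column": 3},
        "write": [{"sheet": "Sample List", "row": 8, "column": 8}],
    },
    "test range": {
        "type": "range",  # assumed: required by the parser's match v['type']
        "sheet": "First Strand",
        "start_row": 1, "end_row": 9,
        "start_column": 1, "end_column": 5,
    },
}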
@@ -107,9 +135,9 @@ info:
      row: 14
      sheet: Sample List
    write: []
# The 'kits' field holds reagents, equipment and tips information. It's a list of kit_type objects.
kits:
  - constant_cost: 0.0
    kit_type:
  - kit_type:
      name: MagMAX-96 Viral RNA Isolation Kit
      equipment roles:
        - role: Extractor
@@ -281,6 +309,7 @@ kits:
          required: 1
          role: Carrier RNA
          sheet: Sample List
    constant_cost: 0.0
    mutable_cost_column: 0.0
    mutable_cost_sample: 0.0
samples:
@@ -13,7 +13,10 @@
    {{ super() }}
    <p>{% for key, value in sub.items() if key not in sub['excluded'] %}
        <b>{{ key | replace("_", " ") | title | replace("Pcr", "PCR") }}: </b>{% if key=='cost' %}{% if sub['cost'] %} {{ "${:,.2f}".format(value) }}{% endif %}{% else %}{{ value }}{% endif %}<br>
    {% endfor %}</p>
    {% endfor %}
    {% if sub['custom'] %}{% for key, value in sub['custom'].items() %}
        <b>{{ key | replace("_", " ") | title }}: </b>{{ value }}<br>
    {% endfor %}{% endif %}</p>

    <h3><u>Reagents:</u></h3>
    <p>{% for item in sub['reagents'] %}
@@ -831,7 +831,10 @@ class Report(BaseModel):
    results: List[Result] = Field(default=[])

    def __repr__(self):
        return f"Report(result_count:{len(self.results)})"
        return f"<Report(result_count:{len(self.results)})>"

    def __str__(self):
        return f"<Report(result_count:{len(self.results)})>"

    def add_result(self, result: Result | Report | None):
        """
@@ -918,6 +921,7 @@ def yaml_regex_creator(loader, node):
    return f"(?P<{name}>RSL(?:-|_)?{abbr}(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)"


ctx = get_config(None)

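Note on the yaml_regex_creator return value above: inside an f-string, {2} is itself a replacement field (the expression 2), so the \d{2} quantifiers are emitted as \d2 rather than "exactly two digits". If fixed-width quantifiers are the intent, the braces need doubling; a minimal illustration, not a patch to the project code.

abbr = "WW"
broken = f"RSL-{abbr}-20\\d{2}"     # '{2}' is a format field -> quantifier lost
fixed = f"RSL-{abbr}-20\\d{{2}}"    # doubled braces keep the regex quantifier
print(broken)   # RSL-WW-20\d2
print(fixed)    # RSL-WW-20\d{2}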
@@ -1004,3 +1008,5 @@ def report_result(func):
    @check_authorization
    def test_function():
        print("Success!")