From 7f6a476cd7c74538cc55c46b26962d45e2c15f27 Mon Sep 17 00:00:00 2001 From: lwark Date: Fri, 6 Sep 2024 11:22:05 -0500 Subject: [PATCH] Improved generic submissions to replace viral culture addition. --- .../versions/0746f7e2c10e_rebuild_20240723.py | 40 ++-- .../ef1a0222b882_adding_viral_cultures.py | 24 +- docs/source/conf.py | 2 +- src/submissions/backend/db/models/controls.py | 2 +- src/submissions/backend/db/models/kits.py | 5 +- .../backend/db/models/submissions.py | 213 +++++++++--------- src/submissions/backend/excel/parser.py | 11 +- .../backend/validators/__init__.py | 14 +- src/submissions/backend/validators/pydant.py | 4 +- .../frontend/widgets/controls_chart.py | 2 +- .../frontend/widgets/submission_widget.py | 4 +- 11 files changed, 166 insertions(+), 155 deletions(-) diff --git a/alembic/versions/0746f7e2c10e_rebuild_20240723.py b/alembic/versions/0746f7e2c10e_rebuild_20240723.py index 7f7cc63..b8de6e9 100644 --- a/alembic/versions/0746f7e2c10e_rebuild_20240723.py +++ b/alembic/versions/0746f7e2c10e_rebuild_20240723.py @@ -33,67 +33,67 @@ def upgrade() -> None: ) op.create_table('_contact', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('name', sa.String(length=64), nullable=True), sa.Column('email', sa.String(length=64), nullable=True), sa.Column('phone', sa.String(length=32), nullable=True), sa.PrimaryKeyConstraint('id') ) op.create_table('_controltype', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('name', sa.String(length=255), nullable=True), sa.Column('targets', sa.JSON(), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name') ) op.create_table('_equipment', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('name', sa.String(length=64), nullable=True), sa.Column('nickname', sa.String(length=64), nullable=True), sa.Column('asset_number', sa.String(length=16), nullable=True), sa.PrimaryKeyConstraint('id') ) op.create_table('_equipmentrole', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('name', sa.String(length=32), nullable=True), sa.PrimaryKeyConstraint('id') ) op.create_table('_kittype', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('name', sa.String(length=64), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name') ) op.create_table('_organization', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('name', sa.String(length=64), nullable=True), sa.Column('cost_centre', sa.String(), nullable=True), sa.PrimaryKeyConstraint('id') ) op.create_table('_process', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('name', sa.String(length=64), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name') ) op.create_table('_reagentrole', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('name', sa.String(length=64), nullable=True), sa.Column('eol_ext', sa.Interval(), nullable=True), sa.PrimaryKeyConstraint('id') )
op.create_table('_submissiontype', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('name', sa.String(length=128), nullable=True), sa.Column('info_map', sa.JSON(), nullable=True), sa.Column('defaults', sa.JSON(), nullable=True), sa.Column('template_file', sa.LargeBinary(), nullable=True), sa.Column('sample_map', sa.JSON(), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name') ) op.create_table('_tiprole', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('name', sa.String(length=64), nullable=True), sa.PrimaryKeyConstraint('id') ) op.create_table('_bacterialculturesample', @@ -107,7 +107,7 @@ def upgrade() -> None: sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('kit_id', sa.INTEGER(), nullable=True), sa.Column('client_id', sa.INTEGER(), nullable=True), sa.Column('name', sa.String(length=128), nullable=True), sa.Column('amount', sa.FLOAT(precision=2), nullable=True), sa.ForeignKeyConstraint(['client_id'], ['_organization.id'], name='fk_org_id', ondelete='SET NULL'), sa.ForeignKeyConstraint(['kit_id'], ['_kittype.id'], name='fk_kit_type_id', ondelete='SET NULL'), @@ -164,7 +164,7 @@ def upgrade() -> None: op.create_table('_reagent', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('role_id', sa.INTEGER(), nullable=True), sa.Column('name', sa.String(length=64), nullable=True), sa.Column('lot', sa.String(length=64), nullable=True), sa.Column('expiry', sa.TIMESTAMP(), nullable=True), sa.ForeignKeyConstraint(['role_id'], ['_reagentrole.id'], name='fk_reagent_role_id', ondelete='SET NULL'), @@ -207,7 +207,7 @@ def upgrade() -> None: op.create_table('_tips', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('role_id', sa.INTEGER(), nullable=True), sa.Column('name', sa.String(length=64), nullable=True), sa.Column('lot', sa.String(length=64), nullable=True), sa.ForeignKeyConstraint(['role_id'], ['_tiprole.id'], name='fk_tip_role_id', ondelete='SET NULL'), sa.PrimaryKeyConstraint('id') @@ -245,7 +245,7 @@ def upgrade() -> None: sa.ForeignKeyConstraint(['contact_id'], ['_contact.id'], name='fk_BS_contact_id', ondelete='SET NULL'), sa.ForeignKeyConstraint(['extraction_kit_id'], ['_kittype.id'], name='fk_BS_extkit_id', ondelete='SET NULL'), sa.ForeignKeyConstraint(['reagents_id'], ['_reagent.id'], name='fk_BS_reagents_id', ondelete='SET NULL'), sa.ForeignKeyConstraint(['submission_type_name'], ['_submissiontype.name'], name='fk_BS_subtype_name', ondelete='SET NULL'), sa.ForeignKeyConstraint(['submitting_lab_id'], ['_organization.id'], name='fk_BS_sublab_id', ondelete='SET NULL'), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('rsl_plate_num'), @@ -277,7 +277,7 @@ def upgrade() -> None: op.create_table('_control', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('parent_id', sa.INTEGER(), nullable=True), sa.Column('name', sa.String(length=255), nullable=True), sa.Column('submitted_date', sa.TIMESTAMP(), nullable=True), sa.Column('contains', sa.JSON(), nullable=True), sa.Column('matches',
sa.JSON(), nullable=True), @@ -291,7 +291,7 @@ def upgrade() -> None: sa.ForeignKeyConstraint(['sample_id'], ['_basicsample.id'], name='cont_BCS_id', ondelete='SET NULL'), sa.ForeignKeyConstraint(['submission_id'], ['_basicsubmission.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name') ) op.create_table('_submissionequipmentassociation', sa.Column('equipment_id', sa.INTEGER(), nullable=False), diff --git a/alembic/versions/ef1a0222b882_adding_viral_cultures.py b/alembic/versions/ef1a0222b882_adding_viral_cultures.py index 881f2e1..6ab1a77 100644 --- a/alembic/versions/ef1a0222b882_adding_viral_cultures.py +++ b/alembic/versions/ef1a0222b882_adding_viral_cultures.py @@ -24,7 +24,7 @@ def upgrade() -> None: sa.PrimaryKeyConstraint('id') ) # with op.batch_alter_table('_process', schema=None) as batch_op: # batch_op.create_unique_constraint("process_uni", ['name']) # # with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op: # batch_op.create_unique_constraint("subsamp_uni", ['id']) @@ -40,17 +40,17 @@ def upgrade() -> None: def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('_wastewaterarticassociation', schema=None) as batch_op: - batch_op.alter_column('source_plate', - existing_type=sa.String(length=32), - type_=sa.VARCHAR(length=16), - existing_nullable=True) - - with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op: - batch_op.drop_constraint("subsamp_uni", type_='unique') - - with op.batch_alter_table('_process', schema=None) as batch_op: - batch_op.drop_constraint("process_uni", type_='unique') + # with op.batch_alter_table('_wastewaterarticassociation', schema=None) as batch_op: + # batch_op.alter_column('source_plate', + # existing_type=sa.String(length=32), + # type_=sa.VARCHAR(length=16), + # existing_nullable=True) + # + # with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op: + # batch_op.drop_constraint("subsamp_uni", type_='unique') + # + # with op.batch_alter_table('_process', schema=None) as batch_op: + # batch_op.drop_constraint("process_uni", type_='unique') op.drop_table('_viralculture') # ### end Alembic commands ### diff --git a/docs/source/conf.py b/docs/source/conf.py index bbc9ccb..dab0672 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -13,7 +13,7 @@ from submissions import __version__, __copyright__, __author__ project = 'RSL Submissions' copyright = __copyright__ author = f"{__author__['name']} - {__author__['email']}" release = __version__ # -- General configuration --------------------------------------------------- diff --git a/src/submissions/backend/db/models/controls.py b/src/submissions/backend/db/models/controls.py index 6595ef9..25711fa 100644 --- a/src/submissions/backend/db/models/controls.py +++ b/src/submissions/backend/db/models/controls.py @@ -58,7 +58,7 @@ class ControlType(BaseClass): Get subtypes associated with this controltype (currently used only for Kraken) Args: mode (str): analysis mode name Returns: List[str]: list of subtypes available diff --git a/src/submissions/backend/db/models/kits.py b/src/submissions/backend/db/models/kits.py index b1d88c6..693ea3a 100644 --- a/src/submissions/backend/db/models/kits.py +++ b/src/submissions/backend/db/models/kits.py @@
-5,7 +5,7 @@ from __future__ import annotations import datetime import json -from pprint import pprint +from pprint import pprint, pformat from sqlalchemy import Column, String, TIMESTAMP, JSON, INTEGER, ForeignKey, Interval, Table, FLOAT, BLOB from sqlalchemy.orm import relationship, validates, Query @@ -846,7 +846,7 @@ class SubmissionType(BaseClass): pass return cls.execute_query(query=query, limit=limit) - def to_dict(self): + def to_export_dict(self): base_dict = dict(name=self.name) base_dict['info'] = self.construct_info_map(mode='export') base_dict['defaults'] = self.defaults @@ -874,6 +874,7 @@ class SubmissionType(BaseClass): return None with open(filepath, "r") as f: import_dict = json.load(fp=f) + logger.debug(pformat(import_dict)) submission_type = cls.query(name=import_dict['name']) if submission_type: return submission_type diff --git a/src/submissions/backend/db/models/submissions.py b/src/submissions/backend/db/models/submissions.py index 324f611..fa911d1 100644 --- a/src/submissions/backend/db/models/submissions.py +++ b/src/submissions/backend/db/models/submissions.py @@ -113,15 +113,6 @@ class BasicSubmission(BaseClass): __mapper_args__ = { "polymorphic_identity": "Basic Submission", "polymorphic_on": submission_type_name, - # "polymorphic_on": case( - # - # (submission_type_name == "Bacterial Culture", "Bacterial Culture"), - # (submission_type_name == "Wastewater Artic", "Wastewater Artic"), - # (submission_type_name == "Wastewater", "Wastewater"), - # (submission_type_name == "Viral Culture", "Bacterial Culture"), - # - # else_="Basic Sample" - # ), "with_polymorphic": "*", } @@ -160,7 +151,7 @@ class BasicSubmission(BaseClass): return output @classmethod - def get_default_info(cls, *args): + def get_default_info(cls, *args, submission_type: SubmissionType | None = None): # NOTE: Create defaults for all submission_types parent_defs = super().get_default_info() recover = ['filepath', 'samples', 'csv', 'comment', 'equipment'] @@ -185,7 +176,10 @@ class BasicSubmission(BaseClass): continue else: output[k] = v - st = cls.get_submission_type() + if isinstance(submission_type, SubmissionType): + st = submission_type + else: + st = cls.get_submission_type(submission_type) if st is None: logger.error("No default info for BasicSubmission.") else: @@ -205,18 +199,23 @@ class BasicSubmission(BaseClass): return output @classmethod - def get_submission_type(cls) -> SubmissionType: + def get_submission_type(cls, sub_type: str | SubmissionType | None = None) -> SubmissionType: """ Gets the SubmissionType associated with this class Returns: SubmissionType: SubmissionType with name equal to this polymorphic identity """ - name = cls.__mapper_args__['polymorphic_identity'] - return SubmissionType.query(name=name) + match sub_type: + case str(): + return SubmissionType.query(name=sub_type) + case SubmissionType(): + return sub_type + case _: + return SubmissionType.query(cls.__mapper_args__['polymorphic_identity']) @classmethod - def construct_info_map(cls, mode: Literal["read", "write"]) -> dict: + def construct_info_map(cls, submission_type:SubmissionType|None=None, mode: Literal["read", "write"]="read") -> dict: """ Method to call submission type's construct info map. @@ -226,17 +225,17 @@ class BasicSubmission(BaseClass): Returns: dict: Map of info locations. 
""" - return cls.get_submission_type().construct_info_map(mode=mode) + return cls.get_submission_type(submission_type).construct_info_map(mode=mode) @classmethod - def construct_sample_map(cls) -> dict: + def construct_sample_map(cls, submission_type:SubmissionType|None=None) -> dict: """ Method to call submission type's construct_sample_map Returns: dict: sample location map """ - return cls.get_submission_type().construct_sample_map() + return cls.get_submission_type(submission_type).construct_sample_map() @classmethod def finalize_details(cls, input_dict: dict) -> dict: @@ -466,7 +465,7 @@ class BasicSubmission(BaseClass): Returns: pd.DataFrame: Pandas Dataframe of all relevant submissions """ - # logger.debug(f"Querying Type: {submission_type}") + logger.debug(f"Querying Type: {submission_type}") # logger.debug(f"Using limit: {limit}") # NOTE: use lookup function to create list of dicts subs = [item.to_dict() for item in @@ -635,14 +634,13 @@ class BasicSubmission(BaseClass): super().save() @classmethod - def get_regex(cls) -> str: - """ - Dummy for inheritence. - - Returns: - str: Regex for submission type. - """ - return cls.construct_regex() + def get_regex(cls, submission_type:SubmissionType|str|None=None): + # logger.debug(f"Attempting to get regex for {cls.__mapper_args__['polymorphic_identity']}") + logger.debug(f"Attempting to get regex for {submission_type}") + try: + return cls.get_submission_type(submission_type).defaults['regex'] + except AttributeError as e: + raise AttributeError(f"Couldn't get submission type for {cls.__mapper_args__['polymorphic_identity']}") # Polymorphic functions @@ -654,7 +652,8 @@ class BasicSubmission(BaseClass): Returns: re.Pattern: Regular expression pattern to discriminate between submission types. """ - rstring = rf'{"|".join([item.get_regex() for item in cls.__subclasses__()])}' + # rstring = rf'{"|".join([item.get_regex() for item in cls.__subclasses__()])}' + rstring = rf'{"|".join([item.defaults["regex"] for item in SubmissionType.query()])}' regex = re.compile(rstring, flags=re.IGNORECASE | re.VERBOSE) return regex @@ -685,7 +684,7 @@ class BasicSubmission(BaseClass): # item.__mapper_args__['polymorphic_identity'] == polymorphic_identity][0] model = cls.__mapper__.polymorphic_map[polymorphic_identity].class_ except Exception as e: - logger.error(f"Could not get polymorph {polymorphic_identity} of {cls} due to {e}") + logger.error(f"Could not get polymorph {polymorphic_identity} of {cls} due to {e}, falling back to BasicSubmission") case _: pass if attrs is None or len(attrs) == 0: @@ -796,11 +795,13 @@ class BasicSubmission(BaseClass): # logger.info(f"Hello from {cls.__mapper_args__['polymorphic_identity']} Enforcer!") from backend.validators import RSLNamer # logger.debug(f"instr coming into {cls}: {instr}") - # logger.debug(f"data coming into {cls}: {data}") - defaults = cls.get_default_info("abbreviation", "submission_type") - data['abbreviation'] = defaults['abbreviation'] - if 'submission_type' not in data.keys() or data['submission_type'] in [None, ""]: - data['submission_type'] = defaults['submission_type'] + logger.debug(f"data coming into {cls}: {data}") + # defaults = cls.get_default_info("abbreviation", "submission_type") + data['abbreviation'] = cls.get_default_info("abbreviation", submission_type=data['submission_type']) + # logger.debug(f"Default info: {defaults}") + # data['abbreviation'] = defaults['abbreviation'] + # if 'submission_type' not in data.keys() or data['submission_type'] in [None, ""]: + # 
data['submission_type'] = defaults['submission_type'] if instr in [None, ""]: # logger.debug("Sending to RSLNamer to make new plate name.") outstr = RSLNamer.construct_new_plate_name(data=data) @@ -829,9 +830,11 @@ class BasicSubmission(BaseClass): except AttributeError as e: repeat = "" outstr = re.sub(r"(-\dR)\d?", rf"\1 {repeat}", outstr).replace(" ", "") - abb = cls.get_default_info('abbreviation') - outstr = re.sub(rf"RSL{abb}", rf"RSL-{abb}", outstr) - return re.sub(rf"{abb}(\d)", rf"{abb}-\1", outstr) + # abb = cls.get_default_info('abbreviation') + # outstr = re.sub(rf"RSL{abb}", rf"RSL-{abb}", outstr) + # return re.sub(rf"{abb}(\d)", rf"{abb}-\1", outstr) + outstr = re.sub(rf"RSL{data['abbreviation']}", rf"RSL-{data['abbreviation']}", outstr) + return re.sub(rf"{data['abbreviation']}(\d)", rf"{data['abbreviation']}-\1", outstr) @classmethod def parse_pcr(cls, xl: Workbook, rsl_plate_num: str) -> list: @@ -1261,15 +1264,15 @@ class BacterialCulture(BasicSubmission): output['controls'] = [item.to_sub_dict() for item in self.controls] return output - @classmethod - def get_regex(cls) -> str: - """ - Retrieves string for regex construction. - - Returns: - str: string for regex construction - """ - return "(?PRSL(?:-|_)?BC(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)" + # @classmethod + # def get_regex(cls) -> str: + # """ + # Retrieves string for regex construction. + # + # Returns: + # str: string for regex construction + # """ + # return "(?PRSL(?:-|_)?BC(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)" @classmethod def filename_template(cls): @@ -1339,47 +1342,47 @@ class BacterialCulture(BasicSubmission): return input_dict -class ViralCulture(BasicSubmission): - - id = Column(INTEGER, ForeignKey('_basicsubmission.id'), primary_key=True) - __mapper_args__ = dict(polymorphic_identity="Viral Culture", - polymorphic_load="inline", - inherit_condition=(id == BasicSubmission.id)) - - @classmethod - def get_regex(cls) -> str: - """ - Retrieves string for regex construction. - - Returns: - str: string for regex construction - """ - return "(?PRSL(?:-|_)?VE(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)" - - @classmethod - def custom_sample_autofill_row(cls, sample, worksheet: Worksheet) -> int: - """ - Extends parent - """ - # logger.debug(f"Checking {sample.well}") - # logger.debug(f"here's the worksheet: {worksheet}") - row = super().custom_sample_autofill_row(sample, worksheet) - df = pd.DataFrame(list(worksheet.values)) - # logger.debug(f"Here's the dataframe: {df}") - idx = df[df[0] == sample.well] - if idx.empty: - new = f"{sample.well[0]}{sample.well[1:].zfill(2)}" - # logger.debug(f"Checking: {new}") - idx = df[df[0] == new] - # logger.debug(f"Here is the row: {idx}") - row = idx.index.to_list()[0] - return row + 1 - - @classmethod - def custom_info_parser(cls, input_dict: dict, xl: Workbook | None = None, custom_fields: dict = {}) -> dict: - input_dict = super().custom_info_parser(input_dict=input_dict, xl=xl, custom_fields=custom_fields) - logger.debug(f"\n\nInfo dictionary:\n\n{pformat(input_dict)}\n\n") - return input_dict +# class ViralCulture(BasicSubmission): +# +# id = Column(INTEGER, ForeignKey('_basicsubmission.id'), primary_key=True) +# __mapper_args__ = dict(polymorphic_identity="Viral Culture", +# polymorphic_load="inline", +# inherit_condition=(id == BasicSubmission.id)) +# +# # @classmethod +# # def get_regex(cls) -> str: +# # """ +# # Retrieves string for regex construction. 
+# # +# # Returns: +# # str: string for regex construction +# # """ +# # return "(?PRSL(?:-|_)?VE(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)" +# +# @classmethod +# def custom_sample_autofill_row(cls, sample, worksheet: Worksheet) -> int: +# """ +# Extends parent +# """ +# # logger.debug(f"Checking {sample.well}") +# # logger.debug(f"here's the worksheet: {worksheet}") +# row = super().custom_sample_autofill_row(sample, worksheet) +# df = pd.DataFrame(list(worksheet.values)) +# # logger.debug(f"Here's the dataframe: {df}") +# idx = df[df[0] == sample.well] +# if idx.empty: +# new = f"{sample.well[0]}{sample.well[1:].zfill(2)}" +# # logger.debug(f"Checking: {new}") +# idx = df[df[0] == new] +# # logger.debug(f"Here is the row: {idx}") +# row = idx.index.to_list()[0] +# return row + 1 +# +# @classmethod +# def custom_info_parser(cls, input_dict: dict, xl: Workbook | None = None, custom_fields: dict = {}) -> dict: +# input_dict = super().custom_info_parser(input_dict=input_dict, xl=xl, custom_fields=custom_fields) +# logger.debug(f"\n\nInfo dictionary:\n\n{pformat(input_dict)}\n\n") +# return input_dict class Wastewater(BasicSubmission): @@ -1499,15 +1502,15 @@ class Wastewater(BasicSubmission): outstr = super().enforce_name(instr=instr, data=data) return outstr - @classmethod - def get_regex(cls) -> str: - """ - Retrieves string for regex construction - - Returns: - str: String for regex construction - """ - return "(?PRSL(?:-|_)?WW(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)" + # @classmethod + # def get_regex(cls) -> str: + # """ + # Retrieves string for regex construction + # + # Returns: + # str: String for regex construction + # """ + # return "(?PRSL(?:-|_)?WW(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)" @classmethod def adjust_autofill_samples(cls, samples: List[Any]) -> List[Any]: @@ -1919,15 +1922,15 @@ class WastewaterArtic(BasicSubmission): # logger.debug(f"Final EN name: {final_en_name}") return final_en_name - @classmethod - def get_regex(cls) -> str: - """ - Retrieves string for regex construction - - Returns: - str: string for regex construction. - """ - return "(?P(\\d{4}-\\d{2}-\\d{2}(?:-|_)(?:\\d_)?artic)|(RSL(?:-|_)?AR(?:-|_)?20\\d{2}-?\\d{2}-?\\d{2}(?:(_|-)\\d?(\\D|$)R?\\d?)?))" + # @classmethod + # def get_regex(cls) -> str: + # """ + # Retrieves string for regex construction + # + # Returns: + # str: string for regex construction. + # """ + # return "(?P(\\d{4}-\\d{2}-\\d{2}(?:-|_)(?:\\d_)?artic)|(RSL(?:-|_)?AR(?:-|_)?20\\d{2}-?\\d{2}-?\\d{2}(?:(_|-)\\d?(\\D|$)R?\\d?)?))" @classmethod def finalize_parse(cls, input_dict: dict, xl: pd.ExcelFile | None = None, info_map: dict | None = None) -> dict: diff --git a/src/submissions/backend/excel/parser.py b/src/submissions/backend/excel/parser.py index 88eec02..5bd7ea7 100644 --- a/src/submissions/backend/excel/parser.py +++ b/src/submissions/backend/excel/parser.py @@ -76,6 +76,7 @@ class SheetParser(object): # NOTE: Rescue submission type using scraped values to be used in Sample, Reagents, etc. 
if v not in [None, "None", "", " "]: self.submission_type = SubmissionType.query(name=v) + logger.debug(f"Updated self.submission_type to {self.submission_type}") case _: self.sub[k] = v @@ -202,7 +203,7 @@ class InfoParser(object): """ self.submission_type = dict(value=self.submission_type_obj.name, missing=True) # logger.debug(f"Looking up submission type: {self.submission_type['value']}") - info_map = self.sub_object.construct_info_map("read") + info_map = self.sub_object.construct_info_map(submission_type=self.submission_type_obj, mode="read") # NOTE: Get the parse_info method from the submission type specified return info_map @@ -385,10 +386,12 @@ class SampleParser(object): self.xl = xl if isinstance(submission_type, str): submission_type = SubmissionType.query(name=submission_type) + logger.debug(f"Sample parser is using submission type: {submission_type}") self.submission_type = submission_type.name self.submission_type_obj = submission_type if sub_object is None: - sub_object = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type_obj.name) + logger.warning(f"Sample parser attempting to fetch submission class with polymorphic identity: {self.submission_type}") + sub_object = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type) self.sub_object = sub_object self.sample_info_map = self.fetch_sample_info_map(submission_type=submission_type, sample_map=sample_map) # logger.debug(f"sample_info_map: {self.sample_info_map}") @@ -406,12 +409,12 @@ class SampleParser(object): dict: Info locations. """ # logger.debug(f"Looking up submission type: {submission_type}") - self.sample_type = self.sub_object.get_default_info("sample_type") + self.sample_type = self.sub_object.get_default_info("sample_type", submission_type=submission_type) self.samp_object = BasicSample.find_polymorphic_subclass(polymorphic_identity=self.sample_type) # logger.debug(f"Got sample class: {self.samp_object.__name__}") # logger.debug(f"info_map: {pformat(se)}") if sample_map is None: - sample_info_map = self.sub_object.construct_sample_map() + sample_info_map = self.sub_object.construct_sample_map(submission_type=self.submission_type_obj) else: sample_info_map = sample_map return sample_info_map diff --git a/src/submissions/backend/validators/__init__.py b/src/submissions/backend/validators/__init__.py index 0bdad30..7108bed 100644 --- a/src/submissions/backend/validators/__init__.py +++ b/src/submissions/backend/validators/__init__.py @@ -26,11 +26,11 @@ class RSLNamer(object): if self.submission_type is None: # logger.debug("Creating submission type because none exists") self.submission_type = self.retrieve_submission_type(filename=filename) - # logger.debug(f"got submission type: {self.submission_type}") + logger.debug(f"got submission type: {self.submission_type}") if self.submission_type is not None: # logger.debug("Retrieving BasicSubmission subclass") self.sub_object = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type) - self.parsed_name = self.retrieve_rsl_number(filename=filename, regex=self.sub_object.get_regex()) + self.parsed_name = self.retrieve_rsl_number(filename=filename, regex=self.sub_object.get_regex(submission_type=sub_type)) if data is None: data = dict(submission_type=self.submission_type) if "submission_type" not in data.keys(): @@ -50,7 +50,7 @@ class RSLNamer(object): """ match filename: case Path(): - # logger.debug(f"Using path method for {filename}.") + logger.debug(f"Using path method for 
{filename}.") + logger.debug(f"Using path method for {filename}.") if filename.exists(): wb = load_workbook(filename) try: @@ -70,12 +70,14 @@ submission_type = cls.retrieve_submission_type(filename=filename.stem.__str__()) case str(): regex = BasicSubmission.construct_regex() - # logger.debug(f"Using string method for {filename}.") + logger.debug(f"Using string method for {filename}.") + logger.debug(f"Using regex: {regex}") m = regex.search(filename) try: submission_type = m.lastgroup + logger.debug(f"Got submission type: {submission_type}") except AttributeError as e: - logger.critical(f"No RSL plate number found or submission type found!: {e}") + logger.critical(f"No submission type found!: {e}") case _: submission_type = None try: @@ -112,7 +114,7 @@ regex = re.compile(rf'{regex}', re.IGNORECASE | re.VERBOSE) except re.error as e: regex = BasicSubmission.construct_regex() - # logger.debug(f"Using regex: {regex}") + logger.debug(f"Using regex: {regex}") match filename: case Path(): m = regex.search(filename.stem) diff --git a/src/submissions/backend/validators/pydant.py b/src/submissions/backend/validators/pydant.py index ded54b6..674f698 100644 --- a/src/submissions/backend/validators/pydant.py +++ b/src/submissions/backend/validators/pydant.py @@ -509,7 +509,7 @@ class PydSubmission(BaseModel, extra='allow'): if check_not_nan(value['value']): return value else: # logger.debug("Constructing plate name.") if "pytest" in sys.modules and sub_type.replace(" ", "") == "BasicSubmission": output = "RSL-BS-Test001" else: @@ -642,7 +642,7 @@ class PydSubmission(BaseModel, extra='allow'): # this could also be done with default_factory self.submission_object = BasicSubmission.find_polymorphic_subclass( polymorphic_identity=self.submission_type['value']) - self.namer = RSLNamer(self.rsl_plate_num['value']) + self.namer = RSLNamer(self.rsl_plate_num['value'], sub_type=self.submission_type['value']) def set_attribute(self, key: str, value): """ diff --git a/src/submissions/frontend/widgets/controls_chart.py b/src/submissions/frontend/widgets/controls_chart.py index 4258a5d..48c0237 100644 --- a/src/submissions/frontend/widgets/controls_chart.py +++ b/src/submissions/frontend/widgets/controls_chart.py @@ -170,7 +170,7 @@ class ControlsViewer(QWidget): Args: ctx (dict): settings passed from gui input_df (list[dict]): list of dictionaries containing records subtype (str | None, optional): name of submission type. Defaults to None.
Returns: DataFrame: dataframe of controls diff --git a/src/submissions/frontend/widgets/submission_widget.py b/src/submissions/frontend/widgets/submission_widget.py index 59f74bf..42c099a 100644 --- a/src/submissions/frontend/widgets/submission_widget.py +++ b/src/submissions/frontend/widgets/submission_widget.py @@ -185,7 +185,7 @@ class SubmissionFormWidget(QWidget): self.pyd = submission self.missing_info = [] st = SubmissionType.query(name=self.pyd.submission_type['value']).get_submission_class() - defaults = st.get_default_info("form_recover", "form_ignore") + defaults = st.get_default_info("form_recover", "form_ignore", submission_type=self.pyd.submission_type['value']) self.recover = defaults['form_recover'] self.ignore = defaults['form_ignore'] # logger.debug(f"Attempting to extend ignore list with {self.pyd.submission_type['value']}") @@ -554,6 +554,8 @@ class SubmissionFormWidget(QWidget): add_widget.addItems(cats) add_widget.setToolTip("Enter submission category or select from list.") case _: + # if key in sub_obj.get_default_info("form_ignore", submission_type=submission_type): + # return None if key in sub_obj.timestamps(): add_widget = MyQDateEdit(calendarPopup=True, scrollWidget=parent) # NOTE: sets submitted date based on date found in excel sheet