Improved generic submissions to replace viral culture addition.
@@ -33,67 +33,67 @@ def upgrade() -> None:
)
op.create_table('_contact',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=True),
sa.Column('email', sa.String(length=64), nullable=True),
sa.Column('phone', sa.String(length=32), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('_controltype',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('targets', sa.JSON(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('_equipment',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=True),
sa.Column('nickname', sa.String(length=64), nullable=True),
sa.Column('asset_number', sa.String(length=16), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('_equipmentrole',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.String(length=32), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('_kittype',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('_organization',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=True),
sa.Column('cost_centre', sa.String(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('_process',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('_reagentrole',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=True),
sa.Column('eol_ext', sa.Interval(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('_submissiontype',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.String(length=128), nullable=True),
sa.Column('info_map', sa.JSON(), nullable=True),
sa.Column('defaults', sa.JSON(), nullable=True),
sa.Column('template_file', sa.LargeBinary(), nullable=True),
sa.Column('sample_map', sa.JSON(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('_tiprole',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('_bacterialculturesample',
@@ -107,7 +107,7 @@ def upgrade() -> None:
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('kit_id', sa.INTEGER(), nullable=True),
sa.Column('client_id', sa.INTEGER(), nullable=True),
sa.Column('name', sa.String(length=128), nullable=True),
sa.Column('amount', sa.FLOAT(precision=2), nullable=True),
sa.ForeignKeyConstraint(['client_id'], ['_organization.id'], name='fk_org_id', ondelete='SET NULL'),
sa.ForeignKeyConstraint(['kit_id'], ['_kittype.id'], name='fk_kit_type_id', ondelete='SET NULL'),
@@ -164,7 +164,7 @@ def upgrade() -> None:
op.create_table('_reagent',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('role_id', sa.INTEGER(), nullable=True),
sa.Column('name', sa.String(length=64), nullable=True),
sa.Column('lot', sa.String(length=64), nullable=True),
sa.Column('expiry', sa.TIMESTAMP(), nullable=True),
sa.ForeignKeyConstraint(['role_id'], ['_reagentrole.id'], name='fk_reagent_role_id', ondelete='SET NULL'),
@@ -207,7 +207,7 @@ def upgrade() -> None:
op.create_table('_tips',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('role_id', sa.INTEGER(), nullable=True),
sa.Column('name', sa.String(length=64), nullable=True),
sa.Column('lot', sa.String(length=64), nullable=True),
sa.ForeignKeyConstraint(['role_id'], ['_tiprole.id'], name='fk_tip_role_id', ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id')
@@ -245,7 +245,7 @@ def upgrade() -> None:
sa.ForeignKeyConstraint(['contact_id'], ['_contact.id'], name='fk_BS_contact_id', ondelete='SET NULL'),
sa.ForeignKeyConstraint(['extraction_kit_id'], ['_kittype.id'], name='fk_BS_extkit_id', ondelete='SET NULL'),
sa.ForeignKeyConstraint(['reagents_id'], ['_reagent.id'], name='fk_BS_reagents_id', ondelete='SET NULL'),
sa.ForeignKeyConstraint(['submission_type_name'], ['_submissiontype.name'], name='fk_BS_subtype_name', ondelete='SET NULL'),
sa.ForeignKeyConstraint(['submitting_lab_id'], ['_organization.id'], name='fk_BS_sublab_id', ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('rsl_plate_num'),
@@ -277,7 +277,7 @@ def upgrade() -> None:
op.create_table('_control',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('parent_id', sa.INTEGER(), nullable=True),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('submitted_date', sa.TIMESTAMP(), nullable=True),
sa.Column('contains', sa.JSON(), nullable=True),
sa.Column('matches', sa.JSON(), nullable=True),
@@ -291,7 +291,7 @@ def upgrade() -> None:
sa.ForeignKeyConstraint(['sample_id'], ['_basicsample.id'], name='cont_BCS_id', ondelete='SET NULL'),
sa.ForeignKeyConstraint(['submission_id'], ['_basicsubmission.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('_submissionequipmentassociation',
sa.Column('equipment_id', sa.INTEGER(), nullable=False),
@@ -24,7 +24,7 @@ def upgrade() -> None:
sa.PrimaryKeyConstraint('id')
)
# with op.batch_alter_table('_process', schema=None) as batch_op:
# batch_op.create_unique_constraint("process_uni", ['name'])
#
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.create_unique_constraint("subsamp_uni", ['id'])
@@ -40,17 +40,17 @@ def upgrade() -> None:
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
-with op.batch_alter_table('_wastewaterarticassociation', schema=None) as batch_op:
-batch_op.alter_column('source_plate',
-existing_type=sa.String(length=32),
-type_=sa.VARCHAR(length=16),
-existing_nullable=True)
-
-with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
-batch_op.drop_constraint("subsamp_uni", type_='unique')
-
-with op.batch_alter_table('_process', schema=None) as batch_op:
-batch_op.drop_constraint("process_uni", type_='unique')
+# with op.batch_alter_table('_wastewaterarticassociation', schema=None) as batch_op:
+# batch_op.alter_column('source_plate',
+# existing_type=sa.String(length=32),
+# type_=sa.VARCHAR(length=16),
+# existing_nullable=True)
+#
+# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
+# batch_op.drop_constraint("subsamp_uni", type_='unique')
+#
+# with op.batch_alter_table('_process', schema=None) as batch_op:
+# batch_op.drop_constraint("process_uni", type_='unique')

op.drop_table('_viralculture')
# ### end Alembic commands ###
@@ -13,7 +13,7 @@ from submissions import __version__, __copyright__, __author__
project = 'RSL Submissions'
copyright = __copyright__
author = f"{__author__['name']} - {__author__['email']}"
release = __version__

# -- General configuration ---------------------------------------------------
@@ -58,7 +58,7 @@ class ControlType(BaseClass):
Get subtypes associated with this controltype (currently used only for Kraken)

Args:
mode (str): analysis mode name

Returns:
List[str]: list of subtypes available
@@ -5,7 +5,7 @@ from __future__ import annotations
import datetime
import json
-from pprint import pprint
+from pprint import pprint, pformat

from sqlalchemy import Column, String, TIMESTAMP, JSON, INTEGER, ForeignKey, Interval, Table, FLOAT, BLOB
from sqlalchemy.orm import relationship, validates, Query
@@ -846,7 +846,7 @@ class SubmissionType(BaseClass):
pass
return cls.execute_query(query=query, limit=limit)

-def to_dict(self):
+def to_export_dict(self):
base_dict = dict(name=self.name)
base_dict['info'] = self.construct_info_map(mode='export')
base_dict['defaults'] = self.defaults
@@ -874,6 +874,7 @@ class SubmissionType(BaseClass):
return None
with open(filepath, "r") as f:
import_dict = json.load(fp=f)
+logger.debug(pformat(import_dict))
submission_type = cls.query(name=import_dict['name'])
if submission_type:
return submission_type
@@ -113,15 +113,6 @@ class BasicSubmission(BaseClass):
__mapper_args__ = {
"polymorphic_identity": "Basic Submission",
"polymorphic_on": submission_type_name,
-# "polymorphic_on": case(
-#
-# (submission_type_name == "Bacterial Culture", "Bacterial Culture"),
-# (submission_type_name == "Wastewater Artic", "Wastewater Artic"),
-# (submission_type_name == "Wastewater", "Wastewater"),
-# (submission_type_name == "Viral Culture", "Bacterial Culture"),
-#
-# else_="Basic Sample"
-# ),
"with_polymorphic": "*",
}
@@ -160,7 +151,7 @@ class BasicSubmission(BaseClass):
return output

@classmethod
-def get_default_info(cls, *args):
+def get_default_info(cls, *args, submission_type: SubmissionType | None = None):
# NOTE: Create defaults for all submission_types
parent_defs = super().get_default_info()
recover = ['filepath', 'samples', 'csv', 'comment', 'equipment']
@@ -185,7 +176,10 @@ class BasicSubmission(BaseClass):
continue
else:
output[k] = v
-st = cls.get_submission_type()
+if isinstance(submission_type, SubmissionType):
+st = submission_type
+else:
+st = cls.get_submission_type(submission_type)
if st is None:
logger.error("No default info for BasicSubmission.")
else:
@@ -205,18 +199,23 @@ class BasicSubmission(BaseClass):
return output

@classmethod
-def get_submission_type(cls) -> SubmissionType:
+def get_submission_type(cls, sub_type: str | SubmissionType | None = None) -> SubmissionType:
"""
Gets the SubmissionType associated with this class

Returns:
SubmissionType: SubmissionType with name equal to this polymorphic identity
"""
-name = cls.__mapper_args__['polymorphic_identity']
-return SubmissionType.query(name=name)
+match sub_type:
+case str():
+return SubmissionType.query(name=sub_type)
+case SubmissionType():
+return sub_type
+case _:
+return SubmissionType.query(cls.__mapper_args__['polymorphic_identity'])

@classmethod
-def construct_info_map(cls, mode: Literal["read", "write"]) -> dict:
+def construct_info_map(cls, submission_type:SubmissionType|None=None, mode: Literal["read", "write"]="read") -> dict:
"""
Method to call submission type's construct info map.
@@ -226,17 +225,17 @@ class BasicSubmission(BaseClass):
Returns:
dict: Map of info locations.
"""
-return cls.get_submission_type().construct_info_map(mode=mode)
+return cls.get_submission_type(submission_type).construct_info_map(mode=mode)

@classmethod
-def construct_sample_map(cls) -> dict:
+def construct_sample_map(cls, submission_type:SubmissionType|None=None) -> dict:
"""
Method to call submission type's construct_sample_map

Returns:
dict: sample location map
"""
-return cls.get_submission_type().construct_sample_map()
+return cls.get_submission_type(submission_type).construct_sample_map()

@classmethod
def finalize_details(cls, input_dict: dict) -> dict:
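An illustrative sketch (not code from this commit) of the dispatch that the new get_submission_type(sub_type=...) signature above relies on; SubTypeRecord and lookup_by_name are hypothetical stand-ins for SubmissionType and SubmissionType.query:

    from dataclasses import dataclass

    @dataclass
    class SubTypeRecord:                      # hypothetical stand-in for SubmissionType
        name: str
        defaults: dict

    _REGISTRY = {"Wastewater": SubTypeRecord("Wastewater", {"regex": r"RSL(?:-|_)?WW"})}

    def lookup_by_name(name):                 # hypothetical stand-in for SubmissionType.query
        return _REGISTRY.get(name)

    def get_submission_type(sub_type, fallback_identity="Wastewater"):
        # Accept a name, an already-resolved record, or None (fall back to the class identity).
        match sub_type:
            case str():
                return lookup_by_name(sub_type)
            case SubTypeRecord():
                return sub_type
            case _:
                return lookup_by_name(fallback_identity)

    print(get_submission_type("Wastewater").defaults["regex"])   # RSL(?:-|_)?WW
    print(get_submission_type(None).name)                        # Wastewater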
@@ -466,7 +465,7 @@ class BasicSubmission(BaseClass):
Returns:
pd.DataFrame: Pandas Dataframe of all relevant submissions
"""
-# logger.debug(f"Querying Type: {submission_type}")
+logger.debug(f"Querying Type: {submission_type}")
# logger.debug(f"Using limit: {limit}")
# NOTE: use lookup function to create list of dicts
subs = [item.to_dict() for item in
@@ -635,14 +634,13 @@ class BasicSubmission(BaseClass):
super().save()

@classmethod
-def get_regex(cls) -> str:
-"""
-Dummy for inheritence.
-
-Returns:
-str: Regex for submission type.
-"""
-return cls.construct_regex()
+def get_regex(cls, submission_type:SubmissionType|str|None=None):
+# logger.debug(f"Attempting to get regex for {cls.__mapper_args__['polymorphic_identity']}")
+logger.debug(f"Attempting to get regex for {submission_type}")
+try:
+return cls.get_submission_type(submission_type).defaults['regex']
+except AttributeError as e:
+raise AttributeError(f"Couldn't get submission type for {cls.__mapper_args__['polymorphic_identity']}")

# Polymorphic functions
@@ -654,7 +652,8 @@ class BasicSubmission(BaseClass):
Returns:
re.Pattern: Regular expression pattern to discriminate between submission types.
"""
-rstring = rf'{"|".join([item.get_regex() for item in cls.__subclasses__()])}'
+# rstring = rf'{"|".join([item.get_regex() for item in cls.__subclasses__()])}'
+rstring = rf'{"|".join([item.defaults["regex"] for item in SubmissionType.query()])}'
regex = re.compile(rstring, flags=re.IGNORECASE | re.VERBOSE)
return regex
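For context, a small sketch (not part of this commit) of why joining per-type patterns works: each pattern carries a named group, so the combined regex can report which type matched via match.lastgroup, which is how retrieve_submission_type picks a type further down. The two patterns below are shortened stand-ins for the full ones stored in SubmissionType.defaults['regex']:

    import re

    patterns = [
        r"(?P<Bacterial_Culture>RSL(?:-|_)?BC(?:-|_)?20\d{2}-?\d{2}-?\d{2})",
        r"(?P<Wastewater>RSL(?:-|_)?WW(?:-|_)?20\d{2}-?\d{2}-?\d{2})",
    ]
    regex = re.compile("|".join(patterns), flags=re.IGNORECASE | re.VERBOSE)

    m = regex.search("RSL-BC-2024-01-15_plate_map.xlsx")
    print(m.lastgroup)  # "Bacterial_Culture": the named group that matched names the type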
@@ -685,7 +684,7 @@ class BasicSubmission(BaseClass):
# item.__mapper_args__['polymorphic_identity'] == polymorphic_identity][0]
model = cls.__mapper__.polymorphic_map[polymorphic_identity].class_
except Exception as e:
-logger.error(f"Could not get polymorph {polymorphic_identity} of {cls} due to {e}")
+logger.error(f"Could not get polymorph {polymorphic_identity} of {cls} due to {e}, falling back to BasicSubmission")
case _:
pass
if attrs is None or len(attrs) == 0:
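An isolated sketch of the polymorphic_map lookup used in the hunk above (reduced stand-in models, not the project's actual ones): mapper.polymorphic_map maps each polymorphic identity string to its mapper, whose .class_ attribute is the mapped subclass.

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.orm import declarative_base, configure_mappers

    Base = declarative_base()

    class BasicSubmission(Base):
        __tablename__ = "_basicsubmission"
        id = Column(Integer, primary_key=True)
        submission_type_name = Column(String)
        __mapper_args__ = {"polymorphic_identity": "Basic Submission",
                           "polymorphic_on": submission_type_name}

    class Wastewater(BasicSubmission):
        __mapper_args__ = {"polymorphic_identity": "Wastewater"}

    configure_mappers()
    # Identity string -> mapper -> mapped class: the same lookup find_polymorphic_subclass performs.
    print(BasicSubmission.__mapper__.polymorphic_map["Wastewater"].class_)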
@@ -796,11 +795,13 @@ class BasicSubmission(BaseClass):
# logger.info(f"Hello from {cls.__mapper_args__['polymorphic_identity']} Enforcer!")
from backend.validators import RSLNamer
# logger.debug(f"instr coming into {cls}: {instr}")
-# logger.debug(f"data coming into {cls}: {data}")
-defaults = cls.get_default_info("abbreviation", "submission_type")
-data['abbreviation'] = defaults['abbreviation']
-if 'submission_type' not in data.keys() or data['submission_type'] in [None, ""]:
-data['submission_type'] = defaults['submission_type']
+logger.debug(f"data coming into {cls}: {data}")
+# defaults = cls.get_default_info("abbreviation", "submission_type")
+data['abbreviation'] = cls.get_default_info("abbreviation", submission_type=data['submission_type'])
+# logger.debug(f"Default info: {defaults}")
+# data['abbreviation'] = defaults['abbreviation']
+# if 'submission_type' not in data.keys() or data['submission_type'] in [None, ""]:
+# data['submission_type'] = defaults['submission_type']
if instr in [None, ""]:
# logger.debug("Sending to RSLNamer to make new plate name.")
outstr = RSLNamer.construct_new_plate_name(data=data)
@@ -829,9 +830,11 @@ class BasicSubmission(BaseClass):
except AttributeError as e:
repeat = ""
outstr = re.sub(r"(-\dR)\d?", rf"\1 {repeat}", outstr).replace(" ", "")
-abb = cls.get_default_info('abbreviation')
-outstr = re.sub(rf"RSL{abb}", rf"RSL-{abb}", outstr)
-return re.sub(rf"{abb}(\d)", rf"{abb}-\1", outstr)
+# abb = cls.get_default_info('abbreviation')
+# outstr = re.sub(rf"RSL{abb}", rf"RSL-{abb}", outstr)
+# return re.sub(rf"{abb}(\d)", rf"{abb}-\1", outstr)
+outstr = re.sub(rf"RSL{data['abbreviation']}", rf"RSL-{data['abbreviation']}", outstr)
+return re.sub(rf"{data['abbreviation']}(\d)", rf"{data['abbreviation']}-\1", outstr)

@classmethod
def parse_pcr(cls, xl: Workbook, rsl_plate_num: str) -> list:
@@ -1261,15 +1264,15 @@ class BacterialCulture(BasicSubmission):
output['controls'] = [item.to_sub_dict() for item in self.controls]
return output

-@classmethod
-def get_regex(cls) -> str:
-"""
-Retrieves string for regex construction.
-
-Returns:
-str: string for regex construction
-"""
-return "(?P<Bacterial_Culture>RSL(?:-|_)?BC(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)"
+# @classmethod
+# def get_regex(cls) -> str:
+# """
+# Retrieves string for regex construction.
+#
+# Returns:
+# str: string for regex construction
+# """
+# return "(?P<Bacterial_Culture>RSL(?:-|_)?BC(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)"

@classmethod
def filename_template(cls):
@@ -1339,47 +1342,47 @@ class BacterialCulture(BasicSubmission):
return input_dict


-class ViralCulture(BasicSubmission):
-
-id = Column(INTEGER, ForeignKey('_basicsubmission.id'), primary_key=True)
-__mapper_args__ = dict(polymorphic_identity="Viral Culture",
-polymorphic_load="inline",
-inherit_condition=(id == BasicSubmission.id))
-
-@classmethod
-def get_regex(cls) -> str:
-"""
-Retrieves string for regex construction.
-
-Returns:
-str: string for regex construction
-"""
-return "(?P<Viral_Culture>RSL(?:-|_)?VE(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)"
-
-@classmethod
-def custom_sample_autofill_row(cls, sample, worksheet: Worksheet) -> int:
-"""
-Extends parent
-"""
-# logger.debug(f"Checking {sample.well}")
-# logger.debug(f"here's the worksheet: {worksheet}")
-row = super().custom_sample_autofill_row(sample, worksheet)
-df = pd.DataFrame(list(worksheet.values))
-# logger.debug(f"Here's the dataframe: {df}")
-idx = df[df[0] == sample.well]
-if idx.empty:
-new = f"{sample.well[0]}{sample.well[1:].zfill(2)}"
-# logger.debug(f"Checking: {new}")
-idx = df[df[0] == new]
-# logger.debug(f"Here is the row: {idx}")
-row = idx.index.to_list()[0]
-return row + 1
-
-@classmethod
-def custom_info_parser(cls, input_dict: dict, xl: Workbook | None = None, custom_fields: dict = {}) -> dict:
-input_dict = super().custom_info_parser(input_dict=input_dict, xl=xl, custom_fields=custom_fields)
-logger.debug(f"\n\nInfo dictionary:\n\n{pformat(input_dict)}\n\n")
-return input_dict
+# class ViralCulture(BasicSubmission):
+#
+# id = Column(INTEGER, ForeignKey('_basicsubmission.id'), primary_key=True)
+# __mapper_args__ = dict(polymorphic_identity="Viral Culture",
+# polymorphic_load="inline",
+# inherit_condition=(id == BasicSubmission.id))
+#
+# # @classmethod
+# # def get_regex(cls) -> str:
+# # """
+# # Retrieves string for regex construction.
+# #
+# # Returns:
+# # str: string for regex construction
+# # """
+# # return "(?P<Viral_Culture>RSL(?:-|_)?VE(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)"
+#
+# @classmethod
+# def custom_sample_autofill_row(cls, sample, worksheet: Worksheet) -> int:
+# """
+# Extends parent
+# """
+# # logger.debug(f"Checking {sample.well}")
+# # logger.debug(f"here's the worksheet: {worksheet}")
+# row = super().custom_sample_autofill_row(sample, worksheet)
+# df = pd.DataFrame(list(worksheet.values))
+# # logger.debug(f"Here's the dataframe: {df}")
+# idx = df[df[0] == sample.well]
+# if idx.empty:
+# new = f"{sample.well[0]}{sample.well[1:].zfill(2)}"
+# # logger.debug(f"Checking: {new}")
+# idx = df[df[0] == new]
+# # logger.debug(f"Here is the row: {idx}")
+# row = idx.index.to_list()[0]
+# return row + 1
+#
+# @classmethod
+# def custom_info_parser(cls, input_dict: dict, xl: Workbook | None = None, custom_fields: dict = {}) -> dict:
+# input_dict = super().custom_info_parser(input_dict=input_dict, xl=xl, custom_fields=custom_fields)
+# logger.debug(f"\n\nInfo dictionary:\n\n{pformat(input_dict)}\n\n")
+# return input_dict


class Wastewater(BasicSubmission):
@@ -1499,15 +1502,15 @@ class Wastewater(BasicSubmission):
outstr = super().enforce_name(instr=instr, data=data)
return outstr

-@classmethod
-def get_regex(cls) -> str:
-"""
-Retrieves string for regex construction
-
-Returns:
-str: String for regex construction
-"""
-return "(?P<Wastewater>RSL(?:-|_)?WW(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)"
+# @classmethod
+# def get_regex(cls) -> str:
+# """
+# Retrieves string for regex construction
+#
+# Returns:
+# str: String for regex construction
+# """
+# return "(?P<Wastewater>RSL(?:-|_)?WW(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)"

@classmethod
def adjust_autofill_samples(cls, samples: List[Any]) -> List[Any]:
@@ -1919,15 +1922,15 @@ class WastewaterArtic(BasicSubmission):
# logger.debug(f"Final EN name: {final_en_name}")
return final_en_name

-@classmethod
-def get_regex(cls) -> str:
-"""
-Retrieves string for regex construction
-
-Returns:
-str: string for regex construction.
-"""
-return "(?P<Wastewater_Artic>(\\d{4}-\\d{2}-\\d{2}(?:-|_)(?:\\d_)?artic)|(RSL(?:-|_)?AR(?:-|_)?20\\d{2}-?\\d{2}-?\\d{2}(?:(_|-)\\d?(\\D|$)R?\\d?)?))"
+# @classmethod
+# def get_regex(cls) -> str:
+# """
+# Retrieves string for regex construction
+#
+# Returns:
+# str: string for regex construction.
+# """
+# return "(?P<Wastewater_Artic>(\\d{4}-\\d{2}-\\d{2}(?:-|_)(?:\\d_)?artic)|(RSL(?:-|_)?AR(?:-|_)?20\\d{2}-?\\d{2}-?\\d{2}(?:(_|-)\\d?(\\D|$)R?\\d?)?))"

@classmethod
def finalize_parse(cls, input_dict: dict, xl: pd.ExcelFile | None = None, info_map: dict | None = None) -> dict:
@@ -76,6 +76,7 @@ class SheetParser(object):
# NOTE: Rescue submission type using scraped values to be used in Sample, Reagents, etc.
if v not in [None, "None", "", " "]:
self.submission_type = SubmissionType.query(name=v)
+logger.debug(f"Updated self.submission_type to {self.submission_type}")
case _:
self.sub[k] = v
@@ -202,7 +203,7 @@ class InfoParser(object):
"""
self.submission_type = dict(value=self.submission_type_obj.name, missing=True)
# logger.debug(f"Looking up submission type: {self.submission_type['value']}")
-info_map = self.sub_object.construct_info_map("read")
+info_map = self.sub_object.construct_info_map(submission_type=self.submission_type_obj, mode="read")
# NOTE: Get the parse_info method from the submission type specified
return info_map
@@ -385,10 +386,12 @@ class SampleParser(object):
self.xl = xl
if isinstance(submission_type, str):
submission_type = SubmissionType.query(name=submission_type)
+logger.debug(f"Sample parser is using submission type: {submission_type}")
self.submission_type = submission_type.name
self.submission_type_obj = submission_type
if sub_object is None:
-sub_object = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type_obj.name)
+logger.warning(f"Sample parser attempting to fetch submission class with polymorphic identity: {self.submission_type}")
+sub_object = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type)
self.sub_object = sub_object
self.sample_info_map = self.fetch_sample_info_map(submission_type=submission_type, sample_map=sample_map)
# logger.debug(f"sample_info_map: {self.sample_info_map}")
@@ -406,12 +409,12 @@ class SampleParser(object):
dict: Info locations.
"""
# logger.debug(f"Looking up submission type: {submission_type}")
-self.sample_type = self.sub_object.get_default_info("sample_type")
+self.sample_type = self.sub_object.get_default_info("sample_type", submission_type=submission_type)
self.samp_object = BasicSample.find_polymorphic_subclass(polymorphic_identity=self.sample_type)
# logger.debug(f"Got sample class: {self.samp_object.__name__}")
# logger.debug(f"info_map: {pformat(se)}")
if sample_map is None:
-sample_info_map = self.sub_object.construct_sample_map()
+sample_info_map = self.sub_object.construct_sample_map(submission_type=self.submission_type_obj)
else:
sample_info_map = sample_map
return sample_info_map
@@ -26,11 +26,11 @@ class RSLNamer(object):
if self.submission_type is None:
# logger.debug("Creating submission type because none exists")
self.submission_type = self.retrieve_submission_type(filename=filename)
-# logger.debug(f"got submission type: {self.submission_type}")
+logger.debug(f"got submission type: {self.submission_type}")
if self.submission_type is not None:
# logger.debug("Retrieving BasicSubmission subclass")
self.sub_object = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type)
-self.parsed_name = self.retrieve_rsl_number(filename=filename, regex=self.sub_object.get_regex())
+self.parsed_name = self.retrieve_rsl_number(filename=filename, regex=self.sub_object.get_regex(submission_type=sub_type))
if data is None:
data = dict(submission_type=self.submission_type)
if "submission_type" not in data.keys():
@@ -50,7 +50,7 @@ class RSLNamer(object):
"""
match filename:
case Path():
-# logger.debug(f"Using path method for {filename}.")
+logger.debug(f"Using path method for {filename}.")
if filename.exists():
wb = load_workbook(filename)
try:
@@ -70,12 +70,14 @@ class RSLNamer(object):
submission_type = cls.retrieve_submission_type(filename=filename.stem.__str__())
case str():
regex = BasicSubmission.construct_regex()
-# logger.debug(f"Using string method for {filename}.")
+logger.debug(f"Using string method for {filename}.")
+logger.debug(f"Using regex: {regex}")
m = regex.search(filename)
try:
submission_type = m.lastgroup
+logger.debug(f"Got submission type: {submission_type}")
except AttributeError as e:
-logger.critical(f"No RSL plate number found or submission type found!: {e}")
+logger.critical(f"No submission type found!: {e}")
case _:
submission_type = None
try:
@@ -112,7 +114,7 @@ class RSLNamer(object):
regex = re.compile(rf'{regex}', re.IGNORECASE | re.VERBOSE)
except re.error as e:
regex = BasicSubmission.construct_regex()
-# logger.debug(f"Using regex: {regex}")
+logger.debug(f"Using regex: {regex}")
match filename:
case Path():
m = regex.search(filename.stem)
@@ -509,7 +509,7 @@ class PydSubmission(BaseModel, extra='allow'):
if check_not_nan(value['value']):
return value
else:
# logger.debug("Constructing plate name.")
if "pytest" in sys.modules and sub_type.replace(" ", "") == "BasicSubmission":
output = "RSL-BS-Test001"
else:
@@ -642,7 +642,7 @@ class PydSubmission(BaseModel, extra='allow'):
# this could also be done with default_factory
self.submission_object = BasicSubmission.find_polymorphic_subclass(
polymorphic_identity=self.submission_type['value'])
-self.namer = RSLNamer(self.rsl_plate_num['value'])
+self.namer = RSLNamer(self.rsl_plate_num['value'], sub_type=self.submission_type['value'])

def set_attribute(self, key: str, value):
"""
@@ -170,7 +170,7 @@ class ControlsViewer(QWidget):
Args:
ctx (dict): settings passed from gui
input_df (list[dict]): list of dictionaries containing records
subtype (str | None, optional): name of submission type. Defaults to None.

Returns:
DataFrame: dataframe of controls
@@ -185,7 +185,7 @@ class SubmissionFormWidget(QWidget):
self.pyd = submission
self.missing_info = []
st = SubmissionType.query(name=self.pyd.submission_type['value']).get_submission_class()
-defaults = st.get_default_info("form_recover", "form_ignore")
+defaults = st.get_default_info("form_recover", "form_ignore", submission_type=self.pyd.submission_type['value'])
self.recover = defaults['form_recover']
self.ignore = defaults['form_ignore']
# logger.debug(f"Attempting to extend ignore list with {self.pyd.submission_type['value']}")
@@ -554,6 +554,8 @@ class SubmissionFormWidget(QWidget):
add_widget.addItems(cats)
add_widget.setToolTip("Enter submission category or select from list.")
case _:
+# if key in sub_obj.get_default_info("form_ignore", submission_type=submission_type):
+# return None
if key in sub_obj.timestamps():
add_widget = MyQDateEdit(calendarPopup=True, scrollWidget=parent)
# NOTE: sets submitted date based on date found in excel sheet