Added the ability to overwrite existing submissions.
@@ -55,7 +55,7 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne
 # are written from script.py.mako
 # output_encoding = utf-8

-sqlalchemy.url = sqlite:///submissions.db
+sqlalchemy.url = sqlite:///L:\Robotics Laboratory Support\Submissions\submissions.db


 [post_write_hooks]
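The engine URL now points at a network share instead of a file beside the code. For reference, a minimal sketch of opening that same database from Python; the path comes from the new line above, while create_engine/sessionmaker are plain SQLAlchemy and not something this commit adds:

# Sketch: connect to the relocated SQLite database (same URL as the new alembic.ini entry).
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine(r"sqlite:///L:\Robotics Laboratory Support\Submissions\submissions.db")
Session = sessionmaker(bind=engine)
session = Session()  # the sort of object the rest of the diff passes around as ctx['database_session']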
@@ -1,8 +1,8 @@
 """initial commit

-Revision ID: 03da9270e51f
+Revision ID: 8753ed70f148
 Revises:
-Create Date: 2023-01-19 09:01:03.022482
+Create Date: 2023-01-26 12:48:42.340619

 """
 from alembic import op
@@ -10,7 +10,7 @@ import sqlalchemy as sa


 # revision identifiers, used by Alembic.
-revision = '03da9270e51f'
+revision = '8753ed70f148'
 down_revision = None
 branch_labels = None
 depends_on = None
@@ -50,18 +50,6 @@ def upgrade() -> None:
 sa.ForeignKeyConstraint(['kit_id'], ['_kits.id'], name='fk_RT_kits_id', ondelete='SET NULL', use_alter=True),
 sa.PrimaryKeyConstraint('id')
 )
-op.create_table('_control_samples',
-sa.Column('id', sa.INTEGER(), nullable=False),
-sa.Column('parent_id', sa.String(), nullable=True),
-sa.Column('name', sa.String(length=255), nullable=True),
-sa.Column('submitted_date', sa.TIMESTAMP(), nullable=True),
-sa.Column('contains', sa.JSON(), nullable=True),
-sa.Column('matches', sa.JSON(), nullable=True),
-sa.Column('kraken', sa.JSON(), nullable=True),
-sa.ForeignKeyConstraint(['parent_id'], ['_control_types.id'], name='fk_control_parent_id'),
-sa.PrimaryKeyConstraint('id'),
-sa.UniqueConstraint('name')
-)
 op.create_table('_organizations',
 sa.Column('id', sa.INTEGER(), nullable=False),
 sa.Column('name', sa.String(length=64), nullable=True),
@@ -102,8 +90,6 @@ def upgrade() -> None:
 sa.Column('submission_type', sa.String(length=32), nullable=True),
 sa.Column('technician', sa.String(length=64), nullable=True),
 sa.Column('reagents_id', sa.String(), nullable=True),
-sa.Column('control_id', sa.INTEGER(), nullable=True),
-sa.ForeignKeyConstraint(['control_id'], ['_control_samples.id'], name='fk_BC_control_id', ondelete='SET NULL'),
 sa.ForeignKeyConstraint(['extraction_kit_id'], ['_kits.id'], name='fk_BS_extkit_id', ondelete='SET NULL'),
 sa.ForeignKeyConstraint(['reagents_id'], ['_reagents.id'], name='fk_BS_reagents_id', ondelete='SET NULL'),
 sa.ForeignKeyConstraint(['submitting_lab_id'], ['_organizations.id'], name='fk_BS_sublab_id', ondelete='SET NULL'),
@@ -121,6 +107,20 @@ def upgrade() -> None:
 sa.ForeignKeyConstraint(['rsl_plate_id'], ['_submissions.id'], name='fk_BCS_sample_id', ondelete='SET NULL'),
 sa.PrimaryKeyConstraint('id')
 )
+op.create_table('_control_samples',
+sa.Column('id', sa.INTEGER(), nullable=False),
+sa.Column('parent_id', sa.String(), nullable=True),
+sa.Column('name', sa.String(length=255), nullable=True),
+sa.Column('submitted_date', sa.TIMESTAMP(), nullable=True),
+sa.Column('contains', sa.JSON(), nullable=True),
+sa.Column('matches', sa.JSON(), nullable=True),
+sa.Column('kraken', sa.JSON(), nullable=True),
+sa.Column('submission_id', sa.INTEGER(), nullable=True),
+sa.ForeignKeyConstraint(['parent_id'], ['_control_types.id'], name='fk_control_parent_id'),
+sa.ForeignKeyConstraint(['submission_id'], ['_submissions.id'], ),
+sa.PrimaryKeyConstraint('id'),
+sa.UniqueConstraint('name')
+)
 op.create_table('_reagents_submissions',
 sa.Column('reagent_id', sa.INTEGER(), nullable=True),
 sa.Column('submission_id', sa.INTEGER(), nullable=True),
@@ -153,13 +153,13 @@ def downgrade() -> None:
 # ### commands auto generated by Alembic - please adjust! ###
 op.drop_table('_ww_samples')
 op.drop_table('_reagents_submissions')
+op.drop_table('_control_samples')
 op.drop_table('_bc_samples')
 op.drop_table('_submissions')
 op.drop_table('_orgs_contacts')
 op.drop_table('_reagentstypes_kittypes')
 op.drop_table('_reagents')
 op.drop_table('_organizations')
-op.drop_table('_control_samples')
 op.drop_table('_reagent_types')
 op.drop_table('_kits')
 op.drop_table('_control_types')
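Both the revision id and the table layout changed, so this file reads like a freshly autogenerated initial migration: _control_samples is now created after _submissions (its new submission_id column references that table) and dropped before it. As a reminder of the usual workflow, a hedged sketch of driving Alembic from Python to regenerate and apply such a revision; the config filename and message are assumptions, not taken from this repository:

# Sketch: regenerate and apply an Alembic revision programmatically.
# "alembic.ini" and the message are placeholders for whatever this project actually uses.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
command.revision(cfg, message="initial commit", autogenerate=True)  # rebuild the migration from the models
command.upgrade(cfg, "head")                                        # apply it to the configured sqlalchemy.url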
@@ -1,7 +1,9 @@
 from . import models
 import pandas as pd
-# from sqlite3 import IntegrityError
-from sqlalchemy.exc import IntegrityError
+import sqlalchemy.exc
+import sqlite3
+# from sqlalchemy.exc import IntegrityError, OperationalError
+# from sqlite3 import IntegrityError, OperationalError
 import logging
 from datetime import date, datetime
 from sqlalchemy import and_
@@ -19,48 +21,69 @@ def get_kits_by_use( ctx:dict, kittype_str:str|None) -> list:


 def store_submission(ctx:dict, base_submission:models.BasicSubmission) -> None:
+logger.debug(f"Hello from store_submission")
 for sample in base_submission.samples:
 sample.rsl_plate = base_submission
+logger.debug(f"Attempting to add sample: {sample.to_string()}")
 try:
 ctx['database_session'].add(sample)
-except IntegrityError:
+except (sqlite3.IntegrityError, sqlalchemy.exc.IntegrityError) as e:
+logger.debug(f"Hit an integrity error : {e}")
 continue
 ctx['database_session'].add(base_submission)
+logger.debug(f"Attempting to add submission: {base_submission.rsl_plate_num}")
 try:
 ctx['database_session'].commit()
-except IntegrityError:
+except (sqlite3.IntegrityError, sqlalchemy.exc.IntegrityError) as e:
+logger.debug(f"Hit an integrity error : {e}")
 ctx['database_session'].rollback()
 return {"message":"This plate number already exists, so we can't add it."}
+except (sqlite3.OperationalError, sqlalchemy.exc.IntegrityError) as e:
+logger.debug(f"Hit an operational error: {e}")
+ctx['database_session'].rollback()
+return {"message":"The database is locked for editing."}
 return None


 def store_reagent(ctx:dict, reagent:models.Reagent) -> None:
 logger.debug(reagent.__dict__)
 ctx['database_session'].add(reagent)
+try:
 ctx['database_session'].commit()
+except OperationalError:
+return {"message":"The database is locked for editing."}


 def construct_submission_info(ctx:dict, info_dict:dict) -> models.BasicSubmission:
 query = info_dict['submission_type'].replace(" ", "")
+instance = ctx['database_session'].query(models.BasicSubmission).filter(models.BasicSubmission.rsl_plate_num==info_dict['rsl_plate_num']).first()
+msg = "This submission already exists.\nWould you like to overwrite?"
 model = getattr(models, query)
 info_dict['submission_type'] = info_dict['submission_type'].replace(" ", "_").lower()
+if instance == None:
 instance = model()
+msg = None
 for item in info_dict:
 logger.debug(f"Setting {item} to {info_dict[item]}")
 match item:
 case "extraction_kit":
 q_str = info_dict[item]
 logger.debug(f"Looking up kit {q_str}")
+try:
 field_value = lookup_kittype_by_name(ctx=ctx, name=q_str)
+except (sqlite3.IntegrityError, sqlalchemy.exc.IntegrityError) as e:
+logger.error(f"Hit an integrity error: {e}")
 logger.debug(f"Got {field_value} for kit {q_str}")
 case "submitting_lab":
 q_str = info_dict[item].replace(" ", "_").lower()
-logger.debug(f"looking up organization: {q_str}")
+logger.debug(f"Looking up organization: {q_str}")
 field_value = lookup_org_by_name(ctx=ctx, name=q_str)
 logger.debug(f"Got {field_value} for organization {q_str}")
 case "submitter_plate_num":
 # Because of unique constraint, the submitter plate number cannot be None, so...
-if info_dict[item] == None:
+logger.debug(f"Submitter plate id: {info_dict[item]}")
+if info_dict[item] == None or info_dict[item] == "None":
+logger.debug(f"Got None as a submitter plate number, inserting random string to preserve database unique constraint.")
 info_dict[item] = uuid.uuid4().hex.upper()
 field_value = info_dict[item]
 # case "samples":
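For readability (indentation is lost in the rendering above), a small self-contained sketch of the commit/rollback pattern store_submission now follows, catching both the sqlite3 and the SQLAlchemy flavours of each error; session is a stand-in for ctx['database_session'], and the second handler here pairs the two OperationalError types for the locked-database case:

# Sketch of the dual exception handling around session.commit() used above.
# 'session' stands in for ctx['database_session'].
import logging
import sqlite3
import sqlalchemy.exc

logger = logging.getLogger(__name__)

def commit_submission(session) -> dict | None:
    try:
        session.commit()
    except (sqlite3.IntegrityError, sqlalchemy.exc.IntegrityError) as e:
        # duplicate rsl_plate_num against the unique constraint
        logger.debug(f"Hit an integrity error: {e}")
        session.rollback()
        return {"message": "This plate number already exists, so we can't add it."}
    except (sqlite3.OperationalError, sqlalchemy.exc.OperationalError) as e:
        # database file locked by another writer
        logger.debug(f"Hit an operational error: {e}")
        session.rollback()
        return {"message": "The database is locked for editing."}
    return None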
@@ -75,7 +98,9 @@ def construct_submission_info(ctx:dict, info_dict:dict) -> models.BasicSubmissio
 logger.debug(f"Could not set attribute: {item} to {info_dict[item]}")
 continue
 # logger.debug(instance.__dict__)
-return instance
+logger.debug(f"Constructed instance: {instance.to_string()}")
+logger.debug(msg)
+return instance, {'message':msg}
 # looked_up = []
 # for reagent in reagents:
 # my_reagent = lookup_reagent(reagent)
@@ -235,7 +260,11 @@ def lookup_all_sample_types(ctx:dict) -> list[str]:

 def get_all_available_modes(ctx:dict) -> list[str]:
 rel = ctx['database_session'].query(models.Control).first()
+try:
 cols = [item.name for item in list(rel.__table__.columns) if isinstance(item.type, JSON)]
+except AttributeError as e:
+logger.debug(f"Failed to get available modes from db: {e}")
+cols = []
 return cols

@@ -32,5 +32,6 @@ class Control(Base):
 matches = Column(JSON) #: unstructured hashes in matches.tsv for each organism
 kraken = Column(JSON) #: unstructured output from kraken_report
 # UniqueConstraint('name', name='uq_control_name')
-submissions = relationship("BacterialCulture", back_populates="control")
+submission_id = Column(INTEGER, ForeignKey("_submissions.id"))
+submission = relationship("BacterialCulture", back_populates="controls", foreign_keys=[submission_id])

@@ -30,6 +30,9 @@ class BasicSubmission(Base):
 "with_polymorphic": "*",
 }

+def to_string(self):
+return f"{self.rsl_plate_num} - {self.submitter_plate_num}"
+
 def to_dict(self):
 try:
 sub_lab = self.submitting_lab.name
@@ -90,8 +93,8 @@ class BasicSubmission(Base):
 # Below are the custom submission

 class BacterialCulture(BasicSubmission):
-control = relationship("Control", back_populates="submissions") #: A control sample added to submission
-control_id = Column(INTEGER, ForeignKey("_control_samples.id", ondelete="SET NULL", name="fk_BC_control_id"))
+# control_id = Column(INTEGER, ForeignKey("_control_samples.id", ondelete="SET NULL", name="fk_BC_control_id"))
+controls = relationship("Control", back_populates="submission", uselist=True) #: A control sample added to submission
 samples = relationship("BCSample", back_populates="rsl_plate", uselist=True)
 # bc_sample_id = Column(INTEGER, ForeignKey("_bc_samples.id", ondelete="SET NULL", name="fk_BC_sample_id"))
 __mapper_args__ = {"polymorphic_identity": "bacterial_culture", "polymorphic_load": "inline"}
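Taken together, the two model hunks flip the Control/BacterialCulture link: the foreign key moves from the submission (control_id) to the control (submission_id), so one submission can now hold many controls. A stripped-down sketch of that back_populates pairing, using placeholder class and table names rather than this project's declarative base:

# Minimal one-to-many sketch mirroring the new Control <-> BacterialCulture wiring.
# Class and table names are illustrative placeholders, not the project's real models.
from sqlalchemy import Column, Integer, ForeignKey
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()

class Submission(Base):
    __tablename__ = "submissions"
    id = Column(Integer, primary_key=True)
    # one submission owns many controls
    controls = relationship("Control", back_populates="submission", uselist=True)

class Control(Base):
    __tablename__ = "controls"
    id = Column(Integer, primary_key=True)
    # the foreign key now lives on the control side
    submission_id = Column(Integer, ForeignKey("submissions.id"))
    submission = relationship("Submission", back_populates="controls", foreign_keys=[submission_id])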
@@ -45,13 +45,14 @@ class SheetParser(object):


 def _parse_generic(self, sheet_name:str):
-submission_info = self.xl.parse(sheet_name=sheet_name)
-self.sub['submitter_plate_num'] = submission_info.iloc[0][1]
-self.sub['rsl_plate_num'] = str(submission_info.iloc[10][1])
-self.sub['submitted_date'] = submission_info.iloc[1][1].date()#.strftime("%Y-%m-%d")
-self.sub['submitting_lab'] = submission_info.iloc[0][3]
-self.sub['sample_count'] = str(submission_info.iloc[2][3])
-self.sub['extraction_kit'] = submission_info.iloc[3][3]
+submission_info = self.xl.parse(sheet_name=sheet_name, dtype=object)
+self.sub['submitter_plate_num'] = submission_info.iloc[0][1] #if pd.isnull(submission_info.iloc[0][1]) else string_formatter(submission_info.iloc[0][1])
+self.sub['rsl_plate_num'] = submission_info.iloc[10][1] #if pd.isnull(submission_info.iloc[10][1]) else string_formatter(submission_info.iloc[10][1])
+self.sub['submitted_date'] = submission_info.iloc[1][1] #if pd.isnull(submission_info.iloc[1][1]) else submission_info.iloc[1][1].date()#.strftime("%Y-%m-%d")
+self.sub['submitting_lab'] = submission_info.iloc[0][3] #if pd.isnull(submission_info.iloc[0][3]) else string_formatter(submission_info.iloc[0][3])
+self.sub['sample_count'] = submission_info.iloc[2][3] #if pd.isnull(submission_info.iloc[2][3]) else string_formatter(submission_info.iloc[2][3])
+self.sub['extraction_kit'] = submission_info.iloc[3][3] #if #pd.isnull(submission_info.iloc[3][3]) else string_formatter(submission_info.iloc[3][3])

 return submission_info

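The dtype=object argument keeps pandas from coercing mixed spreadsheet cells (lot numbers, dates, blanks) into floats, so the raw values survive until they are formatted. A small illustration of the difference, using a hypothetical throwaway workbook rather than the real submission sheet:

# Illustration only: why dtype=object matters when cells mix numbers and text.
import pandas as pd

xl = pd.ExcelFile("example_submission.xlsx")  # hypothetical workbook, not part of this repo
coerced = xl.parse(sheet_name=0)              # numeric-looking lots become floats (e.g. 12345.0)
raw = xl.parse(sheet_name=0, dtype=object)    # values kept as entered in the sheet

cell = raw.iloc[0][1]
if pd.isnull(cell):                           # blank cells still need a guard before formatting
    cell = None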
@@ -67,16 +68,17 @@ class SheetParser(object):
 tech = ", ".join(tech_reg.findall(tech))
 self.sub['technician'] = tech
 # reagents
-self.sub['lot_wash_1'] = submission_info.iloc[1][6]
-self.sub['lot_wash_2'] = submission_info.iloc[2][6]
-self.sub['lot_binding_buffer'] = submission_info.iloc[3][6]
-self.sub['lot_magnetic_beads'] = submission_info.iloc[4][6]
-self.sub['lot_lysis_buffer'] = submission_info.iloc[5][6]
-self.sub['lot_elution_buffer'] = submission_info.iloc[6][6]
-self.sub['lot_isopropanol'] = submission_info.iloc[9][6]
-self.sub['lot_ethanol'] = submission_info.iloc[10][6]
-self.sub['lot_positive_control'] = submission_info.iloc[103][1]
-self.sub['lot_plate'] = submission_info.iloc[12][6]
+self.sub['lot_wash_1'] = submission_info.iloc[1][6] #if pd.isnull(submission_info.iloc[1][6]) else string_formatter(submission_info.iloc[1][6])
+self.sub['lot_wash_2'] = submission_info.iloc[2][6] #if pd.isnull(submission_info.iloc[2][6]) else string_formatter(submission_info.iloc[2][6])
+self.sub['lot_binding_buffer'] = submission_info.iloc[3][6] #if pd.isnull(submission_info.iloc[3][6]) else string_formatter(submission_info.iloc[3][6])
+self.sub['lot_magnetic_beads'] = submission_info.iloc[4][6] #if pd.isnull(submission_info.iloc[4][6]) else string_formatter(submission_info.iloc[4][6])
+self.sub['lot_lysis_buffer'] = submission_info.iloc[5][6] #if np.nan(submission_info.iloc[5][6]) else string_formatter(submission_info.iloc[5][6])
+self.sub['lot_elution_buffer'] = submission_info.iloc[6][6] #if pd.isnull(submission_info.iloc[6][6]) else string_formatter(submission_info.iloc[6][6])
+self.sub['lot_isopropanol'] = submission_info.iloc[9][6] #if pd.isnull(submission_info.iloc[9][6]) else string_formatter(submission_info.iloc[9][6])
+self.sub['lot_ethanol'] = submission_info.iloc[10][6] #if pd.isnull(submission_info.iloc[10][6]) else string_formatter(submission_info.iloc[10][6])
+self.sub['lot_positive_control'] = submission_info.iloc[103][1] #if pd.isnull(submission_info.iloc[103][1]) else string_formatter(submission_info.iloc[103][1])
+self.sub['lot_plate'] = submission_info.iloc[12][6] #if pd.isnull(submission_info.iloc[12][6]) else string_formatter(submission_info.iloc[12][6])
 sample_parser = SampleParser(submission_info.iloc[15:111])
 sample_parse = getattr(sample_parser, f"parse_{self.sub['submission_type'].lower()}_samples")
 logger.debug(f"Parser result: {self.sub}")
@@ -86,25 +88,26 @@ class SheetParser(object):
 def _parse_wastewater(self):
 # submission_info = self.xl.parse("WW Submissions (ENTER HERE)")
 submission_info = self._parse_generic("WW Submissions (ENTER HERE)")
-enrichment_info = self.xl.parse("Enrichment Worksheet")
-extraction_info = self.xl.parse("Extraction Worksheet")
-qprc_info = self.xl.parse("qPCR Worksheet")
+enrichment_info = self.xl.parse("Enrichment Worksheet", dtype=object)
+extraction_info = self.xl.parse("Extraction Worksheet", dtype=object)
+qprc_info = self.xl.parse("qPCR Worksheet", dtype=object)
 self.sub['technician'] = f"Enr: {enrichment_info.columns[2]}, Ext: {extraction_info.columns[2]}, PCR: {qprc_info.columns[2]}"
 # reagents
-self.sub['lot_lysis_buffer'] = enrichment_info.iloc[0][14]
-self.sub['lot_proteinase_K'] = enrichment_info.iloc[1][14]
-self.sub['lot_magnetic_virus_particles'] = enrichment_info.iloc[2][14]
-self.sub['lot_enrichment_reagent_1'] = enrichment_info.iloc[3][14]
-self.sub['lot_binding_buffer'] = extraction_info.iloc[0][14]
-self.sub['lot_magnetic_beads'] = extraction_info.iloc[1][14]
-self.sub['lot_wash'] = extraction_info.iloc[2][14]
-self.sub['lot_ethanol'] = extraction_info.iloc[3][14]
-self.sub['lot_elution_buffer'] = extraction_info.iloc[4][14]
-self.sub['lot_master_mix'] = qprc_info.iloc[0][14]
-self.sub['lot_pre_mix_1'] = qprc_info.iloc[1][14]
-self.sub['lot_pre_mix_2'] = qprc_info.iloc[2][14]
-self.sub['lot_positive_control'] = qprc_info.iloc[3][14]
-self.sub['lot_ddh2o'] = qprc_info.iloc[4][14]
+logger.debug(qprc_info)
+self.sub['lot_lysis_buffer'] = enrichment_info.iloc[0][14] #if pd.isnull(enrichment_info.iloc[0][14]) else string_formatter(enrichment_info.iloc[0][14])
+self.sub['lot_proteinase_K'] = enrichment_info.iloc[1][14] #if pd.isnull(enrichment_info.iloc[1][14]) else string_formatter(enrichment_info.iloc[1][14])
+self.sub['lot_magnetic_virus_particles'] = enrichment_info.iloc[2][14] #if pd.isnull(enrichment_info.iloc[2][14]) else string_formatter(enrichment_info.iloc[2][14])
+self.sub['lot_enrichment_reagent_1'] = enrichment_info.iloc[3][14] #if pd.isnull(enrichment_info.iloc[3][14]) else string_formatter(enrichment_info.iloc[3][14])
+self.sub['lot_binding_buffer'] = extraction_info.iloc[0][14] #if pd.isnull(extraction_info.iloc[0][14]) else string_formatter(extraction_info.iloc[0][14])
+self.sub['lot_magnetic_beads'] = extraction_info.iloc[1][14] #if pd.isnull(extraction_info.iloc[1][14]) else string_formatter(extraction_info.iloc[1][14])
+self.sub['lot_wash'] = extraction_info.iloc[2][14] #if pd.isnull(extraction_info.iloc[2][14]) else string_formatter(extraction_info.iloc[2][14])
+self.sub['lot_ethanol'] = extraction_info.iloc[3][14] #if pd.isnull(extraction_info.iloc[3][14]) else string_formatter(extraction_info.iloc[3][14])
+self.sub['lot_elution_buffer'] = extraction_info.iloc[4][14] #if pd.isnull(extraction_info.iloc[4][14]) else string_formatter(extraction_info.iloc[4][14])
+self.sub['lot_master_mix'] = qprc_info.iloc[0][14] #if pd.isnull(qprc_info.iloc[0][14]) else string_formatter(qprc_info.iloc[0][14])
+self.sub['lot_pre_mix_1'] = qprc_info.iloc[1][14] #if pd.isnull(qprc_info.iloc[1][14]) else string_formatter(qprc_info.iloc[1][14])
+self.sub['lot_pre_mix_2'] = qprc_info.iloc[2][14] #if pd.isnull(qprc_info.iloc[2][14]) else string_formatter(qprc_info.iloc[2][14])
+self.sub['lot_positive_control'] = qprc_info.iloc[3][14] #if pd.isnull(qprc_info.iloc[3][14]) else string_formatter(qprc_info.iloc[3][14])
+self.sub['lot_ddh2o'] = qprc_info.iloc[4][14] #if pd.isnull(qprc_info.iloc[4][14]) else string_formatter(qprc_info.iloc[4][14])
 sample_parser = SampleParser(submission_info.iloc[16:40])
 sample_parse = getattr(sample_parser, f"parse_{self.sub['submission_type'].lower()}_samples")
 self.sub['samples'] = sample_parse()
@@ -165,3 +168,12 @@ class SampleParser(object):
 new_list.append(new)
 return new_list

+
+def string_formatter(input):
+logger.debug(f"{input} : {type(input)}")
+match input:
+case int() | float() | np.float64:
+return "{:0.0f}".format(input)
+case _:
+return input
+
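string_formatter is how the parser turns float-coerced lot numbers (e.g. 12345.0) back into plain strings. A runnable sketch of the same idea follows; note that in a match statement a bare dotted name such as np.float64 is a value pattern (it compares against the class object itself), so the instance check is spelled here with the class-pattern form np.float64():

# Sketch of the lot-number formatter; behaviour matches the intent of the
# string_formatter added above, with class patterns spelled int()/float()/np.float64().
import logging
import numpy as np

logger = logging.getLogger(__name__)

def string_formatter(value):
    logger.debug(f"{value} : {type(value)}")
    match value:
        case int() | float() | np.float64():
            return "{:0.0f}".format(value)   # 12345.0 -> "12345"
        case _:
            return value                     # strings, None, dates pass through unchanged

assert string_formatter(12345.0) == "12345"
assert string_formatter("ABC-123") == "ABC-123"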
@@ -73,15 +73,14 @@ def get_config(settings_path: str|None) -> dict:
 Returns:
 setting: dictionary of settings.
 """
-# with open("C:\\Users\\lwark\\Desktop\\packagedir.txt", "w") as f:
-# f.write(package_dir.__str__())
 def join(loader, node):
 seq = loader.construct_sequence(node)
 return ''.join([str(i) for i in seq])
 ## register the tag handler
 yaml.add_constructor('!join', join)
-# if user hasn't defined config path in cli args
 logger.debug(f"Making directory: {CONFIGDIR.__str__()}")
+# make directories
 try:
 CONFIGDIR.mkdir(parents=True)
 except FileExistsError:
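The join constructor registered with yaml.add_constructor('!join', join) lets config.yml assemble strings out of YAML sequences. A hypothetical config fragment and the matching load call, purely to illustrate how the tag behaves; the keys below are made up, since the project's real config is not shown in the diff:

# Illustration of the '!join' tag handler registered in get_config.
import yaml

def join(loader, node):
    seq = loader.construct_sequence(node)
    return ''.join([str(i) for i in seq])

yaml.add_constructor('!join', join)

doc = """
directory: &dir /data/submissions
database: !join ['sqlite:///', *dir, /submissions.db]
"""
settings = yaml.load(doc, Loader=yaml.Loader)
# settings['database'] == 'sqlite:////data/submissions/submissions.db'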
@@ -91,10 +90,9 @@ def get_config(settings_path: str|None) -> dict:
 LOGDIR.mkdir(parents=True)
 except FileExistsError:
 pass
+# if user hasn't defined config path in cli args
 if settings_path == None:
-# Check user .config/ozma directory
-# if Path.exists(Path.joinpath(CONFIGDIR, "config.yml")):
-# settings_path = Path.joinpath(CONFIGDIR, "config.yml")
+# Check user .config/submissions directory
 if CONFIGDIR.joinpath("config.yml").exists():
 settings_path = CONFIGDIR.joinpath("config.yml")
 # Check user .ozma directory
@@ -116,7 +114,6 @@ def get_config(settings_path: str|None) -> dict:
 logger.error("No config.yml file found. Using empty dictionary.")
 return {}
 logger.debug(f"Using {settings_path} for config file.")
-
 with open(settings_path, "r") as stream:
 try:
 settings = yaml.load(stream, Loader=yaml.Loader)
@@ -182,22 +179,17 @@ def setup_logger(verbosity:int=3):
 # ch = StreamToLogger(logger=logger, log_level=verbosity)
 match verbosity:
 case 3:
-# verb = logging.DEBUG
 ch.setLevel(logging.DEBUG)
 case 2:
-# verb = logging.INFO
 ch.setLevel(logging.INFO)
 case 1:
-# verb = logging.WARNING
 ch.setLevel(logging.WARNING)
 ch.name = "Stream"
 # create formatter and add it to the handlers
-formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+formatter = logging.Formatter('%(asctime)s - %(levelname)s - {%(pathname)s:%(lineno)d} - %(message)s')
 fh.setFormatter(formatter)
 ch.setFormatter(formatter)
-# ch.setLevel(logging.ERROR)
 # add the handlers to the logger
-
 logger.addHandler(fh)
 logger.addHandler(ch)
 def handle_exception(exc_type, exc_value, exc_traceback):
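The new format string swaps the logger name for the source path and line number of each record. A tiny standalone demonstration of what that format produces; the handler and logger names here are placeholders:

# Demo of the new log format: timestamp - level - {path:line} - message.
import logging

demo_logger = logging.getLogger("demo")
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - {%(pathname)s:%(lineno)d} - %(message)s'))
demo_logger.addHandler(handler)
demo_logger.setLevel(logging.DEBUG)

demo_logger.debug("Hello from store_submission")
# e.g. 2023-01-26 12:48:42,340 - DEBUG - {/path/to/script.py:12} - Hello from store_submission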
@@ -206,18 +198,10 @@ def setup_logger(verbosity:int=3):
 return

 logger.critical("Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback))
-sys.exit(f"Uncaught error: {exc_type}, {exc_traceback}, check logs.")
+# sys.exit(f"Uncaught error: {exc_type}, {exc_traceback}, check logs.")

 sys.excepthook = handle_exception
-# stderr_logger = logging.getLogger('STDERR')
-# sys.stderr = logger
 return logger
-# sl = StreamToLogger(stderr_logger, logging.ERROR)
-# sys.stderr = sl
-
-
-
-

 # def set_logger_verbosity(verbosity):
 # """Does what it says.
@@ -27,7 +27,7 @@ from backend.db import (construct_submission_info, lookup_reagent,
 )
 from backend.excel.reports import make_report_xlsx
 import numpy
-from frontend.custom_widgets import AddReagentQuestion, AddReagentForm, SubmissionsSheet, ReportDatePicker, KitAdder, ControlsDatePicker
+from frontend.custom_widgets import AddReagentQuestion, AddReagentForm, SubmissionsSheet, ReportDatePicker, KitAdder, ControlsDatePicker, OverwriteSubQuestion
 import logging
 import difflib

@@ -240,9 +240,16 @@ class App(QMainWindow):
 parsed_reagents.append(wanted_reagent)
 logger.debug(info)
 info['samples'] = self.samples
-base_submission = construct_submission_info(ctx=self.ctx, info_dict=info)
+base_submission, output = construct_submission_info(ctx=self.ctx, info_dict=info)
+if output['message'] != None:
+dlg = OverwriteSubQuestion(output['message'], base_submission.rsl_plate_num)
+if dlg.exec():
+base_submission.reagents = []
+else:
+return
 for reagent in parsed_reagents:
 base_submission.reagents.append(reagent)
+logger.debug(f"Sending submission: {base_submission.rsl_plate_num} to database.")
 result = store_submission(ctx=self.ctx, base_submission=base_submission)
 if result != None:
 msg = QMessageBox()
@@ -250,8 +257,11 @@ class App(QMainWindow):
 msg.setText("Error")
 msg.setInformativeText(result['message'])
 msg.setWindowTitle("Error")
+msg.show()
 msg.exec()
 self.table_widget.sub_wid.setData()
+for item in self.table_widget.formlayout.parentWidget().findChildren(QWidget):
+item.setParent(None)


 def add_reagent(self, reagent_lot:str|None=None, reagent_type:str|None=None):
@@ -44,6 +44,25 @@ class AddReagentQuestion(QDialog):
 self.setLayout(self.layout)


+class OverwriteSubQuestion(QDialog):
+def __init__(self, message:str, rsl_plate_num:str):
+super().__init__()
+
+self.setWindowTitle(f"Overwrite {rsl_plate_num}?")
+
+QBtn = QDialogButtonBox.StandardButton.Yes | QDialogButtonBox.StandardButton.No
+
+self.buttonBox = QDialogButtonBox(QBtn)
+self.buttonBox.accepted.connect(self.accept)
+self.buttonBox.rejected.connect(self.reject)
+
+self.layout = QVBoxLayout()
+message = QLabel(message)
+self.layout.addWidget(message)
+self.layout.addWidget(self.buttonBox)
+self.setLayout(self.layout)
+
+
 class AddReagentForm(QDialog):
 def __init__(self, ctx:dict, reagent_lot:str|None, reagent_type:str|None):
 super().__init__()
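OverwriteSubQuestion is the Yes/No dialog the submission form raises when construct_submission_info reports an existing plate. A minimal, self-contained sketch of how it gets used, assuming PyQt6 (the StandardButton enum style matches that API); the application plumbing around it is simplified and the plate number is made up:

# Minimal usage sketch of the new OverwriteSubQuestion dialog (PyQt6 assumed).
import sys
from PyQt6.QtWidgets import QApplication, QDialog, QDialogButtonBox, QLabel, QVBoxLayout

class OverwriteSubQuestion(QDialog):
    def __init__(self, message: str, rsl_plate_num: str):
        super().__init__()
        self.setWindowTitle(f"Overwrite {rsl_plate_num}?")
        # Yes has YesRole and No has NoRole, which drive accepted()/rejected() respectively
        buttons = QDialogButtonBox.StandardButton.Yes | QDialogButtonBox.StandardButton.No
        self.buttonBox = QDialogButtonBox(buttons)
        self.buttonBox.accepted.connect(self.accept)
        self.buttonBox.rejected.connect(self.reject)
        layout = QVBoxLayout()
        layout.addWidget(QLabel(message))
        layout.addWidget(self.buttonBox)
        self.setLayout(layout)

app = QApplication(sys.argv)
dlg = OverwriteSubQuestion("This submission already exists.\nWould you like to overwrite?", "EXAMPLE-PLATE-001")
if dlg.exec():
    print("overwrite confirmed")      # the app then clears base_submission.reagents and re-saves
else:
    print("submission left untouched")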