WastewaterArticAssociation completed.

This commit is contained in:
lwark
2024-06-13 13:47:30 -05:00
parent f73a3e6c35
commit 4a7d0b0bd4
9 changed files with 449 additions and 24 deletions
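The migrations below form one revision chain that ends at 16b20d4368e6. As a minimal sketch (assuming the repository's usual alembic.ini and env.py, which are not part of this commit), the chain can be applied or stepped back with Alembic's command API:

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed config location; adjust to the project layout
command.upgrade(cfg, "16b20d4368e6")     # apply everything up to the new head in this commit
# command.downgrade(cfg, "d2b094cfa308")  # or step back off the new association table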

View File

@@ -0,0 +1,54 @@
"""Made WastewaterArticAssociation
Revision ID: 16b20d4368e6
Revises: d2b094cfa308
Create Date: 2024-06-13 12:16:48.385516
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '16b20d4368e6'
down_revision = 'd2b094cfa308'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('_wastewaterarticassociation',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('source_plate', sa.String(length=16), nullable=True),
sa.Column('source_plate_number', sa.INTEGER(), nullable=True),
sa.Column('source_well', sa.String(length=8), nullable=True),
sa.Column('ct', sa.String(length=8), nullable=True),
sa.ForeignKeyConstraint(['id'], ['_submissionsampleassociation.id'], ),
sa.PrimaryKeyConstraint('id')
)
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.create_unique_constraint(None, ['id'])
#
# with op.batch_alter_table('_submissiontipsassociation', schema=None) as batch_op:
# batch_op.alter_column('role_name',
# existing_type=sa.INTEGER(),
# type_=sa.String(length=32),
# existing_nullable=True)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# The matching commands in upgrade() are commented out, so nothing is reverted here.
# with op.batch_alter_table('_submissiontipsassociation', schema=None) as batch_op:
#     batch_op.alter_column('role_name',
#                existing_type=sa.String(length=32),
#                type_=sa.INTEGER(),
#                existing_nullable=True)
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
#     batch_op.drop_constraint(None, type_='unique')
op.drop_table('_wastewaterarticassociation')
# ### end Alembic commands ###

View File

@@ -0,0 +1,72 @@
"""Adding fields to Artic
Revision ID: 861b52a2004e
Revises: b744e8a452fd
Create Date: 2024-06-05 13:35:19.012337
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import sqlite
# revision identifiers, used by Alembic.
revision = '861b52a2004e'
down_revision = 'b744e8a452fd'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('_alembic_tmp__basicsubmission')
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.create_unique_constraint(None, ['id'])
with op.batch_alter_table('_wastewaterartic', schema=None) as batch_op:
batch_op.add_column(sa.Column('artic_date', sa.TIMESTAMP(), nullable=True))
batch_op.add_column(sa.Column('ngs_date', sa.TIMESTAMP(), nullable=True))
batch_op.add_column(sa.Column('gel_date', sa.TIMESTAMP(), nullable=True))
batch_op.add_column(sa.Column('gel_barcode', sa.String(length=16), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_wastewaterartic', schema=None) as batch_op:
batch_op.drop_column('gel_barcode')
batch_op.drop_column('gel_date')
batch_op.drop_column('ngs_date')
batch_op.drop_column('artic_date')
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.drop_constraint(None, type_='unique')
op.create_table('_alembic_tmp__basicsubmission',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('rsl_plate_num', sa.VARCHAR(length=32), nullable=False),
sa.Column('submitter_plate_num', sa.VARCHAR(length=127), nullable=True),
sa.Column('submitted_date', sa.TIMESTAMP(), nullable=True),
sa.Column('submitting_lab_id', sa.INTEGER(), nullable=True),
sa.Column('sample_count', sa.INTEGER(), nullable=True),
sa.Column('extraction_kit_id', sa.INTEGER(), nullable=True),
sa.Column('submission_type_name', sa.VARCHAR(), nullable=True),
sa.Column('technician', sa.VARCHAR(length=64), nullable=True),
sa.Column('reagents_id', sa.VARCHAR(), nullable=True),
sa.Column('extraction_info', sqlite.JSON(), nullable=True),
sa.Column('run_cost', sa.FLOAT(), nullable=True),
sa.Column('signed_by', sa.VARCHAR(length=32), nullable=True),
sa.Column('comment', sqlite.JSON(), nullable=True),
sa.Column('submission_category', sa.VARCHAR(length=64), nullable=True),
sa.Column('cost_centre', sa.VARCHAR(length=64), nullable=True),
sa.Column('contact_id', sa.INTEGER(), nullable=True),
sa.ForeignKeyConstraint(['contact_id'], ['_contact.id'], ondelete='SET NULL'),
sa.ForeignKeyConstraint(['extraction_kit_id'], ['_kittype.id'], ondelete='SET NULL'),
sa.ForeignKeyConstraint(['reagents_id'], ['_reagent.id'], ondelete='SET NULL'),
sa.ForeignKeyConstraint(['submission_type_name'], ['_submissiontype.name'], ondelete='SET NULL'),
sa.ForeignKeyConstraint(['submitting_lab_id'], ['_organization.id'], ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('rsl_plate_num'),
sa.UniqueConstraint('submitter_plate_num')
)
# ### end Alembic commands ###

View File

@@ -0,0 +1,34 @@
"""Adding ranking to SubmissionSampleAssociation
Revision ID: 874af342c82c
Revises: a04c25fd9138
Create Date: 2024-05-03 15:08:20.194275
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '874af342c82c'
down_revision = 'a04c25fd9138'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
batch_op.add_column(sa.Column('submission_rank', sa.INTEGER(), nullable=False, server_default=sa.text('1')))  # server_default so existing rows satisfy NOT NULL
# batch_op.create_unique_constraint(None, ['id'])
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.drop_constraint(None, type_='unique')
batch_op.drop_column('submission_rank')
# ### end Alembic commands ###

View File

@@ -0,0 +1,38 @@
"""splitting off sample info in SubmissionType
Revision ID: a04c25fd9138
Revises: 6d2a357860ef
Create Date: 2024-05-01 09:11:44.957532
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a04c25fd9138'
down_revision = '6d2a357860ef'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.create_unique_constraint(None, ['id'])
with op.batch_alter_table('_submissiontype', schema=None) as batch_op:
batch_op.add_column(sa.Column('sample_map', sa.JSON(), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissiontype', schema=None) as batch_op:
batch_op.drop_column('sample_map')
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.drop_constraint(None, type_='unique')
# ### end Alembic commands ###

View File

@@ -0,0 +1,40 @@
"""Attaching contact to submission
Revision ID: b744e8a452fd
Revises: f829a8ab292f
Create Date: 2024-06-04 14:21:38.163431
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b744e8a452fd'
down_revision = 'f829a8ab292f'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_basicsubmission', schema=None) as batch_op:
batch_op.add_column(sa.Column('contact_id', sa.INTEGER(), nullable=True))
batch_op.create_foreign_key('fk_BS_contact_id', '_contact', ['contact_id'], ['id'], ondelete='SET NULL')
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.create_unique_constraint(None, ['id'])
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.drop_constraint(None, type_='unique')
with op.batch_alter_table('_basicsubmission', schema=None) as batch_op:
batch_op.drop_constraint('fk_BS_contact_id', type_='foreignkey')
batch_op.drop_column('contact_id')
# ### end Alembic commands ###

View File

@@ -0,0 +1,95 @@
"""Adding tips
Revision ID: d2b094cfa308
Revises: 861b52a2004e
Create Date: 2024-06-11 13:16:57.319769
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd2b094cfa308'
down_revision = '861b52a2004e'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('_tiprole',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('_process_tiprole',
sa.Column('process_id', sa.INTEGER(), nullable=True),
sa.Column('tiprole_id', sa.INTEGER(), nullable=True),
sa.ForeignKeyConstraint(['process_id'], ['_process.id'], ),
sa.ForeignKeyConstraint(['tiprole_id'], ['_tiprole.id'], )
)
op.create_table('_submissiontypetiproleassociation',
sa.Column('tiprole_id', sa.INTEGER(), nullable=False),
sa.Column('submissiontype_id', sa.INTEGER(), nullable=False),
sa.Column('uses', sa.JSON(), nullable=True),
sa.Column('static', sa.INTEGER(), nullable=True),
sa.ForeignKeyConstraint(['submissiontype_id'], ['_submissiontype.id'], ),
sa.ForeignKeyConstraint(['tiprole_id'], ['_tiprole.id'], ),
sa.PrimaryKeyConstraint('tiprole_id', 'submissiontype_id')
)
op.create_table('_tips',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('role_id', sa.INTEGER(), nullable=True),
sa.Column('name', sa.String(length=64), nullable=True),
sa.Column('lot', sa.String(length=64), nullable=True),
sa.ForeignKeyConstraint(['role_id'], ['_tiprole.id'], name='fk_tip_role_id', ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id')
)
op.create_table('_equipment_tips',
sa.Column('equipment_id', sa.INTEGER(), nullable=True),
sa.Column('tips_id', sa.INTEGER(), nullable=True),
sa.ForeignKeyConstraint(['equipment_id'], ['_equipment.id'], ),
sa.ForeignKeyConstraint(['tips_id'], ['_tips.id'], )
)
op.create_table('_submissiontipsassociation',
sa.Column('tip_id', sa.INTEGER(), nullable=False),
sa.Column('submission_id', sa.INTEGER(), nullable=False),
sa.Column('role_name', sa.String(), nullable=True),
sa.ForeignKeyConstraint(['role_name'], ['_tiprole.name'], ),
sa.ForeignKeyConstraint(['submission_id'], ['_submissiontype.id'], ),
sa.ForeignKeyConstraint(['tip_id'], ['_tips.id'], ),
sa.PrimaryKeyConstraint('tip_id', 'submission_id')
)
op.create_table('_tiproles_tips',
sa.Column('tiprole_id', sa.INTEGER(), nullable=True),
sa.Column('tips_id', sa.INTEGER(), nullable=True),
sa.ForeignKeyConstraint(['tiprole_id'], ['_tiprole.id'], ),
sa.ForeignKeyConstraint(['tips_id'], ['_tips.id'], )
)
# op.create_table('_submissions_tips',
# sa.Column('submission_id', sa.INTEGER(), nullable=True),
# sa.Column('tips_id', sa.INTEGER(), nullable=True),
# sa.ForeignKeyConstraint(['submission_id'], ['_basicsubmission.id'], ),
# sa.ForeignKeyConstraint(['tips_id'], ['_tips.id'], )
# )
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.create_unique_constraint("unique_ssa_id", ['id'])
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.drop_constraint(None, type_='unique')
# op.drop_table('_submissions_tips')  # creation of this table is commented out in upgrade()
op.drop_table('_tiproles_tips')
op.drop_table('_submissiontipsassociation')
op.drop_table('_equipment_tips')
op.drop_table('_tips')
op.drop_table('_submissiontypetiproleassociation')
op.drop_table('_process_tiprole')
op.drop_table('_tiprole')
# ### end Alembic commands ###

View File

@@ -0,0 +1,39 @@
"""Adding configitems to db
Revision ID: f829a8ab292f
Revises: 874af342c82c
Create Date: 2024-05-15 14:03:11.767480
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f829a8ab292f'
down_revision = '874af342c82c'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('_configitem',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('key', sa.String(), nullable=True),
sa.Column('value', sa.JSON(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.create_unique_constraint(None, ['id'])
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.drop_constraint(None, type_='unique')
op.drop_table('_configitem')
# ### end Alembic commands ###

View File

@@ -1789,28 +1789,28 @@ class WastewaterArtic(BasicSubmission):
"""
# logger.debug(f"Hello from {self.__class__.__name__} dictionary sample adjuster.")
output = []
set_plate = None
# set_plate = None
for assoc in self.submission_sample_associations:
dicto = assoc.to_sub_dict()
if self.source_plates is None:
output.append(dicto)
continue
for item in self.source_plates:
if assoc.sample.id is None:
old_plate = None
else:
old_plate = WastewaterAssociation.query(submission=item['plate'], sample=assoc.sample, limit=1)
if old_plate is not None:
set_plate = old_plate.submission.rsl_plate_num
# logger.debug(f"Dictionary: {pformat(dicto)}")
if dicto['ww_processing_num'].startswith("NTC"):
dicto['well'] = dicto['ww_processing_num']
else:
dicto['well'] = f"{row_map[old_plate.row]}{old_plate.column}"
break
elif dicto['ww_processing_num'].startswith("NTC"):
dicto['well'] = dicto['ww_processing_num']
dicto['plate_name'] = set_plate
# if self.source_plates is None:
# output.append(dicto)
# continue
# for item in self.source_plates:
# if assoc.sample.id is None:
# old_plate = None
# else:
# old_plate = WastewaterAssociation.query(submission=item['plate'], sample=assoc.sample, limit=1)
# if old_plate is not None:
# set_plate = old_plate.submission.rsl_plate_num
# # logger.debug(f"Dictionary: {pformat(dicto)}")
# if dicto['ww_processing_num'].startswith("NTC"):
# dicto['well'] = dicto['ww_processing_num']
# else:
# dicto['well'] = f"{row_map[old_plate.row]}{old_plate.column}"
# break
# elif dicto['ww_processing_num'].startswith("NTC"):
# dicto['well'] = dicto['ww_processing_num']
# dicto['plate_name'] = set_plate
# logger.debug(f"Here is our raw sample: {pformat(dicto)}")
output.append(dicto)
return output
@@ -2342,7 +2342,7 @@ class SubmissionSampleAssociation(BaseClass):
}
def __init__(self, submission: BasicSubmission = None, sample: BasicSample = None, row: int = 1, column: int = 1,
id: int | None = None, submission_rank: int = 0):
id: int | None = None, submission_rank: int = 0, **kwargs):
self.submission = submission
self.sample = sample
self.row = row
@@ -2352,6 +2352,12 @@ class SubmissionSampleAssociation(BaseClass):
self.id = id
else:
self.id = self.__class__.autoincrement_id()
logger.debug(f"Looking at kwargs: {pformat(kwargs)}")
for k,v in kwargs.items():
try:
self.__setattr__(k, v)
except AttributeError:
logger.error(f"Couldn't set {k} to {v}")
# logger.debug(f"Using submission sample association id: {self.id}")
def __repr__(self) -> str:
@@ -2532,7 +2538,7 @@ class SubmissionSampleAssociation(BaseClass):
Returns:
SubmissionSampleAssociation: Queried or new association.
"""
# logger.debug(f"Attempting create or query with {kwargs}")
logger.debug(f"Attempting create or query with {kwargs}")
match submission:
case BasicSubmission():
pass
@@ -2562,7 +2568,6 @@ class SubmissionSampleAssociation(BaseClass):
if instance is None:
# sanitized_kwargs = {k:v for k,v in kwargs.items() if k not in ['id']}
used_cls = cls.find_polymorphic_subclass(polymorphic_identity=association_type)
# instance = used_cls(submission=submission, sample=sample, id=id, **kwargs)
instance = used_cls(submission=submission, sample=sample, id=id, **kwargs)
return instance
@@ -2627,3 +2632,48 @@ class WastewaterAssociation(SubmissionSampleAssociation):
except ValueError as e:
logger.error(f"Problem incrementing id: {e}")
return 1
class WastewaterArticAssociation(SubmissionSampleAssociation):
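"""Joined-table child of SubmissionSampleAssociation carrying ARTIC source plate, source well and Ct info for a sample."""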
id = Column(INTEGER, ForeignKey("_submissionsampleassociation.id"), primary_key=True)
source_plate = Column(String(16))
source_plate_number = Column(INTEGER)
source_well = Column(String(8))
ct = Column(String(8)) #: AKA ct for N1
__mapper_args__ = dict(polymorphic_identity="Wastewater Artic Association",
polymorphic_load="inline",
inherit_condition=(id == SubmissionSampleAssociation.id))
def to_sub_dict(self) -> dict:
"""
Returns a sample dictionary updated with instance information. Extends parent
Returns:
dict: Updated dictionary with row, column and well updated
"""
sample = super().to_sub_dict()
sample['ct'] = self.ct
sample['source_plate'] = self.source_plate
sample['source_plate_number'] = self.source_plate_number
sample['source_well'] = self.source_well
return sample
@classmethod
def autoincrement_id(cls) -> int:
"""
Increments the association id automatically. Overrides parent
Returns:
int: incremented id
"""
try:
parent = [base for base in cls.__bases__ if base.__name__ == "SubmissionSampleAssociation"][0]
return max([item.id for item in parent.query()]) + 1
except ValueError as e:
logger.error(f"Problem incrementing id: {e}")
return 1
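A minimal usage sketch for the new association (assuming an active database session, since autoincrement_id() queries existing rows, and assuming `submission` and `sample` are WastewaterArtic / BasicSample instances loaded elsewhere; the values are illustrative):

assoc = WastewaterArticAssociation(
    submission=submission,
    sample=sample,
    row=2,
    column=3,
    source_plate="EXAMPLE-PLATE",   # illustrative, not real data
    source_plate_number=1,
    source_well="B3",
    ct="27.4",
)
# The extra keyword arguments reach the instance through the new **kwargs handling in
# SubmissionSampleAssociation.__init__, and to_sub_dict() merges them into the sample dict.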

View File

@@ -256,7 +256,7 @@ class PydSample(BaseModel, extra='allow'):
submission=submission,
sample=instance,
row=row, column=column, id=aid,
submission_rank=submission_rank)
submission_rank=submission_rank, **self.model_extra)
logger.debug(f"Using submission_sample_association: {association}")
try:
# instance.sample_submission_associations.append(association)
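The **self.model_extra spread works because PydSample is declared with extra='allow': Pydantic v2 keeps unrecognized fields in model_extra. A standalone sketch of that behaviour (the toy model and its field names are illustrative, not the project's actual PydSample schema):

from pydantic import BaseModel

class SampleSketch(BaseModel, extra="allow"):
    submitter_id: str
    row: int = 1
    column: int = 1

s = SampleSketch(submitter_id="WW-0001", row=2, column=3, ct="27.4", source_well="B3")
print(s.model_extra)  # {'ct': '27.4', 'source_well': 'B3'} -- these reach the association as **kwargs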
@@ -375,6 +375,7 @@ class PydSubmission(BaseModel, extra='allow'):
@classmethod
def convert_equipment_dict(cls, value):
# logger.debug(f"Equipment: {value}")
if isinstance(value, dict):
return value['value']
return value
@@ -697,6 +698,8 @@ class PydSubmission(BaseModel, extra='allow'):
for key, value in dicto.items():
if isinstance(value, dict):
value = value['value']
if value is None:
continue
# logger.debug(f"Setting {key} to {value}")
match key:
case "reagents":