Prior to major database rebuild.

Landon Wark
2023-07-27 08:14:04 -05:00
parent f22e697815
commit af810ae528
22 changed files with 76 additions and 696 deletions

View File

@@ -1,5 +1,7 @@
-- [ ] Fix tests.
-- [ ] Reorganize wastewater artic parser.
+- [ ] Rebuild database
+- [ ] Fix Wastewater/Artic double submission problem
+- [X] Fix tests.
+- [X] Reorganize wastewater artic parser.
 - [ ] Streamline addition of new kits by moving as much into DB as possible.
 - [X] Large scale refactor (2023-07-24).
 - [x] Make plate details from html, same as export.

View File

@@ -56,7 +56,7 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne
 # output_encoding = utf-8
 ; sqlalchemy.url = sqlite:///L:\Robotics Laboratory Support\Submissions\submissions.db
-sqlalchemy.url = sqlite:///C:\Users\lwark\Documents\Archives\DB_backups\submissions-20230712.db
+sqlalchemy.url = sqlite:///C:\Users\lwark\Documents\Archives\DB_backups\submissions-20230726.db
 ; sqlalchemy.url = sqlite:///C:\Users\lwark\Documents\python\submissions\tests\test_assets\submissions_test.db

View File

@@ -1,57 +0,0 @@
"""added target status to ww samples
Revision ID: 00de69ad6eab
Revises: 8adc85dd9b92
Create Date: 2023-03-31 14:51:40.705301
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import sqlite
# revision identifiers, used by Alembic.
revision = '00de69ad6eab'
down_revision = '8adc85dd9b92'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# op.drop_table('_alembic_tmp__submissions')
with op.batch_alter_table('_ww_samples', schema=None) as batch_op:
batch_op.add_column(sa.Column('n1_status', sa.String(length=32), nullable=True))
batch_op.add_column(sa.Column('n2_status', sa.String(length=32), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_ww_samples', schema=None) as batch_op:
batch_op.drop_column('n2_status')
batch_op.drop_column('n1_status')
# op.create_table('_alembic_tmp__submissions',
# sa.Column('id', sa.INTEGER(), nullable=False),
# sa.Column('rsl_plate_num', sa.VARCHAR(length=32), nullable=False),
# sa.Column('submitter_plate_num', sa.VARCHAR(length=127), nullable=True),
# sa.Column('submitted_date', sa.TIMESTAMP(), nullable=True),
# sa.Column('submitting_lab_id', sa.INTEGER(), nullable=True),
# sa.Column('sample_count', sa.INTEGER(), nullable=True),
# sa.Column('extraction_kit_id', sa.INTEGER(), nullable=True),
# sa.Column('submission_type', sa.VARCHAR(length=32), nullable=True),
# sa.Column('technician', sa.VARCHAR(length=64), nullable=True),
# sa.Column('reagents_id', sa.VARCHAR(), nullable=True),
# sa.Column('extraction_info', sqlite.JSON(), nullable=True),
# sa.Column('run_cost', sa.FLOAT(), nullable=True),
# sa.Column('uploaded_by', sa.VARCHAR(length=32), nullable=True),
# sa.Column('pcr_info', sqlite.JSON(), nullable=True),
# sa.ForeignKeyConstraint(['extraction_kit_id'], ['_kits.id'], ondelete='SET NULL'),
# sa.ForeignKeyConstraint(['reagents_id'], ['_reagents.id'], ondelete='SET NULL'),
# sa.ForeignKeyConstraint(['submitting_lab_id'], ['_organizations.id'], ondelete='SET NULL'),
# sa.PrimaryKeyConstraint('id'),
# sa.UniqueConstraint('rsl_plate_num'),
# sa.UniqueConstraint('submitter_plate_num')
# )
# ### end Alembic commands ###

View File

@@ -1,8 +1,8 @@
"""initial commit """database_rebuild
Revision ID: 8753ed70f148 Revision ID: 06e2c8dc4889
Revises: Revises:
Create Date: 2023-01-26 12:48:42.340619 Create Date: 2023-07-26 14:08:18.809998
""" """
from alembic import op from alembic import op
@@ -10,7 +10,7 @@ import sqlalchemy as sa
 # revision identifiers, used by Alembic.
-revision = '8753ed70f148'
+revision = '06e2c8dc4889'
 down_revision = None
 branch_labels = None
 depends_on = None
@@ -23,6 +23,8 @@ def upgrade() -> None:
     sa.Column('name', sa.String(length=64), nullable=True),
     sa.Column('email', sa.String(length=64), nullable=True),
     sa.Column('phone', sa.String(length=32), nullable=True),
+    sa.Column('organization_id', sa.INTEGER(), nullable=True),
+    sa.ForeignKeyConstraint(['organization_id'], ['_organizations.id'], name='fk_contact_org_id', ondelete='SET NULL'),
     sa.PrimaryKeyConstraint('id')
     )
     op.create_table('_control_types',
@@ -37,19 +39,14 @@ def upgrade() -> None:
     sa.Column('name', sa.String(length=64), nullable=True),
     sa.Column('used_for', sa.JSON(), nullable=True),
     sa.Column('cost_per_run', sa.FLOAT(precision=2), nullable=True),
+    sa.Column('mutable_cost_column', sa.FLOAT(precision=2), nullable=True),
+    sa.Column('mutable_cost_sample', sa.FLOAT(precision=2), nullable=True),
+    sa.Column('constant_cost', sa.FLOAT(precision=2), nullable=True),
     sa.Column('reagent_types_id', sa.INTEGER(), nullable=True),
     sa.ForeignKeyConstraint(['reagent_types_id'], ['_reagent_types.id'], name='fk_KT_reagentstype_id', ondelete='SET NULL', use_alter=True),
     sa.PrimaryKeyConstraint('id'),
     sa.UniqueConstraint('name')
     )
-    op.create_table('_reagent_types',
-    sa.Column('id', sa.INTEGER(), nullable=False),
-    sa.Column('name', sa.String(length=64), nullable=True),
-    sa.Column('kit_id', sa.INTEGER(), nullable=True),
-    sa.Column('eol_ext', sa.Interval(), nullable=True),
-    sa.ForeignKeyConstraint(['kit_id'], ['_kits.id'], name='fk_RT_kits_id', ondelete='SET NULL', use_alter=True),
-    sa.PrimaryKeyConstraint('id')
-    )
     op.create_table('_organizations',
     sa.Column('id', sa.INTEGER(), nullable=False),
     sa.Column('name', sa.String(length=64), nullable=True),
@@ -58,6 +55,32 @@ def upgrade() -> None:
     sa.ForeignKeyConstraint(['contact_ids'], ['_contacts.id'], name='fk_org_contact_id', ondelete='SET NULL'),
     sa.PrimaryKeyConstraint('id')
     )
+    op.create_table('_reagent_types',
+    sa.Column('id', sa.INTEGER(), nullable=False),
+    sa.Column('name', sa.String(length=64), nullable=True),
+    sa.Column('kit_id', sa.INTEGER(), nullable=True),
+    sa.Column('eol_ext', sa.Interval(), nullable=True),
+    sa.Column('required', sa.INTEGER(), server_default='1', nullable=True),
+    sa.Column('last_used', sa.String(length=32), nullable=True),
+    sa.ForeignKeyConstraint(['kit_id'], ['_kits.id'], name='fk_RT_kits_id', ondelete='SET NULL', use_alter=True),
+    sa.PrimaryKeyConstraint('id')
+    )
+    op.create_table('_discounts',
+    sa.Column('id', sa.INTEGER(), nullable=False),
+    sa.Column('kit_id', sa.INTEGER(), nullable=True),
+    sa.Column('client_id', sa.INTEGER(), nullable=True),
+    sa.Column('name', sa.String(length=128), nullable=True),
+    sa.Column('amount', sa.FLOAT(precision=2), nullable=True),
+    sa.ForeignKeyConstraint(['client_id'], ['_organizations.id'], name='fk_org_id', ondelete='SET NULL'),
+    sa.ForeignKeyConstraint(['kit_id'], ['_kits.id'], name='fk_kit_type_id', ondelete='SET NULL'),
+    sa.PrimaryKeyConstraint('id')
+    )
+    op.create_table('_orgs_contacts',
+    sa.Column('org_id', sa.INTEGER(), nullable=True),
+    sa.Column('contact_id', sa.INTEGER(), nullable=True),
+    sa.ForeignKeyConstraint(['contact_id'], ['_contacts.id'], ),
+    sa.ForeignKeyConstraint(['org_id'], ['_organizations.id'], )
+    )
     op.create_table('_reagents',
     sa.Column('id', sa.INTEGER(), nullable=False),
     sa.Column('type_id', sa.INTEGER(), nullable=True),
@@ -73,15 +96,9 @@ def upgrade() -> None:
     sa.ForeignKeyConstraint(['kits_id'], ['_kits.id'], ),
     sa.ForeignKeyConstraint(['reagent_types_id'], ['_reagent_types.id'], )
     )
-    op.create_table('_orgs_contacts',
-    sa.Column('org_id', sa.INTEGER(), nullable=True),
-    sa.Column('contact_id', sa.INTEGER(), nullable=True),
-    sa.ForeignKeyConstraint(['contact_id'], ['_contacts.id'], ),
-    sa.ForeignKeyConstraint(['org_id'], ['_organizations.id'], )
-    )
     op.create_table('_submissions',
     sa.Column('id', sa.INTEGER(), nullable=False),
-    sa.Column('rsl_plate_num', sa.String(length=32), nullable=True),
+    sa.Column('rsl_plate_num', sa.String(length=32), nullable=False),
     sa.Column('submitter_plate_num', sa.String(length=127), nullable=True),
     sa.Column('submitted_date', sa.TIMESTAMP(), nullable=True),
     sa.Column('submitting_lab_id', sa.INTEGER(), nullable=True),
@@ -90,6 +107,11 @@ def upgrade() -> None:
     sa.Column('submission_type', sa.String(length=32), nullable=True),
     sa.Column('technician', sa.String(length=64), nullable=True),
     sa.Column('reagents_id', sa.String(), nullable=True),
+    sa.Column('extraction_info', sa.JSON(), nullable=True),
+    sa.Column('run_cost', sa.FLOAT(precision=2), nullable=True),
+    sa.Column('uploaded_by', sa.String(length=32), nullable=True),
+    sa.Column('comment', sa.JSON(), nullable=True),
+    sa.Column('pcr_info', sa.JSON(), nullable=True),
     sa.ForeignKeyConstraint(['extraction_kit_id'], ['_kits.id'], name='fk_BS_extkit_id', ondelete='SET NULL'),
     sa.ForeignKeyConstraint(['reagents_id'], ['_reagents.id'], name='fk_BS_reagents_id', ondelete='SET NULL'),
     sa.ForeignKeyConstraint(['submitting_lab_id'], ['_organizations.id'], name='fk_BS_sublab_id', ondelete='SET NULL'),
@@ -105,7 +127,8 @@ def upgrade() -> None:
     sa.Column('concentration', sa.String(length=16), nullable=True),
     sa.Column('rsl_plate_id', sa.INTEGER(), nullable=True),
     sa.ForeignKeyConstraint(['rsl_plate_id'], ['_submissions.id'], name='fk_BCS_sample_id', ondelete='SET NULL'),
-    sa.PrimaryKeyConstraint('id')
+    sa.PrimaryKeyConstraint('id'),
+    sa.UniqueConstraint('sample_id')
     )
     op.create_table('_control_samples',
     sa.Column('id', sa.INTEGER(), nullable=False),
@@ -116,6 +139,9 @@ def upgrade() -> None:
     sa.Column('matches', sa.JSON(), nullable=True),
     sa.Column('kraken', sa.JSON(), nullable=True),
     sa.Column('submission_id', sa.INTEGER(), nullable=True),
+    sa.Column('refseq_version', sa.String(length=16), nullable=True),
+    sa.Column('kraken2_version', sa.String(length=16), nullable=True),
+    sa.Column('kraken2_db_version', sa.String(length=32), nullable=True),
     sa.ForeignKeyConstraint(['parent_id'], ['_control_types.id'], name='fk_control_parent_id'),
     sa.ForeignKeyConstraint(['submission_id'], ['_submissions.id'], ),
     sa.PrimaryKeyConstraint('id'),
@@ -134,17 +160,23 @@ def upgrade() -> None:
     sa.Column('rsl_number', sa.String(length=64), nullable=True),
     sa.Column('rsl_plate_id', sa.INTEGER(), nullable=True),
     sa.Column('collection_date', sa.TIMESTAMP(), nullable=True),
+    sa.Column('well_number', sa.String(length=8), nullable=True),
     sa.Column('testing_type', sa.String(length=64), nullable=True),
     sa.Column('site_status', sa.String(length=64), nullable=True),
     sa.Column('notes', sa.String(length=2000), nullable=True),
     sa.Column('ct_n1', sa.FLOAT(precision=2), nullable=True),
     sa.Column('ct_n2', sa.FLOAT(precision=2), nullable=True),
+    sa.Column('n1_status', sa.String(length=32), nullable=True),
+    sa.Column('n2_status', sa.String(length=32), nullable=True),
     sa.Column('seq_submitted', sa.BOOLEAN(), nullable=True),
     sa.Column('ww_seq_run_id', sa.String(length=64), nullable=True),
     sa.Column('sample_type', sa.String(length=8), nullable=True),
-    sa.Column('well_number', sa.String(length=8), nullable=True),
-    sa.ForeignKeyConstraint(['rsl_plate_id'], ['_submissions.id'], name='fk_WWS_sample_id', ondelete='SET NULL'),
-    sa.PrimaryKeyConstraint('id')
+    sa.Column('pcr_results', sa.JSON(), nullable=True),
+    sa.Column('well_24', sa.String(length=8), nullable=True),
+    sa.Column('artic_well_number', sa.String(length=8), nullable=True),
+    sa.ForeignKeyConstraint(['rsl_plate_id'], ['_submissions.id'], name='fk_WWS_submission_id', ondelete='SET NULL'),
+    sa.PrimaryKeyConstraint('id'),
+    sa.UniqueConstraint('ww_sample_full_id')
     )
     # ### end Alembic commands ###
@@ -156,11 +188,12 @@ def downgrade() -> None:
     op.drop_table('_control_samples')
     op.drop_table('_bc_samples')
     op.drop_table('_submissions')
-    op.drop_table('_orgs_contacts')
     op.drop_table('_reagentstypes_kittypes')
     op.drop_table('_reagents')
-    op.drop_table('_organizations')
+    op.drop_table('_orgs_contacts')
+    op.drop_table('_discounts')
     op.drop_table('_reagent_types')
+    op.drop_table('_organizations')
     op.drop_table('_kits')
     op.drop_table('_control_types')
     op.drop_table('_contacts')
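
Note: the hunks above collapse the whole Alembic history into a single base revision (06e2c8dc4889), and the remaining files in this commit delete the intermediate migrations. Nothing in the commit shows how databases created under the old history are reconciled with the new one; a minimal sketch of the usual approach, assuming the standard Alembic Python API and an alembic.ini like the one above, would be:

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumes sqlalchemy.url points at the target database

# An existing database already has the tables, so mark it as being at the new
# consolidated base revision instead of re-running upgrade().
command.stamp(cfg, "06e2c8dc4889")

# A fresh database would instead be built from the single rebuilt migration:
# command.upgrade(cfg, "head")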

View File

@@ -1,32 +0,0 @@
"""added pcr info to wastewater subs
Revision ID: 0ee7ffa026b2
Revises: 3d80e4a17a26
Create Date: 2023-03-22 14:51:37.871062
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0ee7ffa026b2'
down_revision = '3d80e4a17a26'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissions', schema=None) as batch_op:
batch_op.add_column(sa.Column('pcr_info', sa.JSON(), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissions', schema=None) as batch_op:
batch_op.drop_column('pcr_info')
# ### end Alembic commands ###

View File

@@ -1,40 +0,0 @@
"""updating costs
Revision ID: 178203610c3b
Revises: afbdd9e46207
Create Date: 2023-02-02 09:31:05.748477
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '178203610c3b'
down_revision = 'afbdd9e46207'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_kits', schema=None) as batch_op:
batch_op.add_column(sa.Column('mutable_cost', sa.FLOAT(precision=2), nullable=True))
batch_op.add_column(sa.Column('constant_cost', sa.FLOAT(precision=2), nullable=True))
with op.batch_alter_table('_submissions', schema=None) as batch_op:
batch_op.add_column(sa.Column('run_cost', sa.FLOAT(precision=2), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissions', schema=None) as batch_op:
batch_op.drop_column('run_cost')
with op.batch_alter_table('_kits', schema=None) as batch_op:
batch_op.drop_column('constant_cost')
batch_op.drop_column('mutable_cost')
# ### end Alembic commands ###

View File

@@ -1,59 +0,0 @@
"""added versions to ref/kraken
Revision ID: 3d80e4a17a26
Revises: 785bb1140878
Create Date: 2023-03-02 13:09:30.750398
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import sqlite
# revision identifiers, used by Alembic.
revision = '3d80e4a17a26'
down_revision = '785bb1140878'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# op.drop_table('_alembic_tmp__submissions')
with op.batch_alter_table('_control_samples', schema=None) as batch_op:
batch_op.add_column(sa.Column('refseq_version', sa.String(length=16), nullable=True))
batch_op.add_column(sa.Column('kraken2_version', sa.String(length=16), nullable=True))
batch_op.add_column(sa.Column('kraken2_db_version', sa.String(length=32), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_control_samples', schema=None) as batch_op:
batch_op.drop_column('kraken2_db_version')
batch_op.drop_column('kraken2_version')
batch_op.drop_column('refseq_version')
# op.create_table('_alembic_tmp__submissions',
# sa.Column('id', sa.INTEGER(), nullable=False),
# sa.Column('rsl_plate_num', sa.VARCHAR(length=32), nullable=False),
# sa.Column('submitter_plate_num', sa.VARCHAR(length=127), nullable=True),
# sa.Column('submitted_date', sa.TIMESTAMP(), nullable=True),
# sa.Column('submitting_lab_id', sa.INTEGER(), nullable=True),
# sa.Column('sample_count', sa.INTEGER(), nullable=True),
# sa.Column('extraction_kit_id', sa.INTEGER(), nullable=True),
# sa.Column('submission_type', sa.VARCHAR(length=32), nullable=True),
# sa.Column('technician', sa.VARCHAR(length=64), nullable=True),
# sa.Column('reagents_id', sa.VARCHAR(), nullable=True),
# sa.Column('extraction_info', sqlite.JSON(), nullable=True),
# sa.Column('run_cost', sa.FLOAT(), nullable=True),
# sa.Column('uploaded_by', sa.VARCHAR(length=32), nullable=True),
# sa.ForeignKeyConstraint(['extraction_kit_id'], ['_kits.id'], ondelete='SET NULL'),
# sa.ForeignKeyConstraint(['reagents_id'], ['_reagents.id'], ondelete='SET NULL'),
# sa.ForeignKeyConstraint(['submitting_lab_id'], ['_organizations.id'], ondelete='SET NULL'),
# sa.PrimaryKeyConstraint('id'),
# sa.UniqueConstraint('rsl_plate_num'),
# sa.UniqueConstraint('submitter_plate_num')
# )
# ### end Alembic commands ###

View File

@@ -1,32 +0,0 @@
"""added last_used to reagenttype
Revision ID: 4c6221f01324
Revises: 7aadd731ff63
Create Date: 2023-07-07 14:32:24.064042
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4c6221f01324'
down_revision = '7aadd731ff63'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_reagent_types', schema=None) as batch_op:
batch_op.add_column(sa.Column('last_used', sa.String(length=32), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_reagent_types', schema=None) as batch_op:
batch_op.drop_column('last_used')
# ### end Alembic commands ###

View File

@@ -1,31 +0,0 @@
"""added elution well to ww_sample
Revision ID: 64fec6271a50
Revises: a31943b2284c
Create Date: 2023-05-24 14:43:25.477637
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import sqlite
# revision identifiers, used by Alembic.
revision = '64fec6271a50'
down_revision = 'a31943b2284c'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_ww_samples', schema=None) as batch_op:
batch_op.add_column(sa.Column('elution_well', sa.String(length=8), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_ww_samples', schema=None) as batch_op:
batch_op.drop_column('elution_well')
# ### end Alembic commands ###

View File

@@ -1,38 +0,0 @@
"""making sample ids unique
Revision ID: 78178df0286a
Revises: 4c6221f01324
Create Date: 2023-07-26 13:55:41.864399
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '78178df0286a'
down_revision = '4c6221f01324'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_bc_samples', schema=None) as batch_op:
batch_op.create_unique_constraint("unique_bc_sample", ['sample_id'])
with op.batch_alter_table('_ww_samples', schema=None) as batch_op:
batch_op.create_unique_constraint("unique_ww_sample", ['ww_sample_full_id'])
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_ww_samples', schema=None) as batch_op:
batch_op.drop_constraint(None, type_='unique')
with op.batch_alter_table('_bc_samples', schema=None) as batch_op:
batch_op.drop_constraint(None, type_='unique')
# ### end Alembic commands ###

View File

@@ -1,32 +0,0 @@
"""added user tracking
Revision ID: 785bb1140878
Revises: 178203610c3b
Create Date: 2023-02-06 09:54:20.371117
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '785bb1140878'
down_revision = '178203610c3b'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissions', schema=None) as batch_op:
batch_op.add_column(sa.Column('uploaded_by', sa.String(length=32), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissions', schema=None) as batch_op:
batch_op.drop_column('uploaded_by')
# ### end Alembic commands ###

View File

@@ -1,42 +0,0 @@
"""added required to reagenttype
Revision ID: 7aadd731ff63
Revises: 8d32abdafe2b
Create Date: 2023-07-06 07:58:36.545604
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '7aadd731ff63'
down_revision = '8d32abdafe2b'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_reagent_types', schema=None) as batch_op:
batch_op.add_column(sa.Column('required', sa.INTEGER(), nullable=True))
# with op.batch_alter_table('_submissions', schema=None) as batch_op:
# batch_op.alter_column('rsl_plate_num',
# existing_type=sa.VARCHAR(length=32),
# nullable=False)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# with op.batch_alter_table('_submissions', schema=None) as batch_op:
# batch_op.alter_column('rsl_plate_num',
# existing_type=sa.VARCHAR(length=32),
# nullable=True)
with op.batch_alter_table('_reagent_types', schema=None) as batch_op:
batch_op.drop_column('required')
# ### end Alembic commands ###

View File

@@ -1,30 +0,0 @@
"""updated discount table
Revision ID: 83b06f3f4869
Revises: cc9672a505f5
Create Date: 2023-04-27 13:04:35.886294
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import sqlite
# revision identifiers, used by Alembic.
revision = '83b06f3f4869'
down_revision = 'cc9672a505f5'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_discounts', schema=None) as batch_op:
batch_op.add_column(sa.Column('name', sa.String(length=128), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_discounts', schema=None) as batch_op:
batch_op.drop_column('name')
# ### end Alembic commands ###

View File

@@ -1,31 +0,0 @@
"""added pcr info to wastewater samples
Revision ID: 8adc85dd9b92
Revises: 0ee7ffa026b2
Create Date: 2023-03-27 13:46:06.173379
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8adc85dd9b92'
down_revision = '0ee7ffa026b2'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_ww_samples', schema=None) as batch_op:
batch_op.add_column(sa.Column('pcr_results', sa.JSON(), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_ww_samples', schema=None) as batch_op:
batch_op.drop_column('pcr_results')
# ### end Alembic commands ###

View File

@@ -1,30 +0,0 @@
"""moved artic info to ww_samples
Revision ID: 8d32abdafe2b
Revises: aac569c672de
Create Date: 2023-06-05 10:10:37.650733
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8d32abdafe2b'
down_revision = 'aac569c672de'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_ww_samples', schema=None) as batch_op:
batch_op.add_column(sa.Column('artic_well_number', sa.String(length=8), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_ww_samples', schema=None) as batch_op:
batch_op.drop_column('artic_well_number')
# ### end Alembic commands ###

View File

@@ -1,33 +0,0 @@
"""added commenting
Revision ID: a31943b2284c
Revises: 83b06f3f4869
Create Date: 2023-05-10 11:34:30.339915
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a31943b2284c'
down_revision = '83b06f3f4869'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissions', schema=None) as batch_op:
batch_op.add_column(sa.Column('comment', sa.JSON(), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissions', schema=None) as batch_op:
batch_op.drop_column('comment')
# ### end Alembic commands ###

View File

@@ -1,63 +0,0 @@
"""added in artic information
Revision ID: aac569c672de
Revises: 64fec6271a50
Create Date: 2023-06-02 15:14:13.726489
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import sqlite
# revision identifiers, used by Alembic.
revision = 'aac569c672de'
down_revision = '64fec6271a50'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# op.create_table('_artic_samples',
# sa.Column('id', sa.INTEGER(), nullable=False),
# sa.Column('well_number', sa.String(length=8), nullable=True),
# sa.Column('rsl_plate_id', sa.INTEGER(), nullable=True),
# sa.Column('ww_sample_full_id', sa.String(length=64), nullable=False),
# sa.Column('lims_sample_id', sa.String(length=64), nullable=False),
# sa.Column('ct_1', sa.FLOAT(precision=2), nullable=True),
# sa.Column('ct_2', sa.FLOAT(precision=2), nullable=True),
# sa.ForeignKeyConstraint(['rsl_plate_id'], ['_submissions.id'], name='fk_WWA_submission_id', ondelete='SET NULL'),
# sa.PrimaryKeyConstraint('id')
# )
op.drop_table('_alembic_tmp__submissions')
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('_alembic_tmp__submissions',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('rsl_plate_num', sa.VARCHAR(length=32), nullable=False),
sa.Column('submitter_plate_num', sa.VARCHAR(length=127), nullable=True),
sa.Column('submitted_date', sa.TIMESTAMP(), nullable=True),
sa.Column('submitting_lab_id', sa.INTEGER(), nullable=True),
sa.Column('sample_count', sa.INTEGER(), nullable=True),
sa.Column('extraction_kit_id', sa.INTEGER(), nullable=True),
sa.Column('submission_type', sa.VARCHAR(length=32), nullable=True),
sa.Column('technician', sa.VARCHAR(length=64), nullable=True),
sa.Column('reagents_id', sa.VARCHAR(), nullable=True),
sa.Column('extraction_info', sqlite.JSON(), nullable=True),
sa.Column('run_cost', sa.FLOAT(), nullable=True),
sa.Column('uploaded_by', sa.VARCHAR(length=32), nullable=True),
sa.Column('pcr_info', sqlite.JSON(), nullable=True),
sa.Column('comment', sqlite.JSON(), nullable=True),
sa.ForeignKeyConstraint(['extraction_kit_id'], ['_kits.id'], ondelete='SET NULL'),
sa.ForeignKeyConstraint(['reagents_id'], ['_reagents.id'], ondelete='SET NULL'),
sa.ForeignKeyConstraint(['submitting_lab_id'], ['_organizations.id'], ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('rsl_plate_num'),
sa.UniqueConstraint('submitter_plate_num')
)
# op.drop_table('_artic_samples')
# ### end Alembic commands ###

View File

@@ -1,40 +0,0 @@
"""add extraction info to submissions
Revision ID: afbdd9e46207
Revises: 8753ed70f148
Create Date: 2023-01-30 13:05:42.858306
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'afbdd9e46207'
down_revision = '8753ed70f148'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_contacts', schema=None) as batch_op:
batch_op.add_column(sa.Column('organization_id', sa.INTEGER(), nullable=True))
batch_op.create_foreign_key('fk_contact_org_id', '_organizations', ['organization_id'], ['id'], ondelete='SET NULL')
with op.batch_alter_table('_submissions', schema=None) as batch_op:
batch_op.add_column(sa.Column('extraction_info', sa.JSON(), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissions', schema=None) as batch_op:
batch_op.drop_column('extraction_info')
with op.batch_alter_table('_contacts', schema=None) as batch_op:
batch_op.drop_constraint('fk_contact_org_id', type_='foreignkey')
batch_op.drop_column('organization_id')
# ### end Alembic commands ###

View File

@@ -1,37 +0,0 @@
"""added discount table
Revision ID: cc9672a505f5
Revises: 00de69ad6eab
Create Date: 2023-04-27 12:58:41.331563
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'cc9672a505f5'
down_revision = '00de69ad6eab'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('_discounts',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('kit_id', sa.INTEGER(), nullable=True),
sa.Column('client_id', sa.INTEGER(), nullable=True),
sa.Column('amount', sa.FLOAT(precision=2), nullable=True),
sa.ForeignKeyConstraint(['client_id'], ['_organizations.id'], name='fk_org_id', ondelete='SET NULL'),
sa.ForeignKeyConstraint(['kit_id'], ['_kits.id'], name='fk_kit_type_id', ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('_discounts')
# ### end Alembic commands ###

View File

@@ -49,12 +49,16 @@ def store_submission(ctx:Settings, base_submission:models.BasicSubmission) -> No
     base_submission.rsl_plate_num = typer.parsed_name
     for sample in base_submission.samples:
         logger.debug(f"Typer: {typer.submission_type}")
+        logger.debug(f"sample going in: {type(sample)}\n{sample.__dict__}")
         # Suuuuuper hacky way to be sure that the artic doesn't overwrite the ww plate in a ww sample
         # need something more elegant
-        if "_artic" not in typer.submission_type.lower():
+        if "_artic" not in typer.submission_type:
             sample.rsl_plate = base_submission
         else:
-            sample.artic_rsl_plate = base_submission
+            logger.debug(f"{sample.ww_sample_full_id} is an ARTIC sample.")
+            # base_submission.samples.remove(sample)
+            # sample.rsl_plate = sample.rsl_plate
+            # sample.artic_rsl_plate = base_submission
         logger.debug(f"Attempting to add sample: {sample.to_string()}")
         try:
             # ctx['database_session'].add(sample)
@@ -62,6 +66,7 @@ def store_submission(ctx:Settings, base_submission:models.BasicSubmission) -> No
         except (sqlite3.IntegrityError, sqlalchemy.exc.IntegrityError) as e:
             logger.debug(f"Hit an integrity error : {e}")
             continue
+        logger.debug(f"Here is the sample to be stored in the DB: {sample.__dict__}")
     # Add submission to submission table
     # ctx['database_session'].add(base_submission)
     ctx.database_session.add(base_submission)
@@ -650,7 +655,7 @@ def get_control_subtypes(ctx:Settings, type:str, mode:str) -> list[str]:
     # Only the first control of type is necessary since they all share subtypes
     try:
         outs = get_all_controls_by_type(ctx=ctx, con_type=type)[0]
-    except TypeError:
+    except (TypeError, IndexError):
         return []
     # Get analysis mode data as dict
     jsoner = json.loads(getattr(outs, mode))
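
For context, the widened except clause above matters because get_all_controls_by_type(ctx=ctx, con_type=type) can plausibly return None (not subscriptable, so TypeError) or an empty list (no element 0, so IndexError). A standalone sketch of the same guard, with a hypothetical helper standing in for that call:

def first_or_none(controls):
    # Return the first control if there is one, otherwise None.
    try:
        return controls[0]           # IndexError when the list is empty
    except (TypeError, IndexError):  # TypeError when controls is None
        return None

assert first_or_none(None) is None
assert first_or_none([]) is None
assert first_or_none(["control_1"]) == "control_1"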

View File

@@ -156,39 +156,3 @@ class BCSample(Base):
col=well_col,
positive=False)
# class ArticSample(Base):
# """
# base of artic sample
# """
# __tablename__ = "_artic_samples"
# id = Column(INTEGER, primary_key=True) #: primary key
# well_number = Column(String(8)) #: location on parent plate
# rsl_plate = relationship("WastewaterArtic", back_populates="samples") #: relationship to parent plate
# rsl_plate_id = Column(INTEGER, ForeignKey("_submissions.id", ondelete="SET NULL", name="fk_WWA_submission_id"))
# ww_sample_full_id = Column(String(64), nullable=False)
# lims_sample_id = Column(String(64), nullable=False)
# ct_1 = Column(FLOAT(2)) #: first ct value in column
# ct_2 = Column(FLOAT(2)) #: second ct value in column
# def to_string(self) -> str:
# """
# string representing sample object
# Returns:
# str: string representing location and sample id
# """
# return f"{self.well_number}: {self.ww_sample_full_id}"
# def to_sub_dict(self) -> dict:
# """
# gui friendly dictionary
# Returns:
# dict: well location and name (sample id, organism) NOTE: keys must sync with WWSample to_sub_dict above
# """
# return {
# "well": self.well_number,
# "name": self.ww_sample_full_id,
# }

View File

@@ -97,8 +97,11 @@ class SubmissionsSheet(QTableView):
         sets data in model
         """
         self.data = submissions_to_df(ctx=self.ctx)
+        try:
             self.data['id'] = self.data['id'].apply(str)
             self.data['id'] = self.data['id'].str.zfill(3)
+        except KeyError:
+            pass
         try:
             del self.data['samples']
         except KeyError:
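
For context, the KeyError guard added around the 'id' formatting above covers the case where submissions_to_df returns a frame without an 'id' column (for example, an empty database right after the rebuild). A minimal standalone sketch of that failure mode, using an empty DataFrame as a stand-in:

import pandas as pd

df = pd.DataFrame()  # stand-in for submissions_to_df() when no submissions exist
try:
    df['id'] = df['id'].apply(str)    # raises KeyError: there is no 'id' column
    df['id'] = df['id'].str.zfill(3)  # zero-pad ids to three digits
except KeyError:
    pass                              # leave the frame untouched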