Added Postgres support.

lwark
2024-07-25 08:41:44 -05:00
parent 54e1e55804
commit 4bc5e08ac6
32 changed files with 579 additions and 1030 deletions

View File

@@ -1,7 +1,12 @@
## 202407.04
- Added support for PostgreSQL databases (automatic backup not yet functional).
## 202407.02 ## 202407.02
- HTML template for 'About'. - HTML template for 'About'.
- More flexible custom parsers/writers due to custom info items. - More flexible custom parsers/writers due to custom info items.
- Vastly increased portability of the reporting functions.
## 202407.01 ## 202407.01

View File

@@ -1,7 +1,8 @@
## Startup: ## Startup:
1. Open the app using the shortcut in the Submissions folder. For example: L:\\Robotics Laboratory Support\\Submissions\\submissions_v122b.exe - Shortcut.lnk (Version may have changed). 1. Open the app using the shortcut in the Submissions folder: L:\Robotics Laboratory Support\Submissions\Submissions App.lnk.
1. Ignore the large black window of fast-scrolling text; it is there for debugging purposes. 1. Ignore the large black window of fast-scrolling text; it is there for debugging purposes.
2. The 'Submissions' tab should be open by default. 2. The 'Submissions' tab should be open by default.
3. Default settings (config.yml) will be copied to C:\Users\{YOUR USERNAME}\AppData\Local\submissions\config
## Logging in New Run: ## Logging in New Run:
*should fit 90% of usage cases* *should fit 90% of usage cases*
@@ -109,3 +110,16 @@ This is meant to import .xlsx files created from the Design & Analysis Software
1. Click "Monthly" -> "Link PCR Logs". 1. Click "Monthly" -> "Link PCR Logs".
2. Choose the .csv file taken from the PCR table runlogs folder. 2. Choose the .csv file taken from the PCR table runlogs folder.
## SETUP:
## Download:
1. Clone or download the repository from GitHub.
2. Enter the downloaded folder.
## Database:
1. Copy 'alembic_default.ini' to 'alembic.ini' in the same folder.
2. Open 'alembic.ini' and edit 'sqlalchemy.url' to the desired path of the database.
1. The default path is SQLite-based; PostgreSQL is also supported.
2. A PostgreSQL path takes the form postgresql://user:password@host:port/dbname (see the sketch below).
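
A minimal sketch of the two URL styles: the SQLite path is the default shipped in alembic_default.ini, while the PostgreSQL credentials, host, database name, and psycopg2 driver below are placeholders/assumptions to adapt to your environment.

```python
# Sketch only: candidate values for the sqlalchemy.url key in alembic.ini.
#   SQLite (default):  sqlite:///L:\Robotics Laboratory Support\Submissions\submissions.db
#   PostgreSQL:        postgresql+psycopg2://user:password@hostname:5432/submissions
from sqlalchemy import create_engine

# Optional sanity check that the chosen URL is reachable before editing alembic.ini
# (user, password, hostname, and database name here are placeholders).
engine = create_engine("postgresql+psycopg2://user:password@hostname:5432/submissions")
with engine.connect():
    print("Database URL is reachable.")
```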

View File

@@ -2,7 +2,7 @@
- The hardest part of this is going to be the sample parsing. I'm onto using the cell formulas in the plate map to suss out the location in the lookup table, but it could get a little recursive up in here. - The hardest part of this is going to be the sample parsing. I'm onto using the cell formulas in the plate map to suss out the location in the lookup table, but it could get a little recursive up in here.
- [ ] Create a default info return function. - [ ] Create a default info return function.
- [x] Parse comment from excel sheet. - [x] Parse comment from excel sheet.
- [ ] Make reporting better. - [x] Make reporting better.
- [x] Build master query method? - [x] Build master query method?
- Obviously there will need to be extensions, but I feel the attr method I have in Submissions could work. - Obviously there will need to be extensions, but I feel the attr method I have in Submissions could work.
- [x] Fix Artic RSLNamer - [x] Fix Artic RSLNamer
@@ -23,7 +23,7 @@
- [x] Merge BasicSubmission.find_subclasses and BasicSubmission.find_polymorphic_subclass - [x] Merge BasicSubmission.find_subclasses and BasicSubmission.find_polymorphic_subclass
- [x] Fix updating of Extraction Kit in submission form widget. - [x] Fix updating of Extraction Kit in submission form widget.
- [x] Fix cropping of gel image. - [x] Fix cropping of gel image.
- [ ] Create Tips ... *sigh*. - [x] Create Tips ... *sigh*.
- [x] Create platemap image from html for export to pdf. - [x] Create platemap image from html for export to pdf.
- [x] Move plate map maker to submission. - [x] Move plate map maker to submission.
- [x] Finish Equipment Parser (add in regex to id asset_number) - [x] Finish Equipment Parser (add in regex to id asset_number)

View File

@@ -1,8 +1,8 @@
"""First Commit """rebuild 20240723
Revision ID: e3f6770ef515 Revision ID: 0746f7e2c10e
Revises: Revises:
Create Date: 2024-01-22 14:01:02.958292 Create Date: 2024-07-23 14:08:37.436400
""" """
from alembic import op from alembic import op
@@ -10,7 +10,7 @@ import sqlalchemy as sa
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'e3f6770ef515' revision = '0746f7e2c10e'
down_revision = None down_revision = None
branch_labels = None branch_labels = None
depends_on = None depends_on = None
@@ -25,6 +25,12 @@ def upgrade() -> None:
sa.PrimaryKeyConstraint('id'), sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('submitter_id') sa.UniqueConstraint('submitter_id')
) )
op.create_table('_configitem',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('key', sa.String(length=32), nullable=True),
sa.Column('value', sa.JSON(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('_contact', op.create_table('_contact',
sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=True), sa.Column('name', sa.String(length=64), nullable=True),
@@ -66,9 +72,10 @@ def upgrade() -> None:
op.create_table('_process', op.create_table('_process',
sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=True), sa.Column('name', sa.String(length=64), nullable=True),
sa.PrimaryKeyConstraint('id') sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
) )
op.create_table('_reagenttype', op.create_table('_reagentrole',
sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=True), sa.Column('name', sa.String(length=64), nullable=True),
sa.Column('eol_ext', sa.Interval(), nullable=True), sa.Column('eol_ext', sa.Interval(), nullable=True),
@@ -78,10 +85,17 @@ def upgrade() -> None:
sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.String(length=128), nullable=True), sa.Column('name', sa.String(length=128), nullable=True),
sa.Column('info_map', sa.JSON(), nullable=True), sa.Column('info_map', sa.JSON(), nullable=True),
sa.Column('template_file', sa.BLOB(), nullable=True), sa.Column('defaults', sa.JSON(), nullable=True),
sa.Column('template_file', sa.LargeBinary(), nullable=True),
sa.Column('sample_map', sa.JSON(), nullable=True),
sa.PrimaryKeyConstraint('id'), sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name') sa.UniqueConstraint('name')
) )
op.create_table('_tiprole',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('_bacterialculturesample', op.create_table('_bacterialculturesample',
sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('organism', sa.String(length=64), nullable=True), sa.Column('organism', sa.String(length=64), nullable=True),
@@ -117,17 +131,17 @@ def upgrade() -> None:
sa.ForeignKeyConstraint(['equipmentrole_id'], ['_equipmentrole.id'], ), sa.ForeignKeyConstraint(['equipmentrole_id'], ['_equipmentrole.id'], ),
sa.ForeignKeyConstraint(['process_id'], ['_process.id'], ) sa.ForeignKeyConstraint(['process_id'], ['_process.id'], )
) )
op.create_table('_kittypereagenttypeassociation', op.create_table('_kittypereagentroleassociation',
sa.Column('reagent_types_id', sa.INTEGER(), nullable=False), sa.Column('reagent_roles_id', sa.INTEGER(), nullable=False),
sa.Column('kits_id', sa.INTEGER(), nullable=False), sa.Column('kits_id', sa.INTEGER(), nullable=False),
sa.Column('submission_type_id', sa.INTEGER(), nullable=False), sa.Column('submission_type_id', sa.INTEGER(), nullable=False),
sa.Column('uses', sa.JSON(), nullable=True), sa.Column('uses', sa.JSON(), nullable=True),
sa.Column('required', sa.INTEGER(), nullable=True), sa.Column('required', sa.INTEGER(), nullable=True),
sa.Column('last_used', sa.String(length=32), nullable=True), sa.Column('last_used', sa.String(length=32), nullable=True),
sa.ForeignKeyConstraint(['kits_id'], ['_kittype.id'], ), sa.ForeignKeyConstraint(['kits_id'], ['_kittype.id'], ),
sa.ForeignKeyConstraint(['reagent_types_id'], ['_reagenttype.id'], ), sa.ForeignKeyConstraint(['reagent_roles_id'], ['_reagentrole.id'], ),
sa.ForeignKeyConstraint(['submission_type_id'], ['_submissiontype.id'], ), sa.ForeignKeyConstraint(['submission_type_id'], ['_submissiontype.id'], ),
sa.PrimaryKeyConstraint('reagent_types_id', 'kits_id', 'submission_type_id') sa.PrimaryKeyConstraint('reagent_roles_id', 'kits_id', 'submission_type_id')
) )
op.create_table('_kittypes_processes', op.create_table('_kittypes_processes',
sa.Column('process_id', sa.INTEGER(), nullable=True), sa.Column('process_id', sa.INTEGER(), nullable=True),
@@ -141,13 +155,19 @@ def upgrade() -> None:
sa.ForeignKeyConstraint(['contact_id'], ['_contact.id'], ), sa.ForeignKeyConstraint(['contact_id'], ['_contact.id'], ),
sa.ForeignKeyConstraint(['org_id'], ['_organization.id'], ) sa.ForeignKeyConstraint(['org_id'], ['_organization.id'], )
) )
op.create_table('_process_tiprole',
sa.Column('process_id', sa.INTEGER(), nullable=True),
sa.Column('tiprole_id', sa.INTEGER(), nullable=True),
sa.ForeignKeyConstraint(['process_id'], ['_process.id'], ),
sa.ForeignKeyConstraint(['tiprole_id'], ['_tiprole.id'], )
)
op.create_table('_reagent', op.create_table('_reagent',
sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('type_id', sa.INTEGER(), nullable=True), sa.Column('role_id', sa.INTEGER(), nullable=True),
sa.Column('name', sa.String(length=64), nullable=True), sa.Column('name', sa.String(length=64), nullable=True),
sa.Column('lot', sa.String(length=64), nullable=True), sa.Column('lot', sa.String(length=64), nullable=True),
sa.Column('expiry', sa.TIMESTAMP(), nullable=True), sa.Column('expiry', sa.TIMESTAMP(), nullable=True),
sa.ForeignKeyConstraint(['type_id'], ['_reagenttype.id'], name='fk_reagent_type_id', ondelete='SET NULL'), sa.ForeignKeyConstraint(['role_id'], ['_reagentrole.id'], name='fk_reagent_role_id', ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id') sa.PrimaryKeyConstraint('id')
) )
op.create_table('_submissiontypeequipmentroleassociation', op.create_table('_submissiontypeequipmentroleassociation',
@@ -175,6 +195,23 @@ def upgrade() -> None:
sa.ForeignKeyConstraint(['equipmentroles_id'], ['_submissiontype.id'], ), sa.ForeignKeyConstraint(['equipmentroles_id'], ['_submissiontype.id'], ),
sa.ForeignKeyConstraint(['process_id'], ['_process.id'], ) sa.ForeignKeyConstraint(['process_id'], ['_process.id'], )
) )
op.create_table('_submissiontypetiproleassociation',
sa.Column('tiprole_id', sa.INTEGER(), nullable=False),
sa.Column('submissiontype_id', sa.INTEGER(), nullable=False),
sa.Column('uses', sa.JSON(), nullable=True),
sa.Column('static', sa.INTEGER(), nullable=True),
sa.ForeignKeyConstraint(['submissiontype_id'], ['_submissiontype.id'], ),
sa.ForeignKeyConstraint(['tiprole_id'], ['_tiprole.id'], ),
sa.PrimaryKeyConstraint('tiprole_id', 'submissiontype_id')
)
op.create_table('_tips',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('role_id', sa.INTEGER(), nullable=True),
sa.Column('name', sa.String(length=64), nullable=True),
sa.Column('lot', sa.String(length=64), nullable=True),
sa.ForeignKeyConstraint(['role_id'], ['_tiprole.id'], name='fk_tip_role_id', ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id')
)
op.create_table('_wastewatersample', op.create_table('_wastewatersample',
sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('ww_processing_num', sa.String(length=64), nullable=True), sa.Column('ww_processing_num', sa.String(length=64), nullable=True),
@@ -197,13 +234,15 @@ def upgrade() -> None:
sa.Column('extraction_kit_id', sa.INTEGER(), nullable=True), sa.Column('extraction_kit_id', sa.INTEGER(), nullable=True),
sa.Column('submission_type_name', sa.String(), nullable=True), sa.Column('submission_type_name', sa.String(), nullable=True),
sa.Column('technician', sa.String(length=64), nullable=True), sa.Column('technician', sa.String(length=64), nullable=True),
sa.Column('reagents_id', sa.String(), nullable=True), sa.Column('reagents_id', sa.INTEGER(), nullable=True),
sa.Column('extraction_info', sa.JSON(), nullable=True), sa.Column('extraction_info', sa.JSON(), nullable=True),
sa.Column('pcr_info', sa.JSON(), nullable=True),
sa.Column('run_cost', sa.FLOAT(precision=2), nullable=True), sa.Column('run_cost', sa.FLOAT(precision=2), nullable=True),
sa.Column('uploaded_by', sa.String(length=32), nullable=True), sa.Column('signed_by', sa.String(length=32), nullable=True),
sa.Column('comment', sa.JSON(), nullable=True), sa.Column('comment', sa.JSON(), nullable=True),
sa.Column('submission_category', sa.String(length=64), nullable=True), sa.Column('submission_category', sa.String(length=64), nullable=True),
sa.Column('cost_centre', sa.String(length=64), nullable=True),
sa.Column('contact_id', sa.INTEGER(), nullable=True),
sa.ForeignKeyConstraint(['contact_id'], ['_contact.id'], name='fk_BS_contact_id', ondelete='SET NULL'),
sa.ForeignKeyConstraint(['extraction_kit_id'], ['_kittype.id'], name='fk_BS_extkit_id', ondelete='SET NULL'), sa.ForeignKeyConstraint(['extraction_kit_id'], ['_kittype.id'], name='fk_BS_extkit_id', ondelete='SET NULL'),
sa.ForeignKeyConstraint(['reagents_id'], ['_reagent.id'], name='fk_BS_reagents_id', ondelete='SET NULL'), sa.ForeignKeyConstraint(['reagents_id'], ['_reagent.id'], name='fk_BS_reagents_id', ondelete='SET NULL'),
sa.ForeignKeyConstraint(['submission_type_name'], ['_submissiontype.name'], name='fk_BS_subtype_name', ondelete='SET NULL'), sa.ForeignKeyConstraint(['submission_type_name'], ['_submissiontype.name'], name='fk_BS_subtype_name', ondelete='SET NULL'),
@@ -212,11 +251,23 @@ def upgrade() -> None:
sa.UniqueConstraint('rsl_plate_num'), sa.UniqueConstraint('rsl_plate_num'),
sa.UniqueConstraint('submitter_plate_num') sa.UniqueConstraint('submitter_plate_num')
) )
op.create_table('_reagenttypes_reagents', op.create_table('_equipment_tips',
sa.Column('equipment_id', sa.INTEGER(), nullable=True),
sa.Column('tips_id', sa.INTEGER(), nullable=True),
sa.ForeignKeyConstraint(['equipment_id'], ['_equipment.id'], ),
sa.ForeignKeyConstraint(['tips_id'], ['_tips.id'], )
)
op.create_table('_reagentroles_reagents',
sa.Column('reagent_id', sa.INTEGER(), nullable=True), sa.Column('reagent_id', sa.INTEGER(), nullable=True),
sa.Column('reagenttype_id', sa.INTEGER(), nullable=True), sa.Column('reagentrole_id', sa.INTEGER(), nullable=True),
sa.ForeignKeyConstraint(['reagent_id'], ['_reagent.id'], ), sa.ForeignKeyConstraint(['reagent_id'], ['_reagent.id'], ),
sa.ForeignKeyConstraint(['reagenttype_id'], ['_reagenttype.id'], ) sa.ForeignKeyConstraint(['reagentrole_id'], ['_reagentrole.id'], )
)
op.create_table('_tiproles_tips',
sa.Column('tiprole_id', sa.INTEGER(), nullable=True),
sa.Column('tips_id', sa.INTEGER(), nullable=True),
sa.ForeignKeyConstraint(['tiprole_id'], ['_tiprole.id'], ),
sa.ForeignKeyConstraint(['tips_id'], ['_tips.id'], )
) )
op.create_table('_bacterialculture', op.create_table('_bacterialculture',
sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('id', sa.INTEGER(), nullable=False),
@@ -225,7 +276,7 @@ def upgrade() -> None:
) )
op.create_table('_control', op.create_table('_control',
sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('parent_id', sa.String(), nullable=True), sa.Column('parent_id', sa.INTEGER(), nullable=True),
sa.Column('name', sa.String(length=255), nullable=True), sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('submitted_date', sa.TIMESTAMP(), nullable=True), sa.Column('submitted_date', sa.TIMESTAMP(), nullable=True),
sa.Column('contains', sa.JSON(), nullable=True), sa.Column('contains', sa.JSON(), nullable=True),
@@ -264,19 +315,31 @@ def upgrade() -> None:
sa.PrimaryKeyConstraint('reagent_id', 'submission_id') sa.PrimaryKeyConstraint('reagent_id', 'submission_id')
) )
op.create_table('_submissionsampleassociation', op.create_table('_submissionsampleassociation',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('sample_id', sa.INTEGER(), nullable=False), sa.Column('sample_id', sa.INTEGER(), nullable=False),
sa.Column('submission_id', sa.INTEGER(), nullable=False), sa.Column('submission_id', sa.INTEGER(), nullable=False),
sa.Column('row', sa.INTEGER(), nullable=False), sa.Column('row', sa.INTEGER(), nullable=False),
sa.Column('column', sa.INTEGER(), nullable=False), sa.Column('column', sa.INTEGER(), nullable=False),
sa.Column('submission_rank', sa.INTEGER(), nullable=False),
sa.Column('base_sub_type', sa.String(), nullable=True), sa.Column('base_sub_type', sa.String(), nullable=True),
sa.ForeignKeyConstraint(['sample_id'], ['_basicsample.id'], ), sa.ForeignKeyConstraint(['sample_id'], ['_basicsample.id'], ),
sa.ForeignKeyConstraint(['submission_id'], ['_basicsubmission.id'], ), sa.ForeignKeyConstraint(['submission_id'], ['_basicsubmission.id'], ),
sa.PrimaryKeyConstraint('submission_id', 'row', 'column') sa.PrimaryKeyConstraint('submission_id', 'row', 'column'),
sa.UniqueConstraint('id')
)
op.create_table('_submissiontipsassociation',
sa.Column('tip_id', sa.INTEGER(), nullable=False),
sa.Column('submission_id', sa.INTEGER(), nullable=False),
sa.Column('role_name', sa.String(length=32), nullable=False),
sa.ForeignKeyConstraint(['submission_id'], ['_basicsubmission.id'], ),
sa.ForeignKeyConstraint(['tip_id'], ['_tips.id'], ),
sa.PrimaryKeyConstraint('tip_id', 'submission_id', 'role_name')
) )
op.create_table('_wastewater', op.create_table('_wastewater',
sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('ext_technician', sa.String(length=64), nullable=True), sa.Column('ext_technician', sa.String(length=64), nullable=True),
sa.Column('pcr_technician', sa.String(length=64), nullable=True), sa.Column('pcr_technician', sa.String(length=64), nullable=True),
sa.Column('pcr_info', sa.JSON(), nullable=True),
sa.ForeignKeyConstraint(['id'], ['_basicsubmission.id'], ), sa.ForeignKeyConstraint(['id'], ['_basicsubmission.id'], ),
sa.PrimaryKeyConstraint('id') sa.PrimaryKeyConstraint('id')
) )
@@ -284,20 +347,36 @@ def upgrade() -> None:
sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('artic_technician', sa.String(length=64), nullable=True), sa.Column('artic_technician', sa.String(length=64), nullable=True),
sa.Column('dna_core_submission_number', sa.String(length=64), nullable=True), sa.Column('dna_core_submission_number', sa.String(length=64), nullable=True),
sa.Column('pcr_info', sa.JSON(), nullable=True),
sa.Column('gel_image', sa.String(length=64), nullable=True),
sa.Column('gel_info', sa.JSON(), nullable=True),
sa.Column('gel_controls', sa.JSON(), nullable=True),
sa.Column('source_plates', sa.JSON(), nullable=True),
sa.Column('artic_date', sa.TIMESTAMP(), nullable=True),
sa.Column('ngs_date', sa.TIMESTAMP(), nullable=True),
sa.Column('gel_date', sa.TIMESTAMP(), nullable=True),
sa.Column('gel_barcode', sa.String(length=16), nullable=True),
sa.ForeignKeyConstraint(['id'], ['_basicsubmission.id'], ), sa.ForeignKeyConstraint(['id'], ['_basicsubmission.id'], ),
sa.PrimaryKeyConstraint('id') sa.PrimaryKeyConstraint('id')
) )
op.create_table('_wastewaterarticassociation',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('source_plate', sa.String(length=16), nullable=True),
sa.Column('source_plate_number', sa.INTEGER(), nullable=True),
sa.Column('source_well', sa.String(length=8), nullable=True),
sa.Column('ct', sa.String(length=8), nullable=True),
sa.ForeignKeyConstraint(['id'], ['_submissionsampleassociation.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('_wastewaterassociation', op.create_table('_wastewaterassociation',
sa.Column('sample_id', sa.INTEGER(), nullable=False), sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('submission_id', sa.INTEGER(), nullable=False),
sa.Column('ct_n1', sa.FLOAT(precision=2), nullable=True), sa.Column('ct_n1', sa.FLOAT(precision=2), nullable=True),
sa.Column('ct_n2', sa.FLOAT(precision=2), nullable=True), sa.Column('ct_n2', sa.FLOAT(precision=2), nullable=True),
sa.Column('n1_status', sa.String(length=32), nullable=True), sa.Column('n1_status', sa.String(length=32), nullable=True),
sa.Column('n2_status', sa.String(length=32), nullable=True), sa.Column('n2_status', sa.String(length=32), nullable=True),
sa.Column('pcr_results', sa.JSON(), nullable=True), sa.Column('pcr_results', sa.JSON(), nullable=True),
sa.ForeignKeyConstraint(['sample_id'], ['_submissionsampleassociation.sample_id'], ), sa.ForeignKeyConstraint(['id'], ['_submissionsampleassociation.id'], ),
sa.ForeignKeyConstraint(['submission_id'], ['_submissionsampleassociation.submission_id'], ), sa.PrimaryKeyConstraint('id')
sa.PrimaryKeyConstraint('sample_id', 'submission_id')
) )
# ### end Alembic commands ### # ### end Alembic commands ###
@@ -305,30 +384,38 @@ def upgrade() -> None:
def downgrade() -> None: def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
op.drop_table('_wastewaterassociation') op.drop_table('_wastewaterassociation')
op.drop_table('_wastewaterarticassociation')
op.drop_table('_wastewaterartic') op.drop_table('_wastewaterartic')
op.drop_table('_wastewater') op.drop_table('_wastewater')
op.drop_table('_submissiontipsassociation')
op.drop_table('_submissionsampleassociation') op.drop_table('_submissionsampleassociation')
op.drop_table('_submissionreagentassociation') op.drop_table('_submissionreagentassociation')
op.drop_table('_submissionequipmentassociation') op.drop_table('_submissionequipmentassociation')
op.drop_table('_control') op.drop_table('_control')
op.drop_table('_bacterialculture') op.drop_table('_bacterialculture')
op.drop_table('_reagenttypes_reagents') op.drop_table('_tiproles_tips')
op.drop_table('_reagentroles_reagents')
op.drop_table('_equipment_tips')
op.drop_table('_basicsubmission') op.drop_table('_basicsubmission')
op.drop_table('_wastewatersample') op.drop_table('_wastewatersample')
op.drop_table('_tips')
op.drop_table('_submissiontypetiproleassociation')
op.drop_table('_submissiontypes_processes') op.drop_table('_submissiontypes_processes')
op.drop_table('_submissiontypekittypeassociation') op.drop_table('_submissiontypekittypeassociation')
op.drop_table('_submissiontypeequipmentroleassociation') op.drop_table('_submissiontypeequipmentroleassociation')
op.drop_table('_reagent') op.drop_table('_reagent')
op.drop_table('_process_tiprole')
op.drop_table('_orgs_contacts') op.drop_table('_orgs_contacts')
op.drop_table('_kittypes_processes') op.drop_table('_kittypes_processes')
op.drop_table('_kittypereagenttypeassociation') op.drop_table('_kittypereagentroleassociation')
op.drop_table('_equipmentroles_processes') op.drop_table('_equipmentroles_processes')
op.drop_table('_equipmentroles_equipment') op.drop_table('_equipmentroles_equipment')
op.drop_table('_equipment_processes') op.drop_table('_equipment_processes')
op.drop_table('_discount') op.drop_table('_discount')
op.drop_table('_bacterialculturesample') op.drop_table('_bacterialculturesample')
op.drop_table('_tiprole')
op.drop_table('_submissiontype') op.drop_table('_submissiontype')
op.drop_table('_reagenttype') op.drop_table('_reagentrole')
op.drop_table('_process') op.drop_table('_process')
op.drop_table('_organization') op.drop_table('_organization')
op.drop_table('_kittype') op.drop_table('_kittype')
@@ -336,5 +423,6 @@ def downgrade() -> None:
op.drop_table('_equipment') op.drop_table('_equipment')
op.drop_table('_controltype') op.drop_table('_controltype')
op.drop_table('_contact') op.drop_table('_contact')
op.drop_table('_configitem')
op.drop_table('_basicsample') op.drop_table('_basicsample')
# ### end Alembic commands ### # ### end Alembic commands ###

View File

@@ -1,54 +0,0 @@
"""Made WastewaterArticAssociation
Revision ID: 16b20d4368e6
Revises: d2b094cfa308
Create Date: 2024-06-13 12:16:48.385516
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '16b20d4368e6'
down_revision = 'd2b094cfa308'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('_wastewaterarticassociation',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('source_plate', sa.String(length=16), nullable=True),
sa.Column('source_plate_number', sa.INTEGER(), nullable=True),
sa.Column('source_well', sa.String(length=8), nullable=True),
sa.Column('ct', sa.String(length=8), nullable=True),
sa.ForeignKeyConstraint(['id'], ['_submissionsampleassociation.id'], ),
sa.PrimaryKeyConstraint('id')
)
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.create_unique_constraint(None, ['id'])
#
# with op.batch_alter_table('_submissiontipsassociation', schema=None) as batch_op:
# batch_op.alter_column('role_name',
# existing_type=sa.INTEGER(),
# type_=sa.String(length=32),
# existing_nullable=True)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissiontipsassociation', schema=None) as batch_op:
batch_op.alter_column('role_name',
existing_type=sa.String(length=32),
type_=sa.INTEGER(),
existing_nullable=True)
with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
batch_op.drop_constraint(None, type_='unique')
op.drop_table('_wastewaterarticassociation')
# ### end Alembic commands ###

View File

@@ -1,51 +0,0 @@
"""adding cost centre storage to basicsubmission
Revision ID: 6d2a357860ef
Revises: e6647bd661d9
Create Date: 2024-04-24 13:01:14.923814
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '6d2a357860ef'
down_revision = 'e6647bd661d9'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# op.drop_table('_alembic_tmp__submissionsampleassociation')
with op.batch_alter_table('_basicsubmission', schema=None) as batch_op:
batch_op.add_column(sa.Column('cost_centre', sa.String(length=64), nullable=True))
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.create_unique_constraint(None, ['id'])
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.drop_constraint(None, type_='unique')
with op.batch_alter_table('_basicsubmission', schema=None) as batch_op:
batch_op.drop_column('used_cost_centre')
op.create_table('_alembic_tmp__submissionsampleassociation',
sa.Column('sample_id', sa.INTEGER(), nullable=False),
sa.Column('submission_id', sa.INTEGER(), nullable=False),
sa.Column('row', sa.INTEGER(), nullable=False),
sa.Column('column', sa.INTEGER(), nullable=False),
sa.Column('base_sub_type', sa.VARCHAR(), nullable=True),
sa.Column('id', sa.INTEGER(), server_default=sa.text('1'), nullable=False),
sa.ForeignKeyConstraint(['sample_id'], ['_basicsample.id'], ),
sa.ForeignKeyConstraint(['submission_id'], ['_basicsubmission.id'], ),
sa.PrimaryKeyConstraint('submission_id', 'row', 'column'),
sa.UniqueConstraint('id', name='ssa_id')
)
# ### end Alembic commands ###

View File

@@ -1,34 +0,0 @@
"""adding gel image, info. Again
Revision ID: 70426df72f80
Revises: c4201b0ea9fe
Create Date: 2024-01-30 08:47:22.809841
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '70426df72f80'
down_revision = 'c4201b0ea9fe'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_wastewaterartic', schema=None) as batch_op:
batch_op.add_column(sa.Column('gel_image', sa.String(length=64), nullable=True))
batch_op.add_column(sa.Column('gel_info', sa.JSON(), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_wastewaterartic', schema=None) as batch_op:
batch_op.drop_column('gel_info')
batch_op.drop_column('gel_image')
# ### end Alembic commands ###

View File

@@ -1,38 +0,0 @@
"""tweaking submission sample association
Revision ID: 70d5a751f579
Revises: 97392dda5436
Create Date: 2024-01-25 13:39:34.163501
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '70d5a751f579'
down_revision = '97392dda5436'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
batch_op.alter_column('id',
existing_type=sa.INTEGER(),
nullable=False)
batch_op.create_unique_constraint("ssa_id_unique", ['id'])
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
batch_op.drop_constraint("ssa_id_unique", type_='unique')
batch_op.alter_column('id',
existing_type=sa.INTEGER(),
nullable=False)
# ### end Alembic commands ###

View File

@@ -1,72 +0,0 @@
"""Adding fields to Artic
Revision ID: 861b52a2004e
Revises: b744e8a452fd
Create Date: 2024-06-05 13:35:19.012337
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import sqlite
# revision identifiers, used by Alembic.
revision = '861b52a2004e'
down_revision = 'b744e8a452fd'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('_alembic_tmp__basicsubmission')
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.create_unique_constraint(None, ['id'])
with op.batch_alter_table('_wastewaterartic', schema=None) as batch_op:
batch_op.add_column(sa.Column('artic_date', sa.TIMESTAMP(), nullable=True))
batch_op.add_column(sa.Column('ngs_date', sa.TIMESTAMP(), nullable=True))
batch_op.add_column(sa.Column('gel_date', sa.TIMESTAMP(), nullable=True))
batch_op.add_column(sa.Column('gel_barcode', sa.String(length=16), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_wastewaterartic', schema=None) as batch_op:
batch_op.drop_column('gel_barcode')
batch_op.drop_column('gel_date')
batch_op.drop_column('ngs_date')
batch_op.drop_column('artic_date')
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.drop_constraint(None, type_='unique')
op.create_table('_alembic_tmp__basicsubmission',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('rsl_plate_num', sa.VARCHAR(length=32), nullable=False),
sa.Column('submitter_plate_num', sa.VARCHAR(length=127), nullable=True),
sa.Column('submitted_date', sa.TIMESTAMP(), nullable=True),
sa.Column('submitting_lab_id', sa.INTEGER(), nullable=True),
sa.Column('sample_count', sa.INTEGER(), nullable=True),
sa.Column('extraction_kit_id', sa.INTEGER(), nullable=True),
sa.Column('submission_type_name', sa.VARCHAR(), nullable=True),
sa.Column('technician', sa.VARCHAR(length=64), nullable=True),
sa.Column('reagents_id', sa.VARCHAR(), nullable=True),
sa.Column('extraction_info', sqlite.JSON(), nullable=True),
sa.Column('run_cost', sa.FLOAT(), nullable=True),
sa.Column('signed_by', sa.VARCHAR(length=32), nullable=True),
sa.Column('comment', sqlite.JSON(), nullable=True),
sa.Column('submission_category', sa.VARCHAR(length=64), nullable=True),
sa.Column('cost_centre', sa.VARCHAR(length=64), nullable=True),
sa.Column('contact_id', sa.INTEGER(), nullable=True),
sa.ForeignKeyConstraint(['contact_id'], ['_contact.id'], ondelete='SET NULL'),
sa.ForeignKeyConstraint(['extraction_kit_id'], ['_kittype.id'], ondelete='SET NULL'),
sa.ForeignKeyConstraint(['reagents_id'], ['_reagent.id'], ondelete='SET NULL'),
sa.ForeignKeyConstraint(['submission_type_name'], ['_submissiontype.name'], ondelete='SET NULL'),
sa.ForeignKeyConstraint(['submitting_lab_id'], ['_organization.id'], ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('rsl_plate_num'),
sa.UniqueConstraint('submitter_plate_num')
)
# ### end Alembic commands ###

View File

@@ -1,34 +0,0 @@
"""Adding ranking to SubmissionSampleAssociation
Revision ID: 874af342c82c
Revises: a04c25fd9138
Create Date: 2024-05-03 15:08:20.194275
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '874af342c82c'
down_revision = 'a04c25fd9138'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
batch_op.add_column(sa.Column('submission_rank', sa.INTEGER(), nullable=False, default=1))
# batch_op.create_unique_constraint(None, ['id'])
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.drop_constraint(None, type_='unique')
batch_op.drop_column('submission_rank')
# ### end Alembic commands ###

View File

@@ -1,50 +0,0 @@
"""Update to submissionsampleassociation
Revision ID: 97392dda5436
Revises: e3f6770ef515
Create Date: 2024-01-25 09:10:04.384194
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '97392dda5436'
down_revision = 'e3f6770ef515'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
batch_op.add_column(sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=True))
batch_op.create_unique_constraint("submissionsampleassociation_id", ['id'])
with op.batch_alter_table('_wastewaterassociation', schema=None) as batch_op:
batch_op.add_column(sa.Column('id', sa.INTEGER(), nullable=False))
# batch_op.drop_constraint("sample_id", type_='foreignkey')
# batch_op.drop_constraint("submission_id", type_='foreignkey')
batch_op.create_foreign_key("fk_subsampassoc_id", '_submissionsampleassociation', ['id'], ['id'])
# batch_op.drop_column('sample_id')
# batch_op.drop_column('submission_id')
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_wastewaterassociation', schema=None) as batch_op:
batch_op.add_column(sa.Column('submission_id', sa.INTEGER(), nullable=False))
batch_op.add_column(sa.Column('sample_id', sa.INTEGER(), nullable=False))
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.create_foreign_key(None, '_submissionsampleassociation', ['submission_id'], ['submission_id'])
batch_op.create_foreign_key(None, '_submissionsampleassociation', ['sample_id'], ['sample_id'])
batch_op.drop_column('id')
with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
batch_op.drop_constraint(None, type_='unique')
batch_op.drop_column('id')
# ### end Alembic commands ###

View File

@@ -1,38 +0,0 @@
"""splitting off sample info in SubmissionType
Revision ID: a04c25fd9138
Revises: 6d2a357860ef
Create Date: 2024-05-01 09:11:44.957532
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a04c25fd9138'
down_revision = '6d2a357860ef'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.create_unique_constraint(None, ['id'])
with op.batch_alter_table('_submissiontype', schema=None) as batch_op:
batch_op.add_column(sa.Column('sample_map', sa.JSON(), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissiontype', schema=None) as batch_op:
batch_op.drop_column('sample_map')
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.drop_constraint(None, type_='unique')
# ### end Alembic commands ###

View File

@@ -1,40 +0,0 @@
"""Attaching contact to submission
Revision ID: b744e8a452fd
Revises: f829a8ab292f
Create Date: 2024-06-04 14:21:38.163431
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b744e8a452fd'
down_revision = 'f829a8ab292f'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_basicsubmission', schema=None) as batch_op:
batch_op.add_column(sa.Column('contact_id', sa.INTEGER(), nullable=True))
batch_op.create_foreign_key('fk_BS_contact_id', '_contact', ['contact_id'], ['id'], ondelete='SET NULL')
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.create_unique_constraint(None, ['id'])
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.drop_constraint(None, type_='unique')
with op.batch_alter_table('_basicsubmission', schema=None) as batch_op:
batch_op.drop_constraint('fk_BS_contact_id', type_='foreignkey')
batch_op.drop_column('contact_id')
# ### end Alembic commands ###

View File

@@ -1,42 +0,0 @@
"""adding gel image, info
Revision ID: c4201b0ea9fe
Revises: 70d5a751f579
Create Date: 2024-01-30 08:42:03.928933
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c4201b0ea9fe'
down_revision = '70d5a751f579'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
batch_op.create_unique_constraint("unique_ssa_id", ['id'])
with op.batch_alter_table('_wastewaterassociation', schema=None) as batch_op:
batch_op.alter_column('id',
existing_type=sa.INTEGER(),
nullable=False)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_wastewaterassociation', schema=None) as batch_op:
batch_op.alter_column('id',
existing_type=sa.INTEGER(),
nullable=True)
with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
batch_op.drop_constraint("unique_ssa_id", type_='unique')
# ### end Alembic commands ###

View File

@@ -1,95 +0,0 @@
"""Adding tips
Revision ID: d2b094cfa308
Revises: 861b52a2004e
Create Date: 2024-06-11 13:16:57.319769
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd2b094cfa308'
down_revision = '861b52a2004e'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('_tiprole',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('_process_tiprole',
sa.Column('process_id', sa.INTEGER(), nullable=True),
sa.Column('tiprole_id', sa.INTEGER(), nullable=True),
sa.ForeignKeyConstraint(['process_id'], ['_process.id'], ),
sa.ForeignKeyConstraint(['tiprole_id'], ['_tiprole.id'], )
)
op.create_table('_submissiontypetiproleassociation',
sa.Column('tiprole_id', sa.INTEGER(), nullable=False),
sa.Column('submissiontype_id', sa.INTEGER(), nullable=False),
sa.Column('uses', sa.JSON(), nullable=True),
sa.Column('static', sa.INTEGER(), nullable=True),
sa.ForeignKeyConstraint(['submissiontype_id'], ['_submissiontype.id'], ),
sa.ForeignKeyConstraint(['tiprole_id'], ['_tiprole.id'], ),
sa.PrimaryKeyConstraint('tiprole_id', 'submissiontype_id')
)
op.create_table('_tips',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('role_id', sa.INTEGER(), nullable=True),
sa.Column('name', sa.String(length=64), nullable=True),
sa.Column('lot', sa.String(length=64), nullable=True),
sa.ForeignKeyConstraint(['role_id'], ['_tiprole.id'], name='fk_tip_role_id', ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id')
)
op.create_table('_equipment_tips',
sa.Column('equipment_id', sa.INTEGER(), nullable=True),
sa.Column('tips_id', sa.INTEGER(), nullable=True),
sa.ForeignKeyConstraint(['equipment_id'], ['_equipment.id'], ),
sa.ForeignKeyConstraint(['tips_id'], ['_tips.id'], )
)
op.create_table('_submissiontipsassociation',
sa.Column('tip_id', sa.INTEGER(), nullable=False),
sa.Column('submission_id', sa.INTEGER(), nullable=False),
sa.Column('role_name', sa.String(), nullable=False),
# sa.ForeignKeyConstraint(['role_name'], ['_tiprole.name'], ),
sa.ForeignKeyConstraint(['submission_id'], ['_basicsubmission.id'], ),
sa.ForeignKeyConstraint(['tip_id'], ['_tips.id'], ),
sa.PrimaryKeyConstraint('tip_id', 'submission_id', 'role_name')
)
op.create_table('_tiproles_tips',
sa.Column('tiprole_id', sa.INTEGER(), nullable=True),
sa.Column('tips_id', sa.INTEGER(), nullable=True),
sa.ForeignKeyConstraint(['tiprole_id'], ['_tiprole.id'], ),
sa.ForeignKeyConstraint(['tips_id'], ['_tips.id'], )
)
# op.create_table('_submissions_tips',
# sa.Column('submission_id', sa.INTEGER(), nullable=True),
# sa.Column('tips_id', sa.INTEGER(), nullable=True),
# sa.ForeignKeyConstraint(['submission_id'], ['_basicsubmission.id'], ),
# sa.ForeignKeyConstraint(['tips_id'], ['_tips.id'], )
# )
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.create_unique_constraint("unique_ssa_id", ['id'])
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.drop_constraint(None, type_='unique')
op.drop_table('_submissions_tips')
op.drop_table('_tiproles_tips')
op.drop_table('_submissiontipassociation')
op.drop_table('_equipment_tips')
op.drop_table('_tips')
op.drop_table('_submissiontypetiproleassociation')
op.drop_table('_process_tiprole')
op.drop_table('_tiprole')
# ### end Alembic commands ###

View File

@@ -1,33 +0,0 @@
"""adding default info to submissiontype
Revision ID: e6647bd661d9
Revises: f18487b41f45
Create Date: 2024-04-22 12:02:21.512781
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e6647bd661d9'
down_revision = 'f18487b41f45'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissiontype', schema=None) as batch_op:
batch_op.add_column(sa.Column('defaults', sa.JSON(), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_submissiontype', schema=None) as batch_op:
batch_op.drop_column('defaults')
# ### end Alembic commands ###

View File

@@ -1,51 +0,0 @@
"""adding source plates to Artic submission...again
Revision ID: f18487b41f45
Revises: fabf697c721d
Create Date: 2024-04-17 10:42:30.508213
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f18487b41f45'
down_revision = 'fabf697c721d'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# op.drop_table('_alembic_tmp__submissionsampleassociation')
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.create_unique_constraint("ssa_id", ['id'])
with op.batch_alter_table('_wastewaterartic', schema=None) as batch_op:
batch_op.add_column(sa.Column('source_plates', sa.JSON(), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_wastewaterartic', schema=None) as batch_op:
batch_op.drop_column('source_plates')
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.drop_constraint(None, type_='unique')
# op.create_table('_alembic_tmp__submissionsampleassociation',
# sa.Column('sample_id', sa.INTEGER(), nullable=False),
# sa.Column('submission_id', sa.INTEGER(), nullable=False),
# sa.Column('row', sa.INTEGER(), nullable=False),
# sa.Column('column', sa.INTEGER(), nullable=False),
# sa.Column('base_sub_type', sa.VARCHAR(), nullable=True),
# sa.Column('id', sa.INTEGER(), server_default=sa.text('1'), nullable=False),
# sa.ForeignKeyConstraint(['sample_id'], ['_basicsample.id'], ),
# sa.ForeignKeyConstraint(['submission_id'], ['_basicsubmission.id'], ),
# sa.PrimaryKeyConstraint('submission_id', 'row', 'column'),
# sa.UniqueConstraint('id', name='ssa_unique')
# )
# ### end Alembic commands ###

View File

@@ -1,39 +0,0 @@
"""Adding configitems to db
Revision ID: f829a8ab292f
Revises: 874af342c82c
Create Date: 2024-05-15 14:03:11.767480
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f829a8ab292f'
down_revision = '874af342c82c'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('_configitem',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('key', sa.String(), nullable=True),
sa.Column('value', sa.JSON(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.create_unique_constraint(None, ['id'])
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.drop_constraint(None, type_='unique')
op.drop_table('_configitem')
# ### end Alembic commands ###

View File

@@ -1,48 +0,0 @@
"""adding source plates to Artic submission
Revision ID: fabf697c721d
Revises: 70426df72f80
Create Date: 2024-03-06 11:01:34.794411
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'fabf697c721d'
down_revision = '70426df72f80'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.create_unique_constraint("ssa_unique", ['id'])
with op.batch_alter_table('_wastewaterartic', schema=None) as batch_op:
batch_op.add_column(sa.Column('source_plates', sa.JSON(), nullable=True))
with op.batch_alter_table('_wastewaterassociation', schema=None) as batch_op:
batch_op.alter_column('id',
existing_type=sa.INTEGER(),
nullable=False)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('_wastewaterassociation', schema=None) as batch_op:
batch_op.alter_column('id',
existing_type=sa.INTEGER(),
nullable=True)
with op.batch_alter_table('_wastewaterartic', schema=None) as batch_op:
batch_op.drop_column('source_plates')
# with op.batch_alter_table('_submissionsampleassociation', schema=None) as batch_op:
# batch_op.drop_constraint("ssa_unique", type_='unique')
# ### end Alembic commands ###

alembic_default.ini
View File

@@ -0,0 +1,104 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = ./alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to ./alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:./alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = sqlite:///L:\Robotics Laboratory Support\Submissions\submissions.db
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
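
With alembic.ini in place, the schema above can be applied with Alembic's standard upgrade command from the project folder; a sketch of the equivalent Python API call (at the time of this commit, "head" resolves to the rebuilt baseline revision 0746f7e2c10e):

```python
from alembic import command
from alembic.config import Config

# Load the project's alembic.ini and bring the database up to the latest revision.
cfg = Config("alembic.ini")
command.upgrade(cfg, "head")
```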

View File

@@ -17,7 +17,7 @@ def set_sqlite_pragma(dbapi_connection, connection_record):
connection_record (_type_): _description_ connection_record (_type_): _description_
""" """
cursor = dbapi_connection.cursor() cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON") # cursor.execute("PRAGMA foreign_keys=ON")
cursor.close() cursor.close()
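
The foreign-key pragma is disabled here, presumably because it is SQLite-specific and has no meaning on a Postgres connection. A dialect-aware alternative (a sketch only, not the project's code) would keep the pragma for SQLite while leaving other backends untouched:

```python
import sqlite3

from sqlalchemy import event
from sqlalchemy.engine import Engine


@event.listens_for(Engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
    # Postgres enforces foreign keys natively; only the sqlite3 driver needs this pragma.
    if isinstance(dbapi_connection, sqlite3.Connection):
        cursor = dbapi_connection.cursor()
        cursor.execute("PRAGMA foreign_keys=ON")
        cursor.close()
```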

View File

@@ -3,12 +3,13 @@ Contains all models for sqlalchemy
""" """
from __future__ import annotations from __future__ import annotations
import sys, logging import sys, logging
from sqlalchemy import Column, INTEGER, String, JSON from sqlalchemy import Column, INTEGER, String, JSON, inspect
from sqlalchemy.orm import DeclarativeMeta, declarative_base, Query, Session from sqlalchemy.orm import DeclarativeMeta, declarative_base, Query, Session
from sqlalchemy.ext.declarative import declared_attr from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.exc import ArgumentError from sqlalchemy.exc import ArgumentError
from typing import Any, List from typing import Any, List
from pathlib import Path from pathlib import Path
from tools import report_result
# Load testing environment # Load testing environment
if 'pytest' in sys.modules: if 'pytest' in sys.modules:
@@ -152,17 +153,22 @@ class BaseClass(Base):
case _: case _:
return query.limit(limit).all() return query.limit(limit).all()
@report_result
def save(self): def save(self):
""" """
Add the object to the database and commit Add the object to the database and commit
""" """
# logger.debug(f"Saving object: {pformat(self.__dict__)}") # logger.debug(f"Saving object: {pformat(self.__dict__)}")
report = Report()
try: try:
self.__database_session__.add(self) self.__database_session__.add(self)
self.__database_session__.commit() self.__database_session__.commit()
# self.__database_session__.merge(self)
except Exception as e: except Exception as e:
logger.critical(f"Problem saving object: {e}") logger.critical(f"Problem saving object: {e}")
self.__database_session__.rollback() self.__database_session__.rollback()
report.add_result(Result(msg=f"Problem saving object {e}", status="Critical"))
return report
class ConfigItem(BaseClass): class ConfigItem(BaseClass):
@@ -177,7 +183,7 @@ class ConfigItem(BaseClass):
return f"ConfigItem({self.key} : {self.value})" return f"ConfigItem({self.key} : {self.value})"
@classmethod @classmethod
def get_config_items(cls, *args) -> ConfigItem|List[ConfigItem]: def get_config_items(cls, *args) -> ConfigItem | List[ConfigItem]:
""" """
Get desired config items from database Get desired config items from database
@@ -196,4 +202,5 @@ from .controls import *
from .organizations import * from .organizations import *
from .kits import * from .kits import *
from .submissions import * from .submissions import *
BasicSubmission.reagents.creator = lambda reg: SubmissionReagentAssociation(reagent=reg) BasicSubmission.reagents.creator = lambda reg: SubmissionReagentAssociation(reagent=reg)

View File

@@ -1532,7 +1532,7 @@ class Process(BaseClass):
query = cls.__database_session__.query(cls) query = cls.__database_session__.query(cls)
match name: match name:
case str(): case str():
logger.debug(f"Lookup Process with name str {name}") # logger.debug(f"Lookup Process with name str {name}")
query = query.filter(cls.name == name) query = query.filter(cls.name == name)
limit = 1 limit = 1
case _: case _:

View File

@@ -23,7 +23,7 @@ import pandas as pd
from openpyxl import Workbook from openpyxl import Workbook
from openpyxl.worksheet.worksheet import Worksheet from openpyxl.worksheet.worksheet import Worksheet
from openpyxl.drawing.image import Image as OpenpyxlImage from openpyxl.drawing.image import Image as OpenpyxlImage
from tools import row_map, setup_lookup, jinja_template_loading, rreplace, row_keys, check_key_or_attr from tools import row_map, setup_lookup, jinja_template_loading, rreplace, row_keys, check_key_or_attr, Result, Report
from datetime import datetime, date from datetime import datetime, date
from typing import List, Any, Tuple, Literal from typing import List, Any, Tuple, Literal
from dateutil.parser import parse from dateutil.parser import parse
@@ -691,7 +691,8 @@ class BasicSubmission(BaseClass):
Args: Args:
input_dict (dict): Input sample dictionary input_dict (dict): Input sample dictionary
xl (pd.ExcelFile): original xl workbook, used for child classes mostly xl (Workbook): original xl workbook, used for child classes mostly
custom_fields: Dictionary of locations, ranges, etc to be used by this function
Returns: Returns:
dict: Updated sample dictionary dict: Updated sample dictionary
@@ -739,6 +740,7 @@ class BasicSubmission(BaseClass):
input_excel (Workbook): initial workbook. input_excel (Workbook): initial workbook.
info (dict | None, optional): dictionary of additional info. Defaults to None. info (dict | None, optional): dictionary of additional info. Defaults to None.
backup (bool, optional): Whether this is part of a backup operation. Defaults to False. backup (bool, optional): Whether this is part of a backup operation. Defaults to False.
custom_fields: Dictionary of locations, ranges, etc to be used by this function
Returns: Returns:
Workbook: Updated workbook Workbook: Updated workbook
@@ -1046,14 +1048,16 @@ class BasicSubmission(BaseClass):
""" """
code = 0 code = 0
msg = "" msg = ""
report = Report()
disallowed = ["id"] disallowed = ["id"]
if kwargs == {}: if kwargs == {}:
raise ValueError("Need to narrow down query or the first available instance will be returned.") raise ValueError("Need to narrow down query or the first available instance will be returned.")
for key in kwargs.keys(): sanitized_kwargs = {k: v for k, v in kwargs.items() if k not in disallowed}
if key in disallowed: # for key in kwargs.keys():
raise ValueError( # if key in disallowed:
f"{key} is not allowed as a query argument as it could lead to creation of duplicate objects. Use .query() instead.") # raise ValueError(
instance = cls.query(submission_type=submission_type, limit=1, **kwargs) # f"{key} is not allowed as a query argument as it could lead to creation of duplicate objects. Use .query() instead.")
instance = cls.query(submission_type=submission_type, limit=1, **sanitized_kwargs)
# logger.debug(f"Retrieved instance: {instance}") # logger.debug(f"Retrieved instance: {instance}")
if instance is None: if instance is None:
used_class = cls.find_polymorphic_subclass(attrs=kwargs, polymorphic_identity=submission_type) used_class = cls.find_polymorphic_subclass(attrs=kwargs, polymorphic_identity=submission_type)
@@ -1070,7 +1074,8 @@ class BasicSubmission(BaseClass):
else: else:
code = 1 code = 1
msg = "This submission already exists.\nWould you like to overwrite?" msg = "This submission already exists.\nWould you like to overwrite?"
return instance, code, msg report.add_result(Result(msg=msg, code=code))
return instance, report
# Custom context events for the ui # Custom context events for the ui
@@ -1135,7 +1140,7 @@ class BasicSubmission(BaseClass):
# logger.debug(widg) # logger.debug(widg)
widg.setParent(None) widg.setParent(None)
pyd = self.to_pydantic(backup=True) pyd = self.to_pydantic(backup=True)
form = pyd.to_form(parent=obj) form = pyd.to_form(parent=obj, disable=['rsl_plate_num'])
obj.app.table_widget.formwidget.layout().addWidget(form) obj.app.table_widget.formwidget.layout().addWidget(form)
def add_comment(self, obj): def add_comment(self, obj):
@@ -1352,13 +1357,29 @@ class Wastewater(BasicSubmission):
Args: Args:
input_dict (dict): Input sample dictionary input_dict (dict): Input sample dictionary
xl (Workbook): xl (Workbook): original xl workbook, used for child classes mostly.
custom_fields: Dictionary of locations, ranges, etc to be used by this function
Returns: Returns:
dict: Updated sample dictionary dict: Updated sample dictionary
""" """
input_dict = super().custom_info_parser(input_dict) input_dict = super().custom_info_parser(input_dict)
logger.debug(f"Input dict: {pformat(input_dict)}")
if xl is not None: if xl is not None:
try:
input_dict['csv'] = xl["Copy to import file"] input_dict['csv'] = xl["Copy to import file"]
except KeyError as e:
logger.error(e)
try:
match input_dict['rsl_plate_num']:
case dict():
input_dict['csv'] = xl[input_dict['rsl_plate_num']['value']]
case str():
input_dict['csv'] = xl[input_dict['rsl_plate_num']]
case _:
pass
except Exception as e:
logger.error(f"Error handling couldn't get csv due to: {e}")
return input_dict return input_dict
@classmethod @classmethod
@@ -1604,11 +1625,12 @@ class WastewaterArtic(BasicSubmission):
Args: Args:
input_dict (dict): Input sample dictionary input_dict (dict): Input sample dictionary
xl (pd.ExcelFile): original xl workbook, used for child classes mostly xl (pd.ExcelFile): original xl workbook, used for child classes mostly
custom_fields: Dictionary of locations, ranges, etc to be used by this function
Returns: Returns:
dict: Updated sample dictionary dict: Updated sample dictionary
""" """
# TODO: Clean up and move range start/stops to db somehow. from backend.validators import RSLNamer
input_dict = super().custom_info_parser(input_dict) input_dict = super().custom_info_parser(input_dict)
egel_section = custom_fields['egel_results'] egel_section = custom_fields['egel_results']
ws = xl[egel_section['sheet']] ws = xl[egel_section['sheet']]
@@ -1621,12 +1643,11 @@ class WastewaterArtic(BasicSubmission):
source_plates_section = custom_fields['source_plates'] source_plates_section = custom_fields['source_plates']
ws = xl[source_plates_section['sheet']] ws = xl[source_plates_section['sheet']]
data = [dict(plate=ws.cell(row=ii, column=source_plates_section['plate_column']).value, starting_sample=ws.cell(row=ii, column=source_plates_section['starting_sample_column']).value) for ii in data = [dict(plate=ws.cell(row=ii, column=source_plates_section['plate_column']).value, starting_sample=ws.cell(row=ii, column=source_plates_section['starting_sample_column']).value) for ii in
range(source_plates_section['start_row'], source_plates_section['end_row'])] range(source_plates_section['start_row'], source_plates_section['end_row']+1)]
for datum in data: for datum in data:
if datum['plate'] in ["None", None, ""]: if datum['plate'] in ["None", None, ""]:
continue continue
else: else:
from backend.validators import RSLNamer
datum['plate'] = RSLNamer(filename=datum['plate'], sub_type="Wastewater").parsed_name datum['plate'] = RSLNamer(filename=datum['plate'], sub_type="Wastewater").parsed_name
input_dict['source_plates'] = data input_dict['source_plates'] = data
return input_dict return input_dict
@@ -1820,6 +1841,7 @@ class WastewaterArtic(BasicSubmission):
input_excel (Workbook): initial workbook. input_excel (Workbook): initial workbook.
info (dict | None, optional): dictionary of additional info. Defaults to None. info (dict | None, optional): dictionary of additional info. Defaults to None.
backup (bool, optional): Whether this is part of a backup operation. Defaults to False. backup (bool, optional): Whether this is part of a backup operation. Defaults to False.
custom_fields: Dictionary of locations, ranges, etc to be used by this function
Returns: Returns:
Workbook: Updated workbook Workbook: Updated workbook
@@ -2798,7 +2820,7 @@ class WastewaterArticAssociation(SubmissionSampleAssociation):
DOC: https://docs.sqlalchemy.org/en/14/orm/extensions/associationproxy.html DOC: https://docs.sqlalchemy.org/en/14/orm/extensions/associationproxy.html
""" """
id = Column(INTEGER, ForeignKey("_submissionsampleassociation.id"), primary_key=True) id = Column(INTEGER, ForeignKey("_submissionsampleassociation.id"), primary_key=True)
source_plate = Column(String(16)) source_plate = Column(String(32))
source_plate_number = Column(INTEGER) source_plate_number = Column(INTEGER)
source_well = Column(String(8)) source_well = Column(String(8))
ct = Column(String(8)) #: AKA ct for N1 ct = Column(String(8)) #: AKA ct for N1
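
The hunks above move query_or_create (and, further down, PydSubmission.to_sql) from returning an (instance, code, msg) tuple to accumulating Result objects in a Report, with callers reading the code back out of report.results. The real Result and Report classes live in tools and are not shown in this diff, so the following is only a rough stand-in matching the calls visible here:

# Rough stand-in for the tools.Result / tools.Report API as used in the hunks above.
# Field names are inferred from the calls visible in this diff, not from the real module.
from dataclasses import dataclass, field
from typing import List, Optional

@dataclass
class Result:
    msg: str = ""
    code: int = 0
    status: str = "Information"   # e.g. "Information", "Warning", "Critical"
    owner: Optional[str] = None

@dataclass
class Report:
    results: List[Result] = field(default_factory=list)

    def add_result(self, result) -> None:
        # The app appends single Results and occasionally whole Reports; None is ignored.
        if result is None:
            return
        if isinstance(result, Report):
            self.results.extend(result.results)
        else:
            self.results.append(result)

# Caller-side pattern mirroring query_or_create / to_sql above:
report = Report()
report.add_result(Result(msg="This submission already exists.\nWould you like to overwrite?", code=1))
code = report.results[-1].code if report.results else 0
if code == 1:
    print("ask the user about overwriting")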

View File

@@ -161,7 +161,7 @@ class PydReagent(BaseModel):
# reagent.reagent_submission_associations.append(assoc) # reagent.reagent_submission_associations.append(assoc)
else: else:
assoc = None assoc = None
report.add_result(Result(owner = __name__, code=0, msg="New reagent created.", status="Information")) report.add_result(Result(owner=__name__, code=0, msg="New reagent created.", status="Information"))
else: else:
if submission is not None and reagent not in submission.reagents: if submission is not None and reagent not in submission.reagents:
assoc = SubmissionReagentAssociation(reagent=reagent, submission=submission) assoc = SubmissionReagentAssociation(reagent=reagent, submission=submission)
@@ -254,7 +254,8 @@ class PydSample(BaseModel, extra='allow'):
submission=submission, submission=submission,
sample=instance, sample=instance,
row=row, column=column, id=aid, row=row, column=column, id=aid,
submission_rank=submission_rank, **self.model_extra) submission_rank=submission_rank,
**self.model_extra)
# logger.debug(f"Using submission_sample_association: {association}") # logger.debug(f"Using submission_sample_association: {association}")
try: try:
# instance.sample_submission_associations.append(association) # instance.sample_submission_associations.append(association)
@@ -281,10 +282,10 @@ class PydSample(BaseModel, extra='allow'):
class PydTips(BaseModel): class PydTips(BaseModel):
name: str name: str
lot: str|None = Field(default=None) lot: str | None = Field(default=None)
role: str role: str
def to_sql(self, submission:BasicSubmission) -> SubmissionTipsAssociation: def to_sql(self, submission: BasicSubmission) -> SubmissionTipsAssociation:
""" """
Con Con
@@ -305,7 +306,7 @@ class PydEquipment(BaseModel, extra='ignore'):
nickname: str | None nickname: str | None
processes: List[str] | None processes: List[str] | None
role: str | None role: str | None
tips: List[PydTips]|None = Field(default=None) tips: List[PydTips] | None = Field(default=None)
@field_validator('processes', mode='before') @field_validator('processes', mode='before')
@classmethod @classmethod
@@ -338,25 +339,21 @@ class PydEquipment(BaseModel, extra='ignore'):
if equipment is None: if equipment is None:
return return
if submission is not None: if submission is not None:
# NOTE: Need to make sure the same association is not added to the submission
assoc = SubmissionEquipmentAssociation.query(equipment_id=equipment.id, submission_id=submission.id,
role=self.role, limit=1)
if assoc is None:
assoc = SubmissionEquipmentAssociation(submission=submission, equipment=equipment) assoc = SubmissionEquipmentAssociation(submission=submission, equipment=equipment)
process = Process.query(name=self.processes[0]) process = Process.query(name=self.processes[0])
if process is None: if process is None:
logger.error(f"Found unknown process: {process}.") logger.error(f"Found unknown process: {process}.")
# from frontend.widgets.pop_ups import QuestionAsker
# dlg = QuestionAsker(title="Add Process?",
# message=f"Unable to find {self.processes[0]} in the database.\nWould you like to add it?")
# if dlg.exec():
# kit = submission.extraction_kit
# submission_type = submission.submission_type
# process = Process(name=self.processes[0])
# process.kit_types.append(kit)
# process.submission_types.append(submission_type)
# process.equipment.append(equipment)
# process.save()
assoc.process = process assoc.process = process
assoc.role = self.role assoc.role = self.role
else: else:
assoc = None assoc = None
else:
assoc = None
return equipment, assoc return equipment, assoc
def improved_dict(self) -> dict: def improved_dict(self) -> dict:
@@ -637,7 +634,7 @@ class PydSubmission(BaseModel, extra='allow'):
self.submission_object = BasicSubmission.find_polymorphic_subclass( self.submission_object = BasicSubmission.find_polymorphic_subclass(
polymorphic_identity=self.submission_type['value']) polymorphic_identity=self.submission_type['value'])
def set_attribute(self, key:str, value): def set_attribute(self, key: str, value):
""" """
Better handling of attribute setting. Better handling of attribute setting.
@@ -710,7 +707,7 @@ class PydSubmission(BaseModel, extra='allow'):
missing_reagents = [reagent for reagent in self.reagents if reagent.missing] missing_reagents = [reagent for reagent in self.reagents if reagent.missing]
return missing_info, missing_reagents return missing_info, missing_reagents
def to_sql(self) -> Tuple[BasicSubmission, Result]: def to_sql(self) -> Tuple[BasicSubmission, Report]:
""" """
Converts this instance into a backend.db.models.submissions.BasicSubmission instance Converts this instance into a backend.db.models.submissions.BasicSubmission instance
@@ -718,13 +715,13 @@ class PydSubmission(BaseModel, extra='allow'):
Tuple[BasicSubmission, Result]: BasicSubmission instance, result object Tuple[BasicSubmission, Result]: BasicSubmission instance, result object
""" """
# self.__dict__.update(self.model_extra) # self.__dict__.update(self.model_extra)
report = Report()
dicto = self.improved_dict() dicto = self.improved_dict()
instance, code, msg = BasicSubmission.query_or_create(submission_type=self.submission_type['value'], instance, result = BasicSubmission.query_or_create(submission_type=self.submission_type['value'],
rsl_plate_num=self.rsl_plate_num['value']) rsl_plate_num=self.rsl_plate_num['value'])
result = Result(msg=msg, code=code) report.add_result(result)
self.handle_duplicate_samples() self.handle_duplicate_samples()
# logger.debug(f"Here's our list of duplicate removed samples: {self.samples}") # logger.debug(f"Here's our list of duplicate removed samples: {self.samples}")
# for key, value in self.__dict__.items():
for key, value in dicto.items(): for key, value in dicto.items():
if isinstance(value, dict): if isinstance(value, dict):
value = value['value'] value = value['value']
@@ -733,13 +730,13 @@ class PydSubmission(BaseModel, extra='allow'):
# logger.debug(f"Setting {key} to {value}") # logger.debug(f"Setting {key} to {value}")
match key: match key:
case "reagents": case "reagents":
if code == 1: if report.results[0].code == 1:
instance.submission_reagent_associations = [] instance.submission_reagent_associations = []
# logger.debug(f"Looking through {self.reagents}") # logger.debug(f"Looking through {self.reagents}")
for reagent in self.reagents: for reagent in self.reagents:
reagent, assoc, _ = reagent.toSQL(submission=instance) reagent, assoc, _ = reagent.toSQL(submission=instance)
# logger.debug(f"Association: {assoc}") # logger.debug(f"Association: {assoc}")
if assoc is not None:# and assoc not in instance.submission_reagent_associations: if assoc is not None: # and assoc not in instance.submission_reagent_associations:
instance.submission_reagent_associations.append(assoc) instance.submission_reagent_associations.append(assoc)
# instance.reagents.append(reagent) # instance.reagents.append(reagent)
case "samples": case "samples":
@@ -755,10 +752,7 @@ class PydSubmission(BaseModel, extra='allow'):
if equip is None: if equip is None:
continue continue
equip, association = equip.toSQL(submission=instance) equip, association = equip.toSQL(submission=instance)
if association is not None and association not in instance.submission_equipment_associations: if association is not None:
# association.save()
# logger.debug(
# f"Equipment association SQL object to be added to submission: {association.__dict__}")
instance.submission_equipment_associations.append(association) instance.submission_equipment_associations.append(association)
case "tips": case "tips":
for tips in self.tips: for tips in self.tips:
@@ -817,9 +811,9 @@ class PydSubmission(BaseModel, extra='allow'):
# except AttributeError as e: # except AttributeError as e:
# logger.debug(f"Something went wrong constructing instance {self.rsl_plate_num}: {e}") # logger.debug(f"Something went wrong constructing instance {self.rsl_plate_num}: {e}")
# logger.debug(f"Constructed submissions message: {msg}") # logger.debug(f"Constructed submissions message: {msg}")
return instance, result return instance, report
def to_form(self, parent: QWidget): def to_form(self, parent: QWidget, disable:list|None=None):
""" """
Converts this instance into a frontend.widgets.submission_widget.SubmissionFormWidget Converts this instance into a frontend.widgets.submission_widget.SubmissionFormWidget
@@ -830,7 +824,8 @@ class PydSubmission(BaseModel, extra='allow'):
SubmissionFormWidget: Submission form widget SubmissionFormWidget: Submission form widget
""" """
from frontend.widgets.submission_widget import SubmissionFormWidget from frontend.widgets.submission_widget import SubmissionFormWidget
        return SubmissionFormWidget(parent=parent, submission=self) logger.debug(f"Disable: {disable}")
return SubmissionFormWidget(parent=parent, submission=self, disable=disable)
def to_writer(self) -> "SheetWriter": def to_writer(self) -> "SheetWriter":
""" """
@@ -897,7 +892,7 @@ class PydSubmission(BaseModel, extra='allow'):
report.add_result(result) report.add_result(result)
return output_reagents, report return output_reagents, report
def export_csv(self, filename:Path|str): def export_csv(self, filename: Path | str):
try: try:
worksheet = self.csv worksheet = self.csv
except AttributeError: except AttributeError:
@@ -1024,4 +1019,3 @@ class PydEquipmentRole(BaseModel):
""" """
from frontend.widgets.equipment_usage import RoleComboBox from frontend.widgets.equipment_usage import RoleComboBox
return RoleComboBox(parent=parent, role=self, used=used) return RoleComboBox(parent=parent, role=self, used=used)

View File

@@ -1,6 +1,6 @@
''' """
Constructs main application. Constructs main application.
''' """
from PyQt6.QtWidgets import ( from PyQt6.QtWidgets import (
QTabWidget, QWidget, QVBoxLayout, QTabWidget, QWidget, QVBoxLayout,
QHBoxLayout, QScrollArea, QMainWindow, QHBoxLayout, QScrollArea, QMainWindow,
@@ -13,7 +13,7 @@ from markdown import markdown
from tools import check_if_app, Settings, Report, jinja_template_loading from tools import check_if_app, Settings, Report, jinja_template_loading
from datetime import date from datetime import date
from .pop_ups import AlertPop, HTMLPop from .pop_ups import HTMLPop
from .misc import LogParser from .misc import LogParser
import logging, webbrowser, sys, shutil import logging, webbrowser, sys, shutil
from .submission_table import SubmissionsSheet from .submission_table import SubmissionsSheet
@@ -36,7 +36,7 @@ class App(QMainWindow):
self.report = Report() self.report = Report()
# NOTE: indicate version and connected database in title bar # NOTE: indicate version and connected database in title bar
try: try:
self.title = f"Submissions App (v{ctx.package.__version__}) - {ctx.database_path}" self.title = f"Submissions App (v{ctx.package.__version__}) - {ctx.database_session.get_bind().url}"
except (AttributeError, KeyError): except (AttributeError, KeyError):
self.title = f"Submissions App" self.title = f"Submissions App"
# NOTE: set initial app position and size # NOTE: set initial app position and size
@@ -164,27 +164,6 @@ class App(QMainWindow):
instr = HTMLPop(html=html, title="Instructions") instr = HTMLPop(html=html, title="Instructions")
instr.exec() instr.exec()
def result_reporter(self):
"""
Report any anomolous results - if any - to the user
Args:
result (dict | None, optional): The result from a function. Defaults to None.
"""
# logger.debug(f"Running results reporter for: {self.report.results}")
if len(self.report.results) > 0:
# logger.debug(f"We've got some results!")
for result in self.report.results:
# logger.debug(f"Showing result: {result}")
if result is not None:
alert = result.report()
if alert.exec():
pass
self.report = Report()
else:
self.statusBar().showMessage("Action completed sucessfully.", 5000)
def runSearch(self): def runSearch(self):
dlg = LogParser(self) dlg = LogParser(self)
dlg.exec() dlg.exec()
@@ -201,12 +180,19 @@ class App(QMainWindow):
Copies the database into the backup directory the first time it is opened every month. Copies the database into the backup directory the first time it is opened every month.
""" """
month = date.today().strftime("%Y-%m") month = date.today().strftime("%Y-%m")
current_month_bak = Path(self.ctx.backup_path).joinpath(f"submissions_backup-{month}").resolve()
# logger.debug(f"Here is the db directory: {self.ctx.database_path}") # logger.debug(f"Here is the db directory: {self.ctx.database_path}")
# logger.debug(f"Here is the backup directory: {self.ctx.backup_path}") # logger.debug(f"Here is the backup directory: {self.ctx.backup_path}")
current_month_bak = Path(self.ctx.backup_path).joinpath(f"submissions_backup-{month}").resolve().with_suffix(".db") match self.ctx.database_schema:
case "sqlite":
current_month_bak = current_month_bak.with_suffix(".db")
if not current_month_bak.exists() and "demo" not in self.ctx.database_path.__str__(): if not current_month_bak.exists() and "demo" not in self.ctx.database_path.__str__():
logger.info("No backup found for this month, backing up database.") logger.info("No backup found for this month, backing up database.")
shutil.copyfile(self.ctx.database_path, current_month_bak) shutil.copyfile(self.ctx.database_path, current_month_bak)
case "postgresql+psycopg2":
logger.warning(f"Backup function not yet implemented for psql")
current_month_bak = current_month_bak.with_suffix(".psql")
class AddSubForm(QWidget): class AddSubForm(QWidget):
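
The monthly backup above now dispatches on ctx.database_schema: only the sqlite branch actually copies the database file, while the postgres branch just logs that auto backup is not implemented yet, matching the changelog note. A condensed, self-contained sketch of that dispatch, using a stub context object in place of the app's real settings:

# Condensed sketch of the schema-aware monthly backup shown above.
# "Ctx" is a stub standing in for the app's settings object, not the real class.
import logging, shutil
from dataclasses import dataclass
from datetime import date
from pathlib import Path

logger = logging.getLogger("submissions.backup")

@dataclass
class Ctx:
    database_schema: str          # "sqlite" or "postgresql+psycopg2"
    database_path: Path | str
    backup_path: Path | str

def backup_database(ctx: Ctx) -> None:
    month = date.today().strftime("%Y-%m")
    current_month_bak = Path(ctx.backup_path).joinpath(f"submissions_backup-{month}").resolve()
    match ctx.database_schema:
        case "sqlite":
            current_month_bak = current_month_bak.with_suffix(".db")
            if not current_month_bak.exists() and "demo" not in str(ctx.database_path):
                logger.info("No backup found for this month, backing up database.")
                shutil.copyfile(ctx.database_path, current_month_bak)
        case "postgresql+psycopg2":
            # Auto backup is not implemented for postgres yet; a dump step would go here.
            logger.warning("Backup function not yet implemented for psql")
            current_month_bak = current_month_bak.with_suffix(".psql")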

View File

@@ -15,7 +15,7 @@ logger = logging.getLogger(f"submissions.{__name__}")
class EquipmentUsage(QDialog): class EquipmentUsage(QDialog):
def __init__(self, parent, submission: BasicSubmission) -> QDialog: def __init__(self, parent, submission: BasicSubmission):
super().__init__(parent) super().__init__(parent)
self.submission = submission self.submission = submission
self.setWindowTitle(f"Equipment Checklist - {submission.rsl_plate_num}") self.setWindowTitle(f"Equipment Checklist - {submission.rsl_plate_num}")
@@ -139,7 +139,7 @@ class RoleComboBox(QWidget):
Changes what tips are available when process is changed Changes what tips are available when process is changed
""" """
process = self.process.currentText().strip() process = self.process.currentText().strip()
logger.debug(f"Checking process: {process} for equipment {self.role.name}") # logger.debug(f"Checking process: {process} for equipment {self.role.name}")
process = Process.query(name=process) process = Process.query(name=process)
if process.tip_roles: if process.tip_roles:
for iii, tip_role in enumerate(process.tip_roles): for iii, tip_role in enumerate(process.tip_roles):

View File

@@ -21,10 +21,10 @@ logger = logging.getLogger(f"submissions.{__name__}")
# Main window class # Main window class
class GelBox(QDialog): class GelBox(QDialog):
def __init__(self, parent, img_path:str|Path, submission:WastewaterArtic): def __init__(self, parent, img_path: str | Path, submission: WastewaterArtic):
super().__init__(parent) super().__init__(parent)
# NOTE: setting title # NOTE: setting title
self.setWindowTitle("PyQtGraph") self.setWindowTitle(f"Gel - {img_path}")
self.img_path = img_path self.img_path = img_path
self.submission = submission self.submission = submission
# NOTE: setting geometry # NOTE: setting geometry
@@ -49,35 +49,36 @@ class GelBox(QDialog):
# NOTE: Create image. # NOTE: Create image.
# NOTE: For some reason, ImageView wants to flip the image, so we have to rotate and flip the array first. # NOTE: For some reason, ImageView wants to flip the image, so we have to rotate and flip the array first.
# NOTE: Using the Image.rotate function results in cropped image, so using np. # NOTE: Using the Image.rotate function results in cropped image, so using np.
img = np.flip(np.rot90(np.array(Image.open(self.img_path)),1),0) img = np.flip(np.rot90(np.array(Image.open(self.img_path)), 1), 0)
self.imv.setImage(img) self.imv.setImage(img)
layout = QGridLayout() layout = QGridLayout()
layout.addWidget(QLabel("DNA Core Submission Number"),0,1) layout.addWidget(QLabel("DNA Core Submission Number"), 21, 1)
self.core_number = QLineEdit() self.core_number = QLineEdit()
self.core_number.setText(self.submission.dna_core_submission_number) self.core_number.setText(self.submission.dna_core_submission_number)
layout.addWidget(self.core_number, 0,2) layout.addWidget(self.core_number, 21, 2)
layout.addWidget(QLabel("Gel Barcode"),0,3) layout.addWidget(QLabel("Gel Barcode"), 21, 3)
self.gel_barcode = QLineEdit() self.gel_barcode = QLineEdit()
self.gel_barcode.setText(self.submission.gel_barcode) self.gel_barcode.setText(self.submission.gel_barcode)
layout.addWidget(self.gel_barcode, 0, 4) layout.addWidget(self.gel_barcode, 21, 4)
# NOTE: setting this layout to the widget # NOTE: setting this layout to the widget
# NOTE: plot window goes on right side, spanning 3 rows # NOTE: plot window goes on right side, spanning 3 rows
layout.addWidget(self.imv, 1, 1,20,20) layout.addWidget(self.imv, 0, 1, 20, 20)
# NOTE: setting this widget as central widget of the main window # NOTE: setting this widget as central widget of the main window
try: try:
control_info = sorted(self.submission.gel_controls, key=lambda d: d['location']) control_info = sorted(self.submission.gel_controls, key=lambda d: d['location'])
except KeyError: except KeyError:
control_info = None control_info = None
self.form = ControlsForm(parent=self, control_info=control_info) self.form = ControlsForm(parent=self, control_info=control_info)
layout.addWidget(self.form,22,1,1,4) layout.addWidget(self.form, 22, 1, 1, 4)
QBtn = QDialogButtonBox.StandardButton.Ok | QDialogButtonBox.StandardButton.Cancel QBtn = QDialogButtonBox.StandardButton.Ok | QDialogButtonBox.StandardButton.Cancel
self.buttonBox = QDialogButtonBox(QBtn) self.buttonBox = QDialogButtonBox(QBtn)
self.buttonBox.accepted.connect(self.accept) self.buttonBox.accepted.connect(self.accept)
self.buttonBox.rejected.connect(self.reject) self.buttonBox.rejected.connect(self.reject)
layout.addWidget(self.buttonBox, 23, 1, 1, 1)#, alignment=Qt.AlignmentFlag.AlignTop) layout.addWidget(self.buttonBox, 23, 1, 1, 1) #, alignment=Qt.AlignmentFlag.AlignTop)
self.setLayout(layout) self.setLayout(layout)
def parse_form(self) -> Tuple[str, str|Path, list]:
def parse_form(self) -> Tuple[str, str | Path, list]:
""" """
Get relevant values from self/form Get relevant values from self/form
@@ -92,7 +93,7 @@ class GelBox(QDialog):
class ControlsForm(QWidget): class ControlsForm(QWidget):
def __init__(self, parent, control_info:List=None) -> None: def __init__(self, parent, control_info: List = None) -> None:
super().__init__(parent) super().__init__(parent)
self.layout = QGridLayout() self.layout = QGridLayout()
columns = [] columns = []
@@ -101,9 +102,10 @@ class ControlsForm(QWidget):
tt_text = "\n".join([f"{item['sample_id']} - CELL {item['location']}" for item in control_info]) tt_text = "\n".join([f"{item['sample_id']} - CELL {item['location']}" for item in control_info])
except TypeError: except TypeError:
tt_text = None tt_text = None
for iii, item in enumerate(["Negative Control Key", "Description", "Results - 65 C", "Results - 63 C", "Results - Spike"]): for iii, item in enumerate(
["Negative Control Key", "Description", "Results - 65 C", "Results - 63 C", "Results - Spike"]):
label = QLabel(item) label = QLabel(item)
self.layout.addWidget(label, 0, iii,1,1) self.layout.addWidget(label, 0, iii, 1, 1)
if iii > 1: if iii > 1:
columns.append(item) columns.append(item)
elif iii == 0: elif iii == 0:
@@ -114,7 +116,8 @@ class ControlsForm(QWidget):
label = QLabel(item) label = QLabel(item)
self.layout.addWidget(label, iii, 0, 1, 1) self.layout.addWidget(label, iii, 0, 1, 1)
rows.append(item) rows.append(item)
for iii, item in enumerate(["Processing Negative (PBS)", "Extraction Negative (Extraction buffers ONLY)", "Artic no-template control (mastermix ONLY)"], start=1): for iii, item in enumerate(["Processing Negative (PBS)", "Extraction Negative (Extraction buffers ONLY)",
"Artic no-template control (mastermix ONLY)"], start=1):
label = QLabel(item) label = QLabel(item)
self.layout.addWidget(label, iii, 1, 1, 1) self.layout.addWidget(label, iii, 1, 1, 1)
for iii in range(3): for iii in range(3):
@@ -125,11 +128,11 @@ class ControlsForm(QWidget):
widge.setCurrentIndex(0) widge.setCurrentIndex(0)
widge.setEditable(True) widge.setEditable(True)
widge.setObjectName(f"{rows[iii]} : {columns[jjj]}") widge.setObjectName(f"{rows[iii]} : {columns[jjj]}")
self.layout.addWidget(widge, iii+1, jjj+2, 1, 1) self.layout.addWidget(widge, iii + 1, jjj + 2, 1, 1)
self.layout.addWidget(QLabel("Comments:"), 0,5,1,1) self.layout.addWidget(QLabel("Comments:"), 0, 5, 1, 1)
self.comment_field = QTextEdit(self) self.comment_field = QTextEdit(self)
self.comment_field.setFixedHeight(50) self.comment_field.setFixedHeight(50)
self.layout.addWidget(self.comment_field, 1,5,4,1) self.layout.addWidget(self.comment_field, 1, 5, 4, 1)
self.setLayout(self.layout) self.setLayout(self.layout)
def parse_form(self) -> List[dict]: def parse_form(self) -> List[dict]:
@@ -143,7 +146,7 @@ class ControlsForm(QWidget):
for le in self.findChildren(QComboBox): for le in self.findChildren(QComboBox):
label = [item.strip() for item in le.objectName().split(" : ")] label = [item.strip() for item in le.objectName().split(" : ")]
try: try:
dicto = [item for item in output if item['name']==label[0]][0] dicto = [item for item in output if item['name'] == label[0]][0]
except IndexError: except IndexError:
dicto = dict(name=label[0], values=[]) dicto = dict(name=label[0], values=[])
dicto['values'].append(dict(name=label[1], value=le.currentText())) dicto['values'].append(dict(name=label[1], value=le.currentText()))
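
In the GelBox hunk above the integer arguments to addWidget are row, column, rowSpan and columnSpan, so the change moves the image up to rows 0-19 and pushes the text fields, controls form and button box below it in rows 21-23. A tiny standalone PyQt6 snippet showing the same call signature, with placeholder widgets rather than the app's own:

# Standalone illustration of QGridLayout.addWidget(widget, row, column, rowSpan, columnSpan);
# the labels here are placeholders, not the GelBox contents.
import sys
from PyQt6.QtWidgets import QApplication, QGridLayout, QLabel, QWidget

app = QApplication(sys.argv)
container = QWidget()
layout = QGridLayout()
layout.addWidget(QLabel("image area"), 0, 1, 20, 20)   # spans rows 0-19, columns 1-20
layout.addWidget(QLabel("DNA Core Submission Number"), 21, 1)
layout.addWidget(QLabel("controls form"), 22, 1, 1, 4)  # one row tall, four columns wide
layout.addWidget(QLabel("buttons"), 23, 1, 1, 1)
container.setLayout(layout)
# container.show(); app.exec()  # omitted so the sketch exits immediately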

View File

@@ -8,7 +8,7 @@ from PyQt6.QtWidgets import (
QDialogButtonBox, QDateEdit, QPushButton, QFormLayout QDialogButtonBox, QDateEdit, QPushButton, QFormLayout
) )
from PyQt6.QtCore import Qt, QDate from PyQt6.QtCore import Qt, QDate
from tools import jinja_template_loading, Settings from tools import jinja_template_loading
from backend.db.models import * from backend.db.models import *
import logging import logging
from .pop_ups import AlertPop from .pop_ups import AlertPop
@@ -45,18 +45,19 @@ class AddReagentForm(QDialog):
self.exp_input.setObjectName('expiry') self.exp_input.setObjectName('expiry')
# NOTE: if expiry is not passed in from gui, use today # NOTE: if expiry is not passed in from gui, use today
if expiry is None: if expiry is None:
self.exp_input.setDate(QDate.currentDate()) # self.exp_input.setDate(QDate.currentDate())
self.exp_input.setDate(QDate(1970, 1, 1))
else: else:
try: try:
self.exp_input.setDate(expiry) self.exp_input.setDate(expiry)
except TypeError: except TypeError:
self.exp_input.setDate(QDate.currentDate()) self.exp_input.setDate(QDate(1970, 1, 1))
# NOTE: widget to get reagent type info # NOTE: widget to get reagent type info
self.type_input = QComboBox() self.type_input = QComboBox()
self.type_input.setObjectName('type') self.type_input.setObjectName('type')
self.type_input.addItems([item.name for item in ReagentRole.query()]) self.type_input.addItems([item.name for item in ReagentRole.query()])
# logger.debug(f"Trying to find index of {reagent_type}") # logger.debug(f"Trying to find index of {reagent_type}")
# NOTE: convert input to user friendly string? # NOTE: convert input to user-friendly string?
try: try:
reagent_role = reagent_role.replace("_", " ").title() reagent_role = reagent_role.replace("_", " ").title()
except AttributeError: except AttributeError:

View File

@@ -7,8 +7,8 @@ from PyQt6.QtWidgets import QTableView, QMenu
from PyQt6.QtCore import Qt, QAbstractTableModel, QSortFilterProxyModel from PyQt6.QtCore import Qt, QAbstractTableModel, QSortFilterProxyModel
from PyQt6.QtGui import QAction, QCursor from PyQt6.QtGui import QAction, QCursor
from backend.db.models import BasicSubmission from backend.db.models import BasicSubmission
from backend.excel import make_report_html, make_report_xlsx, ReportMaker from backend.excel import ReportMaker
from tools import Report, Result, row_map, get_first_blank_df_row, html_to_pdf from tools import Report, Result, report_result
from .functions import select_save_file, select_open_file from .functions import select_save_file, select_open_file
from .misc import ReportDatePicker from .misc import ReportDatePicker
import pandas as pd import pandas as pd
@@ -129,14 +129,15 @@ class SubmissionsSheet(QTableView):
func = self.con_actions[action_name] func = self.con_actions[action_name]
func(obj=self) func(obj=self)
@report_result
def link_extractions(self): def link_extractions(self):
""" """
Pull extraction logs into the db Pull extraction logs into the db
""" """
self.link_extractions_function()
self.app.report.add_result(self.report)
self.report = Report() self.report = Report()
self.app.result_reporter() self.link_extractions_function()
self.report.add_result(self.report)
return self.report
def link_extractions_function(self): def link_extractions_function(self):
""" """
@@ -179,6 +180,7 @@ class SubmissionsSheet(QTableView):
sub.save() sub.save()
self.report.add_result(Result(msg=f"We added {count} logs to the database.", status='Information')) self.report.add_result(Result(msg=f"We added {count} logs to the database.", status='Information'))
@report_result
def link_pcr(self): def link_pcr(self):
""" """
Pull pcr logs into the db Pull pcr logs into the db
@@ -186,7 +188,7 @@ class SubmissionsSheet(QTableView):
self.link_pcr_function() self.link_pcr_function()
self.app.report.add_result(self.report) self.app.report.add_result(self.report)
self.report = Report() self.report = Report()
self.app.result_reporter() return self.report
def link_pcr_function(self): def link_pcr_function(self):
""" """
@@ -226,14 +228,14 @@ class SubmissionsSheet(QTableView):
sub.save() sub.save()
self.report.add_result(Result(msg=f"We added {count} logs to the database.", status='Information')) self.report.add_result(Result(msg=f"We added {count} logs to the database.", status='Information'))
@report_result
def generate_report(self): def generate_report(self):
""" """
Make a report Make a report
""" """
self.generate_report_function()
self.app.report.add_result(self.report)
self.report = Report() self.report = Report()
self.app.result_reporter() self.generate_report_function()
return self.report
def generate_report_function(self): def generate_report_function(self):
""" """
@@ -250,43 +252,7 @@ class SubmissionsSheet(QTableView):
dlg = ReportDatePicker() dlg = ReportDatePicker()
if dlg.exec(): if dlg.exec():
info = dlg.parse_form() info = dlg.parse_form()
# logger.debug(f"Report info: {info}")
# NOTE: find submissions based on date range
subs = BasicSubmission.query(start_date=info['start_date'], end_date=info['end_date'])
# NOTE: convert each object to dict
records = [item.to_dict(report=True) for item in subs]
# logger.debug(f"Records: {pformat(records)}")
# NOTE: make dataframe from record dictionaries
detailed_df, summary_df = make_report_xlsx(records=records)
html = make_report_html(df=summary_df, start_date=info['start_date'], end_date=info['end_date'])
# NOTE: get save location of report
fname = select_save_file(obj=self, default_name=f"Submissions_Report_{info['start_date']}-{info['end_date']}.docx", extension="docx") fname = select_save_file(obj=self, default_name=f"Submissions_Report_{info['start_date']}-{info['end_date']}.docx", extension="docx")
# html_to_pdf(html=html, output_file=fname)
# writer = pd.ExcelWriter(fname.with_suffix(".xlsx"), engine='openpyxl')
# summary_df.to_excel(writer, sheet_name="Report")
# detailed_df.to_excel(writer, sheet_name="Details", index=False)
# worksheet: Worksheet = writer.sheets['Report']
# for idx, col in enumerate(summary_df, start=1): # loop through all columns
# series = summary_df[col]
# max_len = max((
# series.astype(str).map(len).max(), # len of largest item
# len(str(series.name)) # len of column name/header
# )) + 20 # adding a little extra space
# try:
# # NOTE: Convert idx to letter
# col_letter = chr(ord('@') + idx)
# worksheet.column_dimensions[col_letter].width = max_len
# except ValueError:
# pass
# blank_row = get_first_blank_df_row(summary_df) + 1
# # logger.debug(f"Blank row index = {blank_row}")
# for col in range(3,6):
# col_letter = row_map[col]
# worksheet.cell(row=blank_row, column=col, value=f"=SUM({col_letter}2:{col_letter}{str(blank_row-1)})")
# for cell in worksheet['D']:
# if cell.row > 1:
# cell.style = 'Currency'
# writer.close()
rp = ReportMaker(start_date=info['start_date'], end_date=info['end_date']) rp = ReportMaker(start_date=info['start_date'], end_date=info['end_date'])
rp.write_report(filename=fname, obj=self) rp.write_report(filename=fname, obj=self)
self.report.add_result(report) self.report.add_result(report)
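
link_extractions, link_pcr and generate_report are rewired above to return their Report and are wrapped in a report_result decorator imported from tools. The decorator's definition is not part of this diff, so the shape below is only a guess, inferred from how it is used and from the App.result_reporter method it replaces:

# Hypothetical sketch of a report_result decorator -- the real one lives in tools and
# is not shown in this diff, so names and behaviour here are guesses based on usage:
# decorated slots return a Report whose Results get surfaced to the user.
import functools
import logging

logger = logging.getLogger("submissions.tools")

def report_result(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        report = func(*args, **kwargs)
        if report is None or not getattr(report, "results", []):
            logger.info("Action completed successfully.")
            return report
        for result in report.results:
            # The deleted App.result_reporter popped an alert per result;
            # a decorator could do the same (here it just logs).
            logger.warning(f"{result.status}: {result.msg}")
        return report
    return wrapper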

View File

@@ -11,7 +11,7 @@ from pathlib import Path
from . import select_open_file, select_save_file from . import select_open_file, select_save_file
import logging, difflib, inspect import logging, difflib, inspect
from pathlib import Path from pathlib import Path
from tools import Report, Result, check_not_nan, workbook_2_csv, main_form_style from tools import Report, Result, check_not_nan, workbook_2_csv, main_form_style, report_result
from backend.excel.parser import SheetParser from backend.excel.parser import SheetParser
from backend.validators import PydSubmission, PydReagent from backend.validators import PydSubmission, PydReagent
from backend.db import ( from backend.db import (
@@ -59,17 +59,16 @@ class SubmissionFormContainer(QWidget):
self.app.last_dir = fname.parent self.app.last_dir = fname.parent
self.import_drag.emit(fname) self.import_drag.emit(fname)
@report_result
def importSubmission(self, fname: Path | None = None): def importSubmission(self, fname: Path | None = None):
""" """
import submission from excel sheet into form import submission from excel sheet into form
""" """
self.app.raise_() self.app.raise_()
self.app.activateWindow() self.app.activateWindow()
self.import_submission_function(fname)
# logger.debug(f"Result from result reporter: {self.report.results}")
self.app.report.add_result(self.report)
self.report = Report() self.report = Report()
self.app.result_reporter() self.import_submission_function(fname)
return self.report
def import_submission_function(self, fname: Path | None = None): def import_submission_function(self, fname: Path | None = None):
""" """
@@ -115,8 +114,9 @@ class SubmissionFormContainer(QWidget):
# logger.debug(f"Outgoing report: {self.report.results}") # logger.debug(f"Outgoing report: {self.report.results}")
# logger.debug(f"All attributes of submission container:\n{pformat(self.__dict__)}") # logger.debug(f"All attributes of submission container:\n{pformat(self.__dict__)}")
@report_result
def add_reagent(self, reagent_lot: str | None = None, reagent_role: str | None = None, expiry: date | None = None, def add_reagent(self, reagent_lot: str | None = None, reagent_role: str | None = None, expiry: date | None = None,
name: str | None = None): name: str | None = None) -> Tuple[PydReagent, Report]:
""" """
Action to create new reagent in DB. Action to create new reagent in DB.
@@ -144,16 +144,18 @@ class SubmissionFormContainer(QWidget):
sqlobj, assoc, result = reagent.toSQL() sqlobj, assoc, result = reagent.toSQL()
sqlobj.save() sqlobj.save()
report.add_result(result) report.add_result(result)
self.app.report.add_result(report) # logger.debug(f"Reagent: {reagent}, Report: {report}")
self.app.result_reporter() return reagent, report
return reagent
class SubmissionFormWidget(QWidget): class SubmissionFormWidget(QWidget):
def __init__(self, parent: QWidget, submission: PydSubmission) -> None: def __init__(self, parent: QWidget, submission: PydSubmission, disable: list | None = None) -> None:
super().__init__(parent) super().__init__(parent)
# self.report = Report() # self.report = Report()
# logger.debug(f"Disable: {disable}")
if disable is None:
disable = []
self.app = parent.app self.app = parent.app
self.pyd = submission self.pyd = submission
self.missing_info = [] self.missing_info = []
@@ -166,12 +168,19 @@ class SubmissionFormWidget(QWidget):
for k in list(self.pyd.model_fields.keys()) + list(self.pyd.model_extra.keys()): for k in list(self.pyd.model_fields.keys()) + list(self.pyd.model_extra.keys()):
if k in self.ignore: if k in self.ignore:
continue continue
try:
# logger.debug(f"Key: {k}, Disable: {disable}")
check = k in disable
# logger.debug(f"Check: {check}")
except TypeError:
check = False
try: try:
value = self.pyd.__getattribute__(k) value = self.pyd.__getattribute__(k)
except AttributeError: except AttributeError:
logger.error(f"Couldn't get attribute from pyd: {k}") logger.error(f"Couldn't get attribute from pyd: {k}")
value = dict(value=None, missing=True) value = dict(value=None, missing=True)
add_widget = self.create_widget(key=k, value=value, submission_type=self.pyd.submission_type['value'], sub_obj=st) add_widget = self.create_widget(key=k, value=value, submission_type=self.pyd.submission_type['value'],
sub_obj=st, disable=check)
if add_widget is not None: if add_widget is not None:
self.layout.addWidget(add_widget) self.layout.addWidget(add_widget)
if k == "extraction_kit": if k == "extraction_kit":
@@ -180,11 +189,13 @@ class SubmissionFormWidget(QWidget):
self.scrape_reagents(self.pyd.extraction_kit) self.scrape_reagents(self.pyd.extraction_kit)
def create_widget(self, key: str, value: dict | PydReagent, submission_type: str | None = None, def create_widget(self, key: str, value: dict | PydReagent, submission_type: str | None = None,
extraction_kit: str | None = None, sub_obj:BasicSubmission|None=None) -> "self.InfoItem": extraction_kit: str | None = None, sub_obj: BasicSubmission | None = None,
disable: bool = False) -> "self.InfoItem":
""" """
Make an InfoItem widget to hold a field Make an InfoItem widget to hold a field
Args: Args:
disable ():
key (str): Name of the field key (str): Name of the field
value (dict): Value of field value (dict): Value of field
submission_type (str | None, optional): Submissiontype as str. Defaults to None. submission_type (str | None, optional): Submissiontype as str. Defaults to None.
@@ -192,18 +203,25 @@ class SubmissionFormWidget(QWidget):
Returns: Returns:
self.InfoItem: Form widget to hold name:value self.InfoItem: Form widget to hold name:value
""" """
# logger.debug(f"Key: {key}, Disable: {disable}")
if key not in self.ignore: if key not in self.ignore:
match value: match value:
case PydReagent(): case PydReagent():
if value.name.lower() != "not applicable": if value.name.lower() != "not applicable":
widget = self.ReagentFormWidget(self, reagent=value, extraction_kit=extraction_kit) widget = self.ReagentFormWidget(self, reagent=value, extraction_kit=extraction_kit)
else: else:
widget = None widget = None
case _: case _:
widget = self.InfoItem(self, key=key, value=value, submission_type=submission_type, sub_obj=sub_obj) widget = self.InfoItem(self, key=key, value=value, submission_type=submission_type, sub_obj=sub_obj)
# logger.debug(f"Setting widget enabled to: {not disable}")
if disable:
widget.input.setEnabled(False)
widget.input.setToolTip("Widget disabled to protect database integrity.")
return widget return widget
return None return None
@report_result
def scrape_reagents(self, *args, **kwargs): #extraction_kit:str, caller:str|None=None): def scrape_reagents(self, *args, **kwargs): #extraction_kit:str, caller:str|None=None):
""" """
Extracted scrape reagents function that will run when Extracted scrape reagents function that will run when
@@ -250,8 +268,7 @@ class SubmissionFormWidget(QWidget):
self.layout.addWidget(submit_btn) self.layout.addWidget(submit_btn)
submit_btn.clicked.connect(self.submit_new_sample_function) submit_btn.clicked.connect(self.submit_new_sample_function)
self.setLayout(self.layout) self.setLayout(self.layout)
self.app.report.add_result(report) return report
self.app.result_reporter()
def clear_form(self): def clear_form(self):
""" """
@@ -275,7 +292,8 @@ class SubmissionFormWidget(QWidget):
query = [widget for widget in query if widget.objectName() == object_name] query = [widget for widget in query if widget.objectName() == object_name]
return query return query
def submit_new_sample_function(self) -> QWidget: @report_result
def submit_new_sample_function(self, *args) -> Report:
""" """
Parse forms and add sample to the database. Parse forms and add sample to the database.
@@ -294,37 +312,40 @@ class SubmissionFormWidget(QWidget):
_, result = self.pyd.check_kit_integrity() _, result = self.pyd.check_kit_integrity()
report.add_result(result) report.add_result(result)
if len(result.results) > 0: if len(result.results) > 0:
self.app.report.add_result(report) # self.app.report.add_result(report)
self.app.result_reporter() # self.app.report_result()
return return
# logger.debug(f"PYD before transformation into SQL:\n\n{self.pyd}\n\n") # logger.debug(f"PYD before transformation into SQL:\n\n{self.pyd}\n\n")
base_submission, result = self.pyd.to_sql() base_submission, result = self.pyd.to_sql()
# logger.debug(f"SQL object: {pformat(base_submission.__dict__)}") # logger.debug(f"SQL object: {pformat(base_submission.__dict__)}")
# logger.debug(f"Base submission: {base_submission.to_dict()}") # logger.debug(f"Base submission: {base_submission.to_dict()}")
# NOTE: check output message for issues # NOTE: check output message for issues
match result.code: try:
code = report.results[-1].code
except IndexError:
code = 0
match code:
# NOTE: code 0: everything is fine. # NOTE: code 0: everything is fine.
case 0: case 0:
report.add_result(None) pass
# NOTE: code 1: ask for overwrite # NOTE: code 1: ask for overwrite
case 1: case 1:
dlg = QuestionAsker(title=f"Review {base_submission.rsl_plate_num}?", message=result.msg) dlg = QuestionAsker(title=f"Review {base_submission.rsl_plate_num}?", message=result.msg)
if dlg.exec(): if dlg.exec():
# NOTE: Do not add duplicate reagents. # NOTE: Do not add duplicate reagents.
result = None pass
else: else:
self.app.ctx.database_session.rollback() self.app.ctx.database_session.rollback()
report.add_result(Result(msg="Overwrite cancelled", status="Information")) report.add_result(Result(msg="Overwrite cancelled", status="Information"))
self.app.report.add_result(report) # self.app.report.add_result(report)
self.app.result_reporter() # self.app.report_result()
return return report
# NOTE: code 2: No RSL plate number given # NOTE: code 2: No RSL plate number given
case 2: case 2:
report.add_result(result) report.add_result(result)
self.app.report.add_result(report) # self.app.report.add_result(report)
self.app.result_reporter() # self.app.report_result()
return return report
case _: case _:
pass pass
# NOTE: add reagents to submission object # NOTE: add reagents to submission object
@@ -338,8 +359,7 @@ class SubmissionFormWidget(QWidget):
# NOTE: reset form # NOTE: reset form
self.setParent(None) self.setParent(None)
# logger.debug(f"All attributes of obj: {pformat(self.__dict__)}") # logger.debug(f"All attributes of obj: {pformat(self.__dict__)}")
self.app.report.add_result(report) return report
self.app.result_reporter()
def export_csv_function(self, fname: Path | None = None): def export_csv_function(self, fname: Path | None = None):
""" """
@@ -352,7 +372,6 @@ class SubmissionFormWidget(QWidget):
fname = select_save_file(obj=self, default_name=self.pyd.construct_filename(), extension="csv") fname = select_save_file(obj=self, default_name=self.pyd.construct_filename(), extension="csv")
try: try:
self.pyd.export_csv(fname) self.pyd.export_csv(fname)
# workbook_2_csv(worksheet=self.pyd.csv, filename=fname)
except PermissionError: except PermissionError:
logger.warning(f"Could not get permissions to {fname}. Possibly the request was cancelled.") logger.warning(f"Could not get permissions to {fname}. Possibly the request was cancelled.")
except AttributeError: except AttributeError:
@@ -398,11 +417,13 @@ class SubmissionFormWidget(QWidget):
class InfoItem(QWidget): class InfoItem(QWidget):
def __init__(self, parent: QWidget, key: str, value: dict, submission_type: str | None = None, sub_obj:BasicSubmission|None=None) -> None: def __init__(self, parent: QWidget, key: str, value: dict, submission_type: str | None = None,
sub_obj: BasicSubmission | None = None) -> None:
super().__init__(parent) super().__init__(parent)
layout = QVBoxLayout() layout = QVBoxLayout()
self.label = self.ParsedQLabel(key=key, value=value) self.label = self.ParsedQLabel(key=key, value=value)
self.input: QWidget = self.set_widget(parent=self, key=key, value=value, submission_type=submission_type, sub_obj=sub_obj) self.input: QWidget = self.set_widget(parent=self, key=key, value=value, submission_type=submission_type,
sub_obj=sub_obj)
self.setObjectName(key) self.setObjectName(key)
try: try:
self.missing: bool = value['missing'] self.missing: bool = value['missing']
@@ -439,7 +460,8 @@ class SubmissionFormWidget(QWidget):
return None, None return None, None
return self.input.objectName(), dict(value=value, missing=self.missing) return self.input.objectName(), dict(value=value, missing=self.missing)
def set_widget(self, parent: QWidget, key: str, value: dict, submission_type: str | None = None, sub_obj:BasicSubmission|None=None) -> QWidget: def set_widget(self, parent: QWidget, key: str, value: dict, submission_type: str | None = None,
sub_obj: BasicSubmission | None = None) -> QWidget:
""" """
Creates form widget Creates form widget
@@ -472,6 +494,7 @@ class SubmissionFormWidget(QWidget):
pass pass
# set combobox values to lookedup values # set combobox values to lookedup values
add_widget.addItems(labs) add_widget.addItems(labs)
add_widget.setToolTip("Select submitting lab.")
case 'extraction_kit': case 'extraction_kit':
# if extraction kit not available, all other values fail # if extraction kit not available, all other values fail
if not check_not_nan(value): if not check_not_nan(value):
@@ -493,15 +516,7 @@ class SubmissionFormWidget(QWidget):
logger.error(f"Couldn't find {obj.prsr.sub['extraction_kit']}") logger.error(f"Couldn't find {obj.prsr.sub['extraction_kit']}")
obj.ext_kit = uses[0] obj.ext_kit = uses[0]
add_widget.addItems(uses) add_widget.addItems(uses)
# case 'submitted_date': add_widget.setToolTip("Select extraction kit.")
# # NOTE: uses base calendar
# add_widget = QDateEdit(calendarPopup=True)
# # NOTE: sets submitted date based on date found in excel sheet
# try:
# add_widget.setDate(value)
# # NOTE: if not found, use today
# except:
# add_widget.setDate(date.today())
case 'submission_category': case 'submission_category':
add_widget = QComboBox() add_widget = QComboBox()
cats = ['Diagnostic', "Surveillance", "Research"] cats = ['Diagnostic', "Surveillance", "Research"]
@@ -511,6 +526,7 @@ class SubmissionFormWidget(QWidget):
except ValueError: except ValueError:
cats.insert(0, cats.pop(cats.index(submission_type))) cats.insert(0, cats.pop(cats.index(submission_type)))
add_widget.addItems(cats) add_widget.addItems(cats)
add_widget.setToolTip("Enter submission category or select from list.")
case _: case _:
if key in sub_obj.timestamps(): if key in sub_obj.timestamps():
add_widget = QDateEdit(calendarPopup=True) add_widget = QDateEdit(calendarPopup=True)
@@ -520,11 +536,13 @@ class SubmissionFormWidget(QWidget):
# NOTE: if not found, use today # NOTE: if not found, use today
except: except:
add_widget.setDate(date.today()) add_widget.setDate(date.today())
add_widget.setToolTip(f"Select date for {key}")
else: else:
# NOTE: anything else gets added in as a line edit # NOTE: anything else gets added in as a line edit
add_widget = QLineEdit() add_widget = QLineEdit()
# logger.debug(f"Setting widget text to {str(value).replace('_', ' ')}") # logger.debug(f"Setting widget text to {str(value).replace('_', ' ')}")
add_widget.setText(str(value).replace("_", " ")) add_widget.setText(str(value).replace("_", " "))
add_widget.setToolTip(f"Enter value for {key}")
if add_widget is not None: if add_widget is not None:
add_widget.setObjectName(key) add_widget.setObjectName(key)
add_widget.setParent(parent) add_widget.setParent(parent)
@@ -594,13 +612,14 @@ class SubmissionFormWidget(QWidget):
# NOTE: If changed set self.missing to True and update self.label # NOTE: If changed set self.missing to True and update self.label
self.lot.currentTextChanged.connect(self.updated) self.lot.currentTextChanged.connect(self.updated)
def parse_form(self) -> Tuple[PydReagent, dict]: def parse_form(self) -> Tuple[PydReagent | None, Report]:
""" """
Pulls form info into PydReagent Pulls form info into PydReagent
Returns: Returns:
Tuple[PydReagent, dict]: PydReagent and Report(?) Tuple[PydReagent, dict]: PydReagent and Report(?)
""" """
report = Report()
lot = self.lot.currentText() lot = self.lot.currentText()
# logger.debug(f"Using this lot for the reagent {self.reagent}: {lot}") # logger.debug(f"Using this lot for the reagent {self.reagent}: {lot}")
wanted_reagent = Reagent.query(lot_number=lot, reagent_role=self.reagent.role) wanted_reagent = Reagent.query(lot_number=lot, reagent_role=self.reagent.role)
@@ -609,14 +628,16 @@ class SubmissionFormWidget(QWidget):
dlg = QuestionAsker(title=f"Add {lot}?", dlg = QuestionAsker(title=f"Add {lot}?",
message=f"Couldn't find reagent type {self.reagent.role}: {lot} in the database.\n\nWould you like to add it?") message=f"Couldn't find reagent type {self.reagent.role}: {lot} in the database.\n\nWould you like to add it?")
if dlg.exec(): if dlg.exec():
wanted_reagent = self.parent().parent().add_reagent(reagent_lot=lot, reagent_role=self.reagent.role, wanted_reagent, _ = self.parent().parent().add_reagent(reagent_lot=lot,
reagent_role=self.reagent.role,
expiry=self.reagent.expiry, expiry=self.reagent.expiry,
name=self.reagent.name) name=self.reagent.name)
return wanted_reagent, None return wanted_reagent, report
else: else:
# NOTE: In this case we will have an empty reagent and the submission will fail kit integrity check # NOTE: In this case we will have an empty reagent and the submission will fail kit integrity check
# logger.debug("Will not add reagent.") # logger.debug("Will not add reagent.")
return None, Result(msg="Failed integrity check", status="Critical") report.add_result(Result(msg="Failed integrity check", status="Critical"))
return None, report
else: else:
# NOTE: Since this now gets passed in directly from the parser -> pyd -> form and the parser gets the name # NOTE: Since this now gets passed in directly from the parser -> pyd -> form and the parser gets the name
# from the db, it should no longer be necessary to query the db with reagent/kit, but with rt name directly. # from the db, it should no longer be necessary to query the db with reagent/kit, but with rt name directly.
@@ -624,7 +645,7 @@ class SubmissionFormWidget(QWidget):
if rt is None: if rt is None:
rt = ReagentRole.query(kit_type=self.extraction_kit, reagent=wanted_reagent) rt = ReagentRole.query(kit_type=self.extraction_kit, reagent=wanted_reagent)
return PydReagent(name=wanted_reagent.name, lot=wanted_reagent.lot, role=rt.name, return PydReagent(name=wanted_reagent.name, lot=wanted_reagent.lot, role=rt.name,
expiry=wanted_reagent.expiry, missing=False), None expiry=wanted_reagent.expiry, missing=False), report
def updated(self): def updated(self):
""" """
@@ -708,4 +729,5 @@ class SubmissionFormWidget(QWidget):
# logger.debug(f"New relevant reagents: {relevant_reagents}") # logger.debug(f"New relevant reagents: {relevant_reagents}")
self.setObjectName(f"lot_{reagent.role}") self.setObjectName(f"lot_{reagent.role}")
self.addItems(relevant_reagents) self.addItems(relevant_reagents)
self.setToolTip(f"Enter lot number for the reagent used for {reagent.role}")
# self.setStyleSheet(main_form_style) # self.setStyleSheet(main_form_style)

View File

@@ -4,6 +4,8 @@ Contains miscellaenous functions used by both frontend and backend.
from __future__ import annotations from __future__ import annotations
import json import json
from json import JSONDecodeError
import jinja2
import numpy as np import numpy as np
import logging, re, yaml, sys, os, stat, platform, getpass, inspect, csv import logging, re, yaml, sys, os, stat, platform, getpass, inspect, csv
import pandas as pd import pandas as pd
@@ -18,7 +20,6 @@ from typing import Any, Tuple, Literal, List
from PyQt6.QtGui import QPageSize from PyQt6.QtGui import QPageSize
from PyQt6.QtWebEngineWidgets import QWebEngineView from PyQt6.QtWebEngineWidgets import QWebEngineView
from openpyxl.worksheet.worksheet import Worksheet from openpyxl.worksheet.worksheet import Worksheet
# from PyQt6 import QtPrintSupport, QtCore, QtWebEngineWidgets
from PyQt6.QtPrintSupport import QPrinter from PyQt6.QtPrintSupport import QPrinter
logger = logging.getLogger(f"submissions.{__name__}") logger = logging.getLogger(f"submissions.{__name__}")
@@ -222,7 +223,11 @@ class Settings(BaseSettings, extra="allow"):
FileNotFoundError: Error if database not found. FileNotFoundError: Error if database not found.
""" """
database_schema: str
directory_path: Path directory_path: Path
database_user: str | None = None
database_password: str | None = None
database_name: str
database_path: Path | str | None = None database_path: Path | str | None = None
backup_path: Path | str | None = None backup_path: Path | str | None = None
# super_users: list|None = None # super_users: list|None = None
@@ -260,17 +265,26 @@ class Settings(BaseSettings, extra="allow"):
@field_validator('database_path', mode="before") @field_validator('database_path', mode="before")
@classmethod @classmethod
def ensure_database_exists(cls, value, values): def ensure_database_exists(cls, value, values):
if value == ":memory:": # if value == ":memory:":
# return value
match values.data['database_schema']:
case "sqlite":
value = f"/{Path(value).absolute().__str__()}/{values.data['database_name']}.db"
# db_name = f"{values.data['database_name']}.db"
case _:
value = f"@{value}/{values.data['database_name']}"
# db_name = values.data['database_name']
# match value:
# case str():
# value = Path(value)
# case None:
# value = values.data['directory_path'].joinpath("submissions.db")
# if value.exists():
# return value
# else:
# raise FileNotFoundError(f"Couldn't find database at {value}")
return value return value
match value:
case str():
value = Path(value)
case None:
value = values.data['directory_path'].joinpath("submissions.db")
if value.exists():
return value
else:
raise FileNotFoundError(f"Couldn't find database at {value}")
@field_validator('database_session', mode="before") @field_validator('database_session', mode="before")
@classmethod @classmethod
@@ -278,27 +292,33 @@ class Settings(BaseSettings, extra="allow"):
if value is not None: if value is not None:
return value return value
else: else:
database_path = values.data['database_path'] template = jinja_template_loading().from_string(
if database_path is None: "{{ values['database_schema'] }}://{% if values['database_user'] %}{{ values['database_user'] }}{% if values['database_password'] %}:{{ values['database_password'] }}{% endif %}{% endif %}{{ values['database_path'] }}")
# NOTE: check in user's .submissions directory for submissions.db database_path = template.render(values=values.data)
if Path.home().joinpath(".submissions", "submissions.db").exists(): # print(f"Using {database_path} for database path")
database_path = Path.home().joinpath(".submissions", "submissions.db") # database_path = values.data['database_path']
# NOTE: finally, look in the local dir # if database_path is None:
else: # # NOTE: check in user's .submissions directory for submissions.db
database_path = package_dir.joinpath("submissions.db") # if Path.home().joinpath(".submissions", "submissions.db").exists():
else: # database_path = Path.home().joinpath(".submissions", "submissions.db")
if database_path == ":memory:": # # NOTE: finally, look in the local dir
pass # else:
# NOTE: check if user defined path is directory # database_path = package_dir.joinpath("submissions.db")
elif database_path.is_dir(): # else:
database_path = database_path.joinpath("submissions.db") # if database_path == ":memory:":
# NOTE: check if user defined path is a file # pass
elif database_path.is_file(): # # NOTE: check if user defined path is directory
database_path = database_path # elif database_path.is_dir():
else: # database_path = database_path.joinpath("submissions.db")
raise FileNotFoundError("No database file found. Exiting program.") # # NOTE: check if user defined path is a file
# elif database_path.is_file():
# database_path = database_path
# else:
# raise FileNotFoundError("No database file found. Exiting program.")
logger.info(f"Using {database_path} for database file.") logger.info(f"Using {database_path} for database file.")
engine = create_engine(f"sqlite:///{database_path}") #, echo=True, future=True) # engine = create_engine(f"sqlite:///{database_path}") #, echo=True, future=True)
# engine = create_engine("postgresql+psycopg2://postgres:RE,4321q@localhost:5432/submissions")
engine = create_engine(database_path)
session = Session(engine) session = Session(engine)
return session return session
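The Jinja template above then stitches schema, credentials, and that path fragment into a full SQLAlchemy URL. A hedged sketch of the two expected shapes, using plain jinja2 instead of the app's jinja_template_loading() helper and made-up credentials:

```python
import jinja2

template = jinja2.Environment().from_string(
    "{{ values['database_schema'] }}://"
    "{% if values['database_user'] %}{{ values['database_user'] }}"
    "{% if values['database_password'] %}:{{ values['database_password'] }}{% endif %}{% endif %}"
    "{{ values['database_path'] }}"
)

# Hypothetical postgres values: database_path already carries "@host:port/dbname"
print(template.render(values=dict(
    database_schema="postgresql+psycopg2",
    database_user="submissions_user",
    database_password="changeme",
    database_path="@localhost:5432/submissions",
)))  # -> postgresql+psycopg2://submissions_user:changeme@localhost:5432/submissions

# Hypothetical sqlite values: no credentials, database_path is the absolute file path
print(template.render(values=dict(
    database_schema="sqlite",
    database_user=None,
    database_password=None,
    database_path="//home/user/submissions/submissions.db",
)))  # -> sqlite:////home/user/submissions/submissions.db
```

Either string can be handed straight to create_engine(), which is why the old sqlite-only f-string is no longer needed.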
@@ -316,13 +336,21 @@ class Settings(BaseSettings, extra="allow"):
def set_from_db(self, db_path: Path): def set_from_db(self, db_path: Path):
if 'pytest' in sys.modules: if 'pytest' in sys.modules:
config_items = dict(power_users=['lwark', 'styson', 'ruwang']) output = dict(power_users=['lwark', 'styson', 'ruwang'])
else: else:
session = Session(create_engine(f"sqlite:///{db_path}")) # session = Session(create_engine(f"sqlite:///{db_path}"))
session = self.database_session
config_items = session.execute(text("SELECT * FROM _configitem")).all() config_items = session.execute(text("SELECT * FROM _configitem")).all()
session.close() session.close()
config_items = {item[1]: json.loads(item[2]) for item in config_items} # print(config_items)
for k, v in config_items.items(): output = {}
for item in config_items:
try:
output[item[1]] = json.loads(item[2])
except (JSONDecodeError, TypeError):
output[item[1]] = item[2]
# config_items = {item[1]: json.loads(item[2]) for item in config_items}
for k, v in output.items():
if not hasattr(self, k): if not hasattr(self, k):
self.__setattr__(k, v) self.__setattr__(k, v)
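The JSON-or-raw fallback in set_from_db above can be read as a small helper; a sketch under the assumption that each _configitem row is (id, key, value), with made-up example rows:

```python
import json
from json import JSONDecodeError

def parse_config_rows(rows: list[tuple]) -> dict:
    """Decode each value as JSON where possible, otherwise keep the raw value."""
    output = {}
    for row in rows:
        try:
            output[row[1]] = json.loads(row[2])
        except (JSONDecodeError, TypeError):
            output[row[1]] = row[2]
    return output

# parse_config_rows([(1, "power_users", '["lwark"]'), (2, "site_name", "Robotics Lab")])
# -> {"power_users": ["lwark"], "site_name": "Robotics Lab"}
```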
@@ -355,7 +383,6 @@ def get_config(settings_path: Path | str | None = None) -> Settings:
CONFIGDIR.mkdir(parents=True) CONFIGDIR.mkdir(parents=True)
except FileExistsError: except FileExistsError:
logger.warning(f"Config directory {CONFIGDIR} already exists.") logger.warning(f"Config directory {CONFIGDIR} already exists.")
try: try:
LOGDIR.mkdir(parents=True) LOGDIR.mkdir(parents=True)
except FileExistsError: except FileExistsError:
@@ -373,7 +400,7 @@ def get_config(settings_path: Path | str | None = None) -> Settings:
if check_if_app(): if check_if_app():
settings_path = Path(sys._MEIPASS).joinpath("files", "config.yml") settings_path = Path(sys._MEIPASS).joinpath("files", "config.yml")
else: else:
settings_path = package_dir.joinpath('config.yml') settings_path = package_dir.joinpath('src', 'config.yml')
with open(settings_path, "r") as dset: with open(settings_path, "r") as dset:
default_settings = yaml.load(dset, Loader=yaml.Loader) default_settings = yaml.load(dset, Loader=yaml.Loader)
# NOTE: Tell program we need to copy the config.yml to the user directory # NOTE: Tell program we need to copy the config.yml to the user directory
@@ -502,7 +529,7 @@ def setup_logger(verbosity: int = 3):
# NOTE: create console handler with a higher log level # NOTE: create console handler with a higher log level
# NOTE: create custom logger with STDERR -> log # NOTE: create custom logger with STDERR -> log
ch = logging.StreamHandler(stream=sys.stdout) ch = logging.StreamHandler(stream=sys.stdout)
# NOTE: set looging level based on verbosity # NOTE: set logging level based on verbosity
match verbosity: match verbosity:
case 3: case 3:
ch.setLevel(logging.DEBUG) ch.setLevel(logging.DEBUG)
@@ -542,10 +569,10 @@ def copy_settings(settings_path: Path, settings: dict) -> dict:
dict: output dictionary for use in first run dict: output dictionary for use in first run
""" """
# NOTE: if the current user is not a superuser remove the superusers entry # NOTE: if the current user is not a superuser remove the superusers entry
if not getpass.getuser() in settings['super_users']: # if not getpass.getuser() in settings['super_users']:
del settings['super_users'] # del settings['super_users']
if not getpass.getuser() in settings['power_users']: # if not getpass.getuser() in settings['power_users']:
del settings['power_users'] # del settings['power_users']
if not settings_path.exists(): if not settings_path.exists():
with open(settings_path, 'w') as f: with open(settings_path, 'w') as f:
yaml.dump(settings, f) yaml.dump(settings, f)
@@ -668,7 +695,7 @@ class Report(BaseModel):
logger.error(f"Unknown variable type: {type(result)} for <Result> entry into <Report>") logger.error(f"Unknown variable type: {type(result)} for <Result> entry into <Report>")
def rreplace(s:str, old:str, new:str) -> str: def rreplace(s: str, old: str, new: str) -> str:
""" """
Removes rightmost occurrence of a substring Removes rightmost occurrence of a substring
@@ -683,7 +710,7 @@ def rreplace(s:str, old:str, new:str) -> str:
return (s[::-1].replace(old[::-1], new[::-1], 1))[::-1] return (s[::-1].replace(old[::-1], new[::-1], 1))[::-1]
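For example, only the rightmost match is swapped:

```python
rreplace("a-b-c", "-", "_")  # -> "a-b_c"
```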
def html_to_pdf(html:str, output_file: Path | str): def html_to_pdf(html: str, output_file: Path | str):
""" """
Attempts to print an html string as a PDF. (currently not working) Attempts to print an html string as a PDF. (currently not working)
@@ -773,3 +800,32 @@ def check_authorization(func):
return dict(code=1, message="This user does not have permission for this function.", status="warning") return dict(code=1, message="This user does not have permission for this function.", status="warning")
return wrapper return wrapper
def report_result(func):
def wrapper(*args, **kwargs):
logger.debug(f"Arguments: {args}")
logger.debug(f"Keyword arguments: {kwargs}")
output = func(*args, **kwargs)
if isinstance(output, tuple):
report = next((item for item in output if isinstance(item, Report)), None)  # first Report in the tuple, or None if absent
else:
report = None
logger.debug(f"Got report: {report}")
try:
results = report.results
except AttributeError:
logger.error("No results available")
results = []
for iii, result in enumerate(results):
logger.debug(f"Result {iii}: {result}")
try:
dlg = result.report()
dlg.exec()
except Exception as e:
logger.error(f"Problem reporting due to {e}")
logger.error(result.msg)
logger.debug(f"Returning: {output}")
return output
return wrapper
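A hedged usage sketch of the new decorator (the wrapped function, its argument, and the file path are hypothetical; Report is the class from this module): any function returning a tuple that contains a Report gets each of its results shown as a dialog before the original return value is handed back.

```python
@report_result
def link_pcr_logs(filepath: str):          # hypothetical example function
    report = Report()                      # assumed constructible with defaults
    # ... parse the csv and append Result objects to report along the way ...
    return True, report

# The wrapper finds the Report in the returned tuple, pops a dialog for each of its
# results, then returns (True, report) to the caller unchanged.
ok, report = link_pcr_logs("C:/runlogs/pcr.csv")
```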