Pre-frontend refactor.
@@ -1,3 +1,8 @@
+## 202311.02
+
+- Construct first strand integrated into Artic Import.
+- Addition of query_or_create methods for some classes.
+
 ## 202311.01
 
 - Kit integrity is now checked before creation of sql object to improve reagent type lookups.
TODO.md | 10
@@ -1,6 +1,10 @@
-- [ ] Clear out any unnecessary ctx passes now that queries are improved.
-- [ ] Make a 'query or create' method in all db objects to go with new query.
-- [ ] Ensure Bacterial plates end up with RSL_YY_###_{submitterName}_{submitterPlateID}.xlsx format.
+- [x] Create a result object to facilitate returning function results.
+- [ ] Refactor main_window_functions into as many objects (forms, etc.) as possible to clean it up.
+- [x] Integrate 'Construct First Strand' into the Artic import.
+- [x] Clear out any unnecessary ctx passes now that queries are improved.
+- [x] Make a 'query or create' method in all db objects to go with new query.
+  - Due to necessity of user input this has only been implemented for BasicSubmission, BasicSample and SubmissionSampleAssociation.
+- [x] Ensure Bacterial plates end up with RSL_YY_###_{submitterName}_{submitterPlateID}.xlsx format.
 - [x] Move lookup functions into class methods of db objects?
   - Not sure if will work for associations.
 - [x] Update artic submission type database entry to add more technicians.
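Note: the "result object" ticked off above appears throughout the Python hunks below as `Report`, `Result`, and `report.add_result(...)`, imported from `tools`. Their definitions are not part of this commit, so the following is only a hedged sketch of the assumed shape, not the project's actual implementation; anything beyond `msg`, `status`, and `add_result()` is an assumption.

```python
# Hypothetical sketch of the Result/Report pair used in the hunks below.
from dataclasses import dataclass, field
from typing import List, Optional


@dataclass
class Result:
    msg: str = ""
    status: str = "Information"  # statuses seen below: "Information", "Warning"


@dataclass
class Report:
    results: List[Optional["Result"]] = field(default_factory=list)

    def add_result(self, result) -> "Report":
        # Accept a single Result, another Report, or None, so callers can
        # chain `return report.add_result(some_function())` as the diff does.
        if result is None:
            return self
        if isinstance(result, Report):
            self.results.extend(result.results)
        else:
            self.results.append(result)
        return self
```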
@@ -1,37 +0,0 @@
-submissions.backend.db.functions package
-========================================
-
-Submodules
-----------
-
-submissions.backend.db.functions.constructions module
------------------------------------------------------
-
-.. automodule:: submissions.backend.db.functions.constructions
-   :members:
-   :undoc-members:
-   :show-inheritance:
-
-submissions.backend.db.functions.lookups module
------------------------------------------------
-
-.. automodule:: submissions.backend.db.functions.lookups
-   :members:
-   :undoc-members:
-   :show-inheritance:
-
-submissions.backend.db.functions.misc module
---------------------------------------------
-
-.. automodule:: submissions.backend.db.functions.misc
-   :members:
-   :undoc-members:
-   :show-inheritance:
-
-Module contents
----------------
-
-.. automodule:: submissions.backend.db.functions
-   :members:
-   :undoc-members:
-   :show-inheritance:
@@ -7,9 +7,19 @@ Subpackages
 .. toctree::
    :maxdepth: 4
 
-   submissions.backend.db.functions
    submissions.backend.db.models
 
+Submodules
+----------
+
+submissions.backend.db.functions module
+---------------------------------------
+
+.. automodule:: submissions.backend.db.functions
+   :members:
+   :undoc-members:
+   :show-inheritance:
+
 Module contents
 ---------------
 
@@ -1,10 +0,0 @@
-submissions.backend.pydant package
-==================================
-
-Module contents
----------------
-
-.. automodule:: submissions.backend.pydant
-   :members:
-   :undoc-members:
-   :show-inheritance:
@@ -9,7 +9,7 @@ Subpackages
 
    submissions.backend.db
    submissions.backend.excel
-   submissions.backend.pydant
+   submissions.backend.validators
 
 Module contents
 ---------------
@@ -9,7 +9,17 @@ Subpackages
 
    submissions.backend
    submissions.frontend
-   submissions.tools
+
+Submodules
+----------
+
+submissions.tools module
+------------------------
+
+.. automodule:: submissions.tools
+   :members:
+   :undoc-members:
+   :show-inheritance:
 
 Module contents
 ---------------
@@ -1,10 +0,0 @@
-submissions.tools package
-=========================
-
-Module contents
----------------
-
-.. automodule:: submissions.tools
-   :members:
-   :undoc-members:
-   :show-inheritance:
@@ -4,7 +4,7 @@ from pathlib import Path
 
 # Version of the realpython-reader package
 __project__ = "submissions"
-__version__ = "202311.1b"
+__version__ = "202311.2b"
 __author__ = {"name":"Landon Wark", "email":"Landon.Wark@phac-aspc.gc.ca"}
 __copyright__ = "2022-2023, Government of Canada"
 
@@ -36,4 +36,10 @@ class bcolors:
 
 # Landon, this is your even more slightly less past self here. I've overhauled a lot of stuff to make things more flexible, so you should
 # hopefully be even less screwed than before... at least with regards to parsers. The addition of kits and such is another story. Putting that
 # On the todo list.
+
+'''
+Landon, this is 2023-11-07 Landon here in a comment string no less. Really all you should have to do now to add in new experiments is create a new
+BasicSubmission derivative with associated SubbmissionType, BasicSample (and maybe SubmissionSampleAssociation if you're feeling lucky), oh, also,
+kits, reagenttypes, reagents... This is sounding less and less impressive as I type it.
+'''
@@ -1,5 +1,5 @@
 '''Contains or imports all database convenience functions'''
-from tools import Settings
+from tools import Result, Report
 from sqlalchemy import event
 from sqlalchemy.engine import Engine
 from sqlalchemy.exc import OperationalError as AlcOperationalError, IntegrityError as AlcIntegrityError
@@ -7,10 +7,7 @@ from sqlite3 import OperationalError as SQLOperationalError, IntegrityError as S
 import logging
 import pandas as pd
 import json
-from pathlib import Path
 from .models import *
-# from sqlalchemy.exc import OperationalError as AlcOperationalError, IntegrityError as AlcIntegrityError
-# from sqlite3 import OperationalError as SQLOperationalError, IntegrityError as SQLIntegrityError
 import logging
 from backend.validators.pydant import *
 
@@ -47,7 +44,7 @@ def submissions_to_df(submission_type:str|None=None, limit:int=0) -> pd.DataFram
     # use lookup function to create list of dicts
     # subs = [item.to_dict() for item in lookup_submissions(ctx=ctx, submission_type=submission_type, limit=limit)]
     subs = [item.to_dict() for item in BasicSubmission.query(submission_type=submission_type, limit=limit)]
-    logger.debug(f"Got {len(subs)} results.")
+    logger.debug(f"Got {len(subs)} submissions.")
     # make df from dicts (records) in list
     df = pd.DataFrame.from_records(subs)
     # Exclude sub information
@@ -111,79 +108,24 @@ def update_last_used(reagent:Reagent, kit:KitType):
     Updates the 'last_used' field in kittypes/reagenttypes
 
     Args:
-        ctx (Settings): settings object passed down from gui
         reagent (models.Reagent): reagent to be used for update
         kit (models.KitType): kit to be used for lookup
     """
-    # rt = list(set(reagent.type).intersection(kit.reagent_types))[0]
+    report = Report()
     logger.debug(f"Attempting update of reagent type at intersection of ({reagent}), ({kit})")
-    # rt = lookup_reagent_types(ctx=ctx, kit_type=kit, reagent=reagent)
     rt = ReagentType.query(kit_type=kit, reagent=reagent)
     if rt != None:
-        # assoc = lookup_reagenttype_kittype_association(ctx=ctx, kit_type=kit, reagent_type=rt)
         assoc = KitTypeReagentTypeAssociation.query(kit_type=kit, reagent_type=rt)
         if assoc != None:
             if assoc.last_used != reagent.lot:
                 logger.debug(f"Updating {assoc} last used to {reagent.lot}")
                 assoc.last_used = reagent.lot
-                # ctx.database_session.merge(assoc)
-                # ctx.database_session.commit()
-                # result = store_object(ctx=ctx, object=assoc)
-                result = assoc.save()
-                return result
-    return dict(message=f"Updating last used {rt} was not performed.")
+                result = assoc.save()
+                return(report.add_result(result))
+    return report.add_result(Result(msg=f"Updating last used {rt} was not performed.", status="Information"))
 
-# def delete_submission(id:int) -> dict|None:
-# """
-# Deletes a submission and its associated samples from the database.
-
-# Args:
-# ctx (Settings): settings object passed down from gui
-# id (int): id of submission to be deleted.
-# """
-# # In order to properly do this Im' going to have to delete all of the secondary table stuff as well.
-# # Retrieve submission
-# # sub = lookup_submissions(ctx=ctx, id=id)
-# sub = models.BasicSubmission.query(id=id)
-# # Convert to dict for storing backup as a yml
-# sub.delete()
-# return None
-
-# def update_ww_sample(sample_obj:dict) -> dict|None:
-# """
-# Retrieves wastewater sample by rsl number (sample_obj['sample']) and updates values from constructed dictionary
-
-# Args:
-# ctx (Settings): settings object passed down from gui
-# sample_obj (dict): dictionary representing new values for database object
-# """
-# logger.debug(f"dictionary to use for update: {pformat(sample_obj)}")
-# logger.debug(f"Looking up {sample_obj['sample']} in plate {sample_obj['plate_rsl']}")
-# # assoc = lookup_submission_sample_association(ctx=ctx, submission=sample_obj['plate_rsl'], sample=sample_obj['sample'])
-# assoc = models.SubmissionSampleAssociation.query(submission=sample_obj['plate_rsl'], sample=sample_obj['sample'])
-# if assoc != None:
-# for key, value in sample_obj.items():
-# # set attribute 'key' to 'value'
-# try:
-# check = getattr(assoc, key)
-# except AttributeError as e:
-# logger.error(f"Item doesn't have field {key} due to {e}")
-# continue
-# if check != value:
-# logger.debug(f"Setting association key: {key} to {value}")
-# try:
-# setattr(assoc, key, value)
-# except AttributeError as e:
-# logger.error(f"Can't set field {key} to {value} due to {e}")
-# continue
-# else:
-# logger.error(f"Unable to find sample {sample_obj['sample']}")
-# return
-# # result = store_object(ctx=ctx, object=assoc)
-# result = assoc.save()
-# return result
-
-def check_kit_integrity(sub:BasicSubmission|KitType|PydSubmission, reagenttypes:list=[]) -> dict|None:
+def check_kit_integrity(sub:BasicSubmission|KitType|PydSubmission, reagenttypes:list=[]) -> Tuple[list, Report]:
     """
     Ensures all reagents expected in kit are listed in Submission
 
@@ -194,6 +136,7 @@ def check_kit_integrity(sub:BasicSubmission|KitType|PydSubmission, reagenttypes:
     Returns:
         dict|None: Result object containing a message and any missing components.
     """
+    report = Report()
     logger.debug(type(sub))
     # What type is sub?
     # reagenttypes = []
@@ -238,8 +181,9 @@ def check_kit_integrity(sub:BasicSubmission|KitType|PydSubmission, reagenttypes:
     if len(missing)==0:
         result = None
     else:
-        result = {'message' : f"The submission you are importing is missing some reagents expected by the kit.\n\nIt looks like you are missing: {[item.upper() for item in missing]}\n\nAlternatively, you may have set the wrong extraction kit.\n\nThe program will populate lists using existing reagents.\n\nPlease make sure you check the lots carefully!", 'missing': missing}
-    return result
+        result = Result(msg=f"The submission you are importing is missing some reagents expected by the kit.\n\nIt looks like you are missing: {[item.upper() for item in missing]}\n\nAlternatively, you may have set the wrong extraction kit.\n\nThe program will populate lists using existing reagents.\n\nPlease make sure you check the lots carefully!", status="Warning")
+    report.add_result(result)
+    return report
 
 def update_subsampassoc_with_pcr(submission:BasicSubmission, sample:BasicSample, input_dict:dict) -> dict|None:
     """
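Note: with update_last_used and check_kit_integrity now returning a Report instead of a bare dict or None, callers read messages off the collected results rather than a 'message' key. A hedged sketch of a consumer (the GUI-side caller is not in this diff, and the Report/Result attributes used here are assumptions):

```python
def log_report(report) -> None:
    # Hypothetical consumer: report.results, result.status and result.msg are
    # assumed attribute names; only add_result() is visible in this commit.
    for result in getattr(report, "results", []):
        if result is not None and getattr(result, "status", "") != "Information":
            print(result.msg)

# e.g. log_report(update_last_used(reagent=some_reagent, kit=some_kit))
```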
@@ -2,12 +2,12 @@
 All kit and reagent related models
 '''
 from __future__ import annotations
-from sqlalchemy import Column, String, TIMESTAMP, JSON, INTEGER, ForeignKey, Interval, Table, FLOAT
+from sqlalchemy import Column, String, TIMESTAMP, JSON, INTEGER, ForeignKey, Interval, Table, FLOAT, func
 from sqlalchemy.orm import relationship, validates, Query
 from sqlalchemy.ext.associationproxy import association_proxy
 from datetime import date
 import logging
-from tools import Settings, check_authorization, Base, setup_lookup, query_return
+from tools import check_authorization, Base, setup_lookup, query_return, Report, Result
 from typing import List
 from . import Organization
 
@@ -322,10 +322,11 @@ class KitTypeReagentTypeAssociation(Base):
             limit = 1
         return query_return(query=query, limit=limit)
 
-    def save(self):
+    def save(self) -> Report:
+        report = Report()
         self.metadata.session.add(self)
         self.metadata.session.commit()
-        return None
+        return report
 
 class Reagent(Base):
     """
@@ -564,6 +565,7 @@ class SubmissionType(Base):
     @setup_lookup
     def query(cls,
                 name:str|None=None,
+                key:str|None=None,
                 limit:int=0
                 ) -> SubmissionType|List[SubmissionType]:
         """
@@ -585,7 +587,17 @@ class SubmissionType(Base):
                 limit = 1
             case _:
                 pass
+        match key:
+            case str():
+                query = query.filter(cls.info_map.op('->')(key)!=None)
+            case _:
+                pass
         return query_return(query=query, limit=limit)
 
+    def save(self):
+        self.metadata.session.add(self)
+        self.metadata.session.commit()
+        return None
+
 class SubmissionTypeKitTypeAssociation(Base):
     """
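Note: SubmissionType.query above gains a `key` argument that filters on the presence of a JSON key via SQLAlchemy's generic operator API (`info_map.op('->')(key) != None`). A standalone sketch of the same idea on a toy model follows; the table and column names here are made up, and the `->` operator has to be understood by the backend (native on PostgreSQL, spelled differently on SQLite), so this only prints the generated SQL.

```python
from sqlalchemy import Column, Integer, JSON, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Doc(Base):
    # Toy stand-in for SubmissionType; only the JSON column matters here.
    __tablename__ = "doc"
    id = Column(Integer, primary_key=True)
    info_map = Column(JSON)

# Same shape as the filter added above: keep rows whose JSON contains the key.
stmt = select(Doc).where(Doc.info_map.op("->")("samples") != None)  # noqa: E711
print(stmt)  # renders roughly: ... WHERE (doc.info_map -> :param) IS NOT NULL
```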
@@ -5,7 +5,7 @@ from __future__ import annotations
 from getpass import getuser
 import math
 from pprint import pformat
-from . import Reagent, SubmissionType
+from . import Reagent, SubmissionType, KitType, Organization
 from sqlalchemy import Column, String, TIMESTAMP, INTEGER, ForeignKey, Table, JSON, FLOAT, case
 from sqlalchemy.orm import relationship, validates, Query
 import logging
@@ -21,9 +21,11 @@ from tools import check_not_nan, row_map, Base, query_return, setup_lookup
 from datetime import datetime, date
 from typing import List
 from dateutil.parser import parse
+from dateutil.parser._parser import ParserError
 import yaml
-from sqlalchemy.exc import OperationalError as AlcOperationalError, IntegrityError as AlcIntegrityError
+from sqlalchemy.exc import OperationalError as AlcOperationalError, IntegrityError as AlcIntegrityError, StatementError
 from sqlite3 import OperationalError as SQLOperationalError, IntegrityError as SQLIntegrityError
+import sys
 
 logger = logging.getLogger(f"submissions.{__name__}")
 
@@ -54,8 +56,8 @@ class BasicSubmission(Base):
     pcr_info = Column(JSON) #: unstructured output from pcr table logger or user(Artic)
     run_cost = Column(FLOAT(2)) #: total cost of running the plate. Set from constant and mutable kit costs at time of creation.
     uploaded_by = Column(String(32)) #: user name of person who submitted the submission to the database.
-    comment = Column(JSON)
-    submission_category = Column(String(64))
+    comment = Column(JSON) #: user notes
+    submission_category = Column(String(64)) #: ["Research", "Diagnostic", "Surveillance"], else defaults to submission_type_name
 
     submission_sample_associations = relationship(
         "SubmissionSampleAssociation",
@@ -253,7 +255,7 @@ class BasicSubmission(Base):
         Stupid stopgap solution to there being an issue with the Bacterial Culture plate map
 
         Args:
-            xl (pd.ExcelFile): original xl workbook
+            xl (pd.ExcelFile): original xl workbook, used for child classes mostly
             plate_map (pd.DataFrame): original plate map
 
         Returns:
@@ -268,6 +270,7 @@ class BasicSubmission(Base):
 
         Args:
             input_dict (dict): Input sample dictionary
+            xl (pd.ExcelFile): original xl workbook, used for child classes mostly
 
         Returns:
             dict: Updated sample dictionary
@@ -289,6 +292,10 @@ class BasicSubmission(Base):
         # logger.debug(f"Called {cls.__name__} sample parser")
         return input_dict
 
+    @classmethod
+    def finalize_parse(cls, input_dict:dict, xl:pd.ExcelFile|None=None, info_map:dict|None=None, plate_map:dict|None=None) -> dict:
+        return input_dict
+
     @classmethod
     def custom_autofill(cls, input_excel:Workbook) -> Workbook:
         """
@@ -315,9 +322,14 @@ class BasicSubmission(Base):
         return regex
 
     @classmethod
-    def find_subclasses(cls, attrs:dict|None=None, submission_type:str|None=None):
-        if submission_type != None:
-            return cls.find_polymorphic_subclass(submission_type)
+    def find_subclasses(cls, attrs:dict|None=None, submission_type:str|SubmissionType|None=None):
+        match submission_type:
+            case str():
+                return cls.find_polymorphic_subclass(submission_type)
+            case SubmissionType():
+                return cls.find_polymorphic_subclass(submission_type.name)
+            case _:
+                pass
         if len(attrs) == 0 or attrs == None:
             return cls
         if any([not hasattr(cls, attr) for attr in attrs]):
@@ -361,7 +373,7 @@ class BasicSubmission(Base):
                 yaml.dump(backup, f)
         except KeyError:
             pass
-        self.metadata.database_session.delete(self)
+        self.metadata.session.delete(self)
        try:
             self.metadata.session.commit()
         except (SQLIntegrityError, SQLOperationalError, AlcIntegrityError, AlcOperationalError) as e:
@@ -396,6 +408,7 @@ class BasicSubmission(Base):
         Returns:
             models.BasicSubmission | List[models.BasicSubmission]: Submission(s) of interest
         """
+        logger.debug(kwargs)
         # NOTE: if you go back to using 'model' change the appropriate cls to model in the query filters
         if submission_type == None:
             model = cls.find_subclasses(attrs=kwargs)
@@ -404,19 +417,7 @@ class BasicSubmission(Base):
             model = cls.find_subclasses(submission_type=submission_type.name)
         else:
             model = cls.find_subclasses(submission_type=submission_type)
-        # query: Query = setup_lookup(ctx=ctx, locals=locals()).query(model)
         query: Query = cls.metadata.session.query(model)
-        # by submission type
-        # match submission_type:
-        #     case SubmissionType():
-        #         logger.debug(f"Looking up BasicSubmission with submission type: {submission_type}")
-        #         query = query.filter(model.submission_type_name==submission_type.name)
-        #     case str():
-        #         logger.debug(f"Looking up BasicSubmission with submission type: {submission_type}")
-        #         query = query.filter(model.submission_type_name==submission_type)
-        #     case _:
-        #         pass
-        # by date range
         if start_date != None and end_date == None:
             logger.warning(f"Start date with no end date, using today.")
             end_date = date.today()
@@ -482,10 +483,94 @@
             query.order_by(cls.submitted_date)
         return query_return(query=query, limit=limit)
 
+    @classmethod
+    def query_or_create(cls, submission_type:str|SubmissionType|None=None, **kwargs) -> BasicSubmission:
+        """
+        Returns object from db if exists, else, creates new. Due to need for user input, doesn't see much use ATM.
+
+        Args:
+            submission_type (str | SubmissionType | None, optional): Submission type to be created. Defaults to None.
+
+        Raises:
+            ValueError: _description_
+            ValueError: _description_
+
+        Returns:
+            cls: _description_
+        """
+        code = 0
+        msg = None
+        disallowed = ["id"]
+        if kwargs == {}:
+            raise ValueError("Need to narrow down query or the first available instance will be returned.")
+        for key in kwargs.keys():
+            if key in disallowed:
+                raise ValueError(f"{key} is not allowed as a query argument as it could lead to creation of duplicate objects. Use .query() instead.")
+        instance = cls.query(submission_type=submission_type, limit=1, **kwargs)
+        logger.debug(f"Retrieved instance: {instance}")
+        if instance == None:
+            used_class = cls.find_subclasses(attrs=kwargs, submission_type=submission_type)
+            instance = used_class(**kwargs)
+            match submission_type:
+                case str():
+                    submission_type = SubmissionType.query(name=submission_type)
+                case _:
+                    pass
+            instance.submission_type = submission_type
+            instance.submission_type_name = submission_type.name
+            if "submitted_date" not in kwargs.keys():
+                instance.submitted_date = date.today()
+        else:
+            code = 1
+            msg = "This submission already exists.\nWould you like to overwrite?"
+        return instance, code, msg
+
     @classmethod
     def filename_template(cls):
         return "{{ rsl_plate_num }}"
 
+    def set_attribute(self, key, value):
+        match key:
+            case "extraction_kit":
+                logger.debug(f"Looking up kit {value}")
+                # field_value = lookup_kit_types(ctx=self.ctx, name=value)
+                field_value = KitType.query(name=value)
+                logger.debug(f"Got {field_value} for kit {value}")
+            case "submitting_lab":
+                logger.debug(f"Looking up organization: {value}")
+                # field_value = lookup_organizations(ctx=self.ctx, name=value)
+                field_value = Organization.query(name=value)
+                logger.debug(f"Got {field_value} for organization {value}")
+            case "submitter_plate_num":
+                logger.debug(f"Submitter plate id: {value}")
+                field_value = value
+            case "samples":
+                # instance = construct_samples(ctx=ctx, instance=instance, samples=value)
+                for sample in value:
+                    # logger.debug(f"Parsing {sample} to sql.")
+                    sample, _ = sample.toSQL(submission=self)
+                    # instance.samples.append(sample)
+                return
+            case "reagents":
+                field_value = [reagent['value'].toSQL()[0] if isinstance(reagent, dict) else reagent.toSQL()[0] for reagent in value]
+            case "submission_type":
+                # field_value = lookup_submission_type(ctx=self.ctx, name=value)
+                field_value = SubmissionType.query(name=value)
+            case "sample_count":
+                if value == None:
+                    field_value = len(self.samples)
+                else:
+                    field_value = value
+            case "ctx" | "csv" | "filepath":
+                return
+            case _:
+                field_value = value
+        # insert into field
+        try:
+            setattr(self, key, field_value)
+        except AttributeError:
+            logger.error(f"Could not set {self} attribute {key} to {value}")
+
 # Below are the custom submission types
 
 class BacterialCulture(BasicSubmission):
@@ -759,7 +844,7 @@ class WastewaterArtic(BasicSubmission):
         input_dict['sample_type'] = "Wastewater Sample"
         # Because generate_sample_object needs the submitter_id and the artic has the "({origin well})"
         # at the end, this has to be done here. No moving to sqlalchemy object :(
-        input_dict['submitter_id'] = re.sub(r"\s\(.+\)$", "", str(input_dict['submitter_id'])).strip()
+        input_dict['submitter_id'] = re.sub(r"\s\(.+\)\s?$", "", str(input_dict['submitter_id'])).strip()
         return input_dict
 
     @classmethod
@@ -782,6 +867,53 @@ class WastewaterArtic(BasicSubmission):
     def get_regex(cls):
         return "(?P<Wastewater_Artic>(\\d{4}-\\d{2}-\\d{2}(?:-|_)(?:\\d_)?artic)|(RSL(?:-|_)?AR(?:-|_)?20\\d{2}-?\\d{2}-?\\d{2}(?:(_|-)\\d?(\\D|$)R?\\d?)?))"
 
+    @classmethod
+    def finalize_parse(cls, input_dict: dict, xl: pd.ExcelFile | None = None, info_map: dict | None = None, plate_map: dict | None = None) -> dict:
+        input_dict = super().finalize_parse(input_dict, xl, info_map, plate_map)
+        logger.debug(pformat(input_dict))
+        logger.debug(pformat(info_map))
+        logger.debug(pformat(plate_map))
+        samples = []
+        for sample in input_dict['samples']:
+            if sample.submitter_id == "NTC1":
+                samples.append(dict(sample=sample.submitter_id, destination_row=8, destination_column=2, source_row=0, source_column=0, plate_number='control', plate=None))
+                continue
+            elif sample.submitter_id == "NTC2":
+                samples.append(dict(sample=sample.submitter_id, destination_row=8, destination_column=5, source_row=0, source_column=0, plate_number='control', plate=None))
+                continue
+            destination_row = sample.row[0]
+            destination_column = sample.column[0]
+            logger.debug(f"Looking up: {sample.submitter_id} friend.")
+            lookup_sample = BasicSample.query(submitter_id=sample.submitter_id)
+            lookup_ssa = SubmissionSampleAssociation.query(sample=lookup_sample, exclude_submission_type=cls.__mapper_args__['polymorphic_identity'] , chronologic=True, reverse=True, limit=1)
+            try:
+                plate = lookup_ssa.submission.rsl_plate_num
+                source_row = lookup_ssa.row
+                source_column = lookup_ssa.column
+            except AttributeError:
+                plate = ""
+                source_row = 0
+                source_column = 0
+            samples.append(dict(
+                sample=sample.submitter_id,
+                destination_column=destination_column,
+                destination_row=destination_row,
+                plate=plate,
+                source_column=source_column,
+                source_row = source_row
+            ))
+        plates = sorted(list(set([sample['plate'] for sample in samples if sample['plate'] != None])))
+        for iii, plate in enumerate(plates):
+            for sample in samples:
+                if sample['plate'] == plate:
+                    sample['plate_number'] = iii + 1
+        df = pd.DataFrame.from_records(samples).fillna(value="")
+        df.source_row = df.source_row.astype(int)
+        df.source_column = df.source_column.astype(int)
+        df.sort_values(by=['destination_column', 'destination_row'], inplace=True)
+        input_dict['csv'] = df
+        return input_dict
+
 class BasicSample(Base):
     """
     Base of basic sample which polymorphs into BCSample and WWSample
@@ -870,16 +1002,21 @@ class BasicSample(Base):
         return dict(name=self.submitter_id[:10], positive=False, tooltip=tooltip_text)
 
     @classmethod
-    def find_subclasses(cls, attrs:dict|None=None, rsl_number:str|None=None):
+    def find_subclasses(cls, attrs:dict|None=None, sample_type:str|None=None):
+        if sample_type != None:
+            return cls.find_polymorphic_subclass(polymorphic_identity=sample_type)
         if len(attrs) == 0 or attrs == None:
+            logger.debug(f"No attr, returning {cls}")
             return cls
         if any([not hasattr(cls, attr) for attr in attrs]):
+            logger.debug(f"{cls} is missing attrs. searching for better match.")
             # looks for first model that has all included kwargs
             try:
                 model = [subclass for subclass in cls.__subclasses__() if all([hasattr(subclass, attr) for attr in attrs])][0]
             except IndexError as e:
                 raise AttributeError(f"Couldn't find existing class/subclass of {cls} with all attributes:\n{pformat(attrs)}")
         else:
+            logger.debug(f"{cls} has all necessary attributes, returning")
             return cls
         logger.debug(f"Using model: {model}")
         return model
@@ -906,7 +1043,7 @@
     @setup_lookup
     def query(cls,
                 submitter_id:str|None=None,
-                # sample_type:str|None=None,
+                sample_type:str|None=None,
                 limit:int=0,
                 **kwargs
                 ) -> BasicSample|List[BasicSample]:
@@ -922,14 +1059,18 @@
         Returns:
             models.BasicSample|List[models.BasicSample]: Sample(s) of interest.
         """
+        if sample_type == None:
+            model = cls.find_subclasses(attrs=kwargs)
+        else:
+            model = cls.find_subclasses(sample_type=sample_type)
         logger.debug(f"Length of kwargs: {len(kwargs)}")
         # model = models.BasicSample.find_subclasses(ctx=ctx, attrs=kwargs)
         # query: Query = setup_lookup(ctx=ctx, locals=locals()).query(model)
-        query: Query = cls.metadata.session.query(cls)
+        query: Query = cls.metadata.session.query(model)
         match submitter_id:
             case str():
-                logger.debug(f"Looking up {cls} with submitter id: {submitter_id}")
-                query = query.filter(cls.submitter_id==submitter_id)
+                logger.debug(f"Looking up {model} with submitter id: {submitter_id}")
+                query = query.filter(model.submitter_id==submitter_id)
                 limit = 1
             case _:
                 pass
@@ -940,12 +1081,28 @@
         #     case _:
         #         pass
         for k, v in kwargs.items():
-            attr = getattr(cls, k)
+            attr = getattr(model, k)
             logger.debug(f"Got attr: {attr}")
             query = query.filter(attr==v)
         if len(kwargs) > 0:
             limit = 1
         return query_return(query=query, limit=limit)
 
+    @classmethod
+    def query_or_create(cls, sample_type:str, **kwargs):
+        disallowed = ["id"]
+        if kwargs == {}:
+            raise ValueError("Need to narrow down query or the first available instance will be returned.")
+        for key in kwargs.keys():
+            if key in disallowed:
+                raise ValueError(f"{key} is not allowed as a query argument as it could lead to creation of duplicate objects.")
+        instance = cls.query(sample_type=sample_type, limit=1, **kwargs)
+        logger.debug(f"Retrieved instance: {instance}")
+        if instance == None:
+            used_class = cls.find_subclasses(attrs=kwargs, sample_type=sample_type)
+            instance = used_class(**kwargs)
+            instance.sample_type = sample_type
+        return instance
+
 class WastewaterSample(BasicSample):
     """
@@ -996,6 +1153,20 @@ class WastewaterSample(BasicSample):
             output_dict['rsl_number'] = output_dict['submitter_id']
         if output_dict['ww_full_sample_id'] != None:
             output_dict["submitter_id"] = output_dict['ww_full_sample_id']
+        # Ad hoc repair method for WW (or possibly upstream) not formatting some dates properly.
+        match output_dict['collection_date']:
+            case str():
+                try:
+                    output_dict['collection_date'] = parse(output_dict['collection_date']).date()
+                except ParserError:
+                    logger.error(f"Problem parsing collection_date: {output_dict['collection_date']}")
+                    output_dict['collection_date'] = date(1,1,1)
+            case datetime():
+                output_dict['collection_date'] = output_dict['collection_date'].date()
+            case date():
+                pass
+            case _:
+                del output_dict['collection_date']
         return output_dict
 
 class BacterialCultureSample(BasicSample):
@@ -1070,11 +1241,13 @@ class SubmissionSampleAssociation(Base):
     @setup_lookup
     def query(cls,
                 submission:BasicSubmission|str|None=None,
+                exclude_submission_type:str|None=None,
                 sample:BasicSample|str|None=None,
                 row:int=0,
                 column:int=0,
                 limit:int=0,
-                chronologic:bool=False
+                chronologic:bool=False,
+                reverse:bool=False,
                 ) -> SubmissionSampleAssociation|List[SubmissionSampleAssociation]:
         """
         Lookup junction of Submission and Sample in the database
@@ -1109,12 +1282,64 @@
             query = query.filter(cls.row==row)
         if column > 0:
             query = query.filter(cls.column==column)
-        logger.debug(f"Query count: {query.count()}")
+        match exclude_submission_type:
+            case str():
+                query = query.join(BasicSubmission).filter(BasicSubmission.submission_type_name != exclude_submission_type)
+            case _:
+                pass
+        # logger.debug(f"Query count: {query.count()}")
+        if reverse and not chronologic:
+            query = query.order_by(BasicSubmission.id.desc())
+            # query = query.join(BasicSubmission).order_by(BasicSubmission.id.desc())
+            # query.join(BasicSubmission).order_by(cls.submission.id.desc())
         if chronologic:
-            query.join(BasicSubmission).order_by(BasicSubmission.submitted_date)
-        if query.count() == 1:
-            limit = 1
+            if reverse:
+                query = query.order_by(BasicSubmission.submitted_date.desc())
+                # query = query.join(BasicSubmission).order_by(BasicSubmission.submitted_date.desc())
+                # query.join(BasicSubmission).order_by(cls.submission.submitted_date.desc())
+            else:
+                query = query.order_by(BasicSubmission.submitted_date)
+                # query.join(BasicSubmission).order_by(cls.submission.submitted_date)
+        # if query.count() == 1:
+        #     limit = 1
         return query_return(query=query, limit=limit)
 
+    @classmethod
+    def query_or_create(cls,
+                association_type:str="Basic Association",
+                submission:BasicSubmission|str|None=None,
+                sample:BasicSample|str|None=None,
+                **kwargs):
+        match submission:
+            case BasicSubmission():
+                pass
+            case str():
+                submission = BasicSubmission.query(rsl_number=submission)
+            case _:
+                raise ValueError()
+        match sample:
+            case BasicSample():
+                pass
+            case str():
+                sample = BasicSample.query(submitter_id=sample)
+            case _:
+                raise ValueError()
+        try:
+            row = kwargs['row']
+        except KeyError:
+            row = None
+        try:
+            column = kwargs['column']
+        except KeyError:
+            column = None
+        try:
+            instance = cls.query(submission=submission, sample=sample, row=row, column=column, limit=1)
+        except StatementError:
+            instance = None
+        if instance == None:
+            used_cls = cls.find_polymorphic_subclass(polymorphic_identity=association_type)
+            instance = used_cls(submission=submission, sample=sample, **kwargs)
+        return instance
+
     def save(self):
         self.metadata.session.add(self)
@@ -7,8 +7,7 @@ from typing import List
 import pandas as pd
 import numpy as np
 from pathlib import Path
-from backend.db import models
-from backend.db.functions import lookup_kit_types, lookup_submission_type, lookup_samples
+from backend.db.models import *
 from backend.validators import PydSubmission, PydReagent, RSLNamer, PydSample
 import logging
 from collections import OrderedDict
@@ -49,19 +48,22 @@ class SheetParser(object):
             raise FileNotFoundError(f"Couldn't parse file {self.filepath}")
         self.sub = OrderedDict()
         # make decision about type of sample we have
-        self.sub['submission_type'] = dict(value=RSLNamer.retrieve_submission_type(ctx=self.ctx, instr=self.filepath), missing=True)
+        self.sub['submission_type'] = dict(value=RSLNamer.retrieve_submission_type(instr=self.filepath), missing=True)
         # # grab the info map from the submission type in database
         self.parse_info()
         self.import_kit_validation_check()
         self.parse_reagents()
         self.import_reagent_validation_check()
         self.parse_samples()
+        self.finalize_parse()
 
     def parse_info(self):
         """
         Pulls basic information from the excel sheet
         """
-        info = InfoParser(ctx=self.ctx, xl=self.xl, submission_type=self.sub['submission_type']['value']).parse_info()
+        parser = InfoParser(xl=self.xl, submission_type=self.sub['submission_type']['value'])
+        info = parser.parse_info()
+        self.info_map = parser.map
         for k,v in info.items():
             match k:
                 case "sample":
@@ -77,13 +79,15 @@ class SheetParser(object):
         if extraction_kit == None:
             extraction_kit = extraction_kit=self.sub['extraction_kit']
         logger.debug(f"Parsing reagents for {extraction_kit}")
-        self.sub['reagents'] = ReagentParser(ctx=self.ctx, xl=self.xl, submission_type=self.sub['submission_type'], extraction_kit=extraction_kit).parse_reagents()
+        self.sub['reagents'] = ReagentParser(xl=self.xl, submission_type=self.sub['submission_type'], extraction_kit=extraction_kit).parse_reagents()
 
     def parse_samples(self):
         """
         Pulls sample info from the excel sheet
         """
-        self.sample_result, self.sub['samples'] = SampleParser(ctx=self.ctx, xl=self.xl, submission_type=self.sub['submission_type']['value']).parse_samples()
+        parser = SampleParser(xl=self.xl, submission_type=self.sub['submission_type']['value'])
+        self.sample_result, self.sub['samples'] = parser.parse_samples()
+        self.plate_map = parser.plate_map
 
     def import_kit_validation_check(self):
         """
@@ -97,7 +101,7 @@ class SheetParser(object):
             List[PydReagent]: List of reagents
         """
         if not check_not_nan(self.sub['extraction_kit']['value']):
-            dlg = KitSelector(ctx=self.ctx, title="Kit Needed", message="At minimum a kit is needed. Please select one.")
+            dlg = KitSelector(title="Kit Needed", message="At minimum a kit is needed. Please select one.")
             if dlg.exec():
                 self.sub['extraction_kit'] = dict(value=dlg.getValues(), missing=True)
             else:
@@ -111,11 +115,16 @@ class SheetParser(object):
         Enforce that only allowed reagents get into the Pydantic Model
         """
         # kit = lookup_kit_types(ctx=self.ctx, name=self.sub['extraction_kit']['value'])
-        kit = models.KitType.query(name=self.sub['extraction_kit']['value'])
+        kit = KitType.query(name=self.sub['extraction_kit']['value'])
         allowed_reagents = [item.name for item in kit.get_reagents()]
         logger.debug(f"List of reagents for comparison with allowed_reagents: {pprint.pformat(self.sub['reagents'])}")
         # self.sub['reagents'] = [reagent for reagent in self.sub['reagents'] if reagent['value'].type in allowed_reagents]
         self.sub['reagents'] = [reagent for reagent in self.sub['reagents'] if reagent.type in allowed_reagents]
 
+    def finalize_parse(self):
+        finisher = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.sub['submission_type']).finalize_parse
+        self.sub = finisher(input_dict=self.sub, xl=self.xl, info_map=self.info_map, plate_map=self.plate_map)
+
     def to_pydantic(self) -> PydSubmission:
         """
@@ -125,15 +134,15 @@ class SheetParser(object):
             PydSubmission: output pydantic model
         """
         logger.debug(f"Submission dictionary coming into 'to_pydantic':\n{pprint.pformat(self.sub)}")
-        psm = PydSubmission(ctx=self.ctx, filepath=self.filepath, **self.sub)
+        psm = PydSubmission(filepath=self.filepath, **self.sub)
         # delattr(psm, "filepath")
         return psm
 
 class InfoParser(object):
 
-    def __init__(self, ctx:Settings, xl:pd.ExcelFile, submission_type:str):
+    def __init__(self, xl:pd.ExcelFile, submission_type:str):
         logger.debug(f"\n\nHello from InfoParser!")
-        self.ctx = ctx
+        # self.ctx = ctx
         self.map = self.fetch_submission_info_map(submission_type=submission_type)
         self.xl = xl
         logger.debug(f"Info map for InfoParser: {pprint.pformat(self.map)}")
@@ -152,11 +161,10 @@ class InfoParser(object):
         if isinstance(submission_type, str):
             submission_type = dict(value=submission_type, missing=True)
         logger.debug(f"Looking up submission type: {submission_type['value']}")
-        # submission_type = lookup_submission_type(ctx=self.ctx, name=submission_type['value'])
-        submission_type = models.SubmissionType.query(name=submission_type['value'])
+        submission_type = SubmissionType.query(name=submission_type['value'])
         info_map = submission_type.info_map
         # Get the parse_info method from the submission type specified
-        self.custom_parser = models.BasicSubmission.find_polymorphic_subclass(polymorphic_identity=submission_type.name).parse_info
+        self.custom_parser = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=submission_type.name).parse_info
         return info_map
 
     def parse_info(self) -> dict:
@@ -174,7 +182,7 @@ class InfoParser(object):
                 if isinstance(v, str):
                     dicto[k] = dict(value=v, missing=False)
                     continue
-                if k == "samples":
+                if k in ["samples", "all_sheets"]:
                     continue
                 if sheet in self.map[k]['sheets']:
                     relevant[k] = v
@@ -205,9 +213,9 @@ class InfoParser(object):
 
 class ReagentParser(object):
 
-    def __init__(self, ctx:Settings, xl:pd.ExcelFile, submission_type:str, extraction_kit:str):
+    def __init__(self, xl:pd.ExcelFile, submission_type:str, extraction_kit:str):
         logger.debug("\n\nHello from ReagentParser!\n\n")
-        self.ctx = ctx
+        # self.ctx = ctx
         self.map = self.fetch_kit_info_map(extraction_kit=extraction_kit, submission_type=submission_type)
         self.xl = xl
 
@@ -215,7 +223,7 @@ class ReagentParser(object):
         if isinstance(extraction_kit, dict):
             extraction_kit = extraction_kit['value']
         # kit = lookup_kit_types(ctx=self.ctx, name=extraction_kit)
-        kit = models.KitType.query(name=extraction_kit)
+        kit = KitType.query(name=extraction_kit)
         if isinstance(submission_type, dict):
             submission_type = submission_type['value']
         reagent_map = kit.construct_xl_map_for_use(submission_type.title())
@@ -238,7 +246,7 @@ class ReagentParser(object):
                 lot = df.iat[relevant[item]['lot']['row']-1, relevant[item]['lot']['column']-1]
                 expiry = df.iat[relevant[item]['expiry']['row']-1, relevant[item]['expiry']['column']-1]
             except (KeyError, IndexError):
-                listo.append(PydReagent(ctx=self.ctx, type=item.strip(), lot=None, expiry=None, name=None, missing=True))
+                listo.append(PydReagent(type=item.strip(), lot=None, expiry=None, name=None, missing=True))
                 continue
             # If the cell is blank tell the PydReagent
             if check_not_nan(lot):
@@ -248,7 +256,7 @@ class ReagentParser(object):
             # logger.debug(f"Got lot for {item}-{name}: {lot} as {type(lot)}")
             lot = str(lot)
             logger.debug(f"Going into pydantic: name: {name}, lot: {lot}, expiry: {expiry}, type: {item.strip()}")
-            listo.append(PydReagent(ctx=self.ctx, type=item.strip(), lot=lot, expiry=expiry, name=name, missing=missing))
+            listo.append(PydReagent(type=item.strip(), lot=lot, expiry=expiry, name=name, missing=missing))
         # logger.debug(f"Returning listo: {listo}")
         return listo
 
@@ -257,7 +265,7 @@ class SampleParser(object):
     object to pull data for samples in excel sheet and construct individual sample objects
     """
 
-    def __init__(self, ctx:Settings, xl:pd.ExcelFile, submission_type:str) -> None:
+    def __init__(self, xl:pd.ExcelFile, submission_type:str) -> None:
         """
         convert sample sub-dataframe to dictionary of records
 
@@ -268,7 +276,7 @@ class SampleParser(object):
         """
         logger.debug("\n\nHello from SampleParser!")
        self.samples = []
-        self.ctx = ctx
+        # self.ctx = ctx
         self.xl = xl
         self.submission_type = submission_type
         sample_info_map = self.fetch_sample_info_map(submission_type=submission_type)
@@ -293,12 +301,12 @@ class SampleParser(object):
         """
         logger.debug(f"Looking up submission type: {submission_type}")
         # submission_type = lookup_submission_type(ctx=self.ctx, name=submission_type)
-        submission_type = models.SubmissionType.query(name=submission_type)
+        submission_type = SubmissionType.query(name=submission_type)
         logger.debug(f"info_map: {pprint.pformat(submission_type.info_map)}")
         sample_info_map = submission_type.info_map['samples']
         # self.custom_parser = get_polymorphic_subclass(models.BasicSubmission, submission_type.name).parse_samples
-        self.custom_sub_parser = models.BasicSubmission.find_polymorphic_subclass(polymorphic_identity=submission_type.name).parse_samples
+        self.custom_sub_parser = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=submission_type.name).parse_samples
|
||||||
self.custom_sample_parser = models.BasicSample.find_polymorphic_subclass(polymorphic_identity=f"{submission_type.name} Sample").parse_sample
|
self.custom_sample_parser = BasicSample.find_polymorphic_subclass(polymorphic_identity=f"{submission_type.name} Sample").parse_sample
|
||||||
return sample_info_map
|
return sample_info_map
|
||||||
|
|
||||||
def construct_plate_map(self, plate_map_location:dict) -> pd.DataFrame:
|
def construct_plate_map(self, plate_map_location:dict) -> pd.DataFrame:
|
||||||
@@ -316,7 +324,7 @@ class SampleParser(object):
|
|||||||
df = pd.DataFrame(df.values[1:], columns=df.iloc[0])
|
df = pd.DataFrame(df.values[1:], columns=df.iloc[0])
|
||||||
df = df.set_index(df.columns[0])
|
df = df.set_index(df.columns[0])
|
||||||
# custom_mapper = get_polymorphic_subclass(models.BasicSubmission, self.submission_type)
|
# custom_mapper = get_polymorphic_subclass(models.BasicSubmission, self.submission_type)
|
||||||
custom_mapper = models.BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type)
|
custom_mapper = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type)
|
||||||
df = custom_mapper.custom_platemap(self.xl, df)
|
df = custom_mapper.custom_platemap(self.xl, df)
|
||||||
logger.debug(f"Custom platemap:\n{df}")
|
logger.debug(f"Custom platemap:\n{df}")
|
||||||
return df
|
return df
|
||||||
@@ -407,7 +415,7 @@ class SampleParser(object):
|
|||||||
# logger.debug(f"Output sample dict: {sample}")
|
# logger.debug(f"Output sample dict: {sample}")
|
||||||
logger.debug(f"Final lookup_table: \n\n {self.lookup_table}")
|
logger.debug(f"Final lookup_table: \n\n {self.lookup_table}")
|
||||||
|
|
||||||
def parse_samples(self, generate:bool=True) -> List[dict]|List[models.BasicSample]:
|
def parse_samples(self, generate:bool=True) -> List[dict]|List[BasicSample]:
|
||||||
"""
|
"""
|
||||||
Parse merged platemap\lookup info into dicts/samples
|
Parse merged platemap\lookup info into dicts/samples
|
||||||
|
|
||||||
@@ -445,36 +453,39 @@ class SampleParser(object):
|
|||||||
new_samples.append(PydSample(**translated_dict))
|
new_samples.append(PydSample(**translated_dict))
|
||||||
return result, new_samples
|
return result, new_samples
|
||||||
|
|
||||||
def generate_sample_object(self, input_dict) -> models.BasicSample:
|
# def generate_sample_object(self, input_dict) -> BasicSample:
|
||||||
"""
|
# """
|
||||||
Constructs sample object from dict
|
# Constructs sample object from dict.
|
||||||
|
# NOTE: Depreciated due to using Pydantic object up until db saving.
|
||||||
|
|
||||||
Args:
|
# Args:
|
||||||
input_dict (dict): sample information
|
# input_dict (dict): sample information
|
||||||
|
|
||||||
Returns:
|
# Returns:
|
||||||
models.BasicSample: Sample object
|
# models.BasicSample: Sample object
|
||||||
"""
|
# """
|
||||||
query = input_dict['sample_type'].replace(" ", "")
|
# database_obj = BasicSample.find_polymorphic_subclass(polymorphic_identity=input_dict['sample_type'])
|
||||||
try:
|
# # query = input_dict['sample_type'].replace(" ", "")
|
||||||
database_obj = getattr(models, query)
|
# # try:
|
||||||
except AttributeError as e:
|
# # # database_obj = getattr(models, query)
|
||||||
logger.error(f"Could not find the model {query}. Using generic.")
|
|
||||||
database_obj = models.BasicSample
|
# # except AttributeError as e:
|
||||||
logger.debug(f"Searching database for {input_dict['submitter_id']}...")
|
# # logger.error(f"Could not find the model {query}. Using generic.")
|
||||||
# instance = lookup_samples(ctx=self.ctx, submitter_id=str(input_dict['submitter_id']))
|
# # database_obj = models.BasicSample
|
||||||
instance = models.BasicSample.query(submitter_id=str(input_dict['submitter_id']))
|
# logger.debug(f"Searching database for {input_dict['submitter_id']}...")
|
||||||
if instance == None:
|
# # instance = lookup_samples(ctx=self.ctx, submitter_id=str(input_dict['submitter_id']))
|
||||||
logger.debug(f"Couldn't find sample {input_dict['submitter_id']}. Creating new sample.")
|
# instance = BasicSample.query(submitter_id=str(input_dict['submitter_id']))
|
||||||
instance = database_obj()
|
# if instance == None:
|
||||||
for k,v in input_dict.items():
|
# logger.debug(f"Couldn't find sample {input_dict['submitter_id']}. Creating new sample.")
|
||||||
try:
|
# instance = database_obj()
|
||||||
instance.set_attribute(k, v)
|
# for k,v in input_dict.items():
|
||||||
except Exception as e:
|
# try:
|
||||||
logger.error(f"Failed to set {k} due to {type(e).__name__}: {e}")
|
# instance.set_attribute(k, v)
|
||||||
else:
|
# except Exception as e:
|
||||||
logger.debug(f"Sample {instance.submitter_id} already exists, will run update.")
|
# logger.error(f"Failed to set {k} due to {type(e).__name__}: {e}")
|
||||||
return dict(sample=instance, row=input_dict['row'], column=input_dict['column'])
|
# else:
|
||||||
|
# logger.debug(f"Sample {instance.submitter_id} already exists, will run update.")
|
||||||
|
# return dict(sample=instance, row=input_dict['row'], column=input_dict['column'])
|
||||||
|
|
||||||
def grab_plates(self) -> List[str]:
|
def grab_plates(self) -> List[str]:
|
||||||
"""
|
"""
|
||||||
@@ -487,7 +498,7 @@ class SampleParser(object):
|
|||||||
for plate in self.plates:
|
for plate in self.plates:
|
||||||
df = self.xl.parse(plate['sheet'], header=None)
|
df = self.xl.parse(plate['sheet'], header=None)
|
||||||
if isinstance(df.iat[plate['row']-1, plate['column']-1], str):
|
if isinstance(df.iat[plate['row']-1, plate['column']-1], str):
|
||||||
output = RSLNamer.retrieve_rsl_number(ctx=self.ctx, instr=df.iat[plate['row']-1, plate['column']-1])
|
output = RSLNamer.retrieve_rsl_number(instr=df.iat[plate['row']-1, plate['column']-1])
|
||||||
else:
|
else:
|
||||||
continue
|
continue
|
||||||
plates.append(output)
|
plates.append(output)
|
||||||
@@ -497,7 +508,7 @@ class PCRParser(object):
|
|||||||
"""
|
"""
|
||||||
Object to pull data from Design and Analysis PCR export file.
|
Object to pull data from Design and Analysis PCR export file.
|
||||||
"""
|
"""
|
||||||
def __init__(self, ctx:dict, filepath:Path|None = None) -> None:
|
def __init__(self, filepath:Path|None = None) -> None:
|
||||||
"""
|
"""
|
||||||
Initializes object.
|
Initializes object.
|
||||||
|
|
||||||
@@ -505,7 +516,7 @@ class PCRParser(object):
|
|||||||
ctx (dict): settings passed down from gui.
|
ctx (dict): settings passed down from gui.
|
||||||
filepath (Path | None, optional): file to parse. Defaults to None.
|
filepath (Path | None, optional): file to parse. Defaults to None.
|
||||||
"""
|
"""
|
||||||
self.ctx = ctx
|
# self.ctx = ctx
|
||||||
logger.debug(f"Parsing {filepath.__str__()}")
|
logger.debug(f"Parsing {filepath.__str__()}")
|
||||||
if filepath == None:
|
if filepath == None:
|
||||||
logger.error(f"No filepath given.")
|
logger.error(f"No filepath given.")
|
||||||
@@ -521,11 +532,11 @@ class PCRParser(object):
|
|||||||
return
|
return
|
||||||
# self.pcr = OrderedDict()
|
# self.pcr = OrderedDict()
|
||||||
self.parse_general(sheet_name="Results")
|
self.parse_general(sheet_name="Results")
|
||||||
namer = RSLNamer(ctx=self.ctx, instr=filepath.__str__())
|
namer = RSLNamer(instr=filepath.__str__())
|
||||||
self.plate_num = namer.parsed_name
|
self.plate_num = namer.parsed_name
|
||||||
self.submission_type = namer.submission_type
|
self.submission_type = namer.submission_type
|
||||||
logger.debug(f"Set plate number to {self.plate_num} and type to {self.submission_type}")
|
logger.debug(f"Set plate number to {self.plate_num} and type to {self.submission_type}")
|
||||||
parser = models.BasicSubmission.find_polymorphic_subclass(self.submission_type)
|
parser = BasicSubmission.find_polymorphic_subclass(self.submission_type)
|
||||||
self.samples = parser.parse_pcr(xl=self.xl, rsl_number=self.plate_num)
|
self.samples = parser.parse_pcr(xl=self.xl, rsl_number=self.plate_num)
|
||||||
|
|
||||||
def parse_general(self, sheet_name:str):
|
def parse_general(self, sheet_name:str):
|
||||||
|
|||||||
@@ -76,7 +76,7 @@ def make_report_html(df:DataFrame, start_date:date, end_date:date) -> str:
return html


-def convert_data_list_to_df(ctx:dict, input:list[dict], subtype:str|None=None) -> DataFrame:
+def convert_data_list_to_df(input:list[dict], subtype:str|None=None) -> DataFrame:
"""
Convert list of control records to dataframe

@@ -171,8 +171,8 @@ def check_date(df:DataFrame, item:dict, previous_dates:list) -> Tuple[DataFrame,
passed = False
else:
passed = True
-logger.debug(f"\n\tCurrent date: {item['date']}\n\tPrevious dates:{previous_dates}")
+# logger.debug(f"\n\tCurrent date: {item['date']}\n\tPrevious dates:{previous_dates}")
-logger.debug(f"DF: {type(df)}, previous_dates: {type(previous_dates)}")
+# logger.debug(f"DF: {type(df)}, previous_dates: {type(previous_dates)}")
# if run didn't lead to changed date, return values
if passed:
logger.debug(f"Date check passed, returning.")
@@ -1,8 +1,7 @@
import logging, re
from pathlib import Path
from openpyxl import load_workbook
-from backend.db.models import BasicSubmission
+from backend.db import BasicSubmission, SubmissionType
-from tools import Settings


logger = logging.getLogger(f"submissions.{__name__}")
@@ -10,14 +9,12 @@ logger = logging.getLogger(f"submissions.{__name__}")
class RSLNamer(object):
"""
Object that will enforce proper formatting on RSL plate names.
-NOTE: Depreciated in favour of object based methods in 'submissions.py'
"""
-def __init__(self, ctx, instr:str, sub_type:str|None=None):
+def __init__(self, instr:str, sub_type:str|None=None):
-self.ctx = ctx
self.submission_type = sub_type

if self.submission_type == None:
-self.submission_type = self.retrieve_submission_type(ctx=self.ctx, instr=instr)
+self.submission_type = self.retrieve_submission_type(instr=instr)
logger.debug(f"got submission type: {self.submission_type}")
if self.submission_type != None:
enforcer = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type)
@@ -25,25 +22,30 @@ class RSLNamer(object):
self.parsed_name = enforcer.enforce_name(instr=self.parsed_name)

@classmethod
-def retrieve_submission_type(cls, ctx:Settings, instr:str|Path) -> str:
+def retrieve_submission_type(cls, instr:str|Path) -> str:
match instr:
case Path():
-logger.debug(f"Using path method.")
+logger.debug(f"Using path method for {instr}.")
if instr.exists():
wb = load_workbook(instr)
try:
submission_type = [item.strip().title() for item in wb.properties.category.split(";")][0]
except AttributeError:
try:
-for type in ctx.submission_types:
+sts = {item.name:item.info_map['all_sheets'] for item in SubmissionType.query(key="all_sheets")}
+for k,v in sts.items():
# This gets the *first* submission type that matches the sheet names in the workbook
-if wb.sheetnames == ctx.submission_types[type]['excel_map']:
+if wb.sheetnames == v:
-submission_type = type.title()
+submission_type = k.title()
+break
except:
-submission_type = cls.retrieve_submission_type(ctx=ctx, instr=instr.stem.__str__())
+# On failure recurse using filename as string for string method
+submission_type = cls.retrieve_submission_type(instr=instr.stem.__str__())
+else:
+submission_type = cls.retrieve_submission_type(instr=instr.stem.__str__())
case str():
regex = BasicSubmission.construct_regex()
-logger.debug(f"Using string method.")
+logger.debug(f"Using string method for {instr}.")
m = regex.search(instr)
try:
submission_type = m.lastgroup
@@ -51,9 +53,13 @@ class RSLNamer(object):
logger.critical("No RSL plate number found or submission type found!")
case _:
submission_type = None
-if submission_type == None:
+try:
+check = submission_type == None
+except UnboundLocalError:
+check = True
+if check:
from frontend.custom_widgets import SubmissionTypeSelector
-dlg = SubmissionTypeSelector(ctx, title="Couldn't parse submission type.", message="Please select submission type from list below.")
+dlg = SubmissionTypeSelector(title="Couldn't parse submission type.", message="Please select submission type from list below.")
if dlg.exec():
submission_type = dlg.parse_form()
submission_type = submission_type.replace("_", " ")
@@ -6,14 +6,14 @@ from pydantic import BaseModel, field_validator, Field
from datetime import date, datetime, timedelta
from dateutil.parser import parse
from dateutil.parser._parser import ParserError
-from typing import List, Any, Tuple
+from typing import List, Any, Tuple, Literal
from . import RSLNamer
from pathlib import Path
import re
import logging
-from tools import check_not_nan, convert_nans_to_nones, Settings, jinja_template_loading
+from tools import check_not_nan, convert_nans_to_nones, jinja_template_loading
from backend.db.models import *
-from sqlalchemy.exc import InvalidRequestError, StatementError
+from sqlalchemy.exc import StatementError
from PyQt6.QtWidgets import QComboBox, QWidget
from pprint import pformat
from openpyxl import load_workbook
@@ -21,7 +21,6 @@ from openpyxl import load_workbook
logger = logging.getLogger(f"submissions.{__name__}")

class PydReagent(BaseModel):
-ctx: Settings
lot: str|None
type: str|None
expiry: date|None
@@ -139,15 +138,17 @@ class PydSample(BaseModel, extra='allow'):
def int_to_str(cls, value):
return str(value)

-def toSQL(self, ctx:Settings, submission):
+def toSQL(self, submission=None):
result = None
self.__dict__.update(self.model_extra)
logger.debug(f"Here is the incoming sample dict: \n{self.__dict__}")
# instance = lookup_samples(ctx=ctx, submitter_id=self.submitter_id)
-instance = BasicSample.query(submitter_id=self.submitter_id)
+# instance = BasicSample.query(submitter_id=self.submitter_id)
-if instance == None:
+# if instance == None:
-logger.debug(f"Sample {self.submitter_id} doesn't exist yet. Looking up sample object with polymorphic identity: {self.sample_type}")
+# logger.debug(f"Sample {self.submitter_id} doesn't exist yet. Looking up sample object with polymorphic identity: {self.sample_type}")
-instance = BasicSample.find_polymorphic_subclass(polymorphic_identity=self.sample_type)()
+# instance = BasicSample.find_polymorphic_subclass(polymorphic_identity=self.sample_type)()
+# instance = BasicSample.query_or_create(**{k:v for k,v in self.__dict__.items() if k not in ['row', 'column']})
+instance = BasicSample.query_or_create(sample_type=self.sample_type, submitter_id=self.submitter_id)
for key, value in self.__dict__.items():
# logger.debug(f"Setting sample field {key} to {value}")
match key:
@@ -155,20 +156,26 @@ class PydSample(BaseModel, extra='allow'):
continue
case _:
instance.set_attribute(name=key, value=value)
-for row, column in zip(self.row, self.column):
+if submission != None:
-logger.debug(f"Looking up association with identity: ({submission.submission_type_name} Association)")
+assoc_type = self.sample_type.replace("Sample", "").strip()
-# association = lookup_submission_sample_association(ctx=ctx, submission=submission, row=row, column=column)
+for row, column in zip(self.row, self.column):
-association = SubmissionSampleAssociation.query(submission=submission, row=row, column=column)
+# logger.debug(f"Looking up association with identity: ({submission.submission_type_name} Association)")
-logger.debug(f"Returned association: {association}")
+logger.debug(f"Looking up association with identity: ({assoc_type} Association)")
-if association == None or association == []:
+# association = lookup_submission_sample_association(ctx=ctx, submission=submission, row=row, column=column)
-logger.debug(f"Looked up association at row {row}, column {column} didn't exist, creating new association.")
+# association = SubmissionSampleAssociation.query(submission=submission, row=row, column=column)
-association = SubmissionSampleAssociation.find_polymorphic_subclass(polymorphic_identity=f"{submission.submission_type_name} Association")
+# logger.debug(f"Returned association: {association}")
-association = association(submission=submission, sample=instance, row=row, column=column)
+# if association == None or association == []:
+# logger.debug(f"Looked up association at row {row}, column {column} didn't exist, creating new association.")
+# association = SubmissionSampleAssociation.find_polymorphic_subclass(polymorphic_identity=f"{submission.submission_type_name} Association")
+# association = association(submission=submission, sample=instance, row=row, column=column)
+association = SubmissionSampleAssociation.query_or_create(association_type=f"{assoc_type} Association",
+submission=submission,
+sample=instance,
+row=row, column=column)
instance.sample_submission_associations.append(association)
return instance, result

class PydSubmission(BaseModel, extra='allow'):
-ctx: Settings
filepath: Path
submission_type: dict|None
# For defaults
@@ -240,15 +247,16 @@ class PydSubmission(BaseModel, extra='allow'):
sub_type = values.data['submission_type']['value']
if check_not_nan(value['value']):
# if lookup_submissions(ctx=values.data['ctx'], rsl_number=value['value']) == None:
-if BasicSubmission.query(rsl_number=value['value']) == None:
+# if BasicSubmission.query(rsl_number=value['value']) == None:
-return dict(value=value['value'], missing=False)
+# return dict(value=value['value'], missing=False)
-else:
+# else:
-logger.warning(f"Submission number {value} already exists in DB, attempting salvage with filepath")
+# logger.warning(f"Submission number {value} already exists in DB, attempting salvage with filepath")
-# output = RSLNamer(ctx=values.data['ctx'], instr=values.data['filepath'].__str__(), sub_type=sub_type).parsed_name
+# # output = RSLNamer(ctx=values.data['ctx'], instr=values.data['filepath'].__str__(), sub_type=sub_type).parsed_name
-output = RSLNamer(ctx=values.data['ctx'], instr=values.data['filepath'].__str__(), sub_type=sub_type).parsed_name
+# output = RSLNamer(instr=values.data['filepath'].__str__(), sub_type=sub_type).parsed_name
-return dict(value=output, missing=True)
+# return dict(value=output, missing=True)
+return value
else:
-output = RSLNamer(ctx=values.data['ctx'], instr=values.data['filepath'].__str__(), sub_type=sub_type).parsed_name
+output = RSLNamer(instr=values.data['filepath'].__str__(), sub_type=sub_type).parsed_name
return dict(value=output, missing=True)

@field_validator("technician", mode="before")
@@ -298,10 +306,8 @@ class PydSubmission(BaseModel, extra='allow'):
if check_not_nan(value['value']):
value = value['value'].title()
return dict(value=value, missing=False)
-# else:
-# return dict(value="RSL Name not found.")
else:
-return dict(value=RSLNamer(ctx=values.data['ctx'], instr=values.data['filepath'].__str__()).submission_type.title(), missing=True)
+return dict(value=RSLNamer(instr=values.data['filepath'].__str__()).submission_type.title(), missing=True)

@field_validator("submission_category")
@classmethod
@@ -345,58 +351,15 @@ class PydSubmission(BaseModel, extra='allow'):
msg = None
status = None
self.__dict__.update(self.model_extra)
-# instance = lookup_submissions(ctx=self.ctx, rsl_number=self.rsl_plate_num['value'])
+instance, code, msg = BasicSubmission.query_or_create(submission_type=self.submission_type['value'], rsl_plate_num=self.rsl_plate_num['value'])
-instance = BasicSubmission.query(rsl_number=self.rsl_plate_num['value'])
-if instance == None:
-instance = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type)()
-else:
-code = 1
-msg = "This submission already exists.\nWould you like to overwrite?"
self.handle_duplicate_samples()
logger.debug(f"Here's our list of duplicate removed samples: {self.samples}")
for key, value in self.__dict__.items():
if isinstance(value, dict):
value = value['value']
logger.debug(f"Setting {key} to {value}")
-# set fields based on keys in dictionary
-match key:
-case "extraction_kit":
-logger.debug(f"Looking up kit {value}")
-# field_value = lookup_kit_types(ctx=self.ctx, name=value)
-field_value = KitType.query(name=value)
-logger.debug(f"Got {field_value} for kit {value}")
-case "submitting_lab":
-logger.debug(f"Looking up organization: {value}")
-# field_value = lookup_organizations(ctx=self.ctx, name=value)
-field_value = Organization.query(name=value)
-logger.debug(f"Got {field_value} for organization {value}")
-case "submitter_plate_num":
-logger.debug(f"Submitter plate id: {value}")
-field_value = value
-case "samples":
-# instance = construct_samples(ctx=ctx, instance=instance, samples=value)
-for sample in value:
-# logger.debug(f"Parsing {sample} to sql.")
-sample, _ = sample.toSQL(ctx=self.ctx, submission=instance)
-# instance.samples.append(sample)
-continue
-case "reagents":
-field_value = [reagent['value'].toSQL()[0] if isinstance(reagent, dict) else reagent.toSQL()[0] for reagent in value]
-case "submission_type":
-# field_value = lookup_submission_type(ctx=self.ctx, name=value)
-field_value = SubmissionType.query(name=value)
-case "sample_count":
-if value == None:
-field_value = len(self.samples)
-else:
-field_value = value
-case "ctx" | "csv" | "filepath":
-continue
-case _:
-field_value = value
-# insert into field
try:
-setattr(instance, key, field_value)
+instance.set_attribute(key=key, value=value)
except AttributeError as e:
logger.debug(f"Could not set attribute: {key} to {value} due to: \n\n {e}")
continue
@@ -412,7 +375,6 @@ class PydSubmission(BaseModel, extra='allow'):
# Apply any discounts that are applicable for client and kit.
try:
logger.debug("Checking and applying discounts...")
-# discounts = [item.amount for item in lookup_discounts(ctx=self.ctx, kit_type=instance.extraction_kit, organization=instance.submitting_lab)]
discounts = [item.amount for item in Discount.query(kit_type=instance.extraction_kit, organization=instance.submitting_lab)]
logger.debug(f"We got discounts: {discounts}")
if len(discounts) > 0:
@@ -513,7 +475,9 @@ class PydSubmission(BaseModel, extra='allow'):
template = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type).filename_template()
logger.debug(f"Using template string: {template}")
template = env.from_string(template)
-return template.render(**self.improved_dict(dictionaries=False))
+render = template.render(**self.improved_dict(dictionaries=False)).replace("/", "")
+logger.debug(f"Template rendered as: {render}")
+return render

class PydContact(BaseModel):

@@ -521,7 +485,7 @@ class PydContact(BaseModel):
phone: str|None
email: str|None

-def toSQL(self, ctx):
+def toSQL(self):
return Contact(name=self.name, phone=self.phone, email=self.email)

class PydOrganization(BaseModel):
@@ -530,12 +494,12 @@ class PydOrganization(BaseModel):
cost_centre: str
contacts: List[PydContact]|None

-def toSQL(self, ctx):
+def toSQL(self):
instance = Organization()
for field in self.model_fields:
match field:
case "contacts":
-value = [item.toSQL(ctx) for item in getattr(self, field)]
+value = [item.toSQL() for item in getattr(self, field)]
case _:
value = getattr(self, field)
instance.set_attribute(name=field, value=value)
@@ -555,7 +519,7 @@ class PydReagentType(BaseModel):
return timedelta(days=value)
return value

-def toSQL(self, ctx:Settings, kit:KitType):
+def toSQL(self, kit:KitType):
# instance: ReagentType = lookup_reagent_types(ctx=ctx, name=self.name)
instance: ReagentType = ReagentType.query(name=self.name)
if instance == None:
@@ -576,14 +540,14 @@ class PydKit(BaseModel):
name: str
reagent_types: List[PydReagentType] = []

-def toSQL(self, ctx):
+def toSQL(self):
result = dict(message=None, status='Information')
# instance = lookup_kit_types(ctx=ctx, name=self.name)
instance = KitType.query(name=self.name)
if instance == None:
instance = KitType(name=self.name)
# instance.reagent_types = [item.toSQL(ctx, instance) for item in self.reagent_types]
-[item.toSQL(ctx, instance) for item in self.reagent_types]
+[item.toSQL(instance) for item in self.reagent_types]
return instance, result

@@ -6,23 +6,27 @@ from PyQt6.QtWidgets import (
QMainWindow, QToolBar,
QTabWidget, QWidget, QVBoxLayout,
QComboBox, QHBoxLayout,
-QScrollArea, QLineEdit, QDateEdit
+QScrollArea
)
from PyQt6.QtCore import pyqtSignal
from PyQt6.QtGui import QAction
from PyQt6.QtWebEngineWidgets import QWebEngineView
from pathlib import Path
-from backend.db.functions import (
-lookup_control_types, lookup_modes
-)
from backend.db.models import ControlType, Control
from backend.validators import PydSubmission, PydReagent
-from tools import check_if_app, Settings
+from .functions import (
-from frontend.custom_widgets import SubmissionsSheet, AlertPop, AddReagentForm, KitAdder, ControlsDatePicker, ReagentFormWidget
+import_submission_function, kit_reload_function, kit_integrity_completion_function,
+submit_new_sample_function, generate_report_function, add_kit_function, add_org_function,
+controls_getter_function, chart_maker_function, link_controls_function, link_extractions_function,
+link_pcr_function, autofill_excel, scrape_reagents, export_csv_function, import_pcr_results_function
+)
+from tools import check_if_app, Settings, Report
+from frontend.custom_widgets import SubmissionsSheet, AlertPop, AddReagentForm, KitAdder, ControlsDatePicker
import logging
from datetime import date
import webbrowser
from pathlib import Path
+from typing import List

logger = logging.getLogger(f'submissions.{__name__}')
logger.info("Hello, I am a logger")
@@ -34,6 +38,7 @@ class App(QMainWindow):
super().__init__()
self.ctx = ctx
self.last_dir = ctx.directory_path
+self.report = Report()
# indicate version and connected database in title bar
try:
self.title = f"Submissions App (v{ctx.package.__version__}) - {ctx.database_path}"
@@ -58,7 +63,6 @@ class App(QMainWindow):
self.show()
self.statusBar().showMessage('Ready', 5000)


def _createMenuBar(self):
"""
adds items to menu bar
@@ -67,7 +71,7 @@ class App(QMainWindow):
menuBar = self.menuBar()
fileMenu = menuBar.addMenu("&File")
# Creating menus using a title
-methodsMenu = menuBar.addMenu("&Methods")
+# methodsMenu = menuBar.addMenu("&Methods")
reportMenu = menuBar.addMenu("&Reports")
maintenanceMenu = menuBar.addMenu("&Monthly")
helpMenu = menuBar.addMenu("&Help")
@@ -75,7 +79,7 @@ class App(QMainWindow):
helpMenu.addAction(self.docsAction)
fileMenu.addAction(self.importAction)
fileMenu.addAction(self.importPCRAction)
-methodsMenu.addAction(self.constructFS)
+# methodsMenu.addAction(self.constructFS)
reportMenu.addAction(self.generateReportAction)
maintenanceMenu.addAction(self.joinExtractionAction)
maintenanceMenu.addAction(self.joinPCRAction)
@@ -106,8 +110,7 @@ class App(QMainWindow):
self.joinPCRAction = QAction("Link PCR Logs")
self.helpAction = QAction("&About", self)
self.docsAction = QAction("&Docs", self)
-self.constructFS = QAction("Make First Strand", self)
+# self.constructFS = QAction("Make First Strand", self)


def _connectActions(self):
"""
@@ -128,7 +131,7 @@ class App(QMainWindow):
self.joinPCRAction.triggered.connect(self.linkPCR)
self.helpAction.triggered.connect(self.showAbout)
self.docsAction.triggered.connect(self.openDocs)
-self.constructFS.triggered.connect(self.construct_first_strand)
+# self.constructFS.triggered.connect(self.construct_first_strand)
self.table_widget.formwidget.import_drag.connect(self.importSubmission)

def showAbout(self):
@@ -150,7 +153,7 @@ class App(QMainWindow):
logger.debug(f"Attempting to open {url}")
webbrowser.get('windows-default').open(f"file://{url.__str__()}")

-def result_reporter(self, result:dict|None=None):
+def result_reporter(self):
# def result_reporter(self, result:TypedDict[]|None=None):
"""
Report any anomolous results - if any - to the user
@@ -158,31 +161,41 @@ class App(QMainWindow):
Args:
result (dict | None, optional): The result from a function. Defaults to None.
"""
-logger.info(f"We got the result: {result}")
+# logger.info(f"We got the result: {result}")
-if result != None:
+# if result != None:
-msg = AlertPop(message=result['message'], status=result['status'])
+# msg = AlertPop(message=result['message'], status=result['status'])
-msg.exec()
+# msg.exec()
+logger.debug(f"Running results reporter for: {self.report.results}")
+if len(self.report.results) > 0:
+logger.debug(f"We've got some results!")
+for result in self.report.results:
+logger.debug(f"Showing result: {result}")
+if result != None:
+alert = result.report()
+if alert.exec():
+pass
+self.report = Report()
else:
self.statusBar().showMessage("Action completed sucessfully.", 5000)

def importSubmission(self, fname:Path|None=None):
"""
import submission from excel sheet into form
"""
-from .main_window_functions import import_submission_function
+# from .main_window_functions import import_submission_function
self.raise_()
self.activateWindow()
-self, result = import_submission_function(self, fname)
+self = import_submission_function(self, fname)
-logger.debug(f"Import result: {result}")
+logger.debug(f"Result from result reporter: {self.report.results}")
-self.result_reporter(result)
+self.result_reporter()

def kit_reload(self):
"""
Removes all reagents from form before running kit integrity completion.
"""
-from .main_window_functions import kit_reload_function
+# from .main_window_functions import kit_reload_function
-self, result = kit_reload_function(self)
+self = kit_reload_function(self)
-self.result_reporter(result)
+self.result_reporter()

def kit_integrity_completion(self):
"""
@@ -190,15 +203,15 @@ class App(QMainWindow):
NOTE: this will not change self.reagents which should be fine
since it's only used when looking up
"""
-from .main_window_functions import kit_integrity_completion_function
+# from .main_window_functions import kit_integrity_completion_function
-self, result = kit_integrity_completion_function(self)
+self = kit_integrity_completion_function(self)
-self.result_reporter(result)
+self.result_reporter()

def submit_new_sample(self):
"""
Attempt to add sample to database when 'submit' button clicked
"""
-from .main_window_functions import submit_new_sample_function
+# from .main_window_functions import submit_new_sample_function
self, result = submit_new_sample_function(self)
self.result_reporter(result)

@@ -237,7 +250,7 @@ class App(QMainWindow):
"""
Action to create a summary of sheet data per client
"""
-from .main_window_functions import generate_report_function
+# from .main_window_functions import generate_report_function
self, result = generate_report_function(self)
self.result_reporter(result)

@@ -245,7 +258,7 @@ class App(QMainWindow):
"""
Constructs new kit from yaml and adds to DB.
"""
-from .main_window_functions import add_kit_function
+# from .main_window_functions import add_kit_function
self, result = add_kit_function(self)
self.result_reporter(result)

@@ -253,7 +266,7 @@ class App(QMainWindow):
"""
Constructs new kit from yaml and adds to DB.
"""
-from .main_window_functions import add_org_function
+# from .main_window_functions import add_org_function
self, result = add_org_function(self)
self.result_reporter(result)

@@ -261,24 +274,24 @@ class App(QMainWindow):
"""
Lookup controls from database and send to chartmaker
"""
-from .main_window_functions import controls_getter_function
+# from .main_window_functions import controls_getter_function
-self, result = controls_getter_function(self)
+self = controls_getter_function(self)
-self.result_reporter(result)
+self.result_reporter()

def _chart_maker(self):
"""
Creates plotly charts for webview
"""
-from .main_window_functions import chart_maker_function
+# from .main_window_functions import chart_maker_function
-self, result = chart_maker_function(self)
+self = chart_maker_function(self)
-self.result_reporter(result)
+self.result_reporter()

def linkControls(self):
"""
Adds controls pulled from irida to relevant submissions
NOTE: Depreciated due to improvements in controls scraper.
"""
-from .main_window_functions import link_controls_function
+# from .main_window_functions import link_controls_function
self, result = link_controls_function(self)
self.result_reporter(result)

@@ -286,7 +299,7 @@ class App(QMainWindow):
"""
Links extraction logs from .csv files to relevant submissions.
"""
-from .main_window_functions import link_extractions_function
+# from .main_window_functions import link_extractions_function
self, result = link_extractions_function(self)
self.result_reporter(result)

@@ -294,7 +307,7 @@ class App(QMainWindow):
"""
Links PCR logs from .csv files to relevant submissions.
"""
-from .main_window_functions import link_pcr_function
+# from .main_window_functions import link_pcr_function
self, result = link_pcr_function(self)
self.result_reporter(result)

@@ -302,25 +315,29 @@ class App(QMainWindow):
"""
Imports results exported from Design and Analysis .eds files
"""
-from .main_window_functions import import_pcr_results_function
+# from .main_window_functions import import_pcr_results_function
self, result = import_pcr_results_function(self)
self.result_reporter(result)

-def construct_first_strand(self):
+# def construct_first_strand(self):
-"""
+# """
-Converts first strand excel sheet to Biomek CSV
+# Converts first strand excel sheet to Biomek CSV
-"""
+# """
-from .main_window_functions import construct_first_strand_function
+# from .main_window_functions import construct_first_strand_function
-self, result = construct_first_strand_function(self)
+# self, result = construct_first_strand_function(self)
-self.result_reporter(result)
+# self.result_reporter(result)

def scrape_reagents(self, *args, **kwargs):
-from .main_window_functions import scrape_reagents
+# from .main_window_functions import scrape_reagents
logger.debug(f"Args: {args}")
logger.debug(F"kwargs: {kwargs}")
-self, result = scrape_reagents(self, args[0])
+self = scrape_reagents(self, args[0])
self.kit_integrity_completion()
-self.result_reporter(result)
+self.result_reporter()

+def export_csv(self, fname:Path|None=None):
+from .main_window_functions import export_csv_function
+export_csv_function(self, fname)

class AddSubForm(QWidget):

@@ -1,105 +0,0 @@
-'''
-functions used by all windows in the application's frontend
-'''
-from pathlib import Path
-import logging
-from PyQt6.QtWidgets import (
-QMainWindow, QWidget, QFileDialog,
-QLineEdit, QComboBox, QDateEdit, QSpinBox,
-QDoubleSpinBox
-)
-
-logger = logging.getLogger(f"submissions.{__name__}")
-
-def select_open_file(obj:QMainWindow, file_extension:str) -> Path:
-"""
-File dialog to select a file to read from
-
-Args:
-obj (QMainWindow): Original main app window to be parent
-file_extension (str): file extension
-
-Returns:
-Path: Path of file to be opened
-"""
-try:
-# home_dir = Path(obj.ctx.directory_path).resolve().__str__()
-home_dir = obj.last_dir.resolve().__str__()
-except FileNotFoundError:
-home_dir = Path.home().resolve().__str__()
-fname = Path(QFileDialog.getOpenFileName(obj, 'Open file', home_dir, filter = f"{file_extension}(*.{file_extension})")[0])
-# fname = Path(QFileDialog.getOpenFileName(obj, 'Open file', filter = f"{file_extension}(*.{file_extension})")[0])
-obj.last_file = fname
-return fname
-
-def select_save_file(obj:QMainWindow, default_name:str, extension:str) -> Path:
-"""
-File dialog to select a file to write to
-
-Args:
-obj (QMainWindow): Original main app window to be parent
-default_name (str): default base file name
-extension (str): file extension
-
-Returns:
-Path: Path of file to be opened
-"""
-try:
-# home_dir = Path(obj.ctx.directory_path).joinpath(default_name).resolve().__str__()
-home_dir = obj.last_dir.joinpath(default_name).resolve().__str__()
-except FileNotFoundError:
-home_dir = Path.home().joinpath(default_name).resolve().__str__()
-fname = Path(QFileDialog.getSaveFileName(obj, "Save File", home_dir, filter = f"{extension}(*.{extension})")[0])
-# fname = Path(QFileDialog.getSaveFileName(obj, "Save File", filter = f"{extension}(*.{extension})")[0])
-obj.last_dir = fname.parent
-return fname
-
-def extract_form_info(object) -> dict:
-"""
-retrieves object names and values from form
-DEPRECIATED. Replaced by individual form parser methods.
-
-Args:
-object (_type_): the form widget
-
-Returns:
-dict: dictionary of objectName:text items
-"""
-
-from frontend.custom_widgets import ReagentTypeForm
-dicto = {}
-reagents = []
-logger.debug(f"Object type: {type(object)}")
-# grab all widgets in form
-try:
-all_children = object.layout.parentWidget().findChildren(QWidget)
-except AttributeError:
-all_children = object.layout().parentWidget().findChildren(QWidget)
-for item in all_children:
-logger.debug(f"Looking at: {item.objectName()}: {type(item)}")
-match item:
-case QLineEdit():
-dicto[item.objectName()] = item.text()
-case QComboBox():
-dicto[item.objectName()] = item.currentText()
-case QDateEdit():
-dicto[item.objectName()] = item.date().toPyDate()
-case QSpinBox() | QDoubleSpinBox():
-dicto[item.objectName()] = item.value()
-case ReagentTypeForm():
-reagent = extract_form_info(item)
-logger.debug(f"Reagent found: {reagent}")
-if isinstance(reagent, tuple):
-reagent = reagent[0]
-# reagents[reagent["name"].strip()] = {'eol':int(reagent['eol'])}
-reagents.append({k:v for k,v in reagent.items() if k not in ['', 'qt_spinbox_lineedit']})
-# value for ad hoc check above
-if isinstance(dicto, tuple):
-logger.warning(f"Got tuple for dicto for some reason.")
-dicto = dicto[0]
-if isinstance(reagents, tuple):
-logger.warning(f"Got tuple for reagents for some reason.")
-reagents = reagents[0]
-if reagents != {}:
-return dicto, reagents
-return dicto
@@ -13,8 +13,6 @@ from PyQt6.QtWidgets import (
 )
 from PyQt6.QtCore import Qt, QDate, QSize, pyqtSignal
 from tools import check_not_nan, jinja_template_loading, Settings
-from backend.db.functions import (lookup_reagent_types, lookup_reagents, lookup_submission_type, lookup_reagenttype_kittype_association, \
-    lookup_submissions, lookup_organizations, lookup_kit_types)
 from backend.db.models import *
 from sqlalchemy import FLOAT, INTEGER
 import logging
@@ -200,7 +198,6 @@ class KitAdder(QWidget):
             "qt_scrollarea_vcontainer", "submit_btn"
         ]

-
     def add_RT(self) -> None:
         """
         insert new reagent type row
@@ -439,7 +436,7 @@ class ReagentFormWidget(QWidget):
         # self.setParent(parent)
         self.reagent = reagent
         self.extraction_kit = extraction_kit
-        self.ctx = reagent.ctx
+        # self.ctx = reagent.ctx
         layout = QVBoxLayout()
         self.label = self.ReagentParsedLabel(reagent=reagent)
         layout.addWidget(self.label)
@@ -476,7 +473,7 @@ class ReagentFormWidget(QWidget):
         if rt == None:
             # rt = lookup_reagent_types(ctx=self.ctx, kit_type=self.extraction_kit, reagent=wanted_reagent)
             rt = ReagentType.query(kit_type=self.extraction_kit, reagent=wanted_reagent)
-        return PydReagent(ctx=self.ctx, name=wanted_reagent.name, lot=wanted_reagent.lot, type=rt.name, expiry=wanted_reagent.expiry, parsed=not self.missing), None
+        return PydReagent(name=wanted_reagent.name, lot=wanted_reagent.lot, type=rt.name, expiry=wanted_reagent.expiry, parsed=not self.missing), None

     def updated(self):
         self.missing = True
@@ -504,7 +501,7 @@ class ReagentFormWidget(QWidget):

     def __init__(self, reagent, extraction_kit:str) -> None:
         super().__init__()
-        self.ctx = reagent.ctx
+        # self.ctx = reagent.ctx
         self.setEditable(True)
         # if reagent.parsed:
         #     pass
@@ -569,6 +566,7 @@ class SubmissionFormWidget(QWidget):
                 layout.addWidget(add_widget)
             else:
                 setattr(self, k, v)
+
         self.setLayout(layout)

     def create_widget(self, key:str, value:dict, submission_type:str|None=None):
@@ -7,7 +7,6 @@ from PyQt6.QtWidgets import (
 )
 from tools import jinja_template_loading
 import logging
-from backend.db.functions import lookup_kit_types, lookup_submission_type
 from backend.db.models import KitType, SubmissionType
 from typing import Literal

@@ -38,19 +37,19 @@ class AlertPop(QMessageBox):
     """
     Dialog to show an alert.
     """
-    def __init__(self, message:str, status:Literal['information', 'question', 'warning', 'critical']) -> QMessageBox:
+    def __init__(self, message:str, status:Literal['Information', 'Question', 'Warning', 'Critical'], owner:str|None=None) -> QMessageBox:
         super().__init__()
         # select icon by string
-        icon = getattr(QMessageBox.Icon, status.title())
+        icon = getattr(QMessageBox.Icon, status)
         self.setIcon(icon)
         self.setInformativeText(message)
-        self.setWindowTitle(status.title())
+        self.setWindowTitle(f"{owner} - {status.title()}")

 class KitSelector(QDialog):
     """
     dialog to ask yes/no questions
     """
-    def __init__(self, ctx:dict, title:str, message:str) -> QDialog:
+    def __init__(self, title:str, message:str) -> QDialog:
         super().__init__()
         self.setWindowTitle(title)
         self.widget = QComboBox()
@@ -78,7 +77,7 @@ class SubmissionTypeSelector(QDialog):
     """
     dialog to ask yes/no questions
     """
-    def __init__(self, ctx:dict, title:str, message:str) -> QDialog:
+    def __init__(self, title:str, message:str) -> QDialog:
         super().__init__()
         self.setWindowTitle(title)
         self.widget = QComboBox()
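An illustrative call against the widened AlertPop constructor above; the owner string is hypothetical, and the status value must now match a QMessageBox.Icon member name exactly:

    msg = AlertPop(message="No data found for controls in given date range.",
                   status="Critical", owner="Submissions App")
    msg.exec()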
105  src/submissions/frontend/functions/__init__.py  (new file)
@@ -0,0 +1,105 @@
'''
functions used by all windows in the application's frontend
'''
from pathlib import Path
import logging
from PyQt6.QtWidgets import QMainWindow, QFileDialog

logger = logging.getLogger(f"submissions.{__name__}")

def select_open_file(obj:QMainWindow, file_extension:str) -> Path:
    """
    File dialog to select a file to read from

    Args:
        obj (QMainWindow): Original main app window to be parent
        file_extension (str): file extension

    Returns:
        Path: Path of file to be opened
    """
    try:
        # home_dir = Path(obj.ctx.directory_path).resolve().__str__()
        home_dir = obj.last_dir.resolve().__str__()
    except FileNotFoundError:
        home_dir = Path.home().resolve().__str__()
    fname = Path(QFileDialog.getOpenFileName(obj, 'Open file', home_dir, filter = f"{file_extension}(*.{file_extension})")[0])
    # fname = Path(QFileDialog.getOpenFileName(obj, 'Open file', filter = f"{file_extension}(*.{file_extension})")[0])
    obj.last_file = fname
    return fname

def select_save_file(obj:QMainWindow, default_name:str, extension:str) -> Path:
    """
    File dialog to select a file to write to

    Args:
        obj (QMainWindow): Original main app window to be parent
        default_name (str): default base file name
        extension (str): file extension

    Returns:
        Path: Path of file to be opened
    """
    try:
        # home_dir = Path(obj.ctx.directory_path).joinpath(default_name).resolve().__str__()
        home_dir = obj.last_dir.joinpath(default_name).resolve().__str__()
    except FileNotFoundError:
        home_dir = Path.home().joinpath(default_name).resolve().__str__()
    fname = Path(QFileDialog.getSaveFileName(obj, "Save File", home_dir, filter = f"{extension}(*.{extension})")[0])
    # fname = Path(QFileDialog.getSaveFileName(obj, "Save File", filter = f"{extension}(*.{extension})")[0])
    obj.last_dir = fname.parent
    return fname

# def extract_form_info(object) -> dict:
#     """
#     retrieves object names and values from form
#     DEPRECIATED. Replaced by individual form parser methods.
#
#     Args:
#         object (_type_): the form widget
#
#     Returns:
#         dict: dictionary of objectName:text items
#     """
#     from frontend.custom_widgets import ReagentTypeForm
#     dicto = {}
#     reagents = []
#     logger.debug(f"Object type: {type(object)}")
#     # grab all widgets in form
#     try:
#         all_children = object.layout.parentWidget().findChildren(QWidget)
#     except AttributeError:
#         all_children = object.layout().parentWidget().findChildren(QWidget)
#     for item in all_children:
#         logger.debug(f"Looking at: {item.objectName()}: {type(item)}")
#         match item:
#             case QLineEdit():
#                 dicto[item.objectName()] = item.text()
#             case QComboBox():
#                 dicto[item.objectName()] = item.currentText()
#             case QDateEdit():
#                 dicto[item.objectName()] = item.date().toPyDate()
#             case QSpinBox() | QDoubleSpinBox():
#                 dicto[item.objectName()] = item.value()
#             case ReagentTypeForm():
#                 reagent = extract_form_info(item)
#                 logger.debug(f"Reagent found: {reagent}")
#                 if isinstance(reagent, tuple):
#                     reagent = reagent[0]
#                 # reagents[reagent["name"].strip()] = {'eol':int(reagent['eol'])}
#                 reagents.append({k:v for k,v in reagent.items() if k not in ['', 'qt_spinbox_lineedit']})
#     # value for ad hoc check above
#     if isinstance(dicto, tuple):
#         logger.warning(f"Got tuple for dicto for some reason.")
#         dicto = dicto[0]
#     if isinstance(reagents, tuple):
#         logger.warning(f"Got tuple for reagents for some reason.")
#         reagents = reagents[0]
#     if reagents != {}:
#         return dicto, reagents
#     return dicto

from .main_window_functions import *
from .submission_functions import *
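A minimal usage sketch for the two dialog helpers above, assuming the caller is the main window and that it seeds last_dir itself (that initialisation is not part of this diff); note that only select_save_file moves last_dir, while select_open_file records last_file:

    from pathlib import Path
    from PyQt6.QtWidgets import QMainWindow
    from frontend.functions import select_open_file, select_save_file  # import path assumed

    class App(QMainWindow):
        def __init__(self):
            super().__init__()
            # seed the remembered directory so the helpers' home-directory fallback rarely fires
            self.last_dir = Path.home()

        def import_submission(self):
            # opens the dialog in last_dir and records the chosen file on self.last_file
            fname = select_open_file(self, file_extension="xlsx")
            print(f"importing {fname}")

        def export_report(self):
            # opens the dialog in last_dir and moves last_dir to the chosen folder
            fname = select_save_file(self, default_name="report", extension="pdf")
            print(f"writing {fname}")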
102  src/submissions/frontend/functions/all_window_functions.py  (new file)
@@ -0,0 +1,102 @@
'''
functions used by all windows in the application's frontend
NOTE: Depreciated. Moved to functions.__init__
'''
from pathlib import Path
import logging
from PyQt6.QtWidgets import QMainWindow, QFileDialog

logger = logging.getLogger(f"submissions.{__name__}")

def select_open_file(obj:QMainWindow, file_extension:str) -> Path:
    """
    File dialog to select a file to read from

    Args:
        obj (QMainWindow): Original main app window to be parent
        file_extension (str): file extension

    Returns:
        Path: Path of file to be opened
    """
    try:
        # home_dir = Path(obj.ctx.directory_path).resolve().__str__()
        home_dir = obj.last_dir.resolve().__str__()
    except FileNotFoundError:
        home_dir = Path.home().resolve().__str__()
    fname = Path(QFileDialog.getOpenFileName(obj, 'Open file', home_dir, filter = f"{file_extension}(*.{file_extension})")[0])
    # fname = Path(QFileDialog.getOpenFileName(obj, 'Open file', filter = f"{file_extension}(*.{file_extension})")[0])
    obj.last_file = fname
    return fname

def select_save_file(obj:QMainWindow, default_name:str, extension:str) -> Path:
    """
    File dialog to select a file to write to

    Args:
        obj (QMainWindow): Original main app window to be parent
        default_name (str): default base file name
        extension (str): file extension

    Returns:
        Path: Path of file to be opened
    """
    try:
        # home_dir = Path(obj.ctx.directory_path).joinpath(default_name).resolve().__str__()
        home_dir = obj.last_dir.joinpath(default_name).resolve().__str__()
    except FileNotFoundError:
        home_dir = Path.home().joinpath(default_name).resolve().__str__()
    fname = Path(QFileDialog.getSaveFileName(obj, "Save File", home_dir, filter = f"{extension}(*.{extension})")[0])
    # fname = Path(QFileDialog.getSaveFileName(obj, "Save File", filter = f"{extension}(*.{extension})")[0])
    obj.last_dir = fname.parent
    return fname

# def extract_form_info(object) -> dict:
#     """
#     retrieves object names and values from form
#     DEPRECIATED. Replaced by individual form parser methods.
#
#     Args:
#         object (_type_): the form widget
#
#     Returns:
#         dict: dictionary of objectName:text items
#     """
#     from frontend.custom_widgets import ReagentTypeForm
#     dicto = {}
#     reagents = []
#     logger.debug(f"Object type: {type(object)}")
#     # grab all widgets in form
#     try:
#         all_children = object.layout.parentWidget().findChildren(QWidget)
#     except AttributeError:
#         all_children = object.layout().parentWidget().findChildren(QWidget)
#     for item in all_children:
#         logger.debug(f"Looking at: {item.objectName()}: {type(item)}")
#         match item:
#             case QLineEdit():
#                 dicto[item.objectName()] = item.text()
#             case QComboBox():
#                 dicto[item.objectName()] = item.currentText()
#             case QDateEdit():
#                 dicto[item.objectName()] = item.date().toPyDate()
#             case QSpinBox() | QDoubleSpinBox():
#                 dicto[item.objectName()] = item.value()
#             case ReagentTypeForm():
#                 reagent = extract_form_info(item)
#                 logger.debug(f"Reagent found: {reagent}")
#                 if isinstance(reagent, tuple):
#                     reagent = reagent[0]
#                 # reagents[reagent["name"].strip()] = {'eol':int(reagent['eol'])}
#                 reagents.append({k:v for k,v in reagent.items() if k not in ['', 'qt_spinbox_lineedit']})
#     # value for ad hoc check above
#     if isinstance(dicto, tuple):
#         logger.warning(f"Got tuple for dicto for some reason.")
#         dicto = dicto[0]
#     if isinstance(reagents, tuple):
#         logger.warning(f"Got tuple for reagents for some reason.")
#         reagents = reagents[0]
#     if reagents != {}:
#         return dicto, reagents
#     return dicto
@@ -3,11 +3,8 @@ contains operations used by multiple widgets.
 '''
 from datetime import date
 import difflib
-from getpass import getuser
 import inspect
-import pprint
-import re
-import sys
+from pprint import pformat
 import yaml
 import json
 from typing import Tuple, List
@@ -17,28 +14,22 @@ from xhtml2pdf import pisa
 import pandas as pd
 from backend.db.models import *
 import logging
-from PyQt6.QtWidgets import (
-    QMainWindow, QLabel, QWidget, QPushButton,
-    QLineEdit, QComboBox, QDateEdit
-)
-from .all_window_functions import select_open_file, select_save_file
+from PyQt6.QtWidgets import QMainWindow, QPushButton
+# from .all_window_functions import select_open_file, select_save_file
+from . import select_open_file, select_save_file
 from PyQt6.QtCore import QSignalBlocker
-from backend.db.models import BasicSubmission
 from backend.db.functions import (
-    lookup_reagents, get_control_subtypes,
-    update_subsampassoc_with_pcr, check_kit_integrity, update_last_used, lookup_organizations, lookup_kit_types,
-    lookup_submissions, lookup_controls, lookup_samples, lookup_submission_sample_association, store_object, lookup_submission_type,
-    #construct_submission_info, construct_kit_from_yaml, construct_org_from_yaml
+    get_control_subtypes, update_subsampassoc_with_pcr, check_kit_integrity, update_last_used
 )
-from backend.excel.parser import SheetParser, PCRParser, SampleParser
+from backend.excel.parser import SheetParser, PCRParser
 from backend.excel.reports import make_report_html, make_report_xlsx, convert_data_list_to_df
-from backend.validators import PydSubmission, PydSample, PydReagent
+from backend.validators import PydSubmission, PydKit
-from tools import check_not_nan, convert_well_to_row_column
+from tools import Report, Result
-from .custom_widgets.pop_ups import AlertPop, QuestionAsker
+from frontend.custom_widgets.pop_ups import AlertPop, QuestionAsker
-from .custom_widgets import ReportDatePicker
+from frontend.custom_widgets import ReportDatePicker
-from .visualizations.control_charts import create_charts, construct_html
+from frontend.visualizations.control_charts import create_charts, construct_html
 from pathlib import Path
-from frontend.custom_widgets.misc import FirstStrandSalvage, FirstStrandPlateList, ReagentFormWidget
+from frontend.custom_widgets.misc import ReagentFormWidget

 logger = logging.getLogger(f"submissions.{__name__}")

@@ -53,7 +44,7 @@ def import_submission_function(obj:QMainWindow, fname:Path|None=None) -> Tuple[Q
        Tuple[QMainWindow, dict|None]: Collection of new main app window and result dict
    """
    logger.debug(f"\n\nStarting Import...\n\n")
-    result = None
+    report = Report()
    # logger.debug(obj.ctx)
    # initialize samples
    try:
@@ -67,126 +58,38 @@ def import_submission_function(obj:QMainWindow, fname:Path|None=None) -> Tuple[Q
        fname = select_open_file(obj, file_extension="xlsx")
    logger.debug(f"Attempting to parse file: {fname}")
    if not fname.exists():
-        result = dict(message=f"File {fname.__str__()} not found.", status="critical")
-        return obj, result
+        # result = dict(message=f"File {fname.__str__()} not found.", status="critical")
+        report.add_result(Result(msg=f"File {fname.__str__()} not found.", status="critical"))
+        obj.report.add_result(report)
+        return obj
    # create sheetparser using excel sheet and context from gui
    try:
        obj.prsr = SheetParser(ctx=obj.ctx, filepath=fname)
    except PermissionError:
        logger.error(f"Couldn't get permission to access file: {fname}")
-        return obj, result
+        return obj
    try:
-        logger.debug(f"Submission dictionary:\n{pprint.pformat(obj.prsr.sub)}")
+        logger.debug(f"Submission dictionary:\n{pformat(obj.prsr.sub)}")
        obj.pyd = obj.prsr.to_pydantic()
-        logger.debug(f"Pydantic result: \n\n{pprint.pformat(obj.pyd)}\n\n")
+        logger.debug(f"Pydantic result: \n\n{pformat(obj.pyd)}\n\n")
    except Exception as e:
-        return obj, dict(message= f"Problem creating pydantic model:\n\n{e}", status="critical")
-    # destroy any widgets from previous imports
-    # obj.table_widget.formwidget.set_parent(None)
-    # obj.current_submission_type = pyd.submission_type['value']
-    # obj.current_file = pyd.filepath
-    # Get list of fields from pydantic model.
-    # fields = list(pyd.model_fields.keys()) + list(pyd.model_extra.keys())
-    # fields.remove('filepath')
-    # logger.debug(f"pydantic fields: {fields}")
-    # for field in fields:
-    #     value = getattr(pyd, field)
-    #     logger.debug(f"Checking: {field}: {value}")
-    #     # Get from pydantic model whether field was completed in the form
-    #     if isinstance(value, dict) and field != 'ctx':
-    #         logger.debug(f"The field {field} is a dictionary: {value}")
-    #         if not value['parsed']:
-    #             obj.missing_info.append(field)
-    #         label = ParsedQLabel(value, field)
-    #         match field:
-    #             case 'submitting_lab':
-    #                 logger.debug(f"{field}: {value['value']}")
-    #                 # create combobox to hold looked up submitting labs
-    #                 add_widget = QComboBox()
-    #                 labs = [item.__str__() for item in lookup_organizations(ctx=obj.ctx)]
-    #                 # try to set closest match to top of list
-    #                 try:
-    #                     labs = difflib.get_close_matches(value['value'], labs, len(labs), 0)
-    #                 except (TypeError, ValueError):
-    #                     pass
-    #                 # set combobox values to lookedup values
-    #                 add_widget.addItems(labs)
-    #             case 'extraction_kit':
-    #                 # if extraction kit not available, all other values fail
-    #                 if not check_not_nan(value['value']):
-    #                     msg = AlertPop(message="Make sure to check your extraction kit in the excel sheet!", status="warning")
-    #                     msg.exec()
-    #                 # create combobox to hold looked up kits
-    #                 add_widget = QComboBox()
-    #                 # lookup existing kits by 'submission_type' decided on by sheetparser
-    #                 logger.debug(f"Looking up kits used for {pyd.submission_type['value']}")
-    #                 uses = [item.__str__() for item in lookup_kit_types(ctx=obj.ctx, used_for=pyd.submission_type['value'])]
-    #                 logger.debug(f"Kits received for {pyd.submission_type['value']}: {uses}")
-    #                 if check_not_nan(value['value']):
-    #                     logger.debug(f"The extraction kit in parser was: {value['value']}")
-    #                     uses.insert(0, uses.pop(uses.index(value['value'])))
-    #                     obj.ext_kit = value['value']
-    #                 else:
-    #                     logger.error(f"Couldn't find {obj.prsr.sub['extraction_kit']}")
-    #                     obj.ext_kit = uses[0]
-    #                 # Run reagent scraper whenever extraction kit is changed.
-    #                 add_widget.currentTextChanged.connect(obj.scrape_reagents)
-    #             case 'submitted_date':
-    #                 # uses base calendar
-    #                 add_widget = QDateEdit(calendarPopup=True)
-    #                 # sets submitted date based on date found in excel sheet
-    #                 try:
-    #                     add_widget.setDate(value['value'])
-    #                 # if not found, use today
-    #                 except:
-    #                     add_widget.setDate(date.today())
-    #             case 'samples':
-    #                 # hold samples in 'obj' until form submitted
-    #                 logger.debug(f"{field}:\n\t{value}")
-    #                 obj.samples = value
-    #                 continue
-    #             case 'submission_category':
-    #                 add_widget = QComboBox()
-    #                 cats = ['Diagnostic', "Surveillance", "Research"]
-    #                 cats += [item.name for item in lookup_submission_type(ctx=obj.ctx)]
-    #                 try:
-    #                     cats.insert(0, cats.pop(cats.index(value['value'])))
-    #                 except ValueError:
-    #                     cats.insert(0, cats.pop(cats.index(pyd.submission_type['value'])))
-    #                 add_widget.addItems(cats)
-    #             case "ctx" | 'reagents' | 'csv' | 'filepath':
-    #                 continue
-    #             case _:
-    #                 # anything else gets added in as a line edit
-    #                 add_widget = QLineEdit()
-    #                 logger.debug(f"Setting widget text to {str(value['value']).replace('_', ' ')}")
-    #                 add_widget.setText(str(value['value']).replace("_", " "))
-    #         try:
-    #             add_widget.setObjectName(field)
-    #             logger.debug(f"Widget name set to: {add_widget.objectName()}")
-    #             obj.table_widget.formlayout.addWidget(label)
-    #             obj.table_widget.formlayout.addWidget(add_widget)
-    #         except AttributeError as e:
-    #             logger.error(e)
+        report.add_result(Result(msg=f"Problem creating pydantic model:\n\n{e}", status="critical"))
+        obj.report.add_result(report)
+        return obj
    obj.form = obj.pyd.toForm(parent=obj)
    obj.table_widget.formlayout.addWidget(obj.form)
-    # kit_widget = obj.table_widget.formlayout.parentWidget().findChild(QComboBox, 'extraction_kit')
    kit_widget = obj.form.find_widgets(object_name="extraction_kit")[0].input
    logger.debug(f"Kitwidget {kit_widget}")
-    # block
-    # with QSignalBlocker(kit_widget) as blocker:
-    #     kit_widget.addItems(obj.uses)
    obj.scrape_reagents(kit_widget.currentText())
    kit_widget.currentTextChanged.connect(obj.scrape_reagents)
    # compare obj.reagents with expected reagents in kit
    if obj.prsr.sample_result != None:
        msg = AlertPop(message=obj.prsr.sample_result, status="WARNING")
        msg.exec()
-    # logger.debug(f"Pydantic extra fields: {obj.pyd.model_extra}")
-    # if "csv" in pyd.model_extra:
-    #     obj.csv = pyd.model_extra['csv']
-    logger.debug(f"All attributes of obj:\n{pprint.pformat(obj.__dict__)}")
-    return obj, result
+    obj.report.add_result(report)
+    logger.debug(f"Outgoing report: {obj.report.results}")
+    logger.debug(f"All attributes of obj:\n{pformat(obj.__dict__)}")
+    return obj

def kit_reload_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
    """
@@ -198,21 +101,16 @@ def kit_reload_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
    Returns:
        Tuple[QMainWindow, dict]: Collection of new main app window and result dict
    """
-    result = None
+    report = Report()
    # for item in obj.table_widget.formlayout.parentWidget().findChildren(QWidget):
    logger.debug(f"Attempting to clear {obj.form.find_widgets()}")

    for item in obj.form.find_widgets():
        if isinstance(item, ReagentFormWidget):
            item.setParent(None)
-    # if item.text().startswith("Lot"):
-    #     item.setParent(None)
-    # else:
-    #     logger.debug(f"Type of {item.objectName()} is {type(item)}")
-    # if item.objectName().startswith("lot_"):
-    #     item.setParent(None)
-    kit_integrity_completion_function(obj)
-    return obj, result
+    obj = kit_integrity_completion_function(obj)
+    obj.report.add_result(report)
+    logger.debug(f"Outgoing report: {obj.report.results}")
+    return obj

def kit_integrity_completion_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
    """
@@ -224,9 +122,8 @@ def kit_integrity_completion_function(obj:QMainWindow) -> Tuple[QMainWindow, dic
    Returns:
        Tuple[QMainWindow, dict]: Collection of new main app window and result dict
    """
-    result = None
+    report = Report()
    missing_reagents = []
-    # kit_reload_function(obj=obj)
    logger.debug(inspect.currentframe().f_back.f_code.co_name)
    # find the widget that contains kit info
    kit_widget = obj.form.find_widgets(object_name="extraction_kit")[0].input
@@ -235,13 +132,8 @@ def kit_integrity_completion_function(obj:QMainWindow) -> Tuple[QMainWindow, dic
    obj.ext_kit = kit_widget.currentText()
    # for reagent in obj.pyd.reagents:
    for reagent in obj.form.reagents:
-        # obj.table_widget.formlayout.addWidget(ParsedQLabel({'parsed':True}, item.type, title=False, label_name=f"lot_{item.type}"))
-        # reagent = dict(type=item.type, lot=item.lot, expiry=item.expiry, name=item.name)
-        # add_widget = ImportReagent(ctx=obj.ctx, reagent=reagent, extraction_kit=obj.ext_kit)
-        # obj.table_widget.formlayout.addWidget(add_widget)
        add_widget = ReagentFormWidget(parent=obj.table_widget.formwidget, reagent=reagent, extraction_kit=obj.ext_kit)
        add_widget.setParent(obj.form)
-        # obj.table_widget.formlayout.addWidget(add_widget)
        obj.form.layout().addWidget(add_widget)
        if reagent.missing:
            missing_reagents.append(reagent)
@@ -249,22 +141,23 @@ def kit_integrity_completion_function(obj:QMainWindow) -> Tuple[QMainWindow, dic
    # TODO: put check_kit_integrity here instead of what's here?
    # see if there are any missing reagents
    if len(missing_reagents) > 0:
-        result = dict(message=f"The submission you are importing is missing some reagents expected by the kit.\n\nIt looks like you are missing: {[item.type.upper() for item in missing_reagents]}\n\nAlternatively, you may have set the wrong extraction kit.\n\nThe program will populate lists using existing reagents.\n\nPlease make sure you check the lots carefully!", status="Warning")
-    # for item in obj.missing_reagents:
-    #     # Add label that has parsed as False to show "MISSING" label.
-    #     obj.table_widget.formlayout.addWidget(ParsedQLabel({'parsed':False}, item.type, title=False, label_name=f"missing_{item.type}"))
-    #     # Set default parameters for the empty reagent.
-    #     reagent = dict(type=item.type, lot=None, expiry=date.today(), name=None)
-    #     # create and add widget
-    #     # add_widget = ImportReagent(ctx=obj.ctx, reagent=PydReagent(**reagent), extraction_kit=obj.ext_kit)
-    #     add_widget = ImportReagent(ctx=obj.ctx, reagent=reagent, extraction_kit=obj.ext_kit)
-    #     obj.table_widget.formlayout.addWidget(add_widget)
-    # Add submit button to the form.
+        result = Result(msg=f"""The submission you are importing is missing some reagents expected by the kit.\n\n
+        It looks like you are missing: {[item.type.upper() for item in missing_reagents]}\n\n
+        Alternatively, you may have set the wrong extraction kit.\n\nThe program will populate lists using existing reagents.
+        \n\nPlease make sure you check the lots carefully!""".replace(" ", ""), status="Warning")
+        report.add_result(result)
+    if hasattr(obj.pyd, "csv"):
+        export_csv_btn = QPushButton("Export CSV")
+        export_csv_btn.setObjectName("export_csv_btn")
+        obj.form.layout().addWidget(export_csv_btn)
+        export_csv_btn.clicked.connect(obj.export_csv)
    submit_btn = QPushButton("Submit")
    submit_btn.setObjectName("submit_btn")
    obj.form.layout().addWidget(submit_btn)
    submit_btn.clicked.connect(obj.submit_new_sample)
-    return obj, result
+    obj.report.add_result(report)
+    logger.debug(f"Outgoing report: {obj.report.results}")
+    return obj

def submit_new_sample_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
    """
@@ -277,13 +170,15 @@ def submit_new_sample_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
        Tuple[QMainWindow, dict]: Collection of new main app window and result dict
    """
    logger.debug(f"\n\nBeginning Submission\n\n")
-    result = None
+    report = Report()
    obj.pyd: PydSubmission = obj.form.parse_form()
-    logger.debug(f"Submission: {pprint.pformat(obj.pyd)}")
+    logger.debug(f"Submission: {pformat(obj.pyd)}")
    logger.debug("Checking kit integrity...")
-    kit_integrity = check_kit_integrity(sub=obj.pyd)
-    if kit_integrity != None:
-        return obj, dict(message=kit_integrity['message'], status="critical")
+    result = check_kit_integrity(sub=obj.pyd)
+    report.add_result(result)
+    if len(result.results) > 0:
+        obj.report.add_result(report)
+        return obj
    base_submission, result = obj.pyd.toSQL()
    # check output message for issues
    match result['code']:
@@ -308,25 +203,28 @@ def submit_new_sample_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
    # add reagents to submission object
    for reagent in base_submission.reagents:
        update_last_used(reagent=reagent, kit=base_submission.extraction_kit)
-    logger.debug(f"Here is the final submission: {pprint.pformat(base_submission.__dict__)}")
-    logger.debug(f"Parsed reagents: {pprint.pformat(base_submission.reagents)}")
+    logger.debug(f"Here is the final submission: {pformat(base_submission.__dict__)}")
+    logger.debug(f"Parsed reagents: {pformat(base_submission.reagents)}")
    logger.debug(f"Sending submission: {base_submission.rsl_plate_num} to database.")
    base_submission.save()
    # update summary sheet
    obj.table_widget.sub_wid.setData()
    # reset form
    obj.form.setParent(None)
-    logger.debug(f"All attributes of obj: {pprint.pformat(obj.__dict__)}")
+    logger.debug(f"All attributes of obj: {pformat(obj.__dict__)}")
    wkb = obj.pyd.autofill_excel()
    if wkb != None:
        fname = select_save_file(obj=obj, default_name=obj.pyd.construct_filename(), extension="xlsx")
-        wkb.save(filename=fname.__str__())
+        try:
+            wkb.save(filename=fname.__str__())
+        except PermissionError:
+            logger.error("Hit a permission error when saving workbook. Cancelled?")
    if hasattr(obj.pyd, 'csv'):
        dlg = QuestionAsker("Export CSV?", "Would you like to export the csv file?")
        if dlg.exec():
-            fname = select_save_file(obj, f"{obj.pyd.rsl_plate_num['value']}.csv", extension="csv")
+            fname = select_save_file(obj, f"{obj.pyd.construct_filename()}.csv", extension="csv")
            try:
-                obj.csv.to_csv(fname.__str__(), index=False)
+                obj.pyd.csv.to_csv(fname.__str__(), index=False)
            except PermissionError:
                logger.debug(f"Could not get permissions to {fname}. Possibly the request was cancelled.")
    return obj, result
@@ -344,11 +242,9 @@ def generate_report_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
    # ask for date ranges
    dlg = ReportDatePicker()
    if dlg.exec():
-        # info = extract_form_info(dlg)
        info = dlg.parse_form()
        logger.debug(f"Report info: {info}")
        # find submissions based on date range
-        # subs = lookup_submissions(ctx=obj.ctx, start_date=info['start_date'], end_date=info['end_date'])
        subs = BasicSubmission.query(start_date=info['start_date'], end_date=info['end_date'])
        # convert each object to dict
        records = [item.report_dict() for item in subs]
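The hunks in this file replace the module-level lookup_* helpers (which all carried a ctx argument) with classmethod queries on the ORM models, e.g. BasicSubmission.query(start_date=..., end_date=...). A rough sketch of the shape such a classmethod could take; Base, session and the column names used here are assumptions, since the model definitions are not part of this diff:

    from datetime import date

    class BasicSubmission(Base):
        @classmethod
        @setup_lookup                              # kwarg guard from tools (see the final hunk below)
        def query(cls, start_date: date | None = None, end_date: date | None = None,
                  rsl_number: str | None = None):
            q = session.query(cls)                 # session acquisition is an assumption
            if start_date is not None and end_date is not None:
                q = q.filter(cls.submitted_date.between(start_date, end_date))
            if rsl_number is not None:
                # a plate number identifies a single submission, so return one object
                return q.filter(cls.rsl_plate_num == rsl_number).first()
            return q.all()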
@@ -357,7 +253,6 @@ def generate_report_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
        html = make_report_html(df=summary_df, start_date=info['start_date'], end_date=info['end_date'])
        # get save location of report
        fname = select_save_file(obj=obj, default_name=f"Submissions_Report_{info['start_date']}-{info['end_date']}.pdf", extension="pdf")
-        # logger.debug(f"report output name: {fname}")
        with open(fname, "w+b") as f:
            pisa.CreatePDF(html, dest=f)
        writer = pd.ExcelWriter(fname.with_suffix(".xlsx"), engine='openpyxl')
@@ -378,7 +273,7 @@ def generate_report_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
                if cell.row > 1:
                    cell.style = 'Currency'
        writer.close()
-    return obj, result
+    return obj, None

def add_kit_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
    """
@@ -405,7 +300,7 @@ def add_kit_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
    except PermissionError:
        return
    # send to kit creator function
-    result = construct_kit_from_yaml(ctx=obj.ctx, exp=exp)
+    result = PydKit(**exp)
    return obj, result

def add_org_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
@@ -446,7 +341,7 @@ def controls_getter_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
    Returns:
        Tuple[QMainWindow, dict]: Collection of new main app window and result dict
    """
-    result = None
+    report = Report()
    # subtype defaults to disabled
    try:
        obj.table_widget.sub_typer.disconnect()
@@ -461,7 +356,8 @@ def controls_getter_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
        with QSignalBlocker(obj.table_widget.datepicker.start_date) as blocker:
            obj.table_widget.datepicker.start_date.setDate(threemonthsago)
        obj._controls_getter()
-        return obj, result
+        obj.report.add_result(report)
+        return obj
    # convert to python useable date objects
    obj.start_date = obj.table_widget.datepicker.start_date.date().toPyDate()
    obj.end_date = obj.table_widget.datepicker.end_date.date().toPyDate()
@@ -481,7 +377,8 @@ def controls_getter_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
        obj.table_widget.sub_typer.clear()
        obj.table_widget.sub_typer.setEnabled(False)
    obj._chart_maker()
-    return obj, result
+    obj.report.add_result(report)
+    return obj

def chart_maker_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
    """
@@ -493,7 +390,7 @@ def chart_maker_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
    Returns:
        Tuple[QMainWindow, dict]: Collection of new main app window and result dict
    """
-    result = None
+    report = Report()
    logger.debug(f"Control getter context: \n\tControl type: {obj.con_type}\n\tMode: {obj.mode}\n\tStart Date: {obj.start_date}\n\tEnd Date: {obj.end_date}")
    # set the subtype for kraken
    if obj.table_widget.sub_typer.currentText() == "":
@@ -517,7 +414,7 @@ def chart_maker_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
    if data == []:
        return obj, dict(status="Critical", message="No data found for controls in given date range.")
    # send to dataframe creator
-    df = convert_data_list_to_df(ctx=obj.ctx, input=data, subtype=obj.subtype)
+    df = convert_data_list_to_df(input=data, subtype=obj.subtype)
    if obj.subtype == None:
        title = obj.mode
    else:
@@ -531,7 +428,8 @@ def chart_maker_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
    obj.table_widget.webengineview.setHtml(html)
    obj.table_widget.webengineview.update()
    logger.debug("Figure updated... I hope.")
-    return obj, result
+    obj.report.add_result(report)
+    return obj

def link_controls_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
    """
@@ -737,7 +635,7 @@ def import_pcr_results_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
    """
    result = None
    fname = select_open_file(obj, file_extension="xlsx")
-    parser = PCRParser(ctx=obj.ctx, filepath=fname)
+    parser = PCRParser(filepath=fname)
    logger.debug(f"Attempting lookup for {parser.plate_num}")
    # sub = lookup_submission_by_rsl_num(ctx=obj.ctx, rsl_num=parser.plate_num)
    sub = BasicSubmission.query(rsl_number=parser.plate_num)
@@ -884,107 +782,108 @@ def autofill_excel(obj:QMainWindow, xl_map:dict, reagents:List[dict], missing_re
    fname = select_save_file(obj=obj, default_name=info['rsl_plate_num'], extension="xlsx")
    workbook.save(filename=fname.__str__())

+# def construct_first_strand_function(obj:QMainWindow) -> Tuple[QMainWindow, dict]:
+#     """
+#     Generates a csv file from client submitted xlsx file.
+#     NOTE: Depreciated, now folded into import Artic.
+
+#     Args:
+#         obj (QMainWindow): Main application
+
+#     Returns:
+#         Tuple[QMainWindow, dict]: Updated main application and result
+#     """
+#     def get_plates(input_sample_number:str, plates:list) -> Tuple[int, str]:
+#         logger.debug(f"Looking up {input_sample_number} in {plates}")
+#         # samp = lookup_samples(ctx=obj.ctx, ww_processing_num=input_sample_number)
+#         samp = BasicSample.query(ww_processing_num=input_sample_number)
+#         if samp == None:
+#             # samp = lookup_samples(ctx=obj.ctx, submitter_id=input_sample_number)
+#             samp = BasicSample.query(submitter_id=input_sample_number)
+#         if samp == None:
+#             return None, None
+#         logger.debug(f"Got sample: {samp}")
+#         # new_plates = [(iii+1, lookup_submission_sample_association(ctx=obj.ctx, sample=samp, submission=plate)) for iii, plate in enumerate(plates)]
+#         new_plates = [(iii+1, SubmissionSampleAssociation.query(sample=samp, submission=plate)) for iii, plate in enumerate(plates)]
+#         logger.debug(f"Associations: {pformat(new_plates)}")
+#         try:
+#             plate_num, plate = next(assoc for assoc in new_plates if assoc[1])
+#         except StopIteration:
+#             plate_num, plate = None, None
+#         logger.debug(f"Plate number {plate_num} is {plate}")
+#         return plate_num, plate
+#     fname = select_open_file(obj=obj, file_extension="xlsx")
+#     xl = pd.ExcelFile(fname)
+#     sprsr = SampleParser(xl=xl, submission_type="First Strand")
+#     _, samples = sprsr.parse_samples(generate=False)
+#     logger.debug(f"Samples: {pformat(samples)}")
+#     logger.debug("Called first strand sample parser")
+#     plates = sprsr.grab_plates()
+#     # Fix no plates found in form.
+#     if plates == []:
+#         dlg = FirstStrandPlateList(ctx=obj.ctx)
+#         if dlg.exec():
+#             plates = dlg.parse_form()
+#     plates = list(set(plates))
+#     logger.debug(f"Plates: {pformat(plates)}")
+#     output_samples = []
+#     logger.debug(f"Samples: {pformat(samples)}")
+#     old_plate_number = 1
+#     old_plate = ''
+#     for item in samples:
+#         try:
+#             item['well'] = re.search(r"\s\((.*)\)$", item['submitter_id']).groups()[0]
+#         except AttributeError:
+#             pass
+#         item['submitter_id'] = re.sub(r"\s\(.*\)$", "", str(item['submitter_id'])).strip()
+#         new_dict = {}
+#         new_dict['sample'] = item['submitter_id']
+#         plate_num, plate = get_plates(input_sample_number=new_dict['sample'], plates=plates)
+#         if plate_num == None:
+#             plate_num = str(old_plate_number) + "*"
+#         else:
+#             old_plate_number = plate_num
+#         logger.debug(f"Got plate number: {plate_num}, plate: {plate}")
+#         if item['submitter_id'] == "NTC1":
+#             new_dict['destination_row'] = 8
+#             new_dict['destination_column'] = 2
+#             new_dict['plate_number'] = 'control'
+#             new_dict['plate'] = None
+#             output_samples.append(new_dict)
+#             continue
+#         elif item['submitter_id'] == "NTC2":
+#             new_dict['destination_row'] = 8
+#             new_dict['destination_column'] = 5
+#             new_dict['plate_number'] = 'control'
+#             new_dict['plate'] = None
+#             output_samples.append(new_dict)
+#             continue
+#         else:
+#             new_dict['destination_row'] = item['row']
+#             new_dict['destination_column'] = item['column']
+#             new_dict['plate_number'] = plate_num
+#         # Fix plate association not found
+#         if plate == None:
+#             dlg = FirstStrandSalvage(ctx=obj.ctx, submitter_id=item['submitter_id'], rsl_plate_num=old_plate)
+#             if dlg.exec():
+#                 item.update(dlg.parse_form())
+#             try:
+#                 new_dict['source_row'], new_dict['source_column'] = convert_well_to_row_column(item['well'])
+#             except KeyError:
+#                 pass
+#         else:
+#             new_dict['plate'] = plate.submission.rsl_plate_num
+#             new_dict['source_row'] = plate.row
+#             new_dict['source_column'] = plate.column
+#             old_plate = plate.submission.rsl_plate_num
+#         output_samples.append(new_dict)
+#     df = pd.DataFrame.from_records(output_samples)
+#     df.sort_values(by=['destination_column', 'destination_row'], ascending=True, inplace=True)
+#     columnsTitles = ['sample', 'destination_column', 'destination_row', 'plate_number', 'plate', "source_column", 'source_row']
+#     df = df.reindex(columns=columnsTitles)
+#     ofname = select_save_file(obj=obj, default_name=f"First Strand {date.today()}", extension="csv")
+#     df.to_csv(ofname, index=False)
+#     return obj, None

def scrape_reagents(obj:QMainWindow, extraction_kit:str) -> Tuple[QMainWindow, dict]:
    """
@@ -998,6 +897,7 @@ def scrape_reagents(obj:QMainWindow, extraction_kit:str) -> Tuple[QMainWindow, d
    Returns:
        Tuple[QMainWindow, dict]: Updated application and result
    """
+    report = Report()
    logger.debug(f"Extraction kit: {extraction_kit}")
    # obj.reagents = []
    # obj.missing_reagents = []
@@ -1022,5 +922,14 @@ def scrape_reagents(obj:QMainWindow, extraction_kit:str) -> Tuple[QMainWindow, d
|
|||||||
obj.form.reagents = obj.prsr.sub['reagents']
|
obj.form.reagents = obj.prsr.sub['reagents']
|
||||||
# logger.debug(f"Imported reagents: {obj.reagents}")
|
# logger.debug(f"Imported reagents: {obj.reagents}")
|
||||||
# logger.debug(f"Missing reagents: {obj.missing_reagents}")
|
# logger.debug(f"Missing reagents: {obj.missing_reagents}")
|
||||||
return obj, None
|
obj.report.add_result(report)
|
||||||
|
logger.debug(f"Outgoing report: {obj.report.results}")
|
||||||
|
return obj
|
||||||
|
|
||||||
|
def export_csv_function(obj:QMainWindow, fname:Path|None=None):
|
||||||
|
if isinstance(fname, bool) or fname == None:
|
||||||
|
fname = select_save_file(obj=obj, default_name=obj.pyd.construct_filename(), extension="csv")
|
||||||
|
try:
|
||||||
|
obj.pyd.csv.to_csv(fname.__str__(), index=False)
|
||||||
|
except PermissionError:
|
||||||
|
logger.debug(f"Could not get permissions to {fname}. Possibly the request was cancelled.")
|
||||||
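The bool guard in export_csv_function most likely exists because Qt's triggered signal passes its checked flag to directly-connected slots, so fname can arrive as False rather than a path. A minimal, standalone sketch of that guard, with stand-ins for the select_save_file dialog and the pydantic CSV property (both assumed here):

from pathlib import Path

def export_csv(fname: Path | bool | None = None) -> None:
    # A directly-connected Qt slot may receive the "checked" bool instead of
    # a path; treat that the same as "no file given".
    if isinstance(fname, bool) or fname is None:
        fname = Path.home() / "export.csv"  # stand-in for the select_save_file() dialog
    try:
        Path(fname).write_text("sample,plate\n")  # stand-in for obj.pyd.csv.to_csv(...)
    except PermissionError:
        pass  # the user may have cancelled the save dialog or lack write access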
@@ -1,6 +1,7 @@
 '''
 Contains miscellaenous functions used by both frontend and backend.
 '''
+from __future__ import annotations
 from pathlib import Path
 import re
 import numpy as np
@@ -14,9 +15,10 @@ from logging import handlers
 from pathlib import Path
 from sqlalchemy.orm import Session, declarative_base, DeclarativeMeta, Query
 from sqlalchemy import create_engine
-from pydantic import field_validator
+from pydantic import field_validator, BaseModel, Field
 from pydantic_settings import BaseSettings, SettingsConfigDict
-from typing import Any, Tuple
+from typing import Any, Tuple, Literal, List
+import inspect

 logger = logging.getLogger(f"submissions.{__name__}")

@@ -176,11 +178,10 @@ class Settings(BaseSettings):
     def ensure_directory_exists(cls, value):
         if isinstance(value, str):
             value = Path(value)
-        if value.exists():
-            metadata.directory_path = value
-            return value
-        else:
-            raise FileNotFoundError(f"Couldn't find settings file {value}")
+        if not value.exists():
+            value = Path().home()
+        metadata.directory_path = value
+        return value

     @field_validator('database_path', mode="before")
     @classmethod
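The validator no longer raises FileNotFoundError when the configured directory is missing; it now falls back silently to the user's home directory. A standalone sketch of the new behaviour, minus the module-level metadata side effect (the function name here is hypothetical):

from pathlib import Path

def resolve_directory(value: str | Path) -> Path:
    # Coerce strings to Path, then fall back to the home directory rather
    # than raising when the configured location does not exist.
    if isinstance(value, str):
        value = Path(value)
    if not value.exists():
        value = Path.home()
    return value

print(resolve_directory("definitely/not/a/real/dir"))  # e.g. /home/user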
@@ -382,7 +383,7 @@ def jinja_template_loading():
     if check_if_app():
         loader_path = Path(sys._MEIPASS).joinpath("files", "templates")
     else:
-        loader_path = Path(__file__).parents[1].joinpath('templates').absolute()#.__str__()
+        loader_path = Path(__file__).parent.joinpath('templates').absolute()#.__str__()
     # jinja template loading
     loader = FileSystemLoader(loader_path)
     env = Environment(loader=loader)
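This one-word change switches the non-frozen template lookup from the directory one level above the module to the directory beside it, presumably because the module moved during the refactor. A quick illustration of the difference (the layout shown is made up):

from pathlib import Path

p = Path("/project/src/tools/__init__.py")  # hypothetical layout
print(p.parent.joinpath("templates"))       # /project/src/tools/templates
print(p.parents[1].joinpath("templates"))   # /project/src/templates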
@@ -460,4 +461,56 @@ def setup_lookup(func):
         if isinstance(v, dict):
             raise ValueError("Cannot use dictionary in query. Make sure you parse it first.")
         return func(*args, **kwargs)
     return wrapper
+
+
+class Result(BaseModel):
+
+    owner: str = Field(default="", validate_default=True)
+    code: int = Field(default=0)
+    msg: str
+    status: Literal["NoIcon", "Question", "Information", "Warning", "Critical"] = Field(default="NoIcon")
+
+    def __repr__(self) -> str:
+        return f"Result({self.owner})"
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.owner = inspect.stack()[1].function
+
+    def report(self):
+        from frontend.custom_widgets.misc import AlertPop
+        return AlertPop(message=self.msg, status=self.status, owner=self.owner)
+
+
+class Report(BaseModel):
+
+    results: List[Result] = Field(default=[])
+
+    # def __init__(self, *args, **kwargs):
+    #     if 'msg' in kwargs.keys():
+    #         res = Result(msg=kwargs['msg'])
+    #         for k,v in kwargs.items():
+    #             if k in ['code', 'status']:
+    #                 setattr(res, k, v)
+    #         self.results.append(res)
+
+    def __repr__(self):
+        return f"Report(result_count:{len(self.results)})"
+
+    def add_result(self, result:Result|Report|None):
+        match result:
+            case Result():
+                logger.debug(f"Adding {result} to results.")
+                try:
+                    self.results.append(result)
+                except AttributeError:
+                    logger.error(f"Problem adding result.")
+            case Report():
+                for res in result.results:
+                    logger.debug(f"Adding {res} from to results.")
+                    self.results.append(res)
+            case _:
+                pass
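A short usage sketch of the new classes, assuming Result and Report as defined above are in scope; the function name, messages, and code are made up:

def demo_report_flow() -> Report:
    report = Report()
    # owner is filled in automatically from the calling function's name via inspect.stack(),
    # so these Results report themselves as coming from "demo_report_flow".
    report.add_result(Result(msg="Kit parsed without issue.", status="Information"))
    report.add_result(Result(msg="Missing reagent lot.", status="Warning", code=1))

    merged = Report()
    merged.add_result(report)  # a Report folds its results into the receiver
    merged.add_result(None)    # the wildcard case quietly ignores non-results
    print(repr(merged))        # Report(result_count:2)
    return merged

Accepting either a Result or a whole Report in add_result is what lets handler functions build their own local reports and then fold them into the window-level report, as scrape_reagents now does with obj.report.add_result(report).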