Code cleanup, dependency update, various bug fixes
@@ -1,7 +1,6 @@
 ## 202405.04

-## 202405.04
-
+- Improved Webview of submission details.
 - Fixed Reagents not being updated on edit.
 - Fixed data resorting after submitting new run.

BIN  requirements.txt
Binary file not shown.
@@ -102,7 +102,7 @@ class BaseClass(Base):
     @classmethod
     def query(cls, **kwargs) -> Any | List[Any]:
         """
-        Default query function for models
+        Default query function for models. Overridden in most models.

         Returns:
             Any | List[Any]: Result of query execution.
@@ -128,7 +128,7 @@ class BaseClass(Base):
         query: Query = cls.__database_session__.query(model)
         # logger.debug(f"Grabbing singles using {model.get_default_info}")
         singles = model.get_default_info('singles')
-        logger.debug(f"Querying: {model}, with kwargs: {kwargs}")
+        logger.info(f"Querying: {model}, with kwargs: {kwargs}")
         for k, v in kwargs.items():
             # logger.debug(f"Using key: {k} with value: {v}")
             try:
@@ -63,16 +63,16 @@ class ControlType(BaseClass):
         Returns:
             List[str]: list of subtypes available
         """
-        # Get first instance since all should have same subtypes
-        # Get mode of instance
+        # NOTE: Get first instance since all should have same subtypes
+        # NOTE: Get mode of instance
         jsoner = getattr(self.instances[0], mode)
         # logger.debug(f"JSON out: {jsoner.keys()}")
         try:
-            # Pick genera (all should have same subtypes)
+            # NOTE: Pick genera (all should have same subtypes)
             genera = list(jsoner.keys())[0]
         except IndexError:
             return []
-        # remove items that don't have relevant data
+        # NOTE: remove items that don't have relevant data
        subtypes = [item for item in jsoner[genera] if "_hashes" not in item and "_ratio" not in item]
        return subtypes

@@ -135,7 +135,6 @@ class Control(BaseClass):
         """
         # logger.debug("loading json string into dict")
         try:
-            # kraken = json.loads(self.kraken)
             kraken = self.kraken
         except TypeError:
             kraken = {}
@@ -178,7 +177,7 @@ class Control(BaseClass):
             data = self.__getattribute__(mode)
         except TypeError:
             data = {}
-        logger.debug(f"Length of data: {len(data)}")
+        # logger.debug(f"Length of data: {len(data)}")
         # logger.debug("dict keys are genera of bacteria, e.g. 'Streptococcus'")
         for genus in data:
             _dict = dict(
@@ -236,7 +235,7 @@ class Control(BaseClass):
             models.Control|List[models.Control]: Control object of interest.
         """
         query: Query = cls.__database_session__.query(cls)
-        # by control type
+        # NOTE: by control type
         match control_type:
             case ControlType():
                 # logger.debug(f"Looking up control by control type: {control_type}")
@@ -246,7 +245,7 @@ class Control(BaseClass):
                 query = query.join(ControlType).filter(ControlType.name == control_type)
             case _:
                 pass
-        # by date range
+        # NOTE: by date range
         if start_date is not None and end_date is None:
             logger.warning(f"Start date with no end date, using today.")
             end_date = date.today()
@@ -120,8 +120,8 @@ class KitType(BaseClass):
             submission_type (str | Submissiontype | None, optional): Submission type to narrow results. Defaults to None.

         Returns:
-            list: List of reagent types
+            List[ReagentType]: List of reagents linked to this kit.
         """
         match submission_type:
             case SubmissionType():
                 # logger.debug(f"Getting reagents by SubmissionType {submission_type}")
@@ -152,17 +152,15 @@ class KitType(BaseClass):
             dict: Dictionary containing information locations.
         """
         info_map = {}
-        # Account for submission_type variable type.
+        # NOTE: Account for submission_type variable type.
         match submission_type:
             case str():
                 # logger.debug(f"Constructing xl map with str {submission_type}")
                 assocs = [item for item in self.kit_reagenttype_associations if
                           item.submission_type.name == submission_type]
-                # st_assoc = [item for item in self.used_for if submission_type == item.name][0]
             case SubmissionType():
                 # logger.debug(f"Constructing xl map with SubmissionType {submission_type}")
                 assocs = [item for item in self.kit_reagenttype_associations if item.submission_type == submission_type]
-                # st_assoc = submission_type
             case _:
                 raise ValueError(f"Wrong variable type: {type(submission_type)} used!")
         # logger.debug("Get all KitTypeReagentTypeAssociation for SubmissionType")
@@ -371,10 +369,10 @@ class Reagent(BaseClass):
             dict: representation of the reagent's attributes
         """
         if extraction_kit is not None:
-            # Get the intersection of this reagent's ReagentType and all ReagentTypes in KitType
+            # NOTE: Get the intersection of this reagent's ReagentType and all ReagentTypes in KitType
             try:
                 reagent_role = list(set(self.type).intersection(extraction_kit.reagent_types))[0]
-            # Most will be able to fall back to first ReagentType in itself because most will only have 1.
+            # NOTE: Most will be able to fall back to first ReagentType in itself because most will only have 1.
             except:
                 reagent_role = self.type[0]
         else:
@@ -383,7 +381,7 @@ class Reagent(BaseClass):
             rtype = reagent_role.name.replace("_", " ")
         except AttributeError:
             rtype = "Unknown"
-        # Calculate expiry with EOL from ReagentType
+        # NOTE: Calculate expiry with EOL from ReagentType
         try:
             place_holder = self.expiry + reagent_role.eol_ext
         except (TypeError, AttributeError) as e:
@@ -467,7 +465,7 @@ class Reagent(BaseClass):
         match name:
             case str():
                 # logger.debug(f"Looking up reagent by name str: {name}")
-                # Not limited due to multiple reagents having same name.
+                # NOTE: Not limited due to multiple reagents having same name.
                 query = query.filter(cls.name == name)
             case _:
                 pass
@@ -475,7 +473,7 @@ class Reagent(BaseClass):
             case str():
                 # logger.debug(f"Looking up reagent by lot number str: {lot_number}")
                 query = query.filter(cls.lot == lot_number)
-                # In this case limit number returned.
+                # NOTE: In this case limit number returned.
                 limit = 1
             case _:
                 pass
@@ -516,10 +514,6 @@ class Discount(BaseClass):
             organization (models.Organization | str | int): Organization receiving discount.
             kit_type (models.KitType | str | int): Kit discount received on.

-        Raises:
-            ValueError: Invalid Organization
-            ValueError: Invalid kit.
-
         Returns:
             models.Discount|List[models.Discount]: Discount(s) of interest.
         """
@@ -535,7 +529,6 @@ class Discount(BaseClass):
                 # logger.debug(f"Looking up discount with organization id: {organization}")
                 query = query.join(Organization).filter(Organization.id == organization)
             case _:
-                # raise ValueError(f"Invalid value for organization: {organization}")
                 pass
         match kit_type:
             case KitType():
@@ -548,7 +541,6 @@ class Discount(BaseClass):
                 # logger.debug(f"Looking up discount with kit type id: {kit_type}")
                 query = query.join(KitType).filter(KitType.id == kit_type)
             case _:
-                # raise ValueError(f"Invalid value for kit type: {kit_type}")
                 pass
         return cls.execute_query(query=query)

@@ -634,11 +626,18 @@ class SubmissionType(BaseClass):
         self.save()

     def construct_info_map(self, mode: Literal['read', 'write']) -> dict:
+        """
+        Make of map of where all fields are located in excel sheet
+
+        Args:
+            mode (Literal["read", "write"]): Which mode to get locations for
+
+        Returns:
+            dict: Map of locations
+        """
         info = self.info_map
         # logger.debug(f"Info map: {info}")
         output = {}
-        # for k,v in info.items():
-        #     info[k]['write'] += info[k]['read']
         match mode:
             case "read":
                 output = {k: v[mode] for k, v in info.items() if v[mode]}
@@ -647,7 +646,13 @@ class SubmissionType(BaseClass):
         output = {k: v for k, v in output.items() if all([isinstance(item, dict) for item in v])}
         return output

-    def construct_sample_map(self):
+    def construct_sample_map(self) -> dict:
+        """
+        Returns sample map
+
+        Returns:
+            dict: sample location map
+        """
         return self.sample_map

     def construct_equipment_map(self) -> dict:
@@ -655,7 +660,7 @@ class SubmissionType(BaseClass):
         Constructs map of equipment to excel cells.

         Returns:
-            List[dict]: List of equipment locations in excel sheet
+            dict: Map equipment locations in excel sheet
         """
         output = {}
         # logger.debug("Iterating through equipment roles")
@@ -671,7 +676,7 @@ class SubmissionType(BaseClass):
         Returns PydEquipmentRole of all equipment associated with this SubmissionType

         Returns:
-            List['PydEquipmentRole']: List of equipment roles
+            List[PydEquipmentRole]: List of equipment roles
         """
         return [item.to_pydantic(submission_type=self, extraction_kit=extraction_kit) for item in self.equipment]

@@ -702,7 +707,13 @@ class SubmissionType(BaseClass):
             raise TypeError(f"Type {type(equipment_role)} is not allowed")
         return list(set([item for items in relevant for item in items if item != None]))

-    def get_submission_class(self):
+    def get_submission_class(self) -> "BasicSubmission":
+        """
+        Gets submission class associated with this submission type.
+
+        Returns:
+            BasicSubmission: Submission class
+        """
         from .submissions import BasicSubmission
         return BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.name)

@@ -1063,7 +1074,7 @@ class Equipment(BaseClass):
             processes (bool, optional): Whether to include processes. Defaults to False.

         Returns:
-            dict: _description_
+            dict: Dictionary representation of this equipment
         """
         if not processes:
             return {k: v for k, v in self.__dict__.items() if k != 'processes'}
@@ -1152,7 +1163,7 @@ class Equipment(BaseClass):
             extraction_kit (str | KitType | None, optional): Relevant KitType. Defaults to None.

         Returns:
-            PydEquipment: _description_
+            PydEquipment: pydantic equipment object
         """
         from backend.validators.pydant import PydEquipment
         return PydEquipment(
@@ -1179,7 +1190,6 @@ class Equipment(BaseClass):
 class EquipmentRole(BaseClass):
     """
     Abstract roles for equipment

     """
-
     id = Column(INTEGER, primary_key=True) #: Role id, primary key
@@ -1331,7 +1341,7 @@ class SubmissionEquipmentAssociation(BaseClass):
     equipment = relationship(Equipment, back_populates="equipment_submission_associations") #: associated equipment

     def __repr__(self):
-        return f"<SubmissionEquipmentAssociation({self.submission.rsl_plate_num}&{self.equipment.name})>"
+        return f"<SubmissionEquipmentAssociation({self.submission.rsl_plate_num} & {self.equipment.name})>"

     def __init__(self, submission, equipment, role: str = "None"):
         self.submission = submission
@@ -107,6 +107,12 @@ class BasicSubmission(BaseClass):

     @classmethod
     def jsons(cls) -> List[str]:
+        """
+        Get list of JSON db columns
+
+        Returns:
+            List[str]: List of column names
+        """
         output = [item.name for item in cls.__table__.columns if isinstance(item.type, JSON)]
         if issubclass(cls, BasicSubmission) and not cls.__name__ == "BasicSubmission":
             output += BasicSubmission.jsons()
@@ -114,6 +120,12 @@ class BasicSubmission(BaseClass):

     @classmethod
     def timestamps(cls) -> List[str]:
+        """
+        Get list of TIMESTAMP columns
+
+        Returns:
+            List[str]: List of column names
+        """
         output = [item.name for item in cls.__table__.columns if isinstance(item.type, TIMESTAMP)]
         if issubclass(cls, BasicSubmission) and not cls.__name__ == "BasicSubmission":
             output += BasicSubmission.timestamps()
@@ -122,7 +134,7 @@ class BasicSubmission(BaseClass):
     # TODO: Beef up this to include info_map from DB
     @classmethod
     def get_default_info(cls, *args):
-        # Create defaults for all submission_types
+        # NOTE: Create defaults for all submission_types
         parent_defs = super().get_default_info()
         recover = ['filepath', 'samples', 'csv', 'comment', 'equipment']
         dicto = dict(
@@ -132,9 +144,7 @@ class BasicSubmission(BaseClass):
             # NOTE: Fields not placed in ui form
             form_ignore=['reagents', 'ctx', 'id', 'cost', 'extraction_info', 'signed_by', 'comment'] + recover,
             # NOTE: Fields not placed in ui form to be moved to pydantic
-            form_recover=recover,
-            # parser_ignore=['samples', 'signed_by'] + [item for item in cls.jsons() if item != "comment"],
-            # excel_ignore=[],
+            form_recover=recover
         )
         # logger.debug(dicto['singles'])
         # NOTE: Singles tells the query which fields to set limit to 1
@@ -151,7 +161,6 @@ class BasicSubmission(BaseClass):
         st = cls.get_submission_type()
         if st is None:
             logger.error("No default info for BasicSubmission.")
-            # return output
         else:
             output['submission_type'] = st.name
             for k, v in st.defaults.items():
@@ -169,16 +178,37 @@ class BasicSubmission(BaseClass):
         return output

     @classmethod
-    def get_submission_type(cls):
+    def get_submission_type(cls) -> SubmissionType:
+        """
+        Gets the SubmissionType associated with this class
+
+        Returns:
+            SubmissionType: SubmissionType with name equal to this polymorphic identity
+        """
         name = cls.__mapper_args__['polymorphic_identity']
         return SubmissionType.query(name=name)

     @classmethod
-    def construct_info_map(cls, mode:Literal['read', 'write']):
+    def construct_info_map(cls, mode:Literal["read", "write"]) -> dict:
+        """
+        Method to call submission type's construct info map.
+
+        Args:
+            mode (Literal["read", "write"]): Which map to construct.
+
+        Returns:
+            dict: Map of info locations.
+        """
         return cls.get_submission_type().construct_info_map(mode=mode)

     @classmethod
-    def construct_sample_map(cls):
+    def construct_sample_map(cls) -> dict:
+        """
+        Method to call submission type's construct_sample_map
+
+        Returns:
+            dict: sample location map
+        """
         return cls.get_submission_type().construct_sample_map()

     def to_dict(self, full_data: bool = False, backup: bool = False, report: bool = False) -> dict:
@@ -192,7 +222,7 @@ class BasicSubmission(BaseClass):
         Returns:
             dict: dictionary used in submissions summary and details
         """
-        # get lab from nested organization object
+        # NOTE: get lab from nested organization object
         # logger.debug(f"Converting {self.rsl_plate_num} to dict...")
         try:
             sub_lab = self.submitting_lab.name
@@ -202,12 +232,12 @@ class BasicSubmission(BaseClass):
             sub_lab = sub_lab.replace("_", " ").title()
         except AttributeError:
             pass
-        # get extraction kit name from nested kit object
+        # NOTE: get extraction kit name from nested kit object
         try:
             ext_kit = self.extraction_kit.name
         except AttributeError:
             ext_kit = None
-        # load scraped extraction info
+        # NOTE: load scraped extraction info
         try:
             ext_info = self.extraction_info
         except TypeError:
@@ -324,7 +354,7 @@ class BasicSubmission(BaseClass):

     def make_plate_map(self, plate_rows: int = 8, plate_columns=12) -> str:
         """
-        Constructs an html based plate map.
+        Constructs an html based plate map for submission details.

         Args:
             sample_list (list): List of submission samples
@@ -386,7 +416,7 @@ class BasicSubmission(BaseClass):
         subs = [item.to_dict() for item in cls.query(submission_type=submission_type, limit=limit, chronologic=chronologic)]
         # logger.debug(f"Got {len(subs)} submissions.")
         df = pd.DataFrame.from_records(subs)
-        # Exclude sub information
+        # NOTE: Exclude sub information
         for item in ['controls', 'extraction_info', 'pcr_info', 'comment', 'comments', 'samples', 'reagents',
                      'equipment', 'gel_info', 'gel_image', 'dna_core_submission_number', 'gel_controls']:
             try:
@@ -414,9 +444,6 @@ class BasicSubmission(BaseClass):
                 # logger.debug(f"Looking up organization: {value}")
                 field_value = Organization.query(name=value)
                 # logger.debug(f"Got {field_value} for organization {value}")
-            # case "submitter_plate_num":
-            #     # logger.debug(f"Submitter plate id: {value}")
-            #     field_value = value
             case "samples":
                 for sample in value:
                     # logger.debug(f"Parsing {sample} to sql.")
@@ -436,17 +463,6 @@ class BasicSubmission(BaseClass):
                 field_value = value
             case "ctx" | "csv" | "filepath" | "equipment":
                 return
-            # case "comment":
-            #     if value == "" or value == None or value == 'null':
-            #         field_value = None
-            #     else:
-            #         field_value = dict(name=getuser(), text=value, time=datetime.now())
-            #     # if self.comment is None:
-            #     #     self.comment = [field_value]
-            #     # else:
-            #     #     self.comment.append(field_value)
-            #     self.update_json(field=key, value=field_value)
-            #     return
             case item if item in self.jsons():
                 logger.debug(f"Setting JSON attribute.")
                 existing = self.__getattribute__(key)
@@ -1852,13 +1868,6 @@ class WastewaterArtic(BasicSubmission):
         set_plate = None
         for assoc in self.submission_sample_associations:
             dicto = assoc.to_sub_dict()
-            # old_sub = assoc.sample.get_previous_ww_submission(current_artic_submission=self)
-            # try:
-            #     dicto['plate_name'] = old_sub.rsl_plate_num
-            # except AttributeError:
-            #     dicto['plate_name'] = ""
-            # old_assoc = WastewaterAssociation.query(submission=old_sub, sample=assoc.sample, limit=1)
-            # dicto['well'] = f"{row_map[old_assoc.row]}{old_assoc.column}"
             for item in self.source_plates:
                 old_plate = WastewaterAssociation.query(submission=item['plate'], sample=assoc.sample, limit=1)
                 if old_plate is not None:
@@ -1879,6 +1888,12 @@ class WastewaterArtic(BasicSubmission):
         events['Gel Box'] = self.gel_box
         return events

+    def set_attribute(self, key: str, value):
+        super().set_attribute(key=key, value=value)
+        if key == 'gel_info':
+            if len(self.gel_info) > 3:
+                self.gel_info = self.gel_info[-3:]
+
     def gel_box(self, obj):
         """
         Creates widget to perform gel viewing operations
@@ -543,7 +543,6 @@ class EquipmentParser(object):
     def __init__(self, xl: Workbook, submission_type: str|SubmissionType) -> None:
         if isinstance(submission_type, str):
             submission_type = SubmissionType.query(name=submission_type)
-
         self.submission_type = submission_type
         self.xl = xl
         self.map = self.fetch_equipment_map()
@@ -555,7 +554,6 @@ class EquipmentParser(object):
         Returns:
             List[dict]: List of locations
         """
-        # submission_type = SubmissionType.query(name=self.submission_type)
         return self.submission_type.construct_equipment_map()

     def get_asset_number(self, input: str) -> str:
@@ -569,7 +567,7 @@ class EquipmentParser(object):
             str: asset number
         """
         regex = Equipment.get_regex()
-        logger.debug(f"Using equipment regex: {regex} on {input}")
+        # logger.debug(f"Using equipment regex: {regex} on {input}")
         try:
             return regex.search(input).group().strip("-")
         except AttributeError:
@@ -582,11 +580,10 @@ class EquipmentParser(object):
         Returns:
             List[PydEquipment]: list of equipment
         """
-        logger.debug(f"Equipment parser going into parsing: {pformat(self.__dict__)}")
+        # logger.debug(f"Equipment parser going into parsing: {pformat(self.__dict__)}")
         output = []
         # logger.debug(f"Sheets: {sheets}")
         for sheet in self.xl.sheetnames:
-            # df = self.xl.parse(sheet, header=None, dtype=object)
             ws = self.xl[sheet]
             try:
                 relevant = [item for item in self.map if item['sheet'] == sheet]
@@ -595,7 +592,6 @@ class EquipmentParser(object):
             # logger.debug(f"Relevant equipment: {pformat(relevant)}")
             previous_asset = ""
             for equipment in relevant:
-                # asset = df.iat[equipment['name']['row']-1, equipment['name']['column']-1]
                 asset = ws.cell(equipment['name']['row'], equipment['name']['column'])
                 if not check_not_nan(asset):
                     asset = previous_asset
@@ -603,7 +599,6 @@ class EquipmentParser(object):
                     previous_asset = asset
                 asset = self.get_asset_number(input=asset)
                 eq = Equipment.query(asset_number=asset)
-                # process = df.iat[equipment['process']['row']-1, equipment['process']['column']-1]
                 process = ws.cell(row=equipment['process']['row'], column=equipment['process']['column'])
                 try:
                     output.append(
@@ -614,72 +609,6 @@ class EquipmentParser(object):
         # logger.debug(f"Here is the output so far: {pformat(output)}")
         return output


-# class PCRParser(object):
-# """
-# Object to pull data from Design and Analysis PCR export file.
-# """
-#
-# def __init__(self, filepath: Path | None = None) -> None:
-# """
-# Initializes object.
-#
-# Args:
-# filepath (Path | None, optional): file to parse. Defaults to None.
-# """
-# logger.debug(f"Parsing {filepath.__str__()}")
-# if filepath == None:
-# logger.error(f"No filepath given.")
-# self.xl = None
-# else:
-# try:
-# self.xl = pd.ExcelFile(filepath.__str__())
-# except ValueError as e:
-# logger.error(f"Incorrect value: {e}")
-# self.xl = None
-# except PermissionError:
-# logger.error(f"Couldn't get permissions for {filepath.__str__()}. Operation might have been cancelled.")
-# return
-# self.parse_general(sheet_name="Results")
-# namer = RSLNamer(filename=filepath.__str__())
-# self.plate_num = namer.parsed_name
-# self.submission_type = namer.submission_type
-# logger.debug(f"Set plate number to {self.plate_num} and type to {self.submission_type}")
-# parser = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type)
-# self.samples = parser.parse_pcr(xl=self.xl, rsl_number=self.plate_num)
-#
-# def parse_general(self, sheet_name: str):
-# """
-# Parse general info rows for all types of PCR results
-#
-# Args:
-# sheet_name (str): Name of sheet in excel workbook that holds info.
-# """
-# self.pcr = {}
-# df = self.xl.parse(sheet_name=sheet_name, dtype=object).fillna("")
-# self.pcr['comment'] = df.iloc[0][1]
-# self.pcr['operator'] = df.iloc[1][1]
-# self.pcr['barcode'] = df.iloc[2][1]
-# self.pcr['instrument'] = df.iloc[3][1]
-# self.pcr['block_type'] = df.iloc[4][1]
-# self.pcr['instrument_name'] = df.iloc[5][1]
-# self.pcr['instrument_serial'] = df.iloc[6][1]
-# self.pcr['heated_cover_serial'] = df.iloc[7][1]
-# self.pcr['block_serial'] = df.iloc[8][1]
-# self.pcr['run-start'] = df.iloc[9][1]
-# self.pcr['run_end'] = df.iloc[10][1]
-# self.pcr['run_duration'] = df.iloc[11][1]
-# self.pcr['sample_volume'] = df.iloc[12][1]
-# self.pcr['cover_temp'] = df.iloc[13][1]
-# self.pcr['passive_ref'] = df.iloc[14][1]
-# self.pcr['pcr_step'] = df.iloc[15][1]
-# self.pcr['quant_cycle_method'] = df.iloc[16][1]
-# self.pcr['analysis_time'] = df.iloc[17][1]
-# self.pcr['software'] = df.iloc[18][1]
-# self.pcr['plugin'] = df.iloc[19][1]
-# self.pcr['exported_on'] = df.iloc[20][1]
-# self.pcr['imported_by'] = getuser()
-
-
 class PCRParser(object):
     """Object to pull data from Design and Analysis PCR export file."""
@@ -690,7 +619,7 @@ class PCRParser(object):
         Args:
             filepath (Path | None, optional): file to parse. Defaults to None.
         """
-        logger.debug(f'Parsing {filepath.__str__()}')
+        # logger.debug(f'Parsing {filepath.__str__()}')
         if filepath is None:
             logger.error('No filepath given.')
             self.xl = None
@@ -27,7 +27,7 @@ def make_report_xlsx(records:list[dict]) -> Tuple[DataFrame, DataFrame]:
     # aggregate cost and sample count columns
     df2 = df.groupby(["Submitting Lab", "Extraction Kit"]).agg({'Extraction Kit':'count', 'Cost': 'sum', 'Sample Count':'sum'})
     df2 = df2.rename(columns={"Extraction Kit": 'Run Count'})
-    logger.debug(f"Output daftaframe for xlsx: {df2.columns}")
+    # logger.debug(f"Output daftaframe for xlsx: {df2.columns}")
     df = df.drop('id', axis=1)
     df = df.sort_values(['Submitting Lab', "Submitted Date"])
     return df, df2
@@ -47,13 +47,13 @@ def make_report_html(df:DataFrame, start_date:date, end_date:date) -> str:
     """
     old_lab = ""
     output = []
-    logger.debug(f"Report DataFrame: {df}")
+    # logger.debug(f"Report DataFrame: {df}")
     for ii, row in enumerate(df.iterrows()):
-        logger.debug(f"Row {ii}: {row}")
+        # logger.debug(f"Row {ii}: {row}")
         lab = row[0][0]
-        logger.debug(type(row))
-        logger.debug(f"Old lab: {old_lab}, Current lab: {lab}")
-        logger.debug(f"Name: {row[0][1]}")
+        # logger.debug(type(row))
+        # logger.debug(f"Old lab: {old_lab}, Current lab: {lab}")
+        # logger.debug(f"Name: {row[0][1]}")
         data = [item for item in row[1]]
         kit = dict(name=row[0][1], cost=data[1], run_count=int(data[0]), sample_count=int(data[2]))
         # if this is the same lab as before add together
@@ -67,7 +67,7 @@ def make_report_html(df:DataFrame, start_date:date, end_date:date) -> str:
         adder = dict(lab=lab, kits=[kit], total_cost=kit['cost'], total_samples=kit['sample_count'], total_runs=kit['run_count'])
         output.append(adder)
         old_lab = lab
-    logger.debug(output)
+    # logger.debug(output)
     dicto = {'start_date':start_date, 'end_date':end_date, 'labs':output}#, "table":table}
     temp = env.get_template('summary_report.html')
     html = temp.render(input=dicto)
@@ -91,14 +91,14 @@ def convert_data_list_to_df(input:list[dict], subtype:str|None=None) -> DataFrame:
     for column in df.columns:
         if "percent" in column:
             count_col = [item for item in df.columns if "count" in item][0]
-            # The actual percentage from kraken was off due to exclusion of NaN, recalculating.
+            # NOTE: The actual percentage from kraken was off due to exclusion of NaN, recalculating.
             df[column] = 100 * df[count_col] / df.groupby('name')[count_col].transform('sum')
         if column not in safe:
             if subtype != None and column != subtype:
                 del df[column]
-    # move date of sample submitted on same date as previous ahead one.
+    # NOTE: move date of sample submitted on same date as previous ahead one.
     df = displace_date(df)
-    # ad hoc method to make data labels more accurate.
+    # NOTE: ad hoc method to make data labels more accurate.
     df = df_column_renamer(df=df)
     return df

@@ -131,8 +131,8 @@ def displace_date(df:DataFrame) -> DataFrame:
     Returns:
         DataFrame: output dataframe with dates incremented.
     """
-    logger.debug(f"Unique items: {df['name'].unique()}")
-    # get submitted dates for each control
+    # logger.debug(f"Unique items: {df['name'].unique()}")
+    # NOTE: get submitted dates for each control
     dict_list = [dict(name=item, date=df[df.name == item].iloc[0]['submitted_date']) for item in sorted(df['name'].unique())]
     previous_dates = []
     for _, item in enumerate(dict_list):
@@ -157,10 +157,10 @@ def check_date(df:DataFrame, item:dict, previous_dates:list) -> Tuple[DataFrame,
         check = False
     previous_dates.append(item['date'])
     if check:
-        logger.debug(f"We found one! Increment date!\n\t{item['date']} to {item['date'] + timedelta(days=1)}")
-        # get df locations where name == item name
+        # logger.debug(f"We found one! Increment date!\n\t{item['date']} to {item['date'] + timedelta(days=1)}")
+        # NOTE: get df locations where name == item name
         mask = df['name'] == item['name']
-        # increment date in dataframe
+        # NOTE: increment date in dataframe
         df.loc[mask, 'submitted_date'] = df.loc[mask, 'submitted_date'].apply(lambda x: x + timedelta(days=1))
         item['date'] += timedelta(days=1)
         passed = False
@@ -170,9 +170,9 @@ def check_date(df:DataFrame, item:dict, previous_dates:list) -> Tuple[DataFrame,
     # logger.debug(f"DF: {type(df)}, previous_dates: {type(previous_dates)}")
     # if run didn't lead to changed date, return values
     if passed:
-        logger.debug(f"Date check passed, returning.")
+        # logger.debug(f"Date check passed, returning.")
         return df, previous_dates
-    # if date was changed, rerun with new date
+    # NOTE: if date was changed, rerun with new date
     else:
         logger.warning(f"Date check failed, running recursion")
         df, previous_dates = check_date(df, item, previous_dates)
@@ -31,7 +31,6 @@ class SheetWriter(object):
             case 'filepath':
                 self.__setattr__(k, v)
             case 'submission_type':
-                # self.__setattr__('submission_type', submission.submission_type['value'])
                 self.sub[k] = v['value']
                 self.submission_type = SubmissionType.query(name=v['value'])
                 self.sub_object = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type)
@@ -40,7 +39,7 @@ class SheetWriter(object):
                     self.sub[k] = v['value']
                 else:
                     self.sub[k] = v
-        logger.debug(f"\n\nWriting to {submission.filepath.__str__()}\n\n")
+        # logger.debug(f"\n\nWriting to {submission.filepath.__str__()}\n\n")

         if self.filepath.stem.startswith("tmp"):
             template = self.submission_type.template_file
@@ -95,7 +94,7 @@ class InfoWriter(object):
         self.xl = xl
         map = submission_type.construct_info_map(mode='write')
         self.info = self.reconcile_map(info_dict, map)
-        logger.debug(pformat(self.info))
+        # logger.debug(pformat(self.info))

     def reconcile_map(self, info_dict: dict, map: dict) -> dict:
         output = {}
@@ -121,8 +120,7 @@ class InfoWriter(object):
                 logger.error(f"No locations for {k}, skipping")
                 continue
             for loc in locations:
-                logger.debug(f"Writing {k} to {loc['sheet']}, row: {loc['row']}, column: {loc['column']}")
+                # logger.debug(f"Writing {k} to {loc['sheet']}, row: {loc['row']}, column: {loc['column']}")
                 sheet = self.xl[loc['sheet']]
                 sheet.cell(row=loc['row'], column=loc['column'], value=v['value'])
         return self.sub_object.custom_info_writer(self.xl, info=self.info)
@@ -152,7 +150,7 @@ class ReagentWriter(object):
             try:
                 dicto = dict(value=v, row=mp_info[k]['row'], column=mp_info[k]['column'])
             except KeyError as e:
-                # logger.error(f"Keyerror: {e}")
+                logger.error(f"KeyError: {e}")
                 dicto = v
             placeholder[k] = dicto
             placeholder['sheet'] = mp_info['sheet']
@@ -197,7 +195,6 @@ class SampleWriter(object):
     def write_samples(self):
         sheet = self.xl[self.map['sheet']]
         columns = self.map['sample_columns']
-        # rows = range(self.map['start_row'], self.map['end_row']+1)
         for ii, sample in enumerate(self.samples):
             row = self.map['start_row'] + (sample['submission_rank'] - 1)
             for k, v in sample.items():
@@ -229,8 +226,6 @@ class EquipmentWriter(object):
                 for jj, (k, v) in enumerate(equipment.items(), start=1):
                     dicto = dict(value=v, row=ii, column=jj)
                     placeholder[k] = dicto
-
-                # output.append(placeholder)
             else:
                 for jj, (k, v) in enumerate(equipment.items(), start=1):
                     try:
@@ -258,8 +253,8 @@ class EquipmentWriter(object):
             for k, v in equipment.items():
                 if not isinstance(v, dict):
                     continue
-                logger.debug(
-                    f"Writing {k}: {v['value']} to {equipment['sheet']}, row: {v['row']}, column: {v['column']}")
+                # logger.debug(
+                #     f"Writing {k}: {v['value']} to {equipment['sheet']}, row: {v['row']}, column: {v['column']}")
                 if isinstance(v['value'], list):
                     v['value'] = v['value'][0]
                 try:
@@ -23,7 +23,7 @@ class RSLNamer(object):
         if self.submission_type is None:
             # logger.debug("Creating submission type because none exists")
             self.submission_type = self.retrieve_submission_type(filename=filename)
-            logger.debug(f"got submission type: {self.submission_type}")
+            # logger.debug(f"got submission type: {self.submission_type}")
         if self.submission_type is not None:
             # logger.debug("Retrieving BasicSubmission subclass")
             self.sub_object = BasicSubmission.find_polymorphic_subclass(polymorphic_identity=self.submission_type)
@@ -47,7 +47,7 @@ class RSLNamer(object):
         """
         match filename:
             case Path():
-                logger.debug(f"Using path method for {filename}.")
+                # logger.debug(f"Using path method for {filename}.")
                 if filename.exists():
                     wb = load_workbook(filename)
                     try:
@@ -67,7 +67,7 @@ class RSLNamer(object):
                     submission_type = cls.retrieve_submission_type(filename=filename.stem.__str__())
             case str():
                 regex = BasicSubmission.construct_regex()
-                logger.debug(f"Using string method for {filename}.")
+                # logger.debug(f"Using string method for {filename}.")
                 m = regex.search(filename)
                 try:
                     submission_type = m.lastgroup
@@ -100,17 +100,17 @@ class RSLNamer(object):
             regex (str): string to construct pattern
             filename (str): string to be parsed
         """
-        logger.debug(f"Input string to be parsed: {filename}")
+        # logger.debug(f"Input string to be parsed: {filename}")
         if regex is None:
             regex = BasicSubmission.construct_regex()
         else:
             regex = re.compile(rf'{regex}', re.IGNORECASE | re.VERBOSE)
-        logger.debug(f"Using regex: {regex}")
+        # logger.debug(f"Using regex: {regex}")
         match filename:
             case Path():
                 m = regex.search(filename.stem)
             case str():
-                logger.debug(f"Using string method.")
+                # logger.debug(f"Using string method.")
                 m = regex.search(filename)
             case _:
                 m = None
@@ -121,7 +121,7 @@ class RSLNamer(object):
                 parsed_name = None
         else:
             parsed_name = None
-        logger.debug(f"Got parsed submission name: {parsed_name}")
+        # logger.debug(f"Got parsed submission name: {parsed_name}")
         return parsed_name

     @classmethod
@@ -167,8 +167,8 @@ class RSLNamer(object):
         Returns:
             str: output file name.
         """
-        logger.debug(f"Kwargs: {kwargs}")
-        logger.debug(f"Template: {template}")
+        # logger.debug(f"Kwargs: {kwargs}")
+        # logger.debug(f"Template: {template}")
         environment = jinja_template_loading()
         template = environment.from_string(template)
         return template.render(**kwargs)
@@ -134,15 +134,15 @@ class PydReagent(BaseModel):
         # logger.debug("Adding extra fields.")
         if self.model_extra != None:
             self.__dict__.update(self.model_extra)
-        logger.debug(f"Reagent SQL constructor is looking up type: {self.type}, lot: {self.lot}")
+        # logger.debug(f"Reagent SQL constructor is looking up type: {self.type}, lot: {self.lot}")
         reagent = Reagent.query(lot_number=self.lot, name=self.name)
-        logger.debug(f"Result: {reagent}")
+        # logger.debug(f"Result: {reagent}")
         if reagent is None:
             reagent = Reagent()
             for key, value in self.__dict__.items():
                 if isinstance(value, dict):
                     value = value['value']
-                logger.debug(f"Reagent info item for {key}: {value}")
+                # logger.debug(f"Reagent info item for {key}: {value}")
                 # set fields based on keys in dictionary
                 match key:
                     case "lot":
@@ -191,7 +191,7 @@ class PydSample(BaseModel, extra='allow'):
     @model_validator(mode='after')
     @classmethod
     def validate_model(cls, data):
-        logger.debug(f"Data for pydsample: {data}")
+        # logger.debug(f"Data for pydsample: {data}")
         model = BasicSample.find_polymorphic_subclass(polymorphic_identity=data.sample_type)
         for k, v in data.model_extra.items():
             print(k, v)
@@ -200,7 +200,7 @@ class PydSample(BaseModel, extra='allow'):
                 v = datetime.strptime(v, "%Y-%m-%d")
             data.__setattr__(k, v)
         # print(dir(data))
-        logger.debug(f"Data coming out of validation: {pformat(data)}")
+        # logger.debug(f"Data coming out of validation: {pformat(data)}")
         return data

     @field_validator("row", "column", "assoc_id", "submission_rank")
@@ -233,7 +233,7 @@ class PydSample(BaseModel, extra='allow'):
|
|||||||
"""
|
"""
|
||||||
report = None
|
report = None
|
||||||
self.__dict__.update(self.model_extra)
|
self.__dict__.update(self.model_extra)
|
||||||
logger.debug(f"Here is the incoming sample dict: \n{self.__dict__}")
|
# logger.debug(f"Here is the incoming sample dict: \n{self.__dict__}")
|
||||||
instance = BasicSample.query_or_create(sample_type=self.sample_type, submitter_id=self.submitter_id)
|
instance = BasicSample.query_or_create(sample_type=self.sample_type, submitter_id=self.submitter_id)
|
||||||
for key, value in self.__dict__.items():
|
for key, value in self.__dict__.items():
|
||||||
match key:
|
match key:
|
||||||
@@ -246,8 +246,8 @@ class PydSample(BaseModel, extra='allow'):
|
|||||||
if submission is not None:
|
if submission is not None:
|
||||||
assoc_type = self.sample_type.replace("Sample", "").strip()
|
assoc_type = self.sample_type.replace("Sample", "").strip()
|
||||||
for row, column, aid, submission_rank in zip(self.row, self.column, self.assoc_id, self.submission_rank):
|
for row, column, aid, submission_rank in zip(self.row, self.column, self.assoc_id, self.submission_rank):
|
||||||
logger.debug(f"Looking up association with identity: ({submission.submission_type_name} Association)")
|
# logger.debug(f"Looking up association with identity: ({submission.submission_type_name} Association)")
|
||||||
logger.debug(f"Looking up association with identity: ({assoc_type} Association)")
|
# logger.debug(f"Looking up association with identity: ({assoc_type} Association)")
|
||||||
association = SubmissionSampleAssociation.query_or_create(association_type=f"{assoc_type} Association",
|
association = SubmissionSampleAssociation.query_or_create(association_type=f"{assoc_type} Association",
|
||||||
submission=submission,
|
submission=submission,
|
||||||
sample=instance,
|
sample=instance,
|
||||||
@@ -357,7 +357,7 @@ class PydSubmission(BaseModel, extra='allow'):
 @field_validator('equipment', mode='before')
 @classmethod
 def convert_equipment_dict(cls, value):
-logger.debug(f"Equipment: {value}")
+# logger.debug(f"Equipment: {value}")
 if isinstance(value, dict):
 return value['value']
 return value
@@ -381,7 +381,7 @@ class PydSubmission(BaseModel, extra='allow'):
 @field_validator("submitted_date", mode="before")
 @classmethod
 def rescue_date(cls, value):
-logger.debug(f"\n\nDate coming into pydantic: {value}\n\n")
+# logger.debug(f"\n\nDate coming into pydantic: {value}\n\n")
 try:
 check = value['value'] == None
 except TypeError:
@@ -426,7 +426,7 @@ class PydSubmission(BaseModel, extra='allow'):
 @classmethod
 def lookup_submitting_lab(cls, value):
 if isinstance(value['value'], str):
-logger.debug(f"Looking up organization {value['value']}")
+# logger.debug(f"Looking up organization {value['value']}")
 try:
 value['value'] = Organization.query(name=value['value']).name
 except AttributeError:
@@ -457,12 +457,12 @@ class PydSubmission(BaseModel, extra='allow'):
 @field_validator("rsl_plate_num")
 @classmethod
 def rsl_from_file(cls, value, values):
-logger.debug(f"RSL-plate initial value: {value['value']} and other values: {values.data}")
+# logger.debug(f"RSL-plate initial value: {value['value']} and other values: {values.data}")
 sub_type = values.data['submission_type']['value']
 if check_not_nan(value['value']):
 return value
 else:
-logger.debug("Constructing plate name.")
+# logger.debug("Constructing plate name.")
 output = RSLNamer(filename=values.data['filepath'].__str__(), sub_type=sub_type,
 data=values.data).parsed_name
 return dict(value=output, missing=True)
@@ -649,32 +649,32 @@ class PydSubmission(BaseModel, extra='allow'):
 rsl_plate_num=self.rsl_plate_num['value'])
 result = Result(msg=msg, code=code)
 self.handle_duplicate_samples()
-logger.debug(f"Here's our list of duplicate removed samples: {self.samples}")
+# logger.debug(f"Here's our list of duplicate removed samples: {self.samples}")
 # for key, value in self.__dict__.items():
 for key, value in dicto.items():
 if isinstance(value, dict):
 value = value['value']
-logger.debug(f"Setting {key} to {value}")
+# logger.debug(f"Setting {key} to {value}")
 match key:
 case "reagents":
 if code == 1:
 instance.submission_reagent_associations = []
-logger.debug(f"Looking through {self.reagents}")
+# logger.debug(f"Looking through {self.reagents}")
 for reagent in self.reagents:
 reagent, assoc = reagent.toSQL(submission=instance)
-logger.debug(f"Association: {assoc}")
+# logger.debug(f"Association: {assoc}")
 if assoc is not None:# and assoc not in instance.submission_reagent_associations:
 instance.submission_reagent_associations.append(assoc)
 # instance.reagents.append(reagent)
 case "samples":
 for sample in self.samples:
 sample, associations, _ = sample.toSQL(submission=instance)
-logger.debug(f"Sample SQL object to be added to submission: {sample.__dict__}")
+# logger.debug(f"Sample SQL object to be added to submission: {sample.__dict__}")
 for assoc in associations:
 if assoc is not None and assoc not in instance.submission_sample_associations:
 instance.submission_sample_associations.append(assoc)
 case "equipment":
-logger.debug(f"Equipment: {pformat(self.equipment)}")
+# logger.debug(f"Equipment: {pformat(self.equipment)}")
 try:
 if equip is None:
 continue
@@ -684,11 +684,11 @@ class PydSubmission(BaseModel, extra='allow'):
 equip, association = equip.toSQL(submission=instance)
 if association is not None:
 association.save()
-logger.debug(
-f"Equipment association SQL object to be added to submission: {association.__dict__}")
+# logger.debug(
+# f"Equipment association SQL object to be added to submission: {association.__dict__}")
 instance.submission_equipment_associations.append(association)
 case item if item in instance.jsons():
-logger.debug(f"{item} is a json.")
+# logger.debug(f"{item} is a json.")
 try:
 ii = value.items()
 except AttributeError:
@@ -701,38 +701,38 @@ class PydSubmission(BaseModel, extra='allow'):
 try:
 instance.set_attribute(key=key, value=value)
 except AttributeError as e:
-logger.debug(f"Could not set attribute: {key} to {value} due to: \n\n {e}")
+logger.error(f"Could not set attribute: {key} to {value} due to: \n\n {e}")
 continue
 except KeyError:
 continue
 try:
-logger.debug(f"Calculating costs for procedure...")
+# logger.debug(f"Calculating costs for procedure...")
 instance.calculate_base_cost()
 except (TypeError, AttributeError) as e:
-logger.debug(f"Looks like that kit doesn't have cost breakdown yet due to: {e}, using full plate cost.")
+# logger.debug(f"Looks like that kit doesn't have cost breakdown yet due to: {e}, using full plate cost.")
 try:
 instance.run_cost = instance.extraction_kit.cost_per_run
 except AttributeError:
 instance.run_cost = 0
-logger.debug(f"Calculated base run cost of: {instance.run_cost}")
+# logger.debug(f"Calculated base run cost of: {instance.run_cost}")
 # Apply any discounts that are applicable for client and kit.
 try:
-logger.debug("Checking and applying discounts...")
+# logger.debug("Checking and applying discounts...")
 discounts = [item.amount for item in
 Discount.query(kit_type=instance.extraction_kit, organization=instance.submitting_lab)]
-logger.debug(f"We got discounts: {discounts}")
+# logger.debug(f"We got discounts: {discounts}")
 if len(discounts) > 0:
 discounts = sum(discounts)
 instance.run_cost = instance.run_cost - discounts
 except Exception as e:
 logger.error(f"An unknown exception occurred when calculating discounts: {e}")
 # We need to make sure there's a proper rsl plate number
-logger.debug(f"We've got a total cost of {instance.run_cost}")
-try:
-logger.debug(f"Constructed instance: {instance}")
-except AttributeError as e:
-logger.debug(f"Something went wrong constructing instance {self.rsl_plate_num}: {e}")
-logger.debug(f"Constructed submissions message: {msg}")
+# logger.debug(f"We've got a total cost of {instance.run_cost}")
+# try:
+# logger.debug(f"Constructed instance: {instance}")
+# except AttributeError as e:
+# logger.debug(f"Something went wrong constructing instance {self.rsl_plate_num}: {e}")
+# logger.debug(f"Constructed submissions message: {msg}")
 return instance, result

 def to_form(self, parent: QWidget):
@@ -777,26 +777,26 @@ class PydSubmission(BaseModel, extra='allow'):
 Report: Result object containing a message and any missing components.
 """
 report = Report()
-logger.debug(f"Extraction kit: {extraction_kit}. Is it a string? {isinstance(extraction_kit, str)}")
+# logger.debug(f"Extraction kit: {extraction_kit}. Is it a string? {isinstance(extraction_kit, str)}")
 if isinstance(extraction_kit, str):
 extraction_kit = dict(value=extraction_kit)
 if extraction_kit is not None and extraction_kit != self.extraction_kit['value']:
 self.extraction_kit['value'] = extraction_kit['value']
-logger.debug(f"Looking up {self.extraction_kit['value']}")
+# logger.debug(f"Looking up {self.extraction_kit['value']}")
 ext_kit = KitType.query(name=self.extraction_kit['value'])
 ext_kit_rtypes = [item.to_pydantic() for item in
 ext_kit.get_reagents(required=True, submission_type=self.submission_type['value'])]
-logger.debug(f"Kit reagents: {ext_kit_rtypes}")
-logger.debug(f"Submission reagents: {self.reagents}")
+# logger.debug(f"Kit reagents: {ext_kit_rtypes}")
+# logger.debug(f"Submission reagents: {self.reagents}")
 # Exclude any reagenttype found in this pyd not expected in kit.
 expected_check = [item.type for item in ext_kit_rtypes]
 output_reagents = [rt for rt in self.reagents if rt.type in expected_check]
-logger.debug(f"Already have these reagent types: {output_reagents}")
+# logger.debug(f"Already have these reagent types: {output_reagents}")
 missing_check = [item.type for item in output_reagents]
 missing_reagents = [rt for rt in ext_kit_rtypes if rt.type not in missing_check]
 missing_reagents += [rt for rt in output_reagents if rt.missing]
 output_reagents += [rt for rt in missing_reagents if rt not in output_reagents]
-logger.debug(f"Missing reagents types: {missing_reagents}")
+# logger.debug(f"Missing reagents types: {missing_reagents}")
 # if lists are equal return no problem
 if len(missing_reagents) == 0:
 result = None
@@ -873,7 +873,7 @@ class PydReagentType(BaseModel):
 instance: ReagentType = ReagentType.query(name=self.name)
 if instance == None:
 instance = ReagentType(name=self.name, eol_ext=self.eol_ext)
-logger.debug(f"This is the reagent type instance: {instance.__dict__}")
+# logger.debug(f"This is the reagent type instance: {instance.__dict__}")
 try:
 assoc = KitTypeReagentTypeAssociation.query(reagent_type=instance, kit_type=kit)
 except StatementError:
@@ -48,7 +48,7 @@ def create_charts(ctx:Settings, df:pd.DataFrame, ytitle:str|None=None) -> Figure
 # Set descending for any columns that have "{mode}" in the header.
 ascending = [False if item == "target" else True for item in sorts]
 df = df.sort_values(by=sorts, ascending=ascending)
-logger.debug(df[df.isna().any(axis=1)])
+# logger.debug(df[df.isna().any(axis=1)])
 # actual chart construction is done by
 fig = construct_chart(df=df, modes=modes, ytitle=ytitle)
 return fig
@@ -25,7 +25,7 @@ logger.info("Hello, I am a logger")
 class App(QMainWindow):

 def __init__(self, ctx: Settings = None):
-logger.debug(f"Initializing main window...")
+# logger.debug(f"Initializing main window...")
 super().__init__()
 self.ctx = ctx
 self.last_dir = ctx.directory_path
@@ -58,7 +58,7 @@ class App(QMainWindow):
 """
 adds items to menu bar
 """
-logger.debug(f"Creating menu bar...")
+# logger.debug(f"Creating menu bar...")
 menuBar = self.menuBar()
 fileMenu = menuBar.addMenu("&File")
 # Creating menus using a title
@@ -79,7 +79,7 @@ class App(QMainWindow):
 """
 adds items to toolbar
 """
-logger.debug(f"Creating toolbar...")
+# logger.debug(f"Creating toolbar...")
 toolbar = QToolBar("My main toolbar")
 self.addToolBar(toolbar)
 toolbar.addAction(self.addReagentAction)
@@ -90,7 +90,7 @@ class App(QMainWindow):
 """
 creates actions
 """
-logger.debug(f"Creating actions...")
+# logger.debug(f"Creating actions...")
 self.importAction = QAction("&Import Submission", self)
 # self.importPCRAction = QAction("&Import PCR Results", self)
 self.addReagentAction = QAction("Add Reagent", self)
@@ -107,7 +107,7 @@ class App(QMainWindow):
 """
 connect menu and tool bar item to functions
 """
-logger.debug(f"Connecting actions...")
+# logger.debug(f"Connecting actions...")
 self.importAction.triggered.connect(self.table_widget.formwidget.importSubmission)
 # self.importPCRAction.triggered.connect(self.table_widget.formwidget.import_pcr_results)
 self.addReagentAction.triggered.connect(self.table_widget.formwidget.add_reagent)
@@ -134,7 +134,7 @@ class App(QMainWindow):
 url = Path(sys._MEIPASS).joinpath("files", "docs", "index.html")
 else:
 url = Path("docs\\build\\index.html").absolute()
-logger.debug(f"Attempting to open {url}")
+# logger.debug(f"Attempting to open {url}")
 webbrowser.get('windows-default').open(f"file://{url.__str__()}")

 def result_reporter(self):
@@ -144,11 +144,11 @@ class App(QMainWindow):
 Args:
 result (dict | None, optional): The result from a function. Defaults to None.
 """
-logger.debug(f"Running results reporter for: {self.report.results}")
+# logger.debug(f"Running results reporter for: {self.report.results}")
 if len(self.report.results) > 0:
-logger.debug(f"We've got some results!")
+# logger.debug(f"We've got some results!")
 for result in self.report.results:
-logger.debug(f"Showing result: {result}")
+# logger.debug(f"Showing result: {result}")
 if result != None:
 alert = result.report()
 if alert.exec():
@@ -164,17 +164,17 @@ class App(QMainWindow):
 def backup_database(self):
 month = date.today().strftime("%Y-%m")
 # day = date.today().strftime("%Y-%m-%d")
-logger.debug(f"Here is the db directory: {self.ctx.database_path}")
-logger.debug(f"Here is the backup directory: {self.ctx.backup_path}")
+# logger.debug(f"Here is the db directory: {self.ctx.database_path}")
+# logger.debug(f"Here is the backup directory: {self.ctx.backup_path}")
 current_month_bak = Path(self.ctx.backup_path).joinpath(f"submissions_backup-{month}").resolve().with_suffix(".db")
 if not current_month_bak.exists() and "demo" not in self.ctx.database_path.__str__():
-logger.debug("No backup found for this month, backing up database.")
+logger.info("No backup found for this month, backing up database.")
 shutil.copyfile(self.ctx.database_path, current_month_bak)

 class AddSubForm(QWidget):

 def __init__(self, parent:QWidget):
-logger.debug(f"Initializating subform...")
+# logger.debug(f"Initializating subform...")
 super(QWidget, self).__init__(parent)
 self.layout = QVBoxLayout(self)
 # Initialize tab screen
@@ -112,13 +112,13 @@ class ControlsViewer(QWidget):
 Tuple[QMainWindow, dict]: Collection of new main app window and result dict
 """
 report = Report()
-logger.debug(f"Control getter context: \n\tControl type: {self.con_type}\n\tMode: {self.mode}\n\tStart Date: {self.start_date}\n\tEnd Date: {self.end_date}")
+# logger.debug(f"Control getter context: \n\tControl type: {self.con_type}\n\tMode: {self.mode}\n\tStart Date: {self.start_date}\n\tEnd Date: {self.end_date}")
 # set the subtype for kraken
 if self.sub_typer.currentText() == "":
 self.subtype = None
 else:
 self.subtype = self.sub_typer.currentText()
-logger.debug(f"Subtype: {self.subtype}")
+# logger.debug(f"Subtype: {self.subtype}")
 # query all controls using the type/start and end dates from the gui
 controls = Control.query(control_type=self.con_type, start_date=self.start_date, end_date=self.end_date)
 # if no data found from query set fig to none for reporting in webview
@@ -129,7 +129,7 @@ class ControlsViewer(QWidget):
 data = [control.convert_by_mode(mode=self.mode) for control in controls]
 # flatten data to one dimensional list
 data = [item for sublist in data for item in sublist]
-logger.debug(f"Control objects going into df conversion: {type(data)}")
+# logger.debug(f"Control objects going into df conversion: {type(data)}")
 if data == []:
 self.report.add_result(Result(status="Critical", msg="No data found for controls in given date range."))
 return
@@ -141,13 +141,13 @@ class ControlsViewer(QWidget):
 title = f"{self.mode} - {self.subtype}"
 # send dataframe to chart maker
 fig = create_charts(ctx=self.app.ctx, df=df, ytitle=title)
-logger.debug(f"Updating figure...")
+# logger.debug(f"Updating figure...")
 # construct html for webview
 html = construct_html(figure=fig)
-logger.debug(f"The length of html code is: {len(html)}")
+# logger.debug(f"The length of html code is: {len(html)}")
 self.webengineview.setHtml(html)
 self.webengineview.update()
-logger.debug("Figure updated... I hope.")
+# logger.debug("Figure updated... I hope.")
 self.report.add_result(report)

 class ControlsDatePicker(QWidget):
@@ -17,9 +17,9 @@ class EquipmentUsage(QDialog):
 self.setWindowTitle("Equipment Checklist")
 self.used_equipment = self.submission.get_used_equipment()
 self.kit = self.submission.extraction_kit
-logger.debug(f"Existing equipment: {self.used_equipment}")
+# logger.debug(f"Existing equipment: {self.used_equipment}")
 self.opt_equipment = submission.submission_type.get_equipment()
-logger.debug(f"EquipmentRoles: {self.opt_equipment}")
+# logger.debug(f"EquipmentRoles: {self.opt_equipment}")
 self.layout = QVBoxLayout()
 self.setLayout(self.layout)
 self.populate_form()
@@ -115,9 +115,9 @@ class RoleComboBox(QWidget):
 Changes processes when equipment is changed
 """
 equip = self.box.currentText()
-logger.debug(f"Updating equipment: {equip}")
+# logger.debug(f"Updating equipment: {equip}")
 equip2 = [item for item in self.role.equipment if item.name==equip][0]
-logger.debug(f"Using: {equip2}")
+# logger.debug(f"Using: {equip2}")
 self.process.clear()
 self.process.addItems([item for item in equip2.processes if item in self.role.processes])

@@ -145,5 +145,5 @@ class ControlsForm(QWidget):
 dicto['values'].append(dict(name=label[1], value=le.text()))
 if label[0] not in [item['name'] for item in output]:
 output.append(dicto)
-logger.debug(pformat(output))
+# logger.debug(pformat(output))
 return output, self.comment_field.toPlainText()
@@ -93,10 +93,10 @@ class KitAdder(QWidget):
 # get form info
 info, reagents = self.parse_form()
 info = {k:v for k,v in info.items() if k in [column.name for column in self.columns] + ['kit_name', 'used_for']}
-logger.debug(f"kit info: {pformat(info)}")
-logger.debug(f"kit reagents: {pformat(reagents)}")
+# logger.debug(f"kit info: {pformat(info)}")
+# logger.debug(f"kit reagents: {pformat(reagents)}")
 info['reagent_types'] = reagents
-logger.debug(pformat(info))
+# logger.debug(pformat(info))
 # send to kit constructor
 kit = PydKit(name=info['kit_name'])
 for reagent in info['reagent_types']:
@@ -108,7 +108,7 @@ class KitAdder(QWidget):
 'expiry':reagent['expiry']
 }}
 kit.reagent_types.append(PydReagentType(name=reagent['rtname'], eol_ext=reagent['eol'], uses=uses))
-logger.debug(f"Output pyd object: {kit.__dict__}")
+# logger.debug(f"Output pyd object: {kit.__dict__}")
 sqlobj, result = kit.toSQL(self.ctx)
 report.add_result(result=result)
 sqlobj.save()
@@ -122,7 +122,7 @@ class KitAdder(QWidget):
 Returns:
 Tuple[dict, list]: dict=info, list=reagents
 """
-logger.debug(f"Hello from {self.__class__} parser!")
+# logger.debug(f"Hello from {self.__class__} parser!")
 info = {}
 reagents = []
 widgets = [widget for widget in self.findChildren(QWidget) if widget.objectName() not in self.ignore and not isinstance(widget.parent(), ReagentTypeForm)]
@@ -153,7 +153,7 @@ class ReagentTypeForm(QWidget):
 self.reagent_getter.setObjectName("rtname")
 # lookup all reagent type names from db
 lookup = ReagentType.query()
-logger.debug(f"Looked up ReagentType names: {lookup}")
+# logger.debug(f"Looked up ReagentType names: {lookup}")
 self.reagent_getter.addItems([item.name for item in lookup])
 self.reagent_getter.setEditable(True)
 grid.addWidget(self.reagent_getter,0,1)
@@ -205,14 +205,14 @@ class ReagentTypeForm(QWidget):
 Returns:
 dict: _description_
 """
-logger.debug(f"Hello from {self.__class__} parser!")
+# logger.debug(f"Hello from {self.__class__} parser!")
 info = {}
 info['eol'] = self.eol.value()
 info['sheet'] = self.location_sheet_name.text()
 info['rtname'] = self.reagent_getter.currentText()
 widgets = [widget for widget in self.findChildren(QWidget) if widget.objectName() not in self.ignore]
 for widget in widgets:
-logger.debug(f"Parsed widget: {widget.objectName()} of type {type(widget)} with parent {widget.parent()}")
+# logger.debug(f"Parsed widget: {widget.objectName()} of type {type(widget)} with parent {widget.parent()}")
 match widget:
 case QLineEdit():
 info[widget.objectName()] = widget.text()
@@ -225,7 +225,7 @@ class ReagentTypeForm(QWidget):
 key, sub_key = widget.objectName().split("_")
 if key not in info.keys():
 info[key] = {}
-logger.debug(f"Adding key {key}, {sub_key} and value {widget.value()} to {info}")
+# logger.debug(f"Adding key {key}, {sub_key} and value {widget.value()} to {info}")
 info[key][sub_key] = widget.value()
 return info

@@ -58,7 +58,7 @@ class AddReagentForm(QDialog):
 self.type_input = QComboBox()
 self.type_input.setObjectName('type')
 self.type_input.addItems([item.name for item in ReagentType.query()])
-logger.debug(f"Trying to find index of {reagent_type}")
+# logger.debug(f"Trying to find index of {reagent_type}")
 # convert input to user friendly string?
 try:
 reagent_type = reagent_type.replace("_", " ").title()
@@ -97,7 +97,7 @@ class AddReagentForm(QDialog):
 """
 Updates reagent names form field with examples from reagent type
 """
-logger.debug(self.type_input.currentText())
+# logger.debug(self.type_input.currentText())
 self.name_input.clear()
 lookup = Reagent.query(reagent_type=self.type_input.currentText())
 self.name_input.addItems(list(set([item.name for item in lookup])))
@@ -210,7 +210,7 @@ class LogParser(QDialog):
 """
 count: int = 0
 total: int = 0
-logger.debug(f"Current search term: {self.phrase_looker.currentText()}")
+# logger.debug(f"Current search term: {self.phrase_looker.currentText()}")
 try:
 with open(self.fname, "r") as f:
 for chunk in readInChunks(fileObj=f):
@@ -1,4 +1,4 @@
-from PyQt6.QtWidgets import (QDialog, QScrollArea, QPushButton, QVBoxLayout, QMessageBox,
+from PyQt6.QtWidgets import (QDialog, QPushButton, QVBoxLayout, QMessageBox,
 QDialogButtonBox, QTextEdit)
 from PyQt6.QtWebEngineWidgets import QWebEngineView
 from PyQt6.QtWebChannel import QWebChannel
@@ -10,7 +10,6 @@ from .functions import select_save_file
 from io import BytesIO
 from tempfile import TemporaryFile, TemporaryDirectory
 from pathlib import Path
-# from xhtml2pdf import pisa
 import logging, base64
 from getpass import getuser
 from datetime import datetime
@@ -33,24 +32,19 @@ class SubmissionDetails(QDialog):
 self.app = parent.parent().parent().parent().parent().parent().parent()
 except AttributeError:
 self.app = None
-# self.setWindowTitle(f"Submission Details - {sub.rsl_plate_num}")
-# create scrollable interior
-interior = QScrollArea()
-interior.setParent(self)
 self.webview = QWebEngineView(parent=self)
 self.webview.setMinimumSize(900, 500)
 self.webview.setMaximumSize(900, 500)
-# self.webview.setHtml(self.html)
 self.layout = QVBoxLayout()
-interior.resize(900, 500)
-interior.setWidget(self.webview)
 self.setFixedSize(900, 500)
-# button to export a pdf version
+# NOTE: button to export a pdf version
 btn = QPushButton("Export PDF")
-btn.setParent(self)
-btn.setFixedWidth(900)
+btn.setFixedWidth(875)
 btn.clicked.connect(self.export)
-# setup channel
+self.layout.addWidget(btn)
+self.layout.addWidget(self.webview)
+self.setLayout(self.layout)
+# NOTE: setup channel
 self.channel = QWebChannel()
 self.channel.registerObject('backend', self)
 self.submission_details(submission=sub)
@@ -80,31 +74,25 @@ class SubmissionDetails(QDialog):
 Args:
 submission (str | BasicSubmission): Submission of interest.
 """
-logger.debug(f"Details for: {submission}")
+# logger.debug(f"Details for: {submission}")
 if isinstance(submission, str):
-# submission = BasicSubmission.query(rsl_number=submission)
 submission = BasicSubmission.query(rsl_plate_num=submission)
 self.base_dict = submission.to_dict(full_data=True)
-logger.debug(f"Submission details data:\n{pformat({k:v for k,v in self.base_dict.items() if k != 'samples'})}")
+# logger.debug(f"Submission details data:\n{pformat({k:v for k,v in self.base_dict.items() if k != 'samples'})}")
-# don't want id
+# NOTE: don't want id
 del self.base_dict['id']
 # logger.debug(f"Creating barcode.")
-# if not check_if_app():
-# self.base_dict['barcode'] = base64.b64encode(submission.make_plate_barcode(width=120, height=30)).decode('utf-8')
-logger.debug(f"Making platemap...")
+# logger.debug(f"Making platemap...")
 self.base_dict['platemap'] = submission.make_plate_map()
 self.base_dict, self.template = submission.get_details_template(base_dict=self.base_dict)
 self.html = self.template.render(sub=self.base_dict, signing_permission=is_power_user())
 self.webview.setHtml(self.html)
 self.setWindowTitle(f"Submission Details - {submission.rsl_plate_num}")
-# with open("details.html", "w") as f:
-# f.write(self.html)

 @pyqtSlot(str)
 def sign_off(self, submission:str|BasicSubmission):
-logger.debug(f"Signing off on {submission} - ({getuser()})")
+# logger.debug(f"Signing off on {submission} - ({getuser()})")
 if isinstance(submission, str):
-# submission = BasicSubmission.query(rsl_number=submission)
 submission = BasicSubmission.query(rsl_plate_num=submission)
 submission.signed_by = getuser()
 submission.save()
@@ -177,6 +165,6 @@ class SubmissionComment(QDialog):
 comment = self.txt_editor.toPlainText()
 dt = datetime.strftime(datetime.now(), "%Y-%m-%d %H:%M:%S")
 full_comment = {"name":commenter, "time": dt, "text": comment}
-logger.debug(f"Full comment: {full_comment}")
+# logger.debug(f"Full comment: {full_comment}")
 return full_comment

@@ -110,7 +110,7 @@ class SubmissionsSheet(QTableView):
 self.menu = QMenu(self)
 self.con_actions = submission.custom_context_events()
 for k in self.con_actions.keys():
-logger.debug(f"Adding {k}")
+# logger.debug(f"Adding {k}")
 action = QAction(k, self)
 action.triggered.connect(lambda _, action_name=k: self.triggered_action(action_name=action_name))
 self.menu.addAction(action)
@@ -124,8 +124,8 @@ class SubmissionsSheet(QTableView):
 Args:
 action_name (str): name of the action from the menu
 """
-logger.debug(f"Action: {action_name}")
-logger.debug(f"Responding with {self.con_actions[action_name]}")
+# logger.debug(f"Action: {action_name}")
+# logger.debug(f"Responding with {self.con_actions[action_name]}")
 func = self.con_actions[action_name]
 func(obj=self)

@@ -162,50 +162,20 @@ class SubmissionsSheet(QTableView):
 experiment_name=run[4].strip(),
 end_time=run[5].strip()
 )
-# elution columns are item 6 in the comma split list to the end
+# NOTE: elution columns are item 6 in the comma split list to the end
 for ii in range(6, len(run)):
 new_run[f"column{str(ii-5)}_vol"] = run[ii]
-# Lookup imported submissions
-# sub = BasicSubmission.query(rsl_number=new_run['rsl_plate_num'])
+# NOTE: Lookup imported submissions
 sub = BasicSubmission.query(rsl_plate_num=new_run['rsl_plate_num'])
-# If no such submission exists, move onto the next run
+# NOTE: If no such submission exists, move onto the next run
 if sub == None:
 continue
 try:
-logger.debug(f"Found submission: {sub.rsl_plate_num}")
+# logger.debug(f"Found submission: {sub.rsl_plate_num}")
 count += 1
 except AttributeError:
 continue
 sub.set_attribute('extraction_info', new_run)
-# if sub.extraction_info != None:
-# # existing = json.loads(sub.extraction_info)
-# existing = sub.extraction_info
-# else:
-# existing = None
-# # Check if the new info already exists in the imported submission
-# try:
-# # if json.dumps(new_run) in sub.extraction_info:
-# if new_run in sub.extraction_info:
-# logger.debug(f"Looks like we already have that info.")
-# continue
-# except TypeError:
-# pass
-# # Update or create the extraction info
-# if existing != None:
-# try:
-# logger.debug(f"Updating {type(existing)}: {existing} with {type(new_run)}: {new_run}")
-# existing.append(new_run)
-# logger.debug(f"Setting: {existing}")
-# # sub.extraction_info = json.dumps(existing)
-# sub.extraction_info = existing
-# except TypeError:
-# logger.error(f"Error updating!")
-# # sub.extraction_info = json.dumps([new_run])
-# sub.extraction_info = [new_run]
-# logger.debug(f"Final ext info for {sub.rsl_plate_num}: {sub.extraction_info}")
-# else:
-# # sub.extraction_info = json.dumps([new_run])
-# sub.extraction_info = [new_run]
 sub.save()
 self.report.add_result(Result(msg=f"We added {count} logs to the database.", status='Information'))

@@ -230,7 +200,7 @@ class SubmissionsSheet(QTableView):
 """
 fname = select_open_file(self, file_extension="csv")
 with open(fname.__str__(), 'r') as f:
-# split csv rows on comma
+# NOTE: split csv rows on comma
 runs = [col.strip().split(",") for col in f.readlines()]
 count = 0
 for run in runs:
@@ -242,49 +212,17 @@ class SubmissionsSheet(QTableView):
 experiment_name=run[4].strip(),
 end_time=run[5].strip()
 )
-# lookup imported submission
-# sub = lookup_submission_by_rsl_num(ctx=obj.ctx, rsl_num=new_run['rsl_plate_num'])
-# sub = lookup_submissions(ctx=obj.ctx, rsl_number=new_run['rsl_plate_num'])
+# NOTE: lookup imported submission
 sub = BasicSubmission.query(rsl_number=new_run['rsl_plate_num'])
-# if imported submission doesn't exist move on to next run
+# NOTE: if imported submission doesn't exist move on to next run
 if sub == None:
 continue
-try:
-logger.debug(f"Found submission: {sub.rsl_plate_num}")
-except AttributeError:
-continue
-sub.set_attribute('pcr_info', new_run)
-# # check if pcr_info already exists
-# if hasattr(sub, 'pcr_info') and sub.pcr_info != None:
-# # existing = json.loads(sub.pcr_info)
-# existing = sub.pcr_info
-# else:
-# existing = None
-# # check if this entry already exists in imported submission
-# try:
-# # if json.dumps(new_run) in sub.pcr_info:
-# if new_run in sub.pcr_info:
-# logger.debug(f"Looks like we already have that info.")
-# continue
-# else:
-# count += 1
-# except TypeError:
-# logger.error(f"No json to dump")
-# if existing is not None:
-# try:
-# logger.debug(f"Updating {type(existing)}: {existing} with {type(new_run)}: {new_run}")
-# existing.append(new_run)
-# logger.debug(f"Setting: {existing}")
-# # sub.pcr_info = json.dumps(existing)
-# sub.pcr_info = existing
-# except TypeError:
-# logger.error(f"Error updating!")
-# # sub.pcr_info = json.dumps([new_run])
-# sub.pcr_info = [new_run]
-# logger.debug(f"Final ext info for {sub.rsl_plate_num}: {sub.pcr_info}")
-# else:
-# # sub.pcr_info = json.dumps([new_run])
-# sub.pcr_info = [new_run]
+# try:
+# logger.debug(f"Found submission: {sub.rsl_plate_num}")
+# except AttributeError:
+# continue
+sub.set_attribute('pcr_info', new_run)
+# NOTE: check if pcr_info already exists
 sub.save()
 self.report.add_result(Result(msg=f"We added {count} logs to the database.", status='Information'))

@@ -308,23 +246,21 @@ class SubmissionsSheet(QTableView):
 Tuple[QMainWindow, dict]: Collection of new main app window and result dict
 """
 report = Report()
-# ask for date ranges
+# NOTE: ask for date ranges
 dlg = ReportDatePicker()
 if dlg.exec():
 info = dlg.parse_form()
-logger.debug(f"Report info: {info}")
+# logger.debug(f"Report info: {info}")
-# find submissions based on date range
+# NOTE: find submissions based on date range
 subs = BasicSubmission.query(start_date=info['start_date'], end_date=info['end_date'])
-# convert each object to dict
+# NOTE: convert each object to dict
 records = [item.to_dict(report=True) for item in subs]
 logger.debug(f"Records: {pformat(records)}")
-# make dataframe from record dictionaries
+# NOTE: make dataframe from record dictionaries
 detailed_df, summary_df = make_report_xlsx(records=records)
 html = make_report_html(df=summary_df, start_date=info['start_date'], end_date=info['end_date'])
-# get save location of report
+# NOTE: get save location of report
 fname = select_save_file(obj=self, default_name=f"Submissions_Report_{info['start_date']}-{info['end_date']}.pdf", extension="pdf")
-# with open(fname, "w+b") as f:
-# pisa.CreatePDF(html, dest=f)
 html_to_pdf(html=html, output_file=fname)
 writer = pd.ExcelWriter(fname.with_suffix(".xlsx"), engine='openpyxl')
 summary_df.to_excel(writer, sheet_name="Report")
@@ -337,14 +273,13 @@ class SubmissionsSheet(QTableView):
 len(str(series.name)) # len of column name/header
 )) + 20 # adding a little extra space
 try:
-# worksheet.column_dimensions[get_column_letter(idx=idx)].width = max_len
-# Convert idx to letter
+# NOTE: Convert idx to letter
 col_letter = chr(ord('@') + idx)
 worksheet.column_dimensions[col_letter].width = max_len
 except ValueError:
 pass
 blank_row = get_first_blank_df_row(summary_df) + 1
-logger.debug(f"Blank row index = {blank_row}")
+# logger.debug(f"Blank row index = {blank_row}")
 for col in range(3,6):
 col_letter = row_map[col]
 worksheet.cell(row=blank_row, column=col, value=f"=SUM({col_letter}2:{col_letter}{str(blank_row-1)})")
@@ -26,11 +26,11 @@ class SubmissionTypeAdder(QWidget):
 scrollContent = QWidget(scroll)
 self.grid = QGridLayout()
 scrollContent.setLayout(self.grid)
-# insert submit button at top
+# NOTE: insert submit button at top
 self.submit_btn = QPushButton("Submit")
 self.grid.addWidget(self.submit_btn,0,0,1,1)
 self.grid.addWidget(QLabel("Submission Type Name:"),2,0)
-# widget to get kit name
+# NOTE: widget to get kit name
 self.st_name = QLineEdit()
 self.st_name.setObjectName("submission_type_name")
 self.grid.addWidget(self.st_name,2,1,1,2)
@@ -39,7 +39,7 @@ class SubmissionTypeAdder(QWidget):
 self.grid.addWidget(template_selector,3,1)
 self.template_label = QLabel("None")
 self.grid.addWidget(self.template_label,3,2)
-# widget to get uses of kit
+# NOTE: widget to get uses of kit
 exclude = ['id', 'submitting_lab_id', 'extraction_kit_id', 'reagents_id', 'extraction_info', 'pcr_info', 'run_cost']
 self.columns = {key:value for key, value in BasicSubmission.__dict__.items() if isinstance(value, InstrumentedAttribute)}
 self.columns = {key:value for key, value in self.columns.items() if hasattr(value, "type") and key not in exclude}
@@ -30,7 +30,7 @@ class SubmissionFormContainer(QWidget):
|
|||||||
import_drag = pyqtSignal(Path)
|
import_drag = pyqtSignal(Path)
|
||||||
|
|
||||||
def __init__(self, parent: QWidget) -> None:
|
def __init__(self, parent: QWidget) -> None:
|
||||||
logger.debug(f"Setting form widget...")
|
# logger.debug(f"Setting form widget...")
|
||||||
super().__init__(parent)
|
super().__init__(parent)
|
||||||
self.app = self.parent().parent()
|
self.app = self.parent().parent()
|
||||||
self.report = Report()
|
self.report = Report()
|
||||||
@@ -52,7 +52,7 @@ class SubmissionFormContainer(QWidget):
|
|||||||
Sets filename when file dropped
|
Sets filename when file dropped
|
||||||
"""
|
"""
|
||||||
fname = Path([u.toLocalFile() for u in event.mimeData().urls()][0])
|
fname = Path([u.toLocalFile() for u in event.mimeData().urls()][0])
|
||||||
logger.debug(f"App: {self.app}")
|
# logger.debug(f"App: {self.app}")
|
||||||
self.app.last_dir = fname.parent
|
self.app.last_dir = fname.parent
|
||||||
self.import_drag.emit(fname)
|
self.import_drag.emit(fname)
|
||||||
|
|
||||||
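For context, the container above exposes an `import_drag` signal carrying a `Path` and, on drop, pulls the first URL out of the event's MIME data, remembers its parent directory and emits the path. A stripped-down PyQt6 sketch of that drag-and-drop pattern — the class name and signal name here are illustrative, not the project's widgets:

```python
# Minimal PyQt6 sketch of accepting a dropped file and emitting its path as a signal.
from pathlib import Path
from PyQt6.QtCore import pyqtSignal
from PyQt6.QtWidgets import QWidget


class DropArea(QWidget):
    file_dropped = pyqtSignal(Path)

    def __init__(self, parent=None):
        super().__init__(parent)
        self.setAcceptDrops(True)                      # opt in to drag-and-drop

    def dragEnterEvent(self, event):
        if event.mimeData().hasUrls():                 # only accept drags carrying file URLs
            event.acceptProposedAction()

    def dropEvent(self, event):
        # take the first dropped file, as the form container above does
        fname = Path(event.mimeData().urls()[0].toLocalFile())
        self.file_dropped.emit(fname)
```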
@@ -63,7 +63,7 @@ class SubmissionFormContainer(QWidget):
|
|||||||
self.app.raise_()
|
self.app.raise_()
|
||||||
self.app.activateWindow()
|
self.app.activateWindow()
|
||||||
self.import_submission_function(fname)
|
self.import_submission_function(fname)
|
||||||
logger.debug(f"Result from result reporter: {self.report.results}")
|
# logger.debug(f"Result from result reporter: {self.report.results}")
|
||||||
self.app.report.add_result(self.report)
|
self.app.report.add_result(self.report)
|
||||||
self.report = Report()
|
self.report = Report()
|
||||||
self.app.result_reporter()
|
self.app.result_reporter()
|
||||||
@@ -78,7 +78,7 @@ class SubmissionFormContainer(QWidget):
|
|||||||
Returns:
|
Returns:
|
||||||
Tuple[QMainWindow, dict|None]: Collection of new main app window and result dict
|
Tuple[QMainWindow, dict|None]: Collection of new main app window and result dict
|
||||||
"""
|
"""
|
||||||
logger.debug(f"\n\nStarting Import...\n\n")
|
logger.info(f"\n\nStarting Import...\n\n")
|
||||||
report = Report()
|
report = Report()
|
||||||
try:
|
try:
|
||||||
self.form.setParent(None)
|
self.form.setParent(None)
|
||||||
@@ -90,7 +90,7 @@ class SubmissionFormContainer(QWidget):
|
|||||||
# set file dialog
|
# set file dialog
|
||||||
if isinstance(fname, bool) or fname == None:
|
if isinstance(fname, bool) or fname == None:
|
||||||
fname = select_open_file(self, file_extension="xlsx")
|
fname = select_open_file(self, file_extension="xlsx")
|
||||||
logger.debug(f"Attempting to parse file: {fname}")
|
# logger.debug(f"Attempting to parse file: {fname}")
|
||||||
if not fname.exists():
|
if not fname.exists():
|
||||||
report.add_result(Result(msg=f"File {fname.__str__()} not found.", status="critical"))
|
report.add_result(Result(msg=f"File {fname.__str__()} not found.", status="critical"))
|
||||||
self.report.add_result(report)
|
self.report.add_result(report)
|
||||||
@@ -103,16 +103,16 @@ class SubmissionFormContainer(QWidget):
|
|||||||
return
|
return
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
self.prsr = SheetParser(filepath=fname)
|
self.prsr = SheetParser(filepath=fname)
|
||||||
logger.debug(f"Submission dictionary:\n{pformat(self.prsr.sub)}")
|
# logger.debug(f"Submission dictionary:\n{pformat(self.prsr.sub)}")
|
||||||
self.pyd = self.prsr.to_pydantic()
|
self.pyd = self.prsr.to_pydantic()
|
||||||
logger.debug(f"Pydantic result: \n\n{pformat(self.pyd)}\n\n")
|
# logger.debug(f"Pydantic result: \n\n{pformat(self.pyd)}\n\n")
|
||||||
self.form = self.pyd.to_form(parent=self)
|
self.form = self.pyd.to_form(parent=self)
|
||||||
self.layout().addWidget(self.form)
|
self.layout().addWidget(self.form)
|
||||||
# if self.prsr.sample_result != None:
|
# if self.prsr.sample_result != None:
|
||||||
# report.add_result(msg=self.prsr.sample_result, status="Warning")
|
# report.add_result(msg=self.prsr.sample_result, status="Warning")
|
||||||
self.report.add_result(report)
|
self.report.add_result(report)
|
||||||
logger.debug(f"Outgoing report: {self.report.results}")
|
# logger.debug(f"Outgoing report: {self.report.results}")
|
||||||
logger.debug(f"All attributes of submission container:\n{pformat(self.__dict__)}")
|
# logger.debug(f"All attributes of submission container:\n{pformat(self.__dict__)}")
|
||||||
|
|
||||||
def add_reagent(self, reagent_lot:str|None=None, reagent_type:str|None=None, expiry:date|None=None, name:str|None=None):
|
def add_reagent(self, reagent_lot:str|None=None, reagent_type:str|None=None, expiry:date|None=None, name:str|None=None):
|
||||||
"""
|
"""
|
||||||
@@ -135,7 +135,7 @@ class SubmissionFormContainer(QWidget):
|
|||||||
if dlg.exec():
|
if dlg.exec():
|
||||||
# extract form info
|
# extract form info
|
||||||
info = dlg.parse_form()
|
info = dlg.parse_form()
|
||||||
logger.debug(f"Reagent info: {info}")
|
# logger.debug(f"Reagent info: {info}")
|
||||||
# create reagent object
|
# create reagent object
|
||||||
reagent = PydReagent(ctx=self.app.ctx, **info, missing=False)
|
reagent = PydReagent(ctx=self.app.ctx, **info, missing=False)
|
||||||
# send reagent to db
|
# send reagent to db
|
||||||
@@ -216,10 +216,10 @@ class SubmissionFormWidget(QWidget):
|
|||||||
caller = inspect.stack()[1].function.__repr__().replace("'", "")
|
caller = inspect.stack()[1].function.__repr__().replace("'", "")
|
||||||
# self.reagents = []
|
# self.reagents = []
|
||||||
# logger.debug(f"Self.reagents: {self.reagents}")
|
# logger.debug(f"Self.reagents: {self.reagents}")
|
||||||
logger.debug(f"\n\n{pformat(caller)}\n\n")
|
# logger.debug(f"\n\n{pformat(caller)}\n\n")
|
||||||
# logger.debug(f"SubmissionType: {self.submission_type}")
|
# logger.debug(f"SubmissionType: {self.submission_type}")
|
||||||
report = Report()
|
report = Report()
|
||||||
logger.debug(f"Extraction kit: {extraction_kit}")
|
# logger.debug(f"Extraction kit: {extraction_kit}")
|
||||||
# Remove previous reagent widgets
|
# Remove previous reagent widgets
|
||||||
try:
|
try:
|
||||||
old_reagents = self.find_widgets()
|
old_reagents = self.find_widgets()
|
||||||
@@ -249,7 +249,7 @@ class SubmissionFormWidget(QWidget):
|
|||||||
add_widget = self.ReagentFormWidget(parent=self, reagent=reagent, extraction_kit=self.pyd.extraction_kit)
|
add_widget = self.ReagentFormWidget(parent=self, reagent=reagent, extraction_kit=self.pyd.extraction_kit)
|
||||||
self.layout.addWidget(add_widget)
|
self.layout.addWidget(add_widget)
|
||||||
report.add_result(integrity_report)
|
report.add_result(integrity_report)
|
||||||
logger.debug(f"Outgoing report: {report.results}")
|
# logger.debug(f"Outgoing report: {report.results}")
|
||||||
if hasattr(self.pyd, "csv"):
|
if hasattr(self.pyd, "csv"):
|
||||||
export_csv_btn = QPushButton("Export CSV")
|
export_csv_btn = QPushButton("Export CSV")
|
||||||
export_csv_btn.setObjectName("export_csv_btn")
|
export_csv_btn.setObjectName("export_csv_btn")
|
||||||
@@ -263,46 +263,6 @@ class SubmissionFormWidget(QWidget):
|
|||||||
self.app.report.add_result(report)
|
self.app.report.add_result(report)
|
||||||
self.app.result_reporter()
|
self.app.result_reporter()
|
||||||
|
|
||||||
# def kit_integrity_completion_function(self, extraction_kit:str|None=None):
|
|
||||||
# """
|
|
||||||
# Compare kit contents to parsed contents and creates widgets.
|
|
||||||
#
|
|
||||||
# Args:
|
|
||||||
# obj (QMainWindow): The original app window
|
|
||||||
#
|
|
||||||
# Returns:
|
|
||||||
# Tuple[QMainWindow, dict]: Collection of new main app window and result dict
|
|
||||||
# """
|
|
||||||
# report = Report()
|
|
||||||
# missing_reagents = []
|
|
||||||
# # logger.debug(inspect.currentframe().f_back.f_code.co_name)
|
|
||||||
# # find the widget that contains kit info
|
|
||||||
# if extraction_kit is None:
|
|
||||||
# kit_widget = self.find_widgets(object_name="extraction_kit")[0].input
|
|
||||||
# logger.debug(f"Kit selector: {kit_widget}")
|
|
||||||
# # get current kit being used
|
|
||||||
# self.ext_kit = kit_widget.currentText()
|
|
||||||
# else:
|
|
||||||
# self.ext_kit = extraction_kit
|
|
||||||
# for reagent in self.reagents:
|
|
||||||
# logger.debug(f"Creating widget for {reagent}")
|
|
||||||
# add_widget = self.ReagentFormWidget(parent=self, reagent=reagent, extraction_kit=self.ext_kit)
|
|
||||||
# # self.form.layout().addWidget(add_widget)
|
|
||||||
# self.layout.addWidget(add_widget)
|
|
||||||
# if reagent.missing:
|
|
||||||
# missing_reagents.append(reagent)
|
|
||||||
# logger.debug(f"Checking integrity of {self.ext_kit}")
|
|
||||||
# # TODO: put check_kit_integrity here instead of what's here?
|
|
||||||
# # see if there are any missing reagents
|
|
||||||
# if len(missing_reagents) > 0:
|
|
||||||
# result = Result(msg=f"""The submission you are importing is missing some reagents expected by the kit.\n\n
|
|
||||||
# It looks like you are missing: {[item.type.upper() for item in missing_reagents]}\n\n
|
|
||||||
# Alternatively, you may have set the wrong extraction kit.\n\nThe program will populate lists using existing reagents.
|
|
||||||
# \n\nPlease make sure you check the lots carefully!""".replace(" ", ""), status="Warning")
|
|
||||||
# report.add_result(result)
|
|
||||||
# self.report.add_result(report)
|
|
||||||
# logger.debug(f"Outgoing report: {self.report.results}")
|
|
||||||
|
|
||||||
def clear_form(self):
|
def clear_form(self):
|
||||||
"""
|
"""
|
||||||
Removes all form widgets
|
Removes all form widgets
|
||||||
@@ -335,24 +295,23 @@ class SubmissionFormWidget(QWidget):
|
|||||||
Returns:
|
Returns:
|
||||||
Tuple[QMainWindow, dict]: Collection of new main app window and result dict
|
Tuple[QMainWindow, dict]: Collection of new main app window and result dict
|
||||||
"""
|
"""
|
||||||
logger.debug(f"\n\nBeginning Submission\n\n")
|
logger.info(f"\n\nBeginning Submission\n\n")
|
||||||
report = Report()
|
report = Report()
|
||||||
# self.pyd: PydSubmission = self.parse_form()
|
|
||||||
result = self.parse_form()
|
result = self.parse_form()
|
||||||
report.add_result(result)
|
report.add_result(result)
|
||||||
logger.debug(f"Submission: {pformat(self.pyd)}")
|
# logger.debug(f"Submission: {pformat(self.pyd)}")
|
||||||
logger.debug("Checking kit integrity...")
|
# logger.debug("Checking kit integrity...")
|
||||||
_, result = self.pyd.check_kit_integrity()
|
_, result = self.pyd.check_kit_integrity()
|
||||||
report.add_result(result)
|
report.add_result(result)
|
||||||
if len(result.results) > 0:
|
if len(result.results) > 0:
|
||||||
self.app.report.add_result(report)
|
self.app.report.add_result(report)
|
||||||
self.app.result_reporter()
|
self.app.result_reporter()
|
||||||
return
|
return
|
||||||
logger.debug(f"PYD before transformation into SQL:\n\n{self.pyd}\n\n")
|
# logger.debug(f"PYD before transformation into SQL:\n\n{self.pyd}\n\n")
|
||||||
base_submission, result = self.pyd.to_sql()
|
base_submission, result = self.pyd.to_sql()
|
||||||
logger.debug(f"SQL object: {pformat(base_submission.__dict__)}")
|
# logger.debug(f"SQL object: {pformat(base_submission.__dict__)}")
|
||||||
# logger.debug(f"Base submission: {base_submission.to_dict()}")
|
# logger.debug(f"Base submission: {base_submission.to_dict()}")
|
||||||
# check output message for issues
|
# NOTE: check output message for issues
|
||||||
match result.code:
|
match result.code:
|
||||||
# code 0: everything is fine.
|
# code 0: everything is fine.
|
||||||
case 0:
|
case 0:
|
||||||
@@ -379,11 +338,10 @@ class SubmissionFormWidget(QWidget):
|
|||||||
# logger.debug(f"Updating: {reagent} with {reagent.lot}")
|
# logger.debug(f"Updating: {reagent} with {reagent.lot}")
|
||||||
reagent.update_last_used(kit=base_submission.extraction_kit)
|
reagent.update_last_used(kit=base_submission.extraction_kit)
|
||||||
# logger.debug(f"Final reagents: {pformat(base_submission.reagents)}")
|
# logger.debug(f"Final reagents: {pformat(base_submission.reagents)}")
|
||||||
# sys.exit("Programmed stop submission_widget.py, line 381")
|
|
||||||
base_submission.save()
|
base_submission.save()
|
||||||
# update summary sheet
|
# NOTE: update summary sheet
|
||||||
self.app.table_widget.sub_wid.setData()
|
self.app.table_widget.sub_wid.setData()
|
||||||
# reset form
|
# NOTE: reset form
|
||||||
self.setParent(None)
|
self.setParent(None)
|
||||||
# logger.debug(f"All attributes of obj: {pformat(self.__dict__)}")
|
# logger.debug(f"All attributes of obj: {pformat(self.__dict__)}")
|
||||||
self.app.report.add_result(report)
|
self.app.report.add_result(report)
|
||||||
@@ -396,16 +354,14 @@ class SubmissionFormWidget(QWidget):
|
|||||||
Args:
|
Args:
|
||||||
fname (Path | None, optional): Input filename. Defaults to None.
|
fname (Path | None, optional): Input filename. Defaults to None.
|
||||||
"""
|
"""
|
||||||
# self.parse_form()
|
|
||||||
if isinstance(fname, bool) or fname == None:
|
if isinstance(fname, bool) or fname == None:
|
||||||
fname = select_save_file(obj=self, default_name=self.pyd.construct_filename(), extension="csv")
|
fname = select_save_file(obj=self, default_name=self.pyd.construct_filename(), extension="csv")
|
||||||
try:
|
try:
|
||||||
|
|
||||||
# logger.debug(f'')
|
|
||||||
# self.pyd.csv.to_csv(fname.__str__(), index=False)
|
# self.pyd.csv.to_csv(fname.__str__(), index=False)
|
||||||
workbook_2_csv(worksheet=self.pyd.csv, filename=fname)
|
workbook_2_csv(worksheet=self.pyd.csv, filename=fname)
|
||||||
except PermissionError:
|
except PermissionError:
|
||||||
logger.debug(f"Could not get permissions to {fname}. Possibly the request was cancelled.")
|
logger.warning(f"Could not get permissions to {fname}. Possibly the request was cancelled.")
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
logger.error(f"No csv file found in the submission at this point.")
|
logger.error(f"No csv file found in the submission at this point.")
|
||||||
|
|
||||||
@@ -417,7 +373,7 @@ class SubmissionFormWidget(QWidget):
|
|||||||
Report: Report on status of parse.
|
Report: Report on status of parse.
|
||||||
"""
|
"""
|
||||||
report = Report()
|
report = Report()
|
||||||
logger.debug(f"Hello from form parser!")
|
logger.info(f"Hello from form parser!")
|
||||||
info = {}
|
info = {}
|
||||||
reagents = []
|
reagents = []
|
||||||
for widget in self.findChildren(QWidget):
|
for widget in self.findChildren(QWidget):
|
||||||
@@ -431,21 +387,19 @@ class SubmissionFormWidget(QWidget):
|
|||||||
field, value = widget.parse_form()
|
field, value = widget.parse_form()
|
||||||
if field is not None:
|
if field is not None:
|
||||||
info[field] = value
|
info[field] = value
|
||||||
logger.debug(f"Info: {pformat(info)}")
|
# logger.debug(f"Info: {pformat(info)}")
|
||||||
logger.debug(f"Reagents going into pyd: {pformat(reagents)}")
|
# logger.debug(f"Reagents going into pyd: {pformat(reagents)}")
|
||||||
self.pyd.reagents = reagents
|
self.pyd.reagents = reagents
|
||||||
|
|
||||||
# logger.debug(f"Attrs not in info: {[k for k, v in self.__dict__.items() if k not in info.keys()]}")
|
# logger.debug(f"Attrs not in info: {[k for k, v in self.__dict__.items() if k not in info.keys()]}")
|
||||||
for item in self.recover:
|
for item in self.recover:
|
||||||
logger.debug(f"Attempting to recover: {item}")
|
# logger.debug(f"Attempting to recover: {item}")
|
||||||
if hasattr(self, item):
|
if hasattr(self, item):
|
||||||
value = getattr(self, item)
|
value = getattr(self, item)
|
||||||
logger.debug(f"Setting {item}")
|
# logger.debug(f"Setting {item}")
|
||||||
info[item] = value
|
info[item] = value
|
||||||
# submission = PydSubmission(reagents=reagents, **info)
|
|
||||||
for k,v in info.items():
|
for k,v in info.items():
|
||||||
self.pyd.set_attribute(key=k, value=v)
|
self.pyd.set_attribute(key=k, value=v)
|
||||||
# return submission
|
# NOTE: return submission
|
||||||
report.add_result(report)
|
report.add_result(report)
|
||||||
return report
|
return report
|
||||||
|
|
||||||
@@ -510,7 +464,7 @@ class SubmissionFormWidget(QWidget):
|
|||||||
except (TypeError, KeyError):
|
except (TypeError, KeyError):
|
||||||
pass
|
pass
|
||||||
obj = parent.parent().parent()
|
obj = parent.parent().parent()
|
||||||
logger.debug(f"Creating widget for: {key}")
|
# logger.debug(f"Creating widget for: {key}")
|
||||||
match key:
|
match key:
|
||||||
case 'submitting_lab':
|
case 'submitting_lab':
|
||||||
add_widget = QComboBox()
|
add_widget = QComboBox()
|
||||||
@@ -531,12 +485,12 @@ class SubmissionFormWidget(QWidget):
|
|||||||
# create combobox to hold looked up kits
|
# create combobox to hold looked up kits
|
||||||
add_widget = QComboBox()
|
add_widget = QComboBox()
|
||||||
# lookup existing kits by 'submission_type' decided on by sheetparser
|
# lookup existing kits by 'submission_type' decided on by sheetparser
|
||||||
logger.debug(f"Looking up kits used for {submission_type}")
|
# logger.debug(f"Looking up kits used for {submission_type}")
|
||||||
uses = [item.name for item in KitType.query(used_for=submission_type)]
|
uses = [item.name for item in KitType.query(used_for=submission_type)]
|
||||||
obj.uses = uses
|
obj.uses = uses
|
||||||
logger.debug(f"Kits received for {submission_type}: {uses}")
|
# logger.debug(f"Kits received for {submission_type}: {uses}")
|
||||||
if check_not_nan(value):
|
if check_not_nan(value):
|
||||||
logger.debug(f"The extraction kit in parser was: {value}")
|
# logger.debug(f"The extraction kit in parser was: {value}")
|
||||||
uses.insert(0, uses.pop(uses.index(value)))
|
uses.insert(0, uses.pop(uses.index(value)))
|
||||||
obj.ext_kit = value
|
obj.ext_kit = value
|
||||||
else:
|
else:
|
||||||
@@ -565,7 +519,7 @@ class SubmissionFormWidget(QWidget):
|
|||||||
case _:
|
case _:
|
||||||
# anything else gets added in as a line edit
|
# anything else gets added in as a line edit
|
||||||
add_widget = QLineEdit()
|
add_widget = QLineEdit()
|
||||||
logger.debug(f"Setting widget text to {str(value).replace('_', ' ')}")
|
# logger.debug(f"Setting widget text to {str(value).replace('_', ' ')}")
|
||||||
add_widget.setText(str(value).replace("_", " "))
|
add_widget.setText(str(value).replace("_", " "))
|
||||||
if add_widget != None:
|
if add_widget != None:
|
||||||
add_widget.setObjectName(key)
|
add_widget.setObjectName(key)
|
||||||
@@ -639,14 +593,6 @@ class SubmissionFormWidget(QWidget):
|
|||||||
# If changed set self.missing to True and update self.label
|
# If changed set self.missing to True and update self.label
|
||||||
self.lot.currentTextChanged.connect(self.updated)
|
self.lot.currentTextChanged.connect(self.updated)
|
||||||
|
|
||||||
# def check_uncheck(self):
|
|
||||||
# if self.check_box.isChecked():
|
|
||||||
# self.lot.setCurrentIndex(0)
|
|
||||||
# self.lot.setEnabled(True)
|
|
||||||
# else:
|
|
||||||
# self.lot.setCurrentText("Not Applicable")
|
|
||||||
# self.lot.setEnabled(False)
|
|
||||||
|
|
||||||
def parse_form(self) -> Tuple[PydReagent, dict]:
|
def parse_form(self) -> Tuple[PydReagent, dict]:
|
||||||
"""
|
"""
|
||||||
Pulls form info into PydReagent
|
Pulls form info into PydReagent
|
||||||
@@ -657,17 +603,17 @@ class SubmissionFormWidget(QWidget):
|
|||||||
# if not self.check_box.isChecked():
|
# if not self.check_box.isChecked():
|
||||||
# return None, None
|
# return None, None
|
||||||
lot = self.lot.currentText()
|
lot = self.lot.currentText()
|
||||||
logger.debug(f"Using this lot for the reagent {self.reagent}: {lot}")
|
# logger.debug(f"Using this lot for the reagent {self.reagent}: {lot}")
|
||||||
wanted_reagent = Reagent.query(lot_number=lot, reagent_type=self.reagent.type)
|
wanted_reagent = Reagent.query(lot_number=lot, reagent_type=self.reagent.type)
|
||||||
# if reagent doesn't exist in database, offer to add it (uses App.add_reagent)
|
# NOTE: if reagent doesn't exist in database, offer to add it (uses App.add_reagent)
|
||||||
if wanted_reagent == None:
|
if wanted_reagent == None:
|
||||||
dlg = QuestionAsker(title=f"Add {lot}?", message=f"Couldn't find reagent type {self.reagent.type}: {lot} in the database.\n\nWould you like to add it?")
|
dlg = QuestionAsker(title=f"Add {lot}?", message=f"Couldn't find reagent type {self.reagent.type}: {lot} in the database.\n\nWould you like to add it?")
|
||||||
if dlg.exec():
|
if dlg.exec():
|
||||||
wanted_reagent = self.parent().parent().add_reagent(reagent_lot=lot, reagent_type=self.reagent.type, expiry=self.reagent.expiry, name=self.reagent.name)
|
wanted_reagent = self.parent().parent().add_reagent(reagent_lot=lot, reagent_type=self.reagent.type, expiry=self.reagent.expiry, name=self.reagent.name)
|
||||||
return wanted_reagent, None
|
return wanted_reagent, None
|
||||||
else:
|
else:
|
||||||
# In this case we will have an empty reagent and the submission will fail kit integrity check
|
# NOTE: In this case we will have an empty reagent and the submission will fail kit integrity check
|
||||||
logger.debug("Will not add reagent.")
|
# logger.debug("Will not add reagent.")
|
||||||
return None, Result(msg="Failed integrity check", status="Critical")
|
return None, Result(msg="Failed integrity check", status="Critical")
|
||||||
else:
|
else:
|
||||||
# Since this now gets passed in directly from the parser -> pyd -> form and the parser gets the name
|
# Since this now gets passed in directly from the parser -> pyd -> form and the parser gets the name
|
||||||
@@ -712,8 +658,8 @@ class SubmissionFormWidget(QWidget):
|
|||||||
def __init__(self, reagent, extraction_kit:str) -> None:
|
def __init__(self, reagent, extraction_kit:str) -> None:
|
||||||
super().__init__()
|
super().__init__()
|
||||||
self.setEditable(True)
|
self.setEditable(True)
|
||||||
logger.debug(f"Attempting lookup of reagents by type: {reagent.type}")
|
# logger.debug(f"Attempting lookup of reagents by type: {reagent.type}")
|
||||||
# below was lookup_reagent_by_type_name_and_kit_name, but I couldn't get it to work.
|
# NOTE: below was lookup_reagent_by_type_name_and_kit_name, but I couldn't get it to work.
|
||||||
lookup = Reagent.query(reagent_type=reagent.type)
|
lookup = Reagent.query(reagent_type=reagent.type)
|
||||||
relevant_reagents = [str(item.lot) for item in lookup]
|
relevant_reagents = [str(item.lot) for item in lookup]
|
||||||
output_reg = []
|
output_reg = []
|
||||||
@@ -725,8 +671,8 @@ class SubmissionFormWidget(QWidget):
|
|||||||
elif isinstance(rel_reagent, str):
|
elif isinstance(rel_reagent, str):
|
||||||
output_reg.append(rel_reagent)
|
output_reg.append(rel_reagent)
|
||||||
relevant_reagents = output_reg
|
relevant_reagents = output_reg
|
||||||
# if reagent in sheet is not found insert it into the front of relevant reagents so it shows
|
# NOTE: if reagent in sheet is not found insert it into the front of relevant reagents so it shows
|
||||||
logger.debug(f"Relevant reagents for {reagent.lot}: {relevant_reagents}")
|
# logger.debug(f"Relevant reagents for {reagent.lot}: {relevant_reagents}")
|
||||||
if str(reagent.lot) not in relevant_reagents:
|
if str(reagent.lot) not in relevant_reagents:
|
||||||
if check_not_nan(reagent.lot):
|
if check_not_nan(reagent.lot):
|
||||||
relevant_reagents.insert(0, str(reagent.lot))
|
relevant_reagents.insert(0, str(reagent.lot))
|
||||||
|
|||||||
@@ -27,12 +27,12 @@
|
|||||||
left: 50%;
|
left: 50%;
|
||||||
margin-left: -60px;
|
margin-left: -60px;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Show the tooltip text when you mouse over the tooltip container */
|
/* Show the tooltip text when you mouse over the tooltip container */
|
||||||
.tooltip:hover .tooltiptext {
|
.tooltip:hover .tooltiptext {
|
||||||
visibility: visible;
|
visibility: visible;
|
||||||
font-size: large;
|
font-size: large;
|
||||||
}
|
}
|
||||||
|
|
||||||
</style>
|
</style>
|
||||||
<title>Submission Details for {{ sub['Plate Number'] }}</title>
|
<title>Submission Details for {{ sub['Plate Number'] }}</title>
|
||||||
<script src="qrc:///qtwebchannel/qwebchannel.js"></script>
|
<script src="qrc:///qtwebchannel/qwebchannel.js"></script>
|
||||||
|
|||||||
@@ -40,8 +40,9 @@ main_aux_dir = Path.home().joinpath(f"{os_config_dir}/submissions")
|
|||||||
CONFIGDIR = main_aux_dir.joinpath("config")
|
CONFIGDIR = main_aux_dir.joinpath("config")
|
||||||
LOGDIR = main_aux_dir.joinpath("logs")
|
LOGDIR = main_aux_dir.joinpath("logs")
|
||||||
|
|
||||||
row_map = {1:"A", 2:"B", 3:"C", 4:"D", 5:"E", 6:"F", 7:"G", 8:"H"}
|
row_map = {1: "A", 2: "B", 3: "C", 4: "D", 5: "E", 6: "F", 7: "G", 8: "H"}
|
||||||
row_keys = {v:k for k,v in row_map.items()}
|
row_keys = {v: k for k, v in row_map.items()}
|
||||||
|
|
||||||
|
|
||||||
def check_not_nan(cell_contents) -> bool:
|
def check_not_nan(cell_contents) -> bool:
|
||||||
"""
|
"""
|
||||||
@@ -52,7 +53,7 @@ def check_not_nan(cell_contents) -> bool:
|
|||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
bool: True if cell has a value, else False.
|
bool: True if cell has a value, else False.
|
||||||
"""
|
"""
|
||||||
# check for nan as a string first
|
# check for nan as a string first
|
||||||
exclude = ['unnamed:', 'blank', 'void']
|
exclude = ['unnamed:', 'blank', 'void']
|
||||||
try:
|
try:
|
||||||
@@ -88,7 +89,8 @@ def check_not_nan(cell_contents) -> bool:
|
|||||||
logger.debug(f"Check encountered unknown error: {type(e).__name__} - {e}")
|
logger.debug(f"Check encountered unknown error: {type(e).__name__} - {e}")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def convert_nans_to_nones(input_str) -> str|None:
|
|
||||||
|
def convert_nans_to_nones(input_str) -> str | None:
|
||||||
"""
|
"""
|
||||||
Get rid of various "nan", "NAN", "NaN", etc.
|
Get rid of various "nan", "NAN", "NaN", etc.
|
||||||
|
|
||||||
@@ -97,19 +99,21 @@ def convert_nans_to_nones(input_str) -> str|None:
|
|||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
str | None: Original value if it holds real data, otherwise None.
|
str | None: Original value if it holds real data, otherwise None.
|
||||||
"""
|
"""
|
||||||
# logger.debug(f"Input value of: {input_str}")
|
# logger.debug(f"Input value of: {input_str}")
|
||||||
if check_not_nan(input_str):
|
if check_not_nan(input_str):
|
||||||
return input_str
|
return input_str
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def is_missing(value:Any) -> Tuple[Any, bool]:
|
|
||||||
|
def is_missing(value: Any) -> Tuple[Any, bool]:
|
||||||
if check_not_nan(value):
|
if check_not_nan(value):
|
||||||
return value, False
|
return value, False
|
||||||
else:
|
else:
|
||||||
return convert_nans_to_nones(value), True
|
return convert_nans_to_nones(value), True
|
||||||
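Taken together, `check_not_nan`, `convert_nans_to_nones` and `is_missing` normalise spreadsheet cells: genuine values pass through unchanged, while NaN-like placeholders come back as `None` plus a `missing` flag. A hedged illustration of the intended behaviour — the inputs are invented and the exact edge cases depend on the parts of `check_not_nan` outside this hunk:

```python
# Illustrative expectations only; the import path for the helpers is assumed.
# from tools import check_not_nan, convert_nans_to_nones, is_missing
from math import nan

value, missing = is_missing("RSL-24-0001")
print(value, missing)                  # RSL-24-0001 False — a real cell value passes through

value, missing = is_missing(nan)
print(value, missing)                  # None True — a float NaN is converted and flagged missing

print(convert_nans_to_nones("NaN"))    # None — string NaN variants are rejected by check_not_nan
```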
|
|
||||||
def check_regex_match(pattern:str, check:str) -> bool:
|
|
||||||
|
def check_regex_match(pattern: str, check: str) -> bool:
|
||||||
"""
|
"""
|
||||||
Determines if a pattern matches a str
|
Determines if a pattern matches a str
|
||||||
|
|
||||||
@@ -119,13 +123,14 @@ def check_regex_match(pattern:str, check:str) -> bool:
|
|||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
bool: match found?
|
bool: match found?
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
return bool(re.match(fr"{pattern}", check))
|
return bool(re.match(fr"{pattern}", check))
|
||||||
except TypeError:
|
except TypeError:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
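`check_regex_match` is a thin wrapper around `re.match` that swallows the `TypeError` raised for non-string input, so `None` simply yields `False`. A short illustration — the pattern below is invented, not the application's configured `rerun_regex`:

```python
# from tools import check_regex_match  # import path assumed
pattern = r"RSL-\d{2}-\d{4}"                        # invented example pattern

print(check_regex_match(pattern, "RSL-24-0117"))    # True  — the pattern matches from the start
print(check_regex_match(pattern, "WW-24-0117"))     # False — no match
print(check_regex_match(pattern, None))             # False — TypeError from re.match is caught
```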
def get_first_blank_df_row(df:pd.DataFrame) -> int:
|
|
||||||
|
def get_first_blank_df_row(df: pd.DataFrame) -> int:
|
||||||
"""
|
"""
|
||||||
For some reason I need a whole function for this.
|
For some reason I need a whole function for this.
|
||||||
|
|
||||||
@@ -134,9 +139,10 @@ def get_first_blank_df_row(df:pd.DataFrame) -> int:
|
|||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
int: Index of the row after the last used row.
|
int: Index of the row after the last used row.
|
||||||
"""
|
"""
|
||||||
return df.shape[0] + 1
|
return df.shape[0] + 1
|
||||||
|
|
||||||
|
|
||||||
# Settings
|
# Settings
|
||||||
|
|
||||||
class Settings(BaseSettings, extra="allow"):
|
class Settings(BaseSettings, extra="allow"):
|
||||||
@@ -146,16 +152,16 @@ class Settings(BaseSettings, extra="allow"):
|
|||||||
Raises:
|
Raises:
|
||||||
FileNotFoundError: Error if database not found.
|
FileNotFoundError: Error if database not found.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
directory_path: Path
|
directory_path: Path
|
||||||
database_path: Path|str|None = None
|
database_path: Path | str | None = None
|
||||||
backup_path: Path|str|None = None
|
backup_path: Path | str | None = None
|
||||||
# super_users: list|None = None
|
# super_users: list|None = None
|
||||||
# power_users: list|None = None
|
# power_users: list|None = None
|
||||||
# rerun_regex: str
|
# rerun_regex: str
|
||||||
submission_types: dict|None = None
|
submission_types: dict | None = None
|
||||||
database_session: Session|None = None
|
database_session: Session | None = None
|
||||||
package: Any|None = None
|
package: Any | None = None
|
||||||
|
|
||||||
model_config = SettingsConfigDict(env_file_encoding='utf-8')
|
model_config = SettingsConfigDict(env_file_encoding='utf-8')
|
||||||
|
|
||||||
@@ -178,10 +184,10 @@ class Settings(BaseSettings, extra="allow"):
|
|||||||
if isinstance(value, str):
|
if isinstance(value, str):
|
||||||
value = Path(value)
|
value = Path(value)
|
||||||
if not value.exists():
|
if not value.exists():
|
||||||
value = Path().home()
|
value = Path().home()
|
||||||
# metadata.directory_path = value
|
# metadata.directory_path = value
|
||||||
return value
|
return value
|
||||||
|
|
||||||
@field_validator('database_path', mode="before")
|
@field_validator('database_path', mode="before")
|
||||||
@classmethod
|
@classmethod
|
||||||
def ensure_database_exists(cls, value, values):
|
def ensure_database_exists(cls, value, values):
|
||||||
@@ -196,7 +202,7 @@ class Settings(BaseSettings, extra="allow"):
|
|||||||
return value
|
return value
|
||||||
else:
|
else:
|
||||||
raise FileNotFoundError(f"Couldn't find database at {value}")
|
raise FileNotFoundError(f"Couldn't find database at {value}")
|
||||||
|
|
||||||
@field_validator('database_session', mode="before")
|
@field_validator('database_session', mode="before")
|
||||||
@classmethod
|
@classmethod
|
||||||
def create_database_session(cls, value, values):
|
def create_database_session(cls, value, values):
|
||||||
@@ -223,7 +229,7 @@ class Settings(BaseSettings, extra="allow"):
|
|||||||
else:
|
else:
|
||||||
raise FileNotFoundError("No database file found. Exiting program.")
|
raise FileNotFoundError("No database file found. Exiting program.")
|
||||||
logger.debug(f"Using {database_path} for database file.")
|
logger.debug(f"Using {database_path} for database file.")
|
||||||
engine = create_engine(f"sqlite:///{database_path}")#, echo=True, future=True)
|
engine = create_engine(f"sqlite:///{database_path}") #, echo=True, future=True)
|
||||||
session = Session(engine)
|
session = Session(engine)
|
||||||
# metadata.session = session
|
# metadata.session = session
|
||||||
return session
|
return session
|
||||||
@@ -240,19 +246,20 @@ class Settings(BaseSettings, extra="allow"):
|
|||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
self.set_from_db(db_path=kwargs['database_path'])
|
self.set_from_db(db_path=kwargs['database_path'])
|
||||||
|
|
||||||
def set_from_db(self, db_path:Path):
|
def set_from_db(self, db_path: Path):
|
||||||
if 'pytest' in sys.modules:
|
if 'pytest' in sys.modules:
|
||||||
config_items = dict(power_users=['lwark', 'styson', 'ruwang'])
|
config_items = dict(power_users=['lwark', 'styson', 'ruwang'])
|
||||||
else:
|
else:
|
||||||
session = Session(create_engine(f"sqlite:///{db_path}"))
|
session = Session(create_engine(f"sqlite:///{db_path}"))
|
||||||
config_items = session.execute(text("SELECT * FROM _configitem")).all()
|
config_items = session.execute(text("SELECT * FROM _configitem")).all()
|
||||||
session.close()
|
session.close()
|
||||||
config_items = {item[1]:json.loads(item[2]) for item in config_items}
|
config_items = {item[1]: json.loads(item[2]) for item in config_items}
|
||||||
for k, v in config_items.items():
|
for k, v in config_items.items():
|
||||||
if not hasattr(self, k):
|
if not hasattr(self, k):
|
||||||
self.__setattr__(k, v)
|
self.__setattr__(k, v)
|
||||||
|
|
||||||
def get_config(settings_path: Path|str|None=None) -> Settings:
|
|
||||||
|
def get_config(settings_path: Path | str | None = None) -> Settings:
|
||||||
"""
|
"""
|
||||||
Get configuration settings from path or default if blank.
|
Get configuration settings from path or default if blank.
|
||||||
|
|
||||||
@@ -262,36 +269,38 @@ def get_config(settings_path: Path|str|None=None) -> Settings:
|
|||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Settings: Pydantic settings object
|
Settings: Pydantic settings object
|
||||||
"""
|
"""
|
||||||
logger.debug(f"Creating settings...")
|
# logger.debug(f"Creating settings...")
|
||||||
if isinstance(settings_path, str):
|
if isinstance(settings_path, str):
|
||||||
settings_path = Path(settings_path)
|
settings_path = Path(settings_path)
|
||||||
|
|
||||||
# custom pyyaml constructor to join fields
|
# custom pyyaml constructor to join fields
|
||||||
def join(loader, node):
|
def join(loader, node):
|
||||||
seq = loader.construct_sequence(node)
|
seq = loader.construct_sequence(node)
|
||||||
return ''.join([str(i) for i in seq])
|
return ''.join([str(i) for i in seq])
|
||||||
|
|
||||||
# register the tag handler
|
# register the tag handler
|
||||||
yaml.add_constructor('!join', join)
|
yaml.add_constructor('!join', join)
|
||||||
logger.debug(f"Making directory: {CONFIGDIR.__str__()}")
|
|
||||||
# make directories
|
# make directories
|
||||||
try:
|
try:
|
||||||
CONFIGDIR.mkdir(parents=True)
|
CONFIGDIR.mkdir(parents=True)
|
||||||
except FileExistsError:
|
except FileExistsError:
|
||||||
pass
|
logger.warning(f"Config directory {CONFIGDIR} already exists.")
|
||||||
logger.debug(f"Making directory: {LOGDIR.__str__()}")
|
|
||||||
try:
|
try:
|
||||||
LOGDIR.mkdir(parents=True)
|
LOGDIR.mkdir(parents=True)
|
||||||
except FileExistsError:
|
except FileExistsError:
|
||||||
pass
|
logger.warning(f"Logging directory {LOGDIR} already exists.")
|
||||||
# if user hasn't defined config path in cli args
|
# NOTE: if user hasn't defined config path in cli args
|
||||||
if settings_path == None:
|
if settings_path == None:
|
||||||
# Check user .config/submissions directory
|
# NOTE: Check user .config/submissions directory
|
||||||
if CONFIGDIR.joinpath("config.yml").exists():
|
if CONFIGDIR.joinpath("config.yml").exists():
|
||||||
settings_path = CONFIGDIR.joinpath("config.yml")
|
settings_path = CONFIGDIR.joinpath("config.yml")
|
||||||
# Check user .submissions directory
|
# NOTE: Check user .submissions directory
|
||||||
elif Path.home().joinpath(".submissions", "config.yml").exists():
|
elif Path.home().joinpath(".submissions", "config.yml").exists():
|
||||||
settings_path = Path.home().joinpath(".submissions", "config.yml")
|
settings_path = Path.home().joinpath(".submissions", "config.yml")
|
||||||
# finally look in the local config
|
# NOTE: finally look in the local config
|
||||||
else:
|
else:
|
||||||
if check_if_app():
|
if check_if_app():
|
||||||
settings_path = Path(sys._MEIPASS).joinpath("files", "config.yml")
|
settings_path = Path(sys._MEIPASS).joinpath("files", "config.yml")
|
||||||
@@ -299,14 +308,14 @@ def get_config(settings_path: Path|str|None=None) -> Settings:
|
|||||||
settings_path = package_dir.joinpath('config.yml')
|
settings_path = package_dir.joinpath('config.yml')
|
||||||
with open(settings_path, "r") as dset:
|
with open(settings_path, "r") as dset:
|
||||||
default_settings = yaml.load(dset, Loader=yaml.Loader)
|
default_settings = yaml.load(dset, Loader=yaml.Loader)
|
||||||
# Tell program we need to copy the config.yml to the user directory
|
# NOTE: Tell program we need to copy the config.yml to the user directory
|
||||||
# copy settings to config directory
|
# NOTE: copy settings to config directory
|
||||||
return Settings(**copy_settings(settings_path=CONFIGDIR.joinpath("config.yml"), settings=default_settings))
|
return Settings(**copy_settings(settings_path=CONFIGDIR.joinpath("config.yml"), settings=default_settings))
|
||||||
else:
|
else:
|
||||||
# check if user defined path is directory
|
# NOTE: check if user defined path is directory
|
||||||
if settings_path.is_dir():
|
if settings_path.is_dir():
|
||||||
settings_path = settings_path.joinpath("config.yml")
|
settings_path = settings_path.joinpath("config.yml")
|
||||||
# check if user defined path is file
|
# NOTE: check if user defined path is file
|
||||||
elif settings_path.is_file():
|
elif settings_path.is_file():
|
||||||
settings_path = settings_path
|
settings_path = settings_path
|
||||||
else:
|
else:
|
||||||
@@ -314,11 +323,12 @@ def get_config(settings_path: Path|str|None=None) -> Settings:
|
|||||||
with open(settings_path, "r") as dset:
|
with open(settings_path, "r") as dset:
|
||||||
default_settings = yaml.load(dset, Loader=yaml.Loader)
|
default_settings = yaml.load(dset, Loader=yaml.Loader)
|
||||||
return Settings(**copy_settings(settings_path=settings_path, settings=default_settings))
|
return Settings(**copy_settings(settings_path=settings_path, settings=default_settings))
|
||||||
logger.debug(f"Using {settings_path} for config file.")
|
# logger.debug(f"Using {settings_path} for config file.")
|
||||||
with open(settings_path, "r") as stream:
|
with open(settings_path, "r") as stream:
|
||||||
settings = yaml.load(stream, Loader=yaml.Loader)
|
settings = yaml.load(stream, Loader=yaml.Loader)
|
||||||
return Settings(**settings)
|
return Settings(**settings)
|
||||||
|
|
||||||
|
|
||||||
# Logging formatters
|
# Logging formatters
|
||||||
|
|
||||||
class GroupWriteRotatingFileHandler(handlers.RotatingFileHandler):
|
class GroupWriteRotatingFileHandler(handlers.RotatingFileHandler):
|
||||||
@@ -334,13 +344,13 @@ class GroupWriteRotatingFileHandler(handlers.RotatingFileHandler):
|
|||||||
os.chmod(self.baseFilename, currMode | stat.S_IWGRP)
|
os.chmod(self.baseFilename, currMode | stat.S_IWGRP)
|
||||||
|
|
||||||
def _open(self):
|
def _open(self):
|
||||||
prevumask=os.umask(0o002)
|
prevumask = os.umask(0o002)
|
||||||
rtv=handlers.RotatingFileHandler._open(self)
|
rtv = handlers.RotatingFileHandler._open(self)
|
||||||
os.umask(prevumask)
|
os.umask(prevumask)
|
||||||
return rtv
|
return rtv
|
||||||
|
|
||||||
class CustomFormatter(logging.Formatter):
|
|
||||||
|
|
||||||
|
class CustomFormatter(logging.Formatter):
|
||||||
class bcolors:
|
class bcolors:
|
||||||
HEADER = '\033[95m'
|
HEADER = '\033[95m'
|
||||||
OKBLUE = '\033[94m'
|
OKBLUE = '\033[94m'
|
||||||
@@ -367,6 +377,7 @@ class CustomFormatter(logging.Formatter):
|
|||||||
formatter = logging.Formatter(log_fmt)
|
formatter = logging.Formatter(log_fmt)
|
||||||
return formatter.format(record)
|
return formatter.format(record)
|
||||||
|
|
||||||
|
|
||||||
class StreamToLogger(object):
|
class StreamToLogger(object):
|
||||||
"""
|
"""
|
||||||
Fake file-like stream object that redirects writes to a logger instance.
|
Fake file-like stream object that redirects writes to a logger instance.
|
||||||
@@ -381,31 +392,33 @@ class StreamToLogger(object):
|
|||||||
for line in buf.rstrip().splitlines():
|
for line in buf.rstrip().splitlines():
|
||||||
self.logger.log(self.log_level, line.rstrip())
|
self.logger.log(self.log_level, line.rstrip())
|
||||||
|
|
||||||
def setup_logger(verbosity:int=3):
|
|
||||||
|
def setup_logger(verbosity: int = 3):
|
||||||
"""
|
"""
|
||||||
Set logger levels using settings.
|
Set logger levels using settings.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
verbosit (int, optional): Level of verbosity desired 3 is highest. Defaults to 3.
|
verbosity (int, optional): Level of verbosity desired 3 is highest. Defaults to 3.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
logger: logger object
|
logger: logger object
|
||||||
"""
|
"""
|
||||||
logger = logging.getLogger("submissions")
|
logger = logging.getLogger("submissions")
|
||||||
logger.setLevel(logging.DEBUG)
|
logger.setLevel(logging.DEBUG)
|
||||||
# create file handler which logs even debug messages
|
# NOTE: create file handler which logs even debug messages
|
||||||
try:
|
try:
|
||||||
Path(LOGDIR).mkdir(parents=True)
|
Path(LOGDIR).mkdir(parents=True)
|
||||||
except FileExistsError:
|
except FileExistsError:
|
||||||
pass
|
logger.warning(f"Logging directory {LOGDIR} already exists.")
|
||||||
|
# NOTE: logging to file turned off due to repeated permission errors
|
||||||
# fh = GroupWriteRotatingFileHandler(LOGDIR.joinpath('submissions.log'), mode='a', maxBytes=100000, backupCount=3, encoding=None, delay=False)
|
# fh = GroupWriteRotatingFileHandler(LOGDIR.joinpath('submissions.log'), mode='a', maxBytes=100000, backupCount=3, encoding=None, delay=False)
|
||||||
# file logging will always be debug
|
# file logging will always be debug
|
||||||
# fh.setLevel(logging.DEBUG)
|
# fh.setLevel(logging.DEBUG)
|
||||||
# fh.name = "File"
|
# fh.name = "File"
|
||||||
# create console handler with a higher log level
|
# NOTE: create console handler with a higher log level
|
||||||
# create custom logger with STDERR -> log
|
# NOTE: create custom logger with STDERR -> log
|
||||||
ch = logging.StreamHandler(stream=sys.stdout)
|
ch = logging.StreamHandler(stream=sys.stdout)
|
||||||
# set logging level based on verbosity
|
# NOTE: set logging level based on verbosity
|
||||||
match verbosity:
|
match verbosity:
|
||||||
case 3:
|
case 3:
|
||||||
ch.setLevel(logging.DEBUG)
|
ch.setLevel(logging.DEBUG)
|
||||||
@@ -414,24 +427,26 @@ def setup_logger(verbosity:int=3):
|
|||||||
case 1:
|
case 1:
|
||||||
ch.setLevel(logging.WARNING)
|
ch.setLevel(logging.WARNING)
|
||||||
ch.name = "Stream"
|
ch.name = "Stream"
|
||||||
# create formatter and add it to the handlers
|
# NOTE: create formatter and add it to the handlers
|
||||||
# formatter = logging.Formatter('%(asctime)s - %(levelname)s - {%(pathname)s:%(lineno)d} - %(message)s')
|
|
||||||
formatter = CustomFormatter()
|
formatter = CustomFormatter()
|
||||||
# fh.setFormatter(formatter)
|
# fh.setFormatter(formatter)
|
||||||
ch.setFormatter(formatter)
|
ch.setFormatter(formatter)
|
||||||
# add the handlers to the logger
|
# NOTE: add the handlers to the logger
|
||||||
# logger.addHandler(fh)
|
# logger.addHandler(fh)
|
||||||
logger.addHandler(ch)
|
logger.addHandler(ch)
|
||||||
# Output exception and traceback to logger
|
|
||||||
|
# NOTE: Output exception and traceback to logger
|
||||||
def handle_exception(exc_type, exc_value, exc_traceback):
|
def handle_exception(exc_type, exc_value, exc_traceback):
|
||||||
if issubclass(exc_type, KeyboardInterrupt):
|
if issubclass(exc_type, KeyboardInterrupt):
|
||||||
sys.__excepthook__(exc_type, exc_value, exc_traceback)
|
sys.__excepthook__(exc_type, exc_value, exc_traceback)
|
||||||
return
|
return
|
||||||
logger.critical("Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback))
|
logger.critical("Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback))
|
||||||
|
|
||||||
sys.excepthook = handle_exception
|
sys.excepthook = handle_exception
|
||||||
return logger
|
return logger
|
||||||
|
|
||||||
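`setup_logger` configures the "submissions" logger with a stdout handler whose level follows `verbosity` (3 maps to DEBUG and 1 to WARNING in this hunk; the middle case sits in the elided lines) and installs an excepthook so uncaught exceptions are logged with their traceback. A minimal usage sketch:

```python
# from tools import setup_logger  # import path assumed
logger = setup_logger(verbosity=3)          # 3 -> DEBUG on the console, 1 -> WARNING

logger.debug("visible at verbosity 3")
logger.warning("visible at every verbosity level")

# Uncaught exceptions are routed through sys.excepthook to logger.critical, e.g.:
# raise RuntimeError("would be logged as 'Uncaught exception' with a traceback")
```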
def copy_settings(settings_path:Path, settings:dict) -> dict:
|
|
||||||
|
def copy_settings(settings_path: Path, settings: dict) -> dict:
|
||||||
"""
|
"""
|
||||||
copies relevant settings dictionary from the default config.yml to a new directory
|
copies relevant settings dictionary from the default config.yml to a new directory
|
||||||
|
|
||||||
@@ -441,8 +456,8 @@ def copy_settings(settings_path:Path, settings:dict) -> dict:
|
|||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
dict: output dictionary for use in first run
|
dict: output dictionary for use in first run
|
||||||
"""
|
"""
|
||||||
# if the current user is not a superuser remove the superusers entry
|
# NOTE: if the current user is not a superuser remove the superusers entry
|
||||||
if not getpass.getuser() in settings['super_users']:
|
if not getpass.getuser() in settings['super_users']:
|
||||||
del settings['super_users']
|
del settings['super_users']
|
||||||
if not getpass.getuser() in settings['power_users']:
|
if not getpass.getuser() in settings['power_users']:
|
||||||
@@ -452,40 +467,40 @@ def copy_settings(settings_path:Path, settings:dict) -> dict:
|
|||||||
yaml.dump(settings, f)
|
yaml.dump(settings, f)
|
||||||
return settings
|
return settings
|
||||||
|
|
||||||
|
|
||||||
def jinja_template_loading() -> Environment:
|
def jinja_template_loading() -> Environment:
|
||||||
"""
|
"""
|
||||||
Returns jinja2 template environment.
|
Returns jinja2 template environment.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Environment: jinja2 template environment.
|
Environment: jinja2 template environment.
|
||||||
"""
|
"""
|
||||||
# determine if pyinstaller launcher is being used
|
# NOTE: determine if pyinstaller launcher is being used
|
||||||
if check_if_app():
|
if check_if_app():
|
||||||
loader_path = Path(sys._MEIPASS).joinpath("files", "templates")
|
loader_path = Path(sys._MEIPASS).joinpath("files", "templates")
|
||||||
else:
|
else:
|
||||||
loader_path = Path(__file__).parent.joinpath('templates').absolute()#.__str__()
|
loader_path = Path(__file__).parent.joinpath('templates').absolute() #.__str__()
|
||||||
# jinja template loading
|
# NOTE: jinja template loading
|
||||||
loader = FileSystemLoader(loader_path)
|
loader = FileSystemLoader(loader_path)
|
||||||
env = Environment(loader=loader)
|
env = Environment(loader=loader)
|
||||||
env.globals['STATIC_PREFIX'] = loader_path.joinpath("static", "css")
|
env.globals['STATIC_PREFIX'] = loader_path.joinpath("static", "css")
|
||||||
return env
|
return env
|
||||||
|
|
||||||
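`jinja_template_loading` points a `FileSystemLoader` at the bundled `templates` directory (the PyInstaller `_MEIPASS` path when frozen, the package path otherwise) and exposes the static CSS folder as the `STATIC_PREFIX` global. A hedged usage sketch — the template name is invented, though the `sub['Plate Number']` context key mirrors the submission-details template shown earlier:

```python
# from tools import jinja_template_loading  # import path assumed
env = jinja_template_loading()

template = env.get_template("example_details.html")          # template name is illustrative
html = template.render(sub={"Plate Number": "RSL-24-0001"})
print(html[:200])
```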
|
|
||||||
def check_if_app() -> bool:
|
def check_if_app() -> bool:
|
||||||
"""
|
"""
|
||||||
Checks if the program is running from a pyinstaller-compiled bundle
|
Checks if the program is running from a pyinstaller-compiled bundle
|
||||||
|
|
||||||
Args:
|
|
||||||
ctx (dict, optional): Settings passed down from gui. Defaults to None.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
bool: True if running from pyinstaller. Else False.
|
bool: True if running from pyinstaller. Else False.
|
||||||
"""
|
"""
|
||||||
if getattr(sys, 'frozen', False):
|
if getattr(sys, 'frozen', False):
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def convert_well_to_row_column(input_str:str) -> Tuple[int, int]:
|
|
||||||
|
def convert_well_to_row_column(input_str: str) -> Tuple[int, int]:
|
||||||
"""
|
"""
|
||||||
Converts a typical alphanumeric well reference (e.g. "A2") to row, column
|
Converts a typical alphanumeric well reference (e.g. "A2") to row, column
|
||||||
|
|
||||||
@@ -494,22 +509,24 @@ def convert_well_to_row_column(input_str:str) -> Tuple[int, int]:
|
|||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Tuple[int, int]: row, column
|
Tuple[int, int]: row, column
|
||||||
"""
|
"""
|
||||||
row_keys = {v:k for k,v in row_map.items()}
|
row_keys = {v: k for k, v in row_map.items()}
|
||||||
try:
|
try:
|
||||||
row = int(row_keys[input_str[0].upper()])
|
row = int(row_keys[input_str[0].upper()])
|
||||||
column = int(input_str[1:])
|
column = int(input_str[1:])
|
||||||
except IndexError:
|
except IndexError:
|
||||||
return None, None
|
return None, None
|
||||||
return row, column
|
return row, column
|
||||||
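`convert_well_to_row_column` splits an alphanumeric well reference into a numeric row (via the module-level `row_map` letters A–H) and a column, falling back to `(None, None)` when there is nothing to index. A small illustration, assuming the helper is importable:

```python
# from tools import convert_well_to_row_column  # import path assumed
print(convert_well_to_row_column("A2"))    # (1, 2)   — row A is 1, column 2
print(convert_well_to_row_column("h12"))   # (8, 12)  — the row letter is upper-cased first
print(convert_well_to_row_column(""))      # (None, None) — empty input trips the IndexError guard
```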
|
|
||||||
|
|
||||||
def setup_lookup(func):
|
def setup_lookup(func):
|
||||||
"""
|
"""
|
||||||
Checks to make sure all args are allowed
|
Checks to make sure all args are allowed
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
func (_type_): _description_
|
func (_type_): wrapped function
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def wrapper(*args, **kwargs):
|
def wrapper(*args, **kwargs):
|
||||||
sanitized_kwargs = {}
|
sanitized_kwargs = {}
|
||||||
for k, v in locals()['kwargs'].items():
|
for k, v in locals()['kwargs'].items():
|
||||||
@@ -521,20 +538,23 @@ def setup_lookup(func):
|
|||||||
elif v is not None:
|
elif v is not None:
|
||||||
sanitized_kwargs[k] = v
|
sanitized_kwargs[k] = v
|
||||||
return func(*args, **sanitized_kwargs)
|
return func(*args, **sanitized_kwargs)
|
||||||
|
|
||||||
return wrapper
|
return wrapper
|
||||||
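`setup_lookup` wraps a query function so that only sanitised keyword arguments reach it; at minimum, kwargs whose value is `None` are dropped (the first branch of the loop sits outside this hunk). A hedged sketch of the effect on a stand-in query function:

```python
# from tools import setup_lookup  # import path assumed

@setup_lookup
def lookup_example(**kwargs):            # stand-in for a model's query() classmethod
    return kwargs

print(lookup_example(lot_number="X1", reagent_type=None))
# {'lot_number': 'X1'} — the reagent_type=None kwarg never reaches the function
```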
|
|
||||||
class Result(BaseModel):
|
|
||||||
|
|
||||||
|
class Result(BaseModel):
|
||||||
owner: str = Field(default="", validate_default=True)
|
owner: str = Field(default="", validate_default=True)
|
||||||
code: int = Field(default=0)
|
code: int = Field(default=0)
|
||||||
msg: str
|
msg: str
|
||||||
status: Literal["NoIcon", "Question", "Information", "Warning", "Critical"] = Field(default="NoIcon")
|
status: Literal["NoIcon", "Question", "Information", "Warning", "Critical"] = Field(default="NoIcon")
|
||||||
|
|
||||||
|
|
||||||
@field_validator('status', mode='before')
|
@field_validator('status', mode='before')
|
||||||
@classmethod
|
@classmethod
|
||||||
def to_title(cls, value:str):
|
def to_title(cls, value: str):
|
||||||
return value.title()
|
if value.lower().replace(" ", "") == "noicon":
|
||||||
|
return "NoIcon"
|
||||||
|
else:
|
||||||
|
return value.title()
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
def __repr__(self) -> str:
|
||||||
return f"Result({self.owner})"
|
return f"Result({self.owner})"
|
||||||
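The `to_title` validator above normalises whatever status string a caller passes: anything spelling "noicon" (case and spaces ignored) becomes `NoIcon`, everything else is title-cased so it lands on one of the allowed literals. A small illustration, assuming `Result` is importable:

```python
# from tools import Result  # import path assumed
print(Result(msg="Saved.", status="information").status)   # Information — title-cased into the Literal
print(Result(msg="Saved.", status="no icon").status)       # NoIcon — special-cased spelling
print(Result(msg="Saved.").status)                          # NoIcon — the field default applies
```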
@@ -546,15 +566,15 @@ class Result(BaseModel):
|
|||||||
def report(self):
|
def report(self):
|
||||||
from frontend.widgets.misc import AlertPop
|
from frontend.widgets.misc import AlertPop
|
||||||
return AlertPop(message=self.msg, status=self.status, owner=self.owner)
|
return AlertPop(message=self.msg, status=self.status, owner=self.owner)
|
||||||
|
|
||||||
class Report(BaseModel):
|
|
||||||
|
|
||||||
|
|
||||||
|
class Report(BaseModel):
|
||||||
results: List[Result] = Field(default=[])
|
results: List[Result] = Field(default=[])
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return f"Report(result_count:{len(self.results)})"
|
return f"Report(result_count:{len(self.results)})"
|
||||||
|
|
||||||
def add_result(self, result:Result|Report|None):
|
def add_result(self, result: Result | Report | None):
|
||||||
match result:
|
match result:
|
||||||
case Result():
|
case Result():
|
||||||
logger.debug(f"Adding {result} to results.")
|
logger.debug(f"Adding {result} to results.")
|
||||||
@@ -568,36 +588,41 @@ class Report(BaseModel):
|
|||||||
logger.debug(f"Adding {res} from to results.")
|
logger.debug(f"Adding {res} from to results.")
|
||||||
self.results.append(res)
|
self.results.append(res)
|
||||||
case _:
|
case _:
|
||||||
pass
|
logger.error(f"Unknown variable type: {type(result)}")
|
||||||
|
|
||||||
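`Report.add_result` accepts a single `Result`, a whole nested `Report` (whose results are merged in one by one), or anything else, which now falls through to an error log instead of a silent `pass`. A short sketch of merging reports, assuming the classes are importable:

```python
# from tools import Report, Result  # import path assumed
report = Report()
report.add_result(Result(msg="Imported 96 samples.", status="Information"))
report.add_result(None)                       # unknown type: logged and skipped

sub_report = Report()
sub_report.add_result(Result(msg="Missing reagent lot.", status="Warning"))
report.add_result(sub_report)                 # nested Report: its results are merged in

print(report)                                 # Report(result_count:2)
```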
def rreplace(s, old, new):
|
|
||||||
return (s[::-1].replace(old[::-1],new[::-1], 1))[::-1]
|
|
||||||
|
|
||||||
def html_to_pdf(html, output_file:Path|str):
|
|
||||||
|
def rreplace(s, old, new):
|
||||||
|
return (s[::-1].replace(old[::-1], new[::-1], 1))[::-1]
|
||||||
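`rreplace` swaps only the last occurrence of a substring by reversing the string, replacing once, and reversing back. A quick comparison against plain `str.replace` (the sample strings are invented):

```python
def rreplace(s, old, new):
    # same one-liner as above: reverse, replace the first (i.e. originally last) hit, reverse back
    return (s[::-1].replace(old[::-1], new[::-1], 1))[::-1]

print(rreplace("RSL-ww-2024-01-01", "-01", "-02"))   # RSL-ww-2024-01-02 — only the final "-01" changes
print("RSL-ww-2024-01-01".replace("-01", "-02"))     # RSL-ww-2024-02-02 — plain replace changes both
```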
|
|
||||||
|
|
||||||
|
def html_to_pdf(html, output_file: Path | str):
|
||||||
if isinstance(output_file, str):
|
if isinstance(output_file, str):
|
||||||
output_file = Path(output_file)
|
output_file = Path(output_file)
|
||||||
# document = QTextDocument()
|
|
||||||
document = QWebEngineView()
|
document = QWebEngineView()
|
||||||
document.setHtml(html)
|
document.setHtml(html)
|
||||||
printer = QPrinter(QPrinter.PrinterMode.HighResolution)
|
printer = QPrinter(QPrinter.PrinterMode.HighResolution)
|
||||||
printer.setOutputFormat(QPrinter.OutputFormat.PdfFormat)
|
printer.setOutputFormat(QPrinter.OutputFormat.PdfFormat)
|
||||||
printer.setOutputFileName(output_file.absolute().__str__())
|
printer.setOutputFileName(output_file.absolute().__str__())
|
||||||
printer.setPageSize(QPageSize(QPageSize.PageSizeId.A4))
|
printer.setPageSize(QPageSize(QPageSize.PageSizeId.A4))
|
||||||
document.print(printer)
|
document.print(printer)
|
||||||
|
|
||||||
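`html_to_pdf` loads an HTML string into a `QWebEngineView` and prints it to an A4 PDF through `QPrinter`. A hedged usage sketch — a `QApplication` must exist first, `setHtml` loads asynchronously (a robust caller would wait for `loadFinished` before printing), and on some Qt versions the web-engine module has to be imported before the application object is created; the markup and filename below are invented:

```python
import sys
from PyQt6.QtWidgets import QApplication
# from tools import html_to_pdf  # import path assumed

app = QApplication(sys.argv)                   # Qt widgets require an application instance
html = "<html><body><h1>Submission Details for RSL-24-0001</h1></body></html>"
html_to_pdf(html, "submission_details.pdf")    # writes an A4 PDF into the working directory
```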
def remove_key_from_list_of_dicts(input:list, key:str):
|
|
||||||
|
def remove_key_from_list_of_dicts(input: list, key: str):
|
||||||
for item in input:
|
for item in input:
|
||||||
del item[key]
|
del item[key]
|
||||||
return input
|
return input
|
||||||
|
|
||||||
def workbook_2_csv(worksheet: Worksheet, filename:Path):
|
|
||||||
|
def workbook_2_csv(worksheet: Worksheet, filename: Path):
|
||||||
with open(filename, 'w', newline="") as f:
|
with open(filename, 'w', newline="") as f:
|
||||||
c = csv.writer(f)
|
c = csv.writer(f)
|
||||||
for r in worksheet.rows:
|
for r in worksheet.rows:
|
||||||
c.writerow([cell.value for cell in r])
|
c.writerow([cell.value for cell in r])
|
||||||
|
|
||||||
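`workbook_2_csv` streams every row of an openpyxl worksheet into a CSV file, one `writerow` per spreadsheet row. A short usage sketch — the workbook filename is invented:

```python
# from tools import workbook_2_csv  # import path assumed
from pathlib import Path
from openpyxl import load_workbook

wb = load_workbook("example_submission.xlsx")   # filename is illustrative
workbook_2_csv(worksheet=wb.active, filename=Path("example_submission.csv"))
```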
|
|
||||||
ctx = get_config(None)
|
ctx = get_config(None)
|
||||||
|
|
||||||
|
|
||||||
def is_power_user() -> bool:
|
def is_power_user() -> bool:
|
||||||
try:
|
try:
|
||||||
check = getpass.getuser() in ctx.power_users
|
check = getpass.getuser() in ctx.power_users
|
||||||
@@ -605,13 +630,14 @@ def is_power_user() -> bool:
|
|||||||
check = False
|
check = False
|
||||||
return check
|
return check
|
||||||
|
|
||||||
|
|
||||||
def check_authorization(func):
|
def check_authorization(func):
|
||||||
"""
|
"""
|
||||||
Decorator to check if user is authorized to access function
|
Decorator to check if user is authorized to access function
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
func (_type_): Function to be used.
|
func (_type_): Function to be used.
|
||||||
"""
|
"""
|
||||||
def wrapper(*args, **kwargs):
|
def wrapper(*args, **kwargs):
|
||||||
logger.debug(f"Checking authorization")
|
logger.debug(f"Checking authorization")
|
||||||
if is_power_user():
|
if is_power_user():
|
||||||
|
|||||||