Split Concentration controls on the chart so they are individually selectable.
@@ -1,3 +1,7 @@
+# 202504.03
+
+- Split Concentration controls on the chart so they are individually selectable.
+
 # 202504.02

 - Added cscscience gitlab remote.
BIN requirements.txt (Binary file not shown.)
@@ -11,9 +11,7 @@ from sqlalchemy.ext.declarative import declared_attr
 from sqlalchemy.exc import ArgumentError
 from typing import Any, List
 from pathlib import Path
-
 from sqlalchemy.orm.relationships import _RelationshipDeclared
-
 from tools import report_result, list_sort_dict

 # NOTE: Load testing environment
@@ -48,7 +46,7 @@ class BaseClass(Base):
     """
     __abstract__ = True #: NOTE: Will not be added to DB as a table

-    __table_args__ = {'extend_existing': True} #: Will only add new columns
+    __table_args__ = {'extend_existing': True} #: NOTE Will only add new columns

     singles = ['id']
     omni_removes = ["id", 'submissions', "omnigui_class_dict", "omnigui_instance_dict"]
@@ -308,7 +306,6 @@ class BaseClass(Base):
             dicto = {'id': dicto.pop('id'), **dicto}
         except KeyError:
             pass
-        # logger.debug(f"{self.__class__.__name__} omnigui dict:\n\n{pformat(dicto)}")
         return dicto

     @classproperty
@@ -337,11 +334,6 @@ class BaseClass(Base):
         """
         return dict()

-    @classmethod
-    def relevant_relationships(cls, relationship_instance):
-        query_kwargs = {relationship_instance.query_alias: relationship_instance}
-        return cls.query(**query_kwargs)
-
     def check_all_attributes(self, attributes: dict) -> bool:
         """
         Checks this instance against a dictionary of attributes to determine if they are a match.
@@ -352,14 +344,14 @@ class BaseClass(Base):
         Returns:
             bool: If a single unequivocal value is found will be false, else true.
         """
-        logger.debug(f"Incoming attributes: {attributes}")
+        # logger.debug(f"Incoming attributes: {attributes}")
         for key, value in attributes.items():
             if value.lower() == "none":
                 value = None
-            logger.debug(f"Attempting to grab attribute: {key}")
+            # logger.debug(f"Attempting to grab attribute: {key}")
             self_value = getattr(self, key)
             class_attr = getattr(self.__class__, key)
-            logger.debug(f"Self value: {self_value}, class attr: {class_attr} of type: {type(class_attr)}")
+            # logger.debug(f"Self value: {self_value}, class attr: {class_attr} of type: {type(class_attr)}")
             if isinstance(class_attr, property):
                 filter = "property"
             else:
@@ -379,7 +371,7 @@ class BaseClass(Base):
             case "property":
                 pass
             case _RelationshipDeclared():
-                logger.debug(f"Checking {self_value}")
+                # logger.debug(f"Checking {self_value}")
                 try:
                     self_value = self_value.name
                 except AttributeError:
@@ -387,19 +379,18 @@ class BaseClass(Base):
                     if class_attr.property.uselist:
                         self_value = self_value.__str__()
                 try:
-                    logger.debug(f"Check if {self_value.__class__} is subclass of {self.__class__}")
+                    # logger.debug(f"Check if {self_value.__class__} is subclass of {self.__class__}")
                     check = issubclass(self_value.__class__, self.__class__)
                 except TypeError as e:
                     logger.error(f"Couldn't check if {self_value.__class__} is subclass of {self.__class__} due to {e}")
                     check = False
                 if check:
-                    logger.debug(f"Checking for subclass name.")
+                    # logger.debug(f"Checking for subclass name.")
                     self_value = self_value.name
-            logger.debug(
-                f"Checking self_value {self_value} of type {type(self_value)} against attribute {value} of type {type(value)}")
+            # logger.debug(f"Checking self_value {self_value} of type {type(self_value)} against attribute {value} of type {type(value)}")
             if self_value != value:
                 output = False
-                logger.debug(f"Value {key} is False, returning.")
+                # logger.debug(f"Value {key} is False, returning.")
                 return output
         return True

@@ -444,7 +435,6 @@ class BaseClass(Base):
                     value = value[0]
                 else:
                     raise ValueError("Object is too long to parse a single value.")
-                # value = value
                 return super().__setattr__(key, value)
             case _:
                 return super().__setattr__(key, value)
@@ -454,6 +444,32 @@ class BaseClass(Base):
     def delete(self):
         logger.error(f"Delete has not been implemented for {self.__class__.__name__}")

+    def rectify_query_date(input_date, eod: bool = False) -> str:
+        """
+        Converts input into a datetime string for querying purposes
+
+        Args:
+            eod (bool, optional): Whether to use max time to indicate end of day.
+            input_date ():
+
+        Returns:
+            datetime: properly formated datetime
+        """
+        match input_date:
+            case datetime() | date():
+                output_date = input_date  #.strftime("%Y-%m-%d %H:%M:%S")
+            case int():
+                output_date = datetime.fromordinal(
+                    datetime(1900, 1, 1).toordinal() + input_date - 2)  #.date().strftime("%Y-%m-%d %H:%M:%S")
+            case _:
+                output_date = parse(input_date)  #.strftime("%Y-%m-%d %H:%M:%S")
+        if eod:
+            addition_time = datetime.max.time()
+        else:
+            addition_time = datetime.min.time()
+        output_date = datetime.combine(output_date, addition_time).strftime("%Y-%m-%d %H:%M:%S")
+        return output_date
+

 class ConfigItem(BaseClass):
     """
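Note on the new helper above: a minimal usage sketch, assuming the function stays on BaseClass as the hunk shows and that dateutil's parse is available in that module. The values and expected outputs are illustrative only.

    from datetime import date

    # String and date/datetime inputs are normalized to the same query-friendly format.
    BaseClass.rectify_query_date(date(2025, 4, 1))           # "2025-04-01 00:00:00"
    BaseClass.rectify_query_date("2025-04-01", eod=True)     # "2025-04-01 23:59:59" (end of day)
    # Integers are treated as 1900-based spreadsheet serial numbers and converted to a date first.
    BaseClass.rectify_query_date(45748)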
@@ -2,7 +2,6 @@
 All control related models.
 """
 from __future__ import annotations
-
 import itertools
 from pprint import pformat
 from PyQt6.QtWidgets import QWidget, QCheckBox, QLabel
@@ -13,10 +12,9 @@ import logging, re
 from operator import itemgetter
 from . import BaseClass
 from tools import setup_lookup, report_result, Result, Report, Settings, get_unique_values_in_df_column, super_splitter, \
-    rectify_query_date
+    flatten_list, timer
 from datetime import date, datetime, timedelta
 from typing import List, Literal, Tuple, Generator
-from dateutil.parser import parse
 from re import Pattern

 logger = logging.getLogger(f"submissions.{__name__}")
@@ -31,9 +29,6 @@ class ControlType(BaseClass):
     targets = Column(JSON) #: organisms checked for
     instances = relationship("Control", back_populates="controltype") #: control samples created of this type.

-    # def __repr__(self) -> str:
-    #     return f"<ControlType({self.name})>"
-
     @classmethod
     @setup_lookup
     def query(cls,
@@ -113,6 +108,7 @@ class ControlType(BaseClass):
             Pattern: Constructed pattern
         """
         strings = list(set([super_splitter(item, "-", 0) for item in cls.get_positive_control_types(control_type)]))
+        # NOTE: This will build a string like ^(ATCC49226|MCS)-.*
         return re.compile(rf"(^{'|^'.join(strings)})-.*", flags=re.IGNORECASE)

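A small self-contained illustration of the pattern the new NOTE describes; the control-type names below are invented stand-ins for whatever get_positive_control_types returns.

    import re

    strings = ["ATCC49226", "MCS"]
    pattern = re.compile(rf"(^{'|^'.join(strings)})-.*", flags=re.IGNORECASE)
    # pattern.pattern == "(^ATCC49226|^MCS)-.*"
    assert pattern.match("atcc49226-2024-01")    # prefixes match case-insensitively
    assert not pattern.match("EN-blank-01")      # anything without a listed prefix is rejected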
@@ -159,7 +155,7 @@ class Control(BaseClass):
         Lookup control objects in the database based on a number of parameters.

         Args:
-            submission_type (str | None, optional): Submission type associated with control. Defaults to None.
+            submissiontype (str | None, optional): Submission type associated with control. Defaults to None.
             subtype (str | None, optional): Control subtype, eg IridaControl. Defaults to None.
             start_date (date | str | int | None, optional): Beginning date to search by. Defaults to 2023-01-01 if end_date not None.
             end_date (date | str | int | None, optional): End date to search by. Defaults to today if start_date not None.
@@ -202,30 +198,8 @@ class Control(BaseClass):
             logger.warning(f"End date with no start date, using 90 days ago.")
             start_date = date.today() - timedelta(days=90)
         if start_date is not None:
-            # match start_date:
-            #     case datetime():
-            #         start_date = start_date.strftime("%Y-%m-%d %H:%M:%S")
-            #     case date():
-            #         start_date = datetime.combine(start_date, datetime.min.time())
-            #         start_date = start_date.strftime("%Y-%m-%d %H:%M:%S")
-            #     case int():
-            #         start_date = datetime.fromordinal(
-            #             datetime(1900, 1, 1).toordinal() + start_date - 2).date().strftime("%Y-%m-%d %H:%M:%S")
-            #     case _:
-            #         start_date = parse(start_date).strftime("%Y-%m-%d %H:%M:%S")
-            start_date = rectify_query_date(start_date)
-            end_date = rectify_query_date(end_date, eod=True)
-            # match end_date:
-            #     case datetime():
-            #         end_date = end_date.strftime("%Y-%m-%d %H:%M:%S")
-            #     case date():
-            #         end_date = datetime.combine(end_date, datetime.max.time())
-            #         end_date = end_date.strftime("%Y-%m-%d %H:%M:%S")
-            #     case int():
-            #         end_date = datetime.fromordinal(datetime(1900, 1, 1).toordinal() + end_date - 2).date().strftime(
-            #             "%Y-%m-%d %H:%M:%S")
-            #     case _:
-            #         end_date = parse(end_date).strftime("%Y-%m-%d %H:%M:%S")
+            start_date = cls.rectify_query_date(start_date)
+            end_date = cls.rectify_query_date(end_date, eod=True)
             query = query.filter(cls.submitted_date.between(start_date, end_date))
         match name:
             case str():
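With the dead commented branches gone, the date handling in Control.query reduces to one sketchable pattern (a sketch of the flow only, not the full method; the 90-day fallback and column name are as in the hunk):

    # Both bounds are normalized to "YYYY-MM-DD HH:MM:SS" strings before filtering.
    if end_date is not None and start_date is None:
        start_date = date.today() - timedelta(days=90)              # warn and fall back to a 90-day window
    if start_date is not None:
        start_date = cls.rectify_query_date(start_date)              # midnight on the start day
        end_date = cls.rectify_query_date(end_date, eod=True)        # 23:59:59 on the end day
        query = query.filter(cls.submitted_date.between(start_date, end_date))   # inclusive range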
@@ -372,7 +346,8 @@ class PCRControl(Control):

     def to_pydantic(self):
         from backend.validators import PydPCRControl
-        return PydPCRControl(**self.to_sub_dict(), controltype_name=self.controltype_name,
+        return PydPCRControl(**self.to_sub_dict(),
+                             controltype_name=self.controltype_name,
                              submission_id=self.submission_id)

@@ -565,7 +540,8 @@ class IridaControl(Control):
                                              consolidate=consolidate) for
                 control in controls]
         # NOTE: flatten data to one dimensional list
-        data = [item for sublist in data for item in sublist]
+        # data = [item for sublist in data for item in sublist]
+        data = flatten_list(data)
         if not data:
             report.add_result(Result(status="Critical", msg="No data found for controls in given date range."))
             return report, None
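flatten_list itself is not shown in this commit; based on the comprehension it replaces, its behaviour is presumably equivalent to this one-level flatten (name and signature assumed):

    from typing import Iterable, List

    def flatten_list(nested: Iterable[Iterable]) -> List:
        """Collapse one level of nesting: [[1, 2], [3]] -> [1, 2, 3]."""
        return [item for sublist in nested for item in sublist]

    assert flatten_list([["a", "b"], ["c"]]) == ["a", "b", "c"]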
@@ -731,11 +707,11 @@ class IridaControl(Control):
         Returns:
             DataFrame: dataframe with originals removed in favour of repeats.
         """
-        if 'rerun_regex' in ctx:
+        if 'rerun_regex' in ctx.model_extra:
             sample_names = get_unique_values_in_df_column(df, column_name="name")
             rerun_regex = re.compile(fr"{ctx.rerun_regex}")
             exclude = [re.sub(rerun_regex, "", sample) for sample in sample_names if rerun_regex.search(sample)]
-            df = df[df.name not in exclude]
+            df = df[~df.name.isin(exclude)]
         return df

     def to_pydantic(self) -> "PydIridaControl":
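Two things change in this hunk: the guard now looks in ctx.model_extra (the settings object's extra fields, assuming ctx is a pydantic model), and the row filter is a genuine bug fix. `df.name not in exclude` truth-tests a whole pandas Series and raises, rather than filtering row by row; the replacement builds a boolean mask. A self-contained illustration with made-up sample names:

    import pandas as pd

    df = pd.DataFrame({"name": ["EN1-220101", "EN1-220101-R", "MCS-1"]})
    exclude = ["EN1-220101"]

    # df[df.name not in exclude]        # ValueError: the truth value of a Series is ambiguous
    kept = df[~df.name.isin(exclude)]   # keep rows whose name is not in the exclude list
    assert kept.name.tolist() == ["EN1-220101-R", "MCS-1"]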
@@ -2,8 +2,7 @@
 All kit and reagent related models
 """
 from __future__ import annotations
-import json, zipfile, yaml, logging, re
-import sys
+import json, zipfile, yaml, logging, re, sys
 from pprint import pformat
 from sqlalchemy import Column, String, TIMESTAMP, JSON, INTEGER, ForeignKey, Interval, Table, FLOAT, BLOB
 from sqlalchemy.orm import relationship, validates, Query
@@ -11,7 +10,7 @@ from sqlalchemy.ext.associationproxy import association_proxy
 from sqlalchemy.ext.hybrid import hybrid_property
 from datetime import date, datetime, timedelta
 from tools import check_authorization, setup_lookup, Report, Result, check_regex_match, yaml_regex_creator, timezone
-from typing import List, Literal, Generator, Any, Tuple, Dict, AnyStr
+from typing import List, Literal, Generator, Any, Tuple
 from pandas import ExcelFile
 from pathlib import Path
 from . import Base, BaseClass, Organization, LogMixin
@@ -136,18 +135,18 @@ class KitType(BaseClass):
         return self.used_for

     def get_reagents(self,
-                     required: bool = False,
+                     required_only: bool = False,
                      submission_type: str | SubmissionType | None = None
                      ) -> Generator[ReagentRole, None, None]:
         """
         Return ReagentTypes linked to kit through KitTypeReagentTypeAssociation.

         Args:
-            required (bool, optional): If true only return required types. Defaults to False.
+            required_only (bool, optional): If true only return required types. Defaults to False.
             submission_type (str | Submissiontype | None, optional): Submission type to narrow results. Defaults to None.

         Returns:
-            Generator[ReagentRole, None, None]: List of reagents linked to this kit.
+            Generator[ReagentRole, None, None]: List of reagent roles linked to this kit.
         """
         match submission_type:
             case SubmissionType():
@@ -158,7 +157,7 @@ class KitType(BaseClass):
                                          item.submission_type.name == submission_type]
             case _:
                 relevant_associations = [item for item in self.kit_reagentrole_associations]
-        if required:
+        if required_only:
             return (item.reagent_role for item in relevant_associations if item.required == 1)
         else:
             return (item.reagent_role for item in relevant_associations)
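Call sites only need the keyword rename; a hedged sketch (kit and submission-type names are hypothetical), remembering that the method returns a generator:

    kit = KitType.query(name="Example Extraction Kit")    # hypothetical kit name
    required_roles = list(kit.get_reagents(required_only=True, submission_type="Bacterial Culture"))
    all_roles = list(kit.get_reagents())                  # required and optional roles alike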
@@ -168,7 +167,6 @@ class KitType(BaseClass):
         Creates map of locations in Excel workbook for a SubmissionType

         Args:
-            new_kit ():
             submission_type (str | SubmissionType): Submissiontype.name

         Returns:
@@ -240,7 +238,7 @@ class KitType(BaseClass):

         Args:
             name (str, optional): Name of desired kit (returns single instance). Defaults to None.
-            used_for (str | Submissiontype | None, optional): Submission type the kit is used for. Defaults to None.
+            submissiontype (str | Submissiontype | None, optional): Submission type the kit is used for. Defaults to None.
             id (int | None, optional): Kit id in the database. Defaults to None.
             limit (int, optional): Maximum number of results to return (0 = all). Defaults to 0.

@@ -276,108 +274,108 @@ class KitType(BaseClass):
     def save(self):
         super().save()

-    def to_export_dict(self, submission_type: SubmissionType) -> dict:
-        """
-        Creates dictionary for exporting to yml used in new SubmissionType Construction
-
-        Args:
-            submission_type (SubmissionType): SubmissionType of interest.
-
-        Returns:
-            dict: Dictionary containing relevant info for SubmissionType construction
-        """
-        base_dict = dict(name=self.name, reagent_roles=[], equipment_roles=[])
-        for key, value in self.construct_xl_map_for_use(submission_type=submission_type):
-            try:
-                assoc = next(item for item in self.kit_reagentrole_associations if item.reagent_role.name == key)
-            except StopIteration as e:
-                continue
-            for kk, vv in assoc.to_export_dict().items():
-                value[kk] = vv
-            base_dict['reagent_roles'].append(value)
-        for key, value in submission_type.construct_field_map("equipment"):
-            try:
-                assoc = next(item for item in submission_type.submissiontype_equipmentrole_associations if
-                             item.equipment_role.name == key)
-            except StopIteration:
-                continue
-            for kk, vv in assoc.to_export_dict(extraction_kit=self).items():
-                value[kk] = vv
-            base_dict['equipment_roles'].append(value)
-        return base_dict
-
-    @classmethod
-    def import_from_yml(cls, submission_type: str | SubmissionType, filepath: Path | str | None = None,
-                        import_dict: dict | None = None) -> KitType:
-        if isinstance(submission_type, str):
-            submission_type = SubmissionType.query(name=submission_type)
-        if filepath:
-            yaml.add_constructor("!regex", yaml_regex_creator)
-            if isinstance(filepath, str):
-                filepath = Path(filepath)
-            if not filepath.exists():
-                logging.critical(f"Given file could not be found.")
-                return None
-            with open(filepath, "r") as f:
-                if filepath.suffix == ".json":
-                    import_dict = json.load(fp=f)
-                elif filepath.suffix == ".yml":
-                    import_dict = yaml.load(stream=f, Loader=yaml.Loader)
-                else:
-                    raise Exception(f"Filetype {filepath.suffix} not supported.")
-        new_kit = KitType.query(name=import_dict['kit_type']['name'])
-        if not new_kit:
-            new_kit = KitType(name=import_dict['kit_type']['name'])
-        for role in import_dict['kit_type']['reagent_roles']:
-            new_role = ReagentRole.query(name=role['role'])
-            if new_role:
-                check = input(f"Found existing role: {new_role.name}. Use this? [Y/n]: ")
-                if check.lower() == "n":
-                    new_role = None
-                else:
-                    pass
-            if not new_role:
-                eol = timedelta(role['extension_of_life'])
-                new_role = ReagentRole(name=role['role'], eol_ext=eol)
-            uses = dict(expiry=role['expiry'], lot=role['lot'], name=role['name'], sheet=role['sheet'])
-            ktrr_assoc = KitTypeReagentRoleAssociation(kit_type=new_kit, reagent_role=new_role, uses=uses)
-            ktrr_assoc.submission_type = submission_type
-            ktrr_assoc.required = role['required']
-        ktst_assoc = SubmissionTypeKitTypeAssociation(
-            kit_type=new_kit,
-            submission_type=submission_type,
-            mutable_cost_sample=import_dict['mutable_cost_sample'],
-            mutable_cost_column=import_dict['mutable_cost_column'],
-            constant_cost=import_dict['constant_cost']
-        )
-        for role in import_dict['kit_type']['equipment_roles']:
-            new_role = EquipmentRole.query(name=role['role'])
-            if new_role:
-                check = input(f"Found existing role: {new_role.name}. Use this? [Y/n]: ")
-                if check.lower() == "n":
-                    new_role = None
-                else:
-                    pass
-            if not new_role:
-                new_role = EquipmentRole(name=role['role'])
-                for equipment in Equipment.assign_equipment(equipment_role=new_role):
-                    new_role.instances.append(equipment)
-            ster_assoc = SubmissionTypeEquipmentRoleAssociation(submission_type=submission_type,
-                                                                equipment_role=new_role)
-            try:
-                uses = dict(name=role['name'], process=role['process'], sheet=role['sheet'],
-                            static=role['static'])
-            except KeyError:
-                uses = None
-            ster_assoc.uses = uses
-            for process in role['processes']:
-                new_process = Process.query(name=process)
-                if not new_process:
-                    new_process = Process(name=process)
-                new_process.submission_types.append(submission_type)
-                new_process.kit_types.append(new_kit)
-                new_process.equipment_roles.append(new_role)
-        return new_kit
+    # def to_export_dict(self, submission_type: SubmissionType) -> dict:
+    #     """
+    #     Creates dictionary for exporting to yml used in new SubmissionType Construction
+    #
+    #     Args:
+    #         submission_type (SubmissionType): SubmissionType of interest.
+    #
+    #     Returns:
+    #         dict: Dictionary containing relevant info for SubmissionType construction
+    #     """
+    #     base_dict = dict(name=self.name, reagent_roles=[], equipment_roles=[])
+    #     for key, value in self.construct_xl_map_for_use(submission_type=submission_type):
+    #         try:
+    #             assoc = next(item for item in self.kit_reagentrole_associations if item.reagent_role.name == key)
+    #         except StopIteration as e:
+    #             continue
+    #         for kk, vv in assoc.to_export_dict().items():
+    #             value[kk] = vv
+    #         base_dict['reagent_roles'].append(value)
+    #     for key, value in submission_type.construct_field_map("equipment"):
+    #         try:
+    #             assoc = next(item for item in submission_type.submissiontype_equipmentrole_associations if
+    #                          item.equipment_role.name == key)
+    #         except StopIteration:
+    #             continue
+    #         for kk, vv in assoc.to_export_dict(extraction_kit=self).items():
+    #             value[kk] = vv
+    #         base_dict['equipment_roles'].append(value)
+    #     return base_dict
+
+    # @classmethod
+    # def import_from_yml(cls, submission_type: str | SubmissionType, filepath: Path | str | None = None,
+    #                     import_dict: dict | None = None) -> KitType:
+    #     if isinstance(submission_type, str):
+    #         submission_type = SubmissionType.query(name=submission_type)
+    #     if filepath:
+    #         yaml.add_constructor("!regex", yaml_regex_creator)
+    #         if isinstance(filepath, str):
+    #             filepath = Path(filepath)
+    #         if not filepath.exists():
+    #             logging.critical(f"Given file could not be found.")
+    #             return None
+    #         with open(filepath, "r") as f:
+    #             if filepath.suffix == ".json":
+    #                 import_dict = json.load(fp=f)
+    #             elif filepath.suffix == ".yml":
+    #                 import_dict = yaml.load(stream=f, Loader=yaml.Loader)
+    #             else:
+    #                 raise Exception(f"Filetype {filepath.suffix} not supported.")
+    #     new_kit = KitType.query(name=import_dict['kit_type']['name'])
+    #     if not new_kit:
+    #         new_kit = KitType(name=import_dict['kit_type']['name'])
+    #     for role in import_dict['kit_type']['reagent_roles']:
+    #         new_role = ReagentRole.query(name=role['role'])
+    #         if new_role:
+    #             check = input(f"Found existing role: {new_role.name}. Use this? [Y/n]: ")
+    #             if check.lower() == "n":
+    #                 new_role = None
+    #             else:
+    #                 pass
+    #         if not new_role:
+    #             eol = timedelta(role['extension_of_life'])
+    #             new_role = ReagentRole(name=role['role'], eol_ext=eol)
+    #         uses = dict(expiry=role['expiry'], lot=role['lot'], name=role['name'], sheet=role['sheet'])
+    #         ktrr_assoc = KitTypeReagentRoleAssociation(kit_type=new_kit, reagent_role=new_role, uses=uses)
+    #         ktrr_assoc.submission_type = submission_type
+    #         ktrr_assoc.required = role['required']
+    #     ktst_assoc = SubmissionTypeKitTypeAssociation(
+    #         kit_type=new_kit,
+    #         submission_type=submission_type,
+    #         mutable_cost_sample=import_dict['mutable_cost_sample'],
+    #         mutable_cost_column=import_dict['mutable_cost_column'],
+    #         constant_cost=import_dict['constant_cost']
+    #     )
+    #     for role in import_dict['kit_type']['equipment_roles']:
+    #         new_role = EquipmentRole.query(name=role['role'])
+    #         if new_role:
+    #             check = input(f"Found existing role: {new_role.name}. Use this? [Y/n]: ")
+    #             if check.lower() == "n":
+    #                 new_role = None
+    #             else:
+    #                 pass
+    #         if not new_role:
+    #             new_role = EquipmentRole(name=role['role'])
+    #             for equipment in Equipment.assign_equipment(equipment_role=new_role):
+    #                 new_role.instances.append(equipment)
+    #         ster_assoc = SubmissionTypeEquipmentRoleAssociation(submission_type=submission_type,
+    #                                                             equipment_role=new_role)
+    #         try:
+    #             uses = dict(name=role['name'], process=role['process'], sheet=role['sheet'],
+    #                         static=role['static'])
+    #         except KeyError:
+    #             uses = None
+    #         ster_assoc.uses = uses
+    #         for process in role['processes']:
+    #             new_process = Process.query(name=process)
+    #             if not new_process:
+    #                 new_process = Process(name=process)
+    #             new_process.submission_types.append(submission_type)
+    #             new_process.kit_types.append(new_kit)
+    #             new_process.equipment_roles.append(new_role)
+    #     return new_kit

     def to_omni(self, expand: bool = False) -> "OmniKitType":
         from backend.validators.omni_gui_objects import OmniKitType
@@ -395,7 +393,7 @@ class KitType(BaseClass):
                     kit_reagentrole_associations=kit_reagentrole_associations,
                     kit_submissiontype_associations=kit_submissiontype_associations
                     )
-        logger.debug(f"Creating omni for {pformat(data)}")
+        # logger.debug(f"Creating omni for {pformat(data)}")
         return OmniKitType(instance_object=self, **data)

@@ -405,7 +403,6 @@ class ReagentRole(BaseClass):
     """
-
     skip_on_edit = False

     id = Column(INTEGER, primary_key=True) #: primary key
     name = Column(String(64)) #: name of role reagent plays
     instances = relationship("Reagent", back_populates="role",
@@ -453,7 +450,7 @@ class ReagentRole(BaseClass):
         Args:
             id (id | None, optional): Id of the object. Defaults to None.
             name (str | None, optional): Reagent type name. Defaults to None.
-            kit_type (KitType | str | None, optional): Kit the type of interest belongs to. Defaults to None.
+            kittype (KitType | str | None, optional): Kit the type of interest belongs to. Defaults to None.
             reagent (Reagent | str | None, optional): Concrete instance of the type of interest. Defaults to None.
             limit (int, optional): maxmimum number of results to return (0 = all). Defaults to 0.

@@ -507,14 +504,14 @@ class ReagentRole(BaseClass):
         from backend.validators.pydant import PydReagent
         return PydReagent(lot=None, role=self.name, name=self.name, expiry=date.today())

-    def to_export_dict(self) -> dict:
-        """
-        Creates dictionary for exporting to yml used in new SubmissionType Construction
-
-        Returns:
-            dict: Dictionary containing relevant info for SubmissionType construction
-        """
-        return dict(role=self.name, extension_of_life=self.eol_ext.days)
+    # def to_export_dict(self) -> dict:
+    #     """
+    #     Creates dictionary for exporting to yml used in new SubmissionType Construction
+    #
+    #     Returns:
+    #         dict: Dictionary containing relevant info for SubmissionType construction
+    #     """
+    #     return dict(role=self.name, extension_of_life=self.eol_ext.days)

     @check_authorization
     def save(self):
@@ -1278,20 +1275,20 @@ class SubmissionType(BaseClass):
                 pass
         return cls.execute_query(query=query, limit=limit)

-    def to_export_dict(self):
-        """
-        Creates dictionary for exporting to yml used in new SubmissionType Construction
-
-        Returns:
-            dict: Dictionary containing relevant info for SubmissionType construction
-        """
-        base_dict = dict(name=self.name)
-        base_dict['info'] = self.construct_info_map(mode='export')
-        base_dict['defaults'] = self.defaults
-        # base_dict['samples'] = self.construct_sample_map()
-        base_dict['samples'] = self.sample_map
-        base_dict['kits'] = [item.to_export_dict() for item in self.submissiontype_kit_associations]
-        return base_dict
+    # def to_export_dict(self):
+    #     """
+    #     Creates dictionary for exporting to yml used in new SubmissionType Construction
+    #
+    #     Returns:
+    #         dict: Dictionary containing relevant info for SubmissionType construction
+    #     """
+    #     base_dict = dict(name=self.name)
+    #     base_dict['info'] = self.construct_info_map(mode='export')
+    #     base_dict['defaults'] = self.defaults
+    #     # base_dict['samples'] = self.construct_sample_map()
+    #     base_dict['samples'] = self.sample_map
+    #     base_dict['kits'] = [item.to_export_dict() for item in self.submissiontype_kit_associations]
+    #     return base_dict

     @check_authorization
     def save(self):
@@ -1499,17 +1496,17 @@ class SubmissionTypeKitTypeAssociation(BaseClass):
         # limit = query.count()
         return cls.execute_query(query=query, limit=limit)

-    def to_export_dict(self):
-        """
-        Creates a dictionary of relevant values in this object.
-
-        Returns:
-            dict: dictionary of Association and related kittype
-        """
-        exclude = ['_sa_instance_state', 'submission_types_id', 'kits_id', 'submission_type', 'kit_type']
-        base_dict = {k: v for k, v in self.__dict__.items() if k not in exclude}
-        base_dict['kit_type'] = self.kit_type.to_export_dict(submission_type=self.submission_type)
-        return base_dict
+    # def to_export_dict(self):
+    #     """
+    #     Creates a dictionary of relevant values in this object.
+    #
+    #     Returns:
+    #         dict: dictionary of Association and related kittype
+    #     """
+    #     exclude = ['_sa_instance_state', 'submission_types_id', 'kits_id', 'submission_type', 'kit_type']
+    #     base_dict = {k: v for k, v in self.__dict__.items() if k not in exclude}
+    #     base_dict['kit_type'] = self.kit_type.to_export_dict(submission_type=self.submission_type)
+    #     return base_dict

     def to_omni(self, expand: bool = False):
         from backend.validators.omni_gui_objects import OmniSubmissionTypeKitTypeAssociation
@@ -1719,17 +1716,17 @@ class KitTypeReagentRoleAssociation(BaseClass):
             limit = 1
         return cls.execute_query(query=query, limit=limit)

-    def to_export_dict(self) -> dict:
-        """
-        Creates a dictionary of relevant values in this object.
-
-        Returns:
-            dict: dictionary of Association and related reagent role
-        """
-        base_dict = dict(required=self.required)
-        for k, v in self.reagent_role.to_export_dict().items():
-            base_dict[k] = v
-        return base_dict
+    # def to_export_dict(self) -> dict:
+    #     """
+    #     Creates a dictionary of relevant values in this object.
+    #
+    #     Returns:
+    #         dict: dictionary of Association and related reagent role
+    #     """
+    #     base_dict = dict(required=self.required)
+    #     for k, v in self.reagent_role.to_export_dict().items():
+    #         base_dict[k] = v
+    #     return base_dict

     def get_all_relevant_reagents(self) -> Generator[Reagent, None, None]:
         """
@@ -1915,13 +1912,6 @@ class Equipment(BaseClass, LogMixin):
     submissions = association_proxy("equipment_submission_associations",
                                     "submission") #: proxy to equipment_submission_associations.submission

-    # def __repr__(self) -> str:
-    #     """
-    #     Returns:
-    #         str: representation of this Equipment
-    #     """
-    #     return f"<Equipment({self.name})>"
-
     def to_dict(self, processes: bool = False) -> dict:
         """
         This Equipment as a dictionary
@@ -2085,13 +2075,6 @@ class EquipmentRole(BaseClass):
     submission_types = association_proxy("equipmentrole_submissiontype_associations",
                                          "submission_type") #: proxy to equipmentrole_submissiontype_associations.submission_type

-    # def __repr__(self) -> str:
-    #     """
-    #     Returns:
-    #         str: Representation of this EquipmentRole
-    #     """
-    #     return f"<EquipmentRole({self.name})>"
-
     def to_dict(self) -> dict:
         """
         This EquipmentRole as a dictionary
@@ -2192,16 +2175,6 @@ class EquipmentRole(BaseClass):
                 continue
             yield process.name

-    def to_export_dict(self, submission_type: SubmissionType, kit_type: KitType):
-        """
-        Creates a dictionary of relevant values in this object.
-
-        Returns:
-            dict: dictionary of Association and related reagent role
-        """
-        processes = self.get_processes(submission_type=submission_type, extraction_kit=kit_type)
-        return dict(role=self.name, processes=[item for item in processes])
-
     def to_omni(self, expand: bool = False) -> "OmniEquipmentRole":
         from backend.validators.omni_gui_objects import OmniEquipmentRole
         return OmniEquipmentRole(instance_object=self, name=self.name)
@@ -2320,23 +2293,6 @@ class SubmissionTypeEquipmentRoleAssociation(BaseClass):
     def save(self):
         super().save()

-    def to_export_dict(self, extraction_kit: KitType | str) -> dict:
-        """
-        Creates dictionary for exporting to yml used in new SubmissionType Construction
-
-        Args:
-            extraction_kit (KitType | str): KitType of interest.
-
-        Returns:
-            dict: Dictionary containing relevant info for SubmissionType construction
-        """
-        if isinstance(extraction_kit, str):
-            extraction_kit = KitType.query(name=extraction_kit)
-        base_dict = {k: v for k, v in self.equipment_role.to_export_dict(submission_type=self.submission_type,
-                                                                         kit_type=extraction_kit).items()}
-        base_dict['static'] = self.static
-        return base_dict
-

 class Process(BaseClass):
     """
@@ -2360,14 +2316,6 @@ class Process(BaseClass):
     tip_roles = relationship("TipRole", back_populates='processes',
                              secondary=process_tiprole) #: relation to KitType

-
-    # def __repr__(self) -> str:
-    #     """
-    #     Returns:
-    #         str: Representation of this Process
-    #     """
-    #     return f"<Process({self.name})>"
-
     def set_attribute(self, key, value):
         match key:
             case "name":
@@ -2496,9 +2444,6 @@ class TipRole(BaseClass):
     def tips(self):
         return self.instances

-    # def __repr__(self):
-    #     return f"<TipRole({self.name})>"
-
     @classmethod
     def query_or_create(cls, **kwargs) -> Tuple[TipRole, bool]:
         new = False
@@ -2567,9 +2512,6 @@ class Tips(BaseClass, LogMixin):
     def tiprole(self):
         return self.role

-    # def __repr__(self):
-    #     return f"<Tips({self.name})>"
-
     @classmethod
     def query_or_create(cls, **kwargs) -> Tuple[Tips, bool]:
         new = False
@@ -2,14 +2,14 @@
 All client organization related models.
 '''
 from __future__ import annotations
-import json, yaml, logging
+import logging
 from pathlib import Path
 from pprint import pformat
 from sqlalchemy import Column, String, INTEGER, ForeignKey, Table
 from sqlalchemy.ext.hybrid import hybrid_property
 from sqlalchemy.orm import relationship, Query
 from . import Base, BaseClass
-from tools import check_authorization, setup_lookup, yaml_regex_creator
+from tools import check_authorization, setup_lookup
 from typing import List, Tuple

 logger = logging.getLogger(f"submissions.{__name__}")
@@ -41,9 +41,6 @@ class Organization(BaseClass):
     def contact(self):
         return self.contacts

-    # def __repr__(self) -> str:
-    #     return f"<Organization({self.name})>"
-
     @classmethod
     @setup_lookup
     def query(cls,
@@ -80,49 +77,6 @@ class Organization(BaseClass):
     def save(self):
         super().save()

-    @classmethod
-    @check_authorization
-    def import_from_yml(cls, filepath: Path | str):
-        """
-        An ambitious project to create a Organization from a yml file
-
-        Args:
-            filepath (Path): Filepath of the yml.
-
-        Returns:
-
-        """
-        yaml.add_constructor("!regex", yaml_regex_creator)
-        if isinstance(filepath, str):
-            filepath = Path(filepath)
-        if not filepath.exists():
-            logging.critical(f"Given file could not be found.")
-            return None
-        with open(filepath, "r") as f:
-            if filepath.suffix == ".json":
-                import_dict = json.load(fp=f)
-            elif filepath.suffix == ".yml":
-                import_dict = yaml.load(stream=f, Loader=yaml.Loader)
-            else:
-                raise Exception(f"Filetype {filepath.suffix} not supported.")
-        data = import_dict['orgs']
-        for org in data:
-            organ = Organization.query(name=org['name'])
-            if organ is None:
-                organ = Organization(name=org['name'])
-            try:
-                organ.cost_centre = org['cost_centre']
-            except KeyError:
-                organ.cost_centre = "xxx"
-            for contact in org['contacts']:
-                cont = Contact.query(name=contact['name'])
-                if cont is None:
-                    cont = Contact()
-                for k, v in contact.items():
-                    cont.__setattr__(k, v)
-                organ.contacts.append(cont)
-            organ.save()
-
     def to_omni(self, expand: bool = False):
         from backend.validators.omni_gui_objects import OmniOrganization
         if self.cost_centre:
@@ -151,9 +105,6 @@ class Contact(BaseClass):
                              secondary=orgs_contacts) #: relationship to joined organization
     submissions = relationship("BasicSubmission", back_populates="contact") #: submissions this contact has submitted

-    # def __repr__(self) -> str:
-    #     return f"<Contact({self.name})>"
-
     @classproperty
     def searchables(cls):
         return []
@@ -13,7 +13,6 @@ from zipfile import ZipFile, BadZipfile
 from tempfile import TemporaryDirectory, TemporaryFile
 from operator import itemgetter
 from pprint import pformat
-
 from pandas import DataFrame
 from sqlalchemy.ext.hybrid import hybrid_property
 from . import BaseClass, Reagent, SubmissionType, KitType, Organization, Contact, LogMixin, SubmissionReagentAssociation
@@ -27,10 +26,9 @@ from sqlite3 import OperationalError as SQLOperationalError, IntegrityError as S
 from openpyxl import Workbook
 from openpyxl.drawing.image import Image as OpenpyxlImage
 from tools import row_map, setup_lookup, jinja_template_loading, rreplace, row_keys, check_key_or_attr, Result, Report, \
-    report_result, create_holidays_for_year, check_dictionary_inclusion_equality, rectify_query_date
-from datetime import datetime, date, timedelta
+    report_result, create_holidays_for_year, check_dictionary_inclusion_equality
+from datetime import datetime, date
 from typing import List, Any, Tuple, Literal, Generator, Type
-from dateutil.parser import parse
 from pathlib import Path
 from jinja2.exceptions import TemplateNotFound
 from jinja2 import Template
@@ -271,7 +269,6 @@ class BasicSubmission(BaseClass, LogMixin):
         Returns:
             dict: sample location map
         """
-        # return cls.get_submission_type(submission_type).construct_sample_map()
         return cls.get_submission_type(submission_type).sample_map

     def generate_associations(self, name: str, extra: str | None = None):
@@ -445,11 +442,11 @@ class BasicSubmission(BaseClass, LogMixin):
         except Exception as e:
             logger.error(f"Column count error: {e}")
         # NOTE: Get kit associated with this submission
-        logger.debug(f"Checking associations with submission type: {self.submission_type_name}")
+        # logger.debug(f"Checking associations with submission type: {self.submission_type_name}")
         assoc = next((item for item in self.extraction_kit.kit_submissiontype_associations if
                       item.submission_type == self.submission_type),
                      None)
-        logger.debug(f"Got association: {assoc}")
+        # logger.debug(f"Got association: {assoc}")
         # NOTE: If every individual cost is 0 this is probably an old plate.
         if all(item == 0.0 for item in [assoc.constant_cost, assoc.mutable_cost_column, assoc.mutable_cost_sample]):
             try:
@@ -635,16 +632,13 @@ class BasicSubmission(BaseClass, LogMixin):
         # NOTE: No longer searches for association here, done in caller function
         for k, v in input_dict.items():
             try:
-                # logger.debug(f"Setting assoc {assoc} with key {k} to value {v}")
                 setattr(assoc, k, v)
                 # NOTE: for some reason I don't think assoc.__setattr__(k, v) works here.
             except AttributeError:
-                # logger.error(f"Can't set {k} to {v}")
                 pass
         return assoc

     def update_reagentassoc(self, reagent: Reagent, role: str):
-        from backend.db import SubmissionReagentAssociation
         # NOTE: get the first reagent assoc that fills the given role.
         try:
             assoc = next(item for item in self.submission_reagent_associations if
@@ -1134,7 +1128,7 @@ class BasicSubmission(BaseClass, LogMixin):
         Returns:
             models.BasicSubmission | List[models.BasicSubmission]: Submission(s) of interest
         """
-        from ... import SubmissionReagentAssociation
+        # from ... import SubmissionReagentAssociation
         # NOTE: if you go back to using 'model' change the appropriate cls to model in the query filters
         if submissiontype is not None:
             model = cls.find_polymorphic_subclass(polymorphic_identity=submissiontype)
@@ -1181,8 +1175,8 @@ class BasicSubmission(BaseClass, LogMixin):
             # # start_date = start_date.strftime("%Y-%m-%d %H:%M:%S.%f")
             # # query = query.filter(model.submitted_date == start_date)
             # # else:
-            start_date = rectify_query_date(start_date)
-            end_date = rectify_query_date(end_date, eod=True)
+            start_date = cls.rectify_query_date(start_date)
+            end_date = cls.rectify_query_date(end_date, eod=True)
             query = query.filter(model.submitted_date.between(start_date, end_date))
         # NOTE: by reagent (for some reason)
         match reagent:
@@ -1575,19 +1569,40 @@ class BacterialCulture(BasicSubmission):
                 column=lookup_table['sample_columns']['concentration']).value
             yield sample

-    def get_provisional_controls(self, controls_only: bool = True):
-        if controls_only:
-            if self.controls:
-                provs = (control.sample for control in self.controls)
-            else:
-                regex = re.compile(r"^(ATCC)|(MCS)|(EN)")
-                provs = (sample for sample in self.samples if bool(regex.match(sample.submitter_id)))
-        else:
-            provs = self.samples
-        for prov in provs:
-            prov.submission = self.rsl_plate_num
-            prov.submitted_date = self.submitted_date
-            yield prov
+    # def get_provisional_controls(self, controls_only: bool = True):
+    def get_provisional_controls(self, include: List[str] = []):
+        # NOTE To ensure Samples are done last.
+        include = sorted(include)
+        logger.debug(include)
+        pos_str = "(ATCC)|(MCS)"
+        pos_regex = re.compile(rf"^{pos_str}")
+        neg_str = "(EN)"
+        neg_regex = re.compile(rf"^{neg_str}")
+        total_str = pos_str + "|" + neg_str
+        total_regex = re.compile(rf"^{total_str}")
+        output = []
+        for item in include:
+            # if self.controls:
+            #     logger.debug(item)
+            match item:
+                case "Positive":
+                    if self.controls:
+                        provs = (control.sample for control in self.controls if control.is_positive_control)
+                    else:
+                        provs = (sample for sample in self.samples if bool(pos_regex.match(sample.submitter_id)))
+                case "Negative":
+                    if self.controls:
+                        provs = (control.sample for control in self.controls if not control.is_positive_control)
+                    else:
+                        provs = (sample for sample in self.samples if bool(neg_regex.match(sample.submitter_id)))
+                case _:
+                    provs = (sample for sample in self.samples if not sample.control and sample not in output)
+            for prov in provs:
+                # logger.debug(f"Prov: {prov}")
+                prov.submission = self.rsl_plate_num
+                prov.submitted_date = self.submitted_date
+                output.append(prov)
+        return output


 class Wastewater(BasicSubmission):
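This rewrite is what the commit message refers to: callers now ask for the control categories they want instead of passing a single controls_only flag, so positive and negative controls become individually selectable on the chart. Sorting include alphabetically puts the catch-all arm (any label other than "Positive"/"Negative"; "Samples" is used here as a plausible example) last, so ordinary samples are only collected after the controls. A hedged usage sketch, with the plate lookup and plate number invented for illustration:

    sub = BacterialCulture.query(rsl_plate_num="RSL-BC-20250403-1", limit=1)   # hypothetical lookup
    positives = sub.get_provisional_controls(include=["Positive"])
    negatives = sub.get_provisional_controls(include=["Negative"])
    everything = sub.get_provisional_controls(include=["Positive", "Negative", "Samples"])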
@@ -2794,8 +2809,7 @@ class WastewaterSample(BasicSample):
         output_dict['rsl_number'] = "RSL-WW-" + output_dict['ww_processing_num']
         if output_dict['ww_full_sample_id'] is not None and output_dict["submitter_id"] in disallowed:
             output_dict["submitter_id"] = output_dict['ww_full_sample_id']
-        check = check_key_or_attr("rsl_number", output_dict, check_none=True)
-        # logger.debug(pformat(output_dict, indent=4))
+        # check = check_key_or_attr("rsl_number", output_dict, check_none=True)
         return output_dict

     @classproperty
@@ -3089,7 +3103,6 @@ class SubmissionSampleAssociation(BaseClass):
|
|||||||
Returns:
|
Returns:
|
||||||
SubmissionSampleAssociation: Queried or new association.
|
SubmissionSampleAssociation: Queried or new association.
|
||||||
"""
|
"""
|
||||||
# disallowed = ['id']
|
|
||||||
match submission:
|
match submission:
|
||||||
case BasicSubmission():
|
case BasicSubmission():
|
||||||
pass
|
pass
|
||||||
@@ -3184,7 +3197,6 @@ class WastewaterAssociation(SubmissionSampleAssociation):
|
|||||||
sample['background_color'] = f"rgb({red}, {grn}, {blu})"
|
sample['background_color'] = f"rgb({red}, {grn}, {blu})"
|
||||||
try:
|
try:
|
||||||
sample[
|
sample[
|
||||||
# 'tooltip'] += f"<br>- ct N1: {'{:.2f}'.format(self.ct_n1)} ({self.n1_status})<br>- ct N2: {'{:.2f}'.format(self.ct_n2)} ({self.n2_status})"
|
|
||||||
'tooltip'] += f"<br>- ct N1: {'{:.2f}'.format(self.ct_n1)}<br>- ct N2: {'{:.2f}'.format(self.ct_n2)}"
|
'tooltip'] += f"<br>- ct N1: {'{:.2f}'.format(self.ct_n1)}<br>- ct N2: {'{:.2f}'.format(self.ct_n2)}"
|
||||||
except (TypeError, AttributeError) as e:
|
except (TypeError, AttributeError) as e:
|
||||||
logger.error(f"Couldn't set tooltip for {self.sample.rsl_number}. Looks like there isn't PCR data.")
|
logger.error(f"Couldn't set tooltip for {self.sample.rsl_number}. Looks like there isn't PCR data.")
|
||||||
|
|||||||
@@ -259,7 +259,6 @@ class ReagentParser(object):
        if isinstance(extraction_kit, dict):
            extraction_kit = extraction_kit['value']
        self.kit_object = KitType.query(name=extraction_kit)
-        # self.kit_map = self.kit_map(submission_type=submission_type)
        self.xl = xl

    @property
@@ -1,17 +1,14 @@
"""
Contains functions for generating summary reports
"""
-import itertools
-import re
-import sys
+import re, sys, logging
from pprint import pformat
from pandas import DataFrame, ExcelWriter
-import logging
from pathlib import Path
from datetime import date
-from typing import Tuple
+from typing import Tuple, List
-from backend.db.models import BasicSubmission, IridaControl
+from backend.db.models import BasicSubmission
-from tools import jinja_template_loading, get_first_blank_df_row, row_map
+from tools import jinja_template_loading, get_first_blank_df_row, row_map, flatten_list
from PyQt6.QtWidgets import QWidget
from openpyxl.worksheet.worksheet import Worksheet

@@ -198,14 +195,15 @@ class TurnaroundMaker(ReportArchetype):
class ConcentrationMaker(ReportArchetype):

    def __init__(self, start_date: date, end_date: date, submission_type: str = "Bacterial Culture",
-                 controls_only: bool = True):
+                 # controls_only: bool = True):
+                 include: List[str] = []):
        self.start_date = start_date
        self.end_date = end_date
        # NOTE: Set page size to zero to override limiting query size.
        self.subs = BasicSubmission.query(start_date=start_date, end_date=end_date,
                                          submission_type_name=submission_type, page_size=0)
-        # self.known_controls = list(itertools.chain.from_iterable([sub.controls for sub in self.subs]))
-        self.samples = list(itertools.chain.from_iterable([sub.get_provisional_controls(controls_only=controls_only) for sub in self.subs]))
+        # self.samples = flatten_list([sub.get_provisional_controls(controls_only=controls_only) for sub in self.subs])
+        self.samples = flatten_list([sub.get_provisional_controls(include=include) for sub in self.subs])
        self.records = [self.build_record(sample) for sample in self.samples]
        self.df = DataFrame.from_records(self.records)
        self.sheet_name = "Concentration"
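For orientation, a minimal sketch of driving the reworked report class directly; the dates and output path are illustrative, while ConcentrationMaker, flatten_list and the include keyword are the names from the hunks above:

    from datetime import date
    maker = ConcentrationMaker(start_date=date(2025, 3, 1), end_date=date(2025, 3, 31),
                               include=["Positive", "Negative"])
    # flatten_list collapses the per-plate lists into one flat list of provisional controls,
    # one record per sample, before the DataFrame is built.
    maker.df.to_excel("concentrations.xlsx", sheet_name=maker.sheet_name)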
@@ -176,7 +176,7 @@ class InfoWriter(object):
        for loc in locations:
            sheet = self.xl[loc['sheet']]
            try:
-                logger.debug(f"Writing {v['value']} to row {loc['row']} and column {loc['column']}")
+                # logger.debug(f"Writing {v['value']} to row {loc['row']} and column {loc['column']}")
                sheet.cell(row=loc['row'], column=loc['column'], value=v['value'])
            except AttributeError as e:
                logger.error(f"Can't write {k} to that cell due to AttributeError: {e}")
@@ -80,26 +80,25 @@ class RSLNamer(object):
            submission_type = cls.retrieve_submission_type(filename=filepath.stem.__str__())
            return submission_type

-        def st_from_str(filename: str) -> str:
-            if filename.startswith("tmp"):
+        def st_from_str(file_name: str) -> str:
+            if file_name.startswith("tmp"):
                return "Bacterial Culture"
            regex = BasicSubmission.regex
-            m = regex.search(filename)
+            m = regex.search(file_name)
            try:
-                submission_type = m.lastgroup
+                sub_type = m.lastgroup
            except AttributeError as e:
-                submission_type = None
+                sub_type = None
                logger.critical(f"No submission type found or submission type found!: {e}")
-            return submission_type
+            return sub_type

        match filename:
            case Path():
                submission_type = st_from_path(filepath=filename)
            case str():
-                submission_type = st_from_str(filename=filename)
+                submission_type = st_from_str(file_name=filename)
            case _:
                raise TypeError(f"Unsupported filename type: {type(filename)}.")
-        submission_type = None
        try:
            check = submission_type is None
        except UnboundLocalError:
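The rename in the hunk above also matters for correctness: the inner helper previously reused the outer names filename and submission_type, and the stray submission_type = None after the match could discard the value that had just been looked up. A standalone illustration of that pattern (not the project's code):

    def lookup(kind: str) -> str | None:
        match kind:
            case "path":
                submission_type = "Wastewater"
            case _:
                submission_type = "Bacterial Culture"
        submission_type = None  # unconditionally discards the match result
        return submission_type

    assert lookup("path") is None  # every call returns None until the reset is removed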
@@ -137,7 +136,7 @@ class RSLNamer(object):
        if m is not None:
            try:
                parsed_name = m.group().upper().strip(".")
-            except:
+            except AttributeError:
                parsed_name = None
        else:
            parsed_name = None
@@ -1,3 +1,7 @@
+"""
+Collection of pydantic objects to be used in the Gui system.
+"""
+
from __future__ import annotations
import logging
from pydantic import BaseModel, field_validator, Field
@@ -10,6 +14,7 @@ logger = logging.getLogger(f"submissions.{__name__}")

+
class BaseOmni(BaseModel):

    instance_object: Any | None = Field(default=None)

    def __repr__(self):
@@ -23,23 +28,23 @@ class BaseOmni(BaseModel):
        return cls.class_object.aliases

    def check_all_attributes(self, attributes: dict) -> bool:
-        logger.debug(f"Incoming attributes: {attributes}")
+        # logger.debug(f"Incoming attributes: {attributes}")
        attributes = {k : v for k, v in attributes.items() if k in self.list_searchables.keys()}
        for key, value in attributes.items():
            try:
-                logger.debug(f"Check if {value.__class__} is subclass of {BaseOmni}")
+                # logger.debug(f"Check if {value.__class__} is subclass of {BaseOmni}")
                check = issubclass(value.__class__, BaseOmni)
            except TypeError as e:
                logger.error(f"Couldn't check if {value.__class__} is subclass of {BaseOmni} due to {e}")
                check = False
            if check:
-                logger.debug(f"Checking for subclass name.")
+                # logger.debug(f"Checking for subclass name.")
                value = value.name
            self_value = self.list_searchables[key]
            if value != self_value:
-                logger.debug(f"Value {key} is False, these are not the same object.")
+                # logger.debug(f"Value {key} is False, these are not the same object.")
                return False
-        logger.debug("Everything checks out, these are the same object.")
+        # logger.debug("Everything checks out, these are the same object.")
        return True

    def __setattr__(self, key, value):
@@ -51,24 +56,24 @@ class BaseOmni(BaseModel):
            new_key = class_value.impl.key
        except AttributeError:
            new_key = None
-        logger.debug(f"Class value before new key: {class_value.property}")
+        # logger.debug(f"Class value before new key: {class_value.property}")
        if new_key and new_key != key:
            class_value = getattr(self.class_object, new_key)
-            logger.debug(f"Class value after new key: {class_value.property}")
+            # logger.debug(f"Class value after new key: {class_value.property}")
        if isinstance(class_value, InstrumentedAttribute):
-            logger.debug(f"{key} is an InstrumentedAttribute with class_value.property: {class_value.property}.")
+            # logger.debug(f"{key} is an InstrumentedAttribute with class_value.property: {class_value.property}.")
            match class_value.property:
                case ColumnProperty():
-                    logger.debug(f"Setting ColumnProperty to {value}")
+                    # logger.debug(f"Setting ColumnProperty to {value}")
                    return super().__setattr__(key, value)
                case _RelationshipDeclared():
-                    logger.debug(f" {self.__class__.__name__} Setting _RelationshipDeclared for {key} to {value}")
+                    # logger.debug(f" {self.__class__.__name__} Setting _RelationshipDeclared for {key} to {value}")
                    if class_value.property.uselist:
-                        logger.debug(f"Setting {key} with uselist")
+                        # logger.debug(f"Setting {key} with uselist")
                        existing = self.__getattribute__(key)
                        if existing is not None:
                            # NOTE: Getting some really weird duplicates for OmniSubmissionTypeKitTypeAssociation here.
-                            logger.debug(f"Existing: {existing}, incoming: {value}")
+                            # logger.debug(f"Existing: {existing}, incoming: {value}")
                            if isinstance(value, list):
                                if value != existing:
                                    value = existing + value
@@ -82,7 +87,7 @@ class BaseOmni(BaseModel):
                            if issubclass(value.__class__, self.__class__):
                                value = value.to_sql()
                            value = [value]
-                            logger.debug(f"Final value for {key}: {value}")
+                            # logger.debug(f"Final value for {key}: {value}")
                            return super().__setattr__(key, value)
                        else:
                            if isinstance(value, list):
@@ -98,6 +103,7 @@ class BaseOmni(BaseModel):

+
class OmniSubmissionType(BaseOmni):

    class_object: ClassVar[Any] = SubmissionType

    name: str = Field(default="", description="property")
@@ -161,6 +167,7 @@ class OmniSubmissionType(BaseOmni):

+
class OmniReagentRole(BaseOmni):

    class_object: ClassVar[Any] = ReagentRole

    name: str = Field(default="", description="property")
@@ -197,6 +204,7 @@ class OmniReagentRole(BaseOmni):

+
class OmniSubmissionTypeKitTypeAssociation(BaseOmni):

    class_object: ClassVar[Any] = SubmissionTypeKitTypeAssociation

    submissiontype: str | OmniSubmissionType = Field(default="", description="relationship", title="SubmissionType")
@@ -262,7 +270,7 @@ class OmniSubmissionTypeKitTypeAssociation(BaseOmni):
        )

    def to_sql(self):
-        logger.debug(f"Self kittype: {self.submissiontype}")
+        # logger.debug(f"Self kittype: {self.submissiontype}")
        if issubclass(self.submissiontype.__class__, BaseOmni):
            submissiontype = SubmissionType.query(name=self.submissiontype.name)
        else:
@@ -272,7 +280,7 @@ class OmniSubmissionTypeKitTypeAssociation(BaseOmni):
        else:
            kittype = KitType.query(name=self.kittype)
        # logger.debug(f"Self kittype: {self.kittype}")
-        logger.debug(f"Query or create with {kittype}, {submissiontype}")
+        # logger.debug(f"Query or create with {kittype}, {submissiontype}")
        instance, is_new = self.class_object.query_or_create(kittype=kittype, submissiontype=submissiontype)
        instance.mutable_cost_column = self.mutable_cost_column
        instance.mutable_cost_sample = self.mutable_cost_sample
@@ -293,6 +301,7 @@ class OmniSubmissionTypeKitTypeAssociation(BaseOmni):

+
class OmniKitTypeReagentRoleAssociation(BaseOmni):

    class_object: ClassVar[Any] = KitTypeReagentRoleAssociation

    reagent_role: str | OmniReagentRole = Field(default="", description="relationship", title="ReagentRole")
@@ -363,7 +372,7 @@ class OmniKitTypeReagentRoleAssociation(BaseOmni):
            kittype=kittype,
            submissiontype=submissiontype
        )
-        logger.debug(f"KitTypeReagentRoleAssociation coming out of query_or_create: {instance.__dict__}\nnew: {new}")
+        # logger.debug(f"KitTypeReagentRoleAssociation coming out of query_or_create: {instance.__dict__}\nnew: {new}")
        if new:
            logger.warning(f"This is a new instance: {instance.__dict__}")
            try:
@@ -371,10 +380,10 @@ class OmniKitTypeReagentRoleAssociation(BaseOmni):
            except AttributeError:
                reagent_role = ReagentRole.query(name=self.reagent_role)
            instance.reagent_role = reagent_role
-        logger.debug(f"KTRRAssoc uses: {self.uses}")
+        # logger.debug(f"KTRRAssoc uses: {self.uses}")
        instance.uses = self.uses
        instance.required = int(self.required)
-        logger.debug(f"KitTypeReagentRoleAssociation: {pformat(instance.__dict__)}")
+        # logger.debug(f"KitTypeReagentRoleAssociation: {pformat(instance.__dict__)}")
        return instance

    @property
@@ -395,6 +404,7 @@ class OmniKitTypeReagentRoleAssociation(BaseOmni):

+
class OmniEquipmentRole(BaseOmni):

    class_object: ClassVar[Any] = EquipmentRole

    name: str = Field(default="", description="property")
@@ -421,6 +431,7 @@ class OmniEquipmentRole(BaseOmni):

+
class OmniTips(BaseOmni):

    class_object: ClassVar[Any] = Tips

    name: str = Field(default="", description="property")
@@ -447,6 +458,7 @@ class OmniTips(BaseOmni):

+
class OmniTipRole(BaseOmni):

    class_object: ClassVar[Any] = TipRole

    name: str = Field(default="", description="property")
@@ -477,6 +489,7 @@ class OmniTipRole(BaseOmni):

+
class OmniProcess(BaseOmni):

    class_object: ClassVar[Any] = Process

    # NOTE: How am I going to figure out relatioinships without getting into recursion issues?
@@ -540,6 +553,7 @@ class OmniProcess(BaseOmni):

+
class OmniKitType(BaseOmni):

    class_object: ClassVar[Any] = KitType

    name: str = Field(default="", description="property")
@@ -565,17 +579,17 @@ class OmniKitType(BaseOmni):

    def to_sql(self) -> KitType:
        kit, is_new = KitType.query_or_create(name=self.name)
-        if is_new:
-            logger.debug(f"New kit made: {kit}")
-        else:
-            logger.debug(f"Kit retrieved: {kit}")
+        # if is_new:
+        # logger.debug(f"New kit made: {kit}")
+        # else:
+        # logger.debug(f"Kit retrieved: {kit}")
        new_rr = []
        for rr_assoc in self.kit_reagentrole_associations:
            new_assoc = rr_assoc.to_sql()
            if new_assoc not in new_rr:
-                logger.debug(f"Adding {new_assoc} to kit_reagentrole_associations")
+                # logger.debug(f"Adding {new_assoc} to kit_reagentrole_associations")
                new_rr.append(new_assoc)
-        logger.debug(f"Setting kit_reagentrole_associations to {pformat([item.__dict__ for item in new_rr])}")
+        # logger.debug(f"Setting kit_reagentrole_associations to {pformat([item.__dict__ for item in new_rr])}")
        kit.kit_reagentrole_associations = new_rr
        new_st = []
        for st_assoc in self.kit_submissiontype_associations:
@@ -589,9 +603,9 @@ class OmniKitType(BaseOmni):
            if new_process not in new_processes:
                new_processes.append(new_process)
        kit.processes = new_processes
-        logger.debug(f"Kit: {pformat(kit.__dict__)}")
-        for item in kit.kit_reagentrole_associations:
-            logger.debug(f"KTRRassoc: {item.__dict__}")
+        # logger.debug(f"Kit: {pformat(kit.__dict__)}")
+        # for item in kit.kit_reagentrole_associations:
+        # logger.debug(f"KTRRassoc: {item.__dict__}")
        return kit

@@ -601,11 +615,10 @@ class OmniOrganization(BaseOmni):

    name: str = Field(default="", description="property")
    cost_centre: str = Field(default="", description="property")
-    # TODO: add in List[OmniContacts]
    contact: List[str] | List[OmniContact] = Field(default=[], description="relationship", title="Contact")

    def __init__(self, instance_object: Any, **data):
-        logger.debug(f"Incoming data: {data}")
+        # logger.debug(f"Incoming data: {data}")
        super().__init__(**data)
        self.instance_object = instance_object

@@ -642,8 +655,8 @@ class OmniContact(BaseOmni):

    def to_sql(self):
        contact, is_new = Contact.query_or_create(name=self.name, email=self.email, phone=self.phone)
-        if is_new:
-            logger.debug(f"New contact made: {contact}")
-        else:
-            logger.debug(f"Contact retrieved: {contact}")
+        # if is_new:
+        # logger.debug(f"New contact made: {contact}")
+        # else:
+        # logger.debug(f"Contact retrieved: {contact}")
        return contact
@@ -22,6 +22,7 @@ logger = logging.getLogger(f"submissions.{__name__}")

+
class PydReagent(BaseModel):

    lot: str | None
    role: str | None
    expiry: date | datetime | Literal['NA'] | None = Field(default=None, validate_default=True)
@@ -131,7 +132,7 @@ class PydReagent(BaseModel):
        if self.model_extra is not None:
            self.__dict__.update(self.model_extra)
        reagent = Reagent.query(lot=self.lot, name=self.name)
-        logger.debug(f"Reagent: {reagent}")
+        # logger.debug(f"Reagent: {reagent}")
        if reagent is None:
            reagent = Reagent()
            for key, value in self.__dict__.items():
@@ -151,6 +152,7 @@ class PydReagent(BaseModel):

+
class PydSample(BaseModel, extra='allow'):

    submitter_id: str
    sample_type: str
    row: int | List[int] | None
@@ -252,6 +254,7 @@ class PydSample(BaseModel, extra='allow'):

+
class PydTips(BaseModel):

    name: str
    lot: str | None = Field(default=None)
    role: str
@@ -282,6 +285,7 @@ class PydTips(BaseModel):

+
class PydEquipment(BaseModel, extra='ignore'):

    asset_number: str
    name: str
    nickname: str | None
@@ -376,6 +380,7 @@ class PydEquipment(BaseModel, extra='ignore'):

+
class PydSubmission(BaseModel, extra='allow'):

    filepath: Path
    submission_type: dict | None
    submitter_plate_num: dict | None = Field(default=dict(value=None, missing=True), validate_default=True)
@@ -948,7 +953,7 @@ class PydSubmission(BaseModel, extra='allow'):
            self.extraction_kit['value'] = extraction_kit['value']
        ext_kit = KitType.query(name=self.extraction_kit['value'])
        ext_kit_rtypes = [item.to_pydantic() for item in
-                          ext_kit.get_reagents(required=True, submission_type=self.submission_type['value'])]
+                          ext_kit.get_reagents(required_only=True, submission_type=self.submission_type['value'])]
        # NOTE: Exclude any reagenttype found in this pyd not expected in kit.
        expected_check = [item.role for item in ext_kit_rtypes]
        output_reagents = [rt for rt in self.reagents if rt.role in expected_check]
@@ -1014,6 +1019,7 @@ class PydSubmission(BaseModel, extra='allow'):

+
class PydContact(BaseModel):

    name: str
    phone: str | None
    email: str | None
@@ -1061,6 +1067,7 @@ class PydContact(BaseModel):

+
class PydOrganization(BaseModel):

    name: str
    cost_centre: str
    contacts: List[PydContact] | None
@@ -1101,6 +1108,7 @@ class PydOrganization(BaseModel):

+
class PydReagentRole(BaseModel):

    name: str
    eol_ext: timedelta | int | None
    uses: dict | None
@@ -1139,6 +1147,7 @@ class PydReagentRole(BaseModel):

+
class PydKitType(BaseModel):

    name: str
    reagent_roles: List[PydReagent] = []

@@ -1160,6 +1169,7 @@ class PydKitType(BaseModel):

+
class PydEquipmentRole(BaseModel):

    name: str
    equipment: List[PydEquipment]
    processes: List[str] | None
@@ -1187,6 +1197,7 @@ class PydEquipmentRole(BaseModel):

+
class PydPCRControl(BaseModel):

    name: str
    subtype: str
    target: str
@@ -1210,6 +1221,7 @@ class PydPCRControl(BaseModel):

+
class PydIridaControl(BaseModel, extra='ignore'):

    name: str
    contains: list | dict #: unstructured hashes in contains.tsv for each organism
    matches: list | dict #: unstructured hashes in matches.tsv for each organism
@@ -1244,6 +1256,7 @@ class PydIridaControl(BaseModel, extra='ignore'):

+
class PydProcess(BaseModel, extra="allow"):

    name: str
    version: str = Field(default="1")
    submission_types: List[str]
@@ -1297,7 +1310,7 @@ class PydElastic(BaseModel, extra="allow", arbitrary_types_allowed=True):

    @report_result
    def to_sql(self):
-        print(self.instance)
+        # print(self.instance)
        fields = [item for item in self.model_extra]
        for field in fields:
            try:
@@ -1307,11 +1320,11 @@ class PydElastic(BaseModel, extra="allow", arbitrary_types_allowed=True):
                continue
            match field_type:
                case _RelationshipDeclared():
-                    logger.debug(f"{field} is a relationship with {field_type.entity.class_}")
+                    # logger.debug(f"{field} is a relationship with {field_type.entity.class_}")
                    field_value = field_type.entity.class_.argument.query(name=getattr(self, field))
-                    logger.debug(f"{field} query result: {field_value}")
+                    # logger.debug(f"{field} query result: {field_value}")
                case ColumnProperty():
-                    logger.debug(f"{field} is a property.")
+                    # logger.debug(f"{field} is a property.")
                    field_value = getattr(self, field)
            self.instance.__setattr__(field, field_value)
        return self.instance
@@ -2,11 +2,11 @@
Contains all operations for creating charts, graphs and visual effects.
'''
from datetime import timedelta, date
+from pathlib import Path
from typing import Generator
from PyQt6.QtWidgets import QWidget
-import plotly, logging
+import pandas as pd, logging
from plotly.graph_objects import Figure
-import pandas as pd
from tools import divide_chunks

logger = logging.getLogger(f"submissions.{__name__}")
@@ -123,12 +123,16 @@ class CustomFigure(Figure):
        Returns:
            str: html string
        """
-        html = '<html><body>'
+        html = f'<html><body>'
        if self is not None:
-            html += plotly.offline.plot(self, output_type='div', include_plotlyjs='cdn')
+            # NOTE: Just cannot get this load from string to freaking work.
+            html += self.to_html(include_plotlyjs='cdn', full_html=False)
+            # html += plotly.offline.plot(self, output_type='div', include_plotlyjs=True)
        else:
            html += "<h1>No data was retrieved for the given parameters.</h1>"
        html += '</body></html>'
+        with open("test.html", "w", encoding="utf-8") as f:
+            f.write(html)
        return html

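Figure.to_html is the standard plotly replacement for the plotly.offline.plot call here: include_plotlyjs='cdn' makes the fragment reference plotly.js from the CDN and full_html=False returns only a <div> suitable for splicing into the surrounding <html><body> string. A standalone sketch with an illustrative figure:

    import plotly.graph_objects as go
    fig = go.Figure(data=[go.Scatter(x=[1, 2, 3], y=[4, 1, 7])])
    # A <div> fragment that loads plotly.js from the CDN, ready to embed in a larger page.
    fragment = fig.to_html(include_plotlyjs='cdn', full_html=False)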
@@ -7,12 +7,14 @@ from backend.excel.reports import ConcentrationMaker
from frontend.visualizations.concentrations_chart import ConcentrationsChart
import logging


logger = logging.getLogger(f"submissions.{__name__}")

+
class Concentrations(InfoPane):

    def __init__(self, parent: QWidget):
+        from .. import CheckableComboBox
        super().__init__(parent)
        self.save_button = QPushButton("Save Chart", parent=self)
        self.save_button.pressed.connect(self.save_png)
@@ -20,12 +22,14 @@ class Concentrations(InfoPane):
        self.export_button = QPushButton("Save Data", parent=self)
        self.export_button.pressed.connect(self.save_excel)
        self.layout.addWidget(self.export_button, 0, 3, 1, 1)
-        check_label = QLabel("Controls Only")
-        self.all_box = QCheckBox()
-        self.all_box.setChecked(True)
-        self.all_box.checkStateChanged.connect(self.update_data)
-        self.layout.addWidget(check_label, 1, 0, 1, 1)
-        self.layout.addWidget(self.all_box, 1, 1, 1, 1)
+        self.pos_neg = CheckableComboBox(parent=self)
+        self.pos_neg.model().itemChanged.connect(self.update_data)
+        self.pos_neg.setEditable(False)
+        self.pos_neg.addItem("Positive")
+        self.pos_neg.addItem("Negative")
+        self.pos_neg.addItem("Samples", start_checked=False)
+        self.layout.addWidget(QLabel("Control Types"), 1, 0, 1, 1)
+        self.layout.addWidget(self.pos_neg, 1, 1, 1, 1)
        self.fig = None
        self.report_object = None
        self.update_data()
@@ -37,10 +41,14 @@ class Concentrations(InfoPane):
        Returns:
            None
        """
+        include = self.pos_neg.get_checked()
+        # logger.debug(f"Include: {include}")
        super().update_data()
        months = self.diff_month(self.start_date, self.end_date)
        # logger.debug(f"Box checked: {self.all_box.isChecked()}")
-        chart_settings = dict(start_date=self.start_date, end_date=self.end_date, controls_only=self.all_box.isChecked())
+        # chart_settings = dict(start_date=self.start_date, end_date=self.end_date, controls_only=self.all_box.isChecked())
+        chart_settings = dict(start_date=self.start_date, end_date=self.end_date,
+                              include=include)
        self.report_obj = ConcentrationMaker(**chart_settings)
        self.fig = ConcentrationsChart(df=self.report_obj.df, settings=chart_settings, modes=[], months=months)
        self.webview.setHtml(self.fig.html)
@@ -46,12 +46,15 @@ class CheckableComboBox(QComboBox):
    # once there is a checkState set, it is rendered
    # here we assume default Unchecked

-    def addItem(self, item, header: bool = False):
+    def addItem(self, item, header: bool = False, start_checked: bool = True):
        super(CheckableComboBox, self).addItem(item)
        item: QStandardItem = self.model().item(self.count() - 1, 0)
        if not header:
            item.setFlags(Qt.ItemFlag.ItemIsUserCheckable | Qt.ItemFlag.ItemIsEnabled)
-            item.setCheckState(Qt.CheckState.Checked)
+            if start_checked:
+                item.setCheckState(Qt.CheckState.Checked)
+            else:
+                item.setCheckState(Qt.CheckState.Unchecked)

    def itemChecked(self, index):
        item = self.model().item(index, 0)
@@ -60,14 +63,18 @@ class CheckableComboBox(QComboBox):
    def changed(self):
        self.updated.emit()

+    def get_checked(self):
+        checked = [self.itemText(i) for i in range(self.count()) if self.itemChecked(i)]
+        return checked
+

class Pagifier(QWidget):

-    def __init__(self, page_max:int):
+    def __init__(self, page_max: int):
        super().__init__()
        self.page_max = math.ceil(page_max)
        self.page_anchor = 1
-        next = QPushButton(parent=self, icon = QIcon.fromTheme(QIcon.ThemeIcon.GoNext))
+        next = QPushButton(parent=self, icon=QIcon.fromTheme(QIcon.ThemeIcon.GoNext))
        next.pressed.connect(self.increment_page)
        previous = QPushButton(parent=self, icon=QIcon.fromTheme(QIcon.ThemeIcon.GoPrevious))
        previous.pressed.connect(self.decrement_page)
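The Concentrations pane connects to this combo box's underlying model, so every tick or untick re-runs update_data. A standalone sketch of that Qt mechanism outside the widget (names and values are illustrative; needs a running QApplication):

    from PyQt6.QtWidgets import QApplication
    from PyQt6.QtGui import QStandardItemModel, QStandardItem
    from PyQt6.QtCore import Qt

    app = QApplication([])
    model = QStandardItemModel()
    model.itemChanged.connect(lambda it: print(it.text(), it.checkState()))
    item = QStandardItem("Samples")
    item.setFlags(Qt.ItemFlag.ItemIsUserCheckable | Qt.ItemFlag.ItemIsEnabled)
    item.setCheckState(Qt.CheckState.Unchecked)  # start unchecked, like addItem(..., start_checked=False)
    model.appendRow(item)
    item.setCheckState(Qt.CheckState.Checked)    # toggling emits itemChanged, which the pane ties to update_data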
@@ -92,6 +92,7 @@ class SubmissionDetails(QDialog):
        Args:
            sample (str): Submitter Id of the sample.
        """
+        logger.debug(f"Sample details.")
        if isinstance(sample, str):
            sample = BasicSample.query(submitter_id=sample)
        base_dict = sample.to_sub_dict(full_data=True)
@@ -102,6 +103,8 @@ class SubmissionDetails(QDialog):
        with open(template_path.joinpath("css", "styles.css"), "r") as f:
            css = f.read()
        html = template.render(sample=base_dict, css=css)
+        with open(f"{sample.submitter_id}.html", 'w') as f:
+            f.write(html)
        self.webview.setHtml(html)
        self.setWindowTitle(f"Sample Details - {sample.submitter_id}")

@@ -114,6 +117,7 @@ class SubmissionDetails(QDialog):
            kit (str | KitType): Name of kit.
            reagent (str | Reagent): Lot number of the reagent
        """
+        logger.debug(f"Reagent details.")
        if isinstance(reagent, str):
            reagent = Reagent.query(lot=reagent)
        if isinstance(kit, str):
@@ -164,6 +168,7 @@ class SubmissionDetails(QDialog):
        Args:
            submission (str | BasicSubmission): Submission of interest.
        """
+        logger.debug(f"Submission details.")
        if isinstance(submission, str):
            submission = BasicSubmission.query(rsl_plate_num=submission)
        self.rsl_plate_num = submission.rsl_plate_num
@@ -71,11 +71,8 @@
    {% endif %}
{% endblock %}
{% block signing_button %}
-    {% if permission and not sub['signed_by'] %}
-        <button type="button" id="sign_btn">Sign Off</button>
-    {% endif %}
+    <button type="button" id="sign_btn" {% if permission and not sub['signed_by'] %}{% else %}hidden{% endif %}>Sign Off</button>
{% endblock %}
-
<br>
<br>
<br>
@@ -84,13 +81,11 @@
{% block script %}
    {{ super() }}

-    document.getElementById("sign_btn").addEventListener("click", function(){
-        backend.sign_off("{{ sub['plate_number'] }}");
-    });
    var sampleSelection = document.getElementsByClassName('sample');

    for(let i = 0; i < sampleSelection.length; i++) {
        sampleSelection[i].addEventListener("click", function() {
+            console.log(sampleSelection[i].id);
            backend.sample_details(sampleSelection[i].id);
        })
    }
@@ -99,10 +94,15 @@

    for(let i = 0; i < reagentSelection.length; i++) {
        reagentSelection[i].addEventListener("click", function() {
+            console.log(reagentSelection[i].id);
            backend.reagent_details(reagentSelection[i].id, "{{ sub['extraction_kit'] }}");
        })
    }

+    document.getElementById("sign_btn").addEventListener("click", function(){
+        backend.sign_off("{{ sub['plate_number'] }}");
+    });
+
{% endblock %}
</script>
</html>
@@ -3,6 +3,7 @@ Contains miscellaenous functions used by both frontend and backend.
'''
from __future__ import annotations
import builtins, importlib, time, logging, re, yaml, sys, os, stat, platform, getpass, json, numpy as np, pandas as pd
+import itertools
from datetime import date, datetime, timedelta
from json import JSONDecodeError
from threading import Thread
@@ -254,7 +255,7 @@ def timer(func):
        value = func(*args, **kwargs)
        end_time = time.perf_counter()
        run_time = end_time - start_time
-        logger.debug(f"Finished {func.__name__}() in {run_time:.4f} secs")
+        print(f"Finished {func.__name__}() in {run_time:.4f} secs")
        return value

    return wrapper
@@ -896,33 +897,8 @@ def check_dictionary_inclusion_equality(listo: List[dict] | dict, dicto: dict) -
        raise TypeError(f"Unsupported variable: {type(listo)}")


-def rectify_query_date(input_date, eod: bool = False) -> str:
-    """
-    Converts input into a datetime string for querying purposes
-
-    Args:
-        eod (bool, optional): Whether to use max time to indicate end of day.
-        input_date ():
-
-    Returns:
-        datetime: properly formated datetime
-    """
-    match input_date:
-        case datetime():
-            output_date = input_date.strftime("%Y-%m-%d %H:%M:%S")
-        case date():
-            if eod:
-                addition_time = datetime.max.time()
-            else:
-                addition_time = datetime.min.time()
-            output_date = datetime.combine(input_date, addition_time)
-            output_date = output_date.strftime("%Y-%m-%d %H:%M:%S")
-        case int():
-            output_date = datetime.fromordinal(
-                datetime(1900, 1, 1).toordinal() + input_date - 2).date().strftime("%Y-%m-%d %H:%M:%S")
-        case _:
-            output_date = parse(input_date).strftime("%Y-%m-%d %H:%M:%S")
-    return output_date
+def flatten_list(input_list: list):
+    return list(itertools.chain.from_iterable(input_list))


class classproperty(property):
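flatten_list replaces the repeated itertools.chain.from_iterable calls scattered through the reports; it removes exactly one level of nesting. A quick check on plain data:

    nested = [[1, 2], [3], []]
    assert flatten_list(nested) == [1, 2, 3]
    # equivalent to list(itertools.chain.from_iterable(nested))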