Qubit sample results now written to export.
@@ -1,3 +1,7 @@
# 202510.01

- Update for Python 3.13

# 202509.04

- Qubit results parsing complete.

@@ -48,10 +48,10 @@ class BaseClass(Base):
except AttributeError:
return f"<{self.__class__.__name__}(Name Unavailable)>"

# @classproperty
@classmethod
@declared_attr
def aliases(cls) -> List[str]:
@classmethod
def aliases(cls):
"""
List of other names this class might be known by.

@@ -60,9 +60,9 @@ class BaseClass(Base):
"""
return [cls.query_alias]

@classmethod
@declared_attr
def query_alias(cls) -> str:
@classmethod
def query_alias(cls):
"""
What to query this class as.

@@ -71,8 +71,8 @@ class BaseClass(Base):
"""
return cls.__name__.lower()

@classmethod
@declared_attr
@classmethod
def __tablename__(cls) -> str:
"""
Sets table name to lower case class name.

@@ -82,8 +82,8 @@ class BaseClass(Base):
"""
return f"_{cls.__name__.lower()}"

@classmethod
@declared_attr
@classmethod
def __database_session__(cls) -> Session:
"""
Pull db session from ctx to be used in operations

@@ -93,8 +93,8 @@ class BaseClass(Base):
"""
return ctx.database_session

@classmethod
@declared_attr
@classmethod
def __directory_path__(cls) -> Path:
"""
Pull directory path from ctx to be used in operations.

@@ -104,8 +104,8 @@ class BaseClass(Base):
"""
return ctx.directory_path

@classmethod
@declared_attr
@classmethod
def __backup_path__(cls) -> Path:
"""
Pull backup directory path from ctx to be used in operations.

@@ -119,10 +119,9 @@ class BaseClass(Base):
super().__init__(*args, **kwargs)
self._misc_info = dict()

# @classproperty
@classmethod
@declared_attr
def jsons(cls) -> List[str]:
@classmethod
def jsons(cls):
"""
Get list of JSON db columns

@@ -134,10 +133,9 @@ class BaseClass(Base):
except AttributeError:
return []

# @classproperty
@classmethod
@declared_attr
def timestamps(cls) -> List[str]:
@classmethod
def timestamps(cls):
"""
Get list of TIMESTAMP columns

@@ -392,10 +390,9 @@ class BaseClass(Base):
pass
return dicto

# @classproperty
@classmethod
@declared_attr
def pydantic_model(cls) -> BaseModel:
@classmethod
def pydantic_model(cls):
"""
Gets the pydantic model corresponding to this object.

@@ -414,9 +411,9 @@ class BaseClass(Base):
return model

# @classproperty
@classmethod
@declared_attr
def add_edit_tooltips(cls) -> dict:
@classmethod
def add_edit_tooltips(cls):
"""
Gets tooltips for Omni-add-edit

@@ -425,10 +422,9 @@ class BaseClass(Base):
"""
return dict()

# @classproperty
@classmethod
@declared_attr
def details_template(cls) -> Template:
@classmethod
def details_template(cls):
"""
Get the details jinja template for the correct class

@@ -660,6 +656,7 @@ class BaseClass(Base):
pyd = getattr(pydant, pyd_model_name)
except AttributeError:
raise AttributeError(f"Could not get pydantic class {pyd_model_name}")
pyd.model_rebuild()
return pyd(**self.details_dict(**kwargs))

def show_details(self, obj):

@@ -699,6 +696,7 @@ class ConfigItem(BaseClass):
"""
Key:JSON objects to store config settings in database.
"""

id = Column(INTEGER, primary_key=True)
key = Column(String(32)) #: Name of the configuration item.
value = Column(JSON) #: Value associated with the config item.

@@ -1937,6 +1937,8 @@ class ProcedureSampleAssociation(BaseClass):

def to_pydantic(self, **kwargs):
output = super().to_pydantic(pyd_model_name="PydSample")
# from backend.validators.pydant import PydSample
# output = PydSample(**self.details_dict(**kwargs))
try:
output.submission_rank = output.misc_info['submission_rank']
except KeyError:

@@ -1,15 +1,18 @@
"""
Contains all validators
"""
from __future__ import annotations
import logging, re
import sys
from pathlib import Path
from openpyxl import load_workbook
from backend.db.models import Run, SubmissionType
from tools import jinja_template_loading
from jinja2 import Template
from dateutil.parser import parse
from datetime import datetime
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from backend.db.models import SubmissionType

logger = logging.getLogger(f"submissions.{__name__}")

@@ -27,15 +30,17 @@ class DefaultNamer(object):

class ClientSubmissionNamer(DefaultNamer):

def __init__(self, filepath: str | Path, submissiontype: str|SubmissionType|None=None,
def __init__(self, filepath: str | Path, submissiontype: str|"SubmissionType"|None=None,
data: dict | None = None, **kwargs):
from backend.db.models import SubmissionType
super().__init__(filepath=filepath)
if not submissiontype:
submissiontype = self.retrieve_submissiontype(filepath=self.filepath)
self.submissiontype = self.retrieve_submissiontype(filepath=self.filepath)
if isinstance(submissiontype, str):
submissiontype = SubmissionType.query(name=submissiontype)
self.submissiontype = SubmissionType.query(name=submissiontype)

def retrieve_submissiontype(self, filepath: str | Path):
def retrieve_submissiontype(self):
from backend.db.models import SubmissionType
# NOTE: Attempt 1, get from form properties:
sub_type = self.get_subtype_from_properties()
if not sub_type:

@@ -51,6 +56,7 @@ class ClientSubmissionNamer(DefaultNamer):
return sub_type

def get_subtype_from_regex(self) -> SubmissionType:
from backend.db.models import SubmissionType
regex = SubmissionType.regex
m = regex.search(self.filepath.__str__())
try:

@@ -64,6 +70,7 @@ class ClientSubmissionNamer(DefaultNamer):

def get_subtype_from_preparse(self) -> SubmissionType:
from backend.excel.parsers.clientsubmission_parser import ClientSubmissionInfoParser
from backend.db.models import SubmissionType
parser = ClientSubmissionInfoParser(self.filepath)
sub_type = next((value for k, value in parser.parsed_info.items() if k == "submissiontype"), None)
sub_type = SubmissionType.query(name=sub_type)

@@ -72,6 +79,7 @@ class ClientSubmissionNamer(DefaultNamer):
return sub_type

def get_subtype_from_properties(self) -> SubmissionType:
from backend.db.models import SubmissionType
wb = load_workbook(self.filepath)
# NOTE: Gets first category in the metadata.
categories = wb.properties.category.split(";")

@@ -88,6 +96,7 @@ class RSLNamer(object):
"""

def __init__(self, filename: str, submission_type: str | None = None, data: dict | None = None):
from backend.db.models import SubmissionType
# NOTE: Preferred method is path retrieval, but might also need validation for just string.
filename = Path(filename) if Path(filename).exists() else filename
self.submission_type = submission_type

@@ -113,7 +122,7 @@ class RSLNamer(object):
Returns:
str: parsed procedure type
"""

from backend.db.models import SubmissionType
def st_from_path(filepath: Path) -> str:
"""
Sub def to get proceduretype from a file path

@@ -186,8 +195,9 @@ class RSLNamer(object):
regex (str): string to construct pattern
filename (str): string to be parsed
"""
from backend.db.models import Run
if regex is None:
regex = BasicRun.regex
regex = Run.regex
match filename:
case Path():
m = regex.search(filename.stem)

@@ -215,6 +225,7 @@ class RSLNamer(object):
Returns:
str: Output filename
"""
from backend.db.models import Run
if "submitted_date" in data.keys():
if isinstance(data['submitted_date'], dict):
if data['submitted_date']['value'] is not None:

@@ -135,6 +135,47 @@ class PydBaseClass(BaseModel, extra='allow', validate_assignment=True):
continue
return list(set(output))

class PydResults(PydBaseClass, arbitrary_types_allowed=True):
result: dict = Field(default={})
result_type: str = Field(default="NA")
img: None | bytes = Field(default=None)
# parent: Procedure | ProcedureSampleAssociation | None = Field(default=None)
parent: Any | None = Field(default=None)
date_analyzed: datetime | None = Field(default=None)

@field_validator("date_analyzed")
@classmethod
def set_today(cls, value):
match value:
case str():
value = datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
case datetime():
pass
case date():
value = datetime.combine(value, datetime.max.time())
case _:
value = datetime.now()
return value

def to_sql(self):
sql, _ = Results.query_or_create(result_type=self.result_type, result=self.results)
try:
check = sql.image
except FileNotFoundError:
check = False
if not check:
sql.image = self.img
if not sql.date_analyzed:
sql.date_analyzed = self.date_analyzed
match self.parent:
case ProcedureSampleAssociation():
sql.sampleprocedureassociation = self.parent
case Procedure():
sql.procedure = self.parent
case _:
logger.error("Improper association found.")
return sql


class PydReagentLot(PydBaseClass):
lot: str | None

@@ -1623,42 +1664,42 @@ class PydRun(PydBaseClass): #, extra='allow'):
return samples


class PydResults(PydBaseClass, arbitrary_types_allowed=True):
result: dict = Field(default={})
result_type: str = Field(default="NA")
img: None | bytes = Field(default=None)
parent: Procedure | ProcedureSampleAssociation | None = Field(default=None)
date_analyzed: datetime | None = Field(default=None)

@field_validator("date_analyzed")
@classmethod
def set_today(cls, value):
match value:
case str():
value = datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
case datetime():
pass
case date():
value = datetime.combine(value, datetime.max.time())
case _:
value = datetime.now()
return value

def to_sql(self):
sql, _ = Results.query_or_create(result_type=self.result_type, result=self.results)
try:
check = sql.image
except FileNotFoundError:
check = False
if not check:
sql.image = self.img
if not sql.date_analyzed:
sql.date_analyzed = self.date_analyzed
match self.parent:
case ProcedureSampleAssociation():
sql.sampleprocedureassociation = self.parent
case Procedure():
sql.procedure = self.parent
case _:
logger.error("Improper association found.")
return sql
# class PydResults(PydBaseClass, arbitrary_types_allowed=True):
# result: dict = Field(default={})
# result_type: str = Field(default="NA")
# img: None | bytes = Field(default=None)
# parent: Procedure | ProcedureSampleAssociation | None = Field(default=None)
# date_analyzed: datetime | None = Field(default=None)
#
# @field_validator("date_analyzed")
# @classmethod
# def set_today(cls, value):
# match value:
# case str():
# value = datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
# case datetime():
# pass
# case date():
# value = datetime.combine(value, datetime.max.time())
# case _:
# value = datetime.now()
# return value
#
# def to_sql(self):
# sql, _ = Results.query_or_create(result_type=self.result_type, result=self.results)
# try:
# check = sql.image
# except FileNotFoundError:
# check = False
# if not check:
# sql.image = self.img
# if not sql.date_analyzed:
# sql.date_analyzed = self.date_analyzed
# match self.parent:
# case ProcedureSampleAssociation():
# sql.sampleprocedureassociation = self.parent
# case Procedure():
# sql.procedure = self.parent
# case _:
# logger.error("Improper association found.")
# return sql

@@ -782,7 +782,8 @@ def yaml_regex_creator(loader, node):
nodes = loader.construct_sequence(node)
name = nodes[0].replace(" ", "_")
abbr = nodes[1]
return f"(?P<{name}>RSL(?:-|_)?{abbr}(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)"
# return f"(?P<{name}>RSL(?:-|_)?{abbr}(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)"
return f"(?P<{name}>RSL(?:-|_)?{abbr}(?:-|_)?20\\d{2}-?\\d{2}-?\\d{2}(?:(_|-)?\\d?([^_0123456789\\sA-QS-Z]|$)?R?\\d?)?)"


def super_splitter(ins_str: str, substring: str, idx: int) -> str: