Compare commits
3 Commits
dba4ca0130 ... ccee4b3afe

| Author | SHA1 | Date |
|---|---|---|
|  | ccee4b3afe |  |
|  | 1445d2b93b |  |
|  | 8fee07b0c3 |  |
@@ -1,3 +1,7 @@
+# 202510.01
+
+- Update for Python 3.13
+
 # 202509.04

 - Qubit results parsing complete.
@@ -48,10 +48,10 @@ class BaseClass(Base):
         except AttributeError:
             return f"<{self.__class__.__name__}(Name Unavailable)>"

-    # @classproperty
-    @classmethod
     @declared_attr
-    def aliases(cls) -> List[str]:
+    @classmethod
+    def aliases(cls):
         """
         List of other names this class might be known by.

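
The hunks above and below reorder the decorator stacks so that `@declared_attr` is outermost and `@classmethod` sits directly on the function. Because decorators apply bottom-up, the outermost decorator receives whatever the inner ones produced; a minimal, stand-alone sketch of that ordering effect (the `trace` decorator here is illustrative only, not part of the project):

```python
def trace(obj):
    # Stand-in for a wrapping decorator such as declared_attr: it simply
    # reports what kind of object it was handed at class-creation time.
    print(f"trace received a {type(obj).__name__}")
    return obj

class Demo:
    @trace            # outermost: receives the classmethod object
    @classmethod
    def new_style(cls):
        return cls.__name__

    @classmethod      # outermost: wraps whatever trace returned
    @trace            # innermost: receives the plain function
    def old_style(cls):
        return cls.__name__

# Defining Demo prints:
#   trace received a classmethod
#   trace received a function
```
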
@@ -60,9 +60,9 @@ class BaseClass(Base):
         """
         return [cls.query_alias]

-    @classmethod
     @declared_attr
-    def query_alias(cls) -> str:
+    @classmethod
+    def query_alias(cls):
         """
         What to query this class as.

@@ -71,8 +71,8 @@ class BaseClass(Base):
         """
         return cls.__name__.lower()

-    @classmethod
     @declared_attr
+    @classmethod
     def __tablename__(cls) -> str:
         """
         Sets table name to lower case class name.
@@ -82,8 +82,8 @@ class BaseClass(Base):
         """
         return f"_{cls.__name__.lower()}"

-    @classmethod
     @declared_attr
+    @classmethod
     def __database_session__(cls) -> Session:
         """
         Pull db session from ctx to be used in operations
@@ -93,8 +93,8 @@ class BaseClass(Base):
         """
         return ctx.database_session

-    @classmethod
     @declared_attr
+    @classmethod
     def __directory_path__(cls) -> Path:
         """
         Pull directory path from ctx to be used in operations.
@@ -104,8 +104,8 @@ class BaseClass(Base):
         """
         return ctx.directory_path

-    @classmethod
     @declared_attr
+    @classmethod
     def __backup_path__(cls) -> Path:
         """
         Pull backup directory path from ctx to be used in operations.
@@ -119,10 +119,9 @@ class BaseClass(Base):
         super().__init__(*args, **kwargs)
         self._misc_info = dict()

-    # @classproperty
-    @classmethod
     @declared_attr
-    def jsons(cls) -> List[str]:
+    @classmethod
+    def jsons(cls):
         """
         Get list of JSON db columns

@@ -134,10 +133,9 @@ class BaseClass(Base):
         except AttributeError:
             return []

-    # @classproperty
-    @classmethod
     @declared_attr
-    def timestamps(cls) -> List[str]:
+    @classmethod
+    def timestamps(cls):
         """
         Get list of TIMESTAMP columns

@@ -392,10 +390,9 @@ class BaseClass(Base):
             pass
         return dicto

-    # @classproperty
-    @classmethod
     @declared_attr
-    def pydantic_model(cls) -> BaseModel:
+    @classmethod
+    def pydantic_model(cls):
         """
         Gets the pydantic model corresponding to this object.

@@ -414,9 +411,9 @@ class BaseClass(Base):
         return model

     # @classproperty
-    @classmethod
     @declared_attr
-    def add_edit_tooltips(cls) -> dict:
+    @classmethod
+    def add_edit_tooltips(cls):
         """
         Gets tooltips for Omni-add-edit

@@ -425,10 +422,9 @@ class BaseClass(Base):
         """
         return dict()

-    # @classproperty
-    @classmethod
     @declared_attr
-    def details_template(cls) -> Template:
+    @classmethod
+    def details_template(cls):
         """
         Get the details jinja template for the correct class

@@ -524,6 +520,7 @@ class BaseClass(Base):
         if isinstance(field_type, InstrumentedAttribute):
             match field_type.property:
                 case ColumnProperty():
+
                     return super().__setattr__(key, value)
                 case _RelationshipDeclared():
                     if field_type.property.uselist:
@@ -660,6 +657,7 @@ class BaseClass(Base):
             pyd = getattr(pydant, pyd_model_name)
         except AttributeError:
             raise AttributeError(f"Could not get pydantic class {pyd_model_name}")
+        pyd.model_rebuild()
         return pyd(**self.details_dict(**kwargs))

     def show_details(self, obj):
@@ -699,6 +697,7 @@ class ConfigItem(BaseClass):
     """
     Key:JSON objects to store config settings in database.
     """
+
     id = Column(INTEGER, primary_key=True)
     key = Column(String(32)) #: Name of the configuration item.
     value = Column(JSON) #: Value associated with the config item.
@@ -346,6 +346,7 @@ class Reagent(BaseClass, LogMixin):
         return [dict(name=self.name, lot=lot.lot, expiry=lot.expiry + self.eol_ext) for lot in self.reagentlot]


+
 class ReagentLot(BaseClass):

     pyd_model_name = "Reagent"
@@ -445,6 +446,7 @@ class ReagentLot(BaseClass):
         output['reagent'] = output['reagent'].name
         return output

+
 class Discount(BaseClass):
     """
     Relationship table for client labs for certain kits.
@@ -929,7 +931,7 @@ class Procedure(BaseClass):
         logger.info(f"Add Results! {resultstype_name}")
         from backend.managers import results
         results_manager = getattr(results, f"{resultstype_name}Manager")
-        rs = results_manager(procedure=self, parent=obj, fname=Path("C:\\Users\lwark\Documents\Submission_Forms\QubitData_18-09-2025_13-43-53.csv"))
+        rs = results_manager(procedure=self, parent=obj)#, fname=Path("C:\\Users\lwark\Documents\Submission_Forms\QubitData_18-09-2025_13-43-53.csv"))
         procedure = rs.procedure_to_pydantic()
         samples = rs.samples_to_pydantic()
         if procedure:
@@ -43,7 +43,7 @@ class ClientSubmission(BaseClass, LogMixin):
     submission_category = Column(String(64)) #: i.e. Surveillance
     sample_count = Column(INTEGER) #: Number of sample in the procedure
     full_batch_size = Column(INTEGER) #: Number of wells in provided plate. 0 if no plate.
-    comment = Column(JSON) #: comment objects from users.
+    comments = Column(JSON) #: comment objects from users.
     run = relationship("Run", back_populates="clientsubmission") #: many-to-one relationship
     contact = relationship("Contact", back_populates="clientsubmission") #: contact representing submitting lab.
     contact_id = Column(INTEGER, ForeignKey("_contact.id", ondelete="SET NULL",
@@ -240,9 +240,9 @@ class ClientSubmission(BaseClass, LogMixin):
         custom = None
         runs = None
         try:
-            comments = self.comment
+            comments = self.comments
         except Exception as e:
-            logger.error(f"Error setting comment: {self.comment}, {e}")
+            logger.error(f"Error setting comment: {self.comments}, {e}")
             comments = None
         try:
             contact = self.contact.name
@@ -1926,7 +1926,7 @@ class ProcedureSampleAssociation(BaseClass):
         # NOTE: Figure out how to merge the misc_info if doing .update instead.
         relevant = {k: v for k, v in output.items() if k not in ['sample']}
         output = output['sample'].details_dict()
-        logger.debug(output)
+        # logger.debug(output)
         misc = output['misc_info']
         output.update(relevant)
         output['misc_info'] = misc
@@ -1937,6 +1937,8 @@ class ProcedureSampleAssociation(BaseClass):

     def to_pydantic(self, **kwargs):
         output = super().to_pydantic(pyd_model_name="PydSample")
+        # from backend.validators.pydant import PydSample
+        # output = PydSample(**self.details_dict(**kwargs))
         try:
             output.submission_rank = output.misc_info['submission_rank']
         except KeyError:
@@ -62,7 +62,7 @@ class DefaultParser(object):
         self.sheet = sheet
         if not start_row:
             start_row = self.__class__.start_row
-        if self.filepath.suffix == ".xslx":
+        if self.filepath.suffix == ".xlsx":
             self.workbook = load_workbook(self.filepath, data_only=True)
             self.worksheet = self.workbook[self.sheet]
         elif self.filepath.suffix == ".csv":
@@ -3,6 +3,8 @@ Module for clientsubmission parsing
 """
 from __future__ import annotations
 import logging
+import sys
+from datetime import datetime
 from pathlib import Path
 from string import ascii_lowercase
 from typing import Generator, TYPE_CHECKING
@@ -135,6 +137,9 @@ class ClientSubmissionInfoParser(DefaultKEYVALUEParser, SubmissionTyperMixin):
             output['submissiontype']['value'] = self.submissiontype.name.title()
         except KeyError:
             pass
+        if isinstance(output['submitted_date']['value'], datetime):
+            output['submitted_date']['value'] = output['submitted_date']['value'].date()
+
         return output


@@ -1,6 +1,8 @@
 """

 """
+from openpyxl import Workbook
+
 from backend.excel.writers import DefaultKEYVALUEWriter, DefaultTABLEWriter
 from backend.db.models import ProcedureType
 from tools import flatten_list
@@ -16,6 +18,15 @@ class DefaultResultsSampleWriter(DefaultTABLEWriter):
         super().__init__(pydant_obj=pydant_obj, proceduretype=proceduretype, *args, **kwargs)
         self.pydant_obj = flatten_list([sample.results for sample in pydant_obj.sample])

+    def write_to_workbook(self, workbook: Workbook, sheet: str | None = None,
+                          start_row: int | None = None, *args, **kwargs) -> Workbook:
+        try:
+            self.worksheet = workbook[f"{self.proceduretype.name[:15]} Results"]
+        except KeyError:
+            self.worksheet = workbook.create_sheet(f"{self.proceduretype.name[:15]} Results")
+        # worksheet = workbook[f"{self.proceduretype.name[:15]} Results"]
+        return workbook
+

 from .qubit_results_writer import QubitInfoWriter, QubitSampleWriter
 from .pcr_results_writer import PCRInfoWriter, PCRSampleWriter
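
The `write_to_workbook` added above looks up an existing results sheet and creates it only when missing. For reference, a small self-contained sketch of that openpyxl lookup-or-create pattern (the sheet title below is made up for the example):

```python
from openpyxl import Workbook
from openpyxl.worksheet.worksheet import Worksheet

def get_or_create_sheet(workbook: Workbook, title: str) -> Worksheet:
    # Indexing a Workbook with a title that does not exist raises KeyError,
    # so fall back to creating the sheet on demand.
    try:
        return workbook[title]
    except KeyError:
        return workbook.create_sheet(title)

wb = Workbook()
ws = get_or_create_sheet(wb, "Qubit Results")            # created on the first call
assert get_or_create_sheet(wb, "Qubit Results") is ws    # reused afterwards
```
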
@@ -1,16 +1,13 @@
 """
-Writers for PCR results from Design and Analysis Software
+Writers for PCR results from Qubit device
 """
 from __future__ import annotations
 import logging
 from pprint import pformat
-from typing import Generator, TYPE_CHECKING
 from openpyxl import Workbook
 from openpyxl.styles import Alignment
 from . import DefaultResultsInfoWriter, DefaultResultsSampleWriter
-from tools import flatten_list
-if TYPE_CHECKING:
-    from backend.db.models import ProcedureType

 logger = logging.getLogger(f"submissions.{__name__}")

@@ -24,16 +21,12 @@ class QubitInfoWriter(DefaultResultsInfoWriter):
 class QubitSampleWriter(DefaultResultsSampleWriter):

     def write_to_workbook(self, workbook: Workbook, *args, **kwargs) -> Workbook:
-        try:
-            self.worksheet = workbook[f"{self.proceduretype.name[:15]} Results"]
-        except KeyError:
-            self.worksheet = workbook.create_sheet(f"{self.proceduretype.name[:15]} Results")
-        # worksheet = workbook[f"{self.proceduretype.name[:15]} Results"]
+        workbook = super().write_to_workbook(workbook=workbook, *args, **kwargs)
         header_row = self.proceduretype.allowed_result_methods['Qubit']['sample']['start_row']
         for iii, header in enumerate(self.column_headers, start=1):
-            logger.debug(f"Row: {header_row}, column: {iii}")
+            # logger.debug(f"Row: {header_row}, column: {iii}")
             self.worksheet.cell(row=header_row, column=iii, value=header.replace("_", " ").title())
-        logger.debug(f"Column headers: {self.column_headers}")
+        # logger.debug(f"Column headers: {self.column_headers}")
         for iii, result in enumerate(self.pydant_obj, start = 1):
             row = header_row + iii
             for k, v in result.result.items():
@@ -42,7 +35,7 @@ class QubitSampleWriter(DefaultResultsSampleWriter):
                 except StopIteration:
                     print(f"fail for {k.replace('_', ' ').title()}")
                     continue
-                logger.debug(f"Writing to row: {row}, column {column}")
+                # logger.debug(f"Writing to row: {row}, column {column}")
                 cell = self.worksheet.cell(row=row, column=column)
                 cell.value = v
                 cell.alignment = Alignment(horizontal='left')
@@ -56,6 +49,3 @@ class QubitSampleWriter(DefaultResultsSampleWriter):
             for k, value in result.result.items():
                 output.append(k)
         return sorted(list(set(output)))
-
-
-
@@ -15,7 +15,7 @@ class DefaultManager(object):

     def __init__(self, parent, input_object: Path | str | None = None):
         self.parent = parent
-        logger.debug(f"Input object: {pformat(input_object.__dict__)}")
         match input_object:
             case str():
                 self.input_object = Path(input_object)
@@ -1,15 +1,18 @@
 """
 Contains all validators
 """
+from __future__ import annotations
 import logging, re
 import sys
 from pathlib import Path
 from openpyxl import load_workbook
-from backend.db.models import Run, SubmissionType
 from tools import jinja_template_loading
 from jinja2 import Template
 from dateutil.parser import parse
 from datetime import datetime
+from typing import TYPE_CHECKING
+if TYPE_CHECKING:
+    from backend.db.models import SubmissionType

 logger = logging.getLogger(f"submissions.{__name__}")

@@ -27,15 +30,17 @@ class DefaultNamer(object):

 class ClientSubmissionNamer(DefaultNamer):

-    def __init__(self, filepath: str | Path, submissiontype: str|SubmissionType|None=None,
+    def __init__(self, filepath: str | Path, submissiontype: str|"SubmissionType"|None=None,
                  data: dict | None = None, **kwargs):
+        from backend.db.models import SubmissionType
         super().__init__(filepath=filepath)
         if not submissiontype:
-            submissiontype = self.retrieve_submissiontype(filepath=self.filepath)
+            self.submissiontype = self.retrieve_submissiontype(filepath=self.filepath)
         if isinstance(submissiontype, str):
-            submissiontype = SubmissionType.query(name=submissiontype)
+            self.submissiontype = SubmissionType.query(name=submissiontype)

-    def retrieve_submissiontype(self, filepath: str | Path):
+    def retrieve_submissiontype(self):
+        from backend.db.models import SubmissionType
         # NOTE: Attempt 1, get from form properties:
         sub_type = self.get_subtype_from_properties()
         if not sub_type:
@@ -51,6 +56,7 @@ class ClientSubmissionNamer(DefaultNamer):
         return sub_type

     def get_subtype_from_regex(self) -> SubmissionType:
+        from backend.db.models import SubmissionType
         regex = SubmissionType.regex
         m = regex.search(self.filepath.__str__())
         try:
@@ -64,6 +70,7 @@ class ClientSubmissionNamer(DefaultNamer):

     def get_subtype_from_preparse(self) -> SubmissionType:
         from backend.excel.parsers.clientsubmission_parser import ClientSubmissionInfoParser
+        from backend.db.models import SubmissionType
         parser = ClientSubmissionInfoParser(self.filepath)
         sub_type = next((value for k, value in parser.parsed_info.items() if k == "submissiontype"), None)
         sub_type = SubmissionType.query(name=sub_type)
@@ -72,6 +79,7 @@ class ClientSubmissionNamer(DefaultNamer):
         return sub_type

     def get_subtype_from_properties(self) -> SubmissionType:
+        from backend.db.models import SubmissionType
         wb = load_workbook(self.filepath)
         # NOTE: Gets first category in the metadata.
         categories = wb.properties.category.split(";")
@@ -88,6 +96,7 @@ class RSLNamer(object):
     """

     def __init__(self, filename: str, submission_type: str | None = None, data: dict | None = None):
+        from backend.db.models import SubmissionType
         # NOTE: Preferred method is path retrieval, but might also need validation for just string.
         filename = Path(filename) if Path(filename).exists() else filename
         self.submission_type = submission_type
@@ -113,7 +122,7 @@ class RSLNamer(object):
         Returns:
             str: parsed procedure type
         """
+        from backend.db.models import SubmissionType
         def st_from_path(filepath: Path) -> str:
             """
             Sub def to get proceduretype from a file path
@@ -186,8 +195,9 @@ class RSLNamer(object):
            regex (str): string to construct pattern
            filename (str): string to be parsed
        """
+        from backend.db.models import Run
        if regex is None:
-            regex = BasicRun.regex
+            regex = Run.regex
        match filename:
            case Path():
                m = regex.search(filename.stem)
@@ -215,6 +225,7 @@ class RSLNamer(object):
         Returns:
             str: Output filename
         """
+        from backend.db.models import Run
         if "submitted_date" in data.keys():
             if isinstance(data['submitted_date'], dict):
                 if data['submitted_date']['value'] is not None:
@@ -37,17 +37,21 @@ class PydBaseClass(BaseModel, extra='allow', validate_assignment=True):
     def prevalidate(cls, data):
         sql_fields = [k for k, v in cls._sql_object.__dict__.items() if isinstance(v, InstrumentedAttribute)]
         output = {}
-        try:
-            items = data.items()
-        except AttributeError as e:
-            logger.error(f"Could not prevalidate {cls.__name__} due to {e} for {pformat(data)}")
-            return data
-        for key, value in items:
-            new_key = key.replace("_", "")
-            if new_key in sql_fields:
-                output[new_key] = value
-            else:
-                output[key] = value
+        match data:
+            case dict():
+                try:
+                    items = data.items()
+                except AttributeError as e:
+                    logger.error(f"Could not prevalidate {cls.__name__} due to {e} for {pformat(data)}")
+                    return data
+                for key, value in items:
+                    new_key = key.replace("_", "")
+                    if new_key in sql_fields:
+                        output[new_key] = value
+                    else:
+                        output[key] = value
+            case _:
+                output = data
         return output

     @model_validator(mode='after')
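
The rewritten `prevalidate` above only rewrites keys when the incoming payload is actually a mapping; anything else falls through `case _:` untouched. A minimal sketch of that structural-pattern-matching guard (Python 3.10+), independent of the pydantic model here:

```python
def normalize_keys(data):
    # Only dictionaries get their keys rewritten; any other payload
    # (an object, None, a list, ...) is passed through unchanged.
    match data:
        case dict():
            return {key.replace("_", ""): value for key, value in data.items()}
        case _:
            return data

print(normalize_keys({"_lot": "A1", "name": "x"}))  # {'lot': 'A1', 'name': 'x'}
print(normalize_keys(None))                         # None
```
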
@@ -136,6 +140,48 @@ class PydBaseClass(BaseModel, extra='allow', validate_assignment=True):
         return list(set(output))


+class PydResults(PydBaseClass, arbitrary_types_allowed=True):
+    result: dict = Field(default={})
+    result_type: str = Field(default="NA")
+    img: None | bytes = Field(default=None)
+    # parent: Procedure | ProcedureSampleAssociation | None = Field(default=None)
+    parent: Any | None = Field(default=None)
+    date_analyzed: datetime | None = Field(default=None)
+
+    @field_validator("date_analyzed")
+    @classmethod
+    def set_today(cls, value):
+        match value:
+            case str():
+                value = datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
+            case datetime():
+                pass
+            case date():
+                value = datetime.combine(value, datetime.max.time())
+            case _:
+                value = datetime.now()
+        return value
+
+    def to_sql(self):
+        sql, _ = Results.query_or_create(result_type=self.result_type, result=self.results)
+        try:
+            check = sql.image
+        except FileNotFoundError:
+            check = False
+        if not check:
+            sql.image = self.img
+        if not sql.date_analyzed:
+            sql.date_analyzed = self.date_analyzed
+        match self.parent:
+            case ProcedureSampleAssociation():
+                sql.sampleprocedureassociation = self.parent
+            case Procedure():
+                sql.procedure = self.parent
+            case _:
+                logger.error("Improper association found.")
+        return sql
+
+
 class PydReagentLot(PydBaseClass):
     lot: str | None
     name: str | None = Field(default=None)
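
In the relocated `set_today` validator, `case datetime():` is listed before `case date():`. That ordering matters: `datetime` is a subclass of `date`, so a `date()` class pattern also matches datetime instances, and reversing the two cases would send datetimes down the `date` branch. A small standard-library illustration:

```python
from datetime import date, datetime

def classify(value):
    match value:
        case datetime():
            return "datetime"
        case date():
            return "date"
        case _:
            return "other"

print(classify(datetime(2025, 9, 18, 13, 43)))  # datetime
print(classify(date(2025, 9, 18)))              # date
```
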
@@ -145,9 +191,9 @@ class PydReagentLot(PydBaseClass):


 class PydReagent(PydBaseClass):
-    lot: str | None
+    # lot: str | None
     reagentrole: str | None
-    expiry: date | datetime | Literal['NA'] | None = Field(default=None, validate_default=True)
+    # expiry: date | datetime | Literal['NA'] | None = Field(default=None, validate_default=True)
     name: str | None = Field(default=None, validate_default=True)
     missing: bool = Field(default=True)
     comment: str | None = Field(default="", validate_default=True)
@@ -178,47 +224,47 @@ class PydReagent(PydBaseClass):
             return value
         return value

-    @field_validator("lot", mode='before')
-    @classmethod
-    def rescue_lot_string(cls, value):
-        if value is not None:
-            return convert_nans_to_nones(str(value).strip())
-        return value
-
-    @field_validator("lot")
-    @classmethod
-    def enforce_lot_string(cls, value):
-        if value is not None:
-            return value.upper().strip()
-        return value
-
-    @field_validator("expiry", mode="before")
-    @classmethod
-    def enforce_date(cls, value):
-        if value is not None:
-            match value:
-                case int():
-                    return datetime.fromordinal(datetime(1900, 1, 1).toordinal() + value - 2)
-                case 'NA':
-                    return value
-                case str():
-                    return parse(value)
-                case date():
-                    return datetime.combine(value, datetime.max.time())
-                case datetime():
-                    return value
-                case _:
-                    return convert_nans_to_nones(str(value))
-        if value is None:
-            value = datetime.combine(date.today(), datetime.max.time())
-        return value
-
-    @field_validator("expiry")
-    @classmethod
-    def date_na(cls, value):
-        if isinstance(value, date) and value.year == 1970:
-            value = "NA"
-        return value
+    # @field_validator("lot", mode='before')
+    # @classmethod
+    # def rescue_lot_string(cls, value):
+    #     if value is not None:
+    #         return convert_nans_to_nones(str(value).strip())
+    #     return value
+    #
+    # @field_validator("lot")
+    # @classmethod
+    # def enforce_lot_string(cls, value):
+    #     if value is not None:
+    #         return value.upper().strip()
+    #     return value
+    #
+    # @field_validator("expiry", mode="before")
+    # @classmethod
+    # def enforce_date(cls, value):
+    #     if value is not None:
+    #         match value:
+    #             case int():
+    #                 return datetime.fromordinal(datetime(1900, 1, 1).toordinal() + value - 2)
+    #             case 'NA':
+    #                 return value
+    #             case str():
+    #                 return parse(value)
+    #             case date():
+    #                 return datetime.combine(value, datetime.max.time())
+    #             case datetime():
+    #                 return value
+    #             case _:
+    #                 return convert_nans_to_nones(str(value))
+    #     if value is None:
+    #         value = datetime.combine(date.today(), datetime.max.time())
+    #     return value
+    #
+    # @field_validator("expiry")
+    # @classmethod
+    # def date_na(cls, value):
+    #     if isinstance(value, date) and value.year == 1970:
+    #         value = "NA"
+    #     return value

     @field_validator("name", mode="before")
     @classmethod
@@ -228,7 +274,6 @@ class PydReagent(PydBaseClass):
         else:
             return values.data['reagentrole'].strip()

-
     def improved_dict(self) -> dict:
         """
         Constructs a dictionary consisting of model.fields and model.extras
@@ -251,15 +296,18 @@ class PydReagent(PydBaseClass):
         Returns:
             Tuple[Reagent, Report]: Reagent instance and result of function
         """
+        from backend.db.models import ReagentLot, Reagent
         report = Report()
         if self.model_extra is not None:
             self.__dict__.update(self.model_extra)
-        reagent, new = ReagentLot.query_or_create(lot=self.lot, name=self.name)
+        reagentlot, new = ReagentLot.query_or_create(lot=self.lot, name=self.name)
         if new:
-            reagentrole = ReagentRole.query(name=self.reagentrole)
-            reagent.reagentrole = reagentrole
-            reagent.expiry = self.expiry
-        return reagent, report
+            reagent = Reagent.query(name=self.name, limit=1)
+            reagentlot.reagent = reagent
+            reagentlot.expiry = self.expiry
+        if isinstance(reagentlot.expiry, str):
+            reagentlot.expiry = datetime.combine(datetime.strptime(reagentlot.expiry, "%Y-%m-%d"), datetime.max.time())
+        return reagentlot, report


 class PydSample(PydBaseClass):
@@ -330,6 +378,7 @@ class PydTips(PydBaseClass):
         Returns:
             SubmissionTipsAssociation: Association between queried tips and procedure
         """
+        from backend.db.models import TipsLot
         report = Report()
         tips = TipsLot.query(lot=self.lot, limit=1)
         return tips, report
@@ -347,6 +396,7 @@ class PydEquipment(PydBaseClass):
     @field_validator('equipmentrole', mode='before')
     @classmethod
     def get_role_name(cls, value):
+        from backend.db.models import EquipmentRole
         match value:
             case list():
                 value = value[0]
@@ -361,6 +411,7 @@ class PydEquipment(PydBaseClass):
     @field_validator('processes', mode='before')
     @classmethod
     def process_to_pydantic(cls, value, values):
+        from backend.db.models import ProcessVersion, Process
         if isinstance(value, GeneratorType):
             value = [item for item in value]
         value = convert_nans_to_nones(value)
@@ -390,6 +441,7 @@ class PydEquipment(PydBaseClass):
     @field_validator('tips', mode='before')
     @classmethod
     def tips_to_pydantic(cls, value, values):
+        from backend.db.models import TipsLot
         if isinstance(value, GeneratorType):
             value = [item for item in value]
         value = convert_nans_to_nones(value)
@@ -426,6 +478,7 @@ class PydEquipment(PydBaseClass):
         Returns:
             Tuple[Equipment, RunEquipmentAssociation]: SQL objects
         """
+        from backend.db.models import Equipment, ProcedureEquipmentAssociation, Process
         report = Report()
         if isinstance(procedure, str):
             procedure = Procedure.query(name=procedure)
@@ -639,6 +692,7 @@ class PydProcess(PydBaseClass, extra="allow"):

     @report_result
     def to_sql(self):
+        from backend.db.models import ProcessVersion
         report = Report()
         name = self.name.split("-")[0]
         # NOTE: can't use query_or_create due to name not being part of ProcessVersion
@@ -684,12 +738,12 @@ class PydElastic(BaseModel, extra="allow", arbitrary_types_allowed=True):
 # NOTE: Generified objects below:

 class PydProcedure(PydBaseClass, arbitrary_types_allowed=True):
-    proceduretype: ProcedureType | None = Field(default=None)
-    run: Run | str | None = Field(default=None)
+    proceduretype: Any | None = Field(default=None)
+    run: Any | str | None = Field(default=None)
     name: dict = Field(default=dict(value="NA", missing=True), validate_default=True)
     technician: dict = Field(default=dict(value="NA", missing=True))
     repeat: bool = Field(default=False)
-    repeat_of: Procedure | None = Field(default=None)
+    repeat_of: Any | None = Field(default=None)
     plate_map: str | None = Field(default=None)
     reagent: list | None = Field(default=[])
     reagentrole: dict | None = Field(default={}, validate_default=True)
@@ -878,7 +932,10 @@ class PydProcedure(PydBaseClass, arbitrary_types_allowed=True):
         reg.save()

     def to_sql(self, new: bool = False):
-        from backend.db.models import RunSampleAssociation, ProcedureSampleAssociation
+        from backend.db.models import (
+            RunSampleAssociation, ProcedureSampleAssociation, Procedure, ProcedureReagentLotAssociation,
+            ProcedureEquipmentAssociation
+        )
         logger.debug(f"incoming pyd: {pformat([item.__dict__ for item in self.equipment])}")
         if new:
             sql = Procedure()
@@ -1001,9 +1058,11 @@ class PydClientSubmission(PydBaseClass):
     def enforce_submitted_date(cls, value):
         match value:
             case str():
-                value = dict(value=datetime.strptime(value, "%Y-%m-%d %H:%M:%S"), missing=False)
-            case date() | datetime():
+                value = dict(value=datetime.strptime(value, "%Y-%m-%d %H:%M:%S").date(), missing=False)
+            case date():
                 value = dict(value=value, missing=False)
+            case datetime():
+                value = dict(value=value.date(), missing=False)
             case _:
                 pass
         return value
@@ -1121,6 +1180,7 @@ class PydClientSubmission(PydBaseClass):

     def to_sql(self):
         sql = super().to_sql()
+        from backend.db.models import SubmissionType
         assert not any([isinstance(item, PydSample) for item in sql.sample])
         sql.sample = []
         if not sql.submissiontype:
@@ -1621,44 +1681,3 @@ class PydRun(PydBaseClass): #, extra='allow'):
             samples.append(sample)
         samples = sorted(samples, key=itemgetter("submission_rank"))
         return samples
-
-
-class PydResults(PydBaseClass, arbitrary_types_allowed=True):
-    result: dict = Field(default={})
-    result_type: str = Field(default="NA")
-    img: None | bytes = Field(default=None)
-    parent: Procedure | ProcedureSampleAssociation | None = Field(default=None)
-    date_analyzed: datetime | None = Field(default=None)
-
-    @field_validator("date_analyzed")
-    @classmethod
-    def set_today(cls, value):
-        match value:
-            case str():
-                value = datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
-            case datetime():
-                pass
-            case date():
-                value = datetime.combine(value, datetime.max.time())
-            case _:
-                value = datetime.now()
-        return value
-
-    def to_sql(self):
-        sql, _ = Results.query_or_create(result_type=self.result_type, result=self.results)
-        try:
-            check = sql.image
-        except FileNotFoundError:
-            check = False
-        if not check:
-            sql.image = self.img
-        if not sql.date_analyzed:
-            sql.date_analyzed = self.date_analyzed
-        match self.parent:
-            case ProcedureSampleAssociation():
-                sql.sampleprocedureassociation = self.parent
-            case Procedure():
-                sql.procedure = self.parent
-            case _:
-                logger.error("Improper association found.")
-        return sql
@@ -56,7 +56,15 @@ class ProcedureCreation(QDialog):
         proceduretype_dict = self.proceduretype.details_dict()
         # NOTE: Add --New-- as an option for reagents.
         for key, value in self.procedure.reagentrole.items():
-            value.append(dict(name="--New--"))
+            try:
+                check = "--New--" in [v['name'] for v in value]
+            except TypeError:
+                try:
+                    check = "--New--" in [v.name for v in value]
+                except (TypeError, AttributeError):
+                    check = True
+            if not check:
+                value.append(dict(name="--New--"))
         if self.procedure.equipment:
             for equipmentrole in proceduretype_dict['equipment']:
                 # NOTE: Check if procedure equipment is present and move to head of the list if so.
@@ -72,7 +80,6 @@ class ProcedureCreation(QDialog):
         proceduretype_dict['equipment'] = [sanitize_object_for_json(object) for object in proceduretype_dict['equipment']]
         regex = re.compile(r".*R\d$")
         proceduretype_dict['previous'] = [""] + [item.name for item in self.run.procedure if item.proceduretype == self.proceduretype and not bool(regex.match(item.name))]
-        # sys.exit(f"ProcedureDict:\n{pformat(proceduretype_dict)}")
         html = render_details_template(
             template_name="procedure_creation",
             js_in=["procedure_form", "grid_drag", "context_menu"],
@@ -82,12 +89,13 @@ class ProcedureCreation(QDialog):
             plate_map=self.plate_map,
             edit=self.edit
         )
+        # with open("procedure_creation.html", "w") as f:
+        #     f.write(html)
         self.webview.setHtml(html)

     @pyqtSlot(str, str, str, str)
     def update_equipment(self, equipmentrole: str, equipment: str, processversion: str, tips: str):
         from backend.db.models import Equipment, ProcessVersion, TipsLot
-        logger.debug(f"\n\nEquipmentRole: {equipmentrole}, Equipment: {equipment}, Process: {processversion}, Tips: {tips}\n\n")
         try:
             equipment_of_interest = next(
                 (item for item in self.procedure.equipment if item.equipmentrole == equipmentrole))
@@ -148,9 +156,10 @@ class ProcedureCreation(QDialog):

     @pyqtSlot(str, str, str, str)
     def add_new_reagent(self, reagentrole: str, name: str, lot: str, expiry: str):
-        from backend.validators.pydant import PydReagent
+        from backend.validators.pydant import PydReagentLot
         expiry = datetime.datetime.strptime(expiry, "%Y-%m-%d")
-        pyd = PydReagent(reagentrole=reagentrole, name=name, lot=lot, expiry=expiry)
+        logger.debug(f"{reagentrole}, {name}, {lot}, {expiry}")
+        pyd = PydReagentLot(reagentrole=reagentrole, name=name, lot=lot, expiry=expiry)
         self.procedure.reagentrole[reagentrole].insert(0, pyd)
         self.set_html()

@@ -162,6 +171,12 @@ class ProcedureCreation(QDialog):
             return
         self.procedure.update_reagents(reagentrole=reagentrole, name=name, lot=lot, expiry=expiry)

+    @pyqtSlot(str, result=list)
+    def get_reagent_names(self, reagentrole_name: str):
+        from backend.db.models import ReagentRole
+        reagentrole = ReagentRole.query(name=reagentrole_name)
+        return [item.name for item in reagentrole.get_reagents(proceduretype=self.procedure.proceduretype)]
+
     def return_sql(self, new: bool = False):
         output = self.procedure.to_sql(new=new)
         return output
@@ -194,7 +194,7 @@ function contextListener() {
 function clickListener() {
   document.addEventListener( "click", function(e) {
     var clickeElIsLink = clickInsideElement( e, contextMenuLinkClassName );
-    backend.log(e.target.id)
     if ( clickeElIsLink ) {
       e.preventDefault();
       menuItemListener( clickeElIsLink );
@@ -42,7 +42,7 @@ var changed_it = new Event('change');
 var reagentRoles = document.getElementsByClassName("reagentrole");

 for(let i = 0; i < reagentRoles.length; i++) {
-  reagentRoles[i].addEventListener("change", function() {
+  reagentRoles[i].addEventListener("change", async function() {
     if (reagentRoles[i].value.includes("--New--")) {
       // alert("Create new reagent.")
      var br = document.createElement("br");
@@ -50,9 +50,15 @@ for(let i = 0; i < reagentRoles.length; i++) {
       var new_form = document.createElement("form");
       new_form.setAttribute("class", "new_reagent_form")
       new_form.setAttribute("id", reagentRoles[i].id + "_addition")
-      var rr_name = document.createElement("input");
-      rr_name.setAttribute("type", "text");
+      var rr_name = document.createElement("select");
       rr_name.setAttribute("id", "new_" + reagentRoles[i].id + "_name");
+      var rr_options = await backend.get_reagent_names(reagentRoles[i].id).then(
+        function(result) {
+          result.forEach( function(item) {
+            rr_name.options.add( new Option(item));
+          });
+        }
+      );
       var rr_name_label = document.createElement("label");
       rr_name_label.setAttribute("for", "new_" + reagentRoles[i].id + "_name");
       rr_name_label.innerHTML = "Name:";
@@ -782,7 +782,8 @@ def yaml_regex_creator(loader, node):
    nodes = loader.construct_sequence(node)
    name = nodes[0].replace(" ", "_")
    abbr = nodes[1]
-    return f"(?P<{name}>RSL(?:-|_)?{abbr}(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)"
+    # return f"(?P<{name}>RSL(?:-|_)?{abbr}(?:-|_)?20\d{2}-?\d{2}-?\d{2}(?:(_|-)?\d?([^_0123456789\sA-QS-Z]|$)?R?\d?)?)"
+    return f"(?P<{name}>RSL(?:-|_)?{abbr}(?:-|_)?20\\d{2}-?\\d{2}-?\\d{2}(?:(_|-)?\\d?([^_0123456789\\sA-QS-Z]|$)?R?\\d?)?)"


 def super_splitter(ins_str: str, substring: str, idx: int) -> str:
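
One caveat on the rewritten `yaml_regex_creator` return value: inside an f-string, `{2}` is itself a replacement field, so `\\d{2}` renders as `\d2` rather than the regex quantifier `\d{2}`; literal braces have to be doubled as `{{2}}`. A quick, project-independent check:

```python
# f-string brace handling: {2} is evaluated, {{2}} stays literal.
print(f"20\\d{2}-?\\d{2}")      # 20\d2-?\d2
print(f"20\\d{{2}}-?\\d{{2}}")  # 20\d{2}-?\d{2}
```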