Created omni-manager, omni-addit
@@ -7,9 +7,9 @@ import importlib
 import time
 from datetime import date, datetime, timedelta
 from json import JSONDecodeError
-import logging, re, yaml, sys, os, stat, platform, getpass, inspect, json, numpy as np, pandas as pd
+import logging, re, yaml, sys, os, stat, platform, getpass, json, numpy as np, pandas as pd
 from threading import Thread
-
+from inspect import getmembers, isfunction, stack
 from dateutil.easter import easter
 from jinja2 import Environment, FileSystemLoader
 from logging import handlers
@@ -478,23 +478,25 @@ class Settings(BaseSettings, extra="allow"):
 
     def set_scripts(self):
         """
-        Imports all functions from "scripts" folder which will run their @registers, adding them to ctx scripts
+        Imports all functions from "scripts" folder, adding them to ctx scripts
         """
-        p = Path(__file__).parent.joinpath("scripts").absolute()
-        subs = [item.stem for item in p.glob("*.py") if "__" not in item.stem]
-        for sub in subs:
-            mod = importlib.import_module(f"tools.scripts.{sub}")
-            try:
-                func = getattr(mod, sub)
-            except AttributeError:
-                try:
-                    func = getattr(mod, "script")
-                except AttributeError:
-                    continue
-            if sub in self.startup_scripts:
-                self.startup_scripts[sub] = func
-            if sub in self.teardown_scripts:
-                self.teardown_scripts[sub] = func
+        if check_if_app():
+            p = Path(sys._MEIPASS).joinpath("files", "scripts")
+        else:
+            p = Path(__file__).parents[2].joinpath("scripts").absolute()
+        if str(p) not in sys.path:
+            sys.path.append(str(p))
+        modules = p.glob("[!__]*.py")
+        for module in modules:
+            mod = importlib.import_module(module.stem)
+            for function in getmembers(mod, isfunction):
+                name = function[0]
+                func = function[1]
+                # NOTE: assign function based on its name being in config: startup/teardown
+                if name in self.startup_scripts:
+                    self.startup_scripts[name] = func
+                if name in self.teardown_scripts:
+                    self.teardown_scripts[name] = func
 
     @timer
     def run_startup(self):
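For reference, the new loader matches plain functions by name: set_scripts() imports every non-dunder module in the scripts directory (or the bundled files/scripts directory when running as a frozen app) and stores each module-level function under any matching key in the startup_scripts or teardown_scripts config mappings. A minimal sketch of a drop-in script it would pick up, assuming the config lists hello as a startup script:

# scripts/hello.py -- hypothetical drop-in module. Any module-level
# function whose name matches a key in startup_scripts or
# teardown_scripts replaces that key's placeholder value.
def hello(ctx):
    # ctx is the Settings instance handed over by run_startup()
    print(f"Starting up; database schema is {ctx.database_schema}")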
@@ -502,9 +504,12 @@ class Settings(BaseSettings, extra="allow"):
         Runs startup scripts.
         """
         for script in self.startup_scripts.values():
-            logger.info(f"Running startup script: {script.__name__}")
-            thread = Thread(target=script, args=(ctx,))
-            thread.start()
+            try:
+                logger.info(f"Running startup script: {script.__name__}")
+                thread = Thread(target=script, args=(ctx,))
+                thread.start()
+            except AttributeError:
+                logger.error(f"Couldn't run startup script: {script}")
 
     @timer
     def run_teardown(self):
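The try/except added above guards against config entries whose names were never resolved to functions by set_scripts(); such values have no __name__, so the attribute access raises AttributeError. A standalone sketch of that failure mode, assuming the unresolved placeholder is None:

from threading import Thread

startup_scripts = {"hello": None}  # configured name that never matched a function
for script in startup_scripts.values():
    try:
        print(f"Running startup script: {script.__name__}")  # AttributeError when script is None
        Thread(target=script, args=(None,)).start()
    except AttributeError:
        print(f"Couldn't run startup script: {script}")

Note the except only covers the logging and thread creation; an exception raised inside the script itself still escapes on the worker thread.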
@@ -512,9 +517,12 @@ class Settings(BaseSettings, extra="allow"):
         Runs teardown scripts.
         """
         for script in self.teardown_scripts.values():
-            logger.info(f"Running teardown script: {script.__name__}")
-            thread = Thread(target=script, args=(ctx,))
-            thread.start()
+            try:
+                logger.info(f"Running teardown script: {script.__name__}")
+                thread = Thread(target=script, args=(ctx,))
+                thread.start()
+            except AttributeError:
+                logger.error(f"Couldn't run teardown script: {script}")
 
     @classmethod
     def get_alembic_db_path(cls, alembic_path, mode: Literal['path', 'schema', 'user', 'pass']) -> Path | str:
@@ -874,7 +882,7 @@ class Result(BaseModel, arbitrary_types_allowed=True):
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.owner = inspect.stack()[1].function
+        self.owner = stack()[1].function
 
     def report(self):
         from frontend.widgets.pop_ups import AlertPop
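The change above relies on the module-level from inspect import stack. A self-contained sketch of the caller-introspection idiom (make_report is an illustrative caller, not from the codebase):

from inspect import stack

class Result:
    def __init__(self):
        # frame 0 is this __init__; frame 1 is whoever constructed the Result
        self.owner = stack()[1].function

def make_report():  # hypothetical caller
    return Result().owner

print(make_report())  # prints "make_report"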
@@ -1,45 +0,0 @@
-"""
-Script meant to copy database data to a new file. Currently for SQLite only.
-"""
-import logging, shutil, pyodbc
-from datetime import date
-from pathlib import Path
-from tools import Settings
-# from .. import register_script
-
-logger = logging.getLogger(f"submissions.{__name__}")
-
-# @register_script
-def backup_database(ctx: Settings):
-    """
-    Copies the database into the backup directory the first time it is opened every month.
-    """
-    month = date.today().strftime("%Y-%m")
-    current_month_bak = Path(ctx.backup_path).joinpath(f"submissions_backup-{month}").resolve()
-    logger.info(f"Here is the db directory: {ctx.database_path}")
-    logger.info(f"Here is the backup directory: {ctx.backup_path}")
-    match ctx.database_schema:
-        case "sqlite":
-            db_path = ctx.database_path.joinpath(ctx.database_name).with_suffix(".db")
-            current_month_bak = current_month_bak.with_suffix(".db")
-            if not current_month_bak.exists() and "Archives" not in str(db_path):
-                logger.info("No backup found for this month, backing up database.")
-                try:
-                    shutil.copyfile(db_path, current_month_bak)
-                except PermissionError as e:
-                    logger.error(f"Couldn't backup database due to: {e}")
-        case "postgresql+psycopg2":
-            logger.warning("Backup function not yet implemented for psql")
-            current_month_bak = current_month_bak.with_suffix(".psql")
-        case "mssql+pyodbc":
-            logger.warning(f"{ctx.database_schema} backup is currently experiencing permission issues")
-            current_month_bak = current_month_bak.with_suffix(".bak")
-            return
-            if not current_month_bak.exists():
-                logger.info(f"No backup found for this month, backing up database to {current_month_bak}.")
-                connection = pyodbc.connect(driver='{ODBC Driver 18 for SQL Server}',
-                                            server=f'{ctx.database_path}', database=f'{ctx.database_name}',
-                                            trusted_connection='yes', trustservercertificate="yes", autocommit=True)
-                backup = f"BACKUP DATABASE [{ctx.database_name}] TO DISK = N'{current_month_bak}'"
-                cursor = connection.cursor().execute(backup)
-                connection.close()
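The deleted script copies the live SQLite file with shutil.copyfile, which can snapshot a database mid-write. For comparison, a sketch of the same monthly copy using the stdlib online-backup API (Python 3.7+); the paths are illustrative:

import sqlite3
from pathlib import Path

def backup_sqlite(db_path: Path, bak_path: Path) -> None:
    # Connection.backup() copies pages under SQLite's locking protocol,
    # so the result is consistent even if the source is in use.
    src = sqlite3.connect(db_path)
    dst = sqlite3.connect(bak_path)
    try:
        with dst:
            src.backup(dst)
    finally:
        dst.close()
        src.close()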
@@ -1,9 +0,0 @@
-"""
-Test script for teardown_scripts
-"""
-
-# from .. import register_script
-
-# @register_script
-def goodbye(ctx):
-    print("\n\nGoodbye. Thank you for using Robotics Submission Tracker.\n\n")
@@ -1,8 +0,0 @@
-"""
-Test script for startup_scripts
-"""
-# from .. import register_script
-
-# @register_script
-def hello(ctx):
-    print("\n\nHello! Welcome to Robotics Submission Tracker.\n\n")
@@ -1,67 +0,0 @@
-import logging, sqlite3, json
-from pprint import pformat, pprint
-from datetime import datetime
-from tools import Settings
-
-from sqlalchemy.orm import Session
-# from .. import register_script
-
-logger = logging.getLogger(f"submissions.{__name__}")
-
-# @register_script
-def import_irida(ctx: Settings):
-    """
-    Grabs Irida controls from secondary database.
-
-    Args:
-        ctx (Settings): Settings inherited from app.
-    """
-    from backend import BasicSample
-    from backend.db import IridaControl, ControlType
-    # NOTE: Because the main session will be busy in another thread, this requires a new session.
-    new_session = Session(ctx.database_session.get_bind())
-    ct = new_session.query(ControlType).filter(ControlType.name == "Irida Control").first()
-    existing_controls = [item.name for item in new_session.query(IridaControl)]
-    prm_list = ", ".join([f"'{thing}'" for thing in existing_controls])
-    ctrl_db_path = ctx.directory_path.joinpath("submissions_parser_output", "submissions.db")
-    try:
-        conn = sqlite3.connect(ctrl_db_path)
-    except AttributeError as e:
-        logger.error(f"Error, could not import from irida due to {e}")
-        return
-    sql = "SELECT name, submitted_date, submission_id, contains, matches, kraken, subtype, refseq_version, " \
-          "kraken2_version, kraken2_db_version, sample_id FROM _iridacontrol INNER JOIN _control on _control.id " \
-          f"= _iridacontrol.id WHERE _control.name NOT IN ({prm_list})"
-    cursor = conn.execute(sql)
-    records = [
-        dict(name=row[0], submitted_date=row[1], submission_id=row[2], contains=row[3], matches=row[4], kraken=row[5],
-             subtype=row[6], refseq_version=row[7], kraken2_version=row[8], kraken2_db_version=row[9],
-             sample_id=row[10]) for row in cursor]
-    for record in records:
-        # instance = IridaControl.query(name=record['name'])
-        instance = new_session.query(IridaControl).filter(IridaControl.name == record['name']).first()
-        if instance:
-            logger.warning(f"Irida Control {instance.name} already exists, skipping.")
-            continue
-        for thing in ['contains', 'matches', 'kraken']:
-            if record[thing]:
-                record[thing] = json.loads(record[thing])
-                assert isinstance(record[thing], dict)
-            else:
-                record[thing] = {}
-        # record['matches'] = json.loads(record['matches'])
-        # assert isinstance(record['matches'], dict)
-        # record['kraken'] = json.loads(record['kraken'])
-        # assert isinstance(record['kraken'], dict)
-        record['submitted_date'] = datetime.strptime(record['submitted_date'], "%Y-%m-%d %H:%M:%S.%f")
-        assert isinstance(record['submitted_date'], datetime)
-        instance = IridaControl(controltype=ct, **record)
-        # sample = BasicSample.query(submitter_id=instance.name)
-        sample = new_session.query(BasicSample).filter(BasicSample.submitter_id == instance.name).first()
-        if sample:
-            instance.sample = sample
-            instance.submission = sample.submissions[0]
-        # instance.save()
-        new_session.add(instance)
-    new_session.commit()
-    new_session.close()
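The deleted importer interpolates existing control names directly into the SQL text via prm_list, which breaks on names containing quotes. A sketch of the same NOT IN filter with bound parameters; the table and column come from the deleted script, the helper name is illustrative:

import sqlite3

def fetch_new_control_names(conn: sqlite3.Connection, existing: list) -> list:
    # One "?" placeholder per known name; the driver handles the quoting.
    if not existing:
        return [row[0] for row in conn.execute("SELECT name FROM _control")]
    placeholders = ", ".join("?" for _ in existing)
    sql = f"SELECT name FROM _control WHERE _control.name NOT IN ({placeholders})"
    return [row[0] for row in conn.execute(sql, existing)]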