Source code for galaxy.webapps.reports.controllers.system
import logging
import os
import subprocess
from datetime import datetime, timedelta
from decimal import Decimal
from sqlalchemy import and_, desc, false, null, true
from sqlalchemy.orm import eagerload
from galaxy import model, util
from galaxy.web.base.controller import BaseUIController, web
log = logging.getLogger(__name__)
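# Reports controller for system-level maintenance views: disk usage under the
# configured file_path, plus counts of userless histories, deleted-but-unpurged
# histories, and deleted-but-unpurged datasets older than a given number of days.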
class System(BaseUIController):
    @web.expose
    def index(self, trans, **kwd):
        params = util.Params(kwd)
        message = ''
        if params.userless_histories_days:
            userless_histories_days = params.userless_histories_days
        else:
            userless_histories_days = '60'
        if params.deleted_histories_days:
            deleted_histories_days = params.deleted_histories_days
        else:
            deleted_histories_days = '60'
        if params.deleted_datasets_days:
            deleted_datasets_days = params.deleted_datasets_days
        else:
            deleted_datasets_days = '60'
        file_path, disk_usage, datasets, file_size_str = self.disk_usage(trans, **kwd)
        if 'action' in kwd:
            if kwd['action'] == "userless_histories":
                userless_histories_days, message = self.userless_histories(trans, **kwd)
            elif kwd['action'] == "deleted_histories":
                deleted_histories_days, message = self.deleted_histories(trans, **kwd)
            elif kwd['action'] == "deleted_datasets":
                deleted_datasets_days, message = self.deleted_datasets(trans, **kwd)
        return trans.fill_template('/webapps/reports/system.mako',
                                   file_path=file_path,
                                   disk_usage=disk_usage,
                                   datasets=datasets,
                                   file_size_str=file_size_str,
                                   userless_histories_days=userless_histories_days,
                                   deleted_histories_days=deleted_histories_days,
                                   deleted_datasets_days=deleted_datasets_days,
                                   message=message,
                                   nice_size=nice_size)
    def userless_histories(self, trans, **kwd):
        """The number of userless histories and associated datasets that have not been updated for the specified number of days."""
        params = util.Params(kwd)
        message = ''
        if params.userless_histories_days:
            userless_histories_days = int(params.userless_histories_days)
            cutoff_time = datetime.utcnow() - timedelta(days=userless_histories_days)
            history_count = 0
            dataset_count = 0
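            # Histories with no owning user, already marked deleted, and not
            # updated since the cutoff; count them and their undeleted datasets.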
            for history in trans.sa_session.query(model.History) \
                    .filter(and_(model.History.table.c.user_id == null(),
                    model.History.table.c.deleted == true(),
                    model.History.table.c.update_time < cutoff_time)):
                for dataset in history.datasets:
                    if not dataset.deleted:
                        dataset_count += 1
                history_count += 1
            message = "%d userless histories ( including a total of %d datasets ) have not been updated for at least %d days." % (history_count, dataset_count, userless_histories_days)
        else:
            message = "Enter the number of days."
        return str(userless_histories_days), message
    def deleted_histories(self, trans, **kwd):
        """
        The number of histories that were deleted more than the specified number of days ago, but have not yet been purged.
        Also included is the number of datasets associated with the histories.
        """
        params = util.Params(kwd)
        message = ''
        if params.deleted_histories_days:
            deleted_histories_days = int(params.deleted_histories_days)
            cutoff_time = datetime.utcnow() - timedelta(days=deleted_histories_days)
            history_count = 0
            dataset_count = 0
            disk_space = 0
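            # Deleted but not yet purged histories older than the cutoff; datasets
            # are eager-loaded so the per-HDA size sum below avoids extra queries.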
            histories = trans.sa_session.query(model.History) \
                .filter(and_(model.History.table.c.deleted == true(),
                    model.History.table.c.purged == false(),
                    model.History.table.c.update_time < cutoff_time)) \
                .options(eagerload('datasets'))
            for history in histories:
                for hda in history.datasets:
                    if not hda.dataset.purged:
                        dataset_count += 1
                        try:
                            disk_space += hda.dataset.file_size
                        except Exception:
                            pass
                history_count += 1
            message = "%d histories ( including a total of %d datasets ) were deleted more than %d days ago, but have not yet been purged, " \
                "disk space: %s." % (history_count, dataset_count, deleted_histories_days, nice_size(disk_space, True))
        else:
            message = "Enter the number of days."
        return str(deleted_histories_days), message
    def deleted_datasets(self, trans, **kwd):
        """The number of datasets that were deleted more than the specified number of days ago, but have not yet been purged."""
        params = util.Params(kwd)
        message = ''
        if params.deleted_datasets_days:
            deleted_datasets_days = int(params.deleted_datasets_days)
            cutoff_time = datetime.utcnow() - timedelta(days=deleted_datasets_days)
            dataset_count = 0
            disk_space = 0
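            # Deleted but not yet purged datasets older than the cutoff still
            # occupy disk, so their file sizes are summed for the report.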
            for dataset in trans.sa_session.query(model.Dataset) \
                .filter(and_(model.Dataset.table.c.deleted == true(),
                    model.Dataset.table.c.purged == false(),
                    model.Dataset.table.c.update_time < cutoff_time)):
                dataset_count += 1
                try:
                    disk_space += dataset.file_size
                except Exception:
                    pass
            message = "%d datasets were deleted more than %d days ago, but have not yet been purged," \
                " disk space: %s." % (dataset_count, deleted_datasets_days, nice_size(disk_space, True))
        else:
            message = "Enter the number of days."
        return str(deleted_datasets_days), message
    @web.expose
    def dataset_info(self, trans, **kwd):
        message = ''
        dataset = trans.sa_session.query(model.Dataset).get(trans.security.decode_id(kwd.get('id', '')))
        # Get all associated hdas and lddas that use the same disk file.
        associated_hdas = trans.sa_session.query(trans.model.HistoryDatasetAssociation) \
            .filter(and_(trans.model.HistoryDatasetAssociation.deleted == false(),
            trans.model.HistoryDatasetAssociation.dataset_id == dataset.id)) \
            .all()
        associated_lddas = trans.sa_session.query(trans.model.LibraryDatasetDatasetAssociation) \
            .filter(and_(trans.model.LibraryDatasetDatasetAssociation.deleted == false(),
            trans.model.LibraryDatasetDatasetAssociation.dataset_id == dataset.id)) \
            .all()
        return trans.fill_template('/webapps/reports/dataset_info.mako',
                                   dataset=dataset,
                                   associated_hdas=associated_hdas,
                                   associated_lddas=associated_lddas,
                                   message=message)
    def get_disk_usage(self, file_path):
        is_sym_link = os.path.islink(file_path)
        file_system = disk_size = disk_used = disk_avail = disk_cap_pct = mount = None
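        # Parse `df -h` output for the data directory. The expected shape (an
        # illustrative example, not captured output) is:
        #   Filesystem      Size  Used Avail Use% Mounted on
        #   /dev/sda1       917G  523G  348G  61% /data
        # When file_path is a symlink to a remote mount, the mount name and the
        # size columns may arrive on separate lines, which is handled below.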
        # check_output() returns a single bytes blob; decode and split it so the
        # loop walks lines of df output rather than individual characters
        df_output = subprocess.check_output(['df', '-h', file_path]).decode()
        for df_line in df_output.splitlines():
            df_line = df_line.strip()
            if df_line:
                df_line = df_line.lower()
                if 'filesystem' in df_line or 'proc' in df_line:
                    continue
                elif is_sym_link:
                    if ':' in df_line and '/' in df_line:
                        mount = df_line
                    else:
                        try:
                            disk_size, disk_used, disk_avail, disk_cap_pct, file_system = df_line.split()
                            break
                        except Exception:
                            pass
                else:
                    try:
                        file_system, disk_size, disk_used, disk_avail, disk_cap_pct, mount = df_line.split()
                        break
                    except Exception:
                        pass
            else:
                break  # EOF
        return (file_system, disk_size, disk_used, disk_avail, disk_cap_pct, mount)
    @web.expose
    def disk_usage(self, trans, **kwd):
        file_path = trans.app.config.file_path
        disk_usage = self.get_disk_usage(file_path)
        min_file_size = 2 ** 32  # 4 GB
        file_size_str = nice_size(min_file_size)
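        # Largest unpurged datasets above the size threshold, biggest first.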
        datasets = trans.sa_session.query(model.Dataset) \
                                   .filter(and_(model.Dataset.table.c.purged == false(),
                                                model.Dataset.table.c.file_size > min_file_size)) \
                                   .order_by(desc(model.Dataset.table.c.file_size))
        return file_path, disk_usage, datasets, file_size_str
def nice_size(size, include_bytes=False):
    """Returns a readably formatted string with the size"""
    niced = False
    nice_string = "%s bytes" % size
    try:
        nsize = Decimal(size)
        for x in ['bytes', 'KB', 'MB', 'GB']:
            if nsize.compare(Decimal("1024.0")) == Decimal("-1"):
                nice_string = "%3.1f %s" % (nsize, x)
                niced = True
                break
            nsize /= Decimal("1024.0")
        if not niced:
            nice_string = "%3.1f %s" % (nsize, 'TB')
            niced = True
        if include_bytes and x != 'bytes':
            nice_string = "%s (%s bytes)" % (nice_string, size)
    except Exception:
        pass
    return nice_string
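# A minimal usage sketch for nice_size() (not part of the original module); the
# expected strings are worked out from the formatting logic above, not captured
# from a running Galaxy server.
if __name__ == '__main__':
    print(nice_size(512))                          # 512.0 bytes
    print(nice_size(2 ** 20, include_bytes=True))  # 1.0 MB (1048576 bytes)
    print(nice_size(2 ** 32, include_bytes=True))  # 4.0 GB (4294967296 bytes)
    print(nice_size(3 * 2 ** 40))                  # 3.0 TB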