galaxy.model.store package

class galaxy.model.store.StoreAppProtocol(*args, **kwargs)[source]

Bases: Protocol

Define the parts of a Galaxy-like app consumed by model store.

datatypes_registry: Registry
object_store: BaseObjectStore
security: IdEncodingHelper
tag_handler: GalaxyTagHandler
model: GalaxyModelMapping
file_sources: ConfiguredFileSources
workflow_contents_manager: WorkflowContentsManager
__init__(*args, **kwargs)
class galaxy.model.store.ImportDiscardedDataType(value)[source]

Bases: Enum

Enumeration of policies controlling how discarded dataset data is handled during import.

FORBID = 'forbid'
ALLOW = 'allow'
FORCE = 'force'
class galaxy.model.store.DatasetAttributeImportModel(*, state: DatasetState | None = None, external_filename: str | None = None, file_size: int | None = None, object_store_id: str | None = None, total_size: int | None = None, created_from_basename: str | None = None, uuid: str | None = None)[source]

Bases: BaseModel

state: DatasetState | None
external_filename: str | None
file_size: int | None
object_store_id: str | None
total_size: int | None
created_from_basename: str | None
uuid: str | None
model_config: ClassVar[ConfigDict] = {'extra': 'ignore'}

Configuration for the model; should be a dictionary conforming to pydantic.config.ConfigDict.

model_fields: ClassVar[dict[str, FieldInfo]] = {'created_from_basename': FieldInfo(annotation=Union[str, NoneType], required=False), 'external_filename': FieldInfo(annotation=Union[str, NoneType], required=False), 'file_size': FieldInfo(annotation=Union[int, NoneType], required=False), 'object_store_id': FieldInfo(annotation=Union[str, NoneType], required=False), 'state': FieldInfo(annotation=Union[Annotated[galaxy.schema.schema.DatasetState, BeforeValidator(func=<function <lambda>>), FieldInfo(annotation=NoneType, required=True, title='State', description='The current state of this dataset.')], NoneType], required=False), 'total_size': FieldInfo(annotation=Union[int, NoneType], required=False), 'uuid': FieldInfo(annotation=Union[str, NoneType], required=False)}

Metadata about the fields defined on the model, mapping of field names to pydantic.fields.FieldInfo.

This replaces Model.__fields__ from Pydantic V1.

model_post_init(__context: Any) None

This function is meant to behave like a BaseModel method to initialise private attributes.

It takes context as an argument since that’s what pydantic-core passes when calling it.

Parameters:
  • self – The BaseModel instance.

  • __context – The context.
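
A minimal sketch of validating raw dataset attributes with this model; because model_config sets extra='ignore', unrecognized keys are dropped rather than rejected (the sample dictionary is illustrative):

    from galaxy.model.store import DatasetAttributeImportModel

    raw_attrs = {
        "file_size": 1024,
        "created_from_basename": "input.fastq",  # illustrative value
        "not_a_field": "dropped",                # ignored: extra='ignore'
    }
    attrs = DatasetAttributeImportModel.model_validate(raw_attrs)
    assert attrs.file_size == 1024
    assert attrs.uuid is None  # all fields are optional and default to None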

class galaxy.model.store.ImportOptions(allow_edit: bool = False, allow_library_creation: bool = False, allow_dataset_object_edit: bool | None = None, discarded_data: ImportDiscardedDataType = ImportDiscardedDataType.FORBID)[source]

Bases: object

__init__(allow_edit: bool = False, allow_library_creation: bool = False, allow_dataset_object_edit: bool | None = None, discarded_data: ImportDiscardedDataType = ImportDiscardedDataType.FORBID) None[source]
allow_edit: bool
allow_library_creation: bool
allow_dataset_object_edit: bool
discarded_data: ImportDiscardedDataType
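
A minimal sketch of constructing ImportOptions, based only on the signature above; the values chosen are illustrative:

    from galaxy.model.store import ImportDiscardedDataType, ImportOptions

    # Illustrative configuration: allow edits to existing objects and permit
    # (but do not force) importing datasets whose file data was discarded.
    options = ImportOptions(
        allow_edit=True,
        discarded_data=ImportDiscardedDataType.ALLOW,
    )
    assert options.discarded_data is ImportDiscardedDataType.ALLOW
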
class galaxy.model.store.SessionlessContext[source]

Bases: object

__init__() None[source]
commit() None[source]
flush() None[source]
add(obj: RepresentById) None[source]
query(model_class: RepresentById) Bunch[source]
get(model_class: RepresentById, primary_key: Any)[source]
galaxy.model.store.replace_metadata_file(metadata: Dict[str, Any], dataset_instance: DatasetInstance, sa_session: SessionlessContext | scoped_session) Dict[str, Any][source]
class galaxy.model.store.ModelImportStore(import_options: ImportOptions | None = None, app: StoreAppProtocol | None = None, user: User | None = None, object_store: ObjectStore | None = None, tag_handler: GalaxyTagHandlerSession | None = None)[source]

Bases: object

archive_dir: str
__init__(import_options: ImportOptions | None = None, app: StoreAppProtocol | None = None, user: User | None = None, object_store: ObjectStore | None = None, tag_handler: GalaxyTagHandlerSession | None = None) None[source]
app: StoreAppProtocol | None
abstract workflow_paths() Iterator[Tuple[str, str]][source]
abstract defines_new_history() bool[source]

Does this store define a new history to create?

abstract new_history_properties() Dict[str, Any][source]

Dict of history properties if defines_new_history() is truthy.

abstract datasets_properties() List[Dict[str, Any]][source]

Return a list of HDA properties.

library_properties() List[Dict[str, Any]][source]

Return a list of library properties.

abstract invocations_properties() List[Dict[str, Any]][source]
abstract collections_properties() List[Dict[str, Any]][source]

Return a list of HDCA properties.

abstract implicit_dataset_conversion_properties() List[Dict[str, Any]][source]

Return a list of ImplicitlyConvertedDatasetAssociation properties.

abstract jobs_properties() List[Dict[str, Any]][source]

Return a list of jobs properties.

abstract implicit_collection_jobs_properties() List[Dict[str, Any]][source]
abstract property object_key: str

Key used to connect objects in metadata.

Legacy exports used ‘hid’, but associated objects may not be from the same history, and a history may contain multiple objects with the same ‘hid’.

property file_source_root: str | None

Source of valid file data.

trust_hid(obj_attrs: Dict[str, Any]) bool[source]

Trust HID when importing objects into a new History.

target_history(default_history: History | None = None, legacy_history_naming: bool = True) Iterator[History | None][source]
perform_import(history: History | None = None, new_history: bool = False, job: Job | None = None) ObjectImportTracker[source]
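
A hedged sketch of the import flow, assuming target_history behaves as a context manager (consistent with its Iterator return type) and that extra keyword arguments to get_import_model_store_for_directory are forwarded to the store constructor; app and history are assumed to come from the surrounding Galaxy context:

    from galaxy.model.store import (
        ImportOptions,
        get_import_model_store_for_directory,
    )

    # 'app' and 'history' are assumed to come from the caller's Galaxy
    # context; the extracted-archive path is illustrative.
    model_store = get_import_model_store_for_directory(
        "/tmp/extracted_export",
        app=app,
        import_options=ImportOptions(allow_library_creation=True),
    )
    with model_store.target_history(default_history=history) as target:
        tracker = model_store.perform_import(target)
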
class galaxy.model.store.ObjectImportTracker[source]

Bases: object

Keep track of new and existing imported objects.

Needed to re-establish connections between objects across multiple import passes.

__init__() None[source]
libraries_by_key: Dict[str | int, Library]
hdas_by_key: Dict[str | int, HistoryDatasetAssociation]
hdas_by_id: Dict[int, HistoryDatasetAssociation]
hdcas_by_key: Dict[str | int, HistoryDatasetCollectionAssociation]
hdcas_by_id: Dict[int, HistoryDatasetCollectionAssociation]
dces_by_key: Dict[str | int, DatasetCollectionElement]
dces_by_id: Dict[int, DatasetCollectionElement]
lddas_by_key: Dict[str | int, LibraryDatasetDatasetAssociation]
hda_copied_from_sinks: Dict[str | int, str | int]
hdca_copied_from_sinks: Dict[str | int, str | int]
jobs_by_key: Dict[str | int, Job]
requires_hid: List[HistoryDatasetAssociation | HistoryDatasetCollectionAssociation]
copy_hid_for: Dict[HistoryDatasetAssociation | HistoryDatasetCollectionAssociation, HistoryDatasetAssociation | HistoryDatasetCollectionAssociation]
find_hda(input_key: str | int, hda_id: int | None = None) HistoryDatasetAssociation | None[source]
find_hdca(input_key: str | int) HistoryDatasetCollectionAssociation | None[source]
find_dce(input_key: str | int) DatasetCollectionElement | None[source]
exception galaxy.model.store.FileTracebackException(traceback: str, *args, **kwargs)[source]

Bases: Exception

__init__(traceback: str, *args, **kwargs) None[source]
galaxy.model.store.get_import_model_store_for_directory(archive_dir: str, **kwd) DirectoryImportModelStore1901 | DirectoryImportModelStoreLatest[source]
class galaxy.model.store.DictImportModelStore(store_as_dict: Dict[str, Any], **kwd)[source]

Bases: ModelImportStore

object_key = 'encoded_id'
__init__(store_as_dict: Dict[str, Any], **kwd) None[source]
archive_dir: str
defines_new_history() bool[source]

Does this store define a new history to create?

new_history_properties() Dict[str, Any][source]

Dict of history properties if defines_new_history() is truthy.

datasets_properties() List[Dict[str, Any]][source]

Return a list of HDA properties.

collections_properties() List[Dict[str, Any]][source]

Return a list of HDCA properties.

implicit_dataset_conversion_properties() List[Dict[str, Any]][source]

Return a list of ImplicitlyConvertedDatasetAssociation properties.

library_properties() List[Dict[str, Any]][source]

Return a list of library properties.

jobs_properties() List[Dict[str, Any]][source]

Return a list of jobs properties.

implicit_collection_jobs_properties() List[Dict[str, Any]][source]
invocations_properties() List[Dict[str, Any]][source]
workflow_paths() Iterator[Tuple[str, str]][source]
app: StoreAppProtocol | None
galaxy.model.store.get_import_model_store_for_dict(as_dict: Dict[str, Any], **kwd) DictImportModelStore[source]
class galaxy.model.store.BaseDirectoryImportModelStore(import_options: ImportOptions | None = None, app: StoreAppProtocol | None = None, user: User | None = None, object_store: ObjectStore | None = None, tag_handler: GalaxyTagHandlerSession | None = None)[source]

Bases: ModelImportStore

property file_source_root: str

Source of valid file data.

defines_new_history() bool[source]

Does this store define a new history to create?

new_history_properties() Dict[str, Any][source]

Dict of history properties if defines_new_history() is truthy.

datasets_properties() List[Dict[str, Any]][source]

Return a list of HDA properties.

collections_properties() List[Dict[str, Any]][source]

Return a list of HDCA properties.

implicit_dataset_conversion_properties() List[Dict[str, Any]][source]

Return a list of ImplicitlyConvertedDatasetAssociation properties.

library_properties() List[Dict[str, Any]][source]

Return a list of library properties.

jobs_properties() List[Dict[str, Any]][source]

Return a list of jobs properties.

implicit_collection_jobs_properties() List[Dict[str, Any]][source]
invocations_properties() List[Dict[str, Any]][source]
workflow_paths() Iterator[Tuple[str, str]][source]
app: StoreAppProtocol | None
archive_dir: str
galaxy.model.store.restore_times(model_object: Job | WorkflowInvocation | WorkflowInvocationStep, attrs: Dict[str, Any]) None[source]
class galaxy.model.store.DirectoryImportModelStore1901(archive_dir: str, **kwd)[source]

Bases: BaseDirectoryImportModelStore

object_key = 'hid'
__init__(archive_dir: str, **kwd) None[source]
archive_dir: str
trust_hid(obj_attrs: Dict[str, Any]) bool[source]

Trust HID when importing objects into a new History.

app: StoreAppProtocol | None
class galaxy.model.store.DirectoryImportModelStoreLatest(archive_dir: str, **kwd)[source]

Bases: BaseDirectoryImportModelStore

object_key = 'encoded_id'
__init__(archive_dir: str, **kwd) None[source]
archive_dir: str
app: StoreAppProtocol | None
class galaxy.model.store.BagArchiveImportModelStore(bag_archive: str, **kwd)[source]

Bases: DirectoryImportModelStoreLatest

__init__(bag_archive: str, **kwd) None[source]
app: StoreAppProtocol | None
archive_dir: str
class galaxy.model.store.ModelExportStore[source]

Bases: object

abstract export_history(history: History, include_hidden: bool = False, include_deleted: bool = False) None[source]

Export history to store.

abstract export_library(library: Library, include_hidden: bool = False, include_deleted: bool = False) None[source]

Export library to store.

abstract export_library_folder(library_folder: LibraryFolder, include_hidden: bool = False, include_deleted: bool = False) None[source]

Export library folder to store.

abstract export_workflow_invocation(workflow_invocation, include_hidden=False, include_deleted=False)[source]

Export workflow invocation to store.

abstract add_dataset_collection(collection: DatasetCollection | HistoryDatasetCollectionAssociation)[source]

Add Dataset Collection or HDCA to export store.

abstract add_dataset(dataset: DatasetInstance, include_files: bool = True)[source]

Add HDA to export store.

include_files controls whether file contents are exported as well.

class galaxy.model.store.DirectoryModelExportStore(export_directory: str | PathLike, app: StoreAppProtocol | None = None, file_sources: ConfiguredFileSources | None = None, for_edit: bool = False, serialize_dataset_objects: bool | None = None, export_files: str | None = None, strip_metadata_files: bool = True, serialize_jobs: bool = True, user_context=None)[source]

Bases: ModelExportStore

__init__(export_directory: str | PathLike, app: StoreAppProtocol | None = None, file_sources: ConfiguredFileSources | None = None, for_edit: bool = False, serialize_dataset_objects: bool | None = None, export_files: str | None = None, strip_metadata_files: bool = True, serialize_jobs: bool = True, user_context=None) None[source]
Parameters:
  • export_directory – path to export directory. Will be created if it does not exist.

  • app – Galaxy App or app-like object. Must be provided if for_edit and/or serialize_dataset_objects are True.

  • for_edit – Allow modifying existing HDA and dataset metadata during import.

  • serialize_dataset_objects – If True, IDs will be encoded using the host secret. Defaults to the value of for_edit.

  • export_files – How files should be exported; one of ‘symlink’, ‘copy’, or None, in which case files will not be serialized.

  • serialize_jobs – Include job data in the model export. Not needed for the set_metadata script.

app: StoreAppProtocol | None
file_sources: ConfiguredFileSources | None
property workflows_directory: str
serialize_files(dataset: DatasetInstance, as_dict: Dict[str, Any]) None[source]
exported_key(obj: RepresentById) str | int[source]
push_metadata_files()[source]
export_job(job: Job, tool=None, include_job_data=True)[source]
export_jobs(jobs: Iterable[Job], jobs_attrs: List[Dict[str, Any]] | None = None, include_job_data: bool = True) List[Dict[str, Any]][source]

Export jobs.

include_job_data determines whether datasets associated with jobs should be exported as well. This should generally be True, except when re-exporting a job while running the set_meta script (to store the generated command line).

export_history(history: History, include_hidden: bool = False, include_deleted: bool = False) None[source]

Export history to store.

export_library(library: Library, include_hidden: bool = False, include_deleted: bool = False) None[source]

Export library to store.

export_library_folder(library_folder: LibraryFolder, include_hidden=False, include_deleted=False)[source]

Export library folder to store.

export_library_folder_contents(library_folder: LibraryFolder, include_hidden: bool = False, include_deleted: bool = False) None[source]
export_workflow_invocation(workflow_invocation: WorkflowInvocation, include_hidden: bool = False, include_deleted: bool = False) None[source]

Export workflow invocation to store.

add_job_output_dataset_associations(job_id: int, name: str, dataset_instance: DatasetInstance) None[source]
export_collection(collection: DatasetCollection | HistoryDatasetCollectionAssociation, include_deleted: bool = False, include_hidden: bool = False) None[source]
add_dataset_collection(collection: DatasetCollection | HistoryDatasetCollectionAssociation) None[source]

Add Dataset Collection or HDCA to export store.

add_implicit_conversion_dataset(dataset: DatasetInstance, include_files: bool, conversion: ImplicitlyConvertedDatasetAssociation) None[source]
add_dataset(dataset: DatasetInstance, include_files: bool = True) None[source]

Add HDA to export store.

include_files controls whether file contents are exported as well.
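
A hedged sketch of exporting a history, assuming the store can be used as a context manager that finalizes the export on exit; app and history are assumed to come from the surrounding context:

    from galaxy.model.store import DirectoryModelExportStore

    # 'app' and 'history' are assumed to exist; the directory path is
    # illustrative. export_files='copy' copies dataset files into the export.
    with DirectoryModelExportStore(
        "/tmp/history_export",  # created if it does not exist
        app=app,
        export_files="copy",
    ) as export_store:
        export_store.export_history(history, include_hidden=True)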

class galaxy.model.store.WriteCrates[source]

Bases: object

included_invocations: List[WorkflowInvocation]
export_directory: str | PathLike
included_datasets: Dict[DatasetInstance, Tuple[DatasetInstance, bool]]
dataset_implicit_conversions: Dict[DatasetInstance, ImplicitlyConvertedDatasetAssociation]
dataset_id_to_path: Dict[int, Tuple[str | None, str | None]]
abstract property workflows_directory: str
class galaxy.model.store.WorkflowInvocationOnlyExportStore(export_directory: str | PathLike, app: StoreAppProtocol | None = None, file_sources: ConfiguredFileSources | None = None, for_edit: bool = False, serialize_dataset_objects: bool | None = None, export_files: str | None = None, strip_metadata_files: bool = True, serialize_jobs: bool = True, user_context=None)[source]

Bases: DirectoryModelExportStore

export_history(history: History, include_hidden: bool = False, include_deleted: bool = False)[source]

Export history to store.

export_library(history, include_hidden=False, include_deleted=False)[source]

Export library to store.

property only_invocation: WorkflowInvocation
app: StoreAppProtocol | None
file_sources: ConfiguredFileSources | None
included_datasets: Dict[DatasetInstance, Tuple[DatasetInstance, bool]]
dataset_implicit_conversions: Dict[DatasetInstance, ImplicitlyConvertedDatasetAssociation]
included_collections: Dict[DatasetCollection | HistoryDatasetCollectionAssociation, DatasetCollection | HistoryDatasetCollectionAssociation]
included_libraries: List[Library]
included_library_folders: List[LibraryFolder]
included_invocations: List[WorkflowInvocation]
collection_datasets: Set[int]
dataset_id_to_path: Dict[int, Tuple[str | None, str | None]]
job_output_dataset_associations: Dict[int, Dict[str, DatasetInstance]]
class galaxy.model.store.BcoExportOptions(galaxy_url: str, galaxy_version: str, merge_history_metadata: bool = False, override_environment_variables: Dict[str, str] | NoneType = None, override_empirical_error: Dict[str, str] | NoneType = None, override_algorithmic_error: Dict[str, str] | NoneType = None, override_xref: List[galaxy.schema.bco.description_domain.XrefItem] | NoneType = None)[source]

Bases: object

galaxy_url: str
galaxy_version: str
merge_history_metadata: bool = False
override_environment_variables: Dict[str, str] | None = None
override_empirical_error: Dict[str, str] | None = None
override_algorithmic_error: Dict[str, str] | None = None
override_xref: List[XrefItem] | None = None
__init__(galaxy_url: str, galaxy_version: str, merge_history_metadata: bool = False, override_environment_variables: Dict[str, str] | None = None, override_empirical_error: Dict[str, str] | None = None, override_algorithmic_error: Dict[str, str] | None = None, override_xref: List[XrefItem] | None = None) None
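
A minimal sketch of constructing BcoExportOptions from the signature above; the URL and version values are illustrative:

    from galaxy.model.store import BcoExportOptions

    options = BcoExportOptions(
        galaxy_url="https://galaxy.example.org",  # illustrative instance URL
        galaxy_version="23.0",                    # illustrative version
        merge_history_metadata=True,
    )
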
class galaxy.model.store.BcoModelExportStore(uri, export_options: BcoExportOptions, **kwds)[source]

Bases: WorkflowInvocationOnlyExportStore

__init__(uri, export_options: BcoExportOptions, **kwds)[source]
Parameters:
  • export_directory – path to export directory. Will be created if it does not exist.

  • app – Galaxy App or app-like object. Must be provided if for_edit and/or serialize_dataset_objects are True.

  • for_edit – Allow modifying existing HDA and dataset metadata during import.

  • serialize_dataset_objects – If True, IDs will be encoded using the host secret. Defaults to the value of for_edit.

  • export_files – How files should be exported; one of ‘symlink’, ‘copy’, or None, in which case files will not be serialized.

  • serialize_jobs – Include job data in the model export. Not needed for the set_metadata script.

app: StoreAppProtocol | None
file_sources: ConfiguredFileSources | None
included_datasets: Dict[DatasetInstance, Tuple[DatasetInstance, bool]]
dataset_implicit_conversions: Dict[DatasetInstance, ImplicitlyConvertedDatasetAssociation]
included_collections: Dict[DatasetCollection | HistoryDatasetCollectionAssociation, DatasetCollection | HistoryDatasetCollectionAssociation]
included_libraries: List[Library]
included_library_folders: List[LibraryFolder]
included_invocations: List[WorkflowInvocation]
collection_datasets: Set[int]
dataset_id_to_path: Dict[int, Tuple[str | None, str | None]]
job_output_dataset_associations: Dict[int, Dict[str, DatasetInstance]]
class galaxy.model.store.ROCrateModelExportStore(crate_directory: str | PathLike, **kwds)[source]

Bases: DirectoryModelExportStore, WriteCrates

__init__(crate_directory: str | PathLike, **kwds) None[source]
Parameters:
  • export_directory – path to export directory. Will be created if it does not exist.

  • app – Galaxy App or app-like object. Must be provided if for_edit and/or serialize_dataset_objects are True.

  • for_edit – Allow modifying existing HDA and dataset metadata during import.

  • serialize_dataset_objects – If True, IDs will be encoded using the host secret. Defaults to the value of for_edit.

  • export_files – How files should be exported; one of ‘symlink’, ‘copy’, or None, in which case files will not be serialized.

  • serialize_jobs – Include job data in the model export. Not needed for the set_metadata script.

app: StoreAppProtocol | None
file_sources: ConfiguredFileSources | None
included_datasets: Dict[DatasetInstance, Tuple[DatasetInstance, bool]]
dataset_implicit_conversions: Dict[DatasetInstance, ImplicitlyConvertedDatasetAssociation]
included_collections: Dict[DatasetCollection | HistoryDatasetCollectionAssociation, DatasetCollection | HistoryDatasetCollectionAssociation]
included_libraries: List[Library]
included_library_folders: List[LibraryFolder]
included_invocations: List[WorkflowInvocation]
collection_datasets: Set[int]
dataset_id_to_path: Dict[int, Tuple[str | None, str | None]]
job_output_dataset_associations: Dict[int, Dict[str, DatasetInstance]]
class galaxy.model.store.ROCrateArchiveModelExportStore(uri: str | PathLike, **kwds)[source]

Bases: DirectoryModelExportStore, WriteCrates

__init__(uri: str | PathLike, **kwds) None[source]
Parameters:
  • export_directory – path to export directory. Will be created if it does not exist.

  • app – Galaxy App or app-like object. Must be provided if for_edit and/or serialize_dataset_objects are True.

  • for_edit – Allow modifying existing HDA and dataset metadata during import.

  • serialize_dataset_objects – If True, IDs will be encoded using the host secret. Defaults to the value of for_edit.

  • export_files – How files should be exported; one of ‘symlink’, ‘copy’, or None, in which case files will not be serialized.

  • serialize_jobs – Include job data in the model export. Not needed for the set_metadata script.

out_file: str | PathLike
file_source_uri: str | PathLike | None
class galaxy.model.store.TarModelExportStore(uri: str | PathLike, gzip: bool = True, **kwds)[source]

Bases: DirectoryModelExportStore

__init__(uri: str | PathLike, gzip: bool = True, **kwds) None[source]
Parameters:
  • export_directory – path to export directory. Will be created if it does not exist.

  • app – Galaxy App or app-like object. Must be provided if for_edit and/or serialize_dataset_objects are True.

  • for_edit – Allow modifying existing HDA and dataset metadata during import.

  • serialize_dataset_objects – If True, IDs will be encoded using the host secret. Defaults to the value of for_edit.

  • export_files – How files should be exported; one of ‘symlink’, ‘copy’, or None, in which case files will not be serialized.

  • serialize_jobs – Include job data in the model export. Not needed for the set_metadata script.

out_file: str | PathLike
file_source_uri: str | PathLike | None
class galaxy.model.store.BagDirectoryModelExportStore(out_directory: str, **kwds)[source]

Bases: DirectoryModelExportStore

app: StoreAppProtocol | None
file_sources: ConfiguredFileSources | None
included_datasets: Dict[DatasetInstance, Tuple[DatasetInstance, bool]]
dataset_implicit_conversions: Dict[DatasetInstance, ImplicitlyConvertedDatasetAssociation]
included_collections: Dict[DatasetCollection | HistoryDatasetCollectionAssociation, DatasetCollection | HistoryDatasetCollectionAssociation]
included_libraries: List[Library]
included_library_folders: List[LibraryFolder]
included_invocations: List[WorkflowInvocation]
collection_datasets: Set[int]
dataset_id_to_path: Dict[int, Tuple[str | None, str | None]]
job_output_dataset_associations: Dict[int, Dict[str, DatasetInstance]]
__init__(out_directory: str, **kwds) None[source]
Parameters:
  • export_directory – path to export directory. Will be created if it does not exist.

  • app – Galaxy App or app-like object. Must be provided if for_edit and/or serialize_dataset_objects are True.

  • for_edit – Allow modifying existing HDA and dataset metadata during import.

  • serialize_dataset_objects – If True, IDs will be encoded using the host secret. Defaults to the value of for_edit.

  • export_files – How files should be exported; one of ‘symlink’, ‘copy’, or None, in which case files will not be serialized.

  • serialize_jobs – Include job data in the model export. Not needed for the set_metadata script.

class galaxy.model.store.BagArchiveModelExportStore(uri: str | PathLike, bag_archiver: str = 'tgz', **kwds)[source]

Bases: BagDirectoryModelExportStore

__init__(uri: str | PathLike, bag_archiver: str = 'tgz', **kwds) None[source]
Parameters:
  • export_directory – path to export directory. Will be created if it does not exist.

  • app – Galaxy App or app-like object. Must be provided if for_edit and/or serialize_dataset_objects are True.

  • for_edit – Allow modifying existing HDA and dataset metadata during import.

  • serialize_dataset_objects – If True, IDs will be encoded using the host secret. Defaults to the value of for_edit.

  • export_files – How files should be exported; one of ‘symlink’, ‘copy’, or None, in which case files will not be serialized.

  • serialize_jobs – Include job data in the model export. Not needed for the set_metadata script.

file_source_uri: str | PathLike | None
galaxy.model.store.get_export_store_factory(app, download_format: str, export_files=None, bco_export_options: BcoExportOptions | None = None, user_context=None) Callable[[str | PathLike], ModelExportStore][source]
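
A hedged sketch of using the factory: the returned callable builds a ModelExportStore for a target path or URI. Treating ‘tgz’ as a valid download_format value is an assumption, and app is assumed to come from the surrounding context:

    from galaxy.model.store import get_export_store_factory

    # 'app' is assumed; 'tgz' as a download_format value is an assumption.
    factory = get_export_store_factory(app, "tgz", export_files="copy")
    export_store = factory("/tmp/history_export.tgz")  # illustrative target
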
galaxy.model.store.tar_export_directory(export_directory: str | PathLike, out_file: str | PathLike, gzip: bool) None[source]
galaxy.model.store.get_export_dataset_filename(name: str, ext: str, encoded_id: str, conversion_key: str | None) str[source]

Builds a filename for a dataset using its name and extension.

galaxy.model.store.get_export_dataset_extra_files_dir_name(encoded_id: str, conversion_key: str | None) str[source]
galaxy.model.store.imported_store_for_metadata(directory: str, object_store: ObjectStore | None = None) BaseDirectoryImportModelStore[source]
galaxy.model.store.source_to_import_store(source: str | dict, app: StoreAppProtocol, import_options: ImportOptions | None, model_store_format: ModelStoreFormat | None = None, user_context=None) ModelImportStore[source]
galaxy.model.store.payload_to_source_uri(payload) str[source]
galaxy.model.store.copy_dataset_instance_metadata_attributes(source: DatasetInstance, target: DatasetInstance) None[source]

Submodules

galaxy.model.store.build_objects module

galaxy.model.store.build_objects.main(argv=None)[source]

galaxy.model.store.discover module

Utilities for discovering files to add to a model store.

Works with the input “JSON” format used by the Fetch API, galaxy.json imports, etc. High-level utilities in this file can be used during job output discovery or to persist Galaxy model objects corresponding to files in other contexts.

exception galaxy.model.store.discover.MaxDiscoveredFilesExceededError[source]

Bases: ValueError

class galaxy.model.store.discover.ModelPersistenceContext[source]

Bases: object

Class for creating datasets while finding files.

This class implements the create_dataset method, which takes care of populating the metadata required for datasets and other potential model objects.

job_working_directory: str
max_discovered_files = inf
discovered_file_count: int
get_job() Job | None[source]
create_dataset(ext, designation, visible, dbkey, name, filename=None, extra_files=None, metadata_source_name=None, info=None, library_folder=None, link_data=False, primary_data=None, init_from=None, dataset_attributes=None, tag_list=None, sources=None, hashes=None, created_from_basename=None, final_job_state='ok', creating_job_id=None, output_name=None, storage_callbacks=None, purged=False)[source]
finalize_storage(primary_data, dataset_attributes, extra_files, filename, link_data, output_name, init_from)[source]
static set_datasets_metadata(datasets, datasets_attributes=None)[source]
populate_collection_elements(collection, root_collection_builder, discovered_files, name=None, metadata_source_name=None, final_job_state='ok', change_datatype_actions=None)[source]
add_tags_to_datasets(datasets, tag_lists)[source]
update_object_store_with_datasets(datasets, paths, extra_files, output_name)[source]
abstract property tag_handler

Return a galaxy.model.tags.TagHandler-like object for persisting tags.

abstract property user

If bound to a database, return the user the datasets should be created for.

Return None otherwise.

abstract property sa_session: scoped_session | None

If bound to a database, return the SQL Alchemy session.

Return None otherwise.

abstract property permission_provider: PermissionProvider

Return the PermissionProvider used to set permissions on created datasets.

get_implicit_collection_jobs_association_id() str | None[source]

No-op, no job context.

abstract property job: Job | None

Return associated job object if bound to a job finish context connected to a database.

override_object_store_id(output_name: str | None = None) str | None[source]

Object store ID to assign to a dataset before populating its contents.

abstract property metadata_source_provider: MetadataSourceProvider

Return associated MetadataSourceProvider object.

abstract property object_store: ObjectStore

Return object store to use for populating discovered dataset contents.

abstract property flush_per_n_datasets: int | None
property input_dbkey: str
abstract add_library_dataset_to_folder(library_folder, ld)[source]

Add library dataset to persisted library folder.

abstract create_library_folder(parent_folder, name, description)[source]

Create a library folder from the supplied attributes under the supplied parent folder.

abstract add_output_dataset_association(name, dataset)[source]

If discovering outputs for a job, persist output dataset association.

abstract add_datasets_to_history(datasets, for_output_dataset=None)[source]

Add datasets to the history this context points at.

job_id()[source]
abstract persist_object(obj)[source]

Add the target to the persistence layer.

persist_library_folder(library_folder: LibraryFolder) None[source]

Add library folder to sessionless export. No-op for session export.

abstract flush()[source]

If database bound, flush the persisted objects to ensure IDs.

increment_discovered_file_count()[source]
class galaxy.model.store.discover.PermissionProvider[source]

Bases: object

Interface for working with permissions while importing datasets with ModelPersistenceContext.

property permissions
set_default_hda_permissions(primary_data)[source]
abstract copy_dataset_permissions(init_from, primary_data)[source]

Copy dataset permissions from supplied input dataset.

class galaxy.model.store.discover.UnusedPermissionProvider[source]

Bases: PermissionProvider

copy_dataset_permissions(init_from, primary_data)[source]

Raises NotImplementedError.

This should only be called as part of job output collection, where a session should be available to initialize permissions from.

class galaxy.model.store.discover.MetadataSourceProvider[source]

Bases: object

Interface for working with fetching input dataset metadata with ModelPersistenceContext.

abstract get_metadata_source(input_name)[source]

Get metadata for supplied input_name.

class galaxy.model.store.discover.UnusedMetadataSourceProvider[source]

Bases: MetadataSourceProvider

get_metadata_source(input_name)[source]

Raises NotImplementedError.

This should only be called as part of job output collection, where one can actually collect metadata from inputs; it is unused in the context of SessionlessModelPersistenceContext.

class galaxy.model.store.discover.SessionlessModelPersistenceContext(object_store, export_store: ModelExportStore, working_directory: str)[source]

Bases: ModelPersistenceContext

A variant of ModelPersistenceContext that persists to an export store instead of directly to the database.
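
A hedged sketch of wiring this context onto an export store; object_store is assumed to come from the surrounding context, and the paths are illustrative:

    from galaxy.model.store import DirectoryModelExportStore
    from galaxy.model.store.discover import SessionlessModelPersistenceContext

    # 'object_store' is assumed to come from the surrounding context.
    export_store = DirectoryModelExportStore("/tmp/export", export_files="copy")
    ctx = SessionlessModelPersistenceContext(
        object_store,
        export_store,
        "/tmp/job_working_directory",  # illustrative working directory
    )
    # Datasets created via ctx.create_dataset(...) are persisted to the
    # export store rather than flushed to a Galaxy database session.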

__init__(object_store, export_store: ModelExportStore, working_directory: str) None[source]
discovered_file_count: int
job_working_directory: str
property tag_handler

Return a galaxy.model.tags.TagHandler-like object for persisting tags.

property sa_session

If bound to a database, return the SQL Alchemy session.

Return None otherwise.

property user

If bound to a database, return the user the datasets should be created for.

Return None otherwise.

property job

Return associated job object if bound to a job finish context connected to a database.

property permission_provider: UnusedPermissionProvider

Return the UnusedPermissionProvider; no database-backed permissions apply in the sessionless case.

property metadata_source_provider: UnusedMetadataSourceProvider

Return associated MetadataSourceProvider object.

property object_store: ObjectStore

Return object store to use for populating discovered dataset contents.

property flush_per_n_datasets: int | None
add_tags_to_datasets(datasets, tag_lists)[source]
add_library_dataset_to_folder(library_folder, ld)[source]

Add library dataset to persisted library folder.

get_library_folder(destination)[source]
get_hdca(object_id)[source]
create_hdca(name, structure)[source]
create_library_folder(parent_folder, name, description)[source]

Create a library folder from the supplied attributes under the supplied parent folder.

persist_library_folder(library_folder: LibraryFolder) None[source]

Add library folder to sessionless export. No-op for session export.

add_datasets_to_history(datasets, for_output_dataset=None)[source]

Add datasets to the history this context points at.

persist_object(obj)[source]

No-op right now for the sessionless variant of this.

This currently works because objects are either added to a target history with add_datasets_to_history, or the parent LibraryFolder was added to the export store in persist_target_to_export_store.

flush()[source]

No-op for the sessionless variant of this, no database to flush.

add_output_dataset_association(name, dataset)[source]

No-op, no job context to persist this association for.

get_implicit_collection_jobs_association_id()[source]

No-op, no job context.

galaxy.model.store.discover.persist_target_to_export_store(target_dict, export_store, object_store, work_directory)[source]
galaxy.model.store.discover.persist_elements_to_hdca(model_persistence_context: ModelPersistenceContext, elements, hdca, collector=None)[source]
galaxy.model.store.discover.persist_elements_to_folder(model_persistence_context, elements, library_folder)[source]
galaxy.model.store.discover.persist_hdas(elements, model_persistence_context, final_job_state='ok')[source]
galaxy.model.store.discover.get_required_item(from_dict, key, message)[source]
galaxy.model.store.discover.validate_and_normalize_target(obj)[source]
galaxy.model.store.discover.replace_request_syntax_sugar(obj)[source]
class galaxy.model.store.discover.DiscoveredFile(path, collector, match)[source]

Bases: tuple

path: str

Alias for field number 0

collector: DatasetCollector | ToolMetadataDatasetCollector | None

Alias for field number 1

match: JsonCollectedDatasetMatch

Alias for field number 2

discovered_state(element: Dict[str, Any], final_job_state='ok') DiscoveredResultState[source]
class galaxy.model.store.discover.DiscoveredResultState(info, state)[source]

Bases: tuple

info: str | None

Alias for field number 0

state: str

Alias for field number 1

class galaxy.model.store.discover.DiscoveredDeferredFile(collector, match)[source]

Bases: tuple

collector: DatasetCollector | ToolMetadataDatasetCollector | None

Alias for field number 0

match: JsonCollectedDatasetMatch

Alias for field number 1

discovered_state(element: Dict[str, Any], final_job_state='ok') DiscoveredResultState[source]
property path
galaxy.model.store.discover.discovered_file_for_element(dataset, model_persistence_context: ModelPersistenceContext, parent_identifiers=None, collector=None) DiscoveredFile | DiscoveredDeferredFile | DiscoveredFileError[source]
galaxy.model.store.discover.discover_target_directory(dir_name, job_working_directory)[source]
class galaxy.model.store.discover.JsonCollectedDatasetMatch(as_dict, collector: DatasetCollector | ToolMetadataDatasetCollector | None, filename, path=None, parent_identifiers=None)[source]

Bases: object

__init__(as_dict, collector: DatasetCollector | ToolMetadataDatasetCollector | None, filename, path=None, parent_identifiers=None)[source]
property designation
property element_identifiers
property raw_element_identifiers
property name

Return name or None if not defined by the discovery pattern.

property dbkey: str
property ext: str
property visible: bool
property tag_list
property object_id
property sources
property hashes
property created_from_basename
property extra_files
property effective_state
class galaxy.model.store.discover.RegexCollectedDatasetMatch(re_match, collector: DatasetCollector | ToolMetadataDatasetCollector | None, filename, path=None)[source]

Bases: JsonCollectedDatasetMatch

__init__(re_match, collector: DatasetCollector | ToolMetadataDatasetCollector | None, filename, path=None)[source]
class galaxy.model.store.discover.DiscoveredFileError(error_message, collector, match, path)[source]

Bases: tuple

error_message: str

Alias for field number 0

collector: DatasetCollector | ToolMetadataDatasetCollector | None

Alias for field number 1

match: JsonCollectedDatasetMatch

Alias for field number 2

path: str | None

Alias for field number 3

discovered_state(element: Dict[str, Any], final_job_state='ok') DiscoveredResultState[source]

galaxy.model.store.load_objects module

galaxy.model.store.load_objects.main(argv=None)[source]