LbAnalysisProductions#

N.B. Some modules are not yet included in this reference (TODO!)

Models#

class LbAnalysisProductions.models.InputFile(**kwargs)[source]#
dataset_size#
id#
path#
size#
test#
test_id#
class LbAnalysisProductions.models.Job(**kwargs)[source]#
AUTO_CONF_COLUMNS = ['simulation', 'luminosity', 'data_type', 'input_type']#
application_name#
application_version#
autoconf_options#
automatically_configure#
checks#
children#
completion_percentage#
conddb_tag#
classmethod create(name, application, input, output, options, wg, inform=None, checks=None, **kwargs)[source]#
data_type#
dddb_tag#
property dynamic_options_path#
generate_configuration()[source]#
id#
inform#
property input#
input_data#
input_type#
property keep_output#
luminosity#
n_test_events#
n_test_lfns#
name#
options#
output#
parent_job_id#
pipeline#
priority#
production#
production_id#
root_in_tes#
simulation#
step#
step_id#
step_index#
tags#
tests#
turbo#
wg#
class LbAnalysisProductions.models.OutputFile(**kwargs)[source]#
dataset_size#
id#
path#
size#
test#
test_id#
class LbAnalysisProductions.models.Pipeline(**kwargs)[source]#
classmethod create(gitlab_job)[source]#
gitlab_job_dump#
gitlab_job_id#
property id#
property job#
productions#
class LbAnalysisProductions.models.Production(**kwargs)[source]#
UT_long = UnicodeText()#
checks_data#
comment#
classmethod create(pipeline, repo_root, name)[source]#
id#
jobs#
name#
pipeline#
pipeline_id#
rendered_yaml#
steps#
property svg#
yaml#
class LbAnalysisProductions.models.Step(**kwargs)[source]#
can_run#
completed_successfully#
classmethod create(jobs)[source]#
execution_id#
execution_type#
id#
property input#
jobs#
launch_test(TestClass)[source]#
property name#
pipeline#
production#
production_id#
running#
class LbAnalysisProductions.models.Test(**kwargs)[source]#
attempt#
cpu_norm#
classmethod create(job)[source]#
events_processed#
events_requested#
property finished#
id#
input_files#
job#
job_id#
memory_max_pss#
memory_max_rss#
memory_max_swap#
n_log_error#
n_log_fatal#
n_log_warning#
output_files#
pipeline#
production#
run_time#
s3_bucket#
s3_fields#
property s3_filenames#
s3_post_url#
status#
step#
validate_status(key, new_status)[source]#

State machine for test statuses
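
The (key, new_status) signature matches the attribute-validator hook used by SQLAlchemy-style models. Below is a minimal sketch of such a state machine, assuming SQLAlchemy; the status names and the transition table are hypothetical placeholders, not the statuses actually used by this API.

```python
# Sketch only: SQLAlchemy-style attribute validator acting as a state
# machine for a "status" column. Status names and allowed transitions are
# hypothetical; the real values are not documented here.
from typing import Optional

from sqlalchemy import Integer, String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, validates


class Base(DeclarativeBase):
    pass


ALLOWED_TRANSITIONS = {
    None: {"pending"},
    "pending": {"running", "failed"},
    "running": {"completed", "failed"},
}


class TestLike(Base):
    __tablename__ = "test_like"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    status: Mapped[Optional[str]] = mapped_column(String)

    @validates("status")
    def validate_status(self, key, new_status):
        # Reject any transition the state machine does not allow.
        if new_status not in ALLOWED_TRANSITIONS.get(self.status, set()):
            raise ValueError(f"invalid transition {self.status!r} -> {new_status!r}")
        return new_status


t = TestLike()
t.status = "pending"    # allowed: None -> pending
t.status = "running"    # allowed: pending -> running
# t.status = "pending"  # would raise ValueError
```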

class LbAnalysisProductions.models.WorkerToken(test_id, *, reference=None)[source]#
expires#
id#
revoked#
test_id#
token#
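
Read together, the create() classmethods above imply the object hierarchy: a Pipeline wraps a GitLab CI job and owns Productions; a Production owns Jobs, which are grouped into Steps and exercised by Tests. The following is a rough sketch of chaining them, using only the signatures documented above; every argument value (the GitLab payload, paths, application, options, working group) is an illustrative placeholder, and a configured database session is assumed.

```python
from LbAnalysisProductions import models

# Hypothetical GitLab CI job payload; the real structure expected by
# Pipeline.create() is not documented here.
gitlab_job = {"id": 12345, "ref": "main"}

pipeline = models.Pipeline.create(gitlab_job)
production = models.Production.create(pipeline, "/path/to/checkout", "MyAnalysis")

# Placeholder job definition following the documented Job.create() signature.
job = models.Job.create(
    name="my_job",
    application="DaVinci/v64r5",
    input={"bk_query": "/some/bookkeeping/path"},
    output="TUPLE.ROOT",
    options=["options.py"],
    wg="SomeWG",
    inform=["someone@cern.ch"],
)

step = models.Step.create([job])   # Step.create takes a collection of jobs
test = models.Test.create(job)     # one Test attempt for the job
```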

Submission#

LbAnalysisProductions.submission.clean_repository(pipeline)[source]#

Clean up the Analysis Productions submission repository.

Creates a commit that removes the directory of the production that was just submitted, keeping the repository in a clean state.

Parameters:

pipeline – The pipeline query corresponding to the production submission whose directory should be removed from the repository.
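
A minimal usage sketch, assuming the Pipeline record is looked up through a SQLAlchemy session; the session object and the pipeline ID below are hypothetical.

```python
from LbAnalysisProductions import models
from LbAnalysisProductions.submission import clean_repository

# Hypothetical lookup of the Pipeline for the submission that was just made;
# "session" is assumed to be an existing SQLAlchemy session.
pipeline = session.get(models.Pipeline, 1234)
clean_repository(pipeline)
```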

LbAnalysisProductions.submission.convert_to_dirac_dict(step, tag_name, *, with_merging=True)[source]#

Convert an AnalysisProductions step object to an LHCbDIRAC production.
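
A sketch of converting each step of a production, using the documented Production.steps relationship; the tag name is a placeholder and the layout of the returned LHCbDIRAC production dictionary is not reproduced here.

```python
from LbAnalysisProductions.submission import convert_to_dirac_dict

# "production" is a models.Production instance; "v1r100" is a placeholder
# for the tag name passed through to LHCbDIRAC.
for step in production.steps:
    dirac_dict = convert_to_dirac_dict(step, "v1r100", with_merging=True)
```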

LbAnalysisProductions.submission.gitlab_notify_submission(pipeline_id, version, task_key, task_url)[source]#

Notify the involved parties on GitLab that the production based on pipeline_id was submitted.
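
A sketch of a notification call; all values are placeholders, with the task key and URL identifying whatever tracking task was created for the submission.

```python
from LbAnalysisProductions.submission import gitlab_notify_submission

# Placeholder values only.
gitlab_notify_submission(
    pipeline_id=1234,
    version="v1r100",
    task_key="TASK-42",
    task_url="https://example.invalid/tasks/TASK-42",
)
```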

LbAnalysisProductions.submission.make_stats(step, data)[source]#
LbAnalysisProductions.submission.pairwise(iterable)[source]#
LbAnalysisProductions.submission.send_email_alert(pipeline_id, subject, content, to='lhcb-dpa-wp2-managers@cern.ch')[source]#
LbAnalysisProductions.submission.submit_gitlab_task(working_groups, ap_title, version, pipeline_id, job_statistics, request_metadata, inform_emails=None, user_comment='')[source]#
LbAnalysisProductions.submission.submit_productions(pipeline_id, tag_name, *, dry_run=False, create_jira=True, clean_up=True)[source]#
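
The signatures above suggest submit_productions is the top-level entry point, with keyword-only switches for dry runs, task creation, and repository cleanup. A hedged end-to-end sketch, assuming the flags behave as their names suggest and using placeholder arguments:

```python
from LbAnalysisProductions.submission import submit_productions

# Dry run: exercise the submission machinery for the pipeline without
# creating a tracking task or touching the submission repository.
submit_productions(1234, "v1r100", dry_run=True, create_jira=False, clean_up=False)

# Real submission with the documented defaults (create_jira=True, clean_up=True).
submit_productions(1234, "v1r100")
```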