From a31d09d5c4ca49daa722d080d44bd85ca2fb6c8d Mon Sep 17 00:00:00 2001
From: Lindsey Gray
Date: Wed, 29 May 2019 13:56:53 -0500
Subject: [PATCH] flake8

---
 .../jetmet_tools/FactorizedJetCorrector.py       | 6 +++---
 fnal_column_analysis_tools/processor/executor.py | 6 +++---
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/fnal_column_analysis_tools/jetmet_tools/FactorizedJetCorrector.py b/fnal_column_analysis_tools/jetmet_tools/FactorizedJetCorrector.py
index 2d9c72dd5..108f36ca7 100644
--- a/fnal_column_analysis_tools/jetmet_tools/FactorizedJetCorrector.py
+++ b/fnal_column_analysis_tools/jetmet_tools/FactorizedJetCorrector.py
@@ -60,10 +60,10 @@ def __init__(self, **kwargs):
 
             campaign = _checkConsistency(campaign, info[0])
             dataera = _checkConsistency(dataera, info[1])
-            datatype = _checkConsistency(datatype, info[2+offset])
-            levels.append(info[3+offset])
+            datatype = _checkConsistency(datatype, info[2 + offset])
+            levels.append(info[3 + offset])
             funcs.append(func)
-            jettype = _checkConsistency(jettype, info[4+offset])
+            jettype = _checkConsistency(jettype, info[4 + offset])
 
         if campaign is None:
             raise Exception('Unable to determine production campaign of JECs!')
diff --git a/fnal_column_analysis_tools/processor/executor.py b/fnal_column_analysis_tools/processor/executor.py
index 0532cb7f9..5b7819191 100644
--- a/fnal_column_analysis_tools/processor/executor.py
+++ b/fnal_column_analysis_tools/processor/executor.py
@@ -113,7 +113,7 @@ def run_uproot_job(fileset, treename, processor_instance, executor, executor_arg
     if not isinstance(processor_instance, ProcessorABC):
         raise ValueError("Expected processor_instance to derive from ProcessorABC")
 
-    executor_args.setdefault('workers',1)
+    executor_args.setdefault('workers', 1)
 
     items = []
     for dataset, filelist in tqdm(fileset.items(), desc='Preprocessing'):
@@ -173,7 +173,7 @@ def run_parsl_job(fileset, treename, processor_instance, executor, data_flow=Non
 
     if executor_args['config'] is None:
         executor_args.pop('config')
-    
+
     # initialize spark if we need to
     # if we initialize, then we deconstruct
     # when we're done
@@ -252,7 +252,7 @@ def run_spark_job(fileset, processor_instance, executor, executor_args={},
 
     if executor_args['config'] is None:
         executor_args.pop('config')
-    
+
     # initialize spark if we need to
     # if we initialize, then we deconstruct
     # when we're done