@@ -1,42 +1,35 @@
 # runs after the job (and after the default post-filter)
-from galaxy.jobs.handler import JOB_ERROR
 from galaxy.tools.parameters import DataToolParameter
 
-# Older py compatibility
-try:
-    set()
-except:
-    from sets import Set as set
 
-
-def validate_input( trans, error_map, param_values, page_param_map ):
+def validate_input(trans, error_map, param_values, page_param_map):
     dbkeys = set()
     data_param_names = set()
     data_params = 0
     for name, param in page_param_map.items():
-        if isinstance( param, DataToolParameter ):
+        if isinstance(param, DataToolParameter):
             # for each dataset parameter
             if param_values.get(name, None) is not None:
-                dbkeys.add( param_values[name].dbkey )
+                dbkeys.add(param_values[name].dbkey)
                 data_params += 1
                 # check meta data
                 try:
                     param = param_values[name]
-                    if isinstance( param.datatype, trans.app.datatypes_registry.get_datatype_by_extension( 'gff' ).__class__ ):
+                    if isinstance(param.datatype, trans.app.datatypes_registry.get_datatype_by_extension('gff').__class__):
                         # TODO: currently cannot validate GFF inputs b/c they are not derived from interval.
                         pass
                     else:  # Validate interval datatype.
-                        int( param.metadata.startCol )
-                        int( param.metadata.endCol )
-                        int( param.metadata.chromCol )
+                        int(param.metadata.startCol)
+                        int(param.metadata.endCol)
+                        int(param.metadata.chromCol)
                         if param.metadata.strandCol is not None:
-                            int( param.metadata.strandCol )
-                except:
+                            int(param.metadata.strandCol)
+                except Exception:
                     error_msg = "The attributes of this dataset are not properly set. " + \
                         "Click the pencil icon in the history item to set the chrom, start, end and strand columns."
                     error_map[name] = error_msg
-            data_param_names.add( name )
-    if len( dbkeys ) > 1:
+            data_param_names.add(name)
+    if len(dbkeys) > 1:
         for name in data_param_names:
             error_map[name] = "All datasets must belong to same genomic build, " \
                 "this dataset is linked to build '%s'" % param_values[name].dbkey
@@ -45,22 +38,7 @@ def validate_input( trans, error_map, param_values, page_param_map ):
             error_map[name] = "A dataset of the appropriate type is required"
 
 
-# Commented out by INS, 5/30/2007. What is the PURPOSE of this?
-def exec_after_process(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
-    """Verify the output data after each run"""
-    for data in out_data.values():
-        try:
-            if stderr and len( stderr ) > 0:
-                raise Exception( stderr )
-        except Exception:
-            data.blurb = JOB_ERROR
-            data.state = JOB_ERROR
-
-
 def exec_after_merge(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
-    exec_after_process(
-        app, inp_data, out_data, param_dict, tool=tool, stdout=stdout, stderr=stderr)
-
     # strip strand column if clusters were merged
     for data in out_data.values():
         if param_dict['returntype'] is True:
@@ -72,9 +50,6 @@ def exec_after_merge(app, inp_data, out_data, param_dict, tool=None, stdout=None
 
 
 def exec_after_cluster(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
-    exec_after_process(
-        app, inp_data, out_data, param_dict, tool=tool, stdout=stdout, stderr=stderr)
-
     # strip strand column if clusters were merged
     if param_dict["returntype"] == '1':
         for data in out_data.values():
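For orientation, a minimal sketch of the dbkey rule that validate_input() enforces in the first hunk, using stand-in objects (FakeDataset and check_same_build are illustrative names for this sketch, not Galaxy API): when the selected datasets span more than one genomic build, every data parameter is flagged in error_map.

# Sketch only, not Galaxy code: mirrors the len(dbkeys) > 1 branch of
# validate_input(). FakeDataset stands in for a Galaxy dataset; only the
# dbkey attribute matters for this check.

class FakeDataset:
    def __init__(self, dbkey):
        self.dbkey = dbkey


def check_same_build(param_values):
    """Collect the builds of all supplied datasets and flag every
    parameter if more than one build is present."""
    error_map = {}
    dbkeys = {value.dbkey for value in param_values.values() if value is not None}
    if len(dbkeys) > 1:
        for name, value in param_values.items():
            error_map[name] = ("All datasets must belong to same genomic build, "
                               "this dataset is linked to build '%s'" % value.dbkey)
    return error_map


# Two inputs on different builds -> both parameters are flagged.
print(check_same_build({"input1": FakeDataset("hg19"),
                        "input2": FakeDataset("mm9")}))

Running the sketch prints an error for both input1 and input2, matching the message the surviving code writes into error_map.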