#!/usr/bin/env python
# Processes uploads from the user.

# WARNING: Changes in this tool (particularly as related to parsing) may need
# to be reflected in galaxy.web.controllers.tool_runner and galaxy.tools

import urllib, sys, os, gzip, tempfile, shutil, re, zipfile, codecs, binascii
from galaxy import eggs
# need to import model before sniff to resolve a circular import dependency
import galaxy.model
from galaxy.datatypes.checkers import *
from galaxy.datatypes import sniff
from galaxy.datatypes.binary import *
from galaxy.datatypes.images import Pdf
from galaxy.datatypes.registry import Registry
from galaxy import util
from galaxy.datatypes.util.image_util import *
from galaxy.util.json import *

try:
    import Image as PIL
except ImportError:
    try:
        from PIL import Image as PIL
    except:
        PIL = None

try:
    import bz2
except:
    bz2 = None

assert sys.version_info[:2] >= ( 2, 4 )

def stop_err( msg, ret=1 ):
    sys.stderr.write( msg )
    sys.exit( ret )

def file_err( msg, dataset, json_file ):
    # Record the error for the framework, then clean up the temporary upload.
    json_file.write( to_json_string( dict( type = 'dataset',
                                           ext = 'data',
                                           dataset_id = dataset.dataset_id,
                                           stderr = msg ) ) + "\n" )
    # never remove a server-side upload
    if dataset.type in ( 'server_dir', 'path_paste' ):
        return
    try:
        os.remove( dataset.path )
    except:
        pass

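# For example (illustrative values only), a file_err() call above emits one
# JSON line such as:
#   {"type": "dataset", "ext": "data", "dataset_id": 42, "stderr": "The uploaded file is empty"}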
def safe_dict(d):
    """
    Recursively clone json structure with UTF-8 dictionary keys
    http://mellowmachines.com/blog/2009/06/exploding-dictionary-with-unicode-keys-as-python-arguments/
    """
    if isinstance(d, dict):
        return dict([(k.encode('utf-8'), safe_dict(v)) for k,v in d.iteritems()])
    elif isinstance(d, list):
        return [safe_dict(x) for x in d]
    else:
        return d

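# For example (illustrative values): safe_dict( { u'name': u'foo.txt' } ) returns
# { 'name': u'foo.txt' } -- keys are encoded to UTF-8 byte strings while scalar
# values pass through unchanged -- so the result can be splatted into
# util.bunch.Bunch( **... ) in __main__() below, where Python 2 requires str
# keyword names.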
def parse_outputs( args ):
    # Each arg is an output spec of the form <dataset_id>:<files_path>:<path>.
    rval = {}
    for arg in args:
        id, files_path, path = arg.split( ':', 2 )
        rval[int( id )] = ( path, files_path )
    return rval

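# For example (hypothetical paths):
#   parse_outputs( [ '1:/tmp/job/dataset_1_files:/tmp/job/dataset_1.dat' ] )
# returns { 1: ( '/tmp/job/dataset_1.dat', '/tmp/job/dataset_1_files' ) }; note
# the tuple is ( path, files_path ), the reverse of the command-line order.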
def add_file( dataset, registry, json_file, output_path ):
    data_type = None
    line_count = None
    converted_path = None
    stdout = None
    link_data_only = dataset.get( 'link_data_only', 'copy_files' )
    in_place = dataset.get( 'in_place', True )

    try:
        ext = dataset.file_type
    except AttributeError:
        file_err( 'Unable to process uploaded file, missing file_type parameter.', dataset, json_file )
        return

    if dataset.type == 'url':
        try:
            page = urllib.urlopen( dataset.path ) # page will be .close()ed by sniff methods
            temp_name, dataset.is_multi_byte = sniff.stream_to_file( page, prefix='url_paste', source_encoding=util.get_charset_from_http_headers( page.headers ) )
        except Exception, e:
            file_err( 'Unable to fetch %s\n%s' % ( dataset.path, str( e ) ), dataset, json_file )
            return
        dataset.path = temp_name
    # See if we have an empty file
    if not os.path.exists( dataset.path ):
        file_err( 'Uploaded temporary file (%s) does not exist.' % dataset.path, dataset, json_file )
        return
    if not os.path.getsize( dataset.path ) > 0:
        file_err( 'The uploaded file is empty', dataset, json_file )
        return
    if not dataset.type == 'url':
        # Already set is_multi_byte above if type == 'url'
        try:
            dataset.is_multi_byte = util.is_multi_byte( codecs.open( dataset.path, 'r', 'utf-8' ).read( 100 ) )
        except UnicodeDecodeError, e:
            dataset.is_multi_byte = False
    # Is dataset an image?
    image = check_image( dataset.path )
    if image:
        if not PIL:
            image = None
        # get_image_ext() returns None if not a supported image type
        ext = get_image_ext( dataset.path, image )
        data_type = ext
    # Is dataset content multi-byte?
    elif dataset.is_multi_byte:
        data_type = 'multi-byte char'
        ext = sniff.guess_ext( dataset.path, is_multi_byte=True )
    # Is dataset content supported sniffable binary?
    else:
        type_info = Binary.is_sniffable_binary( dataset.path )
        if type_info:
            data_type = type_info[0]
            ext = type_info[1]
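    # If none of the checks above matched, data_type is still None and the
    # upload is handled by the fallback branch below.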
    if not data_type:
        shutil.move( dataset.path, output_path )
        #data_type = "data"
    # Save job info for the framework
    if ext == 'auto' and dataset.ext:
        ext = dataset.ext
    if ext == 'auto':
        ext = 'data'
    datatype = registry.get_datatype_by_extension( ext )
    if dataset.type in ( 'server_dir', 'path_paste' ) and link_data_only == 'link_to_files':
        # Never alter a file that will not be copied to Galaxy's local file store.
        if datatype.dataset_content_needs_grooming( dataset.path ):
            err_msg = 'The uploaded files need grooming, so change your <b>Copy data into Galaxy?</b> selection to be ' + \
                      '<b>Copy files into Galaxy</b> instead of <b>Link to files without copying into Galaxy</b> so grooming can be performed.'
            file_err( err_msg, dataset, json_file )
            return
    if link_data_only == 'copy_files' and dataset.type in ( 'server_dir', 'path_paste' ) and data_type not in [ 'gzip', 'bz2', 'zip' ]:
        # Move the dataset to its "real" path
        if converted_path is not None:
            shutil.copy( converted_path, output_path )
            try:
                os.remove( converted_path )
            except:
                pass
        else:
            # This should not happen, but it's here just in case
            shutil.copy( dataset.path, output_path )
    elif link_data_only == 'copy_files':
        if os.path.exists( dataset.path ):
            shutil.move( dataset.path, output_path )
    # Write the job info
    stdout = stdout or 'uploaded %s file' % data_type
    info = dict( type = 'dataset',
                 dataset_id = dataset.dataset_id,
                 ext = ext,
                 stdout = stdout,
                 name = dataset.name,
                 line_count = line_count )
    if dataset.get( 'uuid', None ) is not None:
        info['uuid'] = dataset.get( 'uuid' )
    json_file.write( to_json_string( info ) + "\n" )

    if link_data_only == 'copy_files' and datatype.dataset_content_needs_grooming( output_path ):
        # Groom the dataset content if necessary
        datatype.groom_dataset_content( output_path )

def add_composite_file( dataset, registry, json_file, output_path, files_path ):
    if dataset.composite_files:
        os.mkdir( files_path )
        for name, value in dataset.composite_files.iteritems():
            value = util.bunch.Bunch( **value )
            if dataset.composite_file_paths[ value.name ] is None and not value.optional:
                file_err( 'A required composite data file was not provided (%s)' % name, dataset, json_file )
                break
            elif dataset.composite_file_paths[ value.name ] is not None:
                dp = dataset.composite_file_paths[ value.name ][ 'path' ]
                isurl = dp.find( '://' ) != -1
                if isurl:
                    try:
                        temp_name, dataset.is_multi_byte = sniff.stream_to_file( urllib.urlopen( dp ), prefix='url_paste' )
                    except Exception, e:
                        file_err( 'Unable to fetch %s\n%s' % ( dp, str( e ) ), dataset, json_file )
                        return
                    dataset.path = temp_name
                    dp = temp_name
                if not value.is_binary:
                    if dataset.composite_file_paths[ value.name ].get( 'space_to_tab', value.space_to_tab ):
                        sniff.convert_newlines_sep2tabs( dp )
                    else:
                        sniff.convert_newlines( dp )
                shutil.move( dp, os.path.join( files_path, name ) )
    # Move the dataset to its "real" path
    shutil.move( dataset.primary_file, output_path )
    # Write the job info
    info = dict( type = 'dataset',
                 dataset_id = dataset.dataset_id,
                 stdout = 'uploaded %s file' % dataset.file_type )
    json_file.write( to_json_string( info ) + "\n" )

def __main__():

    if len( sys.argv ) < 4:
        print >>sys.stderr, 'usage: upload.py <root> <datatypes_conf> <json paramfile> <output spec> ...'
        sys.exit( 1 )
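    # A hypothetical invocation (paths are illustrative only):
    #   upload.py /galaxy/root datatypes_conf.xml params.json \
    #       1:/tmp/job/dataset_1_files:/tmp/job/dataset_1.dat
    # where each trailing argument is an output spec parsed by parse_outputs().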

    output_paths = parse_outputs( sys.argv[4:] )
    json_file = open( 'galaxy.json', 'w' )
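    # Each line that add_file(), add_composite_file(), and file_err() write to
    # galaxy.json is one JSON dictionary describing a dataset; the framework
    # reads this file to collect the job's results.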

    registry = Registry()
    registry.load_datatypes( root_dir=sys.argv[1], config=sys.argv[2] )

    for line in open( sys.argv[3], 'r' ):
        dataset = from_json_string( line )
        dataset = util.bunch.Bunch( **safe_dict( dataset ) )
        try:
            output_path = output_paths[int( dataset.dataset_id )][0]
        except:
            print >>sys.stderr, 'Output path for dataset %s not found on command line' % dataset.dataset_id
            sys.exit( 1 )
        if dataset.type == 'composite':
            files_path = output_paths[int( dataset.dataset_id )][1]
            add_composite_file( dataset, registry, json_file, output_path, files_path )
        else:
            add_file( dataset, registry, json_file, output_path )

    # clean up paramfile
    # TODO: this will not work when running as the actual user unless the
    # parent directory is writable by the user.
    try:
        os.remove( sys.argv[3] )
    except:
        pass

if __name__ == '__main__':
    __main__()