annotate uploadzip.py @ 17:072167754619 draft

Uploaded
author davidvanzessen
date Mon, 30 Mar 2015 07:58:17 -0400
parents
children
#!/usr/bin/env python
# Processes uploads from the user.

# WARNING: Changes in this tool (particularly as related to parsing) may need
# to be reflected in galaxy.web.controllers.tool_runner and galaxy.tools

import urllib, sys, os, gzip, tempfile, shutil, re, zipfile, codecs, binascii
from galaxy import eggs
# need to import model before sniff to resolve a circular import dependency
import galaxy.model
from galaxy.datatypes.checkers import *
from galaxy.datatypes import sniff
from galaxy.datatypes.binary import *
from galaxy.datatypes.images import Pdf
from galaxy.datatypes.registry import Registry
from galaxy import util
from galaxy.datatypes.util.image_util import *
from galaxy.util.json import *

try:
    import Image as PIL
except ImportError:
    try:
        from PIL import Image as PIL
    except:
        PIL = None

try:
    import bz2
except:
    bz2 = None

assert sys.version_info[:2] >= ( 2, 4 )

def stop_err( msg, ret=1 ):
    sys.stderr.write( msg )
    sys.exit( ret )

def file_err( msg, dataset, json_file ):
    json_file.write( to_json_string( dict( type = 'dataset',
                                           ext = 'data',
                                           dataset_id = dataset.dataset_id,
                                           stderr = msg ) ) + "\n" )
    # never remove a server-side upload
    if dataset.type in ( 'server_dir', 'path_paste' ):
        return
    try:
        os.remove( dataset.path )
    except:
        pass
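
# Illustrative example of the error record file_err() appends to galaxy.json
# (values are hypothetical):
#   {"type": "dataset", "ext": "data", "dataset_id": 1, "stderr": "The uploaded file is empty"}
# The framework reads this line so the corresponding dataset can be flagged as failed.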

def safe_dict(d):
    """
    Recursively clone json structure with UTF-8 dictionary keys
    http://mellowmachines.com/blog/2009/06/exploding-dictionary-with-unicode-keys-as-python-arguments/
    """
    if isinstance(d, dict):
        return dict([(k.encode('utf-8'), safe_dict(v)) for k,v in d.iteritems()])
    elif isinstance(d, list):
        return [safe_dict(x) for x in d]
    else:
        return d
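
# Example (illustrative): under Python 2, safe_dict({u'type': u'file', u'ext': u'auto'})
# returns {'type': u'file', 'ext': u'auto'} -- the keys become byte strings so the
# dictionary can later be splatted into util.bunch.Bunch(**...) as keyword arguments.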

def parse_outputs( args ):
    rval = {}
    for arg in args:
        id, files_path, path = arg.split( ':', 2 )
        rval[int( id )] = ( path, files_path )
    return rval
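
# Example (illustrative, hypothetical paths): parse_outputs(['1:/job/dataset_1_files:/job/output_1.dat'])
# returns {1: ('/job/output_1.dat', '/job/dataset_1_files')}, i.e. each "<output spec>"
# argument is "<dataset_id>:<files_path>:<path>".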

def add_file( dataset, registry, json_file, output_path ):
    data_type = None
    line_count = None
    converted_path = None
    stdout = None
    link_data_only = dataset.get( 'link_data_only', 'copy_files' )
    in_place = dataset.get( 'in_place', True )

    try:
        ext = dataset.file_type
    except AttributeError:
        file_err( 'Unable to process uploaded file, missing file_type parameter.', dataset, json_file )
        return

    if dataset.type == 'url':
        try:
            page = urllib.urlopen( dataset.path ) # page will be .close()ed by sniff methods
            temp_name, dataset.is_multi_byte = sniff.stream_to_file( page, prefix='url_paste', source_encoding=util.get_charset_from_http_headers( page.headers ) )
        except Exception, e:
            file_err( 'Unable to fetch %s\n%s' % ( dataset.path, str( e ) ), dataset, json_file )
            return
        dataset.path = temp_name
    # See if we have an empty file
    if not os.path.exists( dataset.path ):
        file_err( 'Uploaded temporary file (%s) does not exist.' % dataset.path, dataset, json_file )
        return
    if not os.path.getsize( dataset.path ) > 0:
        file_err( 'The uploaded file is empty', dataset, json_file )
        return
    if not dataset.type == 'url':
        # Already set is_multi_byte above if type == 'url'
        try:
            dataset.is_multi_byte = util.is_multi_byte( codecs.open( dataset.path, 'r', 'utf-8' ).read( 100 ) )
        except UnicodeDecodeError, e:
            dataset.is_multi_byte = False
    # Is dataset an image?
    image = check_image( dataset.path )
    if image:
        if not PIL:
            image = None
        # get_image_ext() returns None if not a supported Image type
        ext = get_image_ext( dataset.path, image )
        data_type = ext
    # Is dataset content multi-byte?
    elif dataset.is_multi_byte:
        data_type = 'multi-byte char'
        ext = sniff.guess_ext( dataset.path, is_multi_byte=True )
    # Is dataset content supported sniffable binary?
    else:
        type_info = Binary.is_sniffable_binary( dataset.path )
        if type_info:
            data_type = type_info[0]
            ext = type_info[1]
            data_type = "binary"
    if not data_type:
        # See if we have a gzipped file, which, if it passes our restrictions, we'll uncompress
        is_gzipped, is_valid = check_gzip( dataset.path )
        if is_gzipped and not is_valid:
            file_err( 'The gzipped uploaded file contains inappropriate content', dataset, json_file )
            return
        elif is_gzipped and is_valid:
            if link_data_only == 'copy_files':
                # We need to uncompress the temp_name file, but BAM files must remain compressed in the BGZF format
                CHUNK_SIZE = 2**20 # 1Mb
                fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_gunzip_' % dataset.dataset_id, dir=os.path.dirname( output_path ), text=False )
                gzipped_file = gzip.GzipFile( dataset.path, 'rb' )
                while 1:
                    try:
                        chunk = gzipped_file.read( CHUNK_SIZE )
                    except IOError:
                        os.close( fd )
                        os.remove( uncompressed )
                        file_err( 'Problem decompressing gzipped data', dataset, json_file )
                        return
                    if not chunk:
                        break
                    os.write( fd, chunk )
                os.close( fd )
                gzipped_file.close()
                # Replace the gzipped file with the decompressed file if it's safe to do so
                if dataset.type in ( 'server_dir', 'path_paste' ) or not in_place:
                    dataset.path = uncompressed
                else:
                    shutil.move( uncompressed, dataset.path )
                os.chmod(dataset.path, 0644)
            dataset.name = dataset.name.rstrip( '.gz' )
            data_type = 'gzip'
        if not data_type and bz2 is not None:
            # See if we have a bz2 file, much like gzip
            is_bzipped, is_valid = check_bz2( dataset.path )
            if is_bzipped and not is_valid:
                file_err( 'The bz2-compressed uploaded file contains inappropriate content', dataset, json_file )
                return
            elif is_bzipped and is_valid:
                if link_data_only == 'copy_files':
                    # We need to uncompress the temp_name file
                    CHUNK_SIZE = 2**20 # 1Mb
                    fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_bunzip2_' % dataset.dataset_id, dir=os.path.dirname( output_path ), text=False )
                    bzipped_file = bz2.BZ2File( dataset.path, 'rb' )
                    while 1:
                        try:
                            chunk = bzipped_file.read( CHUNK_SIZE )
                        except IOError:
                            os.close( fd )
                            os.remove( uncompressed )
                            file_err( 'Problem decompressing bz2 compressed data', dataset, json_file )
                            return
                        if not chunk:
                            break
                        os.write( fd, chunk )
                    os.close( fd )
                    bzipped_file.close()
                    # Replace the bzipped file with the decompressed file if it's safe to do so
                    if dataset.type in ( 'server_dir', 'path_paste' ) or not in_place:
                        dataset.path = uncompressed
                    else:
                        shutil.move( uncompressed, dataset.path )
                    os.chmod(dataset.path, 0644)
                dataset.name = dataset.name.rstrip( '.bz2' )
                data_type = 'bz2'
        if not data_type:
            # See if we have a zip archive
            is_zipped = check_zip( dataset.path )
            if is_zipped:
                if link_data_only == 'copy_files':
                    CHUNK_SIZE = 2**20 # 1Mb
                    uncompressed = None
                    uncompressed_name = None
                    unzipped = False
                    z = zipfile.ZipFile( dataset.path )
                    for name in z.namelist():
                        if name.endswith('/'):
                            continue
                        if unzipped:
                            stdout = 'ZIP file contained more than one file, only the first file was added to Galaxy.'
                            break
                        fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_zip_' % dataset.dataset_id, dir=os.path.dirname( output_path ), text=False )
                        if sys.version_info[:2] >= ( 2, 6 ):
                            zipped_file = z.open( name )
                            while 1:
                                try:
                                    chunk = zipped_file.read( CHUNK_SIZE )
                                except IOError:
                                    os.close( fd )
                                    os.remove( uncompressed )
                                    file_err( 'Problem decompressing zipped data', dataset, json_file )
                                    return
                                if not chunk:
                                    break
                                os.write( fd, chunk )
                            os.close( fd )
                            zipped_file.close()
                            uncompressed_name = name
                            unzipped = True
                        else:
                            # python < 2.5 doesn't have a way to read members in chunks(!)
                            try:
                                outfile = open( uncompressed, 'wb' )
                                outfile.write( z.read( name ) )
                                outfile.close()
                                uncompressed_name = name
                                unzipped = True
                            except IOError:
                                os.close( fd )
                                os.remove( uncompressed )
                                file_err( 'Problem decompressing zipped data', dataset, json_file )
                                return
                    z.close()
                    # Replace the zipped file with the decompressed file if it's safe to do so
                    if uncompressed is not None:
                        if dataset.type in ( 'server_dir', 'path_paste' ) or not in_place:
                            dataset.path = uncompressed
                        else:
                            shutil.move( uncompressed, dataset.path )
                        os.chmod(dataset.path, 0644)
                        dataset.name = uncompressed_name
                data_type = 'zip'
        if not data_type:
            if check_binary( dataset.path ):
                # We have a binary dataset, but it is not Bam, Sff or Pdf
                data_type = 'binary'
                #binary_ok = False
                parts = dataset.name.split( "." )
                if len( parts ) > 1:
                    ext = parts[-1].strip().lower()
                    if not Binary.is_ext_unsniffable(ext):
                        file_err( 'The uploaded binary file contains inappropriate content', dataset, json_file )
                        return
                    elif Binary.is_ext_unsniffable(ext) and dataset.file_type != ext:
                        err_msg = "You must manually set the 'File Format' to '%s' when uploading %s files." % ( ext.capitalize(), ext )
                        file_err( err_msg, dataset, json_file )
                        return
        if not data_type:
            # We must have a text file
            if check_html( dataset.path ):
                file_err( 'The uploaded file contains inappropriate HTML content', dataset, json_file )
                return
        if data_type != 'binary':
            if link_data_only == 'copy_files':
                if dataset.type in ( 'server_dir', 'path_paste' ) and data_type not in [ 'gzip', 'bz2', 'zip' ]:
                    in_place = False
                # Convert universal line endings to Posix line endings, but allow the user to turn it off,
                # so that it becomes possible to upload gzip, bz2 or zip files with binary data without
                # corrupting the content of those files.
                if dataset.to_posix_lines:
                    if dataset.space_to_tab:
                        line_count, converted_path = sniff.convert_newlines_sep2tabs( dataset.path, in_place=in_place )
                    else:
                        line_count, converted_path = sniff.convert_newlines( dataset.path, in_place=in_place )
            if dataset.file_type == 'auto':
                ext = sniff.guess_ext( dataset.path, registry.sniff_order )
            else:
                ext = dataset.file_type
            data_type = ext
    # Save job info for the framework
    if ext == 'auto' and dataset.ext:
        ext = dataset.ext
    if ext == 'auto':
        ext = 'data'
    datatype = registry.get_datatype_by_extension( ext )
    if dataset.type in ( 'server_dir', 'path_paste' ) and link_data_only == 'link_to_files':
        # Never alter a file that will not be copied to Galaxy's local file store.
        if datatype.dataset_content_needs_grooming( dataset.path ):
            err_msg = 'The uploaded files need grooming, so change your <b>Copy data into Galaxy?</b> selection to be ' + \
                      '<b>Copy files into Galaxy</b> instead of <b>Link to files without copying into Galaxy</b> so grooming can be performed.'
            file_err( err_msg, dataset, json_file )
            return
    if link_data_only == 'copy_files' and dataset.type in ( 'server_dir', 'path_paste' ) and data_type not in [ 'gzip', 'bz2', 'zip' ]:
        # Move the dataset to its "real" path
        if converted_path is not None:
            shutil.copy( converted_path, output_path )
            try:
                os.remove( converted_path )
            except:
                pass
        else:
            # This should not happen, but it's here just in case
            shutil.copy( dataset.path, output_path )
    elif link_data_only == 'copy_files':
        shutil.move( dataset.path, output_path )
    # Write the job info
    stdout = stdout or 'uploaded %s file' % data_type
    info = dict( type = 'dataset',
                 dataset_id = dataset.dataset_id,
                 ext = ext,
                 stdout = stdout,
                 name = dataset.name,
                 line_count = line_count )
    if dataset.get('uuid', None) is not None:
        info['uuid'] = dataset.get('uuid')
    json_file.write( to_json_string( info ) + "\n" )
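    # Illustrative shape of the job-info line written above (values are hypothetical):
    #   {"type": "dataset", "dataset_id": 1, "ext": "tabular", "stdout": "uploaded tabular file", "name": "sample.tsv", "line_count": 42}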

    if link_data_only == 'copy_files' and datatype.dataset_content_needs_grooming( output_path ):
        # Groom the dataset content if necessary
        datatype.groom_dataset_content( output_path )

def add_composite_file( dataset, registry, json_file, output_path, files_path ):
    if dataset.composite_files:
        os.mkdir( files_path )
        for name, value in dataset.composite_files.iteritems():
            value = util.bunch.Bunch( **value )
            if dataset.composite_file_paths[ value.name ] is None and not value.optional:
                file_err( 'A required composite data file was not provided (%s)' % name, dataset, json_file )
                break
            elif dataset.composite_file_paths[value.name] is not None:
                dp = dataset.composite_file_paths[value.name][ 'path' ]
                isurl = dp.find('://') != -1 # todo fixme
                if isurl:
                    try:
                        temp_name, dataset.is_multi_byte = sniff.stream_to_file( urllib.urlopen( dp ), prefix='url_paste' )
                    except Exception, e:
                        file_err( 'Unable to fetch %s\n%s' % ( dp, str( e ) ), dataset, json_file )
                        return
                    dataset.path = temp_name
                    dp = temp_name
                if not value.is_binary:
                    if dataset.composite_file_paths[ value.name ].get( 'space_to_tab', value.space_to_tab ):
                        sniff.convert_newlines_sep2tabs( dp )
                    else:
                        sniff.convert_newlines( dp )
                shutil.move( dp, os.path.join( files_path, name ) )
    # Move the dataset to its "real" path
    shutil.move( dataset.primary_file, output_path )
    # Write the job info
    info = dict( type = 'dataset',
                 dataset_id = dataset.dataset_id,
                 stdout = 'uploaded %s file' % dataset.file_type )
    json_file.write( to_json_string( info ) + "\n" )

def __main__():

    if len( sys.argv ) < 4:
        print >>sys.stderr, 'usage: uploadzip.py <root> <datatypes_conf> <json paramfile> <output spec> ...'
        sys.exit( 1 )
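    # Illustrative invocation (paths are hypothetical):
    #   python uploadzip.py /galaxy/root /galaxy/datatypes_conf.xml params.json 1:/job/dataset_1_files:/job/output_1.dat
    # Each trailing "<output spec>" argument is parsed by parse_outputs() above.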

    output_paths = parse_outputs( sys.argv[4:] )
    json_file = open( 'galaxy.json', 'w' )

    registry = Registry()
    registry.load_datatypes( root_dir=sys.argv[1], config=sys.argv[2] )

    for line in open( sys.argv[3], 'r' ):
        dataset = from_json_string( line )
        dataset = util.bunch.Bunch( **safe_dict( dataset ) )
        try:
            output_path = output_paths[int( dataset.dataset_id )][0]
        except:
            print >>sys.stderr, 'Output path for dataset %s not found on command line' % dataset.dataset_id
            sys.exit( 1 )
        if dataset.type == 'composite':
            files_path = output_paths[int( dataset.dataset_id )][1]
            add_composite_file( dataset, registry, json_file, output_path, files_path )
        else:
            add_file( dataset, registry, json_file, output_path )

    # clean up paramfile
    # TODO: this will not work when running as the actual user unless the
    # parent directory is writable by the user.
    try:
        os.remove( sys.argv[3] )
    except:
        pass

if __name__ == '__main__':
    __main__()