changeset 9:b96a5f7770dc draft

Uploaded
author greg
date Wed, 23 Nov 2016 13:15:21 -0500
parents 2aa2ee1f7563
children 54b4a38050eb
files data_manager/data_manager_plant_tribes_scaffolds_download.py
diffstat 1 file changed, 1 insertion(+), 49 deletions(-)
--- a/data_manager/data_manager_plant_tribes_scaffolds_download.py	Wed Nov 23 11:04:49 2016 -0500
+++ b/data_manager/data_manager_plant_tribes_scaffolds_download.py	Wed Nov 23 13:15:21 2016 -0500
@@ -21,48 +21,6 @@
     return data_manager_dict
 
 
-def files_from_file_paths(file_paths):
-    """
-    Given a list of file system paths, return a list of
-    absolute paths for all files and directories within
-    those paths.
-    """
-    # Collect files.
-    files = []
-    for file_path in file_paths:
-        file_path = os.path.abspath(file_path)
-        if os.path.isfile(file_path):
-            # Store full path for each file.
-            files.append(file_path)
-        elif os.path.isdir(file_path):
-            # Descend into directory and collect the files
-            for f in os.listdir(file_path):
-                files.extend(files_from_file_paths(os.path.join(file_path, f)))
-    return files
-
-
-def import_from_server(data_manager_dict, target_directory, file_system_paths, description, create_symlink, data_table_names=DEFAULT_DATA_TABLE_NAMES):
-    """
-    Creates references to the specified file(s) on the Galaxy
-    server in the data table.
-    """
-    # Remove escapes for '\n' and '\r' that might have been inserted by Galaxy.
-    file_paths = file_system_paths.replace('__cn__', '\n').replace('__cr__', '\r').split()
-    files = files_from_file_paths(file_paths)
-    for f in files:
-        source_file = os.path.basename(f)
-        target_file = os.path.join(target_directory, source_file)
-        entry_name = source_file
-        if create_symlink == 'create_symlink':
-            os.symlink(f, target_file)
-        else:
-            shutil.copyfile(f, target_file)
-        data_table_entry = dict(value=source_file, name=entry_name, path=f, description=description)
-        for data_table_name in data_table_names:
-            data_manager_dict = add_data_table_entry(data_manager_dict, data_table_name, data_table_entry)
-    return data_manager_dict
-
-
 def make_directory(dir):
     if not os.path.exists(dir):
         os.makedirs(dir)
@@ -118,7 +76,6 @@
 
 
 parser = argparse.ArgumentParser()
-parser.add_argument('--data_source', dest='data_source', help='Data source')
 parser.add_argument('--description', dest='description', default=None, help='Description')
 parser.add_argument('--create_symlink', dest='create_symlink', default=None, help='Link files instead of copying')
 parser.add_argument('--file_system_paths', dest='file_system_paths', default=None, help='File system paths')
@@ -143,12 +100,7 @@
 
 # Initialize the data table.
 data_manager_dict = {}
-
 # Get the scaffolds data.
-if args.data_source == 'web_url':
-    data_manager_dict = url_download(data_manager_dict, target_directory, args.web_url, description, data_table_names=DEFAULT_DATA_TABLE_NAMES)
-else:
-    data_manager_dict = import_from_server(data_manager_dict, target_directory, args.file_system_paths, description, args.create_symlink, data_table_names=DEFAULT_DATA_TABLE_NAMES)
-
+data_manager_dict = url_download(data_manager_dict, target_directory, args.web_url, description, data_table_names=DEFAULT_DATA_TABLE_NAMES)
 # Write the JSON output dataset.
 file(args.out_file, 'w').write(json.dumps(data_manager_dict))