Commit message:
planemo upload for repository https://github.com/brsynth commit 15dbdd1f0a222a8e1b0fb5c16b36885520a3d005
added:
docker_DB.py get_DB_data.xml get_db_info.py output.html output.json test-data/output_annotations.json test-data/test_input.csv testMock.py
diff -r 000000000000 -r 41ac63b5d221 docker_DB.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/docker_DB.py	Thu Apr 10 08:45:18 2025 +0000
@@ -0,0 +1,28 @@
+import docker
+import time
+
+def start_postgresql_container():
+    """Starts a PostgreSQL container using Docker."""
+    client = docker.from_env()
+
+    print("Pulling the PostgreSQL image...")
+    client.images.pull("postgres")
+
+    print("Starting PostgreSQL container...")
+    container = client.containers.run(
+        "postgres",
+        name="test-postgres",
+        environment={"POSTGRES_PASSWORD": "RK17"},
+        ports={'5432/tcp': 5432},
+        detach=True
+    )
+
+    # Wait for the database to start
+    print("Waiting for PostgreSQL to start...")
+    time.sleep(10)
+
+    print("PostgreSQL container is running!")
+    return container
+
+if __name__ == "__main__":
+    container = start_postgresql_container()
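Editor's note: docker_DB.py leaves the "test-postgres" container running after the script exits. A minimal cleanup sketch using the same docker SDK (not part of the changeset; the container name is taken from the script above):

    import docker
    from docker.errors import NotFound

    def stop_postgresql_container(name="test-postgres"):
        """Stop and remove the test container started by start_postgresql_container()."""
        client = docker.from_env()
        try:
            container = client.containers.get(name)
            container.stop()
            container.remove()
            print(f"Container '{name}' stopped and removed.")
        except NotFound:
            print(f"No container named '{name}' found.")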
diff -r 000000000000 -r 41ac63b5d221 get_DB_data.xml
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/get_DB_data.xml	Thu Apr 10 08:45:18 2025 +0000
@@ -0,0 +1,51 @@
+<tool id="get_DB_data" name="get data from DB" version="0.1.0" profile="21.09">
+    <description>Import data from Sorbonne Biofoundry DB</description>
+    <requirements>
+        <requirement type="package" version="2.2.3">pandas</requirement>
+        <requirement type="package" version="2.0.40">sqlalchemy</requirement>
+        <requirement type="package" version="2.9.9">psycopg2</requirement>
+    </requirements>
+    <command detect_errors="exit_code"><![CDATA[
+        python '$__tool_directory__/get_db_info.py'
+        --input '$input'
+        --db_uri '$db_uri'
+        --table '$table'
+        --fragment_column '$fragment_column'
+        --output '$output'
+    ]]></command>
+    <inputs>
+        <param name="input" type="data" format="csv" label="Input CSV file" />
+        <param name="table" type="text" label="Database Table Name" optional="false" />
+        <param name="fragment_column" type="text" label="Database Fragments Column Name" optional="false" />
+        <param name="db_uri" type="text" label="Database Connection URI" help="postgresql+psycopg2://username:password@db_container_name:port/database" optional="false" />
+    </inputs>
+    <outputs>
+        <data name="output" format="json" label="DB Annotated Fragments (json)">
+        </data>
+    </outputs>
+    <!-- python get_db_info.py --input 'test-data/test_input.csv' --db_uri 'postgresql://postgres:RK17@localhost:5432/test_fragments_db' --table 'sample' --fragment_column 'fragment' --output 'test-data/output.json' -->
+    <tests>
+        <test>
+            <param name="input" value="test_input.csv" />
+            <param name="table" value="sample" />
+            <param name="fragment_column" value="fragment" />
+            <param name="db_uri" value="postgresql://postgres:RK17@localhost:5432/test_fragments_db" />
+            <output name="output" file="output_annotations.json" ftype="json" />
+        </test>
+    </tests>
+    <help><![CDATA[
+Pick Data From DB
+=================
+
+Get data from a SQL database running in a Docker container.
+    ]]></help>
+    <citations>
+        <citation type="bibtex">
+            @unpublished{get_DB_data,
+                author = {Ramiz Khaled},
+                title = {{get_DB_data}},
+                url = {https://github.com/brsynth/},
+            }
+        </citation>
+    </citations>
+</tool>
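Editor's note: the commented-out line in the wrapper records a manual test invocation. A local equivalent of the <tests> case, assuming the test_fragments_db container and the fixture from testMock.py are already up (paths are illustrative, run from the tool directory):

    import subprocess

    # Mirror of the Galaxy test case defined in get_DB_data.xml
    subprocess.run([
        "python", "get_db_info.py",
        "--input", "test-data/test_input.csv",
        "--db_uri", "postgresql://postgres:RK17@localhost:5432/test_fragments_db",
        "--table", "sample",
        "--fragment_column", "fragment",
        "--output", "test-data/output.json",
    ], check=True)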
diff -r 000000000000 -r 41ac63b5d221 get_db_info.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/get_db_info.py	Thu Apr 10 08:45:18 2025 +0000
@@ -0,0 +1,154 @@
+import subprocess
+import time
+import argparse
+import socket
+import os
+import pandas as pd
+import json
+from sqlalchemy import create_engine, inspect
+from sqlalchemy.sql import text
+from sqlalchemy.engine.url import make_url
+from sqlalchemy.exc import OperationalError
+
+def fix_db_uri(uri):
+    """Replace __at__ with @ in the URI if needed."""
+    return uri.replace("__at__", "@")
+
+def is_port_in_use(port):
+    """Check if a TCP port is already in use on localhost."""
+    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+        return s.connect_ex(('localhost', port)) == 0
+
+def extract_db_name(uri):
+    """Extract the database name from the SQLAlchemy URI."""
+    url = make_url(uri)
+    return url.database
+
+def start_postgres_container(db_name):
+    """Start a PostgreSQL container with the given database name as the container name."""
+    container_name = db_name
+
+    # Check if container is already running
+    container_running = subprocess.run(
+        f"docker ps -q -f name={container_name}", shell=True, capture_output=True, text=True
+    )
+
+    if container_running.stdout.strip():
+        print(f"Container '{container_name}' is already running.")
+        return
+
+    # Check if container exists (stopped)
+    container_exists = subprocess.run(
+        f"docker ps -a -q -f name={container_name}", shell=True, capture_output=True, text=True
+    )
+
+    if container_exists.stdout.strip():
+        print(f"Starting existing container '{container_name}'...")
+        subprocess.run(f"docker start {container_name}", shell=True)
+        print(f"PostgreSQL Docker container '{container_name}' activated.")
+        return
+
+    # If container does not exist, create and start a new one
+    port = 5432 if not is_port_in_use(5432) else 5433
+    postgres_password = os.getenv("POSTGRES_PASSWORD", "RK17")
+
+    start_command = [
+        "docker", "run", "--name", container_name,
+        "-e", f"POSTGRES_PASSWORD={postgres_password}",
+        "-p", f"{port}:5432",
+        "-d", "postgres"
+    ]
+
+    try:
+        subprocess.run(start_command, check=True)
+        print(f"PostgreSQL Docker container '{container_name}' started on port {port}.")
+    except subprocess.CalledProcessError as e:
+        print(f"Failed to start Docker container: {e}")
+
+def wait_for_db(uri, timeout=60):
+    """Try connecting to the DB until it works or timeout."""
+    engine = create_engine(uri)
+    start_time = time.time()
+    while time.time() - start_time < timeout:
+        try:
+            with engine.connect():
+                print("Connected to database.")
+                return
+        except OperationalError:
+            print("Database not ready, retrying...")
+            time.sleep(2)
+    raise Exception("Database connection failed after timeout.")
+
+def fetch_annotations(csv_file, db_uri, table_name, fragment_column_name, output):
+    """Fetch annotations from the database and save the result as a JSON file."""
+    db_uri = fix_db_uri(db_uri)
+    df = pd.read_csv(csv_file, sep=',')
+
+    engine = create_engine(db_uri)
+    connection = engine.connect()
+
+    annotated_data = []
+
+    try:
+        with connection:
+            inspector = inspect(engine)
+            columns = [column['name'] for column in inspector.get_columns(table_name)]
+
+            # Fetch all fragments from the table once
+            all_rows = connection.execute(text(f"SELECT * FROM {table_name}")).fetchall()
+            fragment_map = {row[0]: row for row in all_rows}  # Assuming the first column is fragment ID
+
+            for _, row in df.iterrows():
+                annotated_row = {"Backbone": row["ID"], "Fragments": []}
+
+                for col in df.columns:
+                    if col != "ID":
+                        fragment = row[col]
+                        db_row = fragment_map.get(fragment)
+
+                        if db_row:
+                            fragment_data = {"id": fragment}
+                            for i, column_name in enumerate(columns[1:]):  # skip ID column
+                                fragment_data[column_name] = db_row[i + 1]
+                        else:
+                            fragment_data = {"id": fragment, "metadata": "No data found"}
+
+                        annotated_row["Fragments"].append(fragment_data)
+
+                annotated_data.append(annotated_row)
+
+    except Exception as e:
+        print(f"Error occurred during annotation: {e}")
+        return
+
+    try:
+        with open(output, "w") as f:
+            json.dump(annotated_data, f, indent=4)
+        print(f"Annotation saved to {output}")
+    except Exception as e:
+        print(f"Error saving output file: {e}")
+
+    return output
+
+def main():
+    parser = argparse.ArgumentParser(description="Fetch annotations from PostgreSQL database and save as JSON.")
+    parser.add_argument("--input", required=True, help="Input CSV file")
+    parser.add_argument("--db_uri", required=True, help="Database URI connection string")
+    parser.add_argument("--table", required=True, help="Table name in the database")
+    parser.add_argument("--fragment_column", required=True, help="Fragment column name in the database")
+    parser.add_argument("--output", required=True, help="Output JSON file")
+    args = parser.parse_args()
+
+    # Start the Docker container (if not already running)
+    db_uri = fix_db_uri(args.db_uri)
+    db_name = extract_db_name(db_uri)
+    start_postgres_container(db_name)
+
+    # Wait until the database is ready
+    wait_for_db(db_uri)
+
+    # Fetch annotations from the database and save as JSON
+    fetch_annotations(args.input, db_uri, args.table, args.fragment_column, args.output)
+
+if __name__ == "__main__":
+    main()
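Editor's note: fetch_annotations pairs values with column names by position (db_row[i + 1] against columns[1:]), which assumes SELECT * returns columns in the same order the inspector reports. A name-based variant is possible with the pinned SQLAlchemy 2.x via row mappings; a sketch (fetch_rows_by_name is a hypothetical helper, not in the changeset):

    from sqlalchemy import create_engine, text

    def fetch_rows_by_name(db_uri, table_name):
        """Return {fragment_id: {column: value}} using row._mapping instead of positions."""
        engine = create_engine(db_uri)
        with engine.connect() as conn:
            rows = conn.execute(text(f"SELECT * FROM {table_name}")).fetchall()
        # row._mapping is a name -> value view of each row; row[0] is still assumed
        # to be the fragment ID, as in fetch_annotations above
        return {row[0]: dict(row._mapping) for row in rows}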
diff -r 000000000000 -r 41ac63b5d221 output.html
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/output.html	Thu Apr 10 08:45:18 2025 +0000
@@ -0,0 +1,291 @@
[content not reproduced: output.html is a "Test Results (powered by Planemo)" HTML report with inlined Bootstrap 3.3.1 CSS and the per-test details embedded as base64; the changeset view renders the file as a truncated byte string, no trailing newline]
diff -r 000000000000 -r 41ac63b5d221 output.json
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/output.json	Thu Apr 10 08:45:18 2025 +0000
@@ -0,0 +1,82 @@
[content abridged: output.json is the Planemo test-report JSON for this run. Its summary records 1 test with 0 errors, failures, and skips; tests[0] records the get_DB_data 0.1.0 job finishing with state "ok" and status "success" in about 18 s on Galaxy 24.2. The captured command line shows the db_uri arriving as 'postgresql://postgres:RK17__at__localhost:5432/test_fragments_db', the escaping that get_db_info.py's fix_db_uri() undoes, and the job stdout captures the full annotation JSON, identical to test-data/output_annotations.json below; the changeset view renders the file as a truncated byte string, no trailing newline]
diff -r 000000000000 -r 41ac63b5d221 test-data/output_annotations.json
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/output_annotations.json	Thu Apr 10 08:45:18 2025 +0000
@@ -0,0 +1,94 @@
+[
+    {
+        "Backbone": "ACP10001AaCbbBS",
+        "Fragments": [
+            {
+                "id": "ACP10001AaCbbBS",
+                "sequence": "seq for ACP10001AaCbbBS",
+                "metadata_1": "Metadata1 for ACP10001AaCbbBS",
+                "metadata_2": "Metadata2 for ACP10001AaCbbBS"
+            },
+            {
+                "id": "CFP10002AaCbbBS",
+                "sequence": "seq for CFP10002AaCbbBS",
+                "metadata_1": "Metadata1 for CFP10002AaCbbBS",
+                "metadata_2": "Metadata2 for CFP10002AaCbbBS"
+            },
+            {
+                "id": "XYZ10003AaCbbBS",
+                "sequence": "seq for XYZ10003AaCbbBS",
+                "metadata_1": "Metadata1 for XYZ10003AaCbbBS",
+                "metadata_2": "Metadata2 for XYZ10003AaCbbBS"
+            }
+        ]
+    },
+    {
+        "Backbone": "CFP10002AaCbbBS",
+        "Fragments": [
+            {
+                "id": "CFP10002AaCbbBS",
+                "sequence": "seq for CFP10002AaCbbBS",
+                "metadata_1": "Metadata1 for CFP10002AaCbbBS",
+                "metadata_2": "Metadata2 for CFP10002AaCbbBS"
+            },
+            {
+                "id": "ACP10001AaCbbBS",
+                "sequence": "seq for ACP10001AaCbbBS",
+                "metadata_1": "Metadata1 for ACP10001AaCbbBS",
+                "metadata_2": "Metadata2 for ACP10001AaCbbBS"
+            },
+            {
+                "id": "QWE10004AaCbbBS",
+                "sequence": "seq for QWE10004AaCbbBS",
+                "metadata_1": "Metadata1 for QWE10004AaCbbBS",
+                "metadata_2": "Metadata2 for QWE10004AaCbbBS"
+            }
+        ]
+    },
+    {
+        "Backbone": "XYZ10003AaCbbBS",
+        "Fragments": [
+            {
+                "id": "XYZ10003AaCbbBS",
+                "sequence": "seq for XYZ10003AaCbbBS",
+                "metadata_1": "Metadata1 for XYZ10003AaCbbBS",
+                "metadata_2": "Metadata2 for XYZ10003AaCbbBS"
+            },
+            {
+                "id": "QWE10004AaCbbBS",
+                "sequence": "seq for QWE10004AaCbbBS",
+                "metadata_1": "Metadata1 for QWE10004AaCbbBS",
+                "metadata_2": "Metadata2 for QWE10004AaCbbBS"
+            },
+            {
+                "id": "ACP10001AaCbbBS",
+                "sequence": "seq for ACP10001AaCbbBS",
+                "metadata_1": "Metadata1 for ACP10001AaCbbBS",
+                "metadata_2": "Metadata2 for ACP10001AaCbbBS"
+            }
+        ]
+    },
+    {
+        "Backbone": "QWE10004AaCbbBS",
+        "Fragments": [
+            {
+                "id": "QWE10004AaCbbBS",
+                "sequence": "seq for QWE10004AaCbbBS",
+                "metadata_1": "Metadata1 for QWE10004AaCbbBS",
+                "metadata_2": "Metadata2 for QWE10004AaCbbBS"
+            },
+            {
+                "id": "XYZ10003AaCbbBS",
+                "sequence": "seq for XYZ10003AaCbbBS",
+                "metadata_1": "Metadata1 for XYZ10003AaCbbBS",
+                "metadata_2": "Metadata2 for XYZ10003AaCbbBS"
+            },
+            {
+                "id": "CFP10002AaCbbBS",
+                "sequence": "seq for CFP10002AaCbbBS",
+                "metadata_1": "Metadata1 for CFP10002AaCbbBS",
+                "metadata_2": "Metadata2 for CFP10002AaCbbBS"
+            }
+        ]
+    }
+]
\ No newline at end of file
diff -r 000000000000 -r 41ac63b5d221 test-data/test_input.csv
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/test_input.csv	Thu Apr 10 08:45:18 2025 +0000
@@ -0,0 +1,6 @@
+ID,Frag1,Frag2,Frag3
+ACP10001AaCbbBS,ACP10001AaCbbBS,CFP10002AaCbbBS,XYZ10003AaCbbBS
+CFP10002AaCbbBS,CFP10002AaCbbBS,ACP10001AaCbbBS,QWE10004AaCbbBS
+XYZ10003AaCbbBS,XYZ10003AaCbbBS,QWE10004AaCbbBS,ACP10001AaCbbBS
+QWE10004AaCbbBS,QWE10004AaCbbBS,XYZ10003AaCbbBS,CFP10002AaCbbBS
+
diff -r 000000000000 -r 41ac63b5d221 testMock.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/testMock.py	Thu Apr 10 08:45:18 2025 +0000
@@ -0,0 +1,71 @@
+import psycopg2
+from psycopg2 import sql
+
+def create_db_and_insert_data():
+    """Creates a database, a table, and inserts mock data into the PostgreSQL database."""
+    # Connect to the PostgreSQL container (default 'postgres' database for setup)
+    conn = psycopg2.connect(
+        dbname='postgres',   # Default database
+        user='postgres',     # Default user
+        password='RK17',     # Password from Docker environment
+        host='localhost',    # Running locally on the default Docker network
+        port='5432'          # Default PostgreSQL port
+    )
+
+    conn.autocommit = True  # Necessary to create a database
+    cursor = conn.cursor()
+
+    # Check if the test database already exists
+    cursor.execute("SELECT 1 FROM pg_catalog.pg_database WHERE datname = 'test_fragments_db';")
+    exists = cursor.fetchone()
+
+    if exists:
+        print("Database 'test_fragments_db' already exists, dropping it...")
+        cursor.execute("DROP DATABASE test_fragments_db;")
+
+    # Create the new database for testing
+    cursor.execute('CREATE DATABASE test_fragments_db;')
+    print("Database 'test_fragments_db' created.")
+
+    cursor.close()
+    conn.close()
+
+    # Now connect to the new test database
+    conn = psycopg2.connect(
+        dbname='test_fragments_db',
+        user='postgres',
+        password='RK17',
+        host='localhost',
+        port='5432'
+    )
+
+    cursor = conn.cursor()
+
+    # Create the 'sample' table instead of 'fragments'
+    cursor.execute('''
+        CREATE TABLE sample (
+            fragment TEXT PRIMARY KEY,
+            sequence TEXT,
+            metadata_1 TEXT,
+            metadata_2 TEXT
+        );
+    ''')
+
+    # Insert mock data
+    cursor.executemany('''
+        INSERT INTO sample (fragment, sequence, metadata_1, metadata_2) VALUES (%s, %s, %s, %s);
+    ''', [
+        ('ACP10001AaCbbBS', 'seq for ACP10001AaCbbBS', 'Metadata1 for ACP10001AaCbbBS', 'Metadata2 for ACP10001AaCbbBS'),
+        ('CFP10002AaCbbBS', 'seq for CFP10002AaCbbBS', 'Metadata1 for CFP10002AaCbbBS', 'Metadata2 for CFP10002AaCbbBS'),
+        ('XYZ10003AaCbbBS', 'seq for XYZ10003AaCbbBS', 'Metadata1 for XYZ10003AaCbbBS', 'Metadata2 for XYZ10003AaCbbBS'),
+        ('QWE10004AaCbbBS', 'seq for QWE10004AaCbbBS', 'Metadata1 for QWE10004AaCbbBS', 'Metadata2 for QWE10004AaCbbBS')
+    ])
+
+    conn.commit()
+    print("Mock data inserted into 'sample' table.")
+
+    cursor.close()
+    conn.close()
+
+if __name__ == "__main__":
+    create_db_and_insert_data()
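Editor's note: testMock.py only prints on success. A quick smoke check (hypothetical, not part of the changeset) to confirm the fixture loaded before running the Galaxy test:

    import psycopg2

    # Confirm the four mock fragments inserted above are present in 'sample'
    conn = psycopg2.connect(dbname='test_fragments_db', user='postgres',
                            password='RK17', host='localhost', port='5432')
    with conn, conn.cursor() as cur:
        cur.execute("SELECT COUNT(*) FROM sample;")
        assert cur.fetchone()[0] == 4
    conn.close()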