From fe8271a177de0fa10f3b1bba6cb8b0c77629335a Mon Sep 17 00:00:00 2001
From: Alex Kanitz <alexander.kanitz@alumni.ethz.ch>
Date: Thu, 2 Nov 2023 18:55:45 +0100
Subject: [PATCH] build: upgrade FOCA (#64)

---
 .github/workflows/main.yml          | 109 ++++++++++++------
 Dockerfile                          |  41 ++-----
 docker-compose.yaml                 |   5 +-
 pro_wes/__init__.py                 |   2 +-
 pro_wes/app.py                      |   4 +-
 pro_wes/celery_worker.py            |  12 ++
 pro_wes/client_wes.py               |  15 ++-
 pro_wes/ga4gh/wes/workflow_runs.py  | 167 +++++++++++++---------------
 pro_wes/gunicorn.py                 |  33 +++---
 pro_wes/tasks/track_run_progress.py |  26 ++---
 pro_wes/utils/db.py                 |  29 +++--
 pro_wes/version.py                  |   3 +
 pro_wes/worker.py                   |   8 --
 pro_wes/wsgi.py                     |   2 +
 requirements.txt                    |   4 +-
 requirements_dev.txt                |  13 +--
 setup.py                            |  58 ++++++----
 17 files changed, 284 insertions(+), 247 deletions(-)
 create mode 100644 pro_wes/celery_worker.py
 create mode 100644 pro_wes/version.py
 delete mode 100644 pro_wes/worker.py

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 7d5f71d..30d49fd 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -1,43 +1,86 @@
----
-name: Test with docker compose
+name: proWES checks
+
 on:
   push:
     branches: [ dev ]
   pull_request:
     branches: [ dev ]
+
 jobs:
-  build:
+  lint:
+    name: Run linting
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out repository
+        uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+      - name: Install requirements
+        run: |
+          pip install -r requirements_dev.txt
+          pip install .
+      - name: Lint with Flake8
+        run: flake8
+  test:
+    name: Run tests
     runs-on: ubuntu-latest
     env:
-      PROBE_ENDPOINT: localhost:8080/ga4gh/wes/v1/ui/
-    strategy:
-      fail-fast: true
-      matrix:
-        python-version: ["3.7", "3.8","3.9", "3.10"]
-
+      PROBE_ENDPOINT: localhost:8090/ga4gh/wes/v1/service-info
+    permissions:
+      contents: read
+      packages: write
     steps:
-    - uses: actions/checkout@v2
-    - name: Install dependencies
-      run: pip install docker-compose
-    - name: Create data directories
-      run: mkdir -p ${HOME}/data/{db,output,tmp}
-    - name: Docker compose up
-      run: docker-compose up -d
-    - name: Sleep 30
-      run: sleep 30
-    - name: PROBE
-      run: echo "${PROBE_ENDPOINT}"
-    - name: Test
-      run: |
-        test $( \
-          curl \
-            -sL \
-            -v \
-            -o /dev/null \
-            -w '%{http_code}' \
-            -X GET \
-            --header 'Accept: application/json' \
-            "${PROBE_ENDPOINT}" \
-        ) == '200' || exit 1
-
+      - name: Check out repository
+        uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+      - name: Deploy app
+        run: docker-compose up -d --build
+      - name: Wait for app startup
+        run: sleep 20
+      - name: Probe endpoint
+        run: |
+          test $( \
+            curl \
+              -sL \
+              -v \
+              -o /dev/null \
+              -w '%{http_code}' \
+              -X GET \
+              --header 'Accept: application/json' \
+              "${PROBE_ENDPOINT}" \
+          ) == '200' || exit 1
+  publish:
+    name: Build and publish app image
+    runs-on: ubuntu-latest
+    if: ${{ github.event_name == 'push' }}
+    needs: [lint, test]
+    steps:
+      - name: Check out repository
+        uses: actions/checkout@v4
+      - name: Generate tag
+        run: |
+          echo "TAG=$(date '+%Y%m%d')" >> $GITHUB_ENV
+      - name: Build and publish image
+        id: docker
+        uses: philips-software/docker-ci-scripts@v5.0.0
+        with:
+          dockerfile: .
+ image-name: "prowes" + tags: "latest ${{ env.TAG }}" + push-branches: "${{ github.event.repository.default_branch }}" + env: + REGISTRY_USERNAME: ${{ secrets.DOCKERHUB_LOGIN }} + REGISTRY_TOKEN: "${{ secrets.DOCKERHUB_TOKEN }}" + DOCKER_ORGANIZATION: ${{ secrets.DOCKERHUB_ORG }} + GITHUB_ORGANIZATION: ${{ github.repository_owner }} + - name: Verify that image was pushed + run: | + echo "Push indicator: ${{ steps.docker.outputs.push-indicator }}" + echo "# Set to 'true' if image was pushed, empty string otherwise" + test "${{ steps.docker.outputs.push-indicator }}" == "true" diff --git a/Dockerfile b/Dockerfile index cef8972..1519c4e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,43 +1,24 @@ ##### BASE IMAGE ##### -FROM elixircloud/foca:20220524-py3.7 +FROM elixircloud/foca:20230818-py3.10 ##### METADATA ##### -LABEL base.image="elixircloud/foca:20220524-py3.7" -LABEL version="2.0" +LABEL version="3.0" LABEL software="proWES" -LABEL software.version="0.15.0" -LABEL software.description="Flask microservice implementing the Global Alliance for Genomics and Health (GA4GH) Workflow Execution Service (WES) API specification." +LABEL software.description="Flask microservice implementing the Global Alliance for Genomics and Health (GA4GH) Workflow Execution Service (WES) API specification as a proxy for middleware injection (e.g., workflow distribution logic)." LABEL software.website="https://github.com/elixir-europe/proWES" LABEL software.documentation="https://github.com/elixir-europe/proWES" -LABEL software.license="https://github.com/elixir-europe/proWES/blob/master/LICENSE" -LABEL software.tags="General" -LABEL maintainer="alexander.kanitz@alumni.ethz.ch" -LABEL maintainer.organisation="Biozentrum, University of Basel" -LABEL maintainer.location="Klingelbergstrasse 50/70, CH-4056 Basel, Switzerland" -LABEL maintainer.lab="ELIXIR Cloud & AAI" -LABEL maintainer.license="https://spdx.org/licenses/Apache-2.0" +LABEL software.license="https://spdx.org/licenses/Apache-2.0" +LABEL maintainer="cloud-service@elixir-europe.org" +LABEL maintainer.organisation="ELIXIR Cloud & AAI" # Python UserID workaround for OpenShift/K8S ENV LOGNAME=ipython ENV USER=ipython -## Set working directory WORKDIR /app +COPY ./ . +RUN pip install -e . 
-## Copy Python requirements -COPY ./requirements.txt /app/requirements.txt -COPY ./requirements_dev.txt /app/requirements_dev.txt - -## Install Python dependencies -RUN cd /app \ - && pip install -r requirements.txt \ - && pip install -r requirements_dev.txt \ - && cd / - -## Copy remaining app files -COPY ./ /app - -## Install app -RUN cd /app \ - && python setup.py develop \ - && cd / +## Add permissions for storing updated API specification +## (required by FOCA) +RUN chmod -R a+rwx /app/pro_wes/api diff --git a/docker-compose.yaml b/docker-compose.yaml index 95c667b..83c0051 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -10,7 +10,7 @@ services: links: - mongodb - rabbitmq - command: bash -c "cd /app/pro_wes; celery -A worker worker -E --loglevel=info" + command: bash -c "cd /app/pro_wes; celery -A celery_worker worker -E --loglevel=info" volumes: - ${PROWES_DATA_DIR:-../data/pro_wes}:/data @@ -23,8 +23,6 @@ services: links: - mongodb command: bash -c "cd /app/pro_wes; gunicorn -c gunicorn.py wsgi:app" - volumes: - - ${PROWES_DATA_DIR:-../data/pro_wes}:/data ports: - "8090:8080" @@ -53,4 +51,3 @@ services: command: flower --broker=amqp://guest:guest@rabbitmq:5672// --port=5555 ports: - "5565:5555" - diff --git a/pro_wes/__init__.py b/pro_wes/__init__.py index 435d64b..f25c5f2 100644 --- a/pro_wes/__init__.py +++ b/pro_wes/__init__.py @@ -1 +1 @@ -__version__ = '0.17.0' +"""proWES app package root.""" diff --git a/pro_wes/app.py b/pro_wes/app.py index 33f84e4..94a386b 100644 --- a/pro_wes/app.py +++ b/pro_wes/app.py @@ -13,7 +13,7 @@ def init_app() -> App: foca = Foca( config_file=Path(__file__).resolve().parent / "config.yaml", - custom_config_model='pro_wes.config_models.CustomConfig', + custom_config_model="pro_wes.config_models.CustomConfig", ) app = foca.create_app() with app.app.app_context(): @@ -31,6 +31,6 @@ def run_app(app: App) -> None: app.run(port=app.port) -if __name__ == '__main__': +if __name__ == "__main__": app = init_app() run_app(app) diff --git a/pro_wes/celery_worker.py b/pro_wes/celery_worker.py new file mode 100644 index 0000000..4a77841 --- /dev/null +++ b/pro_wes/celery_worker.py @@ -0,0 +1,12 @@ +"""Celery worker entry point.""" + +from pathlib import Path + +from celery import Celery +from foca import Foca # type: ignore + +foca = Foca( + config_file=Path(__file__).resolve().parent / "config.yaml", + custom_config_model="pro_wes.config_models.CustomConfig", +) +celery: Celery = foca.create_celery_app() diff --git a/pro_wes/client_wes.py b/pro_wes/client_wes.py index e61211f..b514d69 100644 --- a/pro_wes/client_wes.py +++ b/pro_wes/client_wes.py @@ -19,7 +19,7 @@ ) -class WesClient(): +class WesClient: """Client to communicate with GA4GH WES API. Arguments: @@ -39,6 +39,7 @@ class WesClient(): token: Bearer token for gaining access to WES endpoints. session: Client server session. """ + def __init__( self, host: str, @@ -123,9 +124,7 @@ def post_run( try: RunRequest(**form_data) except Exception as exc: - raise ValueError( - f"invalid form data: {form_data}" - ) from exc + raise ValueError(f"invalid form data: {form_data}") from exc try: response_unvalidated = self.session.post( url, @@ -320,7 +319,7 @@ def set_token( def set_headers( self, - content_accept: str = 'application/json', + content_accept: str = "application/json", content_type: Optional[str] = None, ) -> None: """Set session headers. @@ -330,8 +329,8 @@ def set_headers( content_type: Type of content sent with the request. 
""" headers = {} - headers['Accept'] = content_accept + headers["Accept"] = content_accept if content_type is not None: - headers['Content-Type'] = content_type - headers['Authorization'] = f"Bearer {self.token}" + headers["Content-Type"] = content_type + headers["Authorization"] = f"Bearer {self.token}" self.session.headers.update(headers) diff --git a/pro_wes/ga4gh/wes/workflow_runs.py b/pro_wes/ga4gh/wes/workflow_runs.py index d069855..aa3f59c 100644 --- a/pro_wes/ga4gh/wes/workflow_runs.py +++ b/pro_wes/ga4gh/wes/workflow_runs.py @@ -49,7 +49,6 @@ class WorkflowRuns: - def __init__(self) -> None: """Class for WES API server-side controller methods. @@ -63,7 +62,7 @@ def __init__(self) -> None: self.config: Dict = current_app.config self.foca_config: Config = current_app.config.foca self.db_client: Collection = ( - self.foca_config.db.dbs['runStore'].collections['runs'].client + self.foca_config.db.dbs["runStore"].collections["runs"].client ) # controller method for `POST /runs` @@ -93,7 +92,7 @@ def run_workflow( ) # get and attach workflow run owner - document.user_id = kwargs.get('user_id', None) + document.user_id = kwargs.get("user_id", None) # create run environment & insert run document into run collection document_stored = self._create_run_environment(document=document) @@ -105,7 +104,7 @@ def run_workflow( wes_client: WesClient = WesClient( host=document_stored.wes_endpoint.host, base_path=document_stored.wes_endpoint.base_path, - token=kwargs.get('jwt', None), + token=kwargs.get("jwt", None), ) # instantiate database connector @@ -144,7 +143,7 @@ def run_workflow( raise InternalServerError document_stored: DbDocument = ( db_connector.upsert_fields_in_root_object( - root='wes_endpoint', + root="wes_endpoint", run_id=response.run_id, ) ) @@ -153,16 +152,16 @@ def run_workflow( task__track_run_progress.apply_async( None, { - 'jwt': kwargs.get('jwt', None), - 'remote_host': document_stored.wes_endpoint.host, - 'remote_base_path': document_stored.wes_endpoint.base_path, - 'remote_run_id': document_stored.wes_endpoint.run_id, + "jwt": kwargs.get("jwt", None), + "remote_host": document_stored.wes_endpoint.host, + "remote_base_path": document_stored.wes_endpoint.base_path, + "remote_run_id": document_stored.wes_endpoint.run_id, }, task_id=document_stored.task_id, soft_time_limit=controller_config.timeout_job, ) - return {'run_id': document_stored.run_log.run_id} + return {"run_id": document_stored.run_log.run_id} # controller method for `GET /runs` def list_runs( @@ -179,66 +178,64 @@ def list_runs( https://github.com/ga4gh/workflow-execution-service-schemas/blob/c5406f1d3740e21b93d3ac71a4c8d7b874011519/openapi/workflow_execution_service.swagger.yaml#L495-L510 """ # fall back to default page size if not provided by user - if 'page_size' in kwargs: - page_size = kwargs['page_size'] + if "page_size" in kwargs: + page_size = kwargs["page_size"] else: - page_size = ( - self.foca_config.custom.list_runs.default_page_size - ) + page_size = self.foca_config.custom.list_runs.default_page_size # extract/set page token - if 'page_token' in kwargs: - page_token = kwargs['page_token'] + if "page_token" in kwargs: + page_token = kwargs["page_token"] else: - page_token = '' + page_token = "" # initialize filter dictionary filter_dict = {} # add filter for user-owned runs if user ID is available - if 'user_id' in kwargs: - filter_dict['user_id'] = kwargs['user_id'] + if "user_id" in kwargs: + filter_dict["user_id"] = kwargs["user_id"] # add pagination filter based on last object ID - if page_token 
!= '': - filter_dict['_id'] = {'$lt': ObjectId(page_token)} + if page_token != "": + filter_dict["_id"] = {"$lt": ObjectId(page_token)} # query database for workflow runs - cursor = self.db_client.find( - filter=filter_dict, - projection={ - 'run_log.run_id': True, - 'run_log.state': True, - } - # sort results by descending object ID (+/- newest to oldest) - ).sort( - '_id', -1 - # implement page size limit - ).limit( - page_size + cursor = ( + self.db_client.find( + filter=filter_dict, + projection={ + "run_log.run_id": True, + "run_log.state": True, + } + # sort results by descending object ID (+/- newest to oldest) ) + .sort( + "_id", + -1 + # implement page size limit + ) + .limit(page_size) + ) # convert cursor to list runs_list = list(cursor) # get next page token from ID of last run in cursor if runs_list: - next_page_token = str(runs_list[-1]['_id']) + next_page_token = str(runs_list[-1]["_id"]) else: - next_page_token = '' + next_page_token = "" # reshape list of runs for run in runs_list: - run['run_id'] = run['run_log']['run_id'] - run['state'] = run['run_log']['state'] - del run['run_log'] - del run['_id'] + run["run_id"] = run["run_log"]["run_id"] + run["state"] = run["run_log"]["state"] + del run["run_log"] + del run["_id"] # build and return response - return { - 'next_page_token': next_page_token, - 'runs': runs_list - } + return {"next_page_token": next_page_token, "runs": runs_list} # controller method for `GET /runs/{run_id}` def get_run_log( @@ -264,12 +261,12 @@ def get_run_log( """ # retrieve workflow run document = self.db_client.find_one( - filter={'run_log.run_id': run_id}, + filter={"run_log.run_id": run_id}, projection={ - 'user_id': True, - 'run_log': True, - '_id': False, - } + "user_id": True, + "run_log": True, + "_id": False, + }, ) # raise error if workflow run was not found @@ -281,11 +278,11 @@ def get_run_log( # only if authorization enabled self._check_access_permission( resource_id=run_id, - owner=document.get('user_id', None), - requester=kwargs.get('user_id', None), + owner=document.get("user_id", None), + requester=kwargs.get("user_id", None), ) - return document['run_log'] + return document["run_log"] # controller method for `GET /runs/{run_id}/status` def get_run_status( @@ -311,12 +308,12 @@ def get_run_status( """ # retrieve workflow run document = self.db_client.find_one( - filter={'run_log.run_id': run_id}, + filter={"run_log.run_id": run_id}, projection={ - 'user_id': True, - 'run_log.state': True, - '_id': False, - } + "user_id": True, + "run_log.state": True, + "_id": False, + }, ) # ensure resource is available @@ -327,13 +324,13 @@ def get_run_status( # ensure requester has access self._check_access_permission( resource_id=run_id, - owner=document.get('user_id', None), - requester=kwargs.get('user_id', None), + owner=document.get("user_id", None), + requester=kwargs.get("user_id", None), ) return { - 'run_id': run_id, - 'state': document['run_log']['state'], + "run_id": run_id, + "state": document["run_log"]["state"], } # controller method for `POST /runs/{run_id}/cancel` @@ -359,14 +356,14 @@ def cancel_run( """ # retrieve workflow run document = self.db_client.find_one( - filter={'run_log.run_id': run_id}, + filter={"run_log.run_id": run_id}, projection={ - 'user_id': True, - 'wes_endpoint.host': True, - 'wes_endpoint.base_path': True, - 'wes_endpoint.run_id': True, - '_id': False, - } + "user_id": True, + "wes_endpoint.host": True, + "wes_endpoint.base_path": True, + "wes_endpoint.run_id": True, + "_id": False, + }, ) # ensure resource 
is available @@ -377,19 +374,19 @@ def cancel_run( # ensure requester has access self._check_access_permission( resource_id=run_id, - owner=document.get('user_id', None), - requester=kwargs.get('user_id', None), + owner=document.get("user_id", None), + requester=kwargs.get("user_id", None), ) # cancel workflow run wes_client: WesClient = WesClient( - host=document['wes_endpoint']['host'], - base_path=document['wes_endpoint']['base_path'], - token=kwargs.get('jwt', None), + host=document["wes_endpoint"]["host"], + base_path=document["wes_endpoint"]["base_path"], + token=kwargs.get("jwt", None), ) - wes_client.cancel_run(run_id=document['wes_endpoint']['run_id']) + wes_client.cancel_run(run_id=document["wes_endpoint"]["run_id"]) - return {'run_id': run_id} + return {"run_id": run_id} def _create_run_environment( self, @@ -419,9 +416,7 @@ def _create_run_environment( charset=controller_config.id_charset, length=controller_config.id_length, ) - work_dir = Path( - controller_config.storage_path - ).resolve() / run_id + work_dir = Path(controller_config.storage_path).resolve() / run_id # try to create working directory try: @@ -464,20 +459,20 @@ def _process_attachments(self, work_dir: Path) -> List[Attachment]: List of `Attachment` model instances. """ attachments = [] - logger.warning(request.files) files = request.files.getlist("workflow_attachment") - logger.warning(f"FILES: {files}") for file in files: attachments.append( Attachment( filename=file.filename, object=file.stream, - path=str(work_dir / self._secure_filename( - name=Path(file.filename), - )) + path=str( + work_dir + / self._secure_filename( + name=Path(file.filename), + ) + ), ) ) - logger.warning(f"ATTACHMENTS: {attachments}") return attachments @staticmethod @@ -493,8 +488,7 @@ def _validate_run_request( dict_of_lists = form_data.to_dict(flat=False) # flatten single item lists dict_atomic = { - k: v[0] if len(v) == 1 else v for - k, v in dict_of_lists.items() + k: v[0] if len(v) == 1 else v for k, v in dict_of_lists.items() } # remove 'workflow_attachment' field dict_atomic.pop("workflow_attachment", None) @@ -524,7 +518,6 @@ def _save_attachments(attachments: List[Attachment]) -> None: attachments: List of `Attachment` model instances. 
""" files = request.files.getlist("workflow_attachment") - logger.warning(files) for attachment in attachments: with open(attachment.path, "wb") as dest: for file in files: diff --git a/pro_wes/gunicorn.py b/pro_wes/gunicorn.py index 271be88..a15c875 100644 --- a/pro_wes/gunicorn.py +++ b/pro_wes/gunicorn.py @@ -1,31 +1,32 @@ +"""Gunicorn entry point.""" + import os +from foca.models.config import Config # type: ignore + from pro_wes.app import init_app # Source application configuration -app_config = init_app().app.config.foca +app_config: Config = init_app().app.config.foca # Set Gunicorn number of workers and threads -workers = int(os.environ.get('GUNICORN_PROCESSES', '1')) -threads = int(os.environ.get('GUNICORN_THREADS', '1')) +workers = int(os.environ.get("GUNICORN_PROCESSES", "1")) +threads = int(os.environ.get("GUNICORN_THREADS", "1")) # Set allowed IPs -forwarded_allow_ips = '*' +forwarded_allow_ips = "*" # pylint: disable=invalid-name # Set Gunicorn bind address -bind = '{address}:{port}'.format( - address=app_config.server.host, - port=app_config.server.port, -) +bind = f"{app_config.server.host}:{app_config.server.port}" # Source environment variables for Gunicorn workers raw_env = [ - "WES_CONFIG=%s" % os.environ.get('WES_CONFIG', ''), - "RABBIT_HOST=%s" % os.environ.get('RABBIT_HOST', app_config.jobs.host), - "RABBIT_PORT=%s" % os.environ.get('RABBIT_PORT', app_config.jobs.port), - "MONGO_HOST=%s" % os.environ.get('MONGO_HOST', app_config.db.host), - "MONGO_PORT=%s" % os.environ.get('MONGO_PORT', app_config.db.port), - "MONGO_DBNAME=%s" % os.environ.get('MONGO_DBNAME', 'runStore'), - "MONGO_USERNAME=%s" % os.environ.get('MONGO_USERNAME', ''), - "MONGO_PASSWORD=%s" % os.environ.get('MONGO_PASSWORD', ''), + f'WES_CONFIG={os.environ.get("WES_CONFIG", "")}', + f'RABBIT_HOST={os.environ.get("RABBIT_HOST", app_config.jobs.host)}', + f'RABBIT_PORT={os.environ.get("RABBIT_PORT", app_config.jobs.port)}', + f'MONGO_HOST={os.environ.get("MONGO_HOST", app_config.db.host)}', + f'MONGO_PORT={os.environ.get("MONGO_PORT", app_config.db.port)}', + f'MONGO_DBNAME={os.environ.get("MONGO_DBNAME", "runStore")}', + f'MONGO_USERNAME={os.environ.get("MONGO_USERNAME", "")}', + f'MONGO_PASSWORD={os.environ.get("MONGO_PASSWORD", "")}', ] diff --git a/pro_wes/tasks/track_run_progress.py b/pro_wes/tasks/track_run_progress.py index 19eb393..9563c28 100644 --- a/pro_wes/tasks/track_run_progress.py +++ b/pro_wes/tasks/track_run_progress.py @@ -9,7 +9,7 @@ from foca.database.register_mongodb import _create_mongo_client from foca.models.config import Config -from flask import (Flask, current_app) +from flask import Flask, current_app from pro_wes.exceptions import ( EngineProblem, @@ -23,13 +23,13 @@ ) from pro_wes.utils.db import DbDocumentConnector from pro_wes.client_wes import WesClient -from pro_wes.worker import celery +from pro_wes.celery_worker import celery logger = logging.getLogger(__name__) @celery.task( - name='tasks.track_run_progress', + name="tasks.track_run_progress", bind=True, ignore_result=True, track_started=True, @@ -72,8 +72,8 @@ def task__track_run_progress( app=Flask(__name__), host=foca_config.db.host, port=foca_config.db.port, - db='runStore', - ).db['runs'] + db="runStore", + ).db["runs"] db_client = DbDocumentConnector( collection=collection, task_id=self.request.id, @@ -97,12 +97,12 @@ def task__track_run_progress( except EngineUnavailable: db_client.update_task_state(state=State.SYSTEM_ERROR.value) raise -# if not isinstance(response, RunLog): -# 
db_client.update_task_state(state=State.SYSTEM_ERROR.value) -# raise EngineProblem("Did not receive expected response.") + # if not isinstance(response, RunLog): + # db_client.update_task_state(state=State.SYSTEM_ERROR.value) + # raise EngineProblem("Did not receive expected response.") response.pop("request", None) document: DbDocument = db_client.upsert_fields_in_root_object( - root='run_log', + root="run_log", **response, ) @@ -145,12 +145,12 @@ def task__track_run_progress( except EngineUnavailable: db_client.update_task_state(state=State.SYSTEM_ERROR.value) raise -# if not isinstance(response, RunLog): -# db_client.update_task_state(state=State.SYSTEM_ERROR.value) -# raise EngineProblem("Did not receive expected response.") + # if not isinstance(response, RunLog): + # db_client.update_task_state(state=State.SYSTEM_ERROR.value) + # raise EngineProblem("Did not receive expected response.") response.pop("request", None) document = db_client.upsert_fields_in_root_object( - root='run_log', + root="run_log", **response, ) diff --git a/pro_wes/utils/db.py b/pro_wes/utils/db.py index 6f9bf44..b6cd2f8 100644 --- a/pro_wes/utils/db.py +++ b/pro_wes/utils/db.py @@ -18,7 +18,6 @@ class DbDocumentConnector: - def __init__( self, collection: Collection, @@ -36,7 +35,7 @@ def __init__( def get_document( self, - projection: Mapping = {'_id': False}, + projection: Mapping = {"_id": False}, ) -> DbDocument: """Get document associated with task. @@ -53,7 +52,7 @@ def get_document( ValueError: Returned document does not conform to schema. """ document_unvalidated = self.collection.find_one( - filter={'task_id': self.task_id}, + filter={"task_id": self.task_id}, projection=projection, ) try: @@ -67,7 +66,7 @@ def get_document( def update_task_state( self, - state: str = 'UNKNOWN', + state: str = "UNKNOWN", ) -> None: """Update task status. @@ -80,12 +79,10 @@ def update_task_state( try: State(state) except Exception as exc: - raise ValueError( - f"Unknown state: {state}" - ) from exc + raise ValueError(f"Unknown state: {state}") from exc self.collection.find_one_and_update( - {'task_id': self.task_id}, - {'$set': {'run_log.state': state}}, + {"task_id": self.task_id}, + {"$set": {"run_log.state": state}}, ) logger.info(f"[{self.task_id}] {state}") return None @@ -99,12 +96,14 @@ def upsert_fields_in_root_object( document. 
""" document_unvalidated = self.collection.find_one_and_update( - {'task_id': self.task_id}, - {'$set': { - '.'.join([root, key]): - value for (key, value) in kwargs.items() - }}, - return_document=ReturnDocument.AFTER + {"task_id": self.task_id}, + { + "$set": { + ".".join([root, key]): value + for (key, value) in kwargs.items() + } + }, + return_document=ReturnDocument.AFTER, ) try: document: DbDocument = DbDocument(**document_unvalidated) diff --git a/pro_wes/version.py b/pro_wes/version.py new file mode 100644 index 0000000..471bc6b --- /dev/null +++ b/pro_wes/version.py @@ -0,0 +1,3 @@ +"""Single source of truth for package version.""" + +__version__ = '0.18.0' diff --git a/pro_wes/worker.py b/pro_wes/worker.py deleted file mode 100644 index 445ba7a..0000000 --- a/pro_wes/worker.py +++ /dev/null @@ -1,8 +0,0 @@ -"""Entry point for Celery workers.""" - -from foca.factories.celery_app import create_celery_app - -from pro_wes.app import init_app - -flask_app = init_app().app -celery = create_celery_app(app=flask_app) diff --git a/pro_wes/wsgi.py b/pro_wes/wsgi.py index 1d0b04e..ac3c1e4 100644 --- a/pro_wes/wsgi.py +++ b/pro_wes/wsgi.py @@ -1,3 +1,5 @@ +"""WSGI entry point.""" + from pro_wes.app import init_app app = init_app() diff --git a/requirements.txt b/requirements.txt index bc66704..b64181b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -foca==0.9.0 -gunicorn==20.1.0 \ No newline at end of file +foca>=0.12.1 +gunicorn>=20.1.0,<21 diff --git a/requirements_dev.txt b/requirements_dev.txt index f313a66..7b09ced 100644 --- a/requirements_dev.txt +++ b/requirements_dev.txt @@ -1,7 +1,6 @@ -coverage==6.4 -coveralls==3.3.1 -flake8==4.0.1 -mongomock==4.0.0 -pylint==2.13.9 -pytest==7.1.2 -python-semantic-release==7.29.0 +coverage>=6.5,<7 +flake8>=5.0.4,<6 +mongomock>=4.1.2,<5 +pylint>=2.15.5,<3 +pytest>=7.2.0,<8 +python-semantic-release>=7.32.2,<8 diff --git a/setup.py b/setup.py index 3275acd..163ac7e 100644 --- a/setup.py +++ b/setup.py @@ -1,31 +1,47 @@ -from setuptools import (setup, find_packages) +"""Package setup.""" -with open('README.md', 'r') as fh: - long_description = fh.read() +from pathlib import Path +from setuptools import setup, find_packages + +root_dir = Path(__file__).parent.resolve() + +with open(root_dir / "pro_wes" / "version.py", encoding="utf-8") as _file: + exec(_file.read()) # pylint: disable=exec-used + +with open(root_dir / "README.md", encoding="utf-8") as _file: + LONG_DESCRIPTION = _file.read() + +with open(root_dir / "requirements.txt", encoding="utf-8") as _file: + INSTALL_REQUIRES = _file.read().splitlines() setup( - name='proWES', - version='0.14.0', - author='Elixir Europe', - author_email='alexander.kanitz@alumni.ethz.ch', - description='GA4GH WES proxy service', - long_description=long_description, + name='pro-wes', + version=__version__, # noqa: F821 # pylint: disable=undefined-variable + license="Apache License 2.0", + description="Proxy/gateway GA4GH WES service", + long_description=LONG_DESCRIPTION, long_description_content_type="text/markdown", - license='Apache License 2.0', - url='https://github.com/elixir-europe/proWES.git', + author="ELIXIR Cloud & AAI", + author_email="cloud-service@elixir-europe.org", + url='https://github.com/elixir-cloud-aai/proWES.git', + project_urls={ + "Repository": "https://github.com/elixir-cloud-aai/proWES", + "ELIXIR Cloud & AAI": "https://elixir-cloud.dcc.sib.swiss/", + "Tracker": "https://github.com/elixir-cloud-aai/proWES/issues", + }, packages=find_packages(), keywords=( - 'ga4gh wes 
workflow elixir rest restful api app server openapi ' - 'swagger mongodb python flask' + "ga4gh wes proxy rest restful api app server openapi " + "swagger mongodb python flask" ), classifiers=[ - 'License :: OSI Approved :: Apache Software License', - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Science/Research', - 'Topic :: Scientific/Engineering :: Bio-Informatics', - 'Natural Language :: English', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', + "License :: OSI Approved :: Apache Software License", + "Development Status :: 3 - Alpha", + "Intended Audience :: Science/Research", + "Topic :: Scientific/Engineering :: Bio-Informatics", + "Natural Language :: English", + "Programming Language :: Python :: 3.10", ], - install_requires=['connexion', 'Flask-Cors', 'Flask-PyMongo'], + install_requires=INSTALL_REQUIRES, + setup_requires=["setuptools_git>=1.2"], )