Commit
Merging staging branch into prod branch
Bento007 committed Apr 4, 2024
2 parents cc613f0 + 706dacc commit 5d99247
Showing 73 changed files with 277 additions and 364 deletions.
1 change: 1 addition & 0 deletions .github/workflows/deploy-happy-stack.yml
@@ -237,6 +237,7 @@ jobs:
npm ci
npx playwright install --with-deps
cp src/configs/local.js src/configs/configs.js
make retrieve-descendants
# Run e2e tests
- name: Run e2e tests
52 changes: 0 additions & 52 deletions .github/workflows/pull-latest-ontology-mappings.yml

This file was deleted.

1 change: 1 addition & 0 deletions .github/workflows/rdev-tests.yml
@@ -96,6 +96,7 @@ jobs:
npm ci
npx playwright install --with-deps
cp src/configs/local.js src/configs/configs.js
make retrieve-descendants
# Run e2e tests
- name: Run e2e tests
1 change: 1 addition & 0 deletions Dockerfile.backend
@@ -12,6 +12,7 @@ RUN apt-get update && \
# Don't re-run pip install unless requirements.txt has changed.
WORKDIR /single-cell-data-portal
COPY /python_dependencies/backend/ .
RUN python3 -m pip install --upgrade pip setuptools

# TODO: Determine if cmake is really needed for ddtrace
# see ticket: https://github.com/chanzuckerberg/single-cell-data-portal/issues/5821
18 changes: 8 additions & 10 deletions Dockerfile.processing
@@ -8,21 +8,19 @@ RUN apt-get update && \
apt-get upgrade -y && \
apt install software-properties-common -y && \
add-apt-repository ppa:deadsnakes/ppa && \
apt-get install -y python3.10 python3-pip

RUN update-alternatives --install /usr/bin/python python /usr/bin/python3.10 1 \
&& update-alternatives --install /usr/bin/pip pip /usr/bin/pip3 1
apt-get update && apt-get install -y python3.10 python3.10-dev python3.10-venv && \
wget https://bootstrap.pypa.io/get-pip.py && \
python3.10 get-pip.py

# For lighter weight Docker images
ENV PIP_NO_CACHE_DIR=1

# Install python dependencies
# Remove packaging dependency once pyparser>3 is supported.
RUN pip3 install python-igraph==0.8.3 louvain==0.7.0 packaging==21.0 awscli

# Activate virtual environment for subsequent commands
RUN python3.10 -m venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"

COPY /python_dependencies/processing/ .
RUN pip3 install -r requirements.txt
RUN python3.10 -m pip install -r requirements.txt

ADD backend/__init__.py backend/__init__.py
ADD backend/layers backend/layers
@@ -35,4 +33,4 @@ LABEL commit=${HAPPY_COMMIT}
ENV COMMIT_SHA=${HAPPY_COMMIT}
ENV COMMIT_BRANCH=${HAPPY_BRANCH}

CMD ["python3", "-m", "backend.layers.processing.process"]
CMD ["python3.10", "-m", "backend.layers.processing.process"]
54 changes: 27 additions & 27 deletions Makefile
@@ -86,7 +86,7 @@ local-ecr-login:

.PHONY: local-init-test-data
local-init-test-data:
docker-compose $(COMPOSE_OPTS) run --rm -T backend /bin/bash -c "pip3 install awscli && cd /single-cell-data-portal && scripts/setup_dev_data.sh"
docker compose $(COMPOSE_OPTS) run --rm -T backend /bin/bash -c "pip3 install awscli && cd /single-cell-data-portal && scripts/setup_dev_data.sh"

.PHONY: local-init-host
local-init-host: oauth/pkcs12/certificate.pfx .env.ecr local-ecr-login local-start
@@ -100,31 +100,31 @@ local-status: ## Show the status of the containers in the dev environment.

.PHONY: local-rebuild
local-rebuild: .env.ecr local-ecr-login ## Rebuild local dev without re-importing data
docker-compose $(COMPOSE_OPTS) build frontend backend processing wmg_processing database oidc localstack
docker-compose $(COMPOSE_OPTS) up -d frontend backend processing database oidc localstack
docker compose $(COMPOSE_OPTS) build frontend backend processing wmg_processing database oidc localstack
docker compose $(COMPOSE_OPTS) up -d frontend backend processing database oidc localstack

local-rebuild-backend: .env.ecr local-ecr-login
docker-compose $(COMPOSE_OPTS) build backend
docker compose $(COMPOSE_OPTS) build backend

local-rebuild-processing: .env.ecr local-ecr-login
docker-compose $(COMPOSE_OPTS) build processing
docker compose $(COMPOSE_OPTS) build processing

local-rebuild-wmg-processing: .env.ecr local-ecr-login
docker-compose $(COMPOSE_OPTS) build wmg_processing
docker compose $(COMPOSE_OPTS) build wmg_processing

local-rebuild-cellguide-pipeline: .env.ecr local-ecr-login
docker-compose $(COMPOSE_OPTS) build cellguide_pipeline
docker compose $(COMPOSE_OPTS) build cellguide_pipeline

.PHONY: local-sync
local-sync: local-rebuild local-init ## Re-sync the local-environment state after modifying library deps or docker configs

.PHONY: local-start
local-start: .env.ecr ## Start a local dev environment that's been stopped.
docker-compose $(COMPOSE_OPTS) up -d backend frontend database oidc localstack
docker compose $(COMPOSE_OPTS) up -d backend frontend database oidc localstack

.PHONY: local-stop
local-stop: ## Stop the local dev environment.
docker-compose stop frontend backend database oidc localstack
docker compose stop frontend backend database oidc localstack

.PHONY: local-clean
local-clean: ## Remove everything related to the local dev environment (including db data!)
@@ -136,64 +136,64 @@ local-clean: ## Remove everything related to the local dev environment (including db data!)
fi;
-rm -rf ./oauth/pkcs12/server*
-rm -rf ./oauth/pkcs12/certificate*
docker-compose rm -sf
docker compose rm -sf
-docker volume rm single-cell-data-portal_database
-docker volume rm single-cell-data-portal_localstack
-docker network rm single-cell-data-portal_corporanet
-docker network rm single-cell-data-portal_default

.PHONY: local-logs
local-logs: ## Tail the logs of the dev env containers. ex: make local-logs CONTAINER=backend
docker-compose logs -f $(CONTAINER)
docker compose logs -f $(CONTAINER)

.PHONY: local-shell
local-shell: ## Open a command shell in one of the dev containers. ex: make local-shell CONTAINER=frontend
docker-compose exec $(CONTAINER) bash
docker compose exec $(CONTAINER) bash

.PHONY: local-unit-test
local-unit-test: local-unit-test-backend local-unit-test-wmg-backend local-unit-test-wmg-processing local-unit-test-cellguide-pipeline local-unit-test-processing local-unit-test-cxg-admin
# Run all backend and processing unit tests in the dev environment, with code coverage

.PHONY: local-unit-test-backend
local-unit-test-backend:
docker-compose run --rm -T backend bash -c \
docker compose run --rm -T backend bash -c \
"cd /single-cell-data-portal && coverage run $(COVERAGE_RUN_ARGS) -m pytest --alluredir=./allure-results tests/unit/backend/layers/ tests/unit/backend/common/";

.PHONY: local-unit-test-wmg-backend
local-unit-test-wmg-backend:
docker-compose run --rm -T backend bash -c \
docker compose run --rm -T backend bash -c \
"cd /single-cell-data-portal && coverage run $(COVERAGE_RUN_ARGS) -m pytest --alluredir=./allure-results tests/unit/backend/wmg/";

.PHONY: local-integration-test-backend
local-integration-test-backend:
docker-compose run --rm -e INTEGRATION_TEST=true -e DB_URI=postgresql://corpora:test_pw@database -T backend \
docker compose run --rm -e INTEGRATION_TEST=true -e DB_URI=postgresql://corpora:test_pw@database -T backend \
bash -c "cd /single-cell-data-portal && coverage run $(COVERAGE_RUN_ARGS) -m pytest tests/unit/backend/layers/ tests/unit/backend/common/";

.PHONY: local-unit-test-processing
local-unit-test-processing: # Run processing-unittest target in `processing` Docker container
docker-compose $(COMPOSE_OPTS) run --rm -e DEV_MODE_COOKIES= -T processing \
docker compose $(COMPOSE_OPTS) run --rm -e DEV_MODE_COOKIES= -T processing \
bash -c "cd /single-cell-data-portal && coverage run $(COVERAGE_RUN_ARGS) -m pytest --alluredir=./allure-results tests/unit/processing/";

.PHONY: local-unit-test-wmg-processing
local-unit-test-wmg-processing: # Run processing-unittest target in `wmg_processing` Docker container
echo "Running all wmg processing unit tests"; \
docker-compose $(COMPOSE_OPTS) run --rm -e DEV_MODE_COOKIES= -T wmg_processing \
docker compose $(COMPOSE_OPTS) run --rm -e DEV_MODE_COOKIES= -T wmg_processing \
bash -c "cd /single-cell-data-portal && make wmg-processing-unittest;"

.PHONY: local-unit-test-cellguide-pipeline
local-unit-test-cellguide-pipeline: # Run processing-unittest target in `cellguide_pipeline` Docker container
echo "Running all cellguide pipeline unit tests"; \
docker-compose $(COMPOSE_OPTS) run --rm -e DEV_MODE_COOKIES= -T cellguide_pipeline \
docker compose $(COMPOSE_OPTS) run --rm -e DEV_MODE_COOKIES= -T cellguide_pipeline \
bash -c "cd /single-cell-data-portal && make cellguide-pipeline-unittest;"

.PHONY: local-unit-test-cxg-admin
local-unit-test-cxg-admin:
docker-compose run --rm -T backend bash -c \
docker compose run --rm -T backend bash -c \
"cd /single-cell-data-portal && coverage run $(COVERAGE_RUN_ARGS) -m pytest --alluredir=./allure-results tests/unit/scripts/";

.PHONY: local-smoke-test
local-smoke-test: ## Run frontend/e2e tests in the dev environment
docker-compose $(COMPOSE_OPTS) run --rm -T frontend make smoke-test-with-local-dev
docker compose $(COMPOSE_OPTS) run --rm -T frontend make smoke-test-with-local-dev


.PHONY: local-dbconsole
@@ -202,33 +202,33 @@ local-dbconsole: ## Connect to the local postgres database.

.PHONY: local-uploadjob
local-uploadjob: .env.ecr ## Run the upload task with a dataset_id and dropbox_url
docker-compose $(COMPOSE_OPTS) run --rm -T -e DATASET_ID=$(DATASET_ID) -e DROPBOX_URL=$(DROPBOX_URL) processing sh -c "rm -rf /local.* && python3 -m backend.corpora.dataset_processing.process"
docker compose $(COMPOSE_OPTS) run --rm -T -e DATASET_ID=$(DATASET_ID) -e DROPBOX_URL=$(DROPBOX_URL) processing sh -c "rm -rf /local.* && python3 -m backend.corpora.dataset_processing.process"

.PHONY: local-uploadfailure
local-uploadfailure: .env.ecr ## Run the upload failure lambda with a dataset id and cause
docker-compose $(COMPOSE_OPTS) up -d upload_failures
docker compose $(COMPOSE_OPTS) up -d upload_failures
curl -v -XPOST "http://127.0.0.1:9000/2015-03-31/functions/function/invocations" -d '{"dataset_id": "$(DATASET_ID)", "error": {"Cause": "$(CAUSE)"}}'

.PHONY: local-uploadsuccess
local-uploadsuccess: .env.ecr ## Run the upload success lambda with a dataset id and cause
docker-compose $(COMPOSE_OPTS) up -d upload_success
docker compose $(COMPOSE_OPTS) up -d upload_success
curl -v -XPOST "http://127.0.0.1:9001/2015-03-31/functions/function/invocations" -d '{"dataset_id": "$(DATASET_ID)"}'

.PHONY: local-cxguser-cookie
local-cxguser-cookie: ## Get cxguser-cookie
docker-compose $(COMPOSE_OPTS) run --rm backend bash -c "cd /single-cell-data-portal && python login.py"
docker compose $(COMPOSE_OPTS) run --rm backend bash -c "cd /single-cell-data-portal && python login.py"

.PHONY: coverage/combine
coverage/combine:
- docker-compose $(COMPOSE_OPTS) run --rm -T backend bash -c "cd /single-cell-data-portal && coverage combine --data-file=$(COVERAGE_DATA_FILE)"
- docker compose $(COMPOSE_OPTS) run --rm -T backend bash -c "cd /single-cell-data-portal && coverage combine --data-file=$(COVERAGE_DATA_FILE)"

.PHONY: coverage/report
coverage/report-xml: coverage/combine
docker-compose $(COMPOSE_OPTS) run --rm -T backend bash -c "cd /single-cell-data-portal && coverage xml --data-file=$(COVERAGE_DATA_FILE) -i --skip-empty"
docker compose $(COMPOSE_OPTS) run --rm -T backend bash -c "cd /single-cell-data-portal && coverage xml --data-file=$(COVERAGE_DATA_FILE) -i --skip-empty"

.PHONY: coverage/report
coverage/report-html: coverage/combine
docker-compose $(COMPOSE_OPTS) run --rm -T backend bash -c "cd /single-cell-data-portal && coverage html --data-file=$(COVERAGE_DATA_FILE) -i --skip-empty"
docker compose $(COMPOSE_OPTS) run --rm -T backend bash -c "cd /single-cell-data-portal && coverage html --data-file=$(COVERAGE_DATA_FILE) -i --skip-empty"

.PHONY: promote-staging-to-prod
promote-staging-to-prod:
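Note on the Makefile changes above: the targets switch from the standalone docker-compose v1 binary to the Docker Compose v2 plugin invocation (docker compose). The subcommands used here (build, up, run, exec, stop, rm, logs) are, to my knowledge, unchanged between the two, so the existing flags and $(COMPOSE_OPTS) usage carry over as-is.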
2 changes: 1 addition & 1 deletion backend/Makefile
@@ -90,7 +90,7 @@ db/local/load-schema:

db/dump_schema:
ifeq ($(DEPLOYMENT_STAGE),test)
docker-compose exec database pg_dump --schema-only --dbname=corpora --username corpora
docker compose exec database pg_dump --schema-only --dbname=corpora --username corpora
else
$(eval DB_PW = $(shell aws secretsmanager get-secret-value --secret-id corpora/backend/${DEPLOYMENT_STAGE}/database --region us-west-2 | jq -r '.SecretString | match(":([^:]*)@").captures[0].string'))
$(MAKE) db/tunnel/up
4 changes: 2 additions & 2 deletions backend/common/utils/json.py
@@ -20,10 +20,10 @@ def default(self, obj):


class CurationJSONEncoder(CustomJSONEncoder):
"Add support for serializing DateTime into isoformat"
"Add support for serializing DateTime into isoformat, to second precision"

def default(self, obj):
if isinstance(obj, datetime):
return obj.replace(tzinfo=time_zone_info).isoformat()
return obj.replace(microsecond=0, tzinfo=time_zone_info).isoformat()
else:
return super().default(obj)
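To illustrate the behavior change above, here is a minimal, self-contained sketch. It subclasses json.JSONEncoder directly and assumes a UTC time_zone_info for demonstration, whereas the real class extends the project's CustomJSONEncoder and uses the module's own tzinfo:

import json
from datetime import datetime, timezone

time_zone_info = timezone.utc  # assumption for this sketch only


class CurationJSONEncoder(json.JSONEncoder):
    "Add support for serializing DateTime into isoformat, to second precision"

    def default(self, obj):
        if isinstance(obj, datetime):
            # Dropping microseconds yields second-precision ISO 8601 timestamps.
            return obj.replace(microsecond=0, tzinfo=time_zone_info).isoformat()
        return super().default(obj)


print(json.dumps({"created_at": datetime(2023, 5, 10, 17, 22, 42, 123456)}, cls=CurationJSONEncoder))
# {"created_at": "2023-05-10T17:22:42+00:00"}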
Empty file.

This file was deleted.

This file was deleted.

This file was deleted.

27 changes: 0 additions & 27 deletions backend/common/utils/ontology_mappings/ontology_map_loader.py

This file was deleted.

12 changes: 6 additions & 6 deletions backend/curation/api/curation-api.yml
@@ -809,7 +809,7 @@ components:
description: The name of the primary contact for the Collection
type: string
created_at:
description: A timestamp of when the resource was created.
description: A timestamp of when the resource was created. The timestamp conforms to the ISO 8601 format to second precision. For example, 2023-05-10T17:22:42+00:00.
type: string
curator_name:
description: The name of the curator for the Collection.
@@ -820,26 +820,26 @@
nullable: true
type: string
published_at:
description: A timestamp of when the Collection was first published.
description: A timestamp of when the Collection was first published. The timestamp conforms to the ISO 8601 format to second precision. For example, 2023-05-10T17:22:42+00:00.
nullable: true
type: string
dataset_published_at:
description: A timestamp of when the Dataset was first published.
description: A timestamp of when the Dataset was first published. The timestamp conforms to the ISO 8601 format to second precision. For example, 2023-05-10T17:22:42+00:00.
nullable: true
type: string
dataset_version_published_at:
description: A timestamp of when this Dataset Version was published.
description: A timestamp of when this Dataset Version was published. The timestamp conforms to the ISO 8601 format to second precision. For example, 2023-05-10T17:22:42+00:00.
type: string
collection_revised_at:
description: A timestamp indicating the last time a Revision for this Collection was published.
nullable: true
type: string
dataset_revised_at:
description: A timestamp indicating the last time a Revision for this Dataset was published.
description: A timestamp indicating the last time a Revision for this Dataset was published. The timestamp conforms to the ISO 8601 format to second precision. For example, 2023-05-10T17:22:42+00:00.
nullable: true
type: string
collection_version_published_at:
description: A timestamp of when this Collection Version was published.
description: A timestamp of when this Collection Version was published. The timestamp conforms to the ISO 8601 format to second precision. For example, 2023-05-10T17:22:42+00:00.
type: string
collection_revising_in:
type: string
4 changes: 3 additions & 1 deletion backend/layers/thirdparty/crossref_provider.py
@@ -126,7 +126,9 @@ def fetch_metadata(self, doi: str) -> Tuple[Optional[dict], Optional[str]]:
parsed_authors = []
for author in authors:
if "given" in author and "family" in author:
parsed_authors.append({"given": author["given"], "family": author["family"]})
parsed_author = {"given": author["given"], "family": author["family"]}
if parsed_author not in parsed_authors:
parsed_authors.append(parsed_author)
elif "family" in author:
# Assume family is consortium
parsed_authors.append({"name": author["family"]})