diff --git a/README.md b/README.md index 99547aef3..cea90b067 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ ## Local Development

-A combination of Vagrant 2.2+ and Ansible 2.5+ is used to setup the development environment for this project. The project consists of the following virtual machines:
+A combination of Vagrant 2.2+ and Ansible 2.8 is used to set up the development environment for this project. The project consists of the following virtual machines:

- `app`
- `services`

@@ -32,10 +32,16 @@ First, ensure that you have a set of Amazon Web Services (AWS) credentials with

$ aws configure --profile mmw-stg
```

+Ensure you have the [vagrant-disksize](https://github.com/sprotheroe/vagrant-disksize) plugin installed:
+
+```bash
+$ vagrant plugin install vagrant-disksize
+```
+
Next, use the following command to bring up a local development environment:

```bash
-$ MMW_ITSI_SECRET_KEY="***" vagrant up
+$ vagrant up
```

The application will now be running at [http://localhost:8000](http://localhost:8000).

@@ -130,6 +136,12 @@ $ vagrant ssh worker -c 'sudo service celeryd restart'

To enable the geoprocessing cache simply set it to `1` and restart the `celeryd` service.

+In some cases, it may be necessary to remove all cached values. This can be done with:
+
+```bash
+$ vagrant ssh services -c 'redis-cli -n 1 --raw KEYS ":1:geop_*" | xargs redis-cli -n 1 DEL'
+```
+
### Test Mode

In order to run the app in test mode, which simulates the production static asset bundle, reprovision with `VAGRANT_ENV=TEST vagrant provision`.

diff --git a/Vagrantfile b/Vagrantfile index 8743f579f..060245fa9 100644 --- a/Vagrantfile +++ b/Vagrantfile @@ -3,6 +3,16 @@

Vagrant.require_version ">= 2.2"

+# Install vagrant-disksize to allow resizing the services VM.
+unless Vagrant.has_plugin?("vagrant-disksize")
+  raise Vagrant::Errors::VagrantError.new, "vagrant-disksize plugin is missing. Please install it using 'vagrant plugin install vagrant-disksize' and rerun 'vagrant up'"
+end
+
+# We need to stay on Ansible 2.8 because the version_compare filter was removed
+# in 2.9.
+# https://github.com/ansible/ansible/issues/64174#issuecomment-548639160
+ANSIBLE_VERSION = "2.8.*"
+
if ["up", "provision", "status"].include?(ARGV.first)
  require_relative "vagrant/ansible_galaxy_helper"

@@ -32,40 +42,69 @@ else
  VAGRANT_NETWORK_OPTIONS = { auto_correct: false }
end

+MMW_EXTRA_VARS = {
+  django_test_database: ENV["MMW_TEST_DB_NAME"] || "test_mmw",
+  services_ip: ENV["MMW_SERVICES_IP"] || "33.33.34.30",
+  tiler_host: ENV["MMW_TILER_IP"] || "33.33.34.35",
+  itsi_secret_key: ENV["MMW_ITSI_SECRET_KEY"],
+  concord_secret_key: ENV["MMW_CONCORD_SECRET_KEY"],
+  hydroshare_secret_key: ENV["MMW_HYDROSHARE_SECRET_KEY"],
+  srat_catchment_api_key: ENV["MMW_SRAT_CATCHMENT_API_KEY"],
+  tilecache_bucket_name: ENV["MMW_TILECACHE_BUCKET"] || "",
+}
+
Vagrant.configure("2") do |config|
- config.vm.box = "bento/ubuntu-16.04"
+ config.vm.box = "bento/ubuntu-20.04"

  config.vm.define "services" do |services|
    services.vm.hostname = "services"
- services.vm.network "private_network", ip: ENV.fetch("MMW_SERVICES_IP", "33.33.34.30")
+ services.vm.network "private_network", ip: ENV["MMW_SERVICES_IP"] || "33.33.34.30"
+ services.disksize.size = '64GB'

    # PostgreSQL
- services.vm.network "forwarded_port", {
+ services.vm.network "forwarded_port", **{
      guest: 5432,
      host: 5432
    }.merge(VAGRANT_NETWORK_OPTIONS)

    # Redis
- services.vm.network "forwarded_port", {
+ services.vm.network "forwarded_port", **{
      guest: 6379,
      host: 6379
    }.merge(VAGRANT_NETWORK_OPTIONS)

    services.vm.provider "virtualbox" do |v|
      v.customize ["guestproperty", "set", :id, "/VirtualBox/GuestAdd/VBoxService/--timesync-set-threshold", 10000 ]
- v.memory = 2048
+ v.memory = 4096
+ v.cpus = 4
    end

- services.vm.provision "ansible" do |ansible|
+ services.vm.provision "ansible_local" do |ansible|
      ansible.compatibility_mode = "2.0"
+ ansible.install_mode = "pip_args_only"
+ ansible.pip_install_cmd = "sudo apt-get install -y python3-distutils && curl https://bootstrap.pypa.io/get-pip.py | sudo python3"
+ ansible.pip_args = "ansible==#{ANSIBLE_VERSION}"
      ansible.playbook = "deployment/ansible/services.yml"
      ansible.groups = ANSIBLE_GROUPS.merge(ANSIBLE_ENV_GROUPS)
      ansible.raw_arguments = ["--timeout=60"]
+ ansible.extra_vars = MMW_EXTRA_VARS
+ end
+
+ services.vm.provision "shell" do |s|
+ # The base box we use comes with a ~30GB disk. The physical disk is
+ # expanded to 64GB above using vagrant-disksize. The logical disk and
+ # the file system need to be expanded as well to make full use of the
+ # space. `lvextend` expands the logical disk, and `resize2fs` expands
+ # the file system.
+ s.inline = <<-SHELL
+   sudo lvextend -l +100%FREE /dev/ubuntu-vg/ubuntu-lv
+   sudo resize2fs /dev/mapper/ubuntu--vg-ubuntu--lv
+ SHELL
    end
  end

  config.vm.define "worker" do |worker|
    worker.vm.hostname = "worker"
- worker.vm.network "private_network", ip: ENV.fetch("MMW_WORKER_IP", "33.33.34.20")
+ worker.vm.network "private_network", ip: ENV["MMW_WORKER_IP"] || "33.33.34.20"

    worker.vm.synced_folder "src/mmw", "/opt/app"

    # Facilitates the sharing of Django media root directories across virtual machines.
@@ -73,18 +112,18 @@ Vagrant.configure("2") do |config|
      create: true

    # Path to RWD data (ex.
/media/passport/rwd-nhd) - worker.vm.synced_folder ENV.fetch("RWD_DATA", "/tmp"), "/opt/rwd-data" + worker.vm.synced_folder ENV["RWD_DATA"] || "/tmp", "/opt/rwd-data" # AWS worker.vm.synced_folder "~/.aws", "/var/lib/mmw/.aws" # Docker - worker.vm.network "forwarded_port", { + worker.vm.network "forwarded_port", **{ guest: 2375, host: 2375 }.merge(VAGRANT_NETWORK_OPTIONS) # Geoprocessing Service - worker.vm.network "forwarded_port", { + worker.vm.network "forwarded_port", **{ guest: 8090, host: 8090 }.merge(VAGRANT_NETWORK_OPTIONS) @@ -95,17 +134,21 @@ Vagrant.configure("2") do |config| v.cpus = 2 end - worker.vm.provision "ansible" do |ansible| + worker.vm.provision "ansible_local" do |ansible| ansible.compatibility_mode = "2.0" + ansible.install_mode = "pip_args_only" + ansible.pip_install_cmd = "sudo apt-get install -y python3-distutils && curl https://bootstrap.pypa.io/get-pip.py | sudo python3" + ansible.pip_args = "ansible==#{ANSIBLE_VERSION}" ansible.playbook = "deployment/ansible/workers.yml" ansible.groups = ANSIBLE_GROUPS.merge(ANSIBLE_ENV_GROUPS) ansible.raw_arguments = ["--timeout=60"] + ansible.extra_vars = MMW_EXTRA_VARS end end config.vm.define "app" do |app| app.vm.hostname = "app" - app.vm.network "private_network", ip: ENV.fetch("MMW_APP_IP", "33.33.34.10") + app.vm.network "private_network", ip: ENV["MMW_APP_IP"] || "33.33.34.10" app.vm.synced_folder "src/mmw", "/opt/app" # Facilitates the sharing of Django media root directories across virtual machines. @@ -113,17 +156,17 @@ Vagrant.configure("2") do |config| create: true, mount_options: ["dmode=777"] # Django via Nginx/Gunicorn - app.vm.network "forwarded_port", { + app.vm.network "forwarded_port", **{ guest: 80, host: 8000 }.merge(VAGRANT_NETWORK_OPTIONS) # Livereload server - app.vm.network "forwarded_port", { + app.vm.network "forwarded_port", **{ guest: 35729, host: 35729, }.merge(VAGRANT_NETWORK_OPTIONS) # Testem server - app.vm.network "forwarded_port", { + app.vm.network "forwarded_port", **{ guest: 7357, host: 7357 }.merge(VAGRANT_NETWORK_OPTIONS) @@ -135,22 +178,26 @@ Vagrant.configure("2") do |config| v.memory = 2048 end - app.vm.provision "ansible" do |ansible| + app.vm.provision "ansible_local" do |ansible| ansible.compatibility_mode = "2.0" + ansible.install_mode = "pip_args_only" + ansible.pip_install_cmd = "sudo apt-get install -y python3-distutils && curl https://bootstrap.pypa.io/get-pip.py | sudo python3" + ansible.pip_args = "ansible==#{ANSIBLE_VERSION}" ansible.playbook = "deployment/ansible/app-servers.yml" ansible.groups = ANSIBLE_GROUPS.merge(ANSIBLE_ENV_GROUPS) ansible.raw_arguments = ["--timeout=60"] + ansible.extra_vars = MMW_EXTRA_VARS end end config.vm.define "tiler" do |tiler| tiler.vm.hostname = "tiler" - tiler.vm.network "private_network", ip: ENV.fetch("MMW_TILER_IP", "33.33.34.35") + tiler.vm.network "private_network", ip: ENV["MMW_TILER_IP"] || "33.33.34.35" tiler.vm.synced_folder "src/tiler", "/opt/tiler" # Expose the tiler. Tiler is served by Nginx. 
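Every forwarded port above follows the same `**{ ... }.merge(VAGRANT_NETWORK_OPTIONS)` pattern, and the tiler adds the last mapping (host port 4000) just below. A minimal stdlib sketch for spot-checking which forwards are live after `vagrant up`; the name/port pairs are assumptions taken from the defaults in this diff and will differ if the `MMW_*` environment variables or `VAGRANT_NETWORK_OPTIONS` override them:

```python
import socket

# Assumed defaults from the Vagrantfile port forwards in this diff.
FORWARDS = {
    "postgresql (services)": 5432,
    "redis (services)": 6379,
    "docker (worker)": 2375,
    "geoprocessing (worker)": 8090,
    "django (app)": 8000,
    "tiler": 4000,
}

for name, port in FORWARDS.items():
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.settimeout(2)
        # connect_ex returns 0 when the host-side forward accepts connections.
        state = "open" if sock.connect_ex(("localhost", port)) == 0 else "closed"
        print(f"{name:24} localhost:{port:<5} {state}")
```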
- tiler.vm.network "forwarded_port", { + tiler.vm.network "forwarded_port", **{ guest: 80, host: 4000 }.merge(VAGRANT_NETWORK_OPTIONS) @@ -159,11 +206,15 @@ Vagrant.configure("2") do |config| v.memory = 1024 end - tiler.vm.provision "ansible" do |ansible| + tiler.vm.provision "ansible_local" do |ansible| ansible.compatibility_mode = "2.0" + ansible.install_mode = "pip_args_only" + ansible.pip_install_cmd = "sudo apt-get install -y python3-distutils && curl https://bootstrap.pypa.io/get-pip.py | sudo python3" + ansible.pip_args = "ansible==#{ANSIBLE_VERSION}" ansible.playbook = "deployment/ansible/tile-servers.yml" ansible.groups = ANSIBLE_GROUPS.merge(ANSIBLE_ENV_GROUPS) ansible.raw_arguments = ["--timeout=60"] + ansible.extra_vars = MMW_EXTRA_VARS end end end diff --git a/deployment/ansible/group_vars/all b/deployment/ansible/group_vars/all index 098924559..c353cfa55 100644 --- a/deployment/ansible/group_vars/all +++ b/deployment/ansible/group_vars/all @@ -1,9 +1,4 @@ --- -pip_get_pip_version: "2.7" -pip_version: "20.3.*" - -django_test_database: "{{ lookup('env', 'MMW_TEST_DB_NAME') | default('test_mmw', true) }}" - redis_port: 6379 postgresql_port: 5432 @@ -19,43 +14,46 @@ postgresql_username: mmw postgresql_password: mmw postgresql_database: mmw -postgresql_version: "9.6" -postgresql_package_version: "9.6.*.pgdg16.04+1" +postgresql_version: "13" +postgresql_package_version: "13.*.pgdg20.04+1" postgresql_support_repository_channel: "main" -postgresql_support_libpq_version: "13.*.pgdg16.04+1" -postgresql_support_psycopg2_version: "2.7" -postgis_version: "2.3" -postgis_package_version: "2.3.*.pgdg16.04+1" +postgresql_support_libpq_version: "13.*.pgdg20.04+1" +postgresql_support_psycopg2_version: "2.8.*" +postgis_version: "3" +postgis_package_version: "3.2*pgdg20.04+1" -daemontools_version: "1:0.76-6ubuntu1" +daemontools_version: "1:0.76-7" -python_version: "2.7.12-1~16.04" +python_version: "3.8.*" +ansible_python_interpreter: "/usr/bin/python3" yarn_version: "1.19.*" app_nodejs_version: "12.11.1" app_nodejs_npm_version: "6.9.0" tiler_nodejs_version: "10.16.0" -tiler_nodejs_npm_version: "6.9.0" +tiler_nodejs_npm_version: "7.20.6" java_version: "8u*" java_major_version: "8" java_flavor: "openjdk" -docker_version: "5:18.*" -docker_compose_version: "1.23.*" +docker_version: "5:19.*" +docker_python_version: "4.4.*" +docker_compose_version: "1.26.*" geop_host: "localhost" geop_port: 8090 -geop_version: "4.0.3" +geop_version: "5.2.0" geop_cache_enabled: 1 +geop_timeout: 200 nginx_cache_dir: "/var/cache/nginx" enabled_features: '' -llvmlite_version: "0.31.0" -numba_version: "0.38.1" +llvmlite_version: "0.37.0" +numba_version: "0.54.0" phantomjs_version: "2.1.*" -redis_version: "2:3.0.6-1ubuntu0.*" +redis_version: "5:5.0.*" diff --git a/deployment/ansible/group_vars/development b/deployment/ansible/group_vars/development index 7743aba81..546c00065 100644 --- a/deployment/ansible/group_vars/development +++ b/deployment/ansible/group_vars/development @@ -9,28 +9,18 @@ postgresql_hba_mapping: - { type: "host", database: "all", user: "all", address: "33.33.34.1/24", method: "md5" } - { type: "host", database: "all", user: "all", address: "10.0.2.0/24", method: "md5" } -services_ip: "{{ lookup('env', 'MMW_SERVICES_IP') | default('33.33.34.30', true) }}" - redis_host: "{{ services_ip }}" postgresql_host: "{{ services_ip }}" -tiler_host: "{{ lookup('env', 'MMW_TILER_IP') | default('33.33.34.35', true) }}" celery_log_level: "DEBUG" celery_number_of_workers: 2 celery_processes_per_worker: 1 itsi_base_url: 
"https://learn.staging.concord.org/" -itsi_secret_key: "{{ lookup('env', 'MMW_ITSI_SECRET_KEY') }}" - -concord_secret_key: "{{ lookup('env', 'MMW_CONCORD_SECRET_KEY') }}" hydroshare_base_url: "https://beta.hydroshare.org/" -hydroshare_secret_key: "{{ lookup('env', 'MMW_HYDROSHARE_SECRET_KEY') }}" srat_catchment_api_url: "https://802or9kkk2.execute-api.us-east-2.amazonaws.com/prod/SratRunModel_DEV" -srat_catchment_api_key: "{{ lookup('env', 'MMW_SRAT_CATCHMENT_API_KEY') }}" - -tilecache_bucket_name: "{{ lookup('env', 'MMW_TILECACHE_BUCKET') | default('', true) }}" docker_options: "-H tcp://0.0.0.0:2375 -H unix:///var/run/docker.sock" diff --git a/deployment/ansible/group_vars/test b/deployment/ansible/group_vars/test index 9a8db0a6d..03da04a25 100644 --- a/deployment/ansible/group_vars/test +++ b/deployment/ansible/group_vars/test @@ -7,25 +7,17 @@ postgresql_listen_addresses: "*" postgresql_hba_mapping: - { type: "host", database: "all", user: "all", address: "33.33.34.1/24", method: "md5" } -services_ip: "{{ lookup('env', 'MMW_SERVICES_IP') | default('33.33.34.30', true) }}" - redis_host: "{{ services_ip }}" postgresql_host: "{{ services_ip }}" -tiler_host: "{{ lookup('env', 'MMW_TILER_IP') | default('33.33.34.35', true) }}" celery_number_of_workers: 2 celery_processes_per_worker: 1 itsi_base_url: "https://learn.staging.concord.org/" -itsi_secret_key: "{{ lookup('env', 'MMW_ITSI_SECRET_KEY') }}" - -concord_secret_key: "{{ lookup('env', 'MMW_CONCORD_SECRET_KEY') }}" hydroshare_base_url: "https://beta.hydroshare.org/" -hydroshare_secret_key: "{{ lookup('env', 'MMW_HYDROSHARE_SECRET_KEY') }}" srat_catchment_api_url: "https://802or9kkk2.execute-api.us-east-2.amazonaws.com/prod/SratRunModel_DEV" -srat_catchment_api_key: "{{ lookup('env', 'MMW_SRAT_CATCHMENT_API_KEY') }}" tilecache_bucket_name: "tile-cache.staging.app.wikiwatershed.org" diff --git a/deployment/ansible/roles.yml b/deployment/ansible/roles.yml index 6926b7e7a..b1bdf8f5c 100644 --- a/deployment/ansible/roles.yml +++ b/deployment/ansible/roles.yml @@ -1,7 +1,5 @@ - src: azavea.ntp version: 0.1.1 -- src: azavea.pip - version: 2.0.1 - src: azavea.nodejs version: 0.3.0 - src: azavea.yarn @@ -9,17 +7,13 @@ - src: azavea.git version: 0.1.0 - src: azavea.nginx - version: 0.3.1 + version: 1.0.0 - src: azavea.daemontools version: 0.1.0 -- src: azavea.postgresql-support - version: 0.3.0 - src: azavea.postgresql version: 1.0.0 - src: azavea.postgis version: 0.3.0 -- src: azavea.python - version: 0.1.0 - src: azavea.redis version: 0.1.0 - src: azavea.phantomjs @@ -27,6 +21,6 @@ - src: azavea.build-essential version: 0.1.0 - src: azavea.java - version: 0.6.2 + version: 0.7.0 - src: azavea.docker version: 6.0.0 diff --git a/deployment/ansible/roles/model-my-watershed.app/defaults/main.yml b/deployment/ansible/roles/model-my-watershed.app/defaults/main.yml index e58189d25..e7bae19df 100644 --- a/deployment/ansible/roles/model-my-watershed.app/defaults/main.yml +++ b/deployment/ansible/roles/model-my-watershed.app/defaults/main.yml @@ -10,7 +10,7 @@ app_config: DJANGO_POSTGIS_VERSION: "{{ app_postgis_version }}" DJANGO_SECRET_KEY: "{{ app_secret_key }}" -app_postgis_version: 2.1.3 +app_postgis_version: 3.1.4 app_secret_key: "{{ postgresql_password | md5 }}" app_static_root: /var/www/mmw/static/ diff --git a/deployment/ansible/roles/model-my-watershed.app/meta/main.yml b/deployment/ansible/roles/model-my-watershed.app/meta/main.yml index d3ddee1ef..f7eb12483 100644 --- a/deployment/ansible/roles/model-my-watershed.app/meta/main.yml +++ 
b/deployment/ansible/roles/model-my-watershed.app/meta/main.yml @@ -1,11 +1,8 @@ --- dependencies: - { role: "model-my-watershed.base" } - - { role: "azavea.python", python_development: True } - - { role: "azavea.pip" } - { role: "azavea.yarn" } - { role: "azavea.nodejs", nodejs_version: "{{ app_nodejs_version }}", nodejs_npm_version: "{{ app_nodejs_npm_version }}" } - { role: "azavea.phantomjs" } - - { role: "azavea.build-essential" } - { role: "model-my-watershed.celery" } - { role: "azavea.nginx", nginx_delete_default_site: True } diff --git a/deployment/ansible/roles/model-my-watershed.app/tasks/dependencies.yml b/deployment/ansible/roles/model-my-watershed.app/tasks/dependencies.yml index 0ebd955cd..2ad46c371 100644 --- a/deployment/ansible/roles/model-my-watershed.app/tasks/dependencies.yml +++ b/deployment/ansible/roles/model-my-watershed.app/tasks/dependencies.yml @@ -6,7 +6,7 @@ - { name: "numba", version: "{{ numba_version }}" } - name: Install application Python dependencies for development and test - pip: requirements="{{ app_home }}/requirements/{{ item }}.txt" + pip: requirements="{{ app_home }}/requirements/{{ item }}.txt" state=latest with_items: - development - test diff --git a/deployment/ansible/roles/model-my-watershed.app/tasks/dev-and-test-dependencies.yml b/deployment/ansible/roles/model-my-watershed.app/tasks/dev-and-test-dependencies.yml index dc1139e74..f1df9d8d1 100644 --- a/deployment/ansible/roles/model-my-watershed.app/tasks/dev-and-test-dependencies.yml +++ b/deployment/ansible/roles/model-my-watershed.app/tasks/dev-and-test-dependencies.yml @@ -1,6 +1,6 @@ --- - name: Install Firefox for UI tests - apt: pkg="firefox=8*" state=present + apt: pkg="firefox=9*" state=present - name: Install Xvfb for JavaScript tests - apt: pkg="xvfb=2:1.18.*" state=present + apt: pkg="xvfb=2:1.20.*" state=present diff --git a/deployment/ansible/roles/model-my-watershed.base/defaults/main.yml b/deployment/ansible/roles/model-my-watershed.base/defaults/main.yml index 94f3b85a0..57a72e8fa 100644 --- a/deployment/ansible/roles/model-my-watershed.base/defaults/main.yml +++ b/deployment/ansible/roles/model-my-watershed.base/defaults/main.yml @@ -13,6 +13,7 @@ envdir_config: MMW_GEOPROCESSING_HOST: "{{ geop_host }}" MMW_GEOPROCESSING_PORT: "{{ geop_port }}" MMW_GEOPROCESSING_VERSION: "{{ geop_version }}" + MMW_GEOPROCESSING_TIMEOUT: "{{ geop_timeout }}" MMW_ITSI_CLIENT_ID: "{{ itsi_client_id }}" MMW_ITSI_SECRET_KEY: "{{ itsi_secret_key }}" MMW_ITSI_BASE_URL: "{{ itsi_base_url }}" diff --git a/deployment/ansible/roles/model-my-watershed.base/meta/main.yml b/deployment/ansible/roles/model-my-watershed.base/meta/main.yml index 509d7f2e3..129f99906 100644 --- a/deployment/ansible/roles/model-my-watershed.base/meta/main.yml +++ b/deployment/ansible/roles/model-my-watershed.base/meta/main.yml @@ -2,5 +2,7 @@ dependencies: - { role: "azavea.ntp" } - { role: "azavea.git" } + - { role: "azavea.build-essential" } - { role: "azavea.daemontools" } - - { role: "azavea.postgresql-support" } + - { role: "model-my-watershed.python" } + - { role: "model-my-watershed.postgresql-support" } diff --git a/deployment/ansible/roles/model-my-watershed.base/tasks/dependencies.yml b/deployment/ansible/roles/model-my-watershed.base/tasks/dependencies.yml index 972da24a4..ac63e46cb 100644 --- a/deployment/ansible/roles/model-my-watershed.base/tasks/dependencies.yml +++ b/deployment/ansible/roles/model-my-watershed.base/tasks/dependencies.yml @@ -1,15 +1,15 @@ --- - name: Install Geospatial libraries apt: - pkg: 
["binutils=2.26*", - "libproj-dev=4.9.*", - "gdal-bin=1.11.*", - "libgdal1-dev=1.11.*"] + pkg: ["binutils=2.34*", + "libproj-dev=6.3.*", + "gdal-bin=3.0.*", + "libgdal-dev=3.0.*"] state: present when: "['tile-servers'] | is_not_in(group_names)" - name: Configure the main PostgreSQL APT repository - apt_repository: repo="deb http://apt.postgresql.org/pub/repos/apt/ {{ ansible_distribution_release}}-pgdg main" + apt_repository: repo="deb [arch=amd64] https://apt-archive.postgresql.org/pub/repos/apt/ {{ ansible_distribution_release }}-pgdg-archive {{ postgresql_support_repository_channel }}" state=present - name: Install PostgreSQL client diff --git a/deployment/ansible/roles/model-my-watershed.base/tasks/papertrail.yml b/deployment/ansible/roles/model-my-watershed.base/tasks/papertrail.yml index de27f9ebf..82eac75f5 100644 --- a/deployment/ansible/roles/model-my-watershed.base/tasks/papertrail.yml +++ b/deployment/ansible/roles/model-my-watershed.base/tasks/papertrail.yml @@ -2,7 +2,7 @@ get_url: url: https://papertrailapp.com/tools/papertrail-bundle.pem dest: /etc/papertrail-bundle.pem - checksum: sha256:79ea479e9f329de7075c40154c591b51eb056d458bc4dff76d9a4b9c6c4f6d0b + checksum: sha256:ae31ecb3c6e9ff3154cb7a55f017090448f88482f0e94ac927c0c67a1f33b9cf - name: Install rsyslog TLS utils apt: name=rsyslog-gnutls diff --git a/deployment/ansible/roles/model-my-watershed.celery-worker/defaults/main.yml b/deployment/ansible/roles/model-my-watershed.celery-worker/defaults/main.yml index b0cd0d5ce..5ef8a270f 100644 --- a/deployment/ansible/roles/model-my-watershed.celery-worker/defaults/main.yml +++ b/deployment/ansible/roles/model-my-watershed.celery-worker/defaults/main.yml @@ -10,7 +10,7 @@ app_config: DJANGO_POSTGIS_VERSION: "{{ app_postgis_version }}" DJANGO_SECRET_KEY: "{{ app_secret_key }}" -app_postgis_version: 2.1.3 +app_postgis_version: 3.1.4 app_secret_key: "{{ postgresql_password | md5 }}" celery_pid_dir: "/run/celery" diff --git a/deployment/ansible/roles/model-my-watershed.celery-worker/meta/main.yml b/deployment/ansible/roles/model-my-watershed.celery-worker/meta/main.yml index ed09e5f8a..4405de2d1 100644 --- a/deployment/ansible/roles/model-my-watershed.celery-worker/meta/main.yml +++ b/deployment/ansible/roles/model-my-watershed.celery-worker/meta/main.yml @@ -1,7 +1,4 @@ --- dependencies: - { role: "model-my-watershed.base" } - - { role: "azavea.python", python_development: True } - - { role: "azavea.pip" } - - { role: "azavea.build-essential" } - { role: "model-my-watershed.celery" } diff --git a/deployment/ansible/roles/model-my-watershed.celery-worker/tasks/dependencies.yml b/deployment/ansible/roles/model-my-watershed.celery-worker/tasks/dependencies.yml index a55296f95..52692ab16 100644 --- a/deployment/ansible/roles/model-my-watershed.celery-worker/tasks/dependencies.yml +++ b/deployment/ansible/roles/model-my-watershed.celery-worker/tasks/dependencies.yml @@ -6,7 +6,7 @@ - { name: "numba", version: "{{ numba_version }}" } - name: Install application Python dependencies for development and test - pip: requirements="{{ app_home }}/requirements/{{ item }}.txt" + pip: requirements="{{ app_home }}/requirements/{{ item }}.txt" state=latest with_items: - development - test diff --git a/deployment/ansible/roles/model-my-watershed.celery/defaults/main.yml b/deployment/ansible/roles/model-my-watershed.celery/defaults/main.yml index 7d8dddc3c..23cdc3f8d 100644 --- a/deployment/ansible/roles/model-my-watershed.celery/defaults/main.yml +++ 
b/deployment/ansible/roles/model-my-watershed.celery/defaults/main.yml @@ -1,2 +1,2 @@ --- -celery_version: 4.1.0 +celery_version: 5.2.0 diff --git a/deployment/ansible/roles/model-my-watershed.celery/meta/main.yml b/deployment/ansible/roles/model-my-watershed.celery/meta/main.yml deleted file mode 100644 index 27dd85b12..000000000 --- a/deployment/ansible/roles/model-my-watershed.celery/meta/main.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -dependencies: - - { role: "azavea.pip" } diff --git a/deployment/ansible/roles/model-my-watershed.celery/tasks/main.yml b/deployment/ansible/roles/model-my-watershed.celery/tasks/main.yml index 3908345f4..26213b088 100644 --- a/deployment/ansible/roles/model-my-watershed.celery/tasks/main.yml +++ b/deployment/ansible/roles/model-my-watershed.celery/tasks/main.yml @@ -2,5 +2,4 @@ - name: Install Celery pip: name="{{ item.name }}" version={{ item.version }} state=present with_items: - - { name: "kombu", version: "4.1.0" } - { name: "celery[redis]", version: "{{ celery_version }}" } diff --git a/deployment/ansible/roles/model-my-watershed.geoprocessing/templates/systemd-geoprocessing.service.j2 b/deployment/ansible/roles/model-my-watershed.geoprocessing/templates/systemd-geoprocessing.service.j2 index c4a5f5f98..dbd6ec82b 100644 --- a/deployment/ansible/roles/model-my-watershed.geoprocessing/templates/systemd-geoprocessing.service.j2 +++ b/deployment/ansible/roles/model-my-watershed.geoprocessing/templates/systemd-geoprocessing.service.j2 @@ -4,7 +4,9 @@ After=network.target [Service] {% if ['development', 'test'] | some_are_in(group_names) -%} -Environment=AWS_PROFILE={{ aws_profile }} +Environment=MMW_GEOPROCESSING_TIMEOUT={{ geop_timeout }}s AWS_PROFILE={{ aws_profile }} +{% else %} +Environment=MMW_GEOPROCESSING_TIMEOUT={{ geop_timeout }}s {% endif %} User=mmw WorkingDirectory={{ geop_home }} diff --git a/deployment/ansible/roles/model-my-watershed.postgresql-support/tasks/main.yml b/deployment/ansible/roles/model-my-watershed.postgresql-support/tasks/main.yml new file mode 100644 index 000000000..5646d4d2c --- /dev/null +++ b/deployment/ansible/roles/model-my-watershed.postgresql-support/tasks/main.yml @@ -0,0 +1,20 @@ +--- +- name: Configure the PostgreSQL APT key + apt_key: url=https://www.postgresql.org/media/keys/ACCC4CF8.asc state=present + +- name: Configure the PostgreSQL APT repositories + apt_repository: repo="deb [arch=amd64] https://apt-archive.postgresql.org/pub/repos/apt/ {{ ansible_distribution_release }}-pgdg-archive {{ postgresql_support_repository_channel }}" + state=present + +- name: Install client API libraries for PostgreSQL + apt: + pkg: + - libpq5={{ postgresql_support_libpq_version }} + - libpq-dev={{ postgresql_support_libpq_version }} + state: present + force: true + +- name: Install PostgreSQL driver for Python + pip: name=psycopg2-binary + version={{ postgresql_support_psycopg2_version }} + state=present diff --git a/deployment/ansible/roles/model-my-watershed.postgresql/meta/main.yml b/deployment/ansible/roles/model-my-watershed.postgresql/meta/main.yml index 0bb5b0045..b581fee62 100644 --- a/deployment/ansible/roles/model-my-watershed.postgresql/meta/main.yml +++ b/deployment/ansible/roles/model-my-watershed.postgresql/meta/main.yml @@ -1,5 +1,5 @@ --- dependencies: - - { role: "azavea.postgresql-support" } + - { role: "model-my-watershed.postgresql-support" } - { role: "azavea.postgresql" } - { role: "azavea.postgis" } diff --git a/deployment/ansible/roles/model-my-watershed.postgresql/tasks/main.yml 
b/deployment/ansible/roles/model-my-watershed.postgresql/tasks/main.yml index ddc94736b..b3d0c0c12 100644 --- a/deployment/ansible/roles/model-my-watershed.postgresql/tasks/main.yml +++ b/deployment/ansible/roles/model-my-watershed.postgresql/tasks/main.yml @@ -1,4 +1,9 @@ --- +- name: Install ACL, required for Ansible Super User + apt: + name: acl + state: present + - name: Create PostgreSQL super user become_user: postgres postgresql_user: name="{{ postgresql_username }}" diff --git a/deployment/ansible/roles/model-my-watershed.python/tasks/main.yml b/deployment/ansible/roles/model-my-watershed.python/tasks/main.yml new file mode 100644 index 000000000..4ea42e361 --- /dev/null +++ b/deployment/ansible/roles/model-my-watershed.python/tasks/main.yml @@ -0,0 +1,11 @@ +- name: Install Python 3 + apt: + pkg: ["python3={{ python_version }}", + "python3-dev={{ python_version }}", + "python3-distutils={{ python_version }}"] + state: present + +- name: Install old setuptools with use_2to3 support + pip: name=setuptools + version=<58 + state=present diff --git a/deployment/ansible/roles/model-my-watershed.rwd/tasks/app.yml b/deployment/ansible/roles/model-my-watershed.rwd/tasks/app.yml index 4c01ecbd1..89c1d0eca 100644 --- a/deployment/ansible/roles/model-my-watershed.rwd/tasks/app.yml +++ b/deployment/ansible/roles/model-my-watershed.rwd/tasks/app.yml @@ -4,7 +4,7 @@ state=directory - name: Configure RWD service definition - docker_service: + docker_compose: project_name: mmw state: present definition: diff --git a/deployment/ansible/roles/model-my-watershed.tiler/tasks/dependencies.yml b/deployment/ansible/roles/model-my-watershed.tiler/tasks/dependencies.yml index 1eab0bc8d..000917582 100644 --- a/deployment/ansible/roles/model-my-watershed.tiler/tasks/dependencies.yml +++ b/deployment/ansible/roles/model-my-watershed.tiler/tasks/dependencies.yml @@ -1,8 +1,8 @@ --- - name: Install canvas rendering dependencies apt: - pkg: ["libcairo2-dev=1.14.*", - "libpango1.0-dev=1.38.*", + pkg: ["libcairo2-dev=1.16.*", + "libpango1.0-dev=1.44.*", "libjpeg8-dev=8c-*", "libgif-dev=5.1.*"] state: present @@ -11,12 +11,11 @@ apt: pkg: ["libmapnik3.0=3.0.*", "libmapnik-dev=3.0.*", - "mapnik-utils=3.0.*", - "python-mapnik=1:0.0~20151125-92e79d2-1build1"] + "mapnik-utils=3.0.*"] state: present - name: Install Windshaft JavaScript dependencies - command: npm install --unsafe-perm + command: sudo npm install --unsafe-perm args: chdir: "{{ tiler_home }}" become: False diff --git a/deployment/ansible/services.yml b/deployment/ansible/services.yml index 5f169fdbd..b032a39df 100644 --- a/deployment/ansible/services.yml +++ b/deployment/ansible/services.yml @@ -7,5 +7,6 @@ apt: update_cache=yes cache_valid_time=3600 roles: + - { role: "model-my-watershed.python", when: "['development', 'test'] | some_are_in(group_names)" } - { role: "model-my-watershed.postgresql", when: "['development', 'test'] | some_are_in(group_names)" } - { role: "azavea.redis", when: "['development', 'test'] | some_are_in(group_names)" } diff --git a/deployment/cfn/application.py b/deployment/cfn/application.py index c6a36db27..d69007a70 100644 --- a/deployment/cfn/application.py +++ b/deployment/cfn/application.py @@ -12,9 +12,9 @@ autoscaling as asg ) -from utils.cfn import get_recent_ami +from cfn.utils.cfn import get_recent_ami -from utils.constants import ( +from cfn.utils.constants import ( ALLOW_ALL_CIDR, EC2_INSTANCE_TYPES, HTTP, diff --git a/deployment/cfn/data_plane.py b/deployment/cfn/data_plane.py index e7aec0711..6f7726598 100644 --- 
a/deployment/cfn/data_plane.py +++ b/deployment/cfn/data_plane.py @@ -12,9 +12,9 @@ route53 as r53 ) -from utils.cfn import get_recent_ami +from cfn.utils.cfn import get_recent_ami -from utils.constants import ( +from cfn.utils.constants import ( ALLOW_ALL_CIDR, CANONICAL_ACCOUNT_ID, EC2_INSTANCE_TYPES, @@ -64,7 +64,7 @@ class DataPlane(StackNode): 'KeyName': 'mmw-stg', 'IPAccess': ALLOW_ALL_CIDR, 'BastionHostInstanceType': 't2.medium', - 'RDSInstanceType': 'db.t2.micro', + 'RDSInstanceType': 'db.t3.micro', 'RDSDbName': 'modelmywatershed', 'RDSUsername': 'modelmywatershed', 'RDSPassword': 'modelmywatershed', @@ -111,7 +111,7 @@ def set_up_stack(self): ), 'BastionHostAMI') self.rds_instance_type = self.add_parameter(Parameter( - 'RDSInstanceType', Type='String', Default='db.t2.micro', + 'RDSInstanceType', Type='String', Default='db.t3.micro', Description='RDS instance type', AllowedValues=RDS_INSTANCE_TYPES, ConstraintDescription='must be a valid RDS instance type.' ), 'RDSInstanceType') @@ -195,7 +195,7 @@ def get_recent_bastion_ami(self): bastion_ami_id = self.get_input('BastionHostAMI') except MKUnresolvableInputError: filters = {'name': - 'ubuntu/images/hvm-ssd/ubuntu-xenial-16.04-amd64-server-*', + 'ubuntu/images/hvm-ssd/ubuntu-focal-20.04-amd64-server-*', 'architecture': 'x86_64', 'block-device-mapping.volume-type': 'gp2', 'root-device-type': 'ebs', @@ -296,7 +296,7 @@ def create_rds_instance(self): DBParameterGroupName=Ref(self.rds_parameter_group_name), DBSubnetGroupName=Ref(rds_subnet_group), Engine='postgres', - EngineVersion='9.6.14', + EngineVersion='13.4', MasterUsername=Ref(self.rds_username), MasterUserPassword=Ref(self.rds_password), MultiAZ=Ref(self.rds_multi_az), diff --git a/deployment/cfn/stacks.py b/deployment/cfn/stacks.py index e5e4dbf88..21927712a 100644 --- a/deployment/cfn/stacks.py +++ b/deployment/cfn/stacks.py @@ -1,18 +1,18 @@ from majorkirby import GlobalConfigNode -from vpc import VPC -from s3_vpc_endpoint import S3VPCEndpoint -from private_hosted_zone import PrivateHostedZone -from data_plane import DataPlane -from application import Application -from tiler import Tiler -from tile_delivery_network import TileDeliveryNetwork -from worker import Worker -from public_hosted_zone import PublicHostedZone +from cfn.vpc import VPC +from cfn.s3_vpc_endpoint import S3VPCEndpoint +from cfn.private_hosted_zone import PrivateHostedZone +from cfn.data_plane import DataPlane +from cfn.application import Application +from cfn.tiler import Tiler +from cfn.tile_delivery_network import TileDeliveryNetwork +from cfn.worker import Worker +from cfn.public_hosted_zone import PublicHostedZone from boto import cloudformation as cfn -import ConfigParser +import configparser import sys @@ -23,13 +23,13 @@ def get_config(mmw_config_path, profile): :param mmw_config_path: Path to the config file :param profile: Config profile to read """ - mmw_config = ConfigParser.ConfigParser() + mmw_config = configparser.ConfigParser() mmw_config.optionxform = str mmw_config.read(mmw_config_path) try: section = mmw_config.items(profile) - except ConfigParser.NoSectionError: + except configparser.NoSectionError: sys.stderr.write('There is no section [{}] in the configuration ' 'file\n'.format(profile)) sys.stderr.write('you specified. 
Did you specify the correct file?') diff --git a/deployment/cfn/tile_delivery_network.py b/deployment/cfn/tile_delivery_network.py index 4fc04edd2..18df8f6d1 100644 --- a/deployment/cfn/tile_delivery_network.py +++ b/deployment/cfn/tile_delivery_network.py @@ -10,7 +10,7 @@ s3 ) -from utils.constants import ( +from cfn.utils.constants import ( AMAZON_S3_HOSTED_ZONE_ID, AMAZON_S3_WEBSITE_DOMAIN, ) @@ -89,7 +89,7 @@ def create_cloudfront_distributions(self): DomainName=Join('.', ['tile-cache', Ref(self.public_hosted_zone_name)]), - CustomOriginConfig=cf.CustomOrigin( + CustomOriginConfig=cf.CustomOriginConfig( OriginProtocolPolicy='http-only' ) ) @@ -112,7 +112,7 @@ def create_cloudfront_distributions(self): DomainName=Join('.', ['tile-cache', Ref(self.public_hosted_zone_name)]), - CustomOriginConfig=cf.CustomOrigin( + CustomOriginConfig=cf.CustomOriginConfig( OriginProtocolPolicy='http-only' ) ) diff --git a/deployment/cfn/tiler.py b/deployment/cfn/tiler.py index 46929ff4c..2a3767e9d 100644 --- a/deployment/cfn/tiler.py +++ b/deployment/cfn/tiler.py @@ -13,9 +13,9 @@ route53 as r53 ) -from utils.cfn import get_recent_ami +from cfn.utils.cfn import get_recent_ami -from utils.constants import ( +from cfn.utils.constants import ( ALLOW_ALL_CIDR, EC2_INSTANCE_TYPES, HTTP, diff --git a/deployment/cfn/utils/constants.py b/deployment/cfn/utils/constants.py index d707fe720..cfc2982c7 100644 --- a/deployment/cfn/utils/constants.py +++ b/deployment/cfn/utils/constants.py @@ -8,10 +8,10 @@ ] RDS_INSTANCE_TYPES = [ - 'db.t2.micro', - 'db.t2.small', - 'db.t2.medium', - 'db.t2.large' + 'db.t3.micro', + 'db.t3.small', + 'db.t3.medium', + 'db.t3.large' ] ELASTICACHE_INSTANCE_TYPES = [ diff --git a/deployment/cfn/vpc.py b/deployment/cfn/vpc.py index d25db866f..db0922370 100644 --- a/deployment/cfn/vpc.py +++ b/deployment/cfn/vpc.py @@ -7,13 +7,13 @@ ec2 ) -from utils.cfn import ( +from cfn.utils.cfn import ( get_availability_zones, get_recent_ami, get_subnet_cidr_block ) -from utils.constants import ( +from cfn.utils.constants import ( ALLOW_ALL_CIDR, EC2_INSTANCE_TYPES, HTTP, @@ -94,9 +94,9 @@ def set_up_stack(self): self.add_output(Output('AvailabilityZones', Value=','.join(self.default_azs))) self.add_output(Output('PrivateSubnets', - Value=Join(',', map(Ref, self.default_private_subnets)))) # NOQA + Value=Join(',', list(map(Ref, self.default_private_subnets))))) # NOQA self.add_output(Output('PublicSubnets', - Value=Join(',', map(Ref, self.default_public_subnets)))) # NOQA + Value=Join(',', list(map(Ref, self.default_public_subnets))))) # NOQA self.add_output(Output('RouteTableId', Value=Ref(public_route_table))) def get_recent_nat_ami(self): diff --git a/deployment/cfn/worker.py b/deployment/cfn/worker.py index d6481f00f..3e2ab0a9a 100644 --- a/deployment/cfn/worker.py +++ b/deployment/cfn/worker.py @@ -13,9 +13,9 @@ route53 as r53 ) -from utils.cfn import get_recent_ami +from cfn.utils.cfn import get_recent_ami -from utils.constants import ( +from cfn.utils.constants import ( ALLOW_ALL_CIDR, EC2_INSTANCE_TYPES, HTTP, diff --git a/deployment/packer/driver.py b/deployment/packer/driver.py index c477964fd..8505a31e8 100644 --- a/deployment/packer/driver.py +++ b/deployment/packer/driver.py @@ -18,7 +18,7 @@ def get_recent_ubuntu_ami(region, aws_profile): """Gets AMI ID for current release in region""" filters = { - 'name': 'ubuntu/images/hvm-ssd/ubuntu-xenial-16.04-amd64-server-*', + 'name': 'ubuntu/images/hvm-ssd/ubuntu-focal-20.04-amd64-server-*', 'architecture': 'x86_64', 'root-device-type': 'ebs', 
'virtualization-type': 'hvm', diff --git a/deployment/packer/mmw.json b/deployment/packer/mmw.json index 72c174047..faf82aa0d 100644 --- a/deployment/packer/mmw.json +++ b/deployment/packer/mmw.json @@ -89,9 +89,9 @@ "inline": [ "sleep 5", "sudo apt-get update -qq", - "sudo apt-get install python-pip python-dev -y", - "sudo pip install --upgrade pip==18.1", - "sudo pip install ansible==2.6.18", + "sudo apt-get install python3 python3-dev python3-distutils -y", + "curl -sL https://bootstrap.pypa.io/get-pip.py | sudo python3", + "sudo pip install ansible==2.9.27", "sudo /bin/sh -c 'echo {{user `branch`}} {{user `description`}} > /srv/version.txt'" ] }, @@ -101,7 +101,7 @@ "playbook_dir": "ansible", "inventory_file": "ansible/inventory/packer-app-server", "extra_arguments": [ - "--user 'ubuntu' --extra-vars 'app_deploy_branch={{user `version`}}'" + "--user 'ubuntu' --extra-vars 'app_deploy_branch={{user `version`}} django_test_database=test_mmw'" ], "only": [ "mmw-app" @@ -125,7 +125,7 @@ "playbook_dir": "ansible", "inventory_file": "ansible/inventory/packer-worker-server", "extra_arguments": [ - "--user 'ubuntu' --extra-vars 'app_deploy_branch={{user `version`}}'" + "--user 'ubuntu' --extra-vars 'app_deploy_branch={{user `version`}} django_test_database=test_mmw'" ], "only": [ "mmw-worker" diff --git a/deployment/requirements.txt b/deployment/requirements.txt index 7f0d481b1..b40575552 100644 --- a/deployment/requirements.txt +++ b/deployment/requirements.txt @@ -1,5 +1,6 @@ -ansible==2.6.18 -majorkirby>=0.2.0,<0.2.99 -troposphere==1.1.0 -boto==2.39.0 +cryptography==3.2.1 +ansible==2.9.27 +troposphere==2.4.9 +majorkirby==1.0.0 +boto==2.49.0 awscli>=1.9.15 diff --git a/doc/MMW_API_landproperties_demo.ipynb b/doc/MMW_API_landproperties_demo.ipynb index d4557acae..7a4c159ba 100644 --- a/doc/MMW_API_landproperties_demo.ipynb +++ b/doc/MMW_API_landproperties_demo.ipynb @@ -260,7 +260,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "#### Issue job request: **analyze/land/**" + "#### Issue job request: **analyze/land/2011_2011/**" ] }, { @@ -269,7 +269,7 @@ "metadata": {}, "outputs": [], "source": [ - "result = analyze_api_request('land', s, api_url, json_payload)" + "result = analyze_api_request('land/2011_2011', s, api_url, json_payload)" ] }, { @@ -643,4 +643,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git a/scripts/aws/setupdb.sh b/scripts/aws/setupdb.sh index 0d5b8443c..80f920481 100755 --- a/scripts/aws/setupdb.sh +++ b/scripts/aws/setupdb.sh @@ -3,14 +3,15 @@ set -e set -x -usage="$(basename "$0") [-h] [-b] [-s] \n +usage="$(basename "$0") [-h] [options] \n --Sets up a postgresql database for MMW \n \n -where: \n +where options are one or more of: \n -h show this help text\n -b load/reload boundary data\n -f load a named boundary sql.gz\n -s load/reload stream data\n + -S load/reload Hi Res stream data (very large)\n -d load/reload DRB stream data\n -m load/reload mapshed data\n -p load/reload DEP data\n @@ -24,11 +25,12 @@ FILE_HOST="https://s3.amazonaws.com/data.mmw.azavea.com" load_boundary=false file_to_load= load_stream=false +load_hires_stream=false load_mapshed=false load_water_quality=false load_catchment=false -while getopts ":hbsdpmqcf:x:" opt; do +while getopts ":hbsSdpmqcf:x:" opt; do case $opt in h) echo -e $usage @@ -37,6 +39,8 @@ while getopts ":hbsdpmqcf:x:" opt; do load_boundary=true ;; s) load_stream=true ;; + S) + load_hires_stream=true ;; d) load_drb_streams=true ;; p) @@ -116,7 +120,7 @@ fi if [ "$load_boundary" = 
"true" ] ; then # Fetch boundary layer sql files - FILES=("boundary_county.sql.gz" "boundary_school_district.sql.gz" "boundary_district.sql.gz" "boundary_huc12_deduped.sql.gz" "boundary_huc10.sql.gz" "boundary_huc08.sql.gz") + FILES=("boundary_county_20210910.sql.gz" "boundary_school_district.sql.gz" "boundary_district.sql.gz" "boundary_huc12_deduped.sql.gz" "boundary_huc10.sql.gz" "boundary_huc08.sql.gz") PATHS=("county" "district" "huc8" "huc10" "huc12" "school") TRGM_TABLES=("boundary_huc08" "boundary_huc10" "boundary_huc12") @@ -134,6 +138,16 @@ if [ "$load_stream" = "true" ] ; then purge_tile_cache $PATHS fi +if [ "$load_hires_stream" = "true" ] ; then + # Fetch hires stream network layer sql files + FILES=("nhdflowlinehr.sql.gz") + PATHS=("nhd_streams_hr_v1") + + download_and_load $FILES + purge_tile_cache $PATHS +fi + + if [ "$load_drb_streams" = "true" ] ; then # Fetch DRB stream network layer sql file FILES=("drb_streams_50.sql.gz") diff --git a/scripts/aws/staging-deployment.sh b/scripts/aws/staging-deployment.sh index 7702b1393..435e2a958 100755 --- a/scripts/aws/staging-deployment.sh +++ b/scripts/aws/staging-deployment.sh @@ -32,7 +32,7 @@ fi pushd deployment # Attempt to launch a new stack & cutover DNS -python mmw_stack.py launch-stacks \ +python3 mmw_stack.py launch-stacks \ --aws-profile "${MMW_AWS_PROFILE}" \ --mmw-profile "${MMW_PROFILE}" \ --mmw-config-path "${MMW_CONFIG_PATH}" \ @@ -40,7 +40,7 @@ python mmw_stack.py launch-stacks \ --activate-dns # Remove old stack -python mmw_stack.py remove-stacks \ +python3 mmw_stack.py remove-stacks \ --aws-profile "${MMW_AWS_PROFILE}" \ --mmw-profile "${MMW_PROFILE}" \ --mmw-config-path "${MMW_CONFIG_PATH}" \ diff --git a/scripts/data/climate/colorize.py b/scripts/data/climate/colorize.py index 7f0450ba6..50ab94fe8 100644 --- a/scripts/data/climate/colorize.py +++ b/scripts/data/climate/colorize.py @@ -1,6 +1,3 @@ -from __future__ import division -from __future__ import print_function - import os import sys import rasterio diff --git a/src/mmw/apps/bigcz/clients/__init__.py b/src/mmw/apps/bigcz/clients/__init__.py index 3b031e77d..89ae3a64f 100644 --- a/src/mmw/apps/bigcz/clients/__init__.py +++ b/src/mmw/apps/bigcz/clients/__init__.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - from apps.bigcz.clients import (cinergi, hydroshare, cuahsi, diff --git a/src/mmw/apps/bigcz/clients/cinergi/__init__.py b/src/mmw/apps/bigcz/clients/cinergi/__init__.py index 762ecc803..5c205fff9 100644 --- a/src/mmw/apps/bigcz/clients/cinergi/__init__.py +++ b/src/mmw/apps/bigcz/clients/cinergi/__init__.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - from apps.bigcz.clients.cinergi.models import CinergiResource from apps.bigcz.clients.cinergi.serializers import CinergiResourceSerializer diff --git a/src/mmw/apps/bigcz/clients/cinergi/models.py b/src/mmw/apps/bigcz/clients/cinergi/models.py index 28042363f..c119192b3 100644 --- a/src/mmw/apps/bigcz/clients/cinergi/models.py +++ b/src/mmw/apps/bigcz/clients/cinergi/models.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - from apps.bigcz.models import Resource diff --git a/src/mmw/apps/bigcz/clients/cinergi/search.py b/src/mmw/apps/bigcz/clients/cinergi/search.py index 
5701241e5..8d3fc9ea8 100644 --- a/src/mmw/apps/bigcz/clients/cinergi/search.py +++ b/src/mmw/apps/bigcz/clients/cinergi/search.py @@ -1,11 +1,7 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - import requests import dateutil.parser -from HTMLParser import HTMLParser +from html.parser import HTMLParser from django.contrib.gis.geos import Polygon from django.conf import settings @@ -18,7 +14,7 @@ CINERGI_HOST = 'http://cinergi.sdsc.edu' CATALOG_NAME = 'cinergi' -CATALOG_URL = '{}/geoportal/opensearch'.format(CINERGI_HOST) +CATALOG_URL = f'{CINERGI_HOST}/geoportal/opensearch' PAGE_SIZE = settings.BIGCZ_CLIENT_PAGE_SIZE @@ -73,7 +69,7 @@ def parse_links(source): result = [] links = source.get('links_s', []) - if isinstance(links, basestring): + if isinstance(links, str): links = [links] for url in links: @@ -87,7 +83,7 @@ def parse_cinergi_url(fileid): Convert fileid to URL in CINERGI Portal """ - return '{}/geoportal/?filter=%22{}%22'.format(CINERGI_HOST, fileid) + return f'{CINERGI_HOST}/geoportal/?filter=%22{fileid}%22' def parse_string_or_list(string_or_list): @@ -95,7 +91,7 @@ def parse_string_or_list(string_or_list): Fields like contact_organizations be either a list of strings, or a string. Make it always a list of strings """ - if isinstance(string_or_list, basestring): + if isinstance(string_or_list, str): return [string_or_list] return string_or_list @@ -116,11 +112,11 @@ def parse_categories(source): categories = source.get('hierarchies_cat', source.get('categories_cat')) if not categories or \ - not all(isinstance(c, basestring) for c in categories): + not all(isinstance(c, str) for c in categories): # We only handle categories that are lists of strings return None - if isinstance(categories, basestring): + if isinstance(categories, str): categories = [categories] split_categories = [category.split(">") for category in categories] @@ -222,7 +218,7 @@ def parse_record(item): def prepare_bbox(box): - return '{},{},{},{}'.format(box.xmin, box.ymin, box.xmax, box.ymax) + return f'{box.xmin},{box.ymin},{box.xmax},{box.ymax}' def prepare_date(value): @@ -234,7 +230,7 @@ def prepare_date(value): def prepare_time(from_date, to_date): value = prepare_date(from_date) if to_date: - value = '{}/{}'.format(value, prepare_date(to_date)) + value = f'{value}/{prepare_date(to_date)}' return value diff --git a/src/mmw/apps/bigcz/clients/cinergi/serializers.py b/src/mmw/apps/bigcz/clients/cinergi/serializers.py index f0ad22f9a..a5feb1088 100644 --- a/src/mmw/apps/bigcz/clients/cinergi/serializers.py +++ b/src/mmw/apps/bigcz/clients/cinergi/serializers.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - from rest_framework.serializers import (CharField, ListField, DateTimeField, diff --git a/src/mmw/apps/bigcz/clients/cuahsi/__init__.py b/src/mmw/apps/bigcz/clients/cuahsi/__init__.py index 2dae317d7..4b9ff13a3 100644 --- a/src/mmw/apps/bigcz/clients/cuahsi/__init__.py +++ b/src/mmw/apps/bigcz/clients/cuahsi/__init__.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - from apps.bigcz.clients.cuahsi.models import CuahsiResource from apps.bigcz.clients.cuahsi.serializers import CuahsiResourceSerializer diff --git a/src/mmw/apps/bigcz/clients/cuahsi/details.py b/src/mmw/apps/bigcz/clients/cuahsi/details.py 
index 79719dfbf..cf8f391f7 100644 --- a/src/mmw/apps/bigcz/clients/cuahsi/details.py +++ b/src/mmw/apps/bigcz/clients/cuahsi/details.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - from datetime import date, timedelta from socket import timeout diff --git a/src/mmw/apps/bigcz/clients/cuahsi/models.py b/src/mmw/apps/bigcz/clients/cuahsi/models.py index 7e304c00e..99d712350 100644 --- a/src/mmw/apps/bigcz/clients/cuahsi/models.py +++ b/src/mmw/apps/bigcz/clients/cuahsi/models.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - from apps.bigcz.models import Resource diff --git a/src/mmw/apps/bigcz/clients/cuahsi/search.py b/src/mmw/apps/bigcz/clients/cuahsi/search.py index b2119e998..295d4dc50 100644 --- a/src/mmw/apps/bigcz/clients/cuahsi/search.py +++ b/src/mmw/apps/bigcz/clients/cuahsi/search.py @@ -1,10 +1,6 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - from datetime import date -from urllib2 import URLError +from urllib.error import URLError from socket import timeout from operator import attrgetter, itemgetter @@ -69,7 +65,7 @@ def recursive_asdict(d): From https://gist.github.com/robcowie/a6a56cf5b17a86fdf461 """ out = {} - for k, v in asdict(d).iteritems(): + for k, v in asdict(d).items(): if hasattr(v, '__keylist__'): out[k] = recursive_asdict(v) elif isinstance(v, list): @@ -121,18 +117,14 @@ def parse_details_url(record): if len(parts) == 2: code, id = parts if code == 'NWISDV': - url = 'https://waterdata.usgs.gov/nwis/dv/?site_no={}' - return url.format(id) + return f'https://waterdata.usgs.gov/nwis/dv/?site_no={id}' elif code == 'NWISUV': - url = 'https://waterdata.usgs.gov/nwis/uv/?site_no={}' - return url.format(id) + return f'https://waterdata.usgs.gov/nwis/uv/?site_no={id}' elif code == 'NWISGW': - url = ('https://nwis.waterdata.usgs.gov/' + - 'usa/nwis/gwlevels/?site_no={}') - return url.format(id) + return ('https://nwis.waterdata.usgs.gov/' + f'usa/nwis/gwlevels/?site_no={id}') elif code == 'EnviroDIY': - url = 'http://data.envirodiy.org/sites/{}/' - return url.format(id) + return f'http://data.envirodiy.org/sites/{id}/' return None @@ -205,7 +197,7 @@ def group_series_by_location(series): group.append(record) records = [] - for location, group in groups.iteritems(): + for location, group in groups.items(): records.append({ 'serv_code': group[0]['ServCode'], 'serv_url': group[0]['ServURL'], @@ -234,8 +226,8 @@ def group_series_by_location(series): def make_request(request, expiry, **kwargs): - key = 'bigcz_cuahsi_{}_{}'.format(request.method.name, - hash(frozenset(kwargs.items()))) + key = \ + f'bigcz_cuahsi_{request.method.name}_{hash(frozenset(kwargs.items()))}' cached = cache.get(key) if cached: return cached @@ -244,7 +236,7 @@ def make_request(request, expiry, **kwargs): response = recursive_asdict(request(**kwargs)) cache.set(key, response, timeout=expiry) return response - except URLError, e: + except URLError as e: if isinstance(e.reason, timeout): raise RequestTimedOutError() else: @@ -316,10 +308,9 @@ def search(**kwargs): if bbox_area > settings.BIGCZ_MAX_AREA: raise ValidationError({ - 'error': 'The selected area of interest with a bounding box of {} ' - 'km² is larger than the currently supported maximum size ' - 'of {} km².'.format(round(bbox_area, 2), 
- settings.BIGCZ_MAX_AREA)}) + 'error': 'The selected area of interest with a bounding box of ' + f'{round(bbox_area, 2)} km² is larger than the currently ' + f'supported maximum size of {settings.BIGCZ_MAX_AREA} km².'}) # NOQA world = BBox(-180, -90, 180, 90) diff --git a/src/mmw/apps/bigcz/clients/cuahsi/serializers.py b/src/mmw/apps/bigcz/clients/cuahsi/serializers.py index 2d30c1893..989d0fdbe 100644 --- a/src/mmw/apps/bigcz/clients/cuahsi/serializers.py +++ b/src/mmw/apps/bigcz/clients/cuahsi/serializers.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - from rest_framework.serializers import (CharField, DateTimeField, ListField, diff --git a/src/mmw/apps/bigcz/clients/hydroshare/__init__.py b/src/mmw/apps/bigcz/clients/hydroshare/__init__.py index 8e6d8123f..a12fe5d26 100644 --- a/src/mmw/apps/bigcz/clients/hydroshare/__init__.py +++ b/src/mmw/apps/bigcz/clients/hydroshare/__init__.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - from apps.bigcz.clients.hydroshare import models, serializers # Import catalog name and search function, so it can be exported from here diff --git a/src/mmw/apps/bigcz/clients/hydroshare/models.py b/src/mmw/apps/bigcz/clients/hydroshare/models.py index 189691e55..863855dda 100644 --- a/src/mmw/apps/bigcz/clients/hydroshare/models.py +++ b/src/mmw/apps/bigcz/clients/hydroshare/models.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - from apps.bigcz.models import Resource diff --git a/src/mmw/apps/bigcz/clients/hydroshare/search.py b/src/mmw/apps/bigcz/clients/hydroshare/search.py index a0c51b0e1..cf19ea809 100644 --- a/src/mmw/apps/bigcz/clients/hydroshare/search.py +++ b/src/mmw/apps/bigcz/clients/hydroshare/search.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - from requests import Request, Session, Timeout import dateutil.parser from datetime import datetime @@ -145,7 +141,7 @@ def search(**kwargs): CATALOG_URL, params=params)) - key = 'bigcz_hydroshare_{}'.format(hash(frozenset(params.items()))) + key = f'bigcz_hydroshare_{hash(frozenset(params.items()))}' cached = cache.get(key) if cached: data = cached diff --git a/src/mmw/apps/bigcz/clients/hydroshare/serializers.py b/src/mmw/apps/bigcz/clients/hydroshare/serializers.py index 0869a68c2..43e7442a7 100644 --- a/src/mmw/apps/bigcz/clients/hydroshare/serializers.py +++ b/src/mmw/apps/bigcz/clients/hydroshare/serializers.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - from rest_framework.serializers import DateTimeField from apps.bigcz.serializers import ResourceSerializer diff --git a/src/mmw/apps/bigcz/clients/usgswqp/__init__.py b/src/mmw/apps/bigcz/clients/usgswqp/__init__.py index a914c50a7..3df8945eb 100644 --- a/src/mmw/apps/bigcz/clients/usgswqp/__init__.py +++ b/src/mmw/apps/bigcz/clients/usgswqp/__init__.py @@ -1,9 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import (absolute_import, - division, - print_function, - unicode_literals) - from apps.bigcz.clients.usgswqp.models import USGSResource from apps.bigcz.clients.usgswqp.serializers import 
USGSResourceSerializer diff --git a/src/mmw/apps/bigcz/clients/usgswqp/models.py b/src/mmw/apps/bigcz/clients/usgswqp/models.py index 8b04ffbc5..3b9235b93 100644 --- a/src/mmw/apps/bigcz/clients/usgswqp/models.py +++ b/src/mmw/apps/bigcz/clients/usgswqp/models.py @@ -1,9 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import (absolute_import, - division, - print_function, - unicode_literals) - from apps.bigcz.models import Resource diff --git a/src/mmw/apps/bigcz/clients/usgswqp/search.py b/src/mmw/apps/bigcz/clients/usgswqp/search.py index ddad22b8c..af906ff46 100644 --- a/src/mmw/apps/bigcz/clients/usgswqp/search.py +++ b/src/mmw/apps/bigcz/clients/usgswqp/search.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import (absolute_import, - division, - print_function, - unicode_literals) import requests from datetime import date @@ -64,15 +60,13 @@ def parse_record(record): created_at=None, updated_at=None, geom=geom, - details_url='https://www.waterqualitydata.us/provider/{prov}/{org}/{id}/'.format(prov=record['ProviderName'], # NOQA - org=record['OrganizationIdentifier'], # NOQA - id=record['MonitoringLocationIdentifier']), # NOQA + details_url=f'https://www.waterqualitydata.us/provider/{record["ProviderName"]}/{record["OrganizationIdentifier"]}/{record["MonitoringLocationIdentifier"]}/', # NOQA sample_mediums=None, variables=None, service_org=record['OrganizationIdentifier'], service_orgname=record['OrganizationFormalName'], service_code=record['MonitoringLocationIdentifier'], - service_url='https://www.waterqualitydata.us/data/Result/search?siteid={}&mimeType=csv&sorted=no&zip=yes'.format(record['MonitoringLocationIdentifier']), # NOQA + service_url=f'https://www.waterqualitydata.us/data/Result/search?siteid={record["MonitoringLocationIdentifier"]}&mimeType=csv&sorted=no&zip=yes', # NOQA service_title=None, service_citation='National Water Quality Monitoring Council, [YEAR]. Water Quality Portal. Accessed [DATE ACCESSED]. 
https://www.waterqualitydata.us/', # NOQA begin_date=None, @@ -96,9 +90,9 @@ def search(**kwargs): if bbox_area > USGS_MAX_SIZE_SQKM: raise ValidationError({ - 'error': 'The selected area of interest with a bounding box of {} ' - 'km² is larger than the currently supported maximum size ' - 'of {} km².'.format(round(bbox_area, 2), USGS_MAX_SIZE_SQKM)}) # NOQA + 'error': 'The selected area of interest with a bounding box of ' + f'{round(bbox_area, 2)} km² is larger than the currently ' + f'supported maximum size of {USGS_MAX_SIZE_SQKM} km².'}) params = { # bBox might be used in the future # 'bBox': '{0:.3f},{1:.3f},{2:.3f},{3:.3f}'.format(bbox.xmin, bbox.ymin, bbox.xmax, bbox.ymax), # NOQA diff --git a/src/mmw/apps/bigcz/clients/usgswqp/serializers.py b/src/mmw/apps/bigcz/clients/usgswqp/serializers.py index e59ccfdc7..b80d9e253 100644 --- a/src/mmw/apps/bigcz/clients/usgswqp/serializers.py +++ b/src/mmw/apps/bigcz/clients/usgswqp/serializers.py @@ -1,9 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import (absolute_import, - division, - print_function, - unicode_literals) - from rest_framework.serializers import (CharField, DateTimeField, ListField, diff --git a/src/mmw/apps/bigcz/models.py b/src/mmw/apps/bigcz/models.py index 26de27188..c21fce227 100644 --- a/src/mmw/apps/bigcz/models.py +++ b/src/mmw/apps/bigcz/models.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - from django.contrib.gis.geos import Polygon @@ -47,6 +43,6 @@ def area(self): polygon = Polygon.from_bbox(( self.xmin, self.ymin, self.xmax, self.ymax)) - polygon.set_srid(4326) + polygon.srid = 4326 return polygon.transform(5070, clone=True).area diff --git a/src/mmw/apps/bigcz/serializers.py b/src/mmw/apps/bigcz/serializers.py index f6dcc880c..921219903 100644 --- a/src/mmw/apps/bigcz/serializers.py +++ b/src/mmw/apps/bigcz/serializers.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - from math import ceil from rest_framework.serializers import \ diff --git a/src/mmw/apps/bigcz/urls.py b/src/mmw/apps/bigcz/urls.py index 5b2f8de23..f7f758b39 100644 --- a/src/mmw/apps/bigcz/urls.py +++ b/src/mmw/apps/bigcz/urls.py @@ -1,15 +1,11 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - -from django.conf.urls import url +from django.urls import re_path from apps.bigcz import views app_name = 'bigcz' urlpatterns = [ - url(r'^search$', views.search, name='search'), - url(r'^details$', views.details, name='details'), - url(r'^values$', views.values, name='values'), + re_path(r'^search$', views.search, name='search'), + re_path(r'^details$', views.details, name='details'), + re_path(r'^values$', views.values, name='values'), ] diff --git a/src/mmw/apps/bigcz/utils.py b/src/mmw/apps/bigcz/utils.py index 080ba30bf..33f6d4487 100644 --- a/src/mmw/apps/bigcz/utils.py +++ b/src/mmw/apps/bigcz/utils.py @@ -1,9 +1,6 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - import csv +from io import TextIOWrapper from apps.bigcz.models import BBox @@ -65,9 +62,8 @@ class RequestTimedOutError(APIException): class ValuesTimedOutError(APIException): status_code = status.HTTP_504_GATEWAY_TIMEOUT - default_detail = \ - 'Request for values did not finish in {} 
seconds'.format( - settings.BIGCZ_CLIENT_TIMEOUT) + default_detail = ('Request for values did not finish in ' + f'{settings.BIGCZ_CLIENT_TIMEOUT} seconds') class ServiceNotAvailableError(ValidationError): @@ -75,10 +71,11 @@ class ServiceNotAvailableError(ValidationError): default_detail = 'Underlying service is not available.' -def read_unicode_csv(utf8_data, **kwargs): +def read_unicode_csv(file_in_zip, **kwargs): + utf8_data = TextIOWrapper(file_in_zip, encoding='utf-8') csv_reader = csv.DictReader(utf8_data, **kwargs) for row in csv_reader: yield { - key.decode('utf-8'): value.decode('utf-8') - for key, value in row.iteritems() + key: value + for key, value in row.items() } diff --git a/src/mmw/apps/bigcz/views.py b/src/mmw/apps/bigcz/views.py index c04636ffb..9d348979a 100644 --- a/src/mmw/apps/bigcz/views.py +++ b/src/mmw/apps/bigcz/views.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - import json from django.contrib.gis.geos import GEOSGeometry @@ -48,8 +44,7 @@ def _do_search(request): if catalog not in CATALOGS: raise ValidationError({ - 'error': 'Catalog must be one of: {}' - .format(', '.join(CATALOGS.keys()))}) + 'error': f'Catalog must be one of: {", ".join(CATALOGS.keys())}'}) # Store geojson to pass in search kwargs geojson = json.dumps(params.get('geom')) @@ -100,15 +95,13 @@ def _get_details(request): if catalog not in CATALOGS: raise ValidationError({ - 'error': 'Catalog must be one of: {}' - .format(', '.join(CATALOGS.keys()))}) + 'error': f'Catalog must be one of: {", ".join(CATALOGS.keys())}'}) details = CATALOGS[catalog]['details'] if not details: raise NotFound({ - 'error': 'No details endpoint for {}' - .format(catalog)}) + 'error': f'No details endpoint for {catalog}'}) details_kwargs = { 'wsdl': params.get('wsdl'), @@ -131,15 +124,13 @@ def _get_values(request): if catalog not in CATALOGS: raise ValidationError({ - 'error': 'Catalog must be one of: {}' - .format(', '.join(CATALOGS.keys()))}) + 'error': f'Catalog must be one of: {", ".join(CATALOGS.keys())}'}) values = CATALOGS[catalog]['values'] if not values: raise NotFound({ - 'error': 'No values endpoint for {}' - .format(catalog)}) + 'error': f'No values endpoint for {catalog}'}) values_kwargs = { 'wsdl': params.get('wsdl'), diff --git a/src/mmw/apps/core/admin.py b/src/mmw/apps/core/admin.py index 1405eb536..d5652ba94 100644 --- a/src/mmw/apps/core/admin.py +++ b/src/mmw/apps/core/admin.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - from django.contrib import admin from apps.core.models import Job diff --git a/src/mmw/apps/core/decorators.py b/src/mmw/apps/core/decorators.py index ee0bdfd47..e1867ee95 100644 --- a/src/mmw/apps/core/decorators.py +++ b/src/mmw/apps/core/decorators.py @@ -1,7 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals - import sys import rollbar @@ -65,7 +62,7 @@ def _get_remote_addr(request): ipaddr = request.META.get("HTTP_X_FORWARDED_FOR", None) if ipaddr: # X_FORWARDED_FOR returns client1, proxy1, proxy2,... 
- return [x.strip() for x in ipaddr.split(",")][0] + return [x.strip() for x in ipaddr.split(",")][0] else: return request.META.get("REMOTE_ADDR", "") diff --git a/src/mmw/apps/core/mail/backends/boto_ses_mailer.py b/src/mmw/apps/core/mail/backends/boto_ses_mailer.py index 7d781f3cb..eb628d107 100644 --- a/src/mmw/apps/core/mail/backends/boto_ses_mailer.py +++ b/src/mmw/apps/core/mail/backends/boto_ses_mailer.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - import json import logging import sys @@ -35,7 +31,7 @@ def send_messages(self, email_messages): if self.check_quota: remaining_quota = self.mailer.get_remaining_message_quota() else: - remaining_quota = sys.maxint + remaining_quota = sys.maxsize if len(email_messages) <= remaining_quota: for email_message in email_messages: diff --git a/src/mmw/apps/core/migrations/0001_initial.py b/src/mmw/apps/core/migrations/0001_initial.py index 2180dd087..dac56e73f 100644 --- a/src/mmw/apps/core/migrations/0001_initial.py +++ b/src/mmw/apps/core/migrations/0001_initial.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import unicode_literals - from django.db import models, migrations from django.conf import settings import django.db.models.deletion diff --git a/src/mmw/apps/core/migrations/0002_requestlog.py b/src/mmw/apps/core/migrations/0002_requestlog.py index 8ff64f41b..61df2a60c 100644 --- a/src/mmw/apps/core/migrations/0002_requestlog.py +++ b/src/mmw/apps/core/migrations/0002_requestlog.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import unicode_literals - from django.db import migrations, models import django.db.models.deletion from django.conf import settings diff --git a/src/mmw/apps/core/migrations/0003_requestlog_api_referrer.py b/src/mmw/apps/core/migrations/0003_requestlog_api_referrer.py index b10fb4bb5..384b49dad 100644 --- a/src/mmw/apps/core/migrations/0003_requestlog_api_referrer.py +++ b/src/mmw/apps/core/migrations/0003_requestlog_api_referrer.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import unicode_literals - from django.db import migrations, models diff --git a/src/mmw/apps/core/migrations/0004_job_uuid_unique_constraint.py b/src/mmw/apps/core/migrations/0004_job_uuid_unique_constraint.py index bd7f487b0..d63017fcf 100644 --- a/src/mmw/apps/core/migrations/0004_job_uuid_unique_constraint.py +++ b/src/mmw/apps/core/migrations/0004_job_uuid_unique_constraint.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import unicode_literals - from django.db import migrations, models diff --git a/src/mmw/apps/core/migrations/0005_job_on_delete.py b/src/mmw/apps/core/migrations/0005_job_on_delete.py index 893d2087d..464d96c58 100644 --- a/src/mmw/apps/core/migrations/0005_job_on_delete.py +++ b/src/mmw/apps/core/migrations/0005_job_on_delete.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- # Generated by Django 1.11.22 on 2019-07-16 16:54 -from __future__ import unicode_literals - from django.conf import settings from django.db import migrations, models import django.db.models.deletion diff --git a/src/mmw/apps/core/models.py b/src/mmw/apps/core/models.py index 7474440b5..6da97b5e5 100644 --- a/src/mmw/apps/core/models.py +++ b/src/mmw/apps/core/models.py @@ -1,7 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals - from django.db import models from django.conf import settings @@ -25,7 +22,7 @@ class Job(models.Model): status 
= models.CharField(max_length=255) def __unicode__(self): - return unicode(self.uuid) + return str(self.uuid) class RequestLog(models.Model): diff --git a/src/mmw/apps/core/tasks.py b/src/mmw/apps/core/tasks.py index fe72d4971..7532a1492 100644 --- a/src/mmw/apps/core/tasks.py +++ b/src/mmw/apps/core/tasks.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import absolute_import - from django.utils.timezone import now from celery import shared_task from apps.core.models import Job @@ -36,9 +32,9 @@ def save_job_error(request, exc, traceback, job_id): job.status = 'failed' job.save() except Exception as e: - logger.error('Failed to save job error status. Job will appear hung. \ - Job Id: {0}'.format(job.id)) - logger.error('Error: {}'.format(e)) + logger.error('Failed to save job error status. Job will appear hung. ' + f'Job Id: {job.id}') + logger.error(f'Error: {e}') @shared_task(bind=True) diff --git a/src/mmw/apps/core/templates/base.html b/src/mmw/apps/core/templates/base.html index 29a0e8dd2..dc41eb903 100644 --- a/src/mmw/apps/core/templates/base.html +++ b/src/mmw/apps/core/templates/base.html @@ -1,5 +1,5 @@ {% include 'head.html' %} -{% load staticfiles %} +{% load static %}
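# Python 3 never calls __unicode__(): str() and f-strings dispatch to
# __str__(). The Job hunk above fixes the body of __unicode__() but keeps
# the Python 2 method name, so the fixed method is effectively dead code
# until it is renamed. A plain-Python sketch of the difference (no Django
# models involved; the class names here are illustrative only):
class WithUnicodeOnly:
    def __unicode__(self):   # ignored by Python 3
        return 'job-1234'


class WithStr:
    def __str__(self):       # what str() and f-strings actually call
        return 'job-1234'


print(str(WithUnicodeOnly()))  # default repr: <__main__.WithUnicodeOnly object at 0x...>
print(str(WithStr()))          # job-1234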
{% block header %} diff --git a/src/mmw/apps/core/templates/head.html b/src/mmw/apps/core/templates/head.html index 60590bd5a..4f0f309ac 100644 --- a/src/mmw/apps/core/templates/head.html +++ b/src/mmw/apps/core/templates/head.html @@ -1,4 +1,4 @@ -{% load staticfiles %} +{% load static %} diff --git a/src/mmw/apps/core/tests.py b/src/mmw/apps/core/tests.py index 7734e6a24..23e1480d1 100644 --- a/src/mmw/apps/core/tests.py +++ b/src/mmw/apps/core/tests.py @@ -1,6 +1,3 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division # Create your tests here. diff --git a/src/mmw/apps/export/hydroshare.py b/src/mmw/apps/export/hydroshare.py index 26b1704ea..b6fa9708f 100644 --- a/src/mmw/apps/export/hydroshare.py +++ b/src/mmw/apps/export/hydroshare.py @@ -1,14 +1,10 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - import json -from cStringIO import StringIO +from io import StringIO from zipfile import ZipFile from rauth import OAuth2Service -from urlparse import urljoin, urlparse +from urllib.parse import urljoin, urlparse from hs_restclient import HydroShare, HydroShareAuthOAuth2, HydroShareNotFound from django.conf import settings @@ -59,7 +55,7 @@ def renew_access_token(self, user_id): if 'error' in res: raise RuntimeError(res['error']) - for key, value in res.iteritems(): + for key, value in res.items(): setattr(token, key, value) token.save() diff --git a/src/mmw/apps/export/migrations/0001_hydroshareresource.py b/src/mmw/apps/export/migrations/0001_hydroshareresource.py index c1290911a..0c9474495 100644 --- a/src/mmw/apps/export/migrations/0001_hydroshareresource.py +++ b/src/mmw/apps/export/migrations/0001_hydroshareresource.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import unicode_literals - from django.db import migrations, models import django.db.models.deletion diff --git a/src/mmw/apps/export/migrations/0002_hydroshare_disable_autosync.py b/src/mmw/apps/export/migrations/0002_hydroshare_disable_autosync.py index aeefd1a89..a8618ddba 100644 --- a/src/mmw/apps/export/migrations/0002_hydroshare_disable_autosync.py +++ b/src/mmw/apps/export/migrations/0002_hydroshare_disable_autosync.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import unicode_literals - from django.db import migrations, models diff --git a/src/mmw/apps/export/models.py b/src/mmw/apps/export/models.py index 32405fe52..a1857e8e3 100644 --- a/src/mmw/apps/export/models.py +++ b/src/mmw/apps/export/models.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - from django.conf import settings from django.contrib.gis.db import models @@ -33,9 +29,9 @@ class HydroShareResource(models.Model): auto_now=True) def _url(self): - return '{}resource/{}'.format(HYDROSHARE_BASE_URL, self.resource) + return f'{HYDROSHARE_BASE_URL}resource/{self.resource}' url = property(_url) def __unicode__(self): - return '{} <{}>'.format(self.title, self.url) + return f'{self.title} <{self.url}>' diff --git a/src/mmw/apps/export/serializers.py b/src/mmw/apps/export/serializers.py index 07d932a4a..e9baf9cdd 100644 --- a/src/mmw/apps/export/serializers.py +++ b/src/mmw/apps/export/serializers.py @@ -1,11 +1,7 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - 
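# The hydroshare.py hunk above swaps Python 2 module paths for their
# Python 3 standard-library homes. A self-contained sketch of the renamed
# imports (the URL below is illustrative; the import paths themselves are
# standard-library facts, not project code):
from io import StringIO                       # was: from cStringIO import StringIO
from urllib.parse import urljoin, urlparse    # was: from urlparse import urljoin, urlparse

buf = StringIO()
buf.write('in-memory text, as cStringIO.StringIO once held\n')

base = 'https://www.hydroshare.org/'
print(urljoin(base, 'resource/abc123'))  # https://www.hydroshare.org/resource/abc123
print(urlparse(base).netloc)             # www.hydroshare.org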
from rest_framework import serializers -from models import HydroShareResource +from apps.export.models import HydroShareResource class HydroShareResourceSerializer(serializers.ModelSerializer): diff --git a/src/mmw/apps/export/tasks.py b/src/mmw/apps/export/tasks.py index 4079d9ffd..edf13224e 100644 --- a/src/mmw/apps/export/tasks.py +++ b/src/mmw/apps/export/tasks.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - import fiona import io import json @@ -17,9 +13,9 @@ from apps.modeling.models import Project, Scenario from apps.modeling.tasks import to_gms_file -from hydroshare import HydroShareService -from models import HydroShareResource -from serializers import HydroShareResourceSerializer +from apps.export.hydroshare import HydroShareService +from apps.export.models import HydroShareResource +from apps.export.serializers import HydroShareResourceSerializer hss = HydroShareService() @@ -86,16 +82,15 @@ def create_resource(user_id, project_id, params): crs = {'no_defs': True, 'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84'} schema = {'geometry': aoi_json['type'], 'properties': {}} - with fiona.open('/tmp/{}.shp'.format(resource), 'w', + with fiona.open(f'/tmp/{resource}.shp', 'w', driver='ESRI Shapefile', crs=crs, schema=schema) as shapefile: shapefile.write({'geometry': aoi_json, 'properties': {}}) for ext in SHAPEFILE_EXTENSIONS: - filename = '/tmp/{}.{}'.format(resource, ext) + filename = f'/tmp/{resource}.{ext}' with open(filename) as shapefile: - hs.addResourceFile(resource, shapefile, - 'area-of-interest.{}'.format(ext)) + hs.addResourceFile(resource, shapefile, f'area-of-interest.{ext}') os.remove(filename) # MapShed BMP Spreadsheet Tool @@ -167,7 +162,7 @@ def padep_worksheet(results): """ payload = [] - for k, v in results.iteritems(): + for k, v in results.items(): huc12_stream_length_km = sum( [c['lengthkm'] for c in v['huc12']['streams']['categories']]) huc12_stream_ag_pct = \ diff --git a/src/mmw/apps/export/urls.py b/src/mmw/apps/export/urls.py index 6ada634d4..af826683b 100644 --- a/src/mmw/apps/export/urls.py +++ b/src/mmw/apps/export/urls.py @@ -1,9 +1,5 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - -from django.conf.urls import url +from django.urls import re_path from apps.modeling.views import get_job from apps.modeling.urls import uuid_regex @@ -12,8 +8,8 @@ app_name = 'export' urlpatterns = [ - url(r'^hydroshare/?$', hydroshare, name='hydroshare'), - url(r'^shapefile/?$', shapefile, name='shapefile'), - url(r'^worksheet/?$', worksheet, name='worksheet'), - url(r'jobs/' + uuid_regex, get_job, name='get_job'), + re_path(r'^hydroshare/?$', hydroshare, name='hydroshare'), + re_path(r'^shapefile/?$', shapefile, name='shapefile'), + re_path(r'^worksheet/?$', worksheet, name='worksheet'), + re_path(r'jobs/' + uuid_regex, get_job, name='get_job'), ] diff --git a/src/mmw/apps/export/views.py b/src/mmw/apps/export/views.py index 48d3fc7a8..449bc23a0 100644 --- a/src/mmw/apps/export/views.py +++ b/src/mmw/apps/export/views.py @@ -1,18 +1,15 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - import BMPxlsx import fiona import glob import json import os import shutil -import StringIO import tempfile import zipfile +from io import BytesIO + from django.conf import settings from 
django.core.exceptions import ObjectDoesNotExist from django.http import HttpResponse @@ -26,10 +23,10 @@ from apps.modeling.serializers import AoiSerializer from apps.geoprocessing_api.views import start_celery_job -from hydroshare import HydroShareService -from models import HydroShareResource -from serializers import HydroShareResourceSerializer -from tasks import create_resource, update_resource, padep_worksheet +from apps.export.hydroshare import HydroShareService +from apps.export.models import HydroShareResource +from apps.export.serializers import HydroShareResourceSerializer +from apps.export.tasks import create_resource, update_resource, padep_worksheet hss = HydroShareService() HYDROSHARE_BASE_URL = settings.HYDROSHARE['base_url'] @@ -141,16 +138,16 @@ def shapefile(request): try: # Write shapefiles - with fiona.open('{}/area-of-interest.shp'.format(tempdir), 'w', + with fiona.open(f'{tempdir}/area-of-interest.shp', 'w', driver='ESRI Shapefile', crs=crs, schema=schema) as sf: sf.write({'geometry': aoi_json, 'properties': {}}) - shapefiles = ['{}/area-of-interest.{}'.format(tempdir, ext) + shapefiles = [f'{tempdir}/area-of-interest.{ext}' for ext in SHAPEFILE_EXTENSIONS] # Create a zip file in memory from all the shapefiles - stream = StringIO.StringIO() + stream = BytesIO() with zipfile.ZipFile(stream, 'w') as zf: for fpath in shapefiles: _, fname = os.path.split(fpath) @@ -163,8 +160,7 @@ def shapefile(request): # Return the zip file from memory with appropriate headers resp = HttpResponse(stream.getvalue(), content_type='application/zip') - resp['Content-Disposition'] = 'attachment; '\ - 'filename="{}.zip"'.format(filename) + resp['Content-Disposition'] = f'attachment; filename="{filename}.zip"' return resp @@ -182,7 +178,7 @@ def worksheet(request): try: for item in items: - worksheet_path = '{}/{}.xlsx'.format(tempdir, item['name']) + worksheet_path = f'{tempdir}/{item["name"]}.xlsx' # Copy the Excel template shutil.copyfile(EXCEL_TEMPLATE, worksheet_path) @@ -194,16 +190,15 @@ def worksheet(request): # If geojson specified, write it to file if 'geojson' in item: - geojson_path = '{}/{}__Urban_Area.geojson'.format(tempdir, - item['name']) + geojson_path = f'{tempdir}/{item["name"]}__Urban_Area.geojson' with open(geojson_path, 'w') as geojson_file: json.dump(item['geojson'], geojson_file) - files = glob.glob('{}/*.*'.format(tempdir)) + files = glob.glob(f'{tempdir}/*.*') # Create a zip file in memory for all the files - stream = StringIO.StringIO() + stream = BytesIO() with zipfile.ZipFile(stream, 'w') as zf: for fpath in files: _, fname = os.path.split(fpath) diff --git a/src/mmw/apps/geocode/tests.py b/src/mmw/apps/geocode/tests.py index 98e133d26..c0a6928bc 100644 --- a/src/mmw/apps/geocode/tests.py +++ b/src/mmw/apps/geocode/tests.py @@ -1,10 +1,5 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - -# NOTE Change to from urllib.parse import urlencode for Python 3 -from urllib import urlencode +from urllib.parse import urlencode from django.test import TestCase, Client from django.urls import reverse @@ -15,7 +10,7 @@ class GeocodeTestCase(TestCase): def assert_candidate_exists_for(self, address): c = Client() - url = '{}?{}'.format(self.SEARCH_URL, urlencode({'search': address})) + url = f'{self.SEARCH_URL}?{urlencode({"search": address})}' response = c.get(url).json() self.assertTrue(len(response) > 0, 'Expected ' diff --git a/src/mmw/apps/geocode/urls.py 
b/src/mmw/apps/geocode/urls.py index 9c4a126ac..553ea08f8 100644 --- a/src/mmw/apps/geocode/urls.py +++ b/src/mmw/apps/geocode/urls.py @@ -1,13 +1,9 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - -from django.conf.urls import url +from django.urls import re_path from apps.geocode import views app_name = 'geocode' urlpatterns = [ - url(r'^$', views.geocode, name='geocode'), + re_path(r'^$', views.geocode, name='geocode'), ] diff --git a/src/mmw/apps/geocode/views.py b/src/mmw/apps/geocode/views.py index 4b7d705d8..5f4030473 100644 --- a/src/mmw/apps/geocode/views.py +++ b/src/mmw/apps/geocode/views.py @@ -1,7 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals - from django.conf import settings from rest_framework.response import Response diff --git a/src/mmw/apps/geoprocessing_api/calcs.py b/src/mmw/apps/geoprocessing_api/calcs.py index 79d97e2a6..ca4003718 100644 --- a/src/mmw/apps/geoprocessing_api/calcs.py +++ b/src/mmw/apps/geoprocessing_api/calcs.py @@ -1,8 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import absolute_import - import json import requests from operator import itemgetter @@ -39,7 +35,7 @@ def animal_population(geojson): aeu_for_geom = animal_energy_units(geom)[2] aeu_return_values = [] - for animal, aeu_value in aeu_for_geom.iteritems(): + for animal, aeu_value in aeu_for_geom.items(): aeu_return_values.append({ 'type': ANIMAL_DISPLAY_NAMES[animal], 'aeu': int(aeu_value), @@ -52,9 +48,9 @@ def animal_population(geojson): } -def stream_data(results, geojson): +def stream_data(results, geojson, datasource='nhdhr'): """ - Given a GeoJSON shape, retreive stream data from the `nhdflowline` table + Given a GeoJSON shape, retrieve stream data from the specified table to display in the Analyze tab Returns a dictionary to append to outgoing JSON for analysis results.
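# The next hunk replaces the hardcoded nhdflowline table with a lookup
# into settings.STREAM_TABLES. That mapping lives elsewhere in the
# project's settings, so everything below is an illustrative stand-in:
# 'nhdhr' is the default key the new signatures use, while 'nhdflowline'
# and 'drb_streams_50' are the table names the replaced code queried;
# the other keys and the high-resolution table name are hypothetical.
STREAM_TABLES = {
    'nhdhr': 'nhdflowlinehr',   # hypothetical high-resolution table
    'nhd': 'nhdflowline',
    'drb': 'drb_streams_50',
}


def stream_table(datasource: str) -> str:
    # Mirrors the guard added below: reject unknown keys before they
    # are interpolated into SQL.
    if datasource not in STREAM_TABLES:
        raise Exception(f'Invalid stream datasource {datasource}')
    return STREAM_TABLES[datasource]


print(stream_table('drb'))  # drb_streams_50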
@@ -62,17 +58,31 @@ def stream_data(results, geojson): NULL_SLOPE = -9998.0 - sql = ''' - SELECT sum(lengthkm) as lengthkm, + if datasource not in settings.STREAM_TABLES: + raise Exception(f'Invalid stream datasource {datasource}') + + sql = f''' + WITH stream_intersection AS ( + SELECT ST_Length(ST_Transform( + ST_Intersection(geom, + ST_SetSRID(ST_GeomFromGeoJSON(%s), + 4326)), + 5070)) AS lengthm, + stream_order, + slope + FROM {settings.STREAM_TABLES[datasource]} + WHERE ST_Intersects(geom, + ST_SetSRID(ST_GeomFromGeoJSON(%s), 4326))) + + SELECT SUM(lengthm) / 1000 AS lengthkm, stream_order, - sum(lengthkm * NULLIF(slope, {NULL_SLOPE})) as slopesum - FROM nhdflowline - WHERE ST_Intersects(geom, ST_SetSRID(ST_GeomFromGeoJSON(%s), 4326)) + SUM(lengthm * NULLIF(slope, {NULL_SLOPE})) / 1000 AS slopesum + FROM stream_intersection GROUP BY stream_order; - '''.format(NULL_SLOPE=NULL_SLOPE) + ''' with connection.cursor() as cursor: - cursor.execute(sql, [geojson]) + cursor.execute(sql, [geojson, geojson]) if cursor.rowcount: columns = [col[0] for col in cursor.description] @@ -115,7 +125,7 @@ def calculate_avg_slope(slope, length): return { 'displayName': 'Streams', - 'name': 'streams', + 'name': f'streams_{datasource}', 'categories': sorted(list(stream_data.values()), key=itemgetter('order')), } @@ -133,13 +143,12 @@ def point_source_pollution(geojson): geom = GEOSGeometry(geojson, srid=4326) drb = geom.within(DRB) table_name = get_point_source_table(drb) - sql = ''' + sql = f''' SELECT city, state, npdes_id, mgd, kgn_yr, kgp_yr, latitude, - longitude, {facilityname} + longitude, {'facilityname' if drb else 'null'} FROM {table_name} WHERE ST_Intersects(geom, ST_SetSRID(ST_GeomFromText(%s), 4326)) - '''.format(facilityname='facilityname' if drb else 'null', - table_name=table_name) + ''' with connection.cursor() as cursor: cursor.execute(sql, [geom.wkt]) @@ -178,7 +187,7 @@ def catchment_water_quality(geojson): """ geom = GEOSGeometry(geojson, srid=4326) table_name = 'drb_catchment_water_quality' - sql = ''' + sql = f''' SELECT nord, areaha, tn_tot_kgy, tp_tot_kgy, tss_tot_kg, tn_urban_k, tn_riparia, tn_ag_kgyr, tn_natural, tn_pt_kgyr, tp_urban_k, tp_riparia, tp_ag_kgyr, tp_natural, tp_pt_kgyr, @@ -187,7 +196,7 @@ def catchment_water_quality(geojson): ST_AsGeoJSON(ST_Simplify(geom, 0.0003)) as geom FROM {table_name} WHERE ST_Intersects(geom, ST_SetSRID(ST_GeomFromText(%s), 4326)) - '''.format(table_name=table_name) + ''' with connection.cursor() as cursor: cursor.execute(sql, [geom.wkt]) @@ -318,16 +327,20 @@ def huc12s_with_aois(geojson): return matches -def streams_for_huc12s(huc12s, drb=False): +def streams_for_huc12s(huc12s, datasource='nhdhr'): """ Get MultiLineString of all streams in the given HUC-12s """ - sql = ''' - SELECT ST_AsGeoJSON(ST_Collect(ST_Force2D(s.geom))) - FROM {datasource} s INNER JOIN boundary_huc12 b + if datasource not in settings.STREAM_TABLES: + raise Exception(f'Invalid stream datasource {datasource}') + + sql = f''' + SELECT ST_AsGeoJSON(ST_Multi(s.geom)) + FROM {settings.STREAM_TABLES[datasource]} s + INNER JOIN boundary_huc12 b ON ST_Intersects(s.geom, b.geom_detailed) WHERE b.huc12 IN %s - '''.format(datasource='drb_streams_50' if drb else 'nhdflowline') + ''' with connection.cursor() as cursor: cursor.execute(sql, [tuple(huc12s)]) @@ -344,6 +357,6 @@ def drexel_fast_zonal(geojson, key): res.raise_for_status() # Select results for the given key - result = {int(k): v for k, v in res.json()[key].iteritems()} + result = {int(k): v for k, v in 
res.json()[key].items()} return result diff --git a/src/mmw/apps/geoprocessing_api/permissions.py b/src/mmw/apps/geoprocessing_api/permissions.py index aafbd768a..111b014ee 100644 --- a/src/mmw/apps/geoprocessing_api/permissions.py +++ b/src/mmw/apps/geoprocessing_api/permissions.py @@ -1,7 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals - from rest_framework import authentication from rest_framework.authtoken.serializers import AuthTokenSerializer diff --git a/src/mmw/apps/geoprocessing_api/schemas.py b/src/mmw/apps/geoprocessing_api/schemas.py index c04bb597d..993e9eef8 100644 --- a/src/mmw/apps/geoprocessing_api/schemas.py +++ b/src/mmw/apps/geoprocessing_api/schemas.py @@ -1,7 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals - from drf_yasg.openapi import ( Parameter, Schema, IN_PATH, IN_QUERY, @@ -11,6 +8,35 @@ from django.conf import settings +STREAM_DATASOURCE = Parameter( + 'datasource', + IN_PATH, + description='The stream datasource to query.' + ' Must be one of: "{}"'.format( + '", "'.join(settings.STREAM_TABLES.keys())), + type=TYPE_STRING, + required=True, +) + +nlcd_year_allowed_values = [ + '2019_2019', + '2019_2016', + '2019_2011', + '2019_2006', + '2019_2001', + '2011_2011', +] +NLCD_YEAR = Parameter( + 'nlcd_year', + IN_PATH, + description='The NLCD product version and target year to query.' + ' Must be one of: "{}"'.format( + '", "'.join(nlcd_year_allowed_values) + ), + type=TYPE_STRING, + required=True, +) + DRB_2100_LAND_KEY = Parameter( 'key', IN_PATH, diff --git a/src/mmw/apps/geoprocessing_api/tasks.py b/src/mmw/apps/geoprocessing_api/tasks.py index 3c94ea557..eda50be03 100644 --- a/src/mmw/apps/geoprocessing_api/tasks.py +++ b/src/mmw/apps/geoprocessing_api/tasks.py @@ -1,11 +1,7 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import absolute_import - import os import logging -import urllib +from urllib.parse import urlencode from ast import literal_eval as make_tuple from calendar import month_name @@ -74,7 +70,7 @@ def start_rwd_job(location, snapping, simplify, data_source): if simplify is not False: params['simplify'] = simplify - query_string = urllib.urlencode(params) + query_string = urlencode(params) if query_string: rwd_url += ('?%s' % query_string) @@ -89,12 +85,12 @@ def start_rwd_job(location, snapping, simplify, data_source): @shared_task -def analyze_streams(results, area_of_interest): +def analyze_streams(results, area_of_interest, datasource='nhdhr'): """ Given geoprocessing results with stream data and an area of interest, returns the streams and stream order within it. 
""" - return {'survey': stream_data(results, area_of_interest)} + return {'survey': stream_data(results, area_of_interest, datasource)} @shared_task @@ -123,9 +119,9 @@ def analyze_catchment_water_quality(area_of_interest): @shared_task(throws=Exception) -def analyze_nlcd(result, area_of_interest=None): +def analyze_nlcd(result, area_of_interest=None, nlcd_year='2011_2011'): if 'error' in result: - raise Exception('[analyze_nlcd] {}'.format(result['error'])) + raise Exception(f'[analyze_nlcd_{nlcd_year}] {result["error"]}') pixel_width = aoi_resolution(area_of_interest) if area_of_interest else 1 @@ -139,7 +135,7 @@ def area(dictionary, key, default=0): return dictionary.get(key, default) * pixel_width * pixel_width # Convert results to histogram, calculate total - for key, count in result.iteritems(): + for key, count in result.items(): nlcd, ara = key total_count += count total_ara += count if ara == 1 else 0 @@ -147,7 +143,7 @@ def area(dictionary, key, default=0): has_ara = total_ara > 0 - for nlcd, (code, name) in layer_classmaps.NLCD.iteritems(): + for nlcd, (code, name) in layer_classmaps.NLCD.items(): categories.append({ 'area': area(histogram, nlcd), 'active_river_area': area(result, (nlcd, 1)) if has_ara else None, @@ -159,8 +155,9 @@ def area(dictionary, key, default=0): return { 'survey': { - 'name': 'land', - 'displayName': 'Land', + 'name': f'land_{nlcd_year}', + 'displayName': + f'Land Use/Cover {nlcd_year[5:]} (NLCD{nlcd_year[2:4]})', 'categories': categories, } } @@ -169,7 +166,7 @@ def area(dictionary, key, default=0): @shared_task(throws=Exception) def analyze_soil(result, area_of_interest=None): if 'error' in result: - raise Exception('[analyze_soil] {}'.format(result['error'])) + raise Exception(f'[analyze_soil] {result["error"]}') pixel_width = aoi_resolution(area_of_interest) if area_of_interest else 1 @@ -178,13 +175,13 @@ def analyze_soil(result, area_of_interest=None): categories = [] # Convert results to histogram, calculate total - for key, count in result.iteritems(): + for key, count in result.items(): total_count += count s = make_tuple(key[4:]) # Change {"List(1)":5} to {1:5} s = s if s != settings.NODATA else 3 # Map NODATA to 3 histogram[s] = count + histogram.get(s, 0) - for soil, (code, name) in layer_classmaps.SOIL.iteritems(): + for soil, (code, name) in layer_classmaps.SOIL.items(): categories.append({ 'area': histogram.get(soil, 0) * pixel_width * pixel_width, 'code': code, @@ -217,7 +214,7 @@ def analyze_climate(result, wkaoi): used for sorting purposes on the client side. """ if 'error' in result: - raise Exception('[analyze_climate] {}'.format(result['error'])) + raise Exception(f'[analyze_climate] {result["error"]}') ppt = {k[5:]: v['List(0)'] for k, v in result[wkaoi].items() if 'ppt' in k} @@ -229,7 +226,7 @@ def analyze_climate(result, wkaoi): 'month': month_name[i], 'ppt': ppt[str(i)] * CM_PER_MM, 'tmean': tmean[str(i)], - } for i in xrange(1, 13)] + } for i in range(1, 13)] return { 'survey': { @@ -282,7 +279,7 @@ def analyze_terrain(result): which has Elevation in m and keeps Slope in %. 
""" if 'error' in result: - raise Exception('[analyze_terrain] {}'.format(result['error'])) + raise Exception(f'[analyze_terrain] {result["error"]}') [elevation, slope] = result @@ -313,7 +310,7 @@ def cm_to_m(x): @shared_task def analyze_protected_lands(result, area_of_interest=None): if 'error' in result: - raise Exception('[analyze_protected_lands] {}'.format(result['error'])) + raise Exception(f'[analyze_protected_lands] {result["error"]}') pixel_width = aoi_resolution(area_of_interest) if area_of_interest else 1 @@ -322,11 +319,11 @@ def analyze_protected_lands(result, area_of_interest=None): total_count = 0 categories = [] - for key, count in result.iteritems(): + for key, count in result.items(): total_count += count histogram[key] = count + histogram.get(key, 0) - for class_id, (code, name) in layer_classmaps.PROTECTED_LANDS.iteritems(): + for class_id, (code, name) in layer_classmaps.PROTECTED_LANDS.items(): categories.append({ 'area': histogram.get(class_id, 0) * pixel_width * pixel_width, 'class_id': class_id, @@ -351,11 +348,11 @@ def analyze_drb_2100_land(area_of_interest, key): total_count = 0 categories = [] - for nlcd, count in result.iteritems(): + for nlcd, count in result.items(): total_count += count histogram[nlcd] = count + histogram.get(nlcd, 0) - for nlcd, (code, name) in layer_classmaps.NLCD.iteritems(): + for nlcd, (code, name) in layer_classmaps.NLCD.items(): categories.append({ 'area': histogram.get(nlcd, 0), 'code': code, @@ -366,8 +363,8 @@ def analyze_drb_2100_land(area_of_interest, key): return { 'survey': { - 'name': 'drb_2100_land_{}'.format(key), - 'displayName': 'DRB 2100 land forecast ({})'.format(key), + 'name': f'drb_2100_land_{key}', + 'displayName': f'DRB 2100 land forecast ({key})', 'categories': categories, } } @@ -384,7 +381,7 @@ def collect_nlcd(histogram, geojson=None): 'code': code, 'nlcd': nlcd, 'type': name, - } for nlcd, (code, name) in layer_classmaps.NLCD.iteritems()] + } for nlcd, (code, name) in layer_classmaps.NLCD.items()] return {'categories': categories} @@ -398,8 +395,7 @@ def collect_worksheet_aois(result, shapes): their processed results. """ if 'error' in result: - raise Exception('[collect_worksheet_aois] {}' - .format(result['error'])) + raise Exception(f'[collect_worksheet_aois] {result["error"]}') NULL_RESULT = {'nlcd_streams': {}, 'nlcd': {}} collection = {} @@ -423,8 +419,7 @@ def collect_worksheet_wkaois(result, shapes): modeled results, and also the processed NLCD and NLCD+Streams. """ if 'error' in result: - raise Exception('[collect_worksheet_wkaois] {}' - .format(result['error'])) + raise Exception(f'[collect_worksheet_wkaois] {result["error"]}') collection = {} @@ -461,12 +456,12 @@ def collect_worksheet(area_of_interest): worksheet containing these values, which can be used for further modeling. 
""" def to_aoi_id(m): - return '{}-{}'.format(NOCACHE, m['wkaoi']) + return f'{NOCACHE}-{m["wkaoi"]}' matches = huc12s_with_aois(area_of_interest) huc12_ids = [m['huc12'] for m in matches] - streams = streams_for_huc12s(huc12_ids)[0] + streams = streams_for_huc12s(huc12_ids) aoi_shapes = [{ 'id': to_aoi_id(m), @@ -487,7 +482,7 @@ def to_aoi_id(m): collection = {} for m in matches: - filename = '{}__{}'.format(m['huc12'], m['name'].replace(' ', '_')) + filename = f'{m["huc12"]}__{m["name"].replace(" ", "_")}' collection[filename] = { 'name': m['name'], 'aoi': aoi_results.get(to_aoi_id(m), {}), diff --git a/src/mmw/apps/geoprocessing_api/tests.py b/src/mmw/apps/geoprocessing_api/tests.py index c311bc4c4..4f46d677c 100644 --- a/src/mmw/apps/geoprocessing_api/tests.py +++ b/src/mmw/apps/geoprocessing_api/tests.py @@ -1,10 +1,8 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - import json +from unittest import skip + from django.test import (Client, TestCase, LiveServerTestCase) @@ -157,38 +155,10 @@ def test_survey_land_only(self): expected = { "survey": { - "displayName": "Land", - "name": "land", + "displayName": "Land Use/Cover 2011 (NLCD11)", + "name": "land_2011_2011", "categories": [ { - "code": "mixed_forest", - "active_river_area": None, - "area": 35, - "nlcd": 43, - "coverage": 4.2627666817284424e-05, - "type": "Mixed Forest" - }, { - "code": "grassland", - "active_river_area": None, - "area": 3228, - "nlcd": 71, - "coverage": 0.00393148881389126, - "type": "Grassland/Herbaceous" - }, { - "code": "deciduous_forest", - "active_river_area": None, - "area": 0, - "nlcd": 41, - "coverage": 0.0, - "type": "Deciduous Forest" - }, { - "code": "evergreen_forest", - "active_river_area": None, - "area": 5758, - "nlcd": 42, - "coverage": 0.007012860158112106, - "type": "Evergreen Forest" - }, { "code": "open_water", "active_river_area": None, "area": 279, @@ -202,27 +172,6 @@ def test_survey_land_only(self): "nlcd": 12, "coverage": 0.0, "type": "Perennial Ice/Snow" - }, { - "code": "pasture", - "active_river_area": None, - "area": 57, - "nlcd": 81, - "coverage": 6.942220024529177e-05, - "type": "Pasture/Hay" - }, { - "code": "cultivated_crops", - "active_river_area": None, - "area": 682, - "nlcd": 82, - "coverage": 0.0008306305362682279, - "type": "Cultivated Crops" - }, { - "code": "shrub", - "active_river_area": None, - "area": 499636, - "nlcd": 52, - "coverage": 0.6085233410834492, - "type": "Shrub/Scrub" }, { "code": "developed_open", "active_river_area": None, @@ -251,6 +200,62 @@ def test_survey_land_only(self): "nlcd": 24, "coverage": 0.025234360822494743, "type": "Developed, High Intensity" + }, { + "code": "barren_land", + "active_river_area": None, + "area": 25, + "nlcd": 31, + "coverage": 3.0448333440917446e-05, + "type": "Barren Land (Rock/Sand/Clay)" + }, { + "code": "deciduous_forest", + "active_river_area": None, + "area": 0, + "nlcd": 41, + "coverage": 0.0, + "type": "Deciduous Forest" + }, { + "code": "evergreen_forest", + "active_river_area": None, + "area": 5758, + "nlcd": 42, + "coverage": 0.007012860158112106, + "type": "Evergreen Forest" + }, { + "code": "mixed_forest", + "active_river_area": None, + "area": 35, + "nlcd": 43, + "coverage": 4.2627666817284424e-05, + "type": "Mixed Forest" + }, { + "code": "shrub", + "active_river_area": None, + "area": 499636, + "nlcd": 52, + "coverage": 0.6085233410834492, + "type": "Shrub/Scrub" + }, { + "code": "grassland", + "active_river_area": 
None, + "area": 3228, + "nlcd": 71, + "coverage": 0.00393148881389126, + "type": "Grassland/Herbaceous" + }, { + "code": "pasture", + "active_river_area": None, + "area": 57, + "nlcd": 81, + "coverage": 6.942220024529177e-05, + "type": "Pasture/Hay" + }, { + "code": "cultivated_crops", + "active_river_area": None, + "area": 682, + "nlcd": 82, + "coverage": 0.0008306305362682279, + "type": "Cultivated Crops" }, { "code": "woody_wetlands", "active_river_area": None, @@ -265,19 +270,12 @@ def test_survey_land_only(self): "nlcd": 95, "coverage": 0.00019365140068423496, "type": "Emergent Herbaceous Wetlands" - }, { - "code": "barren_land", - "active_river_area": None, - "area": 25, - "nlcd": 31, - "coverage": 3.0448333440917446e-05, - "type": "Barren Land (Rock/Sand/Clay)" } ] } } - actual = tasks.analyze_nlcd(histogram) + actual = tasks.analyze_nlcd(histogram, nlcd_year='2011_2011') self.assertEqual(actual, expected) def test_survey_land_with_ara(self): @@ -318,38 +316,6 @@ def test_survey_land_with_ara(self): expected = { "survey": { "categories": [ - { - "active_river_area": 117, - "area": 329, - "code": "mixed_forest", - "coverage": 0.002653825057270997, - "nlcd": 43, - "type": "Mixed Forest" - }, - { - "active_river_area": 260, - "area": 684, - "code": "grassland", - "coverage": 0.005517374891104443, - "nlcd": 71, - "type": "Grassland/Herbaceous" - }, - { - "active_river_area": 7254, - "area": 19218, - "code": "deciduous_forest", - "coverage": 0.1550188752298906, - "nlcd": 41, - "type": "Deciduous Forest" - }, - { - "active_river_area": 15, - "area": 153, - "code": "evergreen_forest", - "coverage": 0.001234149646694415, - "nlcd": 42, - "type": "Evergreen Forest" - }, { "active_river_area": 34, "area": 39, @@ -366,30 +332,6 @@ def test_survey_land_with_ara(self): "nlcd": 12, "type": "Perennial Ice/Snow" }, - { - "active_river_area": 2108, - "area": 8922, - "code": "pasture", - "coverage": 0.07196786371116058, - "nlcd": 81, - "type": "Pasture/Hay" - }, - { - "active_river_area": 1632, - "area": 6345, - "code": "cultivated_crops", - "coverage": 0.051180911818797796, - "nlcd": 82, - "type": "Cultivated Crops" - }, - { - "active_river_area": 963, - "area": 3309, - "code": "shrub", - "coverage": 0.026691510986351755, - "nlcd": 52, - "type": "Shrub/Scrub" - }, { "active_river_area": 9330, "area": 40558, @@ -422,6 +364,70 @@ def test_survey_land_with_ara(self): "nlcd": 24, "type": "Developed, High Intensity" }, + { + "active_river_area": 132, + "area": 364, + "code": "barren_land", + "coverage": 0.0029361468718742943, + "nlcd": 31, + "type": "Barren Land (Rock/Sand/Clay)" + }, + { + "active_river_area": 7254, + "area": 19218, + "code": "deciduous_forest", + "coverage": 0.1550188752298906, + "nlcd": 41, + "type": "Deciduous Forest" + }, + { + "active_river_area": 15, + "area": 153, + "code": "evergreen_forest", + "coverage": 0.001234149646694415, + "nlcd": 42, + "type": "Evergreen Forest" + }, + { + "active_river_area": 117, + "area": 329, + "code": "mixed_forest", + "coverage": 0.002653825057270997, + "nlcd": 43, + "type": "Mixed Forest" + }, + { + "active_river_area": 963, + "area": 3309, + "code": "shrub", + "coverage": 0.026691510986351755, + "nlcd": 52, + "type": "Shrub/Scrub" + }, + { + "active_river_area": 260, + "area": 684, + "code": "grassland", + "coverage": 0.005517374891104443, + "nlcd": 71, + "type": "Grassland/Herbaceous" + }, + { + "active_river_area": 2108, + "area": 8922, + "code": "pasture", + "coverage": 0.07196786371116058, + "nlcd": 81, + "type": "Pasture/Hay" + }, + { + 
"active_river_area": 1632, + "area": 6345, + "code": "cultivated_crops", + "coverage": 0.051180911818797796, + "nlcd": 82, + "type": "Cultivated Crops" + }, { "active_river_area": 3756, "area": 3940, @@ -438,21 +444,13 @@ def test_survey_land_with_ara(self): "nlcd": 95, "type": "Emergent Herbaceous Wetlands" }, - { - "active_river_area": 132, - "area": 364, - "code": "barren_land", - "coverage": 0.0029361468718742943, - "nlcd": 31, - "type": "Barren Land (Rock/Sand/Clay)" - } ], - "displayName": "Land", - "name": "land" + "displayName": "Land Use/Cover 2011 (NLCD11)", + "name": "land_2011_2011" } } - actual = tasks.analyze_nlcd(histogram) + actual = tasks.analyze_nlcd(histogram, nlcd_year='2011_2011') self.assertEqual(actual, expected) def test_survey_soil(self): @@ -757,6 +755,7 @@ def test_sq_km_aoi(self): self.assertTrue(calcs.catchment_intersects_aoi(reprojected_aoi, contained_catchment)) + @skip('Disabling until Django Upgrade #3419') def test_hundred_sq_km_aoi(self): aoi = GEOSGeometry(json.dumps({ "type": "Polygon", @@ -878,6 +877,7 @@ def test_hundred_sq_km_aoi(self): self.assertTrue(calcs.catchment_intersects_aoi(reprojected_aoi, contained_catchment)) + @skip('Disabling until Django Upgrade #3419') def test_thousand_sq_km_aoi(self): aoi = GEOSGeometry(json.dumps({ "type": "Polygon", @@ -999,6 +999,7 @@ def test_thousand_sq_km_aoi(self): self.assertTrue(calcs.catchment_intersects_aoi(reprojected_aoi, contained_catchment)) + @skip('Disabling until Django Upgrade #3419') def test_ten_thousand_sq_km_aoi(self): aoi = GEOSGeometry(json.dumps({ "type": "Polygon", diff --git a/src/mmw/apps/geoprocessing_api/throttling.py b/src/mmw/apps/geoprocessing_api/throttling.py index e8015cb5b..6b3c30752 100644 --- a/src/mmw/apps/geoprocessing_api/throttling.py +++ b/src/mmw/apps/geoprocessing_api/throttling.py @@ -1,7 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals - from django.conf import settings from rest_framework.throttling import UserRateThrottle diff --git a/src/mmw/apps/geoprocessing_api/urls.py b/src/mmw/apps/geoprocessing_api/urls.py index 4ede72462..1fc7598f2 100644 --- a/src/mmw/apps/geoprocessing_api/urls.py +++ b/src/mmw/apps/geoprocessing_api/urls.py @@ -1,9 +1,5 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division - -from django.conf.urls import url +from django.urls import re_path from apps.modeling.views import get_job from apps.modeling.urls import uuid_regex @@ -12,32 +8,33 @@ app_name = 'geoprocessing_api' urlpatterns = [ - url(r'^token/', views.get_auth_token, - name="authtoken"), - url(r'analyze/land/$', views.start_analyze_land, - name='start_analyze_land'), - url(r'analyze/soil/$', views.start_analyze_soil, - name='start_analyze_soil'), - url(r'analyze/animals/$', views.start_analyze_animals, - name='start_analyze_animals'), - url(r'analyze/pointsource/$', views.start_analyze_pointsource, - name='start_analyze_pointsource'), - url(r'analyze/catchment-water-quality/$', - views.start_analyze_catchment_water_quality, - name='start_analyze_catchment_water_quality'), - url(r'analyze/climate/$', views.start_analyze_climate, - name='start_analyze_climate'), - url(r'analyze/streams/$', views.start_analyze_streams, - name='start_analyze_streams'), - url(r'analyze/terrain/$', views.start_analyze_terrain, - name='start_analyze_terrain'), - url(r'analyze/protected-lands/$', views.start_analyze_protected_lands, - 
name='start_analyze_protected_lands'), - url(r'analyze/drb-2100-land/(?P