diff --git a/public-api/db-cdr/build.gradle b/public-api/db-cdr/build.gradle
index bc04c659b..6e957a248 100644
--- a/public-api/db-cdr/build.gradle
+++ b/public-api/db-cdr/build.gradle
@@ -10,7 +10,7 @@ buildscript {
 apply plugin: 'org.liquibase.gradle'
 apply plugin: 'application'
 
-def db_host = System.getenv("DB_HOST") ?: "db"
+def db_host = System.getenv("DB_HOST") ?: "127.0.0.1"
 def db_port = System.getenv("DB_PORT") ?: "3306"
 def db_name = System.getenv("CDR_DB_NAME") ?: "cdr"
 def liquibase_password = System.getenv("LIQUIBASE_DB_PASSWORD") ?: "lb-notasecret"
@@ -53,4 +53,4 @@ liquibase {
     }
     runList = project.ext.runList
   }
-}
+}
\ No newline at end of file
diff --git a/public-api/db/build.gradle b/public-api/db/build.gradle
index e6cf594a0..7092cd82b 100644
--- a/public-api/db/build.gradle
+++ b/public-api/db/build.gradle
@@ -1,23 +1,18 @@
-plugins {
-  id 'org.liquibase.gradle' version '2.1.0'
-}
-
-repositories {
-  mavenCentral()
+buildscript {
+  repositories {
+    mavenCentral()
+  }
+  dependencies {
+    classpath 'org.liquibase:liquibase-gradle-plugin:1.2.4'
+    classpath 'mysql:mysql-connector-java:5.1.37'
+  }
 }
+apply plugin: 'org.liquibase.gradle'
 
-def db_host = System.getenv("DB_HOST") ?: "db"
+def db_host = System.getenv("DB_HOST") ?: "127.0.0.1"
 def db_port = System.getenv("DB_PORT") ?: "3306"
 def liquibase_password = System.getenv("LIQUIBASE_DB_PASSWORD") ?: "lb-notasecret"
 
-dependencies {
-  liquibaseRuntime 'org.liquibase:liquibase-core:3.10.0'
-  liquibaseRuntime 'org.liquibase:liquibase-groovy-dsl:3.0.0'
-  liquibaseRuntime 'info.picocli:picocli:4.6.1'
-  liquibaseRuntime 'mysql:mysql-connector-java:5.1.34'
-  liquibaseRuntime group: 'javax.xml.bind', name: 'jaxb-api', version: '2.3.1'
-}
-
 liquibase {
   activities {
     main {
@@ -28,4 +23,4 @@ liquibase {
     }
     runList = project.ext.runList
   }
-}
+}
\ No newline at end of file
diff --git a/public-api/docker-compose.yaml b/public-api/docker-compose.yaml
index 664245960..936d8f5ed 100644
--- a/public-api/docker-compose.yaml
+++ b/public-api/docker-compose.yaml
@@ -1,34 +1,6 @@
-
 version: "3.4"
 
-# x- indicates an "extension", so docker-compose will ignore the attribute. The
-# name is not important, it's just the necessary syntax to create a YAML anchor
-# for reuse across the below services.
-x-api-defaults: &api-defaults
-  # This image label exists for documentation purposes only, e.g. to identify
-  # it via `docker images`. We do not actually push this image up to DockerHub.
-  # Since docker-compose should never find this image remotely, it will rebuild
-  # it from the provided build context or use a cached local version.
-  # When making changes to this image, you can modify this tag to force all devs
-  # to rebuild.
-  image: allofustest/workbench-dev-api:local-3
-  build:
-    context: ./src/dev/server
-  user: ${UID}
-  working_dir: /w/public-api
-  environment:
-    - GOOGLE_APPLICATION_CREDENTIALS=/w/public-api/sa-key.json
-  env_file:
-    - db/vars.env
-  volumes:
-    - db-sync:/w:nocopy
-    - gradle-cache:/.gradle
-    - ~/.config:/.config:cached
-    - ~/.gsutil:/.gsutil:cached
-
 services:
-  scripts:
-    <<: *api-defaults
   db:
     image: mysql:5.7
     platform: linux/amd64
@@ -39,91 +11,5 @@ services:
     ports:
       - 127.0.0.1:3306:3306
 
-  public-api:
-    <<: *api-defaults
-    command: ./gradlew :appengineRun
-    ports:
-      - 127.0.0.1:8083:8083
-      - 127.0.0.1:8084:8002
-
-  db-scripts:
-    <<: *api-defaults
-    depends_on:
-      - db
-    working_dir: /w/public-api/db
-    entrypoint: [ 'with-uid.sh', 'wait-for', 'db:3306', -- ]
-
-  api-scripts:
-    <<: *api-defaults
-    depends_on:
-      - db
-
-  cdr-scripts:
-    <<: *api-defaults
-    depends_on:
-      - db
-    working_dir: /w/public-api/db-cdr
-    entrypoint: [ 'with-uid.sh', 'wait-for', 'db:3306', -- ]
-
-  db-generate-public-cdr-counts:
-    <<: *api-defaults
-    depends_on:
-      - db
-    working_dir: /w/public-api/db-cdr
-    entrypoint: ["./generate-cdr/generate-public-cdr-counts.sh"]
-    env_file:
-      - db-cdr/vars.env
-
-  db-generate-cloudsql-db:
-    <<: *api-defaults
-    depends_on:
-      - db
-    working_dir: /w/public-api/db-cdr
-    entrypoint: ["./generate-cdr/generate-cloudsql-db.sh"]
-    env_file:
-      - db-cdr/vars.env
-
-  db-generate-local-cdr-db:
-    <<: *api-defaults
-    depends_on:
-      - db
-    working_dir: /w/public-api/db-cdr
-
-    entrypoint: ["./generate-cdr/generate-local-cdr-db.sh"]
-    env_file:
-      - db-cdr/vars.env
-
-  db-generate-local-count-dbs:
-    <<: *api-defaults
-    depends_on:
-      - db
-    working_dir: /w/public-api/db-cdr
-    entrypoint: ["./generate-cdr/generate-local-count-dbs.sh"]
-    env_file:
-      - db-cdr/vars.env
-
-  db-mysqldump-local-db:
-    <<: *api-defaults
-    depends_on:
-      - db
-    working_dir: /w/public-api/db-cdr
-    entrypoint: ["./generate-cdr/make-mysqldump.sh"]
-    env_file:
-      - db-cdr/vars.env
-
-  db-cloudsql-import:
-    <<: *api-defaults
-    working_dir: /w/public-api/db-cdr
-    entrypoint: ["./generate-cdr/cloudsql-import.sh"]
-
-  db-local-mysql-import:
-    <<: *api-defaults
-    working_dir: /w/public-api/db-cdr
-    entrypoint: ["./generate-cdr/local-mysql-import.sh"]
-
 volumes:
-  db:
-  gradle-cache:
-  gradle-public-api-cache:
-  db-sync:
-    external: true
\ No newline at end of file
+  db:
\ No newline at end of file
diff --git a/public-api/libproject/cloudsqlproxycontext.rb b/public-api/libproject/cloudsqlproxycontext.rb
index 42587ebfb..4f4488796 100644
--- a/public-api/libproject/cloudsqlproxycontext.rb
+++ b/public-api/libproject/cloudsqlproxycontext.rb
@@ -1,25 +1,55 @@
 require_relative "../../aou-utils/serviceaccounts"
+require_relative "../../aou-utils/utils/common"
 require_relative "../../aou-utils/workbench"
 
 class CloudSqlProxyContext < ServiceAccountContext
 
   def run()
+    common = Common.new
     # TODO(dmohs): An error here does not cause the main thread to die.
     super do
-      @ps = fork do
-        exec(*%W{
+      ps = nil
+      docker_container_id = nil
+      instance = "#{@project}:us-central1:databrowsermaindb=tcp:0.0.0.0:3307"
+      if Workbench.in_docker?
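+        # Already running inside a container (e.g. on CircleCI), so fork the cloud_sql_proxy binary directly.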
+        ps = fork do
+          exec(*%W{
            cloud_sql_proxy
-           -instances #{@project}:us-central1:databrowsermaindb=tcp:0.0.0.0:3307
+           -instances #{instance}
            -credential_file=#{@path}
-        })
+          })
+        end
+      else
+        docker_container_id = common.capture_stdout(%W{docker run -d
+          -u #{ENV["UID"]}
+          -v #{@keyfile_path}:/config
+          -p 0.0.0.0:3307:3307
+          gcr.io/cloudsql-docker/gce-proxy:1.19.1 /cloud_sql_proxy
+          -instances=#{instance}
+          -credential_file=/config
+        }).chomp
       end
       begin
-        sleep 1 # TODO(dmohs): Detect running better.
+        deadlineSec = 40
+
+        common.status "waiting up to #{deadlineSec}s for cloudsql proxy to start..."
+        start = Time.now
+        until (common.run %W{mysqladmin ping --host 0.0.0.0 --port 3307 --silent}).success?
+          if Time.now - start >= deadlineSec
+            raise("mysql docker service did not become available after #{deadlineSec}s")
+          end
+          sleep 1
+        end
         yield
       ensure
-        Process.kill "HUP", @ps
-        Process.wait
+        if ps
+          Process.kill "HUP", ps
+          Process.wait
+        else if docker_container_id
+          common.run_inline(%W{docker kill #{docker_container_id}})
+        end
       end
     end
   end
 end
+end
\ No newline at end of file
diff --git a/public-api/libproject/devstart.rb b/public-api/libproject/devstart.rb
index f5d31544a..649e00f54 100644
--- a/public-api/libproject/devstart.rb
+++ b/public-api/libproject/devstart.rb
@@ -6,17 +6,19 @@
 require_relative "../../aou-utils/utils/common"
 require_relative "../../aou-utils/workbench"
 require_relative "cloudsqlproxycontext"
+require_relative "environments"
 require_relative "gcloudcontext"
 require_relative "wboptionsparser"
-require_relative "environments"
+require "benchmark"
 require "fileutils"
 require "io/console"
 require "json"
 require "optparse"
 require "ostruct"
 require "tempfile"
+require "net/http"
+require "json"
 
-TEST_CIRCLE_ACCOUNT = "circle-deploy-account@aou-db-test.iam.gserviceaccount.com"
 INSTANCE_NAME = "databrowsermaindb"
 FAILOVER_INSTANCE_NAME = "databrowserbackupdb"
 SERVICES = %W{servicemanagement.googleapis.com storage-component.googleapis.com iam.googleapis.com
@@ -30,76 +32,31 @@ def run_inline_or_log(dry_run, args)
   Common.new.run_inline(cmd_prefix + args)
 end
 
-def must_get_project_key(project, key)
-  unless ENVIRONMENTS.fetch(project, {}).has_key?(key)
-    raise ArgumentError.new("project #{project} missing configuration for #{key}")
-  end
-  return ENVIRONMENTS[project][key]
-end
-
 def get_cdr_sql_project(project)
-  must_get_project_key(project, :cdr_sql_instance).split(":")[0]
-end
-
-def get_api_base_path(project)
-  must_get_project_key(project, :api_base_path)
-end
-
-def get_gae_vars(project)
-  must_get_project_key(project, :gae_vars)
-end
-
-def ensure_docker(cmd_name, args=nil)
-  args = (args or [])
-  unless Workbench.in_docker?
-    ensure_docker_sync()
-    exec(*(%W{docker-compose run --rm scripts ./project.rb #{cmd_name}} + args))
-  end
+  return must_get_env_value(project, :cdr_sql_instance).split(":")[0]
 end
 
-def ensure_docker_sync()
-  common = Common.new
-  at_exit do
-    common.run_inline %W{docker-sync stop}
-  end
-  common.run_inline %W{docker-sync start}
-end
-
-# exec against a live local API server - used for script access to a local API
-# server or database.
-def ensure_docker_api(cmd_name, args)
-  if Workbench.in_docker?
-    return
-  end
-  Process.wait spawn(*(%W{docker-compose exec api ./project.rb #{cmd_name}} + args))
-  unless $?.exited? and $?.success?
-    Common.new.error "command against docker-compose service 'api' failed, " +
-      "please verify your local API server is running (dev-up " +
-      "or run-api)"
-  end
-  if $?.exited?
-    exit $?.exitstatus
+def init_new_cdr_db(args)
+  Dir.chdir('db-cdr') do
+    Common.new.run_inline %W{./generate-cdr/init-new-cdr-db.sh} + args
   end
-  exit 1
 end
 
-def init_new_cdr_db(args)
-  Common.new.run_inline %W{docker-compose run cdr-scripts generate-cdr/init-new-cdr-db.sh} + args
+def gcs_vars_path(project)
+  return "gs://#{project}-credentials/vars.env"
 end
 
 def read_db_vars(gcc)
-  Workbench.assert_in_docker
-  vars_path = "gs://#{gcc.project}-credentials/vars.env"
   vars = Workbench.read_vars(Common.new.capture_stdout(%W{
-    gsutil cat #{vars_path}
+    gsutil cat #{gcs_vars_path(gcc.project)}
   }))
   if vars.empty?
-    Common.new.error "Failed to read #{vars_path}"
+    Common.new.error "Failed to read #{gcs_vars_path(gcc.project)}"
     exit 1
   end
   # Note: CDR project and target project may be the same.
   cdr_project = get_cdr_sql_project(gcc.project)
-  cdr_vars_path = "gs://#{cdr_project}-credentials/vars.env"
+  cdr_vars_path = gcs_vars_path(cdr_project)
   cdr_vars = Workbench.read_vars(Common.new.capture_stdout(%W{
     gsutil cat #{cdr_vars_path}
   }))
@@ -114,7 +71,39 @@ def read_db_vars(gcc)
   })
 end
 
-def dev_up()
+def format_benchmark(bm)
+  "%ds" % [bm.real]
+end
+
+def start_local_db_service()
+  common = Common.new
+  deadlineSec = 40
+
+  bm = Benchmark.measure {
+    common.run_inline %W{docker-compose up -d db}
+
+    common.status "waiting up to #{deadlineSec}s for mysql service to start..."
+    start = Time.now
+    until (common.run %W{docker-compose exec -T db mysqladmin ping --silent}).success?
+      if Time.now - start >= deadlineSec
+        raise("mysql docker service did not become available after #{deadlineSec}s")
+      end
+      sleep 1
+    end
+  }
+  common.status "Database startup complete (#{format_benchmark(bm)})"
+end
+
+def dev_up(cmd_name, args)
+  op = WbOptionsParser.new(cmd_name, args)
+  op.opts.start_db = true
+  op.add_option(
+    "--nostart-db",
+    ->(opts, _) { opts.start_db = false },
+    "If specified, don't start the DB service. This is useful when running " +
+    "within docker, i.e. on CircleCI, as the DB service runs via docker-compose")
+  op.parse.validate
+
   common = Common.new
 
   account = get_auth_login_account()
@@ -122,66 +111,99 @@ def dev_up()
     raise("Please run 'gcloud auth login' before starting the server.")
   end
 
-  at_exit { common.run_inline %W{docker-compose down} }
-
-  # ensures that sa-key.json is included in the docker-sync image
-  # This is necessary because docker-compose exposes it as GOOGLE_APPLICATION_CREDENTIALS
-  # which is needed to construct the IamCredentialsClient Bean
-  ServiceAccountContext.new(TEST_PROJECT).run do
-    ensure_docker_sync()
+  at_exit do
+    common.run_inline %W{docker-compose down} if op.opts.start_db
   end
 
-  common.status "Starting database..."
-  common.run_inline %W{docker-compose up -d db}
-  common.status "Running database migrations..."
-  common.run_inline %W{docker-compose run db-scripts ./run-migrations.sh main}
-  init_new_cdr_db %W{--cdr-db-name public}
+  setup_local_environment()
+
+  overall_bm = Benchmark.measure {
+    start_local_db_service() if op.opts.start_db
+
+    common.status "Database init & migrations..."
+    bm = Benchmark.measure {
+      Dir.chdir('db') do
+        common.run_inline %W{./run-migrations.sh main}
+      end
+      init_new_cdr_db %W{--cdr-db-name public}
+    }
+    common.status "Database init & migrations complete (#{format_benchmark(bm)})"
+
+    common.status "Loading configs & data..."
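+    # Loads the local server config and CDR versions via libproject/load_local_data_and_configs.sh.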
+    bm = Benchmark.measure {
+      common.run_inline %W{./libproject/load_local_data_and_configs.sh}
+    }
+    common.status "Loading configs complete (#{format_benchmark(bm)})"
+  }
+  common.status "Total dev-env setup time: #{format_benchmark(overall_bm)}"
 
-  common.status "Updating CDR versions..."
-  common.run_inline %W{docker-compose run api-scripts ./libproject/load_local_data_and_configs.sh}
-  common.run_inline_swallowing_interrupt %W{docker-compose up public-api}
+  run_api_incremental()
 end
 
 Common.register_command({
   :invocation => "dev-up",
   :description => "Brings up the development environment, including db migrations and config " \
     "update. (You can use run-api instead if database and config are up-to-date.)",
-  :fn => ->() { dev_up() }
+  :fn => ->(*args) { dev_up("dev-up", args) }
 })
 
+def run_api_incremental()
+  common = Common.new
+
+  # The GAE gradle configuration depends on the existence of an sa-key.json file for auth.
+  get_test_service_account()
+
+  begin
+    common.status "Starting API server..."
+    # appengineStart must be run with the Gradle daemon or it will stop outputting logs as soon as
+    # the application has finished starting.
+    common.run_inline "./gradlew --daemon appengineRun &"
+
+    # incrementalHotSwap must be run without the Gradle daemon or stdout and stderr will not appear
+    # in the output.
+    common.run_inline %W{./gradlew --continuous incrementalHotSwap}
+  rescue Interrupt
+    # Do nothing
+  ensure
+    common.run_inline %W{./gradlew --stop}
+  end
+end
+
 def setup_local_environment()
-  root_password = ENV["MYSQL_ROOT_PASSWORD"]
   ENV.update(Workbench.read_vars_file("db/vars.env"))
+  ENV.update(must_get_env_value("local", :gae_vars))
   ENV["DB_HOST"] = "127.0.0.1"
   ENV["PUBLIC_DB_HOST"] = "127.0.0.1"
-  ENV["MYSQL_ROOT_PASSWORD"] = root_password
   ENV["DB_CONNECTION_STRING"] = "jdbc:mysql://127.0.0.1/databrowser?useSSL=false"
   ENV["PUBLIC_DB_CONNECTION_STRING"] = "jdbc:mysql://127.0.0.1/public?useSSL=false"
 end
 
+# TODO(RW-605): This command doesn't actually execute locally as it assumes a docker context.
+#
+# This command is only ever meant to be run via CircleCI; see .circleci/config.yml
 def run_local_migrations()
-  setup_local_environment
+  setup_local_environment()
   # Runs migrations against the local database.
   common = Common.new
   Dir.chdir('db') do
     common.run_inline %W{./run-migrations.sh main}
   end
   Dir.chdir('db-cdr/generate-cdr') do
-    common.run_inline %W{./init-new-cdr-db.sh --cdr-db-name public}
+    common.run_inline %W{./init-new-cdr-db.sh --cdr-db-name cdr}
   end
-  common.run_inline %W{./gradlew :loadConfig -Pconfig_key=main -Pconfig_file=config/config_local.json}
-  common.run_inline %W{./gradlew :loadConfig -Pconfig_key=cdrBigQuerySchema -Pconfig_file=config/cdm/cdm_5_2.json}
-  common.run_inline %W{./gradlew :updateCdrConfig -PappArgs=['config/cdr_config_local.json',false]}
+  common.run_inline %W{gradle :loadConfig -Pconfig_key=main -Pconfig_file=config/config_local.json}
+  common.run_inline %W{gradle :loadConfig -Pconfig_key=cdrBigQuerySchema -Pconfig_file=config/cdm/cdm_5_2.json}
+  common.run_inline %W{gradle :updateCdrConfig -PappArgs=['config/cdr_config_local.json',false]}
 end
 
 Common.register_command({
   :invocation => "run-local-migrations",
-  :description => "Runs DB migrations with the local MySQL instance; does not use docker. You must set MYSQL_ROOT_PASSWORD before running this.",
+  :description => "Runs DB migrations with the local MySQL instance. You must set MYSQL_ROOT_PASSWORD before running this.",
   :fn => ->() { run_local_migrations() }
 })
 
 def start_local_public_api()
-  setup_local_environment
+  setup_local_environment()
   common = Common.new
   Dir.chdir('../public-api') do
     common.status "Starting public API server..."
@@ -196,7 +218,7 @@ def start_local_public_api()
 })
 
 def stop_local_public_api()
-  setup_local_environment
+  setup_local_environment()
   common = Common.new
   Dir.chdir('../public-api') do
     common.status "Stopping public API server..."
@@ -228,37 +250,34 @@ def run_local_public_api_tests()
   :fn => ->() { run_local_public_api_tests() }
 })
 
-def run_public_api_and_db()
+def run_public_api()
   common = Common.new
-  common.status "Starting database..."
   ServiceAccountContext.new(TEST_PROJECT).run do
-    ensure_docker_sync()
+    common.status "Starting API. This can take a while. Thoughts on reducing development cycle time"
+    common.status "are here:"
+    common.status " https://github.com/all-of-us/workbench/blob/master/api/doc/2017/dev-cycle.md"
+    at_exit { common.run_inline %W{docker-compose down} }
+    common.run_inline_swallowing_interrupt %W{docker-compose up public-api}
   end
-  common.run_inline %W{docker-compose up -d db}
-  common.status "Starting public API."
-  common.run_inline_swallowing_interrupt %W{docker-compose up public-api}
 end
 
-Common.register_command({
-  :invocation => "run-public-api",
-  :description => "Runs the public api server (assumes database is up-to-date.)",
-  :fn => ->() { run_public_api_and_db() }
-})
-
+def run_public_api_and_db()
+  setup_local_environment()
 
-def clean()
   common = Common.new
-  common.run_inline %W{docker-compose run --rm public-api ./gradlew clean}
+  at_exit { common.run_inline %W{docker-compose down} }
+  start_local_db_service()
+
+  run_api_incremental()
 end
 
 Common.register_command({
-  :invocation => "clean",
-  :description => "Runs gradle clean. Occasionally necessary before generating code from Swagger.",
-  :fn => ->(*args) { clean(*args) }
+  :invocation => "run-public-api",
+  :description => "Runs the api server (assumes database and config are already up-to-date.)",
+  :fn => ->() { run_public_api_and_db() }
 })
 
 def validate_swagger(cmd_name, args)
-  ensure_docker cmd_name, args
   Common.new.run_inline %W{./gradlew validateSwagger} + args
 end
 
@@ -269,7 +288,6 @@ def validate_swagger(cmd_name, args)
 })
 
 def run_public_api_tests(cmd_name, args)
-  ensure_docker cmd_name, args
   Dir.chdir('../public-api') do
     Common.new.run_inline %W{./gradlew :test} + args
   end
@@ -295,43 +313,19 @@ def run_all_tests(cmd_name, args)
 
 
 def run_integration_tests(cmd_name, *args)
-  ensure_docker cmd_name, args
-  op = WbOptionsParser.new(cmd_name, args)
-  op.opts.env = 'local'
-  op.add_option(
-    "--env [local|aou-db-test|...]",
-    ->(opts, v) { opts.env = v},
-    "Environment to execute the test against, defaults to local"
-  )
-  op.parse.validate
-
-  api_base = 'http://localhost:8083'
-  if op.opts.env != 'local'
-    api_base = get_api_base_path(op.opts.env)
-  end
-  ENV['DB_API_BASE_PATH'] = api_base
   common = Common.new
-  common.status "Executing integration tests against '#{api_base}'"
-
-  # We need to run integration tests in against iap staging env with SA context to be able to get past iap.
-  if op.opts.env == 'aou-db-staging'
-    ServiceAccountContext.new(op.opts.env, TEST_CIRCLE_ACCOUNT).run do
-      common.run_inline %W{./gradlew integration} + op.remaining
-    end
-  else
-    common.run_inline %W{./gradlew integration} + op.remaining
+  ServiceAccountContext.new(TEST_PROJECT).run do
+    common.run_inline %W{./gradlew integrationTest} + args
   end
 end
 
 Common.register_command({
   :invocation => "integration",
-  :description => "Runs integration tests.",
+  :description => "Runs integration tests. Excludes nightly-only tests.",
  :fn => ->(*args) { run_integration_tests("integration", *args) }
 })
 
 def run_bigquery_tests(cmd_name, *args)
-  ensure_docker cmd_name, args
   common = Common.new
   ServiceAccountContext.new(TEST_PROJECT).run do
     common.run_inline %W{./gradlew bigquerytest} + args
@@ -345,7 +339,6 @@ def run_bigquery_tests(cmd_name, *args)
 })
 
 def run_gradle(cmd_name, args)
-  ensure_docker cmd_name, args
   begin
     Common.new.run_inline %W{./gradlew} + args
   ensure
@@ -362,10 +355,10 @@ def run_gradle(cmd_name, args)
   :fn => ->(*args) { run_gradle("gradle", args) }
 })
 
-
 def connect_to_db()
   common = Common.new
-
+  common.status "Starting database if necessary..."
+  common.run_inline %W{docker-compose up -d db}
   cmd = "MYSQL_PWD=root-notasecret mysql --database=databrowser"
   common.run_inline %W{docker-compose exec db sh -c #{cmd}}
 end
@@ -376,12 +369,13 @@ def connect_to_db()
   :fn => ->() { connect_to_db() }
 })
 
-
 def docker_clean()
   common = Common.new
 
   # --volumes clears out any cached data between runs, e.g. the MySQL database
-  common.run_inline %W{docker-compose down --volumes}
+  # --rmi local forces a rebuild of any local dev images on the next run - usually the pieces will
+  # still be cached and this is fast.
+  common.run_inline %W{docker-compose down --volumes --rmi local}
 
   # This keyfile gets created and cached locally on dev-up. Though it's not
   # specific to Docker, it is mounted locally for docker runs. For lack of a
@@ -397,7 +391,8 @@ def docker_clean()
   :invocation => "docker-clean",
   :description => \
     "Removes docker containers and volumes, allowing the next `dev-up` to" \
-    " start from scratch (e.g., the database will be re-created).",
+    " start from scratch (e.g., the database will be re-created). Includes ALL" \
+    " docker images, not just for the API.",
   :fn => ->() { docker_clean() }
 })
 
@@ -436,7 +431,6 @@ def get_auth_login_account()
 end
 
 def drop_cloud_db(cmd_name, *args)
-  ensure_docker cmd_name, args
   op = WbOptionsParser.new(cmd_name, args)
   gcc = GcloudContextV2.new(op)
   op.parse.validate
@@ -458,7 +452,6 @@ def drop_cloud_db(cmd_name, *args)
 })
 
 def drop_cloud_cdr(cmd_name, *args)
-  ensure_docker cmd_name, args
   op = WbOptionsParser.new(cmd_name, args)
   gcc = GcloudContextV2.new(op)
   op.parse.validate
@@ -479,16 +472,26 @@ def drop_cloud_cdr(cmd_name, *args)
   :fn => ->(*args) { drop_cloud_cdr("drop-cloud-cdr", *args) }
 })
 
-def run_local_data_migrations()
+def run_local_all_migrations()
+  setup_local_environment()
+  start_local_db_service()
+
+  common = Common.new
+  Dir.chdir('db') do
+    common.run_inline %W{./run-migrations.sh main}
+  end
+
+  init_new_cdr_db %W{--cdr-db-name public}
   init_new_cdr_db %W{--cdr-db-name public --run-list data --context local}
 end
 
 Common.register_command({
-  :invocation => "run-local-data-migrations",
-  :description => "Runs local data migrations for cdr/workbench schemas.",
-  :fn => ->() { run_local_data_migrations() }
+  :invocation => "run-local-all-migrations",
+  :description => "Runs local data/schema migrations for the cdr and workbench schemas.",
+  :fn => ->() { run_local_all_migrations() }
 })
 
+
 def generate_public_cdr_counts(cmd_name, *args)
   op = WbOptionsParser.new(cmd_name, args)
   op.add_option(
@@ -611,11 +614,15 @@ def cloudsql_import(cmd_name, *args)
   op.parse.validate
 
   ServiceAccountContext.new(op.opts.project).run do
-    common = Common.new
-    #common.run_inline %W{docker-compose run db-cloudsql-import} + args
-    common.run_inline %W{docker-compose run db-cloudsql-import
-      --project #{op.opts.project} --instance #{op.opts.instance} --database #{op.opts.database}
-      --bucket #{op.opts.bucket}}
+    Dir.chdir('db-cdr') do
+      common = Common.new
+      common.run_inline %W{./generate-cdr/cloudsql-import.sh
+        --project #{op.opts.project}
+        --instance #{op.opts.instance}
+        --database #{op.opts.database}
+        --bucket #{op.opts.bucket}
+        --file #{op.opts.file}}
+    end
   end
 end
 
@@ -630,7 +637,9 @@ def cloudsql_import(cmd_name, *args)
 
 def generate_local_cdr_db(*args)
   common = Common.new
-  common.run_inline %W{docker-compose run db-generate-local-cdr-db} + args
+  Dir.chdir('db-cdr') do
+    common.run_inline %W{./generate-cdr/generate-local-cdr-db.sh} + args
+  end
 end
 
 Common.register_command({
@@ -643,7 +652,9 @@ def generate_local_cdr_db(*args)
 
 def generate_local_count_dbs(*args)
   common = Common.new
-  common.run_inline %W{docker-compose run db-generate-local-count-dbs} + args
+  Dir.chdir('db-cdr') do
+    common.run_inline %W{./generate-cdr/generate-local-count-dbs.sh} + args
+  end
 end
 
 Common.register_command({
@@ -656,10 +667,11 @@ def generate_local_count_dbs(*args)
 
 def mysqldump_db(*args)
   common = Common.new
-  common.run_inline %W{docker-compose run db-mysqldump-local-db} + args
+  Dir.chdir('db-cdr') do
+    common.run_inline %W{./generate-cdr/make-mysqldump.sh} + args
+  end
 end
 
-
 Common.register_command({
   :invocation => "mysqldump-local-db",
   :description => "mysqldump-local-db --db-name --bucket
@@ -669,7 +681,6 @@ def mysqldump_db(*args)
 
 def local_mysql_import(cmd_name, *args)
   op = WbOptionsParser.new(cmd_name, args)
-
   op.add_option(
     "--sql-dump-file [filename]",
     ->(opts, v) { opts.file = v},
@@ -683,8 +694,10 @@ def local_mysql_import(cmd_name, *args)
   op.parse.validate
 
   common = Common.new
-  common.run_inline %W{docker-compose run db-local-mysql-import
+  Dir.chdir('db-cdr') do
+    common.run_inline %W{./generate-cdr/local-mysql-import.sh
       --sql-dump-file #{op.opts.file} --bucket #{op.opts.bucket}}
+  end
 end
 
 Common.register_command({
   :invocation => "local-mysql-import",
@@ -695,9 +708,10 @@ def local_mysql_import(cmd_name, *args)
 
 
 def run_drop_cdr_db()
-  ensure_docker_sync()
   common = Common.new
-  common.run_inline %W{docker-compose run cdr-scripts ./run-drop-db.sh}
+  Dir.chdir('db-cdr') do
+    common.run_inline %W{./run-drop-db.sh}
+  end
 end
 
 Common.register_command({
@@ -859,7 +873,6 @@ def update_cdr_config_for_project(cdr_config_file, dry_run)
 end
 
 def update_cdr_config(cmd_name, *args)
-  ensure_docker cmd_name, args
   op = update_cdr_config_options(cmd_name, args)
   gcc = GcloudContextV2.new(op)
   op.parse.validate
@@ -878,14 +891,13 @@ def update_cdr_config(cmd_name, *args)
 })
 
 def update_cdr_config_local(cmd_name, *args)
-  ensure_docker_sync()
-  setup_local_environment
+  setup_local_environment()
   op = update_cdr_config_options(cmd_name, args)
   op.parse.validate
   cdr_config_file = 'config/cdr_config_local.json'
   app_args = ["-PappArgs=['/w/public-api/" + cdr_config_file + "',false]"]
   common = Common.new
-  common.run_inline %W{docker-compose run --rm api-scripts ./gradlew updateCdrConfig} + app_args
+  common.run_inline %W{./gradlew updateCdrConfig} + app_args
 end
 
 Common.register_command({
@@ -895,7 +907,6 @@ def update_cdr_config_local(cmd_name, *args)
 })
 
 def connect_to_cloud_db(cmd_name, *args)
-  ensure_docker cmd_name, args
   common = Common.new
   op = WbOptionsParser.new(cmd_name, args)
   op.add_option(
@@ -922,7 +933,6 @@ def connect_to_cloud_db(cmd_name, *args)
   :fn => ->(*args) { connect_to_cloud_db("connect-to-cloud-db", *args) }
 })
 
-
 def deploy_app(cmd_name, args)
   common = Common.new
   op = WbOptionsParser.new(cmd_name, args)
@@ -981,7 +991,6 @@ def deploy_app(cmd_name, args)
 end
 
 def deploy_public_api(cmd_name, args)
-  ensure_docker cmd_name, args
   common = Common.new
   common.status "Deploying public-api..."
   deploy_app(cmd_name, args)
@@ -1048,7 +1057,6 @@ def with_cloud_proxy_and_db_env(cmd_name, args)
 end
 
 def deploy(cmd_name, args)
-  ensure_docker cmd_name, args
   op = WbOptionsParser.new(cmd_name, args)
   op.opts.dry_run = false
 
@@ -1117,7 +1125,6 @@ def deploy(cmd_name, args)
 
 
 def run_cloud_migrations(cmd_name, args)
-  ensure_docker cmd_name, args
   with_cloud_proxy_and_db_env(cmd_name, args) { migrate_database }
 end
 
@@ -1128,7 +1135,6 @@ def run_cloud_migrations(cmd_name, args)
 })
 
 def update_cloud_config(cmd_name, args)
-  ensure_docker cmd_name, args
   with_cloud_proxy_and_db_env(cmd_name, args) do |ctx|
     load_config(ctx.project)
   end
@@ -1210,7 +1216,6 @@ def random_password()
 
 # TODO: add a goal which updates CDR DBs but nothing else
 def setup_cloud_project(cmd_name, *args)
-  ensure_docker cmd_name, args
   op = WbOptionsParser.new(cmd_name, args)
   op.add_option(
     "--public-db-name [PUBLIC_DB]",
@@ -1244,4 +1249,6 @@ def get_test_service_account()
   :invocation => "get-test-service-creds",
   :description => "Copies sa-key.json locally (for use when running tests from an IDE, etc).",
   :fn => ->() { get_test_service_account()}
-})
\ No newline at end of file
+})
+
+
diff --git a/public-api/libproject/gcloudcontext.rb b/public-api/libproject/gcloudcontext.rb
index 084c09691..e8d742488 100644
--- a/public-api/libproject/gcloudcontext.rb
+++ b/public-api/libproject/gcloudcontext.rb
@@ -6,7 +6,6 @@ class GcloudContextV2
   attr_reader :account, :creds_file, :project
 
   def initialize(options_parser)
-    Workbench.assert_in_docker
     @options_parser = options_parser
     # We use both gcloud and gsutil commands for various tasks. While gcloud can take arguments,
     # gsutil uses the current gcloud config, so we want to grab and verify the account from there.
diff --git a/public-api/src/dev/server/with-mysql-login.sh b/public-api/libproject/with-mysql-login.sh
similarity index 100%
rename from public-api/src/dev/server/with-mysql-login.sh
rename to public-api/libproject/with-mysql-login.sh
diff --git a/public-api/src/dev/server/Dockerfile b/public-api/src/dev/server/Dockerfile
deleted file mode 100644
index 6ba46126b..000000000
--- a/public-api/src/dev/server/Dockerfile
+++ /dev/null
@@ -1,74 +0,0 @@
-FROM ubuntu:22.04
-#
-# Development requirements
-#
-
-RUN apt-get update && \
-  apt-get install --no-install-recommends -y \
-  openjdk-8-jdk \
-  curl \
-  python2 \
-  python3 \
-  gcc \
-  bash \
-  openssh-client \
-  git \
-  gettext \
-  mysql-server \
-  mysql-client \
-  ruby \
-  ruby-dev \
-  ruby-json \
-  make \
-  unzip \
-  wait-for-it \
-  && \
-  apt-get clean && \
-  rm -rf /var/lib/apt/lists/*
-
-ENV CLOUD_SDK_VERSION 260.0.0
-
-ENV PATH /google-cloud-sdk/bin:$PATH
-
-RUN curl -O https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-${CLOUD_SDK_VERSION}-linux-x86_64.tar.gz && \
-  tar xzf google-cloud-sdk-${CLOUD_SDK_VERSION}-linux-x86_64.tar.gz && \
-  rm google-cloud-sdk-${CLOUD_SDK_VERSION}-linux-x86_64.tar.gz && \
-  ln -s /lib /lib64 && \
-  gcloud config set core/disable_usage_reporting true && \
-  gcloud config set component_manager/disable_update_check true && \
-  gcloud config set metrics/environment github_docker_image
-
-#
-# Other development tools
-#
-
-RUN gcloud components install app-engine-java
-
-RUN curl https://dl.google.com/cloudsql/cloud_sql_proxy.linux.amd64 \
-  > /usr/local/bin/cloud_sql_proxy && chmod +x /usr/local/bin/cloud_sql_proxy
-
-RUN curl https://raw.githubusercontent.com/mrako/wait-for/d9699cb9fe8a4622f05c4ee32adf2fd93239d005/wait-for \
-  > /usr/local/bin/wait-for && chmod +x /usr/local/bin/wait-for
-
-RUN apt-get -q update && apt-get -qy install netcat
-
-# Create a gradle cache directory as a volume that can be read/written by any
-# container (including containers running as any user -- hence the a+rwx)
-RUN mkdir /.gradle && chmod a+rwx -R /.gradle
-VOLUME /.gradle
-ENV GRADLE_USER_HOME /.gradle
-
-# It never makes sense for Gradle to run a daemon within a docker container.
-ENV GRADLE_OPTS="-Dorg.gradle.daemon=false"
-
-RUN curl https://services.gradle.org/distributions/gradle-6.8.1-bin.zip -L > /tmp/gradle.zip
-WORKDIR /tmp
-RUN unzip gradle.zip && rm gradle.zip \
-  && mv gradle-* /gradle
-ENV PATH="$PATH:/gradle/bin"
-WORKDIR /
-
-
-COPY with-uid.sh /usr/local/bin
-
-ENTRYPOINT ["with-uid.sh"]
\ No newline at end of file
diff --git a/public-api/src/dev/server/with-uid.sh b/public-api/src/dev/server/with-uid.sh
deleted file mode 100755
index 202878446..000000000
--- a/public-api/src/dev/server/with-uid.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-
-USERNAME=$(whoami 2>/dev/null)
-EXIT_CODE=$?
-
-if [[ $EXIT_CODE -eq 0 ]]; then
-  >&2 echo 'This container has poor behavior if run as an existing user.' \
-    'The given UID matches the user '"'$USERNAME'"'. Exiting.';
-  exit 1;
-fi
-
-exec $@
\ No newline at end of file