From baa8c01eaaec5ea6c7ffa2c7dc2ba5580c9ae068 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Fri, 16 Feb 2024 09:08:03 -0800 Subject: [PATCH 1/3] convert manual steps into a Makefile chain - separated destination DB from container that connects and loads data - separated Ruby runner container from DB image. Now all images are separate and pure as they come from Docker Hub - reduced whole process to 2 `make` commands --- .gitignore | 4 ++ Dockerfile | 18 ------ Makefile | 113 ++++++++++++++++++++++++++++++++++++ README.md | 71 +++++++--------------- create_db.sql.txt | 2 + docker-compose-postgres.yml | 25 ++++++++ docker-compose.yml | 9 --- install.sh | 10 ---- update_csvs.rb | 11 +++- 9 files changed, 173 insertions(+), 90 deletions(-) delete mode 100644 Dockerfile create mode 100644 Makefile create mode 100644 create_db.sql.txt create mode 100644 docker-compose-postgres.yml delete mode 100644 docker-compose.yml delete mode 100644 install.sh diff --git a/.gitignore b/.gitignore index c4c4ffc..14aaf9b 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,5 @@ *.zip +.csvs + +# this one is auto-generated +create_db.sql \ No newline at end of file diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index 850c5cc..0000000 --- a/Dockerfile +++ /dev/null @@ -1,18 +0,0 @@ -FROM library/postgres - -RUN apt-get update -RUN apt-get -y install unzip ruby dos2unix - -RUN mkdir /data -COPY install.sql /data/ -COPY update_csvs.rb /data/ -COPY adventure_works_2014_OLTP_script.zip /data/ -RUN cd /data && \ - unzip adventure_works_2014_OLTP_script.zip && \ - rm adventure_works_2014_OLTP_script.zip && \ - ruby update_csvs.rb && \ - rm update_csvs.rb - -COPY install.sh /docker-entrypoint-initdb.d/ -WORKDIR /data/ -RUN dos2unix /docker-entrypoint-initdb.d/*.sh diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..cc0c2ac --- /dev/null +++ b/Makefile @@ -0,0 +1,113 @@ +SOURCE_CSV_DIR:=.csvs +# on some systems docker must be run with sudo as 
`DOCKER_COMMAND="sudo docker" make x` +DOCKER_COMMAND?=docker +POSTGRES_IMAGE?=postgres:16 + +default: + @echo "No default action. Review Makefile and pick one." + +AdventureWorks-oltp-install-script.zip: + # -L - follows redirects + # -OJ - saves the file according to Content Disposition header returned by the server + # the value of which is "AdventureWorks-oltp-install-script.zip" + curl -LOJ -vv https://github.com/Microsoft/sql-server-samples/releases/download/adventureworks/AdventureWorks-oltp-install-script.zip + +${SOURCE_CSV_DIR}: AdventureWorks-oltp-install-script.zip + unzip -d ${SOURCE_CSV_DIR} AdventureWorks-oltp-install-script.zip + +${SOURCE_CSV_DIR}/csvs_are_converted: ${SOURCE_CSV_DIR} + ${DOCKER_COMMAND} run -it --rm \ + --mount type=bind,source=${PWD}/${SOURCE_CSV_DIR},target=/usr/app/csvs \ + --mount type=bind,source=${PWD}/update_csvs.rb,target=/usr/app/update_csvs.rb \ + -w /usr/app \ + ruby:latest \ + ruby update_csvs.rb /usr/app/csvs + touch ${SOURCE_CSV_DIR}/csvs_are_converted + + +################################### + +DB_DOCKER_NETWORK?=database + +.PHONY: ensure_db_network_exists +ensure_db_network_exists: + ${DOCKER_COMMAND} network create ${DB_DOCKER_NETWORK} | true + +.PHONY: ensure_db_volume_exists +ensure_db_volume_exists: + ${DOCKER_COMMAND} volume create adventure_works_data | true + +.PHONY: db_start +db_start: ensure_db_network_exists ensure_db_volume_exists + POSTGRES_IMAGE=${POSTGRES_IMAGE} \ + ${DOCKER_COMMAND} compose -f docker-compose-postgres.yml up -d + +.PHONY: db_stop +db_stop: + POSTGRES_IMAGE=${POSTGRES_IMAGE} \ + ${DOCKER_COMMAND} compose -f docker-compose-postgres.yml down + +# used only for PSQL client to talk to the DB server +POSTGRES_USER?=postgres +POSTGRES_PASSWORD?=postgres +DB_NAME?=Adventureworks +# match db service container name in docker-compose-postgres.yaml +DB_HOST?=db +CONTAINER_DATA_DIR:=/mnt/src + +${SOURCE_CSV_DIR}/create_db.sql: ${SOURCE_CSV_DIR}/csvs_are_converted + printf "\ +CREATE DATABASE 
\"${DB_NAME}\";\n\ +GRANT ALL PRIVILEGES ON DATABASE \"${DB_NAME}\" TO ${POSTGRES_USER};\n\ +" > ${SOURCE_CSV_DIR}/create_db.sql + +${SOURCE_CSV_DIR}/install.sql: ${SOURCE_CSV_DIR}/csvs_are_converted + cp ./install.sql ${SOURCE_CSV_DIR}/install.sql + +${SOURCE_CSV_DIR}/install.sh: ${SOURCE_CSV_DIR}/create_db.sql ${SOURCE_CSV_DIR}/install.sql + printf "\ +cd ${CONTAINER_DATA_DIR}\n\ +psql -h ${DB_HOST} < ${CONTAINER_DATA_DIR}/create_db.sql\n\ +psql -h ${DB_HOST} -d ${DB_NAME} < ${CONTAINER_DATA_DIR}/install.sql\n\ +" > ${SOURCE_CSV_DIR}/install.sh + +.PHONY: upload_data +upload_data: ensure_db_network_exists ${SOURCE_CSV_DIR}/install.sh # db_start + ${DOCKER_COMMAND} run -it --rm \ + -e POSTGRES_USER=${POSTGRES_USER} \ + -e PGUSER=${POSTGRES_USER} \ + -e POSTGRES_PASSWORD=${POSTGRES_PASSWORD} \ + -e PGPASSWORD=${POSTGRES_PASSWORD} \ + --network ${DB_DOCKER_NETWORK} \ + -v ${PWD}/${SOURCE_CSV_DIR}:${CONTAINER_DATA_DIR} \ + -w ${CONTAINER_DATA_DIR} \ + ${POSTGRES_IMAGE} \ + bash ./install.sh + + +.PHONY: db_console +db_console: ensure_db_network_exists # db_start + ${DOCKER_COMMAND} run -it --rm \ + -e POSTGRES_USER=${POSTGRES_USER} \ + -e PGUSER=${POSTGRES_USER} \ + -e POSTGRES_PASSWORD=${POSTGRES_PASSWORD} \ + -e PGPASSWORD=${POSTGRES_PASSWORD} \ + --network ${DB_DOCKER_NETWORK} \ + -v ${PWD}/${SOURCE_CSV_DIR}:${CONTAINER_DATA_DIR} \ + -w ${CONTAINER_DATA_DIR} \ + ${POSTGRES_IMAGE} \ + psql -h ${DB_HOST} -d ${DB_NAME} + +##################################### + +.PHONY: clean +clean: + rm -rf AdventureWorks-oltp-install-script.zip + rm -rf ${SOURCE_CSV_DIR} + rm -rf create_db.sql + +.PHONY: clean-db +clean-db: + @echo "Ensure DB is stopped ('make db_stop')" + docker container prune -f + docker volume rm adventureworks-for-postgres_adventure_works_data diff --git a/README.md b/README.md index 228b462..6a73be3 100644 --- a/README.md +++ b/README.md @@ -12,65 +12,36 @@ Provided is a ruby file to convert CSVs available on CodePlex into a format usab well as a 
Postgres script to create the tables, load the data, convert the hierarchyid columns, add primary and foreign keys, and create some of the views used by Adventureworks. -## How to set up the database: +## Usage -Download [Adventure Works 2014 OLTP Script](https://github.com/Microsoft/sql-server-samples/releases/download/adventureworks/AdventureWorks-oltp-install-script.zip). +All scripts assume Unixy environment where make command is available. +Everything is orchestrated through Makefile -Extract the .zip and copy all of the CSV files into the same folder, also containing update_csvs.rb file and install.sql. +1. Start your destination database -Modify the CSVs to work with Postgres by running: -``` -ruby update_csvs.rb -``` -Create the database and tables, import the data, and set up the views and keys with: -``` -psql -c "CREATE DATABASE \"Adventureworks\";" -psql -d Adventureworks < install.sql -``` -(If you do not have a database created for your user account then you may need to also add: `-U postgres` to the above two commands.) + `make db_start` -All 68 tables are properly set up, and 11 of the 20 views are established. The ones not built are those that rely on XML functions like value and ref. To see a list of tables, open psql, and then connect to the database and show all the tables with these two commands: -``` -\c "Adventureworks" -\dt (humanresources|person|production|purchasing|sales).* -``` +2. Run `make upload_data` once DB is fully up. -## Using with Docker + - downloads the zip with AdventureWorks data + - unpacks it + - converts it using Ruby script + - starts a separate postgres container that connects to the destination DB and streams data there -You can spin up a new database using **Docker** with `docker-compose up`. 
+## Details -_You will need to rename the Adventure Works 2014 OLTP Script archive to **adventure_works_2014_OLTP_script.zip** to get this to work!_ +Data is downloaded from [Adventure Works 2014 OLTP Script](https://github.com/Microsoft/sql-server-samples/releases/download/adventureworks/AdventureWorks-oltp-install-script.zip). +Contents are extracted and processed / converted by update_csvs.rb -## Motivation +Some DB creation boilerplate SQL is rendered by steps in Makefile -Five years ago I was pretty happy developing .NET apps for large organizations. The stack was -mature, and good practices surrounding software development were very respected. The same kind of -approach I appreciated from my days writing Java code was there, and the community was passionate. +All sql (dynamically rendered DB creation and tables restoration) is ran in a container that connects to the virtual network +on which DB instance is available and streams data to DB using psql command. -Then along came Windows 8. The //build/ conference in September 2011 revealed its first beta, and -even with that early peek at the new direction things were headed, it was clear that everything about -the platform was a haphazard combination of the new Metro apps along with all the traditional control -panel and options and API for classic code. It left a very bad taste in my mouth. Perhaps it would -look pretty, but be very unusable. I couldn't see it ever being successful. Once the "red pill" -registry setting vanished from the builds in mid-2012, the Windows 7 interface was then no longer -available even in Server editions. I knew it was time for a change. For a year I stuck it out -watching to see if there was any hope for some kind of tablet miracle out of Redmond, but I was -consistently unimpressed. +Wherever you see `?=` in Makefile you can override these environment variables by providing alternative values before the `make` command. 
+Example: -In mid-2013 a friend looped me in on a new project involving Ruby on Rails, and I fervently dove in -and have very much enjoyed the elegance of that ecosystem. A big part of that has been ramping up my -knowledge of Postgres. What a great database engine! I figure that others departing the Microsoft -camp may appreciate the same data samples they're familiar with, so I created this along with the -Northwind sample. It's been useful in the classroom training folks about Rails. I expect with the -heavy-handed tactics Microsoft has now used around Windows 10 that even more organizations will -choose to transition away from that platform, so there will be lots of opportunity for samples like -this to help people learn a new environment. - -As well, with the imminent release of SQL Server 2017 for Linux, this sample could be used to -evaluate performance differences between Postgres and SQL 2017. Never thought I'd see the day that -MS SQL got compiled for Linux, but alas, here we are. - -Let's keep coding fun. - -Enjoy! 
+``` +POSTGRES_USER=super POSTGRES_PASSWORD=pass DB_HOST=another_host DB_NAME=NotAdventureWorks make upload_data +``` diff --git a/create_db.sql.txt b/create_db.sql.txt new file mode 100644 index 0000000..0313f9c --- /dev/null +++ b/create_db.sql.txt @@ -0,0 +1,2 @@ +CREATE DATABASE "${DB_NAME}"; +GRANT ALL PRIVILEGES ON DATABASE "${DB_NAME}" TO ${POSTGRES_USER}; diff --git a/docker-compose-postgres.yml b/docker-compose-postgres.yml new file mode 100644 index 0000000..191892f --- /dev/null +++ b/docker-compose-postgres.yml @@ -0,0 +1,25 @@ +version: '3.1' + +services: + + db: + image: ${POSTGRES_IMAGE} + restart: always + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + PGDATA: /var/lib/postgresql/data/pgdata + volumes: + - adventure_works_data:/var/lib/postgresql/data + networks: + - database + ports: + - 5432:5432 + +volumes: + adventure_works_data: + external: true + +networks: + database: + external: true diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index 32f0979..0000000 --- a/docker-compose.yml +++ /dev/null @@ -1,9 +0,0 @@ -version: '2' -services: - db: - build: ./ - environment: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - ports: - - '5432:5432' diff --git a/install.sh b/install.sh deleted file mode 100644 index ef387a6..0000000 --- a/install.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash - -export PGUSER=postgres -psql <<- SHELL - CREATE USER docker; - CREATE DATABASE "Adventureworks"; - GRANT ALL PRIVILEGES ON DATABASE "Adventureworks" TO docker; -SHELL -cd /data -psql -d Adventureworks < /data/install.sql diff --git a/update_csvs.rb b/update_csvs.rb index 0f167c6..1a65afe 100755 --- a/update_csvs.rb +++ b/update_csvs.rb @@ -45,8 +45,13 @@ # Enjoy! +input_path = ARGV.size > 0 ? ARGV[0] : "." 
+ +Dir.glob(input_path + '/*.csv') do |csv_file| + + base_name = File.basename(csv_file) + output_filename = File.join(input_path, base_name + '.tmp') -Dir.glob('./*.csv') do |csv_file| f = if (is_needed = csv_file.end_with?('/Address.csv')) File.open(csv_file, "rb:WINDOWS-1252:UTF-8") else @@ -92,11 +97,11 @@ if is_needed puts "Processing #{csv_file}" f.close - w = File.open(csv_file + ".xyz", "w") + w = File.open(output_filename, "w") w.write(output) w.close File.delete(csv_file) - File.rename(csv_file + ".xyz", csv_file) + File.rename(output_filename, csv_file) end # Here's a list of files that get snagged here: From 9a091bd184bc813e4e8d801cc333814aa6ba41c0 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Wed, 28 Feb 2024 01:21:10 -0800 Subject: [PATCH 2/3] remove "convenience" views. they are inconvenient Names are too short to be intuitive. They don't join or transform anything - just copy of table from another schema. They pollute schema definitions in data lineage tools --- install.sql | 82 ----------------------------------------------------- 1 file changed, 82 deletions(-) diff --git a/install.sql b/install.sql index b83b38c..9c50130 100644 --- a/install.sql +++ b/install.sql @@ -3211,88 +3211,6 @@ FROM Purchasing.Vendor v INNER JOIN Person.AddressType at ON at.AddressTypeID = bea.AddressTypeID; - --- Convenience views - -CREATE SCHEMA pe - CREATE VIEW a AS SELECT addressid AS id, * FROM person.address - CREATE VIEW at AS SELECT addresstypeid AS id, * FROM person.addresstype - CREATE VIEW be AS SELECT businessentityid AS id, * FROM person.businessentity - CREATE VIEW bea AS SELECT businessentityid AS id, * FROM person.businessentityaddress - CREATE VIEW bec AS SELECT businessentityid AS id, * FROM person.businessentitycontact - CREATE VIEW ct AS SELECT contacttypeid AS id, * FROM person.contacttype - CREATE VIEW cr AS SELECT * FROM person.countryregion - CREATE VIEW e AS SELECT emailaddressid AS id, * FROM person.emailaddress - CREATE VIEW pa AS SELECT 
businessentityid AS id, * FROM person.password - CREATE VIEW p AS SELECT businessentityid AS id, * FROM person.person - CREATE VIEW pp AS SELECT businessentityid AS id, * FROM person.personphone - CREATE VIEW pnt AS SELECT phonenumbertypeid AS id, * FROM person.phonenumbertype - CREATE VIEW sp AS SELECT stateprovinceid AS id, * FROM person.stateprovince -; -CREATE SCHEMA hr - CREATE VIEW d AS SELECT departmentid AS id, * FROM humanresources.department - CREATE VIEW e AS SELECT businessentityid AS id, * FROM humanresources.employee - CREATE VIEW edh AS SELECT businessentityid AS id, * FROM humanresources.employeedepartmenthistory - CREATE VIEW eph AS SELECT businessentityid AS id, * FROM humanresources.employeepayhistory - CREATE VIEW jc AS SELECT jobcandidateid AS id, * FROM humanresources.jobcandidate - CREATE VIEW s AS SELECT shiftid AS id, * FROM humanresources.shift -; -CREATE SCHEMA pr - CREATE VIEW bom AS SELECT billofmaterialsid AS id, * FROM production.billofmaterials - CREATE VIEW c AS SELECT cultureid AS id, * FROM production.culture - CREATE VIEW d AS SELECT * FROM production.document - CREATE VIEW i AS SELECT illustrationid AS id, * FROM production.illustration - CREATE VIEW l AS SELECT locationid AS id, * FROM production.location - CREATE VIEW p AS SELECT productid AS id, * FROM production.product - CREATE VIEW pc AS SELECT productcategoryid AS id, * FROM production.productcategory - CREATE VIEW pch AS SELECT productid AS id, * FROM production.productcosthistory - CREATE VIEW pd AS SELECT productdescriptionid AS id, * FROM production.productdescription - CREATE VIEW pdoc AS SELECT productid AS id, * FROM production.productdocument - CREATE VIEW pi AS SELECT productid AS id, * FROM production.productinventory - CREATE VIEW plph AS SELECT productid AS id, * FROM production.productlistpricehistory - CREATE VIEW pm AS SELECT productmodelid AS id, * FROM production.productmodel - CREATE VIEW pmi AS SELECT * FROM production.productmodelillustration - CREATE 
VIEW pmpdc AS SELECT * FROM production.productmodelproductdescriptionculture - CREATE VIEW pp AS SELECT productphotoid AS id, * FROM production.productphoto - CREATE VIEW ppp AS SELECT * FROM production.productproductphoto - CREATE VIEW pr AS SELECT productreviewid AS id, * FROM production.productreview - CREATE VIEW psc AS SELECT productsubcategoryid AS id, * FROM production.productsubcategory - CREATE VIEW sr AS SELECT scrapreasonid AS id, * FROM production.scrapreason - CREATE VIEW th AS SELECT transactionid AS id, * FROM production.transactionhistory - CREATE VIEW tha AS SELECT transactionid AS id, * FROM production.transactionhistoryarchive - CREATE VIEW um AS SELECT unitmeasurecode AS id, * FROM production.unitmeasure - CREATE VIEW w AS SELECT workorderid AS id, * FROM production.workorder - CREATE VIEW wr AS SELECT workorderid AS id, * FROM production.workorderrouting -; -CREATE SCHEMA pu - CREATE VIEW pv AS SELECT productid AS id, * FROM purchasing.productvendor - CREATE VIEW pod AS SELECT purchaseorderdetailid AS id, * FROM purchasing.purchaseorderdetail - CREATE VIEW poh AS SELECT purchaseorderid AS id, * FROM purchasing.purchaseorderheader - CREATE VIEW sm AS SELECT shipmethodid AS id, * FROM purchasing.shipmethod - CREATE VIEW v AS SELECT businessentityid AS id, * FROM purchasing.vendor -; -CREATE SCHEMA sa - CREATE VIEW crc AS SELECT * FROM sales.countryregioncurrency - CREATE VIEW cc AS SELECT creditcardid AS id, * FROM sales.creditcard - CREATE VIEW cu AS SELECT currencycode AS id, * FROM sales.currency - CREATE VIEW cr AS SELECT * FROM sales.currencyrate - CREATE VIEW c AS SELECT customerid AS id, * FROM sales.customer - CREATE VIEW pcc AS SELECT businessentityid AS id, * FROM sales.personcreditcard - CREATE VIEW sod AS SELECT salesorderdetailid AS id, * FROM sales.salesorderdetail - CREATE VIEW soh AS SELECT salesorderid AS id, * FROM sales.salesorderheader - CREATE VIEW sohsr AS SELECT * FROM sales.salesorderheadersalesreason - CREATE VIEW sp AS 
SELECT businessentityid AS id, * FROM sales.salesperson - CREATE VIEW spqh AS SELECT businessentityid AS id, * FROM sales.salespersonquotahistory - CREATE VIEW sr AS SELECT salesreasonid AS id, * FROM sales.salesreason - CREATE VIEW tr AS SELECT salestaxrateid AS id, * FROM sales.salestaxrate - CREATE VIEW st AS SELECT territoryid AS id, * FROM sales.salesterritory - CREATE VIEW sth AS SELECT territoryid AS id, * FROM sales.salesterritoryhistory - CREATE VIEW sci AS SELECT shoppingcartitemid AS id, * FROM sales.shoppingcartitem - CREATE VIEW so AS SELECT specialofferid AS id, * FROM sales.specialoffer - CREATE VIEW sop AS SELECT specialofferid AS id, * FROM sales.specialofferproduct - CREATE VIEW s AS SELECT businessentityid AS id, * FROM sales.store -; - -- Ensure that Perry Skountrianos' change for Türkiye is implemented properly if it was intended -- https://github.com/microsoft/sql-server-samples/commit/cca0f1920e3bec5b9cef97e1fdc32b6883526581 -- Fix any messed up one if such a thing exists and is not proper unicode From e903ea485ce042d64d296bfb68a7e11d5ea40d18 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Wed, 28 Feb 2024 01:38:02 -0800 Subject: [PATCH 3/3] parameterize start of DB instance a bit richer. 
add volume env var --- Makefile | 8 ++++++-- docker-compose-postgres.yml | 10 +++++----- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/Makefile b/Makefile index cc0c2ac..4314894 100644 --- a/Makefile +++ b/Makefile @@ -28,6 +28,7 @@ ${SOURCE_CSV_DIR}/csvs_are_converted: ${SOURCE_CSV_DIR} ################################### DB_DOCKER_NETWORK?=database +DB_DOCKER_VOLUME?=adventure_works_data .PHONY: ensure_db_network_exists ensure_db_network_exists: @@ -40,11 +41,15 @@ ensure_db_volume_exists: .PHONY: db_start db_start: ensure_db_network_exists ensure_db_volume_exists POSTGRES_IMAGE=${POSTGRES_IMAGE} \ + DB_DOCKER_VOLUME=${DB_DOCKER_VOLUME} \ + DB_DOCKER_NETWORK=${DB_DOCKER_NETWORK} \ ${DOCKER_COMMAND} compose -f docker-compose-postgres.yml up -d .PHONY: db_stop db_stop: POSTGRES_IMAGE=${POSTGRES_IMAGE} \ + DB_DOCKER_VOLUME=${DB_DOCKER_VOLUME} \ + DB_DOCKER_NETWORK=${DB_DOCKER_NETWORK} \ ${DOCKER_COMMAND} compose -f docker-compose-postgres.yml down # used only for PSQL client to talk to the DB server @@ -109,5 +114,4 @@ clean: clean .PHONY: clean-db clean-db: @echo "Ensure DB is stopped ('make db_stop')" - docker container prune -f - docker volume rm adventureworks-for-postgres_adventure_works_data + docker volume rm ${DB_DOCKER_VOLUME} diff --git a/docker-compose-postgres.yml b/docker-compose-postgres.yml index 191892f..8e6a41f 100644 --- a/docker-compose-postgres.yml +++ b/docker-compose-postgres.yml @@ -1,25 +1,25 @@ -version: '3.1' - services: db: - image: ${POSTGRES_IMAGE} + image: "${POSTGRES_IMAGE}" restart: always environment: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres PGDATA: /var/lib/postgresql/data/pgdata volumes: - - adventure_works_data:/var/lib/postgresql/data + - database:/var/lib/postgresql/data networks: - database ports: - 5432:5432 volumes: - adventure_works_data: + database: external: true + name: "${DB_DOCKER_VOLUME}" networks: database: external: true + name: "${DB_DOCKER_NETWORK}"