docker backend: process to load file retrieved by sidekick
Showing 4 changed files with 125 additions and 1 deletion.
@@ -0,0 +1,74 @@
#!/bin/bash

INPUT_DIR=$1
ARCHIVE_NAME="retrieved_data.tar.gz" # Name of the tar.gz file

# Optionally source an env file providing DB_HOST, DB_USER, DB_PASS, DB_NAME
if [ -n "$2" ] ; then
  . "$2"
fi

# Check the input file exists
if [ ! -f "$INPUT_DIR/$ARCHIVE_NAME" ]; then
  exit 0
fi

echo "Retrieved file found"

# Decompress the tar.gz file to get the CSV files
echo "Decompressing tar.gz archive..."
tar -xzf "$INPUT_DIR/$ARCHIVE_NAME" -C "$INPUT_DIR"

# Check if the decompression was successful
if [[ $? -ne 0 ]]; then
  echo "Error decompressing archive."
  exit 100
fi

# Check that all target tables are empty before loading anything
for input_file in "$INPUT_DIR"/*.csv; do
  # Get the table name from the CSV file name, ignoring the prefix
  table=$(basename "$input_file" .csv | cut -d'-' -f2-)
  if [[ $(PGPASSWORD="${DB_PASS}" psql -h "${DB_HOST}" -U "${DB_USER}" -d "${DB_NAME}" -t -c "SELECT 1 FROM \"$table\" LIMIT 1") ]]; then
    echo "Table '$table' is not empty. Will not load retrieved file"
    exit 100
  fi
done

# Loop over each CSV file and generate a \COPY command for each
for input_file in "$INPUT_DIR"/*.csv; do
  # Get the table name from the CSV file name, ignoring the prefix
  table=$(basename "$input_file" .csv | cut -d'-' -f2-)

  # Read the header line of the CSV file to get the column names
  IFS=',' read -r -a columns < "$input_file"

  # Add quotes around each column name
  columnList=""
  first="1"
  for column in "${columns[@]}"; do
    if [ -z "${first}" ] ; then
      columnList+=", "
    else
      first=""
    fi
    columnList+="\"$column\""
  done

  # Generate the \COPY command to import the CSV file into the table
  echo "Importing data from '$input_file'"
  PGPASSWORD="${DB_PASS}" psql -h "${DB_HOST}" -U "${DB_USER}" -d "${DB_NAME}" -c "\COPY \"$table\"(${columnList}) FROM '$input_file' WITH CSV HEADER" &> "${input_file}.log"
  # Capture psql's exit code before it is overwritten by the next command
  status=$?

  if [ "$status" -gt 0 ]; then
    echo "Error importing data from $input_file into table '$table':"
    cat "${input_file}.log"
    exit 100
  fi

  echo "Data imported successfully from $input_file into table '$table'."
done
exit 1
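
For reference, a minimal invocation sketch of the script above; the script name, data directory, and env-file name are illustrative assumptions, not part of the commit:

# load_retrieved_data.sh is a placeholder name for the script above.
# db.env is assumed to export the connection variables the script sources:
#   DB_HOST, DB_USER, DB_PASS, DB_NAME
./load_retrieved_data.sh /data/retrieved db.env
# exit 0   -> no retrieved_data.tar.gz found in the input directory
# exit 100 -> decompression failure, a non-empty target table, or a \COPY error
# exit 1   -> all CSV files were imported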
@@ -0,0 +1,47 @@
import fs from "fs";
import { Sequelize } from "sequelize";

// eslint-disable-next-line @typescript-eslint/no-var-requires
const dbConfig = require("../config/database");

const sequelize = new Sequelize(dbConfig);
const seedsDir = "dist/database/seeds";

// Collect the compiled seed files shipped with the build
function getSeedFiles() {
  return fs.readdirSync(seedsDir).filter(file => file.endsWith(".js"));
}

async function markSeedsAsExecuted() {
  const seedFiles = getSeedFiles();

  if (seedFiles.length === 0) {
    console.log("No seed files found in the seeds directory.");
    return;
  }

  console.log(
    `Found ${seedFiles.length} seed files. Marking them as executed...`
  );

  try {
    // "SequelizeData" is the table sequelize-cli uses to track executed seeds
    // when seederStorage is set to "sequelize"
    await sequelize.query(`
      CREATE TABLE IF NOT EXISTS "SequelizeData" (
        "name" VARCHAR(255),
        PRIMARY KEY ("name")
      )
    `);

    // Record every seed file as already executed so it is not re-run
    await sequelize.query(
      `INSERT INTO "SequelizeData" (name) VALUES ${seedFiles
        .map(seed => `('${seed}')`)
        .join(", ")}`
    );
    console.log("All seeds marked as executed successfully!");
  } catch (error) {
    console.error("Error marking seeds as executed:", error);
  } finally {
    await sequelize.close();
  }
}

markSeedsAsExecuted();
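
A hedged usage sketch for the script above; the build step and compiled output path are assumptions based on the dist/ layout referenced in the code, not paths confirmed by the commit:

# Assumes the TypeScript build emits this file under dist/ alongside
# dist/database/seeds and a compiled config/database module.
npx tsc
node dist/database/markSeedsAsExecuted.js   # hypothetical output path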