Skip to content

Commit

Permalink
ZIL-5434: PDT setup daemon and cronjobs (#237)
Browse files Browse the repository at this point in the history
* ZIL-5421: PDT - Separate Listen on BQ and PSQL

* ZIL-5421: PDT: Update k8 configs

* Fix manifests

* Fixed cronjob yaml

* fix manifests
  • Loading branch information
WuBruno authored Oct 20, 2023
1 parent 00799da commit 751fbb2
Show file tree
Hide file tree
Showing 9 changed files with 213 additions and 71 deletions.
3 changes: 0 additions & 3 deletions products/pdt/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
# Requires alpine to get openssl and build
FROM rust:1.71.0-bullseye as builder

WORKDIR /pdt
Expand All @@ -23,5 +22,3 @@ RUN apt-get update -y && \
rm -rf /var/lib/apt/lists/*

COPY --from=builder /pdt/build/pdt /pdt

CMD ["/pdt", "test"]
14 changes: 14 additions & 0 deletions products/pdt/cd/base/configmap.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Runtime configuration for the PDT importer. Referenced by name ("pdt")
# from the listener Deployment and the nightly CronJob via configMapKeyRef.
apiVersion: v1
kind: ConfigMap
metadata:
  name: pdt
  namespace: pdt
data:
  # BigQuery destination for imported chain data.
  PROJECT_ID: "prj-c-data-analytics-3xs14wez"
  DATASET_ID: "ds_zq1_testnet"
  # Logical network flavour and the concrete network release to import.
  NETWORK_TYPE: "testnet"
  NETWORK: "testnet-v925"
  # Staging directories on the shared volume (mounted at /data by consumers).
  DOWNLOAD_DIR: "/data/testnet"
  UNPACK_DIR: "/data/testnet"
  # Import worker count and blocks-per-batch for the bqmulti command.
  NR_THREADS: "1"
  BATCH_BLOCKS: "1000"
69 changes: 69 additions & 0 deletions products/pdt/cd/base/cronjob.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
# Nightly batch import job: downloads the latest persistence data and then
# bulk-loads it into BigQuery. All tunables come from the "pdt" ConfigMap.
apiVersion: batch/v1
kind: CronJob
metadata:
  name: pdt-cronjob
  namespace: pdt
  labels:
    app.kubernetes.io/name: "pdt-cronjob"
spec:
  # Daily at midnight (controller's timezone).
  # NOTE(review): no concurrencyPolicy set, so a slow run may overlap the
  # next one (default is Allow) -- confirm this is acceptable.
  schedule: "0 0 * * *"
  jobTemplate:
    spec:
      template:
        metadata:
          labels:
            app.kubernetes.io/name: "pdt-cronjob"
        spec:
          # Restart the failed container in place rather than failing the Job.
          restartPolicy: OnFailure
          # Default SA in the base; overlays may patch in a dedicated one.
          serviceAccountName: default
          containers:
            # Image name is a placeholder; presumably rewritten by kustomize
            # image transformers -- verify against the overlays.
            - image: pdt
              name: pdt-cronjob
              volumeMounts:
                # Scratch space shared between the download and import steps.
                - mountPath: /data
                  name: data
              # Wire ConfigMap values into the container environment, one
              # key per variable.
              env:
                - name: PROJECT_ID
                  valueFrom:
                    configMapKeyRef:
                      name: pdt
                      key: PROJECT_ID
                - name: DATASET_ID
                  valueFrom:
                    configMapKeyRef:
                      name: pdt
                      key: DATASET_ID
                - name: NETWORK
                  valueFrom:
                    configMapKeyRef:
                      name: pdt
                      key: NETWORK
                - name: DOWNLOAD_DIR
                  valueFrom:
                    configMapKeyRef:
                      name: pdt
                      key: DOWNLOAD_DIR
                - name: UNPACK_DIR
                  valueFrom:
                    configMapKeyRef:
                      name: pdt
                      key: UNPACK_DIR
                - name: NR_THREADS
                  valueFrom:
                    configMapKeyRef:
                      name: pdt
                      key: NR_THREADS
                - name: BATCH_BLOCKS
                  valueFrom:
                    configMapKeyRef:
                      name: pdt
                      key: BATCH_BLOCKS
              command: ["/bin/bash", "-c"]
              # Two sequential steps: fetch persistence, then multi-threaded
              # BigQuery import of the unpacked blocks.
              args:
                - |
                  ./pdt --download-dir=$DOWNLOAD_DIR --unpack-dir=$UNPACK_DIR --network=$NETWORK download
                  ./pdt --download-dir=$DOWNLOAD_DIR --unpack-dir=$UNPACK_DIR bqmulti --project-id=$PROJECT_ID --dataset-id=$DATASET_ID --nr-threads=$NR_THREADS --batch-blocks=$BATCH_BLOCKS
          volumes:
            - name: data
              persistentVolumeClaim:
                claimName: pdt-pvc
38 changes: 24 additions & 14 deletions products/pdt/cd/base/deployment.yaml
Original file line number Diff line number Diff line change
@@ -1,32 +1,42 @@
# Long-running listener that tails the chain and streams new data into
# BigQuery (bqlisten subcommand). Reconstructed post-commit manifest; the
# scrape interleaved removed and added diff lines.
#
# Fix: the args scalar previously ended with a stray trailing comma
# ("--dataset-id $DATASET_ID,"), which the shell would pass through as part
# of the dataset id ("ds_zq1_testnet,"). The comma is removed.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: pdt-listen
  namespace: pdt
  labels:
    "app.kubernetes.io/name": "pdt-listen"
spec:
  replicas: 1
  selector:
    matchLabels:
      "app.kubernetes.io/name": "pdt-listen"
  strategy:
    type: RollingUpdate
  template:
    metadata:
      labels:
        "app.kubernetes.io/name": "pdt-listen"
    spec:
      # Default SA in the base; the staging overlay patches this to "pdt".
      serviceAccountName: default
      containers:
        # Image name is a placeholder; presumably rewritten by kustomize
        # image transformers -- verify against the overlays.
        - image: pdt
          name: pdt-listen
          # Listener configuration comes from the "pdt" ConfigMap.
          env:
            - name: PROJECT_ID
              valueFrom:
                configMapKeyRef:
                  name: pdt
                  key: PROJECT_ID
            - name: DATASET_ID
              valueFrom:
                configMapKeyRef:
                  name: pdt
                  key: DATASET_ID
            - name: NETWORK_TYPE
              valueFrom:
                configMapKeyRef:
                  name: pdt
                  key: NETWORK_TYPE
          command: ["/bin/bash", "-c"]
          args:
            - ./pdt --network-type $NETWORK_TYPE bqlisten --project-id $PROJECT_ID --dataset-id $DATASET_ID
2 changes: 2 additions & 0 deletions products/pdt/cd/base/kustomization.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
# Base kustomization for the PDT stack: namespace, config, storage, the
# nightly import CronJob, and the bqlisten Deployment.
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
resources:
  - configmap.yaml
  - namespace.yaml
  - pvc.yaml
  - cronjob.yaml
  - deployment.yaml
13 changes: 13 additions & 0 deletions products/pdt/cd/overlays/staging/configmap.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Staging overlay values for the "pdt" ConfigMap, merged over the base via
# patchesStrategicMerge. Namespace is supplied by the overlay kustomization.
apiVersion: v1
kind: ConfigMap
metadata:
  name: pdt
data:
  # BigQuery destination for imported chain data.
  PROJECT_ID: "prj-c-data-analytics-3xs14wez"
  DATASET_ID: "ds_zq1_testnet"
  # Logical network flavour and the concrete network release to import.
  NETWORK_TYPE: "testnet"
  NETWORK: "testnet-v925"
  # Staging directories on the shared volume.
  DOWNLOAD_DIR: "/data/testnet"
  UNPACK_DIR: "/data/testnet"
  # Import worker count and blocks-per-batch for the bqmulti command.
  NR_THREADS: "1"
  BATCH_BLOCKS: "1000"
12 changes: 11 additions & 1 deletion products/pdt/cd/overlays/staging/kustomization.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,18 @@ namespace: pdt-staging
patches:
  # Run both workloads under the dedicated "pdt" service account in staging.
  #
  # Fix: the first target previously declared "kind: Deployment" for
  # pdt-cronjob, but pdt-cronjob is a CronJob -- the JSON pointer below
  # (/spec/jobTemplate/...) only exists in the CronJob schema, and kustomize
  # would never match a Deployment by that name. Target corrected to CronJob.
  - target:
      kind: CronJob
      name: pdt-cronjob
    patch: |-
      - op: replace
        path: /spec/jobTemplate/spec/template/spec/serviceAccountName
        value: pdt
  - target:
      kind: Deployment
      name: pdt-listen
    patch: |-
      - op: replace
        path: /spec/template/spec/serviceAccountName
        value: pdt
patchesStrategicMerge:
  - configmap.yaml
43 changes: 25 additions & 18 deletions products/pdt/pdt/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ use pdtlib::exporter::Exporter;
use pdtlib::historical::Historical;
use pdtlib::incremental::Incremental;
use pdtlib::render::Renderer;
use pdtlisten::listen;
use pdtlisten::{listen_bq, listen_psql};
use pdtparse::parse_zrc2;

#[derive(Parser)]
Expand Down Expand Up @@ -65,10 +65,10 @@ enum Commands {
ReconcileBlocks(ReconcileOptions),
#[command(name = "parse-events")]
ParseEvents,
#[command(name = "listen")]
Listen(ListenOptions),
#[command(name = "test")]
Test,
#[command(name = "psqllisten")]
PSQLListen,
#[command(name = "bqlisten")]
BQListen(ListenOptions),
}

// #[derive(Debug, Args)]
Expand Down Expand Up @@ -139,9 +139,6 @@ struct ListenOptions {

#[arg(long, default_value = "prj-c-data-analytics-3xs14wez")]
project_id: String,

#[arg(long)]
service_account_key_file: Option<String>,
}

const TESTNET_BUCKET: &str = "301978b4-0c0a-4b6b-ad7b-3a2f63c5182c";
Expand Down Expand Up @@ -273,17 +270,24 @@ async fn bigquery_reconcile_blocks(unpack_dir: &str, opts: &ReconcileOptions) ->
.await
}

async fn listen_outer(
/// Stream chain data into PostgreSQL, using the JSON-RPC endpoint that
/// corresponds to `network_type`.
///
/// Thin wrapper over [`listen_psql`]; runs until the listener returns.
async fn psql_listen_outer(postgres_url: &str, network_type: &NetworkType) -> Result<()> {
    // Resolve the endpoint inline and hand straight off to the listener;
    // the match is exhaustive over NetworkType.
    listen_psql(
        postgres_url,
        match network_type {
            NetworkType::Testnet => DEV_API_URL,
            NetworkType::Mainnet => MAINNET_API_URL,
        },
    )
    .await
}

/// Stream chain data into BigQuery, using the JSON-RPC endpoint that
/// corresponds to `network_type`.
///
/// Reconstructed post-commit body: the scrape interleaved the removed
/// `postgres_url` parameter and the old four-argument `listen(...)` call
/// with the new code. The three-argument form below matches the
/// `Commands::BQListen` call site and the `listen_bq` import from
/// `pdtlisten`.
async fn bigquery_listen_outer(
    bq_project_id: &str,
    bq_dataset_id: &str,
    network_type: &NetworkType,
) -> Result<()> {
    // Pick the JSON-RPC endpoint matching the requested network.
    let api_url = match network_type {
        NetworkType::Testnet => DEV_API_URL,
        NetworkType::Mainnet => MAINNET_API_URL,
    };
    listen_bq(bq_project_id, bq_dataset_id, api_url).await
}

#[tokio::main]
Expand All @@ -307,20 +311,23 @@ async fn main() -> Result<()> {
}
Commands::ReconcileBlocks(opts) => bigquery_reconcile_blocks(&cli.unpack_dir, opts).await,
Commands::ParseEvents => parse_events(&cli.token_type, &cli.postgres_url).await,
Commands::Listen(opts) => {
listen_outer(
&opts.project_id,
&opts.dataset_id,
Commands::PSQLListen => {
psql_listen_outer(
&cli.postgres_url
.expect("no postgres connection url -- did you forget to set --postgres-url?"),
&cli.network_type
.expect("no network type -- did forget to set --network-type?"),
)
.await
}
Commands::Test => {
println!("Hello World");
loop {}
Commands::BQListen(opts) => {
bigquery_listen_outer(
&opts.project_id,
&opts.dataset_id,
&cli.network_type
.expect("no network type -- did forget to set --network-type?"),
)
.await
}
}
}
Loading

0 comments on commit 751fbb2

Please sign in to comment.