Commit a09e358

cli binary generation
1 parent 9dfe0dd commit a09e358

File tree

6 files changed (+363, -1 lines changed)

.github/workflows/ship.yaml

+247
@@ -0,0 +1,247 @@
name: ship

on:
  push:

jobs:
  nix-build:
    name: nix build
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target:
          - x86_64-linux
          - aarch64-linux
    steps:
      - name: Checkout 🛎️
        uses: actions/checkout@v4

      - name: Install Nix ❄
        uses: cachix/install-nix-action@V27
        with:
          github_access_token: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up the Nix Cache 🔌
        uses: cachix/cachix-action@v15
        with:
          name: hasura-v3-dev
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}

      - name: Build a binary with Nix
        run: nix build --print-build-logs '.#${{ matrix.target }}'

      - name: Build a Docker image with Nix
        run: nix build --print-build-logs '.#docker-${{ matrix.target }}'

      # scream into Slack if something goes wrong
      - name: Report Status
        if: always() && github.ref == 'refs/heads/main'
        uses: ravsamhq/notify-slack-action@v2
        with:
          status: ${{ job.status }}
          notify_when: failure
          notification_title: "😧 Error on <{repo_url}|{repo}>"
          message_format: "🐴 *{workflow}* {status_message} for <{repo_url}|{repo}>"
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.BROKEN_BUILD_SLACK_WEBHOOK_URL }}

  push-docker-images:
    name: push Docker images
    needs:
      - nix-build
    runs-on: ubuntu-latest
    # Only run on the `main` branch or version tags.
    # Note we currently tag the image with 'latest', so will want to stop doing
    # so if we run this on PR branches, etc.
    if: (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v'))
    permissions:
      contents: read
      id-token: write
      packages: write
    steps:
      - name: Checkout 🛎️
        uses: actions/checkout@v4

      - name: Install Nix ❄
        uses: cachix/install-nix-action@V27
        with:
          github_access_token: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up the Nix Cache 🔌
        uses: cachix/cachix-action@v15
        with:
          name: hasura-v3-dev
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}

      - id: gcloud-auth
        name: Authenticate to Google Cloud 🔑
        uses: google-github-actions/auth@v2
        with:
          token_format: access_token
          service_account: "hasura-ci-docker-writer@hasura-ddn.iam.gserviceaccount.com"
          workload_identity_provider: "projects/1025009031284/locations/global/workloadIdentityPools/hasura-ddn/providers/github"

      - name: Login to Google Container Registry 📦
        uses: "docker/login-action@v3"
        with:
          registry: "us-docker.pkg.dev"
          username: "oauth2accesstoken"
          password: "${{ steps.gcloud-auth.outputs.access_token }}"

      - name: Login to GitHub Container Registry 📦
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Push Docker images to Google Container Registry 🚢
        run: nix run .#publish-docker-image '${{ github.ref }}' 'us-docker.pkg.dev/hasura-ddn/ddn/ndc-bigquery'

      - name: Push Docker images to GitHub Packages 🚢
        run: nix run .#publish-docker-image '${{ github.ref }}' 'ghcr.io/hasura/ndc-bigquery'

      # scream into Slack if something goes wrong
      - name: Report Status
        if: always()
        uses: ravsamhq/notify-slack-action@v2
        with:
          status: ${{ job.status }}
          notify_when: failure
          notification_title: "😧 Error on <{repo_url}|{repo}>"
          message_format: "🐴 *{workflow}* {status_message} for <{repo_url}|{repo}>"
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.BROKEN_BUILD_SLACK_WEBHOOK_URL }}

  build-cli-binaries:
    name: build the CLI binaries
    strategy:
      matrix:
        include:
          - runner: ubuntu-20.04
            target: x86_64-unknown-linux-gnu
          - runner: ubuntu-20.04
            target: aarch64-unknown-linux-gnu
            linux-packages: gcc-aarch64-linux-gnu
            linker: /usr/bin/aarch64-linux-gnu-gcc
          - runner: macos-latest
            target: x86_64-apple-darwin
          - runner: macos-latest
            target: aarch64-apple-darwin
          - runner: windows-latest
            target: x86_64-pc-windows-msvc
            extension: .exe
            extra-rust-flags: "-C target-feature=+crt-static"
    runs-on: ${{ matrix.runner }}
    env:
      CARGO_BUILD_TARGET: ${{ matrix.target }}
      CARGO_NET_GIT_FETCH_WITH_CLI: "true"
      RUSTFLAGS: "-D warnings" # fail on warnings
    defaults:
      run:
        shell: bash
    steps:
      - uses: actions/checkout@v4

      - name: install protoc
        uses: arduino/setup-protoc@v3
        with:
          version: "25.x"
          repo-token: ${{ secrets.GITHUB_TOKEN }}

      - name: install tools
        run: |
          rustup show
          rustup target add ${{ matrix.target }}

      - name: install other packages required
        if: matrix.linux-packages
        run: |
          sudo apt-get update
          sudo apt-get install -y ${{ matrix.linux-packages }}

      - uses: Swatinem/rust-cache@v2
        with:
          shared-key: "build-${matrix.runner}" # share the cache across jobs

      - name: build the CLI
        run: |
          # If we're on a tag, use the tag name as the release version.
          if [[ "$GITHUB_REF_TYPE" == 'tag' ]]; then
            # Ensure that the version specified in Cargo.toml is the same as the tag (with a 'v' prefix).
            CARGO_VERSION="$(cargo metadata --format-version=1 | jq -r '.packages | .[] | select(.name == "ndc-bigquery") | .version')"
            echo "Git tag: ${GITHUB_REF_NAME}"
            echo "Cargo version: ${CARGO_VERSION}"

            if [[ "$GITHUB_REF_NAME" != "v${CARGO_VERSION}" ]]; then
              echo >&2 "The Git tag is \"${GITHUB_REF_NAME}\", but the version in Cargo.toml is \"${CARGO_VERSION}\"."
              echo >&2 'These must be the same, with a "v" prefix for the tag. Aborting.'
              exit 1
            fi
            export RELEASE_VERSION="$GITHUB_REF_NAME"
            echo "RELEASE_VERSION = ${RELEASE_VERSION}"
          fi

          if [[ -n '${{ matrix.linker }}' ]]; then
            TARGET_SCREAMING="$(echo '${{ matrix.target }}' | tr '[:lower:]' '[:upper:]' | tr '-' '_')"
            echo "CARGO_TARGET_${TARGET_SCREAMING}_LINKER"='${{ matrix.linker }}'
            declare "CARGO_TARGET_${TARGET_SCREAMING}_LINKER"='${{ matrix.linker }}'
            export "CARGO_TARGET_${TARGET_SCREAMING}_LINKER"
          fi

          if [[ -n '${{ matrix.extra-rust-flags }}' ]]; then
            RUSTFLAGS="${RUSTFLAGS} ${{ matrix.extra-rust-flags }}"
            export RUSTFLAGS
          fi
          echo "RUSTFLAGS = ${RUSTFLAGS}"

          echo "Building for target: ${CARGO_BUILD_TARGET}"
          cargo build --release --package=ndc-bigquery-cli

          mkdir -p release
          mv -v target/${{ matrix.target }}/release/ndc-bigquery-cli release/ndc-bigquery-cli-${{ matrix.target }}${{ matrix.extension }}

      - uses: actions/upload-artifact@v4
        with:
          name: ndc-bigquery-cli-${{ matrix.target }}
          path: release
          if-no-files-found: error

  release:
    name: release to GitHub
    needs:
      - push-docker-images # not strictly necessary, but if this fails, we should abort
      - build-cli-binaries
    runs-on: ubuntu-latest
    # We release when a tag is pushed.
    if: startsWith(github.ref, 'refs/tags/v')
    steps:
      - uses: actions/checkout@v4

      - uses: actions/download-artifact@v4
        with:
          path: release/artifacts
          merge-multiple: true

      - name: generate SHA-256 checksums
        run: |
          cd release/artifacts
          sha256sum * > ./sha256sum

      - name: generate a changelog
        run: |
          ./scripts/release-notes.py "${GITHUB_REF_NAME}" >> release/notes.md

      - name: generate a connector package
        run: |
          chmod +x ./release/artifacts/ndc-bigquery-cli-*
          mkdir release/package
          ./release/artifacts/ndc-bigquery-cli-x86_64-unknown-linux-gnu --context=release/package initialize --with-metadata
          tar vczf release/artifacts/package.tar.gz -C release/package .

      - name: create a draft release
        uses: ncipollo/release-action@v1
        with:
          draft: true
          bodyFile: release/notes.md
          artifacts: release/artifacts/*
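
Before pushing a release tag, the version guard in the `build the CLI` step can be reproduced locally with a short shell snippet (a minimal sketch: the `ndc-bigquery` package name and the jq filter are taken from the workflow above, and `TAG` is a placeholder for the tag you intend to push):

```sh
# Check that the intended Git tag matches the version in Cargo.toml,
# mirroring the check in the build-cli-binaries job.
TAG='v0.1.0'  # placeholder: the tag you are about to push
CARGO_VERSION="$(cargo metadata --format-version=1 \
  | jq -r '.packages | .[] | select(.name == "ndc-bigquery") | .version')"
if [[ "$TAG" != "v${CARGO_VERSION}" ]]; then
  echo >&2 "Tag ${TAG} does not match Cargo.toml version ${CARGO_VERSION}."
  exit 1
fi
```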

changelog.md

+21
@@ -0,0 +1,21 @@
# Changelog

## [Unreleased]

### Added

### Changed

### Fixed

## [v0.1.0] - 2024-09-19

- Initial release with support for ndc-spec v0.1.6.
- Support for a CLI plugin for the Hasura v3 CLI, which allows the CLI to
  introspect the database on demand.
- The default port was changed from 8100 to 8080.

<!-- end -->

[Unreleased]: https://github.com/hasura/ndc-bigquery/compare/v0.2.0...HEAD
[v0.1.0]: https://github.com/hasura/ndc-bigquery/releases/tag/v0.1.0

crates/cli/readme.md

+26
@@ -0,0 +1,26 @@
# ndc-bigquery-cli

ndc-bigquery-cli is used to configure a deployment of ndc-bigquery.
It is intended to be automatically downloaded and invoked via the Hasura CLI, as a plugin.

## Create a configuration

Create a configuration in a new directory using the following commands:

1. Initialize a configuration:

   ```sh
   export HASURA_BIGQUERY_SERVICE_KEY='<bigquery-service-key>'
   export HASURA_BIGQUERY_PROJECT_ID='<bigquery-project-id>'
   export HASURA_BIGQUERY_DATASET_ID='<bigquery-dataset-id>'
   cargo run --bin ndc-bigquery-cli -- --context='<directory>' initialize
   ```

2. Update the configuration by introspecting the database:

   ```sh
   export HASURA_BIGQUERY_SERVICE_KEY='<bigquery-service-key>'
   export HASURA_BIGQUERY_PROJECT_ID='<bigquery-project-id>'
   export HASURA_BIGQUERY_DATASET_ID='<bigquery-dataset-id>'
   cargo run --bin ndc-bigquery-cli -- --context='<directory>' update
   ```
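
The same steps should also work with a prebuilt binary in place of `cargo run`; for instance (a sketch that mirrors the `generate a connector package` step in the CI workflow above, using the Linux x86_64 artifact name produced by that workflow):

```sh
# Download the release artifact for your platform, make it executable,
# and initialize a configuration directory with connector metadata.
chmod +x ./ndc-bigquery-cli-x86_64-unknown-linux-gnu
./ndc-bigquery-cli-x86_64-unknown-linux-gnu --context='<directory>' initialize --with-metadata
```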

crates/cli/src/lib.rs

+4
@@ -72,6 +72,10 @@ pub async fn run(command: Command, context: Context<impl Environment>) -> anyhow
 /// Optionally, this can also create the connector metadata, which is used by the Hasura CLI to
 /// automatically work with this CLI as a plugin.
 async fn initialize(with_metadata: bool, context: Context<impl Environment>) -> anyhow::Result<()> {
+    let _ = context
+        .context_path
+        .join(configuration::version1::CONFIGURATION_FILENAME);
+    fs::create_dir_all(&context.context_path).await?;
     // refuse to initialize the directory unless it is empty
     let mut items_in_dir = fs::read_dir(&context.context_path).await?;
     if items_in_dir.next_entry().await?.is_some() {

crates/configuration/src/values/pool_settings.rs

+1-1
@@ -1,7 +1,7 @@
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};

-/// Settings for the PostgreSQL connection pool
+/// Settings for the BigQuery connection pool
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, JsonSchema)]
 pub struct PoolSettings {
     /// maximum number of pool connections

scripts/release-notes.py

+64
@@ -0,0 +1,64 @@
#!/usr/bin/env python3

# Generates the release notes for a GitHub release automatically.
#
# These notes consist of:
# 1. the Docker image name
# 2. the part of the changelog corresponding to the given version

import argparse
import sys

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("version")
    args = parser.parse_args()

    print_docker_image(args.version)
    print_changelog_for_version(args.version)

def print_docker_image(version):
    print('The connector Docker image is:')
    print('')
    print(f'    ghcr.io/hasura/ndc-bigquery:{version}')
    print('')

def print_changelog_for_version(version):
    recording = False
    changelog_lines = []
    with open("changelog.md") as file:
        for line in file:
            line = line.rstrip()  # remove trailing spaces and newline
            # start recording lines when we find the entry corresponding to the
            # given version
            if line.startswith(f"## [{version}]"):
                recording = True
                continue
            if recording:
                # stop when we hit the next section or the end
                if line.startswith("## ") or line == "<!-- end -->":
                    break
                changelog_lines.append(line)

    # discard blank lines at the start or end
    try:
        while changelog_lines[0] == "":
            changelog_lines.pop(0)
        while changelog_lines[-1] == "":
            changelog_lines.pop()
    except IndexError:
        pass

    # if it's empty, we have failed
    if not changelog_lines:
        print(f"Could not find a changelog for version {version}.", file=sys.stderr)
        sys.exit(1)

    # print the result
    print('## Changelog')
    print()
    for line in changelog_lines:
        print(line)

if __name__ == '__main__':
    main()
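
For a local preview of the notes, the script can be run from the repository root (it reads `changelog.md` from the current directory), passing the tag name just as the `generate a changelog` workflow step does:

```sh
# Print the Docker image name and the v0.1.0 changelog entry to stdout.
./scripts/release-notes.py v0.1.0
```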
