Skip to content

Commit b78d729

Browse files
committed
collect all env vars as part of connection_settings
1 parent ddb2ff0 commit b78d729

File tree

14 files changed

+141
-69
lines changed

14 files changed

+141
-69
lines changed
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
1+
//! Database connection settings.
2+
3+
use crate::values::{DatasetId, ProjectId, Secret, ServiceKey};
4+
use schemars::JsonSchema;
5+
use serde::{Deserialize, Serialize};
6+
7+
pub const DEFAULT_SERVICE_KEY_VARIABLE: &str = "HASURA_BIGQUERY_SERVICE_KEY";
8+
pub const DEFAULT_PROJECT_ID_VARIABLE: &str = "HASURA_BIGQUERY_PROJECT_ID";
9+
pub const DEFAULT_DATASET_ID_VARIABLE: &str = "HASURA_BIGQUERY_DATASET_ID";
10+
11+
/// Database connection settings.
12+
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize, JsonSchema)]
13+
#[serde(rename_all = "camelCase")]
14+
pub struct DatabaseConnectionSettings {
15+
/// Connection string for a Postgres-compatible database.
16+
pub service_key: ServiceKey,
17+
/// Project ID for a BigQuery database.
18+
pub project_id: ProjectId,
19+
/// Dataset ID for a BigQuery database.
20+
pub dataset_id: DatasetId,
21+
}
22+
23+
impl DatabaseConnectionSettings {
24+
pub fn empty() -> Self {
25+
Self {
26+
service_key: ServiceKey(Secret::FromEnvironment {
27+
variable: DEFAULT_SERVICE_KEY_VARIABLE.into(),
28+
}),
29+
project_id: ProjectId(Secret::FromEnvironment {
30+
variable: DEFAULT_PROJECT_ID_VARIABLE.into(),
31+
}),
32+
dataset_id: DatasetId(Secret::FromEnvironment {
33+
variable: DEFAULT_DATASET_ID_VARIABLE.into(),
34+
}),
35+
}
36+
}
37+
}

crates/configuration/src/error.rs

+8-2
Original file line numberDiff line numberDiff line change
@@ -16,8 +16,14 @@ pub enum ParseConfigurationError {
1616
column: usize,
1717
message: String,
1818
},
19-
#[error("empty connection URI")]
20-
EmptyConnectionUri { file_path: std::path::PathBuf },
19+
#[error("empty service account key")]
20+
EmptyServiceKey { file_path: std::path::PathBuf },
21+
22+
#[error("empty project ID")]
23+
EmptyProjectId { file_path: std::path::PathBuf },
24+
25+
#[error("empty dataset ID")]
26+
EmptyDatasetId { file_path: std::path::PathBuf },
2127

2228
#[error("I/O error: {0}")]
2329
IoErrorButStringified(String),

crates/configuration/src/lib.rs

+2-1
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,13 @@
11
pub mod configuration;
2+
pub mod connection_settings;
23
pub mod environment;
34
pub mod error;
45
pub mod to_runtime_configuration;
56
pub mod values;
67
pub mod version1;
78

89
pub use configuration::Configuration;
9-
pub use values::uri::ConnectionUri;
10+
pub use values::connection_info::ServiceKey;
1011
pub use version1::{
1112
configure,
1213
parse_configuration,

crates/configuration/src/to_runtime_configuration.rs

+4-4
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ use std::collections::BTreeMap;
66
use super::version1::ParsedConfiguration;
77
use crate::environment::Environment;
88
use crate::error::MakeRuntimeConfigurationError;
9-
use crate::values::{ConnectionUri, Secret};
9+
use crate::values::{Secret, ServiceKey};
1010
use query_engine_metadata::{self, metadata};
1111
// use crate::VersionTag;
1212

@@ -16,9 +16,9 @@ pub fn make_runtime_configuration(
1616
parsed_config: ParsedConfiguration,
1717
environment: impl Environment,
1818
) -> Result<crate::Configuration, MakeRuntimeConfigurationError> {
19-
let connection_uri = match parsed_config.service_key {
20-
ConnectionUri(Secret::Plain(uri)) => Ok(uri),
21-
ConnectionUri(Secret::FromEnvironment { variable }) => {
19+
let connection_uri = match parsed_config.connection_settings.service_key {
20+
ServiceKey(Secret::Plain(uri)) => Ok(uri),
21+
ServiceKey(Secret::FromEnvironment { variable }) => {
2222
environment.read(&variable).map_err(|error| {
2323
MakeRuntimeConfigurationError::MissingEnvironmentVariable {
2424
file_path: super::version1::CONFIGURATION_FILENAME.into(),

crates/configuration/src/values/database_info.rs crates/configuration/src/values/connection_info.rs

+15
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,21 @@ use serde::{Deserialize, Serialize};
33

44
use super::Secret;
55

6+
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, JsonSchema)]
7+
pub struct ServiceKey(pub Secret);
8+
9+
impl From<String> for ServiceKey {
10+
fn from(value: String) -> Self {
11+
Self(value.into())
12+
}
13+
}
14+
15+
impl From<&str> for ServiceKey {
16+
fn from(value: &str) -> Self {
17+
Self::from(value.to_string())
18+
}
19+
}
20+
621
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, JsonSchema)]
722
pub struct ProjectId(pub Secret);
823

+2-4
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,7 @@
1-
pub mod database_info;
1+
pub mod connection_info;
22
mod pool_settings;
33
mod secret;
4-
pub mod uri;
54

6-
pub use database_info::{DatasetId, ProjectId};
5+
pub use connection_info::{DatasetId, ProjectId, ServiceKey};
76
pub use pool_settings::PoolSettings;
87
pub use secret::Secret;
9-
pub use uri::ConnectionUri;

crates/configuration/src/values/uri.rs

-19
This file was deleted.

crates/configuration/src/version1.rs

+14-24
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,9 @@
11
//! Internal Configuration and state for our connector.
22
3+
use crate::connection_settings;
34
use crate::environment::Environment;
45
use crate::error::WriteParsedConfigurationError;
5-
use crate::values::{ConnectionUri, DatasetId, PoolSettings, ProjectId, Secret};
6+
use crate::values::{DatasetId, PoolSettings, ProjectId, Secret, ServiceKey};
67

78
use super::error::ParseConfigurationError;
89
use gcp_bigquery_client::model::query_request::QueryRequest;
@@ -51,10 +52,7 @@ const NOT_APPROX_COUNTABLE: [&str; 4] = ["image", "sql_variant", "ntext", "text"
5152
pub struct ParsedConfiguration {
5253
// Which version of the configuration format are we using
5354
pub version: u32,
54-
// Connection string for a Postgres-compatible database
55-
pub service_key: ConnectionUri,
56-
pub project_id: ProjectId,
57-
pub dataset_id: DatasetId,
55+
pub connection_settings: connection_settings::DatabaseConnectionSettings,
5856
#[serde(skip_serializing_if = "PoolSettings::is_default")]
5957
#[serde(default)]
6058
pub pool_settings: PoolSettings,
@@ -80,15 +78,7 @@ impl ParsedConfiguration {
8078
pub fn empty() -> Self {
8179
Self {
8280
version: CURRENT_VERSION,
83-
service_key: ConnectionUri(Secret::FromEnvironment {
84-
variable: DEFAULT_SERVICE_KEY_VARIABLE.into(),
85-
}),
86-
project_id: ProjectId(Secret::FromEnvironment {
87-
variable: DEFAULT_PROJECT_ID_VARIABLE.into(),
88-
}),
89-
dataset_id: DatasetId(Secret::FromEnvironment {
90-
variable: DEFAULT_DATASET_ID_VARIABLE.into(),
91-
}),
81+
connection_settings: connection_settings::DatabaseConnectionSettings::empty(),
9282
pool_settings: PoolSettings::default(),
9383
metadata: metadata::Metadata::default(),
9484
// aggregate_functions: metadata::AggregateFunctions::default(),
@@ -101,19 +91,17 @@ pub async fn configure(
10191
args: &ParsedConfiguration,
10292
environment: impl Environment,
10393
) -> anyhow::Result<ParsedConfiguration> {
104-
let service_key = match &args.service_key {
105-
ConnectionUri(Secret::Plain(value)) => Cow::Borrowed(value),
106-
ConnectionUri(Secret::FromEnvironment { variable }) => {
107-
Cow::Owned(environment.read(variable)?)
108-
}
94+
let service_key = match &args.connection_settings.service_key {
95+
ServiceKey(Secret::Plain(value)) => Cow::Borrowed(value),
96+
ServiceKey(Secret::FromEnvironment { variable }) => Cow::Owned(environment.read(variable)?),
10997
};
11098

111-
let project_id_ = match &args.project_id {
99+
let project_id_ = match &args.connection_settings.project_id {
112100
ProjectId(Secret::Plain(value)) => Cow::Borrowed(value),
113101
ProjectId(Secret::FromEnvironment { variable }) => Cow::Owned(environment.read(variable)?),
114102
};
115103

116-
let dataset_id_ = match &args.dataset_id {
104+
let dataset_id_ = match &args.connection_settings.dataset_id {
117105
DatasetId(Secret::Plain(value)) => Cow::Borrowed(value),
118106
DatasetId(Secret::FromEnvironment { variable }) => Cow::Owned(environment.read(variable)?),
119107
};
@@ -220,9 +208,11 @@ pub async fn configure(
220208

221209
Ok(ParsedConfiguration {
222210
version: 1,
223-
service_key: args.service_key.clone(),
224-
project_id: args.project_id.clone(),
225-
dataset_id: args.dataset_id.clone(),
211+
connection_settings: connection_settings::DatabaseConnectionSettings {
212+
service_key: args.connection_settings.service_key.clone(),
213+
project_id: args.connection_settings.project_id.clone(),
214+
dataset_id: args.connection_settings.dataset_id.clone(),
215+
},
226216
pool_settings: args.pool_settings.clone(),
227217
metadata: metadata::Metadata {
228218
tables: tables_info,

crates/connectors/ndc-bigquery/src/connector.rs

+23-3
Original file line numberDiff line numberDiff line change
@@ -178,12 +178,32 @@ impl<Env: Environment + Send + Sync> ConnectorSetup for BigQuerySetup<Env> {
178178
message,
179179
})
180180
.into(),
181-
configuration::error::ParseConfigurationError::EmptyConnectionUri { file_path } => {
181+
configuration::error::ParseConfigurationError::EmptyServiceKey { file_path } => {
182182
connector::ParseError::ValidateError(connector::InvalidNodes(vec![
183183
connector::InvalidNode {
184184
file_path,
185-
node_path: vec![connector::KeyOrIndex::Key("connectionUri".into())],
186-
message: "database connection URI must be specified".to_string(),
185+
node_path: vec![connector::KeyOrIndex::Key("serviceKey".into())],
186+
message: "Service account key must be specified".to_string(),
187+
},
188+
]))
189+
.into()
190+
}
191+
configuration::error::ParseConfigurationError::EmptyProjectId { file_path } => {
192+
connector::ParseError::ValidateError(connector::InvalidNodes(vec![
193+
connector::InvalidNode {
194+
file_path,
195+
node_path: vec![connector::KeyOrIndex::Key("projectId".into())],
196+
message: "BigQuery project ID must be specified".to_string(),
197+
},
198+
]))
199+
.into()
200+
}
201+
configuration::error::ParseConfigurationError::EmptyDatasetId { file_path } => {
202+
connector::ParseError::ValidateError(connector::InvalidNodes(vec![
203+
connector::InvalidNode {
204+
file_path,
205+
node_path: vec![connector::KeyOrIndex::Key("datasetId".into())],
206+
message: "BigQuery dataset ID must be specified".to_string(),
187207
},
188208
]))
189209
.into()

crates/connectors/ndc-bigquery/tests/common/mod.rs

+19-6
Original file line numberDiff line numberDiff line change
@@ -4,10 +4,13 @@ use std::collections::HashMap;
44

55
use ndc_postgres::connector;
66

7-
const POSTGRESQL_CONNECTION_STRING: &str = "postgresql://postgres:password@localhost:64002";
8-
97
pub const CHINOOK_DEPLOYMENT_PATH: &str = "static/chinook-deployment.json";
108

9+
pub const BIGQUERY_SERVICE_KEY: &str = "{\"type\": \"service_account\",\"project_id\": \"hasura-development\",\"private_key_id\": \"222dd3f9e98b6743bb8d74d7a126fe89e6ac221d\",\"private_key\": \"-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDZuxyxWk6bOxHr\nht+MPZ7Q+F4D7AzTqCTZOmcldod+KHMlUCKwIOLQabAO8TEPhvcYyzBQ4gCwoN2i\n7VoQbHmlTpQu1s43K25oIoEIicFTbHcL4MALiFnT44XYl+PxL+e//GibJYGjwqI+\n3o2Go//o8BOfQEO/PPiQdub8/4VQjXGE0+xLeUjYURiJPu8ojrL2FdIqvzMaDWKq\n2r6KuukIeWkqmt6fHnrGiseavg3g7pBPjqmRtX6ekY74XbkTQk1kmCKf9MLjZ1UI\n+8QNp1C4pO4eDbp1Zkqz3uHhzccUvStkSCmppjKfD64Tp+6ExbUnMfq1UJ0GJBDM\nVPeJF6+PAgMBAAECggEAFFSf88GKeH02CaeQ5S/1ze57HOOkOPlI443MBtgAA9w0\nEEZgztBrTWmo+mQ0IA6KsSJ78vl/df63Y1jFYaY3X6OsO4lsPQONriSWptzyE9+b\naB0G4azMMnhazaQ1MRa3jZo8jEwexFNOwg8W6P0UTsRoGKUwDkHbteWcYQBdCu3W\nFa/CX3Tw0n/DdAVNi8Ai9K0d+Okmcv+ZRopeNuLENR28/VGSXj+Li1V7A0s+nX9E\nyxuGrDY4WMxSXHkW2yjrDnPUs6dXLFk1HBQPaHrs3i6gGyNXfTNWUJ3nGQwZIqJI\na1b4TMiGVapq33qCo/3Yi6jQ+I6KnpmWgQ7y5LXhoQKBgQDuA80oWCXQv7MERg91\nFwammtXrMjoD234u3RGNtnU67yH87kvL+p18EiNlbmy+CWyoc1mOjLtTHvMBfMGh\nfKt3BSuzrZZArA1GJF6J2Rew5dkJGzwPogLSnXMgrVwknAejKJw97wTJzzIZuuSc\nb7P57+mFoSdR+eSb44WFcuMyoQKBgQDqLu9LWz+LcljDWDeMQ4kl8gkNZMe5//Qd\nOpa6mN6T2nfRgxasaLo7WO8TqT4X28eBJKuru4BOeHVx0Y8GvWYyaW0uEEycdXVl\n6man+YUhZezTjjB/nCeaz7E7LCcUao1JP2Y9xlnpO5jdyi2tYkCqu7vOxmnLArN/\nl3zuXgrkLwKBgEzCzReF1ixMpt9p+PI6StrQdM01laBI2ZkjktWxUn1/Qebgs3FF\nkiTBdMjxpABl6bUp/mgK2x8jjBuesJP0MRhhgoagJSUWV/GXKSYr7YgPmL9nGSex\niFeEj+yp/F2SNKRaJImU3GZ5fB7wN2p8W/7vcNC3+IZnoWLlLdqsAroBAoGAdzZh\nVoki9gfFq9uym1Kd9JUbipftHIBxcpeqt16un7GtIRiMaEP/2cpSGj4jf92/17wl\nMA0JKekkUEaPeqzb43nLvJFLjrI0iyciDwx0eyX5w1A03CFP//0OicLWOgxr1AfU\nMkpQ5uwRy4XqbsL/jGp5Fq/mlxPO8HrbfDSfcr0CgYEAxN/RMCYODz+p9xZ6tbiS\nfHFrCgvPpYR9hEWhb/DyT4Q/OSzk0TItuSXGc3uicYeIycHIndyWej/a1HGg0IRK\nqjGbqGvRJIrzhLvLog1oOGADFSE2IJrxV2m9lQG8IUow4QUFcoZaCXZAQEvWeo+D\nq+4Pe2w4aMZeyqpt/mOSGzQ=\n-----END PRIVATE KEY-----\n\",\"client_email\": 
\"skm-bq-test@hasura-development.iam.gserviceaccount.com\",\"client_id\": \"116460406056940511807\",\"auth_uri\": \"https://accounts.google.com/o/oauth2/auth\",\"token_uri\": \"https://oauth2.googleapis.com/token\",\"auth_provider_x509_cert_url\": \"https://www.googleapis.com/oauth2/v1/certs\",\"client_x509_cert_url\": \"https://www.googleapis.com/robot/v1/metadata/x509/skm-bq-test%40hasura-development.iam.gserviceaccount.com\",\"universe_domain\": \"googleapis.com\"}";
10+
11+
pub const BIGQUERY_PROJECT_ID: &str = "hasura-development";
12+
pub const BIGQUERY_DATASET_ID: &str = "chinook_sample";
13+
1114
/// Creates a router with a fresh state from the test deployment.
1215
pub async fn create_router() -> axum::Router {
1316
create_router_from_deployment(CHINOOK_DEPLOYMENT_PATH).await
@@ -17,10 +20,20 @@ pub async fn create_router() -> axum::Router {
1720
pub async fn create_router_from_deployment(deployment_path: &str) -> axum::Router {
1821
let _ = env_logger::builder().is_test(true).try_init();
1922

20-
let environment = HashMap::from([(
21-
ndc_bigquery_configuration::version1::DEFAULT_SERVICE_KEY_VARIABLE.into(),
22-
POSTGRESQL_CONNECTION_STRING.to_string(),
23-
)]);
23+
let environment = HashMap::from([
24+
(
25+
ndc_bigquery_configuration::version1::DEFAULT_SERVICE_KEY_VARIABLE.into(),
26+
BIGQUERY_SERVICE_KEY.to_string(),
27+
),
28+
(
29+
ndc_bigquery_configuration::version1::DEFAULT_PROJECT_ID_VARIABLE.into(),
30+
BIGQUERY_PROJECT_ID.to_string(),
31+
),
32+
(
33+
ndc_bigquery_configuration::version1::DEFAULT_DATASET_ID_VARIABLE.into(),
34+
BIGQUERY_DATASET_ID.to_string(),
35+
),
36+
]);
2437

2538
let setup = connector::BigQuerySetup::new(environment);
2639

crates/connectors/ndc-bigquery/tests/configuration_tests.rs

+3-2
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ use similar_asserts::assert_eq;
1111
use ndc_bigquery_configuration::{
1212
values::Secret,
1313
version1::{self, DEFAULT_SERVICE_KEY_VARIABLE},
14-
ConnectionUri,
14+
ServiceKey,
1515
};
1616

1717
use tests_common::deployment::helpers::get_path_from_project_root;
@@ -40,7 +40,8 @@ async fn test_configure() {
4040
POSTGRESQL_CONNECTION_STRING.into(),
4141
)]);
4242

43-
args.service_key = ConnectionUri(Secret::Plain(DEFAULT_SERVICE_KEY_VARIABLE.to_string()));
43+
args.connection_settings.service_key =
44+
ServiceKey(Secret::Plain(DEFAULT_SERVICE_KEY_VARIABLE.to_string()));
4445

4546
let actual = version1::configure(&args, environment)
4647
.await

crates/query-engine/sql/src/sql/helpers.rs

+1
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@ use super::ast::*;
44
use std::collections::BTreeMap;
55

66
/// Used as input to helpers to construct SELECTs which return 'rows' and/or 'aggregates' results.
7+
#[derive(Debug, Clone, PartialEq)]
78
pub enum SelectSet {
89
Rows(Select),
910
Aggregates(Select),

crates/query-engine/translation/src/translation/query/mod.rs

+2
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,8 @@ pub fn translate(
4242
&query_request.query,
4343
)?;
4444

45+
dbg!(&select_set);
46+
4547
// form a single JSON item shaped `{ rows: [], aggregates: {} }`
4648
// that matches the models::RowSet type
4749
let json_select = sql::helpers::select_rowset(

crates/query-engine/translation/src/translation/query/values.rs

+11-4
Original file line numberDiff line numberDiff line change
@@ -82,10 +82,17 @@ fn type_to_ast_scalar_type_name(
8282
None => Ok(sql::ast::ScalarTypeName::Unqualified(
8383
scalar_type.type_name.to_string(),
8484
)),
85-
Some(schema_name) => Ok(sql::ast::ScalarTypeName::Qualified {
86-
schema_name: sql::ast::SchemaName(schema_name),
87-
type_name: scalar_type.type_name.to_string(),
88-
}),
85+
Some(_schema_name) =>
86+
// FIXME(PY): How to use Qualified types. In cast it gives something like Cast(@param1 as hasura-development.chinook_sample.string) when using _in operator for strings
87+
// Ok(sql::ast::ScalarTypeName::Qualified {
88+
// schema_name: sql::ast::SchemaName(schema_name),
89+
// type_name: scalar_type.type_name.to_string(),
90+
// }),
91+
{
92+
Ok(sql::ast::ScalarTypeName::Unqualified(
93+
scalar_type.type_name.to_string(),
94+
))
95+
}
8996
}
9097
}
9198
query_engine_metadata::metadata::Type::CompositeType(t) => {

0 commit comments

Comments
 (0)