From cc71fdc443a8eb476e1d5fd63b5d8d19a61c7712 Mon Sep 17 00:00:00 2001 From: Tom Harding Date: Wed, 26 Jun 2024 17:18:52 +0200 Subject: [PATCH 01/28] --wip-- [skip ci] --- Cargo.lock | 113 ++ crates/cli/Cargo.toml | 2 + crates/cli/src/lib.rs | 112 ++ crates/configuration/src/version3/mod.rs | 2 +- .../v3-chinook-ndc-metadata/schema.json | 1503 +++++++++++++++++ test.sql | 1 + 6 files changed, 1732 insertions(+), 1 deletion(-) create mode 100644 static/postgres/v3-chinook-ndc-metadata/schema.json create mode 100644 test.sql diff --git a/Cargo.lock b/Cargo.lock index 274fde9d2..0c31f3234 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -285,6 +285,28 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" +[[package]] +name = "bindgen" +version = "0.64.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4243e6031260db77ede97ad86c27e501d646a27ab57b59a574f725d98ab1fb4" +dependencies = [ + "bitflags 1.3.2", + "cexpr", + "clang-sys", + "lazy_static", + "lazycell", + "log", + "peeking_take_while", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", + "syn 1.0.109", + "which", +] + [[package]] name = "bit-set" version = "0.5.3" @@ -375,6 +397,15 @@ version = "1.0.99" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96c51067fd44124faa7f870b4b1c969379ad32b2ba805aa959430ceaa384f695" +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + [[package]] name = "cfg-if" version = "1.0.0" @@ -394,6 +425,17 @@ dependencies = [ "windows-targets 0.52.5", ] +[[package]] +name = "clang-sys" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" +dependencies = [ + "glob", + "libc", + "libloading", +] + [[package]] name = "clap" version = "4.5.7" @@ -1277,18 +1319,57 @@ dependencies = [ "spin 0.5.2", ] +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + [[package]] name = "libc" version = "0.2.155" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" +[[package]] +name = "libloading" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e310b3a6b5907f99202fcdb4960ff45b93735d7c7d96b760fcff8db2dc0e103d" +dependencies = [ + "cfg-if", + "windows-targets 0.52.5", +] + [[package]] name = "libm" version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" +[[package]] +name = "libpq" +version = "4.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0742f5e3894a62de35af0817f8fc801ebce542741e37693975d31ce62d120e8" +dependencies = [ + "libc", + "libpq-sys", + "log", + "thiserror", +] + +[[package]] +name = "libpq-sys" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ef060ac05c207c85da15f4eb629100c8782e0db4c06a3c91c86be9c18ae8a23" +dependencies = [ + "bindgen", + "pkg-config", + "vcpkg", +] + [[package]] name = 
"libsqlite3-sys" version = "0.27.0" @@ -1468,7 +1549,9 @@ dependencies = [ "build-data", "clap", "insta", + "libpq", "ndc-postgres-configuration", + "regex", "serde", "serde_json", "serde_yaml", @@ -1908,6 +1991,12 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" +[[package]] +name = "peeking_take_while" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" + [[package]] name = "pem-rfc7468" version = "0.7.0" @@ -2326,6 +2415,12 @@ version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + [[package]] name = "rustix" version = "0.38.34" @@ -2702,6 +2797,12 @@ dependencies = [ "lazy_static", ] +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + [[package]] name = "signal-hook-registry" version = "1.4.2" @@ -3788,6 +3889,18 @@ version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix", +] + [[package]] name = "whoami" version = "1.5.1" diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index c9f9b61d6..00d9e5e0b 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -17,6 +17,8 @@ serde_json = { workspace = true } serde_yaml = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true, features = ["full"] } +libpq = "4.0.0" +regex = "1.10.5" [build-dependencies] build-data = { workspace = true } diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs index 4790cd356..13395ac2b 100644 --- a/crates/cli/src/lib.rs +++ b/crates/cli/src/lib.rs @@ -12,6 +12,7 @@ use tokio::fs; use ndc_postgres_configuration as configuration; use ndc_postgres_configuration::environment::Environment; +use configuration::version3::metadata as metadatav3; const UPDATE_ATTEMPTS: u8 = 3; @@ -40,6 +41,13 @@ pub enum Command { #[arg(long)] dir_to: PathBuf, }, + CreateNativeOperation { + #[arg(long)] + operation_path: PathBuf, + + #[arg(long)] + is_mutation: bool, // we can make this neater later + }, } /// The set of errors that can go wrong _in addition to_ generic I/O or parsing errors. 
@@ -55,6 +63,10 @@ pub async fn run(command: Command, context: Context<impl Environment>) -> anyhow
         Command::Initialize { with_metadata } => initialize(with_metadata, context).await?,
         Command::Update => update(context).await?,
         Command::Upgrade { dir_from, dir_to } => upgrade(dir_from, dir_to).await?,
+        Command::CreateNativeOperation {
+            operation_path,
+            is_mutation,
+        } => create_native_operation(operation_path, context, is_mutation).await?,
     };
     Ok(())
 }
@@ -167,3 +179,103 @@ async fn upgrade(dir_from: PathBuf, dir_to: PathBuf) -> anyhow::Result<()> {
 
     Ok(())
 }
+
+async fn create_native_operation(
+    operation_path: PathBuf,
+    context: Context<impl Environment>,
+    is_procedure: bool,
+) -> anyhow::Result<()> {
+    let identifier = operation_path
+        .file_stem()
+        .ok_or(anyhow::anyhow!("Oh no, file not found"))?
+        .to_str()
+        .ok_or(anyhow::anyhow!("Oh no, file not found"))?;
+    let sql = std::fs::read_to_string(&operation_path)?;
+    let mut configuration = configuration::parse_configuration(context.context_path.clone()).await?;
+
+    let connection_uri = match configuration {
+        configuration::ParsedConfiguration::Version3(ref raw_configuration) => {
+            raw_configuration.connection_settings.connection_uri.clone()
+        }
+        configuration::ParsedConfiguration::Version4(ref configuration) => {
+            configuration.connection_settings.connection_uri.clone()
+        }
+    };
+
+    let connection_string = match connection_uri.0 {
+        configuration::Secret::Plain(connection_string) => connection_string,
+        configuration::Secret::FromEnvironment { variable } => std::env::var(variable.to_string())?,
+    };
+
+    let connection = libpq::Connection::new(&connection_string)?;
+    let prepared_statement_name = format!("__hasura_inference_{identifier}");
+
+    let identifier_regex = regex::Regex::new(r"\{\{(?<name>.*?)\}\}").unwrap();
+    let mut parameters = std::collections::HashMap::new();
+
+    for (index, (_, [name])) in identifier_regex
+        .captures_iter(&sql)
+        .map(|c| c.extract())
+        .enumerate()
+    {
+        parameters.insert(index + 1, name);
+    }
+
+    let mut final_statement = sql.clone();
+
+    for (index, name) in &parameters {
+        final_statement = final_statement.replace(&format!("{{{{{name}}}}}"), &format!("${index}"));
+    }
+
+    let _ = connection.prepare(Some(&prepared_statement_name), &final_statement, &[]);
+    let description = connection.describe_prepared(Some(&prepared_statement_name));
+
+    let mut arguments = std::collections::BTreeMap::new();
+    let mut columns = std::collections::BTreeMap::new();
+
+    for param in 0 .. description.nparams() {
+        arguments.insert(
+            parameters.get(&(param + 1)).ok_or(anyhow::anyhow!(":("))?.to_string(),
+            metadatav3::ReadOnlyColumnInfo {
+                name: parameters.get(&(param + 1)).ok_or(anyhow::anyhow!(":("))?.to_string(),
+                r#type: metadatav3::Type::ScalarType(metadatav3::ScalarType(format!("{}", description.param_type(param).unwrap()))),
+                description: None,
+                nullable: metadatav3::Nullable::NonNullable,
+            }
+        );
+    }
+
+    for field in 0 ..
description.nfields() { + columns.insert( + description.field_name(field)?.unwrap(), + metadatav3::ReadOnlyColumnInfo { + name: description.field_name(field)?.unwrap(), + r#type: metadatav3::Type::ScalarType(metadatav3::ScalarType(format!("{}", description.field_type(field)))), + description: None, + nullable: metadatav3::Nullable::NonNullable, + } + ); + } + + match configuration { + configuration::ParsedConfiguration::Version3(ref mut raw_configuration) => + // TODO: should we overwrite or not + raw_configuration.metadata.native_queries.0.insert( + identifier.to_string(), + metadatav3::NativeQueryInfo { + sql: metadatav3::NativeQuerySqlEither::NativeQuerySqlExternal( + metadatav3::NativeQuerySqlExternal::File { file: operation_path } + ), + + arguments, + columns, + is_procedure, + description: None, + } + ), + configuration::ParsedConfiguration::Version4(_) => panic!("Later") + }; + + configuration::write_parsed_configuration(configuration, context.context_path).await?; + Ok(()) +} diff --git a/crates/configuration/src/version3/mod.rs b/crates/configuration/src/version3/mod.rs index fd8352c18..b13eb3071 100644 --- a/crates/configuration/src/version3/mod.rs +++ b/crates/configuration/src/version3/mod.rs @@ -2,7 +2,7 @@ pub(crate) mod comparison; pub mod connection_settings; -pub(crate) mod metadata; +pub mod metadata; pub(crate) mod options; use std::borrow::Cow; diff --git a/static/postgres/v3-chinook-ndc-metadata/schema.json b/static/postgres/v3-chinook-ndc-metadata/schema.json new file mode 100644 index 000000000..abc35aead --- /dev/null +++ b/static/postgres/v3-chinook-ndc-metadata/schema.json @@ -0,0 +1,1503 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "RawConfiguration", + "description": "Initial configuration, just enough to connect to a database and elaborate a full 'Configuration'.", + "type": "object", + "required": [ + "version" + ], + "properties": { + "version": { + "$ref": "#/definitions/Version" + }, + "$schema": { + "description": "Jsonschema of the configuration format.", + "default": null, + "type": [ + "string", + "null" + ] + }, + "connectionSettings": { + "description": "Database connection settings.", + "default": { + "connectionUri": { + "variable": "CONNECTION_URI" + }, + "isolationLevel": "ReadCommitted", + "poolSettings": { + "checkConnectionAfterIdle": 60, + "connectionLifetime": 600, + "idleTimeout": 180, + "maxConnections": 50, + "poolTimeout": 30 + } + }, + "allOf": [ + { + "$ref": "#/definitions/DatabaseConnectionSettings" + } + ] + }, + "metadata": { + "description": "Connector metadata.", + "default": { + "aggregateFunctions": {}, + "comparisonOperators": {}, + "compositeTypes": {}, + "nativeQueries": {}, + "tables": {}, + "typeRepresentations": {} + }, + "allOf": [ + { + "$ref": "#/definitions/Metadata" + } + ] + }, + "introspectionOptions": { + "description": "Database introspection options.", + "default": { + "comparisonOperatorMapping": [ + { + "exposedName": "_eq", + "operatorKind": "equal", + "operatorName": "=" + }, + { + "exposedName": "_lte", + "operatorKind": "custom", + "operatorName": "<=" + }, + { + "exposedName": "_gt", + "operatorKind": "custom", + "operatorName": ">" + }, + { + "exposedName": "_gte", + "operatorKind": "custom", + "operatorName": ">=" + }, + { + "exposedName": "_lt", + "operatorKind": "custom", + "operatorName": "<" + }, + { + "exposedName": "_neq", + "operatorKind": "custom", + "operatorName": "!=" + }, + { + "exposedName": "_like", + "operatorKind": "custom", + "operatorName": "LIKE" + }, + { + 
"exposedName": "_nlike", + "operatorKind": "custom", + "operatorName": "NOT LIKE" + }, + { + "exposedName": "_ilike", + "operatorKind": "custom", + "operatorName": "ILIKE" + }, + { + "exposedName": "_nilike", + "operatorKind": "custom", + "operatorName": "NOT ILIKE" + }, + { + "exposedName": "_similar", + "operatorKind": "custom", + "operatorName": "SIMILAR TO" + }, + { + "exposedName": "_nsimilar", + "operatorKind": "custom", + "operatorName": "NOT SIMILAR TO" + }, + { + "exposedName": "_neq", + "operatorKind": "custom", + "operatorName": "<>" + }, + { + "exposedName": "_like", + "operatorKind": "custom", + "operatorName": "~~" + }, + { + "exposedName": "_nlike", + "operatorKind": "custom", + "operatorName": "!~~" + }, + { + "exposedName": "_ilike", + "operatorKind": "custom", + "operatorName": "~~*" + }, + { + "exposedName": "_nilike", + "operatorKind": "custom", + "operatorName": "!~~*" + }, + { + "exposedName": "_regex", + "operatorKind": "custom", + "operatorName": "~" + }, + { + "exposedName": "_nregex", + "operatorKind": "custom", + "operatorName": "!~" + }, + { + "exposedName": "_iregex", + "operatorKind": "custom", + "operatorName": "~*" + }, + { + "exposedName": "_niregex", + "operatorKind": "custom", + "operatorName": "!~*" + } + ], + "excludedSchemas": [ + "information_schema", + "pg_catalog", + "tiger", + "crdb_internal", + "columnar", + "columnar_internal" + ], + "introspectPrefixFunctionComparisonOperators": [ + "box_above", + "box_below", + "box_contain", + "box_contain_pt", + "box_contained", + "box_left", + "box_overabove", + "box_overbelow", + "box_overlap", + "box_overleft", + "box_overright", + "box_right", + "box_same", + "circle_above", + "circle_below", + "circle_contain", + "circle_contain_pt", + "circle_contained", + "circle_left", + "circle_overabove", + "circle_overbelow", + "circle_overlap", + "circle_overleft", + "circle_overright", + "circle_right", + "circle_same", + "contains_2d", + "equals", + "geography_overlaps", + "geometry_above", + "geometry_below", + "geometry_contained_3d", + "geometry_contains", + "geometry_contains_3d", + "geometry_contains_nd", + "geometry_left", + "geometry_overabove", + "geometry_overbelow", + "geometry_overlaps", + "geometry_overlaps_3d", + "geometry_overlaps_nd", + "geometry_overleft", + "geometry_overright", + "geometry_right", + "geometry_same", + "geometry_same_3d", + "geometry_same_nd", + "geometry_within", + "geometry_within_nd", + "inet_same_family", + "inter_lb", + "inter_sb", + "inter_sl", + "is_contained_2d", + "ishorizontal", + "isparallel", + "isperp", + "isvertical", + "jsonb_contained", + "jsonb_contains", + "jsonb_exists", + "jsonb_path_exists_opr", + "jsonb_path_match_opr", + "line_intersect", + "line_parallel", + "line_perp", + "lseg_intersect", + "lseg_parallel", + "lseg_perp", + "network_overlap", + "network_sub", + "network_sup", + "on_pb", + "on_pl", + "on_ppath", + "on_ps", + "on_sb", + "on_sl", + "overlaps_2d", + "path_contain_pt", + "path_inter", + "point_above", + "point_below", + "point_horiz", + "point_left", + "point_right", + "point_vert", + "poly_above", + "poly_below", + "poly_contain", + "poly_contain_pt", + "poly_contained", + "poly_left", + "poly_overabove", + "poly_overbelow", + "poly_overlap", + "poly_overleft", + "poly_overright", + "poly_right", + "poly_same", + "pt_contained_poly", + "st_3dintersects", + "st_contains", + "st_containsproperly", + "st_coveredby", + "st_covers", + "st_crosses", + "st_disjoint", + "st_equals", + "st_intersects", + "st_isvalid", + "st_orderingequals", + 
"st_overlaps", + "st_relatematch", + "st_touches", + "st_within", + "starts_with", + "ts_match_qv", + "ts_match_tq", + "ts_match_tt", + "ts_match_vq", + "tsq_mcontained", + "tsq_mcontains", + "xmlexists", + "xmlvalidate", + "xpath_exists" + ], + "unqualifiedSchemasForTables": [ + "public" + ], + "unqualifiedSchemasForTypesAndProcedures": [ + "public", + "pg_catalog", + "tiger", + "auth", + "pgsodium" + ] + }, + "allOf": [ + { + "$ref": "#/definitions/IntrospectionOptions" + } + ] + }, + "mutationsVersion": { + "description": "Which version of the generated mutation procedures to include in the schema response", + "default": null, + "anyOf": [ + { + "$ref": "#/definitions/MutationsVersion" + }, + { + "type": "null" + } + ] + } + }, + "definitions": { + "Version": { + "type": "string", + "enum": [ + "3" + ] + }, + "DatabaseConnectionSettings": { + "description": "Database connection settings.", + "type": "object", + "required": [ + "connectionUri" + ], + "properties": { + "connectionUri": { + "description": "Connection string for a Postgres-compatible database.", + "allOf": [ + { + "$ref": "#/definitions/ConnectionUri" + } + ] + }, + "poolSettings": { + "description": "Connection pool settings.", + "default": { + "checkConnectionAfterIdle": 60, + "connectionLifetime": 600, + "idleTimeout": 180, + "maxConnections": 50, + "poolTimeout": 30 + }, + "allOf": [ + { + "$ref": "#/definitions/PoolSettings" + } + ] + }, + "isolationLevel": { + "description": "Query isolation level.", + "default": "ReadCommitted", + "allOf": [ + { + "$ref": "#/definitions/IsolationLevel" + } + ] + } + } + }, + "ConnectionUri": { + "$ref": "#/definitions/Secret" + }, + "Secret": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "object", + "required": [ + "variable" + ], + "properties": { + "variable": { + "$ref": "#/definitions/Variable" + } + } + } + ] + }, + "Variable": { + "description": "The name of an an environment variable.", + "type": "string" + }, + "PoolSettings": { + "description": "Settings for the PostgreSQL connection pool", + "type": "object", + "properties": { + "maxConnections": { + "description": "maximum number of pool connections", + "default": 50, + "type": "integer", + "format": "uint32", + "minimum": 0.0 + }, + "poolTimeout": { + "description": "timeout for acquiring a connection from the pool (seconds)", + "default": 30, + "type": "integer", + "format": "uint64", + "minimum": 0.0 + }, + "idleTimeout": { + "description": "idle timeout for releasing a connection from the pool (seconds)", + "default": 180, + "type": [ + "integer", + "null" + ], + "format": "uint64", + "minimum": 0.0 + }, + "checkConnectionAfterIdle": { + "description": "check the connection is alive after being idle for N seconds. 
Set to null to always check.", + "default": 60, + "type": [ + "integer", + "null" + ], + "format": "uint64", + "minimum": 0.0 + }, + "connectionLifetime": { + "description": "maximum lifetime for an individual connection (seconds)", + "default": 600, + "type": [ + "integer", + "null" + ], + "format": "uint64", + "minimum": 0.0 + } + } + }, + "IsolationLevel": { + "description": "The isolation level of the transaction in which a query is executed.", + "oneOf": [ + { + "description": "Prevents reading data from another uncommitted transaction.", + "type": "string", + "enum": [ + "ReadCommitted" + ] + }, + { + "description": "Reading the same data twice is guaranteed to return the same result.", + "type": "string", + "enum": [ + "RepeatableRead" + ] + }, + { + "description": "Concurrent transactions behave identically to serializing them one at a time.", + "type": "string", + "enum": [ + "Serializable" + ] + } + ] + }, + "Metadata": { + "description": "Metadata information.", + "type": "object", + "properties": { + "tables": { + "default": {}, + "allOf": [ + { + "$ref": "#/definitions/TablesInfo" + } + ] + }, + "compositeTypes": { + "default": {}, + "allOf": [ + { + "$ref": "#/definitions/CompositeTypes" + } + ] + }, + "nativeQueries": { + "default": {}, + "allOf": [ + { + "$ref": "#/definitions/NativeQueries" + } + ] + }, + "aggregateFunctions": { + "default": {}, + "allOf": [ + { + "$ref": "#/definitions/AggregateFunctions" + } + ] + }, + "comparisonOperators": { + "default": {}, + "allOf": [ + { + "$ref": "#/definitions/ComparisonOperators" + } + ] + }, + "typeRepresentations": { + "default": {}, + "allOf": [ + { + "$ref": "#/definitions/TypeRepresentations" + } + ] + } + } + }, + "TablesInfo": { + "description": "Mapping from a \"table\" name to its information.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/TableInfo" + } + }, + "TableInfo": { + "description": "Information about a database table (or any other kind of relation).", + "type": "object", + "required": [ + "columns", + "schemaName", + "tableName" + ], + "properties": { + "schemaName": { + "type": "string" + }, + "tableName": { + "type": "string" + }, + "columns": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/ColumnInfo" + } + }, + "uniquenessConstraints": { + "default": {}, + "allOf": [ + { + "$ref": "#/definitions/UniquenessConstraints" + } + ] + }, + "foreignRelations": { + "default": {}, + "allOf": [ + { + "$ref": "#/definitions/ForeignRelations" + } + ] + }, + "description": { + "default": null, + "type": [ + "string", + "null" + ] + } + } + }, + "ColumnInfo": { + "description": "Information about a database column.", + "type": "object", + "required": [ + "name", + "type" + ], + "properties": { + "name": { + "type": "string" + }, + "type": { + "$ref": "#/definitions/Type" + }, + "nullable": { + "default": "nullable", + "allOf": [ + { + "$ref": "#/definitions/Nullable" + } + ] + }, + "hasDefault": { + "$ref": "#/definitions/HasDefault" + }, + "isIdentity": { + "$ref": "#/definitions/IsIdentity" + }, + "isGenerated": { + "$ref": "#/definitions/IsGenerated" + }, + "description": { + "default": null, + "type": [ + "string", + "null" + ] + } + } + }, + "Type": { + "description": "The type of values that a column, field, or argument may take.", + "oneOf": [ + { + "type": "object", + "required": [ + "scalarType" + ], + "properties": { + "scalarType": { + "$ref": "#/definitions/ScalarType" + } + }, + "additionalProperties": false + }, + { + "type": "object", + "required": [ 
+ "compositeType" + ], + "properties": { + "compositeType": { + "type": "string" + } + }, + "additionalProperties": false + }, + { + "type": "object", + "required": [ + "arrayType" + ], + "properties": { + "arrayType": { + "$ref": "#/definitions/Type" + } + }, + "additionalProperties": false + } + ] + }, + "ScalarType": { + "description": "A Scalar Type.", + "type": "string" + }, + "Nullable": { + "description": "Can this column contain null values", + "type": "string", + "enum": [ + "nullable", + "nonNullable" + ] + }, + "HasDefault": { + "description": "Does this column have a default value.", + "type": "string", + "enum": [ + "noDefault", + "hasDefault" + ] + }, + "IsIdentity": { + "description": "Is this column an identity column.", + "type": "string", + "enum": [ + "notIdentity", + "identityByDefault", + "identityAlways" + ] + }, + "IsGenerated": { + "description": "Is this column a generated column.", + "type": "string", + "enum": [ + "notGenerated", + "stored" + ] + }, + "UniquenessConstraints": { + "description": "A mapping from the name of a unique constraint to its value.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/UniquenessConstraint" + } + }, + "UniquenessConstraint": { + "description": "The set of columns that make up a uniqueness constraint.", + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "ForeignRelations": { + "description": "A mapping from the name of a foreign key constraint to its value.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/ForeignRelation" + } + }, + "ForeignRelation": { + "description": "A foreign key constraint.", + "type": "object", + "required": [ + "columnMapping", + "foreignTable" + ], + "properties": { + "foreignSchema": { + "type": [ + "string", + "null" + ] + }, + "foreignTable": { + "type": "string" + }, + "columnMapping": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + }, + "CompositeTypes": { + "description": "Map of all known composite types.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/CompositeType" + } + }, + "CompositeType": { + "description": "Information about a composite type. These are very similar to tables, but with the crucial difference that composite types do not support constraints (such as NOT NULL).", + "type": "object", + "required": [ + "fields", + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "fields": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/FieldInfo" + } + }, + "description": { + "default": null, + "type": [ + "string", + "null" + ] + } + } + }, + "FieldInfo": { + "description": "Information about a composite type field.", + "type": "object", + "required": [ + "name", + "type" + ], + "properties": { + "name": { + "type": "string" + }, + "type": { + "$ref": "#/definitions/Type" + }, + "description": { + "default": null, + "type": [ + "string", + "null" + ] + } + } + }, + "NativeQueries": { + "description": "Metadata information of native queries.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/NativeQueryInfo" + } + }, + "NativeQueryInfo": { + "description": "Information about a Native Query", + "type": "object", + "required": [ + "columns", + "sql" + ], + "properties": { + "sql": { + "description": "SQL expression to use for the Native Query. 
We can interpolate values using `{{variable_name}}` syntax, such as `SELECT * FROM authors WHERE name = {{author_name}}`", + "allOf": [ + { + "$ref": "#/definitions/NativeQuerySql" + } + ] + }, + "columns": { + "description": "Columns returned by the Native Query", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/ReadOnlyColumnInfo" + } + }, + "arguments": { + "description": "Names and types of arguments that can be passed to this Native Query", + "default": {}, + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/ReadOnlyColumnInfo" + } + }, + "description": { + "default": null, + "type": [ + "string", + "null" + ] + }, + "isProcedure": { + "description": "True if this native query mutates the database", + "type": "boolean" + } + } + }, + "NativeQuerySql": { + "description": "Native Query SQL location.", + "anyOf": [ + { + "description": "Refer to an external Native Query SQL file.", + "type": "object", + "required": [ + "file" + ], + "properties": { + "file": { + "description": "Relative path to a sql file.", + "type": "string" + } + } + }, + { + "description": "Inline Native Query SQL string.", + "type": "object", + "required": [ + "inline" + ], + "properties": { + "inline": { + "description": "An inline Native Query SQL string.", + "allOf": [ + { + "$ref": "#/definitions/InlineNativeQuerySql" + } + ] + } + } + }, + { + "$ref": "#/definitions/InlineNativeQuerySql" + } + ] + }, + "InlineNativeQuerySql": { + "type": "string" + }, + "ReadOnlyColumnInfo": { + "description": "Information about a native query column.", + "type": "object", + "required": [ + "name", + "type" + ], + "properties": { + "name": { + "type": "string" + }, + "type": { + "$ref": "#/definitions/Type" + }, + "nullable": { + "default": "nullable", + "allOf": [ + { + "$ref": "#/definitions/Nullable" + } + ] + }, + "description": { + "default": null, + "type": [ + "string", + "null" + ] + } + } + }, + "AggregateFunctions": { + "description": "All supported aggregate functions, grouped by type.", + "type": "object", + "additionalProperties": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/AggregateFunction" + } + } + }, + "AggregateFunction": { + "type": "object", + "required": [ + "returnType" + ], + "properties": { + "returnType": { + "$ref": "#/definitions/ScalarType" + } + } + }, + "ComparisonOperators": { + "description": "The complete list of supported binary operators for scalar types. 
Not all of these are supported for every type.", + "type": "object", + "additionalProperties": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/ComparisonOperator" + } + } + }, + "ComparisonOperator": { + "description": "Represents a postgres binary comparison operator", + "type": "object", + "required": [ + "argumentType", + "operatorKind", + "operatorName" + ], + "properties": { + "operatorName": { + "type": "string" + }, + "operatorKind": { + "$ref": "#/definitions/OperatorKind" + }, + "argumentType": { + "$ref": "#/definitions/ScalarType" + }, + "isInfix": { + "default": true, + "type": "boolean" + } + } + }, + "OperatorKind": { + "description": "Is it a built-in operator, or a custom operator.", + "type": "string", + "enum": [ + "equal", + "in", + "custom" + ] + }, + "TypeRepresentations": { + "description": "Type representation of scalar types, grouped by type.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/TypeRepresentation" + } + }, + "TypeRepresentation": { + "description": "Type representation of a scalar type.", + "oneOf": [ + { + "description": "JSON booleans", + "type": "string", + "enum": [ + "boolean" + ] + }, + { + "description": "Any JSON string", + "type": "string", + "enum": [ + "string" + ] + }, + { + "description": "float4", + "type": "string", + "enum": [ + "float32" + ] + }, + { + "description": "float8", + "type": "string", + "enum": [ + "float64" + ] + }, + { + "description": "int2", + "type": "string", + "enum": [ + "int16" + ] + }, + { + "description": "int4", + "type": "string", + "enum": [ + "int32" + ] + }, + { + "description": "int8 as integer", + "type": "string", + "enum": [ + "int64" + ] + }, + { + "description": "int8 as string", + "type": "string", + "enum": [ + "int64AsString" + ] + }, + { + "description": "numeric", + "type": "string", + "enum": [ + "bigDecimal" + ] + }, + { + "description": "numeric as string", + "type": "string", + "enum": [ + "bigDecimalAsString" + ] + }, + { + "description": "timestamp", + "type": "string", + "enum": [ + "timestamp" + ] + }, + { + "description": "timestamp with timezone", + "type": "string", + "enum": [ + "timestamptz" + ] + }, + { + "description": "time", + "type": "string", + "enum": [ + "time" + ] + }, + { + "description": "time with timezone", + "type": "string", + "enum": [ + "timetz" + ] + }, + { + "description": "date", + "type": "string", + "enum": [ + "date" + ] + }, + { + "description": "uuid", + "type": "string", + "enum": [ + "uUID" + ] + }, + { + "description": "geography", + "type": "string", + "enum": [ + "geography" + ] + }, + { + "description": "geometry", + "type": "string", + "enum": [ + "geometry" + ] + }, + { + "description": "Any JSON number", + "type": "string", + "enum": [ + "number" + ] + }, + { + "description": "Any JSON number, with no decimal part", + "type": "string", + "enum": [ + "integer" + ] + }, + { + "description": "An arbitrary json.", + "type": "string", + "enum": [ + "json" + ] + }, + { + "description": "One of the specified string values", + "type": "object", + "required": [ + "enum" + ], + "properties": { + "enum": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + } + ] + }, + "IntrospectionOptions": { + "description": "Options which only influence how the configuration is updated.", + "type": "object", + "properties": { + "excludedSchemas": { + "description": "Schemas which are excluded from introspection. 
The default setting will exclude the internal schemas of Postgres, Citus, Cockroach, and the PostGIS extension.", + "default": [ + "information_schema", + "pg_catalog", + "tiger", + "crdb_internal", + "columnar", + "columnar_internal" + ], + "type": "array", + "items": { + "type": "string" + } + }, + "unqualifiedSchemasForTables": { + "description": "The names of Tables and Views in these schemas will be returned unqualified. The default setting will set the `public` schema as unqualified.", + "default": [ + "public" + ], + "type": "array", + "items": { + "type": "string" + } + }, + "unqualifiedSchemasForTypesAndProcedures": { + "description": "The types and procedures in these schemas will be returned unqualified.", + "default": [ + "public", + "pg_catalog", + "tiger", + "auth", + "pgsodium" + ], + "type": "array", + "items": { + "type": "string" + } + }, + "comparisonOperatorMapping": { + "description": "The mapping of comparison operator names to apply when updating the configuration", + "default": [ + { + "exposedName": "_eq", + "operatorKind": "equal", + "operatorName": "=" + }, + { + "exposedName": "_lte", + "operatorKind": "custom", + "operatorName": "<=" + }, + { + "exposedName": "_gt", + "operatorKind": "custom", + "operatorName": ">" + }, + { + "exposedName": "_gte", + "operatorKind": "custom", + "operatorName": ">=" + }, + { + "exposedName": "_lt", + "operatorKind": "custom", + "operatorName": "<" + }, + { + "exposedName": "_neq", + "operatorKind": "custom", + "operatorName": "!=" + }, + { + "exposedName": "_like", + "operatorKind": "custom", + "operatorName": "LIKE" + }, + { + "exposedName": "_nlike", + "operatorKind": "custom", + "operatorName": "NOT LIKE" + }, + { + "exposedName": "_ilike", + "operatorKind": "custom", + "operatorName": "ILIKE" + }, + { + "exposedName": "_nilike", + "operatorKind": "custom", + "operatorName": "NOT ILIKE" + }, + { + "exposedName": "_similar", + "operatorKind": "custom", + "operatorName": "SIMILAR TO" + }, + { + "exposedName": "_nsimilar", + "operatorKind": "custom", + "operatorName": "NOT SIMILAR TO" + }, + { + "exposedName": "_neq", + "operatorKind": "custom", + "operatorName": "<>" + }, + { + "exposedName": "_like", + "operatorKind": "custom", + "operatorName": "~~" + }, + { + "exposedName": "_nlike", + "operatorKind": "custom", + "operatorName": "!~~" + }, + { + "exposedName": "_ilike", + "operatorKind": "custom", + "operatorName": "~~*" + }, + { + "exposedName": "_nilike", + "operatorKind": "custom", + "operatorName": "!~~*" + }, + { + "exposedName": "_regex", + "operatorKind": "custom", + "operatorName": "~" + }, + { + "exposedName": "_nregex", + "operatorKind": "custom", + "operatorName": "!~" + }, + { + "exposedName": "_iregex", + "operatorKind": "custom", + "operatorName": "~*" + }, + { + "exposedName": "_niregex", + "operatorKind": "custom", + "operatorName": "!~*" + } + ], + "type": "array", + "items": { + "$ref": "#/definitions/ComparisonOperatorMapping" + } + }, + "introspectPrefixFunctionComparisonOperators": { + "description": "Which prefix functions (i.e., non-infix operators) to generate introspection metadata for.\n\nThis list will accept any boolean-returning function taking two concrete scalar types as arguments.\n\nThe default includes comparisons for various build-in types as well as those of PostGIS.", + "default": [ + "box_above", + "box_below", + "box_contain", + "box_contain_pt", + "box_contained", + "box_left", + "box_overabove", + "box_overbelow", + "box_overlap", + "box_overleft", + "box_overright", + "box_right", + 
"box_same", + "circle_above", + "circle_below", + "circle_contain", + "circle_contain_pt", + "circle_contained", + "circle_left", + "circle_overabove", + "circle_overbelow", + "circle_overlap", + "circle_overleft", + "circle_overright", + "circle_right", + "circle_same", + "contains_2d", + "equals", + "geography_overlaps", + "geometry_above", + "geometry_below", + "geometry_contained_3d", + "geometry_contains", + "geometry_contains_3d", + "geometry_contains_nd", + "geometry_left", + "geometry_overabove", + "geometry_overbelow", + "geometry_overlaps", + "geometry_overlaps_3d", + "geometry_overlaps_nd", + "geometry_overleft", + "geometry_overright", + "geometry_right", + "geometry_same", + "geometry_same_3d", + "geometry_same_nd", + "geometry_within", + "geometry_within_nd", + "inet_same_family", + "inter_lb", + "inter_sb", + "inter_sl", + "is_contained_2d", + "ishorizontal", + "isparallel", + "isperp", + "isvertical", + "jsonb_contained", + "jsonb_contains", + "jsonb_exists", + "jsonb_path_exists_opr", + "jsonb_path_match_opr", + "line_intersect", + "line_parallel", + "line_perp", + "lseg_intersect", + "lseg_parallel", + "lseg_perp", + "network_overlap", + "network_sub", + "network_sup", + "on_pb", + "on_pl", + "on_ppath", + "on_ps", + "on_sb", + "on_sl", + "overlaps_2d", + "path_contain_pt", + "path_inter", + "point_above", + "point_below", + "point_horiz", + "point_left", + "point_right", + "point_vert", + "poly_above", + "poly_below", + "poly_contain", + "poly_contain_pt", + "poly_contained", + "poly_left", + "poly_overabove", + "poly_overbelow", + "poly_overlap", + "poly_overleft", + "poly_overright", + "poly_right", + "poly_same", + "pt_contained_poly", + "st_3dintersects", + "st_contains", + "st_containsproperly", + "st_coveredby", + "st_covers", + "st_crosses", + "st_disjoint", + "st_equals", + "st_intersects", + "st_isvalid", + "st_orderingequals", + "st_overlaps", + "st_relatematch", + "st_touches", + "st_within", + "starts_with", + "ts_match_qv", + "ts_match_tq", + "ts_match_tt", + "ts_match_vq", + "tsq_mcontained", + "tsq_mcontains", + "xmlexists", + "xmlvalidate", + "xpath_exists" + ], + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "ComparisonOperatorMapping": { + "description": "Define the names that comparison operators will be exposed as by the automatic introspection.", + "type": "object", + "required": [ + "exposedName", + "operatorKind", + "operatorName" + ], + "properties": { + "operatorName": { + "description": "The name of the operator as defined by the database", + "type": "string" + }, + "exposedName": { + "description": "The name the operator will appear under in the exposed API", + "type": "string" + }, + "operatorKind": { + "description": "Equal, In or Custom.", + "allOf": [ + { + "$ref": "#/definitions/OperatorKind" + } + ] + } + } + }, + "MutationsVersion": { + "description": "Which version of the generated mutations will be included in the schema", + "type": "string", + "enum": [ + "v1", + "veryExperimentalWip" + ] + } + } +} diff --git a/test.sql b/test.sql new file mode 100644 index 000000000..ddff64943 --- /dev/null +++ b/test.sql @@ -0,0 +1 @@ +SELECT * FROM "Artist" WHERE "ArtistId" > {{gil}} AND "ArtistId" < {{tom}} From 56c99a31b3290d8b7487d3f73fa3a8ba859d5ea1 Mon Sep 17 00:00:00 2001 From: Tom Harding Date: Wed, 26 Jun 2024 17:20:43 +0200 Subject: [PATCH 02/28] Delete schema.json --- .../v3-chinook-ndc-metadata/schema.json | 1503 ----------------- 1 file changed, 1503 deletions(-) delete mode 100644 
static/postgres/v3-chinook-ndc-metadata/schema.json diff --git a/static/postgres/v3-chinook-ndc-metadata/schema.json b/static/postgres/v3-chinook-ndc-metadata/schema.json deleted file mode 100644 index abc35aead..000000000 --- a/static/postgres/v3-chinook-ndc-metadata/schema.json +++ /dev/null @@ -1,1503 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "RawConfiguration", - "description": "Initial configuration, just enough to connect to a database and elaborate a full 'Configuration'.", - "type": "object", - "required": [ - "version" - ], - "properties": { - "version": { - "$ref": "#/definitions/Version" - }, - "$schema": { - "description": "Jsonschema of the configuration format.", - "default": null, - "type": [ - "string", - "null" - ] - }, - "connectionSettings": { - "description": "Database connection settings.", - "default": { - "connectionUri": { - "variable": "CONNECTION_URI" - }, - "isolationLevel": "ReadCommitted", - "poolSettings": { - "checkConnectionAfterIdle": 60, - "connectionLifetime": 600, - "idleTimeout": 180, - "maxConnections": 50, - "poolTimeout": 30 - } - }, - "allOf": [ - { - "$ref": "#/definitions/DatabaseConnectionSettings" - } - ] - }, - "metadata": { - "description": "Connector metadata.", - "default": { - "aggregateFunctions": {}, - "comparisonOperators": {}, - "compositeTypes": {}, - "nativeQueries": {}, - "tables": {}, - "typeRepresentations": {} - }, - "allOf": [ - { - "$ref": "#/definitions/Metadata" - } - ] - }, - "introspectionOptions": { - "description": "Database introspection options.", - "default": { - "comparisonOperatorMapping": [ - { - "exposedName": "_eq", - "operatorKind": "equal", - "operatorName": "=" - }, - { - "exposedName": "_lte", - "operatorKind": "custom", - "operatorName": "<=" - }, - { - "exposedName": "_gt", - "operatorKind": "custom", - "operatorName": ">" - }, - { - "exposedName": "_gte", - "operatorKind": "custom", - "operatorName": ">=" - }, - { - "exposedName": "_lt", - "operatorKind": "custom", - "operatorName": "<" - }, - { - "exposedName": "_neq", - "operatorKind": "custom", - "operatorName": "!=" - }, - { - "exposedName": "_like", - "operatorKind": "custom", - "operatorName": "LIKE" - }, - { - "exposedName": "_nlike", - "operatorKind": "custom", - "operatorName": "NOT LIKE" - }, - { - "exposedName": "_ilike", - "operatorKind": "custom", - "operatorName": "ILIKE" - }, - { - "exposedName": "_nilike", - "operatorKind": "custom", - "operatorName": "NOT ILIKE" - }, - { - "exposedName": "_similar", - "operatorKind": "custom", - "operatorName": "SIMILAR TO" - }, - { - "exposedName": "_nsimilar", - "operatorKind": "custom", - "operatorName": "NOT SIMILAR TO" - }, - { - "exposedName": "_neq", - "operatorKind": "custom", - "operatorName": "<>" - }, - { - "exposedName": "_like", - "operatorKind": "custom", - "operatorName": "~~" - }, - { - "exposedName": "_nlike", - "operatorKind": "custom", - "operatorName": "!~~" - }, - { - "exposedName": "_ilike", - "operatorKind": "custom", - "operatorName": "~~*" - }, - { - "exposedName": "_nilike", - "operatorKind": "custom", - "operatorName": "!~~*" - }, - { - "exposedName": "_regex", - "operatorKind": "custom", - "operatorName": "~" - }, - { - "exposedName": "_nregex", - "operatorKind": "custom", - "operatorName": "!~" - }, - { - "exposedName": "_iregex", - "operatorKind": "custom", - "operatorName": "~*" - }, - { - "exposedName": "_niregex", - "operatorKind": "custom", - "operatorName": "!~*" - } - ], - "excludedSchemas": [ - "information_schema", - "pg_catalog", - 
"tiger", - "crdb_internal", - "columnar", - "columnar_internal" - ], - "introspectPrefixFunctionComparisonOperators": [ - "box_above", - "box_below", - "box_contain", - "box_contain_pt", - "box_contained", - "box_left", - "box_overabove", - "box_overbelow", - "box_overlap", - "box_overleft", - "box_overright", - "box_right", - "box_same", - "circle_above", - "circle_below", - "circle_contain", - "circle_contain_pt", - "circle_contained", - "circle_left", - "circle_overabove", - "circle_overbelow", - "circle_overlap", - "circle_overleft", - "circle_overright", - "circle_right", - "circle_same", - "contains_2d", - "equals", - "geography_overlaps", - "geometry_above", - "geometry_below", - "geometry_contained_3d", - "geometry_contains", - "geometry_contains_3d", - "geometry_contains_nd", - "geometry_left", - "geometry_overabove", - "geometry_overbelow", - "geometry_overlaps", - "geometry_overlaps_3d", - "geometry_overlaps_nd", - "geometry_overleft", - "geometry_overright", - "geometry_right", - "geometry_same", - "geometry_same_3d", - "geometry_same_nd", - "geometry_within", - "geometry_within_nd", - "inet_same_family", - "inter_lb", - "inter_sb", - "inter_sl", - "is_contained_2d", - "ishorizontal", - "isparallel", - "isperp", - "isvertical", - "jsonb_contained", - "jsonb_contains", - "jsonb_exists", - "jsonb_path_exists_opr", - "jsonb_path_match_opr", - "line_intersect", - "line_parallel", - "line_perp", - "lseg_intersect", - "lseg_parallel", - "lseg_perp", - "network_overlap", - "network_sub", - "network_sup", - "on_pb", - "on_pl", - "on_ppath", - "on_ps", - "on_sb", - "on_sl", - "overlaps_2d", - "path_contain_pt", - "path_inter", - "point_above", - "point_below", - "point_horiz", - "point_left", - "point_right", - "point_vert", - "poly_above", - "poly_below", - "poly_contain", - "poly_contain_pt", - "poly_contained", - "poly_left", - "poly_overabove", - "poly_overbelow", - "poly_overlap", - "poly_overleft", - "poly_overright", - "poly_right", - "poly_same", - "pt_contained_poly", - "st_3dintersects", - "st_contains", - "st_containsproperly", - "st_coveredby", - "st_covers", - "st_crosses", - "st_disjoint", - "st_equals", - "st_intersects", - "st_isvalid", - "st_orderingequals", - "st_overlaps", - "st_relatematch", - "st_touches", - "st_within", - "starts_with", - "ts_match_qv", - "ts_match_tq", - "ts_match_tt", - "ts_match_vq", - "tsq_mcontained", - "tsq_mcontains", - "xmlexists", - "xmlvalidate", - "xpath_exists" - ], - "unqualifiedSchemasForTables": [ - "public" - ], - "unqualifiedSchemasForTypesAndProcedures": [ - "public", - "pg_catalog", - "tiger", - "auth", - "pgsodium" - ] - }, - "allOf": [ - { - "$ref": "#/definitions/IntrospectionOptions" - } - ] - }, - "mutationsVersion": { - "description": "Which version of the generated mutation procedures to include in the schema response", - "default": null, - "anyOf": [ - { - "$ref": "#/definitions/MutationsVersion" - }, - { - "type": "null" - } - ] - } - }, - "definitions": { - "Version": { - "type": "string", - "enum": [ - "3" - ] - }, - "DatabaseConnectionSettings": { - "description": "Database connection settings.", - "type": "object", - "required": [ - "connectionUri" - ], - "properties": { - "connectionUri": { - "description": "Connection string for a Postgres-compatible database.", - "allOf": [ - { - "$ref": "#/definitions/ConnectionUri" - } - ] - }, - "poolSettings": { - "description": "Connection pool settings.", - "default": { - "checkConnectionAfterIdle": 60, - "connectionLifetime": 600, - "idleTimeout": 180, - "maxConnections": 
50, - "poolTimeout": 30 - }, - "allOf": [ - { - "$ref": "#/definitions/PoolSettings" - } - ] - }, - "isolationLevel": { - "description": "Query isolation level.", - "default": "ReadCommitted", - "allOf": [ - { - "$ref": "#/definitions/IsolationLevel" - } - ] - } - } - }, - "ConnectionUri": { - "$ref": "#/definitions/Secret" - }, - "Secret": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "object", - "required": [ - "variable" - ], - "properties": { - "variable": { - "$ref": "#/definitions/Variable" - } - } - } - ] - }, - "Variable": { - "description": "The name of an an environment variable.", - "type": "string" - }, - "PoolSettings": { - "description": "Settings for the PostgreSQL connection pool", - "type": "object", - "properties": { - "maxConnections": { - "description": "maximum number of pool connections", - "default": 50, - "type": "integer", - "format": "uint32", - "minimum": 0.0 - }, - "poolTimeout": { - "description": "timeout for acquiring a connection from the pool (seconds)", - "default": 30, - "type": "integer", - "format": "uint64", - "minimum": 0.0 - }, - "idleTimeout": { - "description": "idle timeout for releasing a connection from the pool (seconds)", - "default": 180, - "type": [ - "integer", - "null" - ], - "format": "uint64", - "minimum": 0.0 - }, - "checkConnectionAfterIdle": { - "description": "check the connection is alive after being idle for N seconds. Set to null to always check.", - "default": 60, - "type": [ - "integer", - "null" - ], - "format": "uint64", - "minimum": 0.0 - }, - "connectionLifetime": { - "description": "maximum lifetime for an individual connection (seconds)", - "default": 600, - "type": [ - "integer", - "null" - ], - "format": "uint64", - "minimum": 0.0 - } - } - }, - "IsolationLevel": { - "description": "The isolation level of the transaction in which a query is executed.", - "oneOf": [ - { - "description": "Prevents reading data from another uncommitted transaction.", - "type": "string", - "enum": [ - "ReadCommitted" - ] - }, - { - "description": "Reading the same data twice is guaranteed to return the same result.", - "type": "string", - "enum": [ - "RepeatableRead" - ] - }, - { - "description": "Concurrent transactions behave identically to serializing them one at a time.", - "type": "string", - "enum": [ - "Serializable" - ] - } - ] - }, - "Metadata": { - "description": "Metadata information.", - "type": "object", - "properties": { - "tables": { - "default": {}, - "allOf": [ - { - "$ref": "#/definitions/TablesInfo" - } - ] - }, - "compositeTypes": { - "default": {}, - "allOf": [ - { - "$ref": "#/definitions/CompositeTypes" - } - ] - }, - "nativeQueries": { - "default": {}, - "allOf": [ - { - "$ref": "#/definitions/NativeQueries" - } - ] - }, - "aggregateFunctions": { - "default": {}, - "allOf": [ - { - "$ref": "#/definitions/AggregateFunctions" - } - ] - }, - "comparisonOperators": { - "default": {}, - "allOf": [ - { - "$ref": "#/definitions/ComparisonOperators" - } - ] - }, - "typeRepresentations": { - "default": {}, - "allOf": [ - { - "$ref": "#/definitions/TypeRepresentations" - } - ] - } - } - }, - "TablesInfo": { - "description": "Mapping from a \"table\" name to its information.", - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/TableInfo" - } - }, - "TableInfo": { - "description": "Information about a database table (or any other kind of relation).", - "type": "object", - "required": [ - "columns", - "schemaName", - "tableName" - ], - "properties": { - "schemaName": { - "type": "string" - }, - 
"tableName": { - "type": "string" - }, - "columns": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/ColumnInfo" - } - }, - "uniquenessConstraints": { - "default": {}, - "allOf": [ - { - "$ref": "#/definitions/UniquenessConstraints" - } - ] - }, - "foreignRelations": { - "default": {}, - "allOf": [ - { - "$ref": "#/definitions/ForeignRelations" - } - ] - }, - "description": { - "default": null, - "type": [ - "string", - "null" - ] - } - } - }, - "ColumnInfo": { - "description": "Information about a database column.", - "type": "object", - "required": [ - "name", - "type" - ], - "properties": { - "name": { - "type": "string" - }, - "type": { - "$ref": "#/definitions/Type" - }, - "nullable": { - "default": "nullable", - "allOf": [ - { - "$ref": "#/definitions/Nullable" - } - ] - }, - "hasDefault": { - "$ref": "#/definitions/HasDefault" - }, - "isIdentity": { - "$ref": "#/definitions/IsIdentity" - }, - "isGenerated": { - "$ref": "#/definitions/IsGenerated" - }, - "description": { - "default": null, - "type": [ - "string", - "null" - ] - } - } - }, - "Type": { - "description": "The type of values that a column, field, or argument may take.", - "oneOf": [ - { - "type": "object", - "required": [ - "scalarType" - ], - "properties": { - "scalarType": { - "$ref": "#/definitions/ScalarType" - } - }, - "additionalProperties": false - }, - { - "type": "object", - "required": [ - "compositeType" - ], - "properties": { - "compositeType": { - "type": "string" - } - }, - "additionalProperties": false - }, - { - "type": "object", - "required": [ - "arrayType" - ], - "properties": { - "arrayType": { - "$ref": "#/definitions/Type" - } - }, - "additionalProperties": false - } - ] - }, - "ScalarType": { - "description": "A Scalar Type.", - "type": "string" - }, - "Nullable": { - "description": "Can this column contain null values", - "type": "string", - "enum": [ - "nullable", - "nonNullable" - ] - }, - "HasDefault": { - "description": "Does this column have a default value.", - "type": "string", - "enum": [ - "noDefault", - "hasDefault" - ] - }, - "IsIdentity": { - "description": "Is this column an identity column.", - "type": "string", - "enum": [ - "notIdentity", - "identityByDefault", - "identityAlways" - ] - }, - "IsGenerated": { - "description": "Is this column a generated column.", - "type": "string", - "enum": [ - "notGenerated", - "stored" - ] - }, - "UniquenessConstraints": { - "description": "A mapping from the name of a unique constraint to its value.", - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/UniquenessConstraint" - } - }, - "UniquenessConstraint": { - "description": "The set of columns that make up a uniqueness constraint.", - "type": "array", - "items": { - "type": "string" - }, - "uniqueItems": true - }, - "ForeignRelations": { - "description": "A mapping from the name of a foreign key constraint to its value.", - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/ForeignRelation" - } - }, - "ForeignRelation": { - "description": "A foreign key constraint.", - "type": "object", - "required": [ - "columnMapping", - "foreignTable" - ], - "properties": { - "foreignSchema": { - "type": [ - "string", - "null" - ] - }, - "foreignTable": { - "type": "string" - }, - "columnMapping": { - "type": "object", - "additionalProperties": { - "type": "string" - } - } - } - }, - "CompositeTypes": { - "description": "Map of all known composite types.", - "type": "object", - "additionalProperties": { - "$ref": 
"#/definitions/CompositeType" - } - }, - "CompositeType": { - "description": "Information about a composite type. These are very similar to tables, but with the crucial difference that composite types do not support constraints (such as NOT NULL).", - "type": "object", - "required": [ - "fields", - "name" - ], - "properties": { - "name": { - "type": "string" - }, - "fields": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/FieldInfo" - } - }, - "description": { - "default": null, - "type": [ - "string", - "null" - ] - } - } - }, - "FieldInfo": { - "description": "Information about a composite type field.", - "type": "object", - "required": [ - "name", - "type" - ], - "properties": { - "name": { - "type": "string" - }, - "type": { - "$ref": "#/definitions/Type" - }, - "description": { - "default": null, - "type": [ - "string", - "null" - ] - } - } - }, - "NativeQueries": { - "description": "Metadata information of native queries.", - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/NativeQueryInfo" - } - }, - "NativeQueryInfo": { - "description": "Information about a Native Query", - "type": "object", - "required": [ - "columns", - "sql" - ], - "properties": { - "sql": { - "description": "SQL expression to use for the Native Query. We can interpolate values using `{{variable_name}}` syntax, such as `SELECT * FROM authors WHERE name = {{author_name}}`", - "allOf": [ - { - "$ref": "#/definitions/NativeQuerySql" - } - ] - }, - "columns": { - "description": "Columns returned by the Native Query", - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/ReadOnlyColumnInfo" - } - }, - "arguments": { - "description": "Names and types of arguments that can be passed to this Native Query", - "default": {}, - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/ReadOnlyColumnInfo" - } - }, - "description": { - "default": null, - "type": [ - "string", - "null" - ] - }, - "isProcedure": { - "description": "True if this native query mutates the database", - "type": "boolean" - } - } - }, - "NativeQuerySql": { - "description": "Native Query SQL location.", - "anyOf": [ - { - "description": "Refer to an external Native Query SQL file.", - "type": "object", - "required": [ - "file" - ], - "properties": { - "file": { - "description": "Relative path to a sql file.", - "type": "string" - } - } - }, - { - "description": "Inline Native Query SQL string.", - "type": "object", - "required": [ - "inline" - ], - "properties": { - "inline": { - "description": "An inline Native Query SQL string.", - "allOf": [ - { - "$ref": "#/definitions/InlineNativeQuerySql" - } - ] - } - } - }, - { - "$ref": "#/definitions/InlineNativeQuerySql" - } - ] - }, - "InlineNativeQuerySql": { - "type": "string" - }, - "ReadOnlyColumnInfo": { - "description": "Information about a native query column.", - "type": "object", - "required": [ - "name", - "type" - ], - "properties": { - "name": { - "type": "string" - }, - "type": { - "$ref": "#/definitions/Type" - }, - "nullable": { - "default": "nullable", - "allOf": [ - { - "$ref": "#/definitions/Nullable" - } - ] - }, - "description": { - "default": null, - "type": [ - "string", - "null" - ] - } - } - }, - "AggregateFunctions": { - "description": "All supported aggregate functions, grouped by type.", - "type": "object", - "additionalProperties": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/AggregateFunction" - } - } - }, - "AggregateFunction": { - "type": "object", - "required": [ - 
"returnType" - ], - "properties": { - "returnType": { - "$ref": "#/definitions/ScalarType" - } - } - }, - "ComparisonOperators": { - "description": "The complete list of supported binary operators for scalar types. Not all of these are supported for every type.", - "type": "object", - "additionalProperties": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/ComparisonOperator" - } - } - }, - "ComparisonOperator": { - "description": "Represents a postgres binary comparison operator", - "type": "object", - "required": [ - "argumentType", - "operatorKind", - "operatorName" - ], - "properties": { - "operatorName": { - "type": "string" - }, - "operatorKind": { - "$ref": "#/definitions/OperatorKind" - }, - "argumentType": { - "$ref": "#/definitions/ScalarType" - }, - "isInfix": { - "default": true, - "type": "boolean" - } - } - }, - "OperatorKind": { - "description": "Is it a built-in operator, or a custom operator.", - "type": "string", - "enum": [ - "equal", - "in", - "custom" - ] - }, - "TypeRepresentations": { - "description": "Type representation of scalar types, grouped by type.", - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/TypeRepresentation" - } - }, - "TypeRepresentation": { - "description": "Type representation of a scalar type.", - "oneOf": [ - { - "description": "JSON booleans", - "type": "string", - "enum": [ - "boolean" - ] - }, - { - "description": "Any JSON string", - "type": "string", - "enum": [ - "string" - ] - }, - { - "description": "float4", - "type": "string", - "enum": [ - "float32" - ] - }, - { - "description": "float8", - "type": "string", - "enum": [ - "float64" - ] - }, - { - "description": "int2", - "type": "string", - "enum": [ - "int16" - ] - }, - { - "description": "int4", - "type": "string", - "enum": [ - "int32" - ] - }, - { - "description": "int8 as integer", - "type": "string", - "enum": [ - "int64" - ] - }, - { - "description": "int8 as string", - "type": "string", - "enum": [ - "int64AsString" - ] - }, - { - "description": "numeric", - "type": "string", - "enum": [ - "bigDecimal" - ] - }, - { - "description": "numeric as string", - "type": "string", - "enum": [ - "bigDecimalAsString" - ] - }, - { - "description": "timestamp", - "type": "string", - "enum": [ - "timestamp" - ] - }, - { - "description": "timestamp with timezone", - "type": "string", - "enum": [ - "timestamptz" - ] - }, - { - "description": "time", - "type": "string", - "enum": [ - "time" - ] - }, - { - "description": "time with timezone", - "type": "string", - "enum": [ - "timetz" - ] - }, - { - "description": "date", - "type": "string", - "enum": [ - "date" - ] - }, - { - "description": "uuid", - "type": "string", - "enum": [ - "uUID" - ] - }, - { - "description": "geography", - "type": "string", - "enum": [ - "geography" - ] - }, - { - "description": "geometry", - "type": "string", - "enum": [ - "geometry" - ] - }, - { - "description": "Any JSON number", - "type": "string", - "enum": [ - "number" - ] - }, - { - "description": "Any JSON number, with no decimal part", - "type": "string", - "enum": [ - "integer" - ] - }, - { - "description": "An arbitrary json.", - "type": "string", - "enum": [ - "json" - ] - }, - { - "description": "One of the specified string values", - "type": "object", - "required": [ - "enum" - ], - "properties": { - "enum": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "additionalProperties": false - } - ] - }, - "IntrospectionOptions": { - "description": "Options which only influence how the 
configuration is updated.", - "type": "object", - "properties": { - "excludedSchemas": { - "description": "Schemas which are excluded from introspection. The default setting will exclude the internal schemas of Postgres, Citus, Cockroach, and the PostGIS extension.", - "default": [ - "information_schema", - "pg_catalog", - "tiger", - "crdb_internal", - "columnar", - "columnar_internal" - ], - "type": "array", - "items": { - "type": "string" - } - }, - "unqualifiedSchemasForTables": { - "description": "The names of Tables and Views in these schemas will be returned unqualified. The default setting will set the `public` schema as unqualified.", - "default": [ - "public" - ], - "type": "array", - "items": { - "type": "string" - } - }, - "unqualifiedSchemasForTypesAndProcedures": { - "description": "The types and procedures in these schemas will be returned unqualified.", - "default": [ - "public", - "pg_catalog", - "tiger", - "auth", - "pgsodium" - ], - "type": "array", - "items": { - "type": "string" - } - }, - "comparisonOperatorMapping": { - "description": "The mapping of comparison operator names to apply when updating the configuration", - "default": [ - { - "exposedName": "_eq", - "operatorKind": "equal", - "operatorName": "=" - }, - { - "exposedName": "_lte", - "operatorKind": "custom", - "operatorName": "<=" - }, - { - "exposedName": "_gt", - "operatorKind": "custom", - "operatorName": ">" - }, - { - "exposedName": "_gte", - "operatorKind": "custom", - "operatorName": ">=" - }, - { - "exposedName": "_lt", - "operatorKind": "custom", - "operatorName": "<" - }, - { - "exposedName": "_neq", - "operatorKind": "custom", - "operatorName": "!=" - }, - { - "exposedName": "_like", - "operatorKind": "custom", - "operatorName": "LIKE" - }, - { - "exposedName": "_nlike", - "operatorKind": "custom", - "operatorName": "NOT LIKE" - }, - { - "exposedName": "_ilike", - "operatorKind": "custom", - "operatorName": "ILIKE" - }, - { - "exposedName": "_nilike", - "operatorKind": "custom", - "operatorName": "NOT ILIKE" - }, - { - "exposedName": "_similar", - "operatorKind": "custom", - "operatorName": "SIMILAR TO" - }, - { - "exposedName": "_nsimilar", - "operatorKind": "custom", - "operatorName": "NOT SIMILAR TO" - }, - { - "exposedName": "_neq", - "operatorKind": "custom", - "operatorName": "<>" - }, - { - "exposedName": "_like", - "operatorKind": "custom", - "operatorName": "~~" - }, - { - "exposedName": "_nlike", - "operatorKind": "custom", - "operatorName": "!~~" - }, - { - "exposedName": "_ilike", - "operatorKind": "custom", - "operatorName": "~~*" - }, - { - "exposedName": "_nilike", - "operatorKind": "custom", - "operatorName": "!~~*" - }, - { - "exposedName": "_regex", - "operatorKind": "custom", - "operatorName": "~" - }, - { - "exposedName": "_nregex", - "operatorKind": "custom", - "operatorName": "!~" - }, - { - "exposedName": "_iregex", - "operatorKind": "custom", - "operatorName": "~*" - }, - { - "exposedName": "_niregex", - "operatorKind": "custom", - "operatorName": "!~*" - } - ], - "type": "array", - "items": { - "$ref": "#/definitions/ComparisonOperatorMapping" - } - }, - "introspectPrefixFunctionComparisonOperators": { - "description": "Which prefix functions (i.e., non-infix operators) to generate introspection metadata for.\n\nThis list will accept any boolean-returning function taking two concrete scalar types as arguments.\n\nThe default includes comparisons for various build-in types as well as those of PostGIS.", - "default": [ - "box_above", - "box_below", - "box_contain", - 
"box_contain_pt", - "box_contained", - "box_left", - "box_overabove", - "box_overbelow", - "box_overlap", - "box_overleft", - "box_overright", - "box_right", - "box_same", - "circle_above", - "circle_below", - "circle_contain", - "circle_contain_pt", - "circle_contained", - "circle_left", - "circle_overabove", - "circle_overbelow", - "circle_overlap", - "circle_overleft", - "circle_overright", - "circle_right", - "circle_same", - "contains_2d", - "equals", - "geography_overlaps", - "geometry_above", - "geometry_below", - "geometry_contained_3d", - "geometry_contains", - "geometry_contains_3d", - "geometry_contains_nd", - "geometry_left", - "geometry_overabove", - "geometry_overbelow", - "geometry_overlaps", - "geometry_overlaps_3d", - "geometry_overlaps_nd", - "geometry_overleft", - "geometry_overright", - "geometry_right", - "geometry_same", - "geometry_same_3d", - "geometry_same_nd", - "geometry_within", - "geometry_within_nd", - "inet_same_family", - "inter_lb", - "inter_sb", - "inter_sl", - "is_contained_2d", - "ishorizontal", - "isparallel", - "isperp", - "isvertical", - "jsonb_contained", - "jsonb_contains", - "jsonb_exists", - "jsonb_path_exists_opr", - "jsonb_path_match_opr", - "line_intersect", - "line_parallel", - "line_perp", - "lseg_intersect", - "lseg_parallel", - "lseg_perp", - "network_overlap", - "network_sub", - "network_sup", - "on_pb", - "on_pl", - "on_ppath", - "on_ps", - "on_sb", - "on_sl", - "overlaps_2d", - "path_contain_pt", - "path_inter", - "point_above", - "point_below", - "point_horiz", - "point_left", - "point_right", - "point_vert", - "poly_above", - "poly_below", - "poly_contain", - "poly_contain_pt", - "poly_contained", - "poly_left", - "poly_overabove", - "poly_overbelow", - "poly_overlap", - "poly_overleft", - "poly_overright", - "poly_right", - "poly_same", - "pt_contained_poly", - "st_3dintersects", - "st_contains", - "st_containsproperly", - "st_coveredby", - "st_covers", - "st_crosses", - "st_disjoint", - "st_equals", - "st_intersects", - "st_isvalid", - "st_orderingequals", - "st_overlaps", - "st_relatematch", - "st_touches", - "st_within", - "starts_with", - "ts_match_qv", - "ts_match_tq", - "ts_match_tt", - "ts_match_vq", - "tsq_mcontained", - "tsq_mcontains", - "xmlexists", - "xmlvalidate", - "xpath_exists" - ], - "type": "array", - "items": { - "type": "string" - } - } - } - }, - "ComparisonOperatorMapping": { - "description": "Define the names that comparison operators will be exposed as by the automatic introspection.", - "type": "object", - "required": [ - "exposedName", - "operatorKind", - "operatorName" - ], - "properties": { - "operatorName": { - "description": "The name of the operator as defined by the database", - "type": "string" - }, - "exposedName": { - "description": "The name the operator will appear under in the exposed API", - "type": "string" - }, - "operatorKind": { - "description": "Equal, In or Custom.", - "allOf": [ - { - "$ref": "#/definitions/OperatorKind" - } - ] - } - } - }, - "MutationsVersion": { - "description": "Which version of the generated mutations will be included in the schema", - "type": "string", - "enum": [ - "v1", - "veryExperimentalWip" - ] - } - } -} From b9f9bd822882d7b57399209be6a0519ef9a19d95 Mon Sep 17 00:00:00 2001 From: Tom Harding Date: Fri, 28 Jun 2024 09:05:32 +0200 Subject: [PATCH 03/28] Updates --- crates/cli/src/lib.rs | 43 +- crates/configuration/src/version4/mod.rs | 29 +- .../configuration.json | 123 ++++- .../v4-chinook-ndc-metadata/schema.json | 517 ++++++++++++------ 4 files changed, 499 
insertions(+), 213 deletions(-) diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs index 13395ac2b..56a01528b 100644 --- a/crates/cli/src/lib.rs +++ b/crates/cli/src/lib.rs @@ -12,7 +12,7 @@ use tokio::fs; use ndc_postgres_configuration as configuration; use ndc_postgres_configuration::environment::Environment; -use configuration::version3::metadata as metadatav3; +use configuration::version4::metadata as metadatav4; const UPDATE_ATTEMPTS: u8 = 3; @@ -46,7 +46,7 @@ pub enum Command { operation_path: PathBuf, #[arg(long)] - is_mutation: bool, // we can make this neater later + is_procedure: bool, // we can make this neater later }, } @@ -65,8 +65,8 @@ pub async fn run(command: Command, context: Context) -> anyhow Command::Upgrade { dir_from, dir_to } => upgrade(dir_from, dir_to).await?, Command::CreateNativeOperation { operation_path, - is_mutation, - } => create_native_operation(operation_path, context, is_mutation).await?, + is_procedure, + } => create_native_operation(operation_path, context, is_procedure).await?, }; Ok(()) } @@ -190,7 +190,7 @@ async fn create_native_operation( .ok_or(anyhow::anyhow!("Oh no, file not found"))? .to_str() .ok_or(anyhow::anyhow!("Oh no, file not found"))?; - let sql = std::fs::read_to_string(&operation_path)?; + let sql = std::fs::read_to_string(format!("{}/{}", context.context_path.to_str().unwrap(), &operation_path.to_str().unwrap()))?; let mut configuration = configuration::parse_configuration(context.context_path.clone()).await?; let connection_uri = match configuration { @@ -218,7 +218,7 @@ async fn create_native_operation( .map(|c| c.extract()) .enumerate() { - parameters.insert(index + 1, name); + parameters.insert(index + 1, name); // We might use the same param twice } let mut final_statement = sql.clone(); @@ -236,11 +236,11 @@ async fn create_native_operation( for param in 0 .. description.nparams() { arguments.insert( parameters.get(&(param + 1)).ok_or(anyhow::anyhow!(":("))?.to_string(), - metadatav3::ReadOnlyColumnInfo { + metadatav4::ReadOnlyColumnInfo { name: parameters.get(&(param + 1)).ok_or(anyhow::anyhow!(":("))?.to_string(), - r#type: metadatav3::Type::ScalarType(metadatav3::ScalarType(format!("{}", description.param_type(param).unwrap()))), + r#type: metadatav4::Type::ScalarType(metadatav4::ScalarTypeName(format!("{}", description.param_type(param).unwrap()))), description: None, - nullable: metadatav3::Nullable::NonNullable, + nullable: metadatav4::Nullable::NonNullable, } ); } @@ -248,34 +248,37 @@ async fn create_native_operation( for field in 0 .. 
description.nfields() { columns.insert( description.field_name(field)?.unwrap(), - metadatav3::ReadOnlyColumnInfo { + metadatav4::ReadOnlyColumnInfo { name: description.field_name(field)?.unwrap(), - r#type: metadatav3::Type::ScalarType(metadatav3::ScalarType(format!("{}", description.field_type(field)))), + r#type: metadatav4::Type::ScalarType(metadatav4::ScalarTypeName(format!("{}", description.field_type(field)))), description: None, - nullable: metadatav3::Nullable::NonNullable, + nullable: metadatav4::Nullable::NonNullable, } ); } match configuration { - configuration::ParsedConfiguration::Version3(ref mut raw_configuration) => + configuration::ParsedConfiguration::Version3(_) => { + panic!(":(") + } + configuration::ParsedConfiguration::Version4(ref mut configuration) => { // TODO: should we overwrite or not - raw_configuration.metadata.native_queries.0.insert( + configuration.metadata.native_queries.0.insert( identifier.to_string(), - metadatav3::NativeQueryInfo { - sql: metadatav3::NativeQuerySqlEither::NativeQuerySqlExternal( - metadatav3::NativeQuerySqlExternal::File { file: operation_path } + metadatav4::NativeQueryInfo { + sql: metadatav4::NativeQuerySqlEither::NativeQuerySqlExternal( + metadatav4::NativeQuerySqlExternal::File { file: operation_path } ), - arguments, columns, is_procedure, description: None, } - ), - configuration::ParsedConfiguration::Version4(_) => panic!("Later") + ); + } }; + println!("{:#?}", configuration::version4::attempt_to_find_type_name_for(&connection_string, &[23]).await); configuration::write_parsed_configuration(configuration, context.context_path).await?; Ok(()) } diff --git a/crates/configuration/src/version4/mod.rs b/crates/configuration/src/version4/mod.rs index d8a3814e3..ea53b7234 100644 --- a/crates/configuration/src/version4/mod.rs +++ b/crates/configuration/src/version4/mod.rs @@ -2,13 +2,13 @@ mod comparison; pub mod connection_settings; -mod metadata; +pub mod metadata; mod options; mod to_runtime_configuration; mod upgrade_from_v3; use std::borrow::Cow; -use std::collections::HashSet; +use std::collections::{BTreeMap, HashSet}; use std::path::Path; pub use to_runtime_configuration::make_runtime_configuration; pub use upgrade_from_v3::upgrade_from_v3; @@ -272,3 +272,28 @@ pub async fn write_parsed_configuration( Ok(()) } + +pub async fn attempt_to_find_type_name_for(connection_string: &String, oids: &[i32]) -> Result, sqlx::Error> { + let mut sqlx = PgConnection::connect(&connection_string) + .instrument(info_span!("Connect to database")) + .await?; + + let query = + sqlx::query("SELECT typnamespace::regnamespace::text as schema, typname as name, oid::integer FROM pg_type WHERE oid in (SELECT unnest($1))") + .bind(oids); + + let rows = sqlx + .fetch_all(query) + .instrument(info_span!("Run oid lookup query")) + .await?; + + for row in rows { + let schema: String = row.get(0); + let name: String = row.get(1); + let oid: i32 = row.get(2); + + println!("{schema}, {name}, {oid}"); + } + + Ok(BTreeMap::new()) +} diff --git a/static/postgres/v4-chinook-ndc-metadata/configuration.json b/static/postgres/v4-chinook-ndc-metadata/configuration.json index c5d2b50ab..2fca9153e 100644 --- a/static/postgres/v4-chinook-ndc-metadata/configuration.json +++ b/static/postgres/v4-chinook-ndc-metadata/configuration.json @@ -46,7 +46,9 @@ } }, "uniquenessConstraints": { - "PK_Album": ["AlbumId"] + "PK_Album": [ + "AlbumId" + ] }, "foreignRelations": { "FK_AlbumArtistId": { @@ -81,7 +83,9 @@ } }, "uniquenessConstraints": { - "PK_Artist": ["ArtistId"] + 
"PK_Artist": [ + "ArtistId" + ] }, "foreignRelations": {}, "description": "The record of all artists" @@ -196,7 +200,9 @@ } }, "uniquenessConstraints": { - "PK_Customer": ["CustomerId"] + "PK_Customer": [ + "CustomerId" + ] }, "foreignRelations": { "FK_CustomerSupportRepId": { @@ -335,7 +341,9 @@ } }, "uniquenessConstraints": { - "PK_Employee": ["EmployeeId"] + "PK_Employee": [ + "EmployeeId" + ] }, "foreignRelations": { "FK_EmployeeReportsTo": { @@ -370,7 +378,9 @@ } }, "uniquenessConstraints": { - "PK_Genre": ["GenreId"] + "PK_Genre": [ + "GenreId" + ] }, "foreignRelations": {}, "description": null @@ -453,7 +463,9 @@ } }, "uniquenessConstraints": { - "PK_Invoice": ["InvoiceId"] + "PK_Invoice": [ + "InvoiceId" + ] }, "foreignRelations": { "FK_InvoiceCustomerId": { @@ -512,7 +524,9 @@ } }, "uniquenessConstraints": { - "PK_InvoiceLine": ["InvoiceLineId"] + "PK_InvoiceLine": [ + "InvoiceLineId" + ] }, "foreignRelations": { "FK_InvoiceLineInvoiceId": { @@ -554,7 +568,9 @@ } }, "uniquenessConstraints": { - "PK_MediaType": ["MediaTypeId"] + "PK_MediaType": [ + "MediaTypeId" + ] }, "foreignRelations": {}, "description": null @@ -581,7 +597,9 @@ } }, "uniquenessConstraints": { - "PK_Playlist": ["PlaylistId"] + "PK_Playlist": [ + "PlaylistId" + ] }, "foreignRelations": {}, "description": null @@ -608,7 +626,10 @@ } }, "uniquenessConstraints": { - "PK_PlaylistTrack": ["PlaylistId", "TrackId"] + "PK_PlaylistTrack": [ + "PlaylistId", + "TrackId" + ] }, "foreignRelations": { "FK_PlaylistTrackPlaylistId": { @@ -706,7 +727,9 @@ } }, "uniquenessConstraints": { - "PK_Track": ["TrackId"] + "PK_Track": [ + "TrackId" + ] }, "foreignRelations": { "FK_TrackAlbumId": { @@ -784,7 +807,9 @@ } }, "uniquenessConstraints": { - "defaults_pkey": ["id"] + "defaults_pkey": [ + "id" + ] }, "foreignRelations": {}, "description": null @@ -847,7 +872,9 @@ } }, "uniquenessConstraints": { - "dog_pkey": ["id"] + "dog_pkey": [ + "id" + ] }, "foreignRelations": {}, "description": null @@ -1007,7 +1034,9 @@ } }, "uniquenessConstraints": { - "spatial_ref_sys_pkey": ["srid"] + "spatial_ref_sys_pkey": [ + "srid" + ] }, "foreignRelations": {}, "description": null @@ -1083,7 +1112,10 @@ } }, "uniquenessConstraints": { - "layer_pkey": ["layer_id", "topology_id"], + "layer_pkey": [ + "layer_id", + "topology_id" + ], "layer_schema_name_table_name_feature_column_key": [ "feature_column", "schema_name", @@ -1149,8 +1181,12 @@ } }, "uniquenessConstraints": { - "topology_name_key": ["name"], - "topology_pkey": ["id"] + "topology_name_key": [ + "name" + ], + "topology_pkey": [ + "id" + ] }, "foreignRelations": {}, "description": null @@ -1417,7 +1453,12 @@ } }, "typeRepresentation": { - "enum": ["hearts", "clubs", "diamonds", "spades"] + "enum": [ + "hearts", + "clubs", + "diamonds", + "spades" + ] } }, "char": { @@ -3500,6 +3541,48 @@ }, "description": "A native query used to test support array-valued variables" }, + "test": { + "sql": { + "file": "test.sql" + }, + "columns": { + "ArtistId": { + "name": "ArtistId", + "type": { + "scalarType": "23" + }, + "nullable": "nonNullable", + "description": null + }, + "Name": { + "name": "Name", + "type": { + "scalarType": "1043" + }, + "nullable": "nonNullable", + "description": null + } + }, + "arguments": { + "gil": { + "name": "gil", + "type": { + "scalarType": "23" + }, + "nullable": "nonNullable", + "description": null + }, + "tom": { + "name": "tom", + "type": { + "scalarType": "23" + }, + "nullable": "nonNullable", + "description": null + } + }, + "description": null + }, "value_types": { 
"sql": { "inline": "SELECT {{bool}} as bool, {{int4}} as int4, {{int2}} as int2, {{int8}} as int8, {{float4}} as float4, {{float8}} as \"float8\", {{numeric}} as numeric, {{char}} as char, {{varchar}} as \"varchar\", {{text}} as text, {{date}} as date, {{time}} as time, {{timetz}} as timetz, {{timestamp}} as timestamp, {{timestamptz}} as timestamptz, {{uuid}} as uuid" @@ -3777,7 +3860,9 @@ "columnar", "columnar_internal" ], - "unqualifiedSchemasForTables": ["public"], + "unqualifiedSchemasForTables": [ + "public" + ], "unqualifiedSchemasForTypesAndProcedures": [ "public", "pg_catalog", diff --git a/static/postgres/v4-chinook-ndc-metadata/schema.json b/static/postgres/v4-chinook-ndc-metadata/schema.json index 51856f83e..930a7a39e 100644 --- a/static/postgres/v4-chinook-ndc-metadata/schema.json +++ b/static/postgres/v4-chinook-ndc-metadata/schema.json @@ -3,14 +3,20 @@ "title": "ParsedConfiguration", "description": "Initial configuration, just enough to connect to a database and elaborate a full 'Configuration'.", "type": "object", - "required": ["version"], + "required": [ + "version" + ], "properties": { "version": { "$ref": "#/definitions/Version" }, "$schema": { "description": "Jsonschema of the configuration format.", - "type": ["string", "null"] + "default": null, + "type": [ + "string", + "null" + ] }, "connectionSettings": { "description": "Database connection settings.", @@ -18,14 +24,14 @@ "connectionUri": { "variable": "CONNECTION_URI" }, + "isolationLevel": "ReadCommitted", "poolSettings": { - "maxConnections": 50, - "poolTimeout": 30, - "idleTimeout": 180, "checkConnectionAfterIdle": 60, - "connectionLifetime": 600 - }, - "isolationLevel": "ReadCommitted" + "connectionLifetime": 600, + "idleTimeout": 180, + "maxConnections": 50, + "poolTimeout": 30 + } }, "allOf": [ { @@ -36,10 +42,10 @@ "metadata": { "description": "Connector metadata.", "default": { - "tables": {}, - "scalarTypes": {}, "compositeTypes": {}, - "nativeQueries": {} + "nativeQueries": {}, + "scalarTypes": {}, + "tables": {} }, "allOf": [ { @@ -50,122 +56,116 @@ "introspectionOptions": { "description": "Database introspection options.", "default": { - "excludedSchemas": [ - "information_schema", - "pg_catalog", - "tiger", - "crdb_internal", - "columnar", - "columnar_internal" - ], - "unqualifiedSchemasForTables": ["public"], - "unqualifiedSchemasForTypesAndProcedures": [ - "public", - "pg_catalog", - "tiger" - ], "comparisonOperatorMapping": [ { - "operatorName": "=", "exposedName": "_eq", - "operatorKind": "equal" + "operatorKind": "equal", + "operatorName": "=" }, { - "operatorName": "<=", "exposedName": "_lte", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "<=" }, { - "operatorName": ">", "exposedName": "_gt", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": ">" }, { - "operatorName": ">=", "exposedName": "_gte", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": ">=" }, { - "operatorName": "<", "exposedName": "_lt", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "<" }, { - "operatorName": "!=", "exposedName": "_neq", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "!=" }, { - "operatorName": "LIKE", "exposedName": "_like", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "LIKE" }, { - "operatorName": "NOT LIKE", "exposedName": "_nlike", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "NOT LIKE" }, { - "operatorName": "ILIKE", "exposedName": 
"_ilike", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "ILIKE" }, { - "operatorName": "NOT ILIKE", "exposedName": "_nilike", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "NOT ILIKE" }, { - "operatorName": "SIMILAR TO", "exposedName": "_similar", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "SIMILAR TO" }, { - "operatorName": "NOT SIMILAR TO", "exposedName": "_nsimilar", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "NOT SIMILAR TO" }, { - "operatorName": "~~", "exposedName": "_like", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "~~" }, { - "operatorName": "!~~", "exposedName": "_nlike", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "!~~" }, { - "operatorName": "~~*", "exposedName": "_ilike", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "~~*" }, { - "operatorName": "!~~*", "exposedName": "_nilike", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "!~~*" }, { - "operatorName": "~", "exposedName": "_regex", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "~" }, { - "operatorName": "!~", "exposedName": "_nregex", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "!~" }, { - "operatorName": "~*", "exposedName": "_iregex", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "~*" }, { - "operatorName": "!~*", "exposedName": "_niregex", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "!~*" } ], + "excludedSchemas": [ + "information_schema", + "pg_catalog", + "tiger", + "crdb_internal", + "columnar", + "columnar_internal" + ], "introspectPrefixFunctionComparisonOperators": [ "box_above", "box_below", @@ -313,7 +313,15 @@ "timetz": "timetz", "uuid": "uUID", "varchar": "string" - } + }, + "unqualifiedSchemasForTables": [ + "public" + ], + "unqualifiedSchemasForTypesAndProcedures": [ + "public", + "pg_catalog", + "tiger" + ] }, "allOf": [ { @@ -323,6 +331,7 @@ }, "mutationsVersion": { "description": "Which version of the generated mutation procedures to include in the schema response", + "default": null, "anyOf": [ { "$ref": "#/definitions/MutationsVersion" @@ -336,12 +345,16 @@ "definitions": { "Version": { "type": "string", - "enum": ["4"] + "enum": [ + "4" + ] }, "DatabaseConnectionSettings": { "description": "Database connection settings.", "type": "object", - "required": ["connectionUri"], + "required": [ + "connectionUri" + ], "properties": { "connectionUri": { "description": "Connection string for a Postgres-compatible database.", @@ -354,11 +367,11 @@ "poolSettings": { "description": "Connection pool settings.", "default": { - "maxConnections": 50, - "poolTimeout": 30, - "idleTimeout": 180, "checkConnectionAfterIdle": 60, - "connectionLifetime": 600 + "connectionLifetime": 600, + "idleTimeout": 180, + "maxConnections": 50, + "poolTimeout": 30 }, "allOf": [ { @@ -387,7 +400,9 @@ }, { "type": "object", - "required": ["variable"], + "required": [ + "variable" + ], "properties": { "variable": { "$ref": "#/definitions/Variable" @@ -421,21 +436,30 @@ "idleTimeout": { "description": "idle timeout for releasing a connection from the pool (seconds)", "default": 180, - "type": ["integer", "null"], + "type": [ + "integer", + "null" + ], "format": "uint64", "minimum": 0.0 }, "checkConnectionAfterIdle": { "description": "check the connection is alive after being idle for N seconds. 
Set to null to always check.", "default": 60, - "type": ["integer", "null"], + "type": [ + "integer", + "null" + ], "format": "uint64", "minimum": 0.0 }, "connectionLifetime": { "description": "maximum lifetime for an individual connection (seconds)", "default": 600, - "type": ["integer", "null"], + "type": [ + "integer", + "null" + ], "format": "uint64", "minimum": 0.0 } @@ -447,17 +471,23 @@ { "description": "Prevents reading data from another uncommitted transaction.", "type": "string", - "enum": ["ReadCommitted"] + "enum": [ + "ReadCommitted" + ] }, { "description": "Reading the same data twice is guaranteed to return the same result.", "type": "string", - "enum": ["RepeatableRead"] + "enum": [ + "RepeatableRead" + ] }, { "description": "Concurrent transactions behave identically to serializing them one at a time.", "type": "string", - "enum": ["Serializable"] + "enum": [ + "Serializable" + ] } ] }, @@ -509,7 +539,11 @@ "TableInfo": { "description": "Information about a database table (or any other kind of relation).", "type": "object", - "required": ["columns", "schemaName", "tableName"], + "required": [ + "columns", + "schemaName", + "tableName" + ], "properties": { "schemaName": { "type": "string" @@ -540,14 +574,21 @@ ] }, "description": { - "type": ["string", "null"] + "default": null, + "type": [ + "string", + "null" + ] } } }, "ColumnInfo": { "description": "Information about a database column.", "type": "object", - "required": ["name", "type"], + "required": [ + "name", + "type" + ], "properties": { "name": { "type": "string" @@ -573,7 +614,11 @@ "$ref": "#/definitions/IsGenerated" }, "description": { - "type": ["string", "null"] + "default": null, + "type": [ + "string", + "null" + ] } } }, @@ -582,7 +627,9 @@ "oneOf": [ { "type": "object", - "required": ["scalarType"], + "required": [ + "scalarType" + ], "properties": { "scalarType": { "$ref": "#/definitions/ScalarTypeName" @@ -592,7 +639,9 @@ }, { "type": "object", - "required": ["compositeType"], + "required": [ + "compositeType" + ], "properties": { "compositeType": { "$ref": "#/definitions/CompositeTypeName" @@ -602,7 +651,9 @@ }, { "type": "object", - "required": ["arrayType"], + "required": [ + "arrayType" + ], "properties": { "arrayType": { "$ref": "#/definitions/Type" @@ -623,22 +674,35 @@ "Nullable": { "description": "Can this column contain null values", "type": "string", - "enum": ["nullable", "nonNullable"] + "enum": [ + "nullable", + "nonNullable" + ] }, "HasDefault": { "description": "Does this column have a default value.", "type": "string", - "enum": ["noDefault", "hasDefault"] + "enum": [ + "noDefault", + "hasDefault" + ] }, "IsIdentity": { "description": "Is this column an identity column.", "type": "string", - "enum": ["notIdentity", "identityByDefault", "identityAlways"] + "enum": [ + "notIdentity", + "identityByDefault", + "identityAlways" + ] }, "IsGenerated": { "description": "Is this column a generated column.", "type": "string", - "enum": ["notGenerated", "stored"] + "enum": [ + "notGenerated", + "stored" + ] }, "UniquenessConstraints": { "description": "A mapping from the name of a unique constraint to its value.", @@ -665,10 +729,16 @@ "ForeignRelation": { "description": "A foreign key constraint.", "type": "object", - "required": ["columnMapping", "foreignTable"], + "required": [ + "columnMapping", + "foreignTable" + ], "properties": { "foreignSchema": { - "type": ["string", "null"] + "type": [ + "string", + "null" + ] }, "foreignTable": { "type": "string" @@ -705,7 +775,10 @@ "type": "string" }, 
"description": { - "type": ["string", "null"] + "type": [ + "string", + "null" + ] }, "aggregateFunctions": { "type": "object", @@ -733,7 +806,9 @@ }, "AggregateFunction": { "type": "object", - "required": ["returnType"], + "required": [ + "returnType" + ], "properties": { "returnType": { "$ref": "#/definitions/ScalarTypeName" @@ -743,7 +818,11 @@ "ComparisonOperator": { "description": "Represents a postgres binary comparison operator", "type": "object", - "required": ["argumentType", "operatorKind", "operatorName"], + "required": [ + "argumentType", + "operatorKind", + "operatorName" + ], "properties": { "operatorName": { "type": "string" @@ -763,7 +842,11 @@ "OperatorKind": { "description": "Is it a built-in operator, or a custom operator.", "type": "string", - "enum": ["equal", "in", "custom"] + "enum": [ + "equal", + "in", + "custom" + ] }, "TypeRepresentation": { "description": "Type representation of a scalar type.", @@ -771,112 +854,156 @@ { "description": "JSON booleans", "type": "string", - "enum": ["boolean"] + "enum": [ + "boolean" + ] }, { "description": "Any JSON string", "type": "string", - "enum": ["string"] + "enum": [ + "string" + ] }, { "description": "float4", "type": "string", - "enum": ["float32"] + "enum": [ + "float32" + ] }, { "description": "float8", "type": "string", - "enum": ["float64"] + "enum": [ + "float64" + ] }, { "description": "int2", "type": "string", - "enum": ["int16"] + "enum": [ + "int16" + ] }, { "description": "int4", "type": "string", - "enum": ["int32"] + "enum": [ + "int32" + ] }, { "description": "int8 as integer", "type": "string", - "enum": ["int64"] + "enum": [ + "int64" + ] }, { "description": "int8 as string", "type": "string", - "enum": ["int64AsString"] + "enum": [ + "int64AsString" + ] }, { "description": "numeric", "type": "string", - "enum": ["bigDecimal"] + "enum": [ + "bigDecimal" + ] }, { "description": "numeric as string", "type": "string", - "enum": ["bigDecimalAsString"] + "enum": [ + "bigDecimalAsString" + ] }, { "description": "timestamp", "type": "string", - "enum": ["timestamp"] + "enum": [ + "timestamp" + ] }, { "description": "timestamp with timezone", "type": "string", - "enum": ["timestamptz"] + "enum": [ + "timestamptz" + ] }, { "description": "time", "type": "string", - "enum": ["time"] + "enum": [ + "time" + ] }, { "description": "time with timezone", "type": "string", - "enum": ["timetz"] + "enum": [ + "timetz" + ] }, { "description": "date", "type": "string", - "enum": ["date"] + "enum": [ + "date" + ] }, { "description": "uuid", "type": "string", - "enum": ["uUID"] + "enum": [ + "uUID" + ] }, { "description": "geography", "type": "string", - "enum": ["geography"] + "enum": [ + "geography" + ] }, { "description": "geometry", "type": "string", - "enum": ["geometry"] + "enum": [ + "geometry" + ] }, { "description": "Any JSON number", "type": "string", - "enum": ["number"] + "enum": [ + "number" + ] }, { "description": "Any JSON number, with no decimal part", "type": "string", - "enum": ["integer"] + "enum": [ + "integer" + ] }, { "description": "An arbitrary json.", "type": "string", - "enum": ["json"] + "enum": [ + "json" + ] }, { "description": "One of the specified string values", "type": "object", - "required": ["enum"], + "required": [ + "enum" + ], "properties": { "enum": { "type": "array", @@ -899,7 +1026,11 @@ "CompositeType": { "description": "Information about a composite type. 
These are very similar to tables, but with the crucial difference that composite types do not support constraints (such as NOT NULL).", "type": "object", - "required": ["fields", "schemaName", "typeName"], + "required": [ + "fields", + "schemaName", + "typeName" + ], "properties": { "typeName": { "type": "string" @@ -914,14 +1045,21 @@ } }, "description": { - "type": ["string", "null"] + "default": null, + "type": [ + "string", + "null" + ] } } }, "FieldInfo": { "description": "Information about a composite type field.", "type": "object", - "required": ["fieldName", "type"], + "required": [ + "fieldName", + "type" + ], "properties": { "fieldName": { "type": "string" @@ -930,7 +1068,11 @@ "$ref": "#/definitions/Type" }, "description": { - "type": ["string", "null"] + "default": null, + "type": [ + "string", + "null" + ] } } }, @@ -944,7 +1086,10 @@ "NativeQueryInfo": { "description": "Information about a Native Query", "type": "object", - "required": ["columns", "sql"], + "required": [ + "columns", + "sql" + ], "properties": { "sql": { "description": "SQL expression to use for the Native Query. We can interpolate values using `{{variable_name}}` syntax, such as `SELECT * FROM authors WHERE name = {{author_name}}`", @@ -970,7 +1115,11 @@ } }, "description": { - "type": ["string", "null"] + "default": null, + "type": [ + "string", + "null" + ] }, "isProcedure": { "description": "True if this native query mutates the database", @@ -984,7 +1133,9 @@ { "description": "Refer to an external Native Query SQL file.", "type": "object", - "required": ["file"], + "required": [ + "file" + ], "properties": { "file": { "description": "Relative path to a sql file.", @@ -995,7 +1146,9 @@ { "description": "Inline Native Query SQL string.", "type": "object", - "required": ["inline"], + "required": [ + "inline" + ], "properties": { "inline": { "description": "An inline Native Query SQL string.", @@ -1018,7 +1171,10 @@ "ReadOnlyColumnInfo": { "description": "Information about a native query column.", "type": "object", - "required": ["name", "type"], + "required": [ + "name", + "type" + ], "properties": { "name": { "type": "string" @@ -1035,7 +1191,11 @@ ] }, "description": { - "type": ["string", "null"] + "default": null, + "type": [ + "string", + "null" + ] } } }, @@ -1060,7 +1220,9 @@ }, "unqualifiedSchemasForTables": { "description": "The names of Tables and Views in these schemas will be returned unqualified. 
The default setting will set the `public` schema as unqualified.", - "default": ["public"], + "default": [ + "public" + ], "type": "array", "items": { "type": "string" @@ -1068,7 +1230,11 @@ }, "unqualifiedSchemasForTypesAndProcedures": { "description": "The types and procedures in these schemas will be returned unqualified.", - "default": ["public", "pg_catalog", "tiger"], + "default": [ + "public", + "pg_catalog", + "tiger" + ], "type": "array", "items": { "type": "string" @@ -1078,104 +1244,104 @@ "description": "The mapping of comparison operator names to apply when updating the configuration", "default": [ { - "operatorName": "=", "exposedName": "_eq", - "operatorKind": "equal" + "operatorKind": "equal", + "operatorName": "=" }, { - "operatorName": "<=", "exposedName": "_lte", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "<=" }, { - "operatorName": ">", "exposedName": "_gt", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": ">" }, { - "operatorName": ">=", "exposedName": "_gte", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": ">=" }, { - "operatorName": "<", "exposedName": "_lt", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "<" }, { - "operatorName": "!=", "exposedName": "_neq", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "!=" }, { - "operatorName": "LIKE", "exposedName": "_like", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "LIKE" }, { - "operatorName": "NOT LIKE", "exposedName": "_nlike", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "NOT LIKE" }, { - "operatorName": "ILIKE", "exposedName": "_ilike", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "ILIKE" }, { - "operatorName": "NOT ILIKE", "exposedName": "_nilike", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "NOT ILIKE" }, { - "operatorName": "SIMILAR TO", "exposedName": "_similar", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "SIMILAR TO" }, { - "operatorName": "NOT SIMILAR TO", "exposedName": "_nsimilar", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "NOT SIMILAR TO" }, { - "operatorName": "~~", "exposedName": "_like", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "~~" }, { - "operatorName": "!~~", "exposedName": "_nlike", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "!~~" }, { - "operatorName": "~~*", "exposedName": "_ilike", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "~~*" }, { - "operatorName": "!~~*", "exposedName": "_nilike", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "!~~*" }, { - "operatorName": "~", "exposedName": "_regex", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "~" }, { - "operatorName": "!~", "exposedName": "_nregex", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "!~" }, { - "operatorName": "~*", "exposedName": "_iregex", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "~*" }, { - "operatorName": "!~*", "exposedName": "_niregex", - "operatorKind": "custom" + "operatorKind": "custom", + "operatorName": "!~*" } ], "type": "array", @@ -1351,7 +1517,11 @@ "ComparisonOperatorMapping": { "description": "Define the names that comparison operators will be exposed as by the automatic introspection.", "type": "object", - "required": 
["exposedName", "operatorKind", "operatorName"], + "required": [ + "exposedName", + "operatorKind", + "operatorName" + ], "properties": { "operatorName": { "description": "The name of the operator as defined by the database", @@ -1381,7 +1551,10 @@ "MutationsVersion": { "description": "Which version of the generated mutations will be included in the schema", "type": "string", - "enum": ["v1", "veryExperimentalWip"] + "enum": [ + "v1", + "veryExperimentalWip" + ] } } } From e8f9b829dd1b4afef89b45bb4fddc1c9be657ff3 Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Mon, 1 Jul 2024 12:40:30 +0300 Subject: [PATCH 04/28] format --- crates/configuration/src/version4/mod.rs | 8 +- .../configuration.json | 81 ++--- .../v4-chinook-ndc-metadata/schema.json | 295 ++++-------------- 3 files changed, 90 insertions(+), 294 deletions(-) diff --git a/crates/configuration/src/version4/mod.rs b/crates/configuration/src/version4/mod.rs index ea53b7234..39a034222 100644 --- a/crates/configuration/src/version4/mod.rs +++ b/crates/configuration/src/version4/mod.rs @@ -273,12 +273,16 @@ pub async fn write_parsed_configuration( Ok(()) } -pub async fn attempt_to_find_type_name_for(connection_string: &String, oids: &[i32]) -> Result, sqlx::Error> { +/// +pub async fn attempt_to_find_type_name_for( + connection_string: &String, + oids: &[i32], +) -> Result, sqlx::Error> { let mut sqlx = PgConnection::connect(&connection_string) .instrument(info_span!("Connect to database")) .await?; - let query = + let query = sqlx::query("SELECT typnamespace::regnamespace::text as schema, typname as name, oid::integer FROM pg_type WHERE oid in (SELECT unnest($1))") .bind(oids); diff --git a/static/postgres/v4-chinook-ndc-metadata/configuration.json b/static/postgres/v4-chinook-ndc-metadata/configuration.json index 2fca9153e..64f7d62c1 100644 --- a/static/postgres/v4-chinook-ndc-metadata/configuration.json +++ b/static/postgres/v4-chinook-ndc-metadata/configuration.json @@ -46,9 +46,7 @@ } }, "uniquenessConstraints": { - "PK_Album": [ - "AlbumId" - ] + "PK_Album": ["AlbumId"] }, "foreignRelations": { "FK_AlbumArtistId": { @@ -83,9 +81,7 @@ } }, "uniquenessConstraints": { - "PK_Artist": [ - "ArtistId" - ] + "PK_Artist": ["ArtistId"] }, "foreignRelations": {}, "description": "The record of all artists" @@ -200,9 +196,7 @@ } }, "uniquenessConstraints": { - "PK_Customer": [ - "CustomerId" - ] + "PK_Customer": ["CustomerId"] }, "foreignRelations": { "FK_CustomerSupportRepId": { @@ -341,9 +335,7 @@ } }, "uniquenessConstraints": { - "PK_Employee": [ - "EmployeeId" - ] + "PK_Employee": ["EmployeeId"] }, "foreignRelations": { "FK_EmployeeReportsTo": { @@ -378,9 +370,7 @@ } }, "uniquenessConstraints": { - "PK_Genre": [ - "GenreId" - ] + "PK_Genre": ["GenreId"] }, "foreignRelations": {}, "description": null @@ -463,9 +453,7 @@ } }, "uniquenessConstraints": { - "PK_Invoice": [ - "InvoiceId" - ] + "PK_Invoice": ["InvoiceId"] }, "foreignRelations": { "FK_InvoiceCustomerId": { @@ -524,9 +512,7 @@ } }, "uniquenessConstraints": { - "PK_InvoiceLine": [ - "InvoiceLineId" - ] + "PK_InvoiceLine": ["InvoiceLineId"] }, "foreignRelations": { "FK_InvoiceLineInvoiceId": { @@ -568,9 +554,7 @@ } }, "uniquenessConstraints": { - "PK_MediaType": [ - "MediaTypeId" - ] + "PK_MediaType": ["MediaTypeId"] }, "foreignRelations": {}, "description": null @@ -597,9 +581,7 @@ } }, "uniquenessConstraints": { - "PK_Playlist": [ - "PlaylistId" - ] + "PK_Playlist": ["PlaylistId"] }, "foreignRelations": {}, "description": null @@ -626,10 +608,7 @@ } }, "uniquenessConstraints": { 
- "PK_PlaylistTrack": [ - "PlaylistId", - "TrackId" - ] + "PK_PlaylistTrack": ["PlaylistId", "TrackId"] }, "foreignRelations": { "FK_PlaylistTrackPlaylistId": { @@ -727,9 +706,7 @@ } }, "uniquenessConstraints": { - "PK_Track": [ - "TrackId" - ] + "PK_Track": ["TrackId"] }, "foreignRelations": { "FK_TrackAlbumId": { @@ -807,9 +784,7 @@ } }, "uniquenessConstraints": { - "defaults_pkey": [ - "id" - ] + "defaults_pkey": ["id"] }, "foreignRelations": {}, "description": null @@ -872,9 +847,7 @@ } }, "uniquenessConstraints": { - "dog_pkey": [ - "id" - ] + "dog_pkey": ["id"] }, "foreignRelations": {}, "description": null @@ -1034,9 +1007,7 @@ } }, "uniquenessConstraints": { - "spatial_ref_sys_pkey": [ - "srid" - ] + "spatial_ref_sys_pkey": ["srid"] }, "foreignRelations": {}, "description": null @@ -1112,10 +1083,7 @@ } }, "uniquenessConstraints": { - "layer_pkey": [ - "layer_id", - "topology_id" - ], + "layer_pkey": ["layer_id", "topology_id"], "layer_schema_name_table_name_feature_column_key": [ "feature_column", "schema_name", @@ -1181,12 +1149,8 @@ } }, "uniquenessConstraints": { - "topology_name_key": [ - "name" - ], - "topology_pkey": [ - "id" - ] + "topology_name_key": ["name"], + "topology_pkey": ["id"] }, "foreignRelations": {}, "description": null @@ -1453,12 +1417,7 @@ } }, "typeRepresentation": { - "enum": [ - "hearts", - "clubs", - "diamonds", - "spades" - ] + "enum": ["hearts", "clubs", "diamonds", "spades"] } }, "char": { @@ -3860,9 +3819,7 @@ "columnar", "columnar_internal" ], - "unqualifiedSchemasForTables": [ - "public" - ], + "unqualifiedSchemasForTables": ["public"], "unqualifiedSchemasForTypesAndProcedures": [ "public", "pg_catalog", diff --git a/static/postgres/v4-chinook-ndc-metadata/schema.json b/static/postgres/v4-chinook-ndc-metadata/schema.json index 930a7a39e..4b5461a34 100644 --- a/static/postgres/v4-chinook-ndc-metadata/schema.json +++ b/static/postgres/v4-chinook-ndc-metadata/schema.json @@ -3,9 +3,7 @@ "title": "ParsedConfiguration", "description": "Initial configuration, just enough to connect to a database and elaborate a full 'Configuration'.", "type": "object", - "required": [ - "version" - ], + "required": ["version"], "properties": { "version": { "$ref": "#/definitions/Version" @@ -13,10 +11,7 @@ "$schema": { "description": "Jsonschema of the configuration format.", "default": null, - "type": [ - "string", - "null" - ] + "type": ["string", "null"] }, "connectionSettings": { "description": "Database connection settings.", @@ -314,9 +309,7 @@ "uuid": "uUID", "varchar": "string" }, - "unqualifiedSchemasForTables": [ - "public" - ], + "unqualifiedSchemasForTables": ["public"], "unqualifiedSchemasForTypesAndProcedures": [ "public", "pg_catalog", @@ -345,16 +338,12 @@ "definitions": { "Version": { "type": "string", - "enum": [ - "4" - ] + "enum": ["4"] }, "DatabaseConnectionSettings": { "description": "Database connection settings.", "type": "object", - "required": [ - "connectionUri" - ], + "required": ["connectionUri"], "properties": { "connectionUri": { "description": "Connection string for a Postgres-compatible database.", @@ -400,9 +389,7 @@ }, { "type": "object", - "required": [ - "variable" - ], + "required": ["variable"], "properties": { "variable": { "$ref": "#/definitions/Variable" @@ -436,30 +423,21 @@ "idleTimeout": { "description": "idle timeout for releasing a connection from the pool (seconds)", "default": 180, - "type": [ - "integer", - "null" - ], + "type": ["integer", "null"], "format": "uint64", "minimum": 0.0 }, "checkConnectionAfterIdle": { 
"description": "check the connection is alive after being idle for N seconds. Set to null to always check.", "default": 60, - "type": [ - "integer", - "null" - ], + "type": ["integer", "null"], "format": "uint64", "minimum": 0.0 }, "connectionLifetime": { "description": "maximum lifetime for an individual connection (seconds)", "default": 600, - "type": [ - "integer", - "null" - ], + "type": ["integer", "null"], "format": "uint64", "minimum": 0.0 } @@ -471,23 +449,17 @@ { "description": "Prevents reading data from another uncommitted transaction.", "type": "string", - "enum": [ - "ReadCommitted" - ] + "enum": ["ReadCommitted"] }, { "description": "Reading the same data twice is guaranteed to return the same result.", "type": "string", - "enum": [ - "RepeatableRead" - ] + "enum": ["RepeatableRead"] }, { "description": "Concurrent transactions behave identically to serializing them one at a time.", "type": "string", - "enum": [ - "Serializable" - ] + "enum": ["Serializable"] } ] }, @@ -539,11 +511,7 @@ "TableInfo": { "description": "Information about a database table (or any other kind of relation).", "type": "object", - "required": [ - "columns", - "schemaName", - "tableName" - ], + "required": ["columns", "schemaName", "tableName"], "properties": { "schemaName": { "type": "string" @@ -575,20 +543,14 @@ }, "description": { "default": null, - "type": [ - "string", - "null" - ] + "type": ["string", "null"] } } }, "ColumnInfo": { "description": "Information about a database column.", "type": "object", - "required": [ - "name", - "type" - ], + "required": ["name", "type"], "properties": { "name": { "type": "string" @@ -615,10 +577,7 @@ }, "description": { "default": null, - "type": [ - "string", - "null" - ] + "type": ["string", "null"] } } }, @@ -627,9 +586,7 @@ "oneOf": [ { "type": "object", - "required": [ - "scalarType" - ], + "required": ["scalarType"], "properties": { "scalarType": { "$ref": "#/definitions/ScalarTypeName" @@ -639,9 +596,7 @@ }, { "type": "object", - "required": [ - "compositeType" - ], + "required": ["compositeType"], "properties": { "compositeType": { "$ref": "#/definitions/CompositeTypeName" @@ -651,9 +606,7 @@ }, { "type": "object", - "required": [ - "arrayType" - ], + "required": ["arrayType"], "properties": { "arrayType": { "$ref": "#/definitions/Type" @@ -674,35 +627,22 @@ "Nullable": { "description": "Can this column contain null values", "type": "string", - "enum": [ - "nullable", - "nonNullable" - ] + "enum": ["nullable", "nonNullable"] }, "HasDefault": { "description": "Does this column have a default value.", "type": "string", - "enum": [ - "noDefault", - "hasDefault" - ] + "enum": ["noDefault", "hasDefault"] }, "IsIdentity": { "description": "Is this column an identity column.", "type": "string", - "enum": [ - "notIdentity", - "identityByDefault", - "identityAlways" - ] + "enum": ["notIdentity", "identityByDefault", "identityAlways"] }, "IsGenerated": { "description": "Is this column a generated column.", "type": "string", - "enum": [ - "notGenerated", - "stored" - ] + "enum": ["notGenerated", "stored"] }, "UniquenessConstraints": { "description": "A mapping from the name of a unique constraint to its value.", @@ -729,16 +669,10 @@ "ForeignRelation": { "description": "A foreign key constraint.", "type": "object", - "required": [ - "columnMapping", - "foreignTable" - ], + "required": ["columnMapping", "foreignTable"], "properties": { "foreignSchema": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] }, "foreignTable": { "type": "string" @@ 
-775,10 +709,7 @@ "type": "string" }, "description": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] }, "aggregateFunctions": { "type": "object", @@ -806,9 +737,7 @@ }, "AggregateFunction": { "type": "object", - "required": [ - "returnType" - ], + "required": ["returnType"], "properties": { "returnType": { "$ref": "#/definitions/ScalarTypeName" @@ -818,11 +747,7 @@ "ComparisonOperator": { "description": "Represents a postgres binary comparison operator", "type": "object", - "required": [ - "argumentType", - "operatorKind", - "operatorName" - ], + "required": ["argumentType", "operatorKind", "operatorName"], "properties": { "operatorName": { "type": "string" @@ -842,11 +767,7 @@ "OperatorKind": { "description": "Is it a built-in operator, or a custom operator.", "type": "string", - "enum": [ - "equal", - "in", - "custom" - ] + "enum": ["equal", "in", "custom"] }, "TypeRepresentation": { "description": "Type representation of a scalar type.", @@ -854,156 +775,112 @@ { "description": "JSON booleans", "type": "string", - "enum": [ - "boolean" - ] + "enum": ["boolean"] }, { "description": "Any JSON string", "type": "string", - "enum": [ - "string" - ] + "enum": ["string"] }, { "description": "float4", "type": "string", - "enum": [ - "float32" - ] + "enum": ["float32"] }, { "description": "float8", "type": "string", - "enum": [ - "float64" - ] + "enum": ["float64"] }, { "description": "int2", "type": "string", - "enum": [ - "int16" - ] + "enum": ["int16"] }, { "description": "int4", "type": "string", - "enum": [ - "int32" - ] + "enum": ["int32"] }, { "description": "int8 as integer", "type": "string", - "enum": [ - "int64" - ] + "enum": ["int64"] }, { "description": "int8 as string", "type": "string", - "enum": [ - "int64AsString" - ] + "enum": ["int64AsString"] }, { "description": "numeric", "type": "string", - "enum": [ - "bigDecimal" - ] + "enum": ["bigDecimal"] }, { "description": "numeric as string", "type": "string", - "enum": [ - "bigDecimalAsString" - ] + "enum": ["bigDecimalAsString"] }, { "description": "timestamp", "type": "string", - "enum": [ - "timestamp" - ] + "enum": ["timestamp"] }, { "description": "timestamp with timezone", "type": "string", - "enum": [ - "timestamptz" - ] + "enum": ["timestamptz"] }, { "description": "time", "type": "string", - "enum": [ - "time" - ] + "enum": ["time"] }, { "description": "time with timezone", "type": "string", - "enum": [ - "timetz" - ] + "enum": ["timetz"] }, { "description": "date", "type": "string", - "enum": [ - "date" - ] + "enum": ["date"] }, { "description": "uuid", "type": "string", - "enum": [ - "uUID" - ] + "enum": ["uUID"] }, { "description": "geography", "type": "string", - "enum": [ - "geography" - ] + "enum": ["geography"] }, { "description": "geometry", "type": "string", - "enum": [ - "geometry" - ] + "enum": ["geometry"] }, { "description": "Any JSON number", "type": "string", - "enum": [ - "number" - ] + "enum": ["number"] }, { "description": "Any JSON number, with no decimal part", "type": "string", - "enum": [ - "integer" - ] + "enum": ["integer"] }, { "description": "An arbitrary json.", "type": "string", - "enum": [ - "json" - ] + "enum": ["json"] }, { "description": "One of the specified string values", "type": "object", - "required": [ - "enum" - ], + "required": ["enum"], "properties": { "enum": { "type": "array", @@ -1026,11 +903,7 @@ "CompositeType": { "description": "Information about a composite type. 
These are very similar to tables, but with the crucial difference that composite types do not support constraints (such as NOT NULL).", "type": "object", - "required": [ - "fields", - "schemaName", - "typeName" - ], + "required": ["fields", "schemaName", "typeName"], "properties": { "typeName": { "type": "string" @@ -1046,20 +919,14 @@ }, "description": { "default": null, - "type": [ - "string", - "null" - ] + "type": ["string", "null"] } } }, "FieldInfo": { "description": "Information about a composite type field.", "type": "object", - "required": [ - "fieldName", - "type" - ], + "required": ["fieldName", "type"], "properties": { "fieldName": { "type": "string" @@ -1069,10 +936,7 @@ }, "description": { "default": null, - "type": [ - "string", - "null" - ] + "type": ["string", "null"] } } }, @@ -1086,10 +950,7 @@ "NativeQueryInfo": { "description": "Information about a Native Query", "type": "object", - "required": [ - "columns", - "sql" - ], + "required": ["columns", "sql"], "properties": { "sql": { "description": "SQL expression to use for the Native Query. We can interpolate values using `{{variable_name}}` syntax, such as `SELECT * FROM authors WHERE name = {{author_name}}`", @@ -1116,10 +977,7 @@ }, "description": { "default": null, - "type": [ - "string", - "null" - ] + "type": ["string", "null"] }, "isProcedure": { "description": "True if this native query mutates the database", @@ -1133,9 +991,7 @@ { "description": "Refer to an external Native Query SQL file.", "type": "object", - "required": [ - "file" - ], + "required": ["file"], "properties": { "file": { "description": "Relative path to a sql file.", @@ -1146,9 +1002,7 @@ { "description": "Inline Native Query SQL string.", "type": "object", - "required": [ - "inline" - ], + "required": ["inline"], "properties": { "inline": { "description": "An inline Native Query SQL string.", @@ -1171,10 +1025,7 @@ "ReadOnlyColumnInfo": { "description": "Information about a native query column.", "type": "object", - "required": [ - "name", - "type" - ], + "required": ["name", "type"], "properties": { "name": { "type": "string" @@ -1192,10 +1043,7 @@ }, "description": { "default": null, - "type": [ - "string", - "null" - ] + "type": ["string", "null"] } } }, @@ -1220,9 +1068,7 @@ }, "unqualifiedSchemasForTables": { "description": "The names of Tables and Views in these schemas will be returned unqualified. 
The default setting will set the `public` schema as unqualified.", - "default": [ - "public" - ], + "default": ["public"], "type": "array", "items": { "type": "string" @@ -1230,11 +1076,7 @@ }, "unqualifiedSchemasForTypesAndProcedures": { "description": "The types and procedures in these schemas will be returned unqualified.", - "default": [ - "public", - "pg_catalog", - "tiger" - ], + "default": ["public", "pg_catalog", "tiger"], "type": "array", "items": { "type": "string" @@ -1517,11 +1359,7 @@ "ComparisonOperatorMapping": { "description": "Define the names that comparison operators will be exposed as by the automatic introspection.", "type": "object", - "required": [ - "exposedName", - "operatorKind", - "operatorName" - ], + "required": ["exposedName", "operatorKind", "operatorName"], "properties": { "operatorName": { "description": "The name of the operator as defined by the database", @@ -1551,10 +1389,7 @@ "MutationsVersion": { "description": "Which version of the generated mutations will be included in the schema", "type": "string", - "enum": [ - "v1", - "veryExperimentalWip" - ] + "enum": ["v1", "veryExperimentalWip"] } } } From d84048e07cfd2b7028947d53e5262e77ce7a0848 Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Mon, 1 Jul 2024 12:43:16 +0300 Subject: [PATCH 05/28] add some comments --- crates/cli/src/lib.rs | 102 +++++++++++++--------- crates/configuration/src/configuration.rs | 18 ++++ 2 files changed, 80 insertions(+), 40 deletions(-) diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs index 56a01528b..9b2339691 100644 --- a/crates/cli/src/lib.rs +++ b/crates/cli/src/lib.rs @@ -10,9 +10,9 @@ use std::path::PathBuf; use clap::Subcommand; use tokio::fs; +use configuration::version4::metadata as metadata_v4; use ndc_postgres_configuration as configuration; use ndc_postgres_configuration::environment::Environment; -use configuration::version4::metadata as metadatav4; const UPDATE_ATTEMPTS: u8 = 3; @@ -180,34 +180,27 @@ async fn upgrade(dir_from: PathBuf, dir_to: PathBuf) -> anyhow::Result<()> { Ok(()) } +/// Take a SQL file containing a Native Operation, check against the database that it is valid, +/// and add it to the configuration if it is. async fn create_native_operation( operation_path: PathBuf, context: Context, is_procedure: bool, ) -> anyhow::Result<()> { + // Read the SQL file. + let sql = std::fs::read_to_string(format!( + "{}/{}", + context.context_path.to_str().unwrap(), + &operation_path.to_str().unwrap() + ))?; + + // Prepare the Native Operation SQL so it can be checked against the db. let identifier = operation_path .file_stem() - .ok_or(anyhow::anyhow!("Oh no, file not found"))? + .ok_or(anyhow::anyhow!("SQL file not found"))? 
.to_str() - .ok_or(anyhow::anyhow!("Oh no, file not found"))?; - let sql = std::fs::read_to_string(format!("{}/{}", context.context_path.to_str().unwrap(), &operation_path.to_str().unwrap()))?; - let mut configuration = configuration::parse_configuration(context.context_path.clone()).await?; + .ok_or(anyhow::anyhow!("Could not convert SQL file name to string"))?; - let connection_uri = match configuration { - configuration::ParsedConfiguration::Version3(ref raw_configuration) => { - raw_configuration.connection_settings.connection_uri.clone() - } - configuration::ParsedConfiguration::Version4(ref configuration) => { - configuration.connection_settings.connection_uri.clone() - } - }; - - let connection_string = match connection_uri.0 { - configuration::Secret::Plain(connection_string) => connection_string, - configuration::Secret::FromEnvironment { variable } => std::env::var(variable.to_string())?, - }; - - let connection = libpq::Connection::new(&connection_string)?; let prepared_statement_name = format!("__hasura_inference_{identifier}"); let identifier_regex = regex::Regex::new(r"\{\{(?.*?)\}\}").unwrap(); @@ -227,58 +220,87 @@ async fn create_native_operation( final_statement = final_statement.replace(&format!("{{{{{name}}}}}"), &format!("${index}")); } + // Read the configuration. + let mut configuration = + configuration::parse_configuration(context.context_path.clone()).await?; + + // Connect to the db. + let connection_string = configuration.get_connection_uri()?; + let connection = libpq::Connection::new(&connection_string)?; + + // Prepare the SQL against the DB and fetch the description which contains + // the types of arguments and columns. let _ = connection.prepare(Some(&prepared_statement_name), &final_statement, &[]); let description = connection.describe_prepared(Some(&prepared_statement_name)); + // Extract the arguments and columns information into data structures. let mut arguments = std::collections::BTreeMap::new(); let mut columns = std::collections::BTreeMap::new(); - for param in 0 .. description.nparams() { + for param in 0..description.nparams() { + let parameter = parameters + .get(&(param + 1)) + .ok_or(anyhow::anyhow!( + "Internal error: parameter index not found." + ))? + .to_string(); arguments.insert( - parameters.get(&(param + 1)).ok_or(anyhow::anyhow!(":("))?.to_string(), - metadatav4::ReadOnlyColumnInfo { - name: parameters.get(&(param + 1)).ok_or(anyhow::anyhow!(":("))?.to_string(), - r#type: metadatav4::Type::ScalarType(metadatav4::ScalarTypeName(format!("{}", description.param_type(param).unwrap()))), + parameter.clone(), + metadata_v4::ReadOnlyColumnInfo { + name: parameter, + r#type: metadata_v4::Type::ScalarType(metadata_v4::ScalarTypeName(format!( + "{}", + description.param_type(param).unwrap() + ))), description: None, - nullable: metadatav4::Nullable::NonNullable, - } + nullable: metadata_v4::Nullable::NonNullable, + }, ); } - for field in 0 .. 
description.nfields() { + for field in 0..description.nfields() { + let column_name = description.field_name(field)?.unwrap(); columns.insert( - description.field_name(field)?.unwrap(), - metadatav4::ReadOnlyColumnInfo { - name: description.field_name(field)?.unwrap(), - r#type: metadatav4::Type::ScalarType(metadatav4::ScalarTypeName(format!("{}", description.field_type(field)))), + column_name.clone(), + metadata_v4::ReadOnlyColumnInfo { + name: column_name, + r#type: metadata_v4::Type::ScalarType(metadata_v4::ScalarTypeName(format!( + "{}", + description.field_type(field) + ))), description: None, - nullable: metadatav4::Nullable::NonNullable, - } + nullable: metadata_v4::Nullable::NonNullable, + }, ); } match configuration { configuration::ParsedConfiguration::Version3(_) => { - panic!(":(") + panic!("To use the create native operations command, please upgrade to the latest version.") } configuration::ParsedConfiguration::Version4(ref mut configuration) => { // TODO: should we overwrite or not configuration.metadata.native_queries.0.insert( identifier.to_string(), - metadatav4::NativeQueryInfo { - sql: metadatav4::NativeQuerySqlEither::NativeQuerySqlExternal( - metadatav4::NativeQuerySqlExternal::File { file: operation_path } + metadata_v4::NativeQueryInfo { + sql: metadata_v4::NativeQuerySqlEither::NativeQuerySqlExternal( + metadata_v4::NativeQuerySqlExternal::File { + file: operation_path, + }, ), arguments, columns, is_procedure, description: None, - } + }, ); } }; - println!("{:#?}", configuration::version4::attempt_to_find_type_name_for(&connection_string, &[23]).await); + println!( + "{:#?}", + configuration::version4::attempt_to_find_type_name_for(&connection_string, &[23]).await + ); configuration::write_parsed_configuration(configuration, context.context_path).await?; Ok(()) } diff --git a/crates/configuration/src/configuration.rs b/crates/configuration/src/configuration.rs index 56c5967ef..56f5ec320 100644 --- a/crates/configuration/src/configuration.rs +++ b/crates/configuration/src/configuration.rs @@ -45,6 +45,24 @@ impl ParsedConfiguration { pub fn initial() -> Self { ParsedConfiguration::Version4(version4::ParsedConfiguration::empty()) } + /// Extract the connection uri from the configuration + ENV if needed. + pub fn get_connection_uri(&self) -> Result { + let connection_uri = match self { + ParsedConfiguration::Version3(ref raw_configuration) => { + raw_configuration.connection_settings.connection_uri.clone() + } + ParsedConfiguration::Version4(ref configuration) => { + configuration.connection_settings.connection_uri.clone() + } + }; + + match connection_uri.0 { + super::values::Secret::Plain(connection_string) => Ok(connection_string), + super::values::Secret::FromEnvironment { variable } => { + Ok(std::env::var(variable.to_string())?) + } + } + } } /// The 'Configuration' type collects all the information necessary to serve queries at runtime. 
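As a reading aid for the flow in the patch above: a minimal, self-contained sketch of the `{{variable}}` -> `$n` placeholder rewrite that `create_native_operation` performs before preparing and describing the statement against the database. The helper name, the dedup-by-first-appearance behaviour, and the example SQL are illustrative only, not part of the patches; the real code is in the diffs above and below and, at this point in the series, relies on the `regex` crate.

// Sketch only: mirrors the `{{name}}` -> `$n` rewrite used by the CLI before PREPARE,
// assuming the `{{variable_name}}` placeholder syntax of Native Queries.
use std::collections::HashMap;

/// Replace each distinct `{{name}}` placeholder with a positional `$n` parameter
/// and return the rewritten SQL plus the name -> position mapping.
fn number_placeholders(sql: &str) -> (String, HashMap<String, usize>) {
    let re = regex::Regex::new(r"\{\{(?<name>.*?)\}\}").unwrap();
    let mut positions: HashMap<String, usize> = HashMap::new();

    for capture in re.captures_iter(sql) {
        let name = capture["name"].to_string();
        let next = positions.len() + 1;
        // Reuse the existing index if the same parameter appears more than once.
        positions.entry(name).or_insert(next);
    }

    let mut rewritten = sql.to_string();
    for (name, index) in &positions {
        rewritten = rewritten.replace(&format!("{{{{{name}}}}}"), &format!("${index}"));
    }

    (rewritten, positions)
}

fn main() {
    let (sql, params) = number_placeholders(
        r#"SELECT * FROM "Artist" WHERE "ArtistId" > {{lower}} AND "ArtistId" < {{upper}}"#,
    );
    // Prints: SELECT * FROM "Artist" WHERE "ArtistId" > $1 AND "ArtistId" < $2
    println!("{sql}");
    println!("{params:?}");
}

The rewritten statement is what gets sent to the prepare/describe round-trip, whose reported parameter and column OIDs are then mapped back to the named arguments.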
From b3cf91784cf9418a2676fd62849f15b0ca461f5f Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Mon, 1 Jul 2024 12:53:52 +0300 Subject: [PATCH 06/28] remove the test.sql entry in the config --- .../configuration.json | 42 ------------------- 1 file changed, 42 deletions(-) diff --git a/static/postgres/v4-chinook-ndc-metadata/configuration.json b/static/postgres/v4-chinook-ndc-metadata/configuration.json index 4cb77ccac..1e4f98063 100644 --- a/static/postgres/v4-chinook-ndc-metadata/configuration.json +++ b/static/postgres/v4-chinook-ndc-metadata/configuration.json @@ -3500,48 +3500,6 @@ }, "description": "A native query used to test support array-valued variables" }, - "test": { - "sql": { - "file": "test.sql" - }, - "columns": { - "ArtistId": { - "name": "ArtistId", - "type": { - "scalarType": "23" - }, - "nullable": "nonNullable", - "description": null - }, - "Name": { - "name": "Name", - "type": { - "scalarType": "1043" - }, - "nullable": "nonNullable", - "description": null - } - }, - "arguments": { - "gil": { - "name": "gil", - "type": { - "scalarType": "23" - }, - "nullable": "nonNullable", - "description": null - }, - "tom": { - "name": "tom", - "type": { - "scalarType": "23" - }, - "nullable": "nonNullable", - "description": null - } - }, - "description": null - }, "value_types": { "sql": { "inline": "SELECT {{bool}} as bool, {{int4}} as int4, {{int2}} as int2, {{int8}} as int8, {{float4}} as float4, {{float8}} as \"float8\", {{numeric}} as numeric, {{char}} as char, {{varchar}} as \"varchar\", {{text}} as text, {{date}} as date, {{time}} as time, {{timetz}} as timetz, {{timestamp}} as timestamp, {{timestamptz}} as timestamptz, {{uuid}} as uuid" From ffb7ea700521dd1044a17c137ba4c1e4e58fc566 Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Mon, 1 Jul 2024 17:32:05 +0300 Subject: [PATCH 07/28] implement oid lookup and move native operations code to different file --- crates/cli/src/lib.rs | 133 +---------------- crates/cli/src/native_operations.rs | 177 +++++++++++++++++++++++ crates/configuration/src/version4/mod.rs | 81 ++++++++--- test.sql | 1 - 4 files changed, 244 insertions(+), 148 deletions(-) create mode 100644 crates/cli/src/native_operations.rs delete mode 100644 test.sql diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs index 9b2339691..42a0b333e 100644 --- a/crates/cli/src/lib.rs +++ b/crates/cli/src/lib.rs @@ -4,13 +4,13 @@ //! then done, making it easier to test this crate deterministically. mod metadata; +mod native_operations; use std::path::PathBuf; use clap::Subcommand; use tokio::fs; -use configuration::version4::metadata as metadata_v4; use ndc_postgres_configuration as configuration; use ndc_postgres_configuration::environment::Environment; @@ -46,7 +46,7 @@ pub enum Command { operation_path: PathBuf, #[arg(long)] - is_procedure: bool, // we can make this neater later + kind: native_operations::Kind, }, } @@ -65,8 +65,8 @@ pub async fn run(command: Command, context: Context) -> anyhow Command::Upgrade { dir_from, dir_to } => upgrade(dir_from, dir_to).await?, Command::CreateNativeOperation { operation_path, - is_procedure, - } => create_native_operation(operation_path, context, is_procedure).await?, + kind, + } => native_operations::create(operation_path, context, kind).await?, }; Ok(()) } @@ -179,128 +179,3 @@ async fn upgrade(dir_from: PathBuf, dir_to: PathBuf) -> anyhow::Result<()> { Ok(()) } - -/// Take a SQL file containing a Native Operation, check against the database that it is valid, -/// and add it to the configuration if it is. 
-async fn create_native_operation( - operation_path: PathBuf, - context: Context, - is_procedure: bool, -) -> anyhow::Result<()> { - // Read the SQL file. - let sql = std::fs::read_to_string(format!( - "{}/{}", - context.context_path.to_str().unwrap(), - &operation_path.to_str().unwrap() - ))?; - - // Prepare the Native Operation SQL so it can be checked against the db. - let identifier = operation_path - .file_stem() - .ok_or(anyhow::anyhow!("SQL file not found"))? - .to_str() - .ok_or(anyhow::anyhow!("Could not convert SQL file name to string"))?; - - let prepared_statement_name = format!("__hasura_inference_{identifier}"); - - let identifier_regex = regex::Regex::new(r"\{\{(?.*?)\}\}").unwrap(); - let mut parameters = std::collections::HashMap::new(); - - for (index, (_, [name])) in identifier_regex - .captures_iter(&sql) - .map(|c| c.extract()) - .enumerate() - { - parameters.insert(index + 1, name); // We might use the same param twice - } - - let mut final_statement = sql.clone(); - - for (index, name) in ¶meters { - final_statement = final_statement.replace(&format!("{{{{{name}}}}}"), &format!("${index}")); - } - - // Read the configuration. - let mut configuration = - configuration::parse_configuration(context.context_path.clone()).await?; - - // Connect to the db. - let connection_string = configuration.get_connection_uri()?; - let connection = libpq::Connection::new(&connection_string)?; - - // Prepare the SQL against the DB and fetch the description which contains - // the types of arguments and columns. - let _ = connection.prepare(Some(&prepared_statement_name), &final_statement, &[]); - let description = connection.describe_prepared(Some(&prepared_statement_name)); - - // Extract the arguments and columns information into data structures. - let mut arguments = std::collections::BTreeMap::new(); - let mut columns = std::collections::BTreeMap::new(); - - for param in 0..description.nparams() { - let parameter = parameters - .get(&(param + 1)) - .ok_or(anyhow::anyhow!( - "Internal error: parameter index not found." - ))? 
- .to_string(); - arguments.insert( - parameter.clone(), - metadata_v4::ReadOnlyColumnInfo { - name: parameter, - r#type: metadata_v4::Type::ScalarType(metadata_v4::ScalarTypeName(format!( - "{}", - description.param_type(param).unwrap() - ))), - description: None, - nullable: metadata_v4::Nullable::NonNullable, - }, - ); - } - - for field in 0..description.nfields() { - let column_name = description.field_name(field)?.unwrap(); - columns.insert( - column_name.clone(), - metadata_v4::ReadOnlyColumnInfo { - name: column_name, - r#type: metadata_v4::Type::ScalarType(metadata_v4::ScalarTypeName(format!( - "{}", - description.field_type(field) - ))), - description: None, - nullable: metadata_v4::Nullable::NonNullable, - }, - ); - } - - match configuration { - configuration::ParsedConfiguration::Version3(_) => { - panic!("To use the create native operations command, please upgrade to the latest version.") - } - configuration::ParsedConfiguration::Version4(ref mut configuration) => { - // TODO: should we overwrite or not - configuration.metadata.native_queries.0.insert( - identifier.to_string(), - metadata_v4::NativeQueryInfo { - sql: metadata_v4::NativeQuerySqlEither::NativeQuerySqlExternal( - metadata_v4::NativeQuerySqlExternal::File { - file: operation_path, - }, - ), - arguments, - columns, - is_procedure, - description: None, - }, - ); - } - }; - - println!( - "{:#?}", - configuration::version4::attempt_to_find_type_name_for(&connection_string, &[23]).await - ); - configuration::write_parsed_configuration(configuration, context.context_path).await?; - Ok(()) -} diff --git a/crates/cli/src/native_operations.rs b/crates/cli/src/native_operations.rs new file mode 100644 index 000000000..354016f7f --- /dev/null +++ b/crates/cli/src/native_operations.rs @@ -0,0 +1,177 @@ +//! Handle the creation of Native Operations. + +use std::collections::{BTreeMap, BTreeSet}; +use std::path::PathBuf; + +use super::{update, Context}; +use configuration::version4::{metadata as metadata_v4, oids_to_typenames}; +use ndc_postgres_configuration as configuration; +use ndc_postgres_configuration::environment::Environment; + +/// Query or Mutation. +#[derive(Debug, Clone, clap::ValueEnum)] +pub enum Kind { + Query, + Mutation, +} + +/// Take a SQL file containing a Native Operation, check against the database that it is valid, +/// and add it to the configuration if it is. +pub async fn create( + operation_path: PathBuf, + context: Context, + kind: Kind, +) -> anyhow::Result<()> { + // Read the configuration. + let mut configuration = + configuration::parse_configuration(context.context_path.clone()).await?; + + // Connect to the db. + let connection_string = configuration.get_connection_uri()?; + let connection = libpq::Connection::new(&connection_string)?; + + // Create an entry for a Native Operation and insert it into the configuration. + match configuration { + configuration::ParsedConfiguration::Version3(_) => Err(anyhow::anyhow!( + "To use the create native operations command, please upgrade to the latest version." + ))?, + configuration::ParsedConfiguration::Version4(ref mut configuration) => { + // Read the SQL file. + let filepath = format!( + "{}/{}", + context.context_path.to_str().unwrap(), + &operation_path.to_str().unwrap() + ); + let sql = match std::fs::read_to_string(&filepath) { + Ok(sql) => sql, + Err(err) => Err(anyhow::anyhow!("Failed to read file '{filepath}': {}", err))?, + }; + + // Prepare the Native Operation SQL so it can be checked against the db. 
+ let identifier = operation_path + .file_stem() + .ok_or(anyhow::anyhow!("SQL file not found"))? + .to_str() + .ok_or(anyhow::anyhow!("Could not convert SQL file name to string"))?; + + let prepared_statement_name = format!("__hasura_inference_{identifier}"); + + let identifier_regex = regex::Regex::new(r"\{\{(?.*?)\}\}").unwrap(); + let mut parameters = std::collections::HashMap::new(); + + for (index, (_, [name])) in identifier_regex + .captures_iter(&sql) + .map(|c| c.extract()) + .enumerate() + { + parameters.insert(index + 1, name); // We might use the same param twice + } + + let mut final_statement = sql.clone(); + + for (index, name) in ¶meters { + final_statement = + final_statement.replace(&format!("{{{{{name}}}}}"), &format!("${index}")); + } + + // Prepare the SQL against the DB. + let result = connection.prepare(Some(&prepared_statement_name), &final_statement, &[]); + match result.error_message()? { + None => {} + Some(error_message) => Err(anyhow::anyhow!("{}", error_message))?, + } + + // Fetch the description which contains the types of arguments and columns. + let description = connection.describe_prepared(Some(&prepared_statement_name)); + + // Extract the arguments and columns information into data structures. + let mut arguments_to_oids = std::collections::BTreeMap::new(); + let mut columns_to_oids = std::collections::BTreeMap::new(); + + for param in 0..description.nparams() { + let parameter = (*parameters.get(&(param + 1)).ok_or(anyhow::anyhow!( + "Internal error: parameter index not found." + ))?) + .to_string(); + arguments_to_oids.insert( + parameter.clone(), + i64::from(description.param_type(param).ok_or(anyhow::anyhow!( + "Invalid OID for parameter '{}'.", + parameter.clone() + ))?), + ); + } + + for field in 0..description.nfields() { + let column_name = description.field_name(field)?.unwrap(); + columns_to_oids.insert( + column_name.clone(), + i64::from(description.field_type(field)), + ); + } + + let mut oids: BTreeSet = BTreeSet::from_iter(arguments_to_oids.values().cloned()); + oids.extend::>(BTreeSet::from_iter(columns_to_oids.values().cloned())); + let oids_vec: Vec<_> = oids.into_iter().collect(); + let oids_map = oids_to_typenames(&configuration, &connection_string, &oids_vec).await?; + + let mut arguments = BTreeMap::new(); + for (name, oid) in arguments_to_oids { + arguments.insert( + name.clone(), + metadata_v4::ReadOnlyColumnInfo { + name: name.clone(), + r#type: metadata_v4::Type::ScalarType(metadata_v4::ScalarTypeName( + oids_map.get(&oid).unwrap().0.clone(), + )), + description: None, + nullable: metadata_v4::Nullable::NonNullable, + }, + ); + } + let mut columns = BTreeMap::new(); + for (name, oid) in columns_to_oids { + columns.insert( + name.clone(), + metadata_v4::ReadOnlyColumnInfo { + name: name.clone(), + r#type: metadata_v4::Type::ScalarType(metadata_v4::ScalarTypeName( + oids_map.get(&oid).unwrap().0.clone(), + )), + description: None, + nullable: metadata_v4::Nullable::NonNullable, + }, + ); + } + + let new_native_operation = metadata_v4::NativeQueryInfo { + sql: metadata_v4::NativeQuerySqlEither::NativeQuerySqlExternal( + metadata_v4::NativeQuerySqlExternal::File { + file: operation_path.clone(), + }, + ), + arguments, + columns, + is_procedure: match kind { + Kind::Query => false, + Kind::Mutation => true, + }, + description: None, + }; + + // TODO: should we overwrite or not + configuration + .metadata + .native_queries + .0 + .insert(identifier.to_string(), new_native_operation); + } + }; + + // We write the configuration 
including the new Native Operation to file. + configuration::write_parsed_configuration(configuration, context.context_path.clone()).await?; + + // We update the configuration as well so that the introspection will add missing scalar type entries + // if necessary. + update(context).await +} diff --git a/crates/configuration/src/version4/mod.rs b/crates/configuration/src/version4/mod.rs index 39a034222..1504bb6fe 100644 --- a/crates/configuration/src/version4/mod.rs +++ b/crates/configuration/src/version4/mod.rs @@ -26,6 +26,8 @@ use crate::environment::Environment; use crate::error::{ParseConfigurationError, WriteParsedConfigurationError}; use crate::values::{ConnectionUri, Secret}; +use self::metadata::ScalarTypeName; + #[cfg(test)] mod tests; @@ -273,31 +275,74 @@ pub async fn write_parsed_configuration( Ok(()) } -/// -pub async fn attempt_to_find_type_name_for( - connection_string: &String, - oids: &[i32], -) -> Result, sqlx::Error> { - let mut sqlx = PgConnection::connect(&connection_string) +/// Representation of a result row returned from the oid lookup query. +#[derive(Debug, sqlx::FromRow)] +struct OidQueryRow { + schema_name: String, + type_name: String, + oid: i32, +} + +/// Given a vector of OIDs, ask postgres to provide the equivalent type names. +pub async fn oids_to_typenames( + configuration: &ParsedConfiguration, + connection_string: &str, + oids: &Vec, +) -> Result, sqlx::Error> { + let mut connection = PgConnection::connect(connection_string) .instrument(info_span!("Connect to database")) .await?; - let query = - sqlx::query("SELECT typnamespace::regnamespace::text as schema, typname as name, oid::integer FROM pg_type WHERE oid in (SELECT unnest($1))") - .bind(oids); + let rows: Vec = sqlx::query_as( + "SELECT + typnamespace::regnamespace::text as schema_name, + typname as type_name, + oid::integer + FROM pg_type + WHERE oid in (SELECT unnest($1)) + ", + ) + .bind(oids) + .fetch_all(&mut connection) + .instrument(info_span!("Run oid lookup query")) + .await?; - let rows = sqlx - .fetch_all(query) - .instrument(info_span!("Run oid lookup query")) - .await?; + let mut oids_map: BTreeMap = BTreeMap::new(); + // Reverse lookup the schema.typename and find the ndc type name, + // if we find all we can just add the nq and call it a day. for row in rows { - let schema: String = row.get(0); - let name: String = row.get(1); - let oid: i32 = row.get(2); + let schema_name: String = row.schema_name; + let type_name: String = row.type_name; + let oid: i64 = row.oid.into(); + + let mut found = false; + for (scalar_type_name, info) in configuration.metadata.scalar_types.0.iter() { + if info.schema_name == schema_name && info.type_name == type_name { + oids_map.insert(oid, scalar_type_name.clone()); + found = true; + continue; + } + } - println!("{schema}, {name}, {oid}"); + // If we don't find it we generate a name which is either schema_typename + // or just typename depending if the schema is in the unqualified list or not, + // then add the nq and run the introspection. 
+ if !found { + if configuration + .introspection_options + .unqualified_schemas_for_types_and_procedures + .contains(&schema_name) + { + oids_map.insert(oid, ScalarTypeName(type_name)); + } else { + oids_map.insert( + oid, + ScalarTypeName(format!("{}_{}", schema_name, type_name)), + ); + } + } } - Ok(BTreeMap::new()) + Ok(oids_map) } diff --git a/test.sql b/test.sql deleted file mode 100644 index ddff64943..000000000 --- a/test.sql +++ /dev/null @@ -1 +0,0 @@ -SELECT * FROM "Artist" WHERE "ArtistId" > {{gil}} AND "ArtistId" < {{tom}} From 9fb5b4bc5d9d4d002233c1dd8da0f39008225a99 Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Mon, 1 Jul 2024 17:35:05 +0300 Subject: [PATCH 08/28] undo changes to schema.json --- .../v4-chinook-ndc-metadata/schema.json | 224 +++++++++--------- 1 file changed, 108 insertions(+), 116 deletions(-) diff --git a/static/postgres/v4-chinook-ndc-metadata/schema.json b/static/postgres/v4-chinook-ndc-metadata/schema.json index aaf646a94..787e63813 100644 --- a/static/postgres/v4-chinook-ndc-metadata/schema.json +++ b/static/postgres/v4-chinook-ndc-metadata/schema.json @@ -10,7 +10,6 @@ }, "$schema": { "description": "Jsonschema of the configuration format.", - "default": null, "type": ["string", "null"] }, "connectionSettings": { @@ -19,14 +18,14 @@ "connectionUri": { "variable": "CONNECTION_URI" }, - "isolationLevel": "ReadCommitted", "poolSettings": { - "checkConnectionAfterIdle": 60, - "connectionLifetime": 600, - "idleTimeout": 180, "maxConnections": 50, - "poolTimeout": 30 - } + "poolTimeout": 30, + "idleTimeout": 180, + "checkConnectionAfterIdle": 60, + "connectionLifetime": 600 + }, + "isolationLevel": "ReadCommitted" }, "allOf": [ { @@ -37,10 +36,10 @@ "metadata": { "description": "Connector metadata.", "default": { - "compositeTypes": {}, - "nativeQueries": {}, + "tables": {}, "scalarTypes": {}, - "tables": {} + "compositeTypes": {}, + "nativeQueries": {} }, "allOf": [ { @@ -51,116 +50,122 @@ "introspectionOptions": { "description": "Database introspection options.", "default": { + "excludedSchemas": [ + "information_schema", + "pg_catalog", + "tiger", + "crdb_internal", + "columnar", + "columnar_internal" + ], + "unqualifiedSchemasForTables": ["public"], + "unqualifiedSchemasForTypesAndProcedures": [ + "public", + "pg_catalog", + "tiger" + ], "comparisonOperatorMapping": [ { + "operatorName": "=", "exposedName": "_eq", - "operatorKind": "equal", - "operatorName": "=" + "operatorKind": "equal" }, { + "operatorName": "<=", "exposedName": "_lte", - "operatorKind": "custom", - "operatorName": "<=" + "operatorKind": "custom" }, { + "operatorName": ">", "exposedName": "_gt", - "operatorKind": "custom", - "operatorName": ">" + "operatorKind": "custom" }, { + "operatorName": ">=", "exposedName": "_gte", - "operatorKind": "custom", - "operatorName": ">=" + "operatorKind": "custom" }, { + "operatorName": "<", "exposedName": "_lt", - "operatorKind": "custom", - "operatorName": "<" + "operatorKind": "custom" }, { + "operatorName": "!=", "exposedName": "_neq", - "operatorKind": "custom", - "operatorName": "!=" + "operatorKind": "custom" }, { + "operatorName": "LIKE", "exposedName": "_like", - "operatorKind": "custom", - "operatorName": "LIKE" + "operatorKind": "custom" }, { + "operatorName": "NOT LIKE", "exposedName": "_nlike", - "operatorKind": "custom", - "operatorName": "NOT LIKE" + "operatorKind": "custom" }, { + "operatorName": "ILIKE", "exposedName": "_ilike", - "operatorKind": "custom", - "operatorName": "ILIKE" + "operatorKind": "custom" }, { + "operatorName": 
"NOT ILIKE", "exposedName": "_nilike", - "operatorKind": "custom", - "operatorName": "NOT ILIKE" + "operatorKind": "custom" }, { + "operatorName": "SIMILAR TO", "exposedName": "_similar", - "operatorKind": "custom", - "operatorName": "SIMILAR TO" + "operatorKind": "custom" }, { + "operatorName": "NOT SIMILAR TO", "exposedName": "_nsimilar", - "operatorKind": "custom", - "operatorName": "NOT SIMILAR TO" + "operatorKind": "custom" }, { + "operatorName": "~~", "exposedName": "_like", - "operatorKind": "custom", - "operatorName": "~~" + "operatorKind": "custom" }, { + "operatorName": "!~~", "exposedName": "_nlike", - "operatorKind": "custom", - "operatorName": "!~~" + "operatorKind": "custom" }, { + "operatorName": "~~*", "exposedName": "_ilike", - "operatorKind": "custom", - "operatorName": "~~*" + "operatorKind": "custom" }, { + "operatorName": "!~~*", "exposedName": "_nilike", - "operatorKind": "custom", - "operatorName": "!~~*" + "operatorKind": "custom" }, { + "operatorName": "~", "exposedName": "_regex", - "operatorKind": "custom", - "operatorName": "~" + "operatorKind": "custom" }, { + "operatorName": "!~", "exposedName": "_nregex", - "operatorKind": "custom", - "operatorName": "!~" + "operatorKind": "custom" }, { + "operatorName": "~*", "exposedName": "_iregex", - "operatorKind": "custom", - "operatorName": "~*" + "operatorKind": "custom" }, { + "operatorName": "!~*", "exposedName": "_niregex", - "operatorKind": "custom", - "operatorName": "!~*" + "operatorKind": "custom" } ], - "excludedSchemas": [ - "information_schema", - "pg_catalog", - "tiger", - "crdb_internal", - "columnar", - "columnar_internal" - ], "introspectPrefixFunctionComparisonOperators": [ "box_above", "box_below", @@ -308,13 +313,7 @@ "timetz": "timetz", "uuid": "uUID", "varchar": "string" - }, - "unqualifiedSchemasForTables": ["public"], - "unqualifiedSchemasForTypesAndProcedures": [ - "public", - "pg_catalog", - "tiger" - ] + } }, "allOf": [ { @@ -324,7 +323,6 @@ }, "mutationsVersion": { "description": "Which version of the generated mutation procedures to include in the schema response", - "default": null, "anyOf": [ { "$ref": "#/definitions/MutationsVersion" @@ -356,11 +354,11 @@ "poolSettings": { "description": "Connection pool settings.", "default": { - "checkConnectionAfterIdle": 60, - "connectionLifetime": 600, - "idleTimeout": 180, "maxConnections": 50, - "poolTimeout": 30 + "poolTimeout": 30, + "idleTimeout": 180, + "checkConnectionAfterIdle": 60, + "connectionLifetime": 600 }, "allOf": [ { @@ -542,7 +540,6 @@ ] }, "description": { - "default": null, "type": ["string", "null"] } } @@ -576,7 +573,6 @@ "$ref": "#/definitions/IsGenerated" }, "description": { - "default": null, "type": ["string", "null"] } } @@ -918,7 +914,6 @@ } }, "description": { - "default": null, "type": ["string", "null"] } } @@ -935,7 +930,6 @@ "$ref": "#/definitions/Type" }, "description": { - "default": null, "type": ["string", "null"] } } @@ -976,7 +970,6 @@ } }, "description": { - "default": null, "type": ["string", "null"] }, "isProcedure": { @@ -1042,7 +1035,6 @@ ] }, "description": { - "default": null, "type": ["string", "null"] } } @@ -1086,104 +1078,104 @@ "description": "The mapping of comparison operator names to apply when updating the configuration", "default": [ { + "operatorName": "=", "exposedName": "_eq", - "operatorKind": "equal", - "operatorName": "=" + "operatorKind": "equal" }, { + "operatorName": "<=", "exposedName": "_lte", - "operatorKind": "custom", - "operatorName": "<=" + "operatorKind": "custom" }, { + 
"operatorName": ">", "exposedName": "_gt", - "operatorKind": "custom", - "operatorName": ">" + "operatorKind": "custom" }, { + "operatorName": ">=", "exposedName": "_gte", - "operatorKind": "custom", - "operatorName": ">=" + "operatorKind": "custom" }, { + "operatorName": "<", "exposedName": "_lt", - "operatorKind": "custom", - "operatorName": "<" + "operatorKind": "custom" }, { + "operatorName": "!=", "exposedName": "_neq", - "operatorKind": "custom", - "operatorName": "!=" + "operatorKind": "custom" }, { + "operatorName": "LIKE", "exposedName": "_like", - "operatorKind": "custom", - "operatorName": "LIKE" + "operatorKind": "custom" }, { + "operatorName": "NOT LIKE", "exposedName": "_nlike", - "operatorKind": "custom", - "operatorName": "NOT LIKE" + "operatorKind": "custom" }, { + "operatorName": "ILIKE", "exposedName": "_ilike", - "operatorKind": "custom", - "operatorName": "ILIKE" + "operatorKind": "custom" }, { + "operatorName": "NOT ILIKE", "exposedName": "_nilike", - "operatorKind": "custom", - "operatorName": "NOT ILIKE" + "operatorKind": "custom" }, { + "operatorName": "SIMILAR TO", "exposedName": "_similar", - "operatorKind": "custom", - "operatorName": "SIMILAR TO" + "operatorKind": "custom" }, { + "operatorName": "NOT SIMILAR TO", "exposedName": "_nsimilar", - "operatorKind": "custom", - "operatorName": "NOT SIMILAR TO" + "operatorKind": "custom" }, { + "operatorName": "~~", "exposedName": "_like", - "operatorKind": "custom", - "operatorName": "~~" + "operatorKind": "custom" }, { + "operatorName": "!~~", "exposedName": "_nlike", - "operatorKind": "custom", - "operatorName": "!~~" + "operatorKind": "custom" }, { + "operatorName": "~~*", "exposedName": "_ilike", - "operatorKind": "custom", - "operatorName": "~~*" + "operatorKind": "custom" }, { + "operatorName": "!~~*", "exposedName": "_nilike", - "operatorKind": "custom", - "operatorName": "!~~*" + "operatorKind": "custom" }, { + "operatorName": "~", "exposedName": "_regex", - "operatorKind": "custom", - "operatorName": "~" + "operatorKind": "custom" }, { + "operatorName": "!~", "exposedName": "_nregex", - "operatorKind": "custom", - "operatorName": "!~" + "operatorKind": "custom" }, { + "operatorName": "~*", "exposedName": "_iregex", - "operatorKind": "custom", - "operatorName": "~*" + "operatorKind": "custom" }, { + "operatorName": "!~*", "exposedName": "_niregex", - "operatorKind": "custom", - "operatorName": "!~*" + "operatorKind": "custom" } ], "type": "array", From 452a4900738c3a519e66b561d358318279020f89 Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Mon, 1 Jul 2024 18:12:44 +0300 Subject: [PATCH 09/28] use native operations parser --- Cargo.lock | 2 + crates/cli/Cargo.toml | 1 + crates/cli/src/native_operations.rs | 54 ++++++++----------- crates/configuration/Cargo.toml | 1 + .../src/version4/metadata/native_queries.rs | 19 +++++++ 5 files changed, 45 insertions(+), 32 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0c31f3234..fc6158ca6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1551,6 +1551,7 @@ dependencies = [ "insta", "libpq", "ndc-postgres-configuration", + "query-engine-sql", "regex", "serde", "serde_json", @@ -1569,6 +1570,7 @@ dependencies = [ "jsonschema", "prometheus", "query-engine-metadata", + "query-engine-sql", "schemars", "serde", "serde_json", diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index 00d9e5e0b..f8777e0b9 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -9,6 +9,7 @@ workspace = true [dependencies] ndc-postgres-configuration = { path = "../configuration" } 
+query-engine-sql = { path = "../query-engine/sql" } anyhow = { workspace = true } clap = { workspace = true, features = ["derive", "env"] } diff --git a/crates/cli/src/native_operations.rs b/crates/cli/src/native_operations.rs index 354016f7f..54ae6a3fd 100644 --- a/crates/cli/src/native_operations.rs +++ b/crates/cli/src/native_operations.rs @@ -7,6 +7,7 @@ use super::{update, Context}; use configuration::version4::{metadata as metadata_v4, oids_to_typenames}; use ndc_postgres_configuration as configuration; use ndc_postgres_configuration::environment::Environment; +use query_engine_sql::sql; /// Query or Mutation. #[derive(Debug, Clone, clap::ValueEnum)] @@ -37,15 +38,11 @@ pub async fn create( ))?, configuration::ParsedConfiguration::Version4(ref mut configuration) => { // Read the SQL file. - let filepath = format!( - "{}/{}", - context.context_path.to_str().unwrap(), - &operation_path.to_str().unwrap() - ); - let sql = match std::fs::read_to_string(&filepath) { - Ok(sql) => sql, - Err(err) => Err(anyhow::anyhow!("Failed to read file '{filepath}': {}", err))?, - }; + let parsed_file = configuration::version4::metadata::parse_native_query_from_file( + &context.context_path, + &operation_path, + ) + .map_err(|err| anyhow::anyhow!("{}", err))?; // Prepare the Native Operation SQL so it can be checked against the db. let identifier = operation_path @@ -55,27 +52,10 @@ pub async fn create( .ok_or(anyhow::anyhow!("Could not convert SQL file name to string"))?; let prepared_statement_name = format!("__hasura_inference_{identifier}"); - - let identifier_regex = regex::Regex::new(r"\{\{(?.*?)\}\}").unwrap(); - let mut parameters = std::collections::HashMap::new(); - - for (index, (_, [name])) in identifier_regex - .captures_iter(&sql) - .map(|c| c.extract()) - .enumerate() - { - parameters.insert(index + 1, name); // We might use the same param twice - } - - let mut final_statement = sql.clone(); - - for (index, name) in ¶meters { - final_statement = - final_statement.replace(&format!("{{{{{name}}}}}"), &format!("${index}")); - } + let sql = parsed_file.sql().to_sql(); // Prepare the SQL against the DB. - let result = connection.prepare(Some(&prepared_statement_name), &final_statement, &[]); + let result = connection.prepare(Some(&prepared_statement_name), &sql.sql, &[]); match result.error_message()? { None => {} Some(error_message) => Err(anyhow::anyhow!("{}", error_message))?, @@ -83,16 +63,26 @@ pub async fn create( // Fetch the description which contains the types of arguments and columns. let description = connection.describe_prepared(Some(&prepared_statement_name)); + match description.error_message()? { + None => {} + Some(error_message) => Err(anyhow::anyhow!("{}", error_message))?, + } // Extract the arguments and columns information into data structures. let mut arguments_to_oids = std::collections::BTreeMap::new(); let mut columns_to_oids = std::collections::BTreeMap::new(); for param in 0..description.nparams() { - let parameter = (*parameters.get(&(param + 1)).ok_or(anyhow::anyhow!( - "Internal error: parameter index not found." - ))?) - .to_string(); + let parameter = if let sql::string::Param::Variable(param) = + sql.params.get(param).ok_or(anyhow::anyhow!( + "Internal error: parameter index not found." + ))? { + param.to_string() + } else { + Err(anyhow::anyhow!( + "Internal error: unexpected non-variable parameter." + ))? 
+ }; arguments_to_oids.insert( parameter.clone(), i64::from(description.param_type(param).ok_or(anyhow::anyhow!( diff --git a/crates/configuration/Cargo.toml b/crates/configuration/Cargo.toml index efae1c04c..3bff860fe 100644 --- a/crates/configuration/Cargo.toml +++ b/crates/configuration/Cargo.toml @@ -9,6 +9,7 @@ workspace = true [dependencies] query-engine-metadata = { path = "../query-engine/metadata" } +query-engine-sql = { path = "../query-engine/sql" } anyhow = { workspace = true } prometheus = {workspace = true } diff --git a/crates/configuration/src/version4/metadata/native_queries.rs b/crates/configuration/src/version4/metadata/native_queries.rs index 2630e11c8..58ea7c400 100644 --- a/crates/configuration/src/version4/metadata/native_queries.rs +++ b/crates/configuration/src/version4/metadata/native_queries.rs @@ -5,6 +5,8 @@ #![allow(clippy::wrong_self_convention)] use super::database::*; +use query_engine_sql::sql; + use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use std::collections::BTreeMap; @@ -227,6 +229,23 @@ impl From for String { } } +impl NativeQueryParts { + pub fn to_sql(&self) -> sql::string::SQL { + let mut sql = sql::string::SQL::new(); + + for part in self.0.iter() { + match part { + NativeQueryPart::Text(text) => sql.append_syntax(text), + NativeQueryPart::Parameter(param) => { + sql.append_param(sql::string::Param::Variable(param.to_string())) + } + } + } + + sql + } +} + impl JsonSchema for NativeQueryParts { fn schema_name() -> String { "InlineNativeQuerySql".to_string() From 149220443e45a67da840030e21dfc934f81321a8 Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Mon, 1 Jul 2024 18:17:11 +0300 Subject: [PATCH 10/28] remove regex dep --- Cargo.lock | 1 - Cargo.toml | 1 + crates/cli/Cargo.toml | 3 +-- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fc6158ca6..a998bbf05 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1552,7 +1552,6 @@ dependencies = [ "libpq", "ndc-postgres-configuration", "query-engine-sql", - "regex", "serde", "serde_json", "serde_yaml", diff --git a/Cargo.toml b/Cargo.toml index 595d4f0e0..aacec2bfa 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,6 +47,7 @@ hyper = "0.14" indexmap = "2" insta = "1" jsonschema = "0.17" +libpq = "4.0.0" multimap = "0.9" nonempty = "0.10" percent-encoding = "2" diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index f8777e0b9..81df7da53 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -18,8 +18,7 @@ serde_json = { workspace = true } serde_yaml = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true, features = ["full"] } -libpq = "4.0.0" -regex = "1.10.5" +libpq = { workspace = true } [build-dependencies] build-data = { workspace = true } From 080fb86f136eedd6be2954e3d6724fe75d752352 Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Mon, 1 Jul 2024 18:23:41 +0300 Subject: [PATCH 11/28] fix warnings --- crates/cli/src/native_operations.rs | 6 +++--- .../configuration/src/version4/metadata/native_queries.rs | 4 ++-- crates/configuration/src/version4/mod.rs | 7 ++----- 3 files changed, 7 insertions(+), 10 deletions(-) diff --git a/crates/cli/src/native_operations.rs b/crates/cli/src/native_operations.rs index 54ae6a3fd..d3f6b6377 100644 --- a/crates/cli/src/native_operations.rs +++ b/crates/cli/src/native_operations.rs @@ -100,10 +100,10 @@ pub async fn create( ); } - let mut oids: BTreeSet = BTreeSet::from_iter(arguments_to_oids.values().cloned()); - 
oids.extend::>(BTreeSet::from_iter(columns_to_oids.values().cloned())); + let mut oids: BTreeSet = arguments_to_oids.values().copied().collect(); + oids.extend::>(columns_to_oids.values().copied().collect()); let oids_vec: Vec<_> = oids.into_iter().collect(); - let oids_map = oids_to_typenames(&configuration, &connection_string, &oids_vec).await?; + let oids_map = oids_to_typenames(configuration, &connection_string, &oids_vec).await?; let mut arguments = BTreeMap::new(); for (name, oid) in arguments_to_oids { diff --git a/crates/configuration/src/version4/metadata/native_queries.rs b/crates/configuration/src/version4/metadata/native_queries.rs index 58ea7c400..f6e3658b7 100644 --- a/crates/configuration/src/version4/metadata/native_queries.rs +++ b/crates/configuration/src/version4/metadata/native_queries.rs @@ -233,11 +233,11 @@ impl NativeQueryParts { pub fn to_sql(&self) -> sql::string::SQL { let mut sql = sql::string::SQL::new(); - for part in self.0.iter() { + for part in &self.0 { match part { NativeQueryPart::Text(text) => sql.append_syntax(text), NativeQueryPart::Parameter(param) => { - sql.append_param(sql::string::Param::Variable(param.to_string())) + sql.append_param(sql::string::Param::Variable(param.to_string())); } } } diff --git a/crates/configuration/src/version4/mod.rs b/crates/configuration/src/version4/mod.rs index 1504bb6fe..cbda6fef9 100644 --- a/crates/configuration/src/version4/mod.rs +++ b/crates/configuration/src/version4/mod.rs @@ -317,7 +317,7 @@ pub async fn oids_to_typenames( let oid: i64 = row.oid.into(); let mut found = false; - for (scalar_type_name, info) in configuration.metadata.scalar_types.0.iter() { + for (scalar_type_name, info) in &configuration.metadata.scalar_types.0 { if info.schema_name == schema_name && info.type_name == type_name { oids_map.insert(oid, scalar_type_name.clone()); found = true; @@ -336,10 +336,7 @@ pub async fn oids_to_typenames( { oids_map.insert(oid, ScalarTypeName(type_name)); } else { - oids_map.insert( - oid, - ScalarTypeName(format!("{}_{}", schema_name, type_name)), - ); + oids_map.insert(oid, ScalarTypeName(format!("{schema_name}_{type_name}"))); } } } From b14d25d3204f4792334dca414f6ee68a68f1046a Mon Sep 17 00:00:00 2001 From: Samir Talwar Date: Tue, 2 Jul 2024 10:44:04 +0200 Subject: [PATCH 12/28] Add `postgresql_16` as a Nix build-time dependency. --- nix/app.nix | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nix/app.nix b/nix/app.nix index d4b04e0b3..0a674fddb 100644 --- a/nix/app.nix +++ b/nix/app.nix @@ -5,6 +5,7 @@ , openssl , libiconv , pkg-config +, postgresql_16 , protobuf , darwin }: @@ -30,6 +31,7 @@ let nativeBuildInputs = [ openssl.dev # required to build Rust crates that can conduct TLS connections pkg-config # required to find OpenSSL + postgresql_16 # only for libpq; replace with `libpq` after https://github.com/NixOS/nixpkgs/pull/294504 is merged ]; # runtime inputs From c6da54cebf809bc9d5a7a883f56bea6c8912034b Mon Sep 17 00:00:00 2001 From: Samir Talwar Date: Tue, 2 Jul 2024 10:47:37 +0200 Subject: [PATCH 13/28] Add libclang too. 
--- nix/app.nix | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nix/app.nix b/nix/app.nix index 0a674fddb..bd7916d30 100644 --- a/nix/app.nix +++ b/nix/app.nix @@ -4,6 +4,7 @@ , hostPlatform , openssl , libiconv +, llvmPackages , pkg-config , postgresql_16 , protobuf @@ -30,6 +31,7 @@ let # build-time inputs nativeBuildInputs = [ openssl.dev # required to build Rust crates that can conduct TLS connections + llvmPackages.libclang # required to build libpq pkg-config # required to find OpenSSL postgresql_16 # only for libpq; replace with `libpq` after https://github.com/NixOS/nixpkgs/pull/294504 is merged ]; From 5cd7e1cdde84c88eef9dc11376a2677453a8f180 Mon Sep 17 00:00:00 2001 From: Philip Lykke Carlsen Date: Tue, 2 Jul 2024 15:35:52 +0200 Subject: [PATCH 14/28] sqlx turned out to be adequate --- Cargo.lock | 113 +--------------------------- Cargo.toml | 1 - crates/cli/Cargo.toml | 2 +- crates/cli/src/native_operations.rs | 77 ++++++++++--------- nix/app.nix | 2 - 5 files changed, 42 insertions(+), 153 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a998bbf05..4b14edfc8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -285,28 +285,6 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" -[[package]] -name = "bindgen" -version = "0.64.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4243e6031260db77ede97ad86c27e501d646a27ab57b59a574f725d98ab1fb4" -dependencies = [ - "bitflags 1.3.2", - "cexpr", - "clang-sys", - "lazy_static", - "lazycell", - "log", - "peeking_take_while", - "proc-macro2", - "quote", - "regex", - "rustc-hash", - "shlex", - "syn 1.0.109", - "which", -] - [[package]] name = "bit-set" version = "0.5.3" @@ -397,15 +375,6 @@ version = "1.0.99" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96c51067fd44124faa7f870b4b1c969379ad32b2ba805aa959430ceaa384f695" -[[package]] -name = "cexpr" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" -dependencies = [ - "nom", -] - [[package]] name = "cfg-if" version = "1.0.0" @@ -425,17 +394,6 @@ dependencies = [ "windows-targets 0.52.5", ] -[[package]] -name = "clang-sys" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" -dependencies = [ - "glob", - "libc", - "libloading", -] - [[package]] name = "clap" version = "4.5.7" @@ -1319,57 +1277,18 @@ dependencies = [ "spin 0.5.2", ] -[[package]] -name = "lazycell" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" - [[package]] name = "libc" version = "0.2.155" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" -[[package]] -name = "libloading" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e310b3a6b5907f99202fcdb4960ff45b93735d7c7d96b760fcff8db2dc0e103d" -dependencies = [ - "cfg-if", - "windows-targets 0.52.5", -] - [[package]] name = "libm" version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" -[[package]] -name = "libpq" -version = "4.0.0" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0742f5e3894a62de35af0817f8fc801ebce542741e37693975d31ce62d120e8" -dependencies = [ - "libc", - "libpq-sys", - "log", - "thiserror", -] - -[[package]] -name = "libpq-sys" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ef060ac05c207c85da15f4eb629100c8782e0db4c06a3c91c86be9c18ae8a23" -dependencies = [ - "bindgen", - "pkg-config", - "vcpkg", -] - [[package]] name = "libsqlite3-sys" version = "0.27.0" @@ -1549,12 +1468,12 @@ dependencies = [ "build-data", "clap", "insta", - "libpq", "ndc-postgres-configuration", "query-engine-sql", "serde", "serde_json", "serde_yaml", + "sqlx", "tempfile", "thiserror", "tokio", @@ -1992,12 +1911,6 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" -[[package]] -name = "peeking_take_while" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" - [[package]] name = "pem-rfc7468" version = "0.7.0" @@ -2416,12 +2329,6 @@ version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" -[[package]] -name = "rustc-hash" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" - [[package]] name = "rustix" version = "0.38.34" @@ -2798,12 +2705,6 @@ dependencies = [ "lazy_static", ] -[[package]] -name = "shlex" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" - [[package]] name = "signal-hook-registry" version = "1.4.2" @@ -3890,18 +3791,6 @@ version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" -[[package]] -name = "which" -version = "4.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" -dependencies = [ - "either", - "home", - "once_cell", - "rustix", -] - [[package]] name = "whoami" version = "1.5.1" diff --git a/Cargo.toml b/Cargo.toml index aacec2bfa..595d4f0e0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,7 +47,6 @@ hyper = "0.14" indexmap = "2" insta = "1" jsonschema = "0.17" -libpq = "4.0.0" multimap = "0.9" nonempty = "0.10" percent-encoding = "2" diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index 81df7da53..1cf5d5d9b 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -18,7 +18,7 @@ serde_json = { workspace = true } serde_yaml = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true, features = ["full"] } -libpq = { workspace = true } +sqlx = { workspace = true, features = ["postgres"] } [build-dependencies] build-data = { workspace = true } diff --git a/crates/cli/src/native_operations.rs b/crates/cli/src/native_operations.rs index d3f6b6377..3ccf720c3 100644 --- a/crates/cli/src/native_operations.rs +++ b/crates/cli/src/native_operations.rs @@ -1,14 +1,20 @@ //! Handle the creation of Native Operations. 
+use std::any::Any; use std::collections::{BTreeMap, BTreeSet}; use std::path::PathBuf; use super::{update, Context}; +use anyhow::anyhow; use configuration::version4::{metadata as metadata_v4, oids_to_typenames}; use ndc_postgres_configuration as configuration; use ndc_postgres_configuration::environment::Environment; use query_engine_sql::sql; +use sqlx::Column; +use sqlx::Connection; +use sqlx::Executor; + /// Query or Mutation. #[derive(Debug, Clone, clap::ValueEnum)] pub enum Kind { @@ -29,7 +35,7 @@ pub async fn create( // Connect to the db. let connection_string = configuration.get_connection_uri()?; - let connection = libpq::Connection::new(&connection_string)?; + let mut connection = sqlx::PgConnection::connect(&connection_string).await?; // Create an entry for a Native Operation and insert it into the configuration. match configuration { @@ -51,53 +57,50 @@ pub async fn create( .to_str() .ok_or(anyhow::anyhow!("Could not convert SQL file name to string"))?; - let prepared_statement_name = format!("__hasura_inference_{identifier}"); let sql = parsed_file.sql().to_sql(); // Prepare the SQL against the DB. - let result = connection.prepare(Some(&prepared_statement_name), &sql.sql, &[]); - match result.error_message()? { - None => {} - Some(error_message) => Err(anyhow::anyhow!("{}", error_message))?, - } - - // Fetch the description which contains the types of arguments and columns. - let description = connection.describe_prepared(Some(&prepared_statement_name)); - match description.error_message()? { - None => {} - Some(error_message) => Err(anyhow::anyhow!("{}", error_message))?, - } + let result = connection.describe(&sql.sql).await?; // Extract the arguments and columns information into data structures. let mut arguments_to_oids = std::collections::BTreeMap::new(); let mut columns_to_oids = std::collections::BTreeMap::new(); - for param in 0..description.nparams() { - let parameter = if let sql::string::Param::Variable(param) = - sql.params.get(param).ok_or(anyhow::anyhow!( - "Internal error: parameter index not found." - ))? { - param.to_string() - } else { - Err(anyhow::anyhow!( - "Internal error: unexpected non-variable parameter." - ))? + let result_parameters = match result.parameters { + Some(sqlx::Either::Left(parameters)) => parameters, + _ => anyhow::bail!("Impossible: sqlx params should always be a vector"), + }; + + if result_parameters.len() != sql.params.len() { + anyhow::bail!("Unexpected error: Parameters of native query and sql statement are not aligned") + } + + for (result_param, sql_param) in result_parameters.into_iter().zip(sql.params.iter()) { + let param_name = match sql_param { + sql::string::Param::Variable(v) => v, + _ => anyhow::bail!("Impossible: Native query parameter was not a variable"), }; - arguments_to_oids.insert( - parameter.clone(), - i64::from(description.param_type(param).ok_or(anyhow::anyhow!( - "Invalid OID for parameter '{}'.", - parameter.clone() - ))?), - ); + + let the_oid = result_param + .oid() + .ok_or_else(|| { + anyhow::anyhow!("Impossible: All sqlx TypeInfos should have an oid") + })? + .0; + + arguments_to_oids.insert(param_name, i64::from(the_oid)); } - for field in 0..description.nfields() { - let column_name = description.field_name(field)?.unwrap(); - columns_to_oids.insert( - column_name.clone(), - i64::from(description.field_type(field)), - ); + for column in result.columns { + let the_oid = column + .type_info() + .oid() + .ok_or_else(|| { + anyhow::anyhow!("Impossible: All sqlx TypeInfos should have an oid") + })? 
+ .0; + + columns_to_oids.insert(column.name().to_string(), i64::from(the_oid)); } let mut oids: BTreeSet = arguments_to_oids.values().copied().collect(); diff --git a/nix/app.nix b/nix/app.nix index bd7916d30..ace6ed047 100644 --- a/nix/app.nix +++ b/nix/app.nix @@ -31,9 +31,7 @@ let # build-time inputs nativeBuildInputs = [ openssl.dev # required to build Rust crates that can conduct TLS connections - llvmPackages.libclang # required to build libpq pkg-config # required to find OpenSSL - postgresql_16 # only for libpq; replace with `libpq` after https://github.com/NixOS/nixpkgs/pull/294504 is merged ]; # runtime inputs From 496f7a62423e49b1a299f78f65784fa560df2561 Mon Sep 17 00:00:00 2001 From: Philip Lykke Carlsen Date: Tue, 2 Jul 2024 15:38:18 +0200 Subject: [PATCH 15/28] Remove leftovers --- nix/app.nix | 2 -- 1 file changed, 2 deletions(-) diff --git a/nix/app.nix b/nix/app.nix index ace6ed047..d4b04e0b3 100644 --- a/nix/app.nix +++ b/nix/app.nix @@ -4,9 +4,7 @@ , hostPlatform , openssl , libiconv -, llvmPackages , pkg-config -, postgresql_16 , protobuf , darwin }: From 73186bcd32ca5c7d87f3aba40de9ea7af1cfff77 Mon Sep 17 00:00:00 2001 From: Philip Lykke Carlsen Date: Tue, 2 Jul 2024 15:43:20 +0200 Subject: [PATCH 16/28] linting --- crates/cli/src/native_operations.rs | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/crates/cli/src/native_operations.rs b/crates/cli/src/native_operations.rs index 3ccf720c3..83587ed3d 100644 --- a/crates/cli/src/native_operations.rs +++ b/crates/cli/src/native_operations.rs @@ -1,11 +1,9 @@ //! Handle the creation of Native Operations. -use std::any::Any; use std::collections::{BTreeMap, BTreeSet}; use std::path::PathBuf; use super::{update, Context}; -use anyhow::anyhow; use configuration::version4::{metadata as metadata_v4, oids_to_typenames}; use ndc_postgres_configuration as configuration; use ndc_postgres_configuration::environment::Environment; @@ -66,9 +64,8 @@ pub async fn create( let mut arguments_to_oids = std::collections::BTreeMap::new(); let mut columns_to_oids = std::collections::BTreeMap::new(); - let result_parameters = match result.parameters { - Some(sqlx::Either::Left(parameters)) => parameters, - _ => anyhow::bail!("Impossible: sqlx params should always be a vector"), + let Some(sqlx::Either::Left(result_parameters)) = result.parameters else { + anyhow::bail!("Impossible: sqlx params should always be a vector") }; if result_parameters.len() != sql.params.len() { @@ -76,9 +73,8 @@ pub async fn create( } for (result_param, sql_param) in result_parameters.into_iter().zip(sql.params.iter()) { - let param_name = match sql_param { - sql::string::Param::Variable(v) => v, - _ => anyhow::bail!("Impossible: Native query parameter was not a variable"), + let sql::string::Param::Variable(param_name) = sql_param else { + anyhow::bail!("Impossible: Native query parameter was not a variable") }; let the_oid = result_param From 459fccce2288d3db6e3f314ae968ea6d66f9d1de Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Wed, 3 Jul 2024 11:05:27 +0300 Subject: [PATCH 17/28] Move things around --- crates/cli/src/lib.rs | 22 +- crates/cli/src/native_operations.rs | 166 ++++---------- crates/configuration/src/configuration.rs | 18 -- crates/configuration/src/version4/mod.rs | 86 ++----- .../src/version4/native_operations.rs | 216 ++++++++++++++++++ 5 files changed, 293 insertions(+), 215 deletions(-) create mode 100644 crates/configuration/src/version4/native_operations.rs diff --git a/crates/cli/src/lib.rs 
b/crates/cli/src/lib.rs index 42a0b333e..186eda90b 100644 --- a/crates/cli/src/lib.rs +++ b/crates/cli/src/lib.rs @@ -41,12 +41,19 @@ pub enum Command { #[arg(long)] dir_to: PathBuf, }, + /// Create a new Native Operation from a SQL file. CreateNativeOperation { + /// Relative path to the SQL file inside the connector configuration directory. #[arg(long)] operation_path: PathBuf, + /// Operation kind. #[arg(long)] kind: native_operations::Kind, + + /// Override the Native Operation definition if it exists. + #[arg(long)] + r#override: bool, }, } @@ -66,7 +73,20 @@ pub async fn run(command: Command, context: Context) -> anyhow Command::CreateNativeOperation { operation_path, kind, - } => native_operations::create(operation_path, context, kind).await?, + r#override, + } => { + native_operations::create( + operation_path, + context, + kind, + if r#override { + native_operations::Override::Yes + } else { + native_operations::Override::No + }, + ) + .await? + } }; Ok(()) } diff --git a/crates/cli/src/native_operations.rs b/crates/cli/src/native_operations.rs index 83587ed3d..7818563ab 100644 --- a/crates/cli/src/native_operations.rs +++ b/crates/cli/src/native_operations.rs @@ -1,17 +1,10 @@ //! Handle the creation of Native Operations. -use std::collections::{BTreeMap, BTreeSet}; use std::path::PathBuf; use super::{update, Context}; -use configuration::version4::{metadata as metadata_v4, oids_to_typenames}; use ndc_postgres_configuration as configuration; use ndc_postgres_configuration::environment::Environment; -use query_engine_sql::sql; - -use sqlx::Column; -use sqlx::Connection; -use sqlx::Executor; /// Query or Mutation. #[derive(Debug, Clone, clap::ValueEnum)] @@ -20,147 +13,72 @@ pub enum Kind { Mutation, } +/// Override Native Operation definition if exists? +#[derive(Debug, Clone, clap::ValueEnum)] +pub enum Override { + Yes, + No, +} + /// Take a SQL file containing a Native Operation, check against the database that it is valid, /// and add it to the configuration if it is. pub async fn create( operation_path: PathBuf, context: Context, kind: Kind, + override_entry: Override, ) -> anyhow::Result<()> { // Read the configuration. let mut configuration = configuration::parse_configuration(context.context_path.clone()).await?; - // Connect to the db. - let connection_string = configuration.get_connection_uri()?; - let mut connection = sqlx::PgConnection::connect(&connection_string).await?; - - // Create an entry for a Native Operation and insert it into the configuration. match configuration { configuration::ParsedConfiguration::Version3(_) => Err(anyhow::anyhow!( "To use the create native operations command, please upgrade to the latest version." ))?, configuration::ParsedConfiguration::Version4(ref mut configuration) => { - // Read the SQL file. - let parsed_file = configuration::version4::metadata::parse_native_query_from_file( + let (name, new_native_operation) = configuration::version4::native_operations::create( + configuration, + operation_path, &context.context_path, - &operation_path, + convert_kind_v4(kind), ) - .map_err(|err| anyhow::anyhow!("{}", err))?; - - // Prepare the Native Operation SQL so it can be checked against the db. - let identifier = operation_path - .file_stem() - .ok_or(anyhow::anyhow!("SQL file not found"))? - .to_str() - .ok_or(anyhow::anyhow!("Could not convert SQL file name to string"))?; - - let sql = parsed_file.sql().to_sql(); - - // Prepare the SQL against the DB. 
- let result = connection.describe(&sql.sql).await?; - - // Extract the arguments and columns information into data structures. - let mut arguments_to_oids = std::collections::BTreeMap::new(); - let mut columns_to_oids = std::collections::BTreeMap::new(); - - let Some(sqlx::Either::Left(result_parameters)) = result.parameters else { - anyhow::bail!("Impossible: sqlx params should always be a vector") - }; - - if result_parameters.len() != sql.params.len() { - anyhow::bail!("Unexpected error: Parameters of native query and sql statement are not aligned") - } - - for (result_param, sql_param) in result_parameters.into_iter().zip(sql.params.iter()) { - let sql::string::Param::Variable(param_name) = sql_param else { - anyhow::bail!("Impossible: Native query parameter was not a variable") - }; - - let the_oid = result_param - .oid() - .ok_or_else(|| { - anyhow::anyhow!("Impossible: All sqlx TypeInfos should have an oid") - })? - .0; - - arguments_to_oids.insert(param_name, i64::from(the_oid)); + .await?; + + // Add the new native operation to the configuration. + match override_entry { + Override::Yes => { + configuration + .metadata + .native_queries + .0 + .insert(name.to_string(), new_native_operation); + } + Override::No => { + if configuration.metadata.native_queries.0.contains_key(&name) { + anyhow::bail!("A Native Operation with the name '{name}' already exists. To override, use the --override flag."); + } else { + configuration + .metadata + .native_queries + .0 + .insert(name.to_string(), new_native_operation); + } + } } - - for column in result.columns { - let the_oid = column - .type_info() - .oid() - .ok_or_else(|| { - anyhow::anyhow!("Impossible: All sqlx TypeInfos should have an oid") - })? - .0; - - columns_to_oids.insert(column.name().to_string(), i64::from(the_oid)); - } - - let mut oids: BTreeSet = arguments_to_oids.values().copied().collect(); - oids.extend::>(columns_to_oids.values().copied().collect()); - let oids_vec: Vec<_> = oids.into_iter().collect(); - let oids_map = oids_to_typenames(configuration, &connection_string, &oids_vec).await?; - - let mut arguments = BTreeMap::new(); - for (name, oid) in arguments_to_oids { - arguments.insert( - name.clone(), - metadata_v4::ReadOnlyColumnInfo { - name: name.clone(), - r#type: metadata_v4::Type::ScalarType(metadata_v4::ScalarTypeName( - oids_map.get(&oid).unwrap().0.clone(), - )), - description: None, - nullable: metadata_v4::Nullable::NonNullable, - }, - ); - } - let mut columns = BTreeMap::new(); - for (name, oid) in columns_to_oids { - columns.insert( - name.clone(), - metadata_v4::ReadOnlyColumnInfo { - name: name.clone(), - r#type: metadata_v4::Type::ScalarType(metadata_v4::ScalarTypeName( - oids_map.get(&oid).unwrap().0.clone(), - )), - description: None, - nullable: metadata_v4::Nullable::NonNullable, - }, - ); - } - - let new_native_operation = metadata_v4::NativeQueryInfo { - sql: metadata_v4::NativeQuerySqlEither::NativeQuerySqlExternal( - metadata_v4::NativeQuerySqlExternal::File { - file: operation_path.clone(), - }, - ), - arguments, - columns, - is_procedure: match kind { - Kind::Query => false, - Kind::Mutation => true, - }, - description: None, - }; - - // TODO: should we overwrite or not - configuration - .metadata - .native_queries - .0 - .insert(identifier.to_string(), new_native_operation); } }; // We write the configuration including the new Native Operation to file. 
configuration::write_parsed_configuration(configuration, context.context_path.clone()).await?; - // We update the configuration as well so that the introspection will add missing scalar type entries - // if necessary. + // We update the configuration as well so that the introspection will add missing scalar type entries if necessary. update(context).await } + +fn convert_kind_v4(kind: Kind) -> configuration::version4::native_operations::Kind { + match kind { + Kind::Query => configuration::version4::native_operations::Kind::Query, + Kind::Mutation => configuration::version4::native_operations::Kind::Mutation, + } +} diff --git a/crates/configuration/src/configuration.rs b/crates/configuration/src/configuration.rs index 56f5ec320..56c5967ef 100644 --- a/crates/configuration/src/configuration.rs +++ b/crates/configuration/src/configuration.rs @@ -45,24 +45,6 @@ impl ParsedConfiguration { pub fn initial() -> Self { ParsedConfiguration::Version4(version4::ParsedConfiguration::empty()) } - /// Extract the connection uri from the configuration + ENV if needed. - pub fn get_connection_uri(&self) -> Result { - let connection_uri = match self { - ParsedConfiguration::Version3(ref raw_configuration) => { - raw_configuration.connection_settings.connection_uri.clone() - } - ParsedConfiguration::Version4(ref configuration) => { - configuration.connection_settings.connection_uri.clone() - } - }; - - match connection_uri.0 { - super::values::Secret::Plain(connection_string) => Ok(connection_string), - super::values::Secret::FromEnvironment { variable } => { - Ok(std::env::var(variable.to_string())?) - } - } - } } /// The 'Configuration' type collects all the information necessary to serve queries at runtime. diff --git a/crates/configuration/src/version4/mod.rs b/crates/configuration/src/version4/mod.rs index cbda6fef9..4eccf776a 100644 --- a/crates/configuration/src/version4/mod.rs +++ b/crates/configuration/src/version4/mod.rs @@ -3,12 +3,13 @@ mod comparison; pub mod connection_settings; pub mod metadata; +pub mod native_operations; mod options; mod to_runtime_configuration; mod upgrade_from_v3; use std::borrow::Cow; -use std::collections::{BTreeMap, HashSet}; +use std::collections::HashSet; use std::path::Path; pub use to_runtime_configuration::make_runtime_configuration; pub use upgrade_from_v3::upgrade_from_v3; @@ -26,8 +27,6 @@ use crate::environment::Environment; use crate::error::{ParseConfigurationError, WriteParsedConfigurationError}; use crate::values::{ConnectionUri, Secret}; -use self::metadata::ScalarTypeName; - #[cfg(test)] mod tests; @@ -78,6 +77,18 @@ impl ParsedConfiguration { mutations_version: None, } } + + /// Extract the connection uri from the configuration + ENV if needed. + pub fn get_connection_uri(&self) -> Result { + let connection_uri = self.connection_settings.connection_uri.clone(); + + match connection_uri.0 { + super::values::Secret::Plain(connection_string) => Ok(connection_string), + super::values::Secret::FromEnvironment { variable } => { + Ok(std::env::var(variable.to_string())?) + } + } + } } fn get_type_ndc_name(r#type: &metadata::Type) -> &str { @@ -274,72 +285,3 @@ pub async fn write_parsed_configuration( Ok(()) } - -/// Representation of a result row returned from the oid lookup query. -#[derive(Debug, sqlx::FromRow)] -struct OidQueryRow { - schema_name: String, - type_name: String, - oid: i32, -} - -/// Given a vector of OIDs, ask postgres to provide the equivalent type names. 
-pub async fn oids_to_typenames( - configuration: &ParsedConfiguration, - connection_string: &str, - oids: &Vec, -) -> Result, sqlx::Error> { - let mut connection = PgConnection::connect(connection_string) - .instrument(info_span!("Connect to database")) - .await?; - - let rows: Vec = sqlx::query_as( - "SELECT - typnamespace::regnamespace::text as schema_name, - typname as type_name, - oid::integer - FROM pg_type - WHERE oid in (SELECT unnest($1)) - ", - ) - .bind(oids) - .fetch_all(&mut connection) - .instrument(info_span!("Run oid lookup query")) - .await?; - - let mut oids_map: BTreeMap = BTreeMap::new(); - - // Reverse lookup the schema.typename and find the ndc type name, - // if we find all we can just add the nq and call it a day. - for row in rows { - let schema_name: String = row.schema_name; - let type_name: String = row.type_name; - let oid: i64 = row.oid.into(); - - let mut found = false; - for (scalar_type_name, info) in &configuration.metadata.scalar_types.0 { - if info.schema_name == schema_name && info.type_name == type_name { - oids_map.insert(oid, scalar_type_name.clone()); - found = true; - continue; - } - } - - // If we don't find it we generate a name which is either schema_typename - // or just typename depending if the schema is in the unqualified list or not, - // then add the nq and run the introspection. - if !found { - if configuration - .introspection_options - .unqualified_schemas_for_types_and_procedures - .contains(&schema_name) - { - oids_map.insert(oid, ScalarTypeName(type_name)); - } else { - oids_map.insert(oid, ScalarTypeName(format!("{schema_name}_{type_name}"))); - } - } - } - - Ok(oids_map) -} diff --git a/crates/configuration/src/version4/native_operations.rs b/crates/configuration/src/version4/native_operations.rs new file mode 100644 index 000000000..4a1dce520 --- /dev/null +++ b/crates/configuration/src/version4/native_operations.rs @@ -0,0 +1,216 @@ +//! Infer information about a Native Operation from a Native Operation SQL string. + +use std::collections::{BTreeMap, BTreeSet}; +use std::path::{Path, PathBuf}; + +use query_engine_sql::sql; + +use sqlx::Connection; +use sqlx::Executor; +use sqlx::{Column, PgConnection}; + +use super::metadata; +use tracing::{info_span, Instrument}; + +/// Query or Mutation. +#[derive(Debug, Clone)] +pub enum Kind { + Query, + Mutation, +} + +/// Take a SQL file containing a Native Operation, check against the database that it is valid, +/// and add it to the configuration if it is. +pub async fn create( + configuration: &super::ParsedConfiguration, + operation_path: PathBuf, + context_path: &Path, + kind: Kind, +) -> anyhow::Result<(String, metadata::NativeQueryInfo)> { + // Connect to the db. + let connection_string = configuration.get_connection_uri()?; + let mut connection = sqlx::PgConnection::connect(&connection_string).await?; + + // Create an entry for a Native Operation and insert it into the configuration. + + // Read the SQL file. + let parsed_file = super::metadata::parse_native_query_from_file(context_path, &operation_path) + .map_err(|err| anyhow::anyhow!("{}", err))?; + + // Prepare the Native Operation SQL so it can be checked against the db. + let identifier = operation_path + .file_stem() + .ok_or(anyhow::anyhow!("SQL file not found"))? + .to_str() + .ok_or(anyhow::anyhow!("Could not convert SQL file name to string"))?; + + let sql = parsed_file.sql().to_sql(); + + // Prepare the SQL against the DB. 
+ let result = connection.describe(&sql.sql).await?; + + // Extract the arguments and columns information into data structures. + let mut arguments_to_oids = std::collections::BTreeMap::new(); + let mut columns_to_oids = std::collections::BTreeMap::new(); + + let Some(sqlx::Either::Left(result_parameters)) = result.parameters else { + anyhow::bail!("Internal error: sqlx params should always be a vector.") + }; + + if result_parameters.len() != sql.params.len() { + anyhow::bail!( + "Internal error: Parameters of native query and sql statement are not aligned." + ) + } + + for (result_param, sql_param) in result_parameters.into_iter().zip(sql.params.iter()) { + let sql::string::Param::Variable(param_name) = sql_param else { + anyhow::bail!("Internal error: Native operation parameter was not a variable.") + }; + + let the_oid = result_param + .oid() + .ok_or_else(|| { + anyhow::anyhow!("Internal error: All sqlx TypeInfos should have an oid.") + })? + .0; + + arguments_to_oids.insert(param_name, i64::from(the_oid)); + } + + for column in result.columns { + let the_oid = column + .type_info() + .oid() + .ok_or_else(|| { + anyhow::anyhow!("Internal error: All sqlx TypeInfos should have an oid.") + })? + .0; + + columns_to_oids.insert(column.name().to_string(), i64::from(the_oid)); + } + + let mut oids: BTreeSet = arguments_to_oids.values().copied().collect(); + oids.extend::>(columns_to_oids.values().copied().collect()); + let oids_vec: Vec<_> = oids.into_iter().collect(); + let oids_map = oids_to_typenames(&configuration, &connection_string, &oids_vec).await?; + + let mut arguments = BTreeMap::new(); + for (name, oid) in arguments_to_oids { + arguments.insert( + name.clone(), + metadata::ReadOnlyColumnInfo { + name: name.clone(), + r#type: metadata::Type::ScalarType(metadata::ScalarTypeName( + oids_map.get(&oid).unwrap().0.clone(), + )), + description: None, + nullable: metadata::Nullable::NonNullable, + }, + ); + } + let mut columns = BTreeMap::new(); + for (name, oid) in columns_to_oids { + columns.insert( + name.clone(), + metadata::ReadOnlyColumnInfo { + name: name.clone(), + r#type: metadata::Type::ScalarType(metadata::ScalarTypeName( + oids_map.get(&oid).unwrap().0.clone(), + )), + description: None, + nullable: metadata::Nullable::NonNullable, + }, + ); + } + + let new_native_operation = metadata::NativeQueryInfo { + sql: metadata::NativeQuerySqlEither::NativeQuerySqlExternal( + metadata::NativeQuerySqlExternal::File { + file: operation_path.clone(), + }, + ), + arguments, + columns, + is_procedure: match kind { + Kind::Query => false, + Kind::Mutation => true, + }, + description: None, + }; + + Ok((identifier.to_string(), new_native_operation)) +} + +/// Given a vector of OIDs, ask postgres to provide the equivalent type names. +pub async fn oids_to_typenames( + configuration: &super::ParsedConfiguration, + connection_string: &str, + oids: &Vec, +) -> Result, sqlx::Error> { + let mut connection = PgConnection::connect(connection_string) + .instrument(info_span!("Connect to database")) + .await?; + + let rows: Vec = sqlx::query_as(OID_QUERY) + .bind(oids) + .fetch_all(&mut connection) + .instrument(info_span!("Run oid lookup query")) + .await?; + + let mut oids_map: BTreeMap = BTreeMap::new(); + + // Reverse lookup the schema.typename and find the ndc type name, + // if we find all we can just add the nq and call it a day. 
+ for row in rows { + let schema_name: String = row.schema_name; + let type_name: String = row.type_name; + let oid: i64 = row.oid.into(); + + let mut found = false; + for (scalar_type_name, info) in &configuration.metadata.scalar_types.0 { + if info.schema_name == schema_name && info.type_name == type_name { + oids_map.insert(oid, scalar_type_name.clone()); + found = true; + continue; + } + } + + // If we don't find it we generate a name which is either schema_typename + // or just typename depending if the schema is in the unqualified list or not, + // then add the nq and run the introspection. + if !found { + if configuration + .introspection_options + .unqualified_schemas_for_types_and_procedures + .contains(&schema_name) + { + oids_map.insert(oid, metadata::ScalarTypeName(type_name)); + } else { + oids_map.insert( + oid, + metadata::ScalarTypeName(format!("{schema_name}_{type_name}")), + ); + } + } + } + + Ok(oids_map) +} + +const OID_QUERY: &str = " +SELECT + typnamespace::regnamespace::text as schema_name, + typname as type_name, + oid::integer +FROM pg_type +WHERE oid in (SELECT unnest($1)) +"; + +/// Representation of a result row returned from the oid lookup query. +#[derive(Debug, sqlx::FromRow)] +struct OidQueryRow { + schema_name: String, + type_name: String, + oid: i32, +} From e3d88abf4450200ec0bb19a6d6405ad009cc4ddf Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Wed, 3 Jul 2024 11:14:22 +0300 Subject: [PATCH 18/28] machete --- Cargo.lock | 2 -- crates/cli/Cargo.toml | 2 -- 2 files changed, 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4b14edfc8..4e704f1c6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1469,11 +1469,9 @@ dependencies = [ "clap", "insta", "ndc-postgres-configuration", - "query-engine-sql", "serde", "serde_json", "serde_yaml", - "sqlx", "tempfile", "thiserror", "tokio", diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index 1cf5d5d9b..c9f9b61d6 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -9,7 +9,6 @@ workspace = true [dependencies] ndc-postgres-configuration = { path = "../configuration" } -query-engine-sql = { path = "../query-engine/sql" } anyhow = { workspace = true } clap = { workspace = true, features = ["derive", "env"] } @@ -18,7 +17,6 @@ serde_json = { workspace = true } serde_yaml = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true, features = ["full"] } -sqlx = { workspace = true, features = ["postgres"] } [build-dependencies] build-data = { workspace = true } From eb53b3a21e86cdf8ef539e58b015073249502c31 Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Wed, 3 Jul 2024 11:26:30 +0300 Subject: [PATCH 19/28] lint --- crates/cli/src/lib.rs | 2 +- crates/cli/src/native_operations.rs | 19 +++++++++---------- .../src/version4/native_operations.rs | 2 +- 3 files changed, 11 insertions(+), 12 deletions(-) diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs index 186eda90b..77fde8bba 100644 --- a/crates/cli/src/lib.rs +++ b/crates/cli/src/lib.rs @@ -85,7 +85,7 @@ pub async fn run(command: Command, context: Context) -> anyhow native_operations::Override::No }, ) - .await? 
+ .await?; } }; Ok(()) diff --git a/crates/cli/src/native_operations.rs b/crates/cli/src/native_operations.rs index 7818563ab..0f3cfe175 100644 --- a/crates/cli/src/native_operations.rs +++ b/crates/cli/src/native_operations.rs @@ -41,7 +41,7 @@ pub async fn create( configuration, operation_path, &context.context_path, - convert_kind_v4(kind), + convert_kind_v4(&kind), ) .await?; @@ -52,17 +52,16 @@ pub async fn create( .metadata .native_queries .0 - .insert(name.to_string(), new_native_operation); + .insert(name, new_native_operation); } Override::No => { - if configuration.metadata.native_queries.0.contains_key(&name) { - anyhow::bail!("A Native Operation with the name '{name}' already exists. To override, use the --override flag."); + // Only insert if vacant. + if let std::collections::btree_map::Entry::Vacant(entry) = + configuration.metadata.native_queries.0.entry(name.clone()) + { + entry.insert(new_native_operation); } else { - configuration - .metadata - .native_queries - .0 - .insert(name.to_string(), new_native_operation); + anyhow::bail!("A Native Operation with the name '{}' already exists. To override, use the --override flag.", name); } } } @@ -76,7 +75,7 @@ pub async fn create( update(context).await } -fn convert_kind_v4(kind: Kind) -> configuration::version4::native_operations::Kind { +fn convert_kind_v4(kind: &Kind) -> configuration::version4::native_operations::Kind { match kind { Kind::Query => configuration::version4::native_operations::Kind::Query, Kind::Mutation => configuration::version4::native_operations::Kind::Mutation, diff --git a/crates/configuration/src/version4/native_operations.rs b/crates/configuration/src/version4/native_operations.rs index 4a1dce520..bc0a22ba7 100644 --- a/crates/configuration/src/version4/native_operations.rs +++ b/crates/configuration/src/version4/native_operations.rs @@ -93,7 +93,7 @@ pub async fn create( let mut oids: BTreeSet = arguments_to_oids.values().copied().collect(); oids.extend::>(columns_to_oids.values().copied().collect()); let oids_vec: Vec<_> = oids.into_iter().collect(); - let oids_map = oids_to_typenames(&configuration, &connection_string, &oids_vec).await?; + let oids_map = oids_to_typenames(configuration, &connection_string, &oids_vec).await?; let mut arguments = BTreeMap::new(); for (name, oid) in arguments_to_oids { From 739c77a6dce874aee0e4a08fb266b9c5d3bb4104 Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Wed, 3 Jul 2024 11:32:12 +0300 Subject: [PATCH 20/28] use one kind --- Cargo.lock | 1 + crates/cli/src/native_operations.rs | 16 ++-------------- crates/configuration/Cargo.toml | 2 ++ .../src/version4/native_operations.rs | 2 +- 4 files changed, 6 insertions(+), 15 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4e704f1c6..7cb2b9f11 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1482,6 +1482,7 @@ name = "ndc-postgres-configuration" version = "0.7.1" dependencies = [ "anyhow", + "clap", "insta", "jsonschema", "prometheus", diff --git a/crates/cli/src/native_operations.rs b/crates/cli/src/native_operations.rs index 0f3cfe175..d8c271002 100644 --- a/crates/cli/src/native_operations.rs +++ b/crates/cli/src/native_operations.rs @@ -6,12 +6,7 @@ use super::{update, Context}; use ndc_postgres_configuration as configuration; use ndc_postgres_configuration::environment::Environment; -/// Query or Mutation. -#[derive(Debug, Clone, clap::ValueEnum)] -pub enum Kind { - Query, - Mutation, -} +pub use configuration::version4::native_operations::Kind; /// Override Native Operation definition if exists? 
#[derive(Debug, Clone, clap::ValueEnum)] @@ -41,7 +36,7 @@ pub async fn create( configuration, operation_path, &context.context_path, - convert_kind_v4(&kind), + kind, ) .await?; @@ -74,10 +69,3 @@ pub async fn create( // We update the configuration as well so that the introspection will add missing scalar type entries if necessary. update(context).await } - -fn convert_kind_v4(kind: &Kind) -> configuration::version4::native_operations::Kind { - match kind { - Kind::Query => configuration::version4::native_operations::Kind::Query, - Kind::Mutation => configuration::version4::native_operations::Kind::Mutation, - } -} diff --git a/crates/configuration/Cargo.toml b/crates/configuration/Cargo.toml index 3bff860fe..991ace7f3 100644 --- a/crates/configuration/Cargo.toml +++ b/crates/configuration/Cargo.toml @@ -12,6 +12,8 @@ query-engine-metadata = { path = "../query-engine/metadata" } query-engine-sql = { path = "../query-engine/sql" } anyhow = { workspace = true } +# We only use clap for the derive. +clap = { workspace = true, features = ["derive", "env"] } prometheus = {workspace = true } schemars = { workspace = true, features = ["smol_str", "preserve_order"] } serde = { workspace = true } diff --git a/crates/configuration/src/version4/native_operations.rs b/crates/configuration/src/version4/native_operations.rs index bc0a22ba7..464b8e3bb 100644 --- a/crates/configuration/src/version4/native_operations.rs +++ b/crates/configuration/src/version4/native_operations.rs @@ -13,7 +13,7 @@ use super::metadata; use tracing::{info_span, Instrument}; /// Query or Mutation. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, clap::ValueEnum)] pub enum Kind { Query, Mutation, From 98c054215a0f708c2398c68a7ee88f634691baed Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Wed, 3 Jul 2024 12:30:31 +0300 Subject: [PATCH 21/28] rename and move native operation commands --- crates/cli/src/lib.rs | 34 +------- crates/cli/src/native_operations.rs | 117 +++++++++++++++++++++++++++- 2 files changed, 118 insertions(+), 33 deletions(-) diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs index 77fde8bba..c921f078f 100644 --- a/crates/cli/src/lib.rs +++ b/crates/cli/src/lib.rs @@ -41,20 +41,8 @@ pub enum Command { #[arg(long)] dir_to: PathBuf, }, - /// Create a new Native Operation from a SQL file. - CreateNativeOperation { - /// Relative path to the SQL file inside the connector configuration directory. - #[arg(long)] - operation_path: PathBuf, - - /// Operation kind. - #[arg(long)] - kind: native_operations::Kind, - - /// Override the Native Operation definition if it exists. - #[arg(long)] - r#override: bool, - }, + #[command(subcommand)] + NativeOperation(native_operations::Command), } /// The set of errors that can go wrong _in addition to_ generic I/O or parsing errors. 
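An illustrative invocation of the subcommands defined above (a sketch only: the binary name ndc-postgres-cli, the example file path, and clap's default kebab-case naming of the NativeOperation subcommand and its flags are assumptions, not taken from this series):

    ndc-postgres-cli native-operation create --operation-path native_operations/artist_by_name.sql --kind query
    ndc-postgres-cli native-operation create --operation-path native_operations/artist_by_name.sql --kind query --override
    ndc-postgres-cli native-operation delete --name artist_by_name --kind query
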
@@ -70,23 +58,7 @@ pub async fn run(command: Command, context: Context) -> anyhow Command::Initialize { with_metadata } => initialize(with_metadata, context).await?, Command::Update => update(context).await?, Command::Upgrade { dir_from, dir_to } => upgrade(dir_from, dir_to).await?, - Command::CreateNativeOperation { - operation_path, - kind, - r#override, - } => { - native_operations::create( - operation_path, - context, - kind, - if r#override { - native_operations::Override::Yes - } else { - native_operations::Override::No - }, - ) - .await?; - } + Command::NativeOperation(cmd) => native_operations::run(cmd, context).await?, }; Ok(()) } diff --git a/crates/cli/src/native_operations.rs b/crates/cli/src/native_operations.rs index d8c271002..0b3608557 100644 --- a/crates/cli/src/native_operations.rs +++ b/crates/cli/src/native_operations.rs @@ -8,16 +8,72 @@ use ndc_postgres_configuration::environment::Environment; pub use configuration::version4::native_operations::Kind; +/// Commands on Native Operations. +#[derive(Debug, Clone, clap::Subcommand)] +pub enum Command { + /// Create a new Native Operation from a SQL file. + Create { + /// Relative path to the SQL file inside the connector configuration directory. + #[arg(long)] + operation_path: PathBuf, + + /// Operation kind. + #[arg(long)] + kind: Kind, + + /// Override the Native Operation definition if it exists. + #[arg(long)] + r#override: bool, + }, + /// Delete an existing Native Operation from the configuration. + Delete { + /// The name of the Native Operation. + #[arg(long)] + name: String, + + /// Operation kind. + #[arg(long)] + kind: Kind, + }, +} + +/// Run a command in a given directory. +pub async fn run(command: Command, context: Context) -> anyhow::Result<()> { + match command { + Command::Create { + operation_path, + kind, + r#override, + } => { + create( + operation_path, + context, + kind, + if r#override { + Override::Yes + } else { + Override::No + }, + ) + .await?; + } + Command::Delete { name, kind } => { + delete(context, name, kind).await?; + } + }; + Ok(()) +} + /// Override Native Operation definition if exists? #[derive(Debug, Clone, clap::ValueEnum)] -pub enum Override { +enum Override { Yes, No, } /// Take a SQL file containing a Native Operation, check against the database that it is valid, /// and add it to the configuration if it is. -pub async fn create( +async fn create( operation_path: PathBuf, context: Context, kind: Kind, @@ -69,3 +125,60 @@ pub async fn create( // We update the configuration as well so that the introspection will add missing scalar type entries if necessary. update(context).await } + +/// Delete a Native Operation by name. +async fn delete( + context: Context, + name: String, + kind: Kind, +) -> anyhow::Result<()> { + // Read the configuration. + let mut configuration = + configuration::parse_configuration(context.context_path.clone()).await?; + + let error_message_not_exist = format!( + "A Native {} with the name '{}' does not exists.", + match kind { + Kind::Mutation => "Mutation", + Kind::Query => "Query", + }, + name + ); + + match configuration { + configuration::ParsedConfiguration::Version3(_) => Err(anyhow::anyhow!( + "To use the delete Native Operations command, please upgrade to the latest version." + ))?, + configuration::ParsedConfiguration::Version4(ref mut configuration) => { + // Delete if exists and is of the same type, error if not. 
+ match configuration.metadata.native_queries.0.entry(name.clone()) { + std::collections::btree_map::Entry::Occupied(entry) => { + let value = entry.get(); + if value.is_procedure { + match kind { + Kind::Mutation => { + entry.remove_entry(); + } + Kind::Query => { + anyhow::bail!(format!("{error_message_not_exist}\n Did you mean the Native Mutation with the same name?")); + } + } + } else { + match kind { + Kind::Mutation => { + anyhow::bail!(format!("{error_message_not_exist}\n Did you mean the Native Query with the same name?")); + } + Kind::Query => { + entry.remove_entry(); + } + } + } + } + std::collections::btree_map::Entry::Vacant(_) => { + anyhow::bail!(error_message_not_exist); + } + } + } + } + Ok(()) +} From c44b4fd667c939a6ff11808f882614f9778e938a Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Wed, 3 Jul 2024 12:30:48 +0300 Subject: [PATCH 22/28] Add list command --- crates/cli/src/native_operations.rs | 31 +++++++++++++++++++++++++++-- 1 file changed, 29 insertions(+), 2 deletions(-) diff --git a/crates/cli/src/native_operations.rs b/crates/cli/src/native_operations.rs index 0b3608557..0bd278073 100644 --- a/crates/cli/src/native_operations.rs +++ b/crates/cli/src/native_operations.rs @@ -11,6 +11,8 @@ pub use configuration::version4::native_operations::Kind; /// Commands on Native Operations. #[derive(Debug, Clone, clap::Subcommand)] pub enum Command { + /// List the existing Native Operations. + List, /// Create a new Native Operation from a SQL file. Create { /// Relative path to the SQL file inside the connector configuration directory. @@ -40,6 +42,7 @@ pub enum Command { /// Run a command in a given directory. pub async fn run(command: Command, context: Context) -> anyhow::Result<()> { match command { + Command::List => list(context).await?, Command::Create { operation_path, kind, @@ -71,6 +74,30 @@ enum Override { No, } +async fn list(context: Context) -> anyhow::Result<()> { + // Read the configuration. + let mut configuration = + configuration::parse_configuration(context.context_path.clone()).await?; + + match configuration { + configuration::ParsedConfiguration::Version3(_) => Err(anyhow::anyhow!( + "To use the native operations commands, please upgrade to the latest version." + ))?, + configuration::ParsedConfiguration::Version4(ref mut configuration) => { + let operations = &configuration.metadata.native_queries.0; + println!("Native Queries:"); + for native_operation in operations.iter().filter(|op| !op.1.is_procedure) { + println!("- {}", native_operation.0); + } + println!("Native Mutations:"); + for native_operation in operations.iter().filter(|op| op.1.is_procedure) { + println!("- {}", native_operation.0); + } + } + }; + Ok(()) +} + /// Take a SQL file containing a Native Operation, check against the database that it is valid, /// and add it to the configuration if it is. async fn create( @@ -85,7 +112,7 @@ async fn create( match configuration { configuration::ParsedConfiguration::Version3(_) => Err(anyhow::anyhow!( - "To use the create native operations command, please upgrade to the latest version." + "To use the native operations commands, please upgrade to the latest version." 
))?, configuration::ParsedConfiguration::Version4(ref mut configuration) => { let (name, new_native_operation) = configuration::version4::native_operations::create( @@ -147,7 +174,7 @@ async fn delete( match configuration { configuration::ParsedConfiguration::Version3(_) => Err(anyhow::anyhow!( - "To use the delete Native Operations command, please upgrade to the latest version." + "To use the native operations commands, please upgrade to the latest version." ))?, configuration::ParsedConfiguration::Version4(ref mut configuration) => { // Delete if exists and is of the same type, error if not. From 5f292a4575b6a4a6b2f2726216b90688eebe19cc Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Wed, 3 Jul 2024 13:10:44 +0300 Subject: [PATCH 23/28] handle nullability of columns --- .../src/version4/native_operations.rs | 51 +++++++++++++------ 1 file changed, 35 insertions(+), 16 deletions(-) diff --git a/crates/configuration/src/version4/native_operations.rs b/crates/configuration/src/version4/native_operations.rs index 464b8e3bb..a01d74141 100644 --- a/crates/configuration/src/version4/native_operations.rs +++ b/crates/configuration/src/version4/native_operations.rs @@ -13,7 +13,7 @@ use super::metadata; use tracing::{info_span, Instrument}; /// Query or Mutation. -#[derive(Debug, Clone, clap::ValueEnum)] +#[derive(Clone, Debug, clap::ValueEnum)] pub enum Kind { Query, Mutation, @@ -53,7 +53,7 @@ pub async fn create( let mut arguments_to_oids = std::collections::BTreeMap::new(); let mut columns_to_oids = std::collections::BTreeMap::new(); - let Some(sqlx::Either::Left(result_parameters)) = result.parameters else { + let Some(sqlx::Either::Left(ref result_parameters)) = result.parameters else { anyhow::bail!("Internal error: sqlx params should always be a vector.") }; @@ -63,35 +63,41 @@ pub async fn create( ) } - for (result_param, sql_param) in result_parameters.into_iter().zip(sql.params.iter()) { + // Fill the arguments list. + for (result_param, sql_param) in result_parameters.iter().zip(sql.params.iter()) { let sql::string::Param::Variable(param_name) = sql_param else { anyhow::bail!("Internal error: Native operation parameter was not a variable.") }; let the_oid = result_param .oid() - .ok_or_else(|| { - anyhow::anyhow!("Internal error: All sqlx TypeInfos should have an oid.") - })? + .ok_or(anyhow::anyhow!( + "Internal error: All sqlx TypeInfos should have an oid." + ))? .0; arguments_to_oids.insert(param_name, i64::from(the_oid)); } - for column in result.columns { + // Fill the columns list. + for (index, column) in result.columns.iter().enumerate() { let the_oid = column .type_info() .oid() - .ok_or_else(|| { - anyhow::anyhow!("Internal error: All sqlx TypeInfos should have an oid.") - })? + .ok_or(anyhow::anyhow!( + "Internal error: All sqlx TypeInfos should have an oid." + ))? .0; + let is_nullable = result.nullable(index).unwrap_or( + // If we don't know, we assume it is nullable. 
+ true, + ); - columns_to_oids.insert(column.name().to_string(), i64::from(the_oid)); + columns_to_oids.insert(column.name().to_string(), (i64::from(the_oid), is_nullable)); } let mut oids: BTreeSet = arguments_to_oids.values().copied().collect(); - oids.extend::>(columns_to_oids.values().copied().collect()); + oids.extend::>(columns_to_oids.values().copied().map(|x| x.0).collect()); let oids_vec: Vec<_> = oids.into_iter().collect(); let oids_map = oids_to_typenames(configuration, &connection_string, &oids_vec).await?; @@ -102,24 +108,37 @@ pub async fn create( metadata::ReadOnlyColumnInfo { name: name.clone(), r#type: metadata::Type::ScalarType(metadata::ScalarTypeName( - oids_map.get(&oid).unwrap().0.clone(), + oids_map + .get(&oid) + .ok_or_else(|| anyhow::anyhow!("Internal error: oid not found in map."))? + .0 + .clone(), )), description: None, + // we don't have this information, so we assume not nullable. nullable: metadata::Nullable::NonNullable, }, ); } let mut columns = BTreeMap::new(); - for (name, oid) in columns_to_oids { + for (name, (oid, is_nullable)) in columns_to_oids { columns.insert( name.clone(), metadata::ReadOnlyColumnInfo { name: name.clone(), r#type: metadata::Type::ScalarType(metadata::ScalarTypeName( - oids_map.get(&oid).unwrap().0.clone(), + oids_map + .get(&oid) + .ok_or_else(|| anyhow::anyhow!("Internal error: oid not found in map."))? + .0 + .clone(), )), description: None, - nullable: metadata::Nullable::NonNullable, + nullable: if is_nullable { + metadata::Nullable::Nullable + } else { + metadata::Nullable::NonNullable + }, }, ); } From 373697200cd6c3cb38421aa3f258e3bbcf7c381b Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Wed, 3 Jul 2024 17:16:38 +0300 Subject: [PATCH 24/28] add a test --- crates/cli/src/native_operations.rs | 24 ++++-- .../src/version4/metadata/native_queries.rs | 2 +- .../src/version4/native_operations.rs | 24 ++---- .../src/postgres/configuration_tests.rs | 46 +++++----- ...ration_tests__create_native_operation.snap | 86 +++++++++++++++++++ .../src/common_tests/configuration_tests.rs | 62 +++++++++++++ .../tests-common/src/common_tests/mod.rs | 1 + 7 files changed, 197 insertions(+), 48 deletions(-) create mode 100644 crates/tests/databases-tests/src/postgres/snapshots/databases_tests__postgres__configuration_tests__create_native_operation.snap create mode 100644 crates/tests/tests-common/src/common_tests/configuration_tests.rs diff --git a/crates/cli/src/native_operations.rs b/crates/cli/src/native_operations.rs index 0bd278073..9956f97c5 100644 --- a/crates/cli/src/native_operations.rs +++ b/crates/cli/src/native_operations.rs @@ -49,8 +49,8 @@ pub async fn run(command: Command, context: Context) -> anyhow r#override, } => { create( - operation_path, context, + operation_path, kind, if r#override { Override::Yes @@ -101,8 +101,8 @@ async fn list(context: Context) -> anyhow::Result<()> { /// Take a SQL file containing a Native Operation, check against the database that it is valid, /// and add it to the configuration if it is. async fn create( - operation_path: PathBuf, context: Context, + operation_path: PathBuf, kind: Kind, override_entry: Override, ) -> anyhow::Result<()> { @@ -110,15 +110,29 @@ async fn create( let mut configuration = configuration::parse_configuration(context.context_path.clone()).await?; + // Prepare the Native Operation SQL so it can be checked against the db. + let name = operation_path + .file_stem() + .ok_or(anyhow::anyhow!("SQL file not found"))? 
+ .to_str() + .ok_or(anyhow::anyhow!("Could not convert SQL file name to string"))? + .to_string(); + + // Read the SQL file. + let file_contents = match std::fs::read_to_string(context.context_path.join(&operation_path)) { + Ok(ok) => ok, + Err(err) => anyhow::bail!("{}: {}", operation_path.display(), err), + }; + match configuration { configuration::ParsedConfiguration::Version3(_) => Err(anyhow::anyhow!( "To use the native operations commands, please upgrade to the latest version." ))?, configuration::ParsedConfiguration::Version4(ref mut configuration) => { - let (name, new_native_operation) = configuration::version4::native_operations::create( + let new_native_operation = configuration::version4::native_operations::create( configuration, - operation_path, - &context.context_path, + &operation_path, + &file_contents, kind, ) .await?; diff --git a/crates/configuration/src/version4/metadata/native_queries.rs b/crates/configuration/src/version4/metadata/native_queries.rs index f6e3658b7..8edd16508 100644 --- a/crates/configuration/src/version4/metadata/native_queries.rs +++ b/crates/configuration/src/version4/metadata/native_queries.rs @@ -275,7 +275,7 @@ pub fn parse_native_query_from_file( } /// Parse a native query into parts where variables have the syntax `{{}}`. -fn parse_native_query(string: &str) -> NativeQueryParts { +pub fn parse_native_query(string: &str) -> NativeQueryParts { let vec: Vec> = string .split("{{") .map(|part| match part.split_once("}}") { diff --git a/crates/configuration/src/version4/native_operations.rs b/crates/configuration/src/version4/native_operations.rs index a01d74141..c74ccea72 100644 --- a/crates/configuration/src/version4/native_operations.rs +++ b/crates/configuration/src/version4/native_operations.rs @@ -1,7 +1,7 @@ //! Infer information about a Native Operation from a Native Operation SQL string. use std::collections::{BTreeMap, BTreeSet}; -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use query_engine_sql::sql; @@ -23,28 +23,18 @@ pub enum Kind { /// and add it to the configuration if it is. pub async fn create( configuration: &super::ParsedConfiguration, - operation_path: PathBuf, - context_path: &Path, + operation_path: &PathBuf, + operation_file_contents: &str, kind: Kind, -) -> anyhow::Result<(String, metadata::NativeQueryInfo)> { +) -> anyhow::Result { // Connect to the db. let connection_string = configuration.get_connection_uri()?; let mut connection = sqlx::PgConnection::connect(&connection_string).await?; // Create an entry for a Native Operation and insert it into the configuration. - // Read the SQL file. - let parsed_file = super::metadata::parse_native_query_from_file(context_path, &operation_path) - .map_err(|err| anyhow::anyhow!("{}", err))?; - - // Prepare the Native Operation SQL so it can be checked against the db. - let identifier = operation_path - .file_stem() - .ok_or(anyhow::anyhow!("SQL file not found"))? - .to_str() - .ok_or(anyhow::anyhow!("Could not convert SQL file name to string"))?; - - let sql = parsed_file.sql().to_sql(); + // Read the SQL file and parse it. + let sql = super::metadata::parse_native_query(operation_file_contents).to_sql(); // Prepare the SQL against the DB. let result = connection.describe(&sql.sql).await?; @@ -158,7 +148,7 @@ pub async fn create( description: None, }; - Ok((identifier.to_string(), new_native_operation)) + Ok(new_native_operation) } /// Given a vector of OIDs, ask postgres to provide the equivalent type names. 
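A note on the parsing step that create now relies on (a sketch of the assumed behaviour, based on parse_native_query's documented {{variable}} syntax and the Param::Variable handling above, not on the implementation itself): the file contents are split on the {{...}} markers and to_sql renders each variable as a numbered Postgres parameter before the statement is described, so a file such as

    SELECT * FROM "Artist" WHERE "Name" LIKE '%' || {{name}} || '%'

would be prepared against the database as roughly

    SELECT * FROM "Artist" WHERE "Name" LIKE '%' || $1 || '%'

with sql.params holding the corresponding Variable("name") entry, which is what lets the parameter OIDs reported by sqlx be zipped back onto the argument names.
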
diff --git a/crates/tests/databases-tests/src/postgres/configuration_tests.rs b/crates/tests/databases-tests/src/postgres/configuration_tests.rs index f63fb7799..b91abacdf 100644 --- a/crates/tests/databases-tests/src/postgres/configuration_tests.rs +++ b/crates/tests/databases-tests/src/postgres/configuration_tests.rs @@ -8,10 +8,7 @@ //! they rely on supporting data (the chinook NDC metadata configuration) which we maintain only for //! the latest version. -use std::collections::HashMap; -use std::path::{Path, PathBuf}; - -use similar_asserts::assert_eq; +use tests_common::common_tests::configuration_tests::*; pub const CHINOOK_NDC_METADATA_PATH: &str = "static/postgres/v4-chinook-ndc-metadata"; @@ -31,30 +28,29 @@ async fn postgres_current_only_configure_is_idempotent() -> anyhow::Result<()> { introspection_is_idempotent(CONNECTION_URI, CHINOOK_NDC_METADATA_PATH).await } -// Tests that configuration generation has not changed. -// -// This test does not use insta snapshots because it checks the NDC metadata file that is shared -// with other tests. -// -// If you have changed it intentionally, run `just generate-configuration`. -async fn introspection_is_idempotent( - connection_string: &str, - ndc_metadata_path: impl AsRef + Sync, -) -> anyhow::Result<()> { - let parsed_configuration = ndc_postgres_configuration::parse_configuration( - PathBuf::from(env!("CARGO_MANIFEST_DIR")) - .join("../../..") - .join(ndc_metadata_path), +#[tokio::test] +async fn create_native_operation() -> anyhow::Result<()> { + let my_native_query = r#" +SELECT "ArtistId" as artist_id, + "Name", + coalesce("Name", 'David') as "name_with_coalesce", + "group_leader".* +FROM "Artist" +CROSS JOIN "group_leader" +WHERE "Name" LIKE '%' || {{name}} || '%' + AND "ArtistId" > {{lower_bound}} + AND "ArtistId" < {{upper_bound}} +"# + .to_string(); + + let result = test_native_operation_create( + CHINOOK_NDC_METADATA_PATH, + my_native_query, + ndc_postgres_configuration::version4::native_operations::Kind::Query, ) .await?; - let environment = HashMap::from([( - ndc_postgres_configuration::DEFAULT_CONNECTION_URI_VARIABLE.into(), - connection_string.into(), - )]); - let introspected_configuration = - ndc_postgres_configuration::introspect(parsed_configuration.clone(), environment).await?; + insta::assert_json_snapshot!(result); - assert_eq!(parsed_configuration, introspected_configuration); Ok(()) } diff --git a/crates/tests/databases-tests/src/postgres/snapshots/databases_tests__postgres__configuration_tests__create_native_operation.snap b/crates/tests/databases-tests/src/postgres/snapshots/databases_tests__postgres__configuration_tests__create_native_operation.snap new file mode 100644 index 000000000..fbf653d0c --- /dev/null +++ b/crates/tests/databases-tests/src/postgres/snapshots/databases_tests__postgres__configuration_tests__create_native_operation.snap @@ -0,0 +1,86 @@ +--- +source: crates/tests/databases-tests/src/postgres/configuration_tests.rs +expression: result +--- +{ + "sql": { + "file": "test.sql" + }, + "columns": { + "Name": { + "name": "Name", + "type": { + "scalarType": "varchar" + }, + "nullable": "nullable", + "description": null + }, + "artist_id": { + "name": "artist_id", + "type": { + "scalarType": "int4" + }, + "nullable": "nonNullable", + "description": null + }, + "characters": { + "name": "characters", + "type": { + "scalarType": "characters" + }, + "nullable": "nullable", + "description": null + }, + "id": { + "name": "id", + "type": { + "scalarType": "int4" + }, + "nullable": "nullable", + 
"description": null + }, + "name": { + "name": "name", + "type": { + "scalarType": "chara" + }, + "nullable": "nullable", + "description": null + }, + "name_with_coalesce": { + "name": "name_with_coalesce", + "type": { + "scalarType": "varchar" + }, + "nullable": "nullable", + "description": null + } + }, + "arguments": { + "lower_bound": { + "name": "lower_bound", + "type": { + "scalarType": "int4" + }, + "nullable": "nonNullable", + "description": null + }, + "name": { + "name": "name", + "type": { + "scalarType": "text" + }, + "nullable": "nonNullable", + "description": null + }, + "upper_bound": { + "name": "upper_bound", + "type": { + "scalarType": "int4" + }, + "nullable": "nonNullable", + "description": null + } + }, + "description": null +} diff --git a/crates/tests/tests-common/src/common_tests/configuration_tests.rs b/crates/tests/tests-common/src/common_tests/configuration_tests.rs new file mode 100644 index 000000000..6882f6773 --- /dev/null +++ b/crates/tests/tests-common/src/common_tests/configuration_tests.rs @@ -0,0 +1,62 @@ +//! Tests the configuration generation has not changed. + +use ndc_postgres_configuration::version4; +use ndc_postgres_configuration::ParsedConfiguration; +use std::collections::HashMap; +use std::path::{Path, PathBuf}; + +pub async fn test_native_operation_create( + ndc_metadata_path: impl AsRef + Sync, + sql: String, + kind: version4::native_operations::Kind, +) -> anyhow::Result { + let configuration = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("../../..") + .join(ndc_metadata_path); + + let parsed_configuration = + ndc_postgres_configuration::parse_configuration(configuration).await?; + + match parsed_configuration { + ParsedConfiguration::Version3(_) => anyhow::bail!("version3"), + ParsedConfiguration::Version4(parsed_configuration) => { + let result = version4::native_operations::create( + &parsed_configuration, + &PathBuf::from("test.sql"), + &sql, + kind, + ) + .await?; + + Ok(result) + } + } +} + +/// Tests that configuration generation has not changed. +/// +/// This test does not use insta snapshots because it checks the NDC metadata file that is shared +/// with other tests. +/// +/// If you have changed it intentionally, run `just generate-configuration`. 
+pub async fn introspection_is_idempotent( + connection_string: &str, + ndc_metadata_path: impl AsRef + Sync, +) -> anyhow::Result<()> { + let parsed_configuration = ndc_postgres_configuration::parse_configuration( + PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("../../..") + .join(ndc_metadata_path), + ) + .await?; + let environment = HashMap::from([( + ndc_postgres_configuration::DEFAULT_CONNECTION_URI_VARIABLE.into(), + connection_string.into(), + )]); + + let introspected_configuration = + ndc_postgres_configuration::introspect(parsed_configuration.clone(), environment).await?; + + assert_eq!(parsed_configuration, introspected_configuration); + Ok(()) +} diff --git a/crates/tests/tests-common/src/common_tests/mod.rs b/crates/tests/tests-common/src/common_tests/mod.rs index bfc4d0097..1471e000e 100644 --- a/crates/tests/tests-common/src/common_tests/mod.rs +++ b/crates/tests/tests-common/src/common_tests/mod.rs @@ -1 +1,2 @@ +pub mod configuration_tests; pub mod ndc_tests; From 4d866fbf5125f1735506c84e9cfb35b90185ee4c Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Wed, 3 Jul 2024 17:19:49 +0300 Subject: [PATCH 25/28] comment --- .../tests/tests-common/src/common_tests/configuration_tests.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/tests/tests-common/src/common_tests/configuration_tests.rs b/crates/tests/tests-common/src/common_tests/configuration_tests.rs index 6882f6773..7ddde2040 100644 --- a/crates/tests/tests-common/src/common_tests/configuration_tests.rs +++ b/crates/tests/tests-common/src/common_tests/configuration_tests.rs @@ -5,6 +5,7 @@ use ndc_postgres_configuration::ParsedConfiguration; use std::collections::HashMap; use std::path::{Path, PathBuf}; +/// Test native query introspection. pub async fn test_native_operation_create( ndc_metadata_path: impl AsRef + Sync, sql: String, From 53066aa7c0f3b0b587022650c3d3e8726497b506 Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Wed, 3 Jul 2024 18:08:07 +0300 Subject: [PATCH 26/28] comments and lints --- crates/cli/src/native_operations.rs | 15 ++++++++------- .../src/version4/native_operations.rs | 6 +++--- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/crates/cli/src/native_operations.rs b/crates/cli/src/native_operations.rs index 9956f97c5..c3f31ae1c 100644 --- a/crates/cli/src/native_operations.rs +++ b/crates/cli/src/native_operations.rs @@ -67,13 +67,7 @@ pub async fn run(command: Command, context: Context) -> anyhow Ok(()) } -/// Override Native Operation definition if exists? -#[derive(Debug, Clone, clap::ValueEnum)] -enum Override { - Yes, - No, -} - +/// List all native operations. async fn list(context: Context) -> anyhow::Result<()> { // Read the configuration. let mut configuration = @@ -98,6 +92,13 @@ async fn list(context: Context) -> anyhow::Result<()> { Ok(()) } +/// Override Native Operation definition if exists? +#[derive(Debug, Clone, clap::ValueEnum)] +enum Override { + Yes, + No, +} + /// Take a SQL file containing a Native Operation, check against the database that it is valid, /// and add it to the configuration if it is. async fn create( diff --git a/crates/configuration/src/version4/native_operations.rs b/crates/configuration/src/version4/native_operations.rs index c74ccea72..85216c1c2 100644 --- a/crates/configuration/src/version4/native_operations.rs +++ b/crates/configuration/src/version4/native_operations.rs @@ -1,7 +1,7 @@ //! Infer information about a Native Operation from a Native Operation SQL string. 
use std::collections::{BTreeMap, BTreeSet}; -use std::path::PathBuf; +use std::path::Path; use query_engine_sql::sql; @@ -23,7 +23,7 @@ pub enum Kind { /// and add it to the configuration if it is. pub async fn create( configuration: &super::ParsedConfiguration, - operation_path: &PathBuf, + operation_path: &Path, operation_file_contents: &str, kind: Kind, ) -> anyhow::Result { @@ -136,7 +136,7 @@ pub async fn create( let new_native_operation = metadata::NativeQueryInfo { sql: metadata::NativeQuerySqlEither::NativeQuerySqlExternal( metadata::NativeQuerySqlExternal::File { - file: operation_path.clone(), + file: operation_path.to_path_buf(), }, ), arguments, From 2d8594437d20d676a4259e0b2bc3414a2bbb39b5 Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Wed, 3 Jul 2024 18:17:04 +0300 Subject: [PATCH 27/28] pass a connection string --- crates/cli/src/native_operations.rs | 3 +++ crates/configuration/src/version4/native_operations.rs | 2 +- .../tests/databases-tests/src/postgres/configuration_tests.rs | 1 + .../tests/tests-common/src/common_tests/configuration_tests.rs | 2 ++ 4 files changed, 7 insertions(+), 1 deletion(-) diff --git a/crates/cli/src/native_operations.rs b/crates/cli/src/native_operations.rs index c3f31ae1c..87dbf5393 100644 --- a/crates/cli/src/native_operations.rs +++ b/crates/cli/src/native_operations.rs @@ -130,8 +130,11 @@ async fn create( "To use the native operations commands, please upgrade to the latest version." ))?, configuration::ParsedConfiguration::Version4(ref mut configuration) => { + let connection_string = configuration.get_connection_uri()?; + let new_native_operation = configuration::version4::native_operations::create( configuration, + &connection_string, &operation_path, &file_contents, kind, diff --git a/crates/configuration/src/version4/native_operations.rs b/crates/configuration/src/version4/native_operations.rs index 85216c1c2..8b061d1f2 100644 --- a/crates/configuration/src/version4/native_operations.rs +++ b/crates/configuration/src/version4/native_operations.rs @@ -23,12 +23,12 @@ pub enum Kind { /// and add it to the configuration if it is. pub async fn create( configuration: &super::ParsedConfiguration, + connection_string: &str, operation_path: &Path, operation_file_contents: &str, kind: Kind, ) -> anyhow::Result { // Connect to the db. - let connection_string = configuration.get_connection_uri()?; let mut connection = sqlx::PgConnection::connect(&connection_string).await?; // Create an entry for a Native Operation and insert it into the configuration. diff --git a/crates/tests/databases-tests/src/postgres/configuration_tests.rs b/crates/tests/databases-tests/src/postgres/configuration_tests.rs index b91abacdf..55a29f7d2 100644 --- a/crates/tests/databases-tests/src/postgres/configuration_tests.rs +++ b/crates/tests/databases-tests/src/postgres/configuration_tests.rs @@ -44,6 +44,7 @@ WHERE "Name" LIKE '%' || {{name}} || '%' .to_string(); let result = test_native_operation_create( + CONNECTION_URI, CHINOOK_NDC_METADATA_PATH, my_native_query, ndc_postgres_configuration::version4::native_operations::Kind::Query, diff --git a/crates/tests/tests-common/src/common_tests/configuration_tests.rs b/crates/tests/tests-common/src/common_tests/configuration_tests.rs index 7ddde2040..d91c16e9f 100644 --- a/crates/tests/tests-common/src/common_tests/configuration_tests.rs +++ b/crates/tests/tests-common/src/common_tests/configuration_tests.rs @@ -7,6 +7,7 @@ use std::path::{Path, PathBuf}; /// Test native query introspection. 
pub async fn test_native_operation_create( + connection_string: &str, ndc_metadata_path: impl AsRef + Sync, sql: String, kind: version4::native_operations::Kind, @@ -23,6 +24,7 @@ pub async fn test_native_operation_create( ParsedConfiguration::Version4(parsed_configuration) => { let result = version4::native_operations::create( &parsed_configuration, + connection_string, &PathBuf::from("test.sql"), &sql, kind, From 9e72090dce12e19cbd77abbcff4a81558db52154 Mon Sep 17 00:00:00 2001 From: Gil Mizrahi Date: Wed, 3 Jul 2024 18:30:02 +0300 Subject: [PATCH 28/28] lint --- crates/configuration/src/version4/native_operations.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/configuration/src/version4/native_operations.rs b/crates/configuration/src/version4/native_operations.rs index 8b061d1f2..5831196b4 100644 --- a/crates/configuration/src/version4/native_operations.rs +++ b/crates/configuration/src/version4/native_operations.rs @@ -29,7 +29,7 @@ pub async fn create( kind: Kind, ) -> anyhow::Result { // Connect to the db. - let mut connection = sqlx::PgConnection::connect(&connection_string).await?; + let mut connection = sqlx::PgConnection::connect(connection_string).await?; // Create an entry for a Native Operation and insert it into the configuration. @@ -89,7 +89,7 @@ pub async fn create( let mut oids: BTreeSet = arguments_to_oids.values().copied().collect(); oids.extend::>(columns_to_oids.values().copied().map(|x| x.0).collect()); let oids_vec: Vec<_> = oids.into_iter().collect(); - let oids_map = oids_to_typenames(configuration, &connection_string, &oids_vec).await?; + let oids_map = oids_to_typenames(configuration, connection_string, &oids_vec).await?; let mut arguments = BTreeMap::new(); for (name, oid) in arguments_to_oids {