Skip to content

Commit

Permalink
Merge remote-tracking branch 'refs/remotes/origin/gavin/phoenix-connector' into gavin/phoenix-connector
Browse files Browse the repository at this point in the history

# Conflicts:
#	ndc-connector-phoenix/src/main/kotlin/io/hasura/phoenix/NoRelationshipsQueryGenerator.kt
#	ndc-sqlgen/src/main/kotlin/io/hasura/ndc/sqlgen/BaseGenerator.kt
  • Loading branch information
GavinRay97 committed Dec 3, 2024
2 parents 255e9f0 + 132b07c commit eac9675
Show file tree
Hide file tree
Showing 32 changed files with 1,130 additions and 687 deletions.
100 changes: 100 additions & 0 deletions .github/workflows/build-connectors-action.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
name: Build and Release Database Connector

on:
  push:
    branches:
      - "snowflake/*"
      - "mysql/*"
      - "oracle/*"
      - "phoenix/*"

jobs:
  docker-build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        database: [snowflake, mysql, oracle, phoenix]

    # Expose BOTH values downstream jobs consume via needs.docker-build.outputs.*
    # (the original only declared `release_tag`, and mapped it to a step output
    # `tag` that the step never wrote, so every downstream reference was empty).
    outputs:
      release_tag: ${{ steps.extract_tag.outputs.tag }}
      docker_tag: ${{ steps.extract_tag.outputs.docker_tag }}

    permissions:
      contents: read
      packages: write # Allows pushing to GHCR
      id-token: write # Required for authenticating with GHCR

    steps:
      - name: Checkout repository
        if: contains(github.ref, matrix.database)
        uses: actions/checkout@v3

      - name: Extract version from branch name
        if: contains(github.ref, matrix.database)
        id: extract_tag
        run: |
          # Get the database type from the matrix
          DB_TYPE=${{ matrix.database }}
          # Use sed to remove the database prefix and get only the version part
          VERSION=$(echo "${GITHUB_REF#refs/heads/}" | sed "s|^${DB_TYPE}/||")
          # Write both the bare version tag and the full Docker tag so the
          # job-level `outputs:` mapping above resolves for downstream jobs.
          echo "tag=${VERSION}" >> $GITHUB_OUTPUT
          echo "docker_tag=ghcr.io/hasura/ndc-jvm-${DB_TYPE}:${VERSION}" >> $GITHUB_OUTPUT

      - name: Log in to the Container registry
        # Guarded like every other step so non-matching matrix entries skip cleanly.
        if: contains(github.ref, matrix.database)
        uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up Docker Buildx
        if: contains(github.ref, matrix.database)
        uses: docker/setup-buildx-action@v2

      - name: Cross-platform Docker build and push
        if: contains(github.ref, matrix.database)
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ndc-connector-${{ matrix.database }}.dockerfile
          push: true
          tags: ${{ steps.extract_tag.outputs.docker_tag }}
          platforms: linux/amd64,linux/arm64
          build-args: |
            JOOQ_PRO_EMAIL=${{ secrets.JOOQ_PRO_EMAIL }}
            JOOQ_PRO_LICENSE=${{ secrets.JOOQ_PRO_LICENSE }}

  create-release:
    needs: docker-build
    runs-on: ubuntu-latest
    permissions:
      contents: write # Required to create a GitHub Release
    strategy:
      matrix:
        database: [snowflake, mysql, oracle, phoenix]

    steps:
      - name: Checkout repository
        if: contains(github.ref, matrix.database)
        uses: actions/checkout@v3

      - name: Update dockerImage in connector-metadata.yaml
        if: contains(github.ref, matrix.database)
        run: |
          # Use the full Docker tag from the docker-build job output
          sed -i "s|^  dockerImage:.*|  dockerImage: \"${{ needs.docker-build.outputs.docker_tag }}\"|" ndc-connector-${{ matrix.database }}/.hasura-connector/connector-metadata.yaml

      - name: Compress Hasura Connector Metadata
        if: contains(github.ref, matrix.database)
        run: |
          cd ndc-connector-${{ matrix.database }}
          tar -czf package.tar.gz ./.hasura-connector

      # The original used actions/upload-release-asset with an `upload_url`
      # from a non-existent `steps.release` step (and a `tag_name` input that
      # action does not accept). softprops/action-gh-release both creates the
      # release for the tag and uploads the asset in one step.
      - name: Create GitHub Release and upload package.tar.gz
        if: contains(github.ref, matrix.database)
        uses: softprops/action-gh-release@v1
        with:
          tag_name: ${{ needs.docker-build.outputs.release_tag }}
          files: ndc-connector-${{ matrix.database }}/package.tar.gz
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
8 changes: 4 additions & 4 deletions create-github-release.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,15 @@

# Check if the correct number of arguments is provided
if [ "$#" -ne 2 ]; then
echo "Usage: $0 <version> <subdir>"
echo "Usage: $0 <subdir> <version>"
exit 1
fi

# Variables
OWNER="hasura"
REPO="ndc-jvm-mono"
VERSION="$1" # Version passed as the first argument
SUBDIR="$2" # Subdirectory passed as the second argument
SUBDIR="$1" # Subdirectory passed as the second argument
VERSION="$2" # Version passed as the second argument

# Create tag, release name, and description
TAG="${SUBDIR#ndc-connector-}/${VERSION}" # Create tag like oracle/v1.0.0
Expand All @@ -36,4 +36,4 @@ if [ $? -eq 0 ]; then
echo "Release ${RELEASE_NAME} created and file uploaded successfully for ${SUBDIR}."
else
echo "Failed to create release ${RELEASE_NAME} or upload file for ${SUBDIR}."
fi
fi
2 changes: 1 addition & 1 deletion gradle.properties
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
version=0.1.0
version=1.0.1

#Gradle properties
quarkusPluginId=io.quarkus
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@ class Filters {
@ServerRequestFilter(priority = 0)
fun logBodyFilter(info: UriInfo, request: HttpServerRequest, ctx: ContainerRequestContext) {
request.body { b ->
logger.debug("INCOMING IR: ${b.result()}")
logger.debug("INCOMING IR:")
logger.debug(b.result())
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ class DataConnectorResource @Inject constructor(
val canConnectToDB = dataConnectorService.runHealthCheckQuery()
if (canConnectToDB) {
return Response
.status(Response.Status.NO_CONTENT)
.status(Response.Status.OK)
.build()
} else {
throw RuntimeException("Unable to connect to DB")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import io.agroal.api.security.SimplePassword
import io.hasura.ndc.common.ConnectorConfiguration
import io.opentelemetry.instrumentation.annotations.WithSpan
import io.opentelemetry.instrumentation.jdbc.datasource.OpenTelemetryDataSource
import io.quarkus.agroal.runtime.AgroalOpenTelemetryWrapper
import io.quarkus.agroal.runtime.OpenTelemetryAgroalDataSource
import io.smallrye.config.ConfigMapping
import io.smallrye.config.WithDefault
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ import io.hasura.ndc.app.interfaces.ISchemaGenerator
import io.hasura.ndc.common.ConnectorConfiguration
import io.hasura.ndc.app.models.ExplainResponse
import io.hasura.ndc.app.services.AgroalDataSourceService
import io.hasura.ndc.app.services.ConnectorConfigurationLoader
import io.opentelemetry.api.trace.Tracer
import io.opentelemetry.instrumentation.annotations.WithSpan
import jakarta.enterprise.inject.Produces
Expand Down Expand Up @@ -106,14 +105,14 @@ abstract class BaseDataConnectorService(

@WithSpan
open fun mkDSLCtx(): DSLContext {
val config = ConnectorConfigurationLoader.config
val config = ConnectorConfiguration.Loader.config
val ds = dataSourceProvider.getDataSource(config)
return DSL.using(ds, jooqDialect, jooqSettings)
}

@WithSpan
override fun getSchema(): SchemaResponse {
return schemaGenerator.getSchema(ConnectorConfigurationLoader.config)
return schemaGenerator.getSchema(ConnectorConfiguration.Loader.config)
}


Expand Down
1 change: 1 addition & 0 deletions ndc-cli/build.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ dependencies {
implementation("net.snowflake:snowflake-jdbc:3.16.1")

implementation("org.apache.phoenix:phoenix-client-hbase-2.4:5.1.1")
implementation("org.apache.phoenix:phoenix-queryserver-client:5.0.0-HBase-2.0")
}

tasks.withType<org.jetbrains.kotlin.gradle.tasks.KotlinCompile> {
Expand Down
61 changes: 56 additions & 5 deletions ndc-cli/src/main/kotlin/io/hasura/cli/PhoenixConfigGenerator.kt
Original file line number Diff line number Diff line change
Expand Up @@ -6,18 +6,67 @@ import io.hasura.ndc.common.TableSchemaRow
import io.hasura.ndc.common.TableType
import org.jooq.impl.DSL
import java.sql.JDBCType
import java.sql.Types

object PhoenixConfigGenerator : IConfigGenerator {


/**
 * Maps a raw `DATA_TYPE` integer from Phoenix's SYSTEM.CATALOG to a standard
 * [JDBCType] name (e.g. "VARCHAR", "INTEGER").
 *
 * The thin (Phoenix Query Server) client reports Phoenix-internal type codes
 * that differ from java.sql.Types, so they are translated explicitly; the
 * thick client already reports standard java.sql.Types values.
 *
 * @param phoenixDataType the raw DATA_TYPE value, or null if absent (mapped to OTHER)
 * @param isThinClient whether the JDBC URL targets the thin client (default true)
 * @return the [JDBCType] constant name for the resolved type code
 * @throws IllegalArgumentException if the type code has no [JDBCType] mapping
 */
private fun translatePhoenixDataTypeToSqlType(phoenixDataType: Int?, isThinClient: Boolean = true): String {
    val sqlType = if (!isThinClient) {
        phoenixDataType ?: Types.OTHER
    } else {
        when (phoenixDataType) {
            null -> Types.OTHER // Handle null data_type
            -6 -> Types.TINYINT // TINYINT
            -5 -> Types.BIGINT // BIGINT
            -3 -> Types.VARBINARY // VARBINARY
            -2 -> Types.BINARY // BINARY
            1 -> Types.CHAR // CHAR
            3 -> Types.DECIMAL // DECIMAL
            4 -> Types.INTEGER // INTEGER
            5 -> Types.SMALLINT // SMALLINT
            6 -> Types.FLOAT // FLOAT
            8 -> Types.DOUBLE // DOUBLE
            9 -> Types.VARCHAR // VARCHAR
            10 -> Types.SMALLINT // UNSIGNED_SMALLINT (maps to SMALLINT)
            11 -> Types.FLOAT // UNSIGNED_FLOAT (maps to FLOAT)
            12 -> Types.VARCHAR // VARCHAR (Phoenix specific)
            13 -> Types.VARCHAR // (Custom/Unsupported, mapped to VARCHAR)
            14 -> Types.VARCHAR // (Custom/Unsupported, mapped to VARCHAR)
            15 -> Types.VARCHAR // (Custom/Unsupported, mapped to VARCHAR)
            16 -> Types.BOOLEAN // BOOLEAN
            18 -> Types.ARRAY // ARRAY
            19 -> Types.VARBINARY // VARBINARY (Phoenix specific)
            20 -> Types.VARBINARY // VARBINARY (Phoenix specific)
            91 -> Types.DATE // DATE
            92 -> Types.TIME // TIME
            93 -> Types.TIMESTAMP // TIMESTAMP
            else ->
                // JDBCType.valueOf(Int) throws IllegalArgumentException for an
                // unmapped code — it never returns null — so the original
                // `!= null` check could not reach its error branch. Validate
                // via try/catch and rethrow with a Phoenix-specific message.
                try {
                    JDBCType.valueOf(phoenixDataType)
                    phoenixDataType
                } catch (e: IllegalArgumentException) {
                    throw IllegalArgumentException("Unknown Phoenix data type: $phoenixDataType", e)
                }
        }
    }
    return JDBCType.valueOf(sqlType).name
}

override fun getConfig(
jdbcUrl: String,
schemas: List<String>
): ConnectorConfiguration {
val ctx = DSL.using(jdbcUrl)

val result = ctx.fetch("""
val isThinClient = jdbcUrl.contains("phoenix:thin", ignoreCase = true)

val result = ctx.fetch(
"""
SELECT * FROM SYSTEM.CATALOG
WHERE TABLE_SCHEM != 'SYSTEM' OR TABLE_SCHEM IS NULL
""")
"""
)

val groupedBySchema = result.groupBy { it["TABLE_SCHEM"] as String? }

Expand All @@ -28,10 +77,12 @@ object PhoenixConfigGenerator : IConfigGenerator {
val columns = records.filter { it["COLUMN_NAME"] != null }.map {
val columnFamily = it["COLUMN_FAMILY"] as String?
val columnName = it["COLUMN_NAME"] as String


ColumnSchemaRow(
name = if (columnFamily != null && columnFamily != "0") "$columnFamily.$columnName" else columnName,
description = null,
type = JDBCType.valueOf(it["DATA_TYPE"] as Int).name,
type = translatePhoenixDataTypeToSqlType(it["DATA_TYPE"] as? Int, isThinClient),
numeric_scale = null,
nullable = it["NULLABLE"] == 1,
auto_increment = it["IS_AUTOINCREMENT"] == "YES",
Expand All @@ -40,7 +91,7 @@ object PhoenixConfigGenerator : IConfigGenerator {
}

TableSchemaRow(
tableName = if (schema != null) "$schema.$tableName" else tableName,
tableName = if (schema != null) "$schema.$tableName" else tableName,
tableType = if (records.any { it["TABLE_TYPE"] == "u" }) TableType.TABLE else TableType.VIEW,
description = null,
columns = columns,
Expand All @@ -59,4 +110,4 @@ object PhoenixConfigGenerator : IConfigGenerator {
functions = emptyList()
)
}
}
}
32 changes: 23 additions & 9 deletions ndc-cli/src/main/kotlin/io/hasura/cli/main.kt
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package io.hasura.cli

import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
import io.hasura.ndc.common.ConnectorConfiguration
import picocli.CommandLine
import picocli.CommandLine.*
import java.io.File
Expand Down Expand Up @@ -54,11 +55,22 @@ class CLI {
names = ["-s", "--schemas"],
arity = "0..*",
split = ",",
defaultValue = "",
description = ["Comma-separated list of schemas to introspect"]
)
schemas: List<String> = emptyList()
schemas: List<String>?
) {
val file = File(outfile)

println("Checking for configuration file at ${file.absolutePath}")
val existingConfig = file.let {
if (it.exists()) {
println("Existing configuration file detected")
mapper.readValue(it, ConnectorConfiguration::class.java)
} else {
println("Non-existent or empty configuration file detected")
ConnectorConfiguration()
}
}

val configGenerator = when (database) {
DatabaseType.ORACLE -> OracleConfigGenerator
Expand All @@ -67,20 +79,23 @@ class CLI {
DatabaseType.PHOENIX -> PhoenixConfigGenerator
}

val config = configGenerator.getConfig(
println("Generating configuration for $database database...")
val introspectedConfig = configGenerator.getConfig(
jdbcUrl = jdbcUrl,
schemas = schemas
schemas = schemas ?: emptyList()
)
val mergedConfigWithNativeQueries = introspectedConfig.copy(
nativeQueries = existingConfig.nativeQueries
)

val file = File(outfile)
try {
file.createNewFile()
mapper.writerWithDefaultPrettyPrinter().writeValue(file, config)
println("Writing configuration to ${file.absolutePath}")
mapper.writerWithDefaultPrettyPrinter().writeValue(file, mergedConfigWithNativeQueries)
} catch (e: Exception) {
println("Error writing configuration to file: ${e.message}")

val parentDir = file.parentFile
val permissions = Files.getPosixFilePermissions(parentDir.toPath())
val permissions = Files.getPosixFilePermissions(parentDir.toPath())
val posixPermissions = PosixFilePermissions.toString(permissions)

println("Current user: ${System.getProperty("user.name")}")
Expand All @@ -104,4 +119,3 @@ class CLI {
}
}
}

Loading

0 comments on commit eac9675

Please sign in to comment.