Commit 92f75c3

Merge branch 'master' into SNOW-1651983-put-on-azure-w-auth-proxy

sfc-gh-jszczerbinski authored Sep 24, 2024
2 parents: 0e8de1a + 9597576
Showing 15 changed files with 390 additions and 228 deletions.
8 changes: 6 additions & 2 deletions FIPS/pom.xml
@@ -5,12 +5,12 @@
   <parent>
     <groupId>net.snowflake</groupId>
     <artifactId>snowflake-jdbc-parent</artifactId>
-    <version>3.19.0</version>
+    <version>3.19.1-SNAPSHOT</version>
     <relativePath>../parent-pom.xml</relativePath>
   </parent>

   <artifactId>snowflake-jdbc-fips</artifactId>
-  <version>3.19.0</version>
+  <version>3.19.1-SNAPSHOT</version>
   <packaging>jar</packaging>

   <name>snowflake-jdbc-fips</name>
@@ -429,6 +429,10 @@
             <pattern>org.jsoup</pattern>
             <shadedPattern>${shadeBase}.org.jsoup</shadedPattern>
           </relocation>
+          <relocation>
+            <pattern>com.github.luben.zstd</pattern>
+            <shadedPattern>${shadeBase}.com.github.luben.zstd</shadedPattern>
+          </relocation>
           <relocation>
             <pattern>com.nimbusds</pattern>
             <shadedPattern>${shadeBase}.com.nimbusds</shadedPattern>
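Note on the relocation just added: at build time the Maven Shade plugin rewrites every class under com.github.luben.zstd into the driver's internal shade base, so the copy of zstd-jni bundled into snowflake-jdbc-fips.jar cannot collide with an application's own zstd-jni. A minimal sketch of the runtime effect, assuming ${shadeBase} resolves to net.snowflake.client.jdbc.internal (the ShadingDemo class below is illustrative only, not part of the driver):

  public class ShadingDemo {
    public static void main(String[] args) throws Exception {
      // The application's own zstd-jni, if it happens to be on the classpath:
      Class<?> appZstd = Class.forName("com.github.luben.zstd.Zstd");

      // The relocated copy bundled inside the shaded driver jar
      // (assumes shadeBase = net.snowflake.client.jdbc.internal):
      Class<?> shadedZstd =
          Class.forName("net.snowflake.client.jdbc.internal.com.github.luben.zstd.Zstd");

      // Distinct classes, so the two versions cannot clash:
      System.out.println(appZstd == shadedZstd); // prints: false
    }
  }

The same relocation is added to the main pom.xml further down in this diff.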
4 changes: 2 additions & 2 deletions FIPS/scripts/check_content.sh
@@ -1,12 +1,12 @@
 #!/bin/bash -e

-# scripts used to check if all dependency is shaded into snowflake internal path
+# scripts used to check if all dependencies are shaded into snowflake internal path

 set -o pipefail

 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"

-if jar tvf $DIR/../target/snowflake-jdbc-fips.jar | awk '{print $8}' | grep -v -E "^(net|com)/snowflake" | grep -v -E "(com|net)/\$" | grep -v -E "^META-INF" | grep -v -E "^mozilla" | grep -v -E "^com/sun/jna" | grep -v com/sun/ | grep -v mime.types; then
+if jar tvf $DIR/../target/snowflake-jdbc-fips.jar | awk '{print $8}' | grep -v -E "^(net|com)/snowflake" | grep -v -E "(com|net)/\$" | grep -v -E "^META-INF" | grep -v -E "^mozilla" | grep -v -E "^com/sun/jna" | grep -v com/sun/ | grep -v mime.types | grep -v -E "^aix/" | grep -v -E "^darwin/" | grep -v -E "^freebsd/" | grep -v -E "^linux/" | grep -v -E "^win/"; then
   echo "[ERROR] JDBC jar includes class not under the snowflake namespace"
   exit 1
 fi
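The five new exclusions exist because zstd-jni ships prebuilt native libraries under OS-named top-level directories in its jar (aix/, darwin/, freebsd/, linux/, win/); the shade plugin can relocate Java packages but not these native-library paths, so the content check has to whitelist them. A rough Java equivalent of the shell check, handy for inspecting a local build (the jar path is a placeholder, and the whitelist mirrors the grep chain above):

  import java.util.jar.JarEntry;
  import java.util.jar.JarFile;
  import java.util.regex.Pattern;

  public class CheckContent {
    // Mirrors the grep -v chain (com/sun/jna is subsumed by com/sun/); the
    // last branch covers the zstd-jni native-library directories that the
    // shade plugin cannot relocate.
    private static final Pattern ALLOWED =
        Pattern.compile(
            "^(net|com)/snowflake"
                + "|(com|net)/$"
                + "|^META-INF"
                + "|^mozilla"
                + "|^com/sun/"
                + "|mime\\.types"
                + "|^(aix|darwin|freebsd|linux|win)/");

    public static void main(String[] args) throws Exception {
      try (JarFile jar = new JarFile("target/snowflake-jdbc-fips.jar")) {
        jar.stream()
            .map(JarEntry::getName)
            .filter(name -> !ALLOWED.matcher(name).find())
            .forEach(name -> System.out.println("[ERROR] unexpected entry: " + name));
      }
    }
  }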
2 changes: 2 additions & 0 deletions Jenkinsfile
@@ -87,6 +87,8 @@ timestamps {
       }.collectEntries { jobDefinition ->
         return [(jobDefinition.runName): { build job: jobDefinition.jobToRun, parameters: jobDefinition.params }]
       }
+
+      jobDefinitions.put('JDBC-AIX-Unit', { build job: 'JDBC-AIX-UnitTests', parameters: [ string(name: 'BRANCH', value: scmInfo.GIT_BRANCH ) ] } )
       stage('Test') {
         parallel (jobDefinitions)
       }
6 changes: 3 additions & 3 deletions ci/scripts/check_content.sh
@@ -8,12 +8,12 @@ set -o pipefail

 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"

-if jar tvf $DIR/../../target/snowflake-jdbc${package_modifier}.jar | awk '{print $8}' | grep -v -E "^(net|com)/snowflake" | grep -v -E "(com|net)/\$" | grep -v -E "^META-INF" | grep -v -E "^mozilla" | grep -v -E "^com/sun/jna" | grep -v com/sun/ | grep -v mime.types; then
+if jar tvf $DIR/../../target/snowflake-jdbc${package_modifier}.jar | awk '{print $8}' | grep -v -E "^(net|com)/snowflake" | grep -v -E "(com|net)/\$" | grep -v -E "^META-INF" | grep -v -E "^mozilla" | grep -v -E "^com/sun/jna" | grep -v com/sun/ | grep -v mime.types | grep -v -E "^aix/" | grep -v -E "^darwin/" | grep -v -E "^freebsd/" | grep -v -E "^linux/" | grep -v -E "^win/"; then
   echo "[ERROR] JDBC jar includes class not under the snowflake namespace"
   exit 1
 fi

-if jar tvf $DIR/../../target/snowflake-jdbc${package_modifier}.jar | awk '{print $8}' | grep -E "^META-INF/versions/.*.class" | grep -v -E "^META-INF/versions/.*/(net|com)/snowflake"; then
-  echo "[ERROR] JDBC jar includes multi release classes not under the snowflake namespace"
+if jar tvf $DIR/../../target/snowflake-jdbc${package_modifier}.jar | awk '{print $8}' | grep -E "^META-INF/versions/.*.class" | grep -v -E "^META-INF/versions/.*/(net|com)/snowflake" | grep -v -E "^aix/" | grep -v -E "^darwin/" | grep -v -E "^freebsd/" | grep -v -E "^linux/" | grep -v -E "^win/"; then
+  echo "[ERROR] JDBC jar includes multi-release classes not under the snowflake namespace"
   exit 1
 fi
5 changes: 0 additions & 5 deletions linkage-checker-exclusion-rules.xml
@@ -19,11 +19,6 @@
     <Source><Package name="org.apache.commons.compress.compressors"/></Source>
     <Reason>Optional</Reason>
   </LinkageError>
-  <LinkageError>
-    <Target><Package name="com.github.luben.zstd"/></Target>
-    <Source><Package name="org.apache.commons.compress.compressors"/></Source>
-    <Reason>Optional</Reason>
-  </LinkageError>
   <LinkageError>
     <Target><Package name="com.google.appengine.api.urlfetch"/></Target>
     <Source><Package name="com.google.api.client.extensions.appengine"/></Source>
12 changes: 11 additions & 1 deletion parent-pom.xml
@@ -5,7 +5,7 @@

   <groupId>net.snowflake</groupId>
   <artifactId>snowflake-jdbc-parent</artifactId>
-  <version>3.19.0</version>
+  <version>3.19.1-SNAPSHOT</version>
   <packaging>pom</packaging>

   <modules>
@@ -19,6 +19,7 @@
     <apache.commons.text.version>1.10.0</apache.commons.text.version>
     <apache.httpclient.version>4.5.14</apache.httpclient.version>
     <apache.httpcore.version>4.4.16</apache.httpcore.version>
+    <zstd-jni.version>1.5.6-5</zstd-jni.version>
     <arrow.version>17.0.0</arrow.version>
     <asm.version>9.3</asm.version>
     <avro.version>1.8.1</avro.version>
@@ -327,6 +328,11 @@
         <artifactId>httpcore</artifactId>
         <version>${apache.httpcore.version}</version>
       </dependency>
+      <dependency>
+        <groupId>com.github.luben</groupId>
+        <artifactId>zstd-jni</artifactId>
+        <version>${zstd-jni.version}</version>
+      </dependency>
       <dependency>
         <groupId>org.apache.tika</groupId>
         <artifactId>tika-core</artifactId>
@@ -644,6 +650,10 @@
       <groupId>org.apache.httpcomponents</groupId>
       <artifactId>httpcore</artifactId>
     </dependency>
+    <dependency>
+      <groupId>com.github.luben</groupId>
+      <artifactId>zstd-jni</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.tika</groupId>
       <artifactId>tika-core</artifactId>
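Background for the new dependency: Apache Commons Compress only supplies the Zstandard stream wrapper classes and delegates the actual codec to com.github.luben:zstd-jni, which is why the optional-linkage exclusion for com.github.luben.zstd could be dropped from linkage-checker-exclusion-rules.xml above once the driver declares the library directly. A minimal sketch of that API pairing, assuming both artifacts are on the classpath (the helper below is illustrative, not driver code):

  import java.io.ByteArrayInputStream;
  import java.io.ByteArrayOutputStream;
  import java.io.InputStream;
  import org.apache.commons.compress.compressors.zstandard.ZstdCompressorInputStream;

  public class ZstdSketch {
    // Decompresses a zstd-encoded payload; ZstdCompressorInputStream is the
    // commons-compress wrapper, and the codec underneath comes from zstd-jni.
    static byte[] decompress(byte[] compressed) throws Exception {
      try (InputStream in =
              new ZstdCompressorInputStream(new ByteArrayInputStream(compressed));
          ByteArrayOutputStream out = new ByteArrayOutputStream()) {
        byte[] buf = new byte[8192];
        int n;
        while ((n = in.read(buf)) != -1) {
          out.write(buf, 0, n);
        }
        return out.toByteArray();
      }
    }
  }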
8 changes: 6 additions & 2 deletions pom.xml
@@ -6,13 +6,13 @@
   <parent>
     <groupId>net.snowflake</groupId>
     <artifactId>snowflake-jdbc-parent</artifactId>
-    <version>3.19.0</version>
+    <version>3.19.1-SNAPSHOT</version>
     <relativePath>./parent-pom.xml</relativePath>
   </parent>

   <!-- Maven complains about using property here, but it makes install and deploy process easier to override final package names and localization -->
   <artifactId>${artifactId}</artifactId>
-  <version>3.19.0</version>
+  <version>3.19.1-SNAPSHOT</version>
   <packaging>jar</packaging>

   <name>${artifactId}</name>
@@ -947,6 +947,10 @@
             <pattern>android.annotation</pattern>
             <shadedPattern>${shadeBase}.android.annotation</shadedPattern>
           </relocation>
+          <relocation>
+            <pattern>com.github.luben.zstd</pattern>
+            <shadedPattern>${shadeBase}.com.github.luben.zstd</shadedPattern>
+          </relocation>
         </relocations>
         <filters>
           <filter>
src/main/java/net/snowflake/client/core/arrow/ArrowVectorConverter.java
@@ -7,22 +7,8 @@
 import java.sql.Date;
 import java.sql.Time;
 import java.sql.Timestamp;
-import java.util.Map;
 import java.util.TimeZone;
-import net.snowflake.client.core.DataConversionContext;
-import net.snowflake.client.core.SFBaseSession;
 import net.snowflake.client.core.SFException;
-import net.snowflake.client.jdbc.ErrorCode;
-import net.snowflake.client.jdbc.SnowflakeSQLException;
-import net.snowflake.client.jdbc.SnowflakeSQLLoggedException;
-import net.snowflake.client.jdbc.SnowflakeType;
-import net.snowflake.common.core.SqlState;
-import org.apache.arrow.vector.ValueVector;
-import org.apache.arrow.vector.complex.FixedSizeListVector;
-import org.apache.arrow.vector.complex.ListVector;
-import org.apache.arrow.vector.complex.MapVector;
-import org.apache.arrow.vector.complex.StructVector;
-import org.apache.arrow.vector.types.Types;

 /** Interface to convert from arrow vector values into java data types. */
 public interface ArrowVectorConverter {
@@ -177,201 +163,4 @@ public interface ArrowVectorConverter {
    * @param isUTC true or false value of whether NTZ timestamp should be set to UTC
    */
   void setTreatNTZAsUTC(boolean isUTC);
-
-  /**
-   * Given an arrow vector (a single column in a single record batch), return an arrow vector
-   * converter. Note, converter is built on top of arrow vector, so that arrow data can be converted
-   * back to java data
-   *
-   * <p>Arrow converter mappings for Snowflake fixed-point numbers
-   * -----------------------------------------------------------------------------------------
-   * Max position and scale        Converter
-   * -----------------------------------------------------------------------------------------
-   * number(3,0)                   {@link TinyIntToFixedConverter}
-   * number(3,2)                   {@link TinyIntToScaledFixedConverter}
-   * number(5,0)                   {@link SmallIntToFixedConverter}
-   * number(5,4)                   {@link SmallIntToScaledFixedConverter}
-   * number(10,0)                  {@link IntToFixedConverter}
-   * number(10,9)                  {@link IntToScaledFixedConverter}
-   * number(19,0)                  {@link BigIntToFixedConverter}
-   * number(19,18)                 {@link BigIntToScaledFixedConverter}
-   * number(38,37)                 {@link DecimalToScaledFixedConverter}
-   * ------------------------------------------------------------------------------------------
-   *
-   * @param vector an arrow vector
-   * @param context data conversion context
-   * @param session SFBaseSession for purposes of logging
-   * @param idx the index of the vector in its batch
-   * @return A converter on top of the vector
-   */
-  static ArrowVectorConverter initConverter(
-      ValueVector vector, DataConversionContext context, SFBaseSession session, int idx)
-      throws SnowflakeSQLException {
-    // arrow minor type
-    Types.MinorType type = Types.getMinorTypeForArrowType(vector.getField().getType());
-
-    // each column's metadata
-    Map<String, String> customMeta = vector.getField().getMetadata();
-    if (type == Types.MinorType.DECIMAL) {
-      // Note: Decimal vector is different from others
-      return new DecimalToScaledFixedConverter(vector, idx, context);
-    } else if (!customMeta.isEmpty()) {
-      SnowflakeType st = SnowflakeType.valueOf(customMeta.get("logicalType"));
-      switch (st) {
-        case ANY:
-        case CHAR:
-        case TEXT:
-        case VARIANT:
-          return new VarCharConverter(vector, idx, context);
-
-        case MAP:
-          if (vector instanceof MapVector) {
-            return new MapConverter((MapVector) vector, idx, context);
-          } else {
-            return new VarCharConverter(vector, idx, context);
-          }
-
-        case VECTOR:
-          return new VectorTypeConverter((FixedSizeListVector) vector, idx, context);
-
-        case ARRAY:
-          if (vector instanceof ListVector) {
-            return new ArrayConverter((ListVector) vector, idx, context);
-          } else {
-            return new VarCharConverter(vector, idx, context);
-          }
-
-        case OBJECT:
-          if (vector instanceof StructVector) {
-            return new StructConverter((StructVector) vector, idx, context);
-          } else {
-            return new VarCharConverter(vector, idx, context);
-          }
-
-        case BINARY:
-          return new VarBinaryToBinaryConverter(vector, idx, context);
-
-        case BOOLEAN:
-          return new BitToBooleanConverter(vector, idx, context);
-
-        case DATE:
-          boolean getFormatDateWithTimeZone = false;
-          if (context.getSession() != null) {
-            getFormatDateWithTimeZone = context.getSession().getFormatDateWithTimezone();
-          }
-          return new DateConverter(vector, idx, context, getFormatDateWithTimeZone);
-
-        case FIXED:
-          String scaleStr = vector.getField().getMetadata().get("scale");
-          int sfScale = Integer.parseInt(scaleStr);
-          switch (type) {
-            case TINYINT:
-              if (sfScale == 0) {
-                return new TinyIntToFixedConverter(vector, idx, context);
-              } else {
-                return new TinyIntToScaledFixedConverter(vector, idx, context, sfScale);
-              }
-            case SMALLINT:
-              if (sfScale == 0) {
-                return new SmallIntToFixedConverter(vector, idx, context);
-              } else {
-                return new SmallIntToScaledFixedConverter(vector, idx, context, sfScale);
-              }
-            case INT:
-              if (sfScale == 0) {
-                return new IntToFixedConverter(vector, idx, context);
-              } else {
-                return new IntToScaledFixedConverter(vector, idx, context, sfScale);
-              }
-            case BIGINT:
-              if (sfScale == 0) {
-                return new BigIntToFixedConverter(vector, idx, context);
-              } else {
-                return new BigIntToScaledFixedConverter(vector, idx, context, sfScale);
-              }
-          }
-          break;
-
-        case REAL:
-          return new DoubleToRealConverter(vector, idx, context);
-
-        case TIME:
-          switch (type) {
-            case INT:
-              return new IntToTimeConverter(vector, idx, context);
-            case BIGINT:
-              return new BigIntToTimeConverter(vector, idx, context);
-            default:
-              throw new SnowflakeSQLLoggedException(
-                  session,
-                  ErrorCode.INTERNAL_ERROR.getMessageCode(),
-                  SqlState.INTERNAL_ERROR,
-                  "Unexpected Arrow Field for ",
-                  st.name());
-          }
-
-        case TIMESTAMP_LTZ:
-          if (vector.getField().getChildren().isEmpty()) {
-            // case when the scale of the timestamp is equal or smaller than millisecs since epoch
-            return new BigIntToTimestampLTZConverter(vector, idx, context);
-          } else if (vector.getField().getChildren().size() == 2) {
-            // case when the scale of the timestamp is larger than millisecs since epoch, e.g.,
-            // nanosecs
-            return new TwoFieldStructToTimestampLTZConverter(vector, idx, context);
-          } else {
-            throw new SnowflakeSQLLoggedException(
-                session,
-                ErrorCode.INTERNAL_ERROR.getMessageCode(),
-                SqlState.INTERNAL_ERROR,
-                "Unexpected Arrow Field for ",
-                st.name());
-          }
-
-        case TIMESTAMP_NTZ:
-          if (vector.getField().getChildren().isEmpty()) {
-            // case when the scale of the timestamp is equal or smaller than 7
-            return new BigIntToTimestampNTZConverter(vector, idx, context);
-          } else if (vector.getField().getChildren().size() == 2) {
-            // when the timestamp is represented in a two-field struct
-            return new TwoFieldStructToTimestampNTZConverter(vector, idx, context);
-          } else {
-            throw new SnowflakeSQLLoggedException(
-                session,
-                ErrorCode.INTERNAL_ERROR.getMessageCode(),
-                SqlState.INTERNAL_ERROR,
-                "Unexpected Arrow Field for ",
-                st.name());
-          }
-
-        case TIMESTAMP_TZ:
-          if (vector.getField().getChildren().size() == 2) {
-            // case when the scale of the timestamp is equal or smaller than millisecs since epoch
-            return new TwoFieldStructToTimestampTZConverter(vector, idx, context);
-          } else if (vector.getField().getChildren().size() == 3) {
-            // case when the scale of the timestamp is larger than millisecs since epoch, e.g.,
-            // nanosecs
-            return new ThreeFieldStructToTimestampTZConverter(vector, idx, context);
-          } else {
-            throw new SnowflakeSQLLoggedException(
-                session,
-                ErrorCode.INTERNAL_ERROR.getMessageCode(),
-                SqlState.INTERNAL_ERROR,
-                "Unexpected SnowflakeType ",
-                st.name());
-          }
-
-        default:
-          throw new SnowflakeSQLLoggedException(
-              session,
-              ErrorCode.INTERNAL_ERROR.getMessageCode(),
-              SqlState.INTERNAL_ERROR,
-              "Unexpected Arrow Field for ",
-              st.name());
-      }
-    }
-    throw new SnowflakeSQLLoggedException(
-        session,
-        ErrorCode.INTERNAL_ERROR.getMessageCode(),
-        SqlState.INTERNAL_ERROR,
-        "Unexpected Arrow Field for ",
-        type.toString());
-  }
 }
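The static initConverter factory removed above is presumably relocated to a helper class elsewhere in this 15-file changeset rather than deleted outright, since the converter classes it instantiates are still referenced; only the two hunks shown here loaded in this capture. For readers of the removed code, a hedged usage sketch of the factory as it stood before this commit (vector construction is elided; ConverterUsageSketch and rowIdx are placeholders):

  import net.snowflake.client.core.DataConversionContext;
  import net.snowflake.client.core.SFBaseSession;
  import net.snowflake.client.core.SFException;
  import net.snowflake.client.jdbc.SnowflakeSQLException;
  import org.apache.arrow.vector.ValueVector;

  class ConverterUsageSketch {
    static Object readCell(
        ValueVector column, DataConversionContext context, SFBaseSession session, int rowIdx)
        throws SnowflakeSQLException, SFException {
      // initConverter dispatched on the Arrow minor type plus the column's
      // Snowflake "logicalType" metadata, as shown in the removed block above.
      ArrowVectorConverter converter =
          ArrowVectorConverter.initConverter(column, context, session, /* vector index */ 0);
      // Each converter exposes typed per-row accessors; toObject is the generic one.
      return converter.toObject(rowIdx);
    }
  }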