Skip to content

Commit

Permalink
Merge branch 'main' into issue-5900
Browse files Browse the repository at this point in the history
  • Loading branch information
amazingLychee committed Jan 16, 2025
2 parents f944fec + e6225a0 commit cbc6a05
Show file tree
Hide file tree
Showing 148 changed files with 7,492 additions and 1,463 deletions.
14 changes: 12 additions & 2 deletions .github/workflows/gvfs-fuse-build-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -71,10 +71,18 @@ jobs:
run: |
dev/ci/check_commands.sh
- name: Build and test Gravitino
- name: Build Gvfs-fuse
run: |
./gradlew :clients:filesystem-fuse:build -PenableFuse=true
- name: Integration test
run: |
./gradlew build -x :clients:client-python:build -x test -x web -PjdkVersion=${{ matrix.java-version }}
./gradlew compileDistribution -x :clients:client-python:build -x test -x web -PjdkVersion=${{ matrix.java-version }}
cd clients/filesystem-fuse
make test-s3
make test-fuse-it
- name: Free up disk space
run: |
dev/ci/util_free_space.sh
Expand All @@ -85,5 +93,7 @@ jobs:
with:
name: Gvfs-fuse integrate-test-reports-${{ matrix.java-version }}
path: |
clients/filesystem-fuse/build/test/log/*.log
clients/filesystem-fuse/target/debug/fuse.log
distribution/package/logs/gravitino-server.out
distribution/package/logs/gravitino-server.log
1 change: 1 addition & 0 deletions LICENSE.bin
Original file line number Diff line number Diff line change
Expand Up @@ -374,6 +374,7 @@
Apache Arrow
Rome
Jettison
Awaitility

This product bundles various third-party components also under the
Apache Software Foundation License 1.1
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -100,12 +100,14 @@ public RangerClientExtension(String hostName, String authType, String username,
}
}

/**
 * Creates a Ranger policy after validating that it declares at least one resource.
 *
 * <p>A policy without resources is useless (it would match nothing), so we fail fast
 * client-side with a clear message instead of relying on server-side behavior.
 *
 * @param policy the policy to create; must have a non-empty resource map
 * @return the created policy as returned by the Ranger service
 * @throws RangerServiceException if the underlying Ranger service call fails
 * @throws IllegalArgumentException if the policy declares no resources
 */
@Override
public RangerPolicy createPolicy(RangerPolicy policy) throws RangerServiceException {
  // Idiomatic emptiness check instead of size() > 0.
  Preconditions.checkArgument(
      !policy.getResources().isEmpty(), "Ranger policy resources can not be empty!");
  return super.createPolicy(policy);
}

@Override
public RangerPolicy updatePolicy(long policyId, RangerPolicy policy)
throws RangerServiceException {
Preconditions.checkArgument(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ public VXGroup() {
*
* @return the formatted string representation
*/
@Override
public String toString() {
String str = "VXGroup={";
str += super.toString();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,7 @@ public String getName() {
*
* @return the formatted string representation
*/
@Override
public String toString() {
String str = "VXUser={";
str += super.toString();
Expand Down
9 changes: 9 additions & 0 deletions bin/common.sh → bin/common.sh.template
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,15 @@ if [[ -f "${GRAVITINO_CONF_DIR}/gravitino-env.sh" ]]; then
. "${GRAVITINO_CONF_DIR}/gravitino-env.sh"
fi

# Guard: GRAVITINO_VERSION must be set before continuing (it is normally provided
# by gravitino-env.sh, sourced above, which is generated into the distribution
# package at build time). When it is missing we are most likely running from a
# source checkout rather than a compiled distribution, so abort with guidance.
if [[ -z "${GRAVITINO_VERSION}" ]]; then
echo -e "GRAVITINO_VERSION is not set, you may need to:\n" \
"1. Ensure that a compiled version of Gravitino is available at " \
"\${GRAVITINO_HOME}/distribution/package. You may need to compile it first, " \
"if you are installing the software from source code.\n" \
"2. Execute gravitino.sh in the \${GRAVITINO_HOME}/distribution/package/bin directory."
exit 1
fi

GRAVITINO_CLASSPATH+=":${GRAVITINO_CONF_DIR}"

JVM_VERSION=8
Expand Down
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -32,25 +32,41 @@ public class PaimonPropertiesUtils {
// will only need to set the configuration 'catalog-backend' in Gravitino and Gravitino will
// change it to `catalogType` automatically and pass it to Paimon.
public static final Map<String, String> GRAVITINO_CONFIG_TO_PAIMON;
public static final Map<String, String> PAIMON_CATALOG_CONFIG_TO_GRAVITINO;

static {
Map<String, String> map = new HashMap();
map.put(PaimonConstants.CATALOG_BACKEND, PaimonConstants.CATALOG_BACKEND);
map.put(PaimonConstants.GRAVITINO_JDBC_DRIVER, PaimonConstants.GRAVITINO_JDBC_DRIVER);
map.put(PaimonConstants.GRAVITINO_JDBC_USER, PaimonConstants.PAIMON_JDBC_USER);
map.put(PaimonConstants.GRAVITINO_JDBC_PASSWORD, PaimonConstants.PAIMON_JDBC_PASSWORD);
map.put(PaimonConstants.URI, PaimonConstants.URI);
map.put(PaimonConstants.WAREHOUSE, PaimonConstants.WAREHOUSE);
map.put(PaimonConstants.CATALOG_BACKEND_NAME, PaimonConstants.CATALOG_BACKEND_NAME);
Map<String, String> gravitinoConfigToPaimon = new HashMap<>();
Map<String, String> paimonCatalogConfigToGravitino = new HashMap<>();
gravitinoConfigToPaimon.put(PaimonConstants.CATALOG_BACKEND, PaimonConstants.CATALOG_BACKEND);
gravitinoConfigToPaimon.put(
PaimonConstants.GRAVITINO_JDBC_DRIVER, PaimonConstants.GRAVITINO_JDBC_DRIVER);
gravitinoConfigToPaimon.put(
PaimonConstants.GRAVITINO_JDBC_USER, PaimonConstants.PAIMON_JDBC_USER);
gravitinoConfigToPaimon.put(
PaimonConstants.GRAVITINO_JDBC_PASSWORD, PaimonConstants.PAIMON_JDBC_PASSWORD);
gravitinoConfigToPaimon.put(PaimonConstants.URI, PaimonConstants.URI);
gravitinoConfigToPaimon.put(PaimonConstants.WAREHOUSE, PaimonConstants.WAREHOUSE);
gravitinoConfigToPaimon.put(
PaimonConstants.CATALOG_BACKEND_NAME, PaimonConstants.CATALOG_BACKEND_NAME);
// S3
map.put(S3Properties.GRAVITINO_S3_ENDPOINT, PaimonConstants.S3_ENDPOINT);
map.put(S3Properties.GRAVITINO_S3_ACCESS_KEY_ID, PaimonConstants.S3_ACCESS_KEY);
map.put(S3Properties.GRAVITINO_S3_SECRET_ACCESS_KEY, PaimonConstants.S3_SECRET_KEY);
gravitinoConfigToPaimon.put(S3Properties.GRAVITINO_S3_ENDPOINT, PaimonConstants.S3_ENDPOINT);
gravitinoConfigToPaimon.put(
S3Properties.GRAVITINO_S3_ACCESS_KEY_ID, PaimonConstants.S3_ACCESS_KEY);
gravitinoConfigToPaimon.put(
S3Properties.GRAVITINO_S3_SECRET_ACCESS_KEY, PaimonConstants.S3_SECRET_KEY);
// OSS
map.put(OSSProperties.GRAVITINO_OSS_ENDPOINT, PaimonConstants.OSS_ENDPOINT);
map.put(OSSProperties.GRAVITINO_OSS_ACCESS_KEY_ID, PaimonConstants.OSS_ACCESS_KEY);
map.put(OSSProperties.GRAVITINO_OSS_ACCESS_KEY_SECRET, PaimonConstants.OSS_SECRET_KEY);
GRAVITINO_CONFIG_TO_PAIMON = Collections.unmodifiableMap(map);
gravitinoConfigToPaimon.put(OSSProperties.GRAVITINO_OSS_ENDPOINT, PaimonConstants.OSS_ENDPOINT);
gravitinoConfigToPaimon.put(
OSSProperties.GRAVITINO_OSS_ACCESS_KEY_ID, PaimonConstants.OSS_ACCESS_KEY);
gravitinoConfigToPaimon.put(
OSSProperties.GRAVITINO_OSS_ACCESS_KEY_SECRET, PaimonConstants.OSS_SECRET_KEY);
GRAVITINO_CONFIG_TO_PAIMON = Collections.unmodifiableMap(gravitinoConfigToPaimon);
gravitinoConfigToPaimon.forEach(
(key, value) -> {
paimonCatalogConfigToGravitino.put(value, key);
});
PAIMON_CATALOG_CONFIG_TO_GRAVITINO =
Collections.unmodifiableMap(paimonCatalogConfigToGravitino);
}

/**
Expand Down
1 change: 1 addition & 0 deletions catalogs/catalog-hadoop/build.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ dependencies {
exclude("org.fusesource.leveldbjni")
}
implementation(libs.slf4j.api)
implementation(libs.awaitility)

compileOnly(libs.guava)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.commons.lang3.StringUtils;
import org.apache.gravitino.Catalog;
import org.apache.gravitino.Entity;
Expand Down Expand Up @@ -71,6 +73,8 @@
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.awaitility.Awaitility;
import org.awaitility.core.ConditionTimeoutException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down Expand Up @@ -755,6 +759,35 @@ FileSystem getFileSystem(Path path, Map<String, String> config) throws IOExcepti
scheme, path, fileSystemProvidersMap.keySet(), fileSystemProvidersMap.values()));
}

return provider.getFileSystem(path, config);
int timeoutSeconds =
(int)
propertiesMetadata
.catalogPropertiesMetadata()
.getOrDefault(
config, HadoopCatalogPropertiesMetadata.FILESYSTEM_CONNECTION_TIMEOUT_SECONDS);
try {
AtomicReference<FileSystem> fileSystem = new AtomicReference<>();
Awaitility.await()
.atMost(timeoutSeconds, TimeUnit.SECONDS)
.until(
() -> {
fileSystem.set(provider.getFileSystem(path, config));
return true;
});
return fileSystem.get();
} catch (ConditionTimeoutException e) {
throw new IOException(
String.format(
"Failed to get FileSystem for path: %s, scheme: %s, provider: %s, config: %s within %s "
+ "seconds, please check the configuration or increase the "
+ "file system connection timeout time by setting catalog property: %s",
path,
scheme,
provider,
config,
timeoutSeconds,
HadoopCatalogPropertiesMetadata.FILESYSTEM_CONNECTION_TIMEOUT_SECONDS),
e);
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,9 @@ public class HadoopCatalogPropertiesMetadata extends BaseCatalogPropertiesMetada
*/
public static final String DEFAULT_FS_PROVIDER = "default-filesystem-provider";

static final String FILESYSTEM_CONNECTION_TIMEOUT_SECONDS = "filesystem-conn-timeout-secs";
static final int DEFAULT_GET_FILESYSTEM_TIMEOUT_SECONDS = 6;

public static final String BUILTIN_LOCAL_FS_PROVIDER = "builtin-local";
public static final String BUILTIN_HDFS_FS_PROVIDER = "builtin-hdfs";

Expand Down Expand Up @@ -82,6 +85,14 @@ public class HadoopCatalogPropertiesMetadata extends BaseCatalogPropertiesMetada
false /* immutable */,
BUILTIN_LOCAL_FS_PROVIDER, // please see LocalFileSystemProvider#name()
false /* hidden */))
.put(
FILESYSTEM_CONNECTION_TIMEOUT_SECONDS,
PropertyEntry.integerOptionalPropertyEntry(
FILESYSTEM_CONNECTION_TIMEOUT_SECONDS,
"Timeout to wait for to create the Hadoop file system client instance.",
false /* immutable */,
DEFAULT_GET_FILESYSTEM_TIMEOUT_SECONDS,
false /* hidden */))
// The following two are about authentication.
.putAll(KERBEROS_PROPERTY_ENTRIES)
.putAll(AuthenticationConfig.AUTHENTICATION_PROPERTY_ENTRIES)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -567,10 +567,6 @@ protected String generateAlterTableSql(
alterSql.add("MODIFY COMMENT \"" + newComment + "\"");
}

if (!setProperties.isEmpty()) {
alterSql.add(generateTableProperties(setProperties));
}

if (CollectionUtils.isEmpty(alterSql)) {
return "";
}
Expand Down Expand Up @@ -602,11 +598,14 @@ private String updateColumnNullabilityDefinition(
}

private String generateTableProperties(List<TableChange.SetProperty> setProperties) {
return setProperties.stream()
.map(
setProperty ->
String.format("\"%s\" = \"%s\"", setProperty.getProperty(), setProperty.getValue()))
.collect(Collectors.joining(",\n"));
String properties =
setProperties.stream()
.map(
setProperty ->
String.format(
"\"%s\" = \"%s\"", setProperty.getProperty(), setProperty.getValue()))
.collect(Collectors.joining(",\n"));
return "set (" + properties + ")";
}

private String updateColumnCommentFieldDefinition(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -577,6 +577,16 @@ void testAlterDorisTable() {
.pollInterval(WAIT_INTERVAL_IN_SECONDS, TimeUnit.SECONDS)
.untilAsserted(
() -> assertEquals(4, tableCatalog.loadTable(tableIdentifier).columns().length));

// set property
tableCatalog.alterTable(tableIdentifier, TableChange.setProperty("in_memory", "true"));
Awaitility.await()
.atMost(MAX_WAIT_IN_SECONDS, TimeUnit.SECONDS)
.pollInterval(WAIT_INTERVAL_IN_SECONDS, TimeUnit.SECONDS)
.untilAsserted(
() ->
assertEquals(
"true", tableCatalog.loadTable(tableIdentifier).properties().get("in_memory")));
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,7 @@ protected String generateCreateTableSql(
}
}

validateIndexes(indexes, columns);
appendIndexesSql(indexes, sqlBuilder);

sqlBuilder.append("\n)");
Expand Down Expand Up @@ -642,4 +643,33 @@ private StringBuilder appendColumnDefinition(JdbcColumn column, StringBuilder sq
private static String quote(String name) {
return BACK_QUOTE + name + BACK_QUOTE;
}

/**
 * Verifies that every column referenced by a UNIQUE_KEY index exists in the table and is
 * declared NOT NULL, failing fast at DDL-generation time with a clear message.
 *
 * @param indexes the table indexes to validate
 * @param columns the table columns the indexes may reference
 * @throws IllegalArgumentException if a unique-index column is missing from the table or
 *     is nullable
 */
private static void validateIndexes(Index[] indexes, JdbcColumn[] columns) {
// Build a name -> column lookup so each index field is resolved in O(1).
Map<String, JdbcColumn> columnMap =
Arrays.stream(columns).collect(Collectors.toMap(JdbcColumn::name, c -> c));
for (Index index : indexes) {
if (index.type() == Index.IndexType.UNIQUE_KEY) {
// Every column taking part in a unique index must exist and be NOT NULL.
for (String[] colNames : index.fieldNames()) {
// NOTE(review): only colNames[0] (the first element of each field-name path) is
// checked — presumably index fields are always top-level columns here; confirm
// if nested field paths are ever possible.
JdbcColumn column = columnMap.get(colNames[0]);
Preconditions.checkArgument(
column != null,
"Column %s in the unique index %s does not exist in the table",
colNames[0],
index.name());
Preconditions.checkArgument(
!column.nullable(),
"Column %s in the unique index %s must be a not null column",
colNames[0],
index.name());
}
}
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -1037,6 +1037,27 @@ void testCreateTableIndex() {
Assertions.assertEquals(2, table.index().length);
Assertions.assertNotNull(table.index()[0].name());
Assertions.assertNotNull(table.index()[1].name());

Column notNullCol = Column.of("col_6", Types.LongType.get(), "id", true, false, null);
Exception exception =
assertThrows(
IllegalArgumentException.class,
() ->
tableCatalog.createTable(
tableIdent,
new Column[] {notNullCol},
table_comment,
properties,
Transforms.EMPTY_TRANSFORM,
Distributions.NONE,
new SortOrder[0],
new Index[] {
Indexes.of(Index.IndexType.UNIQUE_KEY, null, new String[][] {{"col_6"}}),
}));
Assertions.assertTrue(
exception
.getMessage()
.contains("Column col_6 in the unique index null must be a not null column"));
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ public void testOperationTable() {
.withName("col_1")
.withType(VARCHAR)
.withComment("test_comment")
.withNullable(true)
.withNullable(false)
.build());
columns.add(
JdbcColumn.builder()
Expand Down Expand Up @@ -573,7 +573,7 @@ public void testCreateAndLoadTable() {
JdbcColumn.builder()
.withName("col_4")
.withType(Types.DateType.get())
.withNullable(true)
.withNullable(false)
.withComment("date")
.withDefaultValue(Column.DEFAULT_VALUE_NOT_SET)
.build());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -185,6 +185,7 @@ protected Map<String, String> getTableProperties(Connection connection, String t
}
}

@Override
protected void correctJdbcTableFields(
Connection connection, String databaseName, String tableName, JdbcTable.Builder tableBuilder)
throws SQLException {
Expand Down
Loading

0 comments on commit cbc6a05

Please sign in to comment.