
Commit

fix
yuqi1129 committed Jan 24, 2025
1 parent ab85969 commit c7968f6
Showing 3 changed files with 46 additions and 21 deletions.
2 changes: 2 additions & 0 deletions catalogs/catalog-hadoop/build.gradle.kts
@@ -52,6 +52,8 @@ dependencies {
exclude("org.eclipse.jetty", "*")
exclude("io.netty")
exclude("org.fusesource.leveldbjni")
// Exclude protobuf-java to avoid dependency conflicts.
exclude("com.google.protobuf", "protobuf-java")
}
implementation(libs.slf4j.api)
implementation(libs.awaitility)
AuthorizationUtils.java
@@ -25,6 +25,7 @@
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
@@ -61,6 +62,7 @@

/* The utility class of the authorization module. */
public class AuthorizationUtils {

private static final Logger LOG = LoggerFactory.getLogger(AuthorizationUtils.class);
static final String USER_DOES_NOT_EXIST_MSG = "User %s does not exist in the metalake %s";
static final String GROUP_DOES_NOT_EXIST_MSG = "Group %s does not exist in the metalake %s";
@@ -417,7 +419,7 @@ private static String getHiveDefaultLocation(String metalakeName, String catalog
if (defaultSchemaLocation != null && !defaultSchemaLocation.isEmpty()) {
return defaultSchemaLocation;
} else {
LOG.warn("Schema %s location is not found", defaultSchemaIdent);
LOG.warn("Schema {} location is not found", defaultSchemaIdent);
}
}
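For context, the logging changes in this commit replace printf-style "%s" with SLF4J's "{}" placeholders; SLF4J does not interpret "%s", so the argument was silently dropped and the literal "%s" ended up in the log message. A minimal sketch of the difference (class name is illustrative):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class PlaceholderDemo {
  private static final Logger LOG = LoggerFactory.getLogger(PlaceholderDemo.class);

  public static void main(String[] args) {
    String schema = "sales";
    // Logs the literal text "Schema %s location is not found"; the argument is ignored.
    LOG.warn("Schema %s location is not found", schema);
    // Logs "Schema sales location is not found"; "{}" is replaced by the argument.
    LOG.warn("Schema {} location is not found", schema);
  }
}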

@@ -445,7 +447,7 @@ public static List<String> getMetadataObjectLocation(
// The Hive default schema location is Hive warehouse directory
String defaultSchemaLocation =
getHiveDefaultLocation(ident.name(), catalogObj.name());
if (defaultSchemaLocation != null && !defaultSchemaLocation.isEmpty()) {
if (StringUtils.isNotBlank(defaultSchemaLocation)) {
locations.add(defaultSchemaLocation);
}
}
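The check above now uses StringUtils.isNotBlank, which also rejects whitespace-only strings in addition to null and empty ones. Assuming the Apache Commons Lang StringUtils already used in this class, the difference looks like this:

import org.apache.commons.lang3.StringUtils;

public class BlankCheckDemo {
  public static void main(String[] args) {
    String location = "   "; // whitespace only
    // Old-style check passes: the string is neither null nor empty.
    System.out.println(location != null && !location.isEmpty()); // true
    // New check fails: the string contains only whitespace.
    System.out.println(StringUtils.isNotBlank(location));        // false
  }
}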
@@ -462,6 +464,13 @@ public static List<String> getMetadataObjectLocation(
if (defaultSchemaLocation != null && !defaultSchemaLocation.isEmpty()) {
locations.add(defaultSchemaLocation);
}
} else if (catalogObj.provider().equals("hadoop")) {
String catalogLocation = catalogObj.properties().get(HiveConstants.LOCATION);
if (StringUtils.isNotBlank(catalogLocation)) {
locations.add(catalogLocation);
} else {
LOG.warn("Catalog {} location is not found", ident);
}
}
}
break;
@@ -472,19 +481,20 @@ public static List<String> getMetadataObjectLocation(
.catalogDispatcher()
.loadCatalog(
NameIdentifier.of(ident.namespace().level(0), ident.namespace().level(1)));
LOG.info("Catalog provider is %s", catalogObj.provider());
if (catalogObj.provider().equals("hive")) {
LOG.info("Catalog provider is {}", catalogObj.provider());
String catalogProvider = catalogObj.provider();
if (Objects.equals(catalogProvider, "hive")
|| Objects.equals(catalogProvider, "hadoop")) {
Schema schema = GravitinoEnv.getInstance().schemaDispatcher().loadSchema(ident);
if (schema.properties().containsKey(HiveConstants.LOCATION)) {
String schemaLocation = schema.properties().get(HiveConstants.LOCATION);
if (StringUtils.isNotBlank(schemaLocation)) {
locations.add(schemaLocation);
} else {
LOG.warn("Schema %s location is not found", ident);
LOG.warn("Schema '{}' location is not found", ident);
}
}
}
// TODO: [#6133] Support getting the Fileset schema location in AuthorizationUtils
}
break;
case TABLE:
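One detail in the hunk above: the provider comparison now goes through Objects.equals(catalogProvider, "hive") rather than catalogObj.provider().equals("hive"), so a null provider yields false instead of a NullPointerException. A tiny illustration (the null value is hypothetical):

import java.util.Objects;

public class ProviderCheckDemo {
  public static void main(String[] args) {
    String provider = null; // e.g. a catalog whose provider is not set

    // Null-safe: evaluates to false without throwing.
    System.out.println(Objects.equals(provider, "hive") || Objects.equals(provider, "hadoop"));

    // The old pattern would throw a NullPointerException here:
    // provider.equals("hive");
  }
}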
@@ -501,35 +511,32 @@ public static List<String> getMetadataObjectLocation(
if (StringUtils.isNotBlank(tableLocation)) {
locations.add(tableLocation);
} else {
LOG.warn("Table %s location is not found", ident);
LOG.warn("Table {} location is not found", ident);
}
}
}
}
break;
case FILESET:
FilesetDispatcher filesetDispatcher = GravitinoEnv.getInstance().filesetDispatcher();
Fileset fileset = filesetDispatcher.loadFileset(ident);
Preconditions.checkArgument(
fileset != null, String.format("Fileset %s is not found", ident));
String filesetLocation = fileset.storageLocation();
Preconditions.checkArgument(
filesetLocation != null, String.format("Fileset %s location is not found", ident));
locations.add(filesetLocation);
{
FilesetDispatcher filesetDispatcher = GravitinoEnv.getInstance().filesetDispatcher();
Fileset fileset = filesetDispatcher.loadFileset(ident);
Preconditions.checkArgument(
fileset != null, String.format("Fileset %s is not found", ident));
String filesetLocation = fileset.storageLocation();
Preconditions.checkArgument(
filesetLocation != null, String.format("Fileset %s location is not found", ident));
locations.add(filesetLocation);
}
break;
default:
throw new AuthorizationPluginException(
"Failed to get location paths for metadata object %s type %s", ident, type);
}
} catch (Exception e) {
LOG.warn("Failed to get location paths for metadata object %s type %s", ident, type, e);
LOG.warn("Failed to get location paths for metadata object {} type {}", ident, type, e);
}

return locations;
}

private static NameIdentifier getObjectNameIdentifier(
String metalake, MetadataObject metadataObject) {
return NameIdentifier.parse(String.format("%s.%s", metalake, metadataObject.fullName()));
}
}
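A note on the FILESET branch above: wrapping the case body in braces gives filesetDispatcher, fileset, and filesetLocation their own scope, so other cases of the same switch cannot see them or collide with identically named locals. A minimal sketch of the pattern:

public class SwitchScopeDemo {
  static String describe(int type) {
    switch (type) {
      case 1: {
        // Scoped to this case; another case may reuse the name "label".
        String label = "table";
        return label;
      }
      case 2: {
        String label = "fileset";
        return label;
      }
      default:
        return "unknown";
    }
  }

  public static void main(String[] args) {
    System.out.println(describe(2)); // prints "fileset"
  }
}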
Tests for AuthorizationUtils
@@ -27,7 +27,9 @@
import org.apache.gravitino.GravitinoEnv;
import org.apache.gravitino.NameIdentifier;
import org.apache.gravitino.Namespace;
import org.apache.gravitino.Schema;
import org.apache.gravitino.catalog.CatalogDispatcher;
import org.apache.gravitino.catalog.SchemaDispatcher;
import org.apache.gravitino.catalog.TableDispatcher;
import org.apache.gravitino.exceptions.IllegalNameIdentifierException;
import org.apache.gravitino.exceptions.IllegalNamespaceException;
@@ -224,6 +226,7 @@ void testFilteredSecurableObjects() {
void testGetMetadataObjectLocation() throws IllegalAccessException {
CatalogDispatcher catalogDispatcher = Mockito.mock(CatalogDispatcher.class);
TableDispatcher tableDispatcher = Mockito.mock(TableDispatcher.class);
SchemaDispatcher schemaDispatcher = Mockito.mock(SchemaDispatcher.class);
Catalog catalog = Mockito.mock(Catalog.class);
Table table = Mockito.mock(Table.class);

@@ -234,6 +237,7 @@ void testGetMetadataObjectLocation() throws IllegalAccessException {

FieldUtils.writeField(GravitinoEnv.getInstance(), "catalogDispatcher", catalogDispatcher, true);
FieldUtils.writeField(GravitinoEnv.getInstance(), "tableDispatcher", tableDispatcher, true);
FieldUtils.writeField(GravitinoEnv.getInstance(), "schemaDispatcher", schemaDispatcher, true);

List<String> locations =
AuthorizationUtils.getMetadataObjectLocation(
@@ -246,5 +250,17 @@
NameIdentifier.of("catalog", "schema", "fileset"), Entity.EntityType.TABLE);
Assertions.assertEquals(1, locations.size());
Assertions.assertEquals("gs://bucket/1", locations.get(0));

Schema schema = Mockito.mock(Schema.class);
Mockito.when(schema.properties()).thenReturn(ImmutableMap.of("location", "gs://bucket/2"));
Mockito.when(schemaDispatcher.loadSchema(Mockito.any())).thenReturn(schema);
Mockito.when(catalog.provider()).thenReturn("hadoop");
Mockito.when(catalogDispatcher.loadCatalog(Mockito.any())).thenReturn(catalog);

locations =
AuthorizationUtils.getMetadataObjectLocation(
NameIdentifier.of("test", "catalog", "schema"), Entity.EntityType.SCHEMA);
Assertions.assertEquals(1, locations.size());
Assertions.assertEquals("gs://bucket/2", locations.get(0));
}
}
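The test injects the mocked dispatchers into the GravitinoEnv singleton with Apache Commons Lang's FieldUtils.writeField, which writes a private field reflectively (the final true argument forces accessibility). A self-contained sketch of that mechanism with a made-up class:

import org.apache.commons.lang3.reflect.FieldUtils;

public class FieldInjectionDemo {
  // Stand-in for a singleton such as GravitinoEnv holding a private collaborator.
  static class Env {
    private String dispatcher = "real";
    String dispatcher() { return dispatcher; }
  }

  public static void main(String[] args) throws IllegalAccessException {
    Env env = new Env();
    // Overwrite the private field; "true" bypasses the Java access check.
    FieldUtils.writeField(env, "dispatcher", "mock", true);
    System.out.println(env.dispatcher()); // prints "mock"
  }
}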
