diff --git a/hbase-agent/pom.xml b/hbase-agent/pom.xml index bd60db8f2a..887f76114a 100644 --- a/hbase-agent/pom.xml +++ b/hbase-agent/pom.xml @@ -28,6 +28,8 @@ HBase Security Plugin HBase Security Plugins + true + false 9.4.51.v20230217 UTF-8 diff --git a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/AuthorizationSession.java b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/AuthorizationSession.java index df68f571cb..1c233ead34 100644 --- a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/AuthorizationSession.java +++ b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/AuthorizationSession.java @@ -18,378 +18,409 @@ */ package org.apache.ranger.authorization.hbase; - -import java.util.Date; -import java.util.List; -import java.util.Set; - import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hbase.security.AccessDeniedException; import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.thirdparty.com.google.common.base.MoreObjects; import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import org.apache.hadoop.thirdparty.com.google.common.collect.Sets; -import org.apache.hadoop.thirdparty.com.google.common.base.MoreObjects; import org.apache.ranger.audit.model.AuthzAuditEvent; -import org.apache.ranger.plugin.policyengine.*; +import org.apache.ranger.plugin.policyengine.RangerAccessRequest; +import org.apache.ranger.plugin.policyengine.RangerAccessRequestImpl; +import org.apache.ranger.plugin.policyengine.RangerAccessResource; +import org.apache.ranger.plugin.policyengine.RangerAccessResourceImpl; +import org.apache.ranger.plugin.policyengine.RangerAccessResult; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.Date; +import java.util.List; +import java.util.Set; public class AuthorizationSession { + private static final Logger LOG = LoggerFactory.getLogger(AuthorizationSession.class.getName()); + + // collaborator objects + final 
HbaseFactory factory = HbaseFactory.getInstance(); + final HbaseUserUtils userUtils = factory.getUserUtils(); + final HbaseAuthUtils authUtils = factory.getAuthUtils(); + + // immutable state + final RangerHBasePlugin authorizer; + + // Mutable state: Use supplied state information + String operation; + String otherInformation; + String access; + String table; + String column; + String columnFamily; + String remoteAddress; + + User user; + Set groups; // this exits to avoid having to get group for a user repeatedly. It is kept in sync with _user; + HbaseAuditHandler auditHandler; // Passing a null handler to policy engine would suppress audit logging. + boolean superUser; // is this session for a super user? + RangerAccessRequest request; // internal state per-authorization + RangerAccessResult result; + + private RangerAccessRequest.ResourceMatchingScope resourceMatchingScope = RangerAccessRequest.ResourceMatchingScope.SELF; + private boolean ignoreDescendantDeny = true; + + public AuthorizationSession(RangerHBasePlugin authorizer) { + this.authorizer = authorizer; + } + + public boolean getPropertyIsColumnAuthOptimizationEnabled() { + return authorizer.getPropertyIsColumnAuthOptimizationEnabled(); + } + + AuthorizationSession operation(String anOperation) { + operation = anOperation; + return this; + } + + AuthorizationSession otherInformation(String information) { + otherInformation = information; + return this; + } + + AuthorizationSession remoteAddress(String ipAddress) { + remoteAddress = ipAddress; + return this; + } + + AuthorizationSession access(String anAccess) { + access = anAccess; + return this; + } + + AuthorizationSession user(User aUser) { + user = aUser; + + if (user == null) { + LOG.warn("AuthorizationSession.user: user is null!"); + + groups = null; + } else { + groups = userUtils.getUserGroups(user); + + if (groups.isEmpty() && user.getUGI() != null) { + String[] groups = user.getUGI().getGroupNames(); + + if (groups != null) { + this.groups = 
Sets.newHashSet(groups); + } + } + + superUser = userUtils.isSuperUser(user); + } + + return this; + } + + AuthorizationSession table(String aTable) { + table = aTable; + return this; + } + + AuthorizationSession columnFamily(String aColumnFamily) { + columnFamily = aColumnFamily; + return this; + } + + AuthorizationSession column(String aColumn) { + column = aColumn; + return this; + } + + void verifyBuildable() { + String template = "Internal error: Incomplete/inconsisten state: [%s]. Can't build auth request!"; + + if (factory == null) { + String message = String.format(template, "factory is null"); + + LOG.error(message); + + throw new IllegalStateException(message); + } + + if (access == null || access.isEmpty()) { + String message = String.format(template, "access is null"); + + LOG.error(message); + + throw new IllegalStateException(message); + } + + if (user == null) { + String message = String.format(template, "user is null"); + + LOG.error(message); + + throw new IllegalStateException(message); + } + + if (isProvided(columnFamily) && !isProvided(table)) { + String message = String.format(template, "Table must be provided if column-family is provided"); + + LOG.error(message); + + throw new IllegalStateException(message); + } + + if (isProvided(column) && !isProvided(columnFamily)) { + String message = String.format(template, "Column family must be provided if column is provided"); + + LOG.error(message); + + throw new IllegalStateException(message); + } + } + + void zapAuthorizationState() { + request = null; + result = null; + } + + boolean isProvided(String aString) { + return aString != null && !aString.isEmpty(); + } + + boolean isNameSpaceOperation() { + return StringUtils.equals(operation, "createNamespace") || + StringUtils.equals(operation, "deleteNamespace") || + StringUtils.equals(operation, "modifyNamespace") || + StringUtils.equals(operation, "setUserNamespaceQuota") || + StringUtils.equals(operation, "setNamespaceQuota") || + 
StringUtils.equals(operation, "getUserPermissionForNamespace"); + } + + AuthorizationSession buildRequest() { + verifyBuildable(); + + // session can be reused so reset its state + zapAuthorizationState(); + + request = createRangerRequest(); + + LOG.debug("Built request: {}", request); + + return this; + } + + AuthorizationSession authorize() { + if (LOG.isDebugEnabled()) { + LOG.debug("==> AuthorizationSession.authorize: {}", getRequestMessage()); + } + + if (request == null) { + String message = "Invalid state transition: buildRequest() must be called before authorize(). This request would ultimately get denied.!"; + + throw new IllegalStateException(message); + } else { + // ok to pass potentially null handler to policy engine. Null handler effectively suppresses the audit. + if (auditHandler != null && superUser) { + LOG.debug("Setting super-user override on audit handler"); + + auditHandler.setSuperUserOverride(superUser); + } + + result = authorizer.isAccessAllowed(request, auditHandler); + } + + if (LOG.isDebugEnabled()) { + LOG.debug("<== AuthorizationSession.authorize: {}", getLogMessage(isAuthorized(), getDenialReason())); + } + + return this; + } + + void logCapturedEvents() { + if (auditHandler != null) { + List events = auditHandler.getCapturedEvents(); + + auditHandler.logAuthzAudits(events); + } + } + + void publishResults() throws AccessDeniedException { + LOG.debug("==> AuthorizationSession.publishResults()"); + + boolean authorized = isAuthorized(); - private static final Logger LOG = LoggerFactory.getLogger(AuthorizationSession.class.getName()); - // collaborator objects - final HbaseFactory _factory = HbaseFactory.getInstance(); - final HbaseUserUtils _userUtils = _factory.getUserUtils(); - final HbaseAuthUtils _authUtils = _factory.getAuthUtils(); - // immutable state - final RangerHBasePlugin _authorizer; - // Mutable state: Use supplied state information - String _operation; - String _otherInformation; - String _access; - String _table; - 
String _column; - String _columnFamily; - String _remoteAddress; - - User _user; - Set _groups; // this exits to avoid having to get group for a user repeatedly. It is kept in sync with _user; - // Passing a null handler to policy engine would suppress audit logging. - HbaseAuditHandler _auditHandler = null; - boolean _superUser = false; // is this session for a super user? - private RangerAccessRequest.ResourceMatchingScope _resourceMatchingScope = RangerAccessRequest.ResourceMatchingScope.SELF; - - private boolean _ignoreDescendantDeny = true; - - // internal state per-authorization - RangerAccessRequest _request; - RangerAccessResult _result; - - public AuthorizationSession(RangerHBasePlugin authorizer) { - _authorizer = authorizer; - } - - AuthorizationSession operation(String anOperation) { - _operation = anOperation; - return this; - } - - AuthorizationSession otherInformation(String information) { - _otherInformation = information; - return this; - } - - AuthorizationSession remoteAddress(String ipAddress) { - _remoteAddress = ipAddress; - return this; - } - - AuthorizationSession access(String anAccess) { - _access = anAccess; - return this; - } - - AuthorizationSession user(User aUser) { - _user = aUser; - if (_user == null) { - LOG.warn("AuthorizationSession.user: user is null!"); - _groups = null; - } else { - _groups = _userUtils.getUserGroups(_user); - if (_groups.isEmpty() && _user.getUGI() != null) { - String[] groups = _user.getUGI().getGroupNames(); - if (groups != null) { - _groups = Sets.newHashSet(groups); - } - } - _superUser = _userUtils.isSuperUser(_user); - } - return this; - } - AuthorizationSession table(String aTable) { - _table = aTable; - return this; - } - - AuthorizationSession columnFamily(String aColumnFamily) { - _columnFamily = aColumnFamily; - return this; - } - - AuthorizationSession column(String aColumn) { - _column = aColumn; - return this; - } - - void verifyBuildable() { - - String template = "Internal error: 
Incomplete/inconsisten state: [%s]. Can't build auth request!"; - if (_factory == null) { - String message = String.format(template, "factory is null"); - LOG.error(message); - throw new IllegalStateException(message); - } - if (_access == null || _access.isEmpty()) { - String message = String.format(template, "access is null"); - LOG.error(message); - throw new IllegalStateException(message); - } - if (_user == null) { - String message = String.format(template, "user is null"); - LOG.error(message); - throw new IllegalStateException(message); - } - if (isProvided(_columnFamily) && !isProvided(_table)) { - String message = String.format(template, "Table must be provided if column-family is provided"); - LOG.error(message); - throw new IllegalStateException(message); - } - if (isProvided(_column) && !isProvided(_columnFamily)) { - String message = String.format(template, "Column family must be provided if column is provided"); - LOG.error(message); - throw new IllegalStateException(message); - } - } - - void zapAuthorizationState() { - _request = null; - _result = null; - } - - boolean isProvided(String aString) { - return aString != null && !aString.isEmpty(); - } - - boolean isNameSpaceOperation() { - return StringUtils.equals(_operation, "createNamespace") || - StringUtils.equals(_operation, "deleteNamespace") || - StringUtils.equals(_operation, "modifyNamespace") || - StringUtils.equals(_operation, "setUserNamespaceQuota") || - StringUtils.equals(_operation, "setNamespaceQuota") || - StringUtils.equals(_operation, "getUserPermissionForNamespace"); - } - - private RangerAccessResource createHBaseResource() { - // TODO get this via a factory instead - RangerAccessResourceImpl resource = new RangerHBaseResource(); - // policy engine should deal sensibly with null/empty values, if any - if (isNameSpaceOperation() && StringUtils.isNotBlank(_otherInformation)) { - resource.setValue(RangerHBaseResource.KEY_TABLE, _otherInformation + 
RangerHBaseResource.NAMESPACE_SEPARATOR); - } else { - resource.setValue(RangerHBaseResource.KEY_TABLE, _table); - } - resource.setValue(RangerHBaseResource.KEY_COLUMN_FAMILY, _columnFamily); - resource.setValue(RangerHBaseResource.KEY_COLUMN, _column); - return resource; - } - - private RangerAccessRequest createRangerRequest() { - RangerAccessResource resource = createHBaseResource(); - String user = _userUtils.getUserAsString(_user); - RangerAccessRequestImpl request = new RangerAccessRequestImpl(resource, _access, user, _groups, null); - request.setAction(_operation); - request.setRequestData(_otherInformation); - request.setClientIPAddress(_remoteAddress); - request.setResourceMatchingScope(_resourceMatchingScope); - request.setAccessTime(new Date()); - request.setIgnoreDescendantDeny(_ignoreDescendantDeny); - return request; - } - - AuthorizationSession buildRequest() { - verifyBuildable(); - // session can be reused so reset its state - zapAuthorizationState(); - _request = createRangerRequest(); - if (LOG.isDebugEnabled()) { - LOG.debug("Built request: " + _request.toString()); - } - return this; - } - - AuthorizationSession authorize() { - if (LOG.isDebugEnabled()) { - LOG.debug("==> AuthorizationSession.authorize: " + getRequestMessage()); - } - - if (_request == null) { - String message = String.format("Invalid state transition: buildRequest() must be called before authorize(). This request would ultimately get denied.!"); - throw new IllegalStateException(message); - } else { - // ok to pass potentially null handler to policy engine. Null handler effectively suppresses the audit. 
- if (_auditHandler != null && _superUser) { - if (LOG.isDebugEnabled()) { - LOG.debug("Setting super-user override on audit handler"); - } - _auditHandler.setSuperUserOverride(_superUser); - } - _result = _authorizer.isAccessAllowed(_request, _auditHandler); - } - - if (LOG.isDebugEnabled()) { - boolean allowed = isAuthorized(); - String reason = getDenialReason(); - LOG.debug("<== AuthorizationSession.authorize: " + getLogMessage(allowed, reason)); - } - return this; - } - - void logCapturedEvents() { - if (_auditHandler != null) { - List events = _auditHandler.getCapturedEvents(); - _auditHandler.logAuthzAudits(events); - } - } - - void publishResults() throws AccessDeniedException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> AuthorizationSession.publishResults()"); - } - - boolean authorized = isAuthorized(); - if (_auditHandler != null && isAudited()) { - List events = null; - /* - * What we log to audit depends on authorization status. For success we log all accumulated events. In case of failure - * we log just the last set of audit messages as we only need to record the cause of overall denial. - */ - if (authorized) { - List theseEvents = _auditHandler.getCapturedEvents(); - if (theseEvents != null && !theseEvents.isEmpty()) { - events = theseEvents; - } - } else { - AuthzAuditEvent event = _auditHandler.getAndDiscardMostRecentEvent(); - if (event != null) { - events = Lists.newArrayList(event); - } - } - if (LOG.isDebugEnabled()) { - int size = events == null ? 0 : events.size(); - String auditMessage = events == null ? "" : events.toString(); - String message = String.format("Writing %d messages to audit: [%s]", size, auditMessage); - LOG.debug(message); - } - _auditHandler.logAuthzAudits(events); - } - if (!authorized) { - // and throw and exception... 
callers expect this behavior - String reason = getDenialReason(); - String message = getLogMessage(false, reason); - if (LOG.isDebugEnabled()) { - LOG.debug("<== AuthorizationSession.publishResults: throwing exception: " + message); - } - throw new AccessDeniedException("Insufficient permissions for user '" + _user.getName() + "' (action=" + _access + ")"); - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== AuthorizationSession.publishResults()"); - } - } - - boolean isAudited() { - - boolean audited = false; - if (_result == null) { - String message = String.format("Internal error: _result was null! Assuming no audit. Request[%s]", _request.toString()); - LOG.error(message); - } else { - audited = _result.getIsAudited(); - } - return audited; - } - - boolean isAuthorized() { - boolean allowed = false; - if (_result == null) { - String message = String.format("Internal error: _result was null! Returning false."); - LOG.error(message); - } else { - allowed = _result.getIsAllowed(); - } - if (!allowed && _superUser) { - if (LOG.isDebugEnabled()) { - LOG.debug("User [" + _user + "] is a superUser! Overriding policy engine's decision. Request is deemed authorized!"); - } - allowed = true; - } - return allowed; - } - - String getDenialReason() { - String reason = ""; - if (_result == null) { - String message = String.format("Internal error: _result was null! Returning empty reason."); - LOG.error(message); - } else { - boolean allowed = _result.getIsAllowed(); - if (!allowed) { - reason = _result.getReason(); - } - } - return reason; - } - - String requestToString() { - return MoreObjects.toStringHelper(_request.getClass()) - .add("operation", _operation) - .add("otherInformation", _otherInformation) - .add("access", _access) - .add("user", _user == null ? null : _user.getName()) - .add("groups", _groups) - .add("auditHandler", _auditHandler == null ? 
null : _auditHandler.getClass().getSimpleName()) - .add(RangerHBaseResource.KEY_TABLE, _table) - .add(RangerHBaseResource.KEY_COLUMN, _column) - .add(RangerHBaseResource.KEY_COLUMN_FAMILY, _columnFamily) - .add("resource-matching-scope", _resourceMatchingScope) - .add("ignoreDescendantDeny", _ignoreDescendantDeny) - .toString(); - } - - String getPrintableValue(String value) { - if (isProvided(value)) { - return value; - } else { - return ""; - } - } - - String getRequestMessage() { - String format = "Access[%s] by user[%s] belonging to groups[%s] to table[%s] for column-family[%s], column[%s] triggered by operation[%s], otherInformation[%s]"; - String user = _userUtils.getUserAsString(); - String message = String.format(format, getPrintableValue(_access), getPrintableValue(user), _groups, getPrintableValue(_table), - getPrintableValue(_columnFamily), getPrintableValue(_column), getPrintableValue(_operation), getPrintableValue(_otherInformation)); - return message; - } - - String getLogMessage(boolean allowed, String reason) { - String format = " %s: status[%s], reason[%s]"; - String message = String.format(format, getRequestMessage(), allowed ? "allowed" : "denied", reason); - return message; - } - - /** - * This method could potentially null out an earlier audit handler -- which effectively would suppress audits. 
- * @param anAuditHandler - * @return - */ - AuthorizationSession auditHandler(HbaseAuditHandler anAuditHandler) { - _auditHandler = anAuditHandler; - return this; - } - - AuthorizationSession resourceMatchingScope(RangerAccessRequest.ResourceMatchingScope scope) { - _resourceMatchingScope = scope; - return this; - } - - AuthorizationSession ignoreDescendantDeny(boolean ignoreDescendantDeny) { - _ignoreDescendantDeny = ignoreDescendantDeny; - return this; - } - - public boolean getPropertyIsColumnAuthOptimizationEnabled() { - return _authorizer.getPropertyIsColumnAuthOptimizationEnabled(); - } -} \ No newline at end of file + if (auditHandler != null && isAudited()) { + List events = null; + + /* + * What we log to audit depends on authorization status. For success we log all accumulated events. In case of failure + * we log just the last set of audit messages as we only need to record the cause of overall denial. + */ + if (authorized) { + List theseEvents = auditHandler.getCapturedEvents(); + + if (theseEvents != null && !theseEvents.isEmpty()) { + events = theseEvents; + } + } else { + AuthzAuditEvent event = auditHandler.getAndDiscardMostRecentEvent(); + + if (event != null) { + events = Lists.newArrayList(event); + } + } + + if (LOG.isDebugEnabled()) { + LOG.debug("Writing {} messages to audit: [{}]", (events == null ? 0 : events.size()), (events == null ? "" : events.toString())); + } + + auditHandler.logAuthzAudits(events); + } + + if (!authorized) { + // and throw and exception... 
callers expect this behavior + if (LOG.isDebugEnabled()) { + LOG.debug("<== AuthorizationSession.publishResults: throwing exception: {}", getLogMessage(false, getDenialReason())); + } + + throw new AccessDeniedException("Insufficient permissions for user '" + user.getName() + "' (action=" + access + ")"); + } + + LOG.debug("<== AuthorizationSession.publishResults()"); + } + + boolean isAudited() { + boolean audited = false; + + if (result == null) { + LOG.error("Internal error: _result was null! Assuming no audit. Request[{}]", request); + } else { + audited = result.getIsAudited(); + } + + return audited; + } + + boolean isAuthorized() { + boolean allowed = false; + if (result == null) { + LOG.error("Internal error: _result was null! Returning false."); + } else { + allowed = result.getIsAllowed(); + } + + if (!allowed && superUser) { + LOG.debug("User [{}] is a superUser! Overriding policy engine's decision. Request is deemed authorized!", user); + + allowed = true; + } + + return allowed; + } + + String getDenialReason() { + String reason = ""; + if (result == null) { + LOG.error("Internal error: _result was null! Returning empty reason."); + } else { + boolean allowed = result.getIsAllowed(); + + if (!allowed) { + reason = result.getReason(); + } + } + + return reason; + } + + String requestToString() { + return toString(); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(request.getClass()) + .add("operation", operation) + .add("otherInformation", otherInformation) + .add("access", access) + .add("user", user != null ? user.getName() : null) + .add("groups", groups) + .add("auditHandler", auditHandler != null ? 
auditHandler.getClass().getSimpleName() : null) + .add(RangerHBaseResource.KEY_TABLE, table) + .add(RangerHBaseResource.KEY_COLUMN, column) + .add(RangerHBaseResource.KEY_COLUMN_FAMILY, columnFamily) + .add("resource-matching-scope", resourceMatchingScope) + .add("ignoreDescendantDeny", ignoreDescendantDeny) + .toString(); + } + + String getPrintableValue(String value) { + if (isProvided(value)) { + return value; + } else { + return ""; + } + } + + String getRequestMessage() { + String format = "Access[%s] by user[%s] belonging to groups[%s] to table[%s] for column-family[%s], column[%s] triggered by operation[%s], otherInformation[%s]"; + String user = userUtils.getUserAsString(); + return String.format(format, getPrintableValue(access), getPrintableValue(user), groups, getPrintableValue(table), + getPrintableValue(columnFamily), getPrintableValue(column), getPrintableValue(operation), getPrintableValue(otherInformation)); + } + + String getLogMessage(boolean allowed, String reason) { + String format = " %s: status[%s], reason[%s]"; + return String.format(format, getRequestMessage(), allowed ? "allowed" : "denied", reason); + } + + /** + * This method could potentially null out an earlier audit handler -- which effectively would suppress audits. 
+ */ + AuthorizationSession auditHandler(HbaseAuditHandler anAuditHandler) { + auditHandler = anAuditHandler; + return this; + } + + AuthorizationSession resourceMatchingScope(RangerAccessRequest.ResourceMatchingScope scope) { + resourceMatchingScope = scope; + return this; + } + + AuthorizationSession ignoreDescendantDeny(boolean ignoreDescendantDeny) { + this.ignoreDescendantDeny = ignoreDescendantDeny; + return this; + } + + private RangerAccessResource createHBaseResource() { + // TODO get this via a factory instead + RangerAccessResourceImpl resource = new RangerHBaseResource(); + + // policy engine should deal sensibly with null/empty values, if any + if (isNameSpaceOperation() && StringUtils.isNotBlank(otherInformation)) { + resource.setValue(RangerHBaseResource.KEY_TABLE, otherInformation + RangerHBaseResource.NAMESPACE_SEPARATOR); + } else { + resource.setValue(RangerHBaseResource.KEY_TABLE, table); + } + + resource.setValue(RangerHBaseResource.KEY_COLUMN_FAMILY, columnFamily); + resource.setValue(RangerHBaseResource.KEY_COLUMN, column); + + return resource; + } + + private RangerAccessRequest createRangerRequest() { + RangerAccessResource resource = createHBaseResource(); + String user = userUtils.getUserAsString(this.user); + RangerAccessRequestImpl request = new RangerAccessRequestImpl(resource, access, user, groups, null); + + request.setAction(operation); + request.setRequestData(otherInformation); + request.setClientIPAddress(remoteAddress); + request.setResourceMatchingScope(resourceMatchingScope); + request.setAccessTime(new Date()); + request.setIgnoreDescendantDeny(ignoreDescendantDeny); + + return request; + } +} diff --git a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/ColumnIterator.java b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/ColumnIterator.java index 513aa3ba48..937f18fca6 100644 --- a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/ColumnIterator.java +++ 
b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/ColumnIterator.java @@ -18,78 +18,76 @@ */ package org.apache.ranger.authorization.hbase; -import java.util.Collection; -import java.util.Iterator; -import java.util.List; -import java.util.NoSuchElementException; -import java.util.Set; - import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.util.Bytes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.Set; + public class ColumnIterator implements Iterator { - // TODO write tests for this class - - private static final Logger LOG = LoggerFactory.getLogger(ColumnIterator.class.getName()); - Iterator _setIterator; - Iterator _listIterator; - - @SuppressWarnings("unchecked") - public ColumnIterator(Collection columnCollection) { - if (columnCollection != null) { - if (columnCollection instanceof Set) { - _setIterator = ((Set)columnCollection).iterator(); - } else if (columnCollection instanceof List) { - _listIterator = ((List)columnCollection).iterator(); - } else { // unexpected - // TODO make message better - LOG.error("Unexpected type " + columnCollection.getClass().getName() + " passed as value in column family collection"); - } - } - } + private static final Logger LOG = LoggerFactory.getLogger(ColumnIterator.class.getName()); + + Iterator setIterator; + Iterator listIterator; + + @SuppressWarnings("unchecked") + public ColumnIterator(Collection columnCollection) { + if (columnCollection != null) { + if (columnCollection instanceof Set) { + setIterator = ((Set) columnCollection).iterator(); + } else if (columnCollection instanceof List) { + listIterator = ((List) columnCollection).iterator(); + } else { // unexpected + // TODO make message better + LOG.error("Unexpected type {} passed as value in column family collection", 
columnCollection.getClass().getName()); + } + } + } + + @Override + public boolean hasNext() { + if (setIterator != null) { + return setIterator.hasNext(); + } else if (listIterator != null) { + return listIterator.hasNext(); + } else { + return false; + } + } + + /** + * Never returns a null value. Will return empty string in case of null value. + */ + @Override + public String next() { + final String value; + + if (setIterator != null) { + byte[] valueBytes = setIterator.next(); - @Override - public boolean hasNext() { - if (_setIterator != null) { - return _setIterator.hasNext(); - } else if (_listIterator != null) { - return _listIterator.hasNext(); - } else { - return false; - } - } + value = (valueBytes != null) ? Bytes.toString(valueBytes) : ""; + } else if (listIterator != null) { + Cell cell = listIterator.next(); + byte[] v = CellUtil.cloneQualifier(cell); - /** - * Never returns a null value. Will return empty string in case of null value. - */ - @Override - public String next() { - String value = ""; - if (_setIterator != null) { - byte[] valueBytes = _setIterator.next(); - if (valueBytes != null) { - value = Bytes.toString(valueBytes); - } - } else if (_listIterator != null) { - Cell cell = _listIterator.next(); - byte[] v = CellUtil.cloneQualifier(cell); - if (v != null) { - value = Bytes.toString(v); - } - } else { - // TODO make the error message better - throw new NoSuchElementException("Empty values passed in!"); - } - return value; - } + value = Bytes.toString(v); + } else { + // TODO make the error message better + throw new NoSuchElementException("Empty values passed in!"); + } - @Override - public void remove() { - // TODO make the error message better - throw new UnsupportedOperationException("Remove not supported from iterator!"); - } + return value; + } + @Override + public void remove() { + // TODO make the error message better + throw new UnsupportedOperationException("Remove not supported from iterator!"); + } } diff --git 
a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseAuditHandler.java b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseAuditHandler.java index 1344e29447..3ffe2afc17 100644 --- a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseAuditHandler.java +++ b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseAuditHandler.java @@ -18,36 +18,32 @@ */ package org.apache.ranger.authorization.hbase; -import java.util.Collection; -import java.util.List; - import org.apache.ranger.audit.model.AuthzAuditEvent; import org.apache.ranger.plugin.policyengine.RangerAccessResultProcessor; +import java.util.Collection; +import java.util.List; + public interface HbaseAuditHandler extends RangerAccessResultProcessor { + List getCapturedEvents(); + + void logAuthzAudits(Collection auditEvents); + + /** + * Discards and returns the last audit events captured by the audit handler. Last audit event should be the ones generated during the most recent authorization request. + * However, it won't be all of the audit events called during an authorize call since implementation class may not override the method which takes a list of responses -- in + * which case there would be several audit messages generated by one call but this only allows you to get last of those messages created during single auth request. + * After this call the last set of audit events won't be returned by getCapturedEvents. + */ + AuthzAuditEvent getAndDiscardMostRecentEvent(); + + /** + * This is a complement to getAndDiscardMostRecentEvent to set the most recent events. Often useful to un-pop audit messages that were take out. + */ + void setMostRecentEvent(AuthzAuditEvent capturedEvents); - List getCapturedEvents(); - - void logAuthzAudits(Collection auditEvents); - - /** - * Discards and returns the last audit events captured by the audit handler. Last audit event should be the ones generated during the most recent authorization request. 
- * However, it won't be all of the audit events called during an authorize call since implementation class may not override the method which takes a list of responses -- in - * which case there would be several audit messages generated by one call but this only allows you to get last of those messages created during single auth request. - * After this call the last set of audit events won't be returned by getCapturedEvents. - * @return - */ - AuthzAuditEvent getAndDiscardMostRecentEvent(); - - /** - * This is a complement to getAndDiscardMostRecentEvent to set the most recent events. Often useful to un-pop audit messages that were take out. - * @param capturedEvents - */ - void setMostRecentEvent(AuthzAuditEvent capturedEvents); - - /** - * Is audit handler being used in context of a access authorization of a superuser? - * @param override - */ - void setSuperUserOverride(boolean override); + /** + * Is audit handler being used in context of a access authorization of a superuser? + */ + void setSuperUserOverride(boolean override); } diff --git a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseAuditHandlerImpl.java b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseAuditHandlerImpl.java index 6710f69b2b..b0b0482d92 100644 --- a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseAuditHandlerImpl.java +++ b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseAuditHandlerImpl.java @@ -18,9 +18,6 @@ */ package org.apache.ranger.authorization.hbase; -import java.util.ArrayList; -import java.util.List; - import org.apache.ranger.audit.model.AuthzAuditEvent; import org.apache.ranger.plugin.audit.RangerDefaultAuditHandler; import org.apache.ranger.plugin.policyengine.RangerAccessRequest; @@ -28,138 +25,126 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.ArrayList; +import java.util.List; + public class HbaseAuditHandlerImpl extends RangerDefaultAuditHandler implements 
HbaseAuditHandler { + private static final Logger LOG = LoggerFactory.getLogger(HbaseAuditHandlerImpl.class); + + final List allEventsList = new ArrayList<>(); + + // we replace its contents anytime new audit events are generated. + AuthzAuditEvent mostRecentEvent; + boolean superUserOverride; + + @Override + public AuthzAuditEvent getAuthzEvents(RangerAccessResult result) { + LOG.debug("==> HbaseAuditHandlerImpl.getAuthzEvents({})", result); + + resetResourceForAudit(result.getAccessRequest()); + + AuthzAuditEvent event = super.getAuthzEvents(result); + + // first accumulate last set of events and then capture these as the most recent ones + if (mostRecentEvent != null) { + LOG.debug("getAuthzEvents: got one event from default audit handler"); + + allEventsList.add(mostRecentEvent); + } else { + LOG.debug("getAuthzEvents: no event produced by default audit handler"); + } + + mostRecentEvent = event; + + LOG.debug("==> getAuthzEvents: mostRecentEvent:{}", mostRecentEvent); + + // We return null because we don't want default audit handler to audit anything! 
+ LOG.debug("<== HbaseAuditHandlerImpl.getAuthzEvents({}): null", result); + + return null; + } + + @Override + public List getCapturedEvents() { + LOG.debug("==> HbaseAuditHandlerImpl.getCapturedEvents()"); + + // construct a new collection since we don't want to lose track of which were the most recent events; + List result = new ArrayList<>(allEventsList); + + if (mostRecentEvent != null) { + result.add(mostRecentEvent); + } + + applySuperUserOverride(result); + + LOG.debug("<== HbaseAuditHandlerImpl.getAuthzEvents(): count[{}] :result : {}", result.size(), result); + + return result; + } + + @Override + public AuthzAuditEvent getAndDiscardMostRecentEvent() { + LOG.debug("==> HbaseAuditHandlerImpl.getAndDiscardMostRecentEvent():"); + + AuthzAuditEvent result = mostRecentEvent; + + applySuperUserOverride(result); + + mostRecentEvent = null; + + LOG.debug("<== HbaseAuditHandlerImpl.getAndDiscardMostRecentEvent(): {}", result); + + return result; + } + + @Override + public void setMostRecentEvent(AuthzAuditEvent event) { + LOG.debug("==> HbaseAuditHandlerImpl.setMostRecentEvent({})", event); + + mostRecentEvent = event; + + LOG.debug("<== HbaseAuditHandlerImpl.setMostRecentEvent(...)"); + } + + @Override + public void setSuperUserOverride(boolean override) { + LOG.debug("==> HbaseAuditHandlerImpl.setSuperUserOverride({})", override); + + superUserOverride = override; + + LOG.debug("<== HbaseAuditHandlerImpl.setSuperUserOverride(...)"); + } + + void applySuperUserOverride(List events) { + LOG.debug("==> HbaseAuditHandlerImpl.applySuperUserOverride({})", events); + + for (AuthzAuditEvent event : events) { + applySuperUserOverride(event); + } + + LOG.debug("<== HbaseAuditHandlerImpl.applySuperUserOverride(...)"); + } + + void applySuperUserOverride(AuthzAuditEvent event) { + LOG.debug("==> HbaseAuditHandlerImpl.applySuperUserOverride({})", event); + + if (event != null && superUserOverride) { + event.setAccessResult((short) 1); + event.setPolicyId(-1); + } + + 
LOG.debug("<== HbaseAuditHandlerImpl.applySuperUserOverride(...)"); + } + + private void resetResourceForAudit(RangerAccessRequest request) { + LOG.debug("==> HbaseAuditHandlerImpl.resetResourceForAudit({})", request); + + if (request != null && request.getResource() instanceof RangerHBaseResource) { + RangerHBaseResource hbaseResource = (RangerHBaseResource) request.getResource(); + + hbaseResource.resetValue(RangerHBaseResource.KEY_TABLE); + } - private static final Logger LOG = LoggerFactory.getLogger(HbaseAuditHandlerImpl.class); - static final List _EmptyList = new ArrayList(); - final List _allEvents = new ArrayList(); - // we replace its contents anytime new audit events are generated. - AuthzAuditEvent _mostRecentEvent = null; - boolean _superUserOverride = false; - - @Override - public AuthzAuditEvent getAuthzEvents(RangerAccessResult result) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> HbaseAuditHandlerImpl.getAuthzEvents(" + result + ")"); - } - - resetResourceForAudit(result.getAccessRequest()); - AuthzAuditEvent event = super.getAuthzEvents(result); - // first accumulate last set of events and then capture these as the most recent ones - if (_mostRecentEvent != null) { - LOG.debug("getAuthzEvents: got one event from default audit handler"); - _allEvents.add(_mostRecentEvent); - } else { - LOG.debug("getAuthzEvents: no event produced by default audit handler"); - } - _mostRecentEvent = event; - - if(LOG.isDebugEnabled()) { - LOG.debug("==> getAuthzEvents: mostRecentEvent:" + _mostRecentEvent); - } - // We return null because we don't want default audit handler to audit anything! 
- if(LOG.isDebugEnabled()) { - LOG.debug("<== HbaseAuditHandlerImpl.getAuthzEvents(" + result + "): null"); - } - return null; - } - - @Override - public List getCapturedEvents() { - if(LOG.isDebugEnabled()) { - LOG.debug("==> HbaseAuditHandlerImpl.getCapturedEvents()"); - } - - // construct a new collection since we don't want to lose track of which were the most recent events; - List result = new ArrayList(_allEvents); - if (_mostRecentEvent != null) { - result.add(_mostRecentEvent); - } - applySuperUserOverride(result); - - if(LOG.isDebugEnabled()) { - LOG.debug("<== HbaseAuditHandlerImpl.getAuthzEvents(): count[" + result.size() + "] :result : " + result); - } - return result; - } - - @Override - public AuthzAuditEvent getAndDiscardMostRecentEvent() { - if(LOG.isDebugEnabled()) { - LOG.debug("==> HbaseAuditHandlerImpl.getAndDiscardMostRecentEvent():"); - } - - AuthzAuditEvent result = _mostRecentEvent; - applySuperUserOverride(result); - _mostRecentEvent = null; - - if(LOG.isDebugEnabled()) { - LOG.debug("<== HbaseAuditHandlerImpl.getAndDiscardMostRecentEvent(): " + result); - } - return result; - } - - @Override - public void setMostRecentEvent(AuthzAuditEvent event) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> HbaseAuditHandlerImpl.setMostRecentEvent(" + event + ")"); - } - _mostRecentEvent = event; - if(LOG.isDebugEnabled()) { - LOG.debug("<== HbaseAuditHandlerImpl.setMostRecentEvent(...)"); - } - } - - @Override - public void setSuperUserOverride(boolean override) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> HbaseAuditHandlerImpl.setSuperUserOverride(" + override + ")"); - } - - _superUserOverride = override; - - if(LOG.isDebugEnabled()) { - LOG.debug("<== HbaseAuditHandlerImpl.setSuperUserOverride(...)"); - } - } - - void applySuperUserOverride(List events) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> HbaseAuditHandlerImpl.applySuperUserOverride(" + events + ")"); - } - - for (AuthzAuditEvent event : events) { - applySuperUserOverride(event); - } 
- - if(LOG.isDebugEnabled()) { - LOG.debug("<== HbaseAuditHandlerImpl.applySuperUserOverride(...)"); - } - } - - void applySuperUserOverride(AuthzAuditEvent event) { - if(LOG.isDebugEnabled()) { - LOG.debug("==> HbaseAuditHandlerImpl.applySuperUserOverride(" + event + ")"); - } - if (event != null && _superUserOverride) { - event.setAccessResult((short) 1); - event.setPolicyId(-1); - } - if(LOG.isDebugEnabled()) { - LOG.debug("<== HbaseAuditHandlerImpl.applySuperUserOverride(...)"); - } - } - - private void resetResourceForAudit(RangerAccessRequest request) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HbaseAuditHandlerImpl.resetResourceForAudit(" + request + ")"); - } - if (request != null && request.getResource() instanceof RangerHBaseResource) { - RangerHBaseResource hbaseResource = (RangerHBaseResource) request.getResource(); - hbaseResource.resetValue(RangerHBaseResource.KEY_TABLE); - } - if(LOG.isDebugEnabled()) { - LOG.debug("<== HbaseAuditHandlerImpl.resetResourceForAudit(" + request + ")"); - } - } + LOG.debug("<== HbaseAuditHandlerImpl.resetResourceForAudit({})", request); + } } diff --git a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseAuthUtils.java b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseAuthUtils.java index c9c598f6f9..f72731a1b3 100644 --- a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseAuthUtils.java +++ b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseAuthUtils.java @@ -22,21 +22,21 @@ import org.apache.hadoop.hbase.security.access.Permission.Action; public interface HbaseAuthUtils { - String ACCESS_TYPE_READ = "read"; - String ACCESS_TYPE_WRITE = "write"; - String ACCESS_TYPE_CREATE = "create"; - String ACCESS_TYPE_ADMIN = "admin"; - String ACCESS_TYPE_EXECUTE = "execute"; + String ACCESS_TYPE_READ = "read"; + String ACCESS_TYPE_WRITE = "write"; + String ACCESS_TYPE_CREATE = "create"; + String ACCESS_TYPE_ADMIN = "admin"; + String 
ACCESS_TYPE_EXECUTE = "execute"; - String getAccess(Action action); + String getAccess(Action action); - String getActionName(String access); + String getActionName(String access); - boolean isReadAccess(String access); - - boolean isWriteAccess(String access); + boolean isReadAccess(String access); - boolean isExecuteAccess(String access); + boolean isWriteAccess(String access); - String getTable(RegionCoprocessorEnvironment regionServerEnv); + boolean isExecuteAccess(String access); + + String getTable(RegionCoprocessorEnvironment regionServerEnv); } diff --git a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseAuthUtilsImpl.java b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseAuthUtilsImpl.java index 0b2b64b936..c4527e1afa 100644 --- a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseAuthUtilsImpl.java +++ b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseAuthUtilsImpl.java @@ -26,68 +26,67 @@ import org.slf4j.LoggerFactory; public class HbaseAuthUtilsImpl implements HbaseAuthUtils { + private static final Logger LOG = LoggerFactory.getLogger(HbaseAuthUtilsImpl.class.getName()); - private static final Logger LOG = LoggerFactory.getLogger(HbaseAuthUtilsImpl.class.getName()); - @Override - public String getAccess(Action action) { - switch(action) { - case READ: - return ACCESS_TYPE_READ; - case WRITE: - return ACCESS_TYPE_WRITE; - case CREATE: - return ACCESS_TYPE_CREATE; - case ADMIN: - return ACCESS_TYPE_ADMIN; - case EXEC: - return ACCESS_TYPE_EXECUTE; - default: - return action.name().toLowerCase(); - } - } + @Override + public String getAccess(Action action) { + switch (action) { + case READ: + return ACCESS_TYPE_READ; + case WRITE: + return ACCESS_TYPE_WRITE; + case CREATE: + return ACCESS_TYPE_CREATE; + case ADMIN: + return ACCESS_TYPE_ADMIN; + case EXEC: + return ACCESS_TYPE_EXECUTE; + default: + return action.name().toLowerCase(); + } + } - @Override - public boolean 
isReadAccess(String access) { - return getAccess(Action.READ).equals(access); - } + @Override + public String getActionName(String access) { + switch (access) { + case ACCESS_TYPE_READ: + return Action.READ.name(); + case ACCESS_TYPE_WRITE: + return Action.WRITE.name(); + case ACCESS_TYPE_CREATE: + return Action.CREATE.name(); + case ACCESS_TYPE_ADMIN: + return Action.ADMIN.name(); + case ACCESS_TYPE_EXECUTE: + return Action.EXEC.name(); + default: + return access.toUpperCase(); + } + } - @Override - public boolean isWriteAccess(String access) { - return getAccess(Action.WRITE).equals(access); - } + @Override + public boolean isReadAccess(String access) { + return getAccess(Action.READ).equals(access); + } - @Override - public boolean isExecuteAccess(String access) { - return getAccess(Action.EXEC).equals(access); - } + @Override + public boolean isWriteAccess(String access) { + return getAccess(Action.WRITE).equals(access); + } - @Override - public String getTable(RegionCoprocessorEnvironment regionServerEnv) { - RegionInfo hri = regionServerEnv.getRegion().getRegionInfo(); - byte[] tableName = hri.getTable().getName(); - String tableNameStr = Bytes.toString(tableName); - if (LOG.isDebugEnabled()) { - String message = String.format("getTable: Returning tablename[%s]", tableNameStr); - LOG.debug(message); - } - return tableNameStr; - } + @Override + public boolean isExecuteAccess(String access) { + return getAccess(Action.EXEC).equals(access); + } - @Override - public String getActionName(String access) { - switch(access) { - case ACCESS_TYPE_READ: - return Action.READ.name(); - case ACCESS_TYPE_WRITE: - return Action.WRITE.name(); - case ACCESS_TYPE_CREATE: - return Action.CREATE.name(); - case ACCESS_TYPE_ADMIN: - return Action.ADMIN.name(); - case ACCESS_TYPE_EXECUTE: - return Action.EXEC.name(); - default: - return access.toUpperCase(); - } - } + @Override + public String getTable(RegionCoprocessorEnvironment regionServerEnv) { + RegionInfo hri = 
regionServerEnv.getRegion().getRegionInfo(); + byte[] tableName = hri.getTable().getName(); + String tableNameStr = Bytes.toString(tableName); + + LOG.debug("getTable: Returning tableName[{}]", tableNameStr); + + return tableNameStr; + } } diff --git a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseConstants.java b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseConstants.java index 22c6983e7b..1a10435d0f 100644 --- a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseConstants.java +++ b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseConstants.java @@ -35,4 +35,6 @@ public class HbaseConstants { public static final String FAMILIES = "families"; public static final String SINGLE_QUOTES = "'"; public static final String ARROW = "=>"; + + private HbaseConstants() {} } diff --git a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseFactory.java b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseFactory.java index 1322e0f938..2c7dcf8394 100644 --- a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseFactory.java +++ b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseFactory.java @@ -20,38 +20,36 @@ import org.apache.hadoop.conf.Configuration; - - // TODO remove this in favor of Guice DI public class HbaseFactory { - - static final HbaseUserUtils _UserUtils = new HbaseUserUtilsImpl(); - static final HbaseAuthUtils _AuthUtils = new HbaseAuthUtilsImpl(); - static final HbaseFactory _Factory = new HbaseFactory(); - /** - * This is a singleton - */ - private HbaseFactory() { - // TODO remove this clutch to enforce singleton by moving to a DI framework - } - - static HbaseFactory getInstance() { - return _Factory; - } - - HbaseAuthUtils getAuthUtils() { - return _AuthUtils; - } - - HbaseUserUtils getUserUtils() { - return _UserUtils; - } - - HbaseAuditHandler getAuditHandler() { - return new HbaseAuditHandlerImpl(); - 
} - - static void initialize(Configuration conf) { - HbaseUserUtilsImpl.initiailize(conf); - } + static final HbaseUserUtils userUtils = new HbaseUserUtilsImpl(); + static final HbaseAuthUtils authUtils = new HbaseAuthUtilsImpl(); + static final HbaseFactory factory = new HbaseFactory(); + + /** + * This is a singleton + */ + private HbaseFactory() { + // TODO remove this clutch to enforce singleton by moving to a DI framework + } + + static HbaseFactory getInstance() { + return factory; + } + + static void initialize(Configuration conf) { + HbaseUserUtilsImpl.initialize(conf); + } + + HbaseAuthUtils getAuthUtils() { + return authUtils; + } + + HbaseUserUtils getUserUtils() { + return userUtils; + } + + HbaseAuditHandler getAuditHandler() { + return new HbaseAuditHandlerImpl(); + } } diff --git a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseUserUtils.java b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseUserUtils.java index e2f1efdb58..ab100e2b57 100644 --- a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseUserUtils.java +++ b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseUserUtils.java @@ -18,42 +18,33 @@ */ package org.apache.ranger.authorization.hbase; -import java.util.Set; - import org.apache.hadoop.hbase.security.User; +import java.util.Set; + public interface HbaseUserUtils { - /** - * Returns user's short name or empty string if null is passed in. - * @param user - * @return - */ - String getUserAsString(User user); + /** + * Returns user's short name or empty string if null is passed in. + */ + String getUserAsString(User user); + + /** + * Returns the groups to which user belongs to as known to User object. For null values it returns an empty set. + */ + Set getUserGroups(User user); - /** - * Returns the groups to which user belongs to as known to User object. For null values it returns an empty set. 
- * @param user - * @return - */ - Set getUserGroups(User user); + /** + * May return null in case of an error + */ + User getUser(); - /** - * May return null in case of an error - * @return - */ - User getUser(); - - /** - * Returns the user short name. Returns an empty string if Hbase User of context can't be found. - * @param request - * @return - */ - String getUserAsString(); + /** + * Returns the user short name. Returns an empty string if Hbase User of context can't be found. + */ + String getUserAsString(); - /** - * Returns true of specified user is configured to be a super user - * @param user - * @return - */ - boolean isSuperUser(User user); + /** + * Returns true if the specified user is configured to be a superuser + */ + boolean isSuperUser(User user); } diff --git a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseUserUtilsImpl.java b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseUserUtilsImpl.java index 32b7ccac4c..026b77098c 100644 --- a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseUserUtilsImpl.java +++ b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/HbaseUserUtilsImpl.java @@ -18,6 +18,12 @@ */ package org.apache.ranger.authorization.hbase; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.ipc.RpcServer; +import org.apache.hadoop.hbase.security.User; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.IOException; import java.util.Arrays; import java.util.HashSet; @@ -26,112 +32,112 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.ipc.RpcServer; -import org.apache.hadoop.hbase.security.User; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - public class HbaseUserUtilsImpl implements HbaseUserUtils { + private static final Logger LOG = 
LoggerFactory.getLogger(HbaseUserUtilsImpl.class.getName()); + + // only to detect problems with initialization order, not for thread-safety. + static final AtomicBoolean isInitialized = new AtomicBoolean(false); + // should never be null + static final AtomicReference> superUsers = new AtomicReference<>(new HashSet<>()); + + private static final String SUPERUSER_CONFIG_PROP = "hbase.superuser"; + + public static void initialize(Configuration conf) { + if (isInitialized.get()) { + LOG.warn("HbaseUserUtilsImpl.initialize: Unexpected: initialization called more than once!"); + } else { + if (conf == null) { + LOG.error("HbaseUserUtilsImpl.initialize: Internal error: called with null conf value!"); + } else { + String[] users = conf.getStrings(SUPERUSER_CONFIG_PROP); + + if (users != null && users.length > 0) { + Set superUsers = new HashSet<>(users.length); + + for (String user : users) { + user = user.trim(); + + LOG.info("HbaseUserUtilsImpl.initialize: Adding Super User({})", user); + + superUsers.add(user); + } + + HbaseUserUtilsImpl.superUsers.set(superUsers); + } + } + + isInitialized.set(true); + } + } + + @Override + public String getUserAsString(User user) { + if (user == null) { + throw new IllegalArgumentException("User is null!"); + } else { + return user.getShortName(); + } + } + + @Override + public Set getUserGroups(User user) { + if (user == null) { + throw new IllegalArgumentException("User is null!"); + } else { + String[] groupsArray = user.getGroupNames(); + + return new HashSet<>(Arrays.asList(groupsArray)); + } + } + + @Override + public User getUser() { + // current implementation does not use the request object! 
+ User user = null; + + try { + user = RpcServer.getRequestUser().get(); + } catch (NoSuchElementException e) { + LOG.info("Unable to get request user"); + } + + if (user == null) { + try { + user = User.getCurrent(); + } catch (IOException e) { + LOG.error("Unable to get current user: User.getCurrent() threw IOException"); + } + } + + return user; + } + + @Override + public String getUserAsString() { + User user = getUser(); + + if (user == null) { + return ""; + } else { + return getUserAsString(user); + } + } + + /** + * No user can be a superuser till the class is properly initialized. Once class is properly initialized, users specified in + * configuration would be reported as super users. + */ + @Override + public boolean isSuperUser(User user) { + if (!isInitialized.get()) { + LOG.error("HbaseUserUtilsImpl.isSuperUser: Internal error: called before initialization was complete!"); + } + + Set superUsers = HbaseUserUtilsImpl.superUsers.get(); // can never be null + boolean isSuper = superUsers.contains(user.getShortName()); + + LOG.debug("IsSuperCheck on [{}] returns [{}]", user.getShortName(), isSuper); - private static final Logger LOG = LoggerFactory.getLogger(HbaseUserUtilsImpl.class.getName()); - private static final String SUPERUSER_CONFIG_PROP = "hbase.superuser"; - - // only to detect problems with initialization order, not for thread-safety. 
- static final AtomicBoolean _Initialized = new AtomicBoolean(false); - // should never be null - static final AtomicReference> _SuperUsers = new AtomicReference>(new HashSet()); - - public static void initiailize(Configuration conf) { - - if (_Initialized.get()) { - LOG.warn("HbaseUserUtilsImpl.initialize: Unexpected: initialization called more than once!"); - } else { - if (conf == null) { - LOG.error("HbaseUserUtilsImpl.initialize: Internal error: called with null conf value!"); - } else { - String[] users = conf.getStrings(SUPERUSER_CONFIG_PROP); - if (users != null && users.length > 0) { - Set superUsers = new HashSet(users.length); - for (String user : users) { - user = user.trim(); - LOG.info("HbaseUserUtilsImpl.initialize: Adding Super User(" + user + ")"); - superUsers.add(user); - } - _SuperUsers.set(superUsers); - } - } - _Initialized.set(true); - } - } - - @Override - public String getUserAsString(User user) { - if (user == null) { - throw new IllegalArgumentException("User is null!"); - } - else { - return user.getShortName(); - } - } - - @Override - public Set getUserGroups(User user) { - if (user == null) { - throw new IllegalArgumentException("User is null!"); - } - else { - String[] groupsArray = user.getGroupNames(); - return new HashSet(Arrays.asList(groupsArray)); - } - } - - @Override - public User getUser() { - // current implementation does not use the request object! 
- User user = null; - try { - user = RpcServer.getRequestUser().get(); - } catch (NoSuchElementException e) { - LOG.info("Unable to get request user"); - } - if (user == null) { - try { - user = User.getCurrent(); - } catch (IOException e) { - LOG.error("Unable to get current user: User.getCurrent() threw IOException"); - user = null; - } - } - return user; - } - - - @Override - public String getUserAsString() { - User user = getUser(); - if (user == null) { - return ""; - } - else { - return getUserAsString(user); - } - } - - /** - * No user can be a superuser till the class is properly initialized. Once class is properly initialized, users specified in - * configuration would be reported as super users. - */ - @Override - public boolean isSuperUser(User user) { - if (!_Initialized.get()) { - LOG.error("HbaseUserUtilsImpl.isSuperUser: Internal error: called before initialization was complete!"); - } - Set superUsers = _SuperUsers.get(); // can never be null - boolean isSuper = superUsers.contains(user.getShortName()); - if (LOG.isDebugEnabled()) { - LOG.debug("IsSuperCheck on [" + user.getShortName() + "] returns [" + isSuper + "]"); - } - return isSuper; - } + return isSuper; + } } diff --git a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerAuthorizationCoprocessor.java b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerAuthorizationCoprocessor.java index 213e15d496..35effffc0d 100644 --- a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerAuthorizationCoprocessor.java +++ b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerAuthorizationCoprocessor.java @@ -17,1966 +17,2025 @@ * under the License. 
*/ package org.apache.ranger.authorization.hbase; -import java.io.IOException; -import java.net.InetAddress; -import java.util.*; -import java.util.Map.Entry; -import java.security.PrivilegedExceptionAction; import com.google.protobuf.Message; +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; import com.google.protobuf.Service; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.MapUtils; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.*; -import org.apache.hadoop.hbase.client.*; -import org.apache.hadoop.hbase.coprocessor.*; +import org.apache.hadoop.hbase.AuthUtil; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CompareOperator; +import org.apache.hadoop.hbase.Coprocessor; +import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.ServerName; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.client.Append; +import org.apache.hadoop.hbase.client.BalanceRequest; +import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; +import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Durability; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Increment; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.RegionInfo; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.SnapshotDescription; +import org.apache.hadoop.hbase.client.TableDescriptor; +import org.apache.hadoop.hbase.client.TableDescriptorBuilder; +import org.apache.hadoop.hbase.coprocessor.BulkLoadObserver; +import 
org.apache.hadoop.hbase.coprocessor.CoprocessorException; +import org.apache.hadoop.hbase.coprocessor.EndpointObserver; +import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor; +import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment; +import org.apache.hadoop.hbase.coprocessor.MasterObserver; +import org.apache.hadoop.hbase.coprocessor.ObserverContext; +import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor; +import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; +import org.apache.hadoop.hbase.coprocessor.RegionObserver; +import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor; +import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment; +import org.apache.hadoop.hbase.coprocessor.RegionServerObserver; import org.apache.hadoop.hbase.filter.ByteArrayComparable; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.FilterList; import org.apache.hadoop.hbase.ipc.RpcServer; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos; import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService; import org.apache.hadoop.hbase.quotas.GlobalQuotaSettings; -import org.apache.hadoop.hbase.regionserver.*; +import org.apache.hadoop.hbase.regionserver.BloomType; +import org.apache.hadoop.hbase.regionserver.FlushLifeCycleTracker; +import org.apache.hadoop.hbase.regionserver.InternalScanner; +import org.apache.hadoop.hbase.regionserver.Region; +import org.apache.hadoop.hbase.regionserver.RegionScanner; +import org.apache.hadoop.hbase.regionserver.ScanType; +import org.apache.hadoop.hbase.regionserver.Store; +import org.apache.hadoop.hbase.regionserver.StoreFile; import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker; import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest; import org.apache.hadoop.hbase.security.AccessDeniedException; 
import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.UserProvider; -import org.apache.hadoop.hbase.security.access.*; +import org.apache.hadoop.hbase.security.access.AccessControlConstants; +import org.apache.hadoop.hbase.security.access.AccessControlUtil; +import org.apache.hadoop.hbase.security.access.NamespacePermission; +import org.apache.hadoop.hbase.security.access.Permission; import org.apache.hadoop.hbase.security.access.Permission.Action; -import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.security.access.PermissionStorage; +import org.apache.hadoop.hbase.security.access.TablePermission; +import org.apache.hadoop.hbase.security.access.UserPermission; import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.wal.WALEdit; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; -import org.apache.hadoop.thirdparty.com.google.common.collect.Sets; +import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.thirdparty.com.google.common.base.MoreObjects; +import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import org.apache.hadoop.thirdparty.com.google.common.collect.MapMaker; -import org.apache.hadoop.security.AccessControlException; +import org.apache.hadoop.thirdparty.com.google.common.collect.Sets; import org.apache.ranger.audit.model.AuthzAuditEvent; import org.apache.ranger.audit.provider.AuditProviderFactory; import org.apache.ranger.authorization.hadoop.constants.RangerHadoopConstants; import org.apache.ranger.authorization.utils.StringUtil; import org.apache.ranger.plugin.audit.RangerDefaultAuditHandler; -import 
org.apache.ranger.plugin.model.RangerPolicy; import org.apache.ranger.plugin.policyengine.RangerAccessRequest; -import org.apache.ranger.plugin.policyengine.RangerAccessResult; -import org.apache.ranger.plugin.policyengine.RangerAccessResultProcessor; import org.apache.ranger.plugin.policyengine.RangerAccessRequestImpl; import org.apache.ranger.plugin.policyengine.RangerAccessResourceImpl; +import org.apache.ranger.plugin.policyengine.RangerAccessResultProcessor; +import org.apache.ranger.plugin.policyengine.RangerPolicyEngine; import org.apache.ranger.plugin.policyengine.RangerResourceACLs; import org.apache.ranger.plugin.policyengine.RangerResourceACLs.AccessResult; -import org.apache.ranger.plugin.policyengine.RangerPolicyEngine; import org.apache.ranger.plugin.policyevaluator.RangerPolicyEvaluator; -import org.apache.ranger.plugin.service.RangerBasePlugin; import org.apache.ranger.plugin.util.GrantRevokeRequest; -import org.apache.ranger.plugin.util.ServicePolicies; +import org.apache.ranger.plugin.util.RangerPerfTracer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import org.apache.ranger.plugin.util.RangerPerfTracer; +import java.io.IOException; +import java.net.InetAddress; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.NavigableSet; +import java.util.NoSuchElementException; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; public class RangerAuthorizationCoprocessor implements AccessControlService.Interface, RegionCoprocessor, MasterCoprocessor, RegionServerCoprocessor, MasterObserver, RegionObserver, RegionServerObserver, EndpointObserver, BulkLoadObserver, 
Coprocessor { - private static final Logger LOG = LoggerFactory.getLogger(RangerAuthorizationCoprocessor.class.getName()); - private static final Logger PERF_HBASEAUTH_REQUEST_LOG = RangerPerfTracer.getPerfLogger("hbaseauth.request"); - private static boolean UpdateRangerPoliciesOnGrantRevoke = RangerHadoopConstants.HBASE_UPDATE_RANGER_POLICIES_ON_GRANT_REVOKE_DEFAULT_VALUE; - private static final String GROUP_PREFIX = "@"; - - private UserProvider userProvider; - private RegionCoprocessorEnvironment regionEnv; - private Map scannerOwners = new MapMaker().weakKeys().makeMap(); - /** if we should check EXEC permissions */ - private boolean shouldCheckExecPermission; - - /* - * These are package level only for testability and aren't meant to be exposed outside via getters/setters or made available to derived classes. - */ - final HbaseFactory _factory = HbaseFactory.getInstance(); - final HbaseUserUtils _userUtils = _factory.getUserUtils(); - final HbaseAuthUtils _authUtils = _factory.getAuthUtils(); - private static volatile RangerHBasePlugin hbasePlugin = null; - - public void setColumnAuthOptimizationEnabled(boolean enable) throws Exception { - RangerHBasePlugin plugin = hbasePlugin; - if (plugin!=null) { - plugin.setColumnAuthOptimizationEnabled(enable); - } - else { - throw new Exception("Error while enabling column auth optimization"); - } - } - - // Utilities Methods - protected byte[] getTableName(RegionCoprocessorEnvironment e) { - Region region = e.getRegion(); - byte[] tableName = null; - if (region != null) { - RegionInfo regionInfo = region.getRegionInfo(); - if (regionInfo != null) { - tableName = regionInfo.getTable().getName(); - } - } - return tableName; - } - protected void requireSystemOrSuperUser(Configuration conf, ObserverContext ctx) throws IOException { - User user = User.getCurrent(); - if (user == null) { - throw new IOException("Unable to obtain the current user, authorization checks for internal operations will not work correctly!"); - } - 
String systemUser = user.getShortName(); - User activeUser = getActiveUser(ctx); - if (!Objects.equals(systemUser, activeUser.getShortName()) && !_userUtils.isSuperUser(activeUser)) { - throw new AccessDeniedException("User '" + user.getShortName() + "is not system or super user."); - } - } - protected boolean isSpecialTable(RegionInfo regionInfo) { - return isSpecialTable(regionInfo.getTable().getName()); - } - protected boolean isSpecialTable(byte[] tableName) { - return isSpecialTable(Bytes.toString(tableName)); - } - protected boolean isSpecialTable(String input) { - final String[] specialTables = new String[] { "hbase:meta", "-ROOT-", ".META.", "hbase:acl", "hbase:namespace"}; - for (String specialTable : specialTables ) { - if (specialTable.equals(input)) { - return true; - } - } - - return false; - } - protected boolean isAccessForMetaTables(RegionCoprocessorEnvironment env) { - RegionInfo hri = env.getRegion().getRegionInfo(); - - if (hri.isMetaRegion()) { - return true; - } else { - return false; - } - } - - /* - private User getActiveUser() { - User user = null; - try { - user = RpcServer.getRequestUser().get(); - } catch (NoSuchElementException e) { - LOG.info("Unable to get request user"); - } - - if (user == null) { - // for non-rpc handling, fallback to system user - try { - user = User.getCurrent(); - } catch (IOException e) { - LOG.error("Unable to find the current user"); - user = null; - } - } - return user; - }*/ - - - private User getActiveUser(ObserverContext ctx) { - User user = null; - if (ctx != null) { - try { - Optional optionalUser = ctx.getCaller(); - user = optionalUser.isPresent() ? 
(User) optionalUser.get() : this.userProvider.getCurrent(); - } catch(Exception e){ - LOG.info("Unable to get request user using context" + ctx); - } - } - - if (user == null) { - try { - user = RpcServer.getRequestUser().get(); - } catch (NoSuchElementException e) { - LOG.info("Unable to get request user via RPCServer"); - } - } - - if (user == null) { - // for non-rpc handling, fallback to system user - try { - user = User.getCurrent(); - } catch (IOException e) { - LOG.error("Unable to find the current user"); - user = null; - } - } - return user; - } - - - private String getRemoteAddress() { - InetAddress remoteAddr = null; - try { - remoteAddr = RpcServer.getRemoteAddress().get(); - } catch (NoSuchElementException e) { - // HBase services will sometimes make calls as a part of - // internal operations. It is not worth logging when we do - // not have a remote address (a client's remote address). - LOG.trace("Unable to get remote Address"); - } - - if(remoteAddr == null) { - remoteAddr = RpcServer.getRemoteIp(); - } - - String strAddr = remoteAddr != null ? remoteAddr.getHostAddress() : null; - - return strAddr; - } - - // Methods that are used within the CoProcessor - private void requireScannerOwner(ObserverContext ctx, InternalScanner s) throws AccessDeniedException { - if (!RpcServer.isInRpcCallContext()) { - return; - } - - User user = getActiveUser(ctx); - String requestUserName = user.getShortName(); - String owner = scannerOwners.get(s); - if (owner != null && !owner.equals(requestUserName)) { - throw new AccessDeniedException("User '"+ requestUserName +"' is not the scanner owner!"); - } - } - /** - * @param families - * @return empty map if families is null, would never have empty or null keys, would never have null values, values could be empty (non-null) set - */ - Map> getColumnFamilies(Map> families) { - if (families == null) { - // null families map passed. Ok, returning empty map. 
- return Collections.>emptyMap(); - } - Map> result = new HashMap>(); - for (Map.Entry> anEntry : families.entrySet()) { - byte[] familyBytes = anEntry.getKey(); - String family = Bytes.toString(familyBytes); - if (family == null || family.isEmpty()) { - LOG.error("Unexpected Input: got null or empty column family (key) in families map! Ignoring..."); - } else { - Collection columnCollection = anEntry.getValue(); - if (CollectionUtils.isEmpty(columnCollection)) { - // family points to null map, OK. - // if column auth disabled, then also empty set is fine - if (LOG.isDebugEnabled()) { - LOG.debug("RangerAuthorizationCoprocessor getColumnFamilies: columns are empty. " + - "Setting columns to emptySet in familyMap"); - } - result.put(family, Collections. emptySet()); - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("RangerAuthorizationCoprocessor getColumnFamilies: columns exist"); - } - Iterator columnIterator = new ColumnIterator(columnCollection); - Set columns = new HashSet(); - try { - while (columnIterator.hasNext()) { - String column = columnIterator.next(); - columns.add(column); - } - } catch (Throwable t) { - LOG.error("Exception encountered when converting family-map to set of columns. 
Ignoring and returning empty set of columns for family[" + family + "]", t); - LOG.error("Ignoring exception and returning empty set of columns for family[" + family +"]"); - columns.clear(); - } - result.put(family, columns); - } - } - } - return result; - } - - static class ColumnFamilyAccessResult { - final boolean _everythingIsAccessible; - final boolean _somethingIsAccessible; - final List _accessAllowedEvents; - final List _familyLevelAccessEvents; - final AuthzAuditEvent _accessDeniedEvent; - final String _denialReason; - final RangerAuthorizationFilter _filter;; - - ColumnFamilyAccessResult(boolean everythingIsAccessible, boolean somethingIsAccessible, - List accessAllowedEvents, List familyLevelAccessEvents, AuthzAuditEvent accessDeniedEvent, String denialReason, - RangerAuthorizationFilter filter) { - _everythingIsAccessible = everythingIsAccessible; - _somethingIsAccessible = somethingIsAccessible; - // WARNING: we are just holding on to reference of the collection. Potentially risky optimization - _accessAllowedEvents = accessAllowedEvents; - _familyLevelAccessEvents = familyLevelAccessEvents; - _accessDeniedEvent = accessDeniedEvent; - _denialReason = denialReason; - // cached values of access results - _filter = filter; - } - - @Override - public String toString() { - return MoreObjects.toStringHelper(getClass()) - .add("everythingIsAccessible", _everythingIsAccessible) - .add("somethingIsAccessible", _somethingIsAccessible) - .add("accessAllowedEvents", _accessAllowedEvents) - .add("familyLevelAccessEvents", _familyLevelAccessEvents) - .add("accessDeniedEvent", _accessDeniedEvent) - .add("denialReason", _denialReason) - .add("filter", _filter) - .toString(); - - } - } - - ColumnFamilyAccessResult evaluateAccess(ObserverContext ctx, String operation, Action action, final RegionCoprocessorEnvironment env, - final Map> familyMap, String commandStr) throws AccessDeniedException { - if (LOG.isDebugEnabled()) { - LOG.debug("evaluateAccess: 
isColumnAuthOptimizationEnabled="+hbasePlugin.getPropertyIsColumnAuthOptimizationEnabled()); - } - - String access = _authUtils.getAccess(action); - User user = getActiveUser(ctx); - String userName = _userUtils.getUserAsString(user); - final Map> colFamiliesForDebugLoggingOnly; - - if (LOG.isDebugEnabled()) { - colFamiliesForDebugLoggingOnly = getColumnFamilies(familyMap); - LOG.debug(String.format("evaluateAccess: entered: user[%s], Operation[%s], access[%s], families[%s]", - userName, operation, access, colFamiliesForDebugLoggingOnly)); - } - else{ - colFamiliesForDebugLoggingOnly = Collections.emptyMap(); - } - - byte[] tableBytes = getTableName(env); - if (tableBytes == null || tableBytes.length == 0) { - String message = "evaluateAccess: Unexpected: Couldn't get table from RegionCoprocessorEnvironment. Access denied, not audited"; - LOG.debug(message); - throw new AccessDeniedException("Insufficient permissions for operation '" + operation + "',action: " + action); - } - String table = Bytes.toString(tableBytes); - - final String messageTemplate = "evaluateAccess: exiting: user[%s], Operation[%s], access[%s], families[%s], verdict[%s]"; - ColumnFamilyAccessResult result; - if (canSkipAccessCheck(user, operation, access, table) || canSkipAccessCheck(user, operation, access, env)) { - LOG.debug("evaluateAccess: exiting: isKnownAccessPattern returned true: access allowed, not audited"); - result = new ColumnFamilyAccessResult(true, true, null, null, null, null, null); - if (LOG.isDebugEnabled()) { - String message = String.format(messageTemplate, userName, operation, access, colFamiliesForDebugLoggingOnly, result.toString()); - LOG.debug(message); - } - return result; - } - - // let's create a session that would be reused. Set things on it that won't change. 
- HbaseAuditHandler auditHandler = _factory.getAuditHandler(); - AuthorizationSession session = new AuthorizationSession(hbasePlugin) - .operation(operation) - .otherInformation(commandStr) - .remoteAddress(getRemoteAddress()) - .auditHandler(auditHandler) - .user(user) - .access(access) - .table(table); - if (LOG.isDebugEnabled()) { - LOG.debug("evaluateAccess: families to process: " + colFamiliesForDebugLoggingOnly); - } - if (familyMap == null || familyMap.isEmpty()) { - LOG.debug("evaluateAccess: Null or empty families collection, ok. Table level access is desired"); - session.buildRequest() - .authorize(); - boolean authorized = session.isAuthorized(); - String reason = ""; - if (authorized) { - if (LOG.isDebugEnabled()) { - LOG.debug("evaluateAccess: table level access granted [" + table + "]"); - } - } else { - reason = String.format("Insufficient permissions for user ‘%s',action: %s, tableName:%s, no column families found.", user.getName(), operation, table); - } - AuthzAuditEvent event = auditHandler.getAndDiscardMostRecentEvent(); // this could be null, of course, depending on audit settings of table. - // if authorized then pass captured events as access allowed set else as access denied set. - result = new ColumnFamilyAccessResult(authorized, authorized, - authorized ? Collections.singletonList(event) : null, - null, authorized ? null : event, reason, null); - if (LOG.isDebugEnabled()) { - String message = String.format(messageTemplate, userName, operation, access, colFamiliesForDebugLoggingOnly, result.toString()); - LOG.debug(message); - } - return result; - } else { - LOG.debug("evaluateAccess: Families collection not null. 
Skipping table-level check, will do finer level check"); - } - - boolean everythingIsAccessible = true; - boolean somethingIsAccessible = false; - /* - * we would have to accumulate audits of all successful accesses and any one denial (which in our case ends up being the last denial) - * We need to keep audit events for family level access check seperate because we don't want them logged in some cases. - */ - List authorizedEvents = new ArrayList(); - List familyLevelAccessEvents = new ArrayList(); - AuthzAuditEvent deniedEvent = null; - String denialReason = null; - // we need to cache the auths results so that we can create a filter, if needed - Map> columnsAccessAllowed = new HashMap>(); - Set familesAccessAllowed = new HashSet(); - Set familesAccessDenied = new HashSet(); - Set familesAccessIndeterminate = new HashSet(); - Set familiesFullyAuthorized = new HashSet<>(); - - for (Map.Entry> anEntry : familyMap.entrySet()) { - String family = Bytes.toString(anEntry.getKey()); - session.columnFamily(family); - if (LOG.isDebugEnabled()) { - LOG.debug("evaluateAccess: Processing family: " + family); - } - Collection columns = anEntry.getValue(); - if (columns == null || columns.isEmpty()) { - LOG.debug("evaluateAccess: columns collection null or empty, ok. 
Family level access is desired."); - - session.column(null) // zap stale column from prior iteration of this loop, if any - .buildRequest() - .authorize(); - AuthzAuditEvent auditEvent = auditHandler.getAndDiscardMostRecentEvent(); // capture it only for success - - final boolean isColumnFamilyAuthorized = session.isAuthorized(); - - if (auditEvent != null) { - if (isColumnFamilyAuthorized) { - familyLevelAccessEvents.add(auditEvent); - } else { - if (deniedEvent == null) { // we need to capture just one denial event - LOG.debug("evaluateAccess: Setting denied access audit event with last auth failure audit event."); - deniedEvent = auditEvent; - } - } - } - if (LOG.isDebugEnabled()) { - LOG.debug("evaluateAccess: family level access for [" + family + "] is evaluated to " + isColumnFamilyAuthorized + ". Checking if [" + family + "] descendants have access."); - } - // buildRequest again since resourceMatchingScope changed - // reset ResourceMatchingScope to SELF, ignoreDescendantDeny to true - session.resourceMatchingScope(RangerAccessRequest.ResourceMatchingScope.SELF_OR_DESCENDANTS) - .ignoreDescendantDeny(false) - .buildRequest() - .authorize(); - auditEvent = auditHandler.getAndDiscardMostRecentEvent(); // capture it only for failure - if (session.isAuthorized()) { - if (LOG.isDebugEnabled()) { - LOG.debug("evaluateAccess: [" + family + "] descendants have access"); - } - somethingIsAccessible = true; - if (isColumnFamilyAuthorized) { - familesAccessAllowed.add(family); - if (auditEvent != null) { - LOG.debug("evaluateAccess: adding to family-level-access-granted-event-set"); - familyLevelAccessEvents.add(auditEvent); - } - } else { - familesAccessIndeterminate.add(family); - if (LOG.isDebugEnabled()) { - LOG.debug("evaluateAccess: has partial access (of some type) in family [" + family + "]"); - } - everythingIsAccessible = false; - if (auditEvent != null && deniedEvent == null) { // we need to capture just one denial event - LOG.debug("evaluateAccess: Setting 
denied access audit event with last auth failure audit event."); - deniedEvent = auditEvent; - } - } - } else { - everythingIsAccessible = false; - if (isColumnFamilyAuthorized) { - somethingIsAccessible = true; - familesAccessIndeterminate.add(family); - if (LOG.isDebugEnabled()) { - LOG.debug("evaluateAccess: has partial access (of some type) in family [" + family + "]"); - } - if (auditEvent != null && deniedEvent == null) { // we need to capture just one denial event - LOG.debug("evaluateAccess: Setting denied access audit event with last auth failure audit event."); - deniedEvent = auditEvent; - } - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("evaluateAccess: has no access of [" + access + "] type in family [" + family + "]"); - } - familesAccessDenied.add(family); - denialReason = String.format("Insufficient permissions for user ‘%s',action: %s, tableName:%s, family:%s.", user.getName(), operation, table, family); - } - } - // Restore the headMatch setting - session.resourceMatchingScope(RangerAccessRequest.ResourceMatchingScope.SELF); - session.ignoreDescendantDeny(true); - } else { - boolean isColumnAuthOptimizationEnabled = hbasePlugin.getPropertyIsColumnAuthOptimizationEnabled(); - if(LOG.isDebugEnabled()) { - LOG.debug("evaluateAccess: columns collection not empty." + - " Skipping Family level check, will do finer level access check for columns."); - } - if (isColumnAuthOptimizationEnabled) { - session.column(null) - .buildRequest() - .authorize(); - if (LOG.isDebugEnabled()) { - LOG.debug( - "evaluateAccess: isColumnAuthOptimizationEnabled={}, isColumnFamilyAuthorized={}", - isColumnAuthOptimizationEnabled, session.isAuthorized()); - } - if(session.isAuthorized()) { - - //check if column family fully authorized i.e. 
no deny for columns - session.column(null) - .resourceMatchingScope(RangerAccessRequest.ResourceMatchingScope.SELF_OR_DESCENDANTS) - .ignoreDescendantDeny(false) - .buildRequest() - .authorize(); - - boolean isColumnFamilyAndDescendantsAuthorized = session.isAuthorized(); - AuthzAuditEvent auditEvent = auditHandler.getAndDiscardMostRecentEvent(); - // reset ResourceMatchingScope to SELF, ignoreDescendantDeny to true - session.resourceMatchingScope(RangerAccessRequest.ResourceMatchingScope.SELF) - .ignoreDescendantDeny(true); - if (LOG.isDebugEnabled()) { - LOG.debug( - "evaluateAccess: isColumnAuthOptimizationEnabled={}, isColumnFamilyAndDescendantsAuthorized={}", - isColumnAuthOptimizationEnabled, isColumnFamilyAndDescendantsAuthorized); - } - if (isColumnFamilyAndDescendantsAuthorized) { - familiesFullyAuthorized.add(family); - if (auditEvent != null) { - if (LOG.isDebugEnabled()) { - LOG.debug("evaluateAccess: isColumnAuthOptimizationEnabled ={}, adding family {} to familiesFullyAuthorized", isColumnAuthOptimizationEnabled, family); - } - familyLevelAccessEvents.add(auditEvent); - } - continue; - } - } - } - Set accessibleColumns = new HashSet(); // will be used in to populate our results cache for the filter - Iterator columnIterator = new ColumnIterator(columns); - while (columnIterator.hasNext()) { - String column = columnIterator.next(); - if (LOG.isDebugEnabled()) { - LOG.debug("evaluateAccess: Processing column: " + column); - } - //buildRequest required again since now column is being set - session.column(column) - .buildRequest() - .authorize(); - AuthzAuditEvent auditEvent = auditHandler.getAndDiscardMostRecentEvent(); - if (session.isAuthorized()) { - if (LOG.isDebugEnabled()) { - LOG.debug("evaluateAccess: has column level access [" + family + ", " + column + "]"); - } - // we need to do 3 things: housekeeping, capturing audit events, building the results cache for filter - somethingIsAccessible = true; - accessibleColumns.add(column); - if 
(auditEvent != null) { - LOG.debug("evaluateAccess: adding to access-granted-audit-event-set"); - authorizedEvents.add(auditEvent); - } - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("evaluateAccess: no column level access [" + family + ", " + column + "]"); - } - somethingIsAccessible = false; - everythingIsAccessible = false; - denialReason = String.format("Insufficient permissions for user ‘%s',action: %s, tableName:%s, family:%s, column: %s", user.getName(), operation, table, family, column); - if (auditEvent != null && deniedEvent == null) { // we need to capture just one denial event - LOG.debug("evaluateAccess: Setting denied access audit event with last auth failure audit event."); - deniedEvent = auditEvent; - } - } - if (!accessibleColumns.isEmpty()) { - columnsAccessAllowed.put(family, accessibleColumns); - } - } - } - } - // Cache of auth results are encapsulated the in the filter. Not every caller of the function uses it - only preGet and preOpt will. - RangerAuthorizationFilter filter = new RangerAuthorizationFilter(session, familesAccessAllowed, familesAccessDenied, familesAccessIndeterminate, columnsAccessAllowed, familiesFullyAuthorized); - result = new ColumnFamilyAccessResult(everythingIsAccessible, somethingIsAccessible, authorizedEvents, familyLevelAccessEvents, deniedEvent, denialReason, filter); - if (LOG.isDebugEnabled()) { - String message = String.format(messageTemplate, userName, operation, access, colFamiliesForDebugLoggingOnly, result.toString()); - LOG.debug(message); - } - return result; - } - - Filter authorizeAccess(ObserverContext ctx, String operation, Action action, final RegionCoprocessorEnvironment env, final Map> familyMap, String commandStr) throws AccessDeniedException { - - if (LOG.isDebugEnabled()) { - LOG.debug("==> authorizeAccess"); - } - RangerPerfTracer perf = null; - - try { - perf = RangerPerfTracer.getPerfTracer(PERF_HBASEAUTH_REQUEST_LOG, "RangerAuthorizationCoprocessor.authorizeAccess(request=Operation[" + 
operation + "]"); - - ColumnFamilyAccessResult accessResult = evaluateAccess(ctx, operation, action, env, familyMap, commandStr); - RangerDefaultAuditHandler auditHandler = new RangerDefaultAuditHandler(hbasePlugin.getConfig()); - if (accessResult._everythingIsAccessible) { - auditHandler.logAuthzAudits(accessResult._accessAllowedEvents); - auditHandler.logAuthzAudits(accessResult._familyLevelAccessEvents); - LOG.debug("authorizeAccess: exiting: No filter returned since all access was allowed"); - return null; // no filter needed since we are good to go. - } else if (accessResult._somethingIsAccessible) { - // NOTE: audit logging is split beween logging here (in scope of preOp/preGet) and logging in the filter component for those that couldn't be determined - auditHandler.logAuthzAudits(accessResult._accessAllowedEvents); - LOG.debug("authorizeAccess: exiting: Filter returned since some access was allowed"); - return accessResult._filter; - } else { - // If we are here then it means nothing was accessible! 
So let's log one denial (in our case, the last denial) and throw an exception - auditHandler.logAuthzAudit(accessResult._accessDeniedEvent); - LOG.debug("authorizeAccess: exiting: Throwing exception since nothing was accessible"); - throw new AccessDeniedException(accessResult._denialReason); - } - } finally { - RangerPerfTracer.log(perf); - if (LOG.isDebugEnabled()) { - LOG.debug("<== authorizeAccess"); - } - } - } - - Filter combineFilters(Filter filter, Filter existingFilter) { - Filter combinedFilter = filter; - if (existingFilter != null) { - combinedFilter = new FilterList(FilterList.Operator.MUST_PASS_ALL, Lists.newArrayList(filter, existingFilter)); - } - return combinedFilter; - } - - void requirePermission(final ObserverContext ctx, final String operation, final Action action, final RegionCoprocessorEnvironment regionServerEnv, final Map> familyMap) - throws AccessDeniedException { - - RangerPerfTracer perf = null; - - try { - if (RangerPerfTracer.isPerfTraceEnabled(PERF_HBASEAUTH_REQUEST_LOG)) { - perf = RangerPerfTracer.getPerfTracer(PERF_HBASEAUTH_REQUEST_LOG, "RangerAuthorizationCoprocessor.requirePermission(request=Operation[" + operation + "]"); - } - ColumnFamilyAccessResult accessResult = evaluateAccess(ctx, operation, action, regionServerEnv, familyMap, null); - RangerDefaultAuditHandler auditHandler = new RangerDefaultAuditHandler(hbasePlugin.getConfig()); - if (accessResult._everythingIsAccessible) { - auditHandler.logAuthzAudits(accessResult._accessAllowedEvents); - auditHandler.logAuthzAudits(accessResult._familyLevelAccessEvents); - LOG.debug("requirePermission: exiting: all access was allowed"); - return; - } else { - auditHandler.logAuthzAudit(accessResult._accessDeniedEvent); - LOG.debug("requirePermission: exiting: throwing exception as everything wasn't accessible"); - throw new AccessDeniedException(accessResult._denialReason); - } - } finally { - RangerPerfTracer.log(perf); - } - } - - /** - * This could run s - * @param operation - * 
@param otherInformation - * @param table - * @param columnFamily - * @param column - * @return - * @throws AccessDeniedException - */ - void authorizeAccess(ObserverContext ctx, String operation, String otherInformation, Action action, String table, String columnFamily, String column) throws AccessDeniedException { - User user = getActiveUser(ctx); - String access = _authUtils.getAccess(action); - if (LOG.isDebugEnabled()) { - final String format = "authorizeAccess: %s: Operation[%s], Info[%s], access[%s], table[%s], columnFamily[%s], column[%s]"; - String message = String.format(format, "Entering", operation, otherInformation, access, table, columnFamily, column); - LOG.debug(message); - } - - final String format = "authorizeAccess: %s: Operation[%s], Info[%s], access[%s], table[%s], columnFamily[%s], column[%s], allowed[%s], reason[%s]"; - if (canSkipAccessCheck(user, operation, access, table)) { - if (LOG.isDebugEnabled()) { - String message = String.format(format, "Exiting", operation, otherInformation, access, table, columnFamily, column, true, "can skip auth check"); - LOG.debug(message); - } - return; - } - - - HbaseAuditHandler auditHandler = _factory.getAuditHandler(); - AuthorizationSession session = new AuthorizationSession(hbasePlugin) - .operation(operation) - .otherInformation(otherInformation) - .remoteAddress(getRemoteAddress()) - .auditHandler(auditHandler) - .user(user) - .access(access) - .table(table) - .columnFamily(columnFamily) - .column(column) - .buildRequest() - .authorize(); - - if (LOG.isDebugEnabled()) { - boolean allowed = session.isAuthorized(); - String reason = session.getDenialReason(); - String message = String.format(format, "Exiting", operation, otherInformation, access, table, columnFamily, column, allowed, reason); - LOG.debug(message); - } - - session.publishResults(); - } - - boolean canSkipAccessCheck(User user, final String operation, String access, final String table) - throws AccessDeniedException { - - boolean result = 
false; - if (user == null) { - String message = "Unexpeceted: User is null: access denied, not audited!"; - LOG.warn("canSkipAccessCheck: exiting" + message); - throw new AccessDeniedException("No user associated with request (" + operation + ") for action: " + access + "on table:" + table); - } else if (isAccessForMetadataRead(access, table)) { - LOG.debug("canSkipAccessCheck: true: metadata read access always allowed, not audited"); - result = true; - } else { - LOG.debug("Can't skip access checks"); - } - - return result; - } - - boolean canSkipAccessCheck(User user, final String operation, String access, final RegionCoprocessorEnvironment regionServerEnv) throws AccessDeniedException { - - // read access to metadata tables is always allowed and isn't audited. - if (isAccessForMetaTables(regionServerEnv) && _authUtils.isReadAccess(access)) { - LOG.debug("isKnownAccessPattern: exiting: Read access for metadata tables allowed, not audited!"); - return true; - } - // if write access is desired to metatables then global create access is sufficient - if (_authUtils.isWriteAccess(access) && isAccessForMetaTables(regionServerEnv)) { - String createAccess = _authUtils.getAccess(Action.CREATE); - AuthorizationSession session = new AuthorizationSession(hbasePlugin) - .operation(operation) - .remoteAddress(getRemoteAddress()) - .user(user) - .access(createAccess) - .buildRequest() - .authorize(); - if (session.isAuthorized()) { - // NOTE: this access isn't logged - LOG.debug("isKnownAccessPattern: exiting: User has global create access, allowed!"); - return true; - } - } - return false; - } - - boolean isAccessForMetadataRead(String access, String table) { - if (_authUtils.isReadAccess(access) && isSpecialTable(table)) { - LOG.debug("isAccessForMetadataRead: Metadata tables read: access allowed!"); - return true; - } - return false; - } - - // Check if the user has global permission ... 
- protected void requireGlobalPermission(ObserverContext ctx, String request, String objName, Permission.Action action) throws AccessDeniedException { - authorizeAccess(ctx, request, objName, action, null, null, null); - } - - protected void requirePermission(ObserverContext ctx, String request, Permission.Action action) throws AccessDeniedException { - requirePermission(ctx, request, null, action); - } - - protected void requirePermission(ObserverContext ctx, String request, byte[] tableName, Permission.Action action) throws AccessDeniedException { - String table = Bytes.toString(tableName); - - authorizeAccess(ctx, request, null, action, table, null, null); - } - - protected void requirePermission(ObserverContext ctx, String request, byte[] aTableName, byte[] aColumnFamily, byte[] aQualifier, Permission.Action action) throws AccessDeniedException { - - String table = Bytes.toString(aTableName); - String columnFamily = Bytes.toString(aColumnFamily); - String column = Bytes.toString(aQualifier); - - authorizeAccess(ctx, request, null, action, table, columnFamily, column); - } - - protected void requirePermission(ObserverContext ctx, String request, Permission.Action perm, RegionCoprocessorEnvironment env, Collection families) throws IOException { - HashMap> familyMap = new HashMap>(); - - if(families != null) { - for (byte[] family : families) { - familyMap.put(family, null); - } - } - requirePermission(ctx, request, perm, env, familyMap); - } - - @Override - public Optional getRegionObserver() { - return Optional.of(this); - } - - @Override - public Optional getMasterObserver() { - return Optional.of(this); - } - - @Override - public Optional getEndpointObserver() { - return Optional.of(this); - } - - @Override - public Optional getBulkLoadObserver() { - return Optional.of(this); - } - - @Override - public Optional getRegionServerObserver() { - return Optional.of(this); - } - - @Override - public void postScannerClose(ObserverContext c, InternalScanner s) throws 
IOException { - scannerOwners.remove(s); - } - @Override - public RegionScanner postScannerOpen(ObserverContext c, Scan scan, RegionScanner s) throws IOException { - User user = getActiveUser(c); - if (user != null && user.getShortName() != null) { - scannerOwners.put(s, user.getShortName()); - } - return s; - } - - @Override - public void postStartMaster(ObserverContext ctx) throws IOException { - if(UpdateRangerPoliciesOnGrantRevoke) { - LOG.debug("Calling create ACL table ..."); - Admin admin = (ctx.getEnvironment()).getConnection().getAdmin(); - Throwable var3 = null; - - try { - if(!admin.tableExists(PermissionStorage.ACL_TABLE_NAME)) { - createACLTable(admin); - } - } catch (Throwable var12) { - var3 = var12; - throw var12; - } finally { - if(admin != null) { - if(var3 != null) { - try { - admin.close(); - } catch (Throwable var11) { - var3.addSuppressed(var11); - } - } else { - admin.close(); - } - } - - } - } - } - - private static void createACLTable(Admin admin) throws IOException { - ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.newBuilder(PermissionStorage.ACL_LIST_FAMILY).setMaxVersions(1).setInMemory(true).setBlockCacheEnabled(true).setBlocksize(8192).setBloomFilterType(BloomType.NONE).setScope(0).build(); - TableDescriptor td = TableDescriptorBuilder.newBuilder(PermissionStorage.ACL_TABLE_NAME).addColumnFamily(cfd).build(); - admin.createTable(td); - } - - @Override - public Iterable getServices() { - return Collections.singleton(AccessControlService.newReflectiveService(this)); - } - - @Override - public Result preAppend(ObserverContext c, Append append) throws IOException { - requirePermission(c, "append", TablePermission.Action.WRITE, c.getEnvironment(), append.getFamilyCellMap()); - return null; - } - @Override - public void preAssign(ObserverContext c, RegionInfo regionInfo) throws IOException { - requirePermission(c, "assign", regionInfo.getTable().getName(), null, null, Action.ADMIN); - } - @Override - public void 
preBalance(ObserverContext c, BalanceRequest request) throws IOException { - requirePermission(c,"balance", Permission.Action.ADMIN); - } - @Override - public void preBalanceSwitch(ObserverContext c, boolean newValue) throws IOException { - requirePermission(c, "balanceSwitch", Permission.Action.ADMIN); - } - @Override - public void preBulkLoadHFile(ObserverContext ctx, List> familyPaths) throws IOException { - List cfs = new LinkedList(); - for (Pair el : familyPaths) { - cfs.add(el.getFirst()); - } - requirePermission(ctx, "bulkLoadHFile", Permission.Action.WRITE, ctx.getEnvironment(), cfs); - } - @Override - public boolean preCheckAndDelete(ObserverContext c, byte[] row, byte[] family, byte[] qualifier, CompareOperator compareOp, ByteArrayComparable comparator, Delete delete, boolean result) throws IOException { - Collection familyMap = Arrays.asList(new byte[][] { family }); - requirePermission(c, "checkAndDelete", TablePermission.Action.READ, c.getEnvironment(), familyMap); - requirePermission(c, "checkAndDelete", TablePermission.Action.WRITE, c.getEnvironment(), familyMap); - return result; - } - @Override - public boolean preCheckAndPut(ObserverContext c, byte[] row, byte[] family, byte[] qualifier, CompareOperator compareOp, ByteArrayComparable comparator, Put put, boolean result) throws IOException { - Collection familyMap = Arrays.asList(new byte[][] { family }); - requirePermission(c, "checkAndPut", TablePermission.Action.READ, c.getEnvironment(), familyMap); - requirePermission(c, "checkAndPut", TablePermission.Action.WRITE, c.getEnvironment(), familyMap); - return result; - } - @Override - public void preCloneSnapshot(ObserverContext ctx, SnapshotDescription snapshot, TableDescriptor hTableDescriptor) throws IOException { - requirePermission(ctx, "cloneSnapshot", hTableDescriptor.getTableName().getName(), Permission.Action.ADMIN); - } - @Override - public void preClose(ObserverContext e, boolean abortRequested) throws IOException { - 
requirePermission(e, "close", getTableName(e.getEnvironment()), Permission.Action.ADMIN); - } - @Override - public InternalScanner preCompact(ObserverContext e, Store store, InternalScanner scanner,ScanType scanType, CompactionLifeCycleTracker tracker, CompactionRequest request) throws IOException { - requirePermission(e, "compact", getTableName(e.getEnvironment()), null, null, Action.CREATE); - return scanner; - } - @Override - public void preCompactSelection(ObserverContext e, Store store, List candidates, CompactionLifeCycleTracker tracker) throws IOException { - requirePermission(e, "compactSelection", getTableName(e.getEnvironment()), null, null, Action.CREATE); - } - - @Override - public void preCreateTable(ObserverContext c, TableDescriptor desc, RegionInfo[] regions) throws IOException { - requirePermission(c, "createTable", desc.getTableName().getName(), Permission.Action.CREATE); - } - - - @Override - public void preDelete(ObserverContext c, Delete delete, WALEdit edit, Durability durability) throws IOException { - requirePermission(c, "delete", TablePermission.Action.WRITE, c.getEnvironment(), delete.getFamilyCellMap()); - } - - @Override - public void preDeleteSnapshot(ObserverContext ctx, SnapshotDescription snapshot) throws IOException { - requirePermission(ctx, "deleteSnapshot", snapshot.getTableName().getName(), Permission.Action.ADMIN); - } - @Override - public void preDeleteTable(ObserverContext c, TableName tableName) throws IOException { - requirePermission(c, "deleteTable", tableName.getName(), null, null, Action.CREATE); - } - @Override - public void preDisableTable(ObserverContext c, TableName tableName) throws IOException { - requirePermission(c, "disableTable", tableName.getName(), null, null, Action.CREATE); - } - @Override - public void preEnableTable(ObserverContext c, TableName tableName) throws IOException { - requirePermission(c, "enableTable", tableName.getName(), null, null, Action.CREATE); - } - @Override - public boolean 
preExists(ObserverContext c, Get get, boolean exists) throws IOException { - requirePermission(c, "exists", TablePermission.Action.READ, c.getEnvironment(), get.familySet()); - return exists; - } - @Override - public void preFlush(ObserverContext e, FlushLifeCycleTracker tracker) throws IOException { - requirePermission(e, "flush", getTableName(e.getEnvironment()), null, null, Action.CREATE); - } - - @Override - public Result preIncrement(ObserverContext c, Increment increment) throws IOException { - requirePermission(c, "increment", TablePermission.Action.WRITE, c.getEnvironment(), increment.getFamilyCellMap().keySet()); - - return null; - } - - @Override - public void preModifyTable(ObserverContext c, TableName tableName, TableDescriptor htd) throws IOException { - requirePermission(c, "modifyTable", tableName.getName(), null, null, Action.CREATE); - } - @Override - public void preMove(ObserverContext c, RegionInfo region, ServerName srcServer, ServerName destServer) throws IOException { - requirePermission(c, "move", region.getTable().getName() , null, null, Action.ADMIN); - } - - @Override - public void preAbortProcedure(ObserverContext observerContext, long procId) throws IOException { - //if(!procEnv.isProcedureOwner(procId, this.getActiveUser())) { - requirePermission(observerContext, "abortProcedure", Action.ADMIN); - //} - } - - @Override - public void postGetProcedures(ObserverContext observerContext) throws IOException { - /*if(!procInfoList.isEmpty()) { - Iterator> itr = procInfoList.iterator(); - User user = this.getActiveUser(); - - while(itr.hasNext()) { - Procedure procInfo = itr.next(); - try { - String owner = procInfo.getOwner(); - if (owner == null || !owner.equals(user.getShortName())) { - requirePermission("getProcedures", Action.ADMIN); - } - } catch (AccessDeniedException var7) { - itr.remove(); - } - } - - }*/ - requirePermission(observerContext, "getProcedures", Action.ADMIN); - } - - @Override - public void preOpen(ObserverContext e) 
throws IOException { - RegionCoprocessorEnvironment env = e.getEnvironment(); - final Region region = env.getRegion(); - if (region == null) { - LOG.error("NULL region from RegionCoprocessorEnvironment in preOpen()"); - } else { - RegionInfo regionInfo = region.getRegionInfo(); - if (isSpecialTable(regionInfo)) { - requireSystemOrSuperUser(regionEnv.getConfiguration(),e); - } else { - requirePermission(e, "open", getTableName(e.getEnvironment()), Action.ADMIN); - } - } - } - @Override - public void preRestoreSnapshot(ObserverContext ctx, SnapshotDescription snapshot, TableDescriptor hTableDescriptor) throws IOException { - requirePermission(ctx, "restoreSnapshot", hTableDescriptor.getTableName().getName(), Permission.Action.ADMIN); - } - - @Override - public void preScannerClose(ObserverContext c, InternalScanner s) throws IOException { - requireScannerOwner(c,s); - } - @Override - public boolean preScannerNext(ObserverContext c, InternalScanner s, List result, int limit, boolean hasNext) throws IOException { - requireScannerOwner(c,s); - return hasNext; - } - @Override - public void preScannerOpen(ObserverContext c, Scan scan) throws IOException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> preScannerOpen"); - } - String commandStr = null; - try { - RegionCoprocessorEnvironment e = c.getEnvironment(); - - Map> familyMap = scan.getFamilyMap(); - String operation = "scannerOpen"; - byte[] tableName = getTableName(e); - String tableNameStr = tableName != null ? new String(tableName):" "; - commandStr = getCommandString(HbaseConstants.SCAN, tableNameStr, scan.toMap()); - Filter filter = authorizeAccess(c, operation, Action.READ, e, familyMap, commandStr); - if (filter == null) { - if (LOG.isDebugEnabled()) { - LOG.debug("preScannerOpen: Access allowed for all families/column. No filter added"); - } - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("preScannerOpen: Access allowed for some of the families/column. 
New filter added."); - } - Filter existingFilter = scan.getFilter(); - Filter combinedFilter = combineFilters(filter, existingFilter); - scan.setFilter(combinedFilter); - } - } finally { - if (LOG.isDebugEnabled()) { - LOG.debug("<== preScannerOpen: commandStr: " + commandStr); - } - } - } - @Override - public void preShutdown(ObserverContext c) throws IOException { - requirePermission(c, "shutdown", Permission.Action.ADMIN); - cleanUp_HBaseRangerPlugin(); - } - @Override - public void preSnapshot(ObserverContext ctx, SnapshotDescription snapshot, TableDescriptor hTableDescriptor) throws IOException { - requirePermission(ctx, "snapshot", hTableDescriptor.getTableName().getName(), Permission.Action.ADMIN); - } - - @Override - public void preStopMaster(ObserverContext c) throws IOException { - requirePermission(c, "stopMaster", Permission.Action.ADMIN); - cleanUp_HBaseRangerPlugin(); - } - @Override - public void preStopRegionServer(ObserverContext env) throws IOException { - requirePermission(env, "stop", Permission.Action.ADMIN); - cleanUp_HBaseRangerPlugin(); - } - @Override - public void preUnassign(ObserverContext c, RegionInfo regionInfo, boolean force) throws IOException { - requirePermission(c, "unassign", regionInfo.getTable().getName(), null, null, Action.ADMIN); - } - - @Override - public void preSetUserQuota(final ObserverContext ctx, - final String userName, final GlobalQuotaSettings quotas) throws IOException { - requireGlobalPermission(ctx, "setUserQuota", null, Action.ADMIN); - } - - @Override - public void preSetUserQuota(final ObserverContext ctx, - final String userName, final TableName tableName, final GlobalQuotaSettings quotas) throws IOException { - requirePermission(ctx, "setUserTableQuota", tableName.getName(), null, null, Action.ADMIN); - } - - @Override - public void preSetUserQuota(final ObserverContext ctx, - final String userName, final String namespace, final GlobalQuotaSettings quotas) throws IOException { - 
requireGlobalPermission(ctx, "setUserNamespaceQuota", namespace, Action.ADMIN); - } - - @Override - public void preSetTableQuota(final ObserverContext ctx, - final TableName tableName, final GlobalQuotaSettings quotas) throws IOException { - requirePermission(ctx, "setTableQuota", tableName.getName(), null, null, Action.ADMIN); - } - - @Override - public void preSetNamespaceQuota(final ObserverContext ctx, - final String namespace, final GlobalQuotaSettings quotas) throws IOException { - requireGlobalPermission(ctx, "setNamespaceQuota", namespace, Action.ADMIN); - } - - private String coprocessorType = "unknown"; - private static final String MASTER_COPROCESSOR_TYPE = "master"; - private static final String REGIONAL_COPROCESSOR_TYPE = "regional"; - private static final String REGIONAL_SERVER_COPROCESSOR_TYPE = "regionalServer"; - - @Override - public void start(CoprocessorEnvironment env) throws IOException { - String appType = "unknown"; - - shouldCheckExecPermission = env.getConfiguration().getBoolean( - AccessControlConstants.EXEC_PERMISSION_CHECKS_KEY, - AccessControlConstants.DEFAULT_EXEC_PERMISSION_CHECKS); - if (env instanceof MasterCoprocessorEnvironment) { - coprocessorType = MASTER_COPROCESSOR_TYPE; - appType = "hbaseMaster"; - } else if (env instanceof RegionServerCoprocessorEnvironment) { - coprocessorType = REGIONAL_SERVER_COPROCESSOR_TYPE; - appType = "hbaseRegional"; - } else if (env instanceof RegionCoprocessorEnvironment) { - regionEnv = (RegionCoprocessorEnvironment) env; - coprocessorType = REGIONAL_COPROCESSOR_TYPE; - appType = "hbaseRegional"; - } - - this.userProvider = UserProvider.instantiate(env.getConfiguration()); - - Configuration conf = env.getConfiguration(); - HbaseFactory.initialize(conf); - - // create and initialize the plugin class - RangerHBasePlugin plugin = hbasePlugin; - - if(plugin == null) { - synchronized(RangerAuthorizationCoprocessor.class) { - plugin = hbasePlugin; - - if(plugin == null) { - plugin = new 
RangerHBasePlugin(appType); - - plugin.init(); - - UpdateRangerPoliciesOnGrantRevoke = plugin.getConfig().getBoolean(RangerHadoopConstants.HBASE_UPDATE_RANGER_POLICIES_ON_GRANT_REVOKE_PROP, RangerHadoopConstants.HBASE_UPDATE_RANGER_POLICIES_ON_GRANT_REVOKE_DEFAULT_VALUE); - - hbasePlugin = plugin; - } - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug("Start of Coprocessor: [" + coprocessorType + "]"); - } - } - - @Override - public void prePut(ObserverContext c, Put put, WALEdit edit, Durability durability) throws IOException { - requirePermission(c, "put", TablePermission.Action.WRITE, c.getEnvironment(), put.getFamilyCellMap()); - } - - @Override - public void preGetOp(final ObserverContext rEnv, final Get get, final List result) throws IOException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> preGetOp"); - } - String commandStr = null; - try { - RegionCoprocessorEnvironment e = rEnv.getEnvironment(); - Map> familyMap = get.getFamilyMap(); - - String operation = "get"; - byte[] tableName = getTableName(e); - String tableNameStr = tableName != null ? 
new String(tableName):" "; - commandStr = getCommandString(HbaseConstants.GET, tableNameStr, get.toMap()); - Filter filter = authorizeAccess(rEnv, operation, Action.READ, e, familyMap, commandStr); - if (filter == null) { - if (LOG.isDebugEnabled()) { - LOG.debug("preGetOp: all access allowed, no filter returned"); - } - } else { - Filter existingFilter = get.getFilter(); - Filter combinedFilter = combineFilters(filter, existingFilter); - get.setFilter(combinedFilter); - if (LOG.isDebugEnabled()) { - LOG.debug("preGetOp: partial access, new filter added"); - } - } - } finally { - if (LOG.isDebugEnabled()) { - LOG.debug("<== preGetOp: commandStr: " + commandStr ); - } - } - } - @Override - public void preRegionOffline(ObserverContext c, RegionInfo regionInfo) throws IOException { - requirePermission(c, "regionOffline", regionInfo.getTable().getName(), null, null, Action.ADMIN); - } - @Override - public void preCreateNamespace(ObserverContext ctx, NamespaceDescriptor ns) throws IOException { - requireGlobalPermission(ctx, "createNamespace", ns.getName(), Action.ADMIN); - } - @Override - public void preDeleteNamespace(ObserverContext ctx, String namespace) throws IOException { - requireGlobalPermission(ctx, "deleteNamespace", namespace, Action.ADMIN); - } - @Override - public void preModifyNamespace(ObserverContext ctx, NamespaceDescriptor ns) throws IOException { - requireGlobalPermission(ctx, "modifyNamespace", ns.getName(), Action.ADMIN); - } - - @Override - public void postGetTableNames(ObserverContext ctx, List descriptors, String regex) throws IOException { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("==> postGetTableNames(count(descriptors)=%s, regex=%s)", descriptors == null ? 0 : descriptors.size(), regex)); - } - checkGetTableInfoAccess(ctx, "getTableNames", descriptors, regex, RangerPolicyEngine.ANY_ACCESS); - - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("<== postGetTableNames(count(descriptors)=%s, regex=%s)", descriptors == null ? 
0 : descriptors.size(), regex)); - } - } - - @Override - public void postGetTableDescriptors(ObserverContext ctx, List tableNamesList, List descriptors, String regex) throws IOException { - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("==> postGetTableDescriptors(count(tableNamesList)=%s, count(descriptors)=%s, regex=%s)", tableNamesList == null ? 0 : tableNamesList.size(), - descriptors == null ? 0 : descriptors.size(), regex)); - } - - checkGetTableInfoAccess(ctx, "getTableDescriptors", descriptors, regex, _authUtils.getAccess(Action.CREATE)); - - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("<== postGetTableDescriptors(count(tableNamesList)=%s, count(descriptors)=%s, regex=%s)", tableNamesList == null ? 0 : tableNamesList.size(), - descriptors == null ? 0 : descriptors.size(), regex)); - } - } - - @Override - public void postListNamespaceDescriptors(ObserverContext ctx, List descriptors) throws IOException { - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerAuthorizationCoprocessor.postListNamespaceDescriptors()"); - } - - checkAccessForNamespaceDescriptor(ctx, "getNameSpaceDescriptors", descriptors); - - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerAuthorizationCoprocessor.postListNamespaceDescriptors()"); - } - } - - - public void prePrepareBulkLoad(ObserverContext ctx, PrepareBulkLoadRequest request) throws IOException { - List cfs = null; - - requirePermission(ctx, "prePrepareBulkLoad", Permission.Action.WRITE, ctx.getEnvironment(), cfs); - } - - public void preCleanupBulkLoad(ObserverContext ctx, CleanupBulkLoadRequest request) throws IOException { - List cfs = null; - - requirePermission(ctx, "preCleanupBulkLoad", Permission.Action.WRITE, ctx.getEnvironment(), cfs); - } - - /* ---- EndpointObserver implementation ---- */ - - @Override - public Message preEndpointInvocation(ObserverContext ctx, - Service service, String methodName, Message request) throws IOException { - // Don't intercept calls to our own AccessControlService, we 
check for - // appropriate permissions in the service handlers - if (shouldCheckExecPermission && !(service instanceof AccessControlService)) { - requirePermission(ctx, - "invoke(" + service.getDescriptorForType().getName() + "." + methodName + ")", - getTableName(ctx.getEnvironment()), null, null, - Action.EXEC); - } - return request; - } - - @Override - public void grant(RpcController controller, AccessControlProtos.GrantRequest request, RpcCallback done) { - boolean isSuccess = false; - - if(UpdateRangerPoliciesOnGrantRevoke) { - GrantRevokeRequest grData = null; - - try { - grData = createGrantData(request); - - RangerHBasePlugin plugin = hbasePlugin; - - if(plugin != null) { - - RangerAccessResultProcessor auditHandler = new RangerDefaultAuditHandler(hbasePlugin.getConfig()); - - plugin.grantAccess(grData, auditHandler); - - isSuccess = true; - } - } catch(AccessControlException excp) { - LOG.warn("grant() failed", excp); - - ResponseConverter.setControllerException(controller, new AccessDeniedException(excp)); - } catch(IOException excp) { - LOG.warn("grant() failed", excp); - - ResponseConverter.setControllerException(controller, excp); - } catch (Exception excp) { - LOG.warn("grant() failed", excp); - - ResponseConverter.setControllerException(controller, new CoprocessorException(excp.getMessage())); - } - } - - AccessControlProtos.GrantResponse response = isSuccess ? 
AccessControlProtos.GrantResponse.getDefaultInstance() : null; - - done.run(response); - } - - @Override - public void revoke(RpcController controller, AccessControlProtos.RevokeRequest request, RpcCallback done) { - boolean isSuccess = false; - - if(UpdateRangerPoliciesOnGrantRevoke) { - GrantRevokeRequest grData = null; - - try { - grData = createRevokeData(request); - - RangerHBasePlugin plugin = hbasePlugin; - - if(plugin != null) { - - RangerAccessResultProcessor auditHandler = new RangerDefaultAuditHandler(hbasePlugin.getConfig()); - - plugin.revokeAccess(grData, auditHandler); - - isSuccess = true; - } - } catch(AccessControlException excp) { - LOG.warn("revoke() failed", excp); - - ResponseConverter.setControllerException(controller, new AccessDeniedException(excp)); - } catch(IOException excp) { - LOG.warn("revoke() failed", excp); - - ResponseConverter.setControllerException(controller, excp); - } catch (Exception excp) { - LOG.warn("revoke() failed", excp); - - ResponseConverter.setControllerException(controller, new CoprocessorException(excp.getMessage())); - } - } - - AccessControlProtos.RevokeResponse response = isSuccess ? 
AccessControlProtos.RevokeResponse.getDefaultInstance() : null; - - done.run(response); - } - - @Override - public void hasPermission(RpcController controller, AccessControlProtos.HasPermissionRequest request, RpcCallback done) { - LOG.debug("hasPermission(): "); - } - - @Override - public void checkPermissions(RpcController controller, AccessControlProtos.CheckPermissionsRequest request, RpcCallback done) { - LOG.debug("checkPermissions(): "); - } - - @Override - public void getUserPermissions(RpcController controller, AccessControlProtos.GetUserPermissionsRequest request, - RpcCallback done) { - AccessControlProtos.GetUserPermissionsResponse response = null; - try { - String operation = "userPermissions"; - final RangerAccessResourceImpl resource = new RangerAccessResourceImpl(); - User user = getActiveUser(null); - Set groups = _userUtils.getUserGroups(user); - if (groups.isEmpty() && user.getUGI() != null) { - String[] groupArray = user.getUGI().getGroupNames(); - if (groupArray != null) { - groups = Sets.newHashSet(groupArray); - } - } - RangerAccessRequestImpl rangerAccessrequest = new RangerAccessRequestImpl(resource, null, - _userUtils.getUserAsString(user), groups, null); - rangerAccessrequest.setAction(operation); - rangerAccessrequest.setClientIPAddress(getRemoteAddress()); - rangerAccessrequest.setResourceMatchingScope(RangerAccessRequest.ResourceMatchingScope.SELF); - List perms = null; - if (request.getType() == AccessControlProtos.Permission.Type.Table) { - final TableName table = request.hasTableName() ? 
ProtobufUtil.toTableName(request.getTableName()) : null; - requirePermission(null, operation, table.getName(), Action.ADMIN); - resource.setValue(RangerHBaseResource.KEY_TABLE, table.getNameAsString()); - perms = User.runAsLoginUser(new PrivilegedExceptionAction>() { - @Override - public List run() throws Exception { - return getUserPermissions( - hbasePlugin.getResourceACLs(rangerAccessrequest), - table.getNameAsString(), false); - } - }); - } else if (request.getType() == AccessControlProtos.Permission.Type.Namespace) { - final String namespace = request.getNamespaceName().toStringUtf8(); - requireGlobalPermission(null, "getUserPermissionForNamespace", namespace, Action.ADMIN); - resource.setValue(RangerHBaseResource.KEY_TABLE, namespace + RangerHBaseResource.NAMESPACE_SEPARATOR); - rangerAccessrequest.setRequestData(namespace); - perms = User.runAsLoginUser(new PrivilegedExceptionAction>() { - @Override - public List run() throws Exception { - return getUserPermissions( - hbasePlugin.getResourceACLs(rangerAccessrequest), - namespace, true); - } - }); - } else { - requirePermission(null, "userPermissions", Action.ADMIN); - perms = User.runAsLoginUser(new PrivilegedExceptionAction>() { - @Override - public List run() throws Exception { - return getUserPermissions( - hbasePlugin.getResourceACLs(rangerAccessrequest), null, - false); - } - }); - if (_userUtils.isSuperUser(user)) { - perms.add(new UserPermission(_userUtils.getUserAsString(user), - Permission.newBuilder(PermissionStorage.ACL_TABLE_NAME).withActions(Action.values()).build())); - } - } - response = AccessControlUtil.buildGetUserPermissionsResponse(perms); - } catch (IOException ioe) { - // pass exception back up - ResponseConverter.setControllerException(controller, ioe); - } - done.run(response); - } - - private List getUserPermissions(RangerResourceACLs rangerResourceACLs, String resource, - boolean isNamespace) { - List userPermissions = new ArrayList(); - Action[] hbaseActions = Action.values(); - 
List hbaseActionsList = new ArrayList(); - for (Action action : hbaseActions) { - hbaseActionsList.add(action.name()); - } - addPermission(rangerResourceACLs.getUserACLs(), isNamespace, hbaseActionsList, userPermissions, resource, - false); - addPermission(rangerResourceACLs.getGroupACLs(), isNamespace, hbaseActionsList, userPermissions, resource, - true); - return userPermissions; - } - - private void addPermission(Map> acls, boolean isNamespace, - List hbaseActionsList, List userPermissions, String resource, boolean isGroup) { - for (Entry> userAcls : acls.entrySet()) { - String user = !isGroup ? userAcls.getKey() : AuthUtil.toGroupEntry(userAcls.getKey()); - List allowedPermissions = new ArrayList(); - for (Entry permissionAccess : userAcls.getValue().entrySet()) { - String permission = _authUtils.getActionName(permissionAccess.getKey()); - if (hbaseActionsList.contains(permission) - && permissionAccess.getValue().getResult() == RangerPolicyEvaluator.ACCESS_ALLOWED) { - allowedPermissions.add(Action.valueOf(permission)); - } - - } - if (!allowedPermissions.isEmpty()) { - UserPermission up = null; - if (isNamespace) { - up = new UserPermission(user, - Permission.newBuilder(resource).withActions(allowedPermissions.toArray(new Action[allowedPermissions.size()])).build()); - } else { - up = new UserPermission(user, - Permission.newBuilder(TableName.valueOf(resource)).withActions(allowedPermissions.toArray(new Action[allowedPermissions.size()])).build()); - } - userPermissions.add(up); - } - } - } - - private GrantRevokeRequest createGrantData(AccessControlProtos.GrantRequest request) throws Exception { - AccessControlProtos.UserPermission up = request.getUserPermission(); - AccessControlProtos.Permission perm = up == null ? null : up.getPermission(); - - UserPermission userPerm = up == null ? null : AccessControlUtil.toUserPermission(up); - Permission.Action[] actions = userPerm == null ? 
null : userPerm.getPermission().getActions(); - String userName = userPerm == null ? null : userPerm.getUser(); - String nameSpace = null; - String tableName = null; - String colFamily = null; - String qualifier = null; - - if(perm == null) { - throw new Exception("grant(): invalid data - permission is null"); - } - - if(StringUtil.isEmpty(userName)) { - throw new Exception("grant(): invalid data - username empty"); - } - - if ((actions == null) || (actions.length == 0)) { - throw new Exception("grant(): invalid data - no action specified"); - } - - switch(perm.getType()) { - case Global: - tableName = colFamily = qualifier = RangerHBaseResource.WILDCARD; - break; - - case Table: - TablePermission tablePerm = (TablePermission)userPerm.getPermission(); - tableName = Bytes.toString(tablePerm.getTableName().getName()); - colFamily = Bytes.toString(tablePerm.getFamily()); - qualifier = Bytes.toString(tablePerm.getQualifier()); - break; - - case Namespace: - NamespacePermission namepsacePermission = (NamespacePermission)userPerm.getPermission(); - nameSpace = namepsacePermission.getNamespace(); - break; - } - - if(StringUtil.isEmpty(nameSpace) && StringUtil.isEmpty(tableName) && StringUtil.isEmpty(colFamily) && StringUtil.isEmpty(qualifier)) { - throw new Exception("grant(): namespace/table/columnFamily/columnQualifier not specified"); - } - - tableName = StringUtil.isEmpty(tableName) ? RangerHBaseResource.WILDCARD : tableName; - colFamily = StringUtil.isEmpty(colFamily) ? RangerHBaseResource.WILDCARD : colFamily; - qualifier = StringUtil.isEmpty(qualifier) ? RangerHBaseResource.WILDCARD : qualifier; - - if(! StringUtil.isEmpty(nameSpace)) { - tableName = nameSpace + RangerHBaseResource.NAMESPACE_SEPARATOR + tableName; - } - - User activeUser = getActiveUser(null); - String grantor = activeUser != null ? activeUser.getShortName() : null; - String[] groups = activeUser != null ? 
activeUser.getGroupNames() : null; - - Set grantorGroups = null; - - if (groups != null && groups.length > 0) { - grantorGroups = new HashSet<>(Arrays.asList(groups)); - } - - Map mapResource = new HashMap(); - mapResource.put(RangerHBaseResource.KEY_TABLE, tableName); - mapResource.put(RangerHBaseResource.KEY_COLUMN_FAMILY, colFamily); - mapResource.put(RangerHBaseResource.KEY_COLUMN, qualifier); - - GrantRevokeRequest ret = new GrantRevokeRequest(); - - ret.setGrantor(grantor); - ret.setGrantorGroups(grantorGroups); - ret.setDelegateAdmin(Boolean.FALSE); - ret.setEnableAudit(Boolean.TRUE); - ret.setReplaceExistingPermissions(Boolean.TRUE); - ret.setResource(mapResource); - ret.setClientIPAddress(getRemoteAddress()); - ret.setForwardedAddresses(null);//TODO: Need to check with Knox proxy how they handle forwarded add. - ret.setRemoteIPAddress(getRemoteAddress()); - ret.setRequestData(up.toString()); - - if(userName.startsWith(GROUP_PREFIX)) { - ret.getGroups().add(userName.substring(GROUP_PREFIX.length())); - } else { - ret.getUsers().add(userName); - } - - for (Permission.Action action : actions) { - switch(action.code()) { - case 'R': - ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_READ); - break; - - case 'W': - ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_WRITE); - break; - - case 'C': - ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_CREATE); - break; - - case 'A': - ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_ADMIN); - ret.setDelegateAdmin(Boolean.TRUE); - break; - case 'X': - ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_EXECUTE); - break; - default: - LOG.warn("grant(): ignoring action '" + action.name() + "' for user '" + userName + "'"); - } - } - - return ret; - } - - private GrantRevokeRequest createRevokeData(AccessControlProtos.RevokeRequest request) throws Exception { - AccessControlProtos.UserPermission up = request.getUserPermission(); - AccessControlProtos.Permission perm = up == null ? 
null : up.getPermission(); - - UserPermission userPerm = up == null ? null : AccessControlUtil.toUserPermission(up); - String userName = userPerm == null ? null : userPerm.getUser(); - String nameSpace = null; - String tableName = null; - String colFamily = null; - String qualifier = null; - - if(perm == null) { - throw new Exception("revoke(): invalid data - permission is null"); - } - - if(StringUtil.isEmpty(userName)) { - throw new Exception("revoke(): invalid data - username empty"); - } - - switch(perm.getType()) { - case Global : - tableName = colFamily = qualifier = RangerHBaseResource.WILDCARD; - break; - - case Table : - TablePermission tablePerm = (TablePermission)userPerm.getPermission(); - tableName = Bytes.toString(tablePerm.getTableName().getName()); - colFamily = Bytes.toString(tablePerm.getFamily()); - qualifier = Bytes.toString(tablePerm.getQualifier()); - break; - - case Namespace: - NamespacePermission namespacePermission = (NamespacePermission)userPerm.getPermission(); - nameSpace = namespacePermission.getNamespace(); - break; - } - - if(StringUtil.isEmpty(nameSpace) && StringUtil.isEmpty(tableName) && StringUtil.isEmpty(colFamily) && StringUtil.isEmpty(qualifier)) { - throw new Exception("revoke(): table/columnFamily/columnQualifier not specified"); - } - - tableName = StringUtil.isEmpty(tableName) ? RangerHBaseResource.WILDCARD : tableName; - colFamily = StringUtil.isEmpty(colFamily) ? RangerHBaseResource.WILDCARD : colFamily; - qualifier = StringUtil.isEmpty(qualifier) ? RangerHBaseResource.WILDCARD : qualifier; - - if(! StringUtil.isEmpty(nameSpace)) { - tableName = nameSpace + RangerHBaseResource.NAMESPACE_SEPARATOR + tableName; - } - - User activeUser = getActiveUser(null); - String grantor = activeUser != null ? activeUser.getShortName() : null; - String[] groups = activeUser != null ? 
activeUser.getGroupNames() : null; - - Set grantorGroups = null; - - if (groups != null && groups.length > 0) { - grantorGroups = new HashSet<>(Arrays.asList(groups)); - } - - Map mapResource = new HashMap(); - mapResource.put(RangerHBaseResource.KEY_TABLE, tableName); - mapResource.put(RangerHBaseResource.KEY_COLUMN_FAMILY, colFamily); - mapResource.put(RangerHBaseResource.KEY_COLUMN, qualifier); - - GrantRevokeRequest ret = new GrantRevokeRequest(); - - ret.setGrantor(grantor); - ret.setGrantorGroups(grantorGroups); - ret.setDelegateAdmin(Boolean.TRUE); // remove delegateAdmin privilege as well - ret.setEnableAudit(Boolean.TRUE); - ret.setReplaceExistingPermissions(Boolean.TRUE); - ret.setResource(mapResource); - ret.setClientIPAddress(getRemoteAddress()); - ret.setForwardedAddresses(null);//TODO: Need to check with Knox proxy how they handle forwarded add. - ret.setRemoteIPAddress(getRemoteAddress()); - ret.setRequestData(up.toString()); - - if(userName.startsWith(GROUP_PREFIX)) { - ret.getGroups().add(userName.substring(GROUP_PREFIX.length())); - } else { - ret.getUsers().add(userName); - } - - // revoke removes all permissions - ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_READ); - ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_WRITE); - ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_CREATE); - ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_ADMIN); - ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_EXECUTE); - - return ret; - } - - private void cleanUp_HBaseRangerPlugin() { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerAuthorizationCoprocessor.cleanUp_HBaseRangerPlugin()"); - } - if (hbasePlugin != null) { - hbasePlugin.setHBaseShuttingDown(true); - hbasePlugin.cleanup(); - AuditProviderFactory auditProviderFactory = hbasePlugin.getAuditProviderFactory(); - if (auditProviderFactory != null) { - auditProviderFactory.shutdown(); - } - } - if (LOG.isDebugEnabled()) { - LOG.debug("<== 
RangerAuthorizationCoprocessor.cleanUp_HBaseRangerPlugin() completed!"); - } - } - - private String getCommandString(String operationName, String tableNameStr, Map opMetaData) { - StringBuilder ret = new StringBuilder(); - if (!HbaseConstants.HBASE_META_TABLE.equals(tableNameStr)) { - ret.append(operationName); - ret.append(HbaseConstants.SPACE); - ret.append(tableNameStr).append(HbaseConstants.COMMA).append(HbaseConstants.SPACE); - ret.append(getPredicates(operationName, opMetaData)); - } - return ret.toString(); - } - - private String getPredicates(String operationName, Map opMetaData) { - StringBuilder ret = new StringBuilder(); - - if (MapUtils.isNotEmpty(opMetaData)) { - HashMap> families = (HashMap>) opMetaData.get(HbaseConstants.FAMILIES); - String startRowVal = (String) opMetaData.get(HbaseConstants.STARTROW); - String stopRowVal = (String) opMetaData.get(HbaseConstants.STOPROW); - String filterVal = (String) opMetaData.get(HbaseConstants.FILTER); - String rowVal = (String) opMetaData.get(HbaseConstants.ROW); - - if (!isQueryforInfo(families)) { - ret.append(HbaseConstants.OPEN_BRACES); - if (HbaseConstants.SCAN.equals(operationName)) { - if (StringUtils.isNotEmpty(startRowVal)) { - ret.append(formatPredicate(ret, PredicateType.STARTROW, startRowVal)); - } - if (StringUtils.isNotEmpty(stopRowVal)) { - ret.append(formatPredicate(ret, PredicateType.STOPROW, stopRowVal)); - } - } else { - if(StringUtils.isNotEmpty(rowVal)) { - ret.append(formatPredicate(ret, PredicateType.ROW, rowVal)); - } - } - if (StringUtils.isNotEmpty(filterVal)) { - ret.append(formatPredicate(ret, PredicateType.FILTER, filterVal)); - } - if (MapUtils.isNotEmpty(families)) { - String colfamily = families.toString(); - ret.append(formatPredicate(ret, PredicateType.COLUMNS, colfamily)); - } - ret.append(HbaseConstants.SPACE).append(HbaseConstants.CLOSED_BRACES); - } - } - return ret.toString(); - } - - private boolean isQueryforInfo(HashMap> families) { - boolean ret = false; - 
for(HashMap.Entry family : families.entrySet()) { - String familyKey = (String) family.getKey(); - if (HbaseConstants.INFO.equals(familyKey)) { - ret = true; - break; - } - } - return ret; - } - - private String formatPredicate(StringBuilder commandStr, PredicateType predicateType, String val) { - StringBuilder ret = new StringBuilder(); - if (HbaseConstants.OPEN_BRACES.equals(commandStr.toString())) { - ret.append(HbaseConstants.SPACE); - } else { - ret.append(HbaseConstants.COMMA).append(HbaseConstants.SPACE); - } - ret.append(buildPredicate(predicateType, val)); - return ret.toString(); - } - - private String buildPredicate(PredicateType predicateType, String val) { - StringBuilder ret = new StringBuilder(); - switch (predicateType) { - case STARTROW: - ret.append(PredicateType.STARTROW.name().toUpperCase()); - ret.append(HbaseConstants.ARROW); - ret.append(HbaseConstants.SINGLE_QUOTES).append(val).append(HbaseConstants.SINGLE_QUOTES); - break; - case STOPROW: - ret.append(PredicateType.STOPROW.name().toUpperCase()); - ret.append(HbaseConstants.ARROW); - ret.append(HbaseConstants.SINGLE_QUOTES).append(val).append(HbaseConstants.SINGLE_QUOTES); - break; - case FILTER: - ret.append(PredicateType.FILTER.name().toUpperCase()); - ret.append(HbaseConstants.ARROW); - ret.append(HbaseConstants.SINGLE_QUOTES).append(val).append(HbaseConstants.SINGLE_QUOTES); - break; - case COLUMNS: - ret.append(PredicateType.COLUMNS.name().toUpperCase()); - ret.append(HbaseConstants.ARROW); - ret.append(HbaseConstants.SINGLE_QUOTES).append(val).append(HbaseConstants.SINGLE_QUOTES); - break; - case ROW: - ret.append(val); - break; - } - return ret.toString(); - } - - private void checkGetTableInfoAccess(ObserverContext ctx, String operation, List descriptors, String regex, String accessPermission) { - - if (CollectionUtils.isNotEmpty(descriptors)) { - // Retains only those which passes authorization checks - User user = getActiveUser(ctx); - String access = accessPermission; - 
HbaseAuditHandler auditHandler = _factory.getAuditHandler(); // this will accumulate audits for all tables that succeed. - AuthorizationSession session = new AuthorizationSession(hbasePlugin) - .operation(operation) - .otherInformation("regex=" + regex) - .remoteAddress(getRemoteAddress()) - .auditHandler(auditHandler) - .user(user) - .access(access); - - Iterator itr = descriptors.iterator(); - while (itr.hasNext()) { - TableDescriptor htd = itr.next(); - String tableName = htd.getTableName().getNameAsString(); - session.table(tableName).buildRequest().authorize(); - if (!session.isAuthorized()) { - List events = null; - itr.remove(); - AuthzAuditEvent event = auditHandler.getAndDiscardMostRecentEvent(); - if (event != null) { - events = Lists.newArrayList(event); - } - auditHandler.logAuthzAudits(events); - } - } - if (descriptors.size() > 0) { - session.logCapturedEvents(); - } - } - } - - private void checkAccessForNamespaceDescriptor(ObserverContext ctx, String operation, List descriptors) { - - if (CollectionUtils.isNotEmpty(descriptors)) { - // Retains only those which passes authorization checks - User user = getActiveUser(ctx); - String access = _authUtils.getAccess(Action.ADMIN); - HbaseAuditHandler auditHandler = _factory.getAuditHandler(); // this will accumulate audits for all tables that succeed. 
- AuthorizationSession session = new AuthorizationSession(hbasePlugin) - .operation(operation) - .remoteAddress(getRemoteAddress()) - .auditHandler(auditHandler) - .user(user) - .access(access); - - Iterator itr = descriptors.iterator(); - while (itr.hasNext()) { - NamespaceDescriptor namespaceDescriptor = itr.next(); - String namespace = namespaceDescriptor.getName(); - session.table(namespace).buildRequest().authorize(); - if (!session.isAuthorized()) { - List events = null; - itr.remove(); - AuthzAuditEvent event = auditHandler.getAndDiscardMostRecentEvent(); - if (event != null) { - events = Lists.newArrayList(event); - } - auditHandler.logAuthzAudits(events); - } - } - if (descriptors.size() > 0) { - session.logCapturedEvents(); - } - } - } - - enum PredicateType {STARTROW, STOPROW, FILTER, COLUMNS, ROW}; -} + private static final Logger LOG = LoggerFactory.getLogger(RangerAuthorizationCoprocessor.class.getName()); + private static final Logger PERF_HBASEAUTH_REQUEST_LOG = RangerPerfTracer.getPerfLogger("hbaseauth.request"); + + private static final String GROUP_PREFIX = "@"; + private static final String MASTER_COPROCESSOR_TYPE = "master"; + private static final String REGIONAL_COPROCESSOR_TYPE = "regional"; + private static final String REGIONAL_SERVER_COPROCESSOR_TYPE = "regionalServer"; + private static final String[] SPECIAL_TABLES = new String[] {"hbase:meta", "-ROOT-", ".META.", "hbase:acl", "hbase:namespace"}; + + private static boolean updateRangerPoliciesOnGrantRevoke = RangerHadoopConstants.HBASE_UPDATE_RANGER_POLICIES_ON_GRANT_REVOKE_DEFAULT_VALUE; + + private static volatile RangerHBasePlugin hbasePlugin; + + /* + * These are package level only for testability and aren't meant to be exposed outside via getters/setters or made available to derived classes. 
+ */ + final HbaseFactory factory = HbaseFactory.getInstance(); + final HbaseUserUtils userUtils = factory.getUserUtils(); + final HbaseAuthUtils authUtils = factory.getAuthUtils(); + + private UserProvider userProvider; + private RegionCoprocessorEnvironment regionEnv; + private final Map scannerOwners = new MapMaker().weakKeys().makeMap(); + + /** + * if we should check EXEC permissions + */ + private boolean shouldCheckExecPermission; + private String coprocessorType = "unknown"; + + public void setColumnAuthOptimizationEnabled(boolean enable) throws Exception { + RangerHBasePlugin plugin = hbasePlugin; + + if (plugin != null) { + plugin.setColumnAuthOptimizationEnabled(enable); + } else { + throw new Exception("Error while enabling column auth optimization"); + } + } + + @Override + public Optional getRegionObserver() { + return Optional.of(this); + } + + @Override + public Optional getEndpointObserver() { + return Optional.of(this); + } + + @Override + public Optional getBulkLoadObserver() { + return Optional.of(this); + } + + @Override + public Optional getMasterObserver() { + return Optional.of(this); + } + + @Override + public Optional getRegionServerObserver() { + return Optional.of(this); + } + + @Override + public void preCreateTable(ObserverContext c, TableDescriptor desc, RegionInfo[] regions) throws IOException { + requirePermission(c, "createTable", desc.getTableName().getName(), Permission.Action.CREATE); + } + + @Override + public void preDeleteTable(ObserverContext c, TableName tableName) throws IOException { + requirePermission(c, "deleteTable", tableName.getName(), null, null, Action.CREATE); + } + + @Override + public void preModifyTable(ObserverContext c, TableName tableName, TableDescriptor htd) throws IOException { + requirePermission(c, "modifyTable", tableName.getName(), null, null, Action.CREATE); + } + + @Override + public void preEnableTable(ObserverContext c, TableName tableName) throws IOException { + requirePermission(c, 
"enableTable", tableName.getName(), null, null, Action.CREATE); + } + + @Override + public void preDisableTable(ObserverContext c, TableName tableName) throws IOException { + requirePermission(c, "disableTable", tableName.getName(), null, null, Action.CREATE); + } + + @Override + public void preAbortProcedure(ObserverContext observerContext, long procId) throws IOException { + //if(!procEnv.isProcedureOwner(procId, this.getActiveUser())) { + requirePermission(observerContext, "abortProcedure", Action.ADMIN); + //} + } + + @Override + public void postGetProcedures(ObserverContext observerContext) throws IOException { + requirePermission(observerContext, "getProcedures", Action.ADMIN); + } + + @Override + public void preMove(ObserverContext c, RegionInfo region, ServerName srcServer, ServerName destServer) throws IOException { + requirePermission(c, "move", region.getTable().getName(), null, null, Action.ADMIN); + } + + @Override + public void preAssign(ObserverContext c, RegionInfo regionInfo) throws IOException { + requirePermission(c, "assign", regionInfo.getTable().getName(), null, null, Action.ADMIN); + } + + @Override + public void preUnassign(ObserverContext c, RegionInfo regionInfo, boolean force) throws IOException { + requirePermission(c, "unassign", regionInfo.getTable().getName(), null, null, Action.ADMIN); + } + + @Override + public void preRegionOffline(ObserverContext c, RegionInfo regionInfo) throws IOException { + requirePermission(c, "regionOffline", regionInfo.getTable().getName(), null, null, Action.ADMIN); + } + + @Override + public void preBalance(ObserverContext c, BalanceRequest request) throws IOException { + requirePermission(c, "balance", Permission.Action.ADMIN); + } + + @Override + public void preBalanceSwitch(ObserverContext c, boolean newValue) throws IOException { + requirePermission(c, "balanceSwitch", Permission.Action.ADMIN); + } + + @Override + public void preShutdown(ObserverContext c) throws IOException { + requirePermission(c, 
"shutdown", Permission.Action.ADMIN); + cleanUpHBaseRangerPlugin(); + } + + @Override + public void preStopMaster(ObserverContext c) throws IOException { + requirePermission(c, "stopMaster", Permission.Action.ADMIN); + cleanUpHBaseRangerPlugin(); + } + + @Override + public void postStartMaster(ObserverContext ctx) throws IOException { + if (updateRangerPoliciesOnGrantRevoke) { + LOG.debug("Calling create ACL table ..."); + + Admin admin = (ctx.getEnvironment()).getConnection().getAdmin(); + Throwable var3 = null; + + try { + if (!admin.tableExists(PermissionStorage.ACL_TABLE_NAME)) { + createACLTable(admin); + } + } catch (Throwable var12) { + var3 = var12; + + throw var12; + } finally { + if (admin != null) { + if (var3 != null) { + try { + admin.close(); + } catch (Throwable var11) { + var3.addSuppressed(var11); + } + } else { + admin.close(); + } + } + } + } + } + + @Override + public void preSnapshot(ObserverContext ctx, SnapshotDescription snapshot, TableDescriptor hTableDescriptor) throws IOException { + requirePermission(ctx, "snapshot", hTableDescriptor.getTableName().getName(), Permission.Action.ADMIN); + } + + @Override + public void preCloneSnapshot(ObserverContext ctx, SnapshotDescription snapshot, TableDescriptor hTableDescriptor) throws IOException { + requirePermission(ctx, "cloneSnapshot", hTableDescriptor.getTableName().getName(), Permission.Action.ADMIN); + } + + @Override + public void preRestoreSnapshot(ObserverContext ctx, SnapshotDescription snapshot, TableDescriptor hTableDescriptor) throws IOException { + requirePermission(ctx, "restoreSnapshot", hTableDescriptor.getTableName().getName(), Permission.Action.ADMIN); + } + + @Override + public void preDeleteSnapshot(ObserverContext ctx, SnapshotDescription snapshot) throws IOException { + requirePermission(ctx, "deleteSnapshot", snapshot.getTableName().getName(), Permission.Action.ADMIN); + } + + @Override + public void postGetTableDescriptors(ObserverContext ctx, List tableNamesList, List 
descriptors, String regex) { + if (LOG.isDebugEnabled()) { + LOG.debug("==> postGetTableDescriptors(count(tableNamesList)={}, count(descriptors)={}, regex={})", tableNamesList == null ? 0 : tableNamesList.size(), descriptors == null ? 0 : descriptors.size(), regex); + } + + checkGetTableInfoAccess(ctx, "getTableDescriptors", descriptors, regex, authUtils.getAccess(Action.CREATE)); + + if (LOG.isDebugEnabled()) { + LOG.debug("<== postGetTableDescriptors(count(tableNamesList)={}, count(descriptors)={}, regex={})", tableNamesList == null ? 0 : tableNamesList.size(), descriptors == null ? 0 : descriptors.size(), regex); + } + } + + @Override + public void postGetTableNames(ObserverContext ctx, List descriptors, String regex) { + if (LOG.isDebugEnabled()) { + LOG.debug("==> postGetTableNames(count(descriptors)={}, regex={})", descriptors == null ? 0 : descriptors.size(), regex); + } + + checkGetTableInfoAccess(ctx, "getTableNames", descriptors, regex, RangerPolicyEngine.ANY_ACCESS); + + if (LOG.isDebugEnabled()) { + LOG.debug("<== postGetTableNames(count(descriptors)={}, regex={})", descriptors == null ? 
0 : descriptors.size(), regex); + } + } + + @Override + public void preCreateNamespace(ObserverContext ctx, NamespaceDescriptor ns) throws IOException { + requireGlobalPermission(ctx, "createNamespace", ns.getName(), Action.ADMIN); + } + + @Override + public void preDeleteNamespace(ObserverContext ctx, String namespace) throws IOException { + requireGlobalPermission(ctx, "deleteNamespace", namespace, Action.ADMIN); + } + + @Override + public void preModifyNamespace(ObserverContext ctx, NamespaceDescriptor ns) throws IOException { + requireGlobalPermission(ctx, "modifyNamespace", ns.getName(), Action.ADMIN); + } + + @Override + public void postListNamespaceDescriptors(ObserverContext ctx, List descriptors) { + LOG.debug("==> RangerAuthorizationCoprocessor.postListNamespaceDescriptors()"); + + checkAccessForNamespaceDescriptor(ctx, "getNameSpaceDescriptors", descriptors); + + LOG.debug("<== RangerAuthorizationCoprocessor.postListNamespaceDescriptors()"); + } + + @Override + public void preSetUserQuota(final ObserverContext ctx, final String userName, final GlobalQuotaSettings quotas) throws IOException { + requireGlobalPermission(ctx, "setUserQuota", null, Action.ADMIN); + } + + @Override + public void preSetUserQuota(final ObserverContext ctx, final String userName, final TableName tableName, final GlobalQuotaSettings quotas) throws IOException { + requirePermission(ctx, "setUserTableQuota", tableName.getName(), null, null, Action.ADMIN); + } + + @Override + public void preSetUserQuota(final ObserverContext ctx, final String userName, final String namespace, final GlobalQuotaSettings quotas) throws IOException { + requireGlobalPermission(ctx, "setUserNamespaceQuota", namespace, Action.ADMIN); + } + + @Override + public void preSetTableQuota(final ObserverContext ctx, final TableName tableName, final GlobalQuotaSettings quotas) throws IOException { + requirePermission(ctx, "setTableQuota", tableName.getName(), null, null, Action.ADMIN); + } + + @Override + public 
void preSetNamespaceQuota(final ObserverContext ctx, final String namespace, final GlobalQuotaSettings quotas) throws IOException { + requireGlobalPermission(ctx, "setNamespaceQuota", namespace, Action.ADMIN); + } + + @Override + public void preOpen(ObserverContext observerContext) throws IOException { + RegionCoprocessorEnvironment env = observerContext.getEnvironment(); + final Region region = env.getRegion(); + + if (region == null) { + LOG.error("NULL region from RegionCoprocessorEnvironment in preOpen()"); + } else { + RegionInfo regionInfo = region.getRegionInfo(); + + if (isSpecialTable(regionInfo)) { + requireSystemOrSuperUser(observerContext); + } else { + requirePermission(observerContext, "open", getTableName(observerContext.getEnvironment()), Action.ADMIN); + } + } + } + + @Override + public void preFlush(ObserverContext e, FlushLifeCycleTracker tracker) throws IOException { + requirePermission(e, "flush", getTableName(e.getEnvironment()), null, null, Action.CREATE); + } + + @Override + public void preCompactSelection(ObserverContext e, Store store, List candidates, CompactionLifeCycleTracker tracker) throws IOException { + requirePermission(e, "compactSelection", getTableName(e.getEnvironment()), null, null, Action.CREATE); + } + + @Override + public InternalScanner preCompact(ObserverContext e, Store store, InternalScanner scanner, ScanType scanType, CompactionLifeCycleTracker tracker, CompactionRequest request) throws IOException { + requirePermission(e, "compact", getTableName(e.getEnvironment()), null, null, Action.CREATE); + + return scanner; + } + + @Override + public void preClose(ObserverContext e, boolean abortRequested) throws IOException { + requirePermission(e, "close", getTableName(e.getEnvironment()), Permission.Action.ADMIN); + } + @Override + public void preGetOp(final ObserverContext rEnv, final Get get, final List result) throws IOException { + LOG.debug("==> preGetOp"); -class RangerHBasePlugin extends RangerBasePlugin { - private 
static final Logger LOG = LoggerFactory.getLogger(RangerHBasePlugin.class); - boolean isHBaseShuttingDown = false; - private boolean isColumnAuthOptimizationEnabled = false; - - public RangerHBasePlugin(String appType) { - super("hbase", appType); - } - - public void setHBaseShuttingDown(boolean hbaseShuttingDown) { - isHBaseShuttingDown = hbaseShuttingDown; - } - - @Override - public RangerAccessResult isAccessAllowed(RangerAccessRequest request, RangerAccessResultProcessor resultProcessor) { - RangerAccessResult ret = null; - if (isHBaseShuttingDown) { - ret = new RangerAccessResult(RangerPolicy.POLICY_TYPE_ACCESS, this.getServiceName(), this.getServiceDef(), request); - ret.setIsAllowed(true); - ret.setIsAudited(false); - LOG.warn("Auth request came after HBase shutdown...."); - } else { - ret = super.isAccessAllowed(request, resultProcessor); - } - return ret; - } - @Override - public void setPolicies(ServicePolicies policies) { - super.setPolicies(policies); - this.isColumnAuthOptimizationEnabled = Boolean.parseBoolean(this.getServiceConfigs().get(RangerHadoopConstants.HBASE_COLUMN_AUTH_OPTIMIZATION)); - LOG.info("isColumnAuthOptimizationEnabled="+this.isColumnAuthOptimizationEnabled); - } - public boolean getPropertyIsColumnAuthOptimizationEnabled(){ - return this.isColumnAuthOptimizationEnabled; - } - public void setColumnAuthOptimizationEnabled(boolean enable){ - this.isColumnAuthOptimizationEnabled = enable; - } -} + String commandStr = null; + + try { + RegionCoprocessorEnvironment e = rEnv.getEnvironment(); + Map> familyMap = get.getFamilyMap(); + + String operation = "get"; + byte[] tableName = getTableName(e); + String tableNameStr = tableName != null ? 
new String(tableName) : " "; + + commandStr = getCommandString(HbaseConstants.GET, tableNameStr, get.toMap()); + + Filter filter = authorizeAccess(rEnv, operation, Action.READ, e, familyMap, commandStr); + + if (filter == null) { + LOG.debug("preGetOp: all access allowed, no filter returned"); + } else { + Filter existingFilter = get.getFilter(); + Filter combinedFilter = combineFilters(filter, existingFilter); + + get.setFilter(combinedFilter); + + LOG.debug("preGetOp: partial access, new filter added"); + } + } finally { + LOG.debug("<== preGetOp: commandStr: {}", commandStr); + } + } + + @Override + public boolean preExists(ObserverContext c, Get get, boolean exists) throws IOException { + requirePermission(c, "exists", TablePermission.Action.READ, c.getEnvironment(), get.familySet()); + + return exists; + } + + @Override + public void prePut(ObserverContext c, Put put, WALEdit edit, Durability durability) throws IOException { + requirePermission(c, "put", TablePermission.Action.WRITE, c.getEnvironment(), put.getFamilyCellMap()); + } + + @Override + public void preDelete(ObserverContext c, Delete delete, WALEdit edit, Durability durability) throws IOException { + requirePermission(c, "delete", TablePermission.Action.WRITE, c.getEnvironment(), delete.getFamilyCellMap()); + } + + @Override + public boolean preCheckAndPut(ObserverContext c, byte[] row, byte[] family, byte[] qualifier, CompareOperator compareOp, ByteArrayComparable comparator, Put put, boolean result) throws IOException { + Collection familyMap = Arrays.asList(new byte[][] {family}); + + requirePermission(c, "checkAndPut", TablePermission.Action.READ, c.getEnvironment(), familyMap); + requirePermission(c, "checkAndPut", TablePermission.Action.WRITE, c.getEnvironment(), familyMap); + + return result; + } + + @Override + public boolean preCheckAndDelete(ObserverContext c, byte[] row, byte[] family, byte[] qualifier, CompareOperator compareOp, ByteArrayComparable comparator, Delete delete, boolean 
result) throws IOException { + Collection familyMap = Arrays.asList(new byte[][] {family}); + + requirePermission(c, "checkAndDelete", TablePermission.Action.READ, c.getEnvironment(), familyMap); + requirePermission(c, "checkAndDelete", TablePermission.Action.WRITE, c.getEnvironment(), familyMap); + + return result; + } + + @Override + public Result preAppend(ObserverContext c, Append append) throws IOException { + requirePermission(c, "append", TablePermission.Action.WRITE, c.getEnvironment(), append.getFamilyCellMap()); + + return null; + } + + @Override + public Result preIncrement(ObserverContext c, Increment increment) throws IOException { + requirePermission(c, "increment", TablePermission.Action.WRITE, c.getEnvironment(), increment.getFamilyCellMap().keySet()); + + return null; + } + + @Override + public void preScannerOpen(ObserverContext c, Scan scan) throws IOException { + LOG.debug("==> preScannerOpen"); + + String commandStr = null; + + try { + RegionCoprocessorEnvironment e = c.getEnvironment(); + + Map> familyMap = scan.getFamilyMap(); + String operation = "scannerOpen"; + byte[] tableName = getTableName(e); + String tableNameStr = tableName != null ? new String(tableName) : " "; + + commandStr = getCommandString(HbaseConstants.SCAN, tableNameStr, scan.toMap()); + + Filter filter = authorizeAccess(c, operation, Action.READ, e, familyMap, commandStr); + + if (filter == null) { + LOG.debug("preScannerOpen: Access allowed for all families/column. No filter added"); + } else { + LOG.debug("preScannerOpen: Access allowed for some of the families/column. 
New filter added."); + + Filter existingFilter = scan.getFilter(); + Filter combinedFilter = combineFilters(filter, existingFilter); + + scan.setFilter(combinedFilter); + } + } finally { + LOG.debug("<== preScannerOpen: commandStr: {}", commandStr); + } + } + + @Override + public RegionScanner postScannerOpen(ObserverContext c, Scan scan, RegionScanner s) { + User user = getActiveUser(c); + + if (user != null && user.getShortName() != null) { + scannerOwners.put(s, user.getShortName()); + } + + return s; + } + + @Override + public boolean preScannerNext(ObserverContext c, InternalScanner s, List result, int limit, boolean hasNext) throws IOException { + requireScannerOwner(c, s); + + return hasNext; + } + + @Override + public void preScannerClose(ObserverContext c, InternalScanner s) throws IOException { + requireScannerOwner(c, s); + } + + @Override + public void postScannerClose(ObserverContext c, InternalScanner s) { + scannerOwners.remove(s); + } + + @Override + public void preBulkLoadHFile(ObserverContext ctx, List> familyPaths) throws IOException { + List cfs = new LinkedList<>(); + + for (Pair el : familyPaths) { + cfs.add(el.getFirst()); + } + + requirePermission(ctx, "bulkLoadHFile", Permission.Action.WRITE, ctx.getEnvironment(), cfs); + } + + @Override + public void preStopRegionServer(ObserverContext env) throws IOException { + requirePermission(env, "stop", Permission.Action.ADMIN); + cleanUpHBaseRangerPlugin(); + } + + @Override + public void start(CoprocessorEnvironment env) { + String appType = "unknown"; + + shouldCheckExecPermission = env.getConfiguration().getBoolean(AccessControlConstants.EXEC_PERMISSION_CHECKS_KEY, AccessControlConstants.DEFAULT_EXEC_PERMISSION_CHECKS); + + if (env instanceof MasterCoprocessorEnvironment) { + coprocessorType = MASTER_COPROCESSOR_TYPE; + appType = "hbaseMaster"; + } else if (env instanceof RegionServerCoprocessorEnvironment) { + coprocessorType = REGIONAL_SERVER_COPROCESSOR_TYPE; + appType = "hbaseRegional"; + } 
else if (env instanceof RegionCoprocessorEnvironment) { + regionEnv = (RegionCoprocessorEnvironment) env; + coprocessorType = REGIONAL_COPROCESSOR_TYPE; + appType = "hbaseRegional"; + } + + this.userProvider = UserProvider.instantiate(env.getConfiguration()); + + Configuration conf = env.getConfiguration(); + + HbaseFactory.initialize(conf); + + // create and initialize the plugin class + RangerHBasePlugin plugin = hbasePlugin; + + if (plugin == null) { + synchronized (RangerAuthorizationCoprocessor.class) { + plugin = hbasePlugin; + + if (plugin == null) { + plugin = new RangerHBasePlugin(appType); + + plugin.init(); + + updateRangerPoliciesOnGrantRevoke = plugin.getConfig().getBoolean(RangerHadoopConstants.HBASE_UPDATE_RANGER_POLICIES_ON_GRANT_REVOKE_PROP, RangerHadoopConstants.HBASE_UPDATE_RANGER_POLICIES_ON_GRANT_REVOKE_DEFAULT_VALUE); + + hbasePlugin = plugin; + } + } + } + + LOG.debug("Start of Coprocessor: [{}]", coprocessorType); + } + + @Override + public Iterable getServices() { + return Collections.singleton(AccessControlService.newReflectiveService(this)); + } + + public void prePrepareBulkLoad(ObserverContext ctx, PrepareBulkLoadRequest request) throws IOException { + List cfs = null; + + requirePermission(ctx, "prePrepareBulkLoad", Permission.Action.WRITE, ctx.getEnvironment(), cfs); + } + + public void preCleanupBulkLoad(ObserverContext ctx, CleanupBulkLoadRequest request) throws IOException { + List cfs = null; + + requirePermission(ctx, "preCleanupBulkLoad", Permission.Action.WRITE, ctx.getEnvironment(), cfs); + } + + @Override + public Message preEndpointInvocation(ObserverContext ctx, Service service, String methodName, Message request) throws IOException { + // Don't intercept calls to our own AccessControlService, we check for + // appropriate permissions in the service handlers + if (shouldCheckExecPermission && !(service instanceof AccessControlService)) { + requirePermission(ctx, "invoke(" + service.getDescriptorForType().getName() + "." 
+ methodName + ")", getTableName(ctx.getEnvironment()), null, null, Action.EXEC); + } + + return request; + } + + @Override + public void grant(RpcController controller, AccessControlProtos.GrantRequest request, RpcCallback done) { + boolean isSuccess = false; + + if (updateRangerPoliciesOnGrantRevoke) { + GrantRevokeRequest grData; + + try { + grData = createGrantData(request); + + RangerHBasePlugin plugin = hbasePlugin; + + if (plugin != null) { + RangerAccessResultProcessor auditHandler = new RangerDefaultAuditHandler(hbasePlugin.getConfig()); + + plugin.grantAccess(grData, auditHandler); + + isSuccess = true; + } + } catch (AccessControlException excp) { + LOG.warn("grant() failed", excp); + + ResponseConverter.setControllerException(controller, new AccessDeniedException(excp)); + } catch (IOException excp) { + LOG.warn("grant() failed", excp); + + ResponseConverter.setControllerException(controller, excp); + } catch (Exception excp) { + LOG.warn("grant() failed", excp); + + ResponseConverter.setControllerException(controller, new CoprocessorException(excp.getMessage())); + } + } + + AccessControlProtos.GrantResponse response = isSuccess ? 
AccessControlProtos.GrantResponse.getDefaultInstance() : null; + + done.run(response); + } + + @Override + public void revoke(RpcController controller, AccessControlProtos.RevokeRequest request, RpcCallback done) { + boolean isSuccess = false; + + if (updateRangerPoliciesOnGrantRevoke) { + GrantRevokeRequest grData; + + try { + grData = createRevokeData(request); + + RangerHBasePlugin plugin = hbasePlugin; + + if (plugin != null) { + RangerAccessResultProcessor auditHandler = new RangerDefaultAuditHandler(hbasePlugin.getConfig()); + + plugin.revokeAccess(grData, auditHandler); + + isSuccess = true; + } + } catch (AccessControlException excp) { + LOG.warn("revoke() failed", excp); + + ResponseConverter.setControllerException(controller, new AccessDeniedException(excp)); + } catch (IOException excp) { + LOG.warn("revoke() failed", excp); + + ResponseConverter.setControllerException(controller, excp); + } catch (Exception excp) { + LOG.warn("revoke() failed", excp); + + ResponseConverter.setControllerException(controller, new CoprocessorException(excp.getMessage())); + } + } + + AccessControlProtos.RevokeResponse response = isSuccess ? 
AccessControlProtos.RevokeResponse.getDefaultInstance() : null; + + done.run(response); + } + + @Override + public void getUserPermissions(RpcController controller, AccessControlProtos.GetUserPermissionsRequest request, RpcCallback done) { + AccessControlProtos.GetUserPermissionsResponse response = null; + + try { + String operation = "userPermissions"; + final RangerAccessResourceImpl resource = new RangerAccessResourceImpl(); + User user = getActiveUser(null); + Set groups = userUtils.getUserGroups(user); + + if (groups.isEmpty() && user.getUGI() != null) { + String[] groupArray = user.getUGI().getGroupNames(); + + if (groupArray != null) { + groups = Sets.newHashSet(groupArray); + } + } + + RangerAccessRequestImpl rangerAccessrequest = new RangerAccessRequestImpl(resource, null, userUtils.getUserAsString(user), groups, null); + + rangerAccessrequest.setAction(operation); + rangerAccessrequest.setClientIPAddress(getRemoteAddress()); + rangerAccessrequest.setResourceMatchingScope(RangerAccessRequest.ResourceMatchingScope.SELF); + + List perms; + + if (request.getType() == AccessControlProtos.Permission.Type.Table) { + final TableName table = request.hasTableName() ? 
ProtobufUtil.toTableName(request.getTableName()) : null; + + requirePermission(null, operation, table.getName(), Action.ADMIN); + resource.setValue(RangerHBaseResource.KEY_TABLE, table.getNameAsString()); + + perms = User.runAsLoginUser(() -> getUserPermissions(hbasePlugin.getResourceACLs(rangerAccessrequest), table.getNameAsString(), false)); + } else if (request.getType() == AccessControlProtos.Permission.Type.Namespace) { + final String namespace = request.getNamespaceName().toStringUtf8(); + + requireGlobalPermission(null, "getUserPermissionForNamespace", namespace, Action.ADMIN); + resource.setValue(RangerHBaseResource.KEY_TABLE, namespace + RangerHBaseResource.NAMESPACE_SEPARATOR); + rangerAccessrequest.setRequestData(namespace); + + perms = User.runAsLoginUser(() -> getUserPermissions(hbasePlugin.getResourceACLs(rangerAccessrequest), namespace, true)); + } else { + requirePermission(null, "userPermissions", Action.ADMIN); + + perms = User.runAsLoginUser(() -> getUserPermissions(hbasePlugin.getResourceACLs(rangerAccessrequest), null, false)); + + if (userUtils.isSuperUser(user)) { + perms.add(new UserPermission(userUtils.getUserAsString(user), Permission.newBuilder(PermissionStorage.ACL_TABLE_NAME).withActions(Action.values()).build())); + } + } + + response = AccessControlUtil.buildGetUserPermissionsResponse(perms); + } catch (IOException ioe) { + // pass exception back up + ResponseConverter.setControllerException(controller, ioe); + } + + done.run(response); + } + + @Override + public void checkPermissions(RpcController controller, AccessControlProtos.CheckPermissionsRequest request, RpcCallback done) { + LOG.debug("checkPermissions(): "); + } + + @Override + public void hasPermission(RpcController controller, AccessControlProtos.HasPermissionRequest request, RpcCallback done) { + LOG.debug("hasPermission(): "); + } + + // Utilities Methods + protected byte[] getTableName(RegionCoprocessorEnvironment e) { + Region region = e.getRegion(); + byte[] tableName 
= null; + + if (region != null) { + RegionInfo regionInfo = region.getRegionInfo(); + + if (regionInfo != null) { + tableName = regionInfo.getTable().getName(); + } + } + + return tableName; + } + + protected void requireSystemOrSuperUser(ObserverContext ctx) throws IOException { + User user = User.getCurrent(); + + if (user == null) { + throw new IOException("Unable to obtain the current user, authorization checks for internal operations will not work correctly!"); + } + String systemUser = user.getShortName(); + User activeUser = getActiveUser(ctx); + if (!Objects.equals(systemUser, activeUser.getShortName()) && !userUtils.isSuperUser(activeUser)) { + throw new AccessDeniedException("User '" + user.getShortName() + "' is not system or super user."); + } + } + + protected boolean isSpecialTable(RegionInfo regionInfo) { + return isSpecialTable(regionInfo.getTable().getName()); + } + + protected boolean isSpecialTable(byte[] tableName) { + return isSpecialTable(Bytes.toString(tableName)); + } + + protected boolean isSpecialTable(String input) { + for (String specialTable : SPECIAL_TABLES) { + if (specialTable.equals(input)) { + return true; + } + } + + return false; + } + + protected boolean isAccessForMetaTables(RegionCoprocessorEnvironment env) { + RegionInfo hri = env.getRegion().getRegionInfo(); + + return hri.isMetaRegion(); + } + + // Check if the user has global permission ...
+ protected void requireGlobalPermission(ObserverContext ctx, String request, String objName, Permission.Action action) throws AccessDeniedException { + authorizeAccess(ctx, request, objName, action, null, null, null); + } + + protected void requirePermission(ObserverContext ctx, String request, Permission.Action action) throws AccessDeniedException { + requirePermission(ctx, request, null, action); + } + + protected void requirePermission(ObserverContext ctx, String request, byte[] tableName, Permission.Action action) throws AccessDeniedException { + String table = Bytes.toString(tableName); + + authorizeAccess(ctx, request, null, action, table, null, null); + } + + protected void requirePermission(ObserverContext ctx, String request, byte[] aTableName, byte[] aColumnFamily, byte[] aQualifier, Permission.Action action) throws AccessDeniedException { + String table = Bytes.toString(aTableName); + String columnFamily = Bytes.toString(aColumnFamily); + String column = Bytes.toString(aQualifier); + + authorizeAccess(ctx, request, null, action, table, columnFamily, column); + } + + protected void requirePermission(ObserverContext ctx, String request, Permission.Action perm, RegionCoprocessorEnvironment env, Collection families) throws IOException { + HashMap> familyMap = new HashMap<>(); + + if (families != null) { + for (byte[] family : families) { + familyMap.put(family, null); + } + } + + requirePermission(ctx, request, perm, env, familyMap); + } + + /** + * @return empty map if families is null, would never have empty or null keys, would never have null values, values could be empty (non-null) set + */ + Map> getColumnFamilies(Map> families) { + if (families == null) { + // null families map passed. Ok, returning empty map. 
+ return Collections.emptyMap(); + } + + Map> result = new HashMap<>(); + + for (Map.Entry> anEntry : families.entrySet()) { + byte[] familyBytes = anEntry.getKey(); + String family = Bytes.toString(familyBytes); + + if (family == null || family.isEmpty()) { + LOG.error("Unexpected Input: got null or empty column family (key) in families map! Ignoring..."); + } else { + Collection columnCollection = anEntry.getValue(); + + if (CollectionUtils.isEmpty(columnCollection)) { + // family points to null map, OK. + // if column auth disabled, then also empty set is fine + LOG.debug("RangerAuthorizationCoprocessor getColumnFamilies: columns are empty. Setting columns to emptySet in familyMap"); + + result.put(family, Collections.emptySet()); + } else { + LOG.debug("RangerAuthorizationCoprocessor getColumnFamilies: columns exist"); + + Iterator columnIterator = new ColumnIterator(columnCollection); + Set columns = new HashSet<>(); + + try { + while (columnIterator.hasNext()) { + String column = columnIterator.next(); + + columns.add(column); + } + } catch (Throwable t) { + LOG.error("Exception encountered when converting family-map to set of columns. 
Ignoring and returning empty set of columns for family[{}]", family, t); + LOG.error("Ignoring exception and returning empty set of columns for family[{}]", family); + + columns.clear(); + } + + result.put(family, columns); + } + } + } + + return result; + } + + ColumnFamilyAccessResult evaluateAccess(ObserverContext ctx, String operation, Action action, final RegionCoprocessorEnvironment env, final Map> familyMap, String commandStr) throws AccessDeniedException { + if (LOG.isDebugEnabled()) { + LOG.debug("evaluateAccess: isColumnAuthOptimizationEnabled={}", hbasePlugin.getPropertyIsColumnAuthOptimizationEnabled()); + } + + String access = authUtils.getAccess(action); + User user = getActiveUser(ctx); + String userName = userUtils.getUserAsString(user); + final Map> colFamiliesForDebugLoggingOnly; + + if (LOG.isDebugEnabled()) { + colFamiliesForDebugLoggingOnly = getColumnFamilies(familyMap); + + LOG.debug("evaluateAccess: entered: user[{}], Operation[{}], access[{}], families[{}]", userName, operation, access, colFamiliesForDebugLoggingOnly); + } else { + colFamiliesForDebugLoggingOnly = Collections.emptyMap(); + } + + byte[] tableBytes = getTableName(env); + + if (tableBytes == null || tableBytes.length == 0) { + LOG.debug("evaluateAccess: Unexpected: Couldn't get table from RegionCoprocessorEnvironment. 
Access denied, not audited"); + + throw new AccessDeniedException("Insufficient permissions for operation '" + operation + "',action: " + action); + } + + String table = Bytes.toString(tableBytes); + ColumnFamilyAccessResult result; + + if (canSkipAccessCheck(user, operation, access, table) || canSkipAccessCheck(user, operation, access, env)) { + LOG.debug("evaluateAccess: exiting: isKnownAccessPattern returned true: access allowed, not audited"); + + result = new ColumnFamilyAccessResult(true, true, null, null, null, null, null); + + LOG.debug("evaluateAccess: exiting: user[{}], Operation[{}], access[{}], families[{}], verdict[{}]", userName, operation, access, colFamiliesForDebugLoggingOnly, result); + + return result; + } + + // let's create a session that would be reused. Set things on it that won't change. + HbaseAuditHandler auditHandler = factory.getAuditHandler(); + + AuthorizationSession session = new AuthorizationSession(hbasePlugin) + .operation(operation) + .otherInformation(commandStr) + .remoteAddress(getRemoteAddress()) + .auditHandler(auditHandler) + .user(user) + .access(access) + .table(table); + + LOG.debug("evaluateAccess: families to process: {}", colFamiliesForDebugLoggingOnly); + + if (familyMap == null || familyMap.isEmpty()) { + LOG.debug("evaluateAccess: Null or empty families collection, ok. Table level access is desired"); + + session.buildRequest().authorize(); + + boolean authorized = session.isAuthorized(); + String reason = ""; + + if (authorized) { + LOG.debug("evaluateAccess: table level access granted [{}]", table); + } else { + reason = String.format("Insufficient permissions for user ‘%s',action: %s, tableName:%s, no column families found.", user.getName(), operation, table); + } + + AuthzAuditEvent event = auditHandler.getAndDiscardMostRecentEvent(); // this could be null, of course, depending on audit settings of table. + // if authorized then pass captured events as access allowed set else as access denied set. 
+ result = new ColumnFamilyAccessResult(authorized, authorized, authorized ? Collections.singletonList(event) : null, null, authorized ? null : event, reason, null); + + LOG.debug("evaluateAccess: exiting: user[{}], Operation[{}], access[{}], families[{}], verdict[{}]", userName, operation, access, colFamiliesForDebugLoggingOnly, result); + + return result; + } else { + LOG.debug("evaluateAccess: Families collection not null. Skipping table-level check, will do finer level check"); + } + + boolean everythingIsAccessible = true; + boolean somethingIsAccessible = false; + + /* + * we would have to accumulate audits of all successful accesses and any one denial (which in our case ends up being the last denial) + * We need to keep audit events for family level access check seperate because we don't want them logged in some cases. + */ + List authorizedEvents = new ArrayList<>(); + List familyLevelAccessEvents = new ArrayList<>(); + AuthzAuditEvent deniedEvent = null; + String denialReason = null; + + // we need to cache the auths results so that we can create a filter, if needed + Map> columnsAccessAllowed = new HashMap<>(); + Set familesAccessAllowed = new HashSet<>(); + Set familesAccessDenied = new HashSet<>(); + Set familesAccessIndeterminate = new HashSet<>(); + Set familiesFullyAuthorized = new HashSet<>(); + + for (Map.Entry> anEntry : familyMap.entrySet()) { + String family = Bytes.toString(anEntry.getKey()); + + session.columnFamily(family); + + LOG.debug("evaluateAccess: Processing family: {}", family); + + Collection columns = anEntry.getValue(); + if (columns == null || columns.isEmpty()) { + LOG.debug("evaluateAccess: columns collection null or empty, ok. 
Family level access is desired."); + + session.column(null) // zap stale column from prior iteration of this loop, if any + .buildRequest() + .authorize(); + + AuthzAuditEvent auditEvent = auditHandler.getAndDiscardMostRecentEvent(); // capture it only for success + + final boolean isColumnFamilyAuthorized = session.isAuthorized(); + + if (auditEvent != null) { + if (isColumnFamilyAuthorized) { + familyLevelAccessEvents.add(auditEvent); + } else { + if (deniedEvent == null) { // we need to capture just one denial event + LOG.debug("evaluateAccess: Setting denied access audit event with last auth failure audit event."); + + deniedEvent = auditEvent; + } + } + } + + LOG.debug("evaluateAccess: family level access for [{}] is evaluated to {}. Checking if [{}] descendants have access.", family, isColumnFamilyAuthorized, family); + + // buildRequest again since resourceMatchingScope changed + // reset ResourceMatchingScope to SELF, ignoreDescendantDeny to true + session.resourceMatchingScope(RangerAccessRequest.ResourceMatchingScope.SELF_OR_DESCENDANTS) + .ignoreDescendantDeny(false) + .buildRequest() + .authorize(); + + auditEvent = auditHandler.getAndDiscardMostRecentEvent(); // capture it only for failure + + if (session.isAuthorized()) { + LOG.debug("evaluateAccess: [{}] descendants have access", family); + + somethingIsAccessible = true; + + if (isColumnFamilyAuthorized) { + familesAccessAllowed.add(family); + + if (auditEvent != null) { + LOG.debug("evaluateAccess: adding to family-level-access-granted-event-set"); + + familyLevelAccessEvents.add(auditEvent); + } + } else { + familesAccessIndeterminate.add(family); + + LOG.debug("evaluateAccess: has partial access (of some type) in family [{}]", family); + + everythingIsAccessible = false; + + if (auditEvent != null && deniedEvent == null) { // we need to capture just one denial event + LOG.debug("evaluateAccess: Setting denied access audit event with last auth failure audit event."); + + deniedEvent = auditEvent; 
+ } + } + } else { + everythingIsAccessible = false; + + if (isColumnFamilyAuthorized) { + somethingIsAccessible = true; + + familesAccessIndeterminate.add(family); + + LOG.debug("evaluateAccess: has partial access (of some type) in family [{}]", family); + + if (auditEvent != null && deniedEvent == null) { // we need to capture just one denial event + LOG.debug("evaluateAccess: Setting denied access audit event with last auth failure audit event."); + + deniedEvent = auditEvent; + } + } else { + LOG.debug("evaluateAccess: has no access of [{}] type in family [{}]", access, family); + + familesAccessDenied.add(family); + + denialReason = String.format("Insufficient permissions for user ‘%s',action: %s, tableName:%s, family:%s.", user.getName(), operation, table, family); + } + } + + // Restore the headMatch setting + session.resourceMatchingScope(RangerAccessRequest.ResourceMatchingScope.SELF); + session.ignoreDescendantDeny(true); + } else { + boolean isColumnAuthOptimizationEnabled = hbasePlugin.getPropertyIsColumnAuthOptimizationEnabled(); + + LOG.debug("evaluateAccess: columns collection not empty. Skipping Family level check, will do finer level access check for columns."); + + if (isColumnAuthOptimizationEnabled) { + session.column(null) + .buildRequest() + .authorize(); + + if (LOG.isDebugEnabled()) { + LOG.debug("evaluateAccess: isColumnAuthOptimizationEnabled={}, isColumnFamilyAuthorized={}", isColumnAuthOptimizationEnabled, session.isAuthorized()); + } + + if (session.isAuthorized()) { + //check if column family fully authorized i.e. 
no deny for columns + session.column(null) + .resourceMatchingScope(RangerAccessRequest.ResourceMatchingScope.SELF_OR_DESCENDANTS) + .ignoreDescendantDeny(false) + .buildRequest() + .authorize(); + + boolean isColumnFamilyAndDescendantsAuthorized = session.isAuthorized(); + AuthzAuditEvent auditEvent = auditHandler.getAndDiscardMostRecentEvent(); + + // reset ResourceMatchingScope to SELF, ignoreDescendantDeny to true + session.resourceMatchingScope(RangerAccessRequest.ResourceMatchingScope.SELF).ignoreDescendantDeny(true); + + LOG.debug("evaluateAccess: isColumnAuthOptimizationEnabled={}, isColumnFamilyAndDescendantsAuthorized={}", isColumnAuthOptimizationEnabled, isColumnFamilyAndDescendantsAuthorized); + + if (isColumnFamilyAndDescendantsAuthorized) { + familiesFullyAuthorized.add(family); + + if (auditEvent != null) { + LOG.debug("evaluateAccess: isColumnAuthOptimizationEnabled ={}, adding family {} to familiesFullyAuthorized", isColumnAuthOptimizationEnabled, family); + + familyLevelAccessEvents.add(auditEvent); + } + + continue; + } + } + } + + Set accessibleColumns = new HashSet<>(); // will be used in to populate our results cache for the filter + Iterator columnIterator = new ColumnIterator(columns); + + while (columnIterator.hasNext()) { + String column = columnIterator.next(); + + LOG.debug("evaluateAccess: Processing column: {}", column); + + //buildRequest required again since now column is being set + session.column(column).buildRequest().authorize(); + + AuthzAuditEvent auditEvent = auditHandler.getAndDiscardMostRecentEvent(); + + if (session.isAuthorized()) { + LOG.debug("evaluateAccess: has column level access [{}, {}]", family, column); + + // we need to do 3 things: housekeeping, capturing audit events, building the results cache for filter + somethingIsAccessible = true; + + accessibleColumns.add(column); + + if (auditEvent != null) { + LOG.debug("evaluateAccess: adding to access-granted-audit-event-set"); + + authorizedEvents.add(auditEvent); + 
} + } else { + LOG.debug("evaluateAccess: no column level access [{}, {}]", family, column); + + somethingIsAccessible = false; + everythingIsAccessible = false; + denialReason = String.format("Insufficient permissions for user ‘%s',action: %s, tableName:%s, family:%s, column: %s", user.getName(), operation, table, family, column); + + if (auditEvent != null && deniedEvent == null) { // we need to capture just one denial event + LOG.debug("evaluateAccess: Setting denied access audit event with last auth failure audit event."); + + deniedEvent = auditEvent; + } + } + + if (!accessibleColumns.isEmpty()) { + columnsAccessAllowed.put(family, accessibleColumns); + } + } + } + } + + // Cache of auth results are encapsulated the in the filter. Not every caller of the function uses it - only preGet and preOpt will. + RangerAuthorizationFilter filter = new RangerAuthorizationFilter(session, familesAccessAllowed, familesAccessDenied, familesAccessIndeterminate, columnsAccessAllowed, familiesFullyAuthorized); + + result = new ColumnFamilyAccessResult(everythingIsAccessible, somethingIsAccessible, authorizedEvents, familyLevelAccessEvents, deniedEvent, denialReason, filter); + + LOG.debug("evaluateAccess: exiting: user[{}], Operation[{}], access[{}], families[{}], verdict[{}]", userName, operation, access, colFamiliesForDebugLoggingOnly, result); + + return result; + } + + Filter authorizeAccess(ObserverContext ctx, String operation, Action action, final RegionCoprocessorEnvironment env, final Map> familyMap, String commandStr) throws AccessDeniedException { + LOG.debug("==> authorizeAccess"); + + RangerPerfTracer perf = null; + + try { + perf = RangerPerfTracer.getPerfTracer(PERF_HBASEAUTH_REQUEST_LOG, "RangerAuthorizationCoprocessor.authorizeAccess(request=Operation[" + operation + "]"); + + ColumnFamilyAccessResult accessResult = evaluateAccess(ctx, operation, action, env, familyMap, commandStr); + RangerDefaultAuditHandler auditHandler = new 
RangerDefaultAuditHandler(hbasePlugin.getConfig()); + + if (accessResult.everythingIsAccessible) { + auditHandler.logAuthzAudits(accessResult.accessAllowedEvents); + auditHandler.logAuthzAudits(accessResult.familyLevelAccessEvents); + + LOG.debug("authorizeAccess: exiting: No filter returned since all access was allowed"); + + return null; // no filter needed since we are good to go. + } else if (accessResult.somethingIsAccessible) { + // NOTE: audit logging is split beween logging here (in scope of preOp/preGet) and logging in the filter component for those that couldn't be determined + auditHandler.logAuthzAudits(accessResult.accessAllowedEvents); + + LOG.debug("authorizeAccess: exiting: Filter returned since some access was allowed"); + + return accessResult.filter; + } else { + // If we are here then it means nothing was accessible! So let's log one denial (in our case, the last denial) and throw an exception + auditHandler.logAuthzAudit(accessResult.accessDeniedEvent); + + LOG.debug("authorizeAccess: exiting: Throwing exception since nothing was accessible"); + + throw new AccessDeniedException(accessResult.denialReason); + } + } finally { + RangerPerfTracer.log(perf); + + LOG.debug("<== authorizeAccess"); + } + } + + Filter combineFilters(Filter filter, Filter existingFilter) { + Filter combinedFilter = filter; + + if (existingFilter != null) { + combinedFilter = new FilterList(FilterList.Operator.MUST_PASS_ALL, Lists.newArrayList(filter, existingFilter)); + } + + return combinedFilter; + } + + void requirePermission(final ObserverContext ctx, final String operation, final Action action, final RegionCoprocessorEnvironment regionServerEnv, final Map> familyMap) throws AccessDeniedException { + RangerPerfTracer perf = null; + + try { + if (RangerPerfTracer.isPerfTraceEnabled(PERF_HBASEAUTH_REQUEST_LOG)) { + perf = RangerPerfTracer.getPerfTracer(PERF_HBASEAUTH_REQUEST_LOG, "RangerAuthorizationCoprocessor.requirePermission(request=Operation[" + operation + "]"); 
+ } + + ColumnFamilyAccessResult accessResult = evaluateAccess(ctx, operation, action, regionServerEnv, familyMap, null); + RangerDefaultAuditHandler auditHandler = new RangerDefaultAuditHandler(hbasePlugin.getConfig()); + + if (accessResult.everythingIsAccessible) { + auditHandler.logAuthzAudits(accessResult.accessAllowedEvents); + auditHandler.logAuthzAudits(accessResult.familyLevelAccessEvents); + + LOG.debug("requirePermission: exiting: all access was allowed"); + } else { + auditHandler.logAuthzAudit(accessResult.accessDeniedEvent); + + LOG.debug("requirePermission: exiting: throwing exception as everything wasn't accessible"); + + throw new AccessDeniedException(accessResult.denialReason); + } + } finally { + RangerPerfTracer.log(perf); + } + } + + void authorizeAccess(ObserverContext ctx, String operation, String otherInformation, Action action, String table, String columnFamily, String column) throws AccessDeniedException { + User user = getActiveUser(ctx); + String access = authUtils.getAccess(action); + + LOG.debug("authorizeAccess: Entering : Operation[{}], Info[{}], access[{}], table[{}], columnFamily[{}], column[{}]", operation, otherInformation, access, table, columnFamily, column); + + if (canSkipAccessCheck(user, operation, access, table)) { + LOG.debug("authorizeAccess: {}: Operation[{}], Info[{}], access[{}], table[{}], columnFamily[{}], column[{}], allowed[{}], reason[{}]", "Exiting", operation, otherInformation, access, table, columnFamily, column, true, "can skip auth check"); + + return; + } + + HbaseAuditHandler auditHandler = factory.getAuditHandler(); + AuthorizationSession session = new AuthorizationSession(hbasePlugin) + .operation(operation) + .otherInformation(otherInformation) + .remoteAddress(getRemoteAddress()) + .auditHandler(auditHandler) + .user(user) + .access(access) + .table(table) + .columnFamily(columnFamily) + .column(column) + .buildRequest() + .authorize(); + + if (LOG.isDebugEnabled()) { + LOG.debug("authorizeAccess: {}: 
Operation[{}], Info[{}], access[{}], table[{}], columnFamily[{}], column[{}], allowed[{}], reason[{}]", "Exiting", operation, otherInformation, access, table, columnFamily, column, session.isAuthorized(), session.getDenialReason()); + } + + session.publishResults(); + } + + boolean canSkipAccessCheck(User user, final String operation, String access, final String table) throws AccessDeniedException { + boolean result = false; + + if (user == null) { + LOG.warn("canSkipAccessCheck: exiting{}", "Unexpeceted: User is null: access denied, not audited!"); + + throw new AccessDeniedException("No user associated with request (" + operation + ") for action: " + access + "on table:" + table); + } else if (isAccessForMetadataRead(access, table)) { + LOG.debug("canSkipAccessCheck: true: metadata read access always allowed, not audited"); + + result = true; + } else { + LOG.debug("Can't skip access checks"); + } + + return result; + } + + boolean canSkipAccessCheck(User user, final String operation, String access, final RegionCoprocessorEnvironment regionServerEnv) throws AccessDeniedException { + // read access to metadata tables is always allowed and isn't audited. 
+ if (isAccessForMetaTables(regionServerEnv) && authUtils.isReadAccess(access)) { + LOG.debug("isKnownAccessPattern: exiting: Read access for metadata tables allowed, not audited!"); + + return true; + } + + // if write access is desired to metatables then global create access is sufficient + if (authUtils.isWriteAccess(access) && isAccessForMetaTables(regionServerEnv)) { + String createAccess = authUtils.getAccess(Action.CREATE); + + AuthorizationSession session = new AuthorizationSession(hbasePlugin) + .operation(operation) + .remoteAddress(getRemoteAddress()) + .user(user) + .access(createAccess) + .buildRequest() + .authorize(); + + if (session.isAuthorized()) { + // NOTE: this access isn't logged + LOG.debug("isKnownAccessPattern: exiting: User has global create access, allowed!"); + + return true; + } + } + + return false; + } + + /* ---- EndpointObserver implementation ---- */ + + boolean isAccessForMetadataRead(String access, String table) { + if (authUtils.isReadAccess(access) && isSpecialTable(table)) { + LOG.debug("isAccessForMetadataRead: Metadata tables read: access allowed!"); + + return true; + } + + return false; + } + + private User getActiveUser(ObserverContext ctx) { + User user = null; + + if (ctx != null) { + try { + Optional optionalUser = ctx.getCaller(); + + user = optionalUser.isPresent() ? 
(User) optionalUser.get() : this.userProvider.getCurrent(); + } catch (Exception e) { + LOG.info("Unable to get request user using context{}", ctx); + } + } + + if (user == null) { + try { + user = RpcServer.getRequestUser().get(); + } catch (NoSuchElementException e) { + LOG.info("Unable to get request user via RPCServer"); + } + } + + if (user == null) { + // for non-rpc handling, fallback to system user + try { + user = User.getCurrent(); + } catch (IOException e) { + LOG.error("Unable to find the current user"); + + user = null; + } + } + + return user; + } + + private String getRemoteAddress() { + InetAddress remoteAddr = null; + + try { + remoteAddr = RpcServer.getRemoteAddress().get(); + } catch (NoSuchElementException e) { + // HBase services will sometimes make calls as a part of + // internal operations. It is not worth logging when we do + // not have a remote address (a client's remote address). + LOG.trace("Unable to get remote Address"); + } + + if (remoteAddr == null) { + remoteAddr = RpcServer.getRemoteIp(); + } + + return remoteAddr != null ? 
remoteAddr.getHostAddress() : null; + } + + // Methods that are used within the CoProcessor + private void requireScannerOwner(ObserverContext ctx, InternalScanner s) throws AccessDeniedException { + if (!RpcServer.isInRpcCallContext()) { + return; + } + + User user = getActiveUser(ctx); + String requestUserName = user.getShortName(); + String owner = scannerOwners.get(s); + + if (owner != null && !owner.equals(requestUserName)) { + throw new AccessDeniedException("User '" + requestUserName + "' is not the scanner owner!"); + } + } + + private static void createACLTable(Admin admin) throws IOException { + ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.newBuilder(PermissionStorage.ACL_LIST_FAMILY).setMaxVersions(1).setInMemory(true).setBlockCacheEnabled(true).setBlocksize(8192).setBloomFilterType(BloomType.NONE).setScope(0).build(); + TableDescriptor td = TableDescriptorBuilder.newBuilder(PermissionStorage.ACL_TABLE_NAME).addColumnFamily(cfd).build(); + + admin.createTable(td); + } + + private List getUserPermissions(RangerResourceACLs rangerResourceACLs, String resource, boolean isNamespace) { + List userPermissions = new ArrayList<>(); + Action[] hbaseActions = Action.values(); + List hbaseActionsList = new ArrayList<>(); + + for (Action action : hbaseActions) { + hbaseActionsList.add(action.name()); + } + + addPermission(rangerResourceACLs.getUserACLs(), isNamespace, hbaseActionsList, userPermissions, resource, false); + addPermission(rangerResourceACLs.getGroupACLs(), isNamespace, hbaseActionsList, userPermissions, resource, true); + + return userPermissions; + } + + private void addPermission(Map> acls, boolean isNamespace, List hbaseActionsList, List userPermissions, String resource, boolean isGroup) { + for (Entry> userAcls : acls.entrySet()) { + String user = !isGroup ? 
userAcls.getKey() : AuthUtil.toGroupEntry(userAcls.getKey()); + List allowedPermissions = new ArrayList<>(); + + for (Entry permissionAccess : userAcls.getValue().entrySet()) { + String permission = authUtils.getActionName(permissionAccess.getKey()); + + if (hbaseActionsList.contains(permission) && permissionAccess.getValue().getResult() == RangerPolicyEvaluator.ACCESS_ALLOWED) { + allowedPermissions.add(Action.valueOf(permission)); + } + } + + if (!allowedPermissions.isEmpty()) { + UserPermission up; + + if (isNamespace) { + up = new UserPermission(user, Permission.newBuilder(resource).withActions(allowedPermissions.toArray(new Action[allowedPermissions.size()])).build()); + } else { + up = new UserPermission(user, Permission.newBuilder(TableName.valueOf(resource)).withActions(allowedPermissions.toArray(new Action[allowedPermissions.size()])).build()); + } + + userPermissions.add(up); + } + } + } + + private GrantRevokeRequest createGrantData(AccessControlProtos.GrantRequest request) throws Exception { + AccessControlProtos.UserPermission up = request.getUserPermission(); + AccessControlProtos.Permission perm = up == null ? null : up.getPermission(); + + UserPermission userPerm = up == null ? null : AccessControlUtil.toUserPermission(up); + Permission.Action[] actions = userPerm == null ? null : userPerm.getPermission().getActions(); + String userName = userPerm == null ? 
null : userPerm.getUser(); + String nameSpace = null; + String tableName = null; + String colFamily = null; + String qualifier = null; + + if (perm == null) { + throw new Exception("grant(): invalid data - permission is null"); + } + + if (StringUtil.isEmpty(userName)) { + throw new Exception("grant(): invalid data - username empty"); + } + + if ((actions == null) || (actions.length == 0)) { + throw new Exception("grant(): invalid data - no action specified"); + } + + switch (perm.getType()) { + case Global: + qualifier = RangerHBaseResource.WILDCARD; + colFamily = RangerHBaseResource.WILDCARD; + tableName = RangerHBaseResource.WILDCARD; + break; + + case Table: + TablePermission tablePerm = (TablePermission) userPerm.getPermission(); + + tableName = Bytes.toString(tablePerm.getTableName().getName()); + colFamily = Bytes.toString(tablePerm.getFamily()); + qualifier = Bytes.toString(tablePerm.getQualifier()); + break; + + case Namespace: + NamespacePermission namepsacePermission = (NamespacePermission) userPerm.getPermission(); + + nameSpace = namepsacePermission.getNamespace(); + break; + } + + if (StringUtil.isEmpty(nameSpace) && StringUtil.isEmpty(tableName) && StringUtil.isEmpty(colFamily) && StringUtil.isEmpty(qualifier)) { + throw new Exception("grant(): namespace/table/columnFamily/columnQualifier not specified"); + } + + tableName = StringUtil.isEmpty(tableName) ? RangerHBaseResource.WILDCARD : tableName; + colFamily = StringUtil.isEmpty(colFamily) ? RangerHBaseResource.WILDCARD : colFamily; + qualifier = StringUtil.isEmpty(qualifier) ? RangerHBaseResource.WILDCARD : qualifier; + + if (!StringUtil.isEmpty(nameSpace)) { + tableName = nameSpace + RangerHBaseResource.NAMESPACE_SEPARATOR + tableName; + } + + User activeUser = getActiveUser(null); + String grantor = activeUser != null ? activeUser.getShortName() : null; + String[] groups = activeUser != null ? 
activeUser.getGroupNames() : null; + + Set grantorGroups = null; + + if (groups != null && groups.length > 0) { + grantorGroups = new HashSet<>(Arrays.asList(groups)); + } + + Map mapResource = new HashMap<>(); + + mapResource.put(RangerHBaseResource.KEY_TABLE, tableName); + mapResource.put(RangerHBaseResource.KEY_COLUMN_FAMILY, colFamily); + mapResource.put(RangerHBaseResource.KEY_COLUMN, qualifier); + + GrantRevokeRequest ret = new GrantRevokeRequest(); + + ret.setGrantor(grantor); + ret.setGrantorGroups(grantorGroups); + ret.setDelegateAdmin(Boolean.FALSE); + ret.setEnableAudit(Boolean.TRUE); + ret.setReplaceExistingPermissions(Boolean.TRUE); + ret.setResource(mapResource); + ret.setClientIPAddress(getRemoteAddress()); + ret.setForwardedAddresses(null); //TODO: Need to check with Knox proxy how they handle forwarded add. + ret.setRemoteIPAddress(getRemoteAddress()); + ret.setRequestData(up.toString()); + + if (userName.startsWith(GROUP_PREFIX)) { + ret.getGroups().add(userName.substring(GROUP_PREFIX.length())); + } else { + ret.getUsers().add(userName); + } + + for (Permission.Action action : actions) { + switch (action.code()) { + case 'R': + ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_READ); + break; + + case 'W': + ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_WRITE); + break; + + case 'C': + ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_CREATE); + break; + + case 'A': + ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_ADMIN); + ret.setDelegateAdmin(Boolean.TRUE); + break; + case 'X': + ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_EXECUTE); + break; + default: + LOG.warn("grant(): ignoring action '{}' for user '{}'", action.name(), userName); + } + } + + return ret; + } + + private GrantRevokeRequest createRevokeData(AccessControlProtos.RevokeRequest request) throws Exception { + AccessControlProtos.UserPermission up = request.getUserPermission(); + AccessControlProtos.Permission perm = up == null ? 
null : up.getPermission(); + + UserPermission userPerm = up == null ? null : AccessControlUtil.toUserPermission(up); + String userName = userPerm == null ? null : userPerm.getUser(); + String nameSpace = null; + String tableName = null; + String colFamily = null; + String qualifier = null; + + if (perm == null) { + throw new Exception("revoke(): invalid data - permission is null"); + } + + if (StringUtil.isEmpty(userName)) { + throw new Exception("revoke(): invalid data - username empty"); + } + + switch (perm.getType()) { + case Global: + qualifier = RangerHBaseResource.WILDCARD; + colFamily = RangerHBaseResource.WILDCARD; + tableName = RangerHBaseResource.WILDCARD; + break; + + case Table: + TablePermission tablePerm = (TablePermission) userPerm.getPermission(); + + tableName = Bytes.toString(tablePerm.getTableName().getName()); + colFamily = Bytes.toString(tablePerm.getFamily()); + qualifier = Bytes.toString(tablePerm.getQualifier()); + break; + + case Namespace: + NamespacePermission namespacePermission = (NamespacePermission) userPerm.getPermission(); + + nameSpace = namespacePermission.getNamespace(); + break; + } + + if (StringUtil.isEmpty(nameSpace) && StringUtil.isEmpty(tableName) && StringUtil.isEmpty(colFamily) && StringUtil.isEmpty(qualifier)) { + throw new Exception("revoke(): table/columnFamily/columnQualifier not specified"); + } + + tableName = StringUtil.isEmpty(tableName) ? RangerHBaseResource.WILDCARD : tableName; + colFamily = StringUtil.isEmpty(colFamily) ? RangerHBaseResource.WILDCARD : colFamily; + qualifier = StringUtil.isEmpty(qualifier) ? RangerHBaseResource.WILDCARD : qualifier; + + if (!StringUtil.isEmpty(nameSpace)) { + tableName = nameSpace + RangerHBaseResource.NAMESPACE_SEPARATOR + tableName; + } + + User activeUser = getActiveUser(null); + String grantor = activeUser != null ? activeUser.getShortName() : null; + String[] groups = activeUser != null ? 
activeUser.getGroupNames() : null; + + Set grantorGroups = null; + + if (groups != null && groups.length > 0) { + grantorGroups = new HashSet<>(Arrays.asList(groups)); + } + + Map mapResource = new HashMap<>(); + + mapResource.put(RangerHBaseResource.KEY_TABLE, tableName); + mapResource.put(RangerHBaseResource.KEY_COLUMN_FAMILY, colFamily); + mapResource.put(RangerHBaseResource.KEY_COLUMN, qualifier); + + GrantRevokeRequest ret = new GrantRevokeRequest(); + + ret.setGrantor(grantor); + ret.setGrantorGroups(grantorGroups); + ret.setDelegateAdmin(Boolean.TRUE); // remove delegateAdmin privilege as well + ret.setEnableAudit(Boolean.TRUE); + ret.setReplaceExistingPermissions(Boolean.TRUE); + ret.setResource(mapResource); + ret.setClientIPAddress(getRemoteAddress()); + ret.setForwardedAddresses(null); //TODO: Need to check with Knox proxy how they handle forwarded add. + ret.setRemoteIPAddress(getRemoteAddress()); + ret.setRequestData(up.toString()); + + if (userName.startsWith(GROUP_PREFIX)) { + ret.getGroups().add(userName.substring(GROUP_PREFIX.length())); + } else { + ret.getUsers().add(userName); + } + + // revoke removes all permissions + ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_READ); + ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_WRITE); + ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_CREATE); + ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_ADMIN); + ret.getAccessTypes().add(HbaseAuthUtils.ACCESS_TYPE_EXECUTE); + + return ret; + } + + private void cleanUpHBaseRangerPlugin() { + LOG.debug("==> RangerAuthorizationCoprocessor.cleanUp_HBaseRangerPlugin()"); + + if (hbasePlugin != null) { + hbasePlugin.setHBaseShuttingDown(true); + hbasePlugin.cleanup(); + + AuditProviderFactory auditProviderFactory = hbasePlugin.getAuditProviderFactory(); + + if (auditProviderFactory != null) { + auditProviderFactory.shutdown(); + } + } + + LOG.debug("<== RangerAuthorizationCoprocessor.cleanUp_HBaseRangerPlugin() completed!"); + } + + private String 
getCommandString(String operationName, String tableNameStr, Map opMetaData) { + StringBuilder ret = new StringBuilder(); + + if (!HbaseConstants.HBASE_META_TABLE.equals(tableNameStr)) { + ret.append(operationName); + ret.append(HbaseConstants.SPACE); + ret.append(tableNameStr).append(HbaseConstants.COMMA).append(HbaseConstants.SPACE); + ret.append(getPredicates(operationName, opMetaData)); + } + + return ret.toString(); + } + + private String getPredicates(String operationName, Map opMetaData) { + StringBuilder ret = new StringBuilder(); + + if (MapUtils.isNotEmpty(opMetaData)) { + HashMap> families = (HashMap>) opMetaData.get(HbaseConstants.FAMILIES); + String startRowVal = (String) opMetaData.get(HbaseConstants.STARTROW); + String stopRowVal = (String) opMetaData.get(HbaseConstants.STOPROW); + String filterVal = (String) opMetaData.get(HbaseConstants.FILTER); + String rowVal = (String) opMetaData.get(HbaseConstants.ROW); + + if (!isQueryforInfo(families)) { + ret.append(HbaseConstants.OPEN_BRACES); + + if (HbaseConstants.SCAN.equals(operationName)) { + if (StringUtils.isNotEmpty(startRowVal)) { + ret.append(formatPredicate(ret, PredicateType.STARTROW, startRowVal)); + } + + if (StringUtils.isNotEmpty(stopRowVal)) { + ret.append(formatPredicate(ret, PredicateType.STOPROW, stopRowVal)); + } + } else { + if (StringUtils.isNotEmpty(rowVal)) { + ret.append(formatPredicate(ret, PredicateType.ROW, rowVal)); + } + } + + if (StringUtils.isNotEmpty(filterVal)) { + ret.append(formatPredicate(ret, PredicateType.FILTER, filterVal)); + } + + if (MapUtils.isNotEmpty(families)) { + String colfamily = families.toString(); + + ret.append(formatPredicate(ret, PredicateType.COLUMNS, colfamily)); + } + + ret.append(HbaseConstants.SPACE).append(HbaseConstants.CLOSED_BRACES); + } + } + + return ret.toString(); + } + + private boolean isQueryforInfo(HashMap> families) { + boolean ret = false; + + for (HashMap.Entry family : families.entrySet()) { + String familyKey = (String) 
family.getKey(); + + if (HbaseConstants.INFO.equals(familyKey)) { + ret = true; + break; + } + } + + return ret; + } + + private String formatPredicate(StringBuilder commandStr, PredicateType predicateType, String val) { + StringBuilder ret = new StringBuilder(); + + if (HbaseConstants.OPEN_BRACES.contentEquals(commandStr)) { + ret.append(HbaseConstants.SPACE); + } else { + ret.append(HbaseConstants.COMMA).append(HbaseConstants.SPACE); + } + + ret.append(buildPredicate(predicateType, val)); + + return ret.toString(); + } + + private String buildPredicate(PredicateType predicateType, String val) { + StringBuilder ret = new StringBuilder(); + + switch (predicateType) { + case STARTROW: + ret.append(PredicateType.STARTROW.name().toUpperCase()); + ret.append(HbaseConstants.ARROW); + ret.append(HbaseConstants.SINGLE_QUOTES).append(val).append(HbaseConstants.SINGLE_QUOTES); + break; + case STOPROW: + ret.append(PredicateType.STOPROW.name().toUpperCase()); + ret.append(HbaseConstants.ARROW); + ret.append(HbaseConstants.SINGLE_QUOTES).append(val).append(HbaseConstants.SINGLE_QUOTES); + break; + case FILTER: + ret.append(PredicateType.FILTER.name().toUpperCase()); + ret.append(HbaseConstants.ARROW); + ret.append(HbaseConstants.SINGLE_QUOTES).append(val).append(HbaseConstants.SINGLE_QUOTES); + break; + case COLUMNS: + ret.append(PredicateType.COLUMNS.name().toUpperCase()); + ret.append(HbaseConstants.ARROW); + ret.append(HbaseConstants.SINGLE_QUOTES).append(val).append(HbaseConstants.SINGLE_QUOTES); + break; + case ROW: + ret.append(val); + break; + } + + return ret.toString(); + } + + private void checkGetTableInfoAccess(ObserverContext ctx, String operation, List descriptors, String regex, String accessPermission) { + if (CollectionUtils.isNotEmpty(descriptors)) { + // Retains only those which passes authorization checks + User user = getActiveUser(ctx); + String access = accessPermission; + HbaseAuditHandler auditHandler = factory.getAuditHandler(); // this will 
accumulate audits for all tables that succeed. + + AuthorizationSession session = new AuthorizationSession(hbasePlugin) + .operation(operation) + .otherInformation("regex=" + regex) + .remoteAddress(getRemoteAddress()) + .auditHandler(auditHandler) + .user(user) + .access(access); + + Iterator itr = descriptors.iterator(); + + while (itr.hasNext()) { + TableDescriptor htd = itr.next(); + String tableName = htd.getTableName().getNameAsString(); + + session.table(tableName).buildRequest().authorize(); + + if (!session.isAuthorized()) { + List events = null; + + itr.remove(); + + AuthzAuditEvent event = auditHandler.getAndDiscardMostRecentEvent(); + + if (event != null) { + events = Lists.newArrayList(event); + } + + auditHandler.logAuthzAudits(events); + } + } + + if (!descriptors.isEmpty()) { + session.logCapturedEvents(); + } + } + } + + private void checkAccessForNamespaceDescriptor(ObserverContext ctx, String operation, List descriptors) { + if (CollectionUtils.isNotEmpty(descriptors)) { + // Retains only those which passes authorization checks + User user = getActiveUser(ctx); + String access = authUtils.getAccess(Action.ADMIN); + HbaseAuditHandler auditHandler = factory.getAuditHandler(); // this will accumulate audits for all tables that succeed. 
+ + AuthorizationSession session = new AuthorizationSession(hbasePlugin) + .operation(operation) + .remoteAddress(getRemoteAddress()) + .auditHandler(auditHandler) + .user(user) + .access(access); + + Iterator itr = descriptors.iterator(); + + while (itr.hasNext()) { + NamespaceDescriptor namespaceDescriptor = itr.next(); + String namespace = namespaceDescriptor.getName(); + + session.table(namespace).buildRequest().authorize(); + + if (!session.isAuthorized()) { + List events = null; + + itr.remove(); + + AuthzAuditEvent event = auditHandler.getAndDiscardMostRecentEvent(); + + if (event != null) { + events = Lists.newArrayList(event); + } + + auditHandler.logAuthzAudits(events); + } + } + + if (!descriptors.isEmpty()) { + session.logCapturedEvents(); + } + } + } + + enum PredicateType { STARTROW, STOPROW, FILTER, COLUMNS, ROW } + + static class ColumnFamilyAccessResult { + final boolean everythingIsAccessible; + final boolean somethingIsAccessible; + final List accessAllowedEvents; + final List familyLevelAccessEvents; + final AuthzAuditEvent accessDeniedEvent; + final String denialReason; + final RangerAuthorizationFilter filter; + + ColumnFamilyAccessResult(boolean everythingIsAccessible, boolean somethingIsAccessible, List accessAllowedEvents, List familyLevelAccessEvents, AuthzAuditEvent accessDeniedEvent, String denialReason, RangerAuthorizationFilter filter) { + this.everythingIsAccessible = everythingIsAccessible; + this.somethingIsAccessible = somethingIsAccessible; + this.accessAllowedEvents = accessAllowedEvents; // WARNING: we are just holding on to reference of the collection. 
Potentially risky optimization + this.familyLevelAccessEvents = familyLevelAccessEvents; + this.accessDeniedEvent = accessDeniedEvent; + this.denialReason = denialReason; + this.filter = filter; // cached values of access results + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(getClass()) + .add("everythingIsAccessible", everythingIsAccessible) + .add("somethingIsAccessible", somethingIsAccessible) + .add("accessAllowedEvents", accessAllowedEvents) + .add("familyLevelAccessEvents", familyLevelAccessEvents) + .add("accessDeniedEvent", accessDeniedEvent) + .add("denialReason", denialReason) + .add("filter", filter) + .toString(); + } + } +} diff --git a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerAuthorizationFilter.java b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerAuthorizationFilter.java index 7ddd0910ba..9559ba7959 100644 --- a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerAuthorizationFilter.java +++ b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerAuthorizationFilter.java @@ -19,11 +19,6 @@ package org.apache.ranger.authorization.hbase; -import java.io.IOException; -import java.util.Collections; -import java.util.Map; -import java.util.Set; - import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.filter.FilterBase; @@ -33,120 +28,133 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.Collections; +import java.util.Map; +import java.util.Set; + public class RangerAuthorizationFilter extends FilterBase { + private static final Logger LOG = LoggerFactory.getLogger(RangerAuthorizationFilter.class.getName()); + + final Set familiesAccessAllowed; + final Set familiesAccessDenied; + final Set familiesAccessIndeterminate; + final Map> columnsAccessAllowed; + final Set familiesFullyAuthorized; + final AuthorizationSession session; + final HbaseAuditHandler 
auditHandler = HbaseFactory.getInstance().getAuditHandler(); + + public RangerAuthorizationFilter(AuthorizationSession session, Set familiesAccessAllowed, Set familiesAccessDenied, Set familiesAccessIndeterminate, Map> columnsAccessAllowed, Set familiesFullyAuthorized) { + // the class assumes that all of these can be empty but none of these can be null + this.familiesAccessAllowed = familiesAccessAllowed; + this.familiesAccessDenied = familiesAccessDenied; + this.familiesAccessIndeterminate = familiesAccessIndeterminate; + this.columnsAccessAllowed = columnsAccessAllowed; + this.familiesFullyAuthorized = familiesFullyAuthorized; + + // this session should have everything set on it except family and column which would be altered based on need + this.session = session; + + // we don't want to audit denial, so we need to make sure the handler is what we need it to be. + this.session.auditHandler(auditHandler); + } + + @Override + public ReturnCode filterKeyValue(Cell kv) { + LOG.debug("==> filterKeyValue"); + + String family = null; + byte[] familyBytes = CellUtil.cloneFamily(kv); + + if (familyBytes.length > 0) { + family = Bytes.toString(familyBytes); + + LOG.debug("filterKeyValue: evaluating family[{}].", family); + } + + String column = null; + byte[] qualifier = CellUtil.cloneQualifier(kv); + + if (qualifier.length > 0) { + column = Bytes.toString(qualifier); + + LOG.debug("filterKeyValue: evaluating column[{}].", column); + } else { + LOG.warn("filterKeyValue: empty/null column set! Unexpected!"); + } + + ReturnCode result = ReturnCode.NEXT_COL; + boolean authCheckNeeded = false; + + if (family == null) { + LOG.warn("filterKeyValue: Unexpected - null/empty family! Access denied!"); + } else if (familiesAccessDenied.contains(family)) { + LOG.debug("filterKeyValue: family found in access denied families cache.
Access denied."); + } else if (session.getPropertyIsColumnAuthOptimizationEnabled() && familiesFullyAuthorized.contains(family)) { + LOG.debug("filterKeyValue: ColumnAuthOptimizationEnabled and family found in fully authorized families cache. Column authorization is not required"); + + result = ReturnCode.INCLUDE; + } else if (columnsAccessAllowed.containsKey(family)) { + LOG.debug("filterKeyValue: family found in column level access results cache."); + + if (columnsAccessAllowed.get(family).contains(column)) { + LOG.debug("filterKeyValue: family/column found in column level access results cache. Access allowed."); + + result = ReturnCode.INCLUDE; + } else { + LOG.debug("filterKeyValue: family/column not in column level access results cache. Access denied."); + } + } else if (familiesAccessAllowed.contains(family)) { + LOG.debug("filterKeyValue: family found in access allowed families cache. Must re-authorize for correct audit generation."); + + authCheckNeeded = true; + } else if (familiesAccessIndeterminate.contains(family)) { + LOG.debug("filterKeyValue: family found in indeterminate families cache. Evaluating access..."); + + authCheckNeeded = true; + } else { + LOG.warn("filterKeyValue: Unexpected - alien family encountered that wasn't seen by pre-hook! Access Denied.!"); + } + + if (authCheckNeeded) { + LOG.debug("filterKeyValue: Checking authorization..."); + + session.columnFamily(family) + .column(column) + .buildRequest() + .authorize(); + + // must always purge the captured audit event out of the audit handler to avoid messing up the next check + AuthzAuditEvent auditEvent = auditHandler.getAndDiscardMostRecentEvent(); + + if (session.isAuthorized()) { + LOG.debug("filterKeyValue: Access granted."); + + result = ReturnCode.INCLUDE; + + if (auditEvent != null) { + LOG.debug("filterKeyValue: access is audited."); + + auditHandler.logAuthzAudits(Collections.singletonList(auditEvent)); + } else { + LOG.debug("filterKeyValue: no audit event returned. 
Access not audited."); + } + } else { + LOG.debug("filterKeyValue: Access denied. Denial not audited."); + } + } + + LOG.debug("filterKeyValue: {}", result); - private static final Logger LOG = LoggerFactory.getLogger(RangerAuthorizationFilter.class.getName()); - final Set _familiesAccessAllowed; - final Set _familiesAccessDenied; - final Set _familiesAccessIndeterminate; - final Map> _columnsAccessAllowed; - final Set _familiesFullyAuthorized; - final AuthorizationSession _session; - final HbaseAuditHandler _auditHandler = HbaseFactory.getInstance().getAuditHandler(); - - public RangerAuthorizationFilter(AuthorizationSession session, Set familiesAccessAllowed, Set familiesAccessDenied, Set familiesAccessIndeterminate, - Map> columnsAccessAllowed, Set familiesFullyAuthorized) { - // the class assumes that all of these can be empty but none of these can be null - _familiesAccessAllowed = familiesAccessAllowed; - _familiesAccessDenied = familiesAccessDenied; - _familiesAccessIndeterminate = familiesAccessIndeterminate; - _columnsAccessAllowed = columnsAccessAllowed; - _familiesFullyAuthorized = familiesFullyAuthorized; - // this session should have everything set on it except family and column which would be altered based on need - _session = session; - // we don't want to audit denial, so we need to make sure the hander is what we need it to be. 
- _session.auditHandler(_auditHandler); - } - - @Override - public ReturnCode filterKeyValue(Cell kv) throws IOException { - - if (LOG.isDebugEnabled()) { - LOG.debug("==> filterKeyValue"); - } - - String family = null; - byte[] familyBytes = CellUtil.cloneFamily(kv); - if (familyBytes != null && familyBytes.length > 0) { - family = Bytes.toString(familyBytes); - if (LOG.isDebugEnabled()) { - LOG.debug("filterKeyValue: evaluating family[" + family + "]."); - } - } - String column = null; - byte[] qualifier = CellUtil.cloneQualifier(kv); - if (qualifier != null && qualifier.length > 0) { - column = Bytes.toString(qualifier); - if (LOG.isDebugEnabled()) { - LOG.debug("filterKeyValue: evaluating column[" + column + "]."); - } - } else { - LOG.warn("filterKeyValue: empty/null column set! Unexpected!"); - } - - ReturnCode result = ReturnCode.NEXT_COL; - boolean authCheckNeeded = false; - if (family == null) { - LOG.warn("filterKeyValue: Unexpected - null/empty family! Access denied!"); - } else if (_familiesAccessDenied.contains(family)) { - LOG.debug("filterKeyValue: family found in access denied families cache. Access denied."); - } else if (_session.getPropertyIsColumnAuthOptimizationEnabled() && _familiesFullyAuthorized.contains(family)){ - LOG.debug("filterKeyValue: ColumnAuthOptimizationEnabled and family found in fully authorized families cache. Column authorization is not required"); - result = ReturnCode.INCLUDE; - } else if (_columnsAccessAllowed.containsKey(family)) { - LOG.debug("filterKeyValue: family found in column level access results cache."); - if (_columnsAccessAllowed.get(family).contains(column)) { - LOG.debug("filterKeyValue: family/column found in column level access results cache. Access allowed."); - result = ReturnCode.INCLUDE; - } else { - LOG.debug("filterKeyValue: family/column not in column level access results cache. 
Access denied."); - } - } else if (_familiesAccessAllowed.contains(family)) { - LOG.debug("filterKeyValue: family found in access allowed families cache. Must re-authorize for correct audit generation."); - authCheckNeeded = true; - } else if (_familiesAccessIndeterminate.contains(family)) { - LOG.debug("filterKeyValue: family found in indeterminate families cache. Evaluating access..."); - authCheckNeeded = true; - } else { - LOG.warn("filterKeyValue: Unexpected - alien family encountered that wasn't seen by pre-hook! Access Denied.!"); - } - - if (authCheckNeeded) { - LOG.debug("filterKeyValue: Checking authorization..."); - _session.columnFamily(family) - .column(column) - .buildRequest() - .authorize(); - // must always purge the captured audit event out of the audit handler to avoid messing up the next check - AuthzAuditEvent auditEvent = _auditHandler.getAndDiscardMostRecentEvent(); - if (_session.isAuthorized()) { - LOG.debug("filterKeyValue: Access granted."); - result = ReturnCode.INCLUDE; - if (auditEvent != null) { - LOG.debug("filterKeyValue: access is audited."); - _auditHandler.logAuthzAudits(Collections.singletonList(auditEvent)); - } else { - LOG.debug("filterKeyValue: no audit event returned. Access not audited."); - } - } else { - LOG.debug("filterKeyValue: Access denied. 
Denial not audited."); - } - } - if (LOG.isDebugEnabled()) { - LOG.debug("filterKeyValue: " + result); - } - return result; - } - - @Override - public String toString() { - return MoreObjects.toStringHelper(getClass()) - .add("familiesAccessAllowed", _familiesAccessAllowed) - .add("familiesAccessDenied", _familiesAccessDenied) - .add("familiesAccessUnknown", _familiesAccessIndeterminate) - .add("columnsAccessAllowed", _columnsAccessAllowed) - .toString(); - - } + return result; + } + @Override + public String toString() { + return MoreObjects.toStringHelper(getClass()) + .add("familiesAccessAllowed", familiesAccessAllowed) + .add("familiesAccessDenied", familiesAccessDenied) + .add("familiesAccessUnknown", familiesAccessIndeterminate) + .add("columnsAccessAllowed", columnsAccessAllowed) + .toString(); + } } diff --git a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerHBasePlugin.java b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerHBasePlugin.java new file mode 100644 index 0000000000..52c33edfb5 --- /dev/null +++ b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerHBasePlugin.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.ranger.authorization.hbase; + +import org.apache.ranger.authorization.hadoop.constants.RangerHadoopConstants; +import org.apache.ranger.plugin.model.RangerPolicy; +import org.apache.ranger.plugin.policyengine.RangerAccessRequest; +import org.apache.ranger.plugin.policyengine.RangerAccessResult; +import org.apache.ranger.plugin.policyengine.RangerAccessResultProcessor; +import org.apache.ranger.plugin.service.RangerBasePlugin; +import org.apache.ranger.plugin.util.ServicePolicies; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class RangerHBasePlugin extends RangerBasePlugin { + private static final Logger LOG = LoggerFactory.getLogger(RangerHBasePlugin.class); + + private boolean isHBaseShuttingDown; + private boolean isColumnAuthOptimizationEnabled; + + public RangerHBasePlugin(String appType) { + super("hbase", appType); + } + + public void setHBaseShuttingDown(boolean hbaseShuttingDown) { + isHBaseShuttingDown = hbaseShuttingDown; + } + + @Override + public void setPolicies(ServicePolicies policies) { + super.setPolicies(policies); + + this.isColumnAuthOptimizationEnabled = Boolean.parseBoolean(this.getServiceConfigs().get(RangerHadoopConstants.HBASE_COLUMN_AUTH_OPTIMIZATION)); + + LOG.info("isColumnAuthOptimizationEnabled={}", this.isColumnAuthOptimizationEnabled); + } + + @Override + public RangerAccessResult isAccessAllowed(RangerAccessRequest request, RangerAccessResultProcessor resultProcessor) { + RangerAccessResult ret; + + if (isHBaseShuttingDown) { + ret = new RangerAccessResult(RangerPolicy.POLICY_TYPE_ACCESS, this.getServiceName(), this.getServiceDef(), request); + + ret.setIsAllowed(true); + ret.setIsAudited(false); + + LOG.warn("Auth request came after HBase shutdown...."); + } else { + ret = super.isAccessAllowed(request, resultProcessor); + } + + return ret; + } + + public boolean getPropertyIsColumnAuthOptimizationEnabled() { + return this.isColumnAuthOptimizationEnabled; + } + + public void 
setColumnAuthOptimizationEnabled(boolean enable) { + this.isColumnAuthOptimizationEnabled = enable; + } +} diff --git a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerHBaseResource.java b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerHBaseResource.java index bfa5d7a59c..1fb169ab81 100644 --- a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerHBaseResource.java +++ b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerHBaseResource.java @@ -19,7 +19,6 @@ package org.apache.ranger.authorization.hbase; - import org.apache.commons.lang.StringUtils; import org.apache.ranger.plugin.policyengine.RangerAccessResourceImpl; @@ -27,14 +26,13 @@ import java.util.List; import java.util.Map; - public class RangerHBaseResource extends RangerAccessResourceImpl { - public static final String KEY_TABLE = "table"; - public static final String KEY_COLUMN_FAMILY = "column-family"; - public static final String KEY_COLUMN = "column"; - public static final String WILDCARD = "*"; - public static final String NAMESPACE_SEPARATOR = ":"; - public static final String DEFAULT_NAMESPACE = "default" + NAMESPACE_SEPARATOR; + public static final String KEY_TABLE = "table"; + public static final String KEY_COLUMN_FAMILY = "column-family"; + public static final String KEY_COLUMN = "column"; + public static final String WILDCARD = "*"; + public static final String NAMESPACE_SEPARATOR = ":"; + public static final String DEFAULT_NAMESPACE = "default" + NAMESPACE_SEPARATOR; public RangerHBaseResource() { } diff --git a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/RangerServiceHBase.java b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/RangerServiceHBase.java index 863f66242a..a9f616bca0 100644 --- a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/RangerServiceHBase.java +++ b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/RangerServiceHBase.java @@ -18,115 +18,110 @@ */ 
package org.apache.ranger.services.hbase; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - +import org.apache.commons.lang.StringUtils; import org.apache.ranger.plugin.client.HadoopException; import org.apache.ranger.plugin.model.RangerPolicy; -import org.apache.ranger.plugin.model.RangerService; -import org.apache.ranger.plugin.model.RangerServiceDef; import org.apache.ranger.plugin.model.RangerPolicy.RangerPolicyItem; import org.apache.ranger.plugin.model.RangerPolicy.RangerPolicyItemAccess; +import org.apache.ranger.plugin.model.RangerService; +import org.apache.ranger.plugin.model.RangerServiceDef; import org.apache.ranger.plugin.service.RangerBaseService; import org.apache.ranger.plugin.service.ResourceLookupContext; import org.apache.ranger.services.hbase.client.HBaseResourceMgr; -import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + public class RangerServiceHBase extends RangerBaseService { + private static final Logger LOG = LoggerFactory.getLogger(RangerServiceHBase.class); + + public static final String ACCESS_TYPE_READ = "read"; + public static final String ACCESS_TYPE_CREATE = "create"; + + public RangerServiceHBase() { + super(); + } + + @Override + public void init(RangerServiceDef serviceDef, RangerService service) { + super.init(serviceDef, service); + } - private static final Logger LOG = LoggerFactory.getLogger(RangerServiceHBase.class); - public static final String ACCESS_TYPE_READ = "read"; - public static final String ACCESS_TYPE_CREATE = "create"; - - public RangerServiceHBase() { - super(); - } - - @Override - public void init(RangerServiceDef serviceDef, RangerService service) { - super.init(serviceDef, service); - } - - @Override - public List getDefaultRangerPolicies() 
throws Exception { - if (LOG.isDebugEnabled()) { - LOG.debug("==> RangerServiceHbase.getDefaultRangerPolicies()"); - } - - List ret = super.getDefaultRangerPolicies(); - for (RangerPolicy defaultPolicy : ret) { - if (defaultPolicy.getName().contains("all") && StringUtils.isNotBlank(lookUpUser)) { - List accessListForLookupUser = new ArrayList(); - accessListForLookupUser.add(new RangerPolicyItemAccess(ACCESS_TYPE_READ)); - accessListForLookupUser.add(new RangerPolicyItemAccess(ACCESS_TYPE_CREATE)); - RangerPolicyItem policyItemForLookupUser = new RangerPolicyItem(); - policyItemForLookupUser.setUsers(Collections.singletonList(lookUpUser)); - policyItemForLookupUser.setAccesses(accessListForLookupUser); - policyItemForLookupUser.setDelegateAdmin(false); - defaultPolicy.addPolicyItem(policyItemForLookupUser); - } - } - - if (LOG.isDebugEnabled()) { - LOG.debug("<== RangerServiceHbase.getDefaultRangerPolicies()"); + @Override + public Map validateConfig() throws Exception { + Map ret = new HashMap<>(); + String serviceName = getServiceName(); + + LOG.debug("==> RangerServiceHBase.validateConfig() Service: ({})", serviceName); + + if (configs != null) { + try { + ret = HBaseResourceMgr.connectionTest(serviceName, configs); + } catch (HadoopException e) { + LOG.error("<== RangerServiceHBase.validateConfig() Error:{}", String.valueOf(e)); + + throw e; + } } - return ret; - } - - - @Override - public Map validateConfig() throws Exception { - Map ret = new HashMap(); - - String serviceName = getServiceName(); - - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerServiceHBase.validateConfig() Service: (" + serviceName + " )"); - } - if ( configs != null) { - try { - ret = HBaseResourceMgr.connectionTest(serviceName, configs); - } catch (HadoopException e) { - LOG.error("<== RangerServiceHBase.validateConfig() Error:" + e); - throw e; - } - } - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerServiceHBase.validateConfig() Response : (" + ret + " )"); - } - return ret; - } - 
- @Override - public List lookupResource(ResourceLookupContext context) throws Exception { - - List ret = new ArrayList(); - String serviceName = getServiceName(); - String serviceType = getServiceType(); - Map configs = getConfigs(); - - if(LOG.isDebugEnabled()) { - LOG.debug("==> RangerServiceHBase.lookupResource() Service : " + serviceName + " Context: (" + context + ")"); - } - - if (context != null) { - try { - ret = HBaseResourceMgr.getHBaseResource(serviceName,serviceType,configs,context); - } catch (Exception e) { - LOG.error( "<==RangerServiceHBase.lookupResource() Error : " + e); - throw e; - } - } - if(LOG.isDebugEnabled()) { - LOG.debug("<== RangerServiceHBase.lookupResource() Response: (" + ret + ")"); - } - return ret; - } -} + LOG.debug("<== RangerServiceHBase.validateConfig() Response : ({})", ret); + + return ret; + } + + @Override + public List lookupResource(ResourceLookupContext context) throws Exception { + List ret = new ArrayList<>(); + String serviceName = getServiceName(); + String serviceType = getServiceType(); + Map configs = getConfigs(); + + LOG.debug("==> RangerServiceHBase.lookupResource() Service : {} Context: ({})", serviceName, context); + + if (context != null) { + try { + ret = HBaseResourceMgr.getHBaseResource(serviceName, serviceType, configs, context); + } catch (Exception e) { + LOG.error("<==RangerServiceHBase.lookupResource() Error : {}", String.valueOf(e)); + + throw e; + } + } + + LOG.debug("<== RangerServiceHBase.lookupResource() Response: ({})", ret); + return ret; + } + + @Override + public List getDefaultRangerPolicies() throws Exception { + LOG.debug("==> RangerServiceHbase.getDefaultRangerPolicies()"); + + List ret = super.getDefaultRangerPolicies(); + for (RangerPolicy defaultPolicy : ret) { + if (defaultPolicy.getName().contains("all") && StringUtils.isNotBlank(lookUpUser)) { + List accessListForLookupUser = new ArrayList<>(); + + accessListForLookupUser.add(new RangerPolicyItemAccess(ACCESS_TYPE_READ)); + 
accessListForLookupUser.add(new RangerPolicyItemAccess(ACCESS_TYPE_CREATE)); + + RangerPolicyItem policyItemForLookupUser = new RangerPolicyItem(); + + policyItemForLookupUser.setUsers(Collections.singletonList(lookUpUser)); + policyItemForLookupUser.setAccesses(accessListForLookupUser); + policyItemForLookupUser.setDelegateAdmin(false); + + defaultPolicy.addPolicyItem(policyItemForLookupUser); + } + } + + LOG.debug("<== RangerServiceHbase.getDefaultRangerPolicies()"); + + return ret; + } +} diff --git a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java index 1af26f223a..1b1100189a 100644 --- a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java +++ b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java @@ -19,419 +19,426 @@ package org.apache.ranger.services.hbase.client; -import java.io.IOException; -import java.security.PrivilegedAction; -import java.util.*; -import java.util.Map.Entry; -import java.util.regex.Pattern; - -import javax.security.auth.Subject; - import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.*; -import org.apache.hadoop.hbase.client.*; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.MasterNotRunningException; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.ZooKeeperConnectionException; +import org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.client.HBaseAdmin; +import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.ranger.plugin.client.BaseClient; import org.apache.ranger.plugin.client.HadoopException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import 
javax.security.auth.Subject; + +import java.io.IOException; +import java.security.PrivilegedAction; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.regex.Pattern; public class HBaseClient extends BaseClient { + private static final Logger LOG = LoggerFactory.getLogger(HBaseClient.class); - private static final Logger LOG = LoggerFactory.getLogger(HBaseClient.class); - - private static Subject subj = null; - - private Configuration conf; - - public HBaseClient(String serivceName,Map connectionProp) { - - super(serivceName, addDefaultHBaseProp(connectionProp)); - conf = HBaseConfiguration.create(); - - Set rangerInternalPropertyKeys = getConfigHolder().getRangerInternalPropertyKeys(); - for (Map.Entry entry: connectionProperties.entrySet()) { - String key = entry.getKey(); - String value = entry.getValue(); - if (!rangerInternalPropertyKeys.contains(key)) { - conf.set(key, value); - } - } - - } - - //TODO: temporary solution - to be added to the UI for HBase - private static Map addDefaultHBaseProp(Map connectionProp) { - if (connectionProp != null) { - String param = "zookeeper.znode.parent"; - String unsecuredPath = "/hbase-unsecure"; - String authParam = "hadoop.security.authorization"; - - String ret = connectionProp.get(param); - LOG.info("HBase connection has [" + param + "] with value [" + ret + "]"); - if (ret == null) { - ret = connectionProp.get(authParam); - LOG.info("HBase connection has [" + authParam + "] with value [" + ret + "]"); - if (ret != null && ret.trim().equalsIgnoreCase("false")) { - LOG.info("HBase connection is resetting [" + param + "] with value [" + unsecuredPath + "]"); - connectionProp.put(param, unsecuredPath); - } - } - } - return connectionProp; - } - - public static Map connectionTest (String dataSource, - Map configs) throws Exception { - - Map responseData = new HashMap(); - final String errMsg = " You can 
still save the repository and start creating " - + "policies, but you would not be able to use autocomplete for " - + "resource names. Check ranger_admin.log for more info."; - boolean connectivityStatus = false; - - HBaseClient connectionObj = new HBaseClient(dataSource, configs); - if (connectionObj != null) { - try { - connectivityStatus = connectionObj.getHBaseStatus(); - } catch ( HadoopException e) { - LOG.error("<== HBaseClient.testConnection(): Unable to retrieve any databases using given parameters", e); - throw e; - } - } - - if (connectivityStatus) { - String successMsg = "ConnectionTest Successful"; - generateResponseDataMap(connectivityStatus, successMsg, successMsg, - null, null, responseData); - } else { - String failureMsg = "Unable to retrieve any databases using given parameters."; - generateResponseDataMap(connectivityStatus, failureMsg, failureMsg - + errMsg, null, null, responseData); - } - return responseData; - } - - public boolean getHBaseStatus() throws HadoopException{ - boolean hbaseStatus = false; - subj = getLoginSubject(); - final String errMsg = " You can still save the repository and start creating " - + "policies, but you would not be able to use autocomplete for " - + "resource names. 
Check ranger_admin.log for more info."; - if (subj != null) { - try { - - hbaseStatus = Subject.doAs(subj, new PrivilegedAction() { - @Override - public Boolean run() { - Boolean hbaseStatus1 = false; - try { - LOG.info("getHBaseStatus: creating default Hbase configuration"); - - LOG.info("getHBaseStatus: setting config values from client"); - setClientConfigValues(conf); - LOG.info("getHBaseStatus: checking HbaseAvailability with the new config"); - HBaseAdmin.available(conf); - LOG.info("getHBaseStatus: no exception: HbaseAvailability true"); - hbaseStatus1 = true; - } catch (ZooKeeperConnectionException zce) { - String msgDesc = "getHBaseStatus: Unable to connect to `ZooKeeper` " - + "using given config parameters."; - HadoopException hdpException = new HadoopException(msgDesc, zce); - hdpException.generateResponseDataMap(false, getMessage(zce), - msgDesc + errMsg, null, null); - - LOG.error(msgDesc + zce); - throw hdpException; - - } catch (MasterNotRunningException mnre) { - String msgDesc = "getHBaseStatus: Looks like `Master` is not running, " - + "so couldn't check that running HBase is available or not, " - + "Please try again later."; - HadoopException hdpException = new HadoopException( - msgDesc, mnre); - hdpException.generateResponseDataMap(false, - getMessage(mnre), msgDesc + errMsg, - null, null); - LOG.error(msgDesc + mnre); - throw hdpException; - - } catch(IOException io) { - String msgDesc = "getHBaseStatus: Unable to check availability of" - + " Hbase environment [" + getConfigHolder().getDatasourceName() + "]."; - HadoopException hdpException = new HadoopException(msgDesc, io); - hdpException.generateResponseDataMap(false, getMessage(io), - msgDesc + errMsg, null, null); - LOG.error(msgDesc + io); - throw hdpException; - - } catch (Throwable e) { - String msgDesc = "getHBaseStatus: Unable to check availability of" - + " Hbase environment [" + getConfigHolder().getDatasourceName() + "]."; - LOG.error(msgDesc + e); - hbaseStatus1 = false; - 
HadoopException hdpException = new HadoopException(msgDesc, e); - hdpException.generateResponseDataMap(false, getMessage(e), - msgDesc + errMsg, null, null); - throw hdpException; - } - return hbaseStatus1; - } - }); - } catch (SecurityException se) { - String msgDesc = "getHBaseStatus: Unable to connect to HBase Server instance "; - HadoopException hdpException = new HadoopException(msgDesc, se); - hdpException.generateResponseDataMap(false, getMessage(se), - msgDesc + errMsg, null, null); - LOG.error(msgDesc + se); - throw hdpException; - } - } else { - LOG.error("getHBaseStatus: secure login not done, subject is null"); - } - - return hbaseStatus; - } - - private void setClientConfigValues(Configuration conf) { - if (this.connectionProperties == null) { - return; - } - Iterator> i = this.connectionProperties.entrySet().iterator(); - while (i.hasNext()) { - Entry e = i.next(); - String v = conf.get(e.getKey()); - if (v != null && !v.equalsIgnoreCase(e.getValue())) { - conf.set(e.getKey(), e.getValue()); - } - } - } - - public List getTableList(final String tableNameMatching, final List existingTableList ) throws HadoopException { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HbaseClient.getTableList() tableNameMatching " + tableNameMatching + " ExisitingTableList " + existingTableList); - } - - List ret = null; - final String errMsg = " You can still save the repository and start creating " - + "policies, but you would not be able to use autocomplete for " - + "resource names. 
Check ranger_admin.log for more info."; - - subj = getLoginSubject(); - - if (subj != null) { - ret = Subject.doAs(subj, new PrivilegedAction>() { - - @Override - public List run() { - - List tableList = new ArrayList(); - Admin admin = null; - try { - LOG.info("getTableList: setting config values from client"); - setClientConfigValues(conf); - LOG.info("getTableList: checking HbaseAvailability with the new config"); - try (Connection conn = ConnectionFactory.createConnection(conf)) { - LOG.info("getTableList: no exception: HbaseAvailability true"); - admin = conn.getAdmin(); - List htds = admin.listTableDescriptors(Pattern.compile(tableNameMatching)); - if (htds != null) { - for (TableDescriptor htd : htds) { - String tableName = htd.getTableName().getNameAsString(); - if (existingTableList != null && existingTableList.contains(tableName)) { - continue; - } else { - tableList.add(htd.getTableName().getNameAsString()); - } - } - } else { - LOG.error("getTableList: null HTableDescription received from HBaseAdmin.listTables"); - } - } - } catch (ZooKeeperConnectionException zce) { - String msgDesc = "getTableList: Unable to connect to `ZooKeeper` " - + "using given config parameters."; - HadoopException hdpException = new HadoopException(msgDesc, zce); - hdpException.generateResponseDataMap(false, getMessage(zce), - msgDesc + errMsg, null, null); - LOG.error(msgDesc + zce); - throw hdpException; - - } catch (MasterNotRunningException mnre) { - String msgDesc = "getTableList: Looks like `Master` is not running, " - + "so couldn't check that running HBase is available or not, " - + "Please try again later."; - HadoopException hdpException = new HadoopException( - msgDesc, mnre); - hdpException.generateResponseDataMap(false, - getMessage(mnre), msgDesc + errMsg, - null, null); - LOG.error(msgDesc + mnre); - throw hdpException; - - } catch(IOException io) { - String msgDesc = "getTableList: Unable to get HBase table List for [repository:" - + 
getConfigHolder().getDatasourceName() + ",table-match:" - + tableNameMatching + "]."; - HadoopException hdpException = new HadoopException(msgDesc, io); - hdpException.generateResponseDataMap(false, getMessage(io), - msgDesc + errMsg, null, null); - LOG.error(msgDesc + io); - throw hdpException; - } catch (Throwable e) { - String msgDesc = "getTableList : Unable to get HBase table List for [repository:" - + getConfigHolder().getDatasourceName() + ",table-match:" - + tableNameMatching + "]."; - LOG.error(msgDesc + e); - HadoopException hdpException = new HadoopException(msgDesc, e); - hdpException.generateResponseDataMap(false, getMessage(e), - msgDesc + errMsg, null, null); - throw hdpException; - } finally { - if (admin != null) { - try { - admin.close(); - } catch (IOException e) { - LOG.error("Unable to close HBase connection [" + getConfigHolder().getDatasourceName() + "]", e); - } - } - } - return tableList; - } - - }); - } - if (LOG.isDebugEnabled()) { - LOG.debug("<== HbaseClient.getTableList() " + ret); - } - return ret; - } - - - public List getColumnFamilyList(final String columnFamilyMatching, final List tableList,final List existingColumnFamilies) { - if (LOG.isDebugEnabled()) { - LOG.debug("==> HbaseClient.getColumnFamilyList() columnFamilyMatching " + columnFamilyMatching + " ExisitingTableList " + tableList + "existingColumnFamilies " + existingColumnFamilies); - } - - List ret = null; - final String errMsg = " You can still save the repository and start creating " - + "policies, but you would not be able to use autocomplete for " - + "resource names. 
Check ranger_admin.log for more info."; - - subj = getLoginSubject(); - if (subj != null) { - try { - - ret = Subject.doAs(subj, new PrivilegedAction>() { - String tblName = null; - @Override - public List run() { - List colfList = new ArrayList(); - Admin admin = null; - try { - LOG.info("getColumnFamilyList: setting config values from client"); - setClientConfigValues(conf); - LOG.info("getColumnFamilyList: checking HbaseAvailability with the new config"); - try (Connection conn = ConnectionFactory.createConnection(conf)) { - LOG.info("getColumnFamilyList: no exception: HbaseAvailability true"); - admin = conn.getAdmin(); - if (tableList != null) { - for (String tableName : tableList) { - tblName = tableName; - TableDescriptor htd = admin.getDescriptor(TableName.valueOf(tableName)); - if (htd != null) { - for (ColumnFamilyDescriptor hcd : htd.getColumnFamilies()) { - String colf = hcd.getNameAsString(); - if (colf.matches(columnFamilyMatching)) { - if (existingColumnFamilies != null && existingColumnFamilies.contains(colf)) { - continue; - } else { - colfList.add(colf); - } - - } - } - } - } - } - } - } catch (ZooKeeperConnectionException zce) { - String msgDesc = "getColumnFamilyList: Unable to connect to `ZooKeeper` " - + "using given config parameters."; - HadoopException hdpException = new HadoopException(msgDesc, zce); - hdpException.generateResponseDataMap(false, getMessage(zce), - msgDesc + errMsg, null, null); - LOG.error(msgDesc + zce); - throw hdpException; - - } catch (MasterNotRunningException mnre) { - String msgDesc = "getColumnFamilyList: Looks like `Master` is not running, " - + "so couldn't check that running HBase is available or not, " - + "Please try again later."; - HadoopException hdpException = new HadoopException( - msgDesc, mnre); - hdpException.generateResponseDataMap(false, - getMessage(mnre), msgDesc + errMsg, - null, null); - LOG.error(msgDesc + mnre); - throw hdpException; - - } catch(IOException io) { - String msgDesc = 
"getColumnFamilyList: Unable to get HBase ColumnFamilyList for " - + "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tblName - + ", table-match:" + columnFamilyMatching + "] "; - HadoopException hdpException = new HadoopException(msgDesc, io); - hdpException.generateResponseDataMap(false, getMessage(io), - msgDesc + errMsg, null, null); - LOG.error(msgDesc + io); - throw hdpException; - } catch (SecurityException se) { - String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for " - + "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tblName - + ", table-match:" + columnFamilyMatching + "] "; - HadoopException hdpException = new HadoopException(msgDesc, se); - hdpException.generateResponseDataMap(false, getMessage(se), - msgDesc + errMsg, null, null); - LOG.error(msgDesc + se); - throw hdpException; - - } catch (Throwable e) { - String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for " - + "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tblName - + ", table-match:" + columnFamilyMatching + "] "; - LOG.error(msgDesc); - HadoopException hdpException = new HadoopException(msgDesc, e); - hdpException.generateResponseDataMap(false, getMessage(e), - msgDesc + errMsg, null, null); - LOG.error(msgDesc + e); - throw hdpException; - } finally { - if (admin != null) { - try { - admin.close(); - } catch (IOException e) { - LOG.error("Unable to close HBase connection [" + getConfigHolder().getDatasourceName() + "]", e); - } - } - } - return colfList; - } - - }); - } catch (SecurityException se) { - String msgDesc = "getColumnFamilyList: Unable to connect to HBase Server instance "; - HadoopException hdpException = new HadoopException(msgDesc, se); - hdpException.generateResponseDataMap(false, getMessage(se), - msgDesc + errMsg, null, null); - LOG.error(msgDesc + se); - throw hdpException; - } - } - if (LOG.isDebugEnabled()) { - LOG.debug("<== HbaseClient.getColumnFamilyList() 
" + ret); - } - return ret; - } + private static final String ERROR_MSG = " You can still save the repository and start creating policies, but you would not be able to use autocomplete for resource names. Check ranger_admin.log for more info."; -} + private final Configuration conf; + + public HBaseClient(String serivceName, Map connectionProp) { + super(serivceName, addDefaultHBaseProp(connectionProp)); + + conf = HBaseConfiguration.create(); + + Set rangerInternalPropertyKeys = getConfigHolder().getRangerInternalPropertyKeys(); + + for (Map.Entry entry : connectionProperties.entrySet()) { + String key = entry.getKey(); + String value = entry.getValue(); + + if (!rangerInternalPropertyKeys.contains(key)) { + conf.set(key, value); + } + } + } + + public static Map connectionTest(String dataSource, Map configs) throws Exception { + Map responseData = new HashMap<>(); + boolean connectivityStatus = false; + HBaseClient connectionObj = new HBaseClient(dataSource, configs); + + if (connectionObj != null) { + try { + connectivityStatus = connectionObj.getHBaseStatus(); + } catch (HadoopException e) { + LOG.error("<== HBaseClient.testConnection(): Unable to retrieve any databases using given parameters", e); + + throw e; + } + } + + if (connectivityStatus) { + String successMsg = "ConnectionTest Successful"; + + generateResponseDataMap(connectivityStatus, successMsg, successMsg, null, null, responseData); + } else { + String failureMsg = "Unable to retrieve any databases using given parameters."; + + generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + ERROR_MSG, null, null, responseData); + } + + return responseData; + } + + public boolean getHBaseStatus() throws HadoopException { + boolean hbaseStatus = false; + Subject subj = getLoginSubject(); + + if (subj != null) { + try { + hbaseStatus = Subject.doAs(subj, new PrivilegedAction() { + @Override + public Boolean run() { + boolean hbaseStatus1 = false; + + try { + LOG.info("getHBaseStatus: creating 
default Hbase configuration"); + LOG.info("getHBaseStatus: setting config values from client"); + + setClientConfigValues(conf); + + LOG.info("getHBaseStatus: checking HbaseAvailability with the new config"); + + HBaseAdmin.available(conf); + + LOG.info("getHBaseStatus: no exception: HbaseAvailability true"); + + hbaseStatus1 = true; + } catch (ZooKeeperConnectionException zce) { + String msgDesc = "getHBaseStatus: Unable to connect to `ZooKeeper` using given config parameters."; + HadoopException hdpException = new HadoopException(msgDesc, zce); + + hdpException.generateResponseDataMap(false, getMessage(zce), msgDesc + ERROR_MSG, null, null); + + LOG.error(msgDesc + zce); + + throw hdpException; + } catch (MasterNotRunningException mnre) { + String msgDesc = "getHBaseStatus: Looks like `Master` is not running, so couldn't check that running HBase is available or not, Please try again later."; + HadoopException hdpException = new HadoopException(msgDesc, mnre); + + hdpException.generateResponseDataMap(false, getMessage(mnre), msgDesc + ERROR_MSG, null, null); + + LOG.error(msgDesc + mnre); + + throw hdpException; + } catch (IOException io) { + String msgDesc = "getHBaseStatus: Unable to check availability of Hbase environment [" + getConfigHolder().getDatasourceName() + "]."; + HadoopException hdpException = new HadoopException(msgDesc, io); + + hdpException.generateResponseDataMap(false, getMessage(io), msgDesc + ERROR_MSG, null, null); + + LOG.error(msgDesc + io); + + throw hdpException; + } catch (Throwable e) { + String msgDesc = "getHBaseStatus: Unable to check availability of Hbase environment [" + getConfigHolder().getDatasourceName() + "]."; + HadoopException hdpException = new HadoopException(msgDesc, e); + + hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERROR_MSG, null, null); + + LOG.error(msgDesc + e); + + throw hdpException; + } + + return hbaseStatus1; + } + }); + } catch (SecurityException se) { + String msgDesc = 
"getHBaseStatus: Unable to connect to HBase Server instance "; + HadoopException hdpException = new HadoopException(msgDesc, se); + + hdpException.generateResponseDataMap(false, getMessage(se), msgDesc + ERROR_MSG, null, null); + + LOG.error(msgDesc + se); + + throw hdpException; + } + } else { + LOG.error("getHBaseStatus: secure login not done, subject is null"); + } + + return hbaseStatus; + } + + public List getTableList(final String tableNameMatching, final List existingTableList) throws HadoopException { + LOG.debug("==> HbaseClient.getTableList() tableNameMatching {} ExisitingTableList {}", tableNameMatching, existingTableList); + + List ret = null; + Subject subj = getLoginSubject(); + + if (subj != null) { + ret = Subject.doAs(subj, new PrivilegedAction>() { + @Override + public List run() { + List tableList = new ArrayList<>(); + Admin admin = null; + + try { + LOG.info("getTableList: setting config values from client"); + + setClientConfigValues(conf); + + LOG.info("getTableList: checking HbaseAvailability with the new config"); + + try (Connection conn = ConnectionFactory.createConnection(conf)) { + LOG.info("getTableList: no exception: HbaseAvailability true"); + + admin = conn.getAdmin(); + + List htds = admin.listTableDescriptors(Pattern.compile(tableNameMatching)); + if (htds != null) { + for (TableDescriptor htd : htds) { + String tableName = htd.getTableName().getNameAsString(); + + if (existingTableList != null && existingTableList.contains(tableName)) { + continue; + } else { + tableList.add(htd.getTableName().getNameAsString()); + } + } + } else { + LOG.error("getTableList: null HTableDescription received from HBaseAdmin.listTables"); + } + } + } catch (ZooKeeperConnectionException zce) { + String msgDesc = "getTableList: Unable to connect to `ZooKeeper` using given config parameters."; + HadoopException hdpException = new HadoopException(msgDesc, zce); + + hdpException.generateResponseDataMap(false, getMessage(zce), msgDesc + ERROR_MSG, null, 
null); + + LOG.error(msgDesc + zce); + + throw hdpException; + } catch (MasterNotRunningException mnre) { + String msgDesc = "getTableList: Looks like `Master` is not running, so couldn't check that running HBase is available or not, Please try again later."; + HadoopException hdpException = new HadoopException(msgDesc, mnre); + + hdpException.generateResponseDataMap(false, getMessage(mnre), msgDesc + ERROR_MSG, null, null); + + LOG.error(msgDesc + mnre); + + throw hdpException; + } catch (IOException io) { + String msgDesc = "getTableList: Unable to get HBase table List for [repository:" + getConfigHolder().getDatasourceName() + ",table-match:" + tableNameMatching + "]."; + HadoopException hdpException = new HadoopException(msgDesc, io); + + hdpException.generateResponseDataMap(false, getMessage(io), msgDesc + ERROR_MSG, null, null); + + LOG.error(msgDesc + io); + + throw hdpException; + } catch (Throwable e) { + String msgDesc = "getTableList : Unable to get HBase table List for [repository:" + getConfigHolder().getDatasourceName() + ",table-match:" + tableNameMatching + "]."; + HadoopException hdpException = new HadoopException(msgDesc, e); + + hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERROR_MSG, null, null); + + LOG.error(msgDesc + e); + + throw hdpException; + } finally { + if (admin != null) { + try { + admin.close(); + } catch (IOException e) { + LOG.error("Unable to close HBase connection [{}]", getConfigHolder().getDatasourceName(), e); + } + } + } + + return tableList; + } + }); + } + + LOG.debug("<== HbaseClient.getTableList() {}", ret); + + return ret; + } + + public List getColumnFamilyList(final String columnFamilyMatching, final List tableList, final List existingColumnFamilies) { + LOG.debug("==> HbaseClient.getColumnFamilyList() columnFamilyMatching {} ExisitingTableList {}existingColumnFamilies {}", columnFamilyMatching, tableList, existingColumnFamilies); + + List ret = null; + Subject subj = getLoginSubject(); + + if 
(subj != null) { + try { + ret = Subject.doAs(subj, new PrivilegedAction>() { + String tblName; + + @Override + public List run() { + List colfList = new ArrayList<>(); + Admin admin = null; + + try { + LOG.info("getColumnFamilyList: setting config values from client"); + + setClientConfigValues(conf); + + LOG.info("getColumnFamilyList: checking HbaseAvailability with the new config"); + + try (Connection conn = ConnectionFactory.createConnection(conf)) { + LOG.info("getColumnFamilyList: no exception: HbaseAvailability true"); + + admin = conn.getAdmin(); + + if (tableList != null) { + for (String tableName : tableList) { + tblName = tableName; + + TableDescriptor htd = admin.getDescriptor(TableName.valueOf(tableName)); + if (htd != null) { + for (ColumnFamilyDescriptor hcd : htd.getColumnFamilies()) { + String colf = hcd.getNameAsString(); + + if (colf.matches(columnFamilyMatching)) { + if (existingColumnFamilies != null && existingColumnFamilies.contains(colf)) { + continue; + } else { + colfList.add(colf); + } + } + } + } + } + } + } + } catch (ZooKeeperConnectionException zce) { + String msgDesc = "getColumnFamilyList: Unable to connect to `ZooKeeper` using given config parameters."; + HadoopException hdpException = new HadoopException(msgDesc, zce); + + hdpException.generateResponseDataMap(false, getMessage(zce), msgDesc + ERROR_MSG, null, null); + + LOG.error(msgDesc + zce); + + throw hdpException; + } catch (MasterNotRunningException mnre) { + String msgDesc = "getColumnFamilyList: Looks like `Master` is not running, so couldn't check that running HBase is available or not, Please try again later."; + HadoopException hdpException = new HadoopException(msgDesc, mnre); + + hdpException.generateResponseDataMap(false, getMessage(mnre), msgDesc + ERROR_MSG, null, null); + + LOG.error(msgDesc + mnre); + + throw hdpException; + } catch (IOException io) { + String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for [repository:" + 
getConfigHolder().getDatasourceName() + ",table:" + tblName + ", table-match:" + columnFamilyMatching + "] "; + HadoopException hdpException = new HadoopException(msgDesc, io); + + hdpException.generateResponseDataMap(false, getMessage(io), msgDesc + ERROR_MSG, null, null); + + LOG.error(msgDesc + io); + + throw hdpException; + } catch (SecurityException se) { + String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for [repository:" + getConfigHolder().getDatasourceName() + ",table:" + tblName + ", table-match:" + columnFamilyMatching + "] "; + HadoopException hdpException = new HadoopException(msgDesc, se); + + hdpException.generateResponseDataMap(false, getMessage(se), msgDesc + ERROR_MSG, null, null); + + LOG.error(msgDesc + se); + + throw hdpException; + } catch (Throwable e) { + String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for [repository:" + getConfigHolder().getDatasourceName() + ",table:" + tblName + ", table-match:" + columnFamilyMatching + "] "; + HadoopException hdpException = new HadoopException(msgDesc, e); + + hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERROR_MSG, null, null); + + LOG.error(msgDesc + e); + + throw hdpException; + } finally { + if (admin != null) { + try { + admin.close(); + } catch (IOException e) { + LOG.error("Unable to close HBase connection [{}]", getConfigHolder().getDatasourceName(), e); + } + } + } + + return colfList; + } + }); + } catch (SecurityException se) { + String msgDesc = "getColumnFamilyList: Unable to connect to HBase Server instance "; + HadoopException hdpException = new HadoopException(msgDesc, se); + + hdpException.generateResponseDataMap(false, getMessage(se), msgDesc + ERROR_MSG, null, null); + + LOG.error(msgDesc + se); + + throw hdpException; + } + } + + LOG.debug("<== HbaseClient.getColumnFamilyList() {}", ret); + + return ret; + } + + //TODO: temporary solution - to be added to the UI for HBase + private static Map 
addDefaultHBaseProp(Map connectionProp) { + if (connectionProp != null) { + String param = "zookeeper.znode.parent"; + String unsecuredPath = "/hbase-unsecure"; + String authParam = "hadoop.security.authorization"; + String ret = connectionProp.get(param); + + LOG.info("HBase connection has [{}] with value [{}]", param, ret); + + if (ret == null) { + ret = connectionProp.get(authParam); + + LOG.info("HBase connection has [{}] with value [{}]", authParam, ret); + + if (ret != null && ret.trim().equalsIgnoreCase("false")) { + LOG.info("HBase connection is resetting [{}] with value [{}]", param, unsecuredPath); + + connectionProp.put(param, unsecuredPath); + } + } + } + + return connectionProp; + } + + private void setClientConfigValues(Configuration conf) { + if (this.connectionProperties == null) { + return; + } + + for (Entry e : this.connectionProperties.entrySet()) { + String v = conf.get(e.getKey()); + + if (v != null && !v.equalsIgnoreCase(e.getValue())) { + conf.set(e.getKey(), e.getValue()); + } + } + } +} diff --git a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseConnectionMgr.java b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseConnectionMgr.java index 7b17f88d96..faa235d420 100644 --- a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseConnectionMgr.java +++ b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseConnectionMgr.java @@ -19,6 +19,10 @@ package org.apache.ranger.services.hbase.client; +import org.apache.ranger.plugin.util.TimedEventUtil; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.util.List; import java.util.Map; import java.util.concurrent.Callable; @@ -26,106 +30,102 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; -import org.apache.ranger.plugin.util.TimedEventUtil; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +public class HBaseConnectionMgr { + private static final Logger 
LOG = LoggerFactory.getLogger(HBaseConnectionMgr.class); + protected ConcurrentMap hbaseConnectionCache; + protected ConcurrentMap repoConnectStatusMap; -public class HBaseConnectionMgr { + public HBaseConnectionMgr() { + hbaseConnectionCache = new ConcurrentHashMap<>(); + repoConnectStatusMap = new ConcurrentHashMap<>(); + } + + public HBaseClient getHBaseConnection(final String serviceName, final String serviceType, final Map configs) { + HBaseClient client = null; + + if (serviceType != null) { + // get it from the cache + client = hbaseConnectionCache.get(serviceName); + + if (client == null) { + if (configs == null) { + final Callable connectHBase = new Callable() { + @Override + public HBaseClient call() throws Exception { + HBaseClient hBaseClient = null; + + if (serviceName != null) { + try { + hBaseClient = new HBaseClient(serviceName, configs); + } catch (Exception ex) { + LOG.error("Error connecting HBase repository : ", ex); + } + } + + return hBaseClient; + } + }; + + try { + if (connectHBase != null) { + client = TimedEventUtil.timedTask(connectHBase, 5, TimeUnit.SECONDS); + } + } catch (Exception e) { + LOG.error("Error connecting HBase repository : {}", serviceName); + } + } else { + final Callable connectHBase = new Callable() { + @Override + public HBaseClient call() throws Exception { + HBaseClient hBaseClient = null; + + if (serviceName != null && configs != null) { + try { + hBaseClient = new HBaseClient(serviceName, configs); + } catch (Exception ex) { + LOG.error("Error connecting HBase repository : ", ex); + } + } + + return hBaseClient; + } + }; + + try { + if (connectHBase != null) { + client = TimedEventUtil.timedTask(connectHBase, 5, TimeUnit.SECONDS); + } + } catch (Exception e) { + LOG.error("Error connecting HBase repository : {} using config : {}", serviceName, configs); + } + } + + if (client != null) { + HBaseClient oldClient = hbaseConnectionCache.putIfAbsent(serviceName, client); + + if (oldClient != null) { + // in the meantime 
someone else has put a valid client into the cache, let's use that instead. + client = oldClient; + } + } + } else { + List testConnect = client.getTableList(".\\*", null); + + if (testConnect == null) { + hbaseConnectionCache.remove(serviceName); + + client = getHBaseConnection(serviceName, serviceType, configs); + } + } + + repoConnectStatusMap.put(serviceName, true); + } else { + LOG.error("Service Name not found with name {}", serviceName, new Throwable()); + } + + LOG.debug("<== HBaseConnectionMgr.getHBaseConnection() HbaseClient : {}", client); - private static final Logger LOG = LoggerFactory.getLogger(HBaseConnectionMgr.class); - - protected ConcurrentMap hbaseConnectionCache; - - protected ConcurrentMap repoConnectStatusMap; - - public HBaseConnectionMgr() { - hbaseConnectionCache = new ConcurrentHashMap(); - repoConnectStatusMap = new ConcurrentHashMap(); - } - - public HBaseClient getHBaseConnection(final String serviceName, final String serviceType, final Map configs) { - - HBaseClient client = null; - if (serviceType != null) { - // get it from the cache - client = hbaseConnectionCache.get(serviceName); - if (client == null) { - if ( configs == null ) { - final Callable connectHBase = new Callable() { - @Override - public HBaseClient call() throws Exception { - HBaseClient hBaseClient=null; - if(serviceName!=null){ - try{ - hBaseClient=new HBaseClient(serviceName, configs); - }catch(Exception ex){ - LOG.error("Error connecting HBase repository : ", ex); - } - } - return hBaseClient; - } - }; - - try { - if(connectHBase!=null){ - client = TimedEventUtil.timedTask(connectHBase, 5, TimeUnit.SECONDS); - } - } catch(Exception e){ - LOG.error("Error connecting HBase repository : " + serviceName); - } - } else { - - final Callable connectHBase = new Callable() { - @Override - public HBaseClient call() throws Exception { - HBaseClient hBaseClient=null; - if(serviceName!=null && configs !=null){ - try{ - hBaseClient=new HBaseClient(serviceName,configs); - 
}catch(Exception ex){ - LOG.error("Error connecting HBase repository : ", ex); - } - } - return hBaseClient; - } - }; - - try { - if(connectHBase!=null){ - client = TimedEventUtil.timedTask(connectHBase, 5, TimeUnit.SECONDS); - } - } catch(Exception e){ - LOG.error("Error connecting HBase repository : "+ - serviceName +" using config : "+ configs); - } - } - - if(client!=null){ - HBaseClient oldClient = hbaseConnectionCache.putIfAbsent(serviceName, client); - if (oldClient != null) { - // in the meantime someone else has put a valid client into the cache, let's use that instead. - client = oldClient; - } - } - - } else { - - List testConnect = client.getTableList(".\\*",null); - - if(testConnect == null){ - hbaseConnectionCache.remove(serviceName); - client = getHBaseConnection(serviceName,serviceType,configs); - } - } - repoConnectStatusMap.put(serviceName, true); - } else { - LOG.error("Service Name not found with name " + serviceName, - new Throwable()); - } - if(LOG.isDebugEnabled()) { - LOG.debug("<== HBaseConnectionMgr.getHBaseConnection() HbaseClient : "+ client ); - } - return client; - } + return client; + } } diff --git a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseResourceMgr.java b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseResourceMgr.java index 1577f5b5da..f8e4eee02c 100644 --- a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseResourceMgr.java +++ b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseResourceMgr.java @@ -19,140 +19,135 @@ package org.apache.ranger.services.hbase.client; -import java.util.List; -import java.util.Map; -import java.util.concurrent.Callable; -import java.util.concurrent.TimeUnit; - import org.apache.ranger.plugin.client.HadoopException; import org.apache.ranger.plugin.service.ResourceLookupContext; import org.apache.ranger.plugin.util.TimedEventUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import 
java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.TimeUnit; + public class HBaseResourceMgr { + private static final Logger LOG = LoggerFactory.getLogger(HBaseResourceMgr.class); + + private static final String TABLE = "table"; + private static final String COLUMNFAMILY = "column-family"; + + private HBaseResourceMgr() {} + + public static Map connectionTest(String serviceName, Map configs) throws Exception { + LOG.debug("==> HBaseResourceMgr.connectionTest() ServiceName: {} Configs{}", serviceName, configs); + + Map ret = null; + + try { + ret = HBaseClient.connectionTest(serviceName, configs); + } catch (HadoopException e) { + LOG.error("<== HBaseResourceMgr.connectionTest() Error: " + e); + + throw e; + } + + LOG.debug("<== HBaseResourceMgr.connectionTest() Result: {}", ret); + + return ret; + } + + public static List getHBaseResource(String serviceName, String serviceType, Map configs, ResourceLookupContext context) throws Exception { + String userInput = context.getUserInput(); + String resource = context.getResourceName(); + Map> resourceMap = context.getResources(); + List resultList = null; + String tableName = null; + String columnFamilies = null; + List tableList = null; + List columnFamilyList = null; + + LOG.debug("==> HBaseResourceMgr.getHBaseResource UserInput: \"{}\" resource : {} resourceMap: {}", userInput, resource, resourceMap); + + if (userInput != null && resource != null) { + if (resourceMap != null && !resourceMap.isEmpty()) { + tableList = resourceMap.get(TABLE); + columnFamilyList = resourceMap.get(COLUMNFAMILY); + } + + switch (resource.trim().toLowerCase()) { + case TABLE: + tableName = userInput; + break; + case COLUMNFAMILY: + columnFamilies = userInput; + break; + default: + break; + } + } + + if (serviceName != null && userInput != null) { + final List finaltableList = tableList; + final List finalcolumnFamilyList = columnFamilyList; + + try { + final HBaseClient hBaseClient = 
new HBaseConnectionMgr().getHBaseConnection(serviceName, serviceType, configs); + Callable> callableObj = null; + + if (hBaseClient != null) { + if (tableName != null && !tableName.isEmpty()) { + final String finalTableName; + + //get tableList + if (!tableName.endsWith("*")) { + tableName += "*"; + } + + tableName = tableName.replaceAll("\\*", ".\\*"); + finalTableName = tableName; + + callableObj = new Callable>() { + @Override + public List call() { + return hBaseClient.getTableList(finalTableName, finaltableList); + } + }; + } else { + //get columfamilyList + final String finalColFamilies; + + if (columnFamilies != null && !columnFamilies.isEmpty()) { + if (!columnFamilies.endsWith("*")) { + columnFamilies += "*"; + } + + columnFamilies = columnFamilies.replaceAll("\\*", ".\\*"); + finalColFamilies = columnFamilies; + + callableObj = new Callable>() { + @Override + public List call() { + return hBaseClient.getColumnFamilyList(finalColFamilies, finaltableList, finalcolumnFamilyList); + } + }; + } + } + + if (callableObj != null) { + synchronized (hBaseClient) { + resultList = TimedEventUtil.timedTask(callableObj, 5, TimeUnit.SECONDS); + } + } + } + } catch (Exception e) { + LOG.error("Unable to get hbase resources.", e); + + throw e; + } + } + + LOG.debug("<== HBaseResourceMgr.getHBaseResource() Result :{}", resultList); - private static final Logger LOG = LoggerFactory.getLogger(HBaseResourceMgr.class); - - private static final String TABLE = "table"; - private static final String COLUMNFAMILY = "column-family"; - - public static Map connectionTest(String serviceName, Map configs) throws Exception { - Map ret = null; - if(LOG.isDebugEnabled()) { - LOG.debug("<== HBaseResourceMgr.connectionTest() ServiceName: "+ serviceName + "Configs" + configs ); - } - - try { - ret = HBaseClient.connectionTest(serviceName, configs); - } catch (HadoopException e) { - LOG.error("<== HBaseResourceMgr.connectionTest() Error: " + e); - throw e; - } - if(LOG.isDebugEnabled()) { - 
LOG.debug("<== HBaseResourceMgr.connectionTest() Result: "+ ret ); - } - return ret; - } - - public static List getHBaseResource(String serviceName, String serviceType, Map configs,ResourceLookupContext context) throws Exception{ - - String userInput = context.getUserInput(); - String resource = context.getResourceName(); - Map> resourceMap = context.getResources(); - List resultList = null; - String tableName = null; - String columnFamilies = null; - List tableList = null; - List columnFamilyList = null; - - if(LOG.isDebugEnabled()) { - LOG.debug("<== HBaseResourceMgr.getHBaseResource UserInput: \""+ userInput + "\" resource : " + resource + " resourceMap: " + resourceMap); - } - - if ( userInput != null && resource != null) { - if ( resourceMap != null && !resourceMap.isEmpty() ) { - tableList = resourceMap.get(TABLE); - columnFamilyList = resourceMap.get(COLUMNFAMILY); - } - switch (resource.trim().toLowerCase()) { - case TABLE: - tableName = userInput; - break; - case COLUMNFAMILY: - columnFamilies = userInput; - break; - default: - break; - } - } - - if (serviceName != null && userInput != null) { - final List finaltableList = tableList; - final List finalcolumnFamilyList = columnFamilyList; - - try { - if(LOG.isDebugEnabled()) { - LOG.debug("<== HBaseResourceMgr.getHBaseResource UserInput: \""+ userInput + "\" configs: " + configs + " context: " + context); - } - final HBaseClient hBaseClient = new HBaseConnectionMgr().getHBaseConnection(serviceName,serviceType,configs); - Callable> callableObj = null; - - if ( hBaseClient != null) { - if ( tableName != null && !tableName.isEmpty()) { - final String finalTableName; - //get tableList - if (!tableName.endsWith("*")) { - tableName += "*"; - } - - tableName = tableName.replaceAll("\\*", ".\\*"); - finalTableName = tableName; - - callableObj = new Callable>() { - @Override - public List call() { - return hBaseClient.getTableList(finalTableName,finaltableList); - } - }; - } else { - //get columfamilyList - final 
String finalColFamilies; - if (columnFamilies != null && !columnFamilies.isEmpty()) { - if (!columnFamilies.endsWith("*")) { - columnFamilies += "*"; - } - - columnFamilies = columnFamilies.replaceAll("\\*", - ".\\*"); - finalColFamilies = columnFamilies; - - callableObj = new Callable>() { - @Override - public List call() { - return hBaseClient.getColumnFamilyList(finalColFamilies,finaltableList,finalcolumnFamilyList); - } - }; - } - } - if (callableObj != null) { - synchronized(hBaseClient) { - resultList = TimedEventUtil.timedTask(callableObj, 5, - TimeUnit.SECONDS); - } - } - - } - } catch (Exception e) { - LOG.error("Unable to get hbase resources.", e); - throw e; - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug("<== HBaseResourceMgr.getHBaseResource() Result :" + resultList); - } - - return resultList; - } + return resultList; + } } diff --git a/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/AuthorizationSessionTest.java b/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/AuthorizationSessionTest.java index 2be256ffd3..b96ef93055 100644 --- a/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/AuthorizationSessionTest.java +++ b/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/AuthorizationSessionTest.java @@ -18,198 +18,199 @@ */ package org.apache.ranger.authorization.hbase; - - -import static org.mockito.Mockito.*; - import org.apache.hadoop.hbase.security.User; import org.junit.Assert; import org.junit.Test; -public class AuthorizationSessionTest { +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; - @Test - public void testAuthorizationSession() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testOperation() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testOtherInformation() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testAccess() { -// Assert.fail("Not yet implemented"); - } - - @Test - 
public void testUser() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testTable() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testColumnFamily() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testColumn() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testIsBuildable() { - RangerHBasePlugin plugin = new RangerHBasePlugin( "hbase"); - AuthorizationSession session = new AuthorizationSession(plugin); - try { - session.verifyBuildable(); - Assert.fail("Should have thrown exception"); - } catch (IllegalStateException e) { } - // user and access are the only required ones. - User user = mock(User.class); - when(user.getGroupNames()).thenReturn(new String[] { "groups", "group2" }); - session.access(" "); - session.user(user); - try { - session.verifyBuildable(); - } catch (IllegalStateException e) { - Assert.fail("Shouldn't have thrown an exception!"); - } - // setting column-family without table is a problem - session.columnFamily("family"); - try { - session.verifyBuildable(); - Assert.fail("Should have thrown an exception"); - } catch (IllegalStateException e) { } - - session.table("table"); - try { - session.verifyBuildable(); - } catch (IllegalStateException e) { - Assert.fail("Shouldn't have thrown an exception!"); - } - // setting column without column-family is a problem - session.columnFamily(null); - session.column("col"); - try { - session.verifyBuildable(); - Assert.fail("Should have thrown an exception"); - } catch (IllegalStateException e) { } - session.columnFamily("family"); - try { - session.verifyBuildable(); - } catch (IllegalStateException e) { - Assert.fail("Should have thrown an exception"); - } - } - - @Test - public void testZapAuthorizationState() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testIsProvided() { - AuthorizationSession session = new AuthorizationSession(null); - Assert.assertFalse(session.isProvided(null)); - 
Assert.assertFalse(session.isProvided("")); - Assert.assertTrue(session.isProvided(" ")); - Assert.assertTrue(session.isProvided("xtq")); - } - - @Test - public void testBuildRequest() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testAuthorize() { - RangerHBasePlugin plugin = new RangerHBasePlugin( "hbase"); - - User user = mock(User.class); - when(user.getShortName()).thenReturn("user1"); - when(user.getGroupNames()).thenReturn(new String[] { "users" } ); - AuthorizationSession session = new AuthorizationSession(plugin); - session.access("read") - .user(user) - .table(":meta:") - .buildRequest() - .authorize(); - } - - @Test - public void testPublishResults() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testIsAuthorized() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testGetDenialReason() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testGetResourceType() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testRequestToString() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testAudit() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testGetPrintableValue() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testBuildAccessDeniedMessage() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testBuildAccessDeniedMessageString() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testKnownPatternAllowedNotAudited() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testKnownPatternDisallowedNotAudited() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testAuditHandler() { -// Assert.fail("Not yet implemented"); - } - - @Test - public void testBuildResult() { -// Assert.fail("Not yet implemented"); - } +public class AuthorizationSessionTest { + @Test + public void testAuthorizationSession() { + //Assert.fail("Not yet 
implemented"); + } + + @Test + public void testOperation() { + //Assert.fail("Not yet implemented"); + } + + @Test + public void testOtherInformation() { + //Assert.fail("Not yet implemented"); + } + + @Test + public void testAccess() { + //Assert.fail("Not yet implemented"); + } + + @Test + public void testUser() { + //Assert.fail("Not yet implemented"); + } + + @Test + public void testTable() { + //Assert.fail("Not yet implemented"); + } + + @Test + public void testColumnFamily() { + //Assert.fail("Not yet implemented"); + } + + @Test + public void testColumn() { + //Assert.fail("Not yet implemented"); + } + + @Test + public void testIsBuildable() { + RangerHBasePlugin plugin = new RangerHBasePlugin("hbase"); + AuthorizationSession session = new AuthorizationSession(plugin); + try { + session.verifyBuildable(); + Assert.fail("Should have thrown exception"); + } catch (IllegalStateException e) { + } + // user and access are the only required ones. + User user = mock(User.class); + when(user.getGroupNames()).thenReturn(new String[] {"groups", "group2"}); + session.access(" "); + session.user(user); + try { + session.verifyBuildable(); + } catch (IllegalStateException e) { + Assert.fail("Shouldn't have thrown an exception!"); + } + // setting column-family without table is a problem + session.columnFamily("family"); + try { + session.verifyBuildable(); + Assert.fail("Should have thrown an exception"); + } catch (IllegalStateException e) { + } + + session.table("table"); + try { + session.verifyBuildable(); + } catch (IllegalStateException e) { + Assert.fail("Shouldn't have thrown an exception!"); + } + // setting column without column-family is a problem + session.columnFamily(null); + session.column("col"); + try { + session.verifyBuildable(); + Assert.fail("Should have thrown an exception"); + } catch (IllegalStateException e) { + } + session.columnFamily("family"); + try { + session.verifyBuildable(); + } catch (IllegalStateException e) { + Assert.fail("Should 
have thrown an exception"); + } + } + + @Test + public void testZapAuthorizationState() { + // Assert.fail("Not yet implemented"); + } + + @Test + public void testIsProvided() { + AuthorizationSession session = new AuthorizationSession(null); + Assert.assertFalse(session.isProvided(null)); + Assert.assertFalse(session.isProvided("")); + Assert.assertTrue(session.isProvided(" ")); + Assert.assertTrue(session.isProvided("xtq")); + } + + @Test + public void testBuildRequest() { + // Assert.fail("Not yet implemented"); + } + + @Test + public void testAuthorize() { + RangerHBasePlugin plugin = new RangerHBasePlugin("hbase"); + + User user = mock(User.class); + when(user.getShortName()).thenReturn("user1"); + when(user.getGroupNames()).thenReturn(new String[] {"users"}); + AuthorizationSession session = new AuthorizationSession(plugin); + session.access("read") + .user(user) + .table(":meta:") + .buildRequest() + .authorize(); + } + + @Test + public void testPublishResults() { + // Assert.fail("Not yet implemented"); + } + + @Test + public void testIsAuthorized() { + // Assert.fail("Not yet implemented"); + } + + @Test + public void testGetDenialReason() { + // Assert.fail("Not yet implemented"); + } + + @Test + public void testGetResourceType() { + // Assert.fail("Not yet implemented"); + } + + @Test + public void testRequestToString() { + // Assert.fail("Not yet implemented"); + } + + @Test + public void testAudit() { + //Assert.fail("Not yet implemented"); + } + + @Test + public void testGetPrintableValue() { + // Assert.fail("Not yet implemented"); + } + + @Test + public void testBuildAccessDeniedMessage() { + // Assert.fail("Not yet implemented"); + } + + @Test + public void testBuildAccessDeniedMessageString() { + // Assert.fail("Not yet implemented"); + } + + @Test + public void testKnownPatternAllowedNotAudited() { + // Assert.fail("Not yet implemented"); + } + + @Test + public void testKnownPatternDisallowedNotAudited() { + // Assert.fail("Not yet implemented"); 
+ } + + @Test + public void testAuditHandler() { + // Assert.fail("Not yet implemented"); + } + + @Test + public void testBuildResult() { + // Assert.fail("Not yet implemented"); + } } diff --git a/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/ColumnIteratorTest.java b/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/ColumnIteratorTest.java index 917062b00a..b9943afb37 100644 --- a/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/ColumnIteratorTest.java +++ b/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/ColumnIteratorTest.java @@ -18,126 +18,123 @@ */ package org.apache.ranger.authorization.hbase; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import java.util.Iterator; -import java.util.List; -import java.util.Set; - import org.apache.hadoop.hbase.Cell; import org.junit.Assert; import org.junit.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import java.util.Iterator; +import java.util.List; +import java.util.Set; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + public class ColumnIteratorTest { + @Test + public void test_firewalling() { + // passing null collection + ColumnIterator iterator = new ColumnIterator(null); + Assert.assertFalse(iterator.hasNext()); + } + + @SuppressWarnings("unchecked") + @Test + public void test_setOfBytes() { + /* + * It is pointless to test the functionality of base iterator! What we want to Assert.assert is that ColumnIterator delegates to the real iterators appropriately. + */ + Iterator iterator = mock(Iterator.class); + // We want to make sure ColumnIterator will return exactly what the real iterator gives it. Let's us doctor mock iteracor to return items in a particular order. 
+ final String[] values = new String[] {"a", "b", "c"}; + when(iterator.next()).thenAnswer(new Answer() { + // return all the items of the values array in order as byte[]. After which return null. + int index; + @Override + public byte[] answer(InvocationOnMock invocation) throws Throwable { + if (index < values.length) { + return values[index++].getBytes(); // we need post increment + } else { + return null; + } + } + }); + + // We want hasNext() to return false after as many times as values were stuffed into it. + when(iterator.hasNext()).thenAnswer(new Answer() { + int i; + @Override + public Boolean answer(InvocationOnMock invocation) throws Throwable { + return i++ < values.length; // we want post increment + } + }); - @Test - public void test_firewalling() { - // passing null collection - ColumnIterator iterator = new ColumnIterator(null); - Assert.assertFalse(iterator.hasNext()); - } - - @SuppressWarnings("unchecked") - @Test - public void test_setOfBytes() { - /* - * It is pointless to test the functionality of base iterator! What we want to Assert.assert is that ColumnIterator delegates to the real iterators appropriately. - */ - Iterator iterator = mock(Iterator.class); - // We want to make sure ColumnIterator will return exactly what the real iterator gives it. Let's us doctor mock iteracor to return items in a particular order. - final String[] values = new String[] {"a", "b", "c"}; - when(iterator.next()).thenAnswer(new Answer() { - // return all the items of the values array in order as byte[]. After which return null. - int index = 0; - @Override - public byte[] answer(InvocationOnMock invocation) throws Throwable { - if (index < values.length) { - return values[index++].getBytes(); // we need post increment - } else { - return null; - } - } - }); - - // We want hasNext() to return false after as many times as values were stuffed into it. 
- when(iterator.hasNext()).thenAnswer(new Answer() { - int i = 0; - @Override - public Boolean answer(InvocationOnMock invocation) throws Throwable { - return i++ < values.length; // we want post increment - } - }); - - // let's stuff this iterator into the collection that we would pass to the ColumnIterator - Set collection = mock(Set.class); - when(collection.iterator()).thenReturn(iterator); - ColumnIterator columnIterator = new ColumnIterator(collection); - int i = 0; - while (columnIterator.hasNext()) { - String value = columnIterator.next(); - Assert.assertEquals(values[i++], value); - } - // We should get back exactly as many items as were in the real iterator, no more no less - Assert.assertEquals(3, i); + // let's stuff this iterator into the collection that we would pass to the ColumnIterator + Set collection = mock(Set.class); + when(collection.iterator()).thenReturn(iterator); + ColumnIterator columnIterator = new ColumnIterator(collection); + int i = 0; + while (columnIterator.hasNext()) { + String value = columnIterator.next(); + Assert.assertEquals(values[i++], value); + } + // We should get back exactly as many items as were in the real iterator, no more no less + Assert.assertEquals(3, i); - // this should be called only once! - verify(collection, times(1)).iterator(); - // verify next() was called on the iterator exactly 3 times - verify(iterator, times(3)).next(); - - } + // this should be called only once! + verify(collection, times(1)).iterator(); + // verify next() was called on the iterator exactly 3 times + verify(iterator, times(3)).next(); + } - @SuppressWarnings("unchecked") - @Test - public void test_ListOfCell() { - /* - * We are not interested in validating the behavior of the real iterator. Instead just the behavior specific to the column iterator. 
- */ - final String[] qualifiers = new String[] {"a", "b", "c"}; - Iterator iterator = mock(Iterator.class); - // Have the iterator return true as many times as the size of keys array - when(iterator.hasNext()).thenAnswer(new Answer() { - int i = 0; - @Override - public Boolean answer(InvocationOnMock invocation) throws Throwable { - return i++ < qualifiers.length; - } - }); - // have the iterator return a Cell composed of the key and value arrays - when(iterator.next()).thenAnswer(new Answer() { - int i = 0; - @Override - public Cell answer(InvocationOnMock invocation) - throws Throwable { - Cell cell = mock(Cell.class); - when(cell.getQualifierOffset()).thenReturn(0); - when(cell.getQualifierLength()).thenReturn(1); - when(cell.getQualifierArray()).thenReturn(qualifiers[i++].getBytes()); - return cell; - } - }); - // stuff it into the collection - List list = mock(List.class); - when(list.iterator()).thenReturn(iterator); - // now let's check the behavior - ColumnIterator columnIterator = new ColumnIterator(list); - int i = 0; - while (columnIterator.hasNext()) { - String value = columnIterator.next(); - Assert.assertEquals(qualifiers[i++], value); - } - // We should get back exactly as many items as were in the real iterator, no more no less - Assert.assertEquals(3, i); + @SuppressWarnings("unchecked") + @Test + public void test_ListOfCell() { + /* + * We are not interested in validating the behavior of the real iterator. Instead just the behavior specific to the column iterator. 
+ */ + final String[] qualifiers = new String[] {"a", "b", "c"}; + Iterator iterator = mock(Iterator.class); + // Have the iterator return true as many times as the size of keys array + when(iterator.hasNext()).thenAnswer(new Answer() { + int i; + @Override + public Boolean answer(InvocationOnMock invocation) throws Throwable { + return i++ < qualifiers.length; + } + }); + // have the iterator return a Cell composed of the key and value arrays + when(iterator.next()).thenAnswer(new Answer() { + int i; + @Override + public Cell answer(InvocationOnMock invocation) + throws Throwable { + Cell cell = mock(Cell.class); + when(cell.getQualifierOffset()).thenReturn(0); + when(cell.getQualifierLength()).thenReturn(1); + when(cell.getQualifierArray()).thenReturn(qualifiers[i++].getBytes()); + return cell; + } + }); + // stuff it into the collection + List list = mock(List.class); + when(list.iterator()).thenReturn(iterator); + // now let's check the behavior + ColumnIterator columnIterator = new ColumnIterator(list); + int i = 0; + while (columnIterator.hasNext()) { + String value = columnIterator.next(); + Assert.assertEquals(qualifiers[i++], value); + } + // We should get back exactly as many items as were in the real iterator, no more no less + Assert.assertEquals(3, i); - // this should be called only once! - verify(list, times(1)).iterator(); - // verify next() was called on the iterator exactly 3 times - verify(iterator, times(3)).next(); - - } + // this should be called only once! 
+ verify(list, times(1)).iterator(); + // verify next() was called on the iterator exactly 3 times + verify(iterator, times(3)).next(); + } } diff --git a/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/HBaseRangerAuthorizationTest.java b/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/HBaseRangerAuthorizationTest.java index d973079c7b..0d05e0f234 100644 --- a/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/HBaseRangerAuthorizationTest.java +++ b/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/HBaseRangerAuthorizationTest.java @@ -17,13 +17,6 @@ package org.apache.ranger.authorization.hbase; -import java.io.IOException; -import java.net.ServerSocket; -import java.security.PrivilegedExceptionAction; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - import com.google.protobuf.ByteString; import com.google.protobuf.RpcCallback; import com.google.protobuf.RpcController; @@ -33,10 +26,26 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.*; +import org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.ResultScanner; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.SnapshotDescription; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.client.TableDescriptor; +import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos; -import org.apache.hadoop.hbase.security.access.*; +import org.apache.hadoop.hbase.security.access.AccessControlUtil; +import org.apache.hadoop.hbase.security.access.NamespacePermission; +import org.apache.hadoop.hbase.security.access.Permission; +import org.apache.hadoop.hbase.security.access.UserPermission; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.security.UserGroupInformation; import org.junit.Assert; @@ -44,37 +53,42 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.net.ServerSocket; +import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + /** * A custom RangerAdminClient is plugged into Ranger in turn, which loads security policies from a local file. These policies were * generated in the Ranger Admin UI for a service called "HBaseTest": - * + *

* a) The "logged in" user can do anything * b) The IT group can read and write to the "temp" table, but only the "colfam1" column family. * c) The QA group can read and write to tables in "test_namespace" namespace. * d) The IT group can read and write to the "temp6" table's "colfam1" column family and "col1" column only. * e) The IT group for "temp7" table and "temp8" table can read "colfam1" column family but has denied for "col1" column in that column family * f) The IT2 group for "temp7" table can read "colfam1" column family and does not have any denied columns in the column family - * + *

* In addition we have some TAG based policies created in Atlas and synced into Ranger: - * + *

* a) The tag "HbaseTableTag" is associated with "create" permission to the "dev" group to the "temp3" table * b) The tag "HbaseColFamTag" is associated with "read" permission to the "dev" group to the "colfam1" column family of the "temp3" table. * c) The tag "HbaseColTag" is associated with "write" permission to the "dev" group to the "col1" column of the "colfam1" column family of * the "temp3" table. - * + *

* Policies available from admin via: - * + *

* http://localhost:6080/service/plugins/policies/download/cl1_hbase */ @org.junit.Ignore public class HBaseRangerAuthorizationTest { - private static final Logger LOG = LoggerFactory.getLogger(HBaseRangerAuthorizationTest.class.getName()); - private static int port; + private static int port; private static HBaseTestingUtility utility; - @org.junit.BeforeClass public static void setup() throws Exception { port = getFreePort(); @@ -92,9 +106,9 @@ public static void setup() throws Exception { // Enable authorization utility.getConfiguration().set("hbase.security.authorization", "true"); utility.getConfiguration().set("hbase.coprocessor.master.classes", - "org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor"); + "org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor"); utility.getConfiguration().set("hbase.coprocessor.region.classes", - "org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor"); + "org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor"); utility.startMiniCluster(); @@ -105,8 +119,8 @@ public static void setup() throws Exception { conf.set("zookeeper.znode.parent", "/hbase-unsecure"); // Create a table - Connection conn = ConnectionFactory.createConnection(conf); - Admin admin = conn.getAdmin(); + Connection conn = ConnectionFactory.createConnection(conf); + Admin admin = conn.getAdmin(); // Create a table if (!admin.tableExists(TableName.valueOf("default:temp"))) { @@ -135,7 +149,7 @@ public static void setup() throws Exception { // Create a table if (!admin.tableExists(TableName.valueOf("test_namespace", "temp"))) { - TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf("test_namespace", "temp")); + TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf("test_namespace", "temp")); // Adding column families to table descriptor 
tableDescriptor.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder("colfam1".getBytes()).build()); @@ -145,19 +159,19 @@ public static void setup() throws Exception { } if (!admin.tableExists(TableName.valueOf("default:temp5"))) { - TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf("default:temp5")); + TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf("default:temp5")); // Adding column families to table descriptor tableDescriptor.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder("colfam1".getBytes()).build()); admin.createTable(tableDescriptor.build()); } if (!admin.tableExists(TableName.valueOf("default:temp6"))) { - TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf("default:temp6")); + TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf("default:temp6")); // Adding column families to table descriptor tableDescriptor.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder("colfam1".getBytes()).build()); admin.createTable(tableDescriptor.build()); } if (!admin.tableExists(TableName.valueOf("default:temp7"))) { - TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf("default:temp7")); + TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf("default:temp7")); // Adding column families to table descriptor tableDescriptor.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder("colfam1".getBytes()).build()); admin.createTable(tableDescriptor.build()); @@ -181,7 +195,7 @@ public static void setup() throws Exception { table.put(put); if (!admin.tableExists(TableName.valueOf("default:temp8"))) { - TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf("default:temp8")); + TableDescriptorBuilder tableDescriptor = 
TableDescriptorBuilder.newBuilder(TableName.valueOf("default:temp8")); // Adding column families to table descriptor tableDescriptor.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder("colfam1".getBytes()).build()); admin.createTable(tableDescriptor.build()); @@ -213,8 +227,8 @@ public void testReadTablesAsProcessOwner() throws Exception { conf.set("hbase.zookeeper.property.clientPort", "" + port); conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - Connection conn = ConnectionFactory.createConnection(conf); - Admin admin = conn.getAdmin(); + Connection conn = ConnectionFactory.createConnection(conf); + Admin admin = conn.getAdmin(); List tableDescriptors = admin.listTableDescriptors(); for (TableDescriptor desc : tableDescriptors) { @@ -238,10 +252,10 @@ public void testReadTablesAsGroupIT() throws Exception { UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Admin admin = conn.getAdmin(); + Connection conn = ConnectionFactory.createConnection(conf); + Admin admin = conn.getAdmin(); - List tableDescriptors = admin.listTableDescriptors(); + List tableDescriptors = admin.listTableDescriptors(); for (TableDescriptor desc : tableDescriptors) { LOG.info("Found table:[" + desc.getTableName().getNameAsString() + "]"); } @@ -260,8 +274,8 @@ public void testCreateAndDropTables() throws Exception { conf.set("hbase.zookeeper.property.clientPort", "" + port); conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - Connection conn = ConnectionFactory.createConnection(conf); - Admin admin = conn.getAdmin(); + Connection conn = ConnectionFactory.createConnection(conf); + Admin admin = conn.getAdmin(); // Create a new table as process owner TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf("temp2")); @@ -280,8 +294,8 @@ public void 
testCreateAndDropTables() throws Exception { UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Admin admin = conn.getAdmin(); + Connection conn = ConnectionFactory.createConnection(conf); + Admin admin = conn.getAdmin(); try { admin.disableTable(TableName.valueOf("temp2")); @@ -297,7 +311,7 @@ public Void run() throws Exception { }); // Now disable and delete as process owner - conn = ConnectionFactory.createConnection(conf); + conn = ConnectionFactory.createConnection(conf); admin = conn.getAdmin(); admin.disableTable(TableName.valueOf("temp2")); admin.deleteTable(TableName.valueOf("temp2")); @@ -312,12 +326,12 @@ public void testReadRowAsProcessOwner() throws Exception { conf.set("hbase.zookeeper.property.clientPort", "" + port); conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp")); // Read a row - Get get = new Get(Bytes.toBytes("row1")); - Result result = table.get(get); + Get get = new Get(Bytes.toBytes("row1")); + Result result = table.get(get); byte[] valResult = result.getValue(Bytes.toBytes("colfam1"), Bytes.toBytes("col1")); Assert.assertTrue(Arrays.equals(valResult, Bytes.toBytes("val1"))); @@ -336,12 +350,12 @@ public void testReadRowAsGroupIT() throws Exception { UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table 
= conn.getTable(TableName.valueOf("temp")); // Read a row - Get get = new Get(Bytes.toBytes("row1")); - Result result = table.get(get); + Get get = new Get(Bytes.toBytes("row1")); + Result result = table.get(get); byte[] valResult = result.getValue(Bytes.toBytes("colfam1"), Bytes.toBytes("col1")); Assert.assertTrue(Arrays.equals(valResult, Bytes.toBytes("val1"))); @@ -364,13 +378,13 @@ public void testReadRowAsGroupPublic() throws Exception { UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"public"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp")); // Read a row try { - Get get = new Get(Bytes.toBytes("row1")); - Result result = table.get(get); + Get get = new Get(Bytes.toBytes("row1")); + Result result = table.get(get); byte[] valResult = result.getValue(Bytes.toBytes("colfam1"), Bytes.toBytes("col1")); Assert.assertNull("Failure expected on an unauthorized user", valResult); } catch (IOException ex) { @@ -390,12 +404,12 @@ public void testReadRowFromColFam2AsProcessOwner() throws Exception { conf.set("hbase.zookeeper.property.clientPort", "" + port); conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp")); // Read a row - Get get = new Get(Bytes.toBytes("row1")); - Result result = table.get(get); + Get get = new Get(Bytes.toBytes("row1")); + Result result = table.get(get); byte[] valResult = result.getValue(Bytes.toBytes("colfam2"), Bytes.toBytes("col1")); Assert.assertTrue(Arrays.equals(valResult, Bytes.toBytes("val2"))); 
@@ -414,12 +428,12 @@ public void testReadRowFromColFam2AsGroupIT() throws Exception { UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp")); // Read a row - Get get = new Get(Bytes.toBytes("row1")); - Result result = table.get(get); + Get get = new Get(Bytes.toBytes("row1")); + Result result = table.get(get); byte[] valResult = result.getValue(Bytes.toBytes("colfam2"), Bytes.toBytes("col1")); Assert.assertNull(valResult); @@ -436,8 +450,8 @@ public void testWriteRowAsProcessOwner() throws Exception { conf.set("hbase.zookeeper.property.clientPort", "" + port); conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp")); // Add a new row Put put = new Put(Bytes.toBytes("row2")); @@ -459,8 +473,8 @@ public void testWriteRowAsGroupIT() throws Exception { UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp")); // Add a new row Put put = new Put(Bytes.toBytes("row3")); @@ -485,8 +499,8 @@ public void testWriteRowAsGroupPublic() throws Exception { UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new 
String[] {"public"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp")); // Add a new row try { @@ -516,8 +530,8 @@ public void testWriteRowInColFam2AsGroupIT() throws Exception { UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp")); // Add a new row try { @@ -542,8 +556,8 @@ public void testReadRowInAnotherTable() throws Exception { conf.set("hbase.zookeeper.property.clientPort", "" + port); conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - Connection conn = ConnectionFactory.createConnection(conf); - Admin admin = conn.getAdmin(); + Connection conn = ConnectionFactory.createConnection(conf); + Admin admin = conn.getAdmin(); // Create a new table as process owner TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf("temp4")); @@ -561,8 +575,8 @@ public void testReadRowInAnotherTable() throws Exception { table.put(put); // Read a row - Get get = new Get(Bytes.toBytes("row1")); - Result result = table.get(get); + Get get = new Get(Bytes.toBytes("row1")); + Result result = table.get(get); byte[] valResult = result.getValue(Bytes.toBytes("colfam2"), Bytes.toBytes("col1")); Assert.assertNull(valResult); @@ -574,13 +588,13 @@ public void testReadRowInAnotherTable() throws Exception { UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); ugi.doAs(new 
PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp4")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp4")); // Read a row try { - Get get = new Get(Bytes.toBytes("row1")); - Result result = table.get(get); + Get get = new Get(Bytes.toBytes("row1")); + Result result = table.get(get); byte[] valResult = result.getValue(Bytes.toBytes("colfam2"), Bytes.toBytes("col1")); Assert.assertNull("Failure expected on an unauthorized user", valResult); } catch (IOException ex) { @@ -593,7 +607,7 @@ public Void run() throws Exception { }); // Now disable and delete as process owner - conn = ConnectionFactory.createConnection(conf); + conn = ConnectionFactory.createConnection(conf); admin = conn.getAdmin(); admin.disableTable(TableName.valueOf("temp4")); admin.deleteTable(TableName.valueOf("temp4")); @@ -608,8 +622,8 @@ public void testDeleteRowAsProcessOwner() throws Exception { conf.set("hbase.zookeeper.property.clientPort", "" + port); conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp")); // Add a new row Put put = new Put(Bytes.toBytes("row4")); @@ -630,8 +644,8 @@ public void testDeleteRowAsGroupIT() throws Exception { conf.set("hbase.zookeeper.property.clientPort", "" + port); conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp")); // Add a new row (as process owner) Put put = new 
Put(Bytes.toBytes("row5")); @@ -643,8 +657,8 @@ public void testDeleteRowAsGroupIT() throws Exception { UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp")); try { // Delete the new row @@ -674,8 +688,8 @@ public void testCloneSnapshotAsGroupQA() throws Exception { conf.set("hbase.zookeeper.property.clientPort", "" + port); conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - Connection conn = ConnectionFactory.createConnection(conf); - Admin admin = conn.getAdmin(); + Connection conn = ConnectionFactory.createConnection(conf); + Admin admin = conn.getAdmin(); List snapshots = admin.listSnapshots("test_snapshot"); if (CollectionUtils.isNotEmpty(snapshots)) { @@ -683,14 +697,14 @@ public void testCloneSnapshotAsGroupQA() throws Exception { } String user = "QA"; - UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[]{"QA"}); + UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"QA"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Admin admin = conn.getAdmin(); - Table table = conn.getTable(TableName.valueOf("test_namespace", "temp")); - TableName tableName = table.getName(); + Connection conn = ConnectionFactory.createConnection(conf); + Admin admin = conn.getAdmin(); + Table table = conn.getTable(TableName.valueOf("test_namespace", "temp")); + TableName tableName = table.getName(); admin.disableTable(tableName); @@ -725,9 +739,9 @@ public void testCloneSnapshotAsNonQAGroup() throws Exception { 
conf.set("hbase.zookeeper.property.clientPort", "" + port); conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - Connection conn = ConnectionFactory.createConnection(conf); - Admin admin = conn.getAdmin(); - TableName tableName = conn.getTable(TableName.valueOf("test_namespace", "temp")).getName(); + Connection conn = ConnectionFactory.createConnection(conf); + Admin admin = conn.getAdmin(); + TableName tableName = conn.getTable(TableName.valueOf("test_namespace", "temp")).getName(); admin.disableTable(tableName); @@ -745,8 +759,8 @@ public void testCloneSnapshotAsNonQAGroup() throws Exception { ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Admin admin = conn.getAdmin(); + Connection conn = ConnectionFactory.createConnection(conf); + Admin admin = conn.getAdmin(); try { TableName clone = TableName.valueOf("test_namespace", "temp_cloned_public"); @@ -757,7 +771,7 @@ public Void run() throws Exception { // Clone snapshot admin.cloneSnapshot("test_snapshot", clone); Assert.fail("Failure expected on an unauthorized group public"); - } catch(Exception e) { + } catch (Exception e) { // Expected } conn.close(); @@ -792,8 +806,8 @@ public void testTagBasedTablePolicy() throws Exception { UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Admin admin = conn.getAdmin(); + Connection conn = ConnectionFactory.createConnection(conf); + Admin admin = conn.getAdmin(); try { admin.createTable(tableDescriptor.build()); @@ -811,8 +825,8 @@ public Void run() throws Exception { ugi = UserGroupInformation.createUserForTesting("dev", new String[] {"dev"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - 
Admin admin = conn.getAdmin(); + Connection conn = ConnectionFactory.createConnection(conf); + Admin admin = conn.getAdmin(); admin.createTable(tableDescriptor.build()); @@ -822,8 +836,8 @@ public Void run() throws Exception { }); // Drop the table - Connection conn = ConnectionFactory.createConnection(conf); - Admin admin = conn.getAdmin(); + Connection conn = ConnectionFactory.createConnection(conf); + Admin admin = conn.getAdmin(); admin.disableTable(TableName.valueOf("temp3")); admin.deleteTable(TableName.valueOf("temp3")); @@ -845,8 +859,8 @@ public void testTagBasedColumnFamilyPolicy() throws Exception { tableDescriptor.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder("colfam1".getBytes()).build()); tableDescriptor.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder("colfam2".getBytes()).build()); - Connection conn = ConnectionFactory.createConnection(conf); - Admin admin = conn.getAdmin(); + Connection conn = ConnectionFactory.createConnection(conf); + Admin admin = conn.getAdmin(); admin.createTable(tableDescriptor.build()); @@ -862,22 +876,22 @@ public void testTagBasedColumnFamilyPolicy() throws Exception { conn.close(); - String user = "dev"; - UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"dev"}); + String user = "dev"; + UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"dev"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp3")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp3")); // Try to read the "colfam1" of the "temp3" table as the "dev" group - this should work - Get get = new Get(Bytes.toBytes("row1")); - Result result = table.get(get); + Get get = new Get(Bytes.toBytes("row1")); + Result result = table.get(get); byte[] valResult = 
result.getValue(Bytes.toBytes("colfam1"), Bytes.toBytes("col1")); Assert.assertTrue(Arrays.equals(valResult, Bytes.toBytes("val1"))); // Now try to read the "colfam2" column family of the temp3 table - this should fail - get = new Get(Bytes.toBytes("row1")); - result = table.get(get); + get = new Get(Bytes.toBytes("row1")); + result = table.get(get); valResult = result.getValue(Bytes.toBytes("colfam2"), Bytes.toBytes("col1")); Assert.assertNull(valResult); @@ -890,12 +904,12 @@ public Void run() throws Exception { ugi = UserGroupInformation.createUserForTesting("IT", new String[] {"IT"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp3")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp3")); Get get = new Get(Bytes.toBytes("row1")); try { - Result result = table.get(get); + Result result = table.get(get); byte[] valResult = result.getValue(Bytes.toBytes("colfam2"), Bytes.toBytes("col1")); Assert.assertNull("Failure expected on an unauthorized user", valResult); } catch (IOException ex) { @@ -907,7 +921,7 @@ public Void run() throws Exception { }); // Drop the table - conn = ConnectionFactory.createConnection(conf); + conn = ConnectionFactory.createConnection(conf); admin = conn.getAdmin(); admin.disableTable(TableName.valueOf("temp3")); @@ -930,8 +944,8 @@ public void testTagBasedColumnPolicy() throws Exception { tableDescriptor.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder("colfam1".getBytes()).build()); tableDescriptor.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder("colfam2".getBytes()).build()); - Connection conn = ConnectionFactory.createConnection(conf); - Admin admin = conn.getAdmin(); + Connection conn = ConnectionFactory.createConnection(conf); + Admin admin = conn.getAdmin(); admin.createTable(tableDescriptor.build()); @@ 
-947,12 +961,12 @@ public void testTagBasedColumnPolicy() throws Exception { conn.close(); - String user = "dev"; - UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"dev"}); + String user = "dev"; + UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"dev"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp3")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp3")); // Try to write something to the "col1" column of the "colfam1" of the "temp3" table as the "dev" group // - this should work @@ -979,8 +993,8 @@ public Void run() throws Exception { ugi = UserGroupInformation.createUserForTesting("IT", new String[] {"IT"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp3")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp3")); // Try to write something to the "col1" column of the "colfam1" of the "temp3" table as the "IT" group // - this should fail @@ -999,7 +1013,7 @@ public Void run() throws Exception { }); // Drop the table - conn = ConnectionFactory.createConnection(conf); + conn = ConnectionFactory.createConnection(conf); admin = conn.getAdmin(); admin.disableTable(TableName.valueOf("temp3")); @@ -1014,16 +1028,13 @@ public void testGetUserPermission() throws Throwable { conf.set("hbase.zookeeper.quorum", "localhost"); conf.set("hbase.zookeeper.property.clientPort", "" + port); conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - String user = "IT"; - UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] { "IT" }); - 
if(!utility.getHBaseCluster().isDistributedCluster()) { - RangerAuthorizationCoprocessor authorizationCoprocessor = - utility.getHBaseCluster().getMaster().getMasterCoprocessorHost(). - findCoprocessor(RangerAuthorizationCoprocessor.class); + String user = "IT"; + UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); + if (!utility.getHBaseCluster().isDistributedCluster()) { + RangerAuthorizationCoprocessor authorizationCoprocessor = utility.getHBaseCluster().getMaster().getMasterCoprocessorHost().findCoprocessor(RangerAuthorizationCoprocessor.class); RpcController rpcController = new RpcController() { @Override public void reset() { - } @Override @@ -1038,12 +1049,10 @@ public String errorText() { @Override public void startCancel() { - } @Override public void setFailed(String reason) { - } @Override @@ -1053,40 +1062,38 @@ public boolean isCanceled() { @Override public void notifyOnCancel(RpcCallback callback) { - } }; ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { AccessControlProtos.GetUserPermissionsRequest requestTablePerms = getTableUserPermissions("temp"); authorizationCoprocessor.getUserPermissions(rpcController, requestTablePerms, - new RpcCallback() { - @Override - public void run(AccessControlProtos.GetUserPermissionsResponse message) { - if (message != null) { - for (AccessControlProtos.UserPermission perm : message - .getUserPermissionList()) { - AccessControlUtil.toUserPermission(perm); - Assert.fail(); + new RpcCallback() { + @Override + public void run(AccessControlProtos.GetUserPermissionsResponse message) { + if (message != null) { + for (AccessControlProtos.UserPermission perm : message + .getUserPermissionList()) { + AccessControlUtil.toUserPermission(perm); + Assert.fail(); + } } } - } - }); + }); return null; } - }); user = "QA"; - ugi = UserGroupInformation.createUserForTesting(user, new String[] { "QA" }); + ugi = UserGroupInformation.createUserForTesting(user, new 
String[] {"QA"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - final List userPermissions = new ArrayList(); + final List userPermissions = new ArrayList(); AccessControlProtos.GetUserPermissionsRequest requestTablePerms = getNamespaceUserPermissions("test_namespace"); getUserPermissions(userPermissions, requestTablePerms, authorizationCoprocessor, rpcController); boolean found = false; for (UserPermission namespacePermission : userPermissions) { - if (namespacePermission.getPermission() instanceof NamespacePermission ) { + if (namespacePermission.getPermission() instanceof NamespacePermission) { found = Bytes.equals(namespacePermission.getUser().getBytes(), Bytes.toBytes("@QA")); if (found) { break; @@ -1098,15 +1105,15 @@ public Void run() throws Exception { } }); - final List userPermissions = new ArrayList<>(); + final List userPermissions = new ArrayList<>(); AccessControlProtos.GetUserPermissionsRequest requestTablePerms = getTableUserPermissions("temp5"); getUserPermissions(userPermissions, requestTablePerms, authorizationCoprocessor, rpcController); - Permission p = Permission.newBuilder(TableName.valueOf("temp5")). 
- withActions(Permission.Action.READ, Permission.Action.WRITE, Permission.Action.EXEC).build(); + Permission p = Permission.newBuilder(TableName.valueOf("temp5")).withActions(Permission.Action.READ, Permission.Action.WRITE, Permission.Action.EXEC).build(); UserPermission userPermission = new UserPermission("@IT", p); Assert.assertTrue("@IT permission should be there", userPermissions.contains(userPermission)); } } + @Test public void testWriteRowAsGroupIT2() throws Exception { //check access to table temp6 to test the policy TempPolicyForOptimizedColAuth with non * column @@ -1114,12 +1121,12 @@ public void testWriteRowAsGroupIT2() throws Exception { conf.set("hbase.zookeeper.quorum", "localhost"); conf.set("hbase.zookeeper.property.clientPort", "" + port); conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - String user = "IT"; - UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); + String user = "IT"; + UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp6")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp6")); // Add a new row Put put = new Put(Bytes.toBytes("row3")); put.addColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("col1"), Bytes.toBytes("val2")); @@ -1129,6 +1136,7 @@ public Void run() throws Exception { } }); } + @Test public void testWriteRowAsGroupIT2Optimized() throws Exception { // No behavior change from testWriteRowAsGroupIT2() @@ -1137,12 +1145,12 @@ public void testWriteRowAsGroupIT2Optimized() throws Exception { conf.set("hbase.zookeeper.quorum", "localhost"); conf.set("hbase.zookeeper.property.clientPort", "" + port); conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - String user = "IT"; 
- UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); + String user = "IT"; + UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp6")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp6")); // Add a new row Put put = new Put(Bytes.toBytes("row3")); put.addColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("col1"), Bytes.toBytes("val2")); @@ -1153,26 +1161,26 @@ public Void run() throws Exception { }); enableColumnAuthOptimization(false); } + @Test public void testWriteRowDeniedAsGroupIT2() throws Exception { final Configuration conf = HBaseConfiguration.create(); conf.set("hbase.zookeeper.quorum", "localhost"); conf.set("hbase.zookeeper.property.clientPort", "" + port); conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - String user = "IT"; - UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); + String user = "IT"; + UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp7")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp7")); try { // Add a new row Put put = new Put(Bytes.toBytes("row3")); put.addColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("col1"), Bytes.toBytes("val2")); table.put(put); Assert.fail("Failure expected on an unauthorized user"); - } - catch (IOException ex) { + } catch (IOException ex) { // expected } conn.close(); @@ -1180,6 +1188,7 @@ public Void run() 
throws Exception { } }); } + @Test public void testWriteRowDeniedAsGroupIT2Optimized() throws Exception { // no behavior change from testWriteRowDeniedAsGroupIT2 @@ -1188,20 +1197,19 @@ public void testWriteRowDeniedAsGroupIT2Optimized() throws Exception { conf.set("hbase.zookeeper.quorum", "localhost"); conf.set("hbase.zookeeper.property.clientPort", "" + port); conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - String user = "IT"; - UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); + String user = "IT"; + UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp7")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp7")); try { // Add a new row Put put = new Put(Bytes.toBytes("row3")); put.addColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("col1"), Bytes.toBytes("val2")); table.put(put); Assert.fail("Failure expected on an unauthorized user"); - } - catch (IOException ex) { + } catch (IOException ex) { // expected } conn.close(); @@ -1210,30 +1218,30 @@ public Void run() throws Exception { }); enableColumnAuthOptimization(false); } + @Test public void testScanTableAsGroupIT() throws Exception { final Configuration conf = HBaseConfiguration.create(); conf.set("hbase.zookeeper.quorum", "localhost"); conf.set("hbase.zookeeper.property.clientPort", "" + port); conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - String user = "IT"; - UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); + String user = "IT"; + UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws 
Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp8")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp8")); try { - Scan scan = new Scan(); - ResultScanner scanner = table.getScanner(scan); - int numRowsInResult = 0; - for (Result result = scanner.next(); result != null; result = scanner.next()){ + Scan scan = new Scan(); + ResultScanner scanner = table.getScanner(scan); + int numRowsInResult = 0; + for (Result result = scanner.next(); result != null; result = scanner.next()) { System.out.println("Found row : " + result); numRowsInResult += 1; } //while there are 2 rows in this table, one of the columns is explicitly denied so only one column should be in the result - Assert.assertEquals(1,numRowsInResult); - } - catch (IOException ex) { + Assert.assertEquals(1, numRowsInResult); + } catch (IOException ex) { // expected } conn.close(); @@ -1242,23 +1250,22 @@ public Void run() throws Exception { }); user = "IT2"; - ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT2"}); + ugi = UserGroupInformation.createUserForTesting(user, new String[] {"IT2"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp8")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp8")); try { - Scan scan = new Scan(); - ResultScanner scanner = table.getScanner(scan); - int numRowsInResult = 0; - for (Result result = scanner.next(); result != null; result = scanner.next()){ + Scan scan = new Scan(); + ResultScanner scanner = table.getScanner(scan); + int numRowsInResult = 0; + for (Result result = scanner.next(); result != null; result = scanner.next()) { System.out.println("Found row : " + result); numRowsInResult += 1; } 
//there are 2 rows in this table, group IT2 does not have any denied columns - Assert.assertEquals(2,numRowsInResult); - } - catch (IOException ex) { + Assert.assertEquals(2, numRowsInResult); + } catch (IOException ex) { // expected } conn.close(); @@ -1267,7 +1274,6 @@ public Void run() throws Exception { }); } - @Test public void testWriteRowAsGroupPublicOptimized() throws Exception { enableColumnAuthOptimization(true); // enable optimization @@ -1275,12 +1281,12 @@ public void testWriteRowAsGroupPublicOptimized() throws Exception { conf.set("hbase.zookeeper.quorum", "localhost"); conf.set("hbase.zookeeper.property.clientPort", "" + port); conf.set("zookeeper.znode.parent", "/hbase-unsecure"); - String user = "public"; - UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"public"}); + String user = "public"; + UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"public"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp")); // Add a new row try { Put put = new Put(Bytes.toBytes("row3")); @@ -1296,6 +1302,7 @@ public Void run() throws Exception { }); enableColumnAuthOptimization(false); // disable optimization after test case complete } + @Test public void testTagBasedColumnPolicyOptimized() throws Exception { // There should not be any behavior change from testTagBasedColumnPolicy() even after @@ -1314,8 +1321,8 @@ public void testTagBasedColumnPolicyOptimized() throws Exception { tableDescriptor.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder("colfam1".getBytes()).build()); tableDescriptor.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder("colfam2".getBytes()).build()); - Connection conn = 
ConnectionFactory.createConnection(conf); - Admin admin = conn.getAdmin(); + Connection conn = ConnectionFactory.createConnection(conf); + Admin admin = conn.getAdmin(); admin.createTable(tableDescriptor.build()); @@ -1331,12 +1338,12 @@ public void testTagBasedColumnPolicyOptimized() throws Exception { conn.close(); - String user = "dev"; - UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"dev"}); + String user = "dev"; + UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {"dev"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp3")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp3")); // Try to write something to the "col1" column of the "colfam1" of the "temp3" table as the "dev" group // - this should work @@ -1363,8 +1370,8 @@ public Void run() throws Exception { ugi = UserGroupInformation.createUserForTesting("IT", new String[] {"IT"}); ugi.doAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - Connection conn = ConnectionFactory.createConnection(conf); - Table table = conn.getTable(TableName.valueOf("temp3")); + Connection conn = ConnectionFactory.createConnection(conf); + Table table = conn.getTable(TableName.valueOf("temp3")); // Try to write something to the "col1" column of the "colfam1" of the "temp3" table as the "IT" group // - this should fail @@ -1382,7 +1389,7 @@ public Void run() throws Exception { }); // Drop the table - conn = ConnectionFactory.createConnection(conf); + conn = ConnectionFactory.createConnection(conf); admin = conn.getAdmin(); admin.disableTable(TableName.valueOf("temp3")); @@ -1392,10 +1399,8 @@ public Void run() throws Exception { enableColumnAuthOptimization(false); } - private static void 
enableColumnAuthOptimization(boolean enable){ - RangerAuthorizationCoprocessor authorizationCoprocessor = - utility.getHBaseCluster().getMaster().getMasterCoprocessorHost(). - findCoprocessor(RangerAuthorizationCoprocessor.class); + private static void enableColumnAuthOptimization(boolean enable) { + RangerAuthorizationCoprocessor authorizationCoprocessor = utility.getHBaseCluster().getMaster().getMasterCoprocessorHost().findCoprocessor(RangerAuthorizationCoprocessor.class); try { authorizationCoprocessor.setColumnAuthOptimizationEnabled(enable); } catch (Exception e) { @@ -1405,7 +1410,7 @@ private static void enableColumnAuthOptimization(boolean enable){ private AccessControlProtos.GetUserPermissionsRequest getNamespaceUserPermissions(String namespace) { AccessControlProtos.GetUserPermissionsRequest.Builder builderTablePerms = AccessControlProtos.GetUserPermissionsRequest - .newBuilder(); + .newBuilder(); builderTablePerms.setNamespaceName(ByteString.copyFromUtf8(namespace)); builderTablePerms.setType(AccessControlProtos.Permission.Type.Namespace); return builderTablePerms.build(); @@ -1413,7 +1418,7 @@ private AccessControlProtos.GetUserPermissionsRequest getNamespaceUserPermission private AccessControlProtos.GetUserPermissionsRequest getTableUserPermissions(String tableName) { AccessControlProtos.GetUserPermissionsRequest.Builder builderTablePerms = AccessControlProtos.GetUserPermissionsRequest - .newBuilder(); + .newBuilder(); builderTablePerms.setTableName(ProtobufUtil.toProtoTableName(TableName.valueOf(tableName))); builderTablePerms.setType(AccessControlProtos.Permission.Type.Table); return builderTablePerms.build(); @@ -1421,24 +1426,23 @@ private AccessControlProtos.GetUserPermissionsRequest getTableUserPermissions(St private void getUserPermissions(List userPermissions, AccessControlProtos.GetUserPermissionsRequest requestTablePerms, RangerAuthorizationCoprocessor authorizationCoprocessor, RpcController rpcController) { 
authorizationCoprocessor.getUserPermissions(rpcController, requestTablePerms, - new RpcCallback() { - @Override - public void run(AccessControlProtos.GetUserPermissionsResponse message) { - if (message != null) { - for (AccessControlProtos.UserPermission perm : message - .getUserPermissionList()) { - userPermissions.add(AccessControlUtil.toUserPermission(perm)); + new RpcCallback() { + @Override + public void run(AccessControlProtos.GetUserPermissionsResponse message) { + if (message != null) { + for (AccessControlProtos.UserPermission perm : message + .getUserPermissionList()) { + userPermissions.add(AccessControlUtil.toUserPermission(perm)); + } } } - } - }); + }); } private static int getFreePort() throws IOException { ServerSocket serverSocket = new ServerSocket(0); - int port = serverSocket.getLocalPort(); + int port = serverSocket.getLocalPort(); serverSocket.close(); return port; } - -} \ No newline at end of file +} diff --git a/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/HbaseAuthUtilsImplTest.java b/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/HbaseAuthUtilsImplTest.java index d14b83e5f5..d93c125550 100644 --- a/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/HbaseAuthUtilsImplTest.java +++ b/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/HbaseAuthUtilsImplTest.java @@ -18,17 +18,16 @@ */ package org.apache.ranger.authorization.hbase; -import static org.junit.Assert.assertTrue; - import org.junit.Test; -public class HbaseAuthUtilsImplTest { +import static org.junit.Assert.assertTrue; - @Test - public void testIsReadAccess() { - HbaseAuthUtilsImpl authUtils = new HbaseAuthUtilsImpl(); - assertTrue(authUtils.isReadAccess("read")); - assertTrue(authUtils.isWriteAccess("write")); - assertTrue(authUtils.isExecuteAccess("execute")); - } +public class HbaseAuthUtilsImplTest { + @Test + public void testIsReadAccess() { + HbaseAuthUtilsImpl authUtils = new HbaseAuthUtilsImpl(); + 
assertTrue(authUtils.isReadAccess("read")); + assertTrue(authUtils.isWriteAccess("write")); + assertTrue(authUtils.isExecuteAccess("execute")); + } } diff --git a/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/RangerAdminClientImpl.java b/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/RangerAdminClientImpl.java index b7b0442a4e..f2e4bb98d3 100644 --- a/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/RangerAdminClientImpl.java +++ b/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/RangerAdminClientImpl.java @@ -17,31 +17,30 @@ package org.apache.ranger.authorization.hbase; +import org.apache.ranger.admin.client.AbstractRangerAdminClient; +import org.apache.ranger.plugin.util.ServicePolicies; +import org.apache.ranger.plugin.util.ServiceTags; + import java.io.File; import java.nio.file.FileSystems; import java.nio.file.Files; import java.util.List; -import org.apache.ranger.admin.client.AbstractRangerAdminClient; -import org.apache.ranger.plugin.util.ServicePolicies; -import org.apache.ranger.plugin.util.ServiceTags; - /** * A test implementation of the RangerAdminClient interface that just reads policies in from a file and returns them */ public class RangerAdminClientImpl extends AbstractRangerAdminClient { - private final static String cacheFilename = "hbase-policies.json"; - private final static String tagFilename = "hbase-policies-tag.json"; + private static final String cacheFilename = "hbase-policies.json"; + private static final String tagFilename = "hbase-policies-tag.json"; public ServicePolicies getServicePoliciesIfUpdated(long lastKnownVersion, long lastActivationTimeInMillis) throws Exception { - String basedir = System.getProperty("basedir"); if (basedir == null) { basedir = new File(".").getCanonicalPath(); } - java.nio.file.Path cachePath = FileSystems.getDefault().getPath(basedir, "/target/test-classes/" + cacheFilename); - byte[] cacheBytes = Files.readAllBytes(cachePath); + 
java.nio.file.Path cachePath = FileSystems.getDefault().getPath(basedir, "/target/test-classes/" + cacheFilename); + byte[] cacheBytes = Files.readAllBytes(cachePath); return gson.fromJson(new String(cacheBytes), ServicePolicies.class); } @@ -52,8 +51,8 @@ public ServiceTags getServiceTagsIfUpdated(long lastKnownVersion, long lastActiv basedir = new File(".").getCanonicalPath(); } - java.nio.file.Path cachePath = FileSystems.getDefault().getPath(basedir, "/src/test/resources/" + tagFilename); - byte[] cacheBytes = Files.readAllBytes(cachePath); + java.nio.file.Path cachePath = FileSystems.getDefault().getPath(basedir, "/src/test/resources/" + tagFilename); + byte[] cacheBytes = Files.readAllBytes(cachePath); return gson.fromJson(new String(cacheBytes), ServiceTags.class); } @@ -61,6 +60,4 @@ public ServiceTags getServiceTagsIfUpdated(long lastKnownVersion, long lastActiv public List getTagTypes(String tagTypePattern) throws Exception { return null; } - - -} \ No newline at end of file +} diff --git a/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/RangerAuthorizationCoprocessorTest.java b/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/RangerAuthorizationCoprocessorTest.java index f982910ee0..d3c8dcd586 100644 --- a/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/RangerAuthorizationCoprocessorTest.java +++ b/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/RangerAuthorizationCoprocessorTest.java @@ -18,34 +18,33 @@ */ package org.apache.ranger.authorization.hbase; -import static org.junit.Assert.*; +import org.junit.Test; import java.util.Map; import java.util.Set; -import org.junit.Test; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; public class RangerAuthorizationCoprocessorTest { + @Test + public void test_canBeNewed() { + RangerAuthorizationCoprocessor coprocessor = new RangerAuthorizationCoprocessor(); + assertNotNull(coprocessor); + } - @Test - public 
void test_canBeNewed() { - RangerAuthorizationCoprocessor _coprocessor = new RangerAuthorizationCoprocessor(); - assertNotNull(_coprocessor); - } - - @Test - public void test_getColumnFamilies_happypath() { - - } + @Test + public void test_getColumnFamilies_happypath() { + } - @Test - public void test_getColumnFamilies_firewalling() { - // passing null collection should return back an empty map - RangerAuthorizationCoprocessor _coprocessor = new RangerAuthorizationCoprocessor(); - Map> result = _coprocessor.getColumnFamilies(null); - assertNotNull(result); - assertTrue(result.isEmpty()); - // same for passing in an empty collection -// result = _coprocessor.getColumnFamilies(new HashMap>()); - } + @Test + public void test_getColumnFamilies_firewalling() { + // passing null collection should return back an empty map + RangerAuthorizationCoprocessor coprocessor = new RangerAuthorizationCoprocessor(); + Map> result = coprocessor.getColumnFamilies(null); + assertNotNull(result); + assertTrue(result.isEmpty()); + // same for passing in an empty collection + // result = coprocessor.getColumnFamilies(new HashMap>()); + } } diff --git a/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/RangerAuthorizationFilterTest.java b/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/RangerAuthorizationFilterTest.java index fffc7943db..3840b5b4cb 100644 --- a/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/RangerAuthorizationFilterTest.java +++ b/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/RangerAuthorizationFilterTest.java @@ -18,10 +18,10 @@ */ package org.apache.ranger.authorization.hbase; -import static org.junit.Assert.assertEquals; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.filter.Filter.ReturnCode; +import 
org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet; +import org.junit.Test; import java.io.IOException; import java.util.HashMap; @@ -29,100 +29,98 @@ import java.util.Map; import java.util.Set; -import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.filter.Filter.ReturnCode; -import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet; -import org.junit.Test; +import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class RangerAuthorizationFilterTest { + @Test + public void testFilterKeyValueCell_happyPath() throws IOException { + // null/empty column collection in cache for a family implies family level access + String[] allowedFamilies = new String[] {"family1", "family2"}; + String[] deniedFamilies = new String[] {"family3", "family4"}; + String[] indeterminateFamilies = new String[] {"family5", "family6"}; + Set familiesAccessAllowed = ImmutableSet.copyOf(allowedFamilies); + Set familiesAccessDenied = ImmutableSet.copyOf(deniedFamilies); + Set familiesAccessIndeterminate = ImmutableSet.copyOf(indeterminateFamilies); - @Test - public void testFilterKeyValueCell_happyPath() throws IOException { - - // null/empty column collection in cache for a family implies family level access - String[] allowedFamilies = new String[] { "family1", "family2" }; - String[] deniedFamilies = new String[] { "family3", "family4" }; - String[] indeterminateFamilies = new String[] { "family5", "family6" }; - Set familiesAccessAllowed = ImmutableSet.copyOf(allowedFamilies); - Set familiesAccessDenied = ImmutableSet.copyOf(deniedFamilies); - Set familiesAccessIndeterminate = ImmutableSet.copyOf(indeterminateFamilies); - - Map> columnsAccessAllowed = new HashMap>(); - String[] family7KnowGoodColumns = new String[] {"family7-column1", "family7-column2"}; - columnsAccessAllowed.put("family7", 
ImmutableSet.copyOf(family7KnowGoodColumns)); - String[] family8KnowGoodColumns = new String[] {"family8-column1", "family8-column2"}; - columnsAccessAllowed.put("family8", ImmutableSet.copyOf(family8KnowGoodColumns)); + Map> columnsAccessAllowed = new HashMap>(); + String[] family7KnowGoodColumns = new String[] {"family7-column1", "family7-column2"}; + columnsAccessAllowed.put("family7", ImmutableSet.copyOf(family7KnowGoodColumns)); + String[] family8KnowGoodColumns = new String[] {"family8-column1", "family8-column2"}; + columnsAccessAllowed.put("family8", ImmutableSet.copyOf(family8KnowGoodColumns)); - // auth session - AuthorizationSession session = createSessionMock(); - RangerAuthorizationFilter filter = new RangerAuthorizationFilter(session, familiesAccessAllowed, familiesAccessDenied, familiesAccessIndeterminate, columnsAccessAllowed, new HashSet<>()); + // auth session + AuthorizationSession session = createSessionMock(); + RangerAuthorizationFilter filter = new RangerAuthorizationFilter(session, familiesAccessAllowed, familiesAccessDenied, familiesAccessIndeterminate, columnsAccessAllowed, new HashSet<>()); - // evaluate access for various types of cases - Cell aCell = mock(Cell.class); - // families with know denied acess - for (String family : deniedFamilies) { - setFamilyArray(aCell, family.getBytes()); - setQualifierArray(aCell, new byte[0]); - assertEquals(ReturnCode.NEXT_COL, filter.filterKeyValue(aCell)); - } - // family that isn't in allowed and if cell does not have column then it should be denied - setFamilyArray(aCell, "family7".getBytes()); - setQualifierArray(aCell, new byte[0]); - assertEquals(ReturnCode.NEXT_COL, filter.filterKeyValue(aCell)); - // families with known partial access - for (String column : family7KnowGoodColumns ) { - setQualifierArray(aCell, column.getBytes()); - assertEquals(ReturnCode.INCLUDE, filter.filterKeyValue(aCell)); - } - setFamilyArray(aCell, "family8".getBytes()); - for (String column : family8KnowGoodColumns ) 
{ - setQualifierArray(aCell, column.getBytes()); - assertEquals(ReturnCode.INCLUDE, filter.filterKeyValue(aCell)); - } - // try some columns that are not in the cache - for (String column : new String[] { "family8-column3", "family8-column4"}) { - setQualifierArray(aCell, column.getBytes()); - assertEquals(ReturnCode.NEXT_COL, filter.filterKeyValue(aCell)); - } - // families with known allowed access - for these we need to doctor up the session - when(session.isAuthorized()).thenReturn(true); - for (String family : allowedFamilies) { - setFamilyArray(aCell, family.getBytes()); - setQualifierArray(aCell, "some-column".getBytes()); - assertEquals(ReturnCode.INCLUDE, filter.filterKeyValue(aCell)); - } - when(session.isAuthorized()).thenReturn(false); - for (String family : indeterminateFamilies) { - setFamilyArray(aCell, family.getBytes()); - setQualifierArray(aCell, "some-column".getBytes()); - assertEquals(ReturnCode.NEXT_COL, filter.filterKeyValue(aCell)); - } - } + // evaluate access for various types of cases + Cell aCell = mock(Cell.class); + // families with know denied acess + for (String family : deniedFamilies) { + setFamilyArray(aCell, family.getBytes()); + setQualifierArray(aCell, new byte[0]); + assertEquals(ReturnCode.NEXT_COL, filter.filterKeyValue(aCell)); + } + // family that isn't in allowed and if cell does not have column then it should be denied + setFamilyArray(aCell, "family7".getBytes()); + setQualifierArray(aCell, new byte[0]); + assertEquals(ReturnCode.NEXT_COL, filter.filterKeyValue(aCell)); + // families with known partial access + for (String column : family7KnowGoodColumns) { + setQualifierArray(aCell, column.getBytes()); + assertEquals(ReturnCode.INCLUDE, filter.filterKeyValue(aCell)); + } + setFamilyArray(aCell, "family8".getBytes()); + for (String column : family8KnowGoodColumns) { + setQualifierArray(aCell, column.getBytes()); + assertEquals(ReturnCode.INCLUDE, filter.filterKeyValue(aCell)); + } + // try some columns that are not in 
the cache + for (String column : new String[] {"family8-column3", "family8-column4"}) { + setQualifierArray(aCell, column.getBytes()); + assertEquals(ReturnCode.NEXT_COL, filter.filterKeyValue(aCell)); + } + // families with known allowed access - for these we need to doctor up the session + when(session.isAuthorized()).thenReturn(true); + for (String family : allowedFamilies) { + setFamilyArray(aCell, family.getBytes()); + setQualifierArray(aCell, "some-column".getBytes()); + assertEquals(ReturnCode.INCLUDE, filter.filterKeyValue(aCell)); + } + when(session.isAuthorized()).thenReturn(false); + for (String family : indeterminateFamilies) { + setFamilyArray(aCell, family.getBytes()); + setQualifierArray(aCell, "some-column".getBytes()); + assertEquals(ReturnCode.NEXT_COL, filter.filterKeyValue(aCell)); + } + } - private void setFamilyArray(Cell aCell, byte[] familyArray) { - when(aCell.getFamilyArray()).thenReturn(familyArray); - when(aCell.getFamilyLength()).thenReturn((byte) familyArray.length); - when(aCell.getFamilyOffset()).thenReturn(0); - } + AuthorizationSession createSessionMock() { + AuthorizationSession session = mock(AuthorizationSession.class); + when(session.column(anyString())).thenReturn(session); + when(session.columnFamily(anyString())).thenReturn(session); + when(session.table(anyString())).thenReturn(session); + when(session.buildRequest()).thenReturn(session); + when(session.authorize()).thenReturn(session); + when(session.isAuthorized()).thenReturn(false); // by default the mock fails all auth requests - private void setQualifierArray(Cell aCell, byte[] qualifierArray) { - when(aCell.getQualifierArray()).thenReturn(qualifierArray); - when(aCell.getQualifierLength()).thenReturn(qualifierArray.length); - when(aCell.getQualifierOffset()).thenReturn(0); - } + HbaseAuditHandler auditHandler = mock(HbaseAuditHandler.class); + session.auditHandler = auditHandler; - AuthorizationSession createSessionMock() { - AuthorizationSession session = 
mock(AuthorizationSession.class); - when(session.column(anyString())).thenReturn(session); - when(session.columnFamily(anyString())).thenReturn(session); - when(session.table(anyString())).thenReturn(session); - when(session.buildRequest()).thenReturn(session); - when(session.authorize()).thenReturn(session); - when(session.isAuthorized()).thenReturn(false); // by default the mock fails all auth requests + return session; + } - HbaseAuditHandler auditHandler = mock(HbaseAuditHandler.class); - session._auditHandler = auditHandler; + private void setFamilyArray(Cell aCell, byte[] familyArray) { + when(aCell.getFamilyArray()).thenReturn(familyArray); + when(aCell.getFamilyLength()).thenReturn((byte) familyArray.length); + when(aCell.getFamilyOffset()).thenReturn(0); + } - return session; - } + private void setQualifierArray(Cell aCell, byte[] qualifierArray) { + when(aCell.getQualifierArray()).thenReturn(qualifierArray); + when(aCell.getQualifierLength()).thenReturn(qualifierArray.length); + when(aCell.getQualifierOffset()).thenReturn(0); + } } diff --git a/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/TestPolicyEngine.java b/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/TestPolicyEngine.java index 8c120419d1..5c7ef26028 100644 --- a/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/TestPolicyEngine.java +++ b/hbase-agent/src/test/java/org/apache/ranger/authorization/hbase/TestPolicyEngine.java @@ -19,14 +19,12 @@ package org.apache.ranger.authorization.hbase; - -import static org.junit.Assert.*; - -import java.io.InputStream; -import java.io.InputStreamReader; -import java.lang.reflect.Type; -import java.util.List; - +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonDeserializationContext; +import com.google.gson.JsonDeserializer; +import com.google.gson.JsonElement; +import com.google.gson.JsonParseException; import 
org.apache.ranger.authorization.hadoop.config.RangerPluginConfig; import org.apache.ranger.authorization.hbase.TestPolicyEngine.PolicyEngineTestCase.TestData; import org.apache.ranger.plugin.audit.RangerDefaultAuditHandler; @@ -34,9 +32,9 @@ import org.apache.ranger.plugin.model.RangerServiceDef; import org.apache.ranger.plugin.policyengine.RangerAccessRequest; import org.apache.ranger.plugin.policyengine.RangerAccessRequestImpl; +import org.apache.ranger.plugin.policyengine.RangerAccessResource; import org.apache.ranger.plugin.policyengine.RangerAccessResourceImpl; import org.apache.ranger.plugin.policyengine.RangerAccessResult; -import org.apache.ranger.plugin.policyengine.RangerAccessResource; import org.apache.ranger.plugin.policyengine.RangerAccessResultProcessor; import org.apache.ranger.plugin.policyengine.RangerPluginContext; import org.apache.ranger.plugin.policyengine.RangerPolicyEngine; @@ -46,126 +44,123 @@ import org.apache.ranger.plugin.util.ServicePolicies; import org.junit.AfterClass; import org.junit.BeforeClass; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import com.google.gson.JsonDeserializationContext; -import com.google.gson.JsonDeserializer; -import com.google.gson.JsonElement; -import com.google.gson.JsonParseException; import org.junit.Test; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.lang.reflect.Type; +import java.util.List; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; public class TestPolicyEngine { - static RangerBasePlugin plugin = null; - static Gson gsonBuilder = null; - - - @BeforeClass - public static void setUpBeforeClass() throws Exception { - plugin = new RangerBasePlugin("hbase", "hbase"); - gsonBuilder = new GsonBuilder().setDateFormat("yyyyMMdd-HH:mm:ss.SSS-Z") - .setPrettyPrinting() - .registerTypeAdapter(RangerAccessRequest.class, new RangerAccessRequestDeserializer()) - 
.registerTypeAdapter(RangerAccessResource.class, new RangerResourceDeserializer()) - .create(); - } - - @AfterClass - public static void tearDownAfterClass() throws Exception { - } - - - @Test - public void testPolicyEngine_hbase() { - String[] hbaseTestResourceFiles = { "/policyengine/test_policyengine_hbase.json" }; - - runTestsFromResourceFiles(hbaseTestResourceFiles); - } - @Test - public void testPolicyEngine_hbase_ignoreDescendantDeny() { - String[] hbaseTestResourceFiles = { "/policyengine/test_policyengine_hbase_ignoreDenyDescendant.json" }; - - runTestsFromResourceFiles(hbaseTestResourceFiles); - } - - private void runTestsFromResourceFiles(String[] resourceNames) { - for(String resourceName : resourceNames) { - InputStream inStream = this.getClass().getResourceAsStream(resourceName); - InputStreamReader reader = new InputStreamReader(inStream); - - runTests(reader, resourceName); - } - } - - private void runTests(InputStreamReader reader, String testName) { - try { - PolicyEngineTestCase testCase = gsonBuilder.fromJson(reader, PolicyEngineTestCase.class); - - assertTrue("invalid input: " + testName, testCase != null && testCase.serviceDef != null && testCase.policies != null && testCase.tests != null); - - ServicePolicies servicePolicies = new ServicePolicies(); - servicePolicies.setServiceName(testCase.serviceName); - servicePolicies.setServiceDef(testCase.serviceDef); - servicePolicies.setPolicies(testCase.policies); - - RangerPolicyEngineOptions policyEngineOptions = new RangerPolicyEngineOptions(); - RangerPluginContext pluginContext = new RangerPluginContext(new RangerPluginConfig("hbase", null, testName, "cl1", "on-prem", policyEngineOptions)); - RangerPolicyEngine policyEngine = new RangerPolicyEngineImpl(servicePolicies, pluginContext, null); - - RangerAccessResultProcessor auditHandler = new RangerDefaultAuditHandler(pluginContext.getConfig()); - - for(TestData test : testCase.tests) { - RangerAccessResult expected = test.result; - 
RangerAccessRequest request = test.request; - - RangerAccessResult result = policyEngine.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_ACCESS, auditHandler); - - assertNotNull("result was null! - " + test.name, result); - assertEquals("isAllowed mismatched! - " + test.name, expected.getIsAllowed(), result.getIsAllowed()); - assertEquals("isAudited mismatched! - " + test.name, expected.getIsAudited(), result.getIsAudited()); - assertEquals("policyId mismatched! - " + test.name, expected.getPolicyId(), result.getPolicyId()); - } - - } catch(Throwable excp) { - excp.printStackTrace(); - } - - } - - static class PolicyEngineTestCase { - public String serviceName; - public RangerServiceDef serviceDef; - public List policies; - public List tests; - - class TestData { - public String name; - public RangerAccessRequest request; - public RangerAccessResult result; - } - } - - static class RangerAccessRequestDeserializer implements JsonDeserializer { - @Override - public RangerAccessRequest deserialize(JsonElement jsonObj, Type type, - JsonDeserializationContext context) throws JsonParseException { - RangerAccessRequestImpl ret = gsonBuilder.fromJson(jsonObj, RangerAccessRequestImpl.class); - - ret.setAccessType(ret.getAccessType()); // to force computation of isAccessTypeAny and isAccessTypeDelegatedAdmin - - return ret; - } - } - - static class RangerResourceDeserializer implements JsonDeserializer { - @Override - public RangerAccessResource deserialize(JsonElement jsonObj, Type type, - JsonDeserializationContext context) throws JsonParseException { - RangerAccessResourceImpl resource = gsonBuilder.fromJson(jsonObj, RangerHBaseResource.class); - resource.setValue("table", resource.getValue("table")); - return resource; - } - } + static RangerBasePlugin plugin; + static Gson gsonBuilder; + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + plugin = new RangerBasePlugin("hbase", "hbase"); + gsonBuilder = new 
GsonBuilder().setDateFormat("yyyyMMdd-HH:mm:ss.SSS-Z") + .setPrettyPrinting() + .registerTypeAdapter(RangerAccessRequest.class, new RangerAccessRequestDeserializer()) + .registerTypeAdapter(RangerAccessResource.class, new RangerResourceDeserializer()) + .create(); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + } + + @Test + public void testPolicyEngine_hbase() { + String[] hbaseTestResourceFiles = {"/policyengine/test_policyengine_hbase.json"}; + + runTestsFromResourceFiles(hbaseTestResourceFiles); + } + + @Test + public void testPolicyEngine_hbase_ignoreDescendantDeny() { + String[] hbaseTestResourceFiles = {"/policyengine/test_policyengine_hbase_ignoreDenyDescendant.json"}; + + runTestsFromResourceFiles(hbaseTestResourceFiles); + } + + private void runTestsFromResourceFiles(String[] resourceNames) { + for (String resourceName : resourceNames) { + InputStream inStream = this.getClass().getResourceAsStream(resourceName); + InputStreamReader reader = new InputStreamReader(inStream); + + runTests(reader, resourceName); + } + } + + private void runTests(InputStreamReader reader, String testName) { + try { + PolicyEngineTestCase testCase = gsonBuilder.fromJson(reader, PolicyEngineTestCase.class); + + assertTrue("invalid input: " + testName, testCase != null && testCase.serviceDef != null && testCase.policies != null && testCase.tests != null); + + ServicePolicies servicePolicies = new ServicePolicies(); + servicePolicies.setServiceName(testCase.serviceName); + servicePolicies.setServiceDef(testCase.serviceDef); + servicePolicies.setPolicies(testCase.policies); + + RangerPolicyEngineOptions policyEngineOptions = new RangerPolicyEngineOptions(); + RangerPluginContext pluginContext = new RangerPluginContext(new RangerPluginConfig("hbase", null, testName, "cl1", "on-prem", policyEngineOptions)); + RangerPolicyEngine policyEngine = new RangerPolicyEngineImpl(servicePolicies, pluginContext, null); + + RangerAccessResultProcessor 
auditHandler = new RangerDefaultAuditHandler(pluginContext.getConfig()); + + for (TestData test : testCase.tests) { + RangerAccessResult expected = test.result; + RangerAccessRequest request = test.request; + + RangerAccessResult result = policyEngine.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_ACCESS, auditHandler); + + assertNotNull("result was null! - " + test.name, result); + assertEquals("isAllowed mismatched! - " + test.name, expected.getIsAllowed(), result.getIsAllowed()); + assertEquals("isAudited mismatched! - " + test.name, expected.getIsAudited(), result.getIsAudited()); + assertEquals("policyId mismatched! - " + test.name, expected.getPolicyId(), result.getPolicyId()); + } + } catch (Throwable excp) { + excp.printStackTrace(); + } + } + + static class PolicyEngineTestCase { + public String serviceName; + public RangerServiceDef serviceDef; + public List policies; + public List tests; + + class TestData { + public String name; + public RangerAccessRequest request; + public RangerAccessResult result; + } + } + + static class RangerAccessRequestDeserializer implements JsonDeserializer { + @Override + public RangerAccessRequest deserialize(JsonElement jsonObj, Type type, + JsonDeserializationContext context) throws JsonParseException { + RangerAccessRequestImpl ret = gsonBuilder.fromJson(jsonObj, RangerAccessRequestImpl.class); + + ret.setAccessType(ret.getAccessType()); // to force computation of isAccessTypeAny and isAccessTypeDelegatedAdmin + + return ret; + } + } + + static class RangerResourceDeserializer implements JsonDeserializer { + @Override + public RangerAccessResource deserialize(JsonElement jsonObj, Type type, + JsonDeserializationContext context) throws JsonParseException { + RangerAccessResourceImpl resource = gsonBuilder.fromJson(jsonObj, RangerHBaseResource.class); + resource.setValue("table", resource.getValue("table")); + return resource; + } + } } - diff --git 
a/hbase-agent/src/test/java/org/apache/ranger/services/hbase/TestRangerServiceHBase.java b/hbase-agent/src/test/java/org/apache/ranger/services/hbase/TestRangerServiceHBase.java index ac7a146ba0..27b6ee35b1 100644 --- a/hbase-agent/src/test/java/org/apache/ranger/services/hbase/TestRangerServiceHBase.java +++ b/hbase-agent/src/test/java/org/apache/ranger/services/hbase/TestRangerServiceHBase.java @@ -19,10 +19,6 @@ package org.apache.ranger.services.hbase; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - import org.apache.ranger.plugin.model.RangerService; import org.apache.ranger.plugin.model.RangerServiceDef; import org.apache.ranger.plugin.service.ResourceLookupContext; @@ -30,110 +26,105 @@ import org.junit.Before; import org.junit.Test; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + public class TestRangerServiceHBase { - static final String sdName = "svcDef-HBase"; - static final String serviceName = "HBaseDef"; - HashMap responseData = null; - Map configs = null; - RangerServiceHBase svcHBase = null; - RangerServiceDef sd = null; - RangerService svc = null; - ResourceLookupContext lookupContext = null; - - - @Before - public void setup() { - configs = new HashMap(); - lookupContext = new ResourceLookupContext(); - - buildHbaseConnectionConfig(); - buildLookupContext(); - - sd = new RangerServiceDef(sdName, "org.apache.ranger.services.hbase.RangerServiceHBase", "TestService", "test servicedef description", null, null, null, null, null, null, null); - svc = new RangerService(sdName, serviceName, "unit test hbase resource lookup and validateConfig", null, configs); - svcHBase = new RangerServiceHBase(); - svcHBase.init(sd, svc); - } - - @Test - public void testValidateConfig() { + static final String sdName = "svcDef-HBase"; + static final String serviceName = "HBaseDef"; + HashMap responseData; + Map configs; + RangerServiceHBase svcHBase; + RangerServiceDef sd; + RangerService svc; + 
ResourceLookupContext lookupContext; + + @Before + public void setup() { + configs = new HashMap(); + lookupContext = new ResourceLookupContext(); + + buildHbaseConnectionConfig(); + buildLookupContext(); + + sd = new RangerServiceDef(sdName, "org.apache.ranger.services.hbase.RangerServiceHBase", "TestService", "test servicedef description", null, null, null, null, null, null, null); + svc = new RangerService(sdName, serviceName, "unit test hbase resource lookup and validateConfig", null, configs); + svcHBase = new RangerServiceHBase(); + svcHBase.init(sd, svc); + } + + @Test + public void testValidateConfig() { + /* + TODO: does this test require a live HBase environment? + HashMap ret = null; + String errorMessage = null; + + try { + ret = svcHBase.validateConfig(); + }catch (Exception e) { + errorMessage = e.getMessage(); + if ( e instanceof HadoopException) { + errorMessage = "HadoopException"; + } + } + + if ( errorMessage != null) { + assertTrue(errorMessage.contains("HadoopException")); + } else { + assertNotNull(ret); + } + */ + } + + @Test + public void testLookUpResource() { +// TODO: does this test require a live HBase environment? 
+// List ret = new ArrayList(); +// List mockresult = new ArrayList(){{add("iemployee");add("idepartment");}}; +// String errorMessage = null; +// HBaseClient hbaseClient = new HBaseClient("hbasedev", configs); +// try { +// Mockito.when(hbaseClient.getTableList("iem", null)).thenReturn(mockresult); +// ret = svcHBase.lookupResource(lookupContext); +// }catch (Throwable e) { +// errorMessage = e.getMessage(); +// if ( e instanceof HadoopException) { +// errorMessage = "HadoopException"; +// } +// } +// +// if ( errorMessage != null) { +// assertTrue(errorMessage.contains("HadoopException")); +// } else { +// assertNotNull(ret); +// } + } + + public void buildHbaseConnectionConfig() { + configs.put("username", "hbaseuser"); + configs.put("password", "*******"); + configs.put("hadoop.security.authentication", "simple"); + configs.put("hbase.master.kerberos.principal", "hbase/_HOST@EXAMPLE.COM"); + configs.put("hbase.security.authentication", "simple"); + configs.put("hbase.zookeeper.property.clientPort", "2181"); + configs.put("hbase.zookeeper.quorum", "localhost"); + configs.put("zookeeper.znode.parent", "/hbase-unsecure"); + configs.put("isencrypted", "true"); + } - /* TODO: does this test require a live HBase environment? - * - HashMap ret = null; - String errorMessage = null; - - try { - ret = svcHBase.validateConfig(); - }catch (Exception e) { - errorMessage = e.getMessage(); - if ( e instanceof HadoopException) { - errorMessage = "HadoopException"; - } - } - - if ( errorMessage != null) { - assertTrue(errorMessage.contains("HadoopException")); - } else { - assertNotNull(ret); - } - * - */ - } - - - @Test - public void testLookUpResource() { - /* TODO: does this test require a live HBase environment? 
- * - List ret = new ArrayList(); - List mockresult = new ArrayList(){{add("iemployee");add("idepartment");}}; - String errorMessage = null; - HBaseClient hbaseClient = new HBaseClient("hbasedev", configs); - try { - Mockito.when(hbaseClient.getTableList("iem", null)).thenReturn(mockresult); - ret = svcHBase.lookupResource(lookupContext); - }catch (Throwable e) { - errorMessage = e.getMessage(); - if ( e instanceof HadoopException) { - errorMessage = "HadoopException"; - } - } - - if ( errorMessage != null) { - assertTrue(errorMessage.contains("HadoopException")); - } else { - assertNotNull(ret); - } - * - */ - } - - public void buildHbaseConnectionConfig() { - configs.put("username", "hbaseuser"); - configs.put("password", "*******"); - configs.put("hadoop.security.authentication", "simple"); - configs.put("hbase.master.kerberos.principal", "hbase/_HOST@EXAMPLE.COM"); - configs.put("hbase.security.authentication", "simple"); - configs.put("hbase.zookeeper.property.clientPort", "2181"); - configs.put("hbase.zookeeper.quorum", "localhost"); - configs.put("zookeeper.znode.parent","/hbase-unsecure"); - configs.put("isencrypted", "true"); - } + public void buildLookupContext() { + Map> resourceMap = new HashMap>(); + resourceMap.put(null, null); + lookupContext.setUserInput("iem"); + lookupContext.setResourceName("table"); + lookupContext.setResources(resourceMap); + } - public void buildLookupContext() { - Map> resourceMap = new HashMap>(); - resourceMap.put(null, null); - lookupContext.setUserInput("iem"); - lookupContext.setResourceName("table"); - lookupContext.setResources(resourceMap); - } - - - @After - public void tearDown() { - sd = null; - svc = null; - } - + @After + public void tearDown() { + sd = null; + svc = null; + } }