

HCATALOG-245 StorageHandler authorization providers

Review Request #3846 - Created Feb. 11, 2012 and updated

Submitter: enis
Bugs: HCATALOG-245
Reviewers: hcatalog
Repository: hcatalog-git
As per the design in the parent issue, we delegate the authorization checks to the storage handler (HDFS is treated as a storage handler as well). This JIRA introduces HiveAuthorizationProvider implementations for HBase and HDFS.
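
For context on how the new provider would be switched on, here is a minimal sketch, assuming the HCatalog CLI honors Hive's standard authorization settings (hive.security.authorization.enabled and hive.security.authorization.manager); the class name AuthSetupSketch is hypothetical and not part of this change:

import org.apache.hadoop.hive.conf.HiveConf;

// Hypothetical helper, for illustration only: builds a HiveConf that routes
// authorization checks through the delegating provider added by this patch.
public class AuthSetupSketch {
  public static HiveConf newAuthorizingConf() {
    HiveConf conf = new HiveConf();
    // Turn authorization checks on and point Hive at the provider that
    // delegates to the table's storage handler (HDFS or HBase).
    conf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
    conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
        "org.apache.hcatalog.security.StorageDelegationAuthorizationProvider");
    return conf;
  }
}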

 
src/java/org/apache/hcatalog/cli/SemanticAnalysis/AddPartitionHook.java
Revision efbb79a (New Change)

@@ -15,20 +15,29 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 package org.apache.hcatalog.cli.SemanticAnalysis;
 
+import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Order;
+import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
 import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
+import org.apache.hadoop.hive.ql.plan.DDLWork;
+import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hcatalog.common.HCatConstants;
 
-public class AddPartitionHook extends AbstractSemanticAnalyzerHook{
+public class AddPartitionHook extends HCatSemanticAnalyzerBase {
 
   private String tblName, inDriver, outDriver;
 
   @Override
   public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
@@ -52,11 +61,11 @@
   }
 
 //  @Override
 //  public void postAnalyze(HiveSemanticAnalyzerHookContext context,
 //      List<Task<? extends Serializable>> rootTasks) throws SemanticException {
-//
+//    authorizeDDL(context, rootTasks);
 //    try {
 //      Hive db = context.getHive();
 //      Table tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
 //      for(Task<? extends Serializable> task : rootTasks){
 //        System.err.println("PArt spec: "+((DDLWork)task.getWork()).getAddPartitionDesc().getPartSpec());
@@ -75,6 +84,78 @@
 //      throw new SemanticException(he);
 //    } catch (InvalidOperationException e) {
 //      throw new SemanticException(e);
 //    }
 //  }
-}
+
+  @Override
+  protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext context,
+      Hive hive, DDLWork work) throws HiveException {
+    AddPartitionDesc addPartitionDesc = work.getAddPartitionDesc();
+    if (addPartitionDesc != null) {
+      String dbName = getDbName(hive, addPartitionDesc.getDbName());
+      Table table = hive.getTable(dbName, addPartitionDesc.getTableName());
+      Path partPath = null;
+      if (addPartitionDesc.getLocation() != null) {
+        partPath = new Path(table.getPath(), addPartitionDesc.getLocation());
+      }
+
+      Partition part = newPartition(
+          table, addPartitionDesc.getPartSpec(), partPath,
+          addPartitionDesc.getPartParams(),
+          addPartitionDesc.getInputFormat(),
+          addPartitionDesc.getOutputFormat(),
+          addPartitionDesc.getNumBuckets(),
+          addPartitionDesc.getCols(),
+          addPartitionDesc.getSerializationLib(),
+          addPartitionDesc.getSerdeParams(),
+          addPartitionDesc.getBucketCols(),
+          addPartitionDesc.getSortCols());
+
+      authorize(part, Privilege.CREATE);
+    }
+  }
+
+  protected Partition newPartition(Table tbl, Map<String, String> partSpec,
+      Path location, Map<String, String> partParams, String inputFormat, String outputFormat,
+      int numBuckets, List<FieldSchema> cols,
+      String serializationLib, Map<String, String> serdeParams,
+      List<String> bucketCols, List<Order> sortCols) throws HiveException {
+
+    try {
+      Partition tmpPart = new Partition(tbl, partSpec, location);
+      org.apache.hadoop.hive.metastore.api.Partition inPart
+        = tmpPart.getTPartition();
+      if (partParams != null) {
+        inPart.setParameters(partParams);
+      }
+      if (inputFormat != null) {
+        inPart.getSd().setInputFormat(inputFormat);
+      }
+      if (outputFormat != null) {
+        inPart.getSd().setOutputFormat(outputFormat);
+      }
+      if (numBuckets != -1) {
+        inPart.getSd().setNumBuckets(numBuckets);
+      }
+      if (cols != null) {
+        inPart.getSd().setCols(cols);
+      }
+      if (serializationLib != null) {
+        inPart.getSd().getSerdeInfo().setSerializationLib(serializationLib);
+      }
+      if (serdeParams != null) {
+        inPart.getSd().getSerdeInfo().setParameters(serdeParams);
+      }
+      if (bucketCols != null) {
+        inPart.getSd().setBucketCols(bucketCols);
+      }
+      if (sortCols != null) {
+        inPart.getSd().setSortCols(sortCols);
+      }
+
+      return new Partition(tbl, inPart);
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
+}
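
The plumbing that drives this hook lives in the new HCatSemanticAnalyzerBase file, which is not expanded in this view. Judging from the commented-out authorizeDDL(context, rootTasks) call and the authorizeDDLWork signature above, the driving loop presumably looks roughly like the following sketch (an assumption, not the code in the patch):

import java.io.Serializable;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.DDLWork;

// Hypothetical sketch of the base-class loop; the real implementation is in
// src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java.
protected void authorizeDDL(HiveSemanticAnalyzerHookContext context,
    List<Task<? extends Serializable>> rootTasks) throws SemanticException {
  try {
    Hive hive = context.getHive();
    for (Task<? extends Serializable> task : rootTasks) {
      if (task.getWork() instanceof DDLWork) {
        // Each hook subclass (e.g. AddPartitionHook above) authorizes its
        // own flavor of DDLWork against the storage handler's provider.
        authorizeDDLWork(context, hive, (DDLWork) task.getWork());
      }
    }
  } catch (HiveException ex) {
    throw new SemanticException(ex);
  }
}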
src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java
Revision 109de31 (New Change)

src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
Revision 098a06b (New Change)

src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
Revision 8387d8e (New Change)

src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
New File

src/java/org/apache/hcatalog/common/AuthUtils.java
Revision 7cba8dc (New Change)

src/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
New File

src/java/org/apache/hcatalog/security/StorageDelegationAuthorizationProvider.java
New File

src/test/org/apache/hcatalog/HcatTestUtils.java
New File

src/test/org/apache/hcatalog/cli/TestEximSemanticAnalysis.java
Revision 64bde1b (New Change)

src/test/org/apache/hcatalog/cli/TestEximSemanticAnalysis.java.broken
New File

src/test/org/apache/hcatalog/security/TestHdfsAuthorizationProvider.java
New File
 