spark-user mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From "guxiaobo1982" <guxiaobo1...@qq.com>
Subject Re: Can't access remote Hive table from spark
Date Sun, 25 Jan 2015 08:26:47 GMT
Here is the full log message.
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PART_COL_STATS.MAX_COL_LEN" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics.maxColLen] -> Column(s) [PART_COL_STATS.MAX_COL_LEN] using mapping of type "org.datanucleus.store.rdbms.mapping.java.LongMapping" (org.datanucleus.store.rdbms.mapping.datastore.BigIntRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PART_COL_STATS.NUM_DISTINCTS" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics.numDVs] -> Column(s) [PART_COL_STATS.NUM_DISTINCTS] using mapping of type "org.datanucleus.store.rdbms.mapping.java.LongMapping" (org.datanucleus.store.rdbms.mapping.datastore.BigIntRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PART_COL_STATS.NUM_FALSES" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics.numFalses] -> Column(s) [PART_COL_STATS.NUM_FALSES] using mapping of type "org.datanucleus.store.rdbms.mapping.java.LongMapping" (org.datanucleus.store.rdbms.mapping.datastore.BigIntRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PART_COL_STATS.NUM_NULLS" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics.numNulls] -> Column(s) [PART_COL_STATS.NUM_NULLS] using mapping of type "org.datanucleus.store.rdbms.mapping.java.LongMapping" (org.datanucleus.store.rdbms.mapping.datastore.BigIntRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PART_COL_STATS.NUM_TRUES" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics.numTrues] -> Column(s) [PART_COL_STATS.NUM_TRUES] using mapping of type "org.datanucleus.store.rdbms.mapping.java.LongMapping" (org.datanucleus.store.rdbms.mapping.datastore.BigIntRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PART_COL_STATS.PART_ID" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics.partition] -> Column(s) [PART_COL_STATS.PART_ID] using mapping of type "org.datanucleus.store.rdbms.mapping.java.PersistableMapping" (org.datanucleus.store.rdbms.mapping.datastore.BigIntRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PART_COL_STATS.PARTITION_NAME" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics.partitionName] -> Column(s) [PART_COL_STATS.PARTITION_NAME] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PART_COL_STATS."TABLE_NAME"" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics.tableName] -> Column(s) [PART_COL_STATS."TABLE_NAME"] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Table/View PART_COL_STATS has been initialised
 
[main] DEBUG DataNucleus.Datastore.Schema - Table PARTITIONS will manage the persistence of the fields for class org.apache.hadoop.hive.metastore.model.MPartition (inheritance strategy="new-table") 
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PARTITIONS.CREATE_TIME" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MPartition.createTime] -> Column(s) [PARTITIONS.CREATE_TIME] using mapping of type "org.datanucleus.store.rdbms.mapping.java.IntegerMapping" (org.datanucleus.store.rdbms.mapping.datastore.IntegerRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PARTITIONS.LAST_ACCESS_TIME" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MPartition.lastAccessTime] -> Column(s) [PARTITIONS.LAST_ACCESS_TIME] using mapping of type "org.datanucleus.store.rdbms.mapping.java.IntegerMapping" (org.datanucleus.store.rdbms.mapping.datastore.IntegerRDBMSMapping)
 
[main] DEBUG DataNucleus.Persistence - Managing Persistence of Field : org.apache.hadoop.hive.metastore.model.MPartition.parameters [Table : PARTITION_PARAMS]
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MPartition.parameters] -> Column(s) [[none]] using mapping of type "org.datanucleus.store.rdbms.mapping.java.MapMapping" ()
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PARTITIONS.PART_NAME" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MPartition.partitionName] -> Column(s) [PARTITIONS.PART_NAME] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PARTITIONS.SD_ID" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MPartition.sd] -> Column(s) [PARTITIONS.SD_ID] using mapping of type "org.datanucleus.store.rdbms.mapping.java.PersistableMapping" (org.datanucleus.store.rdbms.mapping.datastore.BigIntRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PARTITIONS.TBL_ID" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MPartition.table] -> Column(s) [PARTITIONS.TBL_ID] using mapping of type "org.datanucleus.store.rdbms.mapping.java.PersistableMapping" (org.datanucleus.store.rdbms.mapping.datastore.BigIntRDBMSMapping)
 
[main] DEBUG DataNucleus.Persistence - Managing Persistence of Field : org.apache.hadoop.hive.metastore.model.MPartition.values [Table : PARTITION_KEY_VALS]
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MPartition.values] -> Column(s) [[none]] using mapping of type "org.datanucleus.store.rdbms.mapping.java.CollectionMapping" ()
 
[main] DEBUG DataNucleus.Datastore.Schema - Table/View PARTITIONS has been initialised
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PARTITION_PARAMS.PART_ID" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore - Field [org.apache.hadoop.hive.metastore.model.MPartition.parameters] -> Column(s) [PARTITION_PARAMS.PART_ID] using mapping of type "org.datanucleus.store.rdbms.mapping.java.PersistableMapping" (org.datanucleus.store.rdbms.mapping.datastore.BigIntRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PARTITION_PARAMS.PARAM_KEY" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore - Field [org.apache.hadoop.hive.metastore.model.MPartition.parameters] -> Column(s) [PARTITION_PARAMS.PARAM_KEY] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PARTITION_PARAMS.PARAM_VALUE" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore - Field [org.apache.hadoop.hive.metastore.model.MPartition.parameters] -> Column(s) [PARTITION_PARAMS.PARAM_VALUE] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Table/View PARTITION_PARAMS has been initialised
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PARTITION_KEY_VALS.PART_ID" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore - Field [org.apache.hadoop.hive.metastore.model.MPartition.values] -> Column(s) [PARTITION_KEY_VALS.PART_ID] using mapping of type "org.datanucleus.store.rdbms.mapping.java.PersistableMapping" (org.datanucleus.store.rdbms.mapping.datastore.BigIntRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PARTITION_KEY_VALS.PART_KEY_VAL" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore - Field [org.apache.hadoop.hive.metastore.model.MPartition.values] -> Column(s) [PARTITION_KEY_VALS.PART_KEY_VAL] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "PARTITION_KEY_VALS.INTEGER_IDX" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore - Field [org.apache.hadoop.hive.metastore.model.MPartition.values] -> Column(s) [PARTITION_KEY_VALS.INTEGER_IDX] using mapping of type "org.datanucleus.store.rdbms.mapping.java.IntegerMapping" (org.datanucleus.store.rdbms.mapping.datastore.IntegerRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Table/View PARTITION_KEY_VALS has been initialised
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@6c7c7498" opened with isolation level "serializable" and auto-commit=false
 
[main] DEBUG DataNucleus.Datastore.Schema - Schema Transaction started with connection "com.jolbox.bonecp.ConnectionHandle@6c7c7498" with isolation "serializable"
 
[main] DEBUG DataNucleus.Datastore.Schema - Check of existence of PART_COL_STATS returned table type of TABLE
 
[main] DEBUG DataNucleus.Datastore.Schema - Loading column info for table(s) "TBLS, PARTITION_KEY_VALS, SKEWED_STRING_LIST, SERDES, PARTITION_KEYS, PART_COL_STATS, TAB_COL_STATS, BUCKETING_COLS, CDS, PARTITIONS, SERDE_PARAMS, SKEWED_VALUES, COLUMNS_V2, DBS, SD_PARAMS, SKEWED_COL_VALUE_LOC_MAP, SDS, SKEWED_COL_NAMES, SORT_COLS, TABLE_PARAMS, DATABASE_PARAMS, PARTITION_PARAMS, SKEWED_STRING_LIST_VALUES" in Catalog "", Schema ""
 
[main] DEBUG DataNucleus.Datastore.Schema - Column info loaded for Catalog "", Schema "", 23 tables, time = 34 ms
 
[main] DEBUG DataNucleus.Datastore.Schema - Column info retrieved for table "PART_COL_STATS" : 20 columns found
 
[main] DEBUG DataNucleus.Datastore.Schema - Check of existence of PARTITIONS returned table type of TABLE
 
[main] DEBUG DataNucleus.Datastore.Schema - Check of existence of PARTITION_PARAMS returned table type of TABLE
 
[main] DEBUG DataNucleus.Datastore.Schema - Check of existence of PARTITION_KEY_VALS returned table type of TABLE
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 3 index(es) for table PART_COL_STATS
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 1 foreign key(s) for table PART_COL_STATS
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 1 unique key(s) for table PART_COL_STATS
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 6 index(es) for table PARTITIONS
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 2 foreign key(s) for table PARTITIONS
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 2 unique key(s) for table PARTITIONS
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 3 index(es) for table PARTITION_PARAMS
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 1 foreign key(s) for table PARTITION_PARAMS
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 1 unique key(s) for table PARTITION_PARAMS
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 3 index(es) for table PARTITION_KEY_VALS
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 1 foreign key(s) for table PARTITION_KEY_VALS
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 1 unique key(s) for table PARTITION_KEY_VALS
 
[main] DEBUG DataNucleus.Datastore.Schema - Schema Transaction committing with connection "com.jolbox.bonecp.ConnectionHandle@6c7c7498"
 
[main] DEBUG DataNucleus.Datastore.Schema - Schema Transaction closing with connection "com.jolbox.bonecp.ConnectionHandle@6c7c7498"
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@6c7c7498" non enlisted to a transaction is being committed.
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@6c7c7498" closed
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compile Time for datastore = 90 ms
 
[main] DEBUG DataNucleus.Query - SELECT FROM org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics WHERE dbName == '' Query compiled to datastore query "SELECT 'org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics' AS NUCLEUS_TYPE,A0.AVG_COL_LEN,A0."COLUMN_NAME",A0.COLUMN_TYPE,A0.DB_NAME,A0.BIG_DECIMAL_HIGH_VALUE,A0.BIG_DECIMAL_LOW_VALUE,A0.DOUBLE_HIGH_VALUE,A0.DOUBLE_LOW_VALUE,A0.LAST_ANALYZED,A0.LONG_HIGH_VALUE,A0.LONG_LOW_VALUE,A0.MAX_COL_LEN,A0.NUM_DISTINCTS,A0.NUM_FALSES,A0.NUM_NULLS,A0.NUM_TRUES,A0.PARTITION_NAME,A0."TABLE_NAME",A0.CS_ID FROM PART_COL_STATS A0 WHERE A0.DB_NAME = ''"
 
[main] DEBUG DataNucleus.Connection - Connection found in the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@3eac8cfc [conn=com.jolbox.bonecp.ConnectionHandle@15c2791c, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@60343ed9 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@5df1779e]
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Executing "SELECT FROM org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics WHERE dbName == ''" ...
 
[main] DEBUG DataNucleus.Datastore - Closing PreparedStatement "com.jolbox.bonecp.PreparedStatementHandle@484e20a1"
 
[main] DEBUG DataNucleus.Datastore.Native - SELECT 'org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics' AS NUCLEUS_TYPE,A0.AVG_COL_LEN,A0."COLUMN_NAME",A0.COLUMN_TYPE,A0.DB_NAME,A0.BIG_DECIMAL_HIGH_VALUE,A0.BIG_DECIMAL_LOW_VALUE,A0.DOUBLE_HIGH_VALUE,A0.DOUBLE_LOW_VALUE,A0.LAST_ANALYZED,A0.LONG_HIGH_VALUE,A0.LONG_LOW_VALUE,A0.MAX_COL_LEN,A0.NUM_DISTINCTS,A0.NUM_FALSES,A0.NUM_NULLS,A0.NUM_TRUES,A0.PARTITION_NAME,A0."TABLE_NAME",A0.CS_ID FROM PART_COL_STATS A0 WHERE A0.DB_NAME = ''
 
[main] DEBUG DataNucleus.Datastore.Retrieve - Execution Time = 0 ms
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Execution Time = 11 ms
 
[main] DEBUG DataNucleus.Query - SQL Query : "select "DB_ID" from "DBS""
 
[main] DEBUG DataNucleus.Connection - Connection found in the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@3eac8cfc [conn=com.jolbox.bonecp.ConnectionHandle@15c2791c, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@60343ed9 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@5df1779e]
 
[main] DEBUG DataNucleus.Datastore - Closing PreparedStatement "com.jolbox.bonecp.PreparedStatementHandle@7f1c839d"
 
[main] DEBUG DataNucleus.Datastore.Native - select "DB_ID" from "DBS"
 
[main] DEBUG DataNucleus.Datastore.Retrieve - Execution Time = 0 ms
 
[main] DEBUG DataNucleus.Transaction - Transaction committing for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@60343ed9
 
[main] INFO DataNucleus.Query - Reading in results for query "org.datanucleus.store.rdbms.query.SQLQuery@0" since the connection used is closing
 
[main] DEBUG DataNucleus.Persistence - ExecutionContext.internalFlush() process started using ordered flush - 0 enlisted objects
 
[main] DEBUG DataNucleus.Persistence - ExecutionContext.internalFlush() process finished
 
[main] DEBUG DataNucleus.Persistence - Performing check of objects for "persistence-by-reachability" (commit) ...
 
[main] DEBUG DataNucleus.Persistence - Completed check of objects for "persistence-by-reachability" (commit).
 
[main] DEBUG DataNucleus.Transaction - Committing [DataNucleus Transaction, ID=Xid=, enlisted resources=[org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@129d286d]]
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@129d286d is committing for transaction Xid= with onePhase=true
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@129d286d committed connection for transaction Xid= with onePhase=true
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@15c2791c" closed
 
[main] DEBUG DataNucleus.Connection - Connection removed from the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@3eac8cfc [conn=com.jolbox.bonecp.ConnectionHandle@15c2791c, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@60343ed9 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@5df1779e]
 
[main] DEBUG DataNucleus.Transaction - Transaction committed in 2 ms
 
[main] INFO org.apache.hadoop.hive.metastore.ObjectStore - Initialized ObjectStore
 
[sparkDriver-akka.actor.default-dispatcher-3] DEBUG org.apache.spark.scheduler.cluster.SparkDeploySchedulerBackend - [actor] received message RetrieveSparkProps from Actor[akka.tcp://driverPropsFetcher@lix1.bh.com:56057/temp/$a]
 
[sparkDriver-akka.actor.default-dispatcher-3] DEBUG org.apache.spark.scheduler.cluster.SparkDeploySchedulerBackend - [actor] handled message (0.059 ms) RetrieveSparkProps from Actor[akka.tcp://driverPropsFetcher@lix1.bh.com:56057/temp/$a]
 
[main] DEBUG DataNucleus.Transaction - Transaction created [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
 
[main] DEBUG DataNucleus.Transaction - Transaction begun for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@60343ed9 (optimistic=false)
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Open transaction: count = 1, isActive = true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.getMSchemaVersion(ObjectStore.java:6344)
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compiling "SELECT FROM org.apache.hadoop.hive.metastore.model.MVersionTable"
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compile Time = 0 ms
 
[main] DEBUG DataNucleus.Query - QueryCompilation:
 
  [symbols: this type=org.apache.hadoop.hive.metastore.model.MVersionTable]
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compiling "SELECT FROM org.apache.hadoop.hive.metastore.model.MVersionTable" for datastore
 
[main] DEBUG DataNucleus.Persistence - Managing Persistence of Class : org.apache.hadoop.hive.metastore.model.MVersionTable [Table : VERSION, InheritanceStrategy : new-table]
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "VERSION.VER_ID" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Table VERSION will manage the persistence of the fields for class org.apache.hadoop.hive.metastore.model.MVersionTable (inheritance strategy="new-table") 
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "VERSION.SCHEMA_VERSION" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MVersionTable.schemaVersion] -> Column(s) [VERSION.SCHEMA_VERSION] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "VERSION.VERSION_COMMENT" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MVersionTable.versionComment] -> Column(s) [VERSION.VERSION_COMMENT] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Table/View VERSION has been initialised
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@1def2e93" opened with isolation level "serializable" and auto-commit=false
 
[main] DEBUG DataNucleus.Datastore.Schema - Schema Transaction started with connection "com.jolbox.bonecp.ConnectionHandle@1def2e93" with isolation "serializable"
 
[main] DEBUG DataNucleus.Datastore.Schema - Check of existence of VERSION returned table type of TABLE
 
[main] DEBUG DataNucleus.Datastore.Schema - Loading column info for table(s) "TBLS, PARTITION_KEY_VALS, SKEWED_STRING_LIST, SERDES, PARTITION_KEYS, PART_COL_STATS, TAB_COL_STATS, BUCKETING_COLS, CDS, PARTITIONS, SERDE_PARAMS, SKEWED_VALUES, COLUMNS_V2, DBS, SD_PARAMS, VERSION, SKEWED_COL_VALUE_LOC_MAP, SDS, SKEWED_COL_NAMES, SORT_COLS, TABLE_PARAMS, DATABASE_PARAMS, PARTITION_PARAMS, SKEWED_STRING_LIST_VALUES" in Catalog "", Schema ""
 
[main] DEBUG DataNucleus.Datastore.Schema - Column info loaded for Catalog "", Schema "", 24 tables, time = 30 ms
 
[main] DEBUG DataNucleus.Datastore.Schema - Column info retrieved for table "VERSION" : 3 columns found
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 1 index(es) for table VERSION
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 0 foreign key(s) for table VERSION
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 1 unique key(s) for table VERSION
 
[main] DEBUG DataNucleus.Datastore.Schema - Schema Transaction committing with connection "com.jolbox.bonecp.ConnectionHandle@1def2e93"
 
[main] DEBUG DataNucleus.Datastore.Schema - Schema Transaction closing with connection "com.jolbox.bonecp.ConnectionHandle@1def2e93"
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@1def2e93" non enlisted to a transaction is being committed.
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@1def2e93" closed
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compile Time for datastore = 37 ms
 
[main] DEBUG DataNucleus.Query - SELECT FROM org.apache.hadoop.hive.metastore.model.MVersionTable Query compiled to datastore query "SELECT 'org.apache.hadoop.hive.metastore.model.MVersionTable' AS NUCLEUS_TYPE,A0.SCHEMA_VERSION,A0.VERSION_COMMENT,A0.VER_ID FROM VERSION A0"
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@3bc4a464" opened with isolation level "read-committed" and auto-commit=false
 
[main] DEBUG DataNucleus.Transaction - Running enlist operation on resource: org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@6b04bfe3, error code TMNOFLAGS and transaction: [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@6b04bfe3 is starting for transaction Xid= with flags 0
 
[main] DEBUG DataNucleus.Connection - Connection added to the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@78c7c7e7 [conn=com.jolbox.bonecp.ConnectionHandle@3bc4a464, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@60343ed9 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@5df1779e]
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Executing "SELECT FROM org.apache.hadoop.hive.metastore.model.MVersionTable" ...
 
[main] DEBUG DataNucleus.Datastore - Closing PreparedStatement "com.jolbox.bonecp.PreparedStatementHandle@55173c5b"
 
[main] DEBUG DataNucleus.Datastore.Native - SELECT 'org.apache.hadoop.hive.metastore.model.MVersionTable' AS NUCLEUS_TYPE,A0.SCHEMA_VERSION,A0.VERSION_COMMENT,A0.VER_ID FROM VERSION A0
 
[main] DEBUG DataNucleus.Datastore.Retrieve - Execution Time = 0 ms
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Execution Time = 6 ms
 
[main] DEBUG DataNucleus.Persistence - Retrieved object with OID "1[OID]org.apache.hadoop.hive.metastore.model.MVersionTable"
 
[main] DEBUG DataNucleus.Cache - Object with id "1[OID]org.apache.hadoop.hive.metastore.model.MVersionTable" not found in Level 1 cache [cache size = 0]
 
[main] DEBUG DataNucleus.MetaData - Listener found initialisation for persistable class org.apache.hadoop.hive.metastore.model.MVersionTable
 
[main] DEBUG DataNucleus.Cache - Object "org.apache.hadoop.hive.metastore.model.MVersionTable@3934c382" (id="1[OID]org.apache.hadoop.hive.metastore.model.MVersionTable") added to Level 1 cache (loadedFlags="[NN]")
 
[main] DEBUG DataNucleus.Lifecycle - Object "org.apache.hadoop.hive.metastore.model.MVersionTable@3934c382" (id="1[OID]org.apache.hadoop.hive.metastore.model.MVersionTable") has a lifecycle change : "HOLLOW"->"P_CLEAN"
 
[main] DEBUG DataNucleus.Transaction - Object "org.apache.hadoop.hive.metastore.model.MVersionTable@3934c382" (id="1[OID]org.apache.hadoop.hive.metastore.model.MVersionTable") enlisted in transactional cache
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Commit transaction: count = 0, isactive true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.getMSchemaVersion(ObjectStore.java:6358)
 
[main] DEBUG DataNucleus.Transaction - Transaction committing for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@60343ed9
 
[main] DEBUG DataNucleus.Persistence - ExecutionContext.internalFlush() process started using ordered flush - 0 enlisted objects
 
[main] DEBUG DataNucleus.Persistence - ExecutionContext.internalFlush() process finished
 
[main] DEBUG DataNucleus.Persistence - Performing check of objects for "persistence-by-reachability" (commit) ...
 
[main] DEBUG DataNucleus.Persistence - Completed check of objects for "persistence-by-reachability" (commit).
 
[main] DEBUG DataNucleus.Transaction - Committing [DataNucleus Transaction, ID=Xid=, enlisted resources=[org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@6b04bfe3]]
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@6b04bfe3 is committing for transaction Xid= with onePhase=true
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@6b04bfe3 committed connection for transaction Xid= with onePhase=true
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@3bc4a464" closed
 
[main] DEBUG DataNucleus.Connection - Connection removed from the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@78c7c7e7 [conn=com.jolbox.bonecp.ConnectionHandle@3bc4a464, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@60343ed9 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@5df1779e]
 
[main] DEBUG DataNucleus.Persistence - Detaching object from persistence : "org.apache.hadoop.hive.metastore.model.MVersionTable@3934c382" (depth=0)
 
[main] DEBUG DataNucleus.Lifecycle - Object "org.apache.hadoop.hive.metastore.model.MVersionTable@3934c382" (id="1[OID]org.apache.hadoop.hive.metastore.model.MVersionTable") has a lifecycle change : "P_CLEAN"->"DETACHED_CLEAN"
 
[main] DEBUG DataNucleus.Transaction - Object "org.apache.hadoop.hive.metastore.model.MVersionTable@3934c382" (id="1[OID]org.apache.hadoop.hive.metastore.model.MVersionTable") being evicted from transactional cache
 
[main] DEBUG DataNucleus.Persistence - Disconnecting org.apache.hadoop.hive.metastore.model.MVersionTable@3934c382 from StateManager[pc=org.apache.hadoop.hive.metastore.model.MVersionTable@3934c382, lifecycle=DETACHED_CLEAN]
 
[main] DEBUG DataNucleus.Cache - Object with id="1[OID]org.apache.hadoop.hive.metastore.model.MVersionTable" being removed from Level 1 cache [current cache size = 1]
 
[main] DEBUG DataNucleus.Transaction - Transaction committed in 4 ms
 
[main] DEBUG DataNucleus.Transaction - Transaction created [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
 
[main] DEBUG DataNucleus.Transaction - Transaction begun for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@60343ed9 (optimistic=false)
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Open transaction: count = 1, isActive = true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.getDatabase(ObjectStore.java:497)
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Open transaction: count = 2, isActive = true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.getMDatabase(ObjectStore.java:473)
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compiling "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MDatabase WHERE name == dbname PARAMETERS java.lang.String dbname"
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compile Time = 0 ms
 
[main] DEBUG DataNucleus.Query - QueryCompilation:
 
  [filter:DyadicExpression{PrimaryExpression{name}  =  ParameterExpression{dbname}}]
 
  [symbols: dbname type=java.lang.String, this type=org.apache.hadoop.hive.metastore.model.MDatabase]
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compiling "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MDatabase WHERE name == dbname PARAMETERS java.lang.String dbname" for datastore
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compile Time for datastore = 1 ms
 
[main] DEBUG DataNucleus.Query - SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MDatabase WHERE name == dbname PARAMETERS java.lang.String dbname Query compiled to datastore query "SELECT 'org.apache.hadoop.hive.metastore.model.MDatabase' AS NUCLEUS_TYPE,A0."DESC",A0.DB_LOCATION_URI,A0."NAME",A0.OWNER_NAME,A0.OWNER_TYPE,A0.DB_ID FROM DBS A0 WHERE A0."NAME" = ?"
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@1f7f2b12" opened with isolation level "read-committed" and auto-commit=false
 
[main] DEBUG DataNucleus.Transaction - Running enlist operation on resource: org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@55e4bae5, error code TMNOFLAGS and transaction: [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@55e4bae5 is starting for transaction Xid= with flags 0
 
[main] DEBUG DataNucleus.Connection - Connection added to the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@45662ce7 [conn=com.jolbox.bonecp.ConnectionHandle@1f7f2b12, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@60343ed9 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@5df1779e]
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Executing "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MDatabase WHERE name == dbname PARAMETERS java.lang.String dbname" ...
 
[main] DEBUG DataNucleus.Datastore - Closing PreparedStatement "com.jolbox.bonecp.PreparedStatementHandle@2f9f728f"
 
[main] DEBUG DataNucleus.Datastore.Native - SELECT 'org.apache.hadoop.hive.metastore.model.MDatabase' AS NUCLEUS_TYPE,A0."DESC",A0.DB_LOCATION_URI,A0."NAME",A0.OWNER_NAME,A0.OWNER_TYPE,A0.DB_ID FROM DBS A0 WHERE A0."NAME" = <'default'>
 
[main] DEBUG DataNucleus.Datastore.Retrieve - Execution Time = 1 ms
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Execution Time = 8 ms
 
[main] DEBUG DataNucleus.Persistence - Retrieved object with OID "1[OID]org.apache.hadoop.hive.metastore.model.MDatabase"
 
[main] DEBUG DataNucleus.Cache - Object with id "1[OID]org.apache.hadoop.hive.metastore.model.MDatabase" not found in Level 1 cache [cache size = 0]
 
[main] DEBUG DataNucleus.MetaData - Listener found initialisation for persistable class org.apache.hadoop.hive.metastore.model.MDatabase
 
[main] DEBUG DataNucleus.Cache - Object "org.apache.hadoop.hive.metastore.model.MDatabase@324d7c63" (id="1[OID]org.apache.hadoop.hive.metastore.model.MDatabase") added to Level 1 cache (loadedFlags="[NNNNNN]")
 
[main] DEBUG DataNucleus.Persistence - Object "org.apache.hadoop.hive.metastore.model.MDatabase@324d7c63" field "parameters" is replaced by a SCO wrapper of type "org.datanucleus.store.types.backed.Map" [cache-values=true, lazy-loading=true, queued-operations=false, allow-nulls=true]
 
[main] DEBUG DataNucleus.Persistence - Object "org.apache.hadoop.hive.metastore.model.MDatabase@324d7c63" field "parameters" loading contents to SCO wrapper from the datastore
 
[main] DEBUG DataNucleus.Connection - Connection found in the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@45662ce7 [conn=com.jolbox.bonecp.ConnectionHandle@1f7f2b12, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@60343ed9 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@5df1779e]
 
[main] DEBUG DataNucleus.Datastore - Closing PreparedStatement "com.jolbox.bonecp.PreparedStatementHandle@147eecdc"
 
[main] DEBUG DataNucleus.Datastore.Native - SELECT A0.PARAM_KEY,A0.PARAM_VALUE FROM DATABASE_PARAMS A0 WHERE A0.DB_ID = <1> AND A0.PARAM_KEY IS NOT NULL
 
[main] DEBUG DataNucleus.Datastore.Retrieve - Execution Time = 0 ms
 
[main] DEBUG DataNucleus.Datastore - Closing PreparedStatement "org.datanucleus.store.rdbms.ParamLoggingPreparedStatement@564bbf76"
 
[main] DEBUG DataNucleus.Lifecycle - Object "org.apache.hadoop.hive.metastore.model.MDatabase@324d7c63" (id="1[OID]org.apache.hadoop.hive.metastore.model.MDatabase") has a lifecycle change : "HOLLOW"->"P_CLEAN"
 
[main] DEBUG DataNucleus.Transaction - Object "org.apache.hadoop.hive.metastore.model.MDatabase@324d7c63" (id="1[OID]org.apache.hadoop.hive.metastore.model.MDatabase") enlisted in transactional cache
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Commit transaction: count = 1, isactive true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.getMDatabase(ObjectStore.java:480)
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Commit transaction: count = 0, isactive true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.getDatabase(ObjectStore.java:499)
 
[main] DEBUG DataNucleus.Transaction - Transaction committing for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@60343ed9
 
[main] DEBUG DataNucleus.Persistence - ExecutionContext.internalFlush() process started using ordered flush - 0 enlisted objects
 
[main] DEBUG DataNucleus.Persistence - ExecutionContext.internalFlush() process finished
 
[main] DEBUG DataNucleus.Persistence - Performing check of objects for "persistence-by-reachability" (commit) ...
 
[main] DEBUG DataNucleus.Persistence - Completed check of objects for "persistence-by-reachability" (commit).
 
[main] DEBUG DataNucleus.Transaction - Committing [DataNucleus Transaction, ID=Xid=, enlisted resources=[org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@55e4bae5]]
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@55e4bae5 is committing for transaction Xid= with onePhase=true
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@55e4bae5 committed connection for transaction Xid= with onePhase=true
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@1f7f2b12" closed
 
[main] DEBUG DataNucleus.Connection - Connection removed from the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@45662ce7 [conn=com.jolbox.bonecp.ConnectionHandle@1f7f2b12, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@60343ed9 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@5df1779e]
 
[main] DEBUG DataNucleus.Persistence - Detaching object from persistence : "org.apache.hadoop.hive.metastore.model.MDatabase@324d7c63" (depth=0)
 
[main] DEBUG DataNucleus.Persistence - Object "org.apache.hadoop.hive.metastore.model.MDatabase@324d7c63" (id="1[OID]org.apache.hadoop.hive.metastore.model.MDatabase") is having the SCO wrapper in field "parameters" replaced by the unwrapped value
 
[main] DEBUG DataNucleus.Lifecycle - Object "org.apache.hadoop.hive.metastore.model.MDatabase@324d7c63" (id="1[OID]org.apache.hadoop.hive.metastore.model.MDatabase") has a lifecycle change : "P_CLEAN"->"DETACHED_CLEAN"
 
[main] DEBUG DataNucleus.Transaction - Object "org.apache.hadoop.hive.metastore.model.MDatabase@324d7c63" (id="1[OID]org.apache.hadoop.hive.metastore.model.MDatabase") being evicted from transactional cache
 
[main] DEBUG DataNucleus.Persistence - Disconnecting org.apache.hadoop.hive.metastore.model.MDatabase@324d7c63 from StateManager[pc=org.apache.hadoop.hive.metastore.model.MDatabase@324d7c63, lifecycle=DETACHED_CLEAN]
 
[main] DEBUG DataNucleus.Cache - Object with id="1[OID]org.apache.hadoop.hive.metastore.model.MDatabase" being removed from Level 1 cache [current cache size = 1]
 
[main] DEBUG DataNucleus.Transaction - Transaction committed in 1 ms
 
[main] DEBUG DataNucleus.Transaction - Transaction created [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
 
[main] DEBUG DataNucleus.Transaction - Transaction begun for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@60343ed9 (optimistic=false)
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Open transaction: count = 1, isActive = true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.addRole(ObjectStore.java:3017)
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Open transaction: count = 2, isActive = true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.getMRole(ObjectStore.java:3296)
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compiling "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MRole WHERE roleName == t1 PARAMETERS java.lang.String t1"
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compile Time = 0 ms
 
[main] DEBUG DataNucleus.Query - QueryCompilation:
 
  [filter:DyadicExpression{PrimaryExpression{roleName}  =  ParameterExpression{t1}}]
 
  [symbols: t1 type=java.lang.String, this type=org.apache.hadoop.hive.metastore.model.MRole]
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compiling "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MRole WHERE roleName == t1 PARAMETERS java.lang.String t1" for datastore
 
[sparkDriver-akka.actor.default-dispatcher-3] DEBUG org.apache.spark.scheduler.cluster.SparkDeploySchedulerBackend - [actor] received message Disassociated [akka.tcp://sparkDriver@mac.bh.com:55383] <- [akka.tcp://driverPropsFetcher@lix1.bh.com:56057] from Actor[akka://sparkDriver/deadLetters]
 
[main] DEBUG DataNucleus.Persistence - Managing Persistence of Class : org.apache.hadoop.hive.metastore.model.MRole [Table : ROLES, InheritanceStrategy : new-table]
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "ROLES.ROLE_ID" added to internal representation of table.
 
[sparkDriver-akka.actor.default-dispatcher-3] DEBUG org.apache.spark.scheduler.cluster.SparkDeploySchedulerBackend - [actor] handled message (0.342 ms) Disassociated [akka.tcp://sparkDriver@mac.bh.com:55383] <- [akka.tcp://driverPropsFetcher@lix1.bh.com:56057] from Actor[akka://sparkDriver/deadLetters]
 
[main] DEBUG DataNucleus.Datastore.Schema - Table ROLES will manage the persistence of the fields for class org.apache.hadoop.hive.metastore.model.MRole (inheritance strategy="new-table") 
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "ROLES.CREATE_TIME" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MRole.createTime] -> Column(s) [ROLES.CREATE_TIME] using mapping of type "org.datanucleus.store.rdbms.mapping.java.IntegerMapping" (org.datanucleus.store.rdbms.mapping.datastore.IntegerRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "ROLES.OWNER_NAME" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MRole.ownerName] -> Column(s) [ROLES.OWNER_NAME] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "ROLES.ROLE_NAME" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MRole.roleName] -> Column(s) [ROLES.ROLE_NAME] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Table/View ROLES has been initialised
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@88b48" opened with isolation level "serializable" and auto-commit=false
 
[main] DEBUG DataNucleus.Datastore.Schema - Schema Transaction started with connection "com.jolbox.bonecp.ConnectionHandle@88b48" with isolation "serializable"
 
[main] DEBUG DataNucleus.Datastore.Schema - Check of existence of ROLES returned table type of TABLE
 
[main] DEBUG DataNucleus.Datastore.Schema - Loading column info for table(s) "TBLS, PARTITION_KEY_VALS, SKEWED_STRING_LIST, ROLES, SERDES, PARTITION_KEYS, PART_COL_STATS, TAB_COL_STATS, CDS, BUCKETING_COLS, PARTITIONS, SERDE_PARAMS, SKEWED_VALUES, COLUMNS_V2, DBS, SD_PARAMS, VERSION, SKEWED_COL_VALUE_LOC_MAP, SDS, SKEWED_COL_NAMES, SORT_COLS, TABLE_PARAMS, DATABASE_PARAMS, PARTITION_PARAMS, SKEWED_STRING_LIST_VALUES" in Catalog "", Schema ""
 
[main] DEBUG DataNucleus.Datastore.Schema - Column info loaded for Catalog "", Schema "", 25 tables, time = 32 ms
 
[main] DEBUG DataNucleus.Datastore.Schema - Column info retrieved for table "ROLES" : 4 columns found
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 2 index(es) for table ROLES
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 0 foreign key(s) for table ROLES
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 2 unique key(s) for table ROLES
 
[main] DEBUG DataNucleus.Datastore.Schema - Schema Transaction committing with connection "com.jolbox.bonecp.ConnectionHandle@88b48"
 
[main] DEBUG DataNucleus.Datastore.Schema - Schema Transaction closing with connection "com.jolbox.bonecp.ConnectionHandle@88b48"
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@88b48" non enlisted to a transaction is being committed.
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@88b48" closed
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compile Time for datastore = 42 ms
 
[main] DEBUG DataNucleus.Query - SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MRole WHERE roleName == t1 PARAMETERS java.lang.String t1 Query compiled to datastore query "SELECT 'org.apache.hadoop.hive.metastore.model.MRole' AS NUCLEUS_TYPE,A0.CREATE_TIME,A0.OWNER_NAME,A0.ROLE_NAME,A0.ROLE_ID FROM ROLES A0 WHERE A0.ROLE_NAME = ?"
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@2cdfdba6" opened with isolation level "read-committed" and auto-commit=false
 
[main] DEBUG DataNucleus.Transaction - Running enlist operation on resource: org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@1ccd864e, error code TMNOFLAGS and transaction: [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@1ccd864e is starting for transaction Xid= with flags 0
 
[main] DEBUG DataNucleus.Connection - Connection added to the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@56e70117 [conn=com.jolbox.bonecp.ConnectionHandle@2cdfdba6, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@60343ed9 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@5df1779e]
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Executing "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MRole WHERE roleName == t1 PARAMETERS java.lang.String t1" ...
 
[main] DEBUG DataNucleus.Datastore - Closing PreparedStatement "com.jolbox.bonecp.PreparedStatementHandle@9632b8e"
 
[main] DEBUG DataNucleus.Datastore.Native - SELECT 'org.apache.hadoop.hive.metastore.model.MRole' AS NUCLEUS_TYPE,A0.CREATE_TIME,A0.OWNER_NAME,A0.ROLE_NAME,A0.ROLE_ID FROM ROLES A0 WHERE A0.ROLE_NAME = <'admin'>
 
[main] DEBUG DataNucleus.Datastore.Retrieve - Execution Time = 1 ms
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Execution Time = 7 ms
 
[main] DEBUG DataNucleus.Persistence - Retrieved object with OID "1[OID]org.apache.hadoop.hive.metastore.model.MRole"
 
[main] DEBUG DataNucleus.Cache - Object with id "1[OID]org.apache.hadoop.hive.metastore.model.MRole" not found in Level 1 cache [cache size = 0]
 
[main] DEBUG DataNucleus.MetaData - Listener found initialisation for persistable class org.apache.hadoop.hive.metastore.model.MRole
 
[main] DEBUG DataNucleus.Cache - Object "org.apache.hadoop.hive.metastore.model.MRole@182a483f" (id="1[OID]org.apache.hadoop.hive.metastore.model.MRole") added to Level 1 cache (loadedFlags="[NNN]")
 
[main] DEBUG DataNucleus.Lifecycle - Object "org.apache.hadoop.hive.metastore.model.MRole@182a483f" (id="1[OID]org.apache.hadoop.hive.metastore.model.MRole") has a lifecycle change : "HOLLOW"->"P_CLEAN"
 
[main] DEBUG DataNucleus.Transaction - Object "org.apache.hadoop.hive.metastore.model.MRole@182a483f" (id="1[OID]org.apache.hadoop.hive.metastore.model.MRole") enlisted in transactional cache
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Commit transaction: count = 1, isactive true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.getMRole(ObjectStore.java:3302)
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Rollback transaction, isActive: true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.addRole(ObjectStore.java:3030)
 
[main] DEBUG DataNucleus.Transaction - Transaction rolling back for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@60343ed9
 
[main] DEBUG DataNucleus.Lifecycle - Object "org.apache.hadoop.hive.metastore.model.MRole@182a483f" (id="1[OID]org.apache.hadoop.hive.metastore.model.MRole") has a lifecycle change : "P_CLEAN"->"HOLLOW"
 
[main] DEBUG DataNucleus.Transaction - Object "org.apache.hadoop.hive.metastore.model.MRole@182a483f" (id="1[OID]org.apache.hadoop.hive.metastore.model.MRole") being evicted from transactional cache
 
[main] DEBUG DataNucleus.Transaction - Rolling back [DataNucleus Transaction, ID=Xid=, enlisted resources=[org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@1ccd864e]]
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@1ccd864e is rolling back for transaction Xid=
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@1ccd864e rolled back connection for transaction Xid=
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@2cdfdba6" closed
 
[main] DEBUG DataNucleus.Connection - Connection removed from the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@56e70117 [conn=com.jolbox.bonecp.ConnectionHandle@2cdfdba6, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@60343ed9 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@5df1779e]
 
[main] DEBUG DataNucleus.Transaction - Transaction rolled back in 0 ms
 
[main] DEBUG DataNucleus.Cache - Object with id="1[OID]org.apache.hadoop.hive.metastore.model.MRole" being removed from Level 1 cache [current cache size = 1]
 
[main] DEBUG org.apache.hadoop.hive.metastore.HiveMetaStore - admin role already exists
 
InvalidObjectException(message:Role admin already exists.)
 
	at org.apache.hadoop.hive.metastore.ObjectStore.addRole(ObjectStore.java:3020)
 
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
 
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
 
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 
	at java.lang.reflect.Method.invoke(Method.java:606)
 
	at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:108)
 
	at com.sun.proxy.$Proxy8.addRole(Unknown Source)
 
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles(HiveMetaStore.java:544)
 
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:398)
 
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.<init>(HiveMetaStore.java:356)
 
	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:54)
 
	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:59)
 
	at org.apache.hadoop.hive.metastore.HiveMetaStore.newHMSHandler(HiveMetaStore.java:4944)
 
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:171)
 
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
 
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
 
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
 
	at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
 
	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1410)
 
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:62)
 
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:72)
 
	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:2453)
 
	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:2465)
 
	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:340)
 
	at org.apache.spark.sql.hive.HiveContext$$anonfun$4.apply(HiveContext.scala:235)
 
	at org.apache.spark.sql.hive.HiveContext$$anonfun$4.apply(HiveContext.scala:231)
 
	at scala.Option.orElse(Option.scala:257)
 
	at org.apache.spark.sql.hive.HiveContext.x$3$lzycompute(HiveContext.scala:231)
 
	at org.apache.spark.sql.hive.HiveContext.x$3(HiveContext.scala:229)
 
	at org.apache.spark.sql.hive.HiveContext.hiveconf$lzycompute(HiveContext.scala:229)
 
	at org.apache.spark.sql.hive.HiveContext.hiveconf(HiveContext.scala:229)
 
	at org.apache.spark.sql.hive.HiveMetastoreCatalog.<init>(HiveMetastoreCatalog.scala:54)
 
	at org.apache.spark.sql.hive.HiveContext$$anon$2.<init>(HiveContext.scala:253)
 
	at org.apache.spark.sql.hive.HiveContext.catalog$lzycompute(HiveContext.scala:253)
 
	at org.apache.spark.sql.hive.HiveContext.catalog(HiveContext.scala:253)
 
	at org.apache.spark.sql.hive.HiveContext$$anon$4.<init>(HiveContext.scala:263)
 
	at org.apache.spark.sql.hive.HiveContext.analyzer$lzycompute(HiveContext.scala:263)
 
	at org.apache.spark.sql.hive.HiveContext.analyzer(HiveContext.scala:262)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.analyzed$lzycompute(SQLContext.scala:411)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.analyzed(SQLContext.scala:411)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.withCachedData$lzycompute(SQLContext.scala:412)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.withCachedData(SQLContext.scala:412)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.optimizedPlan$lzycompute(SQLContext.scala:413)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.optimizedPlan(SQLContext.scala:413)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan$lzycompute(SQLContext.scala:418)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan(SQLContext.scala:416)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLContext.scala:422)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala:422)
 
	at org.apache.spark.sql.SchemaRDD.collect(SchemaRDD.scala:444)
 
	at org.apache.spark.sql.api.java.JavaSchemaRDD.collect(JavaSchemaRDD.scala:114)
 
	at com.blackhorse.SparkTest.main(SparkTest.java:27)
 
[main] INFO org.apache.hadoop.hive.metastore.HiveMetaStore - Added admin role in metastore
 
[main] DEBUG DataNucleus.Transaction - Transaction created [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
 
[main] DEBUG DataNucleus.Transaction - Transaction begun for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@60343ed9 (optimistic=false)
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Open transaction: count = 1, isActive = true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.addRole(ObjectStore.java:3017)
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Open transaction: count = 2, isActive = true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.getMRole(ObjectStore.java:3296)
 
[main] DEBUG DataNucleus.Query - Query "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MRole WHERE roleName == t1 PARAMETERS java.lang.String t1FetchPlan [default]" of language "JDOQL" has been run before so reusing existing generic compilation
 
[main] DEBUG DataNucleus.Query - Query "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MRole WHERE roleName == t1 PARAMETERS java.lang.String t1FetchPlan [default]" of language "JDOQL" for datastore "rdbms-derby" has been run before so reusing existing datastore compilation
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@37c7dd0c" opened with isolation level "read-committed" and auto-commit=false
 
[main] DEBUG DataNucleus.Transaction - Running enlist operation on resource: org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@22815b70, error code TMNOFLAGS and transaction: [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@22815b70 is starting for transaction Xid= with flags 0
 
[main] DEBUG DataNucleus.Connection - Connection added to the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@3ea86006 [conn=com.jolbox.bonecp.ConnectionHandle@37c7dd0c, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@60343ed9 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@5df1779e]
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Executing "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MRole WHERE roleName == t1 PARAMETERS java.lang.String t1" ...
 
[main] DEBUG DataNucleus.Datastore - Closing PreparedStatement "com.jolbox.bonecp.PreparedStatementHandle@20384a0d"
 
[main] DEBUG DataNucleus.Datastore.Native - SELECT 'org.apache.hadoop.hive.metastore.model.MRole' AS NUCLEUS_TYPE,A0.CREATE_TIME,A0.OWNER_NAME,A0.ROLE_NAME,A0.ROLE_ID FROM ROLES A0 WHERE A0.ROLE_NAME = <'public'>
 
[main] DEBUG DataNucleus.Datastore.Retrieve - Execution Time = 0 ms
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Execution Time = 0 ms
 
[main] DEBUG DataNucleus.Persistence - Retrieved object with OID "2[OID]org.apache.hadoop.hive.metastore.model.MRole"
 
[main] DEBUG DataNucleus.Cache - Object with id "2[OID]org.apache.hadoop.hive.metastore.model.MRole" not found in Level 1 cache [cache size = 0]
 
[main] DEBUG DataNucleus.Cache - Object "org.apache.hadoop.hive.metastore.model.MRole@50df9e70" (id="2[OID]org.apache.hadoop.hive.metastore.model.MRole") added to Level 1 cache (loadedFlags="[NNN]")
 
[main] DEBUG DataNucleus.Lifecycle - Object "org.apache.hadoop.hive.metastore.model.MRole@50df9e70" (id="2[OID]org.apache.hadoop.hive.metastore.model.MRole") has a lifecycle change : "HOLLOW"->"P_CLEAN"
 
[main] DEBUG DataNucleus.Transaction - Object "org.apache.hadoop.hive.metastore.model.MRole@50df9e70" (id="2[OID]org.apache.hadoop.hive.metastore.model.MRole") enlisted in transactional cache
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Commit transaction: count = 1, isactive true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.getMRole(ObjectStore.java:3302)
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Rollback transaction, isActive: true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.addRole(ObjectStore.java:3030)
 
[main] DEBUG DataNucleus.Transaction - Transaction rolling back for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@60343ed9
 
[main] DEBUG DataNucleus.Lifecycle - Object "org.apache.hadoop.hive.metastore.model.MRole@50df9e70" (id="2[OID]org.apache.hadoop.hive.metastore.model.MRole") has a lifecycle change : "P_CLEAN"->"HOLLOW"
 
[main] DEBUG DataNucleus.Transaction - Object "org.apache.hadoop.hive.metastore.model.MRole@50df9e70" (id="2[OID]org.apache.hadoop.hive.metastore.model.MRole") being evicted from transactional cache
 
[main] DEBUG DataNucleus.Transaction - Rolling back [DataNucleus Transaction, ID=Xid=, enlisted resources=[org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@22815b70]]
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@22815b70 is rolling back for transaction Xid=
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@22815b70 rolled back connection for transaction Xid=
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@37c7dd0c" closed
 
[main] DEBUG DataNucleus.Connection - Connection removed from the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@3ea86006 [conn=com.jolbox.bonecp.ConnectionHandle@37c7dd0c, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@60343ed9 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@5df1779e]
 
[main] DEBUG DataNucleus.Transaction - Transaction rolled back in 0 ms
 
[main] DEBUG DataNucleus.Cache - Object with id="2[OID]org.apache.hadoop.hive.metastore.model.MRole" being removed from Level 1 cache [current cache size = 1]
 
[main] DEBUG org.apache.hadoop.hive.metastore.HiveMetaStore - public role already exists
 
InvalidObjectException(message:Role public already exists.)
 
	at org.apache.hadoop.hive.metastore.ObjectStore.addRole(ObjectStore.java:3020)
 
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
 
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
 
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 
	at java.lang.reflect.Method.invoke(Method.java:606)
 
	at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:108)
 
	at com.sun.proxy.$Proxy8.addRole(Unknown Source)
 
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles(HiveMetaStore.java:553)
 
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:398)
 
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.<init>(HiveMetaStore.java:356)
 
	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:54)
 
	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:59)
 
	at org.apache.hadoop.hive.metastore.HiveMetaStore.newHMSHandler(HiveMetaStore.java:4944)
 
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:171)
 
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
 
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
 
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
 
	at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
 
	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1410)
 
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:62)
 
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:72)
 
	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:2453)
 
	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:2465)
 
	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:340)
 
	at org.apache.spark.sql.hive.HiveContext$$anonfun$4.apply(HiveContext.scala:235)
 
	at org.apache.spark.sql.hive.HiveContext$$anonfun$4.apply(HiveContext.scala:231)
 
	at scala.Option.orElse(Option.scala:257)
 
	at org.apache.spark.sql.hive.HiveContext.x$3$lzycompute(HiveContext.scala:231)
 
	at org.apache.spark.sql.hive.HiveContext.x$3(HiveContext.scala:229)
 
	at org.apache.spark.sql.hive.HiveContext.hiveconf$lzycompute(HiveContext.scala:229)
 
	at org.apache.spark.sql.hive.HiveContext.hiveconf(HiveContext.scala:229)
 
	at org.apache.spark.sql.hive.HiveMetastoreCatalog.<init>(HiveMetastoreCatalog.scala:54)
 
	at org.apache.spark.sql.hive.HiveContext$$anon$2.<init>(HiveContext.scala:253)
 
	at org.apache.spark.sql.hive.HiveContext.catalog$lzycompute(HiveContext.scala:253)
 
	at org.apache.spark.sql.hive.HiveContext.catalog(HiveContext.scala:253)
 
	at org.apache.spark.sql.hive.HiveContext$$anon$4.<init>(HiveContext.scala:263)
 
	at org.apache.spark.sql.hive.HiveContext.analyzer$lzycompute(HiveContext.scala:263)
 
	at org.apache.spark.sql.hive.HiveContext.analyzer(HiveContext.scala:262)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.analyzed$lzycompute(SQLContext.scala:411)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.analyzed(SQLContext.scala:411)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.withCachedData$lzycompute(SQLContext.scala:412)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.withCachedData(SQLContext.scala:412)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.optimizedPlan$lzycompute(SQLContext.scala:413)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.optimizedPlan(SQLContext.scala:413)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan$lzycompute(SQLContext.scala:418)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan(SQLContext.scala:416)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLContext.scala:422)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala:422)
 
	at org.apache.spark.sql.SchemaRDD.collect(SchemaRDD.scala:444)
 
	at org.apache.spark.sql.api.java.JavaSchemaRDD.collect(JavaSchemaRDD.scala:114)
 
	at com.blackhorse.SparkTest.main(SparkTest.java:27)
 
[main] INFO org.apache.hadoop.hive.metastore.HiveMetaStore - Added public role in metastore
 
[main] DEBUG DataNucleus.Transaction - Transaction created [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
 
[main] DEBUG DataNucleus.Transaction - Transaction begun for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@60343ed9 (optimistic=false)
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Open transaction: count = 1, isActive = true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.grantPrivileges(ObjectStore.java:3681)
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Open transaction: count = 2, isActive = true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.listPrincipalGlobalGrants(ObjectStore.java:4109)
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compiling "SELECT FROM org.apache.hadoop.hive.metastore.model.MGlobalPrivilege WHERE principalName == t1 && principalType == t2 PARAMETERS java.lang.String t1, java.lang.String t2"
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compile Time = 1 ms
 
[main] DEBUG DataNucleus.Query - QueryCompilation:
 
  [filter:DyadicExpression{DyadicExpression{PrimaryExpression{principalName}  =  ParameterExpression{t1}}  AND  DyadicExpression{PrimaryExpression{principalType}  =  ParameterExpression{t2}}}]
 
  [symbols: t2 type=java.lang.String, t1 type=java.lang.String, this type=org.apache.hadoop.hive.metastore.model.MGlobalPrivilege]
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compiling "SELECT FROM org.apache.hadoop.hive.metastore.model.MGlobalPrivilege WHERE principalName == t1 && principalType == t2 PARAMETERS java.lang.String t1, java.lang.String t2" for datastore
 
[main] DEBUG DataNucleus.Persistence - Managing Persistence of Class : org.apache.hadoop.hive.metastore.model.MGlobalPrivilege [Table : GLOBAL_PRIVS, InheritanceStrategy : new-table]
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "GLOBAL_PRIVS.USER_GRANT_ID" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Table GLOBAL_PRIVS will manage the persistence of the fields for class org.apache.hadoop.hive.metastore.model.MGlobalPrivilege (inheritance strategy="new-table") 
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "GLOBAL_PRIVS.CREATE_TIME" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MGlobalPrivilege.createTime] -> Column(s) [GLOBAL_PRIVS.CREATE_TIME] using mapping of type "org.datanucleus.store.rdbms.mapping.java.IntegerMapping" (org.datanucleus.store.rdbms.mapping.datastore.IntegerRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "GLOBAL_PRIVS.GRANT_OPTION" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MGlobalPrivilege.grantOption] -> Column(s) [GLOBAL_PRIVS.GRANT_OPTION] using mapping of type "org.datanucleus.store.rdbms.mapping.java.BooleanMapping" (org.datanucleus.store.rdbms.mapping.datastore.SmallIntRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "GLOBAL_PRIVS.GRANTOR" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MGlobalPrivilege.grantor] -> Column(s) [GLOBAL_PRIVS.GRANTOR] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "GLOBAL_PRIVS.GRANTOR_TYPE" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MGlobalPrivilege.grantorType] -> Column(s) [GLOBAL_PRIVS.GRANTOR_TYPE] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "GLOBAL_PRIVS.PRINCIPAL_NAME" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MGlobalPrivilege.principalName] -> Column(s) [GLOBAL_PRIVS.PRINCIPAL_NAME] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "GLOBAL_PRIVS.PRINCIPAL_TYPE" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MGlobalPrivilege.principalType] -> Column(s) [GLOBAL_PRIVS.PRINCIPAL_TYPE] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Column "GLOBAL_PRIVS.USER_PRIV" added to internal representation of table.
 
[main] DEBUG DataNucleus.Datastore.Schema - Field [org.apache.hadoop.hive.metastore.model.MGlobalPrivilege.privilege] -> Column(s) [GLOBAL_PRIVS.USER_PRIV] using mapping of type "org.datanucleus.store.rdbms.mapping.java.StringMapping" (org.datanucleus.store.rdbms.mapping.datastore.VarCharRDBMSMapping)
 
[main] DEBUG DataNucleus.Datastore.Schema - Table/View GLOBAL_PRIVS has been initialised
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@5b07c310" opened with isolation level "serializable" and auto-commit=false
 
[main] DEBUG DataNucleus.Datastore.Schema - Schema Transaction started with connection "com.jolbox.bonecp.ConnectionHandle@5b07c310" with isolation "serializable"
 
[main] DEBUG DataNucleus.Datastore.Schema - Check of existence of GLOBAL_PRIVS returned table type of TABLE
 
[main] DEBUG DataNucleus.Datastore.Schema - Loading column info for table(s) "TBLS, PARTITION_KEY_VALS, SKEWED_STRING_LIST, ROLES, SERDES, PARTITION_KEYS, PART_COL_STATS, TAB_COL_STATS, CDS, BUCKETING_COLS, PARTITIONS, SERDE_PARAMS, SKEWED_VALUES, COLUMNS_V2, DBS, SD_PARAMS, VERSION, SKEWED_COL_VALUE_LOC_MAP, SDS, SKEWED_COL_NAMES, SORT_COLS, TABLE_PARAMS, DATABASE_PARAMS, PARTITION_PARAMS, SKEWED_STRING_LIST_VALUES, GLOBAL_PRIVS" in Catalog "", Schema ""
 
[main] DEBUG DataNucleus.Datastore.Schema - Column info loaded for Catalog "", Schema "", 26 tables, time = 33 ms
 
[main] DEBUG DataNucleus.Datastore.Schema - Column info retrieved for table "GLOBAL_PRIVS" : 8 columns found
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 2 index(es) for table GLOBAL_PRIVS
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 0 foreign key(s) for table GLOBAL_PRIVS
 
[main] DEBUG DataNucleus.Datastore.Schema - Validating 2 unique key(s) for table GLOBAL_PRIVS
 
[main] DEBUG DataNucleus.Datastore.Schema - Schema Transaction committing with connection "com.jolbox.bonecp.ConnectionHandle@5b07c310"
 
[main] DEBUG DataNucleus.Datastore.Schema - Schema Transaction closing with connection "com.jolbox.bonecp.ConnectionHandle@5b07c310"
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@5b07c310" non enlisted to a transaction is being committed.
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@5b07c310" closed
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compile Time for datastore = 42 ms
 
[main] DEBUG DataNucleus.Query - SELECT FROM org.apache.hadoop.hive.metastore.model.MGlobalPrivilege WHERE principalName == t1 && principalType == t2 PARAMETERS java.lang.String t1, java.lang.String t2 Query compiled to datastore query "SELECT 'org.apache.hadoop.hive.metastore.model.MGlobalPrivilege' AS NUCLEUS_TYPE,A0.CREATE_TIME,A0.GRANT_OPTION,A0.GRANTOR,A0.GRANTOR_TYPE,A0.PRINCIPAL_NAME,A0.PRINCIPAL_TYPE,A0.USER_PRIV,A0.USER_GRANT_ID FROM GLOBAL_PRIVS A0 WHERE A0.PRINCIPAL_NAME = ? AND A0.PRINCIPAL_TYPE = ?"
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@32ab33ed" opened with isolation level "read-committed" and auto-commit=false
 
[main] DEBUG DataNucleus.Transaction - Running enlist operation on resource: org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@5de2698, error code TMNOFLAGS and transaction: [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@5de2698 is starting for transaction Xid= with flags 0
 
[main] DEBUG DataNucleus.Connection - Connection added to the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@1eb65c07 [conn=com.jolbox.bonecp.ConnectionHandle@32ab33ed, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@60343ed9 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@5df1779e]
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Executing "SELECT FROM org.apache.hadoop.hive.metastore.model.MGlobalPrivilege WHERE principalName == t1 && principalType == t2 PARAMETERS java.lang.String t1, java.lang.String t2" ...
 
[main] DEBUG DataNucleus.Datastore - Closing PreparedStatement "com.jolbox.bonecp.PreparedStatementHandle@617608e9"
 
[main] DEBUG DataNucleus.Datastore.Native - SELECT 'org.apache.hadoop.hive.metastore.model.MGlobalPrivilege' AS NUCLEUS_TYPE,A0.CREATE_TIME,A0.GRANT_OPTION,A0.GRANTOR,A0.GRANTOR_TYPE,A0.PRINCIPAL_NAME,A0.PRINCIPAL_TYPE,A0.USER_PRIV,A0.USER_GRANT_ID FROM GLOBAL_PRIVS A0 WHERE A0.PRINCIPAL_NAME = <'admin'> AND A0.PRINCIPAL_TYPE = <'ROLE'>
 
[main] DEBUG DataNucleus.Datastore.Retrieve - Execution Time = 0 ms
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Execution Time = 11 ms
 
[main] DEBUG DataNucleus.Persistence - Retrieved object with OID "1[OID]org.apache.hadoop.hive.metastore.model.MGlobalPrivilege"
 
[main] DEBUG DataNucleus.Cache - Object with id "1[OID]org.apache.hadoop.hive.metastore.model.MGlobalPrivilege" not found in Level 1 cache [cache size = 0]
 
[main] DEBUG DataNucleus.MetaData - Listener found initialisation for persistable class org.apache.hadoop.hive.metastore.model.MGlobalPrivilege
 
[main] DEBUG DataNucleus.Cache - Object "org.apache.hadoop.hive.metastore.model.MGlobalPrivilege@4938b86f" (id="1[OID]org.apache.hadoop.hive.metastore.model.MGlobalPrivilege") added to Level 1 cache (loadedFlags="[NNNNNNN]")
 
[main] DEBUG DataNucleus.Lifecycle - Object "org.apache.hadoop.hive.metastore.model.MGlobalPrivilege@4938b86f" (id="1[OID]org.apache.hadoop.hive.metastore.model.MGlobalPrivilege") has a lifecycle change : "HOLLOW"->"P_CLEAN"
 
[main] DEBUG DataNucleus.Transaction - Object "org.apache.hadoop.hive.metastore.model.MGlobalPrivilege@4938b86f" (id="1[OID]org.apache.hadoop.hive.metastore.model.MGlobalPrivilege") enlisted in transactional cache
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Commit transaction: count = 1, isactive true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.listPrincipalGlobalGrants(ObjectStore.java:4119)
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Rollback transaction, isActive: true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.grantPrivileges(ObjectStore.java:3880)
 
[main] DEBUG DataNucleus.Transaction - Transaction rolling back for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@60343ed9
 
[main] DEBUG DataNucleus.Lifecycle - Object "org.apache.hadoop.hive.metastore.model.MGlobalPrivilege@4938b86f" (id="1[OID]org.apache.hadoop.hive.metastore.model.MGlobalPrivilege") has a lifecycle change : "P_CLEAN"->"HOLLOW"
 
[main] DEBUG DataNucleus.Transaction - Object "org.apache.hadoop.hive.metastore.model.MGlobalPrivilege@4938b86f" (id="1[OID]org.apache.hadoop.hive.metastore.model.MGlobalPrivilege") being evicted from transactional cache
 
[main] DEBUG DataNucleus.Transaction - Rolling back [DataNucleus Transaction, ID=Xid=, enlisted resources=[org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@5de2698]]
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@5de2698 is rolling back for transaction Xid=
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@5de2698 rolled back connection for transaction Xid=
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@32ab33ed" closed
 
[main] DEBUG DataNucleus.Connection - Connection removed from the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@1eb65c07 [conn=com.jolbox.bonecp.ConnectionHandle@32ab33ed, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@60343ed9 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@5df1779e]
 
[main] DEBUG DataNucleus.Transaction - Transaction rolled back in 0 ms
 
[main] DEBUG DataNucleus.Cache - Object with id="1[OID]org.apache.hadoop.hive.metastore.model.MGlobalPrivilege" being removed from Level 1 cache [current cache size = 1]
 
[main] DEBUG org.apache.hadoop.hive.metastore.HiveMetaStore - Failed while granting global privs to admin
 
InvalidObjectException(message:All is already granted by admin)
 
	at org.apache.hadoop.hive.metastore.ObjectStore.grantPrivileges(ObjectStore.java:3713)
 
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
 
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
 
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 
	at java.lang.reflect.Method.invoke(Method.java:606)
 
	at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:108)
 
	at com.sun.proxy.$Proxy8.grantPrivileges(Unknown Source)
 
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles(HiveMetaStore.java:567)
 
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:398)
 
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.<init>(HiveMetaStore.java:356)
 
	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:54)
 
	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:59)
 
	at org.apache.hadoop.hive.metastore.HiveMetaStore.newHMSHandler(HiveMetaStore.java:4944)
 
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:171)
 
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
 
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
 
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
 
	at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
 
	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1410)
 
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:62)
 
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:72)
 
	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:2453)
 
	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:2465)
 
	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:340)
 
	at org.apache.spark.sql.hive.HiveContext$$anonfun$4.apply(HiveContext.scala:235)
 
	at org.apache.spark.sql.hive.HiveContext$$anonfun$4.apply(HiveContext.scala:231)
 
	at scala.Option.orElse(Option.scala:257)
 
	at org.apache.spark.sql.hive.HiveContext.x$3$lzycompute(HiveContext.scala:231)
 
	at org.apache.spark.sql.hive.HiveContext.x$3(HiveContext.scala:229)
 
	at org.apache.spark.sql.hive.HiveContext.hiveconf$lzycompute(HiveContext.scala:229)
 
	at org.apache.spark.sql.hive.HiveContext.hiveconf(HiveContext.scala:229)
 
	at org.apache.spark.sql.hive.HiveMetastoreCatalog.<init>(HiveMetastoreCatalog.scala:54)
 
	at org.apache.spark.sql.hive.HiveContext$$anon$2.<init>(HiveContext.scala:253)
 
	at org.apache.spark.sql.hive.HiveContext.catalog$lzycompute(HiveContext.scala:253)
 
	at org.apache.spark.sql.hive.HiveContext.catalog(HiveContext.scala:253)
 
	at org.apache.spark.sql.hive.HiveContext$$anon$4.<init>(HiveContext.scala:263)
 
	at org.apache.spark.sql.hive.HiveContext.analyzer$lzycompute(HiveContext.scala:263)
 
	at org.apache.spark.sql.hive.HiveContext.analyzer(HiveContext.scala:262)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.analyzed$lzycompute(SQLContext.scala:411)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.analyzed(SQLContext.scala:411)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.withCachedData$lzycompute(SQLContext.scala:412)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.withCachedData(SQLContext.scala:412)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.optimizedPlan$lzycompute(SQLContext.scala:413)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.optimizedPlan(SQLContext.scala:413)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan$lzycompute(SQLContext.scala:418)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan(SQLContext.scala:416)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLContext.scala:422)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala:422)
 
	at org.apache.spark.sql.SchemaRDD.collect(SchemaRDD.scala:444)
 
	at org.apache.spark.sql.api.java.JavaSchemaRDD.collect(JavaSchemaRDD.scala:114)
 
	at com.blackhorse.SparkTest.main(SparkTest.java:27)
 
[main] INFO org.apache.hadoop.hive.metastore.HiveMetaStore - No user is added in admin role, since config is empty
 
[sparkDriver-akka.actor.default-dispatcher-2] DEBUG org.apache.spark.scheduler.cluster.SparkDeploySchedulerBackend - [actor] received message RegisterExecutor(0,lix1.bh.com:38933,2) from Actor[akka.tcp://sparkExecutor@lix1.bh.com:38933/user/Executor#-636927255]
 
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.cluster.SparkDeploySchedulerBackend - Registered executor: Actor[akka.tcp://sparkExecutor@lix1.bh.com:38933/user/Executor#-636927255] with ID 0
 
[sparkDriver-akka.actor.default-dispatcher-2] DEBUG org.apache.spark.scheduler.cluster.SparkDeploySchedulerBackend - [actor] handled message (3.711 ms) RegisterExecutor(0,lix1.bh.com:38933,2) from Actor[akka.tcp://sparkExecutor@lix1.bh.com:38933/user/Executor#-636927255]
 
[main] DEBUG org.apache.hadoop.security.UserGroupInformation - hadoop login
 
[main] DEBUG org.apache.hadoop.security.UserGroupInformation - hadoop login commit
 
[main] DEBUG org.apache.hadoop.security.UserGroupInformation - using local user:UnixPrincipal: xiaobogu
 
[main] DEBUG org.apache.hadoop.security.UserGroupInformation - Using user: "UnixPrincipal: xiaobogu" with name xiaobogu
 
[main] DEBUG org.apache.hadoop.security.UserGroupInformation - User entry: "xiaobogu"
 
[main] DEBUG org.apache.hadoop.security.UserGroupInformation - UGI loginUser:xiaobogu (auth:SIMPLE)
 
[main] INFO org.apache.hadoop.hive.ql.session.SessionState - No Tez session required at this point. hive.execution.engine=mr.
 
[sparkDriver-akka.actor.default-dispatcher-2] DEBUG org.apache.spark.scheduler.cluster.SparkDeploySchedulerBackend - [actor] received message ReviveOffers from Actor[akka://sparkDriver/user/CoarseGrainedScheduler#-1099508446]
 
[sparkDriver-akka.actor.default-dispatcher-2] DEBUG org.apache.spark.scheduler.cluster.SparkDeploySchedulerBackend - [actor] handled message (0.22 ms) ReviveOffers from Actor[akka://sparkDriver/user/CoarseGrainedScheduler#-1099508446]
 
[sparkDriver-akka.actor.default-dispatcher-2] DEBUG org.apache.spark.storage.BlockManagerMasterActor - [actor] received message RegisterBlockManager(BlockManagerId(0, lix1.bh.com, 49382),278302556,Actor[akka.tcp://sparkExecutor@lix1.bh.com:38933/user/BlockManagerActor1#-938760042]) from Actor[akka.tcp://sparkExecutor@lix1.bh.com:38933/temp/$c]
 
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.storage.BlockManagerMasterActor - Registering block manager lix1.bh.com:49382 with 265.4 MB RAM, BlockManagerId(0, lix1.bh.com, 49382)
 
[sparkDriver-akka.actor.default-dispatcher-2] DEBUG org.apache.spark.storage.BlockManagerMasterActor - [actor] handled message (0.285 ms) RegisterBlockManager(BlockManagerId(0, lix1.bh.com, 49382),278302556,Actor[akka.tcp://sparkExecutor@lix1.bh.com:38933/user/BlockManagerActor1#-938760042]) from Actor[akka.tcp://sparkExecutor@lix1.bh.com:38933/temp/$c]
 
[main] INFO org.apache.hadoop.hive.metastore.HiveMetaStore - 0: get_table : db=default tbl=src
 
[main] INFO org.apache.hadoop.hive.metastore.HiveMetaStore.audit - ugi=xiaobogu	ip=unknown-ip-addr	cmd=get_table : db=default tbl=src	
 
[main] DEBUG DataNucleus.Transaction - Transaction created [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
 
[main] DEBUG DataNucleus.Transaction - Transaction begun for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@60343ed9 (optimistic=false)
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Open transaction: count = 1, isActive = true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.getTable(ObjectStore.java:832)
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Open transaction: count = 2, isActive = true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.getMTable(ObjectStore.java:895)
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compiling "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MTable WHERE tableName == table && database.name == db PARAMETERS java.lang.String table, java.lang.String db"
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compile Time = 0 ms
 
[main] DEBUG DataNucleus.Query - QueryCompilation:
 
  [filter:DyadicExpression{DyadicExpression{PrimaryExpression{tableName}  =  ParameterExpression{table}}  AND  DyadicExpression{PrimaryExpression{database.name}  =  ParameterExpression{db}}}]
 
  [symbols: db type=java.lang.String, table type=java.lang.String, this type=org.apache.hadoop.hive.metastore.model.MTable]
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compiling "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MTable WHERE tableName == table && database.name == db PARAMETERS java.lang.String table, java.lang.String db" for datastore
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Compile Time for datastore = 3 ms
 
[main] DEBUG DataNucleus.Query - SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MTable WHERE tableName == table && database.name == db PARAMETERS java.lang.String table, java.lang.String db Query compiled to datastore query "SELECT DISTINCT 'org.apache.hadoop.hive.metastore.model.MTable' AS NUCLEUS_TYPE,A0.CREATE_TIME,A0.LAST_ACCESS_TIME,A0.OWNER,A0.RETENTION,A0.TBL_NAME,A0.TBL_TYPE,A0.TBL_ID FROM TBLS A0 LEFT OUTER JOIN DBS B0 ON A0.DB_ID = B0.DB_ID WHERE A0.TBL_NAME = ? AND B0."NAME" = ?"
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@40302bbb" opened with isolation level "read-committed" and auto-commit=false
 
[main] DEBUG DataNucleus.Transaction - Running enlist operation on resource: org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@1a8722e9, error code TMNOFLAGS and transaction: [DataNucleus Transaction, ID=Xid=, enlisted resources=[]]
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@1a8722e9 is starting for transaction Xid= with flags 0
 
[main] DEBUG DataNucleus.Connection - Connection added to the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@5963f1db [conn=com.jolbox.bonecp.ConnectionHandle@40302bbb, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@60343ed9 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@5df1779e]
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Executing "SELECT UNIQUE FROM org.apache.hadoop.hive.metastore.model.MTable WHERE tableName == table && database.name == db PARAMETERS java.lang.String table, java.lang.String db" ...
 
[main] DEBUG DataNucleus.Datastore - Closing PreparedStatement "com.jolbox.bonecp.PreparedStatementHandle@4ff9b0f4"
 
[main] DEBUG DataNucleus.Datastore.Native - SELECT DISTINCT 'org.apache.hadoop.hive.metastore.model.MTable' AS NUCLEUS_TYPE,A0.CREATE_TIME,A0.LAST_ACCESS_TIME,A0.OWNER,A0.RETENTION,A0.TBL_NAME,A0.TBL_TYPE,A0.TBL_ID FROM TBLS A0 LEFT OUTER JOIN DBS B0 ON A0.DB_ID = B0.DB_ID WHERE A0.TBL_NAME = <'src'> AND B0."NAME" = <'default'>
 
[main] DEBUG DataNucleus.Datastore.Retrieve - Execution Time = 1 ms
 
[main] DEBUG DataNucleus.Query - JDOQL Query : Execution Time = 20 ms
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Commit transaction: count = 1, isactive true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.getMTable(ObjectStore.java:903)
 
[main] DEBUG org.apache.hadoop.hive.metastore.ObjectStore - Commit transaction: count = 0, isactive true at:
 
	org.apache.hadoop.hive.metastore.ObjectStore.getTable(ObjectStore.java:834)
 
[main] DEBUG DataNucleus.Transaction - Transaction committing for ExecutionContext org.datanucleus.ExecutionContextThreadedImpl@60343ed9
 
[main] DEBUG DataNucleus.Persistence - ExecutionContext.internalFlush() process started using ordered flush - 0 enlisted objects
 
[main] DEBUG DataNucleus.Persistence - ExecutionContext.internalFlush() process finished
 
[main] DEBUG DataNucleus.Persistence - Performing check of objects for "persistence-by-reachability" (commit) ...
 
[main] DEBUG DataNucleus.Persistence - Completed check of objects for "persistence-by-reachability" (commit).
 
[main] DEBUG DataNucleus.Transaction - Committing [DataNucleus Transaction, ID=Xid=, enlisted resources=[org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@1a8722e9]]
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@1a8722e9 is committing for transaction Xid= with onePhase=true
 
[main] DEBUG DataNucleus.Connection - Managed connection org.datanucleus.store.rdbms.ConnectionFactoryImpl$EmulatedXAResource@1a8722e9 committed connection for transaction Xid= with onePhase=true
 
[main] DEBUG DataNucleus.Connection - Connection "com.jolbox.bonecp.ConnectionHandle@40302bbb" closed
 
[main] DEBUG DataNucleus.Connection - Connection removed from the pool : org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl@5963f1db [conn=com.jolbox.bonecp.ConnectionHandle@40302bbb, commitOnRelease=false, closeOnRelease=false, closeOnTxnEnd=true] for key=org.datanucleus.ExecutionContextThreadedImpl@60343ed9 in factory=ConnectionFactory:tx[org.datanucleus.store.rdbms.ConnectionFactoryImpl@5df1779e]
 
[main] DEBUG DataNucleus.Transaction - Transaction committed in 0 ms
 
[main] ERROR hive.ql.metadata.Hive - NoSuchObjectException(message:default.src table not found)
 
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_table(HiveMetaStore.java:1560)
 
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
 
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
 
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 
	at java.lang.reflect.Method.invoke(Method.java:606)
 
	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:105)
 
	at com.sun.proxy.$Proxy9.get_table(Unknown Source)
 
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getTable(HiveMetaStoreClient.java:997)
 
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
 
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
 
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 
	at java.lang.reflect.Method.invoke(Method.java:606)
 
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:89)
 
	at com.sun.proxy.$Proxy10.getTable(Unknown Source)
 
	at org.apache.hadoop.hive.ql.metadata.Hive.getTable(Hive.java:976)
 
	at org.apache.hadoop.hive.ql.metadata.Hive.getTable(Hive.java:950)
 
	at org.apache.spark.sql.hive.HiveMetastoreCatalog.lookupRelation(HiveMetastoreCatalog.scala:70)
 
	at org.apache.spark.sql.hive.HiveContext$$anon$2.org$apache$spark$sql$catalyst$analysis$OverrideCatalog$$super$lookupRelation(HiveContext.scala:253)
 
	at org.apache.spark.sql.catalyst.analysis.OverrideCatalog$$anonfun$lookupRelation$3.apply(Catalog.scala:141)
 
	at org.apache.spark.sql.catalyst.analysis.OverrideCatalog$$anonfun$lookupRelation$3.apply(Catalog.scala:141)
 
	at scala.Option.getOrElse(Option.scala:120)
 
	at org.apache.spark.sql.catalyst.analysis.OverrideCatalog$class.lookupRelation(Catalog.scala:141)
 
	at org.apache.spark.sql.hive.HiveContext$$anon$2.lookupRelation(HiveContext.scala:253)
 
	at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$5.applyOrElse(Analyzer.scala:143)
 
	at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$5.applyOrElse(Analyzer.scala:138)
 
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:144)
 
	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:162)
 
	at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
 
	at scala.collection.Iterator$class.foreach(Iterator.scala:727)
 
	at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
 
	at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
 
	at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
 
	at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
 
	at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
 
	at scala.collection.AbstractIterator.to(Iterator.scala:1157)
 
	at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
 
	at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
 
	at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
 
	at scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
 
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildrenDown(TreeNode.scala:191)
 
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:147)
 
	at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:135)
 
	at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.apply(Analyzer.scala:138)
 
	at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.apply(Analyzer.scala:137)
 
	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$apply$1$$anonfun$apply$2.apply(RuleExecutor.scala:61)
 
	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$apply$1$$anonfun$apply$2.apply(RuleExecutor.scala:59)
 
	at scala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:111)
 
	at scala.collection.immutable.List.foldLeft(List.scala:84)
 
	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$apply$1.apply(RuleExecutor.scala:59)
 
	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$apply$1.apply(RuleExecutor.scala:51)
 
	at scala.collection.immutable.List.foreach(List.scala:318)
 
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.apply(RuleExecutor.scala:51)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.analyzed$lzycompute(SQLContext.scala:411)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.analyzed(SQLContext.scala:411)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.withCachedData$lzycompute(SQLContext.scala:412)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.withCachedData(SQLContext.scala:412)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.optimizedPlan$lzycompute(SQLContext.scala:413)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.optimizedPlan(SQLContext.scala:413)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan$lzycompute(SQLContext.scala:418)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan(SQLContext.scala:416)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLContext.scala:422)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala:422)
 
	at org.apache.spark.sql.SchemaRDD.collect(SchemaRDD.scala:444)
 
	at org.apache.spark.sql.api.java.JavaSchemaRDD.collect(JavaSchemaRDD.scala:114)
 
	at com.blackhorse.SparkTest.main(SparkTest.java:27)
 


 
Exception in thread "main" org.apache.hadoop.hive.ql.metadata.InvalidTableException: Table not found src
 
	at org.apache.hadoop.hive.ql.metadata.Hive.getTable(Hive.java:980)
 
	at org.apache.hadoop.hive.ql.metadata.Hive.getTable(Hive.java:950)
 
	at org.apache.spark.sql.hive.HiveMetastoreCatalog.lookupRelation(HiveMetastoreCatalog.scala:70)
 
	at org.apache.spark.sql.hive.HiveContext$$anon$2.org$apache$spark$sql$catalyst$analysis$OverrideCatalog$$super$lookupRelation(HiveContext.scala:253)
 
	at org.apache.spark.sql.catalyst.analysis.OverrideCatalog$$anonfun$lookupRelation$3.apply(Catalog.scala:141)
 
	at org.apache.spark.sql.catalyst.analysis.OverrideCatalog$$anonfun$lookupRelation$3.apply(Catalog.scala:141)
 
	at scala.Option.getOrElse(Option.scala:120)
 
	at org.apache.spark.sql.catalyst.analysis.OverrideCatalog$class.lookupRelation(Catalog.scala:141)
 
	at org.apache.spark.sql.hive.HiveContext$$anon$2.lookupRelation(HiveContext.scala:253)
 
	at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$5.applyOrElse(Analyzer.scala:143)
 
	at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$5.applyOrElse(Analyzer.scala:138)
 
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:144)
 
	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:162)
 
	at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
 
	at scala.collection.Iterator$class.foreach(Iterator.scala:727)
 
	at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
 
	at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
 
	at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
 
	at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
 
	at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
 
	at scala.collection.AbstractIterator.to(Iterator.scala:1157)
 
	at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
 
	at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
 
	at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
 
	at scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
 
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildrenDown(TreeNode.scala:191)
 
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:147)
 
	at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:135)
 
	at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.apply(Analyzer.scala:138)
 
	at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.apply(Analyzer.scala:137)
 
	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$apply$1$$anonfun$apply$2.apply(RuleExecutor.scala:61)
 
	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$apply$1$$anonfun$apply$2.apply(RuleExecutor.scala:59)
 
	at scala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:111)
 
	at scala.collection.immutable.List.foldLeft(List.scala:84)
 
	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$apply$1.apply(RuleExecutor.scala:59)
 
	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$apply$1.apply(RuleExecutor.scala:51)
 
	at scala.collection.immutable.List.foreach(List.scala:318)
 
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.apply(RuleExecutor.scala:51)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.analyzed$lzycompute(SQLContext.scala:411)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.analyzed(SQLContext.scala:411)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.withCachedData$lzycompute(SQLContext.scala:412)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.withCachedData(SQLContext.scala:412)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.optimizedPlan$lzycompute(SQLContext.scala:413)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.optimizedPlan(SQLContext.scala:413)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan$lzycompute(SQLContext.scala:418)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan(SQLContext.scala:416)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLContext.scala:422)
 
	at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala:422)
 
	at org.apache.spark.sql.SchemaRDD.collect(SchemaRDD.scala:444)
 
	at org.apache.spark.sql.api.java.JavaSchemaRDD.collect(JavaSchemaRDD.scala:114)
 
	at com.blackhorse.SparkTest.main(SparkTest.java:27)
 
[delete Spark temp dirs] DEBUG org.apache.spark.util.Utils - Shutdown hook called
 
[delete Spark local dirs] DEBUG org.apache.spark.storage.DiskBlockManager - Shutdown hook called



------------------ Original ------------------
From:  "guxiaobo1982";<guxiaobo1982@qq.com>;
Send time: Sunday, Jan 25, 2015 4:18 PM
To: "user@spark.apache.org"<user@spark.apache.org>; 

Subject:  Can't access remote Hive table from spark



Hi,
I built and started a single-node standalone Spark 1.2.0 cluster along with a single-node Hive 0.14.0 instance installed by Ambari 1.17.0. On the Spark and Hive node I can create and query tables inside Hive, and from remote machines I can submit the SparkPi example to the Spark master. But I failed to run the following example code:


 
public class SparkTest {
 
	public static void main(String[] args)
 
	{
 
		String appName= "This is a test application";
 
		String master="spark://lix1.bh.com:7077";
 
		
 
		SparkConf conf = new SparkConf().setAppName(appName).setMaster(master);
 
		JavaSparkContext sc = new JavaSparkContext(conf);
 
		
 
		JavaHiveContext sqlCtx = new org.apache.spark.sql.hive.api.java.JavaHiveContext(sc);
 
		//sqlCtx.sql("CREATE TABLE IF NOT EXISTS src (key INT, value STRING)");
 
		//sqlCtx.sql("LOAD DATA LOCAL INPATH '/opt/spark/examples/src/main/resources/kv1.txt' INTO TABLE src");
 
		// Queries are expressed in HiveQL.
 
List<Row> rows = sqlCtx.sql("FROM src SELECT key, value").collect();
 
System.out.print("I got " + rows.size() + " rows \r\n");
 
		sc.close();}
 
}




Exception in thread "main" org.apache.hadoop.hive.ql.metadata.InvalidTableException: Table not found src

	at org.apache.hadoop.hive.ql.metadata.Hive.getTable(Hive.java:980)

	at org.apache.hadoop.hive.ql.metadata.Hive.getTable(Hive.java:950)

	at org.apache.spark.sql.hive.HiveMetastoreCatalog.lookupRelation(HiveMetastoreCatalog.scala:70)

	at org.apache.spark.sql.hive.HiveContext$$anon$2.org$apache$spark$sql$catalyst$analysis$OverrideCatalog$$super$lookupRelation(HiveContext.scala:253)

	at org.apache.spark.sql.catalyst.analysis.OverrideCatalog$$anonfun$lookupRelation$3.apply(Catalog.scala:141)

	at org.apache.spark.sql.catalyst.analysis.OverrideCatalog$$anonfun$lookupRelation$3.apply(Catalog.scala:141)

	at scala.Option.getOrElse(Option.scala:120)

	at org.apache.spark.sql.catalyst.analysis.OverrideCatalog$class.lookupRelation(Catalog.scala:141)

	at org.apache.spark.sql.hive.HiveContext$$anon$2.lookupRelation(HiveContext.scala:253)

	at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$5.applyOrElse(Analyzer.scala:143)

	at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$5.applyOrElse(Analyzer.scala:138)

	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:144)

	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:162)

	at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)

	at scala.collection.Iterator$class.foreach(Iterator.scala:727)

	at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)

	at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)

	at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)

	at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)

	at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)

	at scala.collection.AbstractIterator.to(Iterator.scala:1157)

	at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)

	at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)

	at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)

	at scala.collection.AbstractIterator.toArray(Iterator.scala:1157)

	at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildrenDown(TreeNode.scala:191)

	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:147)

	at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:135)

	at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.apply(Analyzer.scala:138)

	at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.apply(Analyzer.scala:137)

	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$apply$1$$anonfun$apply$2.apply(RuleExecutor.scala:61)

	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$apply$1$$anonfun$apply$2.apply(RuleExecutor.scala:59)

	at scala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:111)

	at scala.collection.immutable.List.foldLeft(List.scala:84)

	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$apply$1.apply(RuleExecutor.scala:59)

	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$apply$1.apply(RuleExecutor.scala:51)

	at scala.collection.immutable.List.foreach(List.scala:318)

	at org.apache.spark.sql.catalyst.rules.RuleExecutor.apply(RuleExecutor.scala:51)

	at org.apache.spark.sql.SQLContext$QueryExecution.analyzed$lzycompute(SQLContext.scala:411)

	at org.apache.spark.sql.SQLContext$QueryExecution.analyzed(SQLContext.scala:411)

	at org.apache.spark.sql.SQLContext$QueryExecution.withCachedData$lzycompute(SQLContext.scala:412)

	at org.apache.spark.sql.SQLContext$QueryExecution.withCachedData(SQLContext.scala:412)

	at org.apache.spark.sql.SQLContext$QueryExecution.optimizedPlan$lzycompute(SQLContext.scala:413)

	at org.apache.spark.sql.SQLContext$QueryExecution.optimizedPlan(SQLContext.scala:413)

	at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan$lzycompute(SQLContext.scala:418)

	at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan(SQLContext.scala:416)

	at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLContext.scala:422)

	at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala:422)

	at org.apache.spark.sql.SchemaRDD.collect(SchemaRDD.scala:444)

	at org.apache.spark.sql.api.java.JavaSchemaRDD.collect(JavaSchemaRDD.scala:114)

	at com.blackhorse.SparkTest.main(SparkTest.java:27)

[delete Spark temp dirs] DEBUG org.apache.spark.util.Utils - Shutdown hook called

 

[delete Spark local dirs] DEBUG org.apache.spark.storage.DiskBlockManager - Shutdown hook called







But if I change the query to "show tables", the program runs and returns 0 rows even though I have many tables inside Hive, so I suspect that my program or the Spark instance did not connect to my Hive instance — maybe it started a local Hive instead. I have put the hive-site.xml file from the Hive installation into Spark's conf directory. Can you help figure out what's wrong here? Thanks.
Mime
View raw message