文章目录 一、hive查看当前环境配置命令

在一次hive数据库执行命令
set ngmr.exec.mode=cluster 时，想看一下
ngmr.exec.mode 参数原先的值是什么，所以写一下本篇博文，讲一下怎么查看hive中的参数。

一、hive查看当前环境配置命令
set 可以查看所有参数；set -v 可得到所有环境变量（如果没有 -v 参数，只显示与 hadoop 不同的配置）；set 具体参数 返回具体参数对应的值。

案例一：set 具体参数
# 返回mapreduce.map.memory.mb参数的值
set mapreduce.map.memory.mb;

案例二：set 查询所有变量
set;

案例三：set -v 查询所有环境变量
hive> set -v;
silent=off
fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem
datanucleus.validateColumns=false
mapred.task.cache.levels=2
hadoop.tmp.dir=/home/hexianghui/datahadoop
hadoop.native.lib=true
map.sort.class=org.apache.hadoop.util.QuickSort
ipc.client.idlethreshold=4000
mapred.system.dir=${hadoop.tmp.dir}/mapred/system
hive.script.serde=org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
mapred.job.tracker.persist.jobstatus.hours=0
io.skip.checksum.errors=false
fs.default.name=hdfs://192.168.0.4:9000
mapred.child.tmp=./tmp
datanucleus.cache.level2=false
mapred.skip.reduce.max.skip.groups=0
mapred.jobtracker.instrumentation=org.apache.hadoop.mapred.JobTrackerMetricsInst
mapred.tasktracker.dns.nameserver=default
io.sort.factor=10
hive.metastore.rawstore.impl=org.apache.hadoop.hive.metastore.ObjectStore
hive.metastore.local=true
mapred.task.timeout=600000
mapred.max.tracker.failures=4
hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory
fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem
mapred.queue.default.acl-administer-jobs=*
mapred.queue.default.acl-submit-job=*
mapred.skip.map.auto.incr.proc.count=true
io.mapfile.bloom.size=1048576
tasktracker.http.threads=40
mapred.job.shuffle.merge.percent=0.66
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem
io.bytes.per.checksum=512
mapred.output.compress=false
hive.test.mode.prefix=test_
hive.test.mode=false
hive.exec.compress.intermediate=false
topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping
datanucleus.cache.level2.type=SOFT
mapred.reduce.slowstart.completed.maps=0.05
mapred.reduce.max.attempts=4
fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem
javax.jdo.option.ConnectionUserName=APP
mapred.skip.map.max.skip.records=0
hive.merge.mapfiles=true
hive.merge.size.smallfiles.avgsize=16000000
hive.test.mode.samplefreq=32
hive.optimize.skewjoin=false
mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo
fs.s3.buffer.dir=${hadoop.tmp.dir}/s3
hive.map.aggr.hash.min.reduction=0.5
job.end.retry.attempts=0
fs.file.impl=org.apache.hadoop.fs.LocalFileSystem
mapred.local.dir.minspacestart=0
hive.exec.compress.output=false
mapred.output.compression.type=RECORD
hive.script.recordreader=org.apache.hadoop.hive.ql.exec.TextRecordReader
topology.script.number.args=100
io.mapfile.bloom.error.rate=0.005
hive.exec.parallel.thread.number=8
mapred.max.tracker.blacklists=4
mapred.task.profile.maps=0-2
mapred.userlog.retain.hours=24
datanucleus.storeManagerType=rdbms
mapred.job.tracker.persist.jobstatus.active=false
hive.script.operator.id.env.var=HIVE_SCRIPT_OPERATOR_ID
hadoop.security.authorization=false
local.cache.size=10737418240
mapred.min.split.size=0
mapred.map.tasks=2
mapred.child.java.opts=-Xmx200m
hive.skewjoin.mapjoin.min.split=33554432
hive.metastore.warehouse.dir=/user/hive/warehouse
mapred.job.queue.name=default
hive.mapjoin.bucket.cache.size=100
datanucleus.transactionIsolation=read-committed
ipc.server.listen.queue.size=128
mapred.inmem.merge.threshold=1000
job.end.retry.interval=30000
mapred.skip.attempts.to.start.skipping=2
fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary
mapred.reduce.tasks=-1
mapred.merge.recordsBeforeProgress=10000
mapred.userlog.limit.kb=0
hive.skewjoin.key=100000
javax.jdo.option.ConnectionDriverName=org.apache.derby.jdbc.EmbeddedDriver
webinterface.private.actions=false
mapred.job.shuffle.input.buffer.percent=0.70
io.sort.spill.percent=0.80
hive.udtf.auto.progress=false
hive.session.id=hexianghui_201002232043
mapred.map.tasks.speculative.execution=true
hadoop.util.hash.type=murmur
hive.exec.script.maxerrsize=100000
hive.optimize.groupby=true
mapred.map.max.attempts=4
hive.default.fileformat=TextFile
hive.exec.scratchdir=/tmp/hive-${user.name}
mapred.job.tracker.handler.count=10
hive.script.recordwriter=org.apache.hadoop.hive.ql.exec.TextRecordWriter
hive.join.emit.interval=1000
datanucleus.validateConstraints=false
mapred.tasktracker.expiry.interval=600000
mapred.jobtracker.maxtasks.per.job=-1
mapred.jobtracker.job.history.block.size=3145728
keep.failed.task.files=false
ipc.client.tcpnodelay=false
mapred.task.profile.reduces=0-2
mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec
io.map.index.skip=0
ipc.server.tcpnodelay=false
hive.join.cache.size=25000
datanucleus.autoStartMechanismMode=checked
hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat
hadoop.logfile.size=10000000
mapred.reduce.tasks.speculative.execution=true
hive.skewjoin.mapjoin.map.tasks=10000
hive.hwi.listen.port=9999
fs.checkpoint.period=3600
mapred.job.reuse.jvm.num.tasks=1
mapred.jobtracker.completeuserjobs.maximum=100
hive.groupby.mapaggr.checkinterval=100000
fs.s3.maxRetries=4
javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=metastore_db;create=true
hive.mapred.mode=nonstrict
hive.groupby.skewindata=false
hive.exec.parallel=false
mapred.local.dir=${hadoop.tmp.dir}/mapred/local
fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem
fs.s3.sleepTimeSeconds=10
fs.trash.interval=0
mapred.submit.replication=10
hive.merge.size.per.task=256000000
fs.har.impl=org.apache.hadoop.fs.HarFileSystem
mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec
hive.exec.reducers.max=999
mapred.tasktracker.dns.interface=default
mapred.job.tracker=192.168.0.4:9001
io.seqfile.sorter.recordlimit=1000000
hive.optimize.ppd=true
mapred.line.input.format.linespermap=1
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler
mapred.tasktracker.instrumentation=org.apache.hadoop.mapred.TaskTrackerMetricsInst
hive.mapjoin.cache.numrows=25000
hive.merge.mapredfiles=false
hive.metastore.connect.retries=5
hive.fileformat.check=true
mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill=5000
javax.jdo.option.DetachAllOnCommit=true
mapred.local.dir.minspacekill=0
hive.optimize.pruner=true
javax.jdo.option.ConnectionPassword=mine
hive.hwi.listen.host=0.0.0.0
io.sort.record.percent=0.05
hive.map.aggr.hash.percentmemory=0.5
fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem
mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp
mapred.tasktracker.reduce.tasks.maximum=2
javax.jdo.PersistenceManagerFactoryClass=org.datanucleus.jdo.JDOPersistenceManagerFactory
hive.mapred.local.mem=0
fs.checkpoint.edits.dir=${fs.checkpoint.dir}
mapred.job.reduce.input.buffer.percent=0.0
datanucleus.validateTables=false
mapred.tasktracker.indexcache.mb=10
hadoop.logfile.count=10
mapred.skip.reduce.auto.incr.proc.count=true
hive.script.auto.progress=false
io.seqfile.compress.blocksize=1000000
fs.s3.block.size=67108864
mapred.tasktracker.taskmemorymanager.monitoring-interval=5000
datanucleus.autoCreateSchema=true
mapred.acls.enabled=false
mapred.queue.names=default
fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem
hive.map.aggr=true
hive.enforce.bucketing=false
mapred.task.tracker.http.address=0.0.0.0:50060
mapred.reduce.parallel.copies=5
io.seqfile.lazydecompress=true
hive.exec.script.allow.partial.consumption=false
io.sort.mb=100
ipc.client.connection.maxidletime=10000
mapred.task.tracker.report.address=127.0.0.1:0
mapred.compress.map.output=false
hive.mapred.reduce.tasks.speculative.execution=true
ipc.client.kill.max=10
ipc.client.connect.max.retries=10
hive.heartbeat.interval=1000
fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem
hive.mapjoin.maxsize=100000
mapred.job.tracker.http.address=0.0.0.0:50030
io.file.buffer.size=4096
mapred.jobtracker.restart.recover=false
io.serializations=org.apache.hadoop.io.serializer.WritableSerialization
hive.optimize.cp=true
javax.jdo.option.NonTransactionalRead=true
hive.exec.reducers.bytes.per.reducer=1000000000
mapred.reduce.copy.backoff=300
mapred.task.profile=false
jobclient.output.filter=FAILED
mapred.tasktracker.map.tasks.maximum=2
io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec
fs.checkpoint.size=67108864