org.apache.hadoop.ipc.VersionedProtocol, org.apache.hadoop.mapreduce.protocol.ClientProtocol
@Private
@Unstable
public class LocalJobRunner
extends java.lang.Object
implements org.apache.hadoop.mapreduce.protocol.ClientProtocol
| Modifier and Type | Field | Description |
|---|---|---|
static java.lang.String |
INTERMEDIATE_DATA_ENCRYPTION_ALGO |
|
static java.lang.String |
LOCAL_MAX_MAPS |
The maximum number of map tasks to run in parallel in LocalJobRunner
|
static java.lang.String |
LOCAL_MAX_REDUCES |
The maximum number of reduce tasks to run in parallel in LocalJobRunner
|
static org.slf4j.Logger |
LOG |
|
| Constructor | Description |
|---|---|
LocalJobRunner(org.apache.hadoop.conf.Configuration conf) |
|
LocalJobRunner(org.apache.hadoop.mapred.JobConf conf) |
Deprecated.
|
| Modifier and Type | Method | Description |
|---|---|---|
void |
cancelDelegationToken(org.apache.hadoop.security.token.Token<org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier> token) |
|
org.apache.hadoop.mapreduce.TaskTrackerInfo[] |
getActiveTrackers() |
Get all active trackers in cluster.
|
org.apache.hadoop.mapreduce.JobStatus[] |
getAllJobs() |
|
org.apache.hadoop.mapreduce.TaskTrackerInfo[] |
getBlacklistedTrackers() |
Get all blacklisted trackers in cluster.
|
org.apache.hadoop.mapreduce.QueueInfo[] |
getChildQueues(java.lang.String queueName) |
|
org.apache.hadoop.mapreduce.ClusterMetrics |
getClusterMetrics() |
|
org.apache.hadoop.security.token.Token<org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier> |
getDelegationToken(org.apache.hadoop.io.Text renewer) |
|
java.lang.String |
getFilesystemName() |
|
org.apache.hadoop.mapreduce.Counters |
getJobCounters(org.apache.hadoop.mapreduce.JobID id) |
|
java.lang.String |
getJobHistoryDir() |
|
org.apache.hadoop.mapreduce.JobStatus |
getJobStatus(org.apache.hadoop.mapreduce.JobID id) |
|
org.apache.hadoop.mapreduce.Cluster.JobTrackerStatus |
getJobTrackerStatus() |
|
static int |
getLocalMaxRunningMaps(org.apache.hadoop.mapreduce.JobContext job) |
|
static int |
getLocalMaxRunningReduces(org.apache.hadoop.mapreduce.JobContext job) |
|
org.apache.hadoop.mapreduce.v2.LogParams |
getLogFileParams(org.apache.hadoop.mapreduce.JobID jobID,
org.apache.hadoop.mapreduce.TaskAttemptID taskAttemptID) |
|
org.apache.hadoop.mapreduce.JobID |
getNewJobID() |
|
org.apache.hadoop.ipc.ProtocolSignature |
getProtocolSignature(java.lang.String protocol,
long clientVersion,
int clientMethodsHash) |
|
long |
getProtocolVersion(java.lang.String protocol,
long clientVersion) |
|
org.apache.hadoop.mapreduce.QueueInfo |
getQueue(java.lang.String queue) |
|
org.apache.hadoop.mapreduce.QueueAclsInfo[] |
getQueueAclsForCurrentUser() |
|
org.apache.hadoop.security.authorize.AccessControlList |
getQueueAdmins(java.lang.String queueName) |
|
org.apache.hadoop.mapreduce.QueueInfo[] |
getQueues() |
|
org.apache.hadoop.mapreduce.QueueInfo[] |
getRootQueues() |
|
java.lang.String |
getStagingAreaDir() |
|
java.lang.String |
getSystemDir() |
|
org.apache.hadoop.mapreduce.TaskCompletionEvent[] |
getTaskCompletionEvents(org.apache.hadoop.mapreduce.JobID jobid,
int fromEventId,
int maxEvents) |
|
java.lang.String[] |
getTaskDiagnostics(org.apache.hadoop.mapreduce.TaskAttemptID taskid) |
Returns the diagnostic information for a particular task in the given job.
|
org.apache.hadoop.mapreduce.TaskReport[] |
getTaskReports(org.apache.hadoop.mapreduce.JobID id,
org.apache.hadoop.mapreduce.TaskType type) |
|
long |
getTaskTrackerExpiryInterval() |
|
void |
killJob(org.apache.hadoop.mapreduce.JobID id) |
|
boolean |
killTask(org.apache.hadoop.mapreduce.TaskAttemptID taskId,
boolean shouldFail) |
Throws UnsupportedOperationException. |
long |
renewDelegationToken(org.apache.hadoop.security.token.Token<org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier> token) |
|
void |
setJobPriority(org.apache.hadoop.mapreduce.JobID id,
java.lang.String jp) |
|
static void |
setLocalMaxRunningMaps(org.apache.hadoop.mapreduce.JobContext job,
int maxMaps) |
Set the max number of map tasks to run concurrently in the LocalJobRunner.
|
static void |
setLocalMaxRunningReduces(org.apache.hadoop.mapreduce.JobContext job,
int maxReduces) |
Set the max number of reduce tasks to run concurrently in the LocalJobRunner.
|
org.apache.hadoop.mapreduce.JobStatus |
submitJob(org.apache.hadoop.mapreduce.JobID jobid,
java.lang.String jobSubmitDir,
org.apache.hadoop.security.Credentials credentials) |
public static final org.slf4j.Logger LOG
public static final java.lang.String LOCAL_MAX_MAPS
public static final java.lang.String LOCAL_MAX_REDUCES
public static final java.lang.String INTERMEDIATE_DATA_ENCRYPTION_ALGO
public LocalJobRunner(org.apache.hadoop.conf.Configuration conf)
               throws java.io.IOException
Throws: java.io.IOException

@Deprecated
public LocalJobRunner(org.apache.hadoop.mapred.JobConf conf)
               throws java.io.IOException
Throws: java.io.IOException

public long getProtocolVersion(java.lang.String protocol,
                               long clientVersion)
Specified by: getProtocolVersion in interface org.apache.hadoop.ipc.VersionedProtocol

public org.apache.hadoop.ipc.ProtocolSignature getProtocolSignature(java.lang.String protocol,
                                                                    long clientVersion,
                                                                    int clientMethodsHash)
                                                             throws java.io.IOException
Specified by: getProtocolSignature in interface org.apache.hadoop.ipc.VersionedProtocol
Throws: java.io.IOException

public org.apache.hadoop.mapreduce.JobID getNewJobID()
Specified by: getNewJobID in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol

public org.apache.hadoop.mapreduce.JobStatus submitJob(org.apache.hadoop.mapreduce.JobID jobid,
                                                       java.lang.String jobSubmitDir,
                                                       org.apache.hadoop.security.Credentials credentials)
                                                throws java.io.IOException
Specified by: submitJob in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException

public void killJob(org.apache.hadoop.mapreduce.JobID id)
Specified by: killJob in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol

public void setJobPriority(org.apache.hadoop.mapreduce.JobID id,
                           java.lang.String jp)
                    throws java.io.IOException
Specified by: setJobPriority in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException

public boolean killTask(org.apache.hadoop.mapreduce.TaskAttemptID taskId,
                        boolean shouldFail)
                 throws java.io.IOException
Throws UnsupportedOperationException.
Specified by: killTask in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException

public org.apache.hadoop.mapreduce.TaskReport[] getTaskReports(org.apache.hadoop.mapreduce.JobID id,
                                                               org.apache.hadoop.mapreduce.TaskType type)
Specified by: getTaskReports in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol

public org.apache.hadoop.mapreduce.JobStatus getJobStatus(org.apache.hadoop.mapreduce.JobID id)
Specified by: getJobStatus in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol

public org.apache.hadoop.mapreduce.Counters getJobCounters(org.apache.hadoop.mapreduce.JobID id)
Specified by: getJobCounters in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol

public java.lang.String getFilesystemName()
                                   throws java.io.IOException
Specified by: getFilesystemName in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException

public org.apache.hadoop.mapreduce.ClusterMetrics getClusterMetrics()
Specified by: getClusterMetrics in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol

public org.apache.hadoop.mapreduce.Cluster.JobTrackerStatus getJobTrackerStatus()
Specified by: getJobTrackerStatus in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol

public long getTaskTrackerExpiryInterval()
                                  throws java.io.IOException,
                                         java.lang.InterruptedException
Specified by: getTaskTrackerExpiryInterval in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException, java.lang.InterruptedException

public org.apache.hadoop.mapreduce.TaskTrackerInfo[] getActiveTrackers()
                                                                throws java.io.IOException,
                                                                       java.lang.InterruptedException
Specified by: getActiveTrackers in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException, java.lang.InterruptedException

public org.apache.hadoop.mapreduce.TaskTrackerInfo[] getBlacklistedTrackers()
                                                                     throws java.io.IOException,
                                                                            java.lang.InterruptedException
Specified by: getBlacklistedTrackers in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException, java.lang.InterruptedException

public org.apache.hadoop.mapreduce.TaskCompletionEvent[] getTaskCompletionEvents(org.apache.hadoop.mapreduce.JobID jobid,
                                                                                 int fromEventId,
                                                                                 int maxEvents)
                                                                          throws java.io.IOException
Specified by: getTaskCompletionEvents in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException

public org.apache.hadoop.mapreduce.JobStatus[] getAllJobs()
Specified by: getAllJobs in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol

public java.lang.String[] getTaskDiagnostics(org.apache.hadoop.mapreduce.TaskAttemptID taskid)
                                      throws java.io.IOException
Specified by: getTaskDiagnostics in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException

public java.lang.String getSystemDir()
Specified by: getSystemDir in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
See Also: ClientProtocol.getSystemDir()

public org.apache.hadoop.security.authorize.AccessControlList getQueueAdmins(java.lang.String queueName)
                                                                      throws java.io.IOException
Specified by: getQueueAdmins in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException
See Also: ClientProtocol.getQueueAdmins(String)

public java.lang.String getStagingAreaDir()
                                   throws java.io.IOException
Specified by: getStagingAreaDir in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException
See Also: ClientProtocol.getStagingAreaDir()

public java.lang.String getJobHistoryDir()
Specified by: getJobHistoryDir in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol

public org.apache.hadoop.mapreduce.QueueInfo[] getChildQueues(java.lang.String queueName)
                                                       throws java.io.IOException
Specified by: getChildQueues in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException

public org.apache.hadoop.mapreduce.QueueInfo[] getRootQueues()
                                                      throws java.io.IOException
Specified by: getRootQueues in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException

public org.apache.hadoop.mapreduce.QueueInfo[] getQueues()
                                                  throws java.io.IOException
Specified by: getQueues in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException

public org.apache.hadoop.mapreduce.QueueInfo getQueue(java.lang.String queue)
                                               throws java.io.IOException
Specified by: getQueue in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException

public org.apache.hadoop.mapreduce.QueueAclsInfo[] getQueueAclsForCurrentUser()
                                                                       throws java.io.IOException
Specified by: getQueueAclsForCurrentUser in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException

public static void setLocalMaxRunningMaps(org.apache.hadoop.mapreduce.JobContext job,
                                          int maxMaps)
Parameters: job - the job to configure; maxMaps - the maximum number of map tasks to allow.

public static int getLocalMaxRunningMaps(org.apache.hadoop.mapreduce.JobContext job)

public static void setLocalMaxRunningReduces(org.apache.hadoop.mapreduce.JobContext job,
                                             int maxReduces)
Parameters: job - the job to configure; maxReduces - the maximum number of reduce tasks to allow.

public static int getLocalMaxRunningReduces(org.apache.hadoop.mapreduce.JobContext job)

public void cancelDelegationToken(org.apache.hadoop.security.token.Token<org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier> token)
                           throws java.io.IOException,
                                  java.lang.InterruptedException
Specified by: cancelDelegationToken in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException, java.lang.InterruptedException

public org.apache.hadoop.security.token.Token<org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier> getDelegationToken(org.apache.hadoop.io.Text renewer)
                                                                                                                                           throws java.io.IOException,
                                                                                                                                                  java.lang.InterruptedException
Specified by: getDelegationToken in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException, java.lang.InterruptedException

public long renewDelegationToken(org.apache.hadoop.security.token.Token<org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier> token)
                          throws java.io.IOException,
                                 java.lang.InterruptedException
Specified by: renewDelegationToken in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException, java.lang.InterruptedException

public org.apache.hadoop.mapreduce.v2.LogParams getLogFileParams(org.apache.hadoop.mapreduce.JobID jobID,
                                                                 org.apache.hadoop.mapreduce.TaskAttemptID taskAttemptID)
                                                          throws java.io.IOException,
                                                                 java.lang.InterruptedException
Specified by: getLogFileParams in interface org.apache.hadoop.mapreduce.protocol.ClientProtocol
Throws: java.io.IOException, java.lang.InterruptedException

Copyright © 2008–2025 Apache Software Foundation. All rights reserved.