@RequiresInstanceClassLoading(cloneAncestorResources=true) public abstract class AbstractHadoopProcessor extends AbstractProcessor
Due to changes in SecurityUtil.loginKerberos(Configuration, String, String), which is used by this
class to authenticate a principal with Kerberos, Hadoop components no longer
attempt relogins explicitly. For more information, please read the documentation for
SecurityUtil.loginKerberos(Configuration, String, String).

| Modifier and Type | Class and Description |
|---|---|
| (package private) static class | AbstractHadoopProcessor.ExtendedConfiguration — Extending Hadoop Configuration to prevent it from caching classes that can't be found. |
| protected static class | AbstractHadoopProcessor.HdfsResources |
| protected static class | AbstractHadoopProcessor.ValidationResources |
| Modifier and Type | Field and Description |
|---|---|
static String |
ABSOLUTE_HDFS_PATH_ATTRIBUTE |
static PropertyDescriptor |
ADDITIONAL_CLASSPATH_RESOURCES |
private static String |
ALLOW_EXPLICIT_KEYTAB |
static PropertyDescriptor |
COMPRESSION_CODEC |
static PropertyDescriptor |
DIRECTORY |
private static AbstractHadoopProcessor.HdfsResources |
EMPTY_HDFS_RESOURCES |
static PropertyDescriptor |
HADOOP_CONFIGURATION_RESOURCES |
private AtomicReference<AbstractHadoopProcessor.HdfsResources> |
hdfsResources |
(package private) static PropertyDescriptor |
KERBEROS_CREDENTIALS_SERVICE |
static PropertyDescriptor |
KERBEROS_RELOGIN_PERIOD |
private File |
kerberosConfigFile |
protected KerberosProperties |
kerberosProperties |
protected List<PropertyDescriptor> |
properties |
private static Object |
RESOURCES_LOCK |
private AtomicReference<AbstractHadoopProcessor.ValidationResources> |
validationResourceHolder |
| Constructor and Description |
|---|
AbstractHadoopProcessor() |
| Modifier and Type | Method and Description |
|---|---|
void |
abstractOnScheduled(ProcessContext context)
If your subclass also has an @OnScheduled annotated method and you need hdfsResources in that method, then be sure to call super.abstractOnScheduled(context)
|
void |
abstractOnStopped() |
protected void |
checkHdfsUriForTimeout(org.apache.hadoop.conf.Configuration config) |
protected Collection<ValidationResult> |
customValidate(ValidationContext validationContext) |
protected org.apache.hadoop.io.compress.CompressionCodec |
getCompressionCodec(ProcessContext context,
org.apache.hadoop.conf.Configuration configuration)
Returns the configured CompressionCodec, or null if none is configured.
|
protected org.apache.hadoop.conf.Configuration |
getConfiguration() |
private static org.apache.hadoop.conf.Configuration |
getConfigurationFromResources(org.apache.hadoop.conf.Configuration config,
String configResources) |
protected org.apache.hadoop.fs.FileSystem |
getFileSystem() |
protected org.apache.hadoop.fs.FileSystem |
getFileSystem(org.apache.hadoop.conf.Configuration config)
This exists in order to allow unit tests to override it so that they don't take several minutes waiting for UDP packets to be received
|
protected org.apache.hadoop.fs.FileSystem |
getFileSystemAsUser(org.apache.hadoop.conf.Configuration config,
org.apache.hadoop.security.UserGroupInformation ugi) |
protected KerberosProperties |
getKerberosProperties(File kerberosConfigFile) |
static String |
getPathDifference(org.apache.hadoop.fs.Path root,
org.apache.hadoop.fs.Path child)
Returns the relative path of the child that does not include the filename or the root path.
|
protected List<PropertyDescriptor> |
getSupportedPropertyDescriptors() |
protected org.apache.hadoop.security.UserGroupInformation |
getUserGroupInformation() |
protected void |
init(ProcessorInitializationContext context) |
private void |
interruptStatisticsThread(org.apache.hadoop.fs.FileSystem fileSystem) |
(package private) boolean |
isAllowExplicitKeytab() |
protected void |
preProcessConfiguration(org.apache.hadoop.conf.Configuration config,
ProcessContext context)
This method will be called after the Configuration has been created, but before the FileSystem is created,
allowing sub-classes to take further action on the Configuration before creating the FileSystem.
|
(package private) AbstractHadoopProcessor.HdfsResources |
resetHDFSResources(String configResources,
ProcessContext context) |
Methods inherited from class AbstractProcessor: onTrigger, onTrigger

Methods inherited from class AbstractSessionFactoryProcessor: getControllerServiceLookup, getIdentifier, getLogger, getNodeTypeProvider, getRelationships, initialize, isConfigurationRestored, isScheduled, toString, updateConfiguredRestoredTrue, updateScheduledFalse, updateScheduledTrue

Methods inherited from class AbstractConfigurableComponent: equals, getPropertyDescriptor, getPropertyDescriptors, getSupportedDynamicPropertyDescriptor, hashCode, onPropertyModified, validate

Methods inherited from class java.lang.Object: clone, finalize, getClass, notify, notifyAll, wait, wait, wait

Methods inherited from interface ConfigurableComponent: getPropertyDescriptor, getPropertyDescriptors, onPropertyModified, validate

private static final String ALLOW_EXPLICIT_KEYTAB
public static final PropertyDescriptor HADOOP_CONFIGURATION_RESOURCES
public static final PropertyDescriptor DIRECTORY
public static final PropertyDescriptor COMPRESSION_CODEC
public static final PropertyDescriptor KERBEROS_RELOGIN_PERIOD
public static final PropertyDescriptor ADDITIONAL_CLASSPATH_RESOURCES
static final PropertyDescriptor KERBEROS_CREDENTIALS_SERVICE
public static final String ABSOLUTE_HDFS_PATH_ATTRIBUTE
private static final Object RESOURCES_LOCK
private static final AbstractHadoopProcessor.HdfsResources EMPTY_HDFS_RESOURCES
protected KerberosProperties kerberosProperties
protected List<PropertyDescriptor> properties
private volatile File kerberosConfigFile
private final AtomicReference<AbstractHadoopProcessor.HdfsResources> hdfsResources
private final AtomicReference<AbstractHadoopProcessor.ValidationResources> validationResourceHolder
protected void init(ProcessorInitializationContext context)
Overrides: init in class AbstractSessionFactoryProcessor

protected KerberosProperties getKerberosProperties(File kerberosConfigFile)
protected List<PropertyDescriptor> getSupportedPropertyDescriptors()
Overrides: getSupportedPropertyDescriptors in class AbstractConfigurableComponent

protected Collection<ValidationResult> customValidate(ValidationContext validationContext)
Overrides: customValidate in class AbstractConfigurableComponent

@OnScheduled public final void abstractOnScheduled(ProcessContext context) throws IOException
Throws: IOException

@OnStopped public final void abstractOnStopped()
private void interruptStatisticsThread(org.apache.hadoop.fs.FileSystem fileSystem)
throws NoSuchFieldException,
IllegalAccessException
private static org.apache.hadoop.conf.Configuration getConfigurationFromResources(org.apache.hadoop.conf.Configuration config,
String configResources)
throws IOException
Throws: IOException

AbstractHadoopProcessor.HdfsResources resetHDFSResources(String configResources, ProcessContext context) throws IOException
Throws: IOException

protected void preProcessConfiguration(org.apache.hadoop.conf.Configuration config,
ProcessContext context)
Parameters: config - the Configuration that will be used to create the FileSystem; context - the context that can be used to retrieve additional values

protected org.apache.hadoop.fs.FileSystem getFileSystem(org.apache.hadoop.conf.Configuration config)
throws IOException
Parameters: config - the configuration to use
Throws: IOException - if unable to create the FileSystem

protected org.apache.hadoop.fs.FileSystem getFileSystemAsUser(org.apache.hadoop.conf.Configuration config,
org.apache.hadoop.security.UserGroupInformation ugi)
throws IOException
Throws: IOException

protected void checkHdfsUriForTimeout(org.apache.hadoop.conf.Configuration config)
throws IOException
Throws: IOException

protected org.apache.hadoop.io.compress.CompressionCodec getCompressionCodec(ProcessContext context, org.apache.hadoop.conf.Configuration configuration)
Parameters: context - the ProcessContext; configuration - the Hadoop Configuration

public static String getPathDifference(org.apache.hadoop.fs.Path root, org.apache.hadoop.fs.Path child)
Parameters: root - the path to relativize from; child - the path to relativize

protected org.apache.hadoop.conf.Configuration getConfiguration()
protected org.apache.hadoop.fs.FileSystem getFileSystem()
protected org.apache.hadoop.security.UserGroupInformation getUserGroupInformation()
boolean isAllowExplicitKeytab()
Copyright © 2020 Apache NiFi Project. All rights reserved.