Example usage of the Java class org.apache.hadoop.fs.http.server.HttpFSServerWebApp
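The snippets below, drawn from several open-source Hadoop forks, show the two main ways HttpFSServerWebApp is used: binding delegation tokens to the server's authority, and standing up an HttpFS server for tests.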

Project: hadoop-plus    File: DelegationTokenManagerService.java   (hadoop-TCP and hardfs ship byte-identical copies of this method)
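The method builds a DelegationTokenIdentifier for the calling UGI (defaulting the renewer to the short user name, and recording the real user when the UGI is proxied), then binds the resulting token to this HttpFS server by setting its service to HttpFSServerWebApp.get().getAuthority().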
/**
 * Creates a delegation token.
 *
 * @param ugi UGI creating the token.
 * @param renewer token renewer.
 * @return new delegation token.
 * @throws DelegationTokenManagerException thrown if the token could not be
 * created.
 */
@Override
public Token<DelegationTokenIdentifier> createToken(UserGroupInformation ugi,
                                                    String renewer)
  throws DelegationTokenManagerException {
  renewer = (renewer == null) ? ugi.getShortUserName() : renewer;
  String user = ugi.getUserName();
  Text owner = new Text(user);
  Text realUser = null;
  if (ugi.getRealUser() != null) {
    realUser = new Text(ugi.getRealUser().getUserName());
  }
  DelegationTokenIdentifier tokenIdentifier =
    new DelegationTokenIdentifier(owner, new Text(renewer), realUser);
  Token<DelegationTokenIdentifier> token =
    new Token<DelegationTokenIdentifier>(tokenIdentifier, secretManager);
  try {
    SecurityUtil.setTokenService(token,
                                 HttpFSServerWebApp.get().getAuthority());
  } catch (ServerException ex) {
    throw new DelegationTokenManagerException(
      DelegationTokenManagerException.ERROR.DT04, ex.toString(), ex);
  }
  return token;
}
Project: hops    File: DelegationTokenManagerService.java
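The hops variant is functionally identical to the one above; it differs only in formatting and in using the diamond operator when constructing the Token.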
/**
 * Creates a delegation token.
 *
 * @param ugi UGI creating the token.
 * @param renewer token renewer.
 * @return new delegation token.
 * @throws DelegationTokenManagerException thrown if the token could not be
 * created.
 */
@Override
public Token<DelegationTokenIdentifier> createToken(UserGroupInformation ugi,
    String renewer) throws DelegationTokenManagerException {
  renewer = (renewer == null) ? ugi.getShortUserName() : renewer;
  String user = ugi.getUserName();
  Text owner = new Text(user);
  Text realUser = null;
  if (ugi.getRealUser() != null) {
    realUser = new Text(ugi.getRealUser().getUserName());
  }
  DelegationTokenIdentifier tokenIdentifier =
      new DelegationTokenIdentifier(owner, new Text(renewer), realUser);
  Token<DelegationTokenIdentifier> token =
      new Token<>(tokenIdentifier, secretManager);
  try {
    SecurityUtil
        .setTokenService(token, HttpFSServerWebApp.get().getAuthority());
  } catch (ServerException ex) {
    throw new DelegationTokenManagerException(
        DelegationTokenManagerException.ERROR.DT04, ex.toString(), ex);
  }
  return token;
}
Project: hadoop-on-lustre2    File: DelegationTokenManagerService.java
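init() reads the key-update, max-lifetime, and renew intervals from the service configuration, selects the SWebHdfs or WebHdfs token kind depending on whether SSL is enabled on the web app, and starts the DelegationTokenSecretManager threads.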
/**
 * Initializes the service.
 *
 * @throws ServiceException thrown if the service could not be initialized.
 */
@Override
protected void init() throws ServiceException {

  long updateInterval = getServiceConfig().getLong(UPDATE_INTERVAL, DAY);
  long maxLifetime = getServiceConfig().getLong(MAX_LIFETIME, 7 * DAY);
  long renewInterval = getServiceConfig().getLong(RENEW_INTERVAL, DAY);
  tokenKind = (HttpFSServerWebApp.get().isSslEnabled())
              ? SWebHdfsFileSystem.TOKEN_KIND : WebHdfsFileSystem.TOKEN_KIND;
  secretManager = new DelegationTokenSecretManager(tokenKind, updateInterval,
                                                   maxLifetime,
                                                   renewInterval, HOUR);
  try {
    secretManager.startThreads();
  } catch (IOException ex) {
    throw new ServiceException(ServiceException.ERROR.S12,
                               DelegationTokenManager.class.getSimpleName(),
                               ex.toString(), ex);
  }
}
Project: hadoop-on-lustre2    File: DelegationTokenManagerService.java
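This fork's createToken() additionally passes the tokenKind chosen in init() into the DelegationTokenIdentifier; otherwise it matches the variants above.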
/**
 * Creates a delegation token.
 *
 * @param ugi UGI creating the token.
 * @param renewer token renewer.
 * @return new delegation token.
 * @throws DelegationTokenManagerException thrown if the token could not be
 * created.
 */
@Override
public Token<DelegationTokenIdentifier> createToken(UserGroupInformation ugi,
                                                    String renewer)
  throws DelegationTokenManagerException {
  renewer = (renewer == null) ? ugi.getShortUserName() : renewer;
  String user = ugi.getUserName();
  Text owner = new Text(user);
  Text realUser = null;
  if (ugi.getRealUser() != null) {
    realUser = new Text(ugi.getRealUser().getUserName());
  }
  DelegationTokenIdentifier tokenIdentifier =
    new DelegationTokenIdentifier(tokenKind, owner, new Text(renewer), realUser);
  Token<DelegationTokenIdentifier> token =
    new Token<DelegationTokenIdentifier>(tokenIdentifier, secretManager);
  try {
    SecurityUtil.setTokenService(token,
                                 HttpFSServerWebApp.get().getAuthority());
  } catch (ServerException ex) {
    throw new DelegationTokenManagerException(
      DelegationTokenManagerException.ERROR.DT04, ex.toString(), ex);
  }
  return token;
}
Project: hadoop-on-lustre2    File: TestDelegationTokenManagerService.java
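The test wires a minimal service stack (instrumentation, scheduler, file-system access, and the token manager itself) into an HttpFSServerWebApp, initializes it, and asserts that a DelegationTokenManager can be looked up from the server.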
@Test
@TestDir
public void service() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  Configuration conf = new Configuration(false);
  conf.set("httpfs.services", StringUtils.join(",",
    Arrays.asList(InstrumentationService.class.getName(),
        SchedulerService.class.getName(),
        FileSystemAccessService.class.getName(),
        DelegationTokenManagerService.class.getName())));
  Server server = new HttpFSServerWebApp(dir, dir, dir, dir, conf);
  server.init();
  DelegationTokenManager tm = server.get(DelegationTokenManager.class);
  Assert.assertNotNull(tm);
  server.destroy();
}
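For orientation, a minimal sketch of how the manager retrieved above might be exercised. The import paths follow the HttpFS lib.service package layout used by these forks, and the "yarn" renewer name is illustrative; treat both as assumptions rather than part of the test.

import org.apache.hadoop.lib.service.DelegationTokenIdentifier;
import org.apache.hadoop.lib.service.DelegationTokenManager;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;

public class DelegationTokenSketch {
  // Assumes a DelegationTokenManager obtained from an initialized
  // HttpFSServerWebApp, as in the test above.
  static void demo(DelegationTokenManager tm) throws Exception {
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    // "yarn" is an illustrative renewer name (hypothetical).
    Token<DelegationTokenIdentifier> token = tm.createToken(ugi, "yarn");
    // The service field was bound to the HttpFS authority by createToken().
    System.out.println("token service: " + token.getService());
  }
}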
Project: hadoop-plus    File: BaseTestHttpFSWith.java   (hadoop-TCP, hardfs, and hadoop-on-lustre2 ship byte-identical copies; the hops version differs only in line wrapping)
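createHttpFSServer() lays out an HttpFS home directory (conf, log, temp) and registers it for the current thread via HttpFSServerWebApp.setHomeDirForCurrentThread(), writes a signature secret file plus hdfs-site.xml and httpfs-site.xml (proxy-user and secret settings), and finally deploys the webapp resource at /webhdfs on the shared test Jetty server.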
private void createHttpFSServer() throws Exception {
  File homeDir = TestDirHelper.getTestDir();
  Assert.assertTrue(new File(homeDir, "conf").mkdir());
  Assert.assertTrue(new File(homeDir, "log").mkdir());
  Assert.assertTrue(new File(homeDir, "temp").mkdir());
  HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());

  File secretFile = new File(new File(homeDir, "conf"), "secret");
  Writer w = new FileWriter(secretFile);
  w.write("secret");
  w.close();

  //FileSystem being served by HttpFS
  String fsDefaultName = getProxiedFSURI();
  Configuration conf = new Configuration(false);
  conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsDefaultName);
  File hdfsSite = new File(new File(homeDir, "conf"), "hdfs-site.xml");
  OutputStream os = new FileOutputStream(hdfsSite);
  conf.writeXml(os);
  os.close();

  //HTTPFS configuration
  conf = new Configuration(false);
  conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".groups",
           HadoopUsersConfTestHelper.getHadoopProxyUserGroups());
  conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
           HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
  conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
  File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
  os = new FileOutputStream(httpfsSite);
  conf.writeXml(os);
  os.close();

  ClassLoader cl = Thread.currentThread().getContextClassLoader();
  URL url = cl.getResource("webapp");
  WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs");
  Server server = TestJettyHelper.getJettyServer();
  server.addHandler(context);
  server.start();
}
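Once the Jetty server is started, clients reach the proxied file system through the webhdfs scheme against the test server's address. A minimal sketch, assuming the webapp listens on localhost:14000 (illustrative; these tests actually resolve the host and port from the shared TestJettyHelper instance):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HttpFSClientSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // localhost:14000 is an assumed address; the tests derive the real
    // one from the embedded Jetty server.
    FileSystem fs = FileSystem.get(new URI("webhdfs://localhost:14000"), conf);
    System.out.println(fs.getFileStatus(new Path("/")).isDirectory());
    fs.close();
  }
}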
Project: aliyun-oss-hadoop-fs    File: BaseTestHttpFSWith.java   (big-c and hadoop-2.6.0-cdh5.4.3 ship byte-identical copies)
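This variant of the fixture additionally enables ACL and extended-attribute support in the generated hdfs-site.xml; the rest of the setup is unchanged.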
private void createHttpFSServer() throws Exception {
  File homeDir = TestDirHelper.getTestDir();
  Assert.assertTrue(new File(homeDir, "conf").mkdir());
  Assert.assertTrue(new File(homeDir, "log").mkdir());
  Assert.assertTrue(new File(homeDir, "temp").mkdir());
  HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());

  File secretFile = new File(new File(homeDir, "conf"), "secret");
  Writer w = new FileWriter(secretFile);
  w.write("secret");
  w.close();

  //FileSystem being served by HttpFS
  String fsDefaultName = getProxiedFSURI();
  Configuration conf = new Configuration(false);
  conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsDefaultName);
  conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, true);
  conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_XATTRS_ENABLED_KEY, true);
  File hdfsSite = new File(new File(homeDir, "conf"), "hdfs-site.xml");
  OutputStream os = new FileOutputStream(hdfsSite);
  conf.writeXml(os);
  os.close();

  //HTTPFS configuration
  conf = new Configuration(false);
  conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".groups",
           HadoopUsersConfTestHelper.getHadoopProxyUserGroups());
  conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
           HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
  conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
  File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
  os = new FileOutputStream(httpfsSite);
  conf.writeXml(os);
  os.close();

  ClassLoader cl = Thread.currentThread().getContextClassLoader();
  URL url = cl.getResource("webapp");
  WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs");
  Server server = TestJettyHelper.getJettyServer();
  server.addHandler(context);
  server.start();
}