/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.web;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;

import org.apache.hadoop.hdfs.web.resources.DelegationParam;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;

/**
 * Subclass of {@link AuthenticationFilter} that
 * obtains Hadoop-Auth configuration for WebHDFS.
 */
public class AuthFilter extends AuthenticationFilter {
  private static final String CONF_PREFIX = "dfs.web.authentication.";

  /**
   * Returns the filter configuration properties,
   * including the ones prefixed with {@link #CONF_PREFIX}.
   * The prefix is removed from the returned property names.
   *
   * @param prefix parameter not used; {@link #CONF_PREFIX} is used instead.
   * @param config filter configuration containing the initialization parameters.
   * @return Hadoop-Auth configuration properties.
   * @throws ServletException if the configuration properties cannot be obtained.
   */
  @Override
  protected Properties getConfiguration(String prefix, FilterConfig config)
      throws ServletException {
    final Properties p = super.getConfiguration(CONF_PREFIX, config);
    // Set the authentication type: Kerberos when security is enabled,
    // pseudo/simple authentication otherwise.
    p.setProperty(AUTH_TYPE, UserGroupInformation.isSecurityEnabled()
        ? KerberosAuthenticationHandler.TYPE
        : PseudoAuthenticationHandler.TYPE);
    // For pseudo authentication, allow anonymous access.
    p.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "true");
    // Set the cookie path so the authentication cookie applies to all URLs.
    p.setProperty(COOKIE_PATH, "/");
    return p;
  }
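
  /*
   * Illustrative sketch (not used by the filter itself): how the prefix
   * stripping above plays out. Assuming the filter init-params (or
   * hdfs-site.xml) carry, for example,
   *
   *   dfs.web.authentication.kerberos.principal = HTTP/_HOST@EXAMPLE.COM
   *   dfs.web.authentication.kerberos.keytab    = /etc/security/http.keytab
   *
   * the Properties returned by getConfiguration() contain the same values
   * under the prefix-free keys "kerberos.principal" and "kerberos.keytab",
   * plus the entries set programmatically above (AUTH_TYPE,
   * ANONYMOUS_ALLOWED and COOKIE_PATH). The principal and keytab values are
   * hypothetical placeholders.
   */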

  @Override
  public void doFilter(ServletRequest request, ServletResponse response,
      FilterChain filterChain) throws IOException, ServletException {
    final HttpServletRequest httpRequest = toLowerCase((HttpServletRequest)request);
    final String tokenString = httpRequest.getParameter(DelegationParam.NAME);
    if (tokenString != null) {
      // A delegation token is present in the URL; it will be used for
      // authentication, so bypass Kerberos/SPNEGO authentication here.
      filterChain.doFilter(httpRequest, response);
      return;
    }
    super.doFilter(httpRequest, response, filterChain);
  }
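
  /*
   * Illustrative sketch: a WebHDFS call that already carries a delegation
   * token, e.g.
   *
   *   GET /webhdfs/v1/user/alice/data.txt?op=OPEN&delegation=<token>
   *
   * skips SPNEGO/pseudo authentication in doFilter() above, while the same
   * call without the "delegation" parameter falls through to
   * AuthenticationFilter.doFilter(). The path and user name are hypothetical.
   */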

  /**
   * Wraps the request so that all parameter names are exposed in lower case.
   * The original request is returned unchanged if none of the parameter
   * names contain upper-case characters.
   */
  private static HttpServletRequest toLowerCase(final HttpServletRequest request) {
    @SuppressWarnings("unchecked")
    final Map<String, String[]> original = (Map<String, String[]>)request.getParameterMap();
    if (!ParamFilter.containsUpperCase(original.keySet())) {
      return request;
    }

    // Re-index all parameter values under lower-cased names.
    final Map<String, List<String>> m = new HashMap<String, List<String>>();
    for (Map.Entry<String, String[]> entry : original.entrySet()) {
      final String key = entry.getKey().toLowerCase();
      List<String> strings = m.get(key);
      if (strings == null) {
        strings = new ArrayList<String>();
        m.put(key, strings);
      }
      for (String v : entry.getValue()) {
        strings.add(v);
      }
    }

    return new HttpServletRequestWrapper(request) {
      private Map<String, String[]> parameters = null;

      @Override
      public Map<String, String[]> getParameterMap() {
        // Build the String[] view lazily from the lower-cased map.
        if (parameters == null) {
          parameters = new HashMap<String, String[]>();
          for (Map.Entry<String, List<String>> entry : m.entrySet()) {
            final List<String> a = entry.getValue();
            parameters.put(entry.getKey(), a.toArray(new String[a.size()]));
          }
        }
        return parameters;
      }

      @Override
      public String getParameter(String name) {
        final List<String> a = m.get(name);
        return a == null ? null : a.get(0);
      }

      @Override
      public String[] getParameterValues(String name) {
        return getParameterMap().get(name);
      }

      @Override
      public Enumeration<String> getParameterNames() {
        final Iterator<String> i = m.keySet().iterator();
        return new Enumeration<String>() {
          @Override
          public boolean hasMoreElements() {
            return i.hasNext();
          }
          @Override
          public String nextElement() {
            return i.next();
          }
        };
      }
    };
  }
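
  /*
   * Illustrative sketch: with the wrapper returned by toLowerCase(), a query
   * string written with mixed-case parameter names, e.g.
   *
   *   ?OP=OPEN&Delegation=<token>
   *
   * is read back under the lower-cased names "op" and "delegation", so
   * DelegationParam.NAME and the other WebHDFS parameters can be looked up
   * case-insensitively. The query string is hypothetical.
   */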
}