/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hdfs.protocolPB;

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReceivedAndDeletedRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReceivedAndDeletedResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReportRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReportResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.CacheReportRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.CacheReportResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.CommitBlockSynchronizationRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.CommitBlockSynchronizationResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.ErrorReportRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.ErrorReportResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.HeartbeatRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.HeartbeatResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.ReceivedDeletedBlockInfoProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.RegisterDatanodeRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.RegisterDatanodeResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.ReportBadBlocksRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.ReportBadBlocksResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.StorageBlockReportProto;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.StorageReceivedDeletedBlocksProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto;
import org.apache.hadoop.hdfs.server.protocol.DatanodeCommand;
import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol;
import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
import org.apache.hadoop.hdfs.server.protocol.HeartbeatResponse;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.hdfs.server.protocol.ReceivedDeletedBlockInfo;
import org.apache.hadoop.hdfs.server.protocol.StorageBlockReport;
import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks;
import org.apache.hadoop.hdfs.server.protocol.StorageReport;

import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;

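/**
 * Server-side translator for {@link DatanodeProtocolPB}: unwraps each
 * protobuf request message, delegates the call to the wrapped
 * {@link DatanodeProtocol} implementation, and wraps the result back into
 * the corresponding protobuf response. Checked {@link IOException}s are
 * rethrown as {@link ServiceException}s, as required by the protobuf RPC
 * contract.
 */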
public class DatanodeProtocolServerSideTranslatorPB implements
    DatanodeProtocolPB {

  private final DatanodeProtocol impl;
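  // Pre-built, reusable empty responses for the RPC methods that return
  // no payload.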
  private static final ErrorReportResponseProto
      VOID_ERROR_REPORT_RESPONSE_PROTO =
          ErrorReportResponseProto.newBuilder().build();
  private static final BlockReceivedAndDeletedResponseProto
      VOID_BLOCK_RECEIVED_AND_DELETE_RESPONSE =
          BlockReceivedAndDeletedResponseProto.newBuilder().build();
  private static final ReportBadBlocksResponseProto
      VOID_REPORT_BAD_BLOCK_RESPONSE =
          ReportBadBlocksResponseProto.newBuilder().build();
  private static final CommitBlockSynchronizationResponseProto
      VOID_COMMIT_BLOCK_SYNCHRONIZATION_RESPONSE_PROTO =
          CommitBlockSynchronizationResponseProto.newBuilder().build();

  public DatanodeProtocolServerSideTranslatorPB(DatanodeProtocol impl) {
    this.impl = impl;
  }

  @Override
  public RegisterDatanodeResponseProto registerDatanode(
      RpcController controller, RegisterDatanodeRequestProto request)
      throws ServiceException {
    DatanodeRegistration registration = PBHelper.convert(request
        .getRegistration());
    DatanodeRegistration registrationResp;
    try {
      registrationResp = impl.registerDatanode(registration);
    } catch (IOException e) {
      throw new ServiceException(e);
    }
    return RegisterDatanodeResponseProto.newBuilder()
        .setRegistration(PBHelper.convert(registrationResp)).build();
  }

  @Override
  public HeartbeatResponseProto sendHeartbeat(RpcController controller,
      HeartbeatRequestProto request) throws ServiceException {
    HeartbeatResponse response;
    try {
      final StorageReport[] report = PBHelper.convertStorageReports(
          request.getReportsList());
      response = impl.sendHeartbeat(PBHelper.convert(request.getRegistration()),
          report, request.getCacheCapacity(), request.getCacheUsed(),
          request.getXmitsInProgress(),
          request.getXceiverCount(), request.getFailedVolumes());
    } catch (IOException e) {
      throw new ServiceException(e);
    }
    HeartbeatResponseProto.Builder builder = HeartbeatResponseProto
        .newBuilder();
    DatanodeCommand[] cmds = response.getCommands();
    if (cmds != null) {
      for (int i = 0; i < cmds.length; i++) {
        if (cmds[i] != null) {
          builder.addCmds(PBHelper.convert(cmds[i]));
        }
      }
    }
    builder.setHaStatus(PBHelper.convert(response.getNameNodeHaState()));
    return builder.build();
  }

  @Override
  public BlockReportResponseProto blockReport(RpcController controller,
      BlockReportRequestProto request) throws ServiceException {
    DatanodeCommand cmd = null;
    StorageBlockReport[] report =
        new StorageBlockReport[request.getReportsCount()];

    int index = 0;
    for (StorageBlockReportProto s : request.getReportsList()) {
      List<Long> blockIds = s.getBlocksList();
      long[] blocks = new long[blockIds.size()];
      for (int i = 0; i < blockIds.size(); i++) {
        blocks[i] = blockIds.get(i);
      }
      report[index++] = new StorageBlockReport(PBHelper.convert(s.getStorage()),
          blocks);
    }
    try {
      cmd = impl.blockReport(PBHelper.convert(request.getRegistration()),
          request.getBlockPoolId(), report);
    } catch (IOException e) {
      throw new ServiceException(e);
    }
    BlockReportResponseProto.Builder builder =
        BlockReportResponseProto.newBuilder();
    if (cmd != null) {
      builder.setCmd(PBHelper.convert(cmd));
    }
    return builder.build();
  }

  @Override
  public CacheReportResponseProto cacheReport(RpcController controller,
      CacheReportRequestProto request) throws ServiceException {
    DatanodeCommand cmd = null;
    try {
      cmd = impl.cacheReport(
          PBHelper.convert(request.getRegistration()),
          request.getBlockPoolId(),
          request.getBlocksList());
    } catch (IOException e) {
      throw new ServiceException(e);
    }
    CacheReportResponseProto.Builder builder =
        CacheReportResponseProto.newBuilder();
    if (cmd != null) {
      builder.setCmd(PBHelper.convert(cmd));
    }
    return builder.build();
  }

  @Override
  public BlockReceivedAndDeletedResponseProto blockReceivedAndDeleted(
      RpcController controller, BlockReceivedAndDeletedRequestProto request)
      throws ServiceException {
    List<StorageReceivedDeletedBlocksProto> sBlocks = request.getBlocksList();
    StorageReceivedDeletedBlocks[] info =
        new StorageReceivedDeletedBlocks[sBlocks.size()];
    for (int i = 0; i < sBlocks.size(); i++) {
      StorageReceivedDeletedBlocksProto sBlock = sBlocks.get(i);
      List<ReceivedDeletedBlockInfoProto> list = sBlock.getBlocksList();
      ReceivedDeletedBlockInfo[] rdBlocks =
          new ReceivedDeletedBlockInfo[list.size()];
      for (int j = 0; j < list.size(); j++) {
        rdBlocks[j] = PBHelper.convert(list.get(j));
      }
      info[i] = new StorageReceivedDeletedBlocks(sBlock.getStorageUuid(),
          rdBlocks);
    }
    try {
      impl.blockReceivedAndDeleted(PBHelper.convert(request.getRegistration()),
          request.getBlockPoolId(), info);
    } catch (IOException e) {
      throw new ServiceException(e);
    }
    return VOID_BLOCK_RECEIVED_AND_DELETE_RESPONSE;
  }

  @Override
  public ErrorReportResponseProto errorReport(RpcController controller,
      ErrorReportRequestProto request) throws ServiceException {
    try {
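      // getRegistartion() mirrors the misspelled "registartion" field name
      // in the ErrorReportRequestProto definition.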
      impl.errorReport(PBHelper.convert(request.getRegistartion()),
          request.getErrorCode(), request.getMsg());
    } catch (IOException e) {
      throw new ServiceException(e);
    }
    return VOID_ERROR_REPORT_RESPONSE_PROTO;
  }

  @Override
  public VersionResponseProto versionRequest(RpcController controller,
      VersionRequestProto request) throws ServiceException {
    NamespaceInfo info;
    try {
      info = impl.versionRequest();
    } catch (IOException e) {
      throw new ServiceException(e);
    }
    return VersionResponseProto.newBuilder()
        .setInfo(PBHelper.convert(info)).build();
  }

  @Override
  public ReportBadBlocksResponseProto reportBadBlocks(RpcController controller,
      ReportBadBlocksRequestProto request) throws ServiceException {
    List<LocatedBlockProto> lbps = request.getBlocksList();
    LocatedBlock[] blocks = new LocatedBlock[lbps.size()];
    for (int i = 0; i < lbps.size(); i++) {
      blocks[i] = PBHelper.convert(lbps.get(i));
    }
    try {
      impl.reportBadBlocks(blocks);
    } catch (IOException e) {
      throw new ServiceException(e);
    }
    return VOID_REPORT_BAD_BLOCK_RESPONSE;
  }

  @Override
  public CommitBlockSynchronizationResponseProto commitBlockSynchronization(
      RpcController controller, CommitBlockSynchronizationRequestProto request)
      throws ServiceException {
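    // getNewTaragetsList() mirrors the misspelled "newTaragets" field name
    // in the CommitBlockSynchronizationRequestProto definition.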
    List<DatanodeIDProto> dnprotos = request.getNewTaragetsList();
    DatanodeID[] dns = new DatanodeID[dnprotos.size()];
    for (int i = 0; i < dnprotos.size(); i++) {
      dns[i] = PBHelper.convert(dnprotos.get(i));
    }
    final List<String> sidprotos = request.getNewTargetStoragesList();
    final String[] storageIDs = sidprotos.toArray(new String[sidprotos.size()]);
    try {
      impl.commitBlockSynchronization(PBHelper.convert(request.getBlock()),
          request.getNewGenStamp(), request.getNewLength(),
          request.getCloseFile(), request.getDeleteBlock(), dns, storageIDs);
    } catch (IOException e) {
      throw new ServiceException(e);
    }
    return VOID_COMMIT_BLOCK_SYNCHRONIZATION_RESPONSE_PROTO;
  }
}