// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: ProtocolInfo.proto

package org.apache.hadoop.ipc.protobuf;

public final class ProtocolInfoProtos {
  private ProtocolInfoProtos() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface GetProtocolVersionsRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string protocol = 1;
    /**
     * <code>required string protocol = 1;</code>
     *
     * <pre>
     * Protocol name
     * </pre>
     */
    boolean hasProtocol();
    /**
     * <code>required string protocol = 1;</code>
     *
     * <pre>
     * Protocol name
     * </pre>
     */
    java.lang.String getProtocol();
    /**
     * <code>required string protocol = 1;</code>
     *
     * <pre>
     * Protocol name
     * </pre>
     */
    com.google.protobuf.ByteString
        getProtocolBytes();
  }
  /**
   * Protobuf type {@code hadoop.common.GetProtocolVersionsRequestProto}
   *
   * <pre>
   **
   * Request to get protocol versions for all supported rpc kinds.
   * </pre>
   */
  public static final class GetProtocolVersionsRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements GetProtocolVersionsRequestProtoOrBuilder {
    // Use GetProtocolVersionsRequestProto.newBuilder() to construct.
    private GetProtocolVersionsRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GetProtocolVersionsRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetProtocolVersionsRequestProto defaultInstance;
    public static GetProtocolVersionsRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetProtocolVersionsRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GetProtocolVersionsRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              protocol_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return
          org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto.class, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GetProtocolVersionsRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<GetProtocolVersionsRequestProto>() {
      public GetProtocolVersionsRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetProtocolVersionsRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetProtocolVersionsRequestProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // required string protocol = 1;
    public static final int PROTOCOL_FIELD_NUMBER = 1;
    private java.lang.Object protocol_;
    /**
     * <code>required string protocol = 1;</code>
     *
     * <pre>
     * Protocol name
     * </pre>
     */
    public boolean hasProtocol() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required string protocol = 1;</code>
     *
     * <pre>
     * Protocol name
     * </pre>
     */
    public java.lang.String getProtocol() {
      java.lang.Object ref = protocol_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          protocol_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string protocol = 1;</code>
     *
     * <pre>
     * Protocol name
     * </pre>
     */
    public com.google.protobuf.ByteString
        getProtocolBytes() {
      java.lang.Object ref = protocol_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        protocol_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    private void initFields() {
      protocol_ = "";
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasProtocol()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getProtocolBytes());
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size +=
            com.google.protobuf.CodedOutputStream
                .computeBytesSize(1, getProtocolBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto other = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto) obj;

      boolean result = true;
      result = result && (hasProtocol() == other.hasProtocol());
      if (hasProtocol()) {
        result = result && getProtocol()
            .equals(other.getProtocol());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasProtocol()) {
        hash = (37 * hash) + PROTOCOL_FIELD_NUMBER;
        hash = (53 * hash) + getProtocol().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public
    static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.common.GetProtocolVersionsRequestProto}
     *
     * <pre>
     **
     * Request to get protocol versions for all supported rpc kinds.
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto.class, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        protocol_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return
            org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsRequestProto_descriptor;
      }

      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto build() {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto buildPartial() {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto result = new org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.protocol_ = protocol_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto) {
          return mergeFrom((org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto.getDefaultInstance()) return this;
        if (other.hasProtocol()) {
          bitField0_ |= 0x00000001;
          protocol_ = other.protocol_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasProtocol()) {

          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required string protocol = 1;
      private java.lang.Object protocol_ = "";
      /**
       * <code>required string protocol = 1;</code>
       *
       * <pre>
       * Protocol name
       * </pre>
       */
      public boolean hasProtocol() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required string protocol = 1;</code>
       *
       * <pre>
       * Protocol name
       * </pre>
       */
      public java.lang.String getProtocol() {
        java.lang.Object ref = protocol_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString)
              ref)
              .toStringUtf8();
          protocol_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string protocol = 1;</code>
       *
       * <pre>
       * Protocol name
       * </pre>
       */
      public com.google.protobuf.ByteString
          getProtocolBytes() {
        java.lang.Object ref = protocol_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          protocol_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string protocol = 1;</code>
       *
       * <pre>
       * Protocol name
       * </pre>
       */
      public Builder setProtocol(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        protocol_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required string protocol = 1;</code>
       *
       * <pre>
       * Protocol name
       * </pre>
       */
      public Builder clearProtocol() {
        bitField0_ = (bitField0_ & ~0x00000001);
        protocol_ = getDefaultInstance().getProtocol();
        onChanged();
        return this;
      }
      /**
       * <code>required string protocol = 1;</code>
       *
       * <pre>
       * Protocol name
       * </pre>
       */
      public Builder setProtocolBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        protocol_ = value;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.common.GetProtocolVersionsRequestProto)
    }

    static {
      defaultInstance = new GetProtocolVersionsRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.GetProtocolVersionsRequestProto)
  }

  public interface ProtocolVersionProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string rpcKind = 1;
    /**
     * <code>required string rpcKind = 1;</code>
     *
     * <pre>
     *RPC kind
     * </pre>
     */
    boolean hasRpcKind();
    /**
     * <code>required string rpcKind = 1;</code>
     *
     * <pre>
     *RPC kind
     * </pre>
     */
    java.lang.String getRpcKind();
    /**
     * <code>required string rpcKind = 1;</code>
     *
     * <pre>
     *RPC kind
     * </pre>
     */
    com.google.protobuf.ByteString
        getRpcKindBytes();

    // repeated uint64 versions = 2;
    /**
     * <code>repeated uint64 versions = 2;</code>
     *
     * <pre>
     *Protocol version corresponding to the rpc kind.
     * </pre>
     */
    java.util.List<java.lang.Long> getVersionsList();
    /**
     * <code>repeated uint64 versions = 2;</code>
     *
     * <pre>
     *Protocol version corresponding to the rpc kind.
     * </pre>
     */
    int getVersionsCount();
    /**
     * <code>repeated uint64 versions = 2;</code>
     *
     * <pre>
     *Protocol version corresponding to the rpc kind.
     * </pre>
     */
    long getVersions(int index);
  }
  /**
   * Protobuf type {@code hadoop.common.ProtocolVersionProto}
   *
   * <pre>
   **
   * Protocol version with corresponding rpc kind.
   * </pre>
   */
  public static final class ProtocolVersionProto extends
      com.google.protobuf.GeneratedMessage
      implements ProtocolVersionProtoOrBuilder {
    // Use ProtocolVersionProto.newBuilder() to construct.
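    /*
     * Usage note (editorial comment, not part of the protoc output): a minimal
     * sketch of how this message's generated builder API is typically driven.
     * The rpcKind string "RPC_PROTOCOL_BUFFER" and the version numbers are
     * illustrative assumptions only.
     *
     *   ProtocolInfoProtos.ProtocolVersionProto version =
     *       ProtocolInfoProtos.ProtocolVersionProto.newBuilder()
     *           .setRpcKind("RPC_PROTOCOL_BUFFER")  // required string field
     *           .addVersions(1L)                    // repeated uint64 field
     *           .addVersions(2L)
     *           .build();                           // fails if rpcKind is unset
     *   byte[] bytes = version.toByteArray();
     *   ProtocolInfoProtos.ProtocolVersionProto parsed =
     *       ProtocolInfoProtos.ProtocolVersionProto.parseFrom(bytes);
     */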
    private ProtocolVersionProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private ProtocolVersionProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final ProtocolVersionProto defaultInstance;
    public static ProtocolVersionProto getDefaultInstance() {
      return defaultInstance;
    }

    public ProtocolVersionProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private ProtocolVersionProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              rpcKind_ = input.readBytes();
              break;
            }
            case 16: {
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                versions_ = new java.util.ArrayList<java.lang.Long>();
                mutable_bitField0_ |= 0x00000002;
              }
              versions_.add(input.readUInt64());
              break;
            }
            case 18: {
              int length = input.readRawVarint32();
              int limit = input.pushLimit(length);
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002) && input.getBytesUntilLimit() > 0) {
                versions_ = new java.util.ArrayList<java.lang.Long>();
                mutable_bitField0_ |= 0x00000002;
              }
              while (input.getBytesUntilLimit() > 0) {
                versions_.add(input.readUInt64());
              }
              input.popLimit(limit);
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
          versions_ = java.util.Collections.unmodifiableList(versions_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolVersionProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolVersionProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.class, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder.class);
    }

    public static com.google.protobuf.Parser<ProtocolVersionProto> PARSER =
        new com.google.protobuf.AbstractParser<ProtocolVersionProto>() {
      public
      ProtocolVersionProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ProtocolVersionProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ProtocolVersionProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // required string rpcKind = 1;
    public static final int RPCKIND_FIELD_NUMBER = 1;
    private java.lang.Object rpcKind_;
    /**
     * <code>required string rpcKind = 1;</code>
     *
     * <pre>
     *RPC kind
     * </pre>
     */
    public boolean hasRpcKind() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required string rpcKind = 1;</code>
     *
     * <pre>
     *RPC kind
     * </pre>
     */
    public java.lang.String getRpcKind() {
      java.lang.Object ref = rpcKind_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          rpcKind_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string rpcKind = 1;</code>
     *
     * <pre>
     *RPC kind
     * </pre>
     */
    public com.google.protobuf.ByteString
        getRpcKindBytes() {
      java.lang.Object ref = rpcKind_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        rpcKind_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // repeated uint64 versions = 2;
    public static final int VERSIONS_FIELD_NUMBER = 2;
    private java.util.List<java.lang.Long> versions_;
    /**
     * <code>repeated uint64 versions = 2;</code>
     *
     * <pre>
     *Protocol version corresponding to the rpc kind.
     * </pre>
     */
    public java.util.List<java.lang.Long>
        getVersionsList() {
      return versions_;
    }
    /**
     * <code>repeated uint64 versions = 2;</code>
     *
     * <pre>
     *Protocol version corresponding to the rpc kind.
     * </pre>
     */
    public int getVersionsCount() {
      return versions_.size();
    }
    /**
     * <code>repeated uint64 versions = 2;</code>
     *
     * <pre>
     *Protocol version corresponding to the rpc kind.
     * </pre>
     */
    public long getVersions(int index) {
      return versions_.get(index);
    }

    private void initFields() {
      rpcKind_ = "";
      versions_ = java.util.Collections.emptyList();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasRpcKind()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getRpcKindBytes());
      }
      for (int i = 0; i < versions_.size(); i++) {
        output.writeUInt64(2, versions_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getRpcKindBytes());
      }
      {
        int dataSize = 0;
        for (int i = 0; i < versions_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeUInt64SizeNoTag(versions_.get(i));
        }
        size += dataSize;
        size += 1 * getVersionsList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto other = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto) obj;

      boolean result = true;
      result = result && (hasRpcKind() == other.hasRpcKind());
      if (hasRpcKind()) {
        result = result && getRpcKind()
            .equals(other.getRpcKind());
      }
      result = result && getVersionsList()
          .equals(other.getVersionsList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRpcKind()) {
        hash = (37 * hash) + RPCKIND_FIELD_NUMBER;
        hash = (53 * hash) + getRpcKind().hashCode();
      }
      if (getVersionsCount() > 0) {
        hash = (37 * hash) + VERSIONS_FIELD_NUMBER;
        hash = (53 * hash) + getVersionsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static
    org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.common.ProtocolVersionProto}
     *
     * <pre>
     **
     * Protocol version with corresponding rpc kind.
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolVersionProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolVersionProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.class, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder.class);
      }

      // Construct using org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        rpcKind_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        versions_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolVersionProto_descriptor;
      }

      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto getDefaultInstanceForType() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.getDefaultInstance();
      }

      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto build() {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto buildPartial() {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto result = new org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.rpcKind_ = rpcKind_;
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          versions_ = java.util.Collections.unmodifiableList(versions_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.versions_ = versions_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto) {
          return
              mergeFrom((org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.getDefaultInstance()) return this;
        if (other.hasRpcKind()) {
          bitField0_ |= 0x00000001;
          rpcKind_ = other.rpcKind_;
          onChanged();
        }
        if (!other.versions_.isEmpty()) {
          if (versions_.isEmpty()) {
            versions_ = other.versions_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureVersionsIsMutable();
            versions_.addAll(other.versions_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasRpcKind()) {

          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required string rpcKind = 1;
      private java.lang.Object rpcKind_ = "";
      /**
       * <code>required string rpcKind = 1;</code>
       *
       * <pre>
       *RPC kind
       * </pre>
       */
      public boolean hasRpcKind() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required string rpcKind = 1;</code>
       *
       * <pre>
       *RPC kind
       * </pre>
       */
      public java.lang.String getRpcKind() {
        java.lang.Object ref = rpcKind_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          rpcKind_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string rpcKind = 1;</code>
       *
       * <pre>
       *RPC kind
       * </pre>
       */
      public com.google.protobuf.ByteString
          getRpcKindBytes() {
        java.lang.Object ref = rpcKind_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          rpcKind_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string rpcKind = 1;</code>
       *
       * <pre>
       *RPC kind
       * </pre>
       */
      public Builder setRpcKind(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        rpcKind_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required string rpcKind = 1;</code>
       *
       * <pre>
       *RPC kind
       * </pre>
       */
      public Builder clearRpcKind() {
        bitField0_ = (bitField0_ & ~0x00000001);
        rpcKind_ = getDefaultInstance().getRpcKind();
        onChanged();
        return this;
      }
      /**
       * <code>required string rpcKind = 1;</code>
       *
       * <pre>
       *RPC kind
       * </pre>
       */
      public Builder setRpcKindBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        rpcKind_ = value;
        onChanged();
        return this;
      }

      // repeated uint64 versions = 2;
      private java.util.List<java.lang.Long> versions_ = java.util.Collections.emptyList();
      private void ensureVersionsIsMutable() {
        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
          versions_ = new java.util.ArrayList<java.lang.Long>(versions_);
          bitField0_ |= 0x00000002;
        }
      }
      /**
       * <code>repeated uint64 versions = 2;</code>
       *
       * <pre>
       *Protocol version corresponding to the rpc kind.
       * </pre>
       */
      public java.util.List<java.lang.Long>
          getVersionsList() {
        return java.util.Collections.unmodifiableList(versions_);
      }
      /**
       * <code>repeated uint64 versions = 2;</code>
       *
       * <pre>
       *Protocol version corresponding to the rpc kind.
       * </pre>
       */
      public int getVersionsCount() {
        return versions_.size();
      }
      /**
       * <code>repeated uint64 versions = 2;</code>
       *
       * <pre>
       *Protocol version corresponding to the rpc kind.
       * </pre>
       */
      public long getVersions(int index) {
        return versions_.get(index);
      }
      /**
       * <code>repeated uint64 versions = 2;</code>
       *
       * <pre>
       *Protocol version corresponding to the rpc kind.
       * </pre>
       */
      public Builder setVersions(
          int index, long value) {
        ensureVersionsIsMutable();
        versions_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated uint64 versions = 2;</code>
       *
       * <pre>
       *Protocol version corresponding to the rpc kind.
       * </pre>
       */
      public Builder addVersions(long value) {
        ensureVersionsIsMutable();
        versions_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated uint64 versions = 2;</code>
       *
       * <pre>
       *Protocol version corresponding to the rpc kind.
       * </pre>
       */
      public Builder addAllVersions(
          java.lang.Iterable<? extends java.lang.Long> values) {
        ensureVersionsIsMutable();
        super.addAll(values, versions_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated uint64 versions = 2;</code>
       *
       * <pre>
       *Protocol version corresponding to the rpc kind.
       * </pre>
       */
      public Builder clearVersions() {
        versions_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.common.ProtocolVersionProto)
    }

    static {
      defaultInstance = new ProtocolVersionProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.common.ProtocolVersionProto)
  }

  public interface GetProtocolVersionsResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;
    /**
     * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code>
     */
    java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto>
        getProtocolVersionsList();
    /**
     * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code>
     */
    org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto getProtocolVersions(int index);
    /**
     * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code>
     */
    int getProtocolVersionsCount();
    /**
     * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder>
        getProtocolVersionsOrBuilderList();
    /**
     * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code>
     */
    org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder getProtocolVersionsOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.common.GetProtocolVersionsResponseProto}
   *
   * <pre>
   **
   * Get protocol version response.
   * </pre>
   */
  public static final class GetProtocolVersionsResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements GetProtocolVersionsResponseProtoOrBuilder {
    // Use GetProtocolVersionsResponseProto.newBuilder() to construct.
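    /*
     * Usage note (editorial comment, not part of the protoc output): a sketch
     * of how a server-side implementation might assemble this response, using
     * the standard protoc adder for the repeated protocolVersions field. The
     * sample rpcKind and version values are assumptions, not values taken
     * from Hadoop itself.
     *
     *   ProtocolInfoProtos.ProtocolVersionProto writable =
     *       ProtocolInfoProtos.ProtocolVersionProto.newBuilder()
     *           .setRpcKind("RPC_WRITABLE")
     *           .addVersions(2L)
     *           .build();
     *   ProtocolInfoProtos.GetProtocolVersionsResponseProto response =
     *       ProtocolInfoProtos.GetProtocolVersionsResponseProto.newBuilder()
     *           .addProtocolVersions(writable)
     *           .build();
     */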
    private GetProtocolVersionsResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GetProtocolVersionsResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetProtocolVersionsResponseProto defaultInstance;
    public static GetProtocolVersionsResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetProtocolVersionsResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GetProtocolVersionsResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                protocolVersions_ = new java.util.ArrayList<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto>();
                mutable_bitField0_ |= 0x00000001;
              }
              protocolVersions_.add(input.readMessage(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          protocolVersions_ = java.util.Collections.unmodifiableList(protocolVersions_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.class, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GetProtocolVersionsResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<GetProtocolVersionsResponseProto>() {
      public GetProtocolVersionsResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetProtocolVersionsResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetProtocolVersionsResponseProto> getParserForType() {
      return PARSER;
    }

    // repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;
    public static final int PROTOCOLVERSIONS_FIELD_NUMBER = 1;
    private java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto> protocolVersions_;
    /**
     * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code>
     */
    public java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto> getProtocolVersionsList() {
      return protocolVersions_;
    }
    /**
     * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code>
     */
    public java.util.List<? extends org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder>
        getProtocolVersionsOrBuilderList() {
      return protocolVersions_;
    }
    /**
     * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code>
     */
    public int getProtocolVersionsCount() {
      return protocolVersions_.size();
    }
    /**
     * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code>
     */
    public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto getProtocolVersions(int index) {
      return protocolVersions_.get(index);
    }
    /**
     * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code>
     */
    public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder getProtocolVersionsOrBuilder(
        int index) {
      return protocolVersions_.get(index);
    }

    private void initFields() {
      protocolVersions_ = java.util.Collections.emptyList();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      for (int i = 0; i < getProtocolVersionsCount(); i++) {
        if (!getProtocolVersions(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < protocolVersions_.size(); i++) {
        output.writeMessage(1, protocolVersions_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < protocolVersions_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, protocolVersions_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final
        java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto other = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto) obj;

      boolean result = true;
      result = result && getProtocolVersionsList()
          .equals(other.getProtocolVersionsList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getProtocolVersionsCount() > 0) {
        hash = (37 * hash) + PROTOCOLVERSIONS_FIELD_NUMBER;
        hash = (53 * hash) + getProtocolVersionsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.common.GetProtocolVersionsResponseProto}
     *
     * <pre>
     **
     * Get protocol version response.
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.class, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getProtocolVersionsFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (protocolVersionsBuilder_ == null) {
          protocolVersions_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          protocolVersionsBuilder_.clear();
        }
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolVersionsResponseProto_descriptor;
      }

      public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto getDefaultInstanceForType() {
        return
org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.getDefaultInstance(); 1760 } 1761 1762 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto build() { 1763 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto result = buildPartial(); 1764 if (!result.isInitialized()) { 1765 throw newUninitializedMessageException(result); 1766 } 1767 return result; 1768 } 1769 1770 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto buildPartial() { 1771 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto result = new org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto(this); 1772 int from_bitField0_ = bitField0_; 1773 if (protocolVersionsBuilder_ == null) { 1774 if (((bitField0_ & 0x00000001) == 0x00000001)) { 1775 protocolVersions_ = java.util.Collections.unmodifiableList(protocolVersions_); 1776 bitField0_ = (bitField0_ & ~0x00000001); 1777 } 1778 result.protocolVersions_ = protocolVersions_; 1779 } else { 1780 result.protocolVersions_ = protocolVersionsBuilder_.build(); 1781 } 1782 onBuilt(); 1783 return result; 1784 } 1785 1786 public Builder mergeFrom(com.google.protobuf.Message other) { 1787 if (other instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto) { 1788 return mergeFrom((org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto)other); 1789 } else { 1790 super.mergeFrom(other); 1791 return this; 1792 } 1793 } 1794 1795 public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto other) { 1796 if (other == org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.getDefaultInstance()) return this; 1797 if (protocolVersionsBuilder_ == null) { 1798 if (!other.protocolVersions_.isEmpty()) { 1799 if (protocolVersions_.isEmpty()) { 1800 protocolVersions_ = other.protocolVersions_; 1801 bitField0_ = (bitField0_ & ~0x00000001); 1802 } else { 1803 ensureProtocolVersionsIsMutable(); 1804 protocolVersions_.addAll(other.protocolVersions_); 1805 } 1806 onChanged(); 1807 } 1808 } else { 1809 if (!other.protocolVersions_.isEmpty()) { 1810 if (protocolVersionsBuilder_.isEmpty()) { 1811 protocolVersionsBuilder_.dispose(); 1812 protocolVersionsBuilder_ = null; 1813 protocolVersions_ = other.protocolVersions_; 1814 bitField0_ = (bitField0_ & ~0x00000001); 1815 protocolVersionsBuilder_ = 1816 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
1817 getProtocolVersionsFieldBuilder() : null; 1818 } else { 1819 protocolVersionsBuilder_.addAllMessages(other.protocolVersions_); 1820 } 1821 } 1822 } 1823 this.mergeUnknownFields(other.getUnknownFields()); 1824 return this; 1825 } 1826 1827 public final boolean isInitialized() { 1828 for (int i = 0; i < getProtocolVersionsCount(); i++) { 1829 if (!getProtocolVersions(i).isInitialized()) { 1830 1831 return false; 1832 } 1833 } 1834 return true; 1835 } 1836 1837 public Builder mergeFrom( 1838 com.google.protobuf.CodedInputStream input, 1839 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1840 throws java.io.IOException { 1841 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto parsedMessage = null; 1842 try { 1843 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 1844 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 1845 parsedMessage = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto) e.getUnfinishedMessage(); 1846 throw e; 1847 } finally { 1848 if (parsedMessage != null) { 1849 mergeFrom(parsedMessage); 1850 } 1851 } 1852 return this; 1853 } 1854 private int bitField0_; 1855 1856 // repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1; 1857 private java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto> protocolVersions_ = 1858 java.util.Collections.emptyList(); 1859 private void ensureProtocolVersionsIsMutable() { 1860 if (!((bitField0_ & 0x00000001) == 0x00000001)) { 1861 protocolVersions_ = new java.util.ArrayList<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto>(protocolVersions_); 1862 bitField0_ |= 0x00000001; 1863 } 1864 } 1865 1866 private com.google.protobuf.RepeatedFieldBuilder< 1867 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder> protocolVersionsBuilder_; 1868 1869 /** 1870 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 1871 */ 1872 public java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto> getProtocolVersionsList() { 1873 if (protocolVersionsBuilder_ == null) { 1874 return java.util.Collections.unmodifiableList(protocolVersions_); 1875 } else { 1876 return protocolVersionsBuilder_.getMessageList(); 1877 } 1878 } 1879 /** 1880 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 1881 */ 1882 public int getProtocolVersionsCount() { 1883 if (protocolVersionsBuilder_ == null) { 1884 return protocolVersions_.size(); 1885 } else { 1886 return protocolVersionsBuilder_.getCount(); 1887 } 1888 } 1889 /** 1890 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 1891 */ 1892 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto getProtocolVersions(int index) { 1893 if (protocolVersionsBuilder_ == null) { 1894 return protocolVersions_.get(index); 1895 } else { 1896 return protocolVersionsBuilder_.getMessage(index); 1897 } 1898 } 1899 /** 1900 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 1901 */ 1902 public Builder setProtocolVersions( 1903 int index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto value) { 1904 if (protocolVersionsBuilder_ == null) { 1905 if (value == null) { 1906 throw new NullPointerException(); 1907 } 
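// No field builder is attached here, so the builder mutates the inline list directly:
// ensure the backing list is a private mutable copy, replace the element, then signal the change.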
1908 ensureProtocolVersionsIsMutable(); 1909 protocolVersions_.set(index, value); 1910 onChanged(); 1911 } else { 1912 protocolVersionsBuilder_.setMessage(index, value); 1913 } 1914 return this; 1915 } 1916 /** 1917 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 1918 */ 1919 public Builder setProtocolVersions( 1920 int index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder builderForValue) { 1921 if (protocolVersionsBuilder_ == null) { 1922 ensureProtocolVersionsIsMutable(); 1923 protocolVersions_.set(index, builderForValue.build()); 1924 onChanged(); 1925 } else { 1926 protocolVersionsBuilder_.setMessage(index, builderForValue.build()); 1927 } 1928 return this; 1929 } 1930 /** 1931 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 1932 */ 1933 public Builder addProtocolVersions(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto value) { 1934 if (protocolVersionsBuilder_ == null) { 1935 if (value == null) { 1936 throw new NullPointerException(); 1937 } 1938 ensureProtocolVersionsIsMutable(); 1939 protocolVersions_.add(value); 1940 onChanged(); 1941 } else { 1942 protocolVersionsBuilder_.addMessage(value); 1943 } 1944 return this; 1945 } 1946 /** 1947 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 1948 */ 1949 public Builder addProtocolVersions( 1950 int index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto value) { 1951 if (protocolVersionsBuilder_ == null) { 1952 if (value == null) { 1953 throw new NullPointerException(); 1954 } 1955 ensureProtocolVersionsIsMutable(); 1956 protocolVersions_.add(index, value); 1957 onChanged(); 1958 } else { 1959 protocolVersionsBuilder_.addMessage(index, value); 1960 } 1961 return this; 1962 } 1963 /** 1964 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 1965 */ 1966 public Builder addProtocolVersions( 1967 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder builderForValue) { 1968 if (protocolVersionsBuilder_ == null) { 1969 ensureProtocolVersionsIsMutable(); 1970 protocolVersions_.add(builderForValue.build()); 1971 onChanged(); 1972 } else { 1973 protocolVersionsBuilder_.addMessage(builderForValue.build()); 1974 } 1975 return this; 1976 } 1977 /** 1978 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 1979 */ 1980 public Builder addProtocolVersions( 1981 int index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder builderForValue) { 1982 if (protocolVersionsBuilder_ == null) { 1983 ensureProtocolVersionsIsMutable(); 1984 protocolVersions_.add(index, builderForValue.build()); 1985 onChanged(); 1986 } else { 1987 protocolVersionsBuilder_.addMessage(index, builderForValue.build()); 1988 } 1989 return this; 1990 } 1991 /** 1992 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 1993 */ 1994 public Builder addAllProtocolVersions( 1995 java.lang.Iterable<? 
extends org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto> values) { 1996 if (protocolVersionsBuilder_ == null) { 1997 ensureProtocolVersionsIsMutable(); 1998 super.addAll(values, protocolVersions_); 1999 onChanged(); 2000 } else { 2001 protocolVersionsBuilder_.addAllMessages(values); 2002 } 2003 return this; 2004 } 2005 /** 2006 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 2007 */ 2008 public Builder clearProtocolVersions() { 2009 if (protocolVersionsBuilder_ == null) { 2010 protocolVersions_ = java.util.Collections.emptyList(); 2011 bitField0_ = (bitField0_ & ~0x00000001); 2012 onChanged(); 2013 } else { 2014 protocolVersionsBuilder_.clear(); 2015 } 2016 return this; 2017 } 2018 /** 2019 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 2020 */ 2021 public Builder removeProtocolVersions(int index) { 2022 if (protocolVersionsBuilder_ == null) { 2023 ensureProtocolVersionsIsMutable(); 2024 protocolVersions_.remove(index); 2025 onChanged(); 2026 } else { 2027 protocolVersionsBuilder_.remove(index); 2028 } 2029 return this; 2030 } 2031 /** 2032 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 2033 */ 2034 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder getProtocolVersionsBuilder( 2035 int index) { 2036 return getProtocolVersionsFieldBuilder().getBuilder(index); 2037 } 2038 /** 2039 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 2040 */ 2041 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder getProtocolVersionsOrBuilder( 2042 int index) { 2043 if (protocolVersionsBuilder_ == null) { 2044 return protocolVersions_.get(index); } else { 2045 return protocolVersionsBuilder_.getMessageOrBuilder(index); 2046 } 2047 } 2048 /** 2049 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 2050 */ 2051 public java.util.List<? 
extends org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder> 2052 getProtocolVersionsOrBuilderList() { 2053 if (protocolVersionsBuilder_ != null) { 2054 return protocolVersionsBuilder_.getMessageOrBuilderList(); 2055 } else { 2056 return java.util.Collections.unmodifiableList(protocolVersions_); 2057 } 2058 } 2059 /** 2060 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 2061 */ 2062 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder addProtocolVersionsBuilder() { 2063 return getProtocolVersionsFieldBuilder().addBuilder( 2064 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.getDefaultInstance()); 2065 } 2066 /** 2067 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 2068 */ 2069 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder addProtocolVersionsBuilder( 2070 int index) { 2071 return getProtocolVersionsFieldBuilder().addBuilder( 2072 index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.getDefaultInstance()); 2073 } 2074 /** 2075 * <code>repeated .hadoop.common.ProtocolVersionProto protocolVersions = 1;</code> 2076 */ 2077 public java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder> 2078 getProtocolVersionsBuilderList() { 2079 return getProtocolVersionsFieldBuilder().getBuilderList(); 2080 } 2081 private com.google.protobuf.RepeatedFieldBuilder< 2082 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder> 2083 getProtocolVersionsFieldBuilder() { 2084 if (protocolVersionsBuilder_ == null) { 2085 protocolVersionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 2086 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProto.Builder, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolVersionProtoOrBuilder>( 2087 protocolVersions_, 2088 ((bitField0_ & 0x00000001) == 0x00000001), 2089 getParentForChildren(), 2090 isClean()); 2091 protocolVersions_ = null; 2092 } 2093 return protocolVersionsBuilder_; 2094 } 2095 2096 // @@protoc_insertion_point(builder_scope:hadoop.common.GetProtocolVersionsResponseProto) 2097 } 2098 2099 static { 2100 defaultInstance = new GetProtocolVersionsResponseProto(true); 2101 defaultInstance.initFields(); 2102 } 2103 2104 // @@protoc_insertion_point(class_scope:hadoop.common.GetProtocolVersionsResponseProto) 2105 } 2106 2107 public interface GetProtocolSignatureRequestProtoOrBuilder 2108 extends com.google.protobuf.MessageOrBuilder { 2109 2110 // required string protocol = 1; 2111 /** 2112 * <code>required string protocol = 1;</code> 2113 * 2114 * <pre> 2115 * Protocol name 2116 * </pre> 2117 */ 2118 boolean hasProtocol(); 2119 /** 2120 * <code>required string protocol = 1;</code> 2121 * 2122 * <pre> 2123 * Protocol name 2124 * </pre> 2125 */ 2126 java.lang.String getProtocol(); 2127 /** 2128 * <code>required string protocol = 1;</code> 2129 * 2130 * <pre> 2131 * Protocol name 2132 * </pre> 2133 */ 2134 com.google.protobuf.ByteString 2135 getProtocolBytes(); 2136 2137 // required string rpcKind = 2; 2138 /** 2139 * <code>required string rpcKind = 2;</code> 2140 * 2141 * <pre> 2142 * RPC kind 2143 * </pre> 2144 */ 2145 boolean hasRpcKind(); 2146 /** 2147 
* <code>required string rpcKind = 2;</code> 2148 * 2149 * <pre> 2150 * RPC kind 2151 * </pre> 2152 */ 2153 java.lang.String getRpcKind(); 2154 /** 2155 * <code>required string rpcKind = 2;</code> 2156 * 2157 * <pre> 2158 * RPC kind 2159 * </pre> 2160 */ 2161 com.google.protobuf.ByteString 2162 getRpcKindBytes(); 2163 } 2164 /** 2165 * Protobuf type {@code hadoop.common.GetProtocolSignatureRequestProto} 2166 * 2167 * <pre> 2168 ** 2169 * Get protocol signature request. 2170 * </pre> 2171 */ 2172 public static final class GetProtocolSignatureRequestProto extends 2173 com.google.protobuf.GeneratedMessage 2174 implements GetProtocolSignatureRequestProtoOrBuilder { 2175 // Use GetProtocolSignatureRequestProto.newBuilder() to construct. 2176 private GetProtocolSignatureRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 2177 super(builder); 2178 this.unknownFields = builder.getUnknownFields(); 2179 } 2180 private GetProtocolSignatureRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 2181 2182 private static final GetProtocolSignatureRequestProto defaultInstance; 2183 public static GetProtocolSignatureRequestProto getDefaultInstance() { 2184 return defaultInstance; 2185 } 2186 2187 public GetProtocolSignatureRequestProto getDefaultInstanceForType() { 2188 return defaultInstance; 2189 } 2190 2191 private final com.google.protobuf.UnknownFieldSet unknownFields; 2192 @java.lang.Override 2193 public final com.google.protobuf.UnknownFieldSet 2194 getUnknownFields() { 2195 return this.unknownFields; 2196 } 2197 private GetProtocolSignatureRequestProto( 2198 com.google.protobuf.CodedInputStream input, 2199 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2200 throws com.google.protobuf.InvalidProtocolBufferException { 2201 initFields(); 2202 int mutable_bitField0_ = 0; 2203 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 2204 com.google.protobuf.UnknownFieldSet.newBuilder(); 2205 try { 2206 boolean done = false; 2207 while (!done) { 2208 int tag = input.readTag(); 2209 switch (tag) { 2210 case 0: 2211 done = true; 2212 break; 2213 default: { 2214 if (!parseUnknownField(input, unknownFields, 2215 extensionRegistry, tag)) { 2216 done = true; 2217 } 2218 break; 2219 } 2220 case 10: { 2221 bitField0_ |= 0x00000001; 2222 protocol_ = input.readBytes(); 2223 break; 2224 } 2225 case 18: { 2226 bitField0_ |= 0x00000002; 2227 rpcKind_ = input.readBytes(); 2228 break; 2229 } 2230 } 2231 } 2232 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 2233 throw e.setUnfinishedMessage(this); 2234 } catch (java.io.IOException e) { 2235 throw new com.google.protobuf.InvalidProtocolBufferException( 2236 e.getMessage()).setUnfinishedMessage(this); 2237 } finally { 2238 this.unknownFields = unknownFields.build(); 2239 makeExtensionsImmutable(); 2240 } 2241 } 2242 public static final com.google.protobuf.Descriptors.Descriptor 2243 getDescriptor() { 2244 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureRequestProto_descriptor; 2245 } 2246 2247 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable 2248 internalGetFieldAccessorTable() { 2249 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureRequestProto_fieldAccessorTable 2250 .ensureFieldAccessorsInitialized( 2251 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto.class, 
org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto.Builder.class); 2252 } 2253 2254 public static com.google.protobuf.Parser<GetProtocolSignatureRequestProto> PARSER = 2255 new com.google.protobuf.AbstractParser<GetProtocolSignatureRequestProto>() { 2256 public GetProtocolSignatureRequestProto parsePartialFrom( 2257 com.google.protobuf.CodedInputStream input, 2258 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2259 throws com.google.protobuf.InvalidProtocolBufferException { 2260 return new GetProtocolSignatureRequestProto(input, extensionRegistry); 2261 } 2262 }; 2263 2264 @java.lang.Override 2265 public com.google.protobuf.Parser<GetProtocolSignatureRequestProto> getParserForType() { 2266 return PARSER; 2267 } 2268 2269 private int bitField0_; 2270 // required string protocol = 1; 2271 public static final int PROTOCOL_FIELD_NUMBER = 1; 2272 private java.lang.Object protocol_; 2273 /** 2274 * <code>required string protocol = 1;</code> 2275 * 2276 * <pre> 2277 * Protocol name 2278 * </pre> 2279 */ 2280 public boolean hasProtocol() { 2281 return ((bitField0_ & 0x00000001) == 0x00000001); 2282 } 2283 /** 2284 * <code>required string protocol = 1;</code> 2285 * 2286 * <pre> 2287 * Protocol name 2288 * </pre> 2289 */ 2290 public java.lang.String getProtocol() { 2291 java.lang.Object ref = protocol_; 2292 if (ref instanceof java.lang.String) { 2293 return (java.lang.String) ref; 2294 } else { 2295 com.google.protobuf.ByteString bs = 2296 (com.google.protobuf.ByteString) ref; 2297 java.lang.String s = bs.toStringUtf8(); 2298 if (bs.isValidUtf8()) { 2299 protocol_ = s; 2300 } 2301 return s; 2302 } 2303 } 2304 /** 2305 * <code>required string protocol = 1;</code> 2306 * 2307 * <pre> 2308 * Protocol name 2309 * </pre> 2310 */ 2311 public com.google.protobuf.ByteString 2312 getProtocolBytes() { 2313 java.lang.Object ref = protocol_; 2314 if (ref instanceof java.lang.String) { 2315 com.google.protobuf.ByteString b = 2316 com.google.protobuf.ByteString.copyFromUtf8( 2317 (java.lang.String) ref); 2318 protocol_ = b; 2319 return b; 2320 } else { 2321 return (com.google.protobuf.ByteString) ref; 2322 } 2323 } 2324 2325 // required string rpcKind = 2; 2326 public static final int RPCKIND_FIELD_NUMBER = 2; 2327 private java.lang.Object rpcKind_; 2328 /** 2329 * <code>required string rpcKind = 2;</code> 2330 * 2331 * <pre> 2332 * RPC kind 2333 * </pre> 2334 */ 2335 public boolean hasRpcKind() { 2336 return ((bitField0_ & 0x00000002) == 0x00000002); 2337 } 2338 /** 2339 * <code>required string rpcKind = 2;</code> 2340 * 2341 * <pre> 2342 * RPC kind 2343 * </pre> 2344 */ 2345 public java.lang.String getRpcKind() { 2346 java.lang.Object ref = rpcKind_; 2347 if (ref instanceof java.lang.String) { 2348 return (java.lang.String) ref; 2349 } else { 2350 com.google.protobuf.ByteString bs = 2351 (com.google.protobuf.ByteString) ref; 2352 java.lang.String s = bs.toStringUtf8(); 2353 if (bs.isValidUtf8()) { 2354 rpcKind_ = s; 2355 } 2356 return s; 2357 } 2358 } 2359 /** 2360 * <code>required string rpcKind = 2;</code> 2361 * 2362 * <pre> 2363 * RPC kind 2364 * </pre> 2365 */ 2366 public com.google.protobuf.ByteString 2367 getRpcKindBytes() { 2368 java.lang.Object ref = rpcKind_; 2369 if (ref instanceof java.lang.String) { 2370 com.google.protobuf.ByteString b = 2371 com.google.protobuf.ByteString.copyFromUtf8( 2372 (java.lang.String) ref); 2373 rpcKind_ = b; 2374 return b; 2375 } else { 2376 return (com.google.protobuf.ByteString) ref; 2377 } 2378 } 2379 2380 private void 
initFields() { 2381 protocol_ = ""; 2382 rpcKind_ = ""; 2383 } 2384 private byte memoizedIsInitialized = -1; 2385 public final boolean isInitialized() { 2386 byte isInitialized = memoizedIsInitialized; 2387 if (isInitialized != -1) return isInitialized == 1; 2388 2389 if (!hasProtocol()) { 2390 memoizedIsInitialized = 0; 2391 return false; 2392 } 2393 if (!hasRpcKind()) { 2394 memoizedIsInitialized = 0; 2395 return false; 2396 } 2397 memoizedIsInitialized = 1; 2398 return true; 2399 } 2400 2401 public void writeTo(com.google.protobuf.CodedOutputStream output) 2402 throws java.io.IOException { 2403 getSerializedSize(); 2404 if (((bitField0_ & 0x00000001) == 0x00000001)) { 2405 output.writeBytes(1, getProtocolBytes()); 2406 } 2407 if (((bitField0_ & 0x00000002) == 0x00000002)) { 2408 output.writeBytes(2, getRpcKindBytes()); 2409 } 2410 getUnknownFields().writeTo(output); 2411 } 2412 2413 private int memoizedSerializedSize = -1; 2414 public int getSerializedSize() { 2415 int size = memoizedSerializedSize; 2416 if (size != -1) return size; 2417 2418 size = 0; 2419 if (((bitField0_ & 0x00000001) == 0x00000001)) { 2420 size += com.google.protobuf.CodedOutputStream 2421 .computeBytesSize(1, getProtocolBytes()); 2422 } 2423 if (((bitField0_ & 0x00000002) == 0x00000002)) { 2424 size += com.google.protobuf.CodedOutputStream 2425 .computeBytesSize(2, getRpcKindBytes()); 2426 } 2427 size += getUnknownFields().getSerializedSize(); 2428 memoizedSerializedSize = size; 2429 return size; 2430 } 2431 2432 private static final long serialVersionUID = 0L; 2433 @java.lang.Override 2434 protected java.lang.Object writeReplace() 2435 throws java.io.ObjectStreamException { 2436 return super.writeReplace(); 2437 } 2438 2439 @java.lang.Override 2440 public boolean equals(final java.lang.Object obj) { 2441 if (obj == this) { 2442 return true; 2443 } 2444 if (!(obj instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto)) { 2445 return super.equals(obj); 2446 } 2447 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto other = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto) obj; 2448 2449 boolean result = true; 2450 result = result && (hasProtocol() == other.hasProtocol()); 2451 if (hasProtocol()) { 2452 result = result && getProtocol() 2453 .equals(other.getProtocol()); 2454 } 2455 result = result && (hasRpcKind() == other.hasRpcKind()); 2456 if (hasRpcKind()) { 2457 result = result && getRpcKind() 2458 .equals(other.getRpcKind()); 2459 } 2460 result = result && 2461 getUnknownFields().equals(other.getUnknownFields()); 2462 return result; 2463 } 2464 2465 private int memoizedHashCode = 0; 2466 @java.lang.Override 2467 public int hashCode() { 2468 if (memoizedHashCode != 0) { 2469 return memoizedHashCode; 2470 } 2471 int hash = 41; 2472 hash = (19 * hash) + getDescriptorForType().hashCode(); 2473 if (hasProtocol()) { 2474 hash = (37 * hash) + PROTOCOL_FIELD_NUMBER; 2475 hash = (53 * hash) + getProtocol().hashCode(); 2476 } 2477 if (hasRpcKind()) { 2478 hash = (37 * hash) + RPCKIND_FIELD_NUMBER; 2479 hash = (53 * hash) + getRpcKind().hashCode(); 2480 } 2481 hash = (29 * hash) + getUnknownFields().hashCode(); 2482 memoizedHashCode = hash; 2483 return hash; 2484 } 2485 2486 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseFrom( 2487 com.google.protobuf.ByteString data) 2488 throws com.google.protobuf.InvalidProtocolBufferException { 2489 return 
PARSER.parseFrom(data); 2490 } 2491 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseFrom( 2492 com.google.protobuf.ByteString data, 2493 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2494 throws com.google.protobuf.InvalidProtocolBufferException { 2495 return PARSER.parseFrom(data, extensionRegistry); 2496 } 2497 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseFrom(byte[] data) 2498 throws com.google.protobuf.InvalidProtocolBufferException { 2499 return PARSER.parseFrom(data); 2500 } 2501 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseFrom( 2502 byte[] data, 2503 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2504 throws com.google.protobuf.InvalidProtocolBufferException { 2505 return PARSER.parseFrom(data, extensionRegistry); 2506 } 2507 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseFrom(java.io.InputStream input) 2508 throws java.io.IOException { 2509 return PARSER.parseFrom(input); 2510 } 2511 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseFrom( 2512 java.io.InputStream input, 2513 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2514 throws java.io.IOException { 2515 return PARSER.parseFrom(input, extensionRegistry); 2516 } 2517 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseDelimitedFrom(java.io.InputStream input) 2518 throws java.io.IOException { 2519 return PARSER.parseDelimitedFrom(input); 2520 } 2521 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseDelimitedFrom( 2522 java.io.InputStream input, 2523 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2524 throws java.io.IOException { 2525 return PARSER.parseDelimitedFrom(input, extensionRegistry); 2526 } 2527 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseFrom( 2528 com.google.protobuf.CodedInputStream input) 2529 throws java.io.IOException { 2530 return PARSER.parseFrom(input); 2531 } 2532 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parseFrom( 2533 com.google.protobuf.CodedInputStream input, 2534 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2535 throws java.io.IOException { 2536 return PARSER.parseFrom(input, extensionRegistry); 2537 } 2538 2539 public static Builder newBuilder() { return Builder.create(); } 2540 public Builder newBuilderForType() { return newBuilder(); } 2541 public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto prototype) { 2542 return newBuilder().mergeFrom(prototype); 2543 } 2544 public Builder toBuilder() { return newBuilder(this); } 2545 2546 @java.lang.Override 2547 protected Builder newBuilderForType( 2548 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 2549 Builder builder = new Builder(parent); 2550 return builder; 2551 } 2552 /** 2553 * Protobuf type {@code hadoop.common.GetProtocolSignatureRequestProto} 2554 * 2555 * <pre> 2556 ** 2557 * Get protocol signature request. 
2558 * </pre> 2559 */ 2560 public static final class Builder extends 2561 com.google.protobuf.GeneratedMessage.Builder<Builder> 2562 implements org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProtoOrBuilder { 2563 public static final com.google.protobuf.Descriptors.Descriptor 2564 getDescriptor() { 2565 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureRequestProto_descriptor; 2566 } 2567 2568 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable 2569 internalGetFieldAccessorTable() { 2570 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureRequestProto_fieldAccessorTable 2571 .ensureFieldAccessorsInitialized( 2572 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto.class, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto.Builder.class); 2573 } 2574 2575 // Construct using org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto.newBuilder() 2576 private Builder() { 2577 maybeForceBuilderInitialization(); 2578 } 2579 2580 private Builder( 2581 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 2582 super(parent); 2583 maybeForceBuilderInitialization(); 2584 } 2585 private void maybeForceBuilderInitialization() { 2586 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 2587 } 2588 } 2589 private static Builder create() { 2590 return new Builder(); 2591 } 2592 2593 public Builder clear() { 2594 super.clear(); 2595 protocol_ = ""; 2596 bitField0_ = (bitField0_ & ~0x00000001); 2597 rpcKind_ = ""; 2598 bitField0_ = (bitField0_ & ~0x00000002); 2599 return this; 2600 } 2601 2602 public Builder clone() { 2603 return create().mergeFrom(buildPartial()); 2604 } 2605 2606 public com.google.protobuf.Descriptors.Descriptor 2607 getDescriptorForType() { 2608 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureRequestProto_descriptor; 2609 } 2610 2611 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto getDefaultInstanceForType() { 2612 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto.getDefaultInstance(); 2613 } 2614 2615 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto build() { 2616 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto result = buildPartial(); 2617 if (!result.isInitialized()) { 2618 throw newUninitializedMessageException(result); 2619 } 2620 return result; 2621 } 2622 2623 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto buildPartial() { 2624 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto result = new org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto(this); 2625 int from_bitField0_ = bitField0_; 2626 int to_bitField0_ = 0; 2627 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 2628 to_bitField0_ |= 0x00000001; 2629 } 2630 result.protocol_ = protocol_; 2631 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 2632 to_bitField0_ |= 0x00000002; 2633 } 2634 result.rpcKind_ = rpcKind_; 2635 result.bitField0_ = to_bitField0_; 2636 onBuilt(); 2637 return result; 2638 } 2639 2640 public Builder mergeFrom(com.google.protobuf.Message other) { 2641 if (other instanceof 
org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto) { 2642 return mergeFrom((org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto)other); 2643 } else { 2644 super.mergeFrom(other); 2645 return this; 2646 } 2647 } 2648 2649 public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto other) { 2650 if (other == org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto.getDefaultInstance()) return this; 2651 if (other.hasProtocol()) { 2652 bitField0_ |= 0x00000001; 2653 protocol_ = other.protocol_; 2654 onChanged(); 2655 } 2656 if (other.hasRpcKind()) { 2657 bitField0_ |= 0x00000002; 2658 rpcKind_ = other.rpcKind_; 2659 onChanged(); 2660 } 2661 this.mergeUnknownFields(other.getUnknownFields()); 2662 return this; 2663 } 2664 2665 public final boolean isInitialized() { 2666 if (!hasProtocol()) { 2667 2668 return false; 2669 } 2670 if (!hasRpcKind()) { 2671 2672 return false; 2673 } 2674 return true; 2675 } 2676 2677 public Builder mergeFrom( 2678 com.google.protobuf.CodedInputStream input, 2679 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2680 throws java.io.IOException { 2681 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto parsedMessage = null; 2682 try { 2683 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 2684 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 2685 parsedMessage = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto) e.getUnfinishedMessage(); 2686 throw e; 2687 } finally { 2688 if (parsedMessage != null) { 2689 mergeFrom(parsedMessage); 2690 } 2691 } 2692 return this; 2693 } 2694 private int bitField0_; 2695 2696 // required string protocol = 1; 2697 private java.lang.Object protocol_ = ""; 2698 /** 2699 * <code>required string protocol = 1;</code> 2700 * 2701 * <pre> 2702 * Protocol name 2703 * </pre> 2704 */ 2705 public boolean hasProtocol() { 2706 return ((bitField0_ & 0x00000001) == 0x00000001); 2707 } 2708 /** 2709 * <code>required string protocol = 1;</code> 2710 * 2711 * <pre> 2712 * Protocol name 2713 * </pre> 2714 */ 2715 public java.lang.String getProtocol() { 2716 java.lang.Object ref = protocol_; 2717 if (!(ref instanceof java.lang.String)) { 2718 java.lang.String s = ((com.google.protobuf.ByteString) ref) 2719 .toStringUtf8(); 2720 protocol_ = s; 2721 return s; 2722 } else { 2723 return (java.lang.String) ref; 2724 } 2725 } 2726 /** 2727 * <code>required string protocol = 1;</code> 2728 * 2729 * <pre> 2730 * Protocol name 2731 * </pre> 2732 */ 2733 public com.google.protobuf.ByteString 2734 getProtocolBytes() { 2735 java.lang.Object ref = protocol_; 2736 if (ref instanceof String) { 2737 com.google.protobuf.ByteString b = 2738 com.google.protobuf.ByteString.copyFromUtf8( 2739 (java.lang.String) ref); 2740 protocol_ = b; 2741 return b; 2742 } else { 2743 return (com.google.protobuf.ByteString) ref; 2744 } 2745 } 2746 /** 2747 * <code>required string protocol = 1;</code> 2748 * 2749 * <pre> 2750 * Protocol name 2751 * </pre> 2752 */ 2753 public Builder setProtocol( 2754 java.lang.String value) { 2755 if (value == null) { 2756 throw new NullPointerException(); 2757 } 2758 bitField0_ |= 0x00000001; 2759 protocol_ = value; 2760 onChanged(); 2761 return this; 2762 } 2763 /** 2764 * <code>required string protocol = 1;</code> 2765 * 2766 * <pre> 2767 * Protocol name 2768 * </pre> 2769 */ 2770 public Builder clearProtocol() { 
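// Clears the presence bit for the required 'protocol' field and restores the default value ("" from the default instance).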
2771 bitField0_ = (bitField0_ & ~0x00000001); 2772 protocol_ = getDefaultInstance().getProtocol(); 2773 onChanged(); 2774 return this; 2775 } 2776 /** 2777 * <code>required string protocol = 1;</code> 2778 * 2779 * <pre> 2780 * Protocol name 2781 * </pre> 2782 */ 2783 public Builder setProtocolBytes( 2784 com.google.protobuf.ByteString value) { 2785 if (value == null) { 2786 throw new NullPointerException(); 2787 } 2788 bitField0_ |= 0x00000001; 2789 protocol_ = value; 2790 onChanged(); 2791 return this; 2792 } 2793 2794 // required string rpcKind = 2; 2795 private java.lang.Object rpcKind_ = ""; 2796 /** 2797 * <code>required string rpcKind = 2;</code> 2798 * 2799 * <pre> 2800 * RPC kind 2801 * </pre> 2802 */ 2803 public boolean hasRpcKind() { 2804 return ((bitField0_ & 0x00000002) == 0x00000002); 2805 } 2806 /** 2807 * <code>required string rpcKind = 2;</code> 2808 * 2809 * <pre> 2810 * RPC kind 2811 * </pre> 2812 */ 2813 public java.lang.String getRpcKind() { 2814 java.lang.Object ref = rpcKind_; 2815 if (!(ref instanceof java.lang.String)) { 2816 java.lang.String s = ((com.google.protobuf.ByteString) ref) 2817 .toStringUtf8(); 2818 rpcKind_ = s; 2819 return s; 2820 } else { 2821 return (java.lang.String) ref; 2822 } 2823 } 2824 /** 2825 * <code>required string rpcKind = 2;</code> 2826 * 2827 * <pre> 2828 * RPC kind 2829 * </pre> 2830 */ 2831 public com.google.protobuf.ByteString 2832 getRpcKindBytes() { 2833 java.lang.Object ref = rpcKind_; 2834 if (ref instanceof String) { 2835 com.google.protobuf.ByteString b = 2836 com.google.protobuf.ByteString.copyFromUtf8( 2837 (java.lang.String) ref); 2838 rpcKind_ = b; 2839 return b; 2840 } else { 2841 return (com.google.protobuf.ByteString) ref; 2842 } 2843 } 2844 /** 2845 * <code>required string rpcKind = 2;</code> 2846 * 2847 * <pre> 2848 * RPC kind 2849 * </pre> 2850 */ 2851 public Builder setRpcKind( 2852 java.lang.String value) { 2853 if (value == null) { 2854 throw new NullPointerException(); 2855 } 2856 bitField0_ |= 0x00000002; 2857 rpcKind_ = value; 2858 onChanged(); 2859 return this; 2860 } 2861 /** 2862 * <code>required string rpcKind = 2;</code> 2863 * 2864 * <pre> 2865 * RPC kind 2866 * </pre> 2867 */ 2868 public Builder clearRpcKind() { 2869 bitField0_ = (bitField0_ & ~0x00000002); 2870 rpcKind_ = getDefaultInstance().getRpcKind(); 2871 onChanged(); 2872 return this; 2873 } 2874 /** 2875 * <code>required string rpcKind = 2;</code> 2876 * 2877 * <pre> 2878 * RPC kind 2879 * </pre> 2880 */ 2881 public Builder setRpcKindBytes( 2882 com.google.protobuf.ByteString value) { 2883 if (value == null) { 2884 throw new NullPointerException(); 2885 } 2886 bitField0_ |= 0x00000002; 2887 rpcKind_ = value; 2888 onChanged(); 2889 return this; 2890 } 2891 2892 // @@protoc_insertion_point(builder_scope:hadoop.common.GetProtocolSignatureRequestProto) 2893 } 2894 2895 static { 2896 defaultInstance = new GetProtocolSignatureRequestProto(true); 2897 defaultInstance.initFields(); 2898 } 2899 2900 // @@protoc_insertion_point(class_scope:hadoop.common.GetProtocolSignatureRequestProto) 2901 } 2902 2903 public interface GetProtocolSignatureResponseProtoOrBuilder 2904 extends com.google.protobuf.MessageOrBuilder { 2905 2906 // repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1; 2907 /** 2908 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 2909 */ 2910 java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto> 2911 getProtocolSignatureList(); 2912 /** 2913 * <code>repeated 
.hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 2914 */ 2915 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto getProtocolSignature(int index); 2916 /** 2917 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 2918 */ 2919 int getProtocolSignatureCount(); 2920 /** 2921 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 2922 */ 2923 java.util.List<? extends org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder> 2924 getProtocolSignatureOrBuilderList(); 2925 /** 2926 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 2927 */ 2928 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder getProtocolSignatureOrBuilder( 2929 int index); 2930 } 2931 /** 2932 * Protobuf type {@code hadoop.common.GetProtocolSignatureResponseProto} 2933 * 2934 * <pre> 2935 ** 2936 * Get protocol signature response. 2937 * </pre> 2938 */ 2939 public static final class GetProtocolSignatureResponseProto extends 2940 com.google.protobuf.GeneratedMessage 2941 implements GetProtocolSignatureResponseProtoOrBuilder { 2942 // Use GetProtocolSignatureResponseProto.newBuilder() to construct. 2943 private GetProtocolSignatureResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 2944 super(builder); 2945 this.unknownFields = builder.getUnknownFields(); 2946 } 2947 private GetProtocolSignatureResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 2948 2949 private static final GetProtocolSignatureResponseProto defaultInstance; 2950 public static GetProtocolSignatureResponseProto getDefaultInstance() { 2951 return defaultInstance; 2952 } 2953 2954 public GetProtocolSignatureResponseProto getDefaultInstanceForType() { 2955 return defaultInstance; 2956 } 2957 2958 private final com.google.protobuf.UnknownFieldSet unknownFields; 2959 @java.lang.Override 2960 public final com.google.protobuf.UnknownFieldSet 2961 getUnknownFields() { 2962 return this.unknownFields; 2963 } 2964 private GetProtocolSignatureResponseProto( 2965 com.google.protobuf.CodedInputStream input, 2966 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2967 throws com.google.protobuf.InvalidProtocolBufferException { 2968 initFields(); 2969 int mutable_bitField0_ = 0; 2970 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 2971 com.google.protobuf.UnknownFieldSet.newBuilder(); 2972 try { 2973 boolean done = false; 2974 while (!done) { 2975 int tag = input.readTag(); 2976 switch (tag) { 2977 case 0: 2978 done = true; 2979 break; 2980 default: { 2981 if (!parseUnknownField(input, unknownFields, 2982 extensionRegistry, tag)) { 2983 done = true; 2984 } 2985 break; 2986 } 2987 case 10: { 2988 if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 2989 protocolSignature_ = new java.util.ArrayList<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto>(); 2990 mutable_bitField0_ |= 0x00000001; 2991 } 2992 protocolSignature_.add(input.readMessage(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.PARSER, extensionRegistry)); 2993 break; 2994 } 2995 } 2996 } 2997 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 2998 throw e.setUnfinishedMessage(this); 2999 } catch (java.io.IOException e) { 3000 throw new com.google.protobuf.InvalidProtocolBufferException( 3001 e.getMessage()).setUnfinishedMessage(this); 3002 } finally { 3003 if 
(((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 3004 protocolSignature_ = java.util.Collections.unmodifiableList(protocolSignature_); 3005 } 3006 this.unknownFields = unknownFields.build(); 3007 makeExtensionsImmutable(); 3008 } 3009 } 3010 public static final com.google.protobuf.Descriptors.Descriptor 3011 getDescriptor() { 3012 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureResponseProto_descriptor; 3013 } 3014 3015 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable 3016 internalGetFieldAccessorTable() { 3017 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureResponseProto_fieldAccessorTable 3018 .ensureFieldAccessorsInitialized( 3019 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.class, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.Builder.class); 3020 } 3021 3022 public static com.google.protobuf.Parser<GetProtocolSignatureResponseProto> PARSER = 3023 new com.google.protobuf.AbstractParser<GetProtocolSignatureResponseProto>() { 3024 public GetProtocolSignatureResponseProto parsePartialFrom( 3025 com.google.protobuf.CodedInputStream input, 3026 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3027 throws com.google.protobuf.InvalidProtocolBufferException { 3028 return new GetProtocolSignatureResponseProto(input, extensionRegistry); 3029 } 3030 }; 3031 3032 @java.lang.Override 3033 public com.google.protobuf.Parser<GetProtocolSignatureResponseProto> getParserForType() { 3034 return PARSER; 3035 } 3036 3037 // repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1; 3038 public static final int PROTOCOLSIGNATURE_FIELD_NUMBER = 1; 3039 private java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto> protocolSignature_; 3040 /** 3041 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3042 */ 3043 public java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto> getProtocolSignatureList() { 3044 return protocolSignature_; 3045 } 3046 /** 3047 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3048 */ 3049 public java.util.List<? 
extends org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder> 3050 getProtocolSignatureOrBuilderList() { 3051 return protocolSignature_; 3052 } 3053 /** 3054 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3055 */ 3056 public int getProtocolSignatureCount() { 3057 return protocolSignature_.size(); 3058 } 3059 /** 3060 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3061 */ 3062 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto getProtocolSignature(int index) { 3063 return protocolSignature_.get(index); 3064 } 3065 /** 3066 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3067 */ 3068 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder getProtocolSignatureOrBuilder( 3069 int index) { 3070 return protocolSignature_.get(index); 3071 } 3072 3073 private void initFields() { 3074 protocolSignature_ = java.util.Collections.emptyList(); 3075 } 3076 private byte memoizedIsInitialized = -1; 3077 public final boolean isInitialized() { 3078 byte isInitialized = memoizedIsInitialized; 3079 if (isInitialized != -1) return isInitialized == 1; 3080 3081 for (int i = 0; i < getProtocolSignatureCount(); i++) { 3082 if (!getProtocolSignature(i).isInitialized()) { 3083 memoizedIsInitialized = 0; 3084 return false; 3085 } 3086 } 3087 memoizedIsInitialized = 1; 3088 return true; 3089 } 3090 3091 public void writeTo(com.google.protobuf.CodedOutputStream output) 3092 throws java.io.IOException { 3093 getSerializedSize(); 3094 for (int i = 0; i < protocolSignature_.size(); i++) { 3095 output.writeMessage(1, protocolSignature_.get(i)); 3096 } 3097 getUnknownFields().writeTo(output); 3098 } 3099 3100 private int memoizedSerializedSize = -1; 3101 public int getSerializedSize() { 3102 int size = memoizedSerializedSize; 3103 if (size != -1) return size; 3104 3105 size = 0; 3106 for (int i = 0; i < protocolSignature_.size(); i++) { 3107 size += com.google.protobuf.CodedOutputStream 3108 .computeMessageSize(1, protocolSignature_.get(i)); 3109 } 3110 size += getUnknownFields().getSerializedSize(); 3111 memoizedSerializedSize = size; 3112 return size; 3113 } 3114 3115 private static final long serialVersionUID = 0L; 3116 @java.lang.Override 3117 protected java.lang.Object writeReplace() 3118 throws java.io.ObjectStreamException { 3119 return super.writeReplace(); 3120 } 3121 3122 @java.lang.Override 3123 public boolean equals(final java.lang.Object obj) { 3124 if (obj == this) { 3125 return true; 3126 } 3127 if (!(obj instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto)) { 3128 return super.equals(obj); 3129 } 3130 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto other = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto) obj; 3131 3132 boolean result = true; 3133 result = result && getProtocolSignatureList() 3134 .equals(other.getProtocolSignatureList()); 3135 result = result && 3136 getUnknownFields().equals(other.getUnknownFields()); 3137 return result; 3138 } 3139 3140 private int memoizedHashCode = 0; 3141 @java.lang.Override 3142 public int hashCode() { 3143 if (memoizedHashCode != 0) { 3144 return memoizedHashCode; 3145 } 3146 int hash = 41; 3147 hash = (19 * hash) + getDescriptorForType().hashCode(); 3148 if (getProtocolSignatureCount() > 0) { 3149 hash = (37 * hash) + PROTOCOLSIGNATURE_FIELD_NUMBER; 
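// Mix in the field number first, then the hash of the repeated protocolSignature list.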
3150 hash = (53 * hash) + getProtocolSignatureList().hashCode(); 3151 } 3152 hash = (29 * hash) + getUnknownFields().hashCode(); 3153 memoizedHashCode = hash; 3154 return hash; 3155 } 3156 3157 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseFrom( 3158 com.google.protobuf.ByteString data) 3159 throws com.google.protobuf.InvalidProtocolBufferException { 3160 return PARSER.parseFrom(data); 3161 } 3162 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseFrom( 3163 com.google.protobuf.ByteString data, 3164 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3165 throws com.google.protobuf.InvalidProtocolBufferException { 3166 return PARSER.parseFrom(data, extensionRegistry); 3167 } 3168 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseFrom(byte[] data) 3169 throws com.google.protobuf.InvalidProtocolBufferException { 3170 return PARSER.parseFrom(data); 3171 } 3172 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseFrom( 3173 byte[] data, 3174 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3175 throws com.google.protobuf.InvalidProtocolBufferException { 3176 return PARSER.parseFrom(data, extensionRegistry); 3177 } 3178 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseFrom(java.io.InputStream input) 3179 throws java.io.IOException { 3180 return PARSER.parseFrom(input); 3181 } 3182 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseFrom( 3183 java.io.InputStream input, 3184 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3185 throws java.io.IOException { 3186 return PARSER.parseFrom(input, extensionRegistry); 3187 } 3188 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseDelimitedFrom(java.io.InputStream input) 3189 throws java.io.IOException { 3190 return PARSER.parseDelimitedFrom(input); 3191 } 3192 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseDelimitedFrom( 3193 java.io.InputStream input, 3194 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3195 throws java.io.IOException { 3196 return PARSER.parseDelimitedFrom(input, extensionRegistry); 3197 } 3198 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseFrom( 3199 com.google.protobuf.CodedInputStream input) 3200 throws java.io.IOException { 3201 return PARSER.parseFrom(input); 3202 } 3203 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parseFrom( 3204 com.google.protobuf.CodedInputStream input, 3205 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3206 throws java.io.IOException { 3207 return PARSER.parseFrom(input, extensionRegistry); 3208 } 3209 3210 public static Builder newBuilder() { return Builder.create(); } 3211 public Builder newBuilderForType() { return newBuilder(); } 3212 public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto prototype) { 3213 return newBuilder().mergeFrom(prototype); 3214 } 3215 public Builder toBuilder() { return newBuilder(this); } 3216 3217 @java.lang.Override 3218 protected Builder newBuilderForType( 3219 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 3220 
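// Invoked when this message is built as a nested field of another builder; changes made through
// the returned Builder are reported back to the supplied parent.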
Builder builder = new Builder(parent); 3221 return builder; 3222 } 3223 /** 3224 * Protobuf type {@code hadoop.common.GetProtocolSignatureResponseProto} 3225 * 3226 * <pre> 3227 ** 3228 * Get protocol signature response. 3229 * </pre> 3230 */ 3231 public static final class Builder extends 3232 com.google.protobuf.GeneratedMessage.Builder<Builder> 3233 implements org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProtoOrBuilder { 3234 public static final com.google.protobuf.Descriptors.Descriptor 3235 getDescriptor() { 3236 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureResponseProto_descriptor; 3237 } 3238 3239 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable 3240 internalGetFieldAccessorTable() { 3241 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureResponseProto_fieldAccessorTable 3242 .ensureFieldAccessorsInitialized( 3243 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.class, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.Builder.class); 3244 } 3245 3246 // Construct using org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.newBuilder() 3247 private Builder() { 3248 maybeForceBuilderInitialization(); 3249 } 3250 3251 private Builder( 3252 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 3253 super(parent); 3254 maybeForceBuilderInitialization(); 3255 } 3256 private void maybeForceBuilderInitialization() { 3257 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 3258 getProtocolSignatureFieldBuilder(); 3259 } 3260 } 3261 private static Builder create() { 3262 return new Builder(); 3263 } 3264 3265 public Builder clear() { 3266 super.clear(); 3267 if (protocolSignatureBuilder_ == null) { 3268 protocolSignature_ = java.util.Collections.emptyList(); 3269 bitField0_ = (bitField0_ & ~0x00000001); 3270 } else { 3271 protocolSignatureBuilder_.clear(); 3272 } 3273 return this; 3274 } 3275 3276 public Builder clone() { 3277 return create().mergeFrom(buildPartial()); 3278 } 3279 3280 public com.google.protobuf.Descriptors.Descriptor 3281 getDescriptorForType() { 3282 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_GetProtocolSignatureResponseProto_descriptor; 3283 } 3284 3285 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto getDefaultInstanceForType() { 3286 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.getDefaultInstance(); 3287 } 3288 3289 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto build() { 3290 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto result = buildPartial(); 3291 if (!result.isInitialized()) { 3292 throw newUninitializedMessageException(result); 3293 } 3294 return result; 3295 } 3296 3297 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto buildPartial() { 3298 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto result = new org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto(this); 3299 int from_bitField0_ = bitField0_; 3300 if (protocolSignatureBuilder_ == null) { 3301 if (((bitField0_ & 0x00000001) == 0x00000001)) { 3302 protocolSignature_ = 
java.util.Collections.unmodifiableList(protocolSignature_); 3303 bitField0_ = (bitField0_ & ~0x00000001); 3304 } 3305 result.protocolSignature_ = protocolSignature_; 3306 } else { 3307 result.protocolSignature_ = protocolSignatureBuilder_.build(); 3308 } 3309 onBuilt(); 3310 return result; 3311 } 3312 3313 public Builder mergeFrom(com.google.protobuf.Message other) { 3314 if (other instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto) { 3315 return mergeFrom((org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto)other); 3316 } else { 3317 super.mergeFrom(other); 3318 return this; 3319 } 3320 } 3321 3322 public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto other) { 3323 if (other == org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.getDefaultInstance()) return this; 3324 if (protocolSignatureBuilder_ == null) { 3325 if (!other.protocolSignature_.isEmpty()) { 3326 if (protocolSignature_.isEmpty()) { 3327 protocolSignature_ = other.protocolSignature_; 3328 bitField0_ = (bitField0_ & ~0x00000001); 3329 } else { 3330 ensureProtocolSignatureIsMutable(); 3331 protocolSignature_.addAll(other.protocolSignature_); 3332 } 3333 onChanged(); 3334 } 3335 } else { 3336 if (!other.protocolSignature_.isEmpty()) { 3337 if (protocolSignatureBuilder_.isEmpty()) { 3338 protocolSignatureBuilder_.dispose(); 3339 protocolSignatureBuilder_ = null; 3340 protocolSignature_ = other.protocolSignature_; 3341 bitField0_ = (bitField0_ & ~0x00000001); 3342 protocolSignatureBuilder_ = 3343 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 3344 getProtocolSignatureFieldBuilder() : null; 3345 } else { 3346 protocolSignatureBuilder_.addAllMessages(other.protocolSignature_); 3347 } 3348 } 3349 } 3350 this.mergeUnknownFields(other.getUnknownFields()); 3351 return this; 3352 } 3353 3354 public final boolean isInitialized() { 3355 for (int i = 0; i < getProtocolSignatureCount(); i++) { 3356 if (!getProtocolSignature(i).isInitialized()) { 3357 3358 return false; 3359 } 3360 } 3361 return true; 3362 } 3363 3364 public Builder mergeFrom( 3365 com.google.protobuf.CodedInputStream input, 3366 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3367 throws java.io.IOException { 3368 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto parsedMessage = null; 3369 try { 3370 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 3371 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 3372 parsedMessage = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto) e.getUnfinishedMessage(); 3373 throw e; 3374 } finally { 3375 if (parsedMessage != null) { 3376 mergeFrom(parsedMessage); 3377 } 3378 } 3379 return this; 3380 } 3381 private int bitField0_; 3382 3383 // repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1; 3384 private java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto> protocolSignature_ = 3385 java.util.Collections.emptyList(); 3386 private void ensureProtocolSignatureIsMutable() { 3387 if (!((bitField0_ & 0x00000001) == 0x00000001)) { 3388 protocolSignature_ = new java.util.ArrayList<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto>(protocolSignature_); 3389 bitField0_ |= 0x00000001; 3390 } 3391 } 3392 3393 private com.google.protobuf.RepeatedFieldBuilder< 3394 
org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder> protocolSignatureBuilder_; 3395 3396 /** 3397 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3398 */ 3399 public java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto> getProtocolSignatureList() { 3400 if (protocolSignatureBuilder_ == null) { 3401 return java.util.Collections.unmodifiableList(protocolSignature_); 3402 } else { 3403 return protocolSignatureBuilder_.getMessageList(); 3404 } 3405 } 3406 /** 3407 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3408 */ 3409 public int getProtocolSignatureCount() { 3410 if (protocolSignatureBuilder_ == null) { 3411 return protocolSignature_.size(); 3412 } else { 3413 return protocolSignatureBuilder_.getCount(); 3414 } 3415 } 3416 /** 3417 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3418 */ 3419 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto getProtocolSignature(int index) { 3420 if (protocolSignatureBuilder_ == null) { 3421 return protocolSignature_.get(index); 3422 } else { 3423 return protocolSignatureBuilder_.getMessage(index); 3424 } 3425 } 3426 /** 3427 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3428 */ 3429 public Builder setProtocolSignature( 3430 int index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto value) { 3431 if (protocolSignatureBuilder_ == null) { 3432 if (value == null) { 3433 throw new NullPointerException(); 3434 } 3435 ensureProtocolSignatureIsMutable(); 3436 protocolSignature_.set(index, value); 3437 onChanged(); 3438 } else { 3439 protocolSignatureBuilder_.setMessage(index, value); 3440 } 3441 return this; 3442 } 3443 /** 3444 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3445 */ 3446 public Builder setProtocolSignature( 3447 int index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder builderForValue) { 3448 if (protocolSignatureBuilder_ == null) { 3449 ensureProtocolSignatureIsMutable(); 3450 protocolSignature_.set(index, builderForValue.build()); 3451 onChanged(); 3452 } else { 3453 protocolSignatureBuilder_.setMessage(index, builderForValue.build()); 3454 } 3455 return this; 3456 } 3457 /** 3458 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3459 */ 3460 public Builder addProtocolSignature(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto value) { 3461 if (protocolSignatureBuilder_ == null) { 3462 if (value == null) { 3463 throw new NullPointerException(); 3464 } 3465 ensureProtocolSignatureIsMutable(); 3466 protocolSignature_.add(value); 3467 onChanged(); 3468 } else { 3469 protocolSignatureBuilder_.addMessage(value); 3470 } 3471 return this; 3472 } 3473 /** 3474 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3475 */ 3476 public Builder addProtocolSignature( 3477 int index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto value) { 3478 if (protocolSignatureBuilder_ == null) { 3479 if (value == null) { 3480 throw new NullPointerException(); 3481 } 3482 ensureProtocolSignatureIsMutable(); 3483 protocolSignature_.add(index, value); 3484 onChanged(); 
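        // Descriptive note: protocolSignatureBuilder_ is still null on this path, so the element was
        // inserted into the plain backing list above; the else branch below delegates the same
        // operation to the nested RepeatedFieldBuilder once one has been created.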
3485 } else { 3486 protocolSignatureBuilder_.addMessage(index, value); 3487 } 3488 return this; 3489 } 3490 /** 3491 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3492 */ 3493 public Builder addProtocolSignature( 3494 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder builderForValue) { 3495 if (protocolSignatureBuilder_ == null) { 3496 ensureProtocolSignatureIsMutable(); 3497 protocolSignature_.add(builderForValue.build()); 3498 onChanged(); 3499 } else { 3500 protocolSignatureBuilder_.addMessage(builderForValue.build()); 3501 } 3502 return this; 3503 } 3504 /** 3505 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3506 */ 3507 public Builder addProtocolSignature( 3508 int index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder builderForValue) { 3509 if (protocolSignatureBuilder_ == null) { 3510 ensureProtocolSignatureIsMutable(); 3511 protocolSignature_.add(index, builderForValue.build()); 3512 onChanged(); 3513 } else { 3514 protocolSignatureBuilder_.addMessage(index, builderForValue.build()); 3515 } 3516 return this; 3517 } 3518 /** 3519 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3520 */ 3521 public Builder addAllProtocolSignature( 3522 java.lang.Iterable<? extends org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto> values) { 3523 if (protocolSignatureBuilder_ == null) { 3524 ensureProtocolSignatureIsMutable(); 3525 super.addAll(values, protocolSignature_); 3526 onChanged(); 3527 } else { 3528 protocolSignatureBuilder_.addAllMessages(values); 3529 } 3530 return this; 3531 } 3532 /** 3533 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3534 */ 3535 public Builder clearProtocolSignature() { 3536 if (protocolSignatureBuilder_ == null) { 3537 protocolSignature_ = java.util.Collections.emptyList(); 3538 bitField0_ = (bitField0_ & ~0x00000001); 3539 onChanged(); 3540 } else { 3541 protocolSignatureBuilder_.clear(); 3542 } 3543 return this; 3544 } 3545 /** 3546 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3547 */ 3548 public Builder removeProtocolSignature(int index) { 3549 if (protocolSignatureBuilder_ == null) { 3550 ensureProtocolSignatureIsMutable(); 3551 protocolSignature_.remove(index); 3552 onChanged(); 3553 } else { 3554 protocolSignatureBuilder_.remove(index); 3555 } 3556 return this; 3557 } 3558 /** 3559 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3560 */ 3561 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder getProtocolSignatureBuilder( 3562 int index) { 3563 return getProtocolSignatureFieldBuilder().getBuilder(index); 3564 } 3565 /** 3566 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3567 */ 3568 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder getProtocolSignatureOrBuilder( 3569 int index) { 3570 if (protocolSignatureBuilder_ == null) { 3571 return protocolSignature_.get(index); } else { 3572 return protocolSignatureBuilder_.getMessageOrBuilder(index); 3573 } 3574 } 3575 /** 3576 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3577 */ 3578 public java.util.List<? 
extends org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder> 3579 getProtocolSignatureOrBuilderList() { 3580 if (protocolSignatureBuilder_ != null) { 3581 return protocolSignatureBuilder_.getMessageOrBuilderList(); 3582 } else { 3583 return java.util.Collections.unmodifiableList(protocolSignature_); 3584 } 3585 } 3586 /** 3587 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3588 */ 3589 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder addProtocolSignatureBuilder() { 3590 return getProtocolSignatureFieldBuilder().addBuilder( 3591 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.getDefaultInstance()); 3592 } 3593 /** 3594 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3595 */ 3596 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder addProtocolSignatureBuilder( 3597 int index) { 3598 return getProtocolSignatureFieldBuilder().addBuilder( 3599 index, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.getDefaultInstance()); 3600 } 3601 /** 3602 * <code>repeated .hadoop.common.ProtocolSignatureProto protocolSignature = 1;</code> 3603 */ 3604 public java.util.List<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder> 3605 getProtocolSignatureBuilderList() { 3606 return getProtocolSignatureFieldBuilder().getBuilderList(); 3607 } 3608 private com.google.protobuf.RepeatedFieldBuilder< 3609 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder> 3610 getProtocolSignatureFieldBuilder() { 3611 if (protocolSignatureBuilder_ == null) { 3612 protocolSignatureBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 3613 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder>( 3614 protocolSignature_, 3615 ((bitField0_ & 0x00000001) == 0x00000001), 3616 getParentForChildren(), 3617 isClean()); 3618 protocolSignature_ = null; 3619 } 3620 return protocolSignatureBuilder_; 3621 } 3622 3623 // @@protoc_insertion_point(builder_scope:hadoop.common.GetProtocolSignatureResponseProto) 3624 } 3625 3626 static { 3627 defaultInstance = new GetProtocolSignatureResponseProto(true); 3628 defaultInstance.initFields(); 3629 } 3630 3631 // @@protoc_insertion_point(class_scope:hadoop.common.GetProtocolSignatureResponseProto) 3632 } 3633 3634 public interface ProtocolSignatureProtoOrBuilder 3635 extends com.google.protobuf.MessageOrBuilder { 3636 3637 // required uint64 version = 1; 3638 /** 3639 * <code>required uint64 version = 1;</code> 3640 */ 3641 boolean hasVersion(); 3642 /** 3643 * <code>required uint64 version = 1;</code> 3644 */ 3645 long getVersion(); 3646 3647 // repeated uint32 methods = 2; 3648 /** 3649 * <code>repeated uint32 methods = 2;</code> 3650 */ 3651 java.util.List<java.lang.Integer> getMethodsList(); 3652 /** 3653 * <code>repeated uint32 methods = 2;</code> 3654 */ 3655 int getMethodsCount(); 3656 /** 3657 * <code>repeated uint32 methods = 2;</code> 3658 */ 3659 int getMethods(int index); 3660 } 3661 /** 3662 * Protobuf type {@code hadoop.common.ProtocolSignatureProto} 3663 */ 3664 public static 
final class ProtocolSignatureProto extends 3665 com.google.protobuf.GeneratedMessage 3666 implements ProtocolSignatureProtoOrBuilder { 3667 // Use ProtocolSignatureProto.newBuilder() to construct. 3668 private ProtocolSignatureProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 3669 super(builder); 3670 this.unknownFields = builder.getUnknownFields(); 3671 } 3672 private ProtocolSignatureProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 3673 3674 private static final ProtocolSignatureProto defaultInstance; 3675 public static ProtocolSignatureProto getDefaultInstance() { 3676 return defaultInstance; 3677 } 3678 3679 public ProtocolSignatureProto getDefaultInstanceForType() { 3680 return defaultInstance; 3681 } 3682 3683 private final com.google.protobuf.UnknownFieldSet unknownFields; 3684 @java.lang.Override 3685 public final com.google.protobuf.UnknownFieldSet 3686 getUnknownFields() { 3687 return this.unknownFields; 3688 } 3689 private ProtocolSignatureProto( 3690 com.google.protobuf.CodedInputStream input, 3691 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3692 throws com.google.protobuf.InvalidProtocolBufferException { 3693 initFields(); 3694 int mutable_bitField0_ = 0; 3695 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 3696 com.google.protobuf.UnknownFieldSet.newBuilder(); 3697 try { 3698 boolean done = false; 3699 while (!done) { 3700 int tag = input.readTag(); 3701 switch (tag) { 3702 case 0: 3703 done = true; 3704 break; 3705 default: { 3706 if (!parseUnknownField(input, unknownFields, 3707 extensionRegistry, tag)) { 3708 done = true; 3709 } 3710 break; 3711 } 3712 case 8: { 3713 bitField0_ |= 0x00000001; 3714 version_ = input.readUInt64(); 3715 break; 3716 } 3717 case 16: { 3718 if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { 3719 methods_ = new java.util.ArrayList<java.lang.Integer>(); 3720 mutable_bitField0_ |= 0x00000002; 3721 } 3722 methods_.add(input.readUInt32()); 3723 break; 3724 } 3725 case 18: { 3726 int length = input.readRawVarint32(); 3727 int limit = input.pushLimit(length); 3728 if (!((mutable_bitField0_ & 0x00000002) == 0x00000002) && input.getBytesUntilLimit() > 0) { 3729 methods_ = new java.util.ArrayList<java.lang.Integer>(); 3730 mutable_bitField0_ |= 0x00000002; 3731 } 3732 while (input.getBytesUntilLimit() > 0) { 3733 methods_.add(input.readUInt32()); 3734 } 3735 input.popLimit(limit); 3736 break; 3737 } 3738 } 3739 } 3740 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 3741 throw e.setUnfinishedMessage(this); 3742 } catch (java.io.IOException e) { 3743 throw new com.google.protobuf.InvalidProtocolBufferException( 3744 e.getMessage()).setUnfinishedMessage(this); 3745 } finally { 3746 if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { 3747 methods_ = java.util.Collections.unmodifiableList(methods_); 3748 } 3749 this.unknownFields = unknownFields.build(); 3750 makeExtensionsImmutable(); 3751 } 3752 } 3753 public static final com.google.protobuf.Descriptors.Descriptor 3754 getDescriptor() { 3755 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolSignatureProto_descriptor; 3756 } 3757 3758 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable 3759 internalGetFieldAccessorTable() { 3760 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolSignatureProto_fieldAccessorTable 3761 .ensureFieldAccessorsInitialized( 3762 
org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.class, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder.class); 3763 } 3764 3765 public static com.google.protobuf.Parser<ProtocolSignatureProto> PARSER = 3766 new com.google.protobuf.AbstractParser<ProtocolSignatureProto>() { 3767 public ProtocolSignatureProto parsePartialFrom( 3768 com.google.protobuf.CodedInputStream input, 3769 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3770 throws com.google.protobuf.InvalidProtocolBufferException { 3771 return new ProtocolSignatureProto(input, extensionRegistry); 3772 } 3773 }; 3774 3775 @java.lang.Override 3776 public com.google.protobuf.Parser<ProtocolSignatureProto> getParserForType() { 3777 return PARSER; 3778 } 3779 3780 private int bitField0_; 3781 // required uint64 version = 1; 3782 public static final int VERSION_FIELD_NUMBER = 1; 3783 private long version_; 3784 /** 3785 * <code>required uint64 version = 1;</code> 3786 */ 3787 public boolean hasVersion() { 3788 return ((bitField0_ & 0x00000001) == 0x00000001); 3789 } 3790 /** 3791 * <code>required uint64 version = 1;</code> 3792 */ 3793 public long getVersion() { 3794 return version_; 3795 } 3796 3797 // repeated uint32 methods = 2; 3798 public static final int METHODS_FIELD_NUMBER = 2; 3799 private java.util.List<java.lang.Integer> methods_; 3800 /** 3801 * <code>repeated uint32 methods = 2;</code> 3802 */ 3803 public java.util.List<java.lang.Integer> 3804 getMethodsList() { 3805 return methods_; 3806 } 3807 /** 3808 * <code>repeated uint32 methods = 2;</code> 3809 */ 3810 public int getMethodsCount() { 3811 return methods_.size(); 3812 } 3813 /** 3814 * <code>repeated uint32 methods = 2;</code> 3815 */ 3816 public int getMethods(int index) { 3817 return methods_.get(index); 3818 } 3819 3820 private void initFields() { 3821 version_ = 0L; 3822 methods_ = java.util.Collections.emptyList(); 3823 } 3824 private byte memoizedIsInitialized = -1; 3825 public final boolean isInitialized() { 3826 byte isInitialized = memoizedIsInitialized; 3827 if (isInitialized != -1) return isInitialized == 1; 3828 3829 if (!hasVersion()) { 3830 memoizedIsInitialized = 0; 3831 return false; 3832 } 3833 memoizedIsInitialized = 1; 3834 return true; 3835 } 3836 3837 public void writeTo(com.google.protobuf.CodedOutputStream output) 3838 throws java.io.IOException { 3839 getSerializedSize(); 3840 if (((bitField0_ & 0x00000001) == 0x00000001)) { 3841 output.writeUInt64(1, version_); 3842 } 3843 for (int i = 0; i < methods_.size(); i++) { 3844 output.writeUInt32(2, methods_.get(i)); 3845 } 3846 getUnknownFields().writeTo(output); 3847 } 3848 3849 private int memoizedSerializedSize = -1; 3850 public int getSerializedSize() { 3851 int size = memoizedSerializedSize; 3852 if (size != -1) return size; 3853 3854 size = 0; 3855 if (((bitField0_ & 0x00000001) == 0x00000001)) { 3856 size += com.google.protobuf.CodedOutputStream 3857 .computeUInt64Size(1, version_); 3858 } 3859 { 3860 int dataSize = 0; 3861 for (int i = 0; i < methods_.size(); i++) { 3862 dataSize += com.google.protobuf.CodedOutputStream 3863 .computeUInt32SizeNoTag(methods_.get(i)); 3864 } 3865 size += dataSize; 3866 size += 1 * getMethodsList().size(); 3867 } 3868 size += getUnknownFields().getSerializedSize(); 3869 memoizedSerializedSize = size; 3870 return size; 3871 } 3872 3873 private static final long serialVersionUID = 0L; 3874 @java.lang.Override 3875 protected java.lang.Object writeReplace() 3876 throws 
java.io.ObjectStreamException { 3877 return super.writeReplace(); 3878 } 3879 3880 @java.lang.Override 3881 public boolean equals(final java.lang.Object obj) { 3882 if (obj == this) { 3883 return true; 3884 } 3885 if (!(obj instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto)) { 3886 return super.equals(obj); 3887 } 3888 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto other = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto) obj; 3889 3890 boolean result = true; 3891 result = result && (hasVersion() == other.hasVersion()); 3892 if (hasVersion()) { 3893 result = result && (getVersion() 3894 == other.getVersion()); 3895 } 3896 result = result && getMethodsList() 3897 .equals(other.getMethodsList()); 3898 result = result && 3899 getUnknownFields().equals(other.getUnknownFields()); 3900 return result; 3901 } 3902 3903 private int memoizedHashCode = 0; 3904 @java.lang.Override 3905 public int hashCode() { 3906 if (memoizedHashCode != 0) { 3907 return memoizedHashCode; 3908 } 3909 int hash = 41; 3910 hash = (19 * hash) + getDescriptorForType().hashCode(); 3911 if (hasVersion()) { 3912 hash = (37 * hash) + VERSION_FIELD_NUMBER; 3913 hash = (53 * hash) + hashLong(getVersion()); 3914 } 3915 if (getMethodsCount() > 0) { 3916 hash = (37 * hash) + METHODS_FIELD_NUMBER; 3917 hash = (53 * hash) + getMethodsList().hashCode(); 3918 } 3919 hash = (29 * hash) + getUnknownFields().hashCode(); 3920 memoizedHashCode = hash; 3921 return hash; 3922 } 3923 3924 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseFrom( 3925 com.google.protobuf.ByteString data) 3926 throws com.google.protobuf.InvalidProtocolBufferException { 3927 return PARSER.parseFrom(data); 3928 } 3929 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseFrom( 3930 com.google.protobuf.ByteString data, 3931 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3932 throws com.google.protobuf.InvalidProtocolBufferException { 3933 return PARSER.parseFrom(data, extensionRegistry); 3934 } 3935 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseFrom(byte[] data) 3936 throws com.google.protobuf.InvalidProtocolBufferException { 3937 return PARSER.parseFrom(data); 3938 } 3939 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseFrom( 3940 byte[] data, 3941 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3942 throws com.google.protobuf.InvalidProtocolBufferException { 3943 return PARSER.parseFrom(data, extensionRegistry); 3944 } 3945 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseFrom(java.io.InputStream input) 3946 throws java.io.IOException { 3947 return PARSER.parseFrom(input); 3948 } 3949 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseFrom( 3950 java.io.InputStream input, 3951 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3952 throws java.io.IOException { 3953 return PARSER.parseFrom(input, extensionRegistry); 3954 } 3955 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseDelimitedFrom(java.io.InputStream input) 3956 throws java.io.IOException { 3957 return PARSER.parseDelimitedFrom(input); 3958 } 3959 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseDelimitedFrom( 3960 java.io.InputStream input, 3961 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3962 throws java.io.IOException { 3963 return PARSER.parseDelimitedFrom(input, extensionRegistry); 3964 } 3965 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseFrom( 3966 com.google.protobuf.CodedInputStream input) 3967 throws java.io.IOException { 3968 return PARSER.parseFrom(input); 3969 } 3970 public static org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parseFrom( 3971 com.google.protobuf.CodedInputStream input, 3972 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3973 throws java.io.IOException { 3974 return PARSER.parseFrom(input, extensionRegistry); 3975 } 3976 3977 public static Builder newBuilder() { return Builder.create(); } 3978 public Builder newBuilderForType() { return newBuilder(); } 3979 public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto prototype) { 3980 return newBuilder().mergeFrom(prototype); 3981 } 3982 public Builder toBuilder() { return newBuilder(this); } 3983 3984 @java.lang.Override 3985 protected Builder newBuilderForType( 3986 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 3987 Builder builder = new Builder(parent); 3988 return builder; 3989 } 3990 /** 3991 * Protobuf type {@code hadoop.common.ProtocolSignatureProto} 3992 */ 3993 public static final class Builder extends 3994 com.google.protobuf.GeneratedMessage.Builder<Builder> 3995 implements org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProtoOrBuilder { 3996 public static final com.google.protobuf.Descriptors.Descriptor 3997 getDescriptor() { 3998 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolSignatureProto_descriptor; 3999 } 4000 4001 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable 4002 internalGetFieldAccessorTable() { 4003 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolSignatureProto_fieldAccessorTable 4004 .ensureFieldAccessorsInitialized( 4005 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.class, org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.Builder.class); 4006 } 4007 4008 // Construct using org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.newBuilder() 4009 private Builder() { 4010 maybeForceBuilderInitialization(); 4011 } 4012 4013 private Builder( 4014 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 4015 super(parent); 4016 maybeForceBuilderInitialization(); 4017 } 4018 private void maybeForceBuilderInitialization() { 4019 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 4020 } 4021 } 4022 private static Builder create() { 4023 return new Builder(); 4024 } 4025 4026 public Builder clear() { 4027 super.clear(); 4028 version_ = 0L; 4029 bitField0_ = (bitField0_ & ~0x00000001); 4030 methods_ = java.util.Collections.emptyList(); 4031 bitField0_ = (bitField0_ & ~0x00000002); 4032 return this; 4033 } 4034 4035 public Builder clone() { 4036 return create().mergeFrom(buildPartial()); 4037 } 4038 4039 public com.google.protobuf.Descriptors.Descriptor 4040 getDescriptorForType() { 4041 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.internal_static_hadoop_common_ProtocolSignatureProto_descriptor; 4042 } 4043 4044 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto getDefaultInstanceForType() { 4045 return 
org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.getDefaultInstance(); 4046 } 4047 4048 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto build() { 4049 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto result = buildPartial(); 4050 if (!result.isInitialized()) { 4051 throw newUninitializedMessageException(result); 4052 } 4053 return result; 4054 } 4055 4056 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto buildPartial() { 4057 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto result = new org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto(this); 4058 int from_bitField0_ = bitField0_; 4059 int to_bitField0_ = 0; 4060 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 4061 to_bitField0_ |= 0x00000001; 4062 } 4063 result.version_ = version_; 4064 if (((bitField0_ & 0x00000002) == 0x00000002)) { 4065 methods_ = java.util.Collections.unmodifiableList(methods_); 4066 bitField0_ = (bitField0_ & ~0x00000002); 4067 } 4068 result.methods_ = methods_; 4069 result.bitField0_ = to_bitField0_; 4070 onBuilt(); 4071 return result; 4072 } 4073 4074 public Builder mergeFrom(com.google.protobuf.Message other) { 4075 if (other instanceof org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto) { 4076 return mergeFrom((org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto)other); 4077 } else { 4078 super.mergeFrom(other); 4079 return this; 4080 } 4081 } 4082 4083 public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto other) { 4084 if (other == org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto.getDefaultInstance()) return this; 4085 if (other.hasVersion()) { 4086 setVersion(other.getVersion()); 4087 } 4088 if (!other.methods_.isEmpty()) { 4089 if (methods_.isEmpty()) { 4090 methods_ = other.methods_; 4091 bitField0_ = (bitField0_ & ~0x00000002); 4092 } else { 4093 ensureMethodsIsMutable(); 4094 methods_.addAll(other.methods_); 4095 } 4096 onChanged(); 4097 } 4098 this.mergeUnknownFields(other.getUnknownFields()); 4099 return this; 4100 } 4101 4102 public final boolean isInitialized() { 4103 if (!hasVersion()) { 4104 4105 return false; 4106 } 4107 return true; 4108 } 4109 4110 public Builder mergeFrom( 4111 com.google.protobuf.CodedInputStream input, 4112 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4113 throws java.io.IOException { 4114 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto parsedMessage = null; 4115 try { 4116 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 4117 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 4118 parsedMessage = (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto) e.getUnfinishedMessage(); 4119 throw e; 4120 } finally { 4121 if (parsedMessage != null) { 4122 mergeFrom(parsedMessage); 4123 } 4124 } 4125 return this; 4126 } 4127 private int bitField0_; 4128 4129 // required uint64 version = 1; 4130 private long version_ ; 4131 /** 4132 * <code>required uint64 version = 1;</code> 4133 */ 4134 public boolean hasVersion() { 4135 return ((bitField0_ & 0x00000001) == 0x00000001); 4136 } 4137 /** 4138 * <code>required uint64 version = 1;</code> 4139 */ 4140 public long getVersion() { 4141 return version_; 4142 } 4143 /** 4144 * <code>required uint64 version = 1;</code> 4145 */ 4146 public Builder setVersion(long value) { 4147 bitField0_ |= 
0x00000001; 4148 version_ = value; 4149 onChanged(); 4150 return this; 4151 } 4152 /** 4153 * <code>required uint64 version = 1;</code> 4154 */ 4155 public Builder clearVersion() { 4156 bitField0_ = (bitField0_ & ~0x00000001); 4157 version_ = 0L; 4158 onChanged(); 4159 return this; 4160 } 4161 4162 // repeated uint32 methods = 2; 4163 private java.util.List<java.lang.Integer> methods_ = java.util.Collections.emptyList(); 4164 private void ensureMethodsIsMutable() { 4165 if (!((bitField0_ & 0x00000002) == 0x00000002)) { 4166 methods_ = new java.util.ArrayList<java.lang.Integer>(methods_); 4167 bitField0_ |= 0x00000002; 4168 } 4169 } 4170 /** 4171 * <code>repeated uint32 methods = 2;</code> 4172 */ 4173 public java.util.List<java.lang.Integer> 4174 getMethodsList() { 4175 return java.util.Collections.unmodifiableList(methods_); 4176 } 4177 /** 4178 * <code>repeated uint32 methods = 2;</code> 4179 */ 4180 public int getMethodsCount() { 4181 return methods_.size(); 4182 } 4183 /** 4184 * <code>repeated uint32 methods = 2;</code> 4185 */ 4186 public int getMethods(int index) { 4187 return methods_.get(index); 4188 } 4189 /** 4190 * <code>repeated uint32 methods = 2;</code> 4191 */ 4192 public Builder setMethods( 4193 int index, int value) { 4194 ensureMethodsIsMutable(); 4195 methods_.set(index, value); 4196 onChanged(); 4197 return this; 4198 } 4199 /** 4200 * <code>repeated uint32 methods = 2;</code> 4201 */ 4202 public Builder addMethods(int value) { 4203 ensureMethodsIsMutable(); 4204 methods_.add(value); 4205 onChanged(); 4206 return this; 4207 } 4208 /** 4209 * <code>repeated uint32 methods = 2;</code> 4210 */ 4211 public Builder addAllMethods( 4212 java.lang.Iterable<? extends java.lang.Integer> values) { 4213 ensureMethodsIsMutable(); 4214 super.addAll(values, methods_); 4215 onChanged(); 4216 return this; 4217 } 4218 /** 4219 * <code>repeated uint32 methods = 2;</code> 4220 */ 4221 public Builder clearMethods() { 4222 methods_ = java.util.Collections.emptyList(); 4223 bitField0_ = (bitField0_ & ~0x00000002); 4224 onChanged(); 4225 return this; 4226 } 4227 4228 // @@protoc_insertion_point(builder_scope:hadoop.common.ProtocolSignatureProto) 4229 } 4230 4231 static { 4232 defaultInstance = new ProtocolSignatureProto(true); 4233 defaultInstance.initFields(); 4234 } 4235 4236 // @@protoc_insertion_point(class_scope:hadoop.common.ProtocolSignatureProto) 4237 } 4238 4239 /** 4240 * Protobuf service {@code hadoop.common.ProtocolInfoService} 4241 * 4242 * <pre> 4243 ** 4244 * Protocol to get information about protocols. 4245 * </pre> 4246 */ 4247 public static abstract class ProtocolInfoService 4248 implements com.google.protobuf.Service { 4249 protected ProtocolInfoService() {} 4250 4251 public interface Interface { 4252 /** 4253 * <code>rpc getProtocolVersions(.hadoop.common.GetProtocolVersionsRequestProto) returns (.hadoop.common.GetProtocolVersionsResponseProto);</code> 4254 * 4255 * <pre> 4256 ** 4257 * Return protocol version corresponding to protocol interface for each 4258 * supported rpc kind. 
4259 * </pre> 4260 */ 4261 public abstract void getProtocolVersions( 4262 com.google.protobuf.RpcController controller, 4263 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto request, 4264 com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto> done); 4265 4266 /** 4267 * <code>rpc getProtocolSignature(.hadoop.common.GetProtocolSignatureRequestProto) returns (.hadoop.common.GetProtocolSignatureResponseProto);</code> 4268 * 4269 * <pre> 4270 ** 4271 * Return protocol version corresponding to protocol interface. 4272 * </pre> 4273 */ 4274 public abstract void getProtocolSignature( 4275 com.google.protobuf.RpcController controller, 4276 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto request, 4277 com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto> done); 4278 4279 } 4280 4281 public static com.google.protobuf.Service newReflectiveService( 4282 final Interface impl) { 4283 return new ProtocolInfoService() { 4284 @java.lang.Override 4285 public void getProtocolVersions( 4286 com.google.protobuf.RpcController controller, 4287 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto request, 4288 com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto> done) { 4289 impl.getProtocolVersions(controller, request, done); 4290 } 4291 4292 @java.lang.Override 4293 public void getProtocolSignature( 4294 com.google.protobuf.RpcController controller, 4295 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto request, 4296 com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto> done) { 4297 impl.getProtocolSignature(controller, request, done); 4298 } 4299 4300 }; 4301 } 4302 4303 public static com.google.protobuf.BlockingService 4304 newReflectiveBlockingService(final BlockingInterface impl) { 4305 return new com.google.protobuf.BlockingService() { 4306 public final com.google.protobuf.Descriptors.ServiceDescriptor 4307 getDescriptorForType() { 4308 return getDescriptor(); 4309 } 4310 4311 public final com.google.protobuf.Message callBlockingMethod( 4312 com.google.protobuf.Descriptors.MethodDescriptor method, 4313 com.google.protobuf.RpcController controller, 4314 com.google.protobuf.Message request) 4315 throws com.google.protobuf.ServiceException { 4316 if (method.getService() != getDescriptor()) { 4317 throw new java.lang.IllegalArgumentException( 4318 "Service.callBlockingMethod() given method descriptor for " + 4319 "wrong service type."); 4320 } 4321 switch(method.getIndex()) { 4322 case 0: 4323 return impl.getProtocolVersions(controller, (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto)request); 4324 case 1: 4325 return impl.getProtocolSignature(controller, (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto)request); 4326 default: 4327 throw new java.lang.AssertionError("Can't get here."); 4328 } 4329 } 4330 4331 public final com.google.protobuf.Message 4332 getRequestPrototype( 4333 com.google.protobuf.Descriptors.MethodDescriptor method) { 4334 if (method.getService() != getDescriptor()) { 4335 throw new java.lang.IllegalArgumentException( 4336 "Service.getRequestPrototype() given method " + 4337 "descriptor for wrong service type."); 4338 } 4339 
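        // Descriptive note: dispatch is by the method's index within the service descriptor,
        // matching the declaration order in ProtocolInfo.proto: 0 = getProtocolVersions,
        // 1 = getProtocolSignature.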
switch(method.getIndex()) { 4340 case 0: 4341 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto.getDefaultInstance(); 4342 case 1: 4343 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto.getDefaultInstance(); 4344 default: 4345 throw new java.lang.AssertionError("Can't get here."); 4346 } 4347 } 4348 4349 public final com.google.protobuf.Message 4350 getResponsePrototype( 4351 com.google.protobuf.Descriptors.MethodDescriptor method) { 4352 if (method.getService() != getDescriptor()) { 4353 throw new java.lang.IllegalArgumentException( 4354 "Service.getResponsePrototype() given method " + 4355 "descriptor for wrong service type."); 4356 } 4357 switch(method.getIndex()) { 4358 case 0: 4359 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.getDefaultInstance(); 4360 case 1: 4361 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.getDefaultInstance(); 4362 default: 4363 throw new java.lang.AssertionError("Can't get here."); 4364 } 4365 } 4366 4367 }; 4368 } 4369 4370 /** 4371 * <code>rpc getProtocolVersions(.hadoop.common.GetProtocolVersionsRequestProto) returns (.hadoop.common.GetProtocolVersionsResponseProto);</code> 4372 * 4373 * <pre> 4374 ** 4375 * Return protocol version corresponding to protocol interface for each 4376 * supported rpc kind. 4377 * </pre> 4378 */ 4379 public abstract void getProtocolVersions( 4380 com.google.protobuf.RpcController controller, 4381 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto request, 4382 com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto> done); 4383 4384 /** 4385 * <code>rpc getProtocolSignature(.hadoop.common.GetProtocolSignatureRequestProto) returns (.hadoop.common.GetProtocolSignatureResponseProto);</code> 4386 * 4387 * <pre> 4388 ** 4389 * Return protocol version corresponding to protocol interface. 
4390 * </pre> 4391 */ 4392 public abstract void getProtocolSignature( 4393 com.google.protobuf.RpcController controller, 4394 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto request, 4395 com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto> done); 4396 4397 public static final 4398 com.google.protobuf.Descriptors.ServiceDescriptor 4399 getDescriptor() { 4400 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.getDescriptor().getServices().get(0); 4401 } 4402 public final com.google.protobuf.Descriptors.ServiceDescriptor 4403 getDescriptorForType() { 4404 return getDescriptor(); 4405 } 4406 4407 public final void callMethod( 4408 com.google.protobuf.Descriptors.MethodDescriptor method, 4409 com.google.protobuf.RpcController controller, 4410 com.google.protobuf.Message request, 4411 com.google.protobuf.RpcCallback< 4412 com.google.protobuf.Message> done) { 4413 if (method.getService() != getDescriptor()) { 4414 throw new java.lang.IllegalArgumentException( 4415 "Service.callMethod() given method descriptor for wrong " + 4416 "service type."); 4417 } 4418 switch(method.getIndex()) { 4419 case 0: 4420 this.getProtocolVersions(controller, (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto)request, 4421 com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto>specializeCallback( 4422 done)); 4423 return; 4424 case 1: 4425 this.getProtocolSignature(controller, (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto)request, 4426 com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto>specializeCallback( 4427 done)); 4428 return; 4429 default: 4430 throw new java.lang.AssertionError("Can't get here."); 4431 } 4432 } 4433 4434 public final com.google.protobuf.Message 4435 getRequestPrototype( 4436 com.google.protobuf.Descriptors.MethodDescriptor method) { 4437 if (method.getService() != getDescriptor()) { 4438 throw new java.lang.IllegalArgumentException( 4439 "Service.getRequestPrototype() given method " + 4440 "descriptor for wrong service type."); 4441 } 4442 switch(method.getIndex()) { 4443 case 0: 4444 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto.getDefaultInstance(); 4445 case 1: 4446 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto.getDefaultInstance(); 4447 default: 4448 throw new java.lang.AssertionError("Can't get here."); 4449 } 4450 } 4451 4452 public final com.google.protobuf.Message 4453 getResponsePrototype( 4454 com.google.protobuf.Descriptors.MethodDescriptor method) { 4455 if (method.getService() != getDescriptor()) { 4456 throw new java.lang.IllegalArgumentException( 4457 "Service.getResponsePrototype() given method " + 4458 "descriptor for wrong service type."); 4459 } 4460 switch(method.getIndex()) { 4461 case 0: 4462 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.getDefaultInstance(); 4463 case 1: 4464 return org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.getDefaultInstance(); 4465 default: 4466 throw new java.lang.AssertionError("Can't get here."); 4467 } 4468 } 4469 4470 public static Stub newStub( 4471 com.google.protobuf.RpcChannel channel) { 4472 return new Stub(channel); 4473 } 4474 4475 public static final class Stub extends 
org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolInfoService implements Interface { 4476 private Stub(com.google.protobuf.RpcChannel channel) { 4477 this.channel = channel; 4478 } 4479 4480 private final com.google.protobuf.RpcChannel channel; 4481 4482 public com.google.protobuf.RpcChannel getChannel() { 4483 return channel; 4484 } 4485 4486 public void getProtocolVersions( 4487 com.google.protobuf.RpcController controller, 4488 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto request, 4489 com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto> done) { 4490 channel.callMethod( 4491 getDescriptor().getMethods().get(0), 4492 controller, 4493 request, 4494 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.getDefaultInstance(), 4495 com.google.protobuf.RpcUtil.generalizeCallback( 4496 done, 4497 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.class, 4498 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.getDefaultInstance())); 4499 } 4500 4501 public void getProtocolSignature( 4502 com.google.protobuf.RpcController controller, 4503 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto request, 4504 com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto> done) { 4505 channel.callMethod( 4506 getDescriptor().getMethods().get(1), 4507 controller, 4508 request, 4509 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.getDefaultInstance(), 4510 com.google.protobuf.RpcUtil.generalizeCallback( 4511 done, 4512 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.class, 4513 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.getDefaultInstance())); 4514 } 4515 } 4516 4517 public static BlockingInterface newBlockingStub( 4518 com.google.protobuf.BlockingRpcChannel channel) { 4519 return new BlockingStub(channel); 4520 } 4521 4522 public interface BlockingInterface { 4523 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto getProtocolVersions( 4524 com.google.protobuf.RpcController controller, 4525 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto request) 4526 throws com.google.protobuf.ServiceException; 4527 4528 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto getProtocolSignature( 4529 com.google.protobuf.RpcController controller, 4530 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto request) 4531 throws com.google.protobuf.ServiceException; 4532 } 4533 4534 private static final class BlockingStub implements BlockingInterface { 4535 private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { 4536 this.channel = channel; 4537 } 4538 4539 private final com.google.protobuf.BlockingRpcChannel channel; 4540 4541 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto getProtocolVersions( 4542 com.google.protobuf.RpcController controller, 4543 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto request) 4544 throws com.google.protobuf.ServiceException { 4545 return (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto) channel.callBlockingMethod( 4546 
getDescriptor().getMethods().get(0), 4547 controller, 4548 request, 4549 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsResponseProto.getDefaultInstance()); 4550 } 4551 4552 4553 public org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto getProtocolSignature( 4554 com.google.protobuf.RpcController controller, 4555 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto request) 4556 throws com.google.protobuf.ServiceException { 4557 return (org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto) channel.callBlockingMethod( 4558 getDescriptor().getMethods().get(1), 4559 controller, 4560 request, 4561 org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto.getDefaultInstance()); 4562 } 4563 4564 } 4565 4566 // @@protoc_insertion_point(class_scope:hadoop.common.ProtocolInfoService) 4567 } 4568 4569 private static com.google.protobuf.Descriptors.Descriptor 4570 internal_static_hadoop_common_GetProtocolVersionsRequestProto_descriptor; 4571 private static 4572 com.google.protobuf.GeneratedMessage.FieldAccessorTable 4573 internal_static_hadoop_common_GetProtocolVersionsRequestProto_fieldAccessorTable; 4574 private static com.google.protobuf.Descriptors.Descriptor 4575 internal_static_hadoop_common_ProtocolVersionProto_descriptor; 4576 private static 4577 com.google.protobuf.GeneratedMessage.FieldAccessorTable 4578 internal_static_hadoop_common_ProtocolVersionProto_fieldAccessorTable; 4579 private static com.google.protobuf.Descriptors.Descriptor 4580 internal_static_hadoop_common_GetProtocolVersionsResponseProto_descriptor; 4581 private static 4582 com.google.protobuf.GeneratedMessage.FieldAccessorTable 4583 internal_static_hadoop_common_GetProtocolVersionsResponseProto_fieldAccessorTable; 4584 private static com.google.protobuf.Descriptors.Descriptor 4585 internal_static_hadoop_common_GetProtocolSignatureRequestProto_descriptor; 4586 private static 4587 com.google.protobuf.GeneratedMessage.FieldAccessorTable 4588 internal_static_hadoop_common_GetProtocolSignatureRequestProto_fieldAccessorTable; 4589 private static com.google.protobuf.Descriptors.Descriptor 4590 internal_static_hadoop_common_GetProtocolSignatureResponseProto_descriptor; 4591 private static 4592 com.google.protobuf.GeneratedMessage.FieldAccessorTable 4593 internal_static_hadoop_common_GetProtocolSignatureResponseProto_fieldAccessorTable; 4594 private static com.google.protobuf.Descriptors.Descriptor 4595 internal_static_hadoop_common_ProtocolSignatureProto_descriptor; 4596 private static 4597 com.google.protobuf.GeneratedMessage.FieldAccessorTable 4598 internal_static_hadoop_common_ProtocolSignatureProto_fieldAccessorTable; 4599 4600 public static com.google.protobuf.Descriptors.FileDescriptor 4601 getDescriptor() { 4602 return descriptor; 4603 } 4604 private static com.google.protobuf.Descriptors.FileDescriptor 4605 descriptor; 4606 static { 4607 java.lang.String[] descriptorData = { 4608 "\n\022ProtocolInfo.proto\022\rhadoop.common\"3\n\037G" + 4609 "etProtocolVersionsRequestProto\022\020\n\010protoc" + 4610 "ol\030\001 \002(\t\"9\n\024ProtocolVersionProto\022\017\n\007rpcK" + 4611 "ind\030\001 \002(\t\022\020\n\010versions\030\002 \003(\004\"a\n GetProtoc" + 4612 "olVersionsResponseProto\022=\n\020protocolVersi" + 4613 "ons\030\001 \003(\0132#.hadoop.common.ProtocolVersio" + 4614 "nProto\"E\n GetProtocolSignatureRequestPro" + 4615 "to\022\020\n\010protocol\030\001 
\002(\t\022\017\n\007rpcKind\030\002 \002(\t\"e\n" + 4616 "!GetProtocolSignatureResponseProto\022@\n\021pr" + 4617 "otocolSignature\030\001 \003(\0132%.hadoop.common.Pr", 4618 "otocolSignatureProto\":\n\026ProtocolSignatur" + 4619 "eProto\022\017\n\007version\030\001 \002(\004\022\017\n\007methods\030\002 \003(\r" + 4620 "2\210\002\n\023ProtocolInfoService\022v\n\023getProtocolV" + 4621 "ersions\022..hadoop.common.GetProtocolVersi" + 4622 "onsRequestProto\032/.hadoop.common.GetProto" + 4623 "colVersionsResponseProto\022y\n\024getProtocolS" + 4624 "ignature\022/.hadoop.common.GetProtocolSign" + 4625 "atureRequestProto\0320.hadoop.common.GetPro" + 4626 "tocolSignatureResponseProtoB:\n\036org.apach" + 4627 "e.hadoop.ipc.protobufB\022ProtocolInfoProto", 4628 "s\210\001\001\240\001\001" 4629 }; 4630 com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = 4631 new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { 4632 public com.google.protobuf.ExtensionRegistry assignDescriptors( 4633 com.google.protobuf.Descriptors.FileDescriptor root) { 4634 descriptor = root; 4635 internal_static_hadoop_common_GetProtocolVersionsRequestProto_descriptor = 4636 getDescriptor().getMessageTypes().get(0); 4637 internal_static_hadoop_common_GetProtocolVersionsRequestProto_fieldAccessorTable = new 4638 com.google.protobuf.GeneratedMessage.FieldAccessorTable( 4639 internal_static_hadoop_common_GetProtocolVersionsRequestProto_descriptor, 4640 new java.lang.String[] { "Protocol", }); 4641 internal_static_hadoop_common_ProtocolVersionProto_descriptor = 4642 getDescriptor().getMessageTypes().get(1); 4643 internal_static_hadoop_common_ProtocolVersionProto_fieldAccessorTable = new 4644 com.google.protobuf.GeneratedMessage.FieldAccessorTable( 4645 internal_static_hadoop_common_ProtocolVersionProto_descriptor, 4646 new java.lang.String[] { "RpcKind", "Versions", }); 4647 internal_static_hadoop_common_GetProtocolVersionsResponseProto_descriptor = 4648 getDescriptor().getMessageTypes().get(2); 4649 internal_static_hadoop_common_GetProtocolVersionsResponseProto_fieldAccessorTable = new 4650 com.google.protobuf.GeneratedMessage.FieldAccessorTable( 4651 internal_static_hadoop_common_GetProtocolVersionsResponseProto_descriptor, 4652 new java.lang.String[] { "ProtocolVersions", }); 4653 internal_static_hadoop_common_GetProtocolSignatureRequestProto_descriptor = 4654 getDescriptor().getMessageTypes().get(3); 4655 internal_static_hadoop_common_GetProtocolSignatureRequestProto_fieldAccessorTable = new 4656 com.google.protobuf.GeneratedMessage.FieldAccessorTable( 4657 internal_static_hadoop_common_GetProtocolSignatureRequestProto_descriptor, 4658 new java.lang.String[] { "Protocol", "RpcKind", }); 4659 internal_static_hadoop_common_GetProtocolSignatureResponseProto_descriptor = 4660 getDescriptor().getMessageTypes().get(4); 4661 internal_static_hadoop_common_GetProtocolSignatureResponseProto_fieldAccessorTable = new 4662 com.google.protobuf.GeneratedMessage.FieldAccessorTable( 4663 internal_static_hadoop_common_GetProtocolSignatureResponseProto_descriptor, 4664 new java.lang.String[] { "ProtocolSignature", }); 4665 internal_static_hadoop_common_ProtocolSignatureProto_descriptor = 4666 getDescriptor().getMessageTypes().get(5); 4667 internal_static_hadoop_common_ProtocolSignatureProto_fieldAccessorTable = new 4668 com.google.protobuf.GeneratedMessage.FieldAccessorTable( 4669 internal_static_hadoop_common_ProtocolSignatureProto_descriptor, 4670 new java.lang.String[] { "Version", 
"Methods", }); 4671 return null; 4672 } 4673 }; 4674 com.google.protobuf.Descriptors.FileDescriptor 4675 .internalBuildGeneratedFileFrom(descriptorData, 4676 new com.google.protobuf.Descriptors.FileDescriptor[] { 4677 }, assigner); 4678 } 4679 4680 // @@protoc_insertion_point(outer_class_scope) 4681}