#!/usr/bin/env dart
// Compiled https://github.com/dart-lang/dart-protoc-plugin/releases, v0.4.1

import "dart:convert";
import "dart:typed_data";
import "dart:math";
import "dart:math" as _A;
import "dart:collection";
import "dart:async";
import "dart:io";
import "dart:io" as _B;

class CryptoUtils {
  static String bytesToBase64_A(List<int> bytes, {bool urlSafe: false, bool addLineSeparator: false}) {
    return _CryptoUtils_A.bytesToBase64_B(bytes, urlSafe, addLineSeparator);
  }
}
abstract class _CryptoUtils_A {
  static const int PAD_A = 61;
  static const int CR_A = 13;
  static const int LF_A = 10;
  static const int LINE_LENGTH_A = 76;
  static const String _encodeTable_A = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
  static const String _encodeTableUrlSafe_A = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";

  static String bytesToBase64_B(List<int> bytes, [bool urlSafe = false, bool addLineSeparator = false]) {
    int len = bytes.length;
    if (len == 0) {
      return "";
    }
    final String lookup_A = urlSafe ? _encodeTableUrlSafe_A : _encodeTable_A;
    final int remainderLength = len.remainder(3);
    final int chunkLength = len - remainderLength;
    int outputLen = ((len ~/ 3) * 4) + ((remainderLength > 0) ? 4 : 0);
    if (addLineSeparator) {
      outputLen += ((outputLen - 1) ~/ LINE_LENGTH_A) << 1;
    }
    List<int> out = new List<int>(outputLen);
    int j = 0, i = 0, c = 0;
    while (i < chunkLength) {
      int x_A = ((bytes[i++] << 16) & 0xFFFFFF) | ((bytes[i++] << 8) & 0xFFFFFF) | bytes[i++];
      out[j++] = lookup_A.codeUnitAt(x_A >> 18);
      out[j++] = lookup_A.codeUnitAt((x_A >> 12) & 0x3F);
      out[j++] = lookup_A.codeUnitAt((x_A >> 6) & 0x3F);
      out[j++] = lookup_A.codeUnitAt(x_A & 0x3f);
      if (addLineSeparator && ++c == 19 && j < outputLen - 2) {
        out[j++] = CR_A;
        out[j++] = LF_A;
        c = 0;
      }
    }
    if (remainderLength == 1) {
      int x_A = bytes[i];
      out[j++] = lookup_A.codeUnitAt(x_A >> 2);
      out[j++] = lookup_A.codeUnitAt((x_A << 4) & 0x3F);
      out[j++] = PAD_A;
      out[j++] = PAD_A;
    } else if (remainderLength == 2) {
      int x_A = bytes[i];
      int y_A = bytes[i + 1];
      out[j++] = lookup_A.codeUnitAt(x_A >> 2);
      out[j++] = lookup_A.codeUnitAt(((x_A << 4) | (y_A >> 4)) & 0x3F);
      out[j++] = lookup_A.codeUnitAt((y_A << 2) & 0x3F);
      out[j++] = PAD_A;
    }
    return new String.fromCharCodes(out);
  }
}
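// Added usage sketch (hypothetical helper, never called by the plugin): shows
// how the encoder above maps bytes onto the standard and URL-safe alphabets.
String _base64ExampleSketch() {
  var bytes = "hello".codeUnits; // [104, 101, 108, 108, 111]
  var standard = CryptoUtils.bytesToBase64_A(bytes); // "aGVsbG8="
  // No '+' or '/' appears for this input, so the URL-safe form is identical.
  var urlSafe = CryptoUtils.bytesToBase64_A(bytes, urlSafe: true);
  return "$standard $urlSafe";
}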
class Dart_options {
  static final Extension defaultMixin = new Extension('FileOptions', 'defaultMixin', 96128839, GeneratedMessage.OS);
  static final Extension mixin = new Extension('MessageOptions', 'mixin', 96128839, GeneratedMessage.OS);

  static void registerAllExtensions(ExtensionRegistry registry) {
    registry.add(defaultMixin);
    registry.add(mixin);
  }
}
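// Added wiring sketch (hypothetical function name; assumes the bundled
// ExtensionRegistry exposes a default constructor, as the protobuf runtime
// does): a plugin front end registers the dart_options extensions before
// decoding the CodeGeneratorRequest it reads from stdin, so that the
// defaultMixin/mixin values survive parsing. Not part of the compiled output.
CodeGeneratorRequest _parseRequestSketch(List<int> stdinBytes) {
  var registry = new ExtensionRegistry();
  Dart_options.registerAllExtensions(registry);
  return new CodeGeneratorRequest.fromBuffer(stdinBytes, registry);
}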
class Int32 implements IntX {}

class Int64 implements IntX {
  final int _l;
  final int _m;
  final int _h_A;

  static const int _BITS = 22;
  static const int _BITS01 = 44;
  static const int _BITS2 = 20;
  static const int _MASK = 4194303;
  static const int _MASK2 = 1048575;
  static const int _SIGN_BIT_MASK = 524288;

  static const Int64 ZERO_A = const Int64._bits(0, 0, 0);

  const Int64._bits(int this._l, int this._m, int this._h_A);

  factory Int64([int value_A = 0]) {
    int v0 = 0, v1 = 0, v2 = 0;
    bool negative = false;
    if (value_A < 0) {
      negative = true;
      value_A = -value_A - 1;
    }
    if (_haveBigInts) {
      v0 = _MASK & value_A;
      v1 = _MASK & (value_A >> _BITS);
      v2 = _MASK2 & (value_A >> _BITS01);
    } else {
      v2 = value_A ~/ 17592186044416;
      value_A -= v2 * 17592186044416;
      v1 = value_A ~/ 4194304;
      value_A -= v1 * 4194304;
      v0 = value_A;
    }
    if (negative) {
      v0 = ~v0;
      v1 = ~v1;
      v2 = ~v2;
    }
    return Int64._masked_A(v0, v1, v2);
  }

  factory Int64.fromBytes(List<int> bytes) {
    int top_A = bytes[7] & 0xff;
    top_A <<= 8;
    top_A |= bytes[6] & 0xff;
    top_A <<= 8;
    top_A |= bytes[5] & 0xff;
    top_A <<= 8;
    top_A |= bytes[4] & 0xff;
    int bottom_A = bytes[3] & 0xff;
    bottom_A <<= 8;
    bottom_A |= bytes[2] & 0xff;
    bottom_A <<= 8;
    bottom_A |= bytes[1] & 0xff;
    bottom_A <<= 8;
    bottom_A |= bytes[0] & 0xff;
    return new Int64.fromInts(top_A, bottom_A);
  }

  factory Int64.fromInts(int top_A, int bottom_A) {
    top_A &= 0xffffffff;
    bottom_A &= 0xffffffff;
    int d0 = bottom_A & _MASK;
    int d1 = ((top_A & 0xfff) << 10) | ((bottom_A >> _BITS) & 0x3ff);
    int d2 = (top_A >> 12) & _MASK2;
    return new Int64._bits(d0, d1, d2);
  }

  static Int64 _promote(val) {
    if (val is Int64) {
      return val;
    } else if (val is int) {
      return new Int64(val);
    } else if (val is Int32) {
      return val.toInt64();
    }
    throw new ArgumentError(val);
  }

  Int64 operator+(other) {
    Int64 o = _promote(other);
    int sum0 = _l + o._l;
    int sum1 = _m + o._m + (sum0 >> _BITS);
    int sum2 = _h_A + o._h_A + (sum1 >> _BITS);
    return Int64._masked_A(sum0, sum1, sum2);
  }

  Int64 operator-(other) {
    Int64 o = _promote(other);
    return _sub(_l, _m, _h_A, o._l, o._m, o._h_A);
  }

  Int64 operator-() => _negate(_l, _m, _h_A);

  Int64 operator*(other) {
    Int64 o = _promote(other);
    int a0 = _l & 0x1fff;
    int a1 = (_l >> 13) | ((_m & 0xf) << 9);
    int a2 = (_m >> 4) & 0x1fff;
    int a3 = (_m >> 17) | ((_h_A & 0xff) << 5);
    int a4 = (_h_A & 0xfff00) >> 8;
    int b0 = o._l & 0x1fff;
    int b1 = (o._l >> 13) | ((o._m & 0xf) << 9);
    int b2 = (o._m >> 4) & 0x1fff;
    int b3 = (o._m >> 17) | ((o._h_A & 0xff) << 5);
    int b4 = (o._h_A & 0xfff00) >> 8;
    int p0 = a0 * b0;
    int p1 = a1 * b0;
    int p2 = a2 * b0;
    int p3 = a3 * b0;
    int p4 = a4 * b0;
    if (b1 != 0) {
      p1 += a0 * b1;
      p2 += a1 * b1;
      p3 += a2 * b1;
      p4 += a3 * b1;
    }
    if (b2 != 0) {
      p2 += a0 * b2;
      p3 += a1 * b2;
      p4 += a2 * b2;
    }
    if (b3 != 0) {
      p3 += a0 * b3;
      p4 += a1 * b3;
    }
    if (b4 != 0) {
      p4 += a0 * b4;
    }
    int c00 = p0 & 0x3fffff;
    int c01 = (p1 & 0x1ff) << 13;
    int c0 = c00 + c01;
    int c10 = p0 >> 22;
    int c11 = p1 >> 9;
    int c12 = (p2 & 0x3ffff) << 4;
    int c13 = (p3 & 0x1f) << 17;
    int c1 = c10 + c11 + c12 + c13;
    int c22 = p2 >> 18;
    int c23 = p3 >> 5;
    int c24 = (p4 & 0xfff) << 8;
    int c2 = c22 + c23 + c24;
    c1 += c0 >> _BITS;
    c0 &= _MASK;
    c2 += c1 >> _BITS;
    c1 &= _MASK;
    c2 &= _MASK2;
    return new Int64._bits(c0, c1, c2);
  }

  Int64 operator~/(other) => _divide(this, other, _RETURN_DIV);

  Int64 remainder(other) => _divide(this, other, _RETURN_REM);

  Int64 operator&(other) {
    Int64 o = _promote(other);
    int a0 = _l & o._l;
    int a1 = _m & o._m;
    int a2 = _h_A & o._h_A;
    return new Int64._bits(a0, a1, a2);
  }

  Int64 operator|(other) {
    Int64 o = _promote(other);
    int a0 = _l | o._l;
    int a1 = _m | o._m;
    int a2 = _h_A | o._h_A;
    return new Int64._bits(a0, a1, a2);
  }

  Int64 operator^(other) {
    Int64 o = _promote(other);
    int a0 = _l ^ o._l;
    int a1 = _m ^ o._m;
    int a2 = _h_A ^ o._h_A;
    return new Int64._bits(a0, a1, a2);
  }

  Int64 operator~() {
    return Int64._masked_A(~_l, ~_m, ~_h_A);
  }

  Int64 operator<<(int n) {
    if (n < 0) {
      throw new ArgumentError(n);
    }
    n &= 63;
    int res0, res1, res2;
    if (n < _BITS) {
      res0 = _l << n;
      res1 = (_m << n) | (_l >> (_BITS - n));
      res2 = (_h_A << n) | (_m >> (_BITS - n));
    } else if (n < _BITS01) {
      res0 = 0;
      res1 = _l << (n - _BITS);
      res2 = (_m << (n - _BITS)) | (_l >> (_BITS01 - n));
    } else {
      res0 = 0;
      res1 = 0;
      res2 = _l << (n - _BITS01);
    }
    return Int64._masked_A(res0, res1, res2);
  }

  Int64 operator>>(int n) {
    if (n < 0) {
      throw new ArgumentError(n);
    }
    n &= 63;
    int res0, res1, res2;
    int a2 = _h_A;
    bool negative = (a2 & _SIGN_BIT_MASK) != 0;
    if (negative && _MASK > _MASK2) {
      a2 += (_MASK - _MASK2);
    }
    if (n < _BITS) {
      res2 = _shiftRight(a2, n);
      if (negative) {
        res2 |= _MASK2 & ~(_MASK2 >> n);
      }
      res1 = _shiftRight(_m, n) | (a2 << (_BITS - n));
      res0 = _shiftRight(_l, n) | (_m << (_BITS - n));
    } else if (n < _BITS01) {
      res2 = negative ? _MASK2 : 0;
      res1 = _shiftRight(a2, n - _BITS);
      if (negative) {
        res1 |= _MASK & ~(_MASK >> (n - _BITS));
      }
      res0 = _shiftRight(_m, n - _BITS) | (a2 << (_BITS01 - n));
    } else {
      res2 = negative ? _MASK2 : 0;
      res1 = negative ? _MASK : 0;
      res0 = _shiftRight(a2, n - _BITS01);
      if (negative) {
        res0 |= _MASK & ~(_MASK >> (n - _BITS01));
      }
    }
    return Int64._masked_A(res0, res1, res2);
  }

  bool operator==(other) {
    Int64 o;
    if (other is Int64) {
      o = other;
    } else if (other is int) {
      if (_h_A == 0 && _m == 0) return _l == other;
      if ((_MASK & other) == other) return false;
      o = new Int64(other);
    } else if (other is Int32) {
      o = other.toInt64();
    }
    if (o != null) {
      return _l == o._l && _m == o._m && _h_A == o._h_A;
    }
    return false;
  }

  int compareTo(Comparable other) {
    Int64 o = _promote(other);
    int signa = _h_A >> (_BITS2 - 1);
    int signb = o._h_A >> (_BITS2 - 1);
    if (signa != signb) {
      return signa == 0 ? 1 : -1;
    }
    if (_h_A > o._h_A) {
      return 1;
    } else if (_h_A < o._h_A) {
      return -1;
    }
    if (_m > o._m) {
      return 1;
    } else if (_m < o._m) {
      return -1;
    }
    if (_l > o._l) {
      return 1;
    } else if (_l < o._l) {
      return -1;
    }
    return 0;
  }

  bool operator<(other) {
    return this.compareTo(other) < 0;
  }

  bool operator<=(other) {
    return this.compareTo(other) <= 0;
  }

  bool operator>(other) {
    return this.compareTo(other) > 0;
  }

  bool operator>=(other) {
    return this.compareTo(other) >= 0;
  }

  bool get isNegative => (_h_A & _SIGN_BIT_MASK) != 0;

  bool get isZero => _h_A == 0 && _m == 0 && _l == 0;

  int get hashCode {
    int bottom_A = ((_m & 0x3ff) << _BITS) | _l;
    int top_A = (_h_A << 12) | ((_m >> 10) & 0xfff);
    return bottom_A ^ top_A;
  }

  Int64 abs() {
    return this.isNegative ? -this : this;
  }

  List<int> toBytes() {
    List<int> result_A = new List<int>(8);
    result_A[0] = _l & 0xff;
    result_A[1] = (_l >> 8) & 0xff;
    result_A[2] = ((_m << 6) & 0xfc) | ((_l >> 16) & 0x3f);
    result_A[3] = (_m >> 2) & 0xff;
    result_A[4] = (_m >> 10) & 0xff;
    result_A[5] = ((_h_A << 4) & 0xf0) | ((_m >> 18) & 0xf);
    result_A[6] = (_h_A >> 4) & 0xff;
    result_A[7] = (_h_A >> 12) & 0xff;
    return result_A;
  }

  int toInt() {
    int l = _l;
    int m = _m;
    int h = _h_A;
    bool negative = false;
    if ((_h_A & _SIGN_BIT_MASK) != 0) {
      l = _MASK & ~_l;
      m = _MASK & ~_m;
      h = _MASK2 & ~_h_A;
      negative = true;
    }
    if (_haveBigInts) {
      int result_A = (h << _BITS01) | (m << _BITS) | l;
      return negative ? -result_A - 1 : result_A;
    } else {
      if (negative) {
        return -((l + 1) + (m * 4194304) + (h * 17592186044416));
      } else {
        return (l + (m * 4194304)) + (h * 17592186044416);
      }
    }
  }

  Int64 toInt64() => this;

  String toString() => _toRadixString(10);

  String toRadixString(int radix) {
    if ((radix <= 1) || (radix > 36)) {
      throw new ArgumentError("Bad radix: ${radix}");
    }
    return _toRadixString(radix);
  }

  String _toRadixString(int radix) {
    int d0 = _l;
    int d1 = _m;
    int d2 = _h_A;
    if (d0 == 0 && d1 == 0 && d2 == 0) return '0';
    String sign_A = '';
    if ((d2 & _SIGN_BIT_MASK) != 0) {
      sign_A = '-';
      d0 = 0 - d0;
      int borrow = (d0 >> _BITS) & 1;
      d0 &= _MASK;
      d1 = 0 - d1 - borrow;
      borrow = (d1 >> _BITS) & 1;
      d1 &= _MASK;
      d2 = 0 - d2 - borrow;
      d2 &= _MASK2;
    }
    int d4 = (d2 << 4) | (d1 >> 18);
    int d3 = (d1 >> 8) & 0x3ff;
    d2 = ((d1 << 2) | (d0 >> 20)) & 0x3ff;
    d1 = (d0 >> 10) & 0x3ff;
    d0 = d0 & 0x3ff;
    int fatRadix = _fatRadixTable[radix];
    String chunk1 = "", chunk2 = "", chunk3 = "";
    while (!(d4 == 0 && d3 == 0)) {
      int q = d4 ~/ fatRadix;
      int r = d4 - q * fatRadix;
      d4 = q;
      d3 += r << 10;
      q = d3 ~/ fatRadix;
      r = d3 - q * fatRadix;
      d3 = q;
      d2 += r << 10;
      q = d2 ~/ fatRadix;
      r = d2 - q * fatRadix;
      d2 = q;
      d1 += r << 10;
      q = d1 ~/ fatRadix;
      r = d1 - q * fatRadix;
      d1 = q;
      d0 += r << 10;
      q = d0 ~/ fatRadix;
      r = d0 - q * fatRadix;
      d0 = q;
      assert(chunk3 == "");
      chunk3 = chunk2;
      chunk2 = chunk1;
      chunk1 = (fatRadix + r).toRadixString(radix).substring(1);
    }
    int residue = (d2 << 20) + (d1 << 10) + d0;
    String leadingDigits = residue == 0 ? '' : residue.toRadixString(radix);
    return '${sign_A}${leadingDigits}${chunk1}${chunk2}${chunk3}';
  }
static const _fatRadixTable = const<int>[0, 0, 2 * 2 * 2 * 2 * 2 * 2 * 2 * 2 * 2 * 2 * 2 * 2 * 2 * 2 * 2 * 2 * 2 * 2 * 2 * 2, 3 * 3 * 3 * 3 * 3 * 3 * 3 * 3 * 3 * 3 * 3 * 3, 4 * 4 * 4 * 4 * 4 * 4 * 4 * 4 * 4 * 4, 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5, 6 * 6 * 6 * 6 * 6 * 6 * 6, 7 * 7 * 7 * 7 * 7 * 7 * 7, 8 * 8 * 8 * 8 * 8 * 8, 9 * 9 * 9 * 9 * 9 * 9, 10 * 10 * 10 * 10 * 10 * 10, 11 * 11 * 11 * 11 * 11, 12 * 12 * 12 * 12 * 12, 13 * 13 * 13 * 13 * 13, 14 * 14 * 14 * 14 * 14, 15 * 15 * 15 * 15 * 15, 16 * 16 * 16 * 16 * 16, 17 * 17 * 17 * 17, 18 * 18 * 18 * 18, 19 * 19 * 19 * 19, 20 * 20 * 20 * 20, 21 * 21 * 21 * 21, 22 * 22 * 22 * 22, 23 * 23 * 23 * 23, 24 * 24 * 24 * 24, 25 * 25 * 25 * 25, 26 * 26 * 26 * 26, 27 * 27 * 27 * 27, 28 * 28 * 28 * 28, 29 * 29 * 29 * 29, 30 * 30 * 30 * 30, 31 * 31 * 31 * 31, 32 * 32 * 32 * 32, 33 * 33 * 33, 34 * 34 * 34, 35 * 35 * 35, 36 * 36 * 36];
  static Int64 _masked_A(int a0, int a1, int a2) => new Int64._bits(_MASK & a0, _MASK & a1, _MASK2 & a2);

  static Int64 _sub(int a0, int a1, int a2, int b0, int b1, int b2) {
    int diff0 = a0 - b0;
    int diff1 = a1 - b1 - ((diff0 >> _BITS) & 1);
    int diff2 = a2 - b2 - ((diff1 >> _BITS) & 1);
    return _masked_A(diff0, diff1, diff2);
  }

  static Int64 _negate(int b0, int b1, int b2) {
    return _sub(0, 0, 0, b0, b1, b2);
  }

  static bool _haveBigIntsCached = null;

  static bool get _haveBigInts {
    if (_haveBigIntsCached == null) {
      var x_A = 9007199254740992;
      if (2 + 2 != 4) {
        x_A = 0;
      }
      var y_A = x_A + 1;
      var same = y_A == x_A;
      _haveBigIntsCached = !same;
    }
    return _haveBigIntsCached;
  }

  static int _shiftRight(int x_A, int n) {
    if (x_A >= 0) {
      return x_A >> n;
    } else {
      int shifted = x_A >> n;
      if (shifted >= 0x80000000) {
        shifted -= 4294967296;
      }
      return shifted;
    }
  }

  static Int64 _divide(Int64 a, other, int what) {
    Int64 b = _promote(other);
    if (b.isZero) {
      throw new IntegerDivisionByZeroException();
    }
    if (a.isZero) return ZERO_A;
    bool aNeg = a.isNegative;
    bool bNeg = b.isNegative;
    a = a.abs();
    b = b.abs();
    int a0 = a._l;
    int a1 = a._m;
    int a2 = a._h_A;
    int b0 = b._l;
    int b1 = b._m;
    int b2 = b._h_A;
    return _divideHelper(a0, a1, a2, aNeg, b0, b1, b2, bNeg, what);
  }

  static const _RETURN_DIV = 1;
  static const _RETURN_REM = 2;
  static const _RETURN_MOD = 3;

  static _divideHelper(int a0, int a1, int a2, bool aNeg, int b0, int b1, int b2, bool bNeg, int what) {
    int q0 = 0, q1 = 0, q2 = 0;
    int r0 = 0, r1 = 0, r2 = 0;
    if (b2 == 0 && b1 == 0 && b0 < (1 << (30 - _BITS))) {
      q2 = a2 ~/ b0;
      int carry = a2 - q2 * b0;
      int d1 = a1 + (carry << _BITS);
      q1 = d1 ~/ b0;
      carry = d1 - q1 * b0;
      int d0 = a0 + (carry << _BITS);
      q0 = d0 ~/ b0;
      r0 = d0 - q0 * b0;
    } else {
      const double K2 = 17592186044416.0;
      const double K1 = 4194304.0;
      double ad = a0 + K1 * a1 + K2 * a2;
      double bd = b0 + K1 * b1 + K2 * b2;
      double qd = (ad / bd).floorToDouble();
      double q2d = (qd / K2).floorToDouble();
      qd = qd - K2 * q2d;
      double q1d = (qd / K1).floorToDouble();
      double q0d = qd - K1 * q1d;
      q2 = q2d.toInt();
      q1 = q1d.toInt();
      q0 = q0d.toInt();
      assert(q0 + K1 * q1 + K2 * q2 == (ad / bd).floorToDouble());
      assert(q2 == 0 || b2 == 0);
      double p0d = q0d * b0;
      double p0carry = (p0d / K1).floorToDouble();
      p0d = p0d - p0carry * K1;
      double p1d = q1d * b0 + q0d * b1 + p0carry;
      double p1carry = (p1d / K1).floorToDouble();
      p1d = p1d - p1carry * K1;
      double p2d = q2d * b0 + q1d * b1 + q0d * b2 + p1carry;
      assert(p2d <= _MASK2);
      int diff0 = a0 - p0d.toInt();
      int diff1 = a1 - p1d.toInt() - ((diff0 >> _BITS) & 1);
      int diff2 = a2 - p2d.toInt() - ((diff1 >> _BITS) & 1);
      r0 = _MASK & diff0;
      r1 = _MASK & diff1;
      r2 = _MASK2 & diff2;
      while (r2 >= _SIGN_BIT_MASK || r2 > b2 || (r2 == b2 && (r1 > b1 || (r1 == b1 && r0 >= b0)))) {
        int m = (r2 & _SIGN_BIT_MASK) == 0 ? 1 : -1;
        int d0 = r0 - m * b0;
        int d1 = r1 - m * (b1 + ((d0 >> _BITS) & 1));
        int d2 = r2 - m * (b2 + ((d1 >> _BITS) & 1));
        r0 = _MASK & d0;
        r1 = _MASK & d1;
        r2 = _MASK2 & d2;
        d0 = q0 + m;
        d1 = q1 + m * ((d0 >> _BITS) & 1);
        d2 = q2 + m * ((d1 >> _BITS) & 1);
        q0 = _MASK & d0;
        q1 = _MASK & d1;
        q2 = _MASK2 & d2;
      }
    }
    assert(Int64.ZERO_A <= new Int64._bits(r0, r1, r2));
    assert(r2 < b2 || new Int64._bits(r0, r1, r2) < new Int64._bits(b0, b1, b2));
    assert(what == _RETURN_DIV || what == _RETURN_MOD || what == _RETURN_REM);
    if (what == _RETURN_DIV) {
      if (aNeg != bNeg) return _negate(q0, q1, q2);
      return Int64._masked_A(q0, q1, q2);
    }
    if (!aNeg) {
      return new Int64._bits(_MASK & r0, r1, r2);
    }
    if (what == _RETURN_MOD) {
      if (r0 == 0 && r1 == 0 && r2 == 0) {
        return ZERO_A;
      } else {
        return _sub(b0, b1, b2, r0, r1, r2);
      }
    } else {
      return _negate(r0, r1, r2);
    }
  }
}
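// Added commentary and usage sketch (not part of the generated output): the
// Int64 class above keeps a 64-bit two's-complement value in three limbs --
// _l and _m hold 22 bits each and _h_A holds the top 20 bits -- so that limb
// products stay within the 53-bit mantissa of a JavaScript double when the
// script is compiled to JS. The helper name below is hypothetical and the
// function is never called by the plugin.
Int64 _int64ExampleSketch() {
  var a = new Int64(3) * new Int64(1000000007); // 3000000021, past 2^31
  var b = new Int64.fromInts(0x12345678, 0x9abcdef0); // from high/low 32 bits
  return (a + b) >> 3; // arithmetic shift preserves the sign
}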
abstract class IntX implements Comparable {
  IntX operator+(other);
  IntX operator-(other);
  IntX operator-();
  IntX operator*(other);
  IntX operator~/(other);
  IntX remainder(other);
  IntX operator&(other);
  IntX operator|(other);
  IntX operator^(other);
  IntX operator~();
  IntX operator<<(int shiftAmount);
  IntX operator>>(int shiftAmount);
  int compareTo(Comparable other);
  bool operator==(other);
  bool operator<(other);
  bool operator<=(other);
  bool operator>(other);
  bool operator>=(other);
  bool get isNegative;
  bool get isZero;
  int get hashCode;
  IntX abs();
  List<int> toBytes();
  int toInt();
  Int64 toInt64();
  String toString();
  String toRadixString(int radix);
}
final Context url = new Context(style: Style.url_A);

final Context context = createInternal();

String get current_A {
  var uri_A = Uri.base;
  if (Style.platform == Style.url_A) {
    return uri_A.resolve('.').toString();
  } else {
    var path_A = uri_A.toFilePath();
    int lastIndex = path_A.length - 1;
    assert(path_A[lastIndex] == '/' || path_A[lastIndex] == '\\');
    return path_A.substring(0, lastIndex);
  }
}

String withoutExtension(String path_A) => context.withoutExtension_A(path_A);

const SLASH_A = 0x2f;
const COLON_A = 0x3a;
const UPPER_A = 0x41;
const UPPER_Z = 0x5a;
const LOWER_A = 0x61;
const LOWER_Z = 0x7a;
const BACKSLASH_A = 0x5c;

Context createInternal() => new Context._internal_A();
class Context {
|
||
|
factory Context({Style style, String current}) {
|
||
|
if (current == null) {
|
||
|
if (style == null) {
|
||
|
current = current_A;
|
||
|
} else {
|
||
|
current = ".";
|
||
|
}
|
||
|
}
|
||
|
if (style == null) {
|
||
|
style = Style.platform;
|
||
|
} else if (style is! InternalStyle) {
|
||
|
throw new ArgumentError("Only styles defined by the path package are " "allowed.");
|
||
|
}
|
||
|
return new Context.__A(style as InternalStyle, current);
|
||
|
}
|
||
|
Context._internal_A() : style = Style.platform as InternalStyle, _current_A = null;
|
||
|
Context.__A(this.style, this._current_A);
|
||
|
final InternalStyle style;
|
||
|
final String _current_A;
|
||
|
String get current => _current_A != null ? _current_A : current_A;
|
||
|
String get separator => style.separator;
|
||
|
String absolute(String part1, [String part2, String part3, String part4, String part5, String part6, String part7]) {
|
||
|
return join(current, part1, part2, part3, part4, part5, part6, part7);
|
||
|
}
|
||
|
String basenameWithoutExtension(String path_A) => _parse_A(path_A).basenameWithoutExtension;
|
||
|
String dirname(String path_A) {
|
||
|
var parsed = _parse_A(path_A);
|
||
|
parsed.removeTrailingSeparators();
|
||
|
if (parsed.parts.isEmpty) return parsed.root == null ? '.' : parsed.root;
|
||
|
if (parsed.parts.length == 1) {
|
||
|
return parsed.root == null ? '.' : parsed.root;
|
||
|
}
|
||
|
parsed.parts.removeLast();
|
||
|
parsed.separators.removeLast();
|
||
|
parsed.removeTrailingSeparators();
|
||
|
return parsed.toString();
|
||
|
}
|
||
|
String extension(String path_A) => _parse_A(path_A).extension;
|
||
|
String rootPrefix(String path_A) => path_A.substring(0, style.rootLength(path_A));
|
||
|
bool isAbsolute(String path_A) => style.rootLength(path_A) > 0;
|
||
|
bool isRelative(String path_A) => !this.isAbsolute(path_A);
|
||
|
bool isRootRelative(String path_A) => style.isRootRelative(path_A);
|
||
|
String join(String part1, [String part2, String part3, String part4, String part5, String part6, String part7, String part8]) {
|
||
|
var parts = <String>[part1, part2, part3, part4, part5, part6, part7, part8];
|
||
|
_validateArgList("join", parts);
|
||
|
return joinAll(parts.where((part_A) => part_A != null));
|
||
|
}
|
||
|
String joinAll(Iterable<String> parts) {
|
||
|
var buffer_A = new StringBuffer();
|
||
|
var needsSeparator = false;
|
||
|
var isAbsoluteAndNotRootRelative = false;
|
||
|
for (var part_A in parts.where((part_A) => part_A != '')) {
|
||
|
if (this.isRootRelative(part_A) && isAbsoluteAndNotRootRelative) {
|
||
|
var parsed = _parse_A(part_A);
|
||
|
parsed.root = this.rootPrefix(buffer_A.toString());
|
||
|
if (style.needsSeparator(parsed.root)) {
|
||
|
parsed.separators[0] = style.separator;
|
||
|
}
|
||
|
buffer_A.clear();
|
||
|
buffer_A.write(parsed.toString());
|
||
|
} else if (this.isAbsolute(part_A)) {
|
||
|
isAbsoluteAndNotRootRelative = !this.isRootRelative(part_A);
|
||
|
buffer_A.clear();
|
||
|
buffer_A.write(part_A);
|
||
|
} else {
|
||
|
if (part_A.length > 0 && style.containsSeparator(part_A[0])) {} else if (needsSeparator) {
|
||
|
buffer_A.write(separator);
|
||
|
}
|
||
|
buffer_A.write(part_A);
|
||
|
}
|
||
|
needsSeparator = style.needsSeparator(part_A);
|
||
|
}
|
||
|
return buffer_A.toString();
|
||
|
}
|
||
|
List<String> split(String path_A) {
|
||
|
var parsed = _parse_A(path_A);
|
||
|
parsed.parts = parsed.parts.where((part_A) => !part_A.isEmpty).toList();
|
||
|
if (parsed.root != null) parsed.parts.insert(0, parsed.root);
|
||
|
return parsed.parts;
|
||
|
}
|
||
|
String normalize(String path_A) {
|
||
|
var parsed = _parse_A(path_A);
|
||
|
parsed.normalize();
|
||
|
return parsed.toString();
|
||
|
}
|
||
|
String relative(String path_A, {String from}) {
|
||
|
if (from == null) {
|
||
|
from = current;
|
||
|
} else if (this.isRelative(from) || this.isRootRelative(from)) {
|
||
|
from = this.join(current, from);
|
||
|
}
|
||
|
if (this.isRelative(from) && this.isAbsolute(path_A)) {
|
||
|
return this.normalize(path_A);
|
||
|
}
|
||
|
if (this.isRelative(path_A) || this.isRootRelative(path_A)) {
|
||
|
path_A = this.absolute(path_A);
|
||
|
}
|
||
|
if (this.isRelative(path_A) && this.isAbsolute(from)) {
|
||
|
throw new PathException('Unable to find a path to "${path_A}" from "${from}".');
|
||
|
}
|
||
|
var fromParsed = _parse_A(from)
|
||
|
..normalize();
|
||
|
var pathParsed = _parse_A(path_A)
|
||
|
..normalize();
|
||
|
if (fromParsed.parts.length > 0 && fromParsed.parts[0] == '.') {
|
||
|
return pathParsed.toString();
|
||
|
}
|
||
|
if (fromParsed.root != pathParsed.root && ((fromParsed.root == null || pathParsed.root == null) || fromParsed.root.toLowerCase().replaceAll('/', '\\') != pathParsed.root.toLowerCase().replaceAll('/', '\\'))) {
|
||
|
return pathParsed.toString();
|
||
|
}
|
||
|
while (fromParsed.parts.length > 0 && pathParsed.parts.length > 0 && fromParsed.parts[0] == pathParsed.parts[0]) {
|
||
|
fromParsed.parts.removeAt(0);
|
||
|
fromParsed.separators.removeAt(1);
|
||
|
pathParsed.parts.removeAt(0);
|
||
|
pathParsed.separators.removeAt(1);
|
||
|
}
|
||
|
if (fromParsed.parts.length > 0 && fromParsed.parts[0] == '..') {
|
||
|
throw new PathException('Unable to find a path to "${path_A}" from "${from}".');
|
||
|
}
|
||
|
pathParsed.parts.insertAll(0, new List.filled(fromParsed.parts.length, '..'));
|
||
|
pathParsed.separators[0] = '';
|
||
|
pathParsed.separators.insertAll(1, new List.filled(fromParsed.parts.length, style.separator));
|
||
|
if (pathParsed.parts.length == 0) return '.';
|
||
|
if (pathParsed.parts.length > 1 && pathParsed.parts.last == '.') {
|
||
|
pathParsed.parts.removeLast();
|
||
|
pathParsed.separators
|
||
|
..removeLast()
|
||
|
..removeLast()
|
||
|
..add('');
|
||
|
}
|
||
|
pathParsed.root = '';
|
||
|
pathParsed.removeTrailingSeparators();
|
||
|
return pathParsed.toString();
|
||
|
}
|
||
|
String withoutExtension_A(String path_A) {
|
||
|
var parsed = _parse_A(path_A);
|
||
|
for (var i = parsed.parts.length - 1; i >= 0; i--) {
|
||
|
if (!parsed.parts[i].isEmpty) {
|
||
|
parsed.parts[i] = parsed.basenameWithoutExtension;
|
||
|
break;
|
||
|
}
|
||
|
}
|
||
|
return parsed.toString();
|
||
|
}
|
||
|
String fromUri_A(uri_A) {
|
||
|
if (uri_A is String) uri_A = Uri.parse(uri_A);
|
||
|
return style.pathFromUri(uri_A);
|
||
|
}
|
||
|
Uri toUri(String path_A) {
|
||
|
if (isRelative(path_A)) {
|
||
|
return style.relativePathToUri(path_A);
|
||
|
} else {
|
||
|
return style.absolutePathToUri(join(current, path_A));
|
||
|
}
|
||
|
}
|
||
|
ParsedPath _parse_A(String path_A) => new ParsedPath.parse_A(path_A, style);
|
||
|
}
|
||
|
_validateArgList(String method_A, List<String> args) {
|
||
|
for (var i = 1; i < args.length; i++) {
|
||
|
if (args[i] == null || args[i - 1] != null) continue;
|
||
|
var numArgs;
|
||
|
for (numArgs = args.length; numArgs >= 1; numArgs--) {
|
||
|
if (args[numArgs - 1] != null) break;
|
||
|
}
|
||
|
var message_A = new StringBuffer();
|
||
|
message_A.write("${method_A}(");
|
||
|
message_A.write(args.take(numArgs).map((arg) => arg == null ? "null" : '"${arg}"').join(", "));
|
||
|
message_A.write("): part ${i - 1} was null, but part ${i} was not.");
|
||
|
throw new ArgumentError(message_A.toString());
|
||
|
}
|
||
|
}
|
||
|
abstract class InternalStyle extends Style {
|
||
|
String get separator;
|
||
|
bool containsSeparator(String path_A);
|
||
|
bool isSeparator(int codeUnit);
|
||
|
bool needsSeparator(String path_A);
|
||
|
int rootLength(String path_A);
|
||
|
String getRoot(String path_A) {
|
||
|
var length_A = rootLength(path_A);
|
||
|
if (length_A > 0) return path_A.substring(0, length_A);
|
||
|
return isRootRelative(path_A) ? path_A[0] : null;
|
||
|
}
|
||
|
bool isRootRelative(String path_A);
|
||
|
String pathFromUri(Uri uri_A);
|
||
|
Uri relativePathToUri(String path_A) {
|
||
|
var segments = context_A.split(path_A);
|
||
|
if (isSeparator(path_A.codeUnitAt(path_A.length - 1))) segments.add('');
|
||
|
return new Uri(pathSegments: segments);
|
||
|
}
|
||
|
Uri absolutePathToUri(String path_A);
|
||
|
}
|
||
|
class ParsedPath {
|
||
|
InternalStyle style;
|
||
|
String root;
|
||
|
bool isRootRelative;
|
||
|
List<String> parts;
|
||
|
List<String> separators;
|
||
|
String get extension => _splitExtension()[1];
|
||
|
bool get isAbsolute => root != null;
|
||
|
factory ParsedPath.parse_A(String path_A, InternalStyle style_A) {
|
||
|
var root_A = style_A.getRoot(path_A);
|
||
|
var isRootRelative_A = style_A.isRootRelative(path_A);
|
||
|
if (root_A != null) path_A = path_A.substring(root_A.length);
|
||
|
var parts_A = <String>[];
|
||
|
var separators_A = <String>[];
|
||
|
var start_A = 0;
|
||
|
if (path_A.isNotEmpty && style_A.isSeparator(path_A.codeUnitAt(0))) {
|
||
|
separators_A.add(path_A[0]);
|
||
|
start_A = 1;
|
||
|
} else {
|
||
|
separators_A.add('');
|
||
|
}
|
||
|
for (var i = start_A; i < path_A.length; i++) {
|
||
|
if (style_A.isSeparator(path_A.codeUnitAt(i))) {
|
||
|
parts_A.add(path_A.substring(start_A, i));
|
||
|
separators_A.add(path_A[i]);
|
||
|
start_A = i + 1;
|
||
|
}
|
||
|
}
|
||
|
if (start_A < path_A.length) {
|
||
|
parts_A.add(path_A.substring(start_A));
|
||
|
separators_A.add('');
|
||
|
}
|
||
|
return new ParsedPath.__B(style_A, root_A, isRootRelative_A, parts_A, separators_A);
|
||
|
}
|
||
|
ParsedPath.__B(this.style, this.root, this.isRootRelative, this.parts, this.separators);
|
||
|
String get basenameWithoutExtension => _splitExtension()[0];
|
||
|
bool get hasTrailingSeparator => !parts.isEmpty && (parts.last == '' || separators.last != '');
|
||
|
void removeTrailingSeparators() {
|
||
|
while (!parts.isEmpty && parts.last == '') {
|
||
|
parts.removeLast();
|
||
|
separators.removeLast();
|
||
|
}
|
||
|
if (separators.length > 0) separators[separators.length - 1] = '';
|
||
|
}
|
||
|
void normalize() {
|
||
|
var leadingDoubles = 0;
|
||
|
var newParts = <String>[];
|
||
|
for (var part_A in parts) {
|
||
|
if (part_A == '.' || part_A == '') {} else if (part_A == '..') {
|
||
|
if (newParts.length > 0) {
|
||
|
newParts.removeLast();
|
||
|
} else {
|
||
|
leadingDoubles++;
|
||
|
}
|
||
|
} else {
|
||
|
newParts.add(part_A);
|
||
|
}
|
||
|
}
|
||
|
if (!isAbsolute) {
|
||
|
newParts.insertAll(0, new List.filled(leadingDoubles, '..'));
|
||
|
}
|
||
|
if (newParts.length == 0 && !isAbsolute) {
|
||
|
newParts.add('.');
|
||
|
}
|
||
|
var newSeparators = new List<String>.generate(newParts.length, (__H) => style.separator, growable: true);
|
||
|
newSeparators.insert(0, isAbsolute && newParts.length > 0 && style.needsSeparator(root) ? style.separator : '');
|
||
|
parts = newParts;
|
||
|
separators = newSeparators;
|
||
|
if (root != null && style == Style.windows) {
|
||
|
root = root.replaceAll('/', '\\');
|
||
|
}
|
||
|
removeTrailingSeparators();
|
||
|
}
|
||
|
String toString() {
|
||
|
var builder_A = new StringBuffer();
|
||
|
if (root != null) builder_A.write(root);
|
||
|
for (var i = 0; i < parts.length; i++) {
|
||
|
builder_A.write(separators[i]);
|
||
|
builder_A.write(parts[i]);
|
||
|
}
|
||
|
builder_A.write(separators.last);
|
||
|
return builder_A.toString();
|
||
|
}
|
||
|
List<String> _splitExtension() {
|
||
|
var file_A = parts.lastWhere((p) => p != '', orElse: () => null);
|
||
|
if (file_A == null) return ['', ''];
|
||
|
if (file_A == '..') return ['..', ''];
|
||
|
var lastDot = file_A.lastIndexOf('.');
|
||
|
if (lastDot <= 0) return [file_A, ''];
|
||
|
return [file_A.substring(0, lastDot), file_A.substring(lastDot)];
|
||
|
}
|
||
|
ParsedPath clone() => new ParsedPath.__B(style, root, isRootRelative, new List.from(parts), new List.from(separators));
|
||
|
}
|
||
|
class PathException implements Exception {
|
||
|
String message;
|
||
|
PathException(this.message);
|
||
|
String toString() => "PathException: ${message}";
|
||
|
}
|
||
|
abstract class Style {
|
||
|
static final Style posix = new PosixStyle();
|
||
|
static final Style windows = new WindowsStyle();
|
||
|
static final Style url_A = new UrlStyle();
|
||
|
static final Style platform = _getPlatformStyle();
|
||
|
static Style _getPlatformStyle() {
|
||
|
if (Uri.base.scheme != 'file') return Style.url_A;
|
||
|
if (!Uri.base.path.endsWith('/')) return Style.url_A;
|
||
|
if (new Uri(path: 'a/b').toFilePath() == 'a\\b') return Style.windows;
|
||
|
return Style.posix;
|
||
|
}
|
||
|
String get name;
|
||
|
Context get context_A => new Context(style: this);
|
||
|
String get separator;
|
||
|
String getRoot(String path_A);
|
||
|
String pathFromUri(Uri uri_A);
|
||
|
Uri relativePathToUri(String path_A);
|
||
|
Uri absolutePathToUri(String path_A);
|
||
|
String toString() => name;
|
||
|
}
|
||
|
class PosixStyle extends InternalStyle {
|
||
|
PosixStyle();
|
||
|
final name = 'posix';
|
||
|
final separator = '/';
|
||
|
final separators = const['/'];
|
||
|
final separatorPattern = new RegExp(r'/');
|
||
|
final needsSeparatorPattern = new RegExp(r'[^/]$');
|
||
|
final rootPattern = new RegExp(r'^/');
|
||
|
final relativeRootPattern = null;
|
||
|
bool containsSeparator(String path_A) => path_A.contains('/');
|
||
|
bool isSeparator(int codeUnit) => codeUnit == SLASH_A;
|
||
|
bool needsSeparator(String path_A) => path_A.isNotEmpty && !isSeparator(path_A.codeUnitAt(path_A.length - 1));
|
||
|
int rootLength(String path_A) {
|
||
|
if (path_A.isNotEmpty && isSeparator(path_A.codeUnitAt(0))) return 1;
|
||
|
return 0;
|
||
|
}
|
||
|
bool isRootRelative(String path_A) => false;
|
||
|
String pathFromUri(Uri uri_A) {
|
||
|
if (uri_A.scheme == '' || uri_A.scheme == 'file') {
|
||
|
return Uri.decodeComponent(uri_A.path);
|
||
|
}
|
||
|
throw new ArgumentError("Uri ${uri_A} must have scheme 'file:'.");
|
||
|
}
|
||
|
Uri absolutePathToUri(String path_A) {
|
||
|
var parsed = new ParsedPath.parse_A(path_A, this);
|
||
|
if (parsed.parts.isEmpty) {
|
||
|
parsed.parts.addAll(["", ""]);
|
||
|
} else if (parsed.hasTrailingSeparator) {
|
||
|
parsed.parts.add("");
|
||
|
}
|
||
|
return new Uri(scheme: 'file', pathSegments: parsed.parts);
|
||
|
}
|
||
|
}
|
||
|
class UrlStyle extends InternalStyle {
|
||
|
UrlStyle();
|
||
|
final name = 'url';
|
||
|
final separator = '/';
|
||
|
final separators = const['/'];
|
||
|
final separatorPattern = new RegExp(r'/');
|
||
|
final needsSeparatorPattern = new RegExp(r"(^[a-zA-Z][-+.a-zA-Z\d]*://|[^/])$");
|
||
|
final rootPattern = new RegExp(r"[a-zA-Z][-+.a-zA-Z\d]*://[^/]*");
|
||
|
final relativeRootPattern = new RegExp(r"^/");
|
||
|
bool containsSeparator(String path_A) => path_A.contains('/');
|
||
|
bool isSeparator(int codeUnit) => codeUnit == SLASH_A;
|
||
|
bool needsSeparator(String path_A) {
|
||
|
if (path_A.isEmpty) return false;
|
||
|
if (!isSeparator(path_A.codeUnitAt(path_A.length - 1))) return true;
|
||
|
return path_A.endsWith("://") && rootLength(path_A) == path_A.length;
|
||
|
}
|
||
|
int rootLength(String path_A) {
|
||
|
if (path_A.isEmpty) return 0;
|
||
|
if (isSeparator(path_A.codeUnitAt(0))) return 1;
|
||
|
var index_A = path_A.indexOf("/");
|
||
|
if (index_A > 0 && path_A.startsWith('://', index_A - 1)) {
|
||
|
index_A = path_A.indexOf('/', index_A + 2);
|
||
|
if (index_A > 0) return index_A;
|
||
|
return path_A.length;
|
||
|
}
|
||
|
return 0;
|
||
|
}
|
||
|
bool isRootRelative(String path_A) => path_A.isNotEmpty && isSeparator(path_A.codeUnitAt(0));
|
||
|
String pathFromUri(Uri uri_A) => uri_A.toString();
|
||
|
Uri relativePathToUri(String path_A) => Uri.parse(path_A);
|
||
|
Uri absolutePathToUri(String path_A) => Uri.parse(path_A);
|
||
|
}
|
||
|
class WindowsStyle extends InternalStyle {
|
||
|
WindowsStyle();
|
||
|
final name = 'windows';
|
||
|
final separator = '\\';
|
||
|
final separators = const['/', '\\'];
|
||
|
final separatorPattern = new RegExp(r'[/\\]');
|
||
|
final needsSeparatorPattern = new RegExp(r'[^/\\]$');
|
||
|
final rootPattern = new RegExp(r'^(\\\\[^\\]+\\[^\\/]+|[a-zA-Z]:[/\\])');
|
||
|
final relativeRootPattern = new RegExp(r"^[/\\](?![/\\])");
|
||
|
bool containsSeparator(String path_A) => path_A.contains('/');
|
||
|
bool isSeparator(int codeUnit) => codeUnit == SLASH_A || codeUnit == BACKSLASH_A;
|
||
|
bool needsSeparator(String path_A) {
|
||
|
if (path_A.isEmpty) return false;
|
||
|
return !isSeparator(path_A.codeUnitAt(path_A.length - 1));
|
||
|
}
|
||
|
int rootLength(String path_A) {
|
||
|
if (path_A.isEmpty) return 0;
|
||
|
if (path_A.codeUnitAt(0) == SLASH_A) return 1;
|
||
|
if (path_A.codeUnitAt(0) == BACKSLASH_A) {
|
||
|
if (path_A.length < 2 || path_A.codeUnitAt(1) != BACKSLASH_A) return 1;
|
||
|
var index_A = path_A.indexOf('\\', 2);
|
||
|
if (index_A > 0) {
|
||
|
index_A = path_A.indexOf('\\', index_A + 1);
|
||
|
if (index_A > 0) return index_A;
|
||
|
}
|
||
|
return path_A.length;
|
||
|
}
|
||
|
if (path_A.length < 3) return 0;
|
||
|
if (!isAlphabetic(path_A.codeUnitAt(0))) return 0;
|
||
|
if (path_A.codeUnitAt(1) != COLON_A) return 0;
|
||
|
if (!isSeparator(path_A.codeUnitAt(2))) return 0;
|
||
|
return 3;
|
||
|
}
|
||
|
bool isRootRelative(String path_A) => rootLength(path_A) == 1;
|
||
|
String pathFromUri(Uri uri_A) {
|
||
|
if (uri_A.scheme != '' && uri_A.scheme != 'file') {
|
||
|
throw new ArgumentError("Uri ${uri_A} must have scheme 'file:'.");
|
||
|
}
|
||
|
var path_A = uri_A.path;
|
||
|
if (uri_A.host == '') {
|
||
|
if (path_A.startsWith('/')) path_A = path_A.replaceFirst("/", "");
|
||
|
} else {
|
||
|
path_A = '\\\\${uri_A.host}${path_A}';
|
||
|
}
|
||
|
return Uri.decodeComponent(path_A.replaceAll("/", "\\"));
|
||
|
}
|
||
|
Uri absolutePathToUri(String path_A) {
|
||
|
var parsed = new ParsedPath.parse_A(path_A, this);
|
||
|
if (parsed.root.startsWith(r'\\')) {
|
||
|
var rootParts = parsed.root.split('\\').where((part_A) => part_A != '');
|
||
|
parsed.parts.insert(0, rootParts.last);
|
||
|
if (parsed.hasTrailingSeparator) {
|
||
|
parsed.parts.add("");
|
||
|
}
|
||
|
return new Uri(scheme: 'file', host: rootParts.first, pathSegments: parsed.parts);
|
||
|
} else {
|
||
|
if (parsed.parts.length == 0 || parsed.hasTrailingSeparator) {
|
||
|
parsed.parts.add("");
|
||
|
}
|
||
|
parsed.parts.insert(0, parsed.root.replaceAll("/", "").replaceAll("\\", ""));
|
||
|
return new Uri(scheme: 'file', pathSegments: parsed.parts);
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
bool isAlphabetic(int char) => (char >= UPPER_A && char <= UPPER_Z) || (char >= LOWER_A && char <= LOWER_Z);
|
||
|
class CodeGeneratorRequest extends GeneratedMessage {
|
||
|
static final BuilderInfo _i = new BuilderInfo('CodeGeneratorRequest')
|
||
|
..p(1, 'fileToGenerate', PbFieldType.PS)
|
||
|
..a(2, 'parameter', PbFieldType.OS_A)
|
||
|
..pp(15, 'protoFile', PbFieldType.PM, FileDescriptorProto.$checkItem, FileDescriptorProto.create_A);
|
||
|
CodeGeneratorRequest() : super();
|
||
|
CodeGeneratorRequest.fromBuffer(List<int> i, [ExtensionRegistry r = ExtensionRegistry.EMPTY]) : super.fromBuffer_A(i, r);
|
||
|
CodeGeneratorRequest clone() => new CodeGeneratorRequest()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i;
|
||
|
List<String> get fileToGenerate => getField(1);
|
||
|
String get parameter => getField(2);
|
||
|
List<FileDescriptorProto> get protoFile => getField(15);
|
||
|
}
|
||
|
class CodeGeneratorResponse_File extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_A = new BuilderInfo('CodeGeneratorResponse_File')
|
||
|
..a(1, 'name', PbFieldType.OS_A)
|
||
|
..a(2, 'insertionPoint', PbFieldType.OS_A)
|
||
|
..a(15, 'content', PbFieldType.OS_A)
|
||
|
..hasRequiredFields = false;
|
||
|
CodeGeneratorResponse_File() : super();
|
||
|
CodeGeneratorResponse_File clone() => new CodeGeneratorResponse_File()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_A;
|
||
|
static CodeGeneratorResponse_File create_B() => new CodeGeneratorResponse_File();
|
||
|
static void $checkItem_A(CodeGeneratorResponse_File v) {
|
||
|
if (v is! CodeGeneratorResponse_File) checkItemFailed(v, 'CodeGeneratorResponse_File');
|
||
|
}
|
||
|
String get name => getField(1);
|
||
|
void set name(String v) {
|
||
|
setField(1, v);
|
||
|
}
|
||
|
void set content(String v) {
|
||
|
setField(15, v);
|
||
|
}
|
||
|
}
|
||
|
class CodeGeneratorResponse extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_B = new BuilderInfo('CodeGeneratorResponse')
|
||
|
..a(1, 'error', PbFieldType.OS_A)
|
||
|
..pp(15, 'file', PbFieldType.PM, CodeGeneratorResponse_File.$checkItem_A, CodeGeneratorResponse_File.create_B)
|
||
|
..hasRequiredFields = false;
|
||
|
CodeGeneratorResponse() : super();
|
||
|
CodeGeneratorResponse clone() => new CodeGeneratorResponse()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_B;
|
||
|
void set error(String v) {
|
||
|
setField(1, v);
|
||
|
}
|
||
|
List<CodeGeneratorResponse_File> get file_A => getField(15);
|
||
|
}
|
||
|
class FileDescriptorProto extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_C = new BuilderInfo('FileDescriptorProto')
|
||
|
..a(1, 'name', PbFieldType.OS_A)
|
||
|
..a(2, 'package', PbFieldType.OS_A)
|
||
|
..p(3, 'dependency', PbFieldType.PS)
|
||
|
..p(10, 'publicDependency', PbFieldType.P3)
|
||
|
..p(11, 'weakDependency', PbFieldType.P3)
|
||
|
..pp(4, 'messageType', PbFieldType.PM, DescriptorProto.$checkItem_B, DescriptorProto.create_C)
|
||
|
..pp(5, 'enumType', PbFieldType.PM, EnumDescriptorProto.$checkItem_C, EnumDescriptorProto.create_D)
|
||
|
..pp(6, 'service', PbFieldType.PM, ServiceDescriptorProto.$checkItem_D, ServiceDescriptorProto.create_E)
|
||
|
..pp(7, 'extension', PbFieldType.PM, FieldDescriptorProto.$checkItem_E, FieldDescriptorProto.create_F)
|
||
|
..a(8, 'options', PbFieldType.OM, FileOptions.getDefault, FileOptions.create_G)
|
||
|
..a(9, 'sourceCodeInfo', PbFieldType.OM, SourceCodeInfo.getDefault_A, SourceCodeInfo.create_H);
|
||
|
FileDescriptorProto() : super();
|
||
|
FileDescriptorProto clone() => new FileDescriptorProto()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_C;
|
||
|
static FileDescriptorProto create_A() => new FileDescriptorProto();
|
||
|
static void $checkItem(FileDescriptorProto v) {
|
||
|
if (v is! FileDescriptorProto) checkItemFailed(v, 'FileDescriptorProto');
|
||
|
}
|
||
|
String get name => getField(1);
|
||
|
void set name(String v) {
|
||
|
setField(1, v);
|
||
|
}
|
||
|
String get package => getField(2);
|
||
|
List<DescriptorProto> get messageType => getField(4);
|
||
|
List<EnumDescriptorProto> get enumType => getField(5);
|
||
|
List<ServiceDescriptorProto> get service => getField(6);
|
||
|
List<FieldDescriptorProto> get extension => getField(7);
|
||
|
FileOptions get options => getField(8);
|
||
|
bool hasOptions() => hasField(8);
|
||
|
}
|
||
|
class DescriptorProto_ExtensionRange extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_D = new BuilderInfo('DescriptorProto_ExtensionRange')
|
||
|
..a(1, 'start', PbFieldType.O3)
|
||
|
..a(2, 'end', PbFieldType.O3)
|
||
|
..hasRequiredFields = false;
|
||
|
DescriptorProto_ExtensionRange() : super();
|
||
|
DescriptorProto_ExtensionRange clone() => new DescriptorProto_ExtensionRange()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_D;
|
||
|
static DescriptorProto_ExtensionRange create_I() => new DescriptorProto_ExtensionRange();
|
||
|
static void $checkItem_F(DescriptorProto_ExtensionRange v) {
|
||
|
if (v is! DescriptorProto_ExtensionRange) checkItemFailed(v, 'DescriptorProto_ExtensionRange');
|
||
|
}
|
||
|
int get start => getField(1);
|
||
|
int get end => getField(2);
|
||
|
}
|
||
|
class DescriptorProto extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_E = new BuilderInfo('DescriptorProto')
|
||
|
..a(1, 'name', PbFieldType.OS_A)
|
||
|
..pp(2, 'field', PbFieldType.PM, FieldDescriptorProto.$checkItem_E, FieldDescriptorProto.create_F)
|
||
|
..pp(6, 'extension', PbFieldType.PM, FieldDescriptorProto.$checkItem_E, FieldDescriptorProto.create_F)
|
||
|
..pp(3, 'nestedType', PbFieldType.PM, DescriptorProto.$checkItem_B, DescriptorProto.create_C)
|
||
|
..pp(4, 'enumType', PbFieldType.PM, EnumDescriptorProto.$checkItem_C, EnumDescriptorProto.create_D)
|
||
|
..pp(5, 'extensionRange', PbFieldType.PM, DescriptorProto_ExtensionRange.$checkItem_F, DescriptorProto_ExtensionRange.create_I)
|
||
|
..a(7, 'options', PbFieldType.OM, MessageOptions.getDefault_B, MessageOptions.create_J);
|
||
|
DescriptorProto() : super();
|
||
|
DescriptorProto clone() => new DescriptorProto()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_E;
|
||
|
static DescriptorProto create_C() => new DescriptorProto();
|
||
|
static void $checkItem_B(DescriptorProto v) {
|
||
|
if (v is! DescriptorProto) checkItemFailed(v, 'DescriptorProto');
|
||
|
}
|
||
|
String get name => getField(1);
|
||
|
void set name(String v) {
|
||
|
setField(1, v);
|
||
|
}
|
||
|
List<FieldDescriptorProto> get field => getField(2);
|
||
|
List<FieldDescriptorProto> get extension => getField(6);
|
||
|
List<DescriptorProto> get nestedType => getField(3);
|
||
|
List<EnumDescriptorProto> get enumType => getField(4);
|
||
|
List<DescriptorProto_ExtensionRange> get extensionRange => getField(5);
|
||
|
MessageOptions get options => getField(7);
|
||
|
bool hasOptions() => hasField(7);
|
||
|
}
|
||
|
class FieldDescriptorProto_Type extends ProtobufEnum {
|
||
|
static const FieldDescriptorProto_Type TYPE_DOUBLE = const FieldDescriptorProto_Type.__C(1, 'TYPE_DOUBLE');
|
||
|
static const FieldDescriptorProto_Type TYPE_FLOAT = const FieldDescriptorProto_Type.__C(2, 'TYPE_FLOAT');
|
||
|
static const FieldDescriptorProto_Type TYPE_INT64 = const FieldDescriptorProto_Type.__C(3, 'TYPE_INT64');
|
||
|
static const FieldDescriptorProto_Type TYPE_UINT64 = const FieldDescriptorProto_Type.__C(4, 'TYPE_UINT64');
|
||
|
static const FieldDescriptorProto_Type TYPE_INT32 = const FieldDescriptorProto_Type.__C(5, 'TYPE_INT32');
|
||
|
static const FieldDescriptorProto_Type TYPE_FIXED64 = const FieldDescriptorProto_Type.__C(6, 'TYPE_FIXED64');
|
||
|
static const FieldDescriptorProto_Type TYPE_FIXED32 = const FieldDescriptorProto_Type.__C(7, 'TYPE_FIXED32');
|
||
|
static const FieldDescriptorProto_Type TYPE_BOOL = const FieldDescriptorProto_Type.__C(8, 'TYPE_BOOL');
|
||
|
static const FieldDescriptorProto_Type TYPE_STRING = const FieldDescriptorProto_Type.__C(9, 'TYPE_STRING');
|
||
|
static const FieldDescriptorProto_Type TYPE_GROUP = const FieldDescriptorProto_Type.__C(10, 'TYPE_GROUP');
|
||
|
static const FieldDescriptorProto_Type TYPE_MESSAGE = const FieldDescriptorProto_Type.__C(11, 'TYPE_MESSAGE');
|
||
|
static const FieldDescriptorProto_Type TYPE_BYTES = const FieldDescriptorProto_Type.__C(12, 'TYPE_BYTES');
|
||
|
static const FieldDescriptorProto_Type TYPE_UINT32 = const FieldDescriptorProto_Type.__C(13, 'TYPE_UINT32');
|
||
|
static const FieldDescriptorProto_Type TYPE_ENUM = const FieldDescriptorProto_Type.__C(14, 'TYPE_ENUM');
|
||
|
static const FieldDescriptorProto_Type TYPE_SFIXED32 = const FieldDescriptorProto_Type.__C(15, 'TYPE_SFIXED32');
|
||
|
static const FieldDescriptorProto_Type TYPE_SFIXED64 = const FieldDescriptorProto_Type.__C(16, 'TYPE_SFIXED64');
|
||
|
static const FieldDescriptorProto_Type TYPE_SINT32 = const FieldDescriptorProto_Type.__C(17, 'TYPE_SINT32');
|
||
|
static const FieldDescriptorProto_Type TYPE_SINT64 = const FieldDescriptorProto_Type.__C(18, 'TYPE_SINT64');
|
||
|
static const List<FieldDescriptorProto_Type> values_A = const<FieldDescriptorProto_Type>[TYPE_DOUBLE, TYPE_FLOAT, TYPE_INT64, TYPE_UINT64, TYPE_INT32, TYPE_FIXED64, TYPE_FIXED32, TYPE_BOOL, TYPE_STRING, TYPE_GROUP, TYPE_MESSAGE, TYPE_BYTES, TYPE_UINT32, TYPE_ENUM, TYPE_SFIXED32, TYPE_SFIXED64, TYPE_SINT32, TYPE_SINT64];
|
||
|
static final Map<int, FieldDescriptorProto_Type> _byValue = ProtobufEnum.initByValue(values_A);
|
||
|
static FieldDescriptorProto_Type valueOf(int value_A) => _byValue[value_A];
|
||
|
const FieldDescriptorProto_Type.__C(int v, String n) : super(v, n);
|
||
|
}
|
||
|
class FieldDescriptorProto_Label extends ProtobufEnum {
|
||
|
static const FieldDescriptorProto_Label LABEL_OPTIONAL = const FieldDescriptorProto_Label.__D(1, 'LABEL_OPTIONAL');
|
||
|
static const FieldDescriptorProto_Label LABEL_REQUIRED = const FieldDescriptorProto_Label.__D(2, 'LABEL_REQUIRED');
|
||
|
static const FieldDescriptorProto_Label LABEL_REPEATED = const FieldDescriptorProto_Label.__D(3, 'LABEL_REPEATED');
|
||
|
static const List<FieldDescriptorProto_Label> values_B = const<FieldDescriptorProto_Label>[LABEL_OPTIONAL, LABEL_REQUIRED, LABEL_REPEATED];
|
||
|
static final Map<int, FieldDescriptorProto_Label> _byValue_A = ProtobufEnum.initByValue(values_B);
|
||
|
static FieldDescriptorProto_Label valueOf_A(int value_A) => _byValue_A[value_A];
|
||
|
const FieldDescriptorProto_Label.__D(int v, String n) : super(v, n);
|
||
|
}
|
||
|
class FieldDescriptorProto extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_F = new BuilderInfo('FieldDescriptorProto')
|
||
|
..a(1, 'name', PbFieldType.OS_A)
|
||
|
..a(3, 'number', PbFieldType.O3)
|
||
|
..e(4, 'label', PbFieldType.OE, FieldDescriptorProto_Label.LABEL_OPTIONAL, FieldDescriptorProto_Label.valueOf_A)
|
||
|
..e(5, 'type', PbFieldType.OE, FieldDescriptorProto_Type.TYPE_DOUBLE, FieldDescriptorProto_Type.valueOf)
|
||
|
..a(6, 'typeName', PbFieldType.OS_A)
|
||
|
..a(2, 'extendee', PbFieldType.OS_A)
|
||
|
..a(7, 'defaultValue', PbFieldType.OS_A)
|
||
|
..a(8, 'options', PbFieldType.OM, FieldOptions.getDefault_C, FieldOptions.create_K);
|
||
|
FieldDescriptorProto() : super();
|
||
|
FieldDescriptorProto clone() => new FieldDescriptorProto()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_F;
|
||
|
static FieldDescriptorProto create_F() => new FieldDescriptorProto();
|
||
|
static void $checkItem_E(FieldDescriptorProto v) {
|
||
|
if (v is! FieldDescriptorProto) checkItemFailed(v, 'FieldDescriptorProto');
|
||
|
}
|
||
|
String get name => getField(1);
|
||
|
void set name(String v) {
|
||
|
setField(1, v);
|
||
|
}
|
||
|
int get number => getField(3);
|
||
|
FieldDescriptorProto_Label get label => getField(4);
|
||
|
FieldDescriptorProto_Type get type => getField(5);
|
||
|
String get typeName => getField(6);
|
||
|
String get extendee => getField(2);
|
||
|
String get defaultValue => getField(7);
|
||
|
bool hasDefaultValue() => hasField(7);
|
||
|
FieldOptions get options => getField(8);
|
||
|
bool hasOptions() => hasField(8);
|
||
|
}
|
||
|
class EnumDescriptorProto extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_G = new BuilderInfo('EnumDescriptorProto')
|
||
|
..a(1, 'name', PbFieldType.OS_A)
|
||
|
..pp(2, 'value', PbFieldType.PM, EnumValueDescriptorProto.$checkItem_G, EnumValueDescriptorProto.create_L)
|
||
|
..a(3, 'options', PbFieldType.OM, EnumOptions.getDefault_D, EnumOptions.create_M);
|
||
|
EnumDescriptorProto() : super();
|
||
|
EnumDescriptorProto clone() => new EnumDescriptorProto()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_G;
|
||
|
static EnumDescriptorProto create_D() => new EnumDescriptorProto();
|
||
|
static void $checkItem_C(EnumDescriptorProto v) {
|
||
|
if (v is! EnumDescriptorProto) checkItemFailed(v, 'EnumDescriptorProto');
|
||
|
}
|
||
|
String get name => getField(1);
|
||
|
void set name(String v) {
|
||
|
setField(1, v);
|
||
|
}
|
||
|
List<EnumValueDescriptorProto> get value => getField(2);
|
||
|
EnumOptions get options => getField(3);
|
||
|
bool hasOptions() => hasField(3);
|
||
|
}
|
||
|
class EnumValueDescriptorProto extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_H = new BuilderInfo('EnumValueDescriptorProto')
|
||
|
..a(1, 'name', PbFieldType.OS_A)
|
||
|
..a(2, 'number', PbFieldType.O3)
|
||
|
..a(3, 'options', PbFieldType.OM, EnumValueOptions.getDefault_E, EnumValueOptions.create_N);
|
||
|
EnumValueDescriptorProto() : super();
|
||
|
EnumValueDescriptorProto clone() => new EnumValueDescriptorProto()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_H;
|
||
|
static EnumValueDescriptorProto create_L() => new EnumValueDescriptorProto();
|
||
|
static void $checkItem_G(EnumValueDescriptorProto v) {
|
||
|
if (v is! EnumValueDescriptorProto) checkItemFailed(v, 'EnumValueDescriptorProto');
|
||
|
}
|
||
|
String get name => getField(1);
|
||
|
void set name(String v) {
|
||
|
setField(1, v);
|
||
|
}
|
||
|
int get number => getField(2);
|
||
|
EnumValueOptions get options => getField(3);
|
||
|
bool hasOptions() => hasField(3);
|
||
|
}
|
||
|
class ServiceDescriptorProto extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_I = new BuilderInfo('ServiceDescriptorProto')
|
||
|
..a(1, 'name', PbFieldType.OS_A)
|
||
|
..pp(2, 'method', PbFieldType.PM, MethodDescriptorProto.$checkItem_H, MethodDescriptorProto.create_O)
|
||
|
..pp(4, 'stream', PbFieldType.PM, StreamDescriptorProto.$checkItem_I, StreamDescriptorProto.create_P)
|
||
|
..a(3, 'options', PbFieldType.OM, ServiceOptions.getDefault_F, ServiceOptions.create_Q);
|
||
|
ServiceDescriptorProto() : super();
|
||
|
ServiceDescriptorProto clone() => new ServiceDescriptorProto()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_I;
|
||
|
static ServiceDescriptorProto create_E() => new ServiceDescriptorProto();
|
||
|
static void $checkItem_D(ServiceDescriptorProto v) {
|
||
|
if (v is! ServiceDescriptorProto) checkItemFailed(v, 'ServiceDescriptorProto');
|
||
|
}
|
||
|
String get name => getField(1);
|
||
|
void set name(String v) {
|
||
|
setField(1, v);
|
||
|
}
|
||
|
List<MethodDescriptorProto> get method => getField(2);
|
||
|
ServiceOptions get options => getField(3);
|
||
|
bool hasOptions() => hasField(3);
|
||
|
}
|
||
|
class MethodDescriptorProto extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_J = new BuilderInfo('MethodDescriptorProto')
|
||
|
..a(1, 'name', PbFieldType.OS_A)
|
||
|
..a(2, 'inputType', PbFieldType.OS_A)
|
||
|
..a(3, 'outputType', PbFieldType.OS_A)
|
||
|
..a(4, 'options', PbFieldType.OM, MethodOptions.getDefault_G, MethodOptions.create_R);
|
||
|
MethodDescriptorProto() : super();
|
||
|
MethodDescriptorProto clone() => new MethodDescriptorProto()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_J;
|
||
|
static MethodDescriptorProto create_O() => new MethodDescriptorProto();
|
||
|
static void $checkItem_H(MethodDescriptorProto v) {
|
||
|
if (v is! MethodDescriptorProto) checkItemFailed(v, 'MethodDescriptorProto');
|
||
|
}
|
||
|
String get name => getField(1);
|
||
|
void set name(String v) {
|
||
|
setField(1, v);
|
||
|
}
|
||
|
String get inputType => getField(2);
|
||
|
String get outputType => getField(3);
|
||
|
MethodOptions get options => getField(4);
|
||
|
bool hasOptions() => hasField(4);
|
||
|
}
|
||
|
class StreamDescriptorProto extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_K = new BuilderInfo('StreamDescriptorProto')
|
||
|
..a(1, 'name', PbFieldType.OS_A)
|
||
|
..a(2, 'clientMessageType', PbFieldType.OS_A)
|
||
|
..a(3, 'serverMessageType', PbFieldType.OS_A)
|
||
|
..a(4, 'options', PbFieldType.OM, StreamOptions.getDefault_H, StreamOptions.create_S);
|
||
|
StreamDescriptorProto() : super();
|
||
|
StreamDescriptorProto clone() => new StreamDescriptorProto()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_K;
|
||
|
static StreamDescriptorProto create_P() => new StreamDescriptorProto();
|
||
|
static void $checkItem_I(StreamDescriptorProto v) {
|
||
|
if (v is! StreamDescriptorProto) checkItemFailed(v, 'StreamDescriptorProto');
|
||
|
}
|
||
|
String get name => getField(1);
|
||
|
void set name(String v) {
|
||
|
setField(1, v);
|
||
|
}
|
||
|
StreamOptions get options => getField(4);
|
||
|
bool hasOptions() => hasField(4);
|
||
|
}
|
||
|
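// Note: the option and descriptor classes that follow appear to be the Dart
// counterparts of the messages defined in google/protobuf/descriptor.proto,
// emitted by the protoc plugin this file was compiled from.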
class FileOptions_OptimizeMode extends ProtobufEnum {
|
||
|
static const FileOptions_OptimizeMode SPEED = const FileOptions_OptimizeMode.__E(1, 'SPEED');
|
||
|
static const FileOptions_OptimizeMode CODE_SIZE = const FileOptions_OptimizeMode.__E(2, 'CODE_SIZE');
|
||
|
static const FileOptions_OptimizeMode LITE_RUNTIME = const FileOptions_OptimizeMode.__E(3, 'LITE_RUNTIME');
|
||
|
static const List<FileOptions_OptimizeMode> values_C = const<FileOptions_OptimizeMode>[SPEED, CODE_SIZE, LITE_RUNTIME];
|
||
|
static final Map<int, FileOptions_OptimizeMode> _byValue_B = ProtobufEnum.initByValue(values_C);
|
||
|
static FileOptions_OptimizeMode valueOf_B(int value_A) => _byValue_B[value_A];
|
||
|
const FileOptions_OptimizeMode.__E(int v, String n) : super(v, n);
|
||
|
}
|
||
|
class FileOptions extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_L = new BuilderInfo('FileOptions')
|
||
|
..a(1, 'javaPackage', PbFieldType.OS_A)
|
||
|
..a(8, 'javaOuterClassname', PbFieldType.OS_A)
|
||
|
..a(10, 'javaMultipleFiles', PbFieldType.OB)
|
||
|
..a(20, 'javaGenerateEqualsAndHash', PbFieldType.OB)
|
||
|
..e(9, 'optimizeFor', PbFieldType.OE, FileOptions_OptimizeMode.SPEED, FileOptions_OptimizeMode.valueOf_B)
|
||
|
..a(16, 'ccGenericServices', PbFieldType.OB)
|
||
|
..a(17, 'javaGenericServices', PbFieldType.OB)
|
||
|
..a(18, 'pyGenericServices', PbFieldType.OB)
|
||
|
..pp(999, 'uninterpretedOption', PbFieldType.PM, UninterpretedOption.$checkItem_J, UninterpretedOption.create_T)
|
||
|
..hasExtensions = true;
|
||
|
FileOptions() : super();
|
||
|
FileOptions clone() => new FileOptions()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_L;
|
||
|
static FileOptions create_G() => new FileOptions();
|
||
|
static FileOptions getDefault() {
|
||
|
if (_defaultInstance == null) _defaultInstance = new _ReadonlyFileOptions();
|
||
|
return _defaultInstance;
|
||
|
}
|
||
|
static FileOptions _defaultInstance;
|
||
|
}
|
||
|
class _ReadonlyFileOptions extends FileOptions with ReadonlyMessageMixin {}
|
||
|
class MessageOptions extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_M = new BuilderInfo('MessageOptions')
|
||
|
..a(1, 'messageSetWireFormat', PbFieldType.OB)
|
||
|
..a(2, 'noStandardDescriptorAccessor', PbFieldType.OB)
|
||
|
..pp(999, 'uninterpretedOption', PbFieldType.PM, UninterpretedOption.$checkItem_J, UninterpretedOption.create_T)
|
||
|
..hasExtensions = true;
|
||
|
MessageOptions() : super();
|
||
|
MessageOptions clone() => new MessageOptions()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_M;
|
||
|
static MessageOptions create_J() => new MessageOptions();
|
||
|
static MessageOptions getDefault_B() {
|
||
|
if (_defaultInstance_A == null) _defaultInstance_A = new _ReadonlyMessageOptions();
|
||
|
return _defaultInstance_A;
|
||
|
}
|
||
|
static MessageOptions _defaultInstance_A;
|
||
|
}
|
||
|
class _ReadonlyMessageOptions extends MessageOptions with ReadonlyMessageMixin {}
|
||
|
class FieldOptions_CType extends ProtobufEnum {
|
||
|
static const FieldOptions_CType STRING = const FieldOptions_CType.__F(0, 'STRING');
|
||
|
static const List<FieldOptions_CType> values_D = const<FieldOptions_CType>[STRING];
|
||
|
static final Map<int, FieldOptions_CType> _byValue_C = ProtobufEnum.initByValue(values_D);
|
||
|
static FieldOptions_CType valueOf_C(int value_A) => _byValue_C[value_A];
|
||
|
const FieldOptions_CType.__F(int v, String n) : super(v, n);
|
||
|
}
|
||
|
class FieldOptions extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_N = new BuilderInfo('FieldOptions')
|
||
|
..e(1, 'ctype', PbFieldType.OE, FieldOptions_CType.STRING, FieldOptions_CType.valueOf_C)
|
||
|
..a(2, 'packed', PbFieldType.OB)
|
||
|
..a(5, 'lazy', PbFieldType.OB)
|
||
|
..a(3, 'deprecated', PbFieldType.OB)
|
||
|
..a(9, 'experimentalMapKey', PbFieldType.OS_A)
|
||
|
..pp(999, 'uninterpretedOption', PbFieldType.PM, UninterpretedOption.$checkItem_J, UninterpretedOption.create_T)
|
||
|
..hasExtensions = true;
|
||
|
FieldOptions() : super();
|
||
|
FieldOptions clone() => new FieldOptions()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_N;
|
||
|
static FieldOptions create_K() => new FieldOptions();
|
||
|
static FieldOptions getDefault_C() {
|
||
|
if (_defaultInstance_B == null) _defaultInstance_B = new _ReadonlyFieldOptions();
|
||
|
return _defaultInstance_B;
|
||
|
}
|
||
|
static FieldOptions _defaultInstance_B;
|
||
|
bool get packed => getField(2);
|
||
|
}
|
||
|
class _ReadonlyFieldOptions extends FieldOptions with ReadonlyMessageMixin {}
|
||
|
class EnumOptions extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_O = new BuilderInfo('EnumOptions')
|
||
|
..a(2, 'allowAlias', PbFieldType.OB, true)
|
||
|
..pp(999, 'uninterpretedOption', PbFieldType.PM, UninterpretedOption.$checkItem_J, UninterpretedOption.create_T)
|
||
|
..hasExtensions = true;
|
||
|
EnumOptions() : super();
|
||
|
EnumOptions clone() => new EnumOptions()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_O;
|
||
|
static EnumOptions create_M() => new EnumOptions();
|
||
|
static EnumOptions getDefault_D() {
|
||
|
if (_defaultInstance_C == null) _defaultInstance_C = new _ReadonlyEnumOptions();
|
||
|
return _defaultInstance_C;
|
||
|
}
|
||
|
static EnumOptions _defaultInstance_C;
|
||
|
}
|
||
|
class _ReadonlyEnumOptions extends EnumOptions with ReadonlyMessageMixin {}
|
||
|
class EnumValueOptions extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_P = new BuilderInfo('EnumValueOptions')
|
||
|
..pp(999, 'uninterpretedOption', PbFieldType.PM, UninterpretedOption.$checkItem_J, UninterpretedOption.create_T)
|
||
|
..hasExtensions = true;
|
||
|
EnumValueOptions() : super();
|
||
|
EnumValueOptions clone() => new EnumValueOptions()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_P;
|
||
|
static EnumValueOptions create_N() => new EnumValueOptions();
|
||
|
static EnumValueOptions getDefault_E() {
|
||
|
if (_defaultInstance_D == null) _defaultInstance_D = new _ReadonlyEnumValueOptions();
|
||
|
return _defaultInstance_D;
|
||
|
}
|
||
|
static EnumValueOptions _defaultInstance_D;
|
||
|
}
|
||
|
class _ReadonlyEnumValueOptions extends EnumValueOptions with ReadonlyMessageMixin {}
|
||
|
class ServiceOptions extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_Q = new BuilderInfo('ServiceOptions')
|
||
|
..pp(999, 'uninterpretedOption', PbFieldType.PM, UninterpretedOption.$checkItem_J, UninterpretedOption.create_T)
|
||
|
..hasExtensions = true;
|
||
|
ServiceOptions() : super();
|
||
|
ServiceOptions clone() => new ServiceOptions()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_Q;
|
||
|
static ServiceOptions create_Q() => new ServiceOptions();
|
||
|
static ServiceOptions getDefault_F() {
|
||
|
if (_defaultInstance_E == null) _defaultInstance_E = new _ReadonlyServiceOptions();
|
||
|
return _defaultInstance_E;
|
||
|
}
|
||
|
static ServiceOptions _defaultInstance_E;
|
||
|
}
|
||
|
class _ReadonlyServiceOptions extends ServiceOptions with ReadonlyMessageMixin {}
|
||
|
class MethodOptions extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_R = new BuilderInfo('MethodOptions')
|
||
|
..pp(999, 'uninterpretedOption', PbFieldType.PM, UninterpretedOption.$checkItem_J, UninterpretedOption.create_T)
|
||
|
..hasExtensions = true;
|
||
|
MethodOptions() : super();
|
||
|
MethodOptions clone() => new MethodOptions()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_R;
|
||
|
static MethodOptions create_R() => new MethodOptions();
|
||
|
static MethodOptions getDefault_G() {
|
||
|
if (_defaultInstance_F == null) _defaultInstance_F = new _ReadonlyMethodOptions();
|
||
|
return _defaultInstance_F;
|
||
|
}
|
||
|
static MethodOptions _defaultInstance_F;
|
||
|
}
|
||
|
class _ReadonlyMethodOptions extends MethodOptions with ReadonlyMessageMixin {}
|
||
|
class StreamOptions extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_S = new BuilderInfo('StreamOptions')
|
||
|
..pp(999, 'uninterpretedOption', PbFieldType.PM, UninterpretedOption.$checkItem_J, UninterpretedOption.create_T)
|
||
|
..hasExtensions = true;
|
||
|
StreamOptions() : super();
|
||
|
StreamOptions clone() => new StreamOptions()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_S;
|
||
|
static StreamOptions create_S() => new StreamOptions();
|
||
|
static StreamOptions getDefault_H() {
|
||
|
if (_defaultInstance_G == null) _defaultInstance_G = new _ReadonlyStreamOptions();
|
||
|
return _defaultInstance_G;
|
||
|
}
|
||
|
static StreamOptions _defaultInstance_G;
|
||
|
}
|
||
|
class _ReadonlyStreamOptions extends StreamOptions with ReadonlyMessageMixin {}
|
||
|
class UninterpretedOption_NamePart extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_T = new BuilderInfo('UninterpretedOption_NamePart')
|
||
|
..a(1, 'namePart', PbFieldType.QS)
|
||
|
..a(2, 'isExtension', PbFieldType.QB);
|
||
|
UninterpretedOption_NamePart() : super();
|
||
|
UninterpretedOption_NamePart clone() => new UninterpretedOption_NamePart()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_T;
|
||
|
static UninterpretedOption_NamePart create_U() => new UninterpretedOption_NamePart();
|
||
|
static void $checkItem_K(UninterpretedOption_NamePart v) {
|
||
|
if (v is! UninterpretedOption_NamePart) checkItemFailed(v, 'UninterpretedOption_NamePart');
|
||
|
}
|
||
|
}
|
||
|
class UninterpretedOption extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_U = new BuilderInfo('UninterpretedOption')
|
||
|
..pp(2, 'name', PbFieldType.PM, UninterpretedOption_NamePart.$checkItem_K, UninterpretedOption_NamePart.create_U)
|
||
|
..a(3, 'identifierValue', PbFieldType.OS_A)
|
||
|
..a(4, 'positiveIntValue', PbFieldType.OU6, Int64.ZERO_A)
|
||
|
..a(5, 'negativeIntValue', PbFieldType.O6, Int64.ZERO_A)
|
||
|
..a(6, 'doubleValue', PbFieldType.OD)
|
||
|
..a(7, 'stringValue', PbFieldType.OY)
|
||
|
..a(8, 'aggregateValue', PbFieldType.OS_A);
|
||
|
UninterpretedOption() : super();
|
||
|
UninterpretedOption clone() => new UninterpretedOption()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_U;
|
||
|
static UninterpretedOption create_T() => new UninterpretedOption();
|
||
|
static void $checkItem_J(UninterpretedOption v) {
|
||
|
if (v is! UninterpretedOption) checkItemFailed(v, 'UninterpretedOption');
|
||
|
}
|
||
|
List<UninterpretedOption_NamePart> get name => getField(2);
|
||
|
}
|
||
|
class SourceCodeInfo_Location extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_V = new BuilderInfo('SourceCodeInfo_Location')
|
||
|
..p(1, 'path', PbFieldType.K3)
|
||
|
..p(2, 'span', PbFieldType.K3)
|
||
|
..hasRequiredFields = false;
|
||
|
SourceCodeInfo_Location() : super();
|
||
|
SourceCodeInfo_Location clone() => new SourceCodeInfo_Location()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_V;
|
||
|
static SourceCodeInfo_Location create_V() => new SourceCodeInfo_Location();
|
||
|
static void $checkItem_L(SourceCodeInfo_Location v) {
|
||
|
if (v is! SourceCodeInfo_Location) checkItemFailed(v, 'SourceCodeInfo_Location');
|
||
|
}
|
||
|
List<int> get path => getField(1);
|
||
|
}
|
||
|
class SourceCodeInfo extends GeneratedMessage {
|
||
|
static final BuilderInfo _i_W = new BuilderInfo('SourceCodeInfo')
|
||
|
..pp(1, 'location', PbFieldType.PM, SourceCodeInfo_Location.$checkItem_L, SourceCodeInfo_Location.create_V)
|
||
|
..hasRequiredFields = false;
|
||
|
SourceCodeInfo() : super();
|
||
|
SourceCodeInfo clone() => new SourceCodeInfo()
|
||
|
..mergeFromMessage(this);
|
||
|
BuilderInfo get info_ => _i_W;
|
||
|
static SourceCodeInfo create_H() => new SourceCodeInfo();
|
||
|
static SourceCodeInfo getDefault_A() {
|
||
|
if (_defaultInstance_H == null) _defaultInstance_H = new _ReadonlySourceCodeInfo();
|
||
|
return _defaultInstance_H;
|
||
|
}
|
||
|
static SourceCodeInfo _defaultInstance_H;
|
||
|
}
|
||
|
class _ReadonlySourceCodeInfo extends SourceCodeInfo with ReadonlyMessageMixin {}
|
||
|
const _UTF8 = const Utf8Codec(allowMalformed: true);
|
||
|
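// BuilderInfo holds the per-message metadata (tag numbers, field names, field
// types, default-value makers, and sub-builders) that GeneratedMessage uses
// for reflective field access, required-field validation, and (de)serialization.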
class BuilderInfo {
final String messageName;
final Map<int, FieldInfo> fieldInfo = new Map<int, FieldInfo>();
final Map<String, FieldInfo> byName = <String, FieldInfo>{};
bool hasExtensions = false;
bool hasRequiredFields = true;
BuilderInfo(this.messageName);
|
||
|
void add(int tagNumber_A, String name_A, int fieldType_A, dynamic defaultOrMaker, CreateBuilderFunc subBuilder_A, ValueOfFunc valueOf_D) {
|
||
|
fieldInfo[tagNumber_A] = byName[name_A] = new FieldInfo(name_A, tagNumber_A, fieldType_A, defaultOrMaker, subBuilder_A, valueOf_D);
|
||
|
}
|
||
|
void addRepeated(int tagNumber_A, String name_A, int fieldType_A, CheckFunc check_A, CreateBuilderFunc subBuilder_A, ValueOfFunc valueOf_D) {
|
||
|
fieldInfo[tagNumber_A] = byName[name_A] = new FieldInfo.repeated(name_A, tagNumber_A, fieldType_A, check_A, subBuilder_A, valueOf_D);
|
||
|
}
|
||
|
void a(int tagNumber_A, String name_A, int fieldType_A, [dynamic defaultOrMaker, CreateBuilderFunc subBuilder_A, ValueOfFunc valueOf_D]) {
|
||
|
add(tagNumber_A, name_A, fieldType_A, defaultOrMaker, subBuilder_A, valueOf_D);
|
||
|
}
|
||
|
void e(int tagNumber_A, String name_A, int fieldType_A, dynamic defaultOrMaker, ValueOfFunc valueOf_D) {
|
||
|
add(tagNumber_A, name_A, fieldType_A, defaultOrMaker, null, valueOf_D);
|
||
|
}
|
||
|
void p(int tagNumber_A, String name_A, int fieldType_A) {
|
||
|
assert(!_isGroupOrMessage(fieldType_A) && !_isEnum(fieldType_A));
|
||
|
addRepeated(tagNumber_A, name_A, fieldType_A, getCheckFunction(fieldType_A), null, null);
|
||
|
}
|
||
|
void pp(int tagNumber_A, String name_A, int fieldType_A, CheckFunc check_A, [CreateBuilderFunc subBuilder_A, ValueOfFunc valueOf_D]) {
|
||
|
assert(_isGroupOrMessage(fieldType_A) || _isEnum(fieldType_A));
|
||
|
addRepeated(tagNumber_A, name_A, fieldType_A, check_A, subBuilder_A, valueOf_D);
|
||
|
}
|
||
|
defaultValue(int tagNumber_A) {
|
||
|
MakeDefaultFunc func = makeDefault(tagNumber_A);
|
||
|
return func == null ? null : func();
|
||
|
}
|
||
|
String fieldName(int tagNumber_A) {
|
||
|
FieldInfo i = fieldInfo[tagNumber_A];
|
||
|
return i != null ? i.name : null;
|
||
|
}
|
||
|
MakeDefaultFunc makeDefault(int tagNumber_A) {
|
||
|
FieldInfo i = fieldInfo[tagNumber_A];
|
||
|
return i != null ? i.makeDefault : null;
|
||
|
}
|
||
|
bool isInitialized(Map<int, dynamic> fieldValues) {
|
||
|
return fieldInfo.keys.every((tagNumber_A) => _isFieldInitialized(fieldValues, tagNumber_A));
|
||
|
}
|
||
|
CreateBuilderFunc subBuilder(int tagNumber_A) {
|
||
|
FieldInfo i = fieldInfo[tagNumber_A];
|
||
|
return i != null ? i.subBuilder : null;
|
||
|
}
|
||
|
int tagNumber(String fieldName_A) {
|
||
|
FieldInfo i = byName[fieldName_A];
|
||
|
return i != null ? i.tagNumber : null;
|
||
|
}
|
||
|
ValueOfFunc valueOfFunc(int tagNumber_A) {
|
||
|
FieldInfo i = fieldInfo[tagNumber_A];
|
||
|
return i != null ? i.valueOf_D : null;
|
||
|
}
|
||
|
bool _isFieldInitialized(Map<int, dynamic> fieldValues, int tagNumber_A, [int fieldType_A = null]) {
|
||
|
if (fieldType_A == null) {
|
||
|
fieldType_A = fieldInfo[tagNumber_A].type;
|
||
|
}
|
||
|
if (_isGroupOrMessage(fieldType_A)) {
|
||
|
if (_isRequired(fieldType_A)) {
|
||
|
GeneratedMessage message_A = fieldValues[tagNumber_A];
|
||
|
if (message_A == null || !message_A.isInitialized()) {
|
||
|
return false;
|
||
|
}
|
||
|
} else if (_isRepeated(fieldType_A)) {
|
||
|
if (fieldValues.containsKey(tagNumber_A)) {
|
||
|
List list_A = fieldValues[tagNumber_A];
|
||
|
if (!list_A.isEmpty && list_A[0].hasRequiredFields()) {
|
||
|
if (!list_A.every((message_A) => message_A.isInitialized())) {
|
||
|
return false;
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
} else {
|
||
|
GeneratedMessage message_A = fieldValues[tagNumber_A];
|
||
|
if (message_A != null && !message_A.isInitialized()) {
|
||
|
return false;
|
||
|
}
|
||
|
}
|
||
|
} else if (_isRequired(fieldType_A)) {
|
||
|
if (fieldValues[tagNumber_A] == null) {
|
||
|
return false;
|
||
|
}
|
||
|
}
|
||
|
return true;
|
||
|
}
|
||
|
List<String> _findInvalidFields(Map<int, dynamic> fieldValues, List<String> invalidFields, [String prefix = '']) {
|
||
|
fieldInfo.forEach((int tagNumber_A, FieldInfo field) {
|
||
|
int fieldType_A = field.type;
|
||
|
if (_isGroupOrMessage(fieldType_A)) {
|
||
|
if (_isRequired(fieldType_A)) {
|
||
|
GeneratedMessage message_A = fieldValues[tagNumber_A];
|
||
|
if (message_A == null) {
|
||
|
invalidFields.add('${prefix}${field.name}');
|
||
|
} else {
|
||
|
message_A._findInvalidFields(invalidFields, '${prefix}${field.name}.');
|
||
|
}
|
||
|
} else if (_isRepeated(fieldType_A)) {
|
||
|
if (fieldValues.containsKey(tagNumber_A)) {
|
||
|
List list_A = fieldValues[tagNumber_A];
|
||
|
if (!list_A.isEmpty && list_A[0].hasRequiredFields()) {
|
||
|
int position_A = 0;
|
||
|
for (GeneratedMessage message_A in list_A) {
|
||
|
if (message_A.hasRequiredFields()) {
|
||
|
message_A._findInvalidFields(invalidFields, '${prefix}${field.name}[${position_A}].');
|
||
|
}
|
||
|
position_A++;
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
} else {
|
||
|
GeneratedMessage message_A = fieldValues[tagNumber_A];
|
||
|
if (message_A != null) {
|
||
|
message_A._findInvalidFields(invalidFields, '${prefix}${field.name}.');
|
||
|
}
|
||
|
}
|
||
|
} else if (_isRequired(fieldType_A)) {
|
||
|
if (fieldValues[tagNumber_A] == null) {
|
||
|
invalidFields.add('${prefix}${field.name}');
|
||
|
}
|
||
|
}
|
||
|
});
|
||
|
return invalidFields;
|
||
|
}
|
||
|
}
|
||
|
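// CodedBufferReader decodes the protobuf wire format from a byte buffer,
// enforcing a recursion limit for nested messages/groups and a size limit for
// the overall input.
// Usage sketch (illustrative, not part of the original source):
//   var reader = new CodedBufferReader([0x08, 0x96, 0x01]);
//   reader.readTag();               // 0x08 -> field 1, wire type 0 (varint)
//   int value = reader.readInt32(); // 150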
class CodedBufferReader {
static const int DEFAULT_RECURSION_LIMIT = 64;
static const int DEFAULT_SIZE_LIMIT = 64 << 20;
final Uint8List _buffer_A;
int _bufferPos = 0;
int _currentLimit = -1;
int _lastTag = 0;
int _recursionDepth = 0;
final int _recursionLimit;
final int _sizeLimit;
CodedBufferReader(List<int> buffer_A, {int recursionLimit: DEFAULT_RECURSION_LIMIT, int sizeLimit: DEFAULT_SIZE_LIMIT}) : _buffer_A = buffer_A is Uint8List ? buffer_A : new Uint8List(buffer_A.length)
|
||
|
..setRange(0, buffer_A.length, buffer_A), _recursionLimit = recursionLimit, _sizeLimit = _A.min(sizeLimit, buffer_A.length) {
|
||
|
_currentLimit = _sizeLimit;
|
||
|
}
|
||
|
void checkLastTagWas(int value_A) {
|
||
|
if (_lastTag != value_A) {
|
||
|
throw new InvalidProtocolBufferException.invalidEndTag();
|
||
|
}
|
||
|
}
|
||
|
bool isAtEnd() => _bufferPos >= _currentLimit;
|
||
|
void _withLimit(int byteLimit, callback_A) {
|
||
|
if (byteLimit < 0) {
|
||
|
throw new ArgumentError('CodedBufferReader encountered an embedded string or message' ' which claimed to have negative size.');
|
||
|
}
|
||
|
byteLimit += _bufferPos;
|
||
|
int oldLimit = _currentLimit;
|
||
|
if ((oldLimit != -1 && byteLimit > oldLimit) || byteLimit > _sizeLimit) {
|
||
|
throw new InvalidProtocolBufferException.truncatedMessage();
|
||
|
}
|
||
|
_currentLimit = byteLimit;
|
||
|
callback_A();
|
||
|
_currentLimit = oldLimit;
|
||
|
}
|
||
|
void _checkLimit(int increment) {
|
||
|
assert(_currentLimit != -1);
|
||
|
_bufferPos += increment;
|
||
|
if (_bufferPos > _currentLimit) {
|
||
|
throw new InvalidProtocolBufferException.truncatedMessage();
|
||
|
}
|
||
|
}
|
||
|
void readGroup(int fieldNumber, GeneratedMessage message_A, ExtensionRegistry extensionRegistry) {
|
||
|
if (_recursionDepth >= _recursionLimit) {
|
||
|
throw new InvalidProtocolBufferException.recursionLimitExceeded();
|
||
|
}
|
||
|
++_recursionDepth;
|
||
|
message_A.mergeFromCodedBufferReader(this, extensionRegistry);
|
||
|
checkLastTagWas(makeTag(fieldNumber, WIRETYPE_END_GROUP));
|
||
|
--_recursionDepth;
|
||
|
}
|
||
|
UnknownFieldSet readUnknownFieldSetGroup(int fieldNumber) {
|
||
|
if (_recursionDepth >= _recursionLimit) {
|
||
|
throw new InvalidProtocolBufferException.recursionLimitExceeded();
|
||
|
}
|
||
|
++_recursionDepth;
|
||
|
UnknownFieldSet unknownFieldSet = new UnknownFieldSet();
|
||
|
unknownFieldSet.mergeFromCodedBufferReader(this);
|
||
|
checkLastTagWas(makeTag(fieldNumber, WIRETYPE_END_GROUP));
|
||
|
--_recursionDepth;
|
||
|
return unknownFieldSet;
|
||
|
}
|
||
|
void readMessage(GeneratedMessage message_A, ExtensionRegistry extensionRegistry) {
|
||
|
int length_A = readInt32();
|
||
|
if (_recursionDepth >= _recursionLimit) {
|
||
|
throw new InvalidProtocolBufferException.recursionLimitExceeded();
|
||
|
}
|
||
|
if (length_A < 0) {
|
||
|
throw new ArgumentError('CodedBufferReader encountered an embedded string or message' ' which claimed to have negative size.');
|
||
|
}
|
||
|
int oldLimit = _currentLimit;
|
||
|
_currentLimit = _bufferPos + length_A;
|
||
|
if (_currentLimit > oldLimit) {
|
||
|
throw new InvalidProtocolBufferException.truncatedMessage();
|
||
|
}
|
||
|
++_recursionDepth;
|
||
|
message_A.mergeFromCodedBufferReader(this, extensionRegistry);
|
||
|
checkLastTagWas(0);
|
||
|
--_recursionDepth;
|
||
|
_currentLimit = oldLimit;
|
||
|
}
int readEnum() => readInt32();
int readInt32() => _readRawVarint32();
Int64 readInt64() => _readRawVarint64();
int readUint32() => _readRawVarint32(false);
Int64 readUint64() => _readRawVarint64();
int readSint32() => _decodeZigZag32(readUint32());
Int64 readSint64() => _decodeZigZag64(readUint64());
int readFixed32() => _readByteData(4).getUint32(0, Endianness.LITTLE_ENDIAN);
Int64 readFixed64() => readSfixed64();
int readSfixed32() => _readByteData(4).getInt32(0, Endianness.LITTLE_ENDIAN);
Int64 readSfixed64() {
|
||
|
var data_A = _readByteData(8);
|
||
|
var view_A = new Uint8List.view(data_A.buffer, data_A.offsetInBytes, 8);
|
||
|
return new Int64.fromBytes(view_A);
|
||
|
}
|
||
|
bool readBool() => _readRawVarint32() != 0;
List<int> readBytes() {
int length_A = readInt32();
_checkLimit(length_A);
return new Uint8List.view(_buffer_A.buffer, _buffer_A.offsetInBytes + _bufferPos - length_A, length_A);
}
String readString() => _UTF8.decode(readBytes());
double readFloat() => _readByteData(4).getFloat32(0, Endianness.LITTLE_ENDIAN);
double readDouble() => _readByteData(8).getFloat64(0, Endianness.LITTLE_ENDIAN);
int readTag() {
|
||
|
if (isAtEnd()) {
|
||
|
_lastTag = 0;
|
||
|
return 0;
|
||
|
}
|
||
|
_lastTag = readInt32();
|
||
|
if (getTagFieldNumber(_lastTag) == 0) {
|
||
|
throw new InvalidProtocolBufferException.invalidTag();
|
||
|
}
|
||
|
return _lastTag;
|
||
|
}
|
||
|
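// ZigZag decoding maps the non-negative varint domain back onto signed values:
// 0 -> 0, 1 -> -1, 2 -> 1, 3 -> -2, and so on.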
static int _decodeZigZag32(int value_A) {
if ((value_A & 0x1) == 1) value_A = -value_A;
return value_A >> 1;
}
static Int64 _decodeZigZag64(Int64 value_A) {
if ((value_A & 0x1) == 1) value_A = -value_A;
return value_A >> 1;
}
int _readRawVarintByte() {
_checkLimit(1);
return _buffer_A[_bufferPos - 1];
}
|
||
|
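// Reads a base-128 varint: each byte contributes 7 low-order bits and a
// cleared high bit marks the last byte. The result is masked to 32 bits and,
// when `signed` is true, sign-extended from bit 31.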
int _readRawVarint32([bool signed = true]) {
|
||
|
int bytes = _currentLimit - _bufferPos;
|
||
|
if (bytes > 10) bytes = 10;
|
||
|
int result_A = 0;
|
||
|
for (int i = 0; i < bytes; i++) {
|
||
|
int byte = _buffer_A[_bufferPos++];
|
||
|
result_A |= (byte & 0x7f) << (i * 7);
|
||
|
if ((byte & 0x80) == 0) {
|
||
|
result_A &= 0xffffffff;
|
||
|
return signed ? result_A - 2 * (0x80000000 & result_A) : result_A;
|
||
|
}
|
||
|
}
|
||
|
throw new InvalidProtocolBufferException.malformedVarint();
|
||
|
}
|
||
|
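// Reads a 64-bit varint, accumulating the low 32 bits in `lo` and the high
// bits in `hi` before combining them with Int64.fromInts.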
Int64 _readRawVarint64() {
|
||
|
int lo = 0;
|
||
|
int hi = 0;
|
||
|
for (int i = 0; i < 4; i++) {
|
||
|
int byte = _readRawVarintByte();
|
||
|
lo |= (byte & 0x7f) << (i * 7);
|
||
|
if ((byte & 0x80) == 0) return new Int64.fromInts(hi, lo);
|
||
|
}
|
||
|
int byte = _readRawVarintByte();
|
||
|
lo |= (byte & 0xf) << 28;
|
||
|
hi = (byte >> 4) & 0x7;
|
||
|
if ((byte & 0x80) == 0) {
|
||
|
return new Int64.fromInts(hi, lo);
|
||
|
}
|
||
|
for (int i = 0; i < 5; i++) {
|
||
|
int byte = _readRawVarintByte();
|
||
|
hi |= (byte & 0x7f) << ((i * 7) + 3);
|
||
|
if ((byte & 0x80) == 0) return new Int64.fromInts(hi, lo);
|
||
|
}
|
||
|
throw new InvalidProtocolBufferException.malformedVarint();
|
||
|
}
|
||
|
ByteData _readByteData(int sizeInBytes) {
|
||
|
_checkLimit(sizeInBytes);
|
||
|
return new ByteData.view(_buffer_A.buffer, _buffer_A.offsetInBytes + _bufferPos - sizeInBytes, sizeInBytes);
|
||
|
}
|
||
|
}
|
||
|
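// CodedBufferWriter accumulates encoded chunks as TypedData segments and only
// concatenates them into a single Uint8List in toBuffer(); length prefixes for
// nested messages are patched in afterwards via _withDeferredSizeCalculation.
// Illustrative sketch (not from the original source; assumes makeTag produces
// the standard (fieldNumber << 3) | wireType encoding):
//   var out = new CodedBufferWriter();
//   out.writeField(1, PbFieldType._OPTIONAL_INT32, 150);
//   out.toBuffer(); // [0x08, 0x96, 0x01]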
class CodedBufferWriter {
|
||
|
final List<TypedData> _output = <TypedData>[];
|
||
|
int _runningSizeInBytes = 0;
|
||
|
static final _WRITE_FUNCTION_MAP = _makeWriteFunctionMap();
static ByteData _toVarint32(int value_A) {
Uint8List result_A = new Uint8List(5);
int i = 0;
while (value_A >= 0x80) {
result_A[i++] = 0x80 | (value_A & 0x7f);
value_A >>= 7;
}
result_A[i++] = value_A;
return new ByteData.view(result_A.buffer, 0, i);
}
|
||
|
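// Encodes an Int64 as a varint by splitting it into two little-endian 32-bit
// halves and shifting bits from `hi` into `lo` seven at a time.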
static ByteData _toVarint64(Int64 value_A) {
|
||
|
Uint8List result_A = new Uint8List(10);
|
||
|
int i = 0;
|
||
|
ByteData bytes = new ByteData.view(new Uint8List.fromList(value_A.toBytes()).buffer, 0, 8);
|
||
|
int lo = bytes.getUint32(0, Endianness.LITTLE_ENDIAN);
|
||
|
int hi = bytes.getUint32(4, Endianness.LITTLE_ENDIAN);
|
||
|
while (hi > 0 || lo >= 0x80) {
|
||
|
result_A[i++] = 0x80 | (lo & 0x7f);
|
||
|
lo = (lo >> 7) | ((hi & 0x7f) << 25);
|
||
|
hi >>= 7;
|
||
|
}
|
||
|
result_A[i++] = lo;
|
||
|
return new ByteData.view(result_A.buffer, 0, i);
|
||
|
}
|
||
|
static ByteData _int32ToBytes(int value_A) => _toVarint32(value_A & 0xffffffff);
|
||
|
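// Builds the table mapping each base field type to a function that writes an
// untagged value: enums are written by their numeric value, floats and doubles
// get explicit NaN/infinity handling, and nested messages defer their length
// prefix until their contents have been written.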
static _makeWriteFunctionMap() {
|
||
|
writeBytesNoTag(output, List<int> value_A) {
|
||
|
output.writeInt32NoTag(value_A.length);
|
||
|
output.writeRawBytes(new Uint8List(value_A.length)
|
||
|
..setRange(0, value_A.length, value_A));
|
||
|
}
|
||
|
makeWriter(convertor) => ((output, value_A) {
|
||
|
output.writeRawBytes(convertor(value_A));
|
||
|
});
|
||
|
int _encodeZigZag32(int value_A) => (value_A << 1) ^ (value_A >> 31);
|
||
|
Int64 _encodeZigZag64(Int64 value_A) => (value_A << 1) ^ (value_A >> 63);
|
||
|
ByteData makeByteData32(int value_A) => new ByteData(4)
|
||
|
..setUint32(0, value_A, Endianness.LITTLE_ENDIAN);
|
||
|
ByteData makeByteData64(Int64 value_A) {
|
||
|
var data_A = new Uint8List.fromList(value_A.toBytes());
|
||
|
return new ByteData.view(data_A.buffer, 0, 8);
|
||
|
}
|
||
|
return new Map<int, dynamic>()
|
||
|
..[PbFieldType._BOOL_BIT] = makeWriter((value_A) => _int32ToBytes(value_A ? 1 : 0))
|
||
|
..[PbFieldType._BYTES_BIT] = writeBytesNoTag
|
||
|
..[PbFieldType._STRING_BIT] = (output, value_A) {
|
||
|
writeBytesNoTag(output, _UTF8.encode(value_A));
|
||
|
}
|
||
|
..[PbFieldType._DOUBLE_BIT] = makeWriter((double value_A) {
|
||
|
if (value_A.isNaN) return new ByteData(8)
|
||
|
..setUint32(0, 0x00000000, Endianness.LITTLE_ENDIAN)
|
||
|
..setUint32(4, 0x7ff80000, Endianness.LITTLE_ENDIAN);
|
||
|
return new ByteData(8)
|
||
|
..setFloat64(0, value_A, Endianness.LITTLE_ENDIAN);
|
||
|
})
|
||
|
..[PbFieldType._FLOAT_BIT] = makeWriter((double value_A) {
|
||
|
const double MIN_FLOAT_DENORM = 1.401298464324817E-45;
|
||
|
const double MAX_FLOAT = 3.4028234663852886E38;
|
||
|
if (value_A.isNaN) return makeByteData32(0x7fc00000);
|
||
|
if (value_A.abs() < MIN_FLOAT_DENORM) {
|
||
|
return makeByteData32(value_A.isNegative ? 0x80000000 : 0x00000000);
|
||
|
}
|
||
|
if (value_A.isInfinite || value_A.abs() > MAX_FLOAT) {
|
||
|
return makeByteData32(value_A.isNegative ? 0xff800000 : 0x7f800000);
|
||
|
}
|
||
|
return new ByteData(4)
|
||
|
..setFloat32(0, value_A, Endianness.LITTLE_ENDIAN);
|
||
|
})
|
||
|
..[PbFieldType._ENUM_BIT] = makeWriter((value_A) => _int32ToBytes(value_A.value))
|
||
|
..[PbFieldType._GROUP_BIT] = (output, value_A) {
|
||
|
value_A.writeToCodedBufferWriter(output);
|
||
|
}
|
||
|
..[PbFieldType._INT32_BIT] = makeWriter(_int32ToBytes)
|
||
|
..[PbFieldType._INT64_BIT] = makeWriter((value_A) => _toVarint64(value_A))
|
||
|
..[PbFieldType._SINT32_BIT] = makeWriter((int value_A) => _int32ToBytes(_encodeZigZag32(value_A)))
|
||
|
..[PbFieldType._SINT64_BIT] = makeWriter((Int64 value_A) => _toVarint64(_encodeZigZag64(value_A)))
|
||
|
..[PbFieldType._UINT32_BIT] = makeWriter(_toVarint32)
|
||
|
..[PbFieldType._UINT64_BIT] = makeWriter(_toVarint64)
|
||
|
..[PbFieldType._FIXED32_BIT] = makeWriter(makeByteData32)
|
||
|
..[PbFieldType._FIXED64_BIT] = makeWriter(makeByteData64)
|
||
|
..[PbFieldType._SFIXED32_BIT] = makeWriter(makeByteData32)
|
||
|
..[PbFieldType._SFIXED64_BIT] = makeWriter(makeByteData64)
|
||
|
..[PbFieldType._MESSAGE_BIT] = (output, value_A) {
|
||
|
output._withDeferredSizeCalculation(() {
|
||
|
value_A.writeToCodedBufferWriter(output);
|
||
|
});
|
||
|
};
|
||
|
}
|
||
|
static final _OPEN_TAG_MAP = _makeOpenTagMap();
static _makeOpenTagMap() {
return new Map<int, int>()
..[PbFieldType._BOOL_BIT] = WIRETYPE_VARINT
..[PbFieldType._BYTES_BIT] = WIRETYPE_LENGTH_DELIMITED
..[PbFieldType._STRING_BIT] = WIRETYPE_LENGTH_DELIMITED
..[PbFieldType._DOUBLE_BIT] = WIRETYPE_FIXED64
..[PbFieldType._FLOAT_BIT] = WIRETYPE_FIXED32
..[PbFieldType._ENUM_BIT] = WIRETYPE_VARINT
..[PbFieldType._GROUP_BIT] = WIRETYPE_START_GROUP
..[PbFieldType._INT32_BIT] = WIRETYPE_VARINT
..[PbFieldType._INT64_BIT] = WIRETYPE_VARINT
..[PbFieldType._SINT32_BIT] = WIRETYPE_VARINT
..[PbFieldType._SINT64_BIT] = WIRETYPE_VARINT
..[PbFieldType._UINT32_BIT] = WIRETYPE_VARINT
..[PbFieldType._UINT64_BIT] = WIRETYPE_VARINT
..[PbFieldType._FIXED32_BIT] = WIRETYPE_FIXED32
..[PbFieldType._FIXED64_BIT] = WIRETYPE_FIXED64
..[PbFieldType._SFIXED32_BIT] = WIRETYPE_FIXED32
..[PbFieldType._SFIXED64_BIT] = WIRETYPE_FIXED64
..[PbFieldType._MESSAGE_BIT] = WIRETYPE_LENGTH_DELIMITED;
}
|
||
|
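// Reserves a placeholder slot in _output, runs the continuation, then backfills
// the slot with the varint-encoded number of bytes the continuation produced.
// This is how length-delimited fields get their size prefix in a single pass.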
void _withDeferredSizeCalculation(continuation) {
|
||
|
int index_A = _output.length;
|
||
|
_output.add(null);
|
||
|
int currentRunningSizeInBytes = _runningSizeInBytes;
|
||
|
continuation();
|
||
|
int writtenSizeInBytes = _runningSizeInBytes - currentRunningSizeInBytes;
|
||
|
TypedData sizeMarker = _int32ToBytes(writtenSizeInBytes);
|
||
|
_output[index_A] = sizeMarker;
|
||
|
_runningSizeInBytes += sizeMarker.lengthInBytes;
|
||
|
}
|
||
|
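// Writes one field: packed repeated fields are emitted as a single
// length-delimited record, plain repeated fields emit one tagged value per
// element, and groups are bracketed with start-group/end-group tags.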
void writeField(int fieldNumber, int fieldType, fieldValue) {
|
||
|
var valueType = fieldType & ~0x07;
|
||
|
var writeFunction = _WRITE_FUNCTION_MAP[valueType];
|
||
|
writeTag(int wireFormat) {
|
||
|
writeInt32NoTag(makeTag(fieldNumber, wireFormat));
|
||
|
}
|
||
|
if ((fieldType & PbFieldType._PACKED_BIT) != 0) {
|
||
|
if (!fieldValue.isEmpty) {
|
||
|
writeTag(WIRETYPE_LENGTH_DELIMITED);
|
||
|
_withDeferredSizeCalculation(() {
|
||
|
for (var value_A in fieldValue) {
|
||
|
writeFunction(this, value_A);
|
||
|
}
|
||
|
});
|
||
|
}
|
||
|
return;
|
||
|
}
|
||
|
writeValue(value_A) {
|
||
|
writeTag(_OPEN_TAG_MAP[valueType]);
|
||
|
writeFunction(this, value_A);
|
||
|
if (valueType == PbFieldType._GROUP_BIT) {
|
||
|
writeTag(WIRETYPE_END_GROUP);
|
||
|
}
|
||
|
}
|
||
|
if ((fieldType & PbFieldType._REPEATED_BIT) != 0) {
|
||
|
fieldValue.forEach(writeValue);
|
||
|
return;
|
||
|
}
|
||
|
writeValue(fieldValue);
|
||
|
}
|
||
|
void writeInt32NoTag(int value_A) {
|
||
|
writeRawBytes(_int32ToBytes(value_A));
|
||
|
}
|
||
|
void writeRawBytes(TypedData value_A) {
|
||
|
_output.add(value_A);
|
||
|
_runningSizeInBytes += value_A.lengthInBytes;
|
||
|
}
|
||
|
Uint8List toBuffer() {
|
||
|
Uint8List result_A = new Uint8List(_runningSizeInBytes);
|
||
|
int position_A = 0;
|
||
|
for (var typedData in _output) {
|
||
|
Uint8List asBytes = new Uint8List.view(typedData.buffer, typedData.offsetInBytes, typedData.lengthInBytes);
|
||
|
result_A.setRange(position_A, position_A + typedData.lengthInBytes, asBytes);
|
||
|
position_A += typedData.lengthInBytes;
|
||
|
}
|
||
|
return result_A;
|
||
|
}
|
||
|
}
|
||
|
abstract class EventPlugin {}
|
||
|
class InvalidProtocolBufferException implements Exception {
|
||
|
final String message;
|
||
|
InvalidProtocolBufferException.__G(this.message);
|
||
|
String toString() => 'InvalidProtocolBufferException: ${message}';
|
||
|
InvalidProtocolBufferException.invalidEndTag() : this.__G('Protocol message end-group tag did not match expected tag.');
|
||
|
InvalidProtocolBufferException.invalidTag() : this.__G('Protocol message contained an invalid tag (zero).');
|
||
|
InvalidProtocolBufferException.invalidWireType() : this.__G('Protocol message tag had invalid wire type.');
|
||
|
InvalidProtocolBufferException.malformedVarint() : this.__G('CodedBufferReader encountered a malformed varint.');
|
||
|
InvalidProtocolBufferException.recursionLimitExceeded() : this.__G('''
|
||
|
Protocol message had too many levels of nesting. May be malicious.
|
||
|
Increase the recursionLimit constructor argument of CodedBufferReader to raise the depth limit.
|
||
|
''');
|
||
|
InvalidProtocolBufferException.truncatedMessage() : this.__G('''
|
||
|
While parsing a protocol message, the input ended unexpectedly
|
||
|
in the middle of a field. This could mean either that the
|
||
|
input has been truncated or that an embedded message
|
||
|
misreported its own length.
|
||
|
''');
|
||
|
}
|
||
|
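// An Extension is a FieldInfo that also records the name of the message it
// extends; equality and hashCode are based on (extendee, tagNumber).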
class Extension extends FieldInfo {
|
||
|
final String extendee;
|
||
|
Extension(this.extendee, String name_A, int tagNumber, int fieldType, [dynamic defaultOrMaker, CreateBuilderFunc subBuilder, ValueOfFunc valueOf_D]) : super(name_A, tagNumber, fieldType, defaultOrMaker, subBuilder, valueOf_D);
|
||
|
int get hashCode => extendee.hashCode * 31 + tagNumber;
|
||
|
bool operator==(other) {
|
||
|
if (other is! Extension) return false;
|
||
|
Extension o = other;
|
||
|
return extendee == o.extendee && tagNumber == o.tagNumber;
|
||
|
}
|
||
|
}
|
||
|
class ExtensionRegistry {
|
||
|
final Map<String, Map<int, Extension>> _extensions = <String, Map<int, Extension>>{};
|
||
|
static const ExtensionRegistry EMPTY = const _EmptyExtensionRegistry();
|
||
|
void add(Extension extension) {
|
||
|
var map_A = _extensions.putIfAbsent(extension.extendee, () => new Map<int, Extension>());
|
||
|
map_A[extension.tagNumber] = extension;
|
||
|
}
|
||
|
Extension getExtension(String messageName, int tagNumber) {
|
||
|
var map_A = _extensions[messageName];
|
||
|
if (map_A != null) {
|
||
|
return map_A[tagNumber];
|
||
|
}
|
||
|
return null;
|
||
|
}
|
||
|
}
|
||
|
class _EmptyExtensionRegistry implements ExtensionRegistry {
|
||
|
const _EmptyExtensionRegistry();
|
||
|
get _extensions => null;
|
||
|
void add(Extension extension) {
|
||
|
throw new UnsupportedError('Immutable ExtensionRegistry');
|
||
|
}
|
||
|
Extension getExtension(String messageName, int tagNumber) => null;
|
||
|
}
|
||
|
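// Returns a description of why `value_A` does not fit the given field type,
// or null when the value is acceptable.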
String _getFieldError(int fieldType, var value_A) {
|
||
|
switch (PbFieldType._baseType(fieldType)) {
|
||
|
case PbFieldType._BOOL_BIT:
|
||
|
if (value_A is! bool) return 'not type bool';
|
||
|
return null;
|
||
|
|
||
|
case PbFieldType._BYTES_BIT:
|
||
|
if (value_A is! List) return 'not List';
|
||
|
return null;
|
||
|
|
||
|
case PbFieldType._STRING_BIT:
|
||
|
if (value_A is! String) return 'not type String';
|
||
|
return null;
|
||
|
|
||
|
case PbFieldType._FLOAT_BIT:
|
||
|
if (value_A is! double) return 'not type double';
|
||
|
if (!_isFloat32(value_A)) return 'out of range for float';
|
||
|
return null;
|
||
|
|
||
|
case PbFieldType._DOUBLE_BIT:
|
||
|
if (value_A is! double) return 'not type double';
|
||
|
return null;
|
||
|
|
||
|
case PbFieldType._ENUM_BIT:
|
||
|
if (value_A is! ProtobufEnum) return 'not type ProtobufEnum';
|
||
|
return null;
|
||
|
|
||
|
case PbFieldType._INT32_BIT: case PbFieldType._SINT32_BIT: case PbFieldType._SFIXED32_BIT:
|
||
|
if (value_A is! int) return 'not type int';
|
||
|
if (!_isSigned32(value_A)) return 'out of range for signed 32-bit int';
|
||
|
return null;
|
||
|
|
||
|
case PbFieldType._UINT32_BIT: case PbFieldType._FIXED32_BIT:
|
||
|
if (value_A is! int) return 'not type int';
|
||
|
if (!_isUnsigned32(value_A)) return 'out of range for unsigned 32-bit int';
|
||
|
return null;
|
||
|
|
||
|
case PbFieldType._INT64_BIT: case PbFieldType._SINT64_BIT: case PbFieldType._UINT64_BIT: case PbFieldType._FIXED64_BIT: case PbFieldType._SFIXED64_BIT:
|
||
|
if (value_A is! Int64) return 'not Int64';
|
||
|
return null;
|
||
|
|
||
|
case PbFieldType._GROUP_BIT: case PbFieldType._MESSAGE_BIT:
|
||
|
if (value_A is! GeneratedMessage) return 'not a GeneratedMessage';
|
||
|
return null;
|
||
|
|
||
|
default:
|
||
|
return 'field has unknown type ${fieldType}';
|
||
|
}
|
||
|
}
|
||
|
void checkItemFailed(val, String className) {
|
||
|
throw new ArgumentError('Value (${val}) is not an instance of ${className}');
|
||
|
}
|
||
|
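// Maps a field's base type to the validator applied when values are added to
// a repeated field (PbList).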
CheckFunc getCheckFunction(int fieldType) {
|
||
|
switch (fieldType & ~0x7) {
|
||
|
case PbFieldType._BOOL_BIT:
|
||
|
return _checkBool;
|
||
|
|
||
|
case PbFieldType._BYTES_BIT:
|
||
|
return _checkBytes;
|
||
|
|
||
|
case PbFieldType._STRING_BIT:
|
||
|
return _checkString;
|
||
|
|
||
|
case PbFieldType._FLOAT_BIT:
|
||
|
return _checkFloat;
|
||
|
|
||
|
case PbFieldType._DOUBLE_BIT:
|
||
|
return _checkDouble;
|
||
|
|
||
|
case PbFieldType._INT32_BIT: case PbFieldType._SINT32_BIT: case PbFieldType._SFIXED32_BIT:
|
||
|
return _checkSigned32;
|
||
|
|
||
|
case PbFieldType._UINT32_BIT: case PbFieldType._FIXED32_BIT:
|
||
|
return _checkUnsigned32;
|
||
|
|
||
|
case PbFieldType._INT64_BIT: case PbFieldType._SINT64_BIT: case PbFieldType._SFIXED64_BIT: case PbFieldType._UINT64_BIT: case PbFieldType._FIXED64_BIT:
|
||
|
return _checkAnyInt64;
|
||
|
|
||
|
case PbFieldType._ENUM_BIT:
|
||
|
return _checkAnyEnum;
|
||
|
|
||
|
case PbFieldType._GROUP_BIT: case PbFieldType._MESSAGE_BIT:
|
||
|
return _checkAnyMessage;
|
||
|
}
|
||
|
throw new ArgumentError('check function not implemented: ${fieldType}');
|
||
|
}
|
||
|
void _checkNotNull(val) {
|
||
|
if (val == null) {
|
||
|
throw new ArgumentError("Can't add a null to a repeated field");
|
||
|
}
|
||
|
}
|
||
|
void _checkBool(bool val) {
|
||
|
if (val is! bool) throw _createFieldTypeError(val, 'a bool');
|
||
|
}
|
||
|
void _checkBytes(List<int> val) {
|
||
|
if (val is! List<int>) throw _createFieldTypeError(val, 'a List<int>');
|
||
|
}
|
||
|
void _checkString(String val) {
|
||
|
if (val is! String) throw _createFieldTypeError(val, 'a String');
|
||
|
}
|
||
|
void _checkFloat(double val) {
|
||
|
_checkDouble(val);
|
||
|
if (!_isFloat32(val)) throw _createFieldRangeError(val, 'a float');
|
||
|
}
|
||
|
void _checkDouble(double val) {
|
||
|
if (val is! double) throw _createFieldTypeError(val, 'a double');
|
||
|
}
|
||
|
void _checkInt(int val) {
|
||
|
if (val is! int) throw _createFieldTypeError(val, 'an int');
|
||
|
}
|
||
|
void _checkSigned32(int val) {
|
||
|
_checkInt(val);
|
||
|
if (!_isSigned32(val)) throw _createFieldRangeError(val, 'a signed int32');
|
||
|
}
|
||
|
void _checkUnsigned32(int val) {
|
||
|
_checkInt(val);
|
||
|
if (!_isUnsigned32(val)) {
|
||
|
throw _createFieldRangeError(val, 'an unsigned int32');
|
||
|
}
|
||
|
}
|
||
|
void _checkAnyInt64(Int64 val) {
|
||
|
if (val is! Int64) throw _createFieldTypeError(val, 'an Int64');
|
||
|
}
|
||
|
_checkAnyEnum(ProtobufEnum val) {
|
||
|
if (val is! ProtobufEnum) throw _createFieldTypeError(val, 'a ProtobufEnum');
|
||
|
}
|
||
|
_checkAnyMessage(GeneratedMessage val) {
|
||
|
if (val is! GeneratedMessage) {
|
||
|
throw _createFieldTypeError(val, 'a GeneratedMessage');
|
||
|
}
|
||
|
}
|
||
|
ArgumentError _createFieldTypeError(val, String wantedType) => new ArgumentError('Value (${val}) is not ${wantedType}');
|
||
|
RangeError _createFieldRangeError(val, String wantedType) => new RangeError('Value (${val}) is not ${wantedType}');
|
||
|
bool _inRange(min_A, value_A, max_A) => (min_A <= value_A) && (value_A <= max_A);
|
||
|
bool _isSigned32(int value_A) => _inRange(-2147483648, value_A, 2147483647);
|
||
|
bool _isUnsigned32(int value_A) => _inRange(0, value_A, 4294967295);
|
||
|
bool _isFloat32(double value_A) => value_A.isNaN || value_A.isInfinite || _inRange(-3.4028234663852886E38, value_A, 3.4028234663852886E38);
|
||
|
class FieldInfo {
|
||
|
final String name;
|
||
|
final int tagNumber;
|
||
|
final int type;
|
||
|
final MakeDefaultFunc makeDefault;
|
||
|
final CreateBuilderFunc subBuilder;
|
||
|
final ValueOfFunc valueOf_D;
|
||
|
final CheckFunc check;
|
||
|
FieldInfo(this.name, this.tagNumber, int type_A, [dynamic defaultOrMaker, this.subBuilder, this.valueOf_D]) : this.type = type_A, this.makeDefault = findMakeDefault(type_A, defaultOrMaker), this.check = null {
|
||
|
assert(!_isGroupOrMessage(type_A) || subBuilder != null);
|
||
|
assert(!_isEnum(type_A) || valueOf_D != null);
|
||
|
}
|
||
|
FieldInfo.repeated(this.name, this.tagNumber, int type_A, CheckFunc check_A, this.subBuilder, [this.valueOf_D]) : this.type = type_A, this.check = check_A, this.makeDefault = (() => new PbList(check: check_A)) {
|
||
|
assert(name != null);
|
||
|
assert(tagNumber != null);
|
||
|
assert(_isRepeated(type_A));
|
||
|
assert(check_A != null);
|
||
|
assert(!_isEnum(type_A) || valueOf_D != null);
|
||
|
}
|
||
|
static MakeDefaultFunc findMakeDefault(int type_A, dynamic defaultOrMaker) {
|
||
|
if (defaultOrMaker == null) return PbFieldType._defaultForType(type_A);
|
||
|
if (defaultOrMaker is MakeDefaultFunc) return defaultOrMaker;
|
||
|
return () => defaultOrMaker;
|
||
|
}
|
||
|
bool get isRepeated => _isRepeated(type);
|
||
|
String toString() => name;
|
||
|
}
bool _isRepeated(int fieldType) => (fieldType & PbFieldType._REPEATED_BIT) != 0;
bool _isRequired(int fieldType) => (fieldType & PbFieldType._REQUIRED_BIT) != 0;
bool _isEnum(int fieldType) => PbFieldType._baseType(fieldType) == PbFieldType._ENUM_BIT;
bool _isGroupOrMessage(int fieldType) => (fieldType & (PbFieldType._GROUP_BIT | PbFieldType._MESSAGE_BIT)) != 0;
|
||
|
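// PbFieldType encodes a field's label and base type as bit flags:
// _REQUIRED_BIT, _REPEATED_BIT and _PACKED_BIT describe the label, the higher
// bits select the scalar/enum/group/message kind, and the short constants
// (OS_A, O3, PM, K3, ...) are the combinations referenced by the generated
// BuilderInfo entries.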
class PbFieldType {
|
||
|
static int _baseType(int fieldType) => fieldType & ~(_REQUIRED_BIT | _REPEATED_BIT | _PACKED_BIT);
|
||
|
static MakeDefaultFunc _defaultForType(int type_A) {
|
||
|
switch (type_A) {
|
||
|
case _OPTIONAL_BOOL: case _REQUIRED_BOOL:
|
||
|
return _BOOL_FALSE;
|
||
|
|
||
|
case _OPTIONAL_BYTES: case _REQUIRED_BYTES:
|
||
|
return _BYTES_EMPTY;
|
||
|
|
||
|
case _OPTIONAL_STRING: case _REQUIRED_STRING:
|
||
|
return _STRING_EMPTY;
|
||
|
|
||
|
case _OPTIONAL_FLOAT: case _REQUIRED_FLOAT: case _OPTIONAL_DOUBLE: case _REQUIRED_DOUBLE:
|
||
|
return _DOUBLE_ZERO;
|
||
|
|
||
|
case _OPTIONAL_INT32: case _REQUIRED_INT32: case _OPTIONAL_INT64: case _REQUIRED_INT64: case _OPTIONAL_SINT32: case _REQUIRED_SINT32: case _OPTIONAL_SINT64: case _REQUIRED_SINT64: case _OPTIONAL_UINT32: case _REQUIRED_UINT32: case _OPTIONAL_UINT64: case _REQUIRED_UINT64: case _OPTIONAL_FIXED32: case _REQUIRED_FIXED32: case _OPTIONAL_FIXED64: case _REQUIRED_FIXED64: case _OPTIONAL_SFIXED32: case _REQUIRED_SFIXED32: case _OPTIONAL_SFIXED64: case _REQUIRED_SFIXED64:
|
||
|
return _INT_ZERO;
|
||
|
|
||
|
default:
|
||
|
return null;
|
||
|
}
|
||
|
}
|
||
|
static String _STRING_EMPTY() => '';
|
||
|
static List<int> _BYTES_EMPTY() => new PbList(check: _checkInt);
|
||
|
static bool _BOOL_FALSE() => false;
|
||
|
static int _INT_ZERO() => 0;
|
||
|
static double _DOUBLE_ZERO() => 0.0;
|
||
|
static const int _REQUIRED_BIT = 0x1;
|
||
|
static const int _REPEATED_BIT = 0x2;
|
||
|
static const int _PACKED_BIT = 0x4;
|
||
|
static const int _BOOL_BIT = 0x10;
|
||
|
static const int _BYTES_BIT = 0x20;
|
||
|
static const int _STRING_BIT = 0x40;
|
||
|
static const int _DOUBLE_BIT = 0x80;
|
||
|
static const int _FLOAT_BIT = 0x100;
|
||
|
static const int _ENUM_BIT = 0x200;
|
||
|
static const int _GROUP_BIT = 0x400;
|
||
|
static const int _INT32_BIT = 0x800;
|
||
|
static const int _INT64_BIT = 0x1000;
|
||
|
static const int _SINT32_BIT = 0x2000;
|
||
|
static const int _SINT64_BIT = 0x4000;
|
||
|
static const int _UINT32_BIT = 0x8000;
|
||
|
static const int _UINT64_BIT = 0x10000;
|
||
|
static const int _FIXED32_BIT = 0x20000;
|
||
|
static const int _FIXED64_BIT = 0x40000;
|
||
|
static const int _SFIXED32_BIT = 0x80000;
|
||
|
static const int _SFIXED64_BIT = 0x100000;
|
||
|
static const int _MESSAGE_BIT = 0x200000;
|
||
|
static const int _OPTIONAL_BOOL = _BOOL_BIT;
|
||
|
static const int _OPTIONAL_BYTES = _BYTES_BIT;
|
||
|
static const int _OPTIONAL_STRING = _STRING_BIT;
|
||
|
static const int _OPTIONAL_FLOAT = _FLOAT_BIT;
|
||
|
static const int _OPTIONAL_DOUBLE = _DOUBLE_BIT;
|
||
|
static const int _OPTIONAL_ENUM = _ENUM_BIT;
|
||
|
static const int _OPTIONAL_GROUP = _GROUP_BIT;
|
||
|
static const int _OPTIONAL_INT32 = _INT32_BIT;
|
||
|
static const int _OPTIONAL_INT64 = _INT64_BIT;
|
||
|
static const int _OPTIONAL_SINT32 = _SINT32_BIT;
|
||
|
static const int _OPTIONAL_SINT64 = _SINT64_BIT;
|
||
|
static const int _OPTIONAL_UINT32 = _UINT32_BIT;
|
||
|
static const int _OPTIONAL_UINT64 = _UINT64_BIT;
|
||
|
static const int _OPTIONAL_FIXED32 = _FIXED32_BIT;
|
||
|
static const int _OPTIONAL_FIXED64 = _FIXED64_BIT;
|
||
|
static const int _OPTIONAL_SFIXED32 = _SFIXED32_BIT;
|
||
|
static const int _OPTIONAL_SFIXED64 = _SFIXED64_BIT;
|
||
|
static const int _OPTIONAL_MESSAGE = _MESSAGE_BIT;
|
||
|
static const int _REQUIRED_BOOL = _REQUIRED_BIT | _BOOL_BIT;
|
||
|
static const int _REQUIRED_BYTES = _REQUIRED_BIT | _BYTES_BIT;
|
||
|
static const int _REQUIRED_STRING = _REQUIRED_BIT | _STRING_BIT;
|
||
|
static const int _REQUIRED_FLOAT = _REQUIRED_BIT | _FLOAT_BIT;
|
||
|
static const int _REQUIRED_DOUBLE = _REQUIRED_BIT | _DOUBLE_BIT;
|
||
|
static const int _REQUIRED_INT32 = _REQUIRED_BIT | _INT32_BIT;
|
||
|
static const int _REQUIRED_INT64 = _REQUIRED_BIT | _INT64_BIT;
|
||
|
static const int _REQUIRED_SINT32 = _REQUIRED_BIT | _SINT32_BIT;
|
||
|
static const int _REQUIRED_SINT64 = _REQUIRED_BIT | _SINT64_BIT;
|
||
|
static const int _REQUIRED_UINT32 = _REQUIRED_BIT | _UINT32_BIT;
|
||
|
static const int _REQUIRED_UINT64 = _REQUIRED_BIT | _UINT64_BIT;
|
||
|
static const int _REQUIRED_FIXED32 = _REQUIRED_BIT | _FIXED32_BIT;
|
||
|
static const int _REQUIRED_FIXED64 = _REQUIRED_BIT | _FIXED64_BIT;
|
||
|
static const int _REQUIRED_SFIXED32 = _REQUIRED_BIT | _SFIXED32_BIT;
|
||
|
static const int _REQUIRED_SFIXED64 = _REQUIRED_BIT | _SFIXED64_BIT;
|
||
|
static const int _REPEATED_BOOL = _REPEATED_BIT | _BOOL_BIT;
|
||
|
static const int _REPEATED_BYTES = _REPEATED_BIT | _BYTES_BIT;
|
||
|
static const int _REPEATED_STRING = _REPEATED_BIT | _STRING_BIT;
|
||
|
static const int _REPEATED_FLOAT = _REPEATED_BIT | _FLOAT_BIT;
|
||
|
static const int _REPEATED_DOUBLE = _REPEATED_BIT | _DOUBLE_BIT;
|
||
|
static const int _REPEATED_ENUM = _REPEATED_BIT | _ENUM_BIT;
|
||
|
static const int _REPEATED_GROUP = _REPEATED_BIT | _GROUP_BIT;
|
||
|
static const int _REPEATED_INT32 = _REPEATED_BIT | _INT32_BIT;
|
||
|
static const int _REPEATED_INT64 = _REPEATED_BIT | _INT64_BIT;
|
||
|
static const int _REPEATED_SINT32 = _REPEATED_BIT | _SINT32_BIT;
|
||
|
static const int _REPEATED_SINT64 = _REPEATED_BIT | _SINT64_BIT;
|
||
|
static const int _REPEATED_UINT32 = _REPEATED_BIT | _UINT32_BIT;
|
||
|
static const int _REPEATED_UINT64 = _REPEATED_BIT | _UINT64_BIT;
|
||
|
static const int _REPEATED_FIXED32 = _REPEATED_BIT | _FIXED32_BIT;
|
||
|
static const int _REPEATED_FIXED64 = _REPEATED_BIT | _FIXED64_BIT;
|
||
|
static const int _REPEATED_SFIXED32 = _REPEATED_BIT | _SFIXED32_BIT;
|
||
|
static const int _REPEATED_SFIXED64 = _REPEATED_BIT | _SFIXED64_BIT;
|
||
|
static const int _REPEATED_MESSAGE = _REPEATED_BIT | _MESSAGE_BIT;
|
||
|
static const int _PACKED_INT32 = _REPEATED_BIT | _PACKED_BIT | _INT32_BIT;
static const int OB = _OPTIONAL_BOOL;
static const int OY = _OPTIONAL_BYTES;
static const int OS_A = _OPTIONAL_STRING;
static const int OD = _OPTIONAL_DOUBLE;
static const int OE = _OPTIONAL_ENUM;
static const int O3 = _OPTIONAL_INT32;
static const int O6 = _OPTIONAL_INT64;
static const int OU6 = _OPTIONAL_UINT64;
static const int OM = _OPTIONAL_MESSAGE;
static const int QB = _REQUIRED_BOOL;
static const int QS = _REQUIRED_STRING;
static const int PS = _REPEATED_STRING;
static const int P3 = _REPEATED_INT32;
static const int PM = _REPEATED_MESSAGE;
static const int K3 = _PACKED_INT32;
}
|
||
|
typedef GeneratedMessage CreateBuilderFunc();
|
||
|
typedef Object MakeDefaultFunc();
|
||
|
typedef ProtobufEnum ValueOfFunc(int _0);
|
||
|
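// GeneratedMessage is the base class of every generated protobuf message: it
// stores field values by tag number, tracks unknown fields and extensions, and
// implements merging, equality, hashing, and binary (de)serialization.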
abstract class GeneratedMessage {
|
||
|
static const int OS = PbFieldType._OPTIONAL_STRING;
|
||
|
static Int64 MAX_JSON_INT = new Int64.fromInts(0x200000, 0);
|
||
|
static Int64 MIN_JSON_INT = -MAX_JSON_INT;
|
||
|
final Map<int, dynamic> _fieldValues = new Map<int, dynamic>();
|
||
|
final Map<int, Extension> _extensions = new Map<int, Extension>();
|
||
|
final UnknownFieldSet unknownFields = new UnknownFieldSet();
|
||
|
GeneratedMessage() {
|
||
|
if (eventPlugin != null) eventPlugin.attach(this);
|
||
|
}
|
||
|
GeneratedMessage.fromBuffer_A(List<int> input_A, ExtensionRegistry extensionRegistry) {
|
||
|
if (eventPlugin != null) eventPlugin.attach(this);
|
||
|
mergeFromBuffer(input_A, extensionRegistry);
|
||
|
}
|
||
|
EventPlugin get eventPlugin => null;
|
||
|
bool get _hasObservers => eventPlugin != null && eventPlugin.hasObservers;
|
||
|
bool hasRequiredFields() => info_.hasRequiredFields;
|
||
|
bool isInitialized() {
|
||
|
if (!info_.hasRequiredFields) {
|
||
|
return true;
|
||
|
}
|
||
|
return info_.isInitialized(_fieldValues) && extensionsAreInitialized();
|
||
|
}
|
||
|
void _findInvalidFields(List<String> invalidFields, String prefix) {
|
||
|
info_._findInvalidFields(_fieldValues, invalidFields, prefix);
|
||
|
}
|
||
|
void clear() {
|
||
|
unknownFields.clear();
|
||
|
if (_hasObservers) {
|
||
|
for (int key_A in _fieldValues.keys) {
|
||
|
var fi = _ensureFieldInfo(key_A);
|
||
|
eventPlugin.beforeClearField(fi);
|
||
|
}
|
||
|
}
|
||
|
_fieldValues.clear();
|
||
|
}
|
||
|
bool operator==(other) {
|
||
|
if (other is! GeneratedMessage) return false;
|
||
|
GeneratedMessage o = other;
|
||
|
if (o.info_ != info_) return false;
|
||
|
if (!_areMapsEqual(o._fieldValues, _fieldValues)) return false;
|
||
|
if (o.unknownFields != unknownFields) return false;
|
||
|
return true;
|
||
|
}
|
||
|
int get hashCode {
|
||
|
int hash;
|
||
|
void hashEnumList(PbList enums) {
|
||
|
enums.forEach((ProtobufEnum enm) {
|
||
|
hash = (31 * hash + enm.value) & 0x3fffffff;
|
||
|
});
|
||
|
}
|
||
|
void hashFields() {
|
||
|
for (int tagNumber in sorted(_fieldValues.keys)) {
|
||
|
var value_A = _fieldValues[tagNumber];
|
||
|
if (value_A is List && value_A.isEmpty) {
|
||
|
continue;
|
||
|
}
|
||
|
hash = ((37 * hash) + tagNumber) & 0x3fffffff;
|
||
|
var fi = _ensureFieldInfo(tagNumber);
|
||
|
if (!_isEnum(fi.type)) {
|
||
|
hash = ((53 * hash) + (value_A as Object).hashCode) & 0x3fffffff;
|
||
|
} else if (fi.isRepeated) {
|
||
|
hashEnumList(value_A);
|
||
|
} else {
|
||
|
ProtobufEnum enm = value_A;
|
||
|
hash = ((53 * hash) + enm.value) & 0x3fffffff;
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
hash = 41;
|
||
|
hash = ((19 * hash) + info_.hashCode) & 0x3fffffff;
|
||
|
hashFields();
|
||
|
hash = ((29 * hash) + unknownFields.hashCode) & 0x3fffffff;
|
||
|
return hash;
|
||
|
}
|
||
|
String toString() => _toString('');
|
||
|
String _toString(String indent_A) {
|
||
|
StringBuffer s = new StringBuffer();
|
||
|
void renderValue(key_A, value_A) {
|
||
|
if (value_A is GeneratedMessage) {
|
||
|
s.write('${indent_A}${key_A}: {\n');
|
||
|
s.write(value_A._toString('${indent_A} '));
|
||
|
s.write('${indent_A}}\n');
|
||
|
} else {
|
||
|
s.write('${indent_A}${key_A}: ${value_A}\n');
|
||
|
}
|
||
|
}
|
||
|
List<FieldInfo> fields = new List<FieldInfo>.from(info_.fieldInfo.values)
|
||
|
..sort((a, b) => a.tagNumber.compareTo(b.tagNumber));
|
||
|
for (FieldInfo field in fields) {
|
||
|
if (hasField(field.tagNumber)) {
|
||
|
var fieldValue = _fieldValues[field.tagNumber];
|
||
|
if (fieldValue is ByteData) {
|
||
|
final value_A = fieldValue.getUint64(0, Endianness.LITTLE_ENDIAN);
|
||
|
renderValue(field.name, value_A);
|
||
|
} else if (fieldValue is List) {
|
||
|
for (var value_A in fieldValue) {
|
||
|
renderValue(field.name, value_A);
|
||
|
}
|
||
|
} else {
|
||
|
renderValue(field.name, fieldValue);
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
s.write(unknownFields.toString());
|
||
|
return s.toString();
|
||
|
}
|
||
|
void check() {
|
||
|
if (!isInitialized()) {
|
||
|
List<String> invalidFields = <String>[];
|
||
|
info_._findInvalidFields(_fieldValues, invalidFields);
|
||
|
String missingFields = (invalidFields
|
||
|
..sort()).join(', ');
|
||
|
throw new StateError('Message missing required fields: ${missingFields}');
|
||
|
}
|
||
|
}
|
||
|
BuilderInfo get info_;
|
||
|
Uint8List writeToBuffer() {
|
||
|
CodedBufferWriter out = new CodedBufferWriter();
|
||
|
writeToCodedBufferWriter(out);
|
||
|
return out.toBuffer();
|
||
|
}
|
||
|
void writeToCodedBufferWriter(CodedBufferWriter output) {
|
||
|
for (int tagNumber in sorted(_fieldValues.keys)) {
|
||
|
var value_A = _fieldValues[tagNumber];
|
||
|
var fi = _ensureFieldInfo(tagNumber);
|
||
|
output.writeField(tagNumber, fi.type, value_A);
|
||
|
}
|
||
|
unknownFields.writeToCodedBufferWriter(output);
|
||
|
}
|
||
|
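// Main wire-format parse loop: reads tags until the input is exhausted, routes
// known tags to the matching typed reader, handles packed encoding for
// repeated scalars, and collects unknown or wire-type-mismatched fields into
// unknownFields.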
void mergeFromCodedBufferReader(CodedBufferReader input_A, [ExtensionRegistry extensionRegistry = ExtensionRegistry.EMPTY]) {
|
||
|
void appendToRepeated(tagNumber, value_A) {
|
||
|
List list_A = getField(tagNumber);
|
||
|
list_A.add(value_A);
|
||
|
}
|
||
|
void readPackableToList(int wireType, int tagNumber, Function readToList) {
|
||
|
List list_A = getField(tagNumber);
|
||
|
if (wireType == WIRETYPE_LENGTH_DELIMITED) {
|
||
|
input_A._withLimit(input_A.readInt32(), () {
|
||
|
while (!input_A.isAtEnd()) {
|
||
|
readToList(list_A);
|
||
|
}
|
||
|
});
|
||
|
} else {
|
||
|
readToList(list_A);
|
||
|
}
|
||
|
}
|
||
|
void readPackable(int wireType, int tagNumber, Function readFunc) {
|
||
|
void readToList(List list_A) => list_A.add(readFunc());
|
||
|
readPackableToList(wireType, tagNumber, readToList);
|
||
|
}
|
||
|
while (true) {
|
||
|
int tag = input_A.readTag();
|
||
|
if (tag == 0) {
|
||
|
return;
|
||
|
}
|
||
|
int wireType = tag & 0x7;
|
||
|
int tagNumber = tag >> 3;
|
||
|
FieldInfo fi = info_.fieldInfo[tagNumber];
|
||
|
if (fi == null) {
|
||
|
fi = extensionRegistry.getExtension(info_.messageName, tagNumber);
|
||
|
if (fi != null) {
|
||
|
_addExtensionToMap(fi);
|
||
|
}
|
||
|
}
|
||
|
if (fi == null || !_wireTypeMatches(fi.type, wireType)) {
|
||
|
if (!unknownFields.mergeFieldFromBuffer(tag, input_A)) {
|
||
|
return;
|
||
|
}
|
||
|
continue;
|
||
|
}
|
||
|
int fieldType = fi.type;
|
||
|
fieldType &= ~(PbFieldType._PACKED_BIT | PbFieldType._REQUIRED_BIT);
|
||
|
switch (fieldType) {
|
||
|
case PbFieldType._OPTIONAL_BOOL:
|
||
|
_setField(fi, input_A.readBool());
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._OPTIONAL_BYTES:
|
||
|
_setField(fi, input_A.readBytes());
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._OPTIONAL_STRING:
|
||
|
_setField(fi, input_A.readString());
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._OPTIONAL_FLOAT:
|
||
|
_setField(fi, input_A.readFloat());
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._OPTIONAL_DOUBLE:
|
||
|
_setField(fi, input_A.readDouble());
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._OPTIONAL_ENUM:
|
||
|
int rawValue = input_A.readEnum();
|
||
|
var value_A = _getValueOfFunc(tagNumber, extensionRegistry)(rawValue);
|
||
|
if (value_A == null) {
|
||
|
unknownFields.mergeVarintField(tagNumber, new Int64(rawValue));
|
||
|
} else {
|
||
|
_setField(fi, value_A);
|
||
|
}
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._OPTIONAL_GROUP:
|
||
|
GeneratedMessage subMessage = _getEmptyMessage(tagNumber, extensionRegistry);
|
||
|
if (_fieldValues.containsKey(tagNumber)) {
|
||
|
subMessage.mergeFromMessage(getField(tagNumber));
|
||
|
}
|
||
|
input_A.readGroup(tagNumber, subMessage, extensionRegistry);
|
||
|
_setField(fi, subMessage);
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._OPTIONAL_INT32:
|
||
|
_setField(fi, input_A.readInt32());
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._OPTIONAL_INT64:
|
||
|
_setField(fi, input_A.readInt64());
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._OPTIONAL_SINT32:
|
||
|
_setField(fi, input_A.readSint32());
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._OPTIONAL_SINT64:
|
||
|
_setField(fi, input_A.readSint64());
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._OPTIONAL_UINT32:
|
||
|
_setField(fi, input_A.readUint32());
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._OPTIONAL_UINT64:
|
||
|
_setField(fi, input_A.readUint64());
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._OPTIONAL_FIXED32:
|
||
|
_setField(fi, input_A.readFixed32());
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._OPTIONAL_FIXED64:
|
||
|
_setField(fi, input_A.readFixed64());
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._OPTIONAL_SFIXED32:
|
||
|
_setField(fi, input_A.readSfixed32());
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._OPTIONAL_SFIXED64:
|
||
|
_setField(fi, input_A.readSfixed64());
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._OPTIONAL_MESSAGE:
|
||
|
GeneratedMessage subMessage = _getEmptyMessage(tagNumber, extensionRegistry);
|
||
|
if (_fieldValues.containsKey(tagNumber)) {
|
||
|
subMessage.mergeFromMessage(getField(tagNumber));
|
||
|
}
|
||
|
input_A.readMessage(subMessage, extensionRegistry);
|
||
|
_setField(fi, subMessage);
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_BOOL:
|
||
|
readPackable(wireType, tagNumber, input_A.readBool);
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_BYTES:
|
||
|
appendToRepeated(tagNumber, input_A.readBytes());
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_STRING:
|
||
|
appendToRepeated(tagNumber, input_A.readString());
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_FLOAT:
|
||
|
readPackable(wireType, tagNumber, input_A.readFloat);
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_DOUBLE:
|
||
|
readPackable(wireType, tagNumber, input_A.readDouble);
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_ENUM:
|
||
|
readPackableToList(wireType, tagNumber, (List list_A) {
|
||
|
int rawValue = input_A.readEnum();
|
||
|
var value_A = _getValueOfFunc(tagNumber, extensionRegistry)(rawValue);
|
||
|
if (value_A == null) {
|
||
|
unknownFields.mergeVarintField(tagNumber, new Int64(rawValue));
|
||
|
} else {
|
||
|
list_A.add(value_A);
|
||
|
}
|
||
|
});
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_GROUP:
|
||
|
GeneratedMessage subMessage = _getEmptyMessage(tagNumber, extensionRegistry);
|
||
|
input_A.readGroup(tagNumber, subMessage, extensionRegistry);
|
||
|
appendToRepeated(tagNumber, subMessage);
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_INT32:
|
||
|
readPackable(wireType, tagNumber, input_A.readInt32);
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_INT64:
|
||
|
readPackable(wireType, tagNumber, input_A.readInt64);
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_SINT32:
|
||
|
readPackable(wireType, tagNumber, input_A.readSint32);
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_SINT64:
|
||
|
readPackable(wireType, tagNumber, input_A.readSint64);
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_UINT32:
|
||
|
readPackable(wireType, tagNumber, input_A.readUint32);
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_UINT64:
|
||
|
readPackable(wireType, tagNumber, input_A.readUint64);
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_FIXED32:
|
||
|
readPackable(wireType, tagNumber, input_A.readFixed32);
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_FIXED64:
|
||
|
readPackable(wireType, tagNumber, input_A.readFixed64);
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_SFIXED32:
|
||
|
readPackable(wireType, tagNumber, input_A.readSfixed32);
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_SFIXED64:
|
||
|
readPackable(wireType, tagNumber, input_A.readSfixed64);
|
||
|
break;
|
||
|
|
||
|
case PbFieldType._REPEATED_MESSAGE:
|
||
|
GeneratedMessage subMessage = _getEmptyMessage(tagNumber, extensionRegistry);
|
||
|
input_A.readMessage(subMessage, extensionRegistry);
|
||
|
appendToRepeated(tagNumber, subMessage);
|
||
|
break;
|
||
|
|
||
|
default:
|
||
|
throw 'Unknown field type ${fieldType}';
|
||
|
}
|
||
|
}
|
||
|
}
void mergeFromBuffer(List<int> input_A, [ExtensionRegistry extensionRegistry = ExtensionRegistry.EMPTY]) {
CodedBufferReader codedInput = new CodedBufferReader(input_A);
mergeFromCodedBufferReader(codedInput, extensionRegistry);
codedInput.checkLastTagWas(0);
}
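// JSON map produced for writeToJson: keys are tag numbers rendered as strings,
// bytes fields become base64 strings, enums use their integer value, and
// 64-bit integers outside [MIN_JSON_INT, MAX_JSON_INT] are written as strings.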
Map<String, dynamic> writeToJsonMap() {
convertToMap(fieldValue, fieldType) {
int baseType = PbFieldType._baseType(fieldType);
if (_isRepeated(fieldType)) {
return new List.from(fieldValue.map((e) => convertToMap(e, baseType)));
}
switch (baseType) {
case PbFieldType._BOOL_BIT: case PbFieldType._STRING_BIT: case PbFieldType._FLOAT_BIT: case PbFieldType._DOUBLE_BIT: case PbFieldType._INT32_BIT: case PbFieldType._SINT32_BIT: case PbFieldType._UINT32_BIT: case PbFieldType._FIXED32_BIT: case PbFieldType._SFIXED32_BIT:
return fieldValue;

case PbFieldType._BYTES_BIT:
return CryptoUtils.bytesToBase64_A(fieldValue);

case PbFieldType._ENUM_BIT:
return fieldValue.value;

case PbFieldType._INT64_BIT: case PbFieldType._SINT64_BIT: case PbFieldType._UINT64_BIT: case PbFieldType._FIXED64_BIT: case PbFieldType._SFIXED64_BIT:
if (MIN_JSON_INT <= fieldValue && fieldValue <= MAX_JSON_INT) {
return fieldValue.toInt();
}
return fieldValue.toString();

case PbFieldType._GROUP_BIT: case PbFieldType._MESSAGE_BIT:
return fieldValue.writeToJsonMap();

default:
throw 'Unknown type ${fieldType}';
}
}
var result_A = <String, dynamic>{};
for (int tagNumber in sorted(_fieldValues.keys)) {
var value_A = _fieldValues[tagNumber];
if (value_A is List && value_A.isEmpty) {
continue;
}
var fi = _ensureFieldInfo(tagNumber);
result_A['${tagNumber}'] = convertToMap(value_A, fi.type);
}
return result_A;
}
bool extensionsAreInitialized() {
return _extensions.keys.every((int tagNumber) {
return info_._isFieldInitialized(_fieldValues, tagNumber, _extensions[tagNumber].type);
});
}
getExtension(Extension extension) {
_checkExtension(extension);
_addExtensionToMap(extension);
return getField(extension.tagNumber);
}
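// Reading an unset field returns its default value; for repeated fields the
// backing list is created on first access and stored, so callers can append
// to it directly.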
getField(int tagNumber) {
var value_A = _fieldValues[tagNumber];
if (value_A != null) return value_A;
var fi = _ensureFieldInfo(tagNumber);
if (fi.isRepeated) {
return _getDefaultRepeatedField(tagNumber, fi);
} else {
return fi.makeDefault();
}
}
FieldInfo _ensureFieldInfo(int tagNumber) {
var fi = info_.fieldInfo[tagNumber];
if (fi != null) return fi;
fi = _extensions[tagNumber];
if (fi != null) return fi;
throw new ArgumentError("tag ${tagNumber} not defined in ${info_.messageName}");
}
List _getDefaultRepeatedField(int tagNumber, FieldInfo fi) {
var value_A = createRepeatedField(tagNumber, fi);
_setField(fi, value_A);
return value_A;
}
List createRepeatedField(int tagNumber, FieldInfo fi) {
if (fi.check != null) {
return new PbList(check: fi.check);
} else {
return fi.makeDefault();
}
}
bool hasExtension(Extension extension) {
_checkExtension(extension);
return hasField(extension.tagNumber);
}
bool hasField(int tagNumber) {
if (!_fieldValues.containsKey(tagNumber)) {
return false;
}
var value_A = _fieldValues[tagNumber];
if (value_A is List && value_A.isEmpty) {
return false;
}
return true;
}
void mergeFromMessage(GeneratedMessage other) {
for (int tagNumber in other._fieldValues.keys) {
var fieldValue = other._fieldValues[tagNumber];
var fi = info_.fieldInfo[tagNumber];
if (fi == null) {
fi = other._extensions[tagNumber];
if (fi != null) {
_checkExtension(fi);
_addExtensionToMap(fi);
}
}
var otherType = other._ensureFieldInfo(tagNumber).type;
var cloner = (x_A) => x_A;
if (_isGroupOrMessage(otherType)) {
cloner = (message_A) => message_A.clone();
}
if (fi.isRepeated) {
getField(tagNumber).addAll(new List.from(fieldValue).map(cloner));
} else {
fieldValue = cloner(fieldValue);
_validate_A(tagNumber, fi.type, fieldValue);
_setField(fi, fieldValue);
}
}
mergeUnknownFields(other.unknownFields);
}
void mergeUnknownFields(UnknownFieldSet unknownFieldSet) {
unknownFields.mergeFromUnknownFieldSet(unknownFieldSet);
}
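// setField rejects null values and repeated fields (append via
// getField(tag).add(...) instead) and type-checks the value against the
// declared field type before storing it; observers are notified in _setField.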
void setField(int tagNumber, value_A) {
if (value_A == null) throw new ArgumentError('value is null');
var fi = _ensureFieldInfo(tagNumber);
if (fi.isRepeated) {
throw new ArgumentError(_generateMessage(tagNumber, value_A, 'repeating field (use get + .add())'));
}
_validate_A(tagNumber, fi.type, value_A);
_setField(fi, value_A);
}
void _setField(FieldInfo fi, value_A) {
assert(fi != null);
if (_hasObservers) {
eventPlugin.beforeSetField(fi, value_A);
}
_fieldValues[fi.tagNumber] = value_A;
}
void _addExtensionToMap(Extension extension) {
_extensions[extension.tagNumber] = extension;
}
void _checkExtension(Extension extension) {
if (extension.extendee != info_.messageName) {
throw new ArgumentError('Extension ${extension} not legal for message ${info_.messageName}');
}
}
GeneratedMessage _getEmptyMessage(int tagNumber, ExtensionRegistry extensionRegistry) {
CreateBuilderFunc subBuilderFunc = info_.subBuilder(tagNumber);
if (subBuilderFunc == null && extensionRegistry != null) {
subBuilderFunc = extensionRegistry.getExtension(info_.messageName, tagNumber).subBuilder;
}
return subBuilderFunc();
}
ValueOfFunc _getValueOfFunc(int tagNumber, ExtensionRegistry extensionRegistry) {
ValueOfFunc valueOfFunc = info_.valueOfFunc(tagNumber);
if (valueOfFunc == null && extensionRegistry != null) {
valueOfFunc = extensionRegistry.getExtension(info_.messageName, tagNumber).valueOf_D;
}
return valueOfFunc;
}
String _generateMessage(int tagNumber, var value_A, String detail) {
String fieldName;
if (_extensions[tagNumber] != null) {
fieldName = _extensions[tagNumber].name;
} else {
fieldName = info_.fieldName(tagNumber);
}
String messageType_A = info_.messageName;
return 'Illegal to set field ${fieldName} (${tagNumber}) of ${messageType_A}' ' to value (${value_A}): ${detail}';
}
void _validate_A(int tagNumber, int fieldType, var value_A) {
var message_A = _getFieldError(fieldType, value_A);
if (message_A != null) {
throw new ArgumentError(_generateMessage(tagNumber, value_A, message_A));
}
}
}
typedef dynamic CheckFunc(_0);
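// Validating List used for repeated fields: every mutating operation funnels
// through _validate_A, which runs the per-element check function and a type
// check, and a protobuf list cannot be grown by assigning to length.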
class PbList<E_A> extends Object with ListMixin<E_A> implements List<E_A> {
|
||
|
final List<E_A> _wrappedList;
|
||
|
final CheckFunc check;
|
||
|
PbList({this.check: _checkNotNull}) : _wrappedList = <E_A>[] {
|
||
|
assert(check != null);
|
||
|
}
|
||
|
bool operator==(other) => (other is PbList) && _areListsEqual(other, this);
|
||
|
int get hashCode {
|
||
|
int hash = 0;
|
||
|
_wrappedList.forEach((E_A value_A) {
|
||
|
hash = (hash + value_A.hashCode) & 0x3fffffff;
|
||
|
hash = (hash + hash << 10) & 0x3fffffff;
|
||
|
hash = (hash ^ hash >> 6) & 0x3fffffff;
|
||
|
});
|
||
|
hash = (hash + hash << 3) & 0x3fffffff;
|
||
|
hash = (hash ^ hash >> 11) & 0x3fffffff;
|
||
|
hash = (hash + hash << 15) & 0x3fffffff;
|
||
|
return hash;
|
||
|
}
|
||
|
Iterator<E_A> get iterator => _wrappedList.iterator;
|
||
|
E_A operator[](int index_A) => _wrappedList[index_A];
|
||
|
void operator[]=(int index_A, E_A value_A) {
|
||
|
_validate_A(value_A);
|
||
|
_wrappedList[index_A] = value_A;
|
||
|
}
|
||
|
void set length(int newLength) {
|
||
|
if (newLength > length) {
|
||
|
throw new ArgumentError('Extending protobuf lists is not supported');
|
||
|
}
|
||
|
_wrappedList.length = newLength;
|
||
|
}
|
||
|
void add(E_A value_A) {
|
||
|
_validate_A(value_A);
|
||
|
_wrappedList.add(value_A);
|
||
|
}
|
||
|
void addAll(Iterable<E_A> collection) {
|
||
|
collection.forEach(_validate_A);
|
||
|
_wrappedList.addAll(collection);
|
||
|
}
|
||
|
void setRange(int start_A, int end_A, Iterable<E_A> from_A, [int skipCount = 0]) {
|
||
|
from_A.skip(skipCount).take(end_A - start_A).forEach(_validate_A);
|
||
|
_wrappedList.setRange(start_A, end_A, from_A, skipCount);
|
||
|
}
|
||
|
void insert(int index_A, E_A element_A) {
|
||
|
_validate_A(element_A);
|
||
|
_wrappedList.insert(index_A, element_A);
|
||
|
}
|
||
|
void insertAll(int index_A, Iterable<E_A> iterable) {
|
||
|
iterable.forEach(_validate_A);
|
||
|
_wrappedList.insertAll(index_A, iterable);
|
||
|
}
|
||
|
void setAll(int index_A, Iterable<E_A> iterable) {
|
||
|
iterable.forEach(_validate_A);
|
||
|
_wrappedList.setAll(index_A, iterable);
|
||
|
}
|
||
|
int get length => _wrappedList.length;
|
||
|
void _validate_A(E_A val) {
|
||
|
check(val);
|
||
|
if (val is! E_A) {
|
||
|
throw new ArgumentError('Value (${val}) is not of the correct type');
|
||
|
}
|
||
|
}
|
||
|
}
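// Base class of generated enums: each value is a (number, name) constant, and
// initByValue builds the number-to-value map behind the generated valueOf().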
class ProtobufEnum {
final int value;
final String name;
const ProtobufEnum(this.value, this.name);
static Map<int, dynamic> initByValue(List<ProtobufEnum> byIndex) {
var byValue = new Map<int, dynamic>();
for (ProtobufEnum v in byIndex) {
byValue[v.value] = v;
}
return byValue;
}
int get hashCode => value;
String toString() => name;
}
|
||
|
final _emptyList = new List.unmodifiable([]);
|
||
|
abstract class ReadonlyMessageMixin {
|
||
|
static final _emptyUnknownFields = new _ReadonlyUnknownFieldSet();
|
||
|
BuilderInfo get info_;
|
||
|
get unknownFields => _emptyUnknownFields;
|
||
|
List _getDefaultRepeatedField(int tagNumber, FieldInfo fi) => _emptyList;
|
||
|
void clear() => _readonly("clear");
|
||
|
void mergeFromBuffer(List<int> input_A, [ExtensionRegistry extensionRegistry = ExtensionRegistry.EMPTY]) => _readonly("mergeFromBuffer");
|
||
|
void mergeFromCodedBufferReader(CodedBufferReader input_A, [ExtensionRegistry extensionRegistry = ExtensionRegistry.EMPTY]) => _readonly("mergeFromCodedBufferReader");
|
||
|
void mergeFromMessage(GeneratedMessage other) => _readonly("mergeFromMessage");
|
||
|
void mergeUnknownFields(UnknownFieldSet unknownFieldSet) => _readonly("mergeUnknownFields");
|
||
|
void setField(int tagNumber, var value_A, [int fieldType = null]) => _readonly("setField");
|
||
|
void _readonly(String methodName) {
|
||
|
String messageType_A = info_.messageName;
|
||
|
throw new UnsupportedError("attempted to call ${methodName} on a read-only message (${messageType_A})");
|
||
|
}
|
||
|
}
|
||
|
class _ReadonlyUnknownFieldSet extends UnknownFieldSet {
|
||
|
void clear() => _readonly("clear");
|
||
|
void mergeField(int number, UnknownFieldSetField field) => _readonly("mergeField");
|
||
|
bool mergeFieldFromBuffer(int tag, CodedBufferReader input_A) {
|
||
|
_readonly("mergeFieldFromBuffer");
|
||
|
return false;
|
||
|
}
|
||
|
void mergeFromCodedBufferReader(CodedBufferReader input_A) => _readonly("mergeFromCodedBufferReader");
|
||
|
void mergeFromUnknownFieldSet(UnknownFieldSet other) => _readonly("mergeFromUnknownFieldSet");
|
||
|
UnknownFieldSetField _getField(int number) {
|
||
|
_readonly("a merge method");
|
||
|
return null;
|
||
|
}
|
||
|
void _readonly(String methodName) {
|
||
|
throw new UnsupportedError("attempted to call ${methodName} on a read-only UnknownFieldSet");
|
||
|
}
|
||
|
}
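// Stores fields seen on the wire that the message's BuilderInfo does not
// describe, grouped per tag number by wire type (varint, fixed32/64,
// length-delimited, group) so they can be written back out unchanged.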
class UnknownFieldSet {
|
||
|
final Map<int, UnknownFieldSetField> _fields = new Map<int, UnknownFieldSetField>();
|
||
|
UnknownFieldSet();
|
||
|
UnknownFieldSet._clone_A(UnknownFieldSet unknownFieldSet) {
|
||
|
mergeFromUnknownFieldSet(unknownFieldSet);
|
||
|
}
|
||
|
UnknownFieldSet clone() => new UnknownFieldSet._clone_A(this);
|
||
|
Map<int, UnknownFieldSetField> asMap() => new Map.from(_fields);
|
||
|
void clear() {
|
||
|
_fields.clear();
|
||
|
}
|
||
|
UnknownFieldSetField getField(int tagNumber) => _fields[tagNumber];
|
||
|
bool hasField(int tagNumber) => _fields.containsKey(tagNumber);
|
||
|
void mergeField(int number, UnknownFieldSetField field) {
|
||
|
_getField(number)
|
||
|
..varints.addAll(field.varints)
|
||
|
..fixed32s.addAll(field.fixed32s)
|
||
|
..fixed64s.addAll(field.fixed64s)
|
||
|
..lengthDelimited.addAll(field.lengthDelimited)
|
||
|
..groups.addAll(field.groups);
|
||
|
}
|
||
|
bool mergeFieldFromBuffer(int tag, CodedBufferReader input_A) {
|
||
|
int number = getTagFieldNumber(tag);
|
||
|
switch (getTagWireType(tag)) {
|
||
|
case WIRETYPE_VARINT:
|
||
|
mergeVarintField(number, input_A.readInt64());
|
||
|
return true;
|
||
|
|
||
|
case WIRETYPE_FIXED64:
|
||
|
mergeFixed64Field(number, input_A.readFixed64());
|
||
|
return true;
|
||
|
|
||
|
case WIRETYPE_LENGTH_DELIMITED:
|
||
|
mergeLengthDelimitedField(number, input_A.readBytes());
|
||
|
return true;
|
||
|
|
||
|
case WIRETYPE_START_GROUP:
|
||
|
UnknownFieldSet subGroup = input_A.readUnknownFieldSetGroup(number);
|
||
|
mergeGroupField(number, subGroup);
|
||
|
return true;
|
||
|
|
||
|
case WIRETYPE_END_GROUP:
|
||
|
return false;
|
||
|
|
||
|
case WIRETYPE_FIXED32:
|
||
|
mergeFixed32Field(number, input_A.readFixed32());
|
||
|
return true;
|
||
|
|
||
|
default:
|
||
|
throw new InvalidProtocolBufferException.invalidWireType();
|
||
|
}
|
||
|
}
|
||
|
void mergeFromCodedBufferReader(CodedBufferReader input_A) {
|
||
|
while (true) {
|
||
|
int tag = input_A.readTag();
|
||
|
if (tag == 0 || !mergeFieldFromBuffer(tag, input_A)) {
|
||
|
break;
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
void mergeFromUnknownFieldSet(UnknownFieldSet other) {
|
||
|
for (int key_A in other._fields.keys) {
|
||
|
mergeField(key_A, other._fields[key_A]);
|
||
|
}
|
||
|
}
|
||
|
_checkFieldNumber(int number) {
|
||
|
if (number == 0) {
|
||
|
throw new ArgumentError('Zero is not a valid field number.');
|
||
|
}
|
||
|
}
|
||
|
void mergeFixed32Field(int number, int value_A) {
|
||
|
_getField(number).addFixed32(value_A);
|
||
|
}
|
||
|
void mergeFixed64Field(int number, Int64 value_A) {
|
||
|
_getField(number).addFixed64(value_A);
|
||
|
}
|
||
|
void mergeGroupField(int number, UnknownFieldSet value_A) {
|
||
|
_getField(number).addGroup(value_A);
|
||
|
}
|
||
|
void mergeLengthDelimitedField(int number, List<int> value_A) {
|
||
|
_getField(number).addLengthDelimited(value_A);
|
||
|
}
|
||
|
void mergeVarintField(int number, Int64 value_A) {
|
||
|
_getField(number).addVarint(value_A);
|
||
|
}
|
||
|
UnknownFieldSetField _getField(int number) {
|
||
|
_checkFieldNumber(number);
|
||
|
return _fields.putIfAbsent(number, () => new UnknownFieldSetField());
|
||
|
}
|
||
|
bool operator==(other) {
|
||
|
if (other is! UnknownFieldSet) return false;
|
||
|
UnknownFieldSet o = other;
|
||
|
return _areMapsEqual(o._fields, _fields);
|
||
|
}
|
||
|
int get hashCode {
|
||
|
int hash = 0;
|
||
|
_fields.forEach((int number, Object value_A) {
|
||
|
hash = ((37 * hash) + number) & 0x3fffffff;
|
||
|
hash = ((53 * hash) + value_A.hashCode) & 0x3fffffff;
|
||
|
});
|
||
|
return hash;
|
||
|
}
|
||
|
String toString() => _toString('');
|
||
|
String _toString(String indent_A) {
|
||
|
var stringBuffer = new StringBuffer();
|
||
|
for (int tag in sorted(_fields.keys)) {
|
||
|
var field = _fields[tag];
|
||
|
for (var value_A in field.values) {
|
||
|
if (value_A is UnknownFieldSet) {
|
||
|
stringBuffer
|
||
|
..write('${indent_A}${tag}: {\n')
|
||
|
..write(value_A._toString('${indent_A} '))
|
||
|
..write('${indent_A}}\n');
|
||
|
} else {
|
||
|
if (value_A is ByteData) {
|
||
|
value_A = value_A.getUint64(0, Endianness.LITTLE_ENDIAN);
|
||
|
}
|
||
|
stringBuffer.write('${indent_A}${tag}: ${value_A}\n');
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
return stringBuffer.toString();
|
||
|
}
|
||
|
void writeToCodedBufferWriter(CodedBufferWriter output) {
|
||
|
for (int key_A in _fields.keys) {
|
||
|
_fields[key_A].writeTo(key_A, output);
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
class UnknownFieldSetField {
|
||
|
final List<List<int>> lengthDelimited = <List<int>>[];
|
||
|
final List<Int64> varints = <Int64>[];
|
||
|
final List<int> fixed32s = <int>[];
|
||
|
final List<Int64> fixed64s = <Int64>[];
|
||
|
final List<UnknownFieldSet> groups = <UnknownFieldSet>[];
|
||
|
bool operator==(other) {
|
||
|
if (other is! UnknownFieldSetField) return false;
|
||
|
UnknownFieldSetField o = other;
|
||
|
if (lengthDelimited.length != o.lengthDelimited.length) return false;
|
||
|
for (int i = 0; i < lengthDelimited.length; i++) {
|
||
|
if (!_areListsEqual(o.lengthDelimited[i], lengthDelimited[i])) {
|
||
|
return false;
|
||
|
}
|
||
|
}
|
||
|
if (!_areListsEqual(o.varints, varints)) return false;
|
||
|
if (!_areListsEqual(o.fixed32s, fixed32s)) return false;
|
||
|
if (!_areListsEqual(o.fixed64s, fixed64s)) return false;
|
||
|
if (!_areListsEqual(o.groups, groups)) return false;
|
||
|
return true;
|
||
|
}
|
||
|
int get hashCode {
|
||
|
int hash = 0;
|
||
|
lengthDelimited.forEach((List<int> value_A) {
|
||
|
for (int i = 0; i < value_A.length; i++) {
|
||
|
hash = (hash + value_A[i]) & 0x3fffffff;
|
||
|
hash = (hash + hash << 10) & 0x3fffffff;
|
||
|
hash = (hash ^ hash >> 6) & 0x3fffffff;
|
||
|
}
|
||
|
hash = (hash + hash << 3) & 0x3fffffff;
|
||
|
hash = (hash ^ hash >> 11) & 0x3fffffff;
|
||
|
hash = (hash + hash << 15) & 0x3fffffff;
|
||
|
});
|
||
|
varints.forEach((Object value_A) => hash = (hash + 7 * value_A.hashCode) & 0x3fffffff);
|
||
|
fixed32s.forEach((Object value_A) => hash = (hash + 37 * value_A.hashCode) & 0x3fffffff);
|
||
|
fixed64s.forEach((Object value_A) => hash = (hash + 53 * value_A.hashCode) & 0x3fffffff);
|
||
|
groups.forEach((Object value_A) => hash = (hash + value_A.hashCode) & 0x3fffffff);
|
||
|
return hash;
|
||
|
}
|
||
|
List get values => []
|
||
|
..addAll(lengthDelimited)
|
||
|
..addAll(varints)
|
||
|
..addAll(fixed32s)
|
||
|
..addAll(fixed64s)
|
||
|
..addAll(groups);
|
||
|
void writeTo(int fieldNumber, CodedBufferWriter output) {
|
||
|
write_A(type_A, value_A) {
|
||
|
output.writeField(fieldNumber, type_A, value_A);
|
||
|
}
|
||
|
write_A(PbFieldType._REPEATED_UINT64, varints);
|
||
|
write_A(PbFieldType._REPEATED_FIXED32, fixed32s);
|
||
|
write_A(PbFieldType._REPEATED_FIXED64, fixed64s);
|
||
|
write_A(PbFieldType._REPEATED_BYTES, lengthDelimited);
|
||
|
write_A(PbFieldType._REPEATED_GROUP, groups);
|
||
|
}
|
||
|
void addGroup(UnknownFieldSet value_A) {
|
||
|
groups.add(value_A);
|
||
|
}
|
||
|
void addLengthDelimited(List<int> value_A) {
|
||
|
lengthDelimited.add(value_A);
|
||
|
}
|
||
|
void addFixed32(int value_A) {
|
||
|
fixed32s.add(value_A);
|
||
|
}
|
||
|
void addFixed64(Int64 value_A) {
|
||
|
fixed64s.add(value_A);
|
||
|
}
|
||
|
void addVarint(Int64 value_A) {
|
||
|
varints.add(value_A);
|
||
|
}
|
||
|
bool hasRequiredFields() => false;
|
||
|
bool isInitialized() => true;
|
||
|
int get length => values.length;
|
||
|
}
bool _deepEquals(lhs, rhs) {
if ((lhs is List) && (rhs is List)) return _areListsEqual(lhs, rhs);
if ((lhs is Map) && (rhs is Map)) return _areMapsEqual(lhs, rhs);
if ((lhs is ByteData) && (rhs is ByteData)) {
return _areByteDataEqual(lhs, rhs);
}
return lhs == rhs;
}
bool _areListsEqual(List lhs, List rhs) {
range_A(i) => new Iterable.generate(i, (i) => i);
if (lhs.length != rhs.length) return false;
return range_A(lhs.length).every((i) => _deepEquals(lhs[i], rhs[i]));
}
bool _areMapsEqual(Map lhs, Map rhs) {
if (lhs.length != rhs.length) return false;
return lhs.keys.every((key_A) => _deepEquals(lhs[key_A], rhs[key_A]));
}
bool _areByteDataEqual(ByteData lhs, ByteData rhs) {
asBytes(d) => new Uint8List.view(d.buffer, d.offsetInBytes, d.lengthInBytes);
return _areListsEqual(asBytes(lhs), asBytes(rhs));
}
List sorted(Iterable list_A) => new List.from(list_A)..sort();
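// Protobuf wire format helpers: a tag is (fieldNumber << 3) | wireType, so
// makeTag(1, WIRETYPE_VARINT) == 8 and getTagFieldNumber(8) == 1.
// _wireTypeMatches also accepts WIRETYPE_LENGTH_DELIMITED for numeric types
// so that packed repeated fields can be decoded.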
const int TAG_TYPE_BITS = 3;
const int TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1;
const int WIRETYPE_VARINT = 0;
const int WIRETYPE_FIXED64 = 1;
const int WIRETYPE_LENGTH_DELIMITED = 2;
const int WIRETYPE_START_GROUP = 3;
const int WIRETYPE_END_GROUP = 4;
const int WIRETYPE_FIXED32 = 5;
int getTagFieldNumber(int tag) => (tag & 0x7fffffff) >> TAG_TYPE_BITS;
int getTagWireType(int tag) => tag & TAG_TYPE_MASK;
int makeTag(int fieldNumber, int tag) => (fieldNumber << TAG_TYPE_BITS) | tag;
bool _wireTypeMatches(int fieldType, int wireType) {
switch (PbFieldType._baseType(fieldType)) {
case PbFieldType._BOOL_BIT: case PbFieldType._ENUM_BIT: case PbFieldType._INT32_BIT: case PbFieldType._INT64_BIT: case PbFieldType._SINT32_BIT: case PbFieldType._SINT64_BIT: case PbFieldType._UINT32_BIT: case PbFieldType._UINT64_BIT:
return wireType == WIRETYPE_VARINT || wireType == WIRETYPE_LENGTH_DELIMITED;

case PbFieldType._FLOAT_BIT: case PbFieldType._FIXED32_BIT: case PbFieldType._SFIXED32_BIT:
return wireType == WIRETYPE_FIXED32 || wireType == WIRETYPE_LENGTH_DELIMITED;

case PbFieldType._DOUBLE_BIT: case PbFieldType._FIXED64_BIT: case PbFieldType._SFIXED64_BIT:
return wireType == WIRETYPE_FIXED64 || wireType == WIRETYPE_LENGTH_DELIMITED;

case PbFieldType._BYTES_BIT: case PbFieldType._STRING_BIT: case PbFieldType._MESSAGE_BIT:
return wireType == WIRETYPE_LENGTH_DELIMITED;

case PbFieldType._GROUP_BIT:
return wireType == WIRETYPE_START_GROUP;

default:
return false;
}
}
|
||
|
const List<String> GeneratedMessage_reservedNames = const['hashCode', 'noSuchMethod', 'runtimeType', 'toString', 'fromBuffer', 'fromJson', 'hasRequiredFields', 'isInitialized', 'clear', 'getTagNumber', 'check', 'writeToBuffer', 'writeToCodedBufferWriter', 'mergeFromCodedBufferReader', 'mergeFromBuffer', 'writeToJson', 'mergeFromJson', 'writeToJsonMap', 'mergeFromJsonMap', 'addExtension', 'getExtension', 'setExtension', 'hasExtension', 'clearExtension', 'getField', 'getFieldOrNull', 'getDefaultForField', 'setField', 'hasField', 'clearField', 'extensionsAreInitialized', 'mergeFromMessage', 'mergeUnknownFields', '==', 'info_', 'GeneratedMessage', 'Object', 'eventPlugin', 'createRepeatedField'];
|
||
|
PbMixin findMixin(String name_A) {
|
||
|
for (var m in _exportedMixins) {
|
||
|
if (m.name == name_A) {
|
||
|
return m;
|
||
|
}
|
||
|
}
|
||
|
return null;
|
||
|
}
|
||
|
class PbMixin {
|
||
|
final String name;
|
||
|
final String importFrom;
|
||
|
final PbMixin parent;
|
||
|
final List<String> reservedNames;
|
||
|
const PbMixin._raw(this.name, {this.importFrom, this.parent, this.reservedNames});
|
||
|
Iterable<PbMixin> findMixinsToApply() {
|
||
|
var result_A = [this];
|
||
|
for (var p = parent; p != null; p = p.parent) {
|
||
|
result_A.add(p);
|
||
|
}
|
||
|
return result_A.reversed;
|
||
|
}
|
||
|
Iterable<String> findReservedNames() {
|
||
|
var names = new Set<String>();
|
||
|
for (var m = this; m != null; m = m.parent) {
|
||
|
names.add(m.name);
|
||
|
if (m.reservedNames != null) {
|
||
|
names.addAll(m.reservedNames);
|
||
|
}
|
||
|
}
|
||
|
return names;
|
||
|
}
|
||
|
}
|
||
|
const _exportedMixins = const[_pbMapMixin, _pbEventMixin];
|
||
|
const _pbMapMixin = const PbMixin._raw("PbMapMixin", importFrom: "package:protobuf/src/protobuf/mixins/map_mixin.dart", parent: _mapMixin);
|
||
|
const _pbEventMixin = const PbMixin._raw("PbEventMixin", importFrom: "package:protobuf/src/protobuf/mixins/event_mixin.dart", reservedNames: const["changes", "deliverChanges"]);
|
||
|
const List<String> _reservedNamesForMap = const['[]', '[]=', 'addAll', 'containsKey', 'containsValue', 'forEach', 'putIfAbsent', 'remove', 'isEmpty', 'isNotEmpty', 'keys', 'length', 'values'];
|
||
|
const _mapMixin = const PbMixin._raw("MapMixin", importFrom: "dart:collection", reservedNames: _reservedNamesForMap);
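// Maps a FieldDescriptorProto_Type to the Dart type used in generated code
// (32-bit integer kinds become int, 64-bit kinds become Int64, bytes becomes
// List<int>) plus the suffix used to pick the matching PbFieldType constant.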
class BaseType {
|
||
|
final FieldDescriptorProto_Type descriptor;
|
||
|
final String unprefixed;
|
||
|
final String typeConstantSuffix;
|
||
|
final ProtobufContainer generator;
|
||
|
const BaseType._raw_A(this.descriptor, this.typeConstantSuffix, this.unprefixed, this.generator);
|
||
|
bool get isGroup => descriptor == FieldDescriptorProto_Type.TYPE_GROUP;
|
||
|
bool get isMessage => descriptor == FieldDescriptorProto_Type.TYPE_MESSAGE;
|
||
|
bool get isEnum => descriptor == FieldDescriptorProto_Type.TYPE_ENUM;
|
||
|
String get package => generator == null ? "" : generator.package;
|
||
|
String get prefixed {
|
||
|
if (generator == null || generator.packageImportPrefix.isEmpty) {
|
||
|
return unprefixed;
|
||
|
}
|
||
|
return generator.packageImportPrefix + "." + unprefixed;
|
||
|
}
|
||
|
String getDartType(String package_A) => (package_A == this.package) ? unprefixed : prefixed;
|
||
|
String getRepeatedDartType(String package_A) => "List<${getDartType(package_A)}>";
|
||
|
factory BaseType(FieldDescriptorProto field, GenerationContext ctx) {
|
||
|
String constSuffix;
|
||
|
switch (field.type) {
|
||
|
case FieldDescriptorProto_Type.TYPE_BOOL:
|
||
|
return const BaseType._raw_A(FieldDescriptorProto_Type.TYPE_BOOL, "B", "bool", null);
|
||
|
|
||
|
case FieldDescriptorProto_Type.TYPE_FLOAT:
|
||
|
return const BaseType._raw_A(FieldDescriptorProto_Type.TYPE_FLOAT, "F", "double", null);
|
||
|
|
||
|
case FieldDescriptorProto_Type.TYPE_DOUBLE:
|
||
|
return const BaseType._raw_A(FieldDescriptorProto_Type.TYPE_DOUBLE, "D", "double", null);
|
||
|
|
||
|
case FieldDescriptorProto_Type.TYPE_INT32:
|
||
|
return const BaseType._raw_A(FieldDescriptorProto_Type.TYPE_INT32, "3", "int", null);
|
||
|
|
||
|
case FieldDescriptorProto_Type.TYPE_UINT32:
|
||
|
return const BaseType._raw_A(FieldDescriptorProto_Type.TYPE_UINT32, "U3", "int", null);
|
||
|
|
||
|
case FieldDescriptorProto_Type.TYPE_SINT32:
|
||
|
return const BaseType._raw_A(FieldDescriptorProto_Type.TYPE_SINT32, "S3", "int", null);
|
||
|
|
||
|
case FieldDescriptorProto_Type.TYPE_FIXED32:
|
||
|
return const BaseType._raw_A(FieldDescriptorProto_Type.TYPE_FIXED32, "F3", "int", null);
|
||
|
|
||
|
case FieldDescriptorProto_Type.TYPE_SFIXED32:
|
||
|
return const BaseType._raw_A(FieldDescriptorProto_Type.TYPE_SFIXED32, "SF3", "int", null);
|
||
|
|
||
|
case FieldDescriptorProto_Type.TYPE_INT64:
|
||
|
return const BaseType._raw_A(FieldDescriptorProto_Type.TYPE_INT64, "6", "Int64", null);
|
||
|
|
||
|
case FieldDescriptorProto_Type.TYPE_UINT64:
|
||
|
return const BaseType._raw_A(FieldDescriptorProto_Type.TYPE_UINT64, "U6", "Int64", null);
|
||
|
|
||
|
case FieldDescriptorProto_Type.TYPE_SINT64:
|
||
|
return const BaseType._raw_A(FieldDescriptorProto_Type.TYPE_SINT64, "S6", "Int64", null);
|
||
|
|
||
|
case FieldDescriptorProto_Type.TYPE_FIXED64:
|
||
|
return const BaseType._raw_A(FieldDescriptorProto_Type.TYPE_FIXED64, "F6", "Int64", null);
|
||
|
|
||
|
case FieldDescriptorProto_Type.TYPE_SFIXED64:
|
||
|
return const BaseType._raw_A(FieldDescriptorProto_Type.TYPE_SFIXED64, "SF6", "Int64", null);
|
||
|
|
||
|
case FieldDescriptorProto_Type.TYPE_STRING:
|
||
|
return const BaseType._raw_A(FieldDescriptorProto_Type.TYPE_STRING, "S", "String", null);
|
||
|
|
||
|
case FieldDescriptorProto_Type.TYPE_BYTES:
|
||
|
return const BaseType._raw_A(FieldDescriptorProto_Type.TYPE_BYTES, "Y", "List<int>", null);
|
||
|
|
||
|
case FieldDescriptorProto_Type.TYPE_GROUP:
|
||
|
constSuffix = "G";
|
||
|
break;
|
||
|
|
||
|
case FieldDescriptorProto_Type.TYPE_MESSAGE:
|
||
|
constSuffix = "M";
|
||
|
break;
|
||
|
|
||
|
case FieldDescriptorProto_Type.TYPE_ENUM:
|
||
|
constSuffix = "E";
|
||
|
break;
|
||
|
|
||
|
default:
|
||
|
throw new ArgumentError("unimplemented type: ${field.type.name}");
|
||
|
}
|
||
|
ProtobufContainer generator_A = ctx.getFieldType(field.typeName);
|
||
|
if (generator_A == null) {
|
||
|
throw 'FAILURE: Unknown type reference ${field.typeName}';
|
||
|
}
|
||
|
return new BaseType._raw_A(field.type, constSuffix, generator_A.classname, generator_A);
|
||
|
}
|
||
|
}
|
||
|
class ClientApiGenerator {
|
||
|
final ServiceGenerator service;
|
||
|
ClientApiGenerator(this.service);
|
||
|
String get _clientType => 'RpcClient';
|
||
|
void generate_A(IndentingWriter out) {
|
||
|
var className = service._descriptor.name;
|
||
|
out.addBlock('class ${className}Api {', '}', () {
|
||
|
out.println('${_clientType} _client;');
|
||
|
out.println('${className}Api(this._client);');
|
||
|
out.println();
|
||
|
for (MethodDescriptorProto m in service._descriptor.method) {
|
||
|
generateMethod(out, m);
|
||
|
}
|
||
|
});
|
||
|
out.println();
|
||
|
}
|
||
|
void generateMethod(IndentingWriter out, MethodDescriptorProto m) {
|
||
|
var methodName = service._methodName(m.name);
|
||
|
var inputType = service._getDartClassName(m.inputType);
|
||
|
var outputType = service._getDartClassName(m.outputType);
|
||
|
out.addBlock('Future<${outputType}> ${methodName}(' 'ClientContext ctx, ${inputType} request) {', '}', () {
|
||
|
out.println('var emptyResponse = new ${outputType}();');
|
||
|
out.println('return _client.invoke(ctx, \'${service._descriptor.name}\', ' '\'${m.name}\', request, emptyResponse);');
|
||
|
});
|
||
|
}
|
||
|
}
|
||
|
abstract class ProtobufContainer {
|
||
|
String get package;
|
||
|
String get classname;
|
||
|
String get fqname;
|
||
|
String get packageImportPrefix => package.replaceAll('.', r'$');
|
||
|
FileGenerator get fileGen;
|
||
|
}
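// protoc plugin driver: buffers the CodeGeneratorRequest from its input
// stream, links all FileGenerators so cross-file type references resolve, and
// writes back a CodeGeneratorResponse with one generated file for each proto
// listed in fileToGenerate.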
class CodeGenerator extends ProtobufContainer {
|
||
|
final Stream<List<int>> _streamIn;
|
||
|
final IOSink _streamOut;
|
||
|
CodeGenerator(this._streamIn, this._streamOut);
|
||
|
void generate_A({Map<String, SingleOptionParser> optionParsers, OutputConfiguration config}) {
|
||
|
if (config == null) {
|
||
|
config = new DefaultOutputConfiguration();
|
||
|
}
|
||
|
var extensions_A = new ExtensionRegistry();
|
||
|
Dart_options.registerAllExtensions(extensions_A);
|
||
|
_streamIn.fold(new BytesBuilder(), (builder_A, data_A) => builder_A
|
||
|
..add(data_A)).then((builder_A) => builder_A.takeBytes()).then((List<int> bytes) {
|
||
|
var request = new CodeGeneratorRequest.fromBuffer(bytes, extensions_A);
|
||
|
var response_A = new CodeGeneratorResponse();
|
||
|
var options = parseGenerationOptions(request, response_A, optionParsers);
|
||
|
if (options == null) {
|
||
|
_streamOut.add(response_A.writeToBuffer());
|
||
|
return;
|
||
|
}
|
||
|
List<FileGenerator> generators = <FileGenerator>[];
|
||
|
for (FileDescriptorProto file_A in request.protoFile) {
|
||
|
generators.add(new FileGenerator(file_A));
|
||
|
}
|
||
|
link(options, generators);
|
||
|
for (var gen in generators) {
|
||
|
var name_A = gen._fileDescriptor.name;
|
||
|
if (request.fileToGenerate.contains(name_A)) {
|
||
|
response_A.file_A.add(gen.generateResponse(config));
|
||
|
}
|
||
|
}
|
||
|
_streamOut.add(response_A.writeToBuffer());
|
||
|
});
|
||
|
}
|
||
|
String get package => '';
|
||
|
String get classname => null;
|
||
|
String get fqname => '';
|
||
|
get fileGen => null;
|
||
|
}
|
||
|
class EnumAlias {
|
||
|
final EnumValueDescriptorProto value;
|
||
|
final EnumValueDescriptorProto canonicalValue;
|
||
|
EnumAlias(this.value, this.canonicalValue);
|
||
|
}
|
||
|
class EnumGenerator extends ProtobufContainer {
|
||
|
final ProtobufContainer _parent;
|
||
|
final String classname;
|
||
|
final String fqname;
|
||
|
final EnumDescriptorProto _descriptor;
|
||
|
final List<EnumValueDescriptorProto> _canonicalValues = <EnumValueDescriptorProto>[];
|
||
|
final List<EnumAlias> _aliases = <EnumAlias>[];
|
||
|
EnumGenerator(EnumDescriptorProto descriptor, ProtobufContainer parent_A) : _parent = parent_A, classname = (parent_A == null || parent_A is FileGenerator) ? descriptor.name : '${parent_A.classname}_${descriptor.name}', fqname = (parent_A == null || parent_A.fqname == null) ? descriptor.name : (parent_A.fqname == '.' ? '.${descriptor.name}' : '${parent_A.fqname}.${descriptor.name}'), _descriptor = descriptor {
|
||
|
for (EnumValueDescriptorProto value_A in descriptor.value) {
|
||
|
EnumValueDescriptorProto canonicalValue = descriptor.value.firstWhere((v) => v.number == value_A.number);
|
||
|
if (value_A == canonicalValue) {
|
||
|
_canonicalValues.add(value_A);
|
||
|
} else {
|
||
|
_aliases.add(new EnumAlias(value_A, canonicalValue));
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
String get package => _parent.package;
|
||
|
FileGenerator get fileGen => _parent.fileGen;
|
||
|
void register(GenerationContext ctx) {
|
||
|
ctx.registerFieldType(fqname, this);
|
||
|
}
|
||
|
String getJsonConstant(FileGenerator usage) {
|
||
|
var name_A = "${classname}\$json";
|
||
|
if (usage.package == fileGen.package || packageImportPrefix.isEmpty) {
|
||
|
return name_A;
|
||
|
}
|
||
|
return "${packageImportPrefix}.${name_A}";
|
||
|
}
|
||
|
void generate_A(IndentingWriter out) {
|
||
|
out.addBlock('class ${classname} extends ProtobufEnum {', '}\n', () {
|
||
|
for (EnumValueDescriptorProto val in _canonicalValues) {
|
||
|
out.println('static const ${classname} ${val.name} = ' "const ${classname}._(${val.number}, '${val.name}');");
|
||
|
}
|
||
|
if (!_aliases.isEmpty) {
|
||
|
out.println();
|
||
|
for (EnumAlias alias in _aliases) {
|
||
|
out.println('static const ${classname} ${alias.value.name} =' ' ${alias.canonicalValue.name};');
|
||
|
}
|
||
|
}
|
||
|
out.println();
|
||
|
out.println('static const List<${classname}> values =' ' const <${classname}> [');
|
||
|
for (EnumValueDescriptorProto val in _canonicalValues) {
|
||
|
out.println(' ${val.name},');
|
||
|
}
|
||
|
out.println('];');
|
||
|
out.println();
|
||
|
out.println('static final Map<int, ${classname}> _byValue =' ' ProtobufEnum.initByValue(values);');
|
||
|
out.println('static ${classname} valueOf(int value) =>' ' _byValue[value];');
|
||
|
out.addBlock('static void ${checkItem}(${classname} v) {', '}', () {
|
||
|
out.println('if (v is !${classname})' " checkItemFailed(v, '${classname}');");
|
||
|
});
|
||
|
out.println();
|
||
|
out.println('const ${classname}._(int v, String n) ' ': super(v, n);');
|
||
|
});
|
||
|
}
|
||
|
void generateConstants(IndentingWriter out) {
|
||
|
var name_A = getJsonConstant(fileGen);
|
||
|
var json = _descriptor.writeToJsonMap();
|
||
|
out.print("const ${name_A} = ");
|
||
|
writeJsonConst(out, json);
|
||
|
out.println(";");
|
||
|
out.println();
|
||
|
}
|
||
|
}
|
||
|
class ExtensionGenerator {
|
||
|
final FieldDescriptorProto _descriptor;
|
||
|
final ProtobufContainer _parent;
|
||
|
ProtobufField _field;
|
||
|
String _extendedClassName = "";
|
||
|
ExtensionGenerator(this._descriptor, this._parent);
|
||
|
void resolve(GenerationContext ctx) {
|
||
|
_field = new ProtobufField(_descriptor, _parent, ctx);
|
||
|
ProtobufContainer extendedType = ctx.getFieldType(_descriptor.extendee);
|
||
|
if (extendedType != null) {
|
||
|
_extendedClassName = extendedType.classname;
|
||
|
}
|
||
|
}
|
||
|
String get package => _parent.package;
|
||
|
FileGenerator get fileGen => _parent.fileGen;
|
||
|
String get name {
|
||
|
if (_field == null) throw new StateError("resolve not called");
|
||
|
String name_A = _field.dartFieldName;
|
||
|
return _parent is MessageGenerator ? '${_parent.classname}.${name_A}' : name_A;
|
||
|
}
|
||
|
bool get needsFixnumImport {
|
||
|
if (_field == null) throw new StateError("resolve not called");
|
||
|
return _field.needsFixnumImport;
|
||
|
}
|
||
|
void addImportsTo(Set<FileGenerator> imports) {
|
||
|
if (_field == null) throw new StateError("resolve not called");
|
||
|
var typeGen = _field.baseType.generator;
|
||
|
if (typeGen != null && typeGen.fileGen != fileGen) {
|
||
|
imports.add(typeGen.fileGen);
|
||
|
}
|
||
|
}
|
||
|
void generate_A(IndentingWriter out) {
|
||
|
if (_field == null) throw new StateError("resolve not called");
|
||
|
String name_A = _field.dartFieldName;
|
||
|
if (_field.isRepeated) {
|
||
|
out.print('static final Extension ${name_A} = ' 'new Extension.repeated(\'${_extendedClassName}\', \'${name_A}\', ' '${_field.number}, ${_field.typeConstant}');
|
||
|
var type_A = _field.baseType;
|
||
|
if (type_A.isMessage || type_A.isGroup) {
|
||
|
var dartClass = type_A.getDartType(package);
|
||
|
out.println(', ${dartClass}.${checkItem}, ${dartClass}.create);');
|
||
|
} else if (type_A.isEnum) {
|
||
|
var dartClass = type_A.getDartType(package);
|
||
|
out.println(', ${dartClass}.${checkItem}, null, ${dartClass}.valueOf);');
|
||
|
} else {
|
||
|
out.println(", getCheckFunction(${_field.typeConstant}));");
|
||
|
}
|
||
|
return;
|
||
|
}
|
||
|
out.print('static final Extension ${name_A} = ' 'new Extension(\'${_extendedClassName}\', \'${name_A}\', ' '${_field.number}, ${_field.typeConstant}');
|
||
|
String initializer = _field.generateDefaultFunction(package);
|
||
|
var type_A = _field.baseType;
|
||
|
if (type_A.isMessage || type_A.isGroup) {
|
||
|
var dartClass = type_A.getDartType(package);
|
||
|
out.println(', ${initializer}, ${dartClass}.create);');
|
||
|
} else if (type_A.isEnum) {
|
||
|
var dartEnum = type_A.getDartType(package);
|
||
|
String valueOf_D = '(var v) => ${dartEnum}.valueOf(v)';
|
||
|
out.println(", ${initializer}, null, ${valueOf_D});");
|
||
|
} else if (initializer != null) {
|
||
|
out.println(", ${initializer});");
|
||
|
} else {
|
||
|
out.println(");");
|
||
|
}
|
||
|
}
|
||
|
}
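// Emits one Dart library per .proto file: header and imports, then enums,
// messages, a class holding any top-level extensions, client/service stubs,
// and finally the JSON descriptor constants.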
class FileGenerator extends ProtobufContainer {
|
||
|
static PbMixin _getDefaultMixin(FileDescriptorProto desc) {
|
||
|
if (!desc.hasOptions()) return null;
|
||
|
if (!desc.options.hasExtension(Dart_options.defaultMixin)) {
|
||
|
return null;
|
||
|
}
|
||
|
var name_A = desc.options.getExtension(Dart_options.defaultMixin);
|
||
|
PbMixin mixin_A = findMixin(name_A);
|
||
|
if (mixin_A == null) {
|
||
|
throw ("unknown mixin class: ${name_A}");
|
||
|
}
|
||
|
return mixin_A;
|
||
|
}
|
||
|
final FileDescriptorProto _fileDescriptor;
|
||
|
final List<EnumGenerator> enumGenerators = <EnumGenerator>[];
|
||
|
final List<MessageGenerator> messageGenerators = <MessageGenerator>[];
|
||
|
final List<ExtensionGenerator> extensionGenerators = <ExtensionGenerator>[];
|
||
|
final List<ClientApiGenerator> clientApiGenerators = <ClientApiGenerator>[];
|
||
|
final List<ServiceGenerator> serviceGenerators = <ServiceGenerator>[];
|
||
|
bool _linked = false;
|
||
|
FileGenerator(this._fileDescriptor) {
|
||
|
var defaultMixin_A = _getDefaultMixin(_fileDescriptor);
|
||
|
for (EnumDescriptorProto enumType in _fileDescriptor.enumType) {
|
||
|
enumGenerators.add(new EnumGenerator(enumType, this));
|
||
|
}
|
||
|
for (DescriptorProto messageType_A in _fileDescriptor.messageType) {
|
||
|
messageGenerators.add(new MessageGenerator(messageType_A, this, defaultMixin_A));
|
||
|
}
|
||
|
for (FieldDescriptorProto extension in _fileDescriptor.extension) {
|
||
|
extensionGenerators.add(new ExtensionGenerator(extension, this));
|
||
|
}
|
||
|
for (ServiceDescriptorProto service in _fileDescriptor.service) {
|
||
|
var serviceGen = new ServiceGenerator(service, this);
|
||
|
serviceGenerators.add(serviceGen);
|
||
|
clientApiGenerators.add(new ClientApiGenerator(serviceGen));
|
||
|
}
|
||
|
}
|
||
|
void resolve(GenerationContext ctx) {
|
||
|
if (_linked) throw new StateError("cross references already resolved");
|
||
|
for (var m in messageGenerators) {
|
||
|
m.resolve(ctx);
|
||
|
}
|
||
|
for (var x_A in extensionGenerators) {
|
||
|
x_A.resolve(ctx);
|
||
|
}
|
||
|
_linked = true;
|
||
|
}
|
||
|
String get package => _fileDescriptor.package;
|
||
|
String get classname => '';
|
||
|
String get fqname => '.${_fileDescriptor.package}';
|
||
|
FileGenerator get fileGen => this;
|
||
|
String _fileNameWithoutExtension(Uri filePath) {
|
||
|
String fileName = filePath.pathSegments.last;
|
||
|
int index_A = fileName.lastIndexOf(".");
|
||
|
return index_A == -1 ? fileName : fileName.substring(0, index_A);
|
||
|
}
|
||
|
String _generateClassName(Uri protoFilePath) {
|
||
|
String s = _fileNameWithoutExtension(protoFilePath).replaceAll('-', '_');
|
||
|
return '${s[0].toUpperCase()}${s.substring(1)}';
|
||
|
}
|
||
|
String _generateLibraryName(Uri protoFilePath) {
|
||
|
var libraryName_A = _fileNameWithoutExtension(protoFilePath).replaceAll('-', '_');
|
||
|
if (_fileDescriptor.package != '') {
|
||
|
return _fileDescriptor.package + "_" + libraryName_A;
|
||
|
}
|
||
|
return libraryName_A;
|
||
|
}
|
||
|
CodeGeneratorResponse_File generateResponse(OutputConfiguration config) {
|
||
|
IndentingWriter out = new IndentingWriter();
|
||
|
generate_A(out, config);
|
||
|
Uri filePath = new Uri.file(_fileDescriptor.name);
|
||
|
return new CodeGeneratorResponse_File()
|
||
|
..name = config.outputPathFor(filePath).path
|
||
|
..content = out.toString();
|
||
|
}
|
||
|
void generate_A(IndentingWriter out, [OutputConfiguration config = const DefaultOutputConfiguration()]) {
|
||
|
if (!_linked) throw new StateError("not linked");
|
||
|
Uri filePath = new Uri.file(_fileDescriptor.name);
|
||
|
if (filePath.isAbsolute) {
|
||
|
throw ("FAILURE: File with an absolute path is not supported");
|
||
|
}
|
||
|
generateHeader(out, filePath, config);
|
||
|
for (EnumGenerator e in enumGenerators) {
|
||
|
e.generate_A(out);
|
||
|
}
|
||
|
for (MessageGenerator m in messageGenerators) {
|
||
|
m.generate_A(out);
|
||
|
}
|
||
|
if (!extensionGenerators.isEmpty) {
|
||
|
String className = _generateClassName(filePath);
|
||
|
out.addBlock('class ${className} {', '}\n', () {
|
||
|
for (ExtensionGenerator x_A in extensionGenerators) {
|
||
|
x_A.generate_A(out);
|
||
|
}
|
||
|
out.println('static void registerAllExtensions(ExtensionRegistry ' 'registry) {');
|
||
|
for (ExtensionGenerator x_A in extensionGenerators) {
|
||
|
out.println(' registry.add(${x_A.name});');
|
||
|
}
|
||
|
out.println('}');
|
||
|
});
|
||
|
}
|
||
|
for (ClientApiGenerator c in clientApiGenerators) {
|
||
|
c.generate_A(out);
|
||
|
}
|
||
|
for (ServiceGenerator s in serviceGenerators) {
|
||
|
s.generate_A(out);
|
||
|
}
|
||
|
for (var e in enumGenerators) {
|
||
|
e.generateConstants(out);
|
||
|
}
|
||
|
for (MessageGenerator m in messageGenerators) {
|
||
|
m.generateConstants(out);
|
||
|
}
|
||
|
for (ServiceGenerator s in serviceGenerators) {
|
||
|
s.generateConstants(out);
|
||
|
}
|
||
|
}
|
||
|
void generateHeader(IndentingWriter out, Uri filePath, [OutputConfiguration config = const DefaultOutputConfiguration()]) {
|
||
|
String libraryName_A = _generateLibraryName(filePath);
|
||
|
out.println('///\n' '// Generated code. Do not modify.\n' '///\n' 'library ${libraryName_A};\n');
|
||
|
if (_fileDescriptor.service.isNotEmpty) {
|
||
|
out.println("import 'dart:async';\n");
|
||
|
}
|
||
|
if (_needsFixnumImport) {
|
||
|
out.println("import 'package:fixnum/fixnum.dart';");
|
||
|
}
|
||
|
out.println("import 'package:protobuf/protobuf.dart';");
|
||
|
var mixinImports = findMixinsToImport();
|
||
|
var importNames = mixinImports.keys.toList();
|
||
|
importNames.sort();
|
||
|
for (var imp in importNames) {
|
||
|
var symbols = mixinImports[imp];
|
||
|
out.println("import '${imp}' show ${symbols.join(', ')};");
|
||
|
}
|
||
|
for (var imported in _findProtosToImport()) {
|
||
|
String filename = imported._fileDescriptor.name;
|
||
|
Uri importPath = new Uri.file(filename);
|
||
|
if (importPath.isAbsolute) {
|
||
|
throw ("FAILURE: Import with absolute path is not supported");
|
||
|
}
|
||
|
Uri resolvedImport = config.resolveImport(importPath, filePath);
|
||
|
out.print("import '${resolvedImport}'");
|
||
|
if (package != imported.package && imported.package.isNotEmpty) {
|
||
|
out.print(' as ${imported.packageImportPrefix}');
|
||
|
}
|
||
|
out.println(';');
|
||
|
}
|
||
|
out.println();
|
||
|
}
|
||
|
bool get _needsFixnumImport {
|
||
|
for (var m in messageGenerators) {
|
||
|
if (m.needsFixnumImport) return true;
|
||
|
}
|
||
|
for (var x_A in extensionGenerators) {
|
||
|
if (x_A.needsFixnumImport) return true;
|
||
|
}
|
||
|
return false;
|
||
|
}
|
||
|
Set<FileGenerator> _findProtosToImport() {
|
||
|
var imports = new Set<FileGenerator>.identity();
|
||
|
for (var m in messageGenerators) {
|
||
|
m.addImportsTo(imports);
|
||
|
}
|
||
|
for (var x_A in extensionGenerators) {
|
||
|
x_A.addImportsTo(imports);
|
||
|
}
|
||
|
for (var x_A in serviceGenerators) {
|
||
|
x_A.addImportsTo(imports);
|
||
|
}
|
||
|
imports.remove(this);
|
||
|
return imports;
|
||
|
}
|
||
|
Map<String, List<String>> findMixinsToImport() {
|
||
|
var mixins = new Set<PbMixin>();
|
||
|
for (MessageGenerator m in messageGenerators) {
|
||
|
m.addMixinsTo(mixins);
|
||
|
}
|
||
|
var imports = {};
|
||
|
for (var m in mixins) {
|
||
|
var imp = m.importFrom;
|
||
|
List<String> symbols = imports[imp];
|
||
|
if (symbols == null) {
|
||
|
symbols = [];
|
||
|
imports[imp] = symbols;
|
||
|
}
|
||
|
symbols.add(m.name);
|
||
|
}
|
||
|
for (var imp in imports.keys) {
|
||
|
imports[imp].sort();
|
||
|
}
|
||
|
return imports;
|
||
|
}
|
||
|
}
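// Linking runs in two phases: every message and enum first registers its
// fully qualified name with the GenerationContext, then files (and finally
// services) resolve their field and type references against that registry.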
void link(GenerationOptions options, Iterable<FileGenerator> files) {
GenerationContext ctx = new GenerationContext(options);
for (var f in files) {
ctx.registerProtoFile(f);
for (var m in f.messageGenerators) {
m.register(ctx);
}
for (var e in f.enumGenerators) {
e.register(ctx);
}
}
for (var f in files) {
f.resolve(ctx);
}
for (var f in files) {
for (var s in f.serviceGenerators) {
s.resolve(ctx);
}
}
}
class GenerationContext {
final GenerationOptions options;
final Map<String, FileGenerator> _files_A = <String, FileGenerator>{};
final Map<String, ProtobufContainer> _typeRegistry = <String, ProtobufContainer>{};
GenerationContext(this.options);
void registerProtoFile(FileGenerator f) {
_files_A[f._fileDescriptor.name] = f;
}
void registerFieldType(String name_A, ProtobufContainer type_A) {
_typeRegistry[name_A] = type_A;
}
ProtobufContainer getFieldType(String name_A) => _typeRegistry[name_A];
}
const checkItem = '\$checkItem';
|
||
|
class MessageGenerator extends ProtobufContainer {
|
||
|
static final List<String> reservedWords = ['assert', 'break', 'case', 'catch', 'class', 'const', 'continue', 'default', 'do', 'else', 'enum', 'extends', 'false', 'final', 'finally', 'for', 'if', 'in', 'is', 'new', 'null', 'rethrow', 'return', 'super', 'switch', 'this', 'throw', 'true', 'try', 'var', 'void', 'while', 'with'];
|
||
|
static final List<String> generatedNames = ['create', 'createRepeated', 'getDefault', checkItem];
|
||
|
static PbMixin _getMixin(DescriptorProto desc, PbMixin defaultValue) {
|
||
|
if (!desc.hasOptions()) return defaultValue;
|
||
|
if (!desc.options.hasExtension(Dart_options.mixin)) return defaultValue;
|
||
|
String name_A = desc.options.getExtension(Dart_options.mixin);
|
||
|
if (name_A.isEmpty) return null;
|
||
|
var mixin_A = findMixin(name_A);
|
||
|
if (mixin_A == null) {
|
||
|
throw ("unknown mixin class: ${name_A}");
|
||
|
}
|
||
|
return mixin_A;
|
||
|
}
|
||
|
final String classname;
|
||
|
final String fqname;
|
||
|
final PbMixin mixin_A;
|
||
|
final ProtobufContainer _parent;
|
||
|
final DescriptorProto _descriptor;
|
||
|
final List<EnumGenerator> _enumGenerators = <EnumGenerator>[];
|
||
|
final List<MessageGenerator> _messageGenerators = <MessageGenerator>[];
|
||
|
final List<ExtensionGenerator> _extensionGenerators = <ExtensionGenerator>[];
|
||
|
List<ProtobufField> _fieldList;
|
||
|
final Set<String> _methodNames = new Set<String>();
|
||
|
MessageGenerator(DescriptorProto descriptor, ProtobufContainer parent_A, PbMixin defaultMixin_A) : _descriptor = descriptor, _parent = parent_A, classname = (parent_A.classname == '') ? descriptor.name : '${parent_A.classname}_${descriptor.name}', fqname = (parent_A == null || parent_A.fqname == null) ? descriptor.name : (parent_A.fqname == '.' ? '.${descriptor.name}' : '${parent_A.fqname}.${descriptor.name}'), mixin_A = _getMixin(descriptor, defaultMixin_A) {
|
||
|
for (EnumDescriptorProto e in _descriptor.enumType) {
|
||
|
_enumGenerators.add(new EnumGenerator(e, this));
|
||
|
}
|
||
|
for (DescriptorProto n in _descriptor.nestedType) {
|
||
|
_messageGenerators.add(new MessageGenerator(n, this, defaultMixin_A));
|
||
|
}
|
||
|
for (FieldDescriptorProto x_A in _descriptor.extension) {
|
||
|
_extensionGenerators.add(new ExtensionGenerator(x_A, this));
|
||
|
}
|
||
|
}
|
||
|
String get package => _parent.package;
|
||
|
FileGenerator get fileGen => _parent.fileGen;
|
||
|
void checkResolved() {
|
||
|
if (_fieldList == null) {
|
||
|
throw new StateError("message not resolved: ${fqname}");
|
||
|
}
|
||
|
}
|
||
|
String getJsonConstant(FileGenerator usage) {
|
||
|
var name_A = "${classname}\$json";
|
||
|
if (usage.package == fileGen.package || packageImportPrefix.isEmpty) {
|
||
|
return name_A;
|
||
|
}
|
||
|
return "${packageImportPrefix}.${name_A}";
|
||
|
}
|
||
|
void addMixinsTo(Set<PbMixin> output) {
|
||
|
if (mixin_A != null) {
|
||
|
output.addAll(mixin_A.findMixinsToApply());
|
||
|
}
|
||
|
for (var m in _messageGenerators) {
|
||
|
m.addMixinsTo(output);
|
||
|
}
|
||
|
}
|
||
|
void register(GenerationContext ctx) {
|
||
|
ctx.registerFieldType(fqname, this);
|
||
|
for (var m in _messageGenerators) {
|
||
|
m.register(ctx);
|
||
|
}
|
||
|
for (var e in _enumGenerators) {
|
||
|
e.register(ctx);
|
||
|
}
|
||
|
}
|
||
|
void resolve(GenerationContext ctx) {
|
||
|
if (_fieldList != null) throw new StateError("message already resolved");
|
||
|
_fieldList = <ProtobufField>[];
|
||
|
for (FieldDescriptorProto field in _descriptor.field) {
|
||
|
_fieldList.add(new ProtobufField(field, this, ctx));
|
||
|
}
|
||
|
for (var m in _messageGenerators) {
|
||
|
m.resolve(ctx);
|
||
|
}
|
||
|
for (var x_A in _extensionGenerators) {
|
||
|
x_A.resolve(ctx);
|
||
|
}
|
||
|
}
|
||
|
bool get needsFixnumImport {
|
||
|
if (_fieldList == null) throw new StateError("message not resolved");
|
||
|
for (var field in _fieldList) {
|
||
|
if (field.needsFixnumImport) return true;
|
||
|
}
|
||
|
for (var m in _messageGenerators) {
|
||
|
if (m.needsFixnumImport) return true;
|
||
|
}
|
||
|
for (var x_A in _extensionGenerators) {
|
||
|
if (x_A.needsFixnumImport) return true;
|
||
|
}
|
||
|
return false;
|
||
|
}
|
||
|
void addImportsTo(Set<FileGenerator> imports) {
|
||
|
if (_fieldList == null) throw new StateError("message not resolved");
|
||
|
for (var field in _fieldList) {
|
||
|
var typeGen = field.baseType.generator;
|
||
|
if (typeGen != null && typeGen.fileGen != fileGen) {
|
||
|
imports.add(typeGen.fileGen);
|
||
|
}
|
||
|
}
|
||
|
for (var m in _messageGenerators) {
|
||
|
m.addImportsTo(imports);
|
||
|
}
|
||
|
for (var x_A in _extensionGenerators) {
|
||
|
x_A.addImportsTo(imports);
|
||
|
}
|
||
|
}
|
||
|
void generate_A(IndentingWriter out) {
checkResolved();
_methodNames.clear();
_methodNames.addAll(reservedWords);
_methodNames.addAll(GeneratedMessage_reservedNames);
_methodNames.addAll(generatedNames);
if (mixin_A != null) {
_methodNames.addAll(mixin_A.findReservedNames());
}
for (EnumGenerator e in _enumGenerators) {
e.generate_A(out);
}
for (MessageGenerator m in _messageGenerators) {
m.generate_A(out);
}
var mixinClause = '';
if (mixin_A != null) {
var mixinNames = mixin_A.findMixinsToApply().map((m) => m.name);
mixinClause = ' with ${mixinNames.join(", ")}';
}
out.addBlock('class ${classname} extends GeneratedMessage${mixinClause} {', '}', () {
out.addBlock('static final BuilderInfo _i = new BuilderInfo(\'${classname}\')', ';', () {
for (ProtobufField field in _fieldList) {
out.println(field.generateBuilderInfoCall(package));
}
if (_descriptor.extensionRange.length > 0) {
out.println('..hasExtensions = true');
}
if (!_hasRequiredFields(this, new Set())) {
out.println('..hasRequiredFields = false');
}
});
for (ExtensionGenerator x_A in _extensionGenerators) {
x_A.generate_A(out);
}
out.println();
out.println('${classname}() : super();');
out.println('${classname}.fromBuffer(List<int> i,' ' [ExtensionRegistry r = ExtensionRegistry.EMPTY])' ' : super.fromBuffer(i, r);');
out.println('${classname}.fromJson(String i,' ' [ExtensionRegistry r = ExtensionRegistry.EMPTY])' ' : super.fromJson(i, r);');
out.println('${classname} clone() =>' ' new ${classname}()..mergeFromMessage(this);');
out.println('BuilderInfo get info_ => _i;');
out.println('static ${classname} create() =>' ' new ${classname}();');
out.println('static PbList<${classname}> createRepeated() =>' ' new PbList<${classname}>();');
out.addBlock('static ${classname} getDefault() {', '}', () {
out.println('if (_defaultInstance == null) _defaultInstance = new _Readonly${classname}();');
out.println('return _defaultInstance;');
});
out.println('static ${classname} _defaultInstance;');
out.addBlock('static void ${checkItem}(${classname} v) {', '}', () {
out.println('if (v is !${classname})' " checkItemFailed(v, '${classname}');");
});
generateFieldsAccessorsMutators(out);
});
out.println();
out.println('class _Readonly${classname} extends ${classname} with ReadonlyMessageMixin {}');
out.println();
}
bool _hasRequiredFields(MessageGenerator type_A, Set alreadySeen) {
if (type_A._fieldList == null) throw new StateError("message not resolved");
if (alreadySeen.contains(type_A.fqname)) {
return false;
}
alreadySeen.add(type_A.fqname);
if (type_A._descriptor.extensionRange.length > 0) {
return true;
}
for (ProtobufField field in type_A._fieldList) {
if (field.isRequired) {
return true;
}
if (field.baseType.isMessage) {
MessageGenerator child = field.baseType.generator;
if (_hasRequiredFields(child, alreadySeen)) {
return true;
}
}
}
return false;
}
void generateFieldsAccessorsMutators(IndentingWriter out) {
for (ProtobufField field in _fieldList) {
out.println();
String identifier = field.dartFieldName;
String hasIdentifier = field.hasMethodName;
String clearIdentifier = field.clearMethodName;
if (!field.isRepeated) {
while (_methodNames.contains(identifier) || _methodNames.contains(hasIdentifier) || _methodNames.contains(clearIdentifier)) {
identifier += '_' + field.number.toString();
hasIdentifier += '_' + field.number.toString();
clearIdentifier += '_' + field.number.toString();
}
_methodNames.add(identifier);
_methodNames.add(hasIdentifier);
_methodNames.add(clearIdentifier);
} else {
while (_methodNames.contains(identifier)) {
identifier += '_' + field.number.toString();
}
_methodNames.add(identifier);
}
var fieldTypeString = field.getDartType(package);
out.println('${fieldTypeString} get ${identifier}' ' => getField(${field.number});');
if (!field.isRepeated) {
out.println('void set ${identifier}' '(${fieldTypeString} v) ' '{ setField(${field.number}, v); }');
out.println('bool ${hasIdentifier}() =>' ' hasField(${field.number});');
out.println('void ${clearIdentifier}() =>' ' clearField(${field.number});');
}
}
}
void generateConstants(IndentingWriter out) {
const nestedTypeTag = 3;
const enumTypeTag = 4;
assert(_descriptor.info_.fieldInfo[nestedTypeTag].name == "nestedType");
assert(_descriptor.info_.fieldInfo[enumTypeTag].name == "enumType");
var name_A = getJsonConstant(fileGen);
var json = _descriptor.writeToJsonMap();
var nestedTypeNames = _messageGenerators.map((m) => m.getJsonConstant(fileGen)).toList();
var nestedEnumNames = _enumGenerators.map((e) => e.getJsonConstant(fileGen)).toList();
out.addBlock("const ${name_A} = const {", "};", () {
for (var key_A in json.keys) {
out.print("'${key_A}': ");
if (key_A == "${nestedTypeTag}") {
out.println("const [${nestedTypeNames.join(", ")}],");
continue;
} else if (key_A == "${enumTypeTag}") {
out.println("const [${nestedEnumNames.join(", ")}],");
continue;
}
writeJsonConst(out, json[key_A]);
out.println(",");
}
});
out.println();
for (var m in _messageGenerators) {
m.generateConstants(out);
}
for (var e in _enumGenerators) {
e.generateConstants(out);
}
}
}
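// Parses the comma-separated options that protoc forwards in
// CodeGeneratorRequest.parameter (typically the text before the colon in the
// --dart_out argument). Each "name" or "name=value" entry is dispatched to the
// matching SingleOptionParser; any errors are joined into response_A.error and
// false is returned.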
bool genericOptionsParser(CodeGeneratorRequest request, CodeGeneratorResponse response_A, Map<String, SingleOptionParser> parsers) {
var parameter = request.parameter != null ? request.parameter : '';
var options = parameter.trim().split(',');
var errors_A = [];
for (var option in options) {
option = option.trim();
if (option.isEmpty) continue;
var reportError = (details) {
errors_A.add('Error found trying to parse the option: ${option}.\n${details}');
};
var nameValue = option.split('=');
if (nameValue.length != 1 && nameValue.length != 2) {
reportError('Options should be a single token, or a name=value pair');
continue;
}
var name_A = nameValue[0].trim();
var parser = parsers[name_A];
if (parser == null) {
reportError('Unknown option (${name_A}).');
continue;
}
var value_A = nameValue.length > 1 ? nameValue[1].trim() : null;
parser.parse_B(name_A, value_A, reportError);
}
if (errors_A.length == 0) return true;
response_A.error = errors_A.join('\n');
return false;
}
class GenerationOptions {
final Map<String, String> fieldNameOverrides;
GenerationOptions([this.fieldNameOverrides = const{}]);
}
abstract class SingleOptionParser {
void parse_B(String name_A, String value_A, onError_A(String details));
}
GenerationOptions parseGenerationOptions(CodeGeneratorRequest request, CodeGeneratorResponse response_A, [Map<String, SingleOptionParser> parsers]) {
var fieldNameOptionParser = new FieldNameOptionParser();
var map_A = {};
if (parsers != null) parsers.forEach((k, v) {
map_A[k] = v;
});
map_A['field_name'] = fieldNameOptionParser;
if (genericOptionsParser(request, response_A, map_A)) {
return new GenerationOptions(fieldNameOptionParser.mappings);
}
return null;
}
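// Handles the 'field_name' option. Each value has the form
// <fully.qualified.field>|<dartName>; for example (hypothetical option string)
// field_name=mypkg.Person.photo_url|photoLink would rename that field's Dart
// accessors. Keys are stored with a leading '.' so they line up with
// ProtobufField.fqname when the override map is consulted.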
class FieldNameOptionParser implements SingleOptionParser {
final Map<String, String> mappings = {};
void parse_B(String name_A, String value_A, onError_A(String message)) {
if (value_A == null) {
onError_A('Invalid field_name option, expected a non-empty value.');
return;
}
List<String> fromTo = value_A.split('|');
if (fromTo.length != 2) {
onError_A('Invalid field_name option, expected a single "|" separator.');
return;
}
var fromName = fromTo[0].trim();
var toName = fromTo[1].trim();
if (fromName.isEmpty || toName.isEmpty) {
onError_A('Invalid field_name option, ' '"from" and "to" names should not be empty.');
return;
}
mappings['.${fromName}'] = toName;
}
}
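// Controls where generated files go: maps each .proto path to a .pb.dart path
// and decides how one generated file imports another. The default
// implementation below rewrites the extension in place and uses relative
// imports between output files.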
abstract class OutputConfiguration {
const OutputConfiguration();
String replacePathExtension(String filePath) => '${withoutExtension(filePath)}.pb.dart';
Uri replaceUriExtension(Uri file_A) => url.toUri(replacePathExtension(url.fromUri_A(file_A)));
Uri resolveImport(Uri target_A, Uri source_A);
Uri outputPathFor(Uri inputPath);
}
class DefaultOutputConfiguration extends OutputConfiguration {
const DefaultOutputConfiguration();
Uri outputPathFor(Uri input_A) => replaceUriExtension(input_A);
Uri resolveImport(Uri target_A, Uri source_A) {
var builder_A = url;
var targetPath = builder_A.fromUri_A(target_A);
var sourceDir = builder_A.dirname(builder_A.fromUri_A(source_A));
return builder_A.toUri(replacePathExtension(builder_A.relative(targetPath, from: sourceDir)));
}
}
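// Wraps a FieldDescriptorProto and knows how to render everything the
// generator needs for one field: its Dart type, the BuilderInfo registration
// call, accessor/has/clear method names (honoring field_name overrides), and
// the Dart expression for its default value.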
class ProtobufField {
static final RegExp HEX_LITERAL_REGEX = new RegExp(r'^0x[0-9a-f]+$', multiLine: false, caseSensitive: false);
static final RegExp INTEGER_LITERAL_REGEX = new RegExp(r'^[+-]?[0-9]+$');
static final RegExp DECIMAL_LITERAL_REGEX_A = new RegExp(r'^[+-]?([0-9]*)\.[0-9]+(e[+-]?[0-9]+)?$', multiLine: false, caseSensitive: false);
static final RegExp DECIMAL_LITERAL_REGEX_B = new RegExp(r'^[+-]?[0-9]+e[+-]?[0-9]+$', multiLine: false, caseSensitive: false);
final FieldDescriptorProto _field;
final String fqname;
final BaseType baseType;
final GenerationOptions _genOptions;
ProtobufField(FieldDescriptorProto field, ProtobufContainer parent_A, GenerationContext ctx) : _field = field, fqname = '${parent_A.fqname}.${field.name}', baseType = new BaseType(field, ctx), _genOptions = ctx.options;
int get number => _field.number;
bool get isRequired => _field.label == FieldDescriptorProto_Label.LABEL_REQUIRED;
bool get isRepeated => _field.label == FieldDescriptorProto_Label.LABEL_REPEATED;
bool get isPacked => isRepeated && _field.options != null && _field.options.packed;
bool get needsFixnumImport => baseType.unprefixed == "Int64";
String getDartType(String package) {
if (isRepeated) return baseType.getRepeatedDartType(package);
return baseType.getDartType(package);
}
String get typeConstant {
String prefix = 'O';
if (isRequired) {
prefix = 'Q';
} else if (isPacked) {
prefix = 'K';
} else if (isRepeated) {
prefix = 'P';
}
return "PbFieldType." + prefix + baseType.typeConstantSuffix;
}
String get dartFieldName {
String name_A = _fieldMethodSuffix;
return '${name_A[0].toLowerCase()}${name_A.substring(1)}';
}
String get hasMethodName => 'has${_fieldMethodSuffix}';
String get clearMethodName => 'clear${_fieldMethodSuffix}';
String get _fieldMethodSuffix {
String underscoresToCamelCase(String s) {
cap(s) => s.isEmpty ? s : '${s[0].toUpperCase()}${s.substring(1)}';
return s.split('_').map(cap).join('');
}
if (baseType.isGroup) {
String name_A = _field.typeName;
int index_A = name_A.lastIndexOf('.');
if (index_A != -1) {
name_A = name_A.substring(index_A + 1);
}
return underscoresToCamelCase(name_A);
}
var name_A = _genOptions.fieldNameOverrides[fqname];
return name_A != null ? name_A : underscoresToCamelCase(_field.name);
}
String generateBuilderInfoCall(String package) {
String quotedName = "'${dartFieldName}'";
String type_A = baseType.getDartType(package);
if (isRepeated) {
if (baseType.isMessage || baseType.isGroup) {
return '..pp(${number}, ${quotedName}, ${typeConstant},' ' ${type_A}.${checkItem}, ${type_A}.create)';
} else if (baseType.isEnum) {
return '..pp(${number}, ${quotedName}, ${typeConstant},' ' ${type_A}.${checkItem}, null, ${type_A}.valueOf)';
} else {
return '..p(${number}, ${quotedName}, ${typeConstant})';
}
}
String makeDefault = generateDefaultFunction(package);
if (baseType.isEnum) {
String valueOf_D = '${type_A}.valueOf';
return '..e(${number}, ${quotedName}, ${typeConstant}, ${makeDefault}, ${valueOf_D})';
}
String prefix = '..a(${number}, ${quotedName}, ${typeConstant}';
if (makeDefault == null) return prefix + ')';
if (baseType.isMessage || baseType.isGroup) {
return prefix + ', ${makeDefault}, ${type_A}.create)';
}
return prefix + ', ${makeDefault})';
}
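// Maps the field's proto default value to a Dart expression, or returns null
// when the proto default already matches the Dart default. For example, a
// double field with default "inf" yields 'double.INFINITY' and one with
// default "2.5" yields '2.5'; 64-bit integers become Int64.ZERO or a
// parseLongInt(...) call, and bytes defaults become a closure returning the
// literal byte list.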
String generateDefaultFunction(String package) {
if (isRepeated) {
return '() => new PbList()';
}
bool samePackage = package == baseType.package;
switch (_field.type) {
case FieldDescriptorProto_Type.TYPE_BOOL:
if (_field.hasDefaultValue() && 'false' != _field.defaultValue) {
return '${_field.defaultValue}';
}
return null;

case FieldDescriptorProto_Type.TYPE_FLOAT: case FieldDescriptorProto_Type.TYPE_DOUBLE:
if (!_field.hasDefaultValue()) {
return null;
} else if ('0.0' == _field.defaultValue || '0' == _field.defaultValue) {
return null;
} else if (_field.defaultValue == 'inf') {
return 'double.INFINITY';
} else if (_field.defaultValue == '-inf') {
return 'double.NEGATIVE_INFINITY';
} else if (_field.defaultValue == 'nan') {
return 'double.NAN';
} else if (HEX_LITERAL_REGEX.hasMatch(_field.defaultValue)) {
return '(${_field.defaultValue}).toDouble()';
} else if (INTEGER_LITERAL_REGEX.hasMatch(_field.defaultValue)) {
return '${_field.defaultValue}.0';
} else if (DECIMAL_LITERAL_REGEX_A.hasMatch(_field.defaultValue) || DECIMAL_LITERAL_REGEX_B.hasMatch(_field.defaultValue)) {
return '${_field.defaultValue}';
}
throw _invalidDefaultValue;

case FieldDescriptorProto_Type.TYPE_INT32: case FieldDescriptorProto_Type.TYPE_UINT32: case FieldDescriptorProto_Type.TYPE_SINT32: case FieldDescriptorProto_Type.TYPE_FIXED32: case FieldDescriptorProto_Type.TYPE_SFIXED32:
if (_field.hasDefaultValue() && '0' != _field.defaultValue) {
return '${_field.defaultValue}';
}
return null;

case FieldDescriptorProto_Type.TYPE_INT64: case FieldDescriptorProto_Type.TYPE_UINT64: case FieldDescriptorProto_Type.TYPE_SINT64: case FieldDescriptorProto_Type.TYPE_FIXED64: case FieldDescriptorProto_Type.TYPE_SFIXED64:
var value_A = '0';
if (_field.hasDefaultValue()) value_A = _field.defaultValue;
if (value_A == '0') return 'Int64.ZERO';
return "parseLongInt('${value_A}')";

case FieldDescriptorProto_Type.TYPE_STRING:
if (!_field.hasDefaultValue() || _field.defaultValue.isEmpty) {
return null;
}
String value_A = _field.defaultValue.replaceAll(r'$', r'\$');
return '\'${value_A}\'';

case FieldDescriptorProto_Type.TYPE_BYTES:
if (!_field.hasDefaultValue() || _field.defaultValue.isEmpty) {
return null;
}
String byteList = _field.defaultValue.codeUnits.map((b) => '0x${b.toRadixString(16)}').join(',');
return '() => <int>[${byteList}]';

case FieldDescriptorProto_Type.TYPE_GROUP: case FieldDescriptorProto_Type.TYPE_MESSAGE:
if (samePackage) return '${baseType.unprefixed}.getDefault';
return "${baseType.prefixed}.getDefault";

case FieldDescriptorProto_Type.TYPE_ENUM:
var className = samePackage ? baseType.unprefixed : baseType.prefixed;
EnumGenerator gen = baseType.generator;
if (_field.hasDefaultValue() && !_field.defaultValue.isEmpty) {
return '${className}.${_field.defaultValue}';
} else if (!gen._canonicalValues.isEmpty) {
return '${className}.${gen._canonicalValues[0].name}';
}
return null;

default:
throw _typeNotImplemented("generatedDefaultFunction");
}
}
get _invalidDefaultValue => "dart-protoc-plugin:" " invalid default value (${_field.defaultValue})" " found in field ${fqname}";
_typeNotImplemented(String methodName) => "dart-protoc-plugin:" " ${methodName} not implemented for type (${_field.type})" " found in field ${fqname}";
}
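// Generates an abstract base class for a proto service: one abstract method
// per RPC, a createRequest() factory that instantiates the right request type,
// and a handleCall() dispatcher, plus $json/$messageJson descriptor constants
// for the service and every message type it (transitively) depends on.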
class ServiceGenerator {
final ServiceDescriptorProto _descriptor;
final FileGenerator fileGen;
final _deps = <String, MessageGenerator>{};
final _undefinedDeps = <String, String>{};
ServiceGenerator(this._descriptor, this.fileGen);
String get classname {
if (_descriptor.name.endsWith("Service")) {
return _descriptor.name + "Base";
} else {
return _descriptor.name + "ServiceBase";
}
}
void resolve(GenerationContext ctx) {
for (var m in _methodDescriptors) {
_addDependency(ctx, m.inputType, "input type of ${m.name}");
_addDependency(ctx, m.outputType, "output type of ${m.name}");
}
_resolveMoreTypes(ctx);
}
void _resolveMoreTypes(GenerationContext ctx) {}
void _addDependency(GenerationContext ctx, String fqname, String location_A) {
if (_deps.containsKey(fqname)) return;
MessageGenerator mg = ctx.getFieldType(fqname);
if (mg == null) {
_undefinedDeps[fqname] = location_A;
return;
}
_addDepsRecursively(mg);
}
void _addDepsRecursively(MessageGenerator mg) {
if (_deps.containsKey(mg.fqname)) return;
mg.checkResolved();
_deps[mg.fqname] = mg;
for (var field in mg._fieldList) {
if (field.baseType.isGroup || field.baseType.isMessage) {
_addDepsRecursively(field.baseType.generator);
}
}
}
void addImportsTo(Set<FileGenerator> imports) {
for (var mg in _deps.values) {
imports.add(mg.fileGen);
}
}
String _getDartClassName(String fqname) {
var mg = _deps[fqname];
if (mg == null) {
var location_A = _undefinedDeps[fqname];
throw 'FAILURE: Unknown type reference (${fqname}) for ${location_A}';
}
if (fileGen.package == mg.fileGen.package || mg.fileGen.package == "") {
return mg.classname;
}
return mg.packageImportPrefix + "." + mg.classname;
}
List<MethodDescriptorProto> get _methodDescriptors => _descriptor.method;
String _methodName(String name_A) => name_A.substring(0, 1).toLowerCase() + name_A.substring(1);
String get _parentClass => 'GeneratedService';
void _generateStub(IndentingWriter out, MethodDescriptorProto m) {
var methodName = _methodName(m.name);
var inputClass = _getDartClassName(m.inputType);
var outputClass = _getDartClassName(m.outputType);
out.println('Future<${outputClass}> ${methodName}(' 'ServerContext ctx, ${inputClass} request);');
}
void _generateStubs(IndentingWriter out) {
for (MethodDescriptorProto m in _methodDescriptors) {
_generateStub(out, m);
}
out.println();
}
void _generateRequestMethod(IndentingWriter out) {
out.addBlock('GeneratedMessage createRequest(String method) {', '}', () {
out.addBlock("switch (method) {", "}", () {
for (MethodDescriptorProto m in _methodDescriptors) {
var inputClass = _getDartClassName(m.inputType);
out.println("case '${m.name}': return new ${inputClass}();");
}
out.println("default: " "throw new ArgumentError('Unknown method: \$method');");
});
});
out.println();
}
void _generateDispatchMethod(out) {
out.addBlock('Future<GeneratedMessage> handleCall(ServerContext ctx, ' 'String method, GeneratedMessage request) {', '}', () {
out.addBlock("switch (method) {", "}", () {
for (MethodDescriptorProto m in _methodDescriptors) {
var methodName = _methodName(m.name);
out.println("case '${m.name}': return ${methodName}(ctx, request);");
}
out.println("default: " "throw new ArgumentError('Unknown method: \$method');");
});
});
out.println();
}
void _generateMoreClassMembers(out) {}
void generate_A(IndentingWriter out) {
out.addBlock('abstract class ${classname} extends ' '${_parentClass} {', '}', () {
_generateStubs(out);
_generateRequestMethod(out);
_generateDispatchMethod(out);
_generateMoreClassMembers(out);
out.println("Map<String, dynamic> get \$json => ${jsonConstant};");
out.println("Map<String, dynamic> get \$messageJson =>" " ${messageJsonConstant};");
});
out.println();
}
String get jsonConstant => "${_descriptor.name}\$json";
String get messageJsonConstant => "${_descriptor.name}\$messageJson";
void generateConstants(IndentingWriter out) {
out.print("const ${jsonConstant} = ");
writeJsonConst(out, _descriptor.writeToJsonMap());
out.println(";");
out.println();
var typeConstants = <String, String>{};
for (var key_A in _deps.keys) {
typeConstants[key_A] = _deps[key_A].getJsonConstant(fileGen);
}
out.addBlock("const ${messageJsonConstant} = const {", "};", () {
for (var key_A in typeConstants.keys) {
var typeConst = typeConstants[key_A];
out.println("'${key_A}': ${typeConst},");
}
});
out.println();
if (_undefinedDeps.isNotEmpty) {
for (var name_A in _undefinedDeps.keys) {
var location_A = _undefinedDeps[name_A];
out.println("// can't resolve (${name_A}) used by ${location_A}");
}
out.println();
}
}
}
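// Writes a JSON-like value (maps, lists, strings, numbers, bools, null) as a
// Dart const literal. Collections containing non-empty nested collections are
// laid out one entry per line; strings fall back to raw and triple-quoted
// forms when they contain '$', backslashes, quotes, or newlines.
// For example, the map {"1": "foo"} is emitted as: const {'1': 'foo'}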
void writeJsonConst(IndentingWriter out, val) {
if (val is Map) {
if (val.values.any(_nonEmptyListOrMap)) {
out.addBlock("const {", "}", () => _writeMapItems(out, val, vertical: true), endWithNewline: false);
} else {
out.print("const {");
_writeMapItems(out, val);
out.print("}");
}
} else if (val is List) {
if (val.any(_nonEmptyListOrMap)) {
out.addBlock("const [", "]", () => _writeListItems(out, val, vertical: true), endWithNewline: false);
} else {
out.print("const [");
_writeListItems(out, val);
out.print("]");
}
} else if (val is String) {
_writeString(out, val);
} else if (val is num || val is bool) {
out.print(val.toString());
} else if (val == null) {
out.print("null");
} else {
throw "not JSON: ${val}";
}
}
bool _nonEmptyListOrMap(x_A) {
if (x_A is List && !x_A.isEmpty) return true;
if (x_A is Map && !x_A.isEmpty) return true;
return false;
}
void _writeString(IndentingWriter out, String val) {
if (_maybeWriteSingleLineString(out, val)) return;
var quote = "'''";
out.addUnindentedBlock("r${quote}", "${quote}", () {
out.print(val.replaceAll(quote, '${quote} "${quote}" r${quote}'));
}, endWithNewline: false);
}
bool _maybeWriteSingleLineString(IndentingWriter out, String val) {
if (val.contains("\n")) return false;
var prefix = '';
if (val.contains(r'$') || val.contains(r'\')) {
prefix = 'r';
}
if (!val.contains("'")) {
out.print("${prefix}'${val}'");
return true;
} else if (!val.contains('"')) {
out.print('${prefix}"${val}"');
return true;
} else if (!val.contains("'''")) {
out.print("${prefix}'''${val}'''");
return true;
} else if (!val.contains('"""')) {
out.print('${prefix}"""${val}"""');
return true;
} else {
return false;
}
}
void _writeListItems(IndentingWriter out, List val, {bool vertical: false}) {
bool first_A = true;
for (var item in val) {
if (!first_A && !vertical) {
out.print(", ");
}
writeJsonConst(out, item);
if (vertical) {
out.println(",");
}
first_A = false;
}
}
void _writeMapItems(IndentingWriter out, Map<String, dynamic> val, {bool vertical: false}) {
bool first_A = true;
for (String key_A in val.keys) {
if (!first_A && !vertical) out.print(", ");
_writeString(out, key_A);
out.print(": ");
writeJsonConst(out, val[key_A]);
if (vertical) {
out.println(",");
}
first_A = false;
}
}
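// Minimal string builder used for all generated output. It tracks the current
// indentation; addBlock() prints a header line, indents the body produced by
// the callback, then prints the footer. A rough usage sketch (hypothetical):
//   var out = new IndentingWriter();
//   out.addBlock('class Foo {', '}', () { out.println('int x;'); });
//   // toString() now holds "class Foo {", an indented "int x;", and "}".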
class IndentingWriter {
final StringBuffer _buffer_B = new StringBuffer();
String _indent_A = "";
bool _needIndent = true;
void print(String text) {
var lastNewline = text.lastIndexOf('\n');
if (lastNewline == -1) {
_writeChunk(text);
return;
}
for (String line in text.substring(0, lastNewline).split('\n')) {
_writeChunk(line);
_newline();
}
_writeChunk(text.substring(lastNewline + 1));
}
void println([String text = '']) {
print(text);
_newline();
}
void addBlock(String start_A, String end_A, void body(), {endWithNewline: true}) {
_addBlock(start_A, end_A, body, endWithNewline, _indent_A + ' ');
}
void addUnindentedBlock(String start_A, String end_A, void body(), {endWithNewline: true}) {
_addBlock(start_A, end_A, body, endWithNewline, '');
}
void _addBlock(String start_A, String end_A, void body(), endWithNewline, newIndent) {
println(start_A);
var oldIndent = _indent_A;
_indent_A = newIndent;
body();
_indent_A = oldIndent;
if (endWithNewline) {
println(end_A);
} else {
print(end_A);
}
}
String toString() => _buffer_B.toString();
void _writeChunk(String chunk) {
assert(!chunk.contains('\n'));
if (chunk.isEmpty) return;
if (_needIndent) {
_buffer_B.write(_indent_A);
_needIndent = false;
}
_buffer_B.write(chunk);
}
void _newline() {
_buffer_B.writeln();
_needIndent = true;
}
}
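// Plugin entry point, following protoc's plugin protocol: protoc writes a
// serialized CodeGeneratorRequest to this process's stdin and reads the
// CodeGeneratorResponse back from stdout; CodeGenerator is constructed over
// those two streams and generate_A() drives the whole run.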
void main() {
new CodeGenerator(_B.stdin, _B.stdout).generate_A();
}