diff --git a/mozilla/js/js2/hash.cpp b/mozilla/js/js2/hash.cpp index 214a420cc57..b0504320d87 100644 --- a/mozilla/js/js2/hash.cpp +++ b/mozilla/js/js2/hash.cpp @@ -63,7 +63,7 @@ JS::HashNumber JS::hashString(const String &s) // -static const int minLgNBuckets = 4; +const uint minLgNBuckets = 4; JS::GenericHashTableIterator::GenericHashTableIterator(GenericHashTable &ht): @@ -164,7 +164,7 @@ void JS::GenericHashTable::rehash() bucketsEnd = newBucketsEnd; } catch (std::bad_alloc) { // Out of memory. Ignore the error and just relax the resizing boundaries. - if (JS_BIT(newLgNBuckets) > bucketsEnd - buckets) + if (buckets + JS_BIT(newLgNBuckets) > bucketsEnd) maxNEntries >>= 1; else minNEntries <<= 1; diff --git a/mozilla/js/js2/numerics.cpp b/mozilla/js/js2/numerics.cpp index 72e3b703308..ae57e4f97ca 100644 --- a/mozilla/js/js2/numerics.cpp +++ b/mozilla/js/js2/numerics.cpp @@ -1914,8 +1914,7 @@ static char *doubleToAscii(double d, int mode, bool biasUp, int ndigits, if ((word0(d) & Exp_mask) == Exp_mask) { // Infinity or NaN *decpt = 9999; - s = !word1(d) && !(word0(d) & Frac_mask) ? "Infinity" : "NaN"; - strcpy(buf, s); + strcpy(buf, !word1(d) && !(word0(d) & Frac_mask) ? "Infinity" : "NaN"); return buf[3] ? buf + 8 : buf + 3; } if (!d) { diff --git a/mozilla/js/js2/systemtypes.h b/mozilla/js/js2/systemtypes.h index bbe7ef75710..0d5a5d12a8e 100644 --- a/mozilla/js/js2/systemtypes.h +++ b/mozilla/js/js2/systemtypes.h @@ -61,11 +61,17 @@ typedef float float32; // Use wchar_t on platforms on which wchar_t has 16 bits; otherwise use int16. // Note that in C++ wchar_t is a distinct type rather than a typedef for some integral type. // Like char, a char16 can be either signed or unsigned at the implementation's discretion. -typedef wchar_t char16; -#ifndef _WIN32 // Microsoft VC6 bug: wchar_t should be a built-in type, not a typedef - typedef unsigned wchar_t uchar16; +#ifdef __GNUC__ + // GCC's wchar_t is 32 bits, so we can't use it.
+ typedef uint16 char16; + typedef uint16 uchar16; #else - typedef wchar_t uchar16; + typedef wchar_t char16; + #ifndef _WIN32 // Microsoft VC6 bug: wchar_t should be a built-in type, not a typedef + typedef unsigned wchar_t uchar16; + #else + typedef wchar_t uchar16; + #endif #endif #ifdef _WIN32 diff --git a/mozilla/js/js2/utilities.h b/mozilla/js/js2/utilities.h index 3685bce8822..7f2019c1e84 100644 --- a/mozilla/js/js2/utilities.h +++ b/mozilla/js/js2/utilities.h @@ -356,9 +356,11 @@ namespace JavaScript { // A class to remember the format of an ostream so that a function may modify it internally // without changing it for the caller. class SaveFormat { +#ifndef __GNUC__ // The GCC libraries don't support ios_base yet. ostream &o; std::ios_base::fmtflags flags; char fill; +#endif public: explicit SaveFormat(ostream &out); ~SaveFormat(); diff --git a/mozilla/js2/src/hash.cpp b/mozilla/js2/src/hash.cpp index 214a420cc57..b0504320d87 100644 --- a/mozilla/js2/src/hash.cpp +++ b/mozilla/js2/src/hash.cpp @@ -63,7 +63,7 @@ JS::HashNumber JS::hashString(const String &s) // -static const int minLgNBuckets = 4; +const uint minLgNBuckets = 4; JS::GenericHashTableIterator::GenericHashTableIterator(GenericHashTable &ht): @@ -164,7 +164,7 @@ void JS::GenericHashTable::rehash() bucketsEnd = newBucketsEnd; } catch (std::bad_alloc) { // Out of memory. Ignore the error and just relax the resizing boundaries. - if (JS_BIT(newLgNBuckets) > bucketsEnd - buckets) + if (buckets + JS_BIT(newLgNBuckets) > bucketsEnd) maxNEntries >>= 1; else minNEntries <<= 1; diff --git a/mozilla/js2/src/numerics.cpp b/mozilla/js2/src/numerics.cpp index 72e3b703308..ae57e4f97ca 100644 --- a/mozilla/js2/src/numerics.cpp +++ b/mozilla/js2/src/numerics.cpp @@ -1914,8 +1914,7 @@ static char *doubleToAscii(double d, int mode, bool biasUp, int ndigits, if ((word0(d) & Exp_mask) == Exp_mask) { // Infinity or NaN *decpt = 9999; - s = !word1(d) && !(word0(d) & Frac_mask) ? 
"Infinity" : "NaN"; - strcpy(buf, s); + strcpy(buf, !word1(d) && !(word0(d) & Frac_mask) ? "Infinity" : "NaN"); return buf[3] ? buf + 8 : buf + 3; } if (!d) { diff --git a/mozilla/js2/src/systemtypes.h b/mozilla/js2/src/systemtypes.h index bbe7ef75710..0d5a5d12a8e 100644 --- a/mozilla/js2/src/systemtypes.h +++ b/mozilla/js2/src/systemtypes.h @@ -61,11 +61,17 @@ typedef float float32; // Use wchar_t on platforms on which wchar_t has 16 bits; otherwise use int16. // Note that in C++ wchar_t is a distinct type rather than a typedef for some integral type. // Like char, a char16 can be either signed or unsigned at the implementation's discretion. -typedef wchar_t char16; -#ifndef _WIN32 // Microsoft VC6 bug: wchar_t should be a built-in type, not a typedef - typedef unsigned wchar_t uchar16; +#ifdef __GNUC__ + // GCC's wchar_t is 32 bits, so we can't use it. + typedef uint16 char16; + typedef uint16 uchar16; #else - typedef wchar_t uchar16; + typedef wchar_t char16; + #ifndef _WIN32 // Microsoft VC6 bug: wchar_t should be a built-in type, not a typedef + typedef unsigned wchar_t uchar16; + #else + typedef wchar_t uchar16; + #endif #endif #ifdef _WIN32 diff --git a/mozilla/js2/src/utilities.h b/mozilla/js2/src/utilities.h index 3685bce8822..7f2019c1e84 100644 --- a/mozilla/js2/src/utilities.h +++ b/mozilla/js2/src/utilities.h @@ -356,9 +356,11 @@ namespace JavaScript { // A class to remember the format of an ostream so that a function may modify it internally // without changing it for the caller. class SaveFormat { +#ifndef __GNUC__ // The GCC libraries don't support ios_base yet. ostream &o; std::ios_base::fmtflags flags; char fill; +#endif public: explicit SaveFormat(ostream &out); ~SaveFormat();