diff --git a/.travis.yml b/.travis.yml
index 1200c676..0e17bc27 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,81 +1,81 @@
 # Build matrix / environment variables are explained on:
-# http://about.travis-ci.org/docs/user/build-configuration/
+# https://docs.travis-ci.com/user/customizing-the-build/
 # This file can be validated on:
 # http://lint.travis-ci.org/
 
 sudo: false
 language: cpp
 
 # Define the matrix explicitly, manually expanding the combinations of (os, compiler, env).
 # It is more tedious, but grants us far more flexibility.
 matrix:
   include:
     - os: linux
       compiler: gcc
       sudo: true
       install: ./ci/install-linux.sh && ./ci/log-config.sh
       script: ./ci/build-linux-bazel.sh
     - os: linux
       compiler: clang
       sudo: true
       install: ./ci/install-linux.sh && ./ci/log-config.sh
       script: ./ci/build-linux-bazel.sh
     - os: linux
       group: deprecated-2017Q4
       compiler: gcc
       install: ./ci/install-linux.sh && ./ci/log-config.sh
       script: ./ci/build-linux-autotools.sh
     - os: linux
       group: deprecated-2017Q4
       compiler: gcc
       env: BUILD_TYPE=Debug VERBOSE=1 CXX_FLAGS=-std=c++11
     - os: linux
       group: deprecated-2017Q4
       compiler: clang
       env: BUILD_TYPE=Debug VERBOSE=1
     - os: linux
       group: deprecated-2017Q4
       compiler: clang
       env: BUILD_TYPE=Release VERBOSE=1 CXX_FLAGS=-std=c++11
     - os: linux
       compiler: clang
       env: BUILD_TYPE=Release VERBOSE=1 CXX_FLAGS=-std=c++11 NO_EXCEPTION=ON NO_RTTI=ON COMPILER_IS_GNUCXX=ON
     - os: osx
       compiler: gcc
       env: BUILD_TYPE=Debug VERBOSE=1
     - os: osx
       compiler: gcc
       env: BUILD_TYPE=Release VERBOSE=1 CXX_FLAGS=-std=c++11
     - os: osx
       compiler: clang
       env: BUILD_TYPE=Debug VERBOSE=1
       if: type != pull_request
     - os: osx
       env: BUILD_TYPE=Release VERBOSE=1 CXX_FLAGS=-std=c++11
       if: type != pull_request
 
 # These are the install and build (script) phases for the most common entries in the matrix.  They could be included
 # in each entry in the matrix, but that is just repetitive.
 install:
   - ./ci/install-${TRAVIS_OS_NAME}.sh
   - . ./ci/env-${TRAVIS_OS_NAME}.sh
   - ./ci/log-config.sh
 
 script: ./ci/travis.sh
 
 # For sudo=false builds this section installs the necessary dependencies.
 addons:
   apt:
     # The list of packages whitelisted on Travis for ubuntu-precise can be found here:
     #   https://github.com/travis-ci/apt-package-whitelist/blob/master/ubuntu-precise
     # The list of whitelisted Travis apt sources:
     #   https://github.com/travis-ci/apt-source-whitelist/blob/master/ubuntu.json
     sources:
     - ubuntu-toolchain-r-test
     - llvm-toolchain-precise-3.7
     packages:
     - g++-4.9
     - clang-3.7
 
 notifications:
   email: false
diff --git a/googlemock/docs/ForDummies.md b/googlemock/docs/ForDummies.md
index 1e0fd416..566a34e5 100644
--- a/googlemock/docs/ForDummies.md
+++ b/googlemock/docs/ForDummies.md
@@ -1,447 +1,447 @@
 
 
 (**Note:** If you get compiler errors that you don't understand, be sure to consult [Google Mock Doctor](FrequentlyAskedQuestions.md#how-am-i-supposed-to-make-sense-of-these-horrible-template-errors).)
 
 # What Is Google C++ Mocking Framework? #
 When you write a prototype or test, often it's not feasible or wise to rely on real objects entirely. A **mock object** implements the same interface as a real object (so it can be used as one), but lets you specify at run time how it will be used and what it should do (which methods will be called? in which order? how many times? with what arguments? what will they return? etc).
 
 **Note:** It is easy to confuse the term _fake objects_ with mock objects. Fakes and mocks actually mean very different things in the Test-Driven Development (TDD) community:
 
   * **Fake** objects have working implementations, but usually take some shortcut (perhaps to make the operations less expensive), which makes them not suitable for production. An in-memory file system would be an example of a fake.
   * **Mocks** are objects pre-programmed with _expectations_, which form a specification of the calls they are expected to receive.
 
 If all this seems too abstract for you, don't worry - the most important thing to remember is that a mock allows you to check the _interaction_ between itself and code that uses it. The difference between fakes and mocks will become much clearer once you start to use mocks.
 
 **Google C++ Mocking Framework** (or **Google Mock** for short) is a library (sometimes we also call it a "framework" to make it sound cool) for creating mock classes and using them. It does to C++ what [jMock](http://www.jmock.org/) and [EasyMock](http://www.easymock.org/) do to Java.
 
 Using Google Mock involves three basic steps:
 
   1. Use some simple macros to describe the interface you want to mock, and they will expand to the implementation of your mock class;
   1. Create some mock objects and specify their expectations and behavior using an intuitive syntax;
   1. Exercise code that uses the mock objects. Google Mock will catch any violation of the expectations as soon as it arises.
 
 # Why Google Mock? #
 While mock objects help you remove unnecessary dependencies in tests and make them fast and reliable, using mocks manually in C++ is _hard_:
 
   * Someone has to implement the mocks. The job is usually tedious and error-prone. No wonder people go to great lengths to avoid it.
   * The quality of those manually written mocks is a bit, uh, unpredictable. You may see some really polished ones, but you may also see some that were hacked up in a hurry and have all sorts of ad-hoc restrictions.
   * The knowledge you gained from using one mock doesn't transfer to the next.
 
 In contrast, Java and Python programmers have some fine mock frameworks, which automate the creation of mocks. As a result, mocking is a proven effective technique and widely adopted practice in those communities. Having the right tool absolutely makes the difference.
 
 Google Mock was built to help C++ programmers. It was inspired by [jMock](http://www.jmock.org/) and [EasyMock](http://www.easymock.org/), but designed with C++'s specifics in mind. It is your friend if any of the following problems is bothering you:
 
   * You are stuck with a sub-optimal design and wish you had done more prototyping before it was too late, but prototyping in C++ is by no means "rapid".
   * Your tests are slow as they depend on too many libraries or use expensive resources (e.g. a database).
   * Your tests are brittle as some resources they use are unreliable (e.g. the network).
   * You want to test how your code handles a failure (e.g. a file checksum error), but it's not easy to cause one.
   * You need to make sure that your module interacts with other modules in the right way, but it's hard to observe the interaction; therefore you resort to observing the side effects at the end of the action, which is awkward at best.
   * You want to "mock out" your dependencies, except that they don't have mock implementations yet; and, frankly, you aren't thrilled by some of those hand-written mocks.
 
 We encourage you to use Google Mock as:
 
   * a _design_ tool, for it lets you experiment with your interface design early and often. More iterations lead to better designs!
   * a _testing_ tool to cut your tests' outbound dependencies and probe the interaction between your module and its collaborators.
 
 # Getting Started #
 Using Google Mock is easy! Inside your C++ source file, just `#include` `"gtest/gtest.h"` and `"gmock/gmock.h"`, and you are ready to go.
 
 # A Case for Mock Turtles #
 Let's look at an example. Suppose you are developing a graphics program that relies on a LOGO-like API for drawing. How would you test that it does the right thing? Well, you can run it and compare the screen with a golden screen snapshot, but let's admit it: tests like this are expensive to run and fragile (What if you just upgraded to a shiny new graphics card that has better anti-aliasing? Suddenly you have to update all your golden images.). It would be too painful if all your tests are like this. Fortunately, you learned about Dependency Injection and know the right thing to do: instead of having your application talk to the drawing API directly, wrap the API in an interface (say, `Turtle`) and code to that interface:
 
 ```
 class Turtle {
   ...
   virtual ~Turtle() {}
   virtual void PenUp() = 0;
   virtual void PenDown() = 0;
   virtual void Forward(int distance) = 0;
   virtual void Turn(int degrees) = 0;
   virtual void GoTo(int x, int y) = 0;
   virtual int GetX() const = 0;
   virtual int GetY() const = 0;
 };
 ```
 
 (Note that the destructor of `Turtle` **must** be virtual, as is the case for **all** classes you intend to inherit from - otherwise the destructor of the derived class will not be called when you delete an object through a base pointer, and you'll get corrupted program states like memory leaks.)
 
 You can control whether the turtle's movement will leave a trace using `PenUp()` and `PenDown()`, and control its movement using `Forward()`, `Turn()`, and `GoTo()`. Finally, `GetX()` and `GetY()` tell you the current position of the turtle.
 
 Your program will normally use a real implementation of this interface. In tests, you can use a mock implementation instead. This allows you to easily check what drawing primitives your program is calling, with what arguments, and in which order. Tests written this way are much more robust (they won't break because your new machine does anti-aliasing differently), easier to read and maintain (the intent of a test is expressed in the code, not in some binary images), and run _much, much faster_.
 
 # Writing the Mock Class #
 If you are lucky, the mocks you need to use have already been implemented by some nice people. If, however, you find yourself in the position to write a mock class, relax - Google Mock turns this task into a fun game! (Well, almost.)
 
 ## How to Define It ##
 Using the `Turtle` interface as an example, here are the simple steps you need to follow:
 
   1. Derive a class `MockTurtle` from `Turtle`.
   1. Take a _virtual_ function of `Turtle` (while it's possible to [mock non-virtual methods using templates](CookBook.md#mocking-nonvirtual-methods), it's much more involved). Count how many arguments it has.
   1. In the `public:` section of the child class, write `MOCK_METHODn();` (or `MOCK_CONST_METHODn();` if you are mocking a `const` method), where `n` is the number of the arguments; if you counted wrong, shame on you, and a compiler error will tell you so.
   1. Now comes the fun part: you take the function signature, cut-and-paste the _function name_ as the _first_ argument to the macro, and leave what's left as the _second_ argument (in case you're curious, this is the _type of the function_).
   1. Repeat until all virtual functions you want to mock are done.
 
 After the process, you should have something like:
 
 ```
 #include "gmock/gmock.h"  // Brings in Google Mock.
 class MockTurtle : public Turtle {
  public:
   ...
   MOCK_METHOD0(PenUp, void());
   MOCK_METHOD0(PenDown, void());
   MOCK_METHOD1(Forward, void(int distance));
   MOCK_METHOD1(Turn, void(int degrees));
   MOCK_METHOD2(GoTo, void(int x, int y));
   MOCK_CONST_METHOD0(GetX, int());
   MOCK_CONST_METHOD0(GetY, int());
 };
 ```
 
 You don't need to define these mock methods anywhere else - the `MOCK_METHOD*` macros will generate the definitions for you. It's that simple! Once you get the hang of it, you can pump out mock classes faster than your source-control system can handle your check-ins.
 
 **Tip:** If even this is too much work for you, you'll find the
 `gmock_gen.py` tool in Google Mock's `scripts/generator/` directory (courtesy of the [cppclean](http://code.google.com/p/cppclean/) project) useful.  This command-line
 tool requires that you have Python 2.4 installed.  You give it a C++ file and the name of an abstract class defined in it,
 and it will print the definition of the mock class for you.  Due to the
 complexity of the C++ language, this script may not always work, but
 it can be quite handy when it does.  For more details, read the [user documentation](../scripts/generator/README).
 
 ## Where to Put It ##
 When you define a mock class, you need to decide where to put its definition. Some people put it in a `*_test.cc`. This is fine when the interface being mocked (say, `Foo`) is owned by the same person or team. Otherwise, when the owner of `Foo` changes it, your test could break. (You can't really expect `Foo`'s maintainer to fix every test that uses `Foo`, can you?)
 
 So, the rule of thumb is: if you need to mock `Foo` and it's owned by others, define the mock class in `Foo`'s package (better, in a `testing` sub-package such that you can clearly separate production code and testing utilities), and put it in a `mock_foo.h`. Then everyone can reference `mock_foo.h` from their tests. If `Foo` ever changes, there is only one copy of `MockFoo` to change, and only tests that depend on the changed methods need to be fixed.
 
 Another way to do it: you can introduce a thin layer `FooAdaptor` on top of `Foo` and code to this new interface. Since you own `FooAdaptor`, you can absorb changes in `Foo` much more easily. While this is more work initially, carefully choosing the adaptor interface can make your code easier to write and more readable (a net win in the long run), as you can choose `FooAdaptor` to fit your specific domain much better than `Foo` does.
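 
 For illustration, here is a minimal sketch of what such an adaptor might look like (`Foo`, its `Write()` method, and the adaptor's `SaveRecord()` method are all hypothetical):
 
 ```
 class FooAdaptor {
  public:
   virtual ~FooAdaptor() {}
   // Expose only the operations your code actually needs, phrased in your
   // domain's terms rather than Foo's.
   virtual bool SaveRecord(const std::string& name) = 0;
 };
 
 // The production implementation simply forwards to the real Foo.
 class RealFooAdaptor : public FooAdaptor {
  public:
   explicit RealFooAdaptor(Foo* foo) : foo_(foo) {}
   virtual bool SaveRecord(const std::string& name) {
     return foo_->Write(name);  // Changes in Foo are absorbed here.
   }
  private:
   Foo* foo_;
 };
 
 // Tests mock the adaptor instead of Foo itself.
 class MockFooAdaptor : public FooAdaptor {
  public:
   MOCK_METHOD1(SaveRecord, bool(const std::string& name));
 };
 ```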
 
 # Using Mocks in Tests #
 Once you have a mock class, using it is easy. The typical work flow is:
 
   1. Import the Google Mock names from the `testing` namespace such that you can use them unqualified (You only have to do it once per file. Remember that namespaces are a good idea and good for your health.).
   1. Create some mock objects.
   1. Specify your expectations on them (How many times will a method be called? With what arguments? What should it do? etc.).
   1. Exercise some code that uses the mocks; optionally, check the result using Google Test assertions. If a mock method is called more than expected or with wrong arguments, you'll get an error immediately.
   1. When a mock is destructed, Google Mock will automatically check whether all expectations on it have been satisfied.
 
 Here's an example:
 
 ```
 #include "path/to/mock-turtle.h"
 #include "gmock/gmock.h"
 #include "gtest/gtest.h"
 using ::testing::AtLeast;                     // #1
 
 TEST(PainterTest, CanDrawSomething) {
   MockTurtle turtle;                          // #2
   EXPECT_CALL(turtle, PenDown())              // #3
       .Times(AtLeast(1));
 
   Painter painter(&turtle);                   // #4
 
   EXPECT_TRUE(painter.DrawCircle(0, 0, 10));
 }                                             // #5
 
 int main(int argc, char** argv) {
   // The following line must be executed to initialize Google Mock
   // (and Google Test) before running the tests.
   ::testing::InitGoogleMock(&argc, argv);
   return RUN_ALL_TESTS();
 }
 ```
 
 As you might have guessed, this test checks that `PenDown()` is called at least once. If the `painter` object didn't call this method, your test will fail with a message like this:
 
 ```
 path/to/my_test.cc:119: Failure
 Actual function call count doesn't match this expectation:
 Actually: never called;
 Expected: called at least once.
 ```
 
 **Tip 1:** If you run the test from an Emacs buffer, you can hit `<Enter>` on the line number displayed in the error message to jump right to the failed expectation.
 
 **Tip 2:** If your mock objects are never deleted, the final verification won't happen. Therefore it's a good idea to use a heap leak checker in your tests when you allocate mocks on the heap.
 
 **Important note:** Google Mock requires expectations to be set **before** the mock functions are called, otherwise the behavior is **undefined**. In particular, you mustn't interleave `EXPECT_CALL()`s and calls to the mock functions.
 
 This means `EXPECT_CALL()` should be read as expecting that a call will occur _in the future_, not that a call has occurred. Why does Google Mock work like that? Well, specifying the expectation beforehand allows Google Mock to report a violation as soon as it arises, when the context (stack trace, etc) is still available. This makes debugging much easier.
 
 Admittedly, this test is contrived and doesn't do much. You can easily achieve the same effect without using Google Mock. However, as we shall reveal soon, Google Mock allows you to do _much more_ with the mocks.
 
 ## Using Google Mock with Any Testing Framework ##
 If you want to use something other than Google Test (e.g. [CppUnit](http://sourceforge.net/projects/cppunit/) or
-[CxxTest](http://cxxtest.tigris.org/)) as your testing framework, just change the `main()` function in the previous section to:
+[CxxTest](https://cxxtest.com/)) as your testing framework, just change the `main()` function in the previous section to:
 ```
 int main(int argc, char** argv) {
   // The following line causes Google Mock to throw an exception on failure,
   // which will be interpreted by your testing framework as a test failure.
   ::testing::GTEST_FLAG(throw_on_failure) = true;
   ::testing::InitGoogleMock(&argc, argv);
   ... whatever your testing framework requires ...
 }
 ```
 
 This approach has a catch: it makes Google Mock throw an exception
 from a mock object's destructor sometimes.  With some compilers, this
 sometimes causes the test program to crash.  You'll still be able to
 notice that the test has failed, but it's not a graceful failure.
 
 A better solution is to use Google Test's
 [event listener API](../../googletest/docs/advanced.md#extending-google-test-by-handling-test-events)
 to report a test failure to your testing framework properly.  You'll need to
 implement the `OnTestPartResult()` method of the event listener interface, but it
 should be straightforward.
 
 If this turns out to be too much work, we suggest that you stick with
 Google Test, which works with Google Mock seamlessly (in fact, it is
 technically part of Google Mock.).  If there is a reason that you
 cannot use Google Test, please let us know.
 
 # Setting Expectations #
 The key to using a mock object successfully is to set the _right expectations_ on it. If your expectations are too strict, your test will fail as the result of unrelated changes; if they are too loose, bugs can slip through. You want to do it just right such that your test can catch exactly the kind of bugs you intend it to catch. Google Mock provides the necessary means for you to do it "just right."
 
 ## General Syntax ##
 In Google Mock we use the `EXPECT_CALL()` macro to set an expectation on a mock method. The general syntax is:
 
 ```
 EXPECT_CALL(mock_object, method(matchers))
     .Times(cardinality)
     .WillOnce(action)
     .WillRepeatedly(action);
 ```
 
 The macro has two arguments: first the mock object, and then the method and its arguments. Note that the two are separated by a comma (`,`), not a period (`.`). (Why use a comma? The answer is that it was necessary for technical reasons.)
 
 The macro can be followed by some optional _clauses_ that provide more information about the expectation. We'll discuss how each clause works in the coming sections.
 
 This syntax is designed to make an expectation read like English. For example, you can probably guess that
 
 ```
 using ::testing::Return;
 ...
 EXPECT_CALL(turtle, GetX())
     .Times(5)
     .WillOnce(Return(100))
     .WillOnce(Return(150))
     .WillRepeatedly(Return(200));
 ```
 
 says that the `turtle` object's `GetX()` method will be called five times, it will return 100 the first time, 150 the second time, and then 200 every time. Some people like to call this style of syntax a Domain-Specific Language (DSL).
 
 **Note:** Why do we use a macro to do this? It serves two purposes: first it makes expectations easily identifiable (either by `grep` or by a human reader), and second it allows Google Mock to include the source file location of a failed expectation in messages, making debugging easier.
 
 ## Matchers: What Arguments Do We Expect? ##
 When a mock function takes arguments, we must specify what arguments we are expecting; for example:
 
 ```
 // Expects the turtle to move forward by 100 units.
 EXPECT_CALL(turtle, Forward(100));
 ```
 
 Sometimes you may not want to be too specific (Remember that talk about tests being too rigid? Over-specification leads to brittle tests and obscures the intent of tests. Therefore we encourage you to specify only what's necessary - no more, no less.). If you care to check that `Forward()` will be called but aren't interested in its actual argument, write `_` as the argument, which means "anything goes":
 
 ```
 using ::testing::_;
 ...
 // Expects the turtle to move forward.
 EXPECT_CALL(turtle, Forward(_));
 ```
 
 `_` is an instance of what we call **matchers**. A matcher is like a predicate and can test whether an argument is what we'd expect. You can use a matcher inside `EXPECT_CALL()` wherever a function argument is expected.
 
 A list of built-in matchers can be found in the [CheatSheet](CheatSheet.md). For example, here's the `Ge` (greater than or equal) matcher:
 
 ```
 using ::testing::Ge;
 ...
 EXPECT_CALL(turtle, Forward(Ge(100)));
 ```
 
 This checks that the turtle will be told to go forward by at least 100 units.
 
 ## Cardinalities: How Many Times Will It Be Called? ##
 The first clause we can specify following an `EXPECT_CALL()` is `Times()`. We call its argument a **cardinality** as it tells _how many times_ the call should occur. It allows us to repeat an expectation many times without actually writing it as many times. More importantly, a cardinality can be "fuzzy", just like a matcher can be. This allows a user to express the intent of a test exactly.
 
 An interesting special case is when we say `Times(0)`. You may have guessed - it means that the function shouldn't be called with the given arguments at all, and Google Mock will report a Google Test failure whenever the function is (wrongfully) called.
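 
 For example, a small sketch reusing the turtle mock from earlier:
 
 ```
 using ::testing::_;
 ...
 // The turtle must never be asked to turn, whatever the argument.
 EXPECT_CALL(turtle, Turn(_))
     .Times(0);
 ```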
 
 We've seen `AtLeast(n)` as an example of fuzzy cardinalities earlier. For the list of built-in cardinalities you can use, see the [CheatSheet](CheatSheet.md).
 
 The `Times()` clause can be omitted. **If you omit `Times()`, Google Mock will infer the cardinality for you.** The rules are easy to remember:
 
   * If **neither** `WillOnce()` **nor** `WillRepeatedly()` is in the `EXPECT_CALL()`, the inferred cardinality is `Times(1)`.
   * If there are `n` `WillOnce()`s but **no** `WillRepeatedly()`, where `n` >= 1, the cardinality is `Times(n)`.
   * If there are `n` `WillOnce()`s and **one** `WillRepeatedly()`, where `n` >= 0, the cardinality is `Times(AtLeast(n))`.
 
 **Quick quiz:** what do you think will happen if a function is expected to be called twice but actually called four times?
 
 ## Actions: What Should It Do? ##
 Remember that a mock object doesn't really have a working implementation? We as users have to tell it what to do when a method is invoked. This is easy in Google Mock.
 
 First, if the return type of a mock function is a built-in type or a pointer, the function has a **default action** (a `void` function will just return, a `bool` function will return `false`, and other functions will return 0). In addition, in C++11 and above, a mock function whose return type is default-constructible (i.e. has a default constructor) has a default action of returning a default-constructed value.  If you don't say anything, this behavior will be used.
 
 Second, if a mock function doesn't have a default action, or the default action doesn't suit you, you can specify the action to be taken each time the expectation matches using a series of `WillOnce()` clauses followed by an optional `WillRepeatedly()`. For example,
 
 ```
 using ::testing::Return;
 ...
 EXPECT_CALL(turtle, GetX())
     .WillOnce(Return(100))
     .WillOnce(Return(200))
     .WillOnce(Return(300));
 ```
 
 This says that `turtle.GetX()` will be called _exactly three times_ (Google Mock inferred this from how many `WillOnce()` clauses we've written, since we didn't explicitly write `Times()`), and will return 100, 200, and 300 respectively.
 
 ```
 using ::testing::Return;
 ...
 EXPECT_CALL(turtle, GetY())
     .WillOnce(Return(100))
     .WillOnce(Return(200))
     .WillRepeatedly(Return(300));
 ```
 
 says that `turtle.GetY()` will be called _at least twice_ (Google Mock knows this as we've written two `WillOnce()` clauses and a `WillRepeatedly()` while having no explicit `Times()`), will return 100 the first time, 200 the second time, and 300 from the third time on.
 
 Of course, if you explicitly write a `Times()`, Google Mock will not try to infer the cardinality itself. What if the number you specified is larger than the number of `WillOnce()` clauses? Well, after all the `WillOnce()`s are used up, Google Mock will perform the _default_ action for the function every time (unless, of course, you have a `WillRepeatedly()`).
 
 What can we do inside `WillOnce()` besides `Return()`? You can return a reference using `ReturnRef(variable)`, or invoke a pre-defined function, among [others](CheatSheet.md#actions).
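 
 For instance, `ReturnRef()` is handy when the mock method returns a reference. A minimal sketch (the `namer` mock and its `GetName()` method are hypothetical - the `Turtle` interface above has no reference-returning method):
 
 ```
 using ::testing::ReturnRef;
 ...
 std::string name = "Koopa";
 EXPECT_CALL(namer, GetName())   // Assumes GetName() returns a std::string&.
     .WillOnce(ReturnRef(name));
 ```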
 
 **Important note:** The `EXPECT_CALL()` statement evaluates the action clause only once, even though the action may be performed many times. Therefore you must be careful about side effects. The following may not do what you want:
 
 ```
 int n = 100;
 EXPECT_CALL(turtle, GetX())
     .Times(4)
     .WillRepeatedly(Return(n++));
 ```
 
 Instead of returning 100, 101, 102, ..., consecutively, this mock function will always return 100 as `n++` is only evaluated once. Similarly, `Return(new Foo)` will create a new `Foo` object when the `EXPECT_CALL()` is executed, and will return the same pointer every time. If you want the side effect to happen every time, you need to define a custom action, which we'll teach in the [CookBook](CookBook.md).
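 
 (As a preview, one way to get a fresh side effect on every call is to delegate to an ordinary function with `Invoke()`. The sketch below assumes a hypothetical helper `ReturnNextX()`; the CookBook covers `Invoke()` and custom actions in detail.)
 
 ```
 using ::testing::Invoke;
 ...
 int counter = 100;
 int ReturnNextX() { return counter++; }  // Hypothetical helper with the side effect.
 ...
 EXPECT_CALL(turtle, GetX())
     .Times(4)
     .WillRepeatedly(Invoke(ReturnNextX));
 // ReturnNextX() runs on every call, so the calls return 100, 101, 102, 103.
 ```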
 
 Time for another quiz! What do you think the following means?
 
 ```
 using ::testing::Return;
 ...
 EXPECT_CALL(turtle, GetY())
     .Times(4)
     .WillOnce(Return(100));
 ```
 
 Obviously `turtle.GetY()` is expected to be called four times. But if you think it will return 100 every time, think twice! Remember that one `WillOnce()` clause will be consumed each time the function is invoked and the default action will be taken afterwards. So the right answer is that `turtle.GetY()` will return 100 the first time, but **return 0 from the second time on**, as returning 0 is the default action for `int` functions.
 
 ## Using Multiple Expectations ##
 So far we've only shown examples where you have a single expectation. More realistically, you're going to specify expectations on multiple mock methods, which may be from multiple mock objects.
 
 By default, when a mock method is invoked, Google Mock will search the expectations in the **reverse order** they are defined, and stop when an active expectation that matches the arguments is found (you can think of it as "newer rules override older ones."). If the matching expectation cannot take any more calls, you will get an upper-bound-violated failure. Here's an example:
 
 ```
 using ::testing::_;
 ...
 EXPECT_CALL(turtle, Forward(_));  // #1
 EXPECT_CALL(turtle, Forward(10))  // #2
     .Times(2);
 ```
 
 If `Forward(10)` is called three times in a row, the third time it will be an error, as the last matching expectation (#2) has been saturated. If, however, the third `Forward(10)` call is replaced by `Forward(20)`, then it would be OK, as now #1 will be the matching expectation.
 
 **Side note:** Why does Google Mock search for a match in the _reverse_ order of the expectations? The reason is that this allows a user to set up the default expectations in a mock object's constructor or the test fixture's set-up phase and then customize the mock by writing more specific expectations in the test body. So, if you have two expectations on the same method, you want to put the one with more specific matchers **after** the other, or the more specific rule would be shadowed by the more general one that comes after it.
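 
 A sketch of that pattern (the test fixture and the test body here are made up for illustration):
 
 ```
 using ::testing::_;
 using ::testing::AnyNumber;
 ...
 class PainterTest : public ::testing::Test {
  protected:
   virtual void SetUp() {
     // General default, set up first: Forward() may be called with any
     // argument, any number of times.
     EXPECT_CALL(turtle_, Forward(_)).Times(AnyNumber());
   }
 
   MockTurtle turtle_;
 };
 
 TEST_F(PainterTest, MovesACertainDistance) {
   // More specific expectation, written later, so it is consulted first.
   EXPECT_CALL(turtle_, Forward(100)).Times(2);
   ...
 }
 ```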
 
 ## Ordered vs Unordered Calls ##
 By default, an expectation can match a call even though an earlier expectation hasn't been satisfied. In other words, the calls don't have to occur in the order the expectations are specified.
 
 Sometimes, you may want all the expected calls to occur in a strict order. To say this in Google Mock is easy:
 
 ```
 using ::testing::InSequence;
 ...
 TEST(FooTest, DrawsLineSegment) {
   ...
   {
     InSequence dummy;
 
     EXPECT_CALL(turtle, PenDown());
     EXPECT_CALL(turtle, Forward(100));
     EXPECT_CALL(turtle, PenUp());
   }
   Foo();
 }
 ```
 
 By creating an object of type `InSequence`, all expectations in its scope are put into a _sequence_ and have to occur _sequentially_. Since we are just relying on the constructor and destructor of this object to do the actual work, its name is really irrelevant.
 
 In this example, we test that `Foo()` calls the three expected functions in the order as written. If a call is made out-of-order, it will be an error.
 
 (What if you care about the relative order of some of the calls, but not all of them? Can you specify an arbitrary partial order? The answer is ... yes! If you are impatient, the details can be found in the [CookBook](CookBook.md#expecting-partially-ordered-calls).)
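 
 (As a taste of it, partial orders are expressed with `Sequence` objects. This is only a sketch; the CookBook has the full story:)
 
 ```
 using ::testing::Sequence;
 ...
 Sequence s1, s2;
 
 EXPECT_CALL(turtle, PenDown()).InSequence(s1, s2);
 EXPECT_CALL(turtle, Forward(100)).InSequence(s1);
 EXPECT_CALL(turtle, Turn(90)).InSequence(s2);
 // PenDown() must precede both Forward(100) and Turn(90), but the relative
 // order of Forward() and Turn() is left unconstrained.
 ```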
 
 ## All Expectations Are Sticky (Unless Said Otherwise) ##
 Now let's do a quick quiz to see how well you can use this mock stuff already. How would you test that the turtle is asked to go to the origin _exactly twice_ (you want to ignore any other instructions it receives)?
 
 After you've come up with your answer, take a look at ours and compare notes (solve it yourself first - don't cheat!):
 
 ```
 using ::testing::_;
 using ::testing::AnyNumber;
 ...
 EXPECT_CALL(turtle, GoTo(_, _))  // #1
     .Times(AnyNumber());
 EXPECT_CALL(turtle, GoTo(0, 0))  // #2
     .Times(2);
 ```
 
 Suppose `turtle.GoTo(0, 0)` is called three times. The third time, Google Mock will see that the arguments match expectation #2 (remember that we always pick the last matching expectation). Now, since we said that there should be only two such calls, Google Mock will report an error immediately. This is basically what we've told you in the "Using Multiple Expectations" section above.
 
 This example shows that **expectations in Google Mock are "sticky" by default**, in the sense that they remain active even after we have reached their invocation upper bounds. This is an important rule to remember, as it affects the meaning of the spec, and is **different** from how it's done in many other mocking frameworks (Why'd we do that? Because we think our rule makes the common cases easier to express and understand.).
 
 Simple? Let's see if you've really understood it: what does the following code say?
 
 ```
 using ::testing::Return;
 ...
 for (int i = n; i > 0; i--) {
   EXPECT_CALL(turtle, GetX())
       .WillOnce(Return(10*i));
 }
 ```
 
 If you think it says that `turtle.GetX()` will be called `n` times and will return 10, 20, 30, ..., consecutively, think twice! The problem is that, as we said, expectations are sticky. So, the second time `turtle.GetX()` is called, the last (latest) `EXPECT_CALL()` statement will match, and will immediately lead to an "upper bound exceeded" error - this piece of code is not very useful!
 
 One correct way of saying that `turtle.GetX()` will return 10, 20, 30, ..., is to explicitly say that the expectations are _not_ sticky. In other words, they should _retire_ as soon as they are saturated:
 
 ```
 using ::testing::Return;
 ...
 for (int i = n; i > 0; i--) {
   EXPECT_CALL(turtle, GetX())
     .WillOnce(Return(10*i))
     .RetiresOnSaturation();
 }
 ```
 
 And, there's a better way to do it: in this case, we expect the calls to occur in a specific order, and we line up the actions to match the order. Since the order is important here, we should make it explicit using a sequence:
 
 ```
 using ::testing::InSequence;
 using ::testing::Return;
 ...
 {
   InSequence s;
 
   for (int i = 1; i <= n; i++) {
     EXPECT_CALL(turtle, GetX())
         .WillOnce(Return(10*i))
         .RetiresOnSaturation();
   }
 }
 ```
 
 By the way, the other situation where an expectation may _not_ be sticky is when it's in a sequence - as soon as another expectation that comes after it in the sequence has been used, it automatically retires (and will never be used to match any call).
 
 ## Uninteresting Calls ##
 A mock object may have many methods, and not all of them are that interesting. For example, in some tests we may not care about how many times `GetX()` and `GetY()` get called.
 
 In Google Mock, if you are not interested in a method, just don't say anything about it. If a call to this method occurs, you'll see a warning in the test output, but it won't be a failure.
 
 # What Now? #
 Congratulations! You've learned enough about Google Mock to start using it. Now, you might want to join the [googlemock](http://groups.google.com/group/googlemock) discussion group and actually write some tests using Google Mock - it will be fun. Hey, it may even be addictive - you've been warned.
 
 Then, if you feel like increasing your mock quotient, you should move on to the [CookBook](CookBook.md). You can learn many advanced features of Google Mock there -- and advance your level of enjoyment and testing bliss.
diff --git a/googlemock/docs/FrequentlyAskedQuestions.md b/googlemock/docs/FrequentlyAskedQuestions.md
index 23f7da03..9008c637 100644
--- a/googlemock/docs/FrequentlyAskedQuestions.md
+++ b/googlemock/docs/FrequentlyAskedQuestions.md
@@ -1,627 +1,627 @@
 
 
 Please send your questions to the
 [googlemock](http://groups.google.com/group/googlemock) discussion
 group. If you need help with compiler errors, make sure you have
 tried [Google Mock Doctor](#how-am-i-supposed-to-make-sense-of-these-horrible-template-errors) first.
 
 ## When I call a method on my mock object, the method for the real object is invoked instead.  What's the problem? ##
 
 In order for a method to be mocked, it must be _virtual_, unless you use the [high-perf dependency injection technique](CookBook.md#mocking-nonvirtual-methods).
 
 ## I wrote some matchers.  After I upgraded to a new version of Google Mock, they no longer compile.  What's going on? ##
 
 After version 1.4.0 of Google Mock was released, we had an idea on how
 to make it easier to write matchers that can generate informative
 messages efficiently.  We experimented with this idea and liked what
 we saw.  Therefore we decided to implement it.
 
 Unfortunately, this means that if you have defined your own matchers
 by implementing `MatcherInterface` or using `MakePolymorphicMatcher()`,
 your definitions will no longer compile.  Matchers defined using the
 `MATCHER*` family of macros are not affected.
 
 Sorry for the hassle if your matchers are affected.  We believe it's
 in everyone's long-term interest to make this change sooner than
 later.  Fortunately, it's usually not hard to migrate an existing
 matcher to the new API.  Here's what you need to do:
 
 If you wrote your matcher like this:
 ```
 // Old matcher definition that doesn't work with the latest
 // Google Mock.
 using ::testing::MatcherInterface;
 ...
 class MyWonderfulMatcher : public MatcherInterface<MyType> {
  public:
   ...
   virtual bool Matches(MyType value) const {
     // Returns true if value matches.
     return value.GetFoo() > 5;
   }
   ...
 };
 ```
 
 you'll need to change it to:
 ```
 // New matcher definition that works with the latest Google Mock.
 using ::testing::MatcherInterface;
 using ::testing::MatchResultListener;
 ...
 class MyWonderfulMatcher : public MatcherInterface<MyType> {
  public:
   ...
   virtual bool MatchAndExplain(MyType value,
                                MatchResultListener* listener) const {
     // Returns true if value matches.
     return value.GetFoo() > 5;
   }
   ...
 };
 ```
 (i.e. rename `Matches()` to `MatchAndExplain()` and give it a second
 argument of type `MatchResultListener*`.)
 
 If you were also using `ExplainMatchResultTo()` to improve the matcher
 message:
 ```
 // Old matcher definition that doesn't work with the latest
 // Google Mock.
 using ::testing::MatcherInterface;
 ...
 class MyWonderfulMatcher : public MatcherInterface<MyType> {
  public:
   ...
   virtual bool Matches(MyType value) const {
     // Returns true if value matches.
     return value.GetFoo() > 5;
   }
 
   virtual void ExplainMatchResultTo(MyType value,
                                     ::std::ostream* os) const {
     // Prints some helpful information to os to help
     // a user understand why value matches (or doesn't match).
     *os << "the Foo property is " << value.GetFoo();
   }
   ...
 };
 ```
 
 you should move the logic of `ExplainMatchResultTo()` into
 `MatchAndExplain()`, using the `MatchResultListener` argument where
 the `::std::ostream` was used:
 ```
 // New matcher definition that works with the latest Google Mock.
 using ::testing::MatcherInterface;
 using ::testing::MatchResultListener;
 ...
 class MyWonderfulMatcher : public MatcherInterface<MyType> {
  public:
   ...
   virtual bool MatchAndExplain(MyType value,
                                MatchResultListener* listener) const {
     // Returns true if value matches.
     *listener << "the Foo property is " << value.GetFoo();
     return value.GetFoo() > 5;
   }
   ...
 };
 ```
 
 If your matcher is defined using `MakePolymorphicMatcher()`:
 ```
 // Old matcher definition that doesn't work with the latest
 // Google Mock.
 using ::testing::MakePolymorphicMatcher;
 ...
 class MyGreatMatcher {
  public:
   ...
   bool Matches(MyType value) const {
     // Returns true if value matches.
     return value.GetBar() < 42;
   }
   ...
 };
 ... MakePolymorphicMatcher(MyGreatMatcher()) ...
 ```
 
 you should rename the `Matches()` method to `MatchAndExplain()` and
 add a `MatchResultListener*` argument (the same as what you need to do
 for matchers defined by implementing `MatcherInterface`):
 ```
 // New matcher definition that works with the latest Google Mock.
 using ::testing::MakePolymorphicMatcher;
 using ::testing::MatchResultListener;
 ...
 class MyGreatMatcher {
  public:
   ...
   bool MatchAndExplain(MyType value,
                        MatchResultListener* listener) const {
     // Returns true if value matches.
     return value.GetBar() < 42;
   }
   ...
 };
 ... MakePolymorphicMatcher(MyGreatMatcher()) ...
 ```
 
 If your polymorphic matcher uses `ExplainMatchResultTo()` for better
 failure messages:
 ```
 // Old matcher definition that doesn't work with the latest
 // Google Mock.
 using ::testing::MakePolymorphicMatcher;
 ...
 class MyGreatMatcher {
  public:
   ...
   bool Matches(MyType value) const {
     // Returns true if value matches.
     return value.GetBar() < 42;
   }
   ...
 };
 void ExplainMatchResultTo(const MyGreatMatcher& matcher,
                           MyType value,
                           ::std::ostream* os) {
   // Prints some helpful information to os to help
   // a user understand why value matches (or doesn't match).
   *os << "the Bar property is " << value.GetBar();
 }
 ... MakePolymorphicMatcher(MyGreatMatcher()) ...
 ```
 
 you'll need to move the logic inside `ExplainMatchResultTo()` to
 `MatchAndExplain()`:
 ```
 // New matcher definition that works with the latest Google Mock.
 using ::testing::MakePolymorphicMatcher;
 using ::testing::MatchResultListener;
 ...
 class MyGreatMatcher {
  public:
   ...
   bool MatchAndExplain(MyType value,
                        MatchResultListener* listener) const {
     // Returns true if value matches.
     *listener << "the Bar property is " << value.GetBar();
     return value.GetBar() < 42;
   }
   ...
 };
 ... MakePolymorphicMatcher(MyGreatMatcher()) ...
 ```
 
 For more information, you can read these
 [two](CookBook.md#writing-new-monomorphic-matchers)
 [recipes](CookBook.md#writing-new-polymorphic-matchers)
 from the cookbook.  As always, you
 are welcome to post questions on `googlemock@googlegroups.com` if you
 need any help.
 
 ## When using Google Mock, do I have to use Google Test as the testing framework?  I have my favorite testing framework and don't want to switch. ##
 
 Google Mock works out of the box with Google Test.  However, it's easy
 to configure it to work with any testing framework of your choice.
 [Here](ForDummies.md#using-google-mock-with-any-testing-framework) is how.
 
 ## How am I supposed to make sense of these horrible template errors? ##
 
 If you are confused by the compiler errors gcc threw at you,
 try consulting the _Google Mock Doctor_ tool first.  What it does is
 scan stdin for gcc error messages and spit out diagnoses of the
 problems (we call them diseases) your code has.
 
 To "install", run command:
 ```
 alias gmd='<path to googlemock>/scripts/gmock_doctor.py'
 ```
 
 To use it, do:
 ```
 <your-favorite-build-command> <your-test> 2>&1 | gmd
 ```
 
 For example:
 ```
 make my_test 2>&1 | gmd
 ```
 
 Or you can run `gmd` and copy-n-paste gcc's error messages to it.
 
 ## Can I mock a variadic function? ##
 
 You cannot mock a variadic function (i.e. a function taking ellipsis
 (`...`) arguments) directly in Google Mock.
 
 The problem is that in general, there is _no way_ for a mock object to
 know how many arguments are passed to the variadic method, and what
 the arguments' types are.  Only the _author of the base class_ knows
 the protocol, and we cannot look into their head.
 
 Therefore, to mock such a function, the _user_ must teach the mock
 object how to figure out the number of arguments and their types.  One
 way to do it is to provide overloaded versions of the function.
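 
 For example, a sketch with a hypothetical base class (`ScoreBoard` and its `Log()` methods are made up for illustration):
 ```
 class ScoreBoard {
  public:
   ...
   // The variadic method, which cannot be mocked directly.
   virtual void Log(const char* format, ...) = 0;
   // Non-variadic overloads that callers (and the mock) can use instead.
   virtual void Log(const char* message) = 0;
   virtual void Log(const char* format, int value) = 0;
 };
 
 class MockScoreBoard : public ScoreBoard {
  public:
   ...
   // The variadic version still needs some override to make the mock class
   // concrete; it simply isn't mocked.
   virtual void Log(const char* /* format */, ...) {}
   MOCK_METHOD1(Log, void(const char* message));
   MOCK_METHOD2(Log, void(const char* format, int value));
 };
 ```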
 
 Ellipsis arguments are inherited from C and not really a C++ feature.
 They are unsafe to use and don't work with arguments that have
 constructors or destructors.  Therefore we recommend avoiding them in
 C++ as much as possible.
 
 ## MSVC gives me warning C4301 or C4373 when I define a mock method with a const parameter.  Why? ##
 
 If you compile this using Microsoft Visual C++ 2005 SP1:
 ```
 class Foo {
   ...
   virtual void Bar(const int i) = 0;
 };
 
 class MockFoo : public Foo {
   ...
   MOCK_METHOD1(Bar, void(const int i));
 };
 ```
 You may get the following warning:
 ```
 warning C4301: 'MockFoo::Bar': overriding virtual function only differs from 'Foo::Bar' by const/volatile qualifier
 ```
 
 This is an MSVC bug.  The same code compiles fine with gcc, for
 example.  If you use Visual C++ 2008 SP1, you would get the warning:
 ```
 warning C4373: 'MockFoo::Bar': virtual function overrides 'Foo::Bar', previous versions of the compiler did not override when parameters only differed by const/volatile qualifiers
 ```
 
 In C++, if you _declare_ a function with a `const` parameter, the
 `const` modifier is _ignored_.  Therefore, the `Foo` base class above
 is equivalent to:
 ```
 class Foo {
   ...
   virtual void Bar(int i) = 0;  // int or const int?  Makes no difference.
 };
 ```
 
 In fact, you can _declare_ Bar() with an `int` parameter, and _define_
 it with a `const int` parameter.  The compiler will still match them
 up.
 
 Since making a parameter `const` is meaningless in the method
 _declaration_, we recommend removing it in both `Foo` and `MockFoo`.
 That should work around the VC bug.
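 
 In other words, the declarations above would become:
 ```
 class Foo {
   ...
   virtual void Bar(int i) = 0;
 };
 
 class MockFoo : public Foo {
   ...
   MOCK_METHOD1(Bar, void(int i));
 };
 ```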
 
 Note that we are talking about the _top-level_ `const` modifier here.
 If the function parameter is passed by pointer or reference, declaring
 the _pointee_ or _referee_ as `const` is still meaningful.  For
 example, the following two declarations are _not_ equivalent:
 ```
 void Bar(int* p);        // Neither p nor *p is const.
 void Bar(const int* p);  // p is not const, but *p is.
 ```
 
 ## I have a huge mock class, and Microsoft Visual C++ runs out of memory when compiling it.  What can I do? ##
 
 We've noticed that when the `/clr` compiler flag is used, Visual C++
 uses 5 to 6 times as much memory when compiling a mock class.  We suggest
 avoiding `/clr` when compiling native C++ mocks.
 
 ## I can't figure out why Google Mock thinks my expectations are not satisfied.  What should I do? ##
 
 You might want to run your test with
 `--gmock_verbose=info`.  This flag lets Google Mock print a trace
 of every mock function call it receives.  By studying the trace,
 you'll gain insights on why the expectations you set are not met.
 
 ## How can I assert that a function is NEVER called? ##
 
 ```
 EXPECT_CALL(foo, Bar(_))
     .Times(0);
 ```
 
 ## I have a failed test where Google Mock tells me TWICE that a particular expectation is not satisfied.  Isn't this redundant? ##
 
 When Google Mock detects a failure, it prints relevant information
 (the mock function arguments, the state of relevant expectations, etc.)
 to help the user debug.  If another failure is detected, Google
 Mock will do the same, including printing the state of relevant
 expectations.
 
 Sometimes an expectation's state doesn't change between two failures,
 and you'll see the same description of the state twice.  The two are,
 however, _not_ redundant, as they refer to _different points in time_.
 The fact that they are the same _is_ interesting information.
 
 ## I get a heap check failure when using a mock object, but using a real object is fine.  What can be wrong? ##
 
 Does the class (hopefully a pure interface) you are mocking have a
 virtual destructor?
 
 Whenever you derive from a base class, make sure its destructor is
 virtual.  Otherwise Bad Things will happen.  Consider the following
 code:
 
 ```
 class Base {
  public:
   // Not virtual, but should be.
   ~Base() { ... }
   ...
 };
 
 class Derived : public Base {
  public:
   ...
  private:
   std::string value_;
 };
 
 ...
   Base* p = new Derived;
   ...
   delete p;  // Surprise! ~Base() will be called, but ~Derived() will not
              // - value_ is leaked.
 ```
 
 By changing `~Base()` to virtual, `~Derived()` will be correctly
 called when `delete p` is executed, and the heap checker
 will be happy.
 
 ## The "newer expectations override older ones" rule makes writing expectations awkward.  Why does Google Mock do that? ##
 
 When people complain about this, often they are referring to code like:
 
 ```
 // foo.Bar() should be called twice, return 1 the first time, and return
 // 2 the second time.  However, I have to write the expectations in the
 // reverse order.  This sucks big time!!!
 EXPECT_CALL(foo, Bar())
     .WillOnce(Return(2))
     .RetiresOnSaturation();
 EXPECT_CALL(foo, Bar())
     .WillOnce(Return(1))
     .RetiresOnSaturation();
 ```
 
 The problem is that they didn't pick the **best** way to express the test's
 intent.
 
 By default, expectations don't have to be matched in _any_ particular
 order.  If you want them to match in a certain order, you need to be
 explicit.  This is Google Mock's (and jMock's) fundamental philosophy: it's
 easy to accidentally over-specify your tests, and we want to make it
 harder to do so.
 
 There are two better ways to write the test spec.  You could either
 put the expectations in sequence:
 
 ```
 // foo.Bar() should be called twice, return 1 the first time, and return
 // 2 the second time.  Using a sequence, we can write the expectations
 // in their natural order.
 {
   InSequence s;
   EXPECT_CALL(foo, Bar())
       .WillOnce(Return(1))
       .RetiresOnSaturation();
   EXPECT_CALL(foo, Bar())
       .WillOnce(Return(2))
       .RetiresOnSaturation();
 }
 ```
 
 or you can put the sequence of actions in the same expectation:
 
 ```
 // foo.Bar() should be called twice, return 1 the first time, and return
 // 2 the second time.
 EXPECT_CALL(foo, Bar())
     .WillOnce(Return(1))
     .WillOnce(Return(2))
     .RetiresOnSaturation();
 ```
 
 Back to the original question: why does Google Mock search the
 expectations (and `ON_CALL`s) from back to front?  Because this
 allows a user to set up a mock's behavior for the common case early
 (e.g. in the mock's constructor or the test fixture's set-up phase)
 and customize it with more specific rules later.  If Google Mock
 searched from front to back, this very useful pattern wouldn't be
 possible.
 
 ## Google Mock prints a warning when a function without EXPECT\_CALL is called, even if I have set its behavior using ON\_CALL.  Would it be reasonable not to show the warning in this case? ##
 
 When choosing between being neat and being safe, we lean toward the
 latter.  So the answer is that we think it's better to show the
 warning.
 
 Often people write `ON_CALL`s in the mock object's
 constructor or `SetUp()`, as the default behavior rarely changes from
 test to test.  Then in the test body they set the expectations, which
 are often different for each test.  Having an `ON_CALL` in the set-up
 part of a test doesn't mean that the calls are expected.  If there's
 no `EXPECT_CALL` and the method is called, it's possibly an error.  If
 we quietly let the call go through without notifying the user, bugs
 may creep in unnoticed.
 
 If, however, you are sure that the calls are OK, you can write
 
 ```
 EXPECT_CALL(foo, Bar(_))
     .WillRepeatedly(...);
 ```
 
 instead of
 
 ```
 ON_CALL(foo, Bar(_))
     .WillByDefault(...);
 ```
 
 This tells Google Mock that you do expect the calls and no warning should be
 printed.
 
 Also, you can control the verbosity using the `--gmock_verbose` flag.
 If you find the output too noisy when debugging, just choose a less
 verbose level.
 
 ## How can I delete the mock function's argument in an action? ##
 
 If you find yourself needing to perform some action that's not
 supported by Google Mock directly, remember that you can define your own
 actions using
 [MakeAction()](CookBook.md#writing-new-actions) or
 [MakePolymorphicAction()](CookBook.md#writing-new-polymorphic-actions),
 or you can write a stub function and invoke it using
 [Invoke()](CookBook.md#using-functions_methods_functors).
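 
 For instance, a minimal sketch using a stub function plus `Invoke()` (the `processor` mock, its `ProcessPacket()` method, and the `Packet` type are all hypothetical):
 ```
 using ::testing::_;
 using ::testing::Invoke;
 ...
 void DeletePacket(Packet* p) { delete p; }  // Stub that performs the clean-up.
 ...
 EXPECT_CALL(processor, ProcessPacket(_))
     .WillRepeatedly(Invoke(DeletePacket));
 ```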
 
 ## MOCK\_METHODn()'s second argument looks funny.  Why don't you use the MOCK\_METHODn(Method, return\_type, arg\_1, ..., arg\_n) syntax? ##
 
 What?!  I think it's beautiful. :-)
 
 While which syntax looks more natural is a subjective matter to some
 extent, Google Mock's syntax was chosen for several practical advantages it
 has.
 
 Try to mock a function that takes a map as an argument:
 ```
 virtual int GetSize(const map<int, std::string>& m);
 ```
 
 Using the proposed syntax, it would be:
 ```
 MOCK_METHOD1(GetSize, int, const map<int, std::string>& m);
 ```
 
 Guess what?  You'll get a compiler error as the compiler thinks that
 `const map<int, std::string>& m` are **two**, not one, arguments. To work
 around this you can use `typedef` to give the map type a name, but
 that gets in the way of your work.  Google Mock's syntax avoids this
 problem as the function's argument types are protected inside a pair
 of parentheses:
 ```
 // This compiles fine.
 MOCK_METHOD1(GetSize, int(const map<int, std::string>& m));
 ```
 
 You still need a `typedef` if the return type contains an unprotected
 comma, but that's much rarer.
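 
 For example, a sketch (the `GetSizes()` method is hypothetical):
 ```
 // The return type map<int, std::string> contains an unprotected comma,
 // so give the type a name first.
 typedef map<int, std::string> SizeMap;
 MOCK_METHOD0(GetSizes, SizeMap());
 ```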
 
 Other advantages include:
   1. `MOCK_METHOD1(Foo, int, bool)` can leave a reader wondering whether the method returns `int` or `bool`, while there won't be such confusion using Google Mock's syntax.
   1. The way Google Mock describes a function type is nothing new, although many people may not be familiar with it.  The same syntax was used in C, and the `function` library in `tr1` uses this syntax extensively.  Since `tr1` will become a part of the new version of STL, we feel very comfortable being consistent with it.
   1. The function type syntax is also used in other parts of Google Mock's API (e.g. the action interface) in order to make the implementation tractable. A user needs to learn it anyway in order to utilize Google Mock's more advanced features.  We might as well stick to the same syntax in `MOCK_METHOD*`!
 
 ## My code calls a static/global function.  Can I mock it? ##
 
 You can, but you need to make some changes.
 
 In general, if you find yourself needing to mock a static function,
 it's a sign that your modules are too tightly coupled (and less
 flexible, less reusable, less testable, etc).  You are probably better
 off defining a small interface and calling the function through that
 interface, which then can be easily mocked.  It's a bit of work
 initially, but usually pays for itself quickly.
 
 This Google Testing Blog
-[post](http://googletesting.blogspot.com/2008/06/defeat-static-cling.html)
+[post](https://testing.googleblog.com/2008/06/defeat-static-cling.html)
 says it excellently.  Check it out.
 
 ## My mock object needs to do complex stuff.  It's a lot of pain to specify the actions.  Google Mock sucks! ##
 
 I know it's not a question, but you get an answer for free anyway. :-)
 
 With Google Mock, you can create mocks in C++ easily.  And people might be
 tempted to use them everywhere. Sometimes they work great, and
 sometimes you may find them, well, a pain to use. So, what's wrong in
 the latter case?
 
 When you write a test without using mocks, you exercise the code and
 assert that it returns the correct value or that the system is in an
 expected state.  This is sometimes called "state-based testing".
 
 Mocks are great for what some call "interaction-based" testing:
 instead of checking the system state at the very end, mock objects
 verify that they are invoked the right way and report an error as soon
 as it arises, giving you a handle on the precise context in which the
 error was triggered.  This is often more effective and economical to
 do than state-based testing.
 
 If you are doing state-based testing and using a test double just to
 simulate the real object, you are probably better off using a fake.
 Using a mock in this case causes pain, as it's not a strong point for
 mocks to perform complex actions.  If you experience this and think
 that mocks suck, you are just not using the right tool for your
 problem. Or, you might be trying to solve the wrong problem. :-)
 
 ## I got a warning "Uninteresting function call encountered - default action taken.."  Should I panic? ##
 
 By all means, NO!  It's just an FYI.
 
 What it means is that you have a mock function, you haven't set any
 expectations on it (by Google Mock's rule this means that you are not
 interested in calls to this function and therefore it can be called
 any number of times), and it is called.  That's OK - you didn't say
 it's not OK to call the function!
 
 What if you actually meant to disallow this function to be called, but
 forgot to write `EXPECT_CALL(foo, Bar()).Times(0)`?  While
 one can argue that it's the user's fault, Google Mock tries to be nice and
 prints you a note.
 
 So, when you see the message and believe that there shouldn't be any
 uninteresting calls, you should investigate what's going on.  To make
 your life easier, Google Mock prints the function name and arguments
 when an uninteresting call is encountered.
 
 ## I want to define a custom action.  Should I use Invoke() or implement the action interface? ##
 
 Either way is fine - you want to choose the one that's more convenient
 for your circumstance.
 
 Usually, if your action is for a particular function type, defining it
 using `Invoke()` should be easier; if your action can be used in
 functions of different types (e.g. if you are defining
 `Return(value)`), `MakePolymorphicAction()` is
 easiest.  Sometimes you want precise control over what types of
 functions the action can be used in, and implementing
 `ActionInterface` is the way to go here. See the implementation of
 `Return()` in `include/gmock/gmock-actions.h` for an example.
 
 ## I'm using the set-argument-pointee action, and the compiler complains about "conflicting return type specified".  What does it mean? ##
 
 You get this error because Google Mock has no idea what value it should return
 when the mock method is called.  `SetArgPointee()` says what the
 side effect is, but doesn't say what the return value should be.  You
 need `DoAll()` to chain a `SetArgPointee()` with a `Return()`.
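 
 For example, a sketch (the `mutator` mock and its `GetValues()` method are hypothetical):
 ```
 using ::testing::_;
 using ::testing::DoAll;
 using ::testing::Return;
 using ::testing::SetArgPointee;
 ...
 EXPECT_CALL(mutator, GetValues(_, _))
     .WillOnce(DoAll(SetArgPointee<1>(5),  // Set what the 2nd argument points to...
                     Return(true)));       // ...and then return true.
 ```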
 
 See this [recipe](CookBook.md#mocking-side-effects) for more details and an example.
 
 
 ## My question is not in your FAQ! ##
 
 If you cannot find the answer to your question in this FAQ, there are
 some other resources you can use:
 
   1. search the mailing list [archive](http://groups.google.com/group/googlemock/topics),
   1. ask it on [googlemock@googlegroups.com](mailto:googlemock@googlegroups.com) and someone will answer it (to prevent spam, we require you to join the [discussion group](http://groups.google.com/group/googlemock) before you can post.).
 
 Please note that creating an issue in the
 [issue tracker](https://github.com/google/googletest/issues) is _not_
 a good way to get your answer, as it is monitored infrequently by a
 very small number of people.
 
 When asking a question, it's helpful to provide as much of the
 following information as possible (people cannot help you if there's
 not enough information in your question):
 
   * the version (or the revision number if you check out from SVN directly) of Google Mock you use (Google Mock is under active development, so it's possible that your problem has been solved in a later version),
   * your operating system,
   * the name and version of your compiler,
   * the complete command line flags you give to your compiler,
   * the complete compiler error messages (if the question is about compilation),
   * the _actual_ code (ideally, a minimal but complete program) that has the problem you encounter.
diff --git a/googlemock/scripts/upload.py b/googlemock/scripts/upload.py
index 6e6f9a14..95239dc2 100755
--- a/googlemock/scripts/upload.py
+++ b/googlemock/scripts/upload.py
@@ -1,1387 +1,1387 @@
 #!/usr/bin/env python
 #
 # Copyright 2007 Google Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
 #     http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
 """Tool for uploading diffs from a version control system to the codereview app.
 
 Usage summary: upload.py [options] [-- diff_options]
 
 Diff options are passed to the diff command of the underlying system.
 
 Supported version control systems:
   Git
   Mercurial
   Subversion
 
 It is important for Git/Mercurial users to specify a tree/node/branch to diff
 against by using the '--rev' option.
 """
 # This code is derived from appcfg.py in the App Engine SDK (open source),
 # and from ASPN recipe #146306.
 
 import cookielib
 import getpass
 import logging
 import md5
 import mimetypes
 import optparse
 import os
 import re
 import socket
 import subprocess
 import sys
 import urllib
 import urllib2
 import urlparse
 
 try:
   import readline
 except ImportError:
   pass
 
 # The logging verbosity:
 #  0: Errors only.
 #  1: Status messages.
 #  2: Info logs.
 #  3: Debug logs.
 verbosity = 1
 
 # Max size of patch or base file.
 MAX_UPLOAD_SIZE = 900 * 1024
 
 
 def GetEmail(prompt):
   """Prompts the user for their email address and returns it.
 
   The last used email address is saved to a file and offered up as a suggestion
   to the user. If the user presses enter without typing in anything the last
   used email address is used. If the user enters a new address, it is saved
   for next time we prompt.
 
   """
   last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
   last_email = ""
   if os.path.exists(last_email_file_name):
     try:
       last_email_file = open(last_email_file_name, "r")
       last_email = last_email_file.readline().strip("\n")
       last_email_file.close()
       prompt += " [%s]" % last_email
     except IOError, e:
       pass
   email = raw_input(prompt + ": ").strip()
   if email:
     try:
       last_email_file = open(last_email_file_name, "w")
       last_email_file.write(email)
       last_email_file.close()
     except IOError, e:
       pass
   else:
     email = last_email
   return email
 
 
 def StatusUpdate(msg):
   """Print a status message to stdout.
 
   If 'verbosity' is greater than 0, print the message.
 
   Args:
     msg: The string to print.
   """
   if verbosity > 0:
     print msg
 
 
 def ErrorExit(msg):
   """Print an error message to stderr and exit."""
   print >>sys.stderr, msg
   sys.exit(1)
 
 
 class ClientLoginError(urllib2.HTTPError):
   """Raised to indicate there was an error authenticating with ClientLogin."""
 
   def __init__(self, url, code, msg, headers, args):
     urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
     self.args = args
     self.reason = args["Error"]
 
 
 class AbstractRpcServer(object):
   """Provides a common interface for a simple RPC server."""
 
   def __init__(self, host, auth_function, host_override=None, extra_headers={},
                save_cookies=False):
     """Creates a new HttpRpcServer.
 
     Args:
       host: The host to send requests to.
       auth_function: A function that takes no arguments and returns an
         (email, password) tuple when called. Will be called if authentication
         is required.
       host_override: The host header to send to the server (defaults to host).
       extra_headers: A dict of extra headers to append to every request.
       save_cookies: If True, save the authentication cookies to local disk.
         If False, use an in-memory cookiejar instead.  Subclasses must
         implement this functionality.  Defaults to False.
     """
     self.host = host
     self.host_override = host_override
     self.auth_function = auth_function
     self.authenticated = False
     self.extra_headers = extra_headers
     self.save_cookies = save_cookies
     self.opener = self._GetOpener()
     if self.host_override:
       logging.info("Server: %s; Host: %s", self.host, self.host_override)
     else:
       logging.info("Server: %s", self.host)
 
   def _GetOpener(self):
     """Returns an OpenerDirector for making HTTP requests.
 
     Returns:
       A urllib2.OpenerDirector object.
     """
     raise NotImplementedError()
 
   def _CreateRequest(self, url, data=None):
     """Creates a new urllib request."""
     logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
     req = urllib2.Request(url, data=data)
     if self.host_override:
       req.add_header("Host", self.host_override)
     for key, value in self.extra_headers.iteritems():
       req.add_header(key, value)
     return req
 
   def _GetAuthToken(self, email, password):
     """Uses ClientLogin to authenticate the user, returning an auth token.
 
     Args:
       email:    The user's email address
       password: The user's password
 
     Raises:
       ClientLoginError: If there was an error authenticating with ClientLogin.
       HTTPError: If there was some other form of HTTP error.
 
     Returns:
       The authentication token returned by ClientLogin.
     """
     account_type = "GOOGLE"
     if self.host.endswith(".google.com"):
       # Needed for use inside Google.
       account_type = "HOSTED"
     req = self._CreateRequest(
         url="https://www.google.com/accounts/ClientLogin",
         data=urllib.urlencode({
             "Email": email,
             "Passwd": password,
             "service": "ah",
             "source": "rietveld-codereview-upload",
             "accountType": account_type,
         }),
     )
     try:
       response = self.opener.open(req)
       response_body = response.read()
       response_dict = dict(x.split("=")
                            for x in response_body.split("\n") if x)
       return response_dict["Auth"]
     except urllib2.HTTPError, e:
       if e.code == 403:
         body = e.read()
         response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
         raise ClientLoginError(req.get_full_url(), e.code, e.msg,
                                e.headers, response_dict)
       else:
         raise
 
   def _GetAuthCookie(self, auth_token):
     """Fetches authentication cookies for an authentication token.
 
     Args:
       auth_token: The authentication token returned by ClientLogin.
 
     Raises:
       HTTPError: If there was an error fetching the authentication cookies.
     """
     # This is a dummy value to allow us to identify when we're successful.
     continue_location = "http://localhost/"
     args = {"continue": continue_location, "auth": auth_token}
     req = self._CreateRequest("http://%s/_ah/login?%s" %
                               (self.host, urllib.urlencode(args)))
     try:
       response = self.opener.open(req)
     except urllib2.HTTPError, e:
       response = e
     if (response.code != 302 or
         response.info()["location"] != continue_location):
       raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
                               response.headers, response.fp)
     self.authenticated = True
 
   def _Authenticate(self):
     """Authenticates the user.
 
     The authentication process works as follows:
      1) We get a username and password from the user
      2) We use ClientLogin to obtain an AUTH token for the user
-        (see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
+        (see https://developers.google.com/identity/protocols/AuthForInstalledApps).
      3) We pass the auth token to /_ah/login on the server to obtain an
         authentication cookie. If login was successful, it tries to redirect
         us to the URL we provided.
 
     If we attempt to access the upload API without first obtaining an
     authentication cookie, it returns a 401 response and directs us to
     authenticate ourselves with ClientLogin.
     """
     for i in range(3):
       credentials = self.auth_function()
       try:
         auth_token = self._GetAuthToken(credentials[0], credentials[1])
       except ClientLoginError, e:
         if e.reason == "BadAuthentication":
           print >>sys.stderr, "Invalid username or password."
           continue
         if e.reason == "CaptchaRequired":
           print >>sys.stderr, (
               "Please go to\n"
               "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
               "and verify you are a human.  Then try again.")
           break
         if e.reason == "NotVerified":
           print >>sys.stderr, "Account not verified."
           break
         if e.reason == "TermsNotAgreed":
           print >>sys.stderr, "User has not agreed to TOS."
           break
         if e.reason == "AccountDeleted":
           print >>sys.stderr, "The user account has been deleted."
           break
         if e.reason == "AccountDisabled":
           print >>sys.stderr, "The user account has been disabled."
           break
         if e.reason == "ServiceDisabled":
           print >>sys.stderr, ("The user's access to the service has been "
                                "disabled.")
           break
         if e.reason == "ServiceUnavailable":
           print >>sys.stderr, "The service is not available; try again later."
           break
         raise
       self._GetAuthCookie(auth_token)
       return
 
   def Send(self, request_path, payload=None,
            content_type="application/octet-stream",
            timeout=None,
            **kwargs):
     """Sends an RPC and returns the response.
 
     Args:
       request_path: The path to send the request to, eg /api/appversion/create.
       payload: The body of the request, or None to send an empty request.
       content_type: The Content-Type header to use.
       timeout: timeout in seconds; default None i.e. no timeout.
         (Note: for large requests on OS X, the timeout doesn't work right.)
       kwargs: Any keyword arguments are converted into query string parameters.
 
     Returns:
       The response body, as a string.
     """
     # TODO: Don't require authentication.  Let the server say
     # whether it is necessary.
     if not self.authenticated:
       self._Authenticate()
 
     old_timeout = socket.getdefaulttimeout()
     socket.setdefaulttimeout(timeout)
     try:
       tries = 0
       while True:
         tries += 1
         args = dict(kwargs)
         url = "http://%s%s" % (self.host, request_path)
         if args:
           url += "?" + urllib.urlencode(args)
         req = self._CreateRequest(url=url, data=payload)
         req.add_header("Content-Type", content_type)
         try:
           f = self.opener.open(req)
           response = f.read()
           f.close()
           return response
         except urllib2.HTTPError, e:
           if tries > 3:
             raise
           elif e.code == 401:
             self._Authenticate()
 ##           elif e.code >= 500 and e.code < 600:
 ##             # Server Error - try again.
 ##             continue
           else:
             raise
     finally:
       socket.setdefaulttimeout(old_timeout)
 
 
 class HttpRpcServer(AbstractRpcServer):
   """Provides a simplified RPC-style interface for HTTP requests."""
 
   def _Authenticate(self):
     """Save the cookie jar after authentication."""
     super(HttpRpcServer, self)._Authenticate()
     if self.save_cookies:
       StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
       self.cookie_jar.save()
 
   def _GetOpener(self):
     """Returns an OpenerDirector that supports cookies and ignores redirects.
 
     Returns:
       A urllib2.OpenerDirector object.
     """
     opener = urllib2.OpenerDirector()
     opener.add_handler(urllib2.ProxyHandler())
     opener.add_handler(urllib2.UnknownHandler())
     opener.add_handler(urllib2.HTTPHandler())
     opener.add_handler(urllib2.HTTPDefaultErrorHandler())
     opener.add_handler(urllib2.HTTPSHandler())
     opener.add_handler(urllib2.HTTPErrorProcessor())
     if self.save_cookies:
       self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
       self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
       if os.path.exists(self.cookie_file):
         try:
           self.cookie_jar.load()
           self.authenticated = True
           StatusUpdate("Loaded authentication cookies from %s" %
                        self.cookie_file)
         except (cookielib.LoadError, IOError):
           # Failed to load cookies - just ignore them.
           pass
       else:
         # Create an empty cookie file with mode 600
         fd = os.open(self.cookie_file, os.O_CREAT, 0600)
         os.close(fd)
       # Always chmod the cookie file
       os.chmod(self.cookie_file, 0600)
     else:
       # Don't save cookies across runs of upload.py.
       self.cookie_jar = cookielib.CookieJar()
     opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
     return opener
 
 
 parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]")
 parser.add_option("-y", "--assume_yes", action="store_true",
                   dest="assume_yes", default=False,
                   help="Assume that the answer to yes/no questions is 'yes'.")
 # Logging
 group = parser.add_option_group("Logging options")
 group.add_option("-q", "--quiet", action="store_const", const=0,
                  dest="verbose", help="Print errors only.")
 group.add_option("-v", "--verbose", action="store_const", const=2,
                  dest="verbose", default=1,
                  help="Print info level logs (default).")
 group.add_option("--noisy", action="store_const", const=3,
                  dest="verbose", help="Print all logs.")
 # Review server
 group = parser.add_option_group("Review server options")
 group.add_option("-s", "--server", action="store", dest="server",
                  default="codereview.appspot.com",
                  metavar="SERVER",
                  help=("The server to upload to. The format is host[:port]. "
                        "Defaults to 'codereview.appspot.com'."))
 group.add_option("-e", "--email", action="store", dest="email",
                  metavar="EMAIL", default=None,
                  help="The username to use. Will prompt if omitted.")
 group.add_option("-H", "--host", action="store", dest="host",
                  metavar="HOST", default=None,
                  help="Overrides the Host header sent with all RPCs.")
 group.add_option("--no_cookies", action="store_false",
                  dest="save_cookies", default=True,
                  help="Do not save authentication cookies to local disk.")
 # Issue
 group = parser.add_option_group("Issue options")
 group.add_option("-d", "--description", action="store", dest="description",
                  metavar="DESCRIPTION", default=None,
                  help="Optional description when creating an issue.")
 group.add_option("-f", "--description_file", action="store",
                  dest="description_file", metavar="DESCRIPTION_FILE",
                  default=None,
                  help="Optional path of a file that contains "
                       "the description when creating an issue.")
 group.add_option("-r", "--reviewers", action="store", dest="reviewers",
                  metavar="REVIEWERS", default=None,
                  help="Add reviewers (comma separated email addresses).")
 group.add_option("--cc", action="store", dest="cc",
                  metavar="CC", default=None,
                  help="Add CC (comma separated email addresses).")
 # Upload options
 group = parser.add_option_group("Patch options")
 group.add_option("-m", "--message", action="store", dest="message",
                  metavar="MESSAGE", default=None,
                  help="A message to identify the patch. "
                       "Will prompt if omitted.")
 group.add_option("-i", "--issue", type="int", action="store",
                  metavar="ISSUE", default=None,
                  help="Issue number to which to add. Defaults to new issue.")
 group.add_option("--download_base", action="store_true",
                  dest="download_base", default=False,
                  help="Base files will be downloaded by the server "
                  "(side-by-side diffs may not work on files with CRs).")
 group.add_option("--rev", action="store", dest="revision",
                  metavar="REV", default=None,
                  help="Branch/tree/revision to diff against (used by DVCS).")
 group.add_option("--send_mail", action="store_true",
                  dest="send_mail", default=False,
                  help="Send notification email to reviewers.")
 
 
 def GetRpcServer(options):
   """Returns an instance of an AbstractRpcServer.
 
   Returns:
     A new AbstractRpcServer, on which RPC calls can be made.
   """
 
   rpc_server_class = HttpRpcServer
 
   def GetUserCredentials():
     """Prompts the user for a username and password."""
     email = options.email
     if email is None:
       email = GetEmail("Email (login for uploading to %s)" % options.server)
     password = getpass.getpass("Password for %s: " % email)
     return (email, password)
 
   # If this is the dev_appserver, use fake authentication.
   host = (options.host or options.server).lower()
   if host == "localhost" or host.startswith("localhost:"):
     email = options.email
     if email is None:
       email = "test@example.com"
       logging.info("Using debug user %s.  Override with --email" % email)
     server = rpc_server_class(
         options.server,
         lambda: (email, "password"),
         host_override=options.host,
         extra_headers={"Cookie":
                        'dev_appserver_login="%s:False"' % email},
         save_cookies=options.save_cookies)
     # Don't try to talk to ClientLogin.
     server.authenticated = True
     return server
 
   return rpc_server_class(options.server, GetUserCredentials,
                           host_override=options.host,
                           save_cookies=options.save_cookies)
 
 
 def EncodeMultipartFormData(fields, files):
   """Encode form fields for multipart/form-data.
 
   Args:
     fields: A sequence of (name, value) elements for regular form fields.
     files: A sequence of (name, filename, value) elements for data to be
            uploaded as files.
   Returns:
     (content_type, body) ready for httplib.HTTP instance.
 
   Source:
-    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
+    https://web.archive.org/web/20160116052001/code.activestate.com/recipes/146306
   """
   BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
   CRLF = '\r\n'
   lines = []
   for (key, value) in fields:
     lines.append('--' + BOUNDARY)
     lines.append('Content-Disposition: form-data; name="%s"' % key)
     lines.append('')
     lines.append(value)
   for (key, filename, value) in files:
     lines.append('--' + BOUNDARY)
     lines.append('Content-Disposition: form-data; name="%s"; filename="%s"' %
              (key, filename))
     lines.append('Content-Type: %s' % GetContentType(filename))
     lines.append('')
     lines.append(value)
   lines.append('--' + BOUNDARY + '--')
   lines.append('')
   body = CRLF.join(lines)
   content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
   return content_type, body
 
 
 def GetContentType(filename):
   """Helper to guess the content-type from the filename."""
   return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
 
 
 # Use a shell for subcommands on Windows to get a PATH search.
 use_shell = sys.platform.startswith("win")
 
 def RunShellWithReturnCode(command, print_output=False,
                            universal_newlines=True):
   """Executes a command and returns the output from stdout and the return code.
 
   Args:
     command: Command to execute.
     print_output: If True, the output is printed to stdout.
                   If False, both stdout and stderr are ignored.
     universal_newlines: Use universal_newlines flag (default: True).
 
   Returns:
     Tuple (output, return code)
   """
   logging.info("Running %s", command)
   p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                        shell=use_shell, universal_newlines=universal_newlines)
   if print_output:
     output_array = []
     while True:
       line = p.stdout.readline()
       if not line:
         break
       print line.strip("\n")
       output_array.append(line)
     output = "".join(output_array)
   else:
     output = p.stdout.read()
   p.wait()
   errout = p.stderr.read()
   if print_output and errout:
     print >>sys.stderr, errout
   p.stdout.close()
   p.stderr.close()
   return output, p.returncode
 
 
 def RunShell(command, silent_ok=False, universal_newlines=True,
              print_output=False):
   data, retcode = RunShellWithReturnCode(command, print_output,
                                          universal_newlines)
   if retcode:
     ErrorExit("Got error status from %s:\n%s" % (command, data))
   if not silent_ok and not data:
     ErrorExit("No output from %s" % command)
   return data
 
 
 class VersionControlSystem(object):
   """Abstract base class providing an interface to the VCS."""
 
   def __init__(self, options):
     """Constructor.
 
     Args:
       options: Command line options.
     """
     self.options = options
 
   def GenerateDiff(self, args):
     """Return the current diff as a string.
 
     Args:
       args: Extra arguments to pass to the diff command.
     """
     raise NotImplementedError(
         "abstract method -- subclass %s must override" % self.__class__)
 
   def GetUnknownFiles(self):
     """Return a list of files unknown to the VCS."""
     raise NotImplementedError(
         "abstract method -- subclass %s must override" % self.__class__)
 
   def CheckForUnknownFiles(self):
     """Show an "are you sure?" prompt if there are unknown files."""
     unknown_files = self.GetUnknownFiles()
     if unknown_files:
       print "The following files are not added to version control:"
       for line in unknown_files:
         print line
       prompt = "Are you sure you want to continue? (y/N) "
       answer = raw_input(prompt).strip()
       if answer != "y":
         ErrorExit("User aborted")
 
   def GetBaseFile(self, filename):
     """Get the content of the upstream version of a file.
 
     Returns:
       A tuple (base_content, new_content, is_binary, status)
         base_content: The contents of the base file.
         new_content: For text files, this is empty.  For binary files, this is
           the contents of the new file, since the diff output won't contain
           information to reconstruct the current file.
         is_binary: True iff the file is binary.
         status: The status of the file.
     """
 
     raise NotImplementedError(
         "abstract method -- subclass %s must override" % self.__class__)
 
 
   def GetBaseFiles(self, diff):
     """Helper that calls GetBase file for each file in the patch.
 
     Returns:
       A dictionary that maps from filename to GetBaseFile's tuple.  Filenames
       are retrieved based on lines that start with "Index:" or
       "Property changes on:".
     """
     files = {}
     for line in diff.splitlines(True):
       if line.startswith('Index:') or line.startswith('Property changes on:'):
         unused, filename = line.split(':', 1)
         # On Windows if a file has property changes its filename uses '\'
         # instead of '/'.
         filename = filename.strip().replace('\\', '/')
         files[filename] = self.GetBaseFile(filename)
     return files
 
 
   def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
                       files):
     """Uploads the base files (and if necessary, the current ones as well)."""
 
     def UploadFile(filename, file_id, content, is_binary, status, is_base):
       """Uploads a file to the server."""
       file_too_large = False
       if is_base:
         type = "base"
       else:
         type = "current"
       if len(content) > MAX_UPLOAD_SIZE:
         print ("Not uploading the %s file for %s because it's too large." %
                (type, filename))
         file_too_large = True
         content = ""
       checksum = md5.new(content).hexdigest()
       if options.verbose > 0 and not file_too_large:
         print "Uploading %s file for %s" % (type, filename)
       url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
       form_fields = [("filename", filename),
                      ("status", status),
                      ("checksum", checksum),
                      ("is_binary", str(is_binary)),
                      ("is_current", str(not is_base)),
                     ]
       if file_too_large:
         form_fields.append(("file_too_large", "1"))
       if options.email:
         form_fields.append(("user", options.email))
       ctype, body = EncodeMultipartFormData(form_fields,
                                             [("data", filename, content)])
       response_body = rpc_server.Send(url, body,
                                       content_type=ctype)
       if not response_body.startswith("OK"):
         StatusUpdate("  --> %s" % response_body)
         sys.exit(1)
 
     patches = dict()
     [patches.setdefault(v, k) for k, v in patch_list]
     for filename in patches.keys():
       base_content, new_content, is_binary, status = files[filename]
       file_id_str = patches.get(filename)
       if file_id_str.find("nobase") != -1:
         base_content = None
         file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
       file_id = int(file_id_str)
       if base_content != None:
         UploadFile(filename, file_id, base_content, is_binary, status, True)
       if new_content != None:
         UploadFile(filename, file_id, new_content, is_binary, status, False)
 
   def IsImage(self, filename):
     """Returns true if the filename has an image extension."""
     mimetype = mimetypes.guess_type(filename)[0]
     if not mimetype:
       return False
     return mimetype.startswith("image/")
 
 
 class SubversionVCS(VersionControlSystem):
   """Implementation of the VersionControlSystem interface for Subversion."""
 
   def __init__(self, options):
     super(SubversionVCS, self).__init__(options)
     if self.options.revision:
       match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
       if not match:
         ErrorExit("Invalid Subversion revision %s." % self.options.revision)
       self.rev_start = match.group(1)
       self.rev_end = match.group(3)
     else:
       self.rev_start = self.rev_end = None
     # Cache output from "svn list -r REVNO dirname".
     # Keys: dirname, Values: 2-tuple (output for start rev and end rev).
     self.svnls_cache = {}
     # SVN base URL is required to fetch files deleted in an older revision.
     # Result is cached to not guess it over and over again in GetBaseFile().
     required = self.options.download_base or self.options.revision is not None
     self.svn_base = self._GuessBase(required)
 
   def GuessBase(self, required):
     """Wrapper for _GuessBase."""
     return self.svn_base
 
   def _GuessBase(self, required):
     """Returns the SVN base URL.
 
     Args:
       required: If true, exits if the url can't be guessed, otherwise None is
         returned.
     """
     info = RunShell(["svn", "info"])
     for line in info.splitlines():
       words = line.split()
       if len(words) == 2 and words[0] == "URL:":
         url = words[1]
         scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
         username, netloc = urllib.splituser(netloc)
         if username:
           logging.info("Removed username from base URL")
         if netloc.endswith("svn.python.org"):
           if netloc == "svn.python.org":
             if path.startswith("/projects/"):
               path = path[9:]
           elif netloc != "pythondev@svn.python.org":
             ErrorExit("Unrecognized Python URL: %s" % url)
           base = "http://svn.python.org/view/*checkout*%s/" % path
           logging.info("Guessed Python base = %s", base)
         elif netloc.endswith("svn.collab.net"):
           if path.startswith("/repos/"):
             path = path[6:]
           base = "http://svn.collab.net/viewvc/*checkout*%s/" % path
           logging.info("Guessed CollabNet base = %s", base)
         elif netloc.endswith(".googlecode.com"):
           path = path + "/"
           base = urlparse.urlunparse(("http", netloc, path, params,
                                       query, fragment))
           logging.info("Guessed Google Code base = %s", base)
         else:
           path = path + "/"
           base = urlparse.urlunparse((scheme, netloc, path, params,
                                       query, fragment))
           logging.info("Guessed base = %s", base)
         return base
     if required:
       ErrorExit("Can't find URL in output from svn info")
     return None
 
   def GenerateDiff(self, args):
     cmd = ["svn", "diff"]
     if self.options.revision:
       cmd += ["-r", self.options.revision]
     cmd.extend(args)
     data = RunShell(cmd)
     count = 0
     for line in data.splitlines():
       if line.startswith("Index:") or line.startswith("Property changes on:"):
         count += 1
         logging.info(line)
     if not count:
       ErrorExit("No valid patches found in output from svn diff")
     return data
 
   def _CollapseKeywords(self, content, keyword_str):
     """Collapses SVN keywords."""
     # svn cat translates keywords but svn diff doesn't. As a result of this
     # behavior patching.PatchChunks() fails with a chunk mismatch error.
     # This part was originally written by the Review Board development team
-    # who had the same problem (http://reviews.review-board.org/r/276/).
+    # who had the same problem (https://reviews.reviewboard.org/r/276/).
     # Mapping of keywords to known aliases
     svn_keywords = {
       # Standard keywords
       'Date':                ['Date', 'LastChangedDate'],
       'Revision':            ['Revision', 'LastChangedRevision', 'Rev'],
       'Author':              ['Author', 'LastChangedBy'],
       'HeadURL':             ['HeadURL', 'URL'],
       'Id':                  ['Id'],
 
       # Aliases
       'LastChangedDate':     ['LastChangedDate', 'Date'],
       'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
       'LastChangedBy':       ['LastChangedBy', 'Author'],
       'URL':                 ['URL', 'HeadURL'],
     }
 
     def repl(m):
        if m.group(2):
          return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
        return "$%s$" % m.group(1)
     keywords = [keyword
                 for name in keyword_str.split(" ")
                 for keyword in svn_keywords.get(name, [])]
     return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)
 
   def GetUnknownFiles(self):
     status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
     unknown_files = []
     for line in status.split("\n"):
       if line and line[0] == "?":
         unknown_files.append(line)
     return unknown_files
 
   def ReadFile(self, filename):
     """Returns the contents of a file."""
     file = open(filename, 'rb')
     result = ""
     try:
       result = file.read()
     finally:
       file.close()
     return result
 
   def GetStatus(self, filename):
     """Returns the status of a file."""
     if not self.options.revision:
       status = RunShell(["svn", "status", "--ignore-externals", filename])
       if not status:
         ErrorExit("svn status returned no output for %s" % filename)
       status_lines = status.splitlines()
       # If file is in a cl, the output will begin with
       # "\n--- Changelist 'cl_name':\n".  See
-      # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
+      # https://web.archive.org/web/20090918234815/svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
       if (len(status_lines) == 3 and
           not status_lines[0] and
           status_lines[1].startswith("--- Changelist")):
         status = status_lines[2]
       else:
         status = status_lines[0]
     # If we have a revision to diff against we need to run "svn list"
     # for the old and the new revision and compare the results to get
     # the correct status for a file.
     else:
       dirname, relfilename = os.path.split(filename)
       if dirname not in self.svnls_cache:
         cmd = ["svn", "list", "-r", self.rev_start, dirname or "."]
         out, returncode = RunShellWithReturnCode(cmd)
         if returncode:
           ErrorExit("Failed to get status for %s." % filename)
         old_files = out.splitlines()
         args = ["svn", "list"]
         if self.rev_end:
           args += ["-r", self.rev_end]
         cmd = args + [dirname or "."]
         out, returncode = RunShellWithReturnCode(cmd)
         if returncode:
           ErrorExit("Failed to run command %s" % cmd)
         self.svnls_cache[dirname] = (old_files, out.splitlines())
       old_files, new_files = self.svnls_cache[dirname]
       if relfilename in old_files and relfilename not in new_files:
         status = "D   "
       elif relfilename in old_files and relfilename in new_files:
         status = "M   "
       else:
         status = "A   "
     return status
 
   def GetBaseFile(self, filename):
     status = self.GetStatus(filename)
     base_content = None
     new_content = None
 
     # If a file is copied its status will be "A  +", which signifies
     # "addition-with-history".  See "svn st" for more information.  We need to
     # upload the original file or else diff parsing will fail if the file was
     # edited.
     if status[0] == "A" and status[3] != "+":
       # We'll need to upload the new content if we're adding a binary file
       # since diff's output won't contain it.
       mimetype = RunShell(["svn", "propget", "svn:mime-type", filename],
                           silent_ok=True)
       base_content = ""
       is_binary = mimetype and not mimetype.startswith("text/")
       if is_binary and self.IsImage(filename):
         new_content = self.ReadFile(filename)
     elif (status[0] in ("M", "D", "R") or
           (status[0] == "A" and status[3] == "+") or  # Copied file.
           (status[0] == " " and status[1] == "M")):  # Property change.
       args = []
       if self.options.revision:
         url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
       else:
         # Don't change filename, it's needed later.
         url = filename
         args += ["-r", "BASE"]
       cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
       mimetype, returncode = RunShellWithReturnCode(cmd)
       if returncode:
         # File does not exist in the requested revision.
         # Reset mimetype, it contains an error message.
         mimetype = ""
       get_base = False
       is_binary = mimetype and not mimetype.startswith("text/")
       if status[0] == " ":
         # Empty base content just to force an upload.
         base_content = ""
       elif is_binary:
         if self.IsImage(filename):
           get_base = True
           if status[0] == "M":
             if not self.rev_end:
               new_content = self.ReadFile(filename)
             else:
               url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
               new_content = RunShell(["svn", "cat", url],
                                      universal_newlines=True, silent_ok=True)
         else:
           base_content = ""
       else:
         get_base = True
 
       if get_base:
         if is_binary:
           universal_newlines = False
         else:
           universal_newlines = True
         if self.rev_start:
           # "svn cat -r REV delete_file.txt" doesn't work. cat requires
           # the full URL with "@REV" appended instead of using "-r" option.
           url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
           base_content = RunShell(["svn", "cat", url],
                                   universal_newlines=universal_newlines,
                                   silent_ok=True)
         else:
           base_content = RunShell(["svn", "cat", filename],
                                   universal_newlines=universal_newlines,
                                   silent_ok=True)
         if not is_binary:
           args = []
           if self.rev_start:
             url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
           else:
             url = filename
             args += ["-r", "BASE"]
           cmd = ["svn"] + args + ["propget", "svn:keywords", url]
           keywords, returncode = RunShellWithReturnCode(cmd)
           if keywords and not returncode:
             base_content = self._CollapseKeywords(base_content, keywords)
     else:
       StatusUpdate("svn status returned unexpected output: %s" % status)
       sys.exit(1)
     return base_content, new_content, is_binary, status[0:5]
 
 
 class GitVCS(VersionControlSystem):
   """Implementation of the VersionControlSystem interface for Git."""
 
   def __init__(self, options):
     super(GitVCS, self).__init__(options)
     # Map of filename -> hash of base file.
     self.base_hashes = {}
 
   def GenerateDiff(self, extra_args):
     # This is more complicated than svn's GenerateDiff because we must convert
     # the diff output to include an svn-style "Index:" line as well as record
     # the hashes of the base files, so we can upload them along with our diff.
     if self.options.revision:
       extra_args = [self.options.revision] + extra_args
     gitdiff = RunShell(["git", "diff", "--full-index"] + extra_args)
     svndiff = []
     filecount = 0
     filename = None
     for line in gitdiff.splitlines():
       match = re.match(r"diff --git a/(.*) b/.*$", line)
       if match:
         filecount += 1
         filename = match.group(1)
         svndiff.append("Index: %s\n" % filename)
       else:
         # The "index" line in a git diff looks like this (long hashes elided):
         #   index 82c0d44..b2cee3f 100755
         # We want to save the left hash, as that identifies the base file.
         match = re.match(r"index (\w+)\.\.", line)
         if match:
           self.base_hashes[filename] = match.group(1)
       svndiff.append(line + "\n")
     if not filecount:
       ErrorExit("No valid patches found in output from git diff")
     return "".join(svndiff)
 
   def GetUnknownFiles(self):
     status = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
                       silent_ok=True)
     return status.splitlines()
 
   def GetBaseFile(self, filename):
     hash = self.base_hashes[filename]
     base_content = None
     new_content = None
     is_binary = False
     if hash == "0" * 40:  # All-zero hash indicates no base file.
       status = "A"
       base_content = ""
     else:
       status = "M"
       base_content, returncode = RunShellWithReturnCode(["git", "show", hash])
       if returncode:
         ErrorExit("Got error status from 'git show %s'" % hash)
     return (base_content, new_content, is_binary, status)
 
 
 class MercurialVCS(VersionControlSystem):
   """Implementation of the VersionControlSystem interface for Mercurial."""
 
   def __init__(self, options, repo_dir):
     super(MercurialVCS, self).__init__(options)
     # Absolute path to repository (we can be in a subdir)
     self.repo_dir = os.path.normpath(repo_dir)
     # Compute the subdir
     cwd = os.path.normpath(os.getcwd())
     assert cwd.startswith(self.repo_dir)
     self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
     if self.options.revision:
       self.base_rev = self.options.revision
     else:
       self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()
 
   def _GetRelPath(self, filename):
     """Get relative path of a file according to the current directory,
     given its logical path in the repo."""
     assert filename.startswith(self.subdir), filename
     return filename[len(self.subdir):].lstrip(r"\/")
 
   def GenerateDiff(self, extra_args):
     # If no file specified, restrict to the current subdir
     extra_args = extra_args or ["."]
     cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
     data = RunShell(cmd, silent_ok=True)
     svndiff = []
     filecount = 0
     for line in data.splitlines():
       m = re.match("diff --git a/(\S+) b/(\S+)", line)
       if m:
         # Modify the line to make it look as if it comes from svn diff.
         # With this modification no changes on the server side are required
         # to make upload.py work with Mercurial repos.
         # NOTE: for proper handling of moved/copied files, we have to use
         # the second filename.
         filename = m.group(2)
         svndiff.append("Index: %s" % filename)
         svndiff.append("=" * 67)
         filecount += 1
         logging.info(line)
       else:
         svndiff.append(line)
     if not filecount:
       ErrorExit("No valid patches found in output from hg diff")
     return "\n".join(svndiff) + "\n"
 
   def GetUnknownFiles(self):
     """Return a list of files unknown to the VCS."""
     args = []
     status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
         silent_ok=True)
     unknown_files = []
     for line in status.splitlines():
       st, fn = line.split(" ", 1)
       if st == "?":
         unknown_files.append(fn)
     return unknown_files
 
   def GetBaseFile(self, filename):
     # "hg status" and "hg cat" both take a path relative to the current subdir
     # rather than to the repo root, but "hg diff" has given us the full path
     # to the repo root.
     base_content = ""
     new_content = None
     is_binary = False
     oldrelpath = relpath = self._GetRelPath(filename)
     # "hg status -C" returns two lines for moved/copied files, one otherwise
     out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
     out = out.splitlines()
     # HACK: strip error message about missing file/directory if it isn't in
     # the working copy
     if out[0].startswith('%s: ' % relpath):
       out = out[1:]
     if len(out) > 1:
       # Moved/copied => considered as modified, use old filename to
       # retrieve base contents
       oldrelpath = out[1].strip()
       status = "M"
     else:
       status, _ = out[0].split(' ', 1)
     if status != "A":
       base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
         silent_ok=True)
       is_binary = "\0" in base_content  # Mercurial's heuristic
     if status != "R":
       new_content = open(relpath, "rb").read()
       is_binary = is_binary or "\0" in new_content
     if is_binary and base_content:
       # Fetch again without converting newlines
       base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
         silent_ok=True, universal_newlines=False)
     if not is_binary or not self.IsImage(relpath):
       new_content = None
     return base_content, new_content, is_binary, status
 
 
 # NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
 def SplitPatch(data):
   """Splits a patch into separate pieces for each file.
 
   Args:
     data: A string containing the output of svn diff.
 
   Returns:
     A list of 2-tuple (filename, text) where text is the svn diff output
       pertaining to filename.
   """
   patches = []
   filename = None
   diff = []
   for line in data.splitlines(True):
     new_filename = None
     if line.startswith('Index:'):
       unused, new_filename = line.split(':', 1)
       new_filename = new_filename.strip()
     elif line.startswith('Property changes on:'):
       unused, temp_filename = line.split(':', 1)
       # When a file is modified, paths use '/' between directories; however,
       # when a property is modified, '\' is used on Windows.  Make them the
       # same, otherwise the file shows up twice.
       temp_filename = temp_filename.strip().replace('\\', '/')
       if temp_filename != filename:
         # File has property changes but no modifications, create a new diff.
         new_filename = temp_filename
     if new_filename:
       if filename and diff:
         patches.append((filename, ''.join(diff)))
       filename = new_filename
       diff = [line]
       continue
     if diff is not None:
       diff.append(line)
   if filename and diff:
     patches.append((filename, ''.join(diff)))
   return patches
 
 
 def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
   """Uploads a separate patch for each file in the diff output.
 
   Returns a list of [patch_key, filename] for each file.
   """
   patches = SplitPatch(data)
   rv = []
   for patch in patches:
     if len(patch[1]) > MAX_UPLOAD_SIZE:
       print ("Not uploading the patch for " + patch[0] +
              " because the file is too large.")
       continue
     form_fields = [("filename", patch[0])]
     if not options.download_base:
       form_fields.append(("content_upload", "1"))
     files = [("data", "data.diff", patch[1])]
     ctype, body = EncodeMultipartFormData(form_fields, files)
     url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
     print "Uploading patch for " + patch[0]
     response_body = rpc_server.Send(url, body, content_type=ctype)
     lines = response_body.splitlines()
     if not lines or lines[0] != "OK":
       StatusUpdate("  --> %s" % response_body)
       sys.exit(1)
     rv.append([lines[1], patch[0]])
   return rv
 
 
 def GuessVCS(options):
   """Helper to guess the version control system.
 
   This examines the current directory, guesses which VersionControlSystem
   we're using, and returns an instance of the appropriate class.  Exit with an
   error if we can't figure it out.
 
   Returns:
     A VersionControlSystem instance. Exits if the VCS can't be guessed.
   """
   # Mercurial has a command to get the base directory of a repository
   # Try running it, but don't die if we don't have hg installed.
   # NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
   try:
     out, returncode = RunShellWithReturnCode(["hg", "root"])
     if returncode == 0:
       return MercurialVCS(options, out.strip())
   except OSError, (errno, message):
     if errno != 2:  # ENOENT -- they don't have hg installed.
       raise
 
   # Subversion has a .svn in all working directories.
   if os.path.isdir('.svn'):
     logging.info("Guessed VCS = Subversion")
     return SubversionVCS(options)
 
   # Git has a command to test if you're in a git tree.
   # Try running it, but don't die if we don't have git installed.
   try:
     out, returncode = RunShellWithReturnCode(["git", "rev-parse",
                                               "--is-inside-work-tree"])
     if returncode == 0:
       return GitVCS(options)
   except OSError, (errno, message):
     if errno != 2:  # ENOENT -- they don't have git installed.
       raise
 
   ErrorExit(("Could not guess version control system. "
              "Are you in a working copy directory?"))
 
 
 def RealMain(argv, data=None):
   """The real main function.
 
   Args:
     argv: Command line arguments.
     data: Diff contents. If None (default) the diff is generated by
       the VersionControlSystem implementation returned by GuessVCS().
 
   Returns:
     A 2-tuple (issue id, patchset id).
     The patchset id is None if the base files are not uploaded by this
     script (applies only to SVN checkouts).
   """
   logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
                               "%(lineno)s %(message)s "))
   os.environ['LC_ALL'] = 'C'
   options, args = parser.parse_args(argv[1:])
   global verbosity
   verbosity = options.verbose
   if verbosity >= 3:
     logging.getLogger().setLevel(logging.DEBUG)
   elif verbosity >= 2:
     logging.getLogger().setLevel(logging.INFO)
   vcs = GuessVCS(options)
   if isinstance(vcs, SubversionVCS):
     # base field is only allowed for Subversion.
     # Note: Fetching base files may become deprecated in future releases.
     base = vcs.GuessBase(options.download_base)
   else:
     base = None
   if not base and options.download_base:
     options.download_base = True
     logging.info("Enabled upload of base file")
   if not options.assume_yes:
     vcs.CheckForUnknownFiles()
   if data is None:
     data = vcs.GenerateDiff(args)
   files = vcs.GetBaseFiles(data)
   if verbosity >= 1:
     print "Upload server:", options.server, "(change with -s/--server)"
   if options.issue:
     prompt = "Message describing this patch set: "
   else:
     prompt = "New issue subject: "
   message = options.message or raw_input(prompt).strip()
   if not message:
     ErrorExit("A non-empty message is required")
   rpc_server = GetRpcServer(options)
   form_fields = [("subject", message)]
   if base:
     form_fields.append(("base", base))
   if options.issue:
     form_fields.append(("issue", str(options.issue)))
   if options.email:
     form_fields.append(("user", options.email))
   if options.reviewers:
     for reviewer in options.reviewers.split(','):
       if "@" in reviewer and not reviewer.split("@")[1].count(".") == 1:
         ErrorExit("Invalid email address: %s" % reviewer)
     form_fields.append(("reviewers", options.reviewers))
   if options.cc:
     for cc in options.cc.split(','):
       if "@" in cc and not cc.split("@")[1].count(".") == 1:
         ErrorExit("Invalid email address: %s" % cc)
     form_fields.append(("cc", options.cc))
   description = options.description
   if options.description_file:
     if options.description:
       ErrorExit("Can't specify description and description_file")
     file = open(options.description_file, 'r')
     description = file.read()
     file.close()
   if description:
     form_fields.append(("description", description))
   # Send a hash of all the base files so the server can determine if a copy
   # already exists in an earlier patchset.
   base_hashes = ""
   for file, info in files.iteritems():
     if not info[0] is None:
       checksum = md5.new(info[0]).hexdigest()
       if base_hashes:
         base_hashes += "|"
       base_hashes += checksum + ":" + file
   form_fields.append(("base_hashes", base_hashes))
   # If we're uploading base files, don't send the email before the uploads, so
   # that it contains the file status.
   if options.send_mail and options.download_base:
     form_fields.append(("send_mail", "1"))
   if not options.download_base:
     form_fields.append(("content_upload", "1"))
   if len(data) > MAX_UPLOAD_SIZE:
     print "Patch is large, so uploading file patches separately."
     uploaded_diff_file = []
     form_fields.append(("separate_patches", "1"))
   else:
     uploaded_diff_file = [("data", "data.diff", data)]
   ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
   response_body = rpc_server.Send("/upload", body, content_type=ctype)
   patchset = None
   if not options.download_base or not uploaded_diff_file:
     lines = response_body.splitlines()
     if len(lines) >= 2:
       msg = lines[0]
       patchset = lines[1].strip()
       patches = [x.split(" ", 1) for x in lines[2:]]
     else:
       msg = response_body
   else:
     msg = response_body
   StatusUpdate(msg)
   if not response_body.startswith("Issue created.") and \
   not response_body.startswith("Issue updated."):
     sys.exit(0)
   issue = msg[msg.rfind("/")+1:]
 
   if not uploaded_diff_file:
     result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
     if not options.download_base:
       patches = result
 
   if not options.download_base:
     vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
     if options.send_mail:
       rpc_server.Send("/" + issue + "/mail", payload="")
   return issue, patchset
 
 
 def main():
   try:
     RealMain(sys.argv)
   except KeyboardInterrupt:
     print
     StatusUpdate("Interrupted.")
     sys.exit(1)
 
 
 if __name__ == "__main__":
   main()
diff --git a/googlemock/src/gmock_main.cc b/googlemock/src/gmock_main.cc
index bd5be03b..61821592 100644
--- a/googlemock/src/gmock_main.cc
+++ b/googlemock/src/gmock_main.cc
@@ -1,54 +1,54 @@
 // Copyright 2008, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 // Author: wan@google.com (Zhanyong Wan)
 
 #include <iostream>
 #include "gmock/gmock.h"
 #include "gtest/gtest.h"
 
 // MS C++ compiler/linker has a bug on Windows (not on Windows CE), which
 // causes a link error when _tmain is defined in a static library and UNICODE
 // is enabled. For this reason instead of _tmain, main function is used on
 // Windows. See the following link to track the current status of this bug:
-// http://connect.microsoft.com/VisualStudio/feedback/ViewFeedback.aspx?FeedbackID=394464  // NOLINT
+// https://web.archive.org/web/20170912203238/connect.microsoft.com/VisualStudio/feedback/details/394464/wmain-link-error-in-the-static-library  // NOLINT
 #if GTEST_OS_WINDOWS_MOBILE
 # include <tchar.h>  // NOLINT
 
 GTEST_API_ int _tmain(int argc, TCHAR** argv) {
 #else
 GTEST_API_ int main(int argc, char** argv) {
 #endif  // GTEST_OS_WINDOWS_MOBILE
   std::cout << "Running main() from gmock_main.cc\n";
   // Since Google Mock depends on Google Test, InitGoogleMock() is
   // also responsible for initializing Google Test.  Therefore there's
   // no need for calling testing::InitGoogleTest() separately.
   testing::InitGoogleMock(&argc, argv);
   return RUN_ALL_TESTS();
 }
diff --git a/googlemock/test/gmock-actions_test.cc b/googlemock/test/gmock-actions_test.cc
index e8bdbee7..2d169f88 100644
--- a/googlemock/test/gmock-actions_test.cc
+++ b/googlemock/test/gmock-actions_test.cc
@@ -1,1373 +1,1375 @@
 // Copyright 2007, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 // Author: wan@google.com (Zhanyong Wan)
 
 // Google Mock - a framework for writing C++ mock classes.
 //
 // This file tests the built-in actions.
 
 // Silence C4800 (C4800: 'int *const ': forcing value
 // to bool 'true' or 'false') for MSVC 14,15
 #ifdef _MSC_VER
 #if _MSC_VER <= 1900
 #  pragma warning(push)
 #  pragma warning(disable:4800)
 #endif
 #endif
 
 #include "gmock/gmock-actions.h"
 #include <algorithm>
 #include <iterator>
 #include <memory>
 #include <string>
 #include "gmock/gmock.h"
 #include "gmock/internal/gmock-port.h"
 #include "gtest/gtest.h"
 #include "gtest/gtest-spi.h"
 
 namespace {
 
 // This list should be kept sorted.
 using testing::Action;
 using testing::ActionInterface;
 using testing::Assign;
 using testing::ByMove;
 using testing::ByRef;
 using testing::DefaultValue;
 using testing::DoDefault;
 using testing::IgnoreResult;
 using testing::Invoke;
 using testing::InvokeWithoutArgs;
 using testing::MakePolymorphicAction;
 using testing::Ne;
 using testing::PolymorphicAction;
 using testing::Return;
 using testing::ReturnNull;
 using testing::ReturnRef;
 using testing::ReturnRefOfCopy;
 using testing::SetArgPointee;
 using testing::SetArgumentPointee;
 using testing::Unused;
 using testing::_;
 using testing::get;
 using testing::internal::BuiltInDefaultValue;
 using testing::internal::Int64;
 using testing::internal::UInt64;
 using testing::make_tuple;
 using testing::tuple;
 using testing::tuple_element;
 
 #if !GTEST_OS_WINDOWS_MOBILE
 using testing::SetErrnoAndReturn;
 #endif
 
 // Tests that BuiltInDefaultValue<T*>::Get() returns NULL.
 TEST(BuiltInDefaultValueTest, IsNullForPointerTypes) {
   EXPECT_TRUE(BuiltInDefaultValue<int*>::Get() == NULL);
   EXPECT_TRUE(BuiltInDefaultValue<const char*>::Get() == NULL);
   EXPECT_TRUE(BuiltInDefaultValue<void*>::Get() == NULL);
 }
 
 // Tests that BuiltInDefaultValue<T*>::Exists() returns true.
 TEST(BuiltInDefaultValueTest, ExistsForPointerTypes) {
   EXPECT_TRUE(BuiltInDefaultValue<int*>::Exists());
   EXPECT_TRUE(BuiltInDefaultValue<const char*>::Exists());
   EXPECT_TRUE(BuiltInDefaultValue<void*>::Exists());
 }
 
 // Tests that BuiltInDefaultValue<T>::Get() returns 0 when T is a
 // built-in numeric type.
 TEST(BuiltInDefaultValueTest, IsZeroForNumericTypes) {
   EXPECT_EQ(0U, BuiltInDefaultValue<unsigned char>::Get());
   EXPECT_EQ(0, BuiltInDefaultValue<signed char>::Get());
   EXPECT_EQ(0, BuiltInDefaultValue<char>::Get());
 #if GMOCK_HAS_SIGNED_WCHAR_T_
   EXPECT_EQ(0U, BuiltInDefaultValue<unsigned wchar_t>::Get());
   EXPECT_EQ(0, BuiltInDefaultValue<signed wchar_t>::Get());
 #endif
 #if GMOCK_WCHAR_T_IS_NATIVE_
 #if !defined(__WCHAR_UNSIGNED__)
   EXPECT_EQ(0, BuiltInDefaultValue<wchar_t>::Get());
 #else
   EXPECT_EQ(0U, BuiltInDefaultValue<wchar_t>::Get());
 #endif
 #endif
   EXPECT_EQ(0U, BuiltInDefaultValue<unsigned short>::Get());  // NOLINT
   EXPECT_EQ(0, BuiltInDefaultValue<signed short>::Get());  // NOLINT
   EXPECT_EQ(0, BuiltInDefaultValue<short>::Get());  // NOLINT
   EXPECT_EQ(0U, BuiltInDefaultValue<unsigned int>::Get());
   EXPECT_EQ(0, BuiltInDefaultValue<signed int>::Get());
   EXPECT_EQ(0, BuiltInDefaultValue<int>::Get());
   EXPECT_EQ(0U, BuiltInDefaultValue<unsigned long>::Get());  // NOLINT
   EXPECT_EQ(0, BuiltInDefaultValue<signed long>::Get());  // NOLINT
   EXPECT_EQ(0, BuiltInDefaultValue<long>::Get());  // NOLINT
   EXPECT_EQ(0U, BuiltInDefaultValue<UInt64>::Get());
   EXPECT_EQ(0, BuiltInDefaultValue<Int64>::Get());
   EXPECT_EQ(0, BuiltInDefaultValue<float>::Get());
   EXPECT_EQ(0, BuiltInDefaultValue<double>::Get());
 }
 
 // Tests that BuiltInDefaultValue<T>::Exists() returns true when T is a
 // built-in numeric type.
 TEST(BuiltInDefaultValueTest, ExistsForNumericTypes) {
   EXPECT_TRUE(BuiltInDefaultValue<unsigned char>::Exists());
   EXPECT_TRUE(BuiltInDefaultValue<signed char>::Exists());
   EXPECT_TRUE(BuiltInDefaultValue<char>::Exists());
 #if GMOCK_HAS_SIGNED_WCHAR_T_
   EXPECT_TRUE(BuiltInDefaultValue<unsigned wchar_t>::Exists());
   EXPECT_TRUE(BuiltInDefaultValue<signed wchar_t>::Exists());
 #endif
 #if GMOCK_WCHAR_T_IS_NATIVE_
   EXPECT_TRUE(BuiltInDefaultValue<wchar_t>::Exists());
 #endif
   EXPECT_TRUE(BuiltInDefaultValue<unsigned short>::Exists());  // NOLINT
   EXPECT_TRUE(BuiltInDefaultValue<signed short>::Exists());  // NOLINT
   EXPECT_TRUE(BuiltInDefaultValue<short>::Exists());  // NOLINT
   EXPECT_TRUE(BuiltInDefaultValue<unsigned int>::Exists());
   EXPECT_TRUE(BuiltInDefaultValue<signed int>::Exists());
   EXPECT_TRUE(BuiltInDefaultValue<int>::Exists());
   EXPECT_TRUE(BuiltInDefaultValue<unsigned long>::Exists());  // NOLINT
   EXPECT_TRUE(BuiltInDefaultValue<signed long>::Exists());  // NOLINT
   EXPECT_TRUE(BuiltInDefaultValue<long>::Exists());  // NOLINT
   EXPECT_TRUE(BuiltInDefaultValue<UInt64>::Exists());
   EXPECT_TRUE(BuiltInDefaultValue<Int64>::Exists());
   EXPECT_TRUE(BuiltInDefaultValue<float>::Exists());
   EXPECT_TRUE(BuiltInDefaultValue<double>::Exists());
 }
 
 // Tests that BuiltInDefaultValue<bool>::Get() returns false.
 TEST(BuiltInDefaultValueTest, IsFalseForBool) {
   EXPECT_FALSE(BuiltInDefaultValue<bool>::Get());
 }
 
 // Tests that BuiltInDefaultValue<bool>::Exists() returns true.
 TEST(BuiltInDefaultValueTest, BoolExists) {
   EXPECT_TRUE(BuiltInDefaultValue<bool>::Exists());
 }
 
 // Tests that BuiltInDefaultValue<T>::Get() returns "" when T is a
 // string type.
 TEST(BuiltInDefaultValueTest, IsEmptyStringForString) {
 #if GTEST_HAS_GLOBAL_STRING
   EXPECT_EQ("", BuiltInDefaultValue< ::string>::Get());
 #endif  // GTEST_HAS_GLOBAL_STRING
 
   EXPECT_EQ("", BuiltInDefaultValue< ::std::string>::Get());
 }
 
 // Tests that BuiltInDefaultValue<T>::Exists() returns true when T is a
 // string type.
 TEST(BuiltInDefaultValueTest, ExistsForString) {
 #if GTEST_HAS_GLOBAL_STRING
   EXPECT_TRUE(BuiltInDefaultValue< ::string>::Exists());
 #endif  // GTEST_HAS_GLOBAL_STRING
 
   EXPECT_TRUE(BuiltInDefaultValue< ::std::string>::Exists());
 }
 
 // Tests that BuiltInDefaultValue<const T>::Get() returns the same
 // value as BuiltInDefaultValue<T>::Get() does.
 TEST(BuiltInDefaultValueTest, WorksForConstTypes) {
   EXPECT_EQ("", BuiltInDefaultValue<const std::string>::Get());
   EXPECT_EQ(0, BuiltInDefaultValue<const int>::Get());
   EXPECT_TRUE(BuiltInDefaultValue<char* const>::Get() == NULL);
   EXPECT_FALSE(BuiltInDefaultValue<const bool>::Get());
 }
 
 // A type that's default constructible.
 class MyDefaultConstructible {
  public:
   MyDefaultConstructible() : value_(42) {}
 
   int value() const { return value_; }
 
  private:
   int value_;
 };
 
 // A type that's not default constructible.
 class MyNonDefaultConstructible {
  public:
   // Does not have a default ctor.
   explicit MyNonDefaultConstructible(int a_value) : value_(a_value) {}
 
   int value() const { return value_; }
 
  private:
   int value_;
 };
 
 #if GTEST_LANG_CXX11
 
 TEST(BuiltInDefaultValueTest, ExistsForDefaultConstructibleType) {
   EXPECT_TRUE(BuiltInDefaultValue<MyDefaultConstructible>::Exists());
 }
 
 TEST(BuiltInDefaultValueTest, IsDefaultConstructedForDefaultConstructibleType) {
   EXPECT_EQ(42, BuiltInDefaultValue<MyDefaultConstructible>::Get().value());
 }
 
 #endif  // GTEST_LANG_CXX11
 
 TEST(BuiltInDefaultValueTest, DoesNotExistForNonDefaultConstructibleType) {
   EXPECT_FALSE(BuiltInDefaultValue<MyNonDefaultConstructible>::Exists());
 }
 
 // Tests that BuiltInDefaultValue<T&>::Get() aborts the program.
 TEST(BuiltInDefaultValueDeathTest, IsUndefinedForReferences) {
   EXPECT_DEATH_IF_SUPPORTED({
     BuiltInDefaultValue<int&>::Get();
   }, "");
   EXPECT_DEATH_IF_SUPPORTED({
     BuiltInDefaultValue<const char&>::Get();
   }, "");
 }
 
 TEST(BuiltInDefaultValueDeathTest, IsUndefinedForNonDefaultConstructibleType) {
   EXPECT_DEATH_IF_SUPPORTED({
     BuiltInDefaultValue<MyNonDefaultConstructible>::Get();
   }, "");
 }
 
 // Tests that DefaultValue<T>::IsSet() is false initially.
 TEST(DefaultValueTest, IsInitiallyUnset) {
   EXPECT_FALSE(DefaultValue<int>::IsSet());
   EXPECT_FALSE(DefaultValue<MyDefaultConstructible>::IsSet());
   EXPECT_FALSE(DefaultValue<const MyNonDefaultConstructible>::IsSet());
 }
 
 // Tests that DefaultValue<T> can be set and then unset.
 TEST(DefaultValueTest, CanBeSetAndUnset) {
   EXPECT_TRUE(DefaultValue<int>::Exists());
   EXPECT_FALSE(DefaultValue<const MyNonDefaultConstructible>::Exists());
 
   DefaultValue<int>::Set(1);
   DefaultValue<const MyNonDefaultConstructible>::Set(
       MyNonDefaultConstructible(42));
 
   EXPECT_EQ(1, DefaultValue<int>::Get());
   EXPECT_EQ(42, DefaultValue<const MyNonDefaultConstructible>::Get().value());
 
   EXPECT_TRUE(DefaultValue<int>::Exists());
   EXPECT_TRUE(DefaultValue<const MyNonDefaultConstructible>::Exists());
 
   DefaultValue<int>::Clear();
   DefaultValue<const MyNonDefaultConstructible>::Clear();
 
   EXPECT_FALSE(DefaultValue<int>::IsSet());
   EXPECT_FALSE(DefaultValue<const MyNonDefaultConstructible>::IsSet());
 
   EXPECT_TRUE(DefaultValue<int>::Exists());
   EXPECT_FALSE(DefaultValue<const MyNonDefaultConstructible>::Exists());
 }
 
 // Tests that DefaultValue<T>::Get() returns the value of
 // BuiltInDefaultValue<T>::Get() when DefaultValue<T>::IsSet() is
 // false.
 TEST(DefaultValueDeathTest, GetReturnsBuiltInDefaultValueWhenUnset) {
   EXPECT_FALSE(DefaultValue<int>::IsSet());
   EXPECT_TRUE(DefaultValue<int>::Exists());
   EXPECT_FALSE(DefaultValue<MyNonDefaultConstructible>::IsSet());
   EXPECT_FALSE(DefaultValue<MyNonDefaultConstructible>::Exists());
 
   EXPECT_EQ(0, DefaultValue<int>::Get());
 
   EXPECT_DEATH_IF_SUPPORTED({
     DefaultValue<MyNonDefaultConstructible>::Get();
   }, "");
 }
 
 #if GTEST_HAS_STD_UNIQUE_PTR_
 TEST(DefaultValueTest, GetWorksForMoveOnlyIfSet) {
   EXPECT_TRUE(DefaultValue<std::unique_ptr<int>>::Exists());
   EXPECT_TRUE(DefaultValue<std::unique_ptr<int>>::Get() == NULL);
   DefaultValue<std::unique_ptr<int>>::SetFactory([] {
     return std::unique_ptr<int>(new int(42));
   });
   EXPECT_TRUE(DefaultValue<std::unique_ptr<int>>::Exists());
   std::unique_ptr<int> i = DefaultValue<std::unique_ptr<int>>::Get();
   EXPECT_EQ(42, *i);
 }
 #endif  // GTEST_HAS_STD_UNIQUE_PTR_
 
 // Tests that DefaultValue<void>::Get() returns void.
 TEST(DefaultValueTest, GetWorksForVoid) {
   return DefaultValue<void>::Get();
 }
 
 // Tests using DefaultValue with a reference type.
 
 // Tests that DefaultValue<T&>::IsSet() is false initially.
 TEST(DefaultValueOfReferenceTest, IsInitiallyUnset) {
   EXPECT_FALSE(DefaultValue<int&>::IsSet());
   EXPECT_FALSE(DefaultValue<MyDefaultConstructible&>::IsSet());
   EXPECT_FALSE(DefaultValue<MyNonDefaultConstructible&>::IsSet());
 }
 
 // Tests that DefaultValue<T&>::Exists() is false initially.
 TEST(DefaultValueOfReferenceTest, IsInitiallyNotExisting) {
   EXPECT_FALSE(DefaultValue<int&>::Exists());
   EXPECT_FALSE(DefaultValue<MyDefaultConstructible&>::Exists());
   EXPECT_FALSE(DefaultValue<MyNonDefaultConstructible&>::Exists());
 }
 
 // Tests that DefaultValue<T&> can be set and then unset.
 TEST(DefaultValueOfReferenceTest, CanBeSetAndUnset) {
   int n = 1;
   DefaultValue<const int&>::Set(n);
   MyNonDefaultConstructible x(42);
   DefaultValue<MyNonDefaultConstructible&>::Set(x);
 
   EXPECT_TRUE(DefaultValue<const int&>::Exists());
   EXPECT_TRUE(DefaultValue<MyNonDefaultConstructible&>::Exists());
 
   EXPECT_EQ(&n, &(DefaultValue<const int&>::Get()));
   EXPECT_EQ(&x, &(DefaultValue<MyNonDefaultConstructible&>::Get()));
 
   DefaultValue<const int&>::Clear();
   DefaultValue<MyNonDefaultConstructible&>::Clear();
 
   EXPECT_FALSE(DefaultValue<const int&>::Exists());
   EXPECT_FALSE(DefaultValue<MyNonDefaultConstructible&>::Exists());
 
   EXPECT_FALSE(DefaultValue<const int&>::IsSet());
   EXPECT_FALSE(DefaultValue<MyNonDefaultConstructible&>::IsSet());
 }
 
 // Tests that DefaultValue<T&>::Get() returns the value of
 // BuiltInDefaultValue<T&>::Get() when DefaultValue<T&>::IsSet() is
 // false.
 TEST(DefaultValueOfReferenceDeathTest, GetReturnsBuiltInDefaultValueWhenUnset) {
   EXPECT_FALSE(DefaultValue<int&>::IsSet());
   EXPECT_FALSE(DefaultValue<MyNonDefaultConstructible&>::IsSet());
 
   EXPECT_DEATH_IF_SUPPORTED({
     DefaultValue<int&>::Get();
   }, "");
   EXPECT_DEATH_IF_SUPPORTED({
     DefaultValue<MyNonDefaultConstructible>::Get();
   }, "");
 }
 
 // Tests that ActionInterface can be implemented by defining the
 // Perform method.
 
 typedef int MyGlobalFunction(bool, int);
 
 class MyActionImpl : public ActionInterface<MyGlobalFunction> {
  public:
   virtual int Perform(const tuple<bool, int>& args) {
     return get<0>(args) ? get<1>(args) : 0;
   }
 };
 
 TEST(ActionInterfaceTest, CanBeImplementedByDefiningPerform) {
   MyActionImpl my_action_impl;
   (void)my_action_impl;
 }
 
 TEST(ActionInterfaceTest, MakeAction) {
   Action<MyGlobalFunction> action = MakeAction(new MyActionImpl);
 
   // When exercising the Perform() method of Action<F>, we must pass
   // it a tuple whose size and type are compatible with F's argument
   // types.  For example, if F is int(), then Perform() takes a
   // 0-tuple; if F is void(bool, int), then Perform() takes a
   // tuple<bool, int>, and so on.
   EXPECT_EQ(5, action.Perform(make_tuple(true, 5)));
 }
 
 // Tests that Action<F> can be constructed from a pointer to
 // ActionInterface<F>.
 TEST(ActionTest, CanBeConstructedFromActionInterface) {
   Action<MyGlobalFunction> action(new MyActionImpl);
 }
 
 // Tests that Action<F> delegates actual work to ActionInterface<F>.
 TEST(ActionTest, DelegatesWorkToActionInterface) {
   const Action<MyGlobalFunction> action(new MyActionImpl);
 
   EXPECT_EQ(5, action.Perform(make_tuple(true, 5)));
   EXPECT_EQ(0, action.Perform(make_tuple(false, 1)));
 }
 
 // Tests that Action<F> can be copied.
 TEST(ActionTest, IsCopyable) {
   Action<MyGlobalFunction> a1(new MyActionImpl);
   Action<MyGlobalFunction> a2(a1);  // Tests the copy constructor.
 
   // a1 should continue to work after being copied from.
   EXPECT_EQ(5, a1.Perform(make_tuple(true, 5)));
   EXPECT_EQ(0, a1.Perform(make_tuple(false, 1)));
 
   // a2 should work like the action it was copied from.
   EXPECT_EQ(5, a2.Perform(make_tuple(true, 5)));
   EXPECT_EQ(0, a2.Perform(make_tuple(false, 1)));
 
   a2 = a1;  // Tests the assignment operator.
 
   // a1 should continue to work after being copied from.
   EXPECT_EQ(5, a1.Perform(make_tuple(true, 5)));
   EXPECT_EQ(0, a1.Perform(make_tuple(false, 1)));
 
   // a2 should work like the action it was copied from.
   EXPECT_EQ(5, a2.Perform(make_tuple(true, 5)));
   EXPECT_EQ(0, a2.Perform(make_tuple(false, 1)));
 }
 
 // Tests that an Action<From> object can be converted to a
 // compatible Action<To> object.
 
 class IsNotZero : public ActionInterface<bool(int)> {  // NOLINT
  public:
   virtual bool Perform(const tuple<int>& arg) {
     return get<0>(arg) != 0;
   }
 };
 
 #if !GTEST_OS_SYMBIAN
 // Compiling this test on Nokia's Symbian compiler fails with:
 //  'Result' is not a member of class 'testing::internal::Function<int>'
 //  (point of instantiation: '@unnamed@gmock_actions_test_cc@::
 //      ActionTest_CanBeConvertedToOtherActionType_Test::TestBody()')
 // with no obvious fix.
 TEST(ActionTest, CanBeConvertedToOtherActionType) {
   const Action<bool(int)> a1(new IsNotZero);  // NOLINT
   const Action<int(char)> a2 = Action<int(char)>(a1);  // NOLINT
   EXPECT_EQ(1, a2.Perform(make_tuple('a')));
   EXPECT_EQ(0, a2.Perform(make_tuple('\0')));
 }
 #endif  // !GTEST_OS_SYMBIAN
 
 // The following two classes are for testing MakePolymorphicAction().
 
 // Implements a polymorphic action that returns the second of the
 // arguments it receives.
 class ReturnSecondArgumentAction {
  public:
   // We want to verify that MakePolymorphicAction() can work with a
   // polymorphic action whose Perform() method template is either
   // const or not.  This lets us verify the non-const case.
   template <typename Result, typename ArgumentTuple>
   Result Perform(const ArgumentTuple& args) { return get<1>(args); }
 };
 
 // Implements a polymorphic action that can be used in a nullary
 // function to return 0.
 class ReturnZeroFromNullaryFunctionAction {
  public:
   // For testing that MakePolymorphicAction() works when the
   // implementation class' Perform() method template takes only one
   // template parameter.
   //
   // We want to verify that MakePolymorphicAction() can work with a
   // polymorphic action whose Perform() method template is either
   // const or not.  This lets us verify the const case.
   template <typename Result>
   Result Perform(const tuple<>&) const { return 0; }
 };
 
 // These functions verify that MakePolymorphicAction() returns a
 // PolymorphicAction<T> where T is the argument's type.
 
 PolymorphicAction<ReturnSecondArgumentAction> ReturnSecondArgument() {
   return MakePolymorphicAction(ReturnSecondArgumentAction());
 }
 
 PolymorphicAction<ReturnZeroFromNullaryFunctionAction>
 ReturnZeroFromNullaryFunction() {
   return MakePolymorphicAction(ReturnZeroFromNullaryFunctionAction());
 }
 
 // Tests that MakePolymorphicAction() turns a polymorphic action
 // implementation class into a polymorphic action.
 TEST(MakePolymorphicActionTest, ConstructsActionFromImpl) {
   Action<int(bool, int, double)> a1 = ReturnSecondArgument();  // NOLINT
   EXPECT_EQ(5, a1.Perform(make_tuple(false, 5, 2.0)));
 }
 
 // Tests that MakePolymorphicAction() works when the implementation
 // class' Perform() method template has only one template parameter.
 TEST(MakePolymorphicActionTest, WorksWhenPerformHasOneTemplateParameter) {
   Action<int()> a1 = ReturnZeroFromNullaryFunction();
   EXPECT_EQ(0, a1.Perform(make_tuple()));
 
   Action<void*()> a2 = ReturnZeroFromNullaryFunction();
   EXPECT_TRUE(a2.Perform(make_tuple()) == NULL);
 }
 
 // Tests that Return() works as an action for void-returning
 // functions.
 TEST(ReturnTest, WorksForVoid) {
   const Action<void(int)> ret = Return();  // NOLINT
   return ret.Perform(make_tuple(1));
 }
 
 // Tests that Return(v) returns v.
 TEST(ReturnTest, ReturnsGivenValue) {
   Action<int()> ret = Return(1);  // NOLINT
   EXPECT_EQ(1, ret.Perform(make_tuple()));
 
   ret = Return(-5);
   EXPECT_EQ(-5, ret.Perform(make_tuple()));
 }
 
 // Tests that Return("string literal") works.
 TEST(ReturnTest, AcceptsStringLiteral) {
   Action<const char*()> a1 = Return("Hello");
   EXPECT_STREQ("Hello", a1.Perform(make_tuple()));
 
   Action<std::string()> a2 = Return("world");
   EXPECT_EQ("world", a2.Perform(make_tuple()));
 }
 
 // Test struct which wraps a vector of integers. Used in
 // 'SupportsWrapperReturnType' test.
 struct IntegerVectorWrapper {
   std::vector<int> * v;
   IntegerVectorWrapper(std::vector<int>& _v) : v(&_v) {}  // NOLINT
 };
 
 // Tests that Return() works when return type is a wrapper type.
 TEST(ReturnTest, SupportsWrapperReturnType) {
   // Initialize vector of integers.
   std::vector<int> v;
   for (int i = 0; i < 5; ++i) v.push_back(i);
 
   // Return() called with 'v' as argument. The Action will return the same data
   // as 'v' (copy) but it will be wrapped in an IntegerVectorWrapper.
   Action<IntegerVectorWrapper()> a = Return(v);
   const std::vector<int>& result = *(a.Perform(make_tuple()).v);
   EXPECT_THAT(result, ::testing::ElementsAre(0, 1, 2, 3, 4));
 }
 
 // Tests that Return(v) is covariant.
 
 struct Base {
   bool operator==(const Base&) { return true; }
 };
 
 struct Derived : public Base {
   bool operator==(const Derived&) { return true; }
 };
 
 TEST(ReturnTest, IsCovariant) {
   Base base;
   Derived derived;
   Action<Base*()> ret = Return(&base);
   EXPECT_EQ(&base, ret.Perform(make_tuple()));
 
   ret = Return(&derived);
   EXPECT_EQ(&derived, ret.Perform(make_tuple()));
 }
 
 // Tests that the type of the value passed into Return is converted into T
 // when the action is cast to Action<T(...)> rather than when the action is
 // performed. See comments on testing::internal::ReturnAction in
 // gmock-actions.h for more information.
 class FromType {
  public:
   explicit FromType(bool* is_converted) : converted_(is_converted) {}
   bool* converted() const { return converted_; }
 
  private:
   bool* const converted_;
 
   GTEST_DISALLOW_ASSIGN_(FromType);
 };
 
 class ToType {
  public:
   // Must allow implicit conversion due to use in ImplicitCast_<T>.
   ToType(const FromType& x) { *x.converted() = true; }  // NOLINT
 };
 
 TEST(ReturnTest, ConvertsArgumentWhenConverted) {
   bool converted = false;
   FromType x(&converted);
   Action<ToType()> action(Return(x));
   EXPECT_TRUE(converted) << "Return must convert its argument in its own "
                          << "conversion operator.";
   converted = false;
   action.Perform(tuple<>());
   EXPECT_FALSE(converted) << "Action must NOT convert its argument "
                           << "when performed.";
 }
 
 class DestinationType {};
 
 class SourceType {
  public:
   // Note: a non-const typecast operator.
   operator DestinationType() { return DestinationType(); }
 };
 
 TEST(ReturnTest, CanConvertArgumentUsingNonConstTypeCastOperator) {
   SourceType s;
   Action<DestinationType()> action(Return(s));
 }
 
 // Tests that ReturnNull() returns NULL in a pointer-returning function.
 TEST(ReturnNullTest, WorksInPointerReturningFunction) {
   const Action<int*()> a1 = ReturnNull();
   EXPECT_TRUE(a1.Perform(make_tuple()) == NULL);
 
   const Action<const char*(bool)> a2 = ReturnNull();  // NOLINT
   EXPECT_TRUE(a2.Perform(make_tuple(true)) == NULL);
 }
 
 #if GTEST_HAS_STD_UNIQUE_PTR_
 // Tests that ReturnNull() returns NULL for shared_ptr and unique_ptr returning
 // functions.
 TEST(ReturnNullTest, WorksInSmartPointerReturningFunction) {
   const Action<std::unique_ptr<const int>()> a1 = ReturnNull();
   EXPECT_TRUE(a1.Perform(make_tuple()) == nullptr);
 
   const Action<std::shared_ptr<int>(std::string)> a2 = ReturnNull();
   EXPECT_TRUE(a2.Perform(make_tuple("foo")) == nullptr);
 }
 #endif  // GTEST_HAS_STD_UNIQUE_PTR_
 
 // Tests that ReturnRef(v) works for reference types.
 TEST(ReturnRefTest, WorksForReference) {
   const int n = 0;
   const Action<const int&(bool)> ret = ReturnRef(n);  // NOLINT
 
   EXPECT_EQ(&n, &ret.Perform(make_tuple(true)));
 }
 
 // Tests that ReturnRef(v) is covariant.
 TEST(ReturnRefTest, IsCovariant) {
   Base base;
   Derived derived;
   Action<Base&()> a = ReturnRef(base);
   EXPECT_EQ(&base, &a.Perform(make_tuple()));
 
   a = ReturnRef(derived);
   EXPECT_EQ(&derived, &a.Perform(make_tuple()));
 }
 
 // Tests that ReturnRefOfCopy(v) works for reference types.
 TEST(ReturnRefOfCopyTest, WorksForReference) {
   int n = 42;
   const Action<const int&()> ret = ReturnRefOfCopy(n);
 
   EXPECT_NE(&n, &ret.Perform(make_tuple()));
   EXPECT_EQ(42, ret.Perform(make_tuple()));
 
   n = 43;
   EXPECT_NE(&n, &ret.Perform(make_tuple()));
   EXPECT_EQ(42, ret.Perform(make_tuple()));
 }
 
 // Tests that ReturnRefOfCopy(v) is covariant.
 TEST(ReturnRefOfCopyTest, IsCovariant) {
   Base base;
   Derived derived;
   Action<Base&()> a = ReturnRefOfCopy(base);
   EXPECT_NE(&base, &a.Perform(make_tuple()));
 
   a = ReturnRefOfCopy(derived);
   EXPECT_NE(&derived, &a.Perform(make_tuple()));
 }
 
 // Tests that DoDefault() does the default action for the mock method.
 
 class MockClass {
  public:
   MockClass() {}
 
   MOCK_METHOD1(IntFunc, int(bool flag));  // NOLINT
   MOCK_METHOD0(Foo, MyNonDefaultConstructible());
 #if GTEST_HAS_STD_UNIQUE_PTR_
   MOCK_METHOD0(MakeUnique, std::unique_ptr<int>());
   MOCK_METHOD0(MakeUniqueBase, std::unique_ptr<Base>());
   MOCK_METHOD0(MakeVectorUnique, std::vector<std::unique_ptr<int>>());
   MOCK_METHOD1(TakeUnique, int(std::unique_ptr<int>));
   MOCK_METHOD2(TakeUnique,
                int(const std::unique_ptr<int>&, std::unique_ptr<int>));
 #endif
 
  private:
   GTEST_DISALLOW_COPY_AND_ASSIGN_(MockClass);
 };
 
 // Tests that DoDefault() returns the built-in default value for the
 // return type by default.
 TEST(DoDefaultTest, ReturnsBuiltInDefaultValueByDefault) {
   MockClass mock;
   EXPECT_CALL(mock, IntFunc(_))
       .WillOnce(DoDefault());
   EXPECT_EQ(0, mock.IntFunc(true));
 }
 
 // Tests that DoDefault() throws (when exceptions are enabled) or aborts
 // the process when there is no built-in default value for the return type.
 TEST(DoDefaultDeathTest, DiesForUnknownType) {
   MockClass mock;
   EXPECT_CALL(mock, Foo())
       .WillRepeatedly(DoDefault());
 #if GTEST_HAS_EXCEPTIONS
   EXPECT_ANY_THROW(mock.Foo());
 #else
   EXPECT_DEATH_IF_SUPPORTED({
     mock.Foo();
   }, "");
 #endif
 }
 
 // Tests that using DoDefault() inside a composite action leads to a
 // run-time error.
 
 void VoidFunc(bool /* flag */) {}
 
 TEST(DoDefaultDeathTest, DiesIfUsedInCompositeAction) {
   MockClass mock;
   EXPECT_CALL(mock, IntFunc(_))
       .WillRepeatedly(DoAll(Invoke(VoidFunc),
                             DoDefault()));
 
   // Ideally we should verify the error message as well.  Sadly,
   // EXPECT_DEATH() can only capture stderr, while Google Mock's
   // errors are printed on stdout.  Therefore we have to settle for
   // not verifying the message.
   EXPECT_DEATH_IF_SUPPORTED({
     mock.IntFunc(true);
   }, "");
 }
 
 // Tests that DoDefault() returns the default value set by
 // DefaultValue<T>::Set() when it's not overridden by an ON_CALL().
 TEST(DoDefaultTest, ReturnsUserSpecifiedPerTypeDefaultValueWhenThereIsOne) {
   DefaultValue<int>::Set(1);
   MockClass mock;
   EXPECT_CALL(mock, IntFunc(_))
       .WillOnce(DoDefault());
   EXPECT_EQ(1, mock.IntFunc(false));
   DefaultValue<int>::Clear();
 }
 
 // Tests that DoDefault() does the action specified by ON_CALL().
 TEST(DoDefaultTest, DoesWhatOnCallSpecifies) {
   MockClass mock;
   ON_CALL(mock, IntFunc(_))
       .WillByDefault(Return(2));
   EXPECT_CALL(mock, IntFunc(_))
       .WillOnce(DoDefault());
   EXPECT_EQ(2, mock.IntFunc(false));
 }
 
 // Tests that using DoDefault() in ON_CALL() leads to a run-time failure.
 TEST(DoDefaultTest, CannotBeUsedInOnCall) {
   MockClass mock;
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     ON_CALL(mock, IntFunc(_))
       .WillByDefault(DoDefault());
   }, "DoDefault() cannot be used in ON_CALL()");
 }
 
 // Tests that SetArgPointee<N>(v) sets the variable pointed to by
 // the N-th (0-based) argument to v.
 TEST(SetArgPointeeTest, SetsTheNthPointee) {
   typedef void MyFunction(bool, int*, char*);
   Action<MyFunction> a = SetArgPointee<1>(2);
 
   int n = 0;
   char ch = '\0';
   a.Perform(make_tuple(true, &n, &ch));
   EXPECT_EQ(2, n);
   EXPECT_EQ('\0', ch);
 
   a = SetArgPointee<2>('a');
   n = 0;
   ch = '\0';
   a.Perform(make_tuple(true, &n, &ch));
   EXPECT_EQ(0, n);
   EXPECT_EQ('a', ch);
 }
 
 #if !((GTEST_GCC_VER_ && GTEST_GCC_VER_ < 40000) || GTEST_OS_SYMBIAN)
 // Tests that SetArgPointee<N>() accepts a string literal.
 // GCC prior to v4.0 and the Symbian compiler do not support this.
 TEST(SetArgPointeeTest, AcceptsStringLiteral) {
   typedef void MyFunction(std::string*, const char**);
   Action<MyFunction> a = SetArgPointee<0>("hi");
   std::string str;
   const char* ptr = NULL;
   a.Perform(make_tuple(&str, &ptr));
   EXPECT_EQ("hi", str);
   EXPECT_TRUE(ptr == NULL);
 
   a = SetArgPointee<1>("world");
   str = "";
   a.Perform(make_tuple(&str, &ptr));
   EXPECT_EQ("", str);
   EXPECT_STREQ("world", ptr);
 }
 
 TEST(SetArgPointeeTest, AcceptsWideStringLiteral) {
   typedef void MyFunction(const wchar_t**);
   Action<MyFunction> a = SetArgPointee<0>(L"world");
   const wchar_t* ptr = NULL;
   a.Perform(make_tuple(&ptr));
   EXPECT_STREQ(L"world", ptr);
 
 # if GTEST_HAS_STD_WSTRING
 
   typedef void MyStringFunction(std::wstring*);
   Action<MyStringFunction> a2 = SetArgPointee<0>(L"world");
   std::wstring str = L"";
   a2.Perform(make_tuple(&str));
   EXPECT_EQ(L"world", str);
 
 # endif
 }
 #endif
 
 // Tests that SetArgPointee<N>() accepts a char pointer.
 TEST(SetArgPointeeTest, AcceptsCharPointer) {
   typedef void MyFunction(bool, std::string*, const char**);
   const char* const hi = "hi";
   Action<MyFunction> a = SetArgPointee<1>(hi);
   std::string str;
   const char* ptr = NULL;
   a.Perform(make_tuple(true, &str, &ptr));
   EXPECT_EQ("hi", str);
   EXPECT_TRUE(ptr == NULL);
 
   char world_array[] = "world";
   char* const world = world_array;
   a = SetArgPointee<2>(world);
   str = "";
   a.Perform(make_tuple(true, &str, &ptr));
   EXPECT_EQ("", str);
   EXPECT_EQ(world, ptr);
 }
 
 TEST(SetArgPointeeTest, AcceptsWideCharPointer) {
   typedef void MyFunction(bool, const wchar_t**);
   const wchar_t* const hi = L"hi";
   Action<MyFunction> a = SetArgPointee<1>(hi);
   const wchar_t* ptr = NULL;
   a.Perform(make_tuple(true, &ptr));
   EXPECT_EQ(hi, ptr);
 
 # if GTEST_HAS_STD_WSTRING
 
   typedef void MyStringFunction(bool, std::wstring*);
   wchar_t world_array[] = L"world";
   wchar_t* const world = world_array;
   Action<MyStringFunction> a2 = SetArgPointee<1>(world);
   std::wstring str;
   a2.Perform(make_tuple(true, &str));
   EXPECT_EQ(world_array, str);
 # endif
 }
 
 // Tests that SetArgumentPointee<N>(v) sets the variable pointed to by
 // the N-th (0-based) argument to v.
 TEST(SetArgumentPointeeTest, SetsTheNthPointee) {
   typedef void MyFunction(bool, int*, char*);
   Action<MyFunction> a = SetArgumentPointee<1>(2);
 
   int n = 0;
   char ch = '\0';
   a.Perform(make_tuple(true, &n, &ch));
   EXPECT_EQ(2, n);
   EXPECT_EQ('\0', ch);
 
   a = SetArgumentPointee<2>('a');
   n = 0;
   ch = '\0';
   a.Perform(make_tuple(true, &n, &ch));
   EXPECT_EQ(0, n);
   EXPECT_EQ('a', ch);
 }
 
 // Sample functions and functors for testing Invoke() etc.
 int Nullary() { return 1; }
 
 class NullaryFunctor {
  public:
   int operator()() { return 2; }
 };
 
 bool g_done = false;
 void VoidNullary() { g_done = true; }
 
 class VoidNullaryFunctor {
  public:
   void operator()() { g_done = true; }
 };
 
 class Foo {
  public:
   Foo() : value_(123) {}
 
   int Nullary() const { return value_; }
 
  private:
   int value_;
 };
 
+//  GOOGLETEST_CM0005 DO NOT DELETE
+
 // Tests InvokeWithoutArgs(function).
 TEST(InvokeWithoutArgsTest, Function) {
   // As an action that takes one argument.
   Action<int(int)> a = InvokeWithoutArgs(Nullary);  // NOLINT
   EXPECT_EQ(1, a.Perform(make_tuple(2)));
 
   // As an action that takes two arguments.
   Action<int(int, double)> a2 = InvokeWithoutArgs(Nullary);  // NOLINT
   EXPECT_EQ(1, a2.Perform(make_tuple(2, 3.5)));
 
   // As an action that returns void.
   Action<void(int)> a3 = InvokeWithoutArgs(VoidNullary);  // NOLINT
   g_done = false;
   a3.Perform(make_tuple(1));
   EXPECT_TRUE(g_done);
 }
 
 // Tests InvokeWithoutArgs(functor).
 TEST(InvokeWithoutArgsTest, Functor) {
   // As an action that takes no argument.
   Action<int()> a = InvokeWithoutArgs(NullaryFunctor());  // NOLINT
   EXPECT_EQ(2, a.Perform(make_tuple()));
 
   // As an action that takes three arguments.
   Action<int(int, double, char)> a2 =  // NOLINT
       InvokeWithoutArgs(NullaryFunctor());
   EXPECT_EQ(2, a2.Perform(make_tuple(3, 3.5, 'a')));
 
   // As an action that returns void.
   Action<void()> a3 = InvokeWithoutArgs(VoidNullaryFunctor());
   g_done = false;
   a3.Perform(make_tuple());
   EXPECT_TRUE(g_done);
 }
 
 // Tests InvokeWithoutArgs(obj_ptr, method).
 TEST(InvokeWithoutArgsTest, Method) {
   Foo foo;
   Action<int(bool, char)> a =  // NOLINT
       InvokeWithoutArgs(&foo, &Foo::Nullary);
   EXPECT_EQ(123, a.Perform(make_tuple(true, 'a')));
 }
 
 // Tests using IgnoreResult() on a polymorphic action.
 TEST(IgnoreResultTest, PolymorphicAction) {
   Action<void(int)> a = IgnoreResult(Return(5));  // NOLINT
   a.Perform(make_tuple(1));
 }
 
 // Tests using IgnoreResult() on a monomorphic action.
 
 int ReturnOne() {
   g_done = true;
   return 1;
 }
 
 TEST(IgnoreResultTest, MonomorphicAction) {
   g_done = false;
   Action<void()> a = IgnoreResult(Invoke(ReturnOne));
   a.Perform(make_tuple());
   EXPECT_TRUE(g_done);
 }
 
 // Tests using IgnoreResult() on an action that returns a class type.
 
 MyNonDefaultConstructible ReturnMyNonDefaultConstructible(double /* x */) {
   g_done = true;
   return MyNonDefaultConstructible(42);
 }
 
 TEST(IgnoreResultTest, ActionReturningClass) {
   g_done = false;
   Action<void(int)> a =
       IgnoreResult(Invoke(ReturnMyNonDefaultConstructible));  // NOLINT
   a.Perform(make_tuple(2));
   EXPECT_TRUE(g_done);
 }
 
 TEST(AssignTest, Int) {
   int x = 0;
   Action<void(int)> a = Assign(&x, 5);
   a.Perform(make_tuple(0));
   EXPECT_EQ(5, x);
 }
 
 TEST(AssignTest, String) {
   ::std::string x;
   Action<void(void)> a = Assign(&x, "Hello, world");
   a.Perform(make_tuple());
   EXPECT_EQ("Hello, world", x);
 }
 
 TEST(AssignTest, CompatibleTypes) {
   double x = 0;
   Action<void(int)> a = Assign(&x, 5);
   a.Perform(make_tuple(0));
   EXPECT_DOUBLE_EQ(5, x);
 }
 
 #if !GTEST_OS_WINDOWS_MOBILE
 
 class SetErrnoAndReturnTest : public testing::Test {
  protected:
   virtual void SetUp() { errno = 0; }
   virtual void TearDown() { errno = 0; }
 };
 
 TEST_F(SetErrnoAndReturnTest, Int) {
   Action<int(void)> a = SetErrnoAndReturn(ENOTTY, -5);
   EXPECT_EQ(-5, a.Perform(make_tuple()));
   EXPECT_EQ(ENOTTY, errno);
 }
 
 TEST_F(SetErrnoAndReturnTest, Ptr) {
   int x;
   Action<int*(void)> a = SetErrnoAndReturn(ENOTTY, &x);
   EXPECT_EQ(&x, a.Perform(make_tuple()));
   EXPECT_EQ(ENOTTY, errno);
 }
 
 TEST_F(SetErrnoAndReturnTest, CompatibleTypes) {
   Action<double()> a = SetErrnoAndReturn(EINVAL, 5);
   EXPECT_DOUBLE_EQ(5.0, a.Perform(make_tuple()));
   EXPECT_EQ(EINVAL, errno);
 }
 
 #endif  // !GTEST_OS_WINDOWS_MOBILE
 
 // Tests ByRef().
 
 // Tests that ReferenceWrapper<T> is copyable.
 TEST(ByRefTest, IsCopyable) {
   const std::string s1 = "Hi";
   const std::string s2 = "Hello";
 
   ::testing::internal::ReferenceWrapper<const std::string> ref_wrapper =
       ByRef(s1);
   const std::string& r1 = ref_wrapper;
   EXPECT_EQ(&s1, &r1);
 
   // Assigns a new value to ref_wrapper.
   ref_wrapper = ByRef(s2);
   const std::string& r2 = ref_wrapper;
   EXPECT_EQ(&s2, &r2);
 
   ::testing::internal::ReferenceWrapper<const std::string> ref_wrapper1 =
       ByRef(s1);
   // Copies ref_wrapper1 to ref_wrapper.
   ref_wrapper = ref_wrapper1;
   const std::string& r3 = ref_wrapper;
   EXPECT_EQ(&s1, &r3);
 }
 
 // Tests using ByRef() on a const value.
 TEST(ByRefTest, ConstValue) {
   const int n = 0;
   // int& ref = ByRef(n);  // This shouldn't compile - we have a
                            // negative compilation test to catch it.
   const int& const_ref = ByRef(n);
   EXPECT_EQ(&n, &const_ref);
 }
 
 // Tests using ByRef() on a non-const value.
 TEST(ByRefTest, NonConstValue) {
   int n = 0;
 
   // ByRef(n) can be used as either an int&,
   int& ref = ByRef(n);
   EXPECT_EQ(&n, &ref);
 
   // or a const int&.
   const int& const_ref = ByRef(n);
   EXPECT_EQ(&n, &const_ref);
 }
 
 // Tests explicitly specifying the type when using ByRef().
 TEST(ByRefTest, ExplicitType) {
   int n = 0;
   const int& r1 = ByRef<const int>(n);
   EXPECT_EQ(&n, &r1);
 
   // ByRef<char>(n);  // This shouldn't compile - we have a negative
                       // compilation test to catch it.
 
   Derived d;
   Derived& r2 = ByRef<Derived>(d);
   EXPECT_EQ(&d, &r2);
 
   const Derived& r3 = ByRef<const Derived>(d);
   EXPECT_EQ(&d, &r3);
 
   Base& r4 = ByRef<Base>(d);
   EXPECT_EQ(&d, &r4);
 
   const Base& r5 = ByRef<const Base>(d);
   EXPECT_EQ(&d, &r5);
 
   // The following shouldn't compile - we have a negative compilation
   // test for it.
   //
   // Base b;
   // ByRef<Derived>(b);
 }
 
 // Tests that Google Mock prints expression ByRef(x) as a reference to x.
 TEST(ByRefTest, PrintsCorrectly) {
   int n = 42;
   ::std::stringstream expected, actual;
   testing::internal::UniversalPrinter<const int&>::Print(n, &expected);
   testing::internal::UniversalPrint(ByRef(n), &actual);
   EXPECT_EQ(expected.str(), actual.str());
 }
 
 #if GTEST_HAS_STD_UNIQUE_PTR_
 
 std::unique_ptr<int> UniquePtrSource() {
   return std::unique_ptr<int>(new int(19));
 }
 
 std::vector<std::unique_ptr<int>> VectorUniquePtrSource() {
   std::vector<std::unique_ptr<int>> out;
   out.emplace_back(new int(7));
   return out;
 }
 
 TEST(MockMethodTest, CanReturnMoveOnlyValue_Return) {
   MockClass mock;
   std::unique_ptr<int> i(new int(19));
   EXPECT_CALL(mock, MakeUnique()).WillOnce(Return(ByMove(std::move(i))));
   EXPECT_CALL(mock, MakeVectorUnique())
       .WillOnce(Return(ByMove(VectorUniquePtrSource())));
   Derived* d = new Derived;
   EXPECT_CALL(mock, MakeUniqueBase())
       .WillOnce(Return(ByMove(std::unique_ptr<Derived>(d))));
 
   std::unique_ptr<int> result1 = mock.MakeUnique();
   EXPECT_EQ(19, *result1);
 
   std::vector<std::unique_ptr<int>> vresult = mock.MakeVectorUnique();
   EXPECT_EQ(1u, vresult.size());
   EXPECT_NE(nullptr, vresult[0]);
   EXPECT_EQ(7, *vresult[0]);
 
   std::unique_ptr<Base> result2 = mock.MakeUniqueBase();
   EXPECT_EQ(d, result2.get());
 }
 
 TEST(MockMethodTest, CanReturnMoveOnlyValue_DoAllReturn) {
   testing::MockFunction<void()> mock_function;
   MockClass mock;
   std::unique_ptr<int> i(new int(19));
   EXPECT_CALL(mock_function, Call());
   EXPECT_CALL(mock, MakeUnique()).WillOnce(DoAll(
       InvokeWithoutArgs(&mock_function, &testing::MockFunction<void()>::Call),
       Return(ByMove(std::move(i)))));
 
   std::unique_ptr<int> result1 = mock.MakeUnique();
   EXPECT_EQ(19, *result1);
 }
 
 TEST(MockMethodTest, CanReturnMoveOnlyValue_Invoke) {
   MockClass mock;
 
   // Check default value
   DefaultValue<std::unique_ptr<int>>::SetFactory([] {
     return std::unique_ptr<int>(new int(42));
   });
   EXPECT_EQ(42, *mock.MakeUnique());
 
   EXPECT_CALL(mock, MakeUnique()).WillRepeatedly(Invoke(UniquePtrSource));
   EXPECT_CALL(mock, MakeVectorUnique())
       .WillRepeatedly(Invoke(VectorUniquePtrSource));
   std::unique_ptr<int> result1 = mock.MakeUnique();
   EXPECT_EQ(19, *result1);
   std::unique_ptr<int> result2 = mock.MakeUnique();
   EXPECT_EQ(19, *result2);
   EXPECT_NE(result1, result2);
 
   std::vector<std::unique_ptr<int>> vresult = mock.MakeVectorUnique();
   EXPECT_EQ(1u, vresult.size());
   EXPECT_NE(nullptr, vresult[0]);
   EXPECT_EQ(7, *vresult[0]);
 }
 
 TEST(MockMethodTest, CanTakeMoveOnlyValue) {
   MockClass mock;
   auto make = [](int i) { return std::unique_ptr<int>(new int(i)); };
 
   EXPECT_CALL(mock, TakeUnique(_)).WillRepeatedly([](std::unique_ptr<int> i) {
     return *i;
   });
   // DoAll() does not compile, since it would move from its arguments twice.
   // EXPECT_CALL(mock, TakeUnique(_, _))
   //     .WillRepeatedly(DoAll(Invoke([](std::unique_ptr<int> j) {}),
   //     Return(1)));
   EXPECT_CALL(mock, TakeUnique(testing::Pointee(7)))
       .WillOnce(Return(-7))
       .RetiresOnSaturation();
   EXPECT_CALL(mock, TakeUnique(testing::IsNull()))
       .WillOnce(Return(-1))
       .RetiresOnSaturation();
 
   EXPECT_EQ(5, mock.TakeUnique(make(5)));
   EXPECT_EQ(-7, mock.TakeUnique(make(7)));
   EXPECT_EQ(7, mock.TakeUnique(make(7)));
   EXPECT_EQ(7, mock.TakeUnique(make(7)));
   EXPECT_EQ(-1, mock.TakeUnique({}));
 
   // Some arguments are moved, some passed by reference.
   auto lvalue = make(6);
   EXPECT_CALL(mock, TakeUnique(_, _))
       .WillOnce([](const std::unique_ptr<int>& i, std::unique_ptr<int> j) {
         return *i * *j;
       });
   EXPECT_EQ(42, mock.TakeUnique(lvalue, make(7)));
 
   // The unique_ptr can be saved by the action.
   std::unique_ptr<int> saved;
   EXPECT_CALL(mock, TakeUnique(_)).WillOnce([&saved](std::unique_ptr<int> i) {
     saved = std::move(i);
     return 0;
   });
   EXPECT_EQ(0, mock.TakeUnique(make(42)));
   EXPECT_EQ(42, *saved);
 }
 
 #endif  // GTEST_HAS_STD_UNIQUE_PTR_
 
 #if GTEST_LANG_CXX11
 // Tests for std::function based action.
 
 int Add(int val, int& ref, int* ptr) {  // NOLINT
   int result = val + ref + *ptr;
   ref = 42;
   *ptr = 43;
   return result;
 }
 
 int Deref(std::unique_ptr<int> ptr) { return *ptr; }
 
 struct Double {
   template <typename T>
   T operator()(T t) { return 2 * t; }
 };
 
 std::unique_ptr<int> UniqueInt(int i) {
   return std::unique_ptr<int>(new int(i));
 }
 
 TEST(FunctorActionTest, ActionFromFunction) {
   Action<int(int, int&, int*)> a = &Add;
   int x = 1, y = 2, z = 3;
   EXPECT_EQ(6, a.Perform(std::forward_as_tuple(x, y, &z)));
   EXPECT_EQ(42, y);
   EXPECT_EQ(43, z);
 
   Action<int(std::unique_ptr<int>)> a1 = &Deref;
   EXPECT_EQ(7, a1.Perform(std::make_tuple(UniqueInt(7))));
 }
 
 TEST(FunctorActionTest, ActionFromLambda) {
   Action<int(bool, int)> a1 = [](bool b, int i) { return b ? i : 0; };
   EXPECT_EQ(5, a1.Perform(make_tuple(true, 5)));
   EXPECT_EQ(0, a1.Perform(make_tuple(false, 5)));
 
   std::unique_ptr<int> saved;
   Action<void(std::unique_ptr<int>)> a2 = [&saved](std::unique_ptr<int> p) {
     saved = std::move(p);
   };
   a2.Perform(make_tuple(UniqueInt(5)));
   EXPECT_EQ(5, *saved);
 }
 
 TEST(FunctorActionTest, PolymorphicFunctor) {
   Action<int(int)> ai = Double();
   EXPECT_EQ(2, ai.Perform(make_tuple(1)));
   Action<double(double)> ad = Double();  // Double? Double double!
   EXPECT_EQ(3.0, ad.Perform(make_tuple(1.5)));
 }
 
 TEST(FunctorActionTest, TypeConversion) {
   // Numeric promotions are allowed.
   const Action<bool(int)> a1 = [](int i) { return i > 1; };
   const Action<int(bool)> a2 = Action<int(bool)>(a1);
   EXPECT_EQ(1, a1.Perform(make_tuple(42)));
   EXPECT_EQ(0, a2.Perform(make_tuple(42)));
 
   // Implicit constructors are allowed.
   const Action<bool(std::string)> s1 = [](std::string s) { return !s.empty(); };
   const Action<int(const char*)> s2 = Action<int(const char*)>(s1);
   EXPECT_EQ(0, s2.Perform(make_tuple("")));
   EXPECT_EQ(1, s2.Perform(make_tuple("hello")));
 
   // Also between the lambda and the action itself.
   const Action<bool(std::string)> x = [](Unused) { return 42; };
   EXPECT_TRUE(x.Perform(make_tuple("hello")));
 }
 
 TEST(FunctorActionTest, UnusedArguments) {
   // Verify that users can ignore uninteresting arguments.
   Action<int(int, double y, double z)> a =
       [](int i, Unused, Unused) { return 2 * i; };
   tuple<int, double, double> dummy = make_tuple(3, 7.3, 9.44);
   EXPECT_EQ(6, a.Perform(dummy));
 }
 
 // Test that basic built-in actions work with move-only arguments.
 // TODO(rburny): Currently, almost all ActionInterface-based actions will not
 // work, even if they only try to use other, copyable arguments. Implement them
 // if necessary (but note that DoAll cannot work on non-copyable types anyway -
 // so maybe it's better to make users use lambdas instead).
 TEST(MoveOnlyArgumentsTest, ReturningActions) {
   Action<int(std::unique_ptr<int>)> a = Return(1);
   EXPECT_EQ(1, a.Perform(make_tuple(nullptr)));
 
   a = testing::WithoutArgs([]() { return 7; });
   EXPECT_EQ(7, a.Perform(make_tuple(nullptr)));
 
   Action<void(std::unique_ptr<int>, int*)> a2 = testing::SetArgPointee<1>(3);
   int x = 0;
   a2.Perform(make_tuple(nullptr, &x));
   EXPECT_EQ(x, 3);
 }
 
 #endif  // GTEST_LANG_CXX11
 
 }  // Unnamed namespace
 
 #ifdef _MSC_VER
 #if _MSC_VER <= 1900  // Matches the warning(push) guard above.
 #  pragma warning(pop)
 #endif
 #endif
 
diff --git a/googlemock/test/gmock_leak_test.py b/googlemock/test/gmock_leak_test.py
index 997680ce..a2fee4b6 100755
--- a/googlemock/test/gmock_leak_test.py
+++ b/googlemock/test/gmock_leak_test.py
@@ -1,108 +1,106 @@
 #!/usr/bin/env python
 #
 # Copyright 2009, Google Inc.
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following disclaimer
 # in the documentation and/or other materials provided with the
 # distribution.
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 """Tests that leaked mock objects can be caught be Google Mock."""
 
 __author__ = 'wan@google.com (Zhanyong Wan)'
 
-
 import gmock_test_utils
 
-
 PROGRAM_PATH = gmock_test_utils.GetTestExecutablePath('gmock_leak_test_')
 TEST_WITH_EXPECT_CALL = [PROGRAM_PATH, '--gtest_filter=*ExpectCall*']
 TEST_WITH_ON_CALL = [PROGRAM_PATH, '--gtest_filter=*OnCall*']
 TEST_MULTIPLE_LEAKS = [PROGRAM_PATH, '--gtest_filter=*MultipleLeaked*']
 
 environ = gmock_test_utils.environ
 SetEnvVar = gmock_test_utils.SetEnvVar
 
 # Tests in this file run a Google-Test-based test program and expect it
 # to terminate prematurely.  Therefore they are incompatible with
 # the premature-exit-file protocol by design.  Unset the
 # premature-exit filepath to prevent Google Test from creating
 # the file.
 SetEnvVar(gmock_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)
 
 
 class GMockLeakTest(gmock_test_utils.TestCase):
 
   def testCatchesLeakedMockByDefault(self):
     self.assertNotEqual(
         0,
         gmock_test_utils.Subprocess(TEST_WITH_EXPECT_CALL,
                                     env=environ).exit_code)
     self.assertNotEqual(
         0,
         gmock_test_utils.Subprocess(TEST_WITH_ON_CALL,
                                     env=environ).exit_code)
 
   def testDoesNotCatchLeakedMockWhenDisabled(self):
     self.assertEquals(
         0,
         gmock_test_utils.Subprocess(TEST_WITH_EXPECT_CALL +
                                     ['--gmock_catch_leaked_mocks=0'],
                                     env=environ).exit_code)
     self.assertEquals(
         0,
         gmock_test_utils.Subprocess(TEST_WITH_ON_CALL +
                                     ['--gmock_catch_leaked_mocks=0'],
                                     env=environ).exit_code)
 
   def testCatchesLeakedMockWhenEnabled(self):
     self.assertNotEqual(
         0,
         gmock_test_utils.Subprocess(TEST_WITH_EXPECT_CALL +
                                     ['--gmock_catch_leaked_mocks'],
                                     env=environ).exit_code)
     self.assertNotEqual(
         0,
         gmock_test_utils.Subprocess(TEST_WITH_ON_CALL +
                                     ['--gmock_catch_leaked_mocks'],
                                     env=environ).exit_code)
 
   def testCatchesLeakedMockWhenEnabledWithExplicitFlagValue(self):
     self.assertNotEqual(
         0,
         gmock_test_utils.Subprocess(TEST_WITH_EXPECT_CALL +
                                     ['--gmock_catch_leaked_mocks=1'],
                                     env=environ).exit_code)
 
   def testCatchesMultipleLeakedMocks(self):
     self.assertNotEqual(
         0,
         gmock_test_utils.Subprocess(TEST_MULTIPLE_LEAKS +
                                     ['--gmock_catch_leaked_mocks'],
                                     env=environ).exit_code)
 
 
 if __name__ == '__main__':
   gmock_test_utils.Main()
diff --git a/googlemock/test/gmock_output_test.py b/googlemock/test/gmock_output_test.py
index 9d73d570..8f57d46c 100755
--- a/googlemock/test/gmock_output_test.py
+++ b/googlemock/test/gmock_output_test.py
@@ -1,183 +1,182 @@
 #!/usr/bin/env python
 #
 # Copyright 2008, Google Inc.
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following disclaimer
 # in the documentation and/or other materials provided with the
 # distribution.
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 """Tests the text output of Google C++ Mocking Framework.
 
 To update the golden file:
 gmock_output_test.py --build_dir=BUILD/DIR --gengolden
 # where BUILD/DIR contains the built gmock_output_test_ file.
 gmock_output_test.py --gengolden
 gmock_output_test.py
 """
 
 __author__ = 'wan@google.com (Zhanyong Wan)'
 
 import os
 import re
 import sys
-
 import gmock_test_utils
 
 
 # The flag for generating the golden file
 GENGOLDEN_FLAG = '--gengolden'
 
 PROGRAM_PATH = gmock_test_utils.GetTestExecutablePath('gmock_output_test_')
 COMMAND = [PROGRAM_PATH, '--gtest_stack_trace_depth=0', '--gtest_print_time=0']
 GOLDEN_NAME = 'gmock_output_test_golden.txt'
 GOLDEN_PATH = os.path.join(gmock_test_utils.GetSourceDir(), GOLDEN_NAME)
 
 
 def ToUnixLineEnding(s):
   """Changes all Windows/Mac line endings in s to UNIX line endings."""
 
   return s.replace('\r\n', '\n').replace('\r', '\n')
 
 
 def RemoveReportHeaderAndFooter(output):
   """Removes Google Test result report's header and footer from the output."""
 
   output = re.sub(r'.*gtest_main.*\n', '', output)
   output = re.sub(r'\[.*\d+ tests.*\n', '', output)
   output = re.sub(r'\[.* test environment .*\n', '', output)
   output = re.sub(r'\[=+\] \d+ tests .* ran.*', '', output)
   output = re.sub(r'.* FAILED TESTS\n', '', output)
   return output
 
 
 def RemoveLocations(output):
   """Removes all file location info from a Google Test program's output.
 
   Args:
        output:  the output of a Google Test program.
 
   Returns:
        output with all file location info (in the form of
        'DIRECTORY/FILE_NAME:LINE_NUMBER: ' or
        'DIRECTORY\\FILE_NAME(LINE_NUMBER): ') replaced by
        'FILE:#: '.
   """
 
   return re.sub(r'.*[/\\](.+)(\:\d+|\(\d+\))\:', 'FILE:#:', output)
 
 
 def NormalizeErrorMarker(output):
   """Normalizes the error marker, which is different on Windows vs on Linux."""
 
   return re.sub(r' error: ', ' Failure\n', output)
 
 
 def RemoveMemoryAddresses(output):
   """Removes memory addresses from the test output."""
 
   return re.sub(r'@\w+', '@0x#', output)
 
 
 def RemoveTestNamesOfLeakedMocks(output):
   """Removes the test names of leaked mock objects from the test output."""
 
   return re.sub(r'\(used in test .+\) ', '', output)
 
 
 def GetLeakyTests(output):
   """Returns a list of test names that leak mock objects."""
 
   # findall() returns a list of all matches of the regex in output.
   # For example, if '(used in test FooTest.Bar)' is in output, the
   # list will contain 'FooTest.Bar'.
   return re.findall(r'\(used in test (.+)\)', output)
 
 
 def GetNormalizedOutputAndLeakyTests(output):
   """Normalizes the output of gmock_output_test_.
 
   Args:
     output: The test output.
 
   Returns:
     A tuple (the normalized test output, the list of test names that have
     leaked mocks).
   """
 
   output = ToUnixLineEnding(output)
   output = RemoveReportHeaderAndFooter(output)
   output = NormalizeErrorMarker(output)
   output = RemoveLocations(output)
   output = RemoveMemoryAddresses(output)
   return (RemoveTestNamesOfLeakedMocks(output), GetLeakyTests(output))
 
 
 def GetShellCommandOutput(cmd):
   """Runs a command in a sub-process, and returns its STDOUT in a string."""
 
   return gmock_test_utils.Subprocess(cmd, capture_stderr=False).output
 
 
 def GetNormalizedCommandOutputAndLeakyTests(cmd):
   """Runs a command and returns its normalized output and a list of leaky tests.
 
   Args:
     cmd:  the shell command.
   """
 
   # Disables exception pop-ups on Windows.
   os.environ['GTEST_CATCH_EXCEPTIONS'] = '1'
   return GetNormalizedOutputAndLeakyTests(GetShellCommandOutput(cmd))
 
 
 class GMockOutputTest(gmock_test_utils.TestCase):
   def testOutput(self):
     (output, leaky_tests) = GetNormalizedCommandOutputAndLeakyTests(COMMAND)
     golden_file = open(GOLDEN_PATH, 'rb')
     golden = golden_file.read()
     golden_file.close()
 
     # The normalized output should match the golden file.
     self.assertEquals(golden, output)
 
     # The raw output should contain 2 leaked mock object errors for
     # test GMockOutputTest.CatchesLeakedMocks.
     self.assertEquals(['GMockOutputTest.CatchesLeakedMocks',
                        'GMockOutputTest.CatchesLeakedMocks'],
                       leaky_tests)
 
 
 if __name__ == '__main__':
   if sys.argv[1:] == [GENGOLDEN_FLAG]:
     (output, _) = GetNormalizedCommandOutputAndLeakyTests(COMMAND)
     golden_file = open(GOLDEN_PATH, 'wb')
     golden_file.write(output)
     golden_file.close()
     # Suppress the error "googletest was imported but a call to its main()
     # was never detected."
     os._exit(0)
   else:
     gmock_test_utils.Main()
diff --git a/googletest/CMakeLists.txt b/googletest/CMakeLists.txt
index 2e412d7f..ba2c2c2c 100644
--- a/googletest/CMakeLists.txt
+++ b/googletest/CMakeLists.txt
@@ -1,312 +1,312 @@
 ########################################################################
 # CMake build script for Google Test.
 #
 # To run the tests for Google Test itself on Linux, use 'make test' or
 # ctest.  You can select which tests to run using 'ctest -R regex'.
 # For more options, run 'ctest --help'.
 
 # BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to
 # make it prominent in the GUI.
 option(BUILD_SHARED_LIBS "Build shared libraries (DLLs)." OFF)
 
 # When other libraries are using a shared version of runtime libraries,
 # Google Test also has to use one.
 option(
   gtest_force_shared_crt
   "Use shared (DLL) run-time lib even when Google Test is built as static lib."
   OFF)
 
 option(gtest_build_tests "Build all of gtest's own tests." OFF)
 
 option(gtest_build_samples "Build gtest's sample programs." OFF)
 
 option(gtest_disable_pthreads "Disable uses of pthreads in gtest." OFF)
 
 option(
   gtest_hide_internal_symbols
   "Build gtest with internal symbols hidden in shared libraries."
   OFF)
 
 # Defines pre_project_set_up_hermetic_build() and set_up_hermetic_build().
 include(cmake/hermetic_build.cmake OPTIONAL)
 
 if (COMMAND pre_project_set_up_hermetic_build)
   pre_project_set_up_hermetic_build()
 endif()
 
 ########################################################################
 #
 # Project-wide settings
 
 # Name of the project.
 #
 # CMake files in this project can refer to the root source directory
 # as ${gtest_SOURCE_DIR} and to the root binary directory as
 # ${gtest_BINARY_DIR}.
 # Language "C" is required for find_package(Threads).
 if (CMAKE_VERSION VERSION_LESS 3.0)
   project(gtest CXX C)
 else()
   cmake_policy(SET CMP0048 NEW)
   project(gtest VERSION 1.9.0 LANGUAGES CXX C)
 endif()
 cmake_minimum_required(VERSION 2.6.4)
 
 if (POLICY CMP0063) # Visibility
   cmake_policy(SET CMP0063 NEW)
 endif (POLICY CMP0063)
 
 if (COMMAND set_up_hermetic_build)
   set_up_hermetic_build()
 endif()
 
 if (gtest_hide_internal_symbols)
   set(CMAKE_CXX_VISIBILITY_PRESET hidden)
   set(CMAKE_VISIBILITY_INLINES_HIDDEN 1)
 endif()
 
 # Define helper functions and macros used by Google Test.
 include(cmake/internal_utils.cmake)
 
 config_compiler_and_linker()  # Defined in internal_utils.cmake.
 
 # Where Google Test's .h files can be found.
 include_directories(
   "${gtest_SOURCE_DIR}/include"
   "${gtest_SOURCE_DIR}")
 
 # Summary of tuple support for Microsoft Visual Studio:
 # Compiler    version(MS)  version(cmake)  Support
 # ----------  -----------  --------------  -----------------------------
 # <= VS 2010  <= 10        <= 1600         Use Google Test's own tuple.
 # VS 2012     11           1700            std::tr1::tuple + _VARIADIC_MAX=10
 # VS 2013     12           1800            std::tr1::tuple
 # VS 2015     14           1900            std::tuple
 # VS 2017     15           >= 1910         std::tuple
 if (MSVC AND MSVC_VERSION EQUAL 1700)
   add_definitions(/D _VARIADIC_MAX=10)
 endif()
- 
+
 ########################################################################
 #
 # Defines the gtest & gtest_main libraries.  User tests should link
 # with one of them.
 
 # Google Test libraries.  We build them using more strict warnings than what
 # are used for other targets, to ensure that gtest can be compiled by a user
 # aggressive about warnings.
 cxx_library(gtest "${cxx_strict}" src/gtest-all.cc)
 cxx_library(gtest_main "${cxx_strict}" src/gtest_main.cc)
 target_link_libraries(gtest_main gtest)
 
 # If the CMake version supports it, attach header directory information
 # to the targets for when we are part of a parent build (ie being pulled
 # in via add_subdirectory() rather than being a standalone build).
 if (DEFINED CMAKE_VERSION AND NOT "${CMAKE_VERSION}" VERSION_LESS "2.8.11")
   target_include_directories(gtest      SYSTEM INTERFACE "${gtest_SOURCE_DIR}/include")
   target_include_directories(gtest_main SYSTEM INTERFACE "${gtest_SOURCE_DIR}/include")
 endif()
 
 ########################################################################
 #
 # Install rules
 if(INSTALL_GTEST)
   install(TARGETS gtest gtest_main
     RUNTIME DESTINATION "${CMAKE_INSTALL_BINDIR}"
     ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}"
     LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}")
   install(DIRECTORY "${gtest_SOURCE_DIR}/include/gtest"
     DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}")
 
   # configure and install pkgconfig files
   configure_file(
     cmake/gtest.pc.in
     "${CMAKE_BINARY_DIR}/gtest.pc"
     @ONLY)
   configure_file(
     cmake/gtest_main.pc.in
     "${CMAKE_BINARY_DIR}/gtest_main.pc"
     @ONLY)
   install(FILES "${CMAKE_BINARY_DIR}/gtest.pc" "${CMAKE_BINARY_DIR}/gtest_main.pc"
     DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig")
 endif()
 
 ########################################################################
 #
 # Samples on how to link user tests with gtest or gtest_main.
 #
 # They are not built by default.  To build them, set the
 # gtest_build_samples option to ON.  You can do it by running ccmake
 # or specifying the -Dgtest_build_samples=ON flag when running cmake.
 
 if (gtest_build_samples)
   cxx_executable(sample1_unittest samples gtest_main samples/sample1.cc)
   cxx_executable(sample2_unittest samples gtest_main samples/sample2.cc)
   cxx_executable(sample3_unittest samples gtest_main)
   cxx_executable(sample4_unittest samples gtest_main samples/sample4.cc)
   cxx_executable(sample5_unittest samples gtest_main samples/sample1.cc)
   cxx_executable(sample6_unittest samples gtest_main)
   cxx_executable(sample7_unittest samples gtest_main)
   cxx_executable(sample8_unittest samples gtest_main)
   cxx_executable(sample9_unittest samples gtest)
   cxx_executable(sample10_unittest samples gtest)
 endif()
 
 ########################################################################
 #
 # Google Test's own tests.
 #
 # You can skip this section if you aren't interested in testing
 # Google Test itself.
 #
 # The tests are not built by default.  To build them, set the
 # gtest_build_tests option to ON.  You can do it by running ccmake
 # or specifying the -Dgtest_build_tests=ON flag when running cmake.
 
 if (gtest_build_tests)
   # This must be set in the root directory for the tests to be run by
   # 'make test' or ctest.
   enable_testing()
 
   ############################################################
   # C++ tests built with standard compiler flags.
 
-  cxx_test(gtest-death-test_test gtest_main)
+  cxx_test(googletest-death-test-test gtest_main)
   cxx_test(gtest_environment_test gtest)
-  cxx_test(gtest-filepath_test gtest_main)
-  cxx_test(gtest-linked_ptr_test gtest_main)
-  cxx_test(gtest-listener_test gtest_main)
+  cxx_test(googletest-filepath-test gtest_main)
+  cxx_test(googletest-linked-ptr-test gtest_main)
+  cxx_test(googletest-listener-test gtest_main)
   cxx_test(gtest_main_unittest gtest_main)
-  cxx_test(gtest-message_test gtest_main)
+  cxx_test(googletest-message-test gtest_main)
   cxx_test(gtest_no_test_unittest gtest)
-  cxx_test(gtest-options_test gtest_main)
-  cxx_test(gtest-param-test_test gtest
-    test/gtest-param-test2_test.cc)
-  cxx_test(gtest-port_test gtest_main)
+  cxx_test(googletest-options-test gtest_main)
+  cxx_test(googletest-param-test-test gtest
+    test/googletest-param-test2-test.cc)
+  cxx_test(googletest-port-test gtest_main)
   cxx_test(gtest_pred_impl_unittest gtest_main)
   cxx_test(gtest_premature_exit_test gtest
     test/gtest_premature_exit_test.cc)
-  cxx_test(gtest-printers_test gtest_main)
+  cxx_test(googletest-printers-test gtest_main)
   cxx_test(gtest_prod_test gtest_main
     test/production.cc)
   cxx_test(gtest_repeat_test gtest)
   cxx_test(gtest_sole_header_test gtest_main)
   cxx_test(gtest_stress_test gtest)
-  cxx_test(gtest-test-part_test gtest_main)
+  cxx_test(googletest-test-part-test gtest_main)
   cxx_test(gtest_throw_on_failure_ex_test gtest)
   cxx_test(gtest-typed-test_test gtest_main
     test/gtest-typed-test2_test.cc)
   cxx_test(gtest_unittest gtest_main)
   cxx_test(gtest-unittest-api_test gtest)
 
   ############################################################
   # C++ tests built with non-standard compiler flags.
 
   # MSVC 7.1 does not support STL with exceptions disabled.
   if (NOT MSVC OR MSVC_VERSION GREATER 1310)
     cxx_library(gtest_no_exception "${cxx_no_exception}"
       src/gtest-all.cc)
     cxx_library(gtest_main_no_exception "${cxx_no_exception}"
       src/gtest-all.cc src/gtest_main.cc)
   endif()
   cxx_library(gtest_main_no_rtti "${cxx_no_rtti}"
     src/gtest-all.cc src/gtest_main.cc)
 
   cxx_test_with_flags(gtest-death-test_ex_nocatch_test
     "${cxx_exception} -DGTEST_ENABLE_CATCH_EXCEPTIONS_=0"
-    gtest test/gtest-death-test_ex_test.cc)
+    gtest test/googletest-death-test_ex_test.cc)
   cxx_test_with_flags(gtest-death-test_ex_catch_test
     "${cxx_exception} -DGTEST_ENABLE_CATCH_EXCEPTIONS_=1"
-    gtest test/gtest-death-test_ex_test.cc)
+    gtest test/googletest-death-test_ex_test.cc)
 
   cxx_test_with_flags(gtest_no_rtti_unittest "${cxx_no_rtti}"
     gtest_main_no_rtti test/gtest_unittest.cc)
 
   cxx_shared_library(gtest_dll "${cxx_default}"
     src/gtest-all.cc src/gtest_main.cc)
 
   cxx_executable_with_flags(gtest_dll_test_ "${cxx_default}"
     gtest_dll test/gtest_all_test.cc)
   set_target_properties(gtest_dll_test_
                         PROPERTIES
                         COMPILE_DEFINITIONS "GTEST_LINKED_AS_SHARED_LIBRARY=1")
 
   if (NOT MSVC OR MSVC_VERSION LESS 1600)  # 1600 is Visual Studio 2010.
     # Visual Studio 2010, 2012, and 2013 define symbols in std::tr1 that
     # conflict with our own definitions. Therefore using our own tuple does not
     # work on those compilers.
     cxx_library(gtest_main_use_own_tuple "${cxx_use_own_tuple}"
       src/gtest-all.cc src/gtest_main.cc)
 
-    cxx_test_with_flags(gtest-tuple_test "${cxx_use_own_tuple}"
-      gtest_main_use_own_tuple test/gtest-tuple_test.cc)
+    cxx_test_with_flags(googletest-tuple-test "${cxx_use_own_tuple}"
+      gtest_main_use_own_tuple test/googletest-tuple-test.cc)
 
     cxx_test_with_flags(gtest_use_own_tuple_test "${cxx_use_own_tuple}"
       gtest_main_use_own_tuple
-      test/gtest-param-test_test.cc test/gtest-param-test2_test.cc)
+      test/googletest-param-test-test.cc test/googletest-param-test2-test.cc)
   endif()
 
   ############################################################
   # Python tests.
 
-  cxx_executable(gtest_break_on_failure_unittest_ test gtest)
-  py_test(gtest_break_on_failure_unittest)
+  cxx_executable(googletest-break-on-failure-unittest_ test gtest)
+  py_test(googletest-break-on-failure-unittest)
 
   # Visual Studio .NET 2003 does not support STL with exceptions disabled.
   if (NOT MSVC OR MSVC_VERSION GREATER 1310)  # 1310 is Visual Studio .NET 2003
     cxx_executable_with_flags(
-      gtest_catch_exceptions_no_ex_test_
+      googletest-catch-exceptions-no-ex-test_
       "${cxx_no_exception}"
       gtest_main_no_exception
-      test/gtest_catch_exceptions_test_.cc)
+      test/googletest-catch-exceptions-test_.cc)
   endif()
 
   cxx_executable_with_flags(
-    gtest_catch_exceptions_ex_test_
+    googletest-catch-exceptions-ex-test_
     "${cxx_exception}"
     gtest_main
-    test/gtest_catch_exceptions_test_.cc)
-  py_test(gtest_catch_exceptions_test)
+    test/googletest-catch-exceptions-test_.cc)
+  py_test(googletest-catch-exceptions-test)
 
-  cxx_executable(gtest_color_test_ test gtest)
-  py_test(gtest_color_test)
+  cxx_executable(googletest-color-test_ test gtest)
+  py_test(googletest-color-test)
 
-  cxx_executable(gtest_env_var_test_ test gtest)
-  py_test(gtest_env_var_test)
+  cxx_executable(googletest-env-var-test_ test gtest)
+  py_test(googletest-env-var-test)
 
-  cxx_executable(gtest_filter_unittest_ test gtest)
-  py_test(gtest_filter_unittest)
+  cxx_executable(googletest-filter-unittest_ test gtest)
+  py_test(googletest-filter-unittest)
 
   cxx_executable(gtest_help_test_ test gtest_main)
   py_test(gtest_help_test)
 
-  cxx_executable(gtest_list_tests_unittest_ test gtest)
-  py_test(gtest_list_tests_unittest)
+  cxx_executable(googletest-list-tests-unittest_ test gtest)
+  py_test(googletest-list-tests-unittest)
 
-  cxx_executable(gtest_output_test_ test gtest)
-  py_test(gtest_output_test --no_stacktrace_support)
+  cxx_executable(googletest-output-test_ test gtest)
+  py_test(googletest-output-test --no_stacktrace_support)
 
-  cxx_executable(gtest_shuffle_test_ test gtest)
-  py_test(gtest_shuffle_test)
+  cxx_executable(googletest-shuffle-test_ test gtest)
+  py_test(googletest-shuffle-test)
 
   # MSVC 7.1 does not support STL with exceptions disabled.
   if (NOT MSVC OR MSVC_VERSION GREATER 1310)
-    cxx_executable(gtest_throw_on_failure_test_ test gtest_no_exception)
-    set_target_properties(gtest_throw_on_failure_test_
+    cxx_executable(googletest-throw-on-failure-test_ test gtest_no_exception)
+    set_target_properties(googletest-throw-on-failure-test_
       PROPERTIES
       COMPILE_FLAGS "${cxx_no_exception}")
-    py_test(gtest_throw_on_failure_test)
+    py_test(googletest-throw-on-failure-test)
   endif()
 
-  cxx_executable(gtest_uninitialized_test_ test gtest)
-  py_test(gtest_uninitialized_test)
+  cxx_executable(googletest-uninitialized-test_ test gtest)
+  py_test(googletest-uninitialized-test)
 
   cxx_executable(gtest_xml_outfile1_test_ test gtest_main)
   cxx_executable(gtest_xml_outfile2_test_ test gtest_main)
   py_test(gtest_xml_outfiles_test)
-  py_test(gtest_json_outfiles_test)
+  py_test(googletest-json-outfiles-test)
 
   cxx_executable(gtest_xml_output_unittest_ test gtest)
   py_test(gtest_xml_output_unittest --no_stacktrace_support)
-  py_test(gtest_json_output_unittest)
+  py_test(googletest-json-output-unittest --no_stacktrace_support)
 endif()
diff --git a/googletest/Makefile.am b/googletest/Makefile.am
index b6c7232d..b44c8416 100644
--- a/googletest/Makefile.am
+++ b/googletest/Makefile.am
@@ -1,339 +1,339 @@
 # Automake file
 
 ACLOCAL_AMFLAGS = -I m4
 
 # Nonstandard package files for distribution
 EXTRA_DIST = \
   CHANGES \
   CONTRIBUTORS \
   LICENSE \
   include/gtest/gtest-param-test.h.pump \
   include/gtest/internal/gtest-param-util-generated.h.pump \
   include/gtest/internal/gtest-tuple.h.pump \
   include/gtest/internal/gtest-type-util.h.pump \
   make/Makefile \
   scripts/fuse_gtest_files.py \
   scripts/gen_gtest_pred_impl.py \
   scripts/pump.py \
   scripts/test/Makefile
 
 # gtest source files that we don't compile directly.  They are
 # #included by gtest-all.cc.
 GTEST_SRC = \
   src/gtest-death-test.cc \
   src/gtest-filepath.cc \
   src/gtest-internal-inl.h \
   src/gtest-port.cc \
   src/gtest-printers.cc \
   src/gtest-test-part.cc \
   src/gtest-typed-test.cc \
   src/gtest.cc
 
 EXTRA_DIST += $(GTEST_SRC)
 
 # Sample files that we don't compile.
 EXTRA_DIST += \
   samples/prime_tables.h \
   samples/sample1_unittest.cc \
   samples/sample2_unittest.cc \
   samples/sample3_unittest.cc \
   samples/sample4_unittest.cc \
   samples/sample5_unittest.cc \
   samples/sample6_unittest.cc \
   samples/sample7_unittest.cc \
   samples/sample8_unittest.cc \
   samples/sample9_unittest.cc
 
 # C++ test files that we don't compile directly.
 EXTRA_DIST += \
   test/gtest-death-test_ex_test.cc \
   test/gtest-death-test_test.cc \
   test/gtest-filepath_test.cc \
   test/gtest-linked_ptr_test.cc \
   test/gtest-listener_test.cc \
   test/gtest-message_test.cc \
   test/gtest-options_test.cc \
-  test/gtest-param-test2_test.cc \
-  test/gtest-param-test2_test.cc \
-  test/gtest-param-test_test.cc \
-  test/gtest-param-test_test.cc \
+  test/googletest-param-test2-test.cc \
+  test/googletest-param-test2-test.cc \
+  test/googletest-param-test-test.cc \
+  test/googletest-param-test-test.cc \
   test/gtest-param-test_test.h \
   test/gtest-port_test.cc \
   test/gtest_premature_exit_test.cc \
   test/gtest-printers_test.cc \
   test/gtest-test-part_test.cc \
-  test/gtest-tuple_test.cc \
+  test/googletest-tuple-test.cc \
   test/gtest-typed-test2_test.cc \
   test/gtest-typed-test_test.cc \
   test/gtest-typed-test_test.h \
   test/gtest-unittest-api_test.cc \
-  test/gtest_break_on_failure_unittest_.cc \
-  test/gtest_catch_exceptions_test_.cc \
-  test/gtest_color_test_.cc \
-  test/gtest_env_var_test_.cc \
+  test/googletest-break-on-failure-unittest_.cc \
+  test/googletest-catch-exceptions-test_.cc \
+  test/googletest-color-test_.cc \
+  test/googletest-env-var-test_.cc \
   test/gtest_environment_test.cc \
-  test/gtest_filter_unittest_.cc \
+  test/googletest-filter-unittest_.cc \
   test/gtest_help_test_.cc \
-  test/gtest_list_tests_unittest_.cc \
+  test/googletest-list-tests-unittest_.cc \
   test/gtest_main_unittest.cc \
   test/gtest_no_test_unittest.cc \
-  test/gtest_output_test_.cc \
+  test/googletest-output-test_.cc \
   test/gtest_pred_impl_unittest.cc \
   test/gtest_prod_test.cc \
   test/gtest_repeat_test.cc \
-  test/gtest_shuffle_test_.cc \
+  test/googletest-shuffle-test_.cc \
   test/gtest_sole_header_test.cc \
   test/gtest_stress_test.cc \
   test/gtest_throw_on_failure_ex_test.cc \
-  test/gtest_throw_on_failure_test_.cc \
-  test/gtest_uninitialized_test_.cc \
+  test/googletest-throw-on-failure-test_.cc \
+  test/googletest-uninitialized-test_.cc \
   test/gtest_unittest.cc \
   test/gtest_unittest.cc \
   test/gtest_xml_outfile1_test_.cc \
   test/gtest_xml_outfile2_test_.cc \
   test/gtest_xml_output_unittest_.cc \
   test/production.cc \
   test/production.h
 
 # Python tests that we don't run.
 EXTRA_DIST += \
-  test/gtest_break_on_failure_unittest.py \
-  test/gtest_catch_exceptions_test.py \
-  test/gtest_color_test.py \
-  test/gtest_env_var_test.py \
-  test/gtest_filter_unittest.py \
+  test/googletest-break-on-failure-unittest.py \
+  test/googletest-catch-exceptions-test.py \
+  test/googletest-color-test.py \
+  test/googletest-env-var-test.py \
+  test/googletest-filter-unittest.py \
   test/gtest_help_test.py \
-  test/gtest_list_tests_unittest.py \
-  test/gtest_output_test.py \
-  test/gtest_output_test_golden_lin.txt \
-  test/gtest_shuffle_test.py \
+  test/googletest-list-tests-unittest.py \
+  test/googletest-output-test.py \
+  test/googletest-output-test_golden_lin.txt \
+  test/googletest-shuffle-test.py \
   test/gtest_test_utils.py \
-  test/gtest_throw_on_failure_test.py \
-  test/gtest_uninitialized_test.py \
+  test/googletest-throw-on-failure-test.py \
+  test/googletest-uninitialized-test.py \
   test/gtest_xml_outfiles_test.py \
   test/gtest_xml_output_unittest.py \
   test/gtest_xml_test_utils.py
 
 # CMake script
 EXTRA_DIST += \
   CMakeLists.txt \
   cmake/internal_utils.cmake
 
 # MSVC project files
 EXTRA_DIST += \
   msvc/2010/gtest-md.sln \
   msvc/2010/gtest-md.vcxproj \
   msvc/2010/gtest.sln \
   msvc/2010/gtest.vcxproj \
   msvc/2010/gtest_main-md.vcxproj \
   msvc/2010/gtest_main.vcxproj \
   msvc/2010/gtest_prod_test-md.vcxproj \
   msvc/2010/gtest_prod_test.vcxproj \
   msvc/2010/gtest_unittest-md.vcxproj \
   msvc/2010/gtest_unittest.vcxproj
 
 # xcode project files
 EXTRA_DIST += \
   xcode/Config/DebugProject.xcconfig \
   xcode/Config/FrameworkTarget.xcconfig \
   xcode/Config/General.xcconfig \
   xcode/Config/ReleaseProject.xcconfig \
   xcode/Config/StaticLibraryTarget.xcconfig \
   xcode/Config/TestTarget.xcconfig \
   xcode/Resources/Info.plist \
   xcode/Scripts/runtests.sh \
   xcode/Scripts/versiongenerate.py \
   xcode/gtest.xcodeproj/project.pbxproj
 
 # xcode sample files
 EXTRA_DIST += \
   xcode/Samples/FrameworkSample/Info.plist \
   xcode/Samples/FrameworkSample/WidgetFramework.xcodeproj/project.pbxproj \
   xcode/Samples/FrameworkSample/runtests.sh \
   xcode/Samples/FrameworkSample/widget.cc \
   xcode/Samples/FrameworkSample/widget.h \
   xcode/Samples/FrameworkSample/widget_test.cc
 
 # C++Builder project files
 EXTRA_DIST += \
   codegear/gtest.cbproj \
   codegear/gtest.groupproj \
   codegear/gtest_all.cc \
   codegear/gtest_link.cc \
   codegear/gtest_main.cbproj \
   codegear/gtest_unittest.cbproj
 
 # Distribute and install M4 macro
 m4datadir = $(datadir)/aclocal
 m4data_DATA = m4/gtest.m4
 EXTRA_DIST += $(m4data_DATA)
 
 # We define the global AM_CPPFLAGS as everything we compile includes from these
 # directories.
 AM_CPPFLAGS = -I$(srcdir) -I$(srcdir)/include
 
 # Modifies compiler and linker flags for pthreads compatibility.
 if HAVE_PTHREADS
   AM_CXXFLAGS = @PTHREAD_CFLAGS@ -DGTEST_HAS_PTHREAD=1
   AM_LIBS = @PTHREAD_LIBS@
 else
   AM_CXXFLAGS = -DGTEST_HAS_PTHREAD=0
 endif
 
 # Build rules for libraries.
 lib_LTLIBRARIES = lib/libgtest.la lib/libgtest_main.la
 
 lib_libgtest_la_SOURCES = src/gtest-all.cc
 
 pkginclude_HEADERS = \
   include/gtest/gtest-death-test.h \
   include/gtest/gtest-message.h \
   include/gtest/gtest-param-test.h \
   include/gtest/gtest-printers.h \
   include/gtest/gtest-spi.h \
   include/gtest/gtest-test-part.h \
   include/gtest/gtest-typed-test.h \
   include/gtest/gtest.h \
   include/gtest/gtest_pred_impl.h \
   include/gtest/gtest_prod.h
 
 pkginclude_internaldir = $(pkgincludedir)/internal
 pkginclude_internal_HEADERS = \
   include/gtest/internal/gtest-death-test-internal.h \
   include/gtest/internal/gtest-filepath.h \
   include/gtest/internal/gtest-internal.h \
   include/gtest/internal/gtest-linked_ptr.h \
   include/gtest/internal/gtest-param-util-generated.h \
   include/gtest/internal/gtest-param-util.h \
   include/gtest/internal/gtest-port.h \
   include/gtest/internal/gtest-port-arch.h \
   include/gtest/internal/gtest-string.h \
   include/gtest/internal/gtest-tuple.h \
   include/gtest/internal/gtest-type-util.h \
   include/gtest/internal/custom/gtest.h \
   include/gtest/internal/custom/gtest-port.h \
   include/gtest/internal/custom/gtest-printers.h
 
 lib_libgtest_main_la_SOURCES = src/gtest_main.cc
 lib_libgtest_main_la_LIBADD = lib/libgtest.la
 
 # Build rules for samples and tests. Automake's naming for some of
 # these variables isn't terribly obvious, so this is a brief
 # reference:
 #
 # TESTS -- Programs run automatically by "make check"
 # check_PROGRAMS -- Programs built by "make check" but not necessarily run
 
 TESTS=
 TESTS_ENVIRONMENT = GTEST_SOURCE_DIR="$(srcdir)/test" \
                     GTEST_BUILD_DIR="$(top_builddir)/test"
 check_PROGRAMS=
 
 # A simple sample on using gtest.
 TESTS += samples/sample1_unittest \
     samples/sample2_unittest \
     samples/sample3_unittest \
     samples/sample4_unittest \
     samples/sample5_unittest \
     samples/sample6_unittest \
     samples/sample7_unittest \
     samples/sample8_unittest \
     samples/sample9_unittest \
     samples/sample10_unittest
 check_PROGRAMS += samples/sample1_unittest \
     samples/sample2_unittest \
     samples/sample3_unittest \
     samples/sample4_unittest \
     samples/sample5_unittest \
     samples/sample6_unittest \
     samples/sample7_unittest \
     samples/sample8_unittest \
     samples/sample9_unittest \
     samples/sample10_unittest
 
 samples_sample1_unittest_SOURCES = samples/sample1_unittest.cc samples/sample1.cc
 samples_sample1_unittest_LDADD = lib/libgtest_main.la \
                                  lib/libgtest.la
 samples_sample2_unittest_SOURCES = samples/sample2_unittest.cc samples/sample2.cc
 samples_sample2_unittest_LDADD = lib/libgtest_main.la \
                                  lib/libgtest.la
 samples_sample3_unittest_SOURCES = samples/sample3_unittest.cc
 samples_sample3_unittest_LDADD = lib/libgtest_main.la \
                                  lib/libgtest.la
 samples_sample4_unittest_SOURCES = samples/sample4_unittest.cc samples/sample4.cc
 samples_sample4_unittest_LDADD = lib/libgtest_main.la \
                                  lib/libgtest.la
 samples_sample5_unittest_SOURCES = samples/sample5_unittest.cc samples/sample1.cc
 samples_sample5_unittest_LDADD = lib/libgtest_main.la \
                                  lib/libgtest.la
 samples_sample6_unittest_SOURCES = samples/sample6_unittest.cc
 samples_sample6_unittest_LDADD = lib/libgtest_main.la \
                                  lib/libgtest.la
 samples_sample7_unittest_SOURCES = samples/sample7_unittest.cc
 samples_sample7_unittest_LDADD = lib/libgtest_main.la \
                                  lib/libgtest.la
 samples_sample8_unittest_SOURCES = samples/sample8_unittest.cc
 samples_sample8_unittest_LDADD = lib/libgtest_main.la \
                                  lib/libgtest.la
 
 # Also verify that libgtest works by itself.
 samples_sample9_unittest_SOURCES = samples/sample9_unittest.cc
 samples_sample9_unittest_LDADD = lib/libgtest.la
 samples_sample10_unittest_SOURCES = samples/sample10_unittest.cc
 samples_sample10_unittest_LDADD = lib/libgtest.la
 
 # This tests most constructs of gtest and verifies that libgtest_main
 # and libgtest work.
 TESTS += test/gtest_all_test
 check_PROGRAMS += test/gtest_all_test
 test_gtest_all_test_SOURCES = test/gtest_all_test.cc
 test_gtest_all_test_LDADD = lib/libgtest_main.la \
                             lib/libgtest.la
 
 # Tests that fused gtest files compile and work.
 FUSED_GTEST_SRC = \
   fused-src/gtest/gtest-all.cc \
   fused-src/gtest/gtest.h \
   fused-src/gtest/gtest_main.cc
 
 if HAVE_PYTHON
 TESTS += test/fused_gtest_test
 check_PROGRAMS += test/fused_gtest_test
 test_fused_gtest_test_SOURCES = $(FUSED_GTEST_SRC) \
                                 samples/sample1.cc samples/sample1_unittest.cc
 test_fused_gtest_test_CPPFLAGS = -I"$(srcdir)/fused-src"
 
 # Build rules for putting fused Google Test files into the distribution
 # package. The user can also create those files by manually running
 # scripts/fuse_gtest_files.py.
 $(test_fused_gtest_test_SOURCES): fused-gtest
 
 fused-gtest: $(pkginclude_HEADERS) $(pkginclude_internal_HEADERS) \
              $(GTEST_SRC) src/gtest-all.cc src/gtest_main.cc \
              scripts/fuse_gtest_files.py
 	mkdir -p "$(srcdir)/fused-src"
 	chmod -R u+w "$(srcdir)/fused-src"
 	rm -f "$(srcdir)/fused-src/gtest/gtest-all.cc"
 	rm -f "$(srcdir)/fused-src/gtest/gtest.h"
 	"$(srcdir)/scripts/fuse_gtest_files.py" "$(srcdir)/fused-src"
 	cp -f "$(srcdir)/src/gtest_main.cc" "$(srcdir)/fused-src/gtest/"
 
 maintainer-clean-local:
 	rm -rf "$(srcdir)/fused-src"
 endif
 
 # Death tests may produce core dumps in the build directory. In case
 # this happens, clean them to keep distcleancheck happy.
 CLEANFILES = core
 
 # Disables 'make install' as installing a compiled version of Google
 # Test can lead to undefined behavior due to violation of the
 # One-Definition Rule.
 
 install-exec-local:
 	echo "'make install' is dangerous and not supported. Instead, see README for how to integrate Google Test into your build system."
 	false
 
 install-data-local:
 	echo "'make install' is dangerous and not supported. Instead, see README for how to integrate Google Test into your build system."
 	false
diff --git a/googletest/cmake/internal_utils.cmake b/googletest/cmake/internal_utils.cmake
index be7af38f..086f51cf 100644
--- a/googletest/cmake/internal_utils.cmake
+++ b/googletest/cmake/internal_utils.cmake
@@ -1,280 +1,280 @@
 # Defines functions and macros useful for building Google Test and
 # Google Mock.
 #
 # Note:
 #
 # - This file will be run twice when building Google Mock (once via
 #   Google Test's CMakeLists.txt, and once via Google Mock's).
 #   Therefore it shouldn't have any side effects other than defining
 #   the functions and macros.
 #
 # - The functions/macros defined in this file may depend on Google
 #   Test and Google Mock's option() definitions, and thus must be
 #   called *after* the options have been defined.
 
 # Tweaks CMake's default compiler/linker settings to suit Google Test's needs.
 #
 # This must be a macro(), as inside a function string() can only
 # update variables in the function scope.
 macro(fix_default_compiler_settings_)
   if (MSVC)
     # For MSVC, CMake sets certain flags to defaults we want to override.
     # This replacement code is taken from sample in the CMake Wiki at
-    # http://www.cmake.org/Wiki/CMake_FAQ#Dynamic_Replace.
+    # https://gitlab.kitware.com/cmake/community/wikis/FAQ#dynamic-replace.
     foreach (flag_var
              CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
              CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO)
       if (NOT BUILD_SHARED_LIBS AND NOT gtest_force_shared_crt)
         # When Google Test is built as a shared library, it should also use
         # shared runtime libraries.  Otherwise, it may end up with multiple
         # copies of runtime library data in different modules, resulting in
         # hard-to-find crashes. When it is built as a static library, it is
         # preferable to use CRT as static libraries, as we don't have to rely
         # on CRT DLLs being available. CMake always defaults to using shared
         # CRT libraries, so we override that default here.
         string(REPLACE "/MD" "-MT" ${flag_var} "${${flag_var}}")
       endif()
 
       # We prefer more strict warning checking for building Google Test.
       # Replaces /W3 with /W4 in defaults.
       string(REPLACE "/W3" "/W4" ${flag_var} "${${flag_var}}")
     endforeach()
   endif()
 endmacro()
 
 # Defines the compiler/linker flags used to build Google Test and
 # Google Mock.  You can tweak these definitions to suit your need.  A
 # variable's value is empty before it's explicitly assigned to.
 macro(config_compiler_and_linker)
   # Note: pthreads on MinGW is not supported, even if available;
   # instead, we use Windows threading primitives.
   unset(GTEST_HAS_PTHREAD)
   if (NOT gtest_disable_pthreads AND NOT MINGW)
     # Defines CMAKE_USE_PTHREADS_INIT and CMAKE_THREAD_LIBS_INIT.
     set(THREADS_PREFER_PTHREAD_FLAG ON)
     find_package(Threads)
     if (CMAKE_USE_PTHREADS_INIT)
       set(GTEST_HAS_PTHREAD ON)
     endif()
   endif()
 
   fix_default_compiler_settings_()
   if (MSVC)
     # Newlines inside flags variables break CMake's NMake generator.
     # TODO(vladl@google.com): Add -RTCs and -RTCu to debug builds.
     set(cxx_base_flags "-GS -W4 -WX -wd4251 -wd4275 -nologo -J -Zi")
     if (MSVC_VERSION LESS 1400)  # 1400 is Visual Studio 2005
       # Suppress spurious warnings MSVC 7.1 sometimes issues.
       # Forcing value to bool.
       set(cxx_base_flags "${cxx_base_flags} -wd4800")
       # Copy constructor and assignment operator could not be generated.
       set(cxx_base_flags "${cxx_base_flags} -wd4511 -wd4512")
       # Compatibility warnings not applicable to Google Test.
       # Resolved overload was found by argument-dependent lookup.
       set(cxx_base_flags "${cxx_base_flags} -wd4675")
     endif()
     if (MSVC_VERSION LESS 1500)  # 1500 is Visual Studio 2008
       # Conditional expression is constant.
       # When compiling with /W4, we get several instances of C4127
       # (Conditional expression is constant). In our code, we disable that
       # warning on a case-by-case basis. However, on Visual Studio 2005,
       # the warning fires on std::list. Therefore on that compiler and earlier,
       # we disable the warning project-wide.
       set(cxx_base_flags "${cxx_base_flags} -wd4127")
     endif()
     if (NOT (MSVC_VERSION LESS 1700))  # 1700 is Visual Studio 2012.
       # Suppress "unreachable code" warning on VS 2012 and later.
       # http://stackoverflow.com/questions/3232669 explains the issue.
       set(cxx_base_flags "${cxx_base_flags} -wd4702")
     endif()
 
     set(cxx_base_flags "${cxx_base_flags} -D_UNICODE -DUNICODE -DWIN32 -D_WIN32")
     set(cxx_base_flags "${cxx_base_flags} -DSTRICT -DWIN32_LEAN_AND_MEAN")
     set(cxx_exception_flags "-EHsc -D_HAS_EXCEPTIONS=1")
     set(cxx_no_exception_flags "-EHs-c- -D_HAS_EXCEPTIONS=0")
     set(cxx_no_rtti_flags "-GR-")
   elseif (CMAKE_COMPILER_IS_GNUCXX)
     set(cxx_base_flags "-Wall -Wshadow -Werror")
     if(NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 7.0.0)
       set(cxx_base_flags "${cxx_base_flags} -Wno-error=dangling-else")
     endif()
     set(cxx_exception_flags "-fexceptions")
     set(cxx_no_exception_flags "-fno-exceptions")
     # Until version 4.3.2, GCC doesn't define a macro to indicate
     # whether RTTI is enabled.  Therefore we define GTEST_HAS_RTTI
     # explicitly.
     set(cxx_no_rtti_flags "-fno-rtti -DGTEST_HAS_RTTI=0")
     set(cxx_strict_flags
       "-Wextra -Wno-unused-parameter -Wno-missing-field-initializers")
   elseif (CMAKE_CXX_COMPILER_ID STREQUAL "SunPro")
     set(cxx_exception_flags "-features=except")
     # Sun Pro doesn't provide macros to indicate whether exceptions and
     # RTTI are enabled, so we define GTEST_HAS_* explicitly.
     set(cxx_no_exception_flags "-features=no%except -DGTEST_HAS_EXCEPTIONS=0")
     set(cxx_no_rtti_flags "-features=no%rtti -DGTEST_HAS_RTTI=0")
   elseif (CMAKE_CXX_COMPILER_ID STREQUAL "VisualAge" OR
       CMAKE_CXX_COMPILER_ID STREQUAL "XL")
     # CMake 2.8 changes Visual Age's compiler ID to "XL".
     set(cxx_exception_flags "-qeh")
     set(cxx_no_exception_flags "-qnoeh")
     # Until version 9.0, Visual Age doesn't define a macro to indicate
     # whether RTTI is enabled.  Therefore we define GTEST_HAS_RTTI
     # explicitly.
     set(cxx_no_rtti_flags "-qnortti -DGTEST_HAS_RTTI=0")
   elseif (CMAKE_CXX_COMPILER_ID STREQUAL "HP")
     set(cxx_base_flags "-AA -mt")
     set(cxx_exception_flags "-DGTEST_HAS_EXCEPTIONS=1")
     set(cxx_no_exception_flags "+noeh -DGTEST_HAS_EXCEPTIONS=0")
     # RTTI can not be disabled in HP aCC compiler.
     set(cxx_no_rtti_flags "")
   endif()
 
   # The pthreads library is available and allowed?
   if (DEFINED GTEST_HAS_PTHREAD)
     set(GTEST_HAS_PTHREAD_MACRO "-DGTEST_HAS_PTHREAD=1")
   else()
     set(GTEST_HAS_PTHREAD_MACRO "-DGTEST_HAS_PTHREAD=0")
   endif()
   set(cxx_base_flags "${cxx_base_flags} ${GTEST_HAS_PTHREAD_MACRO}")
 
   # For building gtest's own tests and samples.
   set(cxx_exception "${CMAKE_CXX_FLAGS} ${cxx_base_flags} ${cxx_exception_flags}")
   set(cxx_no_exception
     "${CMAKE_CXX_FLAGS} ${cxx_base_flags} ${cxx_no_exception_flags}")
   set(cxx_default "${cxx_exception}")
   set(cxx_no_rtti "${cxx_default} ${cxx_no_rtti_flags}")
   set(cxx_use_own_tuple "${cxx_default} -DGTEST_USE_OWN_TR1_TUPLE=1")
 
   # For building the gtest libraries.
   set(cxx_strict "${cxx_default} ${cxx_strict_flags}")
 endmacro()
 
 # Defines the gtest & gtest_main libraries.  User tests should link
 # with one of them.
 function(cxx_library_with_type name type cxx_flags)
   # type can be either STATIC or SHARED to denote a static or shared library.
   # ARGN refers to additional arguments after 'cxx_flags'.
   add_library(${name} ${type} ${ARGN})
   set_target_properties(${name}
     PROPERTIES
     COMPILE_FLAGS "${cxx_flags}")
   # Generate debug library name with a postfix.
   set_target_properties(${name}
     PROPERTIES
     DEBUG_POSTFIX "d")
   if (BUILD_SHARED_LIBS OR type STREQUAL "SHARED")
     set_target_properties(${name}
       PROPERTIES
       COMPILE_DEFINITIONS "GTEST_CREATE_SHARED_LIBRARY=1")
   endif()
   if (DEFINED GTEST_HAS_PTHREAD)
     target_link_libraries(${name} ${CMAKE_THREAD_LIBS_INIT})
   endif()
 endfunction()
 
 ########################################################################
 #
 # Helper functions for creating build targets.
 
 function(cxx_shared_library name cxx_flags)
   cxx_library_with_type(${name} SHARED "${cxx_flags}" ${ARGN})
 endfunction()
 
 function(cxx_library name cxx_flags)
   cxx_library_with_type(${name} "" "${cxx_flags}" ${ARGN})
 endfunction()
 
 # cxx_executable_with_flags(name cxx_flags libs srcs...)
 #
 # creates a named C++ executable that depends on the given libraries and
 # is built from the given source files with the given compiler flags.
 function(cxx_executable_with_flags name cxx_flags libs)
   add_executable(${name} ${ARGN})
   if (MSVC AND (NOT (MSVC_VERSION LESS 1700)))  # 1700 is Visual Studio 2012.
     # BigObj required for tests.
     set(cxx_flags "${cxx_flags} -bigobj")
   endif()
   if (cxx_flags)
     set_target_properties(${name}
       PROPERTIES
       COMPILE_FLAGS "${cxx_flags}")
   endif()
   if (BUILD_SHARED_LIBS)
     set_target_properties(${name}
       PROPERTIES
       COMPILE_DEFINITIONS "GTEST_LINKED_AS_SHARED_LIBRARY=1")
   endif()
   # To support mixing linking in static and dynamic libraries, link each
   # library in with an extra call to target_link_libraries.
   foreach (lib "${libs}")
     target_link_libraries(${name} ${lib})
   endforeach()
 endfunction()
 
 # cxx_executable(name dir lib srcs...)
 #
 # creates a named target that depends on the given libs and is built
 # from the given source files.  dir/name.cc is implicitly included in
 # the source file list.
 function(cxx_executable name dir libs)
   cxx_executable_with_flags(
     ${name} "${cxx_default}" "${libs}" "${dir}/${name}.cc" ${ARGN})
 endfunction()
 
 # Sets PYTHONINTERP_FOUND and PYTHON_EXECUTABLE.
 find_package(PythonInterp)
 
 # cxx_test_with_flags(name cxx_flags libs srcs...)
 #
 # creates a named C++ test that depends on the given libs and is built
 # from the given source files with the given compiler flags.
 function(cxx_test_with_flags name cxx_flags libs)
   cxx_executable_with_flags(${name} "${cxx_flags}" "${libs}" ${ARGN})
   add_test(${name} ${name})
 endfunction()
 
 # cxx_test(name libs srcs...)
 #
 # creates a named test target that depends on the given libs and is
 # built from the given source files.  Unlike cxx_test_with_flags,
 # test/name.cc is already implicitly included in the source file list.
 function(cxx_test name libs)
   cxx_test_with_flags("${name}" "${cxx_default}" "${libs}"
     "test/${name}.cc" ${ARGN})
 endfunction()
 
 # py_test(name)
 #
 # creates a Python test with the given name whose main module is in
 # test/name.py.  It does nothing if Python is not installed.
 function(py_test name)
   if (PYTHONINTERP_FOUND)
     if (${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION} GREATER 3.1)
       if (CMAKE_CONFIGURATION_TYPES)
 	# Multi-configuration build generators as for Visual Studio save
 	# output in a subdirectory of CMAKE_CURRENT_BINARY_DIR (Debug,
 	# Release etc.), so we have to provide it here.
         add_test(
           NAME ${name}
           COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
               --build_dir=${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG> ${ARGN})
       else (CMAKE_CONFIGURATION_TYPES)
 	# Single-configuration build generators like Makefile generators
 	# don't have subdirs below CMAKE_CURRENT_BINARY_DIR.
         add_test(
           NAME ${name}
           COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
               --build_dir=${CMAKE_CURRENT_BINARY_DIR} ${ARGN})
       endif (CMAKE_CONFIGURATION_TYPES)
     else (${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION} GREATER 3.1)
       # ${CMAKE_CURRENT_BINARY_DIR} is known at configuration time, so we can
       # directly bind it from cmake. ${CTEST_CONFIGURATION_TYPE} is known
       # only at ctest runtime (by calling ctest -c <Configuration>), so
       # we have to escape $ to delay variable substitution here.
       add_test(
         ${name}
         ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py
           --build_dir=${CMAKE_CURRENT_BINARY_DIR}/\${CTEST_CONFIGURATION_TYPE} ${ARGN})
     endif (${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION} GREATER 3.1)
   endif(PYTHONINTERP_FOUND)
 endfunction()
diff --git a/googletest/docs/XcodeGuide.md b/googletest/docs/XcodeGuide.md
index 117265c5..1c60a33d 100644
--- a/googletest/docs/XcodeGuide.md
+++ b/googletest/docs/XcodeGuide.md
@@ -1,93 +1,93 @@
 
 
 This guide will explain how to use the Google Testing Framework in your Xcode projects on Mac OS X. This tutorial begins by quickly explaining what to do for experienced users. After the quick start, the guide provides additional explanation about each step.
 
 # Quick Start #
 
 Here is the quick guide for using Google Test in your Xcode project.
 
-  1. Download the source from the [website](http://code.google.com/p/googletest) using this command: `svn checkout http://googletest.googlecode.com/svn/trunk/ googletest-read-only`.
+  1. Download the source from the [website](https://github.com/google/googletest) using this command: `svn checkout http://googletest.googlecode.com/svn/trunk/ googletest-read-only`.
   1. Open up the `gtest.xcodeproj` in the `googletest-read-only/xcode/` directory and build the gtest.framework.
   1. Create a new "Shell Tool" target in your Xcode project called something like "UnitTests".
   1. Add the gtest.framework to your project and add it to the "Link Binary with Libraries" build phase of "UnitTests".
   1. Add your unit test source code to the "Compile Sources" build phase of "UnitTests".
   1. Edit the "UnitTests" executable and add an environment variable named "DYLD\_FRAMEWORK\_PATH" with a value equal to the path of the directory containing the gtest.framework, relative to the compiled executable.
   1. Build and Go.
 
 The following sections further explain each of the steps listed above, describing in more detail how to complete each one, including some variations.
 
 # Get the Source #
 
-Currently, the gtest.framework discussed here isn't available in a tagged release of Google Test, it is only available in the trunk. As explained at the Google Test [site](http://code.google.com/p/googletest/source/checkout">svn), you can get the code from anonymous SVN with this command:
+Currently, the gtest.framework discussed here isn't available in a tagged release of Google Test, it is only available in the trunk. As explained at the Google Test [site](https://github.com/google/googletest), you can get the code from anonymous SVN with this command:
 
 ```
 svn checkout http://googletest.googlecode.com/svn/trunk/ googletest-read-only
 ```
 
 Alternatively, if you are working with Subversion in your own code base, you can add Google Test as an external dependency to your own Subversion repository. By following this approach, everyone who checks out your svn repository will also receive a copy of Google Test (a specific version, if you wish) without having to check it out explicitly. This makes the setup of your project simpler and reduces the copied code in the repository.
 
 To use `svn:externals`, decide where you would like to have the external source reside. You might choose to put the external source inside the trunk, because you want it to be part of the branch when you make a release. However, keeping it outside the trunk in a version-tagged directory called something like `third-party/googletest/1.0.1` is another option. Once the location is established, use `svn propedit svn:externals _directory_` to set the svn:externals property on a directory in your repository. This directory won't contain the code itself, but will be its versioned parent directory.
 
-The command `svn propedit` will bring up your Subversion editor, making editing the long, (potentially multi-line) property simpler. This same method can be used to check out a tagged branch, by using the appropriate URL (e.g. `http://googletest.googlecode.com/svn/tags/release-1.0.1`). Additionally, the svn:externals property allows the specification of a particular revision of the trunk with the `-r_##_` option (e.g. `externals/src/googletest -r60 http://googletest.googlecode.com/svn/trunk`).
+The command `svn propedit` will bring up your Subversion editor, making editing the long (potentially multi-line) property simpler. This same method can be used to check out a tagged branch by using the appropriate URL (e.g. `https://github.com/google/googletest/releases/tag/release-1.0.1`). Additionally, the svn:externals property allows the specification of a particular revision of the trunk with the `-r_##_` option (e.g. `externals/src/googletest -r60 http://googletest.googlecode.com/svn/trunk`).
 
 Here is an example of using the svn:externals properties on a trunk (read via `svn propget`) of a project. This value checks out a copy of Google Test into the `trunk/externals/src/googletest/` directory.
 
 ```
 [Computer:svn] user$ svn propget svn:externals trunk
 externals/src/googletest http://googletest.googlecode.com/svn/trunk
 ```
 
 # Add the Framework to Your Project #
 
 The next step is to build and add the gtest.framework to your own project. This guide describes two common ways below.
 
   * **Option 1** --- The simplest way to add Google Test to your own project is to open gtest.xcodeproj (found in the xcode/ directory of the Google Test trunk) and build the framework manually. Then, add the built framework into your project using "Add->Existing Framework..." from the context menu or "Project->Add..." from the main menu. The gtest.framework is relocatable and contains the headers and object code that you'll need to make tests. This method requires rebuilding every time you upgrade Google Test in your project.
   * **Option 2** --- If you are going to be living off the trunk of Google Test, incorporating its latest features into your unit tests (or are a Google Test developer yourself), you'll want to rebuild the framework every time the source updates. To do this, you'll need to add the gtest.xcodeproj file, not the framework itself, to your own Xcode project. Then, from the build products that are revealed by the project's disclosure triangle, you can find the gtest.framework, which can be added to your targets (discussed below).
 
 # Make a Test Target #
 
 To start writing tests, make a new "Shell Tool" target. This target template is available under BSD, Cocoa, or Carbon. Add your unit test source code to the "Compile Sources" build phase of the target.
 
 Next, you'll want to add gtest.framework in two different ways, depending upon which option you chose above.
 
   * **Option 1** --- During compilation, Xcode will need to know that you are linking against the gtest.framework. Add the gtest.framework to the "Link Binary with Libraries" build phase of your test target. This will include the Google Test headers in your header search path, and will tell the linker where to find the library.
   * **Option 2** --- If you're working out of the trunk, you'll also want to add gtest.framework to the "Link Binary with Libraries" build phase of your test target. In addition, you'll want to add the gtest.framework as a dependency to your unit test target. This way, Xcode will make sure that gtest.framework is up to date every time you build your target. Finally, if you don't share build directories with Google Test, you'll have to copy the gtest.framework into your own build products directory using a "Run Script" build phase.
 
 # Set Up the Executable Run Environment #
 
 Since the unit test executable is a shell tool, it doesn't have a bundle with a `Contents/Frameworks` directory, in which to place gtest.framework. Instead, the dynamic linker must be told at runtime to search for the framework in another location. This can be accomplished by setting the "DYLD\_FRAMEWORK\_PATH" environment variable in the "Edit Active Executable ..." Arguments tab, under "Variables to be set in the environment:". The path for this value is the path (relative or absolute) of the directory containing the gtest.framework.
 
 If you haven't set up the DYLD\_FRAMEWORK\_PATH correctly, you might get a message like this:
 
 ```
 [Session started at 2008-08-15 06:23:57 -0600.]
   dyld: Library not loaded: @loader_path/../Frameworks/gtest.framework/Versions/A/gtest
     Referenced from: /Users/username/Documents/Sandbox/gtestSample/build/Debug/WidgetFrameworkTest
     Reason: image not found
 ```
 
 To correct this problem, go to the directory containing the executable named in the "Referenced from:" value in the error message above. Then, with the terminal in this location, find the relative path to the directory containing the gtest.framework. That is the value you'll need to set as the DYLD\_FRAMEWORK\_PATH.
 
 # Build and Go #
 
 Now, when you click "Build and Go", the test will be executed, dumping out something like this:
 
 ```
 [Session started at 2008-08-06 06:36:13 -0600.]
 [==========] Running 2 tests from 1 test case.
 [----------] Global test environment set-up.
 [----------] 2 tests from WidgetInitializerTest
 [ RUN      ] WidgetInitializerTest.TestConstructor
 [       OK ] WidgetInitializerTest.TestConstructor
 [ RUN      ] WidgetInitializerTest.TestConversion
 [       OK ] WidgetInitializerTest.TestConversion
 [----------] Global test environment tear-down
 [==========] 2 tests from 1 test case ran.
 [  PASSED  ] 2 tests.
 
 The Debugger has exited with status 0.  
 ```
 
 # Summary #
 
-Unit testing is a valuable way to ensure your data model stays valid even during rapid development or refactoring. The Google Testing Framework is a great unit testing framework for C and C++ which integrates well with an Xcode development environment.
\ No newline at end of file
+Unit testing is a valuable way to ensure your data model stays valid even during rapid development or refactoring. The Google Testing Framework is a great unit testing framework for C and C++ which integrates well with an Xcode development environment.
diff --git a/googletest/docs/advanced.md b/googletest/docs/advanced.md
index 6883784d..feb8ad66 100644
--- a/googletest/docs/advanced.md
+++ b/googletest/docs/advanced.md
@@ -1,2536 +1,2536 @@
 # Advanced googletest Topics
 
 
 ## Introduction
 
 Now that you have read the [googletest Primer](primer) and learned how to write
 tests using googletest, it's time to learn some new tricks. This document will
 show you more assertions as well as how to construct complex failure messages,
 propagate fatal failures, reuse and speed up your test fixtures, and use various
 flags with your tests.
 
 ## More Assertions
 
 This section covers some less frequently used, but still significant,
 assertions.
 
 ### Explicit Success and Failure
 
 These three assertions do not actually test a value or expression. Instead, they
 generate a success or failure directly. Like the macros that actually perform a
 test, you may stream a custom failure message into them.
 
 ```c++
 SUCCEED();
 ```
 
 Generates a success. This does **NOT** make the overall test succeed. A test is
 considered successful only if none of its assertions fail during its execution.
 
 NOTE: `SUCCEED()` is purely documentary and currently doesn't generate any
 user-visible output. However, we may add `SUCCEED()` messages to googletest's
 output in the future.
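 
 For instance, here is a minimal sketch (the test suite and test names are
 invented for illustration) that streams a documentary message into `SUCCEED()`:
 
 ```c++
 TEST(ParserTest, HandlesEmptyInput) {
   // Purely documentary: the test still passes only because no assertion fails.
   SUCCEED() << "Reached the empty-input branch as expected.";
 }
 ```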
 
 ```c++
 FAIL();
 ADD_FAILURE();
 ADD_FAILURE_AT("file_path", line_number);
 ```
 
 `FAIL()` generates a fatal failure, while `ADD_FAILURE()` and `ADD_FAILURE_AT()`
 generate a nonfatal failure. These are useful when control flow, rather than a
 Boolean expression, determines the test's success or failure. For example, you
 might want to write something like:
 
 ```c++
 switch(expression) {
   case 1:
      ... some checks ...
   case 2:
      ... some other checks ...
   default:
      FAIL() << "We shouldn't get here.";
 }
 ```
 
 NOTE: you can only use `FAIL()` in functions that return `void`. See the
 [Assertion Placement section](#assertion-placement) for more information.
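 
 As a small sketch of why this matters (the helper and test names below are
 invented for illustration), `FAIL()` can be used in a `void` helper called from
 a test, but not in a function that returns a value:
 
 ```c++
 void CheckPositive(int n) {
   if (n <= 0) {
     FAIL() << "Expected a positive value, got " << n;  // OK: void return type.
   }
 }
 
 TEST(SampleTest, UsesVoidHelper) {
   CheckPositive(42);
 }
 ```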
 
 **Availability**: Linux, Windows, Mac.
 
 ### Exception Assertions
 
 These are for verifying that a piece of code throws (or does not throw) an
 exception of the given type:
 
 Fatal assertion                            | Nonfatal assertion                         | Verifies
 ------------------------------------------ | ------------------------------------------ | --------
 `ASSERT_THROW(statement, exception_type);` | `EXPECT_THROW(statement, exception_type);` | `statement` throws an exception of the given type
 `ASSERT_ANY_THROW(statement);`             | `EXPECT_ANY_THROW(statement);`             | `statement` throws an exception of any type
 `ASSERT_NO_THROW(statement);`              | `EXPECT_NO_THROW(statement);`              | `statement` doesn't throw any exception
 
 Examples:
 
 ```c++
 ASSERT_THROW(Foo(5), bar_exception);
 
 EXPECT_NO_THROW({
   int n = 5;
   Bar(&n);
 });
 ```
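 
 As an additional sketch (using the standard library's `std::out_of_range`
 rather than anything defined above), these assertions read naturally inside a
 test body:
 
 ```c++
 #include <stdexcept>
 #include <vector>
 
 TEST(VectorTest, AtChecksBounds) {
   std::vector<int> v(3);
   EXPECT_THROW(v.at(10), std::out_of_range);  // Out-of-range index throws.
   EXPECT_NO_THROW(v.at(2));                   // In-range access does not throw.
 }
 ```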
 
 **Availability**: Linux, Windows, Mac; requires exceptions to be enabled in the
 build environment (note that `google3` **disables** exceptions).
 
 ### Predicate Assertions for Better Error Messages
 
 Even though googletest has a rich set of assertions, they can never be complete,
 as it's impossible (and not a good idea) to anticipate all scenarios a user might
 run into. Therefore, sometimes a user has to use `EXPECT_TRUE()` to check a
 complex expression, for lack of a better macro. This has the problem of not
 showing you the values of the parts of the expression, making it hard to
 understand what went wrong. As a workaround, some users choose to construct the
 failure message by themselves, streaming it into `EXPECT_TRUE()`. However, this
 is awkward, especially when the expression has side-effects or is expensive to
 evaluate.
 
 googletest gives you three different options to solve this problem:
 
 #### Using an Existing Boolean Function
 
 If you already have a function or functor that returns `bool` (or a type that
 can be implicitly converted to `bool`), you can use it in a *predicate
 assertion* to get the function arguments printed for free:
 
 | Fatal assertion                    | Nonfatal assertion                 | Verifies                    |
 | ---------------------------------- | ---------------------------------- | --------------------------- |
 | `ASSERT_PRED1(pred1, val1);`       | `EXPECT_PRED1(pred1, val1);`       | `pred1(val1)` is true       |
 | `ASSERT_PRED2(pred2, val1, val2);` | `EXPECT_PRED2(pred2, val1, val2);` | `pred2(val1, val2)` is true |
 | `...`                              | `...`                              | ...                         |
 
 In the above, `predn` is an `n`-ary predicate function or functor, where `val1`,
 `val2`, ..., and `valn` are its arguments. The assertion succeeds if the
 predicate returns `true` when applied to the given arguments, and fails
 otherwise. When the assertion fails, it prints the value of each argument. In
 either case, the arguments are evaluated exactly once.
 
 Here's an example. Given
 
 ```c++
 // Returns true iff m and n have no common divisors except 1.
 bool MutuallyPrime(int m, int n) { ... }
 
 const int a = 3;
 const int b = 4;
 const int c = 10;
 ```
 
 the assertion
 
 ```c++
   EXPECT_PRED2(MutuallyPrime, a, b);
 ```
 
 will succeed, while the assertion
 
 ```c++
   EXPECT_PRED2(MutuallyPrime, b, c);
 ```
 
 will fail with the message
 
 ```none
 MutuallyPrime(b, c) is false, where
 b is 4
 c is 10
 ```
 
 > NOTE:
 >
 > 1.  If you see a compiler error "no matching function to call" when using
 >     `ASSERT_PRED*` or `EXPECT_PRED*`, please see
 >     [this](faq#OverloadedPredicate) for how to resolve it.
 > 1.  Currently we only provide predicate assertions of arity <= 5. If you need
->     a higher-arity assertion, let [us](http://g/opensource-gtest) know.
+>     a higher-arity assertion, let [us](https://github.com/google/googletest/issues) know.
 
 **Availability**: Linux, Windows, Mac.
 
 #### Using a Function That Returns an AssertionResult
 
 While `EXPECT_PRED*()` and friends are handy for a quick job, the syntax is not
 satisfactory: you have to use different macros for different arities, and it
 feels more like Lisp than C++. The `::testing::AssertionResult` class solves
 this problem.
 
 An `AssertionResult` object represents the result of an assertion (whether it's
 a success or a failure, and an associated message). You can create an
 `AssertionResult` using one of these factory functions:
 
 ```c++
 namespace testing {
 
 // Returns an AssertionResult object to indicate that an assertion has
 // succeeded.
 AssertionResult AssertionSuccess();
 
 // Returns an AssertionResult object to indicate that an assertion has
 // failed.
 AssertionResult AssertionFailure();
 
 }
 ```
 
 You can then use the `<<` operator to stream messages to the `AssertionResult`
 object.
 
 To provide more readable messages in Boolean assertions (e.g. `EXPECT_TRUE()`),
 write a predicate function that returns `AssertionResult` instead of `bool`. For
 example, if you define `IsEven()` as:
 
 ```c++
 ::testing::AssertionResult IsEven(int n) {
   if ((n % 2) == 0)
      return ::testing::AssertionSuccess();
   else
      return ::testing::AssertionFailure() << n << " is odd";
 }
 ```
 
 instead of:
 
 ```c++
 bool IsEven(int n) {
   return (n % 2) == 0;
 }
 ```
 
 the failed assertion `EXPECT_TRUE(IsEven(Fib(4)))` will print:
 
 ```none
 Value of: IsEven(Fib(4))
   Actual: false (3 is odd)
 Expected: true
 ```
 
 instead of a more opaque
 
 ```none
 Value of: IsEven(Fib(4))
   Actual: false
 Expected: true
 ```
 
 If you want informative messages in `EXPECT_FALSE` and `ASSERT_FALSE` as well
 (one third of Boolean assertions in the Google code base are negative ones), and
 are fine with making the predicate slower in the success case, you can supply a
 success message:
 
 ```c++
 ::testing::AssertionResult IsEven(int n) {
   if ((n % 2) == 0)
      return ::testing::AssertionSuccess() << n << " is even";
   else
      return ::testing::AssertionFailure() << n << " is odd";
 }
 ```
 
 Then the statement `EXPECT_FALSE(IsEven(Fib(6)))` will print
 
 ```none
   Value of: IsEven(Fib(6))
      Actual: true (8 is even)
   Expected: false
 ```
 
 **Availability**: Linux, Windows, Mac.
 
 #### Using a Predicate-Formatter
 
 If you find the default message generated by `(ASSERT|EXPECT)_PRED*` and
 `(ASSERT|EXPECT)_(TRUE|FALSE)` unsatisfactory, or some arguments to your
 predicate do not support streaming to `ostream`, you can instead use the
 following *predicate-formatter assertions* to *fully* customize how the message
 is formatted:
 
 Fatal assertion                                  | Nonfatal assertion                               | Verifies
 ------------------------------------------------ | ------------------------------------------------ | --------
 `ASSERT_PRED_FORMAT1(pred_format1, val1);`       | `EXPECT_PRED_FORMAT1(pred_format1, val1);`       | `pred_format1(val1)` is successful
 `ASSERT_PRED_FORMAT2(pred_format2, val1, val2);` | `EXPECT_PRED_FORMAT2(pred_format2, val1, val2);` | `pred_format2(val1, val2)` is successful
 `...`                                            | `...`                                            | ...
 
 The difference between this and the previous group of macros is that instead of
 a predicate, `(ASSERT|EXPECT)_PRED_FORMAT*` take a *predicate-formatter*
 (`pred_formatn`), which is a function or functor with the signature:
 
 ```c++
 ::testing::AssertionResult PredicateFormattern(const char* expr1,
                                                const char* expr2,
                                                ...
                                                const char* exprn,
                                                T1 val1,
                                                T2 val2,
                                                ...
                                                Tn valn);
 ```
 
 where `val1`, `val2`, ..., and `valn` are the values of the predicate arguments,
 and `expr1`, `expr2`, ..., and `exprn` are the corresponding expressions as they
 appear in the source code. The types `T1`, `T2`, ..., and `Tn` can be either
 value types or reference types. For example, if an argument has type `Foo`, you
 can declare it as either `Foo` or `const Foo&`, whichever is appropriate.
 
 As an example, let's improve the failure message in `MutuallyPrime()`, which was
 used with `EXPECT_PRED2()`:
 
 ```c++
 // Returns the smallest prime common divisor of m and n,
 // or 1 when m and n are mutually prime.
 int SmallestPrimeCommonDivisor(int m, int n) { ... }
 
 // A predicate-formatter for asserting that two integers are mutually prime.
 ::testing::AssertionResult AssertMutuallyPrime(const char* m_expr,
                                                const char* n_expr,
                                                int m,
                                                int n) {
   if (MutuallyPrime(m, n)) return ::testing::AssertionSuccess();
 
   return ::testing::AssertionFailure() << m_expr << " and " << n_expr
       << " (" << m << " and " << n << ") are not mutually prime, "
       << "as they have a common divisor " << SmallestPrimeCommonDivisor(m, n);
 }
 ```
 
 With this predicate-formatter, we can use
 
 ```c++
   EXPECT_PRED_FORMAT2(AssertMutuallyPrime, b, c);
 ```
 
 to generate the message
 
 ```none
 b and c (4 and 10) are not mutually prime, as they have a common divisor 2.
 ```
 
 As you may have realized, many of the built-in assertions we introduced earlier
 are special cases of `(EXPECT|ASSERT)_PRED_FORMAT*`. In fact, most of them are
 indeed defined using `(EXPECT|ASSERT)_PRED_FORMAT*`.
 
 **Availability**: Linux, Windows, Mac.
 
 ### Floating-Point Comparison
 
 Comparing floating-point numbers is tricky. Due to round-off errors, it is very
 unlikely that two floating-point values will match exactly. Therefore,
 `ASSERT_EQ`'s naive comparison usually doesn't work. And since floating-point
 values can have a wide value range, no single fixed error bound works. It's
 better to compare by a fixed relative error bound, except for values close to 0,
 due to the loss of precision there.
 
 In general, for floating-point comparison to make sense, the user needs to
 carefully choose the error bound. If they don't want or care to, comparing in
 terms of Units in the Last Place (ULPs) is a good default, and googletest
 provides assertions to do this. Full details about ULPs are quite long; if you
 want to learn more, see
 [here](https://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/).
 
 #### Floating-Point Macros
 
 | Fatal assertion                 | Nonfatal assertion              | Verifies                                 |
 | ------------------------------- | ------------------------------- | ---------------------------------------- |
 | `ASSERT_FLOAT_EQ(val1, val2);`  | `EXPECT_FLOAT_EQ(val1, val2);`  | the two `float` values are almost equal  |
 | `ASSERT_DOUBLE_EQ(val1, val2);` | `EXPECT_DOUBLE_EQ(val1, val2);` | the two `double` values are almost equal |
 
 By "almost equal" we mean the values are within 4 ULP's from each other.
 
 NOTE: `CHECK_DOUBLE_EQ()` in `base/logging.h` uses a fixed absolute error bound,
 so its result may differ from that of the googletest macros. That macro is
 unsafe and has been deprecated. Please don't use it any more.
 
 The following assertions allow you to choose the acceptable error bound:
 
 | Fatal assertion                       | Nonfatal assertion                    | Verifies                                                                          |
 | ------------------------------------- | ------------------------------------- | --------------------------------------------------------------------------------- |
 | `ASSERT_NEAR(val1, val2, abs_error);` | `EXPECT_NEAR(val1, val2, abs_error);` | the difference between `val1` and `val2` doesn't exceed the given absolute error   |
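 
 As a quick illustration (a minimal sketch, not taken from the original guide),
 the ULP-based macro tolerates ordinary rounding error, while `EXPECT_NEAR` lets
 you state the tolerance explicitly:
 
 ```c++
 TEST(FloatingPointTest, AlmostEqual) {
   // 0.1 + 0.2 is not exactly 0.3 in binary floating point, but the two values
   // are within 4 ULPs of each other, so EXPECT_DOUBLE_EQ accepts them.
   EXPECT_DOUBLE_EQ(0.3, 0.1 + 0.2);
 
   // When a specific tolerance matters, state it as an absolute error bound.
   EXPECT_NEAR(3.14159, 3.141, 0.001);
 }
 ```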
 
 **Availability**: Linux, Windows, Mac.
 
 #### Floating-Point Predicate-Format Functions
 
 Some floating-point operations are useful, but not that often used. In order to
 avoid an explosion of new macros, we provide them as predicate-format functions
 that can be used in predicate assertion macros (e.g. `EXPECT_PRED_FORMAT2`,
 etc).
 
 ```c++
 EXPECT_PRED_FORMAT2(::testing::FloatLE, val1, val2);
 EXPECT_PRED_FORMAT2(::testing::DoubleLE, val1, val2);
 ```
 
 Verifies that `val1` is less than, or almost equal to, `val2`. You can replace
 `EXPECT_PRED_FORMAT2` above with `ASSERT_PRED_FORMAT2`.
 
 **Availability**: Linux, Windows, Mac.
 
 ### Asserting Using gMock Matchers
 
-Google-developed C++ mocking framework [gMock](http://go/gmock) comes with a
+Google-developed C++ mocking framework [gMock](../../googlemock) comes with a
 library of matchers for validating arguments passed to mock objects. A gMock
 *matcher* is basically a predicate that knows how to describe itself. It can be
 used in these assertion macros:
 
 | Fatal assertion                | Nonfatal assertion             | Verifies              |
 | ------------------------------ | ------------------------------ | --------------------- |
 | `ASSERT_THAT(value, matcher);` | `EXPECT_THAT(value, matcher);` | value matches matcher |
 
 For example, `StartsWith(prefix)` is a matcher that matches a string starting
 with `prefix`, and you can write:
 
 ```c++
 using ::testing::StartsWith;
 ...
     // Verifies that Foo() returns a string starting with "Hello".
     EXPECT_THAT(Foo(), StartsWith("Hello"));
 ```
 
-Read this [recipe](http://go/gmockguide#using-matchers-in-gunit-assertions) in
+Read this [recipe](../../googlemock/docs/CookBook.md#using-matchers-in-google-test-assertions) in
 the gMock Cookbook for more details.
 
 gMock has a rich set of matchers. You can do many things googletest cannot do
 alone with them. For a list of matchers gMock provides, read
-[this](http://go/gmockguide#using-matchers). Especially useful among them are
-some [protocol buffer matchers](http://go/protomatchers). It's easy to write
-your [own matchers](http://go/gmockguide#NewMatchers) too.
+[this](../../googlemock/docs/CookBook.md#using-matchers). Especially useful among them are
+some [protocol buffer matchers](https://github.com/google/nucleus/blob/master/nucleus/testing/protocol-buffer-matchers.h). It's easy to write
+your [own matchers](../../googlemock/docs/CookBook.md#writing-new-matchers-quickly) too.
 
 For example, you can use gMock's
-[EqualsProto](http://cs/#piper///depot/google3/testing/base/public/gmock_utils/protocol-buffer-matchers.h)
+[EqualsProto](https://github.com/google/nucleus/blob/master/nucleus/testing/protocol-buffer-matchers.h)
 to compare protos in your tests:
 
 ```c++
 #include "testing/base/public/gmock.h"
 using ::testing::EqualsProto;
 ...
     EXPECT_THAT(actual_proto, EqualsProto("foo: 123 bar: 'xyz'"));
     EXPECT_THAT(*actual_proto_ptr, EqualsProto(expected_proto));
 ```
 
 gMock is bundled with googletest, so you don't need to add any build dependency
 in order to take advantage of this. Just include `"testing/base/public/gmock.h"`
 and you're ready to go.
 
 **Availability**: Linux, Windows, and Mac.
 
 ### More String Assertions
 
 (Please read the [previous](#AssertThat) section first if you haven't.)
 
-You can use the gMock [string matchers](http://go/gmockguide#string-matchers)
+You can use the gMock [string matchers](../../googlemock/docs/CheatSheet.md#string-matchers)
 with `EXPECT_THAT()` or `ASSERT_THAT()` to do more string comparison tricks
 (sub-string, prefix, suffix, regular expression, etc.). For example,
 
 ```c++
 using ::testing::HasSubstr;
 using ::testing::MatchesRegex;
 ...
   ASSERT_THAT(foo_string, HasSubstr("needle"));
   EXPECT_THAT(bar_string, MatchesRegex("\\w*\\d+"));
 ```
 
 **Availability**: Linux, Windows, Mac.
 
 If the string contains a well-formed HTML or XML document, you can check whether
 its DOM tree matches an [XPath
 expression](http://www.w3.org/TR/xpath/#contents):
 
 ```c++
 // Currently still in //template/prototemplate/testing:xpath_matcher
 #include "template/prototemplate/testing/xpath_matcher.h"
 using prototemplate::testing::MatchesXPath;
 EXPECT_THAT(html_string, MatchesXPath("//a[text()='click here']"));
 ```
 
 **Availability**: Linux.
 
 ### Windows HRESULT assertions
 
 These assertions test for `HRESULT` success or failure.
 
 Fatal assertion                        | Nonfatal assertion                     | Verifies
 -------------------------------------- | -------------------------------------- | --------
 `ASSERT_HRESULT_SUCCEEDED(expression)` | `EXPECT_HRESULT_SUCCEEDED(expression)` | `expression` is a success `HRESULT`
 `ASSERT_HRESULT_FAILED(expression)`    | `EXPECT_HRESULT_FAILED(expression)`    | `expression` is a failure `HRESULT`
 
 The generated output contains the human-readable error message associated with
 the `HRESULT` code returned by `expression`.
 
 You might use them like this:
 
 ```c++
 CComPtr<IShellDispatch2> shell;
 ASSERT_HRESULT_SUCCEEDED(shell.CoCreateInstance(L"Shell.Application"));
 CComVariant empty;
 ASSERT_HRESULT_SUCCEEDED(shell->ShellExecute(CComBSTR(url), empty, empty, empty, empty));
 ```
 
 **Availability**: Windows.
 
 ### Type Assertions
 
 You can call the function
 
 ```c++
 ::testing::StaticAssertTypeEq<T1, T2>();
 ```
 
 to assert that types `T1` and `T2` are the same. The function does nothing if
 the assertion is satisfied. If the types are different, the function call will
 fail to compile, and the compiler error message will likely (depending on the
 compiler) show you the actual values of `T1` and `T2`. This is mainly useful
 inside template code.
 
 **Caveat**: When used inside a member function of a class template or a function
 template, `StaticAssertTypeEq<T1, T2>()` is effective only if the function is
 instantiated. For example, given:
 
 ```c++
 template <typename T> class Foo {
  public:
   void Bar() { ::testing::StaticAssertTypeEq<int, T>(); }
 };
 ```
 
 the code:
 
 ```c++
 void Test1() { Foo<bool> foo; }
 ```
 
 will not generate a compiler error, as `Foo<bool>::Bar()` is never actually
 instantiated. Instead, you need:
 
 ```c++
 void Test2() { Foo<bool> foo; foo.Bar(); }
 ```
 
 to cause a compiler error.
 
 **Availability**: Linux, Windows, Mac.
 
 ### Assertion Placement
 
 You can use assertions in any C++ function. In particular, it doesn't have to be
 a method of the test fixture class. The one constraint is that assertions that
 generate a fatal failure (`FAIL*` and `ASSERT_*`) can only be used in
 void-returning functions. This is a consequence of Google's not using
 exceptions. By placing it in a non-void function you'll get a confusing compile
 error like `"error: void value not ignored as it ought to be"` or `"cannot
 initialize return object of type 'bool' with an rvalue of type 'void'"` or
 `"error: no viable conversion from 'void' to 'string'"`.
 
 If you need to use fatal assertions in a function that returns non-void, one
 option is to make the function return the value in an out parameter instead. For
 example, you can rewrite `T2 Foo(T1 x)` to `void Foo(T1 x, T2* result)`. You
 need to make sure that `*result` contains some sensible value even when the
 function returns prematurely. As the function now returns `void`, you can use
 any assertion inside of it.
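 
 For example, a hypothetical parser might be reworked like this (a sketch only;
 `ParseNumber()` and its behavior are made up for illustration):
 
 ```c++
 #include <string>
 
 // Before: int ParseNumber(const std::string& input);  // can't use ASSERT_* here
 
 // After: the result goes to an out parameter, so the function returns void and
 // fatal assertions are allowed inside it.
 void ParseNumber(const std::string& input, int* result) {
   *result = 0;  // Keep *result sensible even if we return early.
   ASSERT_FALSE(input.empty()) << "cannot parse an empty string";
   *result = std::stoi(input);
 }
 
 TEST(ParserTest, ParsesDigits) {
   int value = 0;
   ParseNumber("42", &value);
   EXPECT_EQ(42, value);
 }
 ```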
 
 If changing the function's type is not an option, you should just use assertions
 that generate non-fatal failures, such as `ADD_FAILURE*` and `EXPECT_*`.
 
 NOTE: Constructors and destructors are not considered void-returning functions,
 according to the C++ language specification, and so you may not use fatal
 assertions in them. You'll get a compilation error if you try. A simple
 workaround is to transfer the entire body of the constructor or destructor to a
 private void-returning method. However, you should be aware that a fatal
 assertion failure in a constructor does not terminate the current test, as your
 intuition might suggest; it merely returns from the constructor early, possibly
 leaving your object in a partially-constructed state. Likewise, a fatal
 assertion failure in a destructor may leave your object in a
 partially-destructed state. Use assertions carefully in these situations!
 
 ## Teaching googletest How to Print Your Values
 
 When a test assertion such as `EXPECT_EQ` fails, googletest prints the argument
 values to help you debug. It does this using a user-extensible value printer.
 
 This printer knows how to print built-in C++ types, native arrays, STL
 containers, and any type that supports the `<<` operator. For other types, it
 prints the raw bytes in the value and hopes that you, the user, can figure it out.
 
 As mentioned earlier, the printer is *extensible*. That means you can teach it
 to do a better job at printing your particular type than to dump the bytes. To
 do that, define `<<` for your type:
 
 ```c++
 // Streams are allowed only for logging.  Don't include this for
 // any other purpose.
 #include <ostream>
 
 namespace foo {
 
 class Bar {  // We want googletest to be able to print instances of this.
 ...
   // Create a free inline friend function.
   friend ::std::ostream& operator<<(::std::ostream& os, const Bar& bar) {
     return os << bar.DebugString();  // whatever needed to print bar to os
   }
 };
 
 // If you can't declare the function in the class it's important that the
 // << operator is defined in the SAME namespace that defines Bar.  C++'s look-up
 // rules rely on that.
 ::std::ostream& operator<<(::std::ostream& os, const Bar& bar) {
   return os << bar.DebugString();  // whatever needed to print bar to os
 }
 
 }  // namespace foo
 ```
 
 Sometimes, this might not be an option: your team may consider it bad style to
 have a `<<` operator for `Bar`, or `Bar` may already have a `<<` operator that
 doesn't do what you want (and you cannot change it). If so, you can instead
 define a `PrintTo()` function like this:
 
 ```c++
 // Streams are allowed only for logging.  Don't include this for
 // any other purpose.
 #include <ostream>
 
 namespace foo {
 
 class Bar {
   ...
   friend void PrintTo(const Bar& bar, ::std::ostream* os) {
     *os << bar.DebugString();  // whatever needed to print bar to os
   }
 };
 
 // If you can't declare the function in the class it's important that PrintTo()
 // is defined in the SAME namespace that defines Bar.  C++'s look-up rules rely
 // on that.
 void PrintTo(const Bar& bar, ::std::ostream* os) {
   *os << bar.DebugString();  // whatever needed to print bar to os
 }
 
 }  // namespace foo
 ```
 
 If you have defined both `<<` and `PrintTo()`, the latter will be used as far as
 googletest is concerned. This allows you to customize how the value appears in
 googletest's output without affecting code that relies on the behavior of its
 `<<` operator.
 
 If you want to print a value `x` using googletest's value printer yourself, just
 call `::testing::PrintToString(x)`, which returns an `std::string`:
 
 ```c++
 vector<pair<Bar, int> > bar_ints = GetBarIntVector();
 
 EXPECT_TRUE(IsCorrectBarIntVector(bar_ints))
     << "bar_ints = " << ::testing::PrintToString(bar_ints);
 ```
 
 ## Death Tests
 
 In many applications, there are assertions that can cause application failure if
 a condition is not met. These sanity checks, which ensure that the program is in
 a known good state, are there to fail at the earliest possible time after some
 program state is corrupted. If the assertion checks the wrong condition, then
 the program may proceed in an erroneous state, which could lead to memory
 corruption, security holes, or worse. Hence it is vitally important to test that
 such assertion statements work as expected.
 
 Since these precondition checks cause the processes to die, we call such tests
 _death tests_. More generally, any test that checks that a program terminates
 (except by throwing an exception) in an expected fashion is also a death test.
 
 
 Note that if a piece of code throws an exception, we don't consider it "death"
 for the purpose of death tests, as the caller of the code could catch the
 exception and avoid the crash. If you want to verify exceptions thrown by your
 code, see [Exception Assertions](#ExceptionAssertions).
 
 If you want to test `EXPECT_*()`/`ASSERT_*()` failures in your test code, see
 Catching Failures.
 
 ### How to Write a Death Test
 
 googletest has the following macros to support death tests:
 
 Fatal assertion                                | Nonfatal assertion                             | Verifies
 ---------------------------------------------- | ---------------------------------------------- | --------
 `ASSERT_DEATH(statement, regex);`              | `EXPECT_DEATH(statement, regex);`              | `statement` crashes with the given error
 `ASSERT_DEATH_IF_SUPPORTED(statement, regex);` | `EXPECT_DEATH_IF_SUPPORTED(statement, regex);` | if death tests are supported, verifies that `statement` crashes with the given error; otherwise verifies nothing
 `ASSERT_EXIT(statement, predicate, regex);`    | `EXPECT_EXIT(statement, predicate, regex);`    | `statement` exits with the given error, and its exit code matches `predicate`
 
 where `statement` is a statement that is expected to cause the process to die,
 `predicate` is a function or function object that evaluates an integer exit
 status, and `regex` is a (Perl) regular expression that the stderr output of
 `statement` is expected to match. Note that `statement` can be *any valid
 statement* (including *compound statement*) and doesn't have to be an
 expression.
 
 
 As usual, the `ASSERT` variants abort the current test function, while the
 `EXPECT` variants do not.
 
 > NOTE: We use the word "crash" here to mean that the process terminates with a
 > *non-zero* exit status code. There are two possibilities: either the process
 > has called `exit()` or `_exit()` with a non-zero value, or it may be killed by
 > a signal.
 >
 > This means that if `statement` terminates the process with a 0 exit code, it
 > is *not* considered a crash by `EXPECT_DEATH`. Use `EXPECT_EXIT` instead if
 > this is the case, or if you want to restrict the exit code more precisely.
 
 A predicate here must accept an `int` and return a `bool`. The death test
 succeeds only if the predicate returns `true`. googletest defines a few
 predicates that handle the most common cases:
 
 ```c++
 ::testing::ExitedWithCode(exit_code)
 ```
 
 This expression is `true` if the program exited normally with the given exit
 code.
 
 ```c++
 ::testing::KilledBySignal(signal_number)  // Not available on Windows.
 ```
 
 This expression is `true` if the program was killed by the given signal.
 
 The `*_DEATH` macros are convenient wrappers for `*_EXIT` that use a predicate
 that verifies the process' exit code is non-zero.
 
 Note that a death test only cares about three things:
 
 1.  does `statement` abort or exit the process?
 2.  (in the case of `ASSERT_EXIT` and `EXPECT_EXIT`) does the exit status
     satisfy `predicate`? Or (in the case of `ASSERT_DEATH` and `EXPECT_DEATH`)
     is the exit status non-zero? And
 3.  does the stderr output match `regex`?
 
 In particular, if `statement` generates an `ASSERT_*` or `EXPECT_*` failure, it
 will **not** cause the death test to fail, as googletest assertions don't abort
 the process.
 
 To write a death test, simply use one of the above macros inside your test
 function. For example,
 
 ```c++
 TEST(MyDeathTest, Foo) {
   // This death test uses a compound statement.
   ASSERT_DEATH({
     int n = 5;
     Foo(&n);
   }, "Error on line .* of Foo()");
 }
 
 TEST(MyDeathTest, NormalExit) {
   EXPECT_EXIT(NormalExit(), ::testing::ExitedWithCode(0), "Success");
 }
 
 TEST(MyDeathTest, KillMyself) {
   EXPECT_EXIT(KillMyself(), ::testing::KilledBySignal(SIGKILL),
               "Sending myself unblockable signal");
 }
 ```
 
 verifies that:
 
 *   calling `Foo(5)` causes the process to die with the given error message,
 *   calling `NormalExit()` causes the process to print `"Success"` to stderr and
     exit with exit code 0, and
 *   calling `KillMyself()` kills the process with signal `SIGKILL`.
 
 The test function body may contain other assertions and statements as well, if
 necessary.
 
 ### Death Test Naming
 
 IMPORTANT: We strongly recommend that you follow the convention of naming your
 **test case** (not test) `*DeathTest` when it contains a death test, as
 demonstrated in the above example. The [Death Tests And
 Threads](#death-tests-and-threads) section below explains why.
 
 If a test fixture class is shared by normal tests and death tests, you can use
 `using` or `typedef` to introduce an alias for the fixture class and avoid
 duplicating its code:
 
 ```c++
 class FooTest : public ::testing::Test { ... };
 
 using FooDeathTest = FooTest;
 
 TEST_F(FooTest, DoesThis) {
   // normal test
 }
 
 TEST_F(FooDeathTest, DoesThat) {
   // death test
 }
 ```
 
 **Availability**: Linux, Windows (requires MSVC 8.0 or above), Cygwin, and Mac
 
 ### Regular Expression Syntax
 
 
 On POSIX systems (e.g. Linux, Cygwin, and Mac), googletest uses the
 [POSIX extended regular expression](http://www.opengroup.org/onlinepubs/009695399/basedefs/xbd_chap09.html#tag_09_04)
 syntax. To learn about this syntax, you may want to read this
 [Wikipedia entry](http://en.wikipedia.org/wiki/Regular_expression#POSIX_Extended_Regular_Expressions).
 
 On Windows, googletest uses its own simple regular expression implementation. It
 lacks many features. For example, we don't support union (`"x|y"`), grouping
 (`"(xy)"`), brackets (`"[xy]"`), and repetition count (`"x{5,7}"`), among
 others. Below is what we do support (`A` denotes a literal character, period
 (`.`), or a single `\\ ` escape sequence; `x` and `y` denote regular
 expressions):
 
 Expression | Meaning
 ---------- | --------------------------------------------------------------
 `c`        | matches any literal character `c`
 `\\d`      | matches any decimal digit
 `\\D`      | matches any character that's not a decimal digit
 `\\f`      | matches `\f`
 `\\n`      | matches `\n`
 `\\r`      | matches `\r`
 `\\s`      | matches any ASCII whitespace, including `\n`
 `\\S`      | matches any character that's not a whitespace
 `\\t`      | matches `\t`
 `\\v`      | matches `\v`
 `\\w`      | matches any letter, `_`, or decimal digit
 `\\W`      | matches any character that `\\w` doesn't match
 `\\c`      | matches any literal character `c`, which must be a punctuation
 `.`        | matches any single character except `\n`
 `A?`       | matches 0 or 1 occurrences of `A`
 `A*`       | matches 0 or many occurrences of `A`
 `A+`       | matches 1 or many occurrences of `A`
 `^`        | matches the beginning of a string (not that of each line)
 `$`        | matches the end of a string (not that of each line)
 `xy`       | matches `x` followed by `y`
 
 To help you determine which capability is available on your system, googletest
 defines macros to govern which regular expression it is using. The macros are
 `GTEST_USES_PCRE=1`, `GTEST_USES_SIMPLE_RE=1`, or `GTEST_USES_POSIX_RE=1`. If
 you want your death tests to work in all cases, you can either `#if` on these
 macros or use the more limited syntax only.
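 
 For instance (a sketch; `DoCrash()` stands in for whatever code is expected to
 die), you can pick the pattern based on which implementation is in use:
 
 ```c++
 TEST(MyDeathTest, MatchesErrorMessage) {
 #if GTEST_USES_POSIX_RE
   // Full POSIX extended syntax is available (alternation, brackets, etc.).
   EXPECT_DEATH(DoCrash(), "error: (disk|network) failure");
 #else
   // Stick to the limited syntax that the simple implementation supports.
   EXPECT_DEATH(DoCrash(), "error: \\w+ failure");
 #endif
 }
 ```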
 
 ### How It Works
 
 Under the hood, `ASSERT_EXIT()` spawns a new process and executes the death test
 statement in that process. The details of how precisely that happens depend on
 the platform and the variable `::testing::GTEST_FLAG(death_test_style)` (which is
 initialized from the command-line flag `--gtest_death_test_style`).
 
 *   On POSIX systems, `fork()` (or `clone()` on Linux) is used to spawn the
     child, after which:
     *   If the variable's value is `"fast"`, the death test statement is
         immediately executed.
     *   If the variable's value is `"threadsafe"`, the child process re-executes
         the unit test binary just as it was originally invoked, but with some
         extra flags to cause just the single death test under consideration to
         be run.
 *   On Windows, the child is spawned using the `CreateProcess()` API, and
     re-executes the binary to cause just the single death test under
     consideration to be run - much like the `threadsafe` mode on POSIX.
 
 Other values for the variable are illegal and will cause the death test to fail.
 Currently, the flag's default value is `"fast"`. However, we reserve the right
 to change it in the future; therefore, your tests should not depend on this. In
 either case, the parent process waits for the child process to complete, and
 checks that
 
 1.  the child's exit status satisfies the predicate, and
 2.  the child's stderr matches the regular expression.
 
 If the death test statement runs to completion without dying, the child process
 will nonetheless terminate, and the assertion fails.
 
 ### Death Tests And Threads
 
 The reason for the two death test styles has to do with thread safety. Due to
 well-known problems with forking in the presence of threads, death tests should
 be run in a single-threaded context. Sometimes, however, it isn't feasible to
 arrange that kind of environment. For example, statically-initialized modules
 may start threads before main is ever reached. Once threads have been created,
 it may be difficult or impossible to clean them up.
 
 googletest has three features intended to raise awareness of threading issues.
 
 1.  A warning is emitted if multiple threads are running when a death test is
     encountered.
 2.  Test cases with a name ending in "DeathTest" are run before all other tests.
 3.  It uses `clone()` instead of `fork()` to spawn the child process on Linux
     (`clone()` is not available on Cygwin and Mac), as `fork()` is more likely
     to cause the child to hang when the parent process has multiple threads.
 
 It's perfectly fine to create threads inside a death test statement; they are
 executed in a separate process and cannot affect the parent.
 
 ### Death Test Styles
 
 
 The "threadsafe" death test style was introduced in order to help mitigate the
 risks of testing in a possibly multithreaded environment. It trades increased
 test execution time (potentially dramatically so) for improved thread safety.
 
 The automated testing framework does not set the style flag. You can choose a
 particular style of death tests by setting the flag programmatically:
 
 ```c++
 ::testing::FLAGS_gtest_death_test_style = "threadsafe";
 ```
 
 You can do this in `main()` to set the style for all death tests in the binary,
 or in individual tests. Recall that flags are saved before running each test and
 restored afterwards, so you need not do that yourself. For example:
 
 ```c++
 int main(int argc, char** argv) {
   ::testing::InitGoogleTest(&argc, argv);
   ::testing::FLAGS_gtest_death_test_style = "fast";
   return RUN_ALL_TESTS();
 }
 
 TEST(MyDeathTest, TestOne) {
   ::testing::FLAGS_gtest_death_test_style = "threadsafe";
   // This test is run in the "threadsafe" style:
   ASSERT_DEATH(ThisShouldDie(), "");
 }
 
 TEST(MyDeathTest, TestTwo) {
   // This test is run in the "fast" style:
   ASSERT_DEATH(ThisShouldDie(), "");
 }
 ```
 
 
 ### Caveats
 
 The `statement` argument of `ASSERT_EXIT()` can be any valid C++ statement. If
 it leaves the current function via a `return` statement or by throwing an
 exception, the death test is considered to have failed. Some googletest macros
 may return from the current function (e.g. `ASSERT_TRUE()`), so be sure to avoid
 them in `statement`.
 
 Since `statement` runs in the child process, any in-memory side effect (e.g.
 modifying a variable, releasing memory, etc) it causes will *not* be observable
 in the parent process. In particular, if you release memory in a death test,
 your program will fail the heap check as the parent process will never see the
 memory reclaimed. To solve this problem, you can
 
 1.  try not to free memory in a death test;
 2.  free the memory again in the parent process; or
 3.  do not use the heap checker in your program.
 
 Due to an implementation detail, you cannot place multiple death test assertions
 on the same line; otherwise, compilation will fail with an unobvious error
 message.
 
 Despite the improved thread safety afforded by the "threadsafe" style of death
 test, thread problems such as deadlock are still possible in the presence of
 handlers registered with `pthread_atfork(3)`.
 
 
 ## Using Assertions in Sub-routines
 
 ### Adding Traces to Assertions
 
 If a test sub-routine is called from several places, when an assertion inside it
 fails, it can be hard to tell which invocation of the sub-routine the failure is
 from. You can alleviate this problem using extra logging or custom failure
 messages, but that usually clutters up your tests. A better solution is to use
 the `SCOPED_TRACE` macro or the `ScopedTrace` utility:
 
 ```c++
 SCOPED_TRACE(message);
 ScopedTrace trace("file_path", line_number, message);
 ```
 
 where `message` can be anything streamable to `std::ostream`. The `SCOPED_TRACE`
 macro will cause the current file name, line number, and the given message to be
 included in every failure message. `ScopedTrace` accepts an explicit file name
 and line number as arguments, which is useful for writing test helpers. The
 effect will be undone when control leaves the current lexical scope.
 
 For example,
 
 ```c++
 10: void Sub1(int n) {
 11:   EXPECT_EQ(1, Bar(n));
 12:   EXPECT_EQ(2, Bar(n + 1));
 13: }
 14:
 15: TEST(FooTest, Bar) {
 16:   {
 17:     SCOPED_TRACE("A");  // This trace point will be included in
 18:                         // every failure in this scope.
 19:     Sub1(1);
 20:   }
 21:   // Now it won't.
 22:   Sub1(9);
 23: }
 ```
 
 could result in messages like these:
 
 ```none
 path/to/foo_test.cc:11: Failure
 Value of: Bar(n)
 Expected: 1
   Actual: 2
    Trace:
 path/to/foo_test.cc:17: A
 
 path/to/foo_test.cc:12: Failure
 Value of: Bar(n + 1)
 Expected: 2
   Actual: 3
 ```
 
 Without the trace, it would've been difficult to know which invocation of
 `Sub1()` the two failures come from respectively. (You could add an extra
 message to each assertion in `Sub1()` to indicate the value of `n`, but that's
 tedious.)
 
 Some tips on using `SCOPED_TRACE`:
 
 1.  With a suitable message, it's often enough to use `SCOPED_TRACE` at the
     beginning of a sub-routine, instead of at each call site.
 2.  When calling sub-routines inside a loop, make the loop iterator part of the
     message in `SCOPED_TRACE` so that you can tell which iteration the failure
     is from (see the sketch after this list).
 3.  Sometimes the line number of the trace point is enough for identifying the
     particular invocation of a sub-routine. In this case, you don't have to
     choose a unique message for `SCOPED_TRACE`. You can simply use `""`.
 4.  You can use `SCOPED_TRACE` in an inner scope when there is one in the outer
     scope. In this case, all active trace points will be included in the failure
     messages, in the reverse order in which they are encountered.
 5.  The trace dump is clickable in Emacs - hit `return` on a line number and
     you'll be taken to that line in the source file!
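 
 Here is a sketch of tip 2, reusing the `Sub1()` helper from the example above:
 
 ```c++
 TEST(FooTest, BarInALoop) {
   for (int i = 0; i < 5; ++i) {
     // The iteration number shows up in the trace of any failure inside Sub1().
     SCOPED_TRACE("iteration " + std::to_string(i));
     Sub1(i);
   }
 }
 ```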
 
 **Availability**: Linux, Windows, Mac.
 
 ### Propagating Fatal Failures
 
 A common pitfall when using `ASSERT_*` and `FAIL*` is not understanding that
 when they fail they only abort the _current function_, not the entire test. For
 example, the following test will segfault:
 
 ```c++
 void Subroutine() {
   // Generates a fatal failure and aborts the current function.
   ASSERT_EQ(1, 2);
 
   // The following won't be executed.
   ...
 }
 
 TEST(FooTest, Bar) {
   Subroutine();  // The intended behavior is for the fatal failure
                  // in Subroutine() to abort the entire test.
 
   // The actual behavior: the function goes on after Subroutine() returns.
   int* p = NULL;
   *p = 3;  // Segfault!
 }
 ```
 
 To alleviate this, googletest provides three different solutions: exceptions,
 the `(ASSERT|EXPECT)_NO_FATAL_FAILURE` assertions, and the `HasFatalFailure()`
 function. They are described in the following three subsections.
 
 #### Asserting on Subroutines with an Exception
 
 The following code can turn an `ASSERT_*` failure into an exception:
 
 ```c++
 class ThrowListener : public testing::EmptyTestEventListener {
   void OnTestPartResult(const testing::TestPartResult& result) override {
     if (result.type() == testing::TestPartResult::kFatalFailure) {
       throw testing::AssertionException(result);
     }
   }
 };
 int main(int argc, char** argv) {
   ...
   testing::UnitTest::GetInstance()->listeners().Append(new ThrowListener);
   return RUN_ALL_TESTS();
 }
 ```
 
 This listener should be added after other listeners if you have any; otherwise
 they won't see the failed `OnTestPartResult` events.
 
 #### Asserting on Subroutines
 
 As shown above, if your test calls a subroutine that has an `ASSERT_*` failure
 in it, the test will continue after the subroutine returns. This may not be what
 you want.
 
 Often people want fatal failures to propagate like exceptions. For that
 googletest offers the following macros:
 
 Fatal assertion                       | Nonfatal assertion                    | Verifies
 ------------------------------------- | ------------------------------------- | --------
 `ASSERT_NO_FATAL_FAILURE(statement);` | `EXPECT_NO_FATAL_FAILURE(statement);` | `statement` doesn't generate any new fatal failures in the current thread.
 
 Only failures in the thread that executes the assertion are checked to determine
 the result of this type of assertion. If `statement` creates new threads,
 failures in these threads are ignored.
 
 Examples:
 
 ```c++
 ASSERT_NO_FATAL_FAILURE(Foo());
 
 int i;
 EXPECT_NO_FATAL_FAILURE({
   i = Bar();
 });
 ```
 
 **Availability**: Linux, Windows, Mac. Assertions from multiple threads are
 currently not supported on Windows.
 
 #### Checking for Failures in the Current Test
 
 `HasFatalFailure()` in the `::testing::Test` class returns `true` if an
 assertion in the current test has suffered a fatal failure. This allows
 functions to catch fatal failures in a sub-routine and return early.
 
 ```c++
 class Test {
  public:
   ...
   static bool HasFatalFailure();
 };
 ```
 
 The typical usage, which basically simulates the behavior of a thrown exception,
 is:
 
 ```c++
 TEST(FooTest, Bar) {
   Subroutine();
   // Aborts if Subroutine() had a fatal failure.
   if (HasFatalFailure()) return;
 
   // The following won't be executed.
   ...
 }
 ```
 
 If `HasFatalFailure()` is used outside of `TEST()`, `TEST_F()`, or a test
 fixture, you must add the `::testing::Test::` prefix, as in:
 
 ```c++
 if (::testing::Test::HasFatalFailure()) return;
 ```
 
 Similarly, `HasNonfatalFailure()` returns `true` if the current test has at
 least one non-fatal failure, and `HasFailure()` returns `true` if the current
 test has at least one failure of either kind.
 
 **Availability**: Linux, Windows, Mac.
 
 ## Logging Additional Information
 
 In your test code, you can call `RecordProperty("key", value)` to log additional
 information, where `value` can be either a string or an `int`. The *last* value
 recorded for a key will be emitted to the [XML output](#XmlReport) if you
 specify one. For example, the test
 
 ```c++
 TEST_F(WidgetUsageTest, MinAndMaxWidgets) {
   RecordProperty("MaximumWidgets", ComputeMaxUsage());
   RecordProperty("MinimumWidgets", ComputeMinUsage());
 }
 ```
 
 will output XML like this:
 
 ```xml
   ...
     <testcase name="MinAndMaxWidgets" status="run" time="0.006" classname="WidgetUsageTest" MaximumWidgets="12" MinimumWidgets="9" />
   ...
 ```
 
 > NOTE:
 >
 > *   `RecordProperty()` is a static member of the `Test` class. Therefore it
 >     needs to be prefixed with `::testing::Test::` if used outside of the
 >     `TEST` body and the test fixture class.
 > *   `key` must be a valid XML attribute name, and cannot conflict with the
 >     ones already used by googletest (`name`, `status`, `time`, `classname`,
 >     `type_param`, and `value_param`).
 > *   Calling `RecordProperty()` outside of the lifespan of a test is allowed.
 >     If it's called outside of a test but between a test case's
 >     `SetUpTestCase()` and `TearDownTestCase()` methods, it will be attributed
 >     to the XML element for the test case. If it's called outside of all test
 >     cases (e.g. in a test environment), it will be attributed to the top-level
 >     XML element.
 
 **Availability**: Linux, Windows, Mac.
 
 ## Sharing Resources Between Tests in the Same Test Case
 
 googletest creates a new test fixture object for each test in order to make
 tests independent and easier to debug. However, sometimes tests use resources
 that are expensive to set up, making the one-copy-per-test model prohibitively
 expensive.
 
 If the tests don't change the resource, there's no harm in their sharing a
 single resource copy. So, in addition to per-test set-up/tear-down, googletest
 also supports per-test-case set-up/tear-down. To use it:
 
 1.  In your test fixture class (say `FooTest` ), declare as `static` some member
     variables to hold the shared resources.
 1.  Outside your test fixture class (typically just below it), define those
     member variables, optionally giving them initial values.
 1.  In the same test fixture class, define a `static void SetUpTestCase()`
     function (remember not to spell it as **`SetupTestCase`** with a small `u`!)
     to set up the shared resources and a `static void TearDownTestCase()`
     function to tear them down.
 
 That's it! googletest automatically calls `SetUpTestCase()` before running the
 *first test* in the `FooTest` test case (i.e. before creating the first
 `FooTest` object), and calls `TearDownTestCase()` after running the *last test*
 in it (i.e. after deleting the last `FooTest` object). In between, the tests can
 use the shared resources.
 
 Remember that the test order is undefined, so your code can't depend on a test
 preceding or following another. Also, the tests must either not modify the state
 of any shared resource, or, if they do modify the state, they must restore the
 state to its original value before passing control to the next test.
 
 Here's an example of per-test-case set-up and tear-down:
 
 ```c++
 class FooTest : public ::testing::Test {
  protected:
   // Per-test-case set-up.
   // Called before the first test in this test case.
   // Can be omitted if not needed.
   static void SetUpTestCase() {
     shared_resource_ = new ...;
   }
 
   // Per-test-case tear-down.
   // Called after the last test in this test case.
   // Can be omitted if not needed.
   static void TearDownTestCase() {
     delete shared_resource_;
     shared_resource_ = NULL;
   }
 
   // You can define per-test set-up logic as usual.
   virtual void SetUp() { ... }
 
   // You can define per-test tear-down logic as usual.
   virtual void TearDown() { ... }
 
   // Some expensive resource shared by all tests.
   static T* shared_resource_;
 };
 
 T* FooTest::shared_resource_ = NULL;
 
 TEST_F(FooTest, Test1) {
   ... you can refer to shared_resource_ here ...
 }
 
 TEST_F(FooTest, Test2) {
   ... you can refer to shared_resource_ here ...
 }
 ```
 
 NOTE: Though the above code declares `SetUpTestCase()` protected, it may
 sometimes be necessary to declare it public, such as when using it with
 `TEST_P`.
 
 **Availability**: Linux, Windows, Mac.
 
 ## Global Set-Up and Tear-Down
 
 Just as you can do set-up and tear-down at the test level and the test case
 level, you can also do it at the test program level. Here's how.
 
 First, you subclass the `::testing::Environment` class to define a test
 environment, which knows how to set-up and tear-down:
 
 ```c++
 class Environment {
  public:
   virtual ~Environment() {}
 
   // Override this to define how to set up the environment.
   virtual void SetUp() {}
 
   // Override this to define how to tear down the environment.
   virtual void TearDown() {}
 };
 ```
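 
 For illustration, a minimal subclass might look like this (the `FooEnvironment`
 name simply matches the registration snippet later in this section):
 
 ```c++
 class FooEnvironment : public ::testing::Environment {
  public:
   // Called once before any test in the binary runs.
   void SetUp() override { /* acquire shared resources here */ }
 
   // Called once after the last test has finished.
   void TearDown() override { /* release them here */ }
 };
 ```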
 
 Then, you register an instance of your environment class with googletest by
 calling the `::testing::AddGlobalTestEnvironment()` function:
 
 ```c++
 Environment* AddGlobalTestEnvironment(Environment* env);
 ```
 
 Now, when `RUN_ALL_TESTS()` is called, it first calls the `SetUp()` method of
 the environment object, then runs the tests if there were no fatal failures, and
 finally calls `TearDown()` of the environment object.
 
 It's OK to register multiple environment objects. In this case, their `SetUp()`
 will be called in the order they are registered, and their `TearDown()` will be
 called in the reverse order.
 
 Note that googletest takes ownership of the registered environment objects.
 Therefore **do not delete them** by yourself.
 
 You should call `AddGlobalTestEnvironment()` before `RUN_ALL_TESTS()` is called,
 probably in `main()`. If you use `gtest_main`, you need to call this before
 `main()` starts for it to take effect. One way to do this is to define a global
 variable like this:
 
 ```c++
 ::testing::Environment* const foo_env =
     ::testing::AddGlobalTestEnvironment(new FooEnvironment);
 ```
 
 However, we strongly recommend that you write your own `main()` and call
 `AddGlobalTestEnvironment()` there, as relying on initialization of global
 variables makes the code harder to read and may cause problems when you register
 multiple environments from different translation units and the environments have
 dependencies among them (remember that the compiler doesn't guarantee the order
 in which global variables from different translation units are initialized).
 
 ## Value-Parameterized Tests
 
 *Value-parameterized tests* allow you to test your code with different
 parameters without writing multiple copies of the same test. This is useful in a
 number of situations, for example:
 
 *   You have a piece of code whose behavior is affected by one or more
     command-line flags. You want to make sure your code performs correctly for
     various values of those flags.
 *   You want to test different implementations of an OO interface.
 *   You want to test your code over various inputs (a.k.a. data-driven testing).
     This feature is easy to abuse, so please exercise your good sense when doing
     it!
 
 ### How to Write Value-Parameterized Tests
 
 To write value-parameterized tests, first you should define a fixture class. It
 must be derived from both `::testing::Test` and
 `::testing::WithParamInterface<T>` (the latter is a pure interface), where `T`
 is the type of your parameter values. For convenience, you can just derive the
 fixture class from `::testing::TestWithParam<T>`, which itself is derived from
 both `::testing::Test` and `::testing::WithParamInterface<T>`. `T` can be any
 copyable type. If it's a raw pointer, you are responsible for managing the
 lifespan of the pointed values.
 
 NOTE: If your test fixture defines `SetUpTestCase()` or `TearDownTestCase()`
 they must be declared **public** rather than **protected** in order to use
 `TEST_P`.
 
 ```c++
 class FooTest :
     public ::testing::TestWithParam<const char*> {
   // You can implement all the usual fixture class members here.
   // To access the test parameter, call GetParam() from class
   // TestWithParam<T>.
 };
 
 // Or, when you want to add parameters to a pre-existing fixture class:
 class BaseTest : public ::testing::Test {
   ...
 };
 class BarTest : public BaseTest,
                 public ::testing::WithParamInterface<const char*> {
   ...
 };
 ```
 
 Then, use the `TEST_P` macro to define as many test patterns using this fixture
 as you want. The `_P` suffix stands for "parameterized" or "pattern", whichever
 you prefer to think of it as.
 
 ```c++
 TEST_P(FooTest, DoesBlah) {
   // Inside a test, access the test parameter with the GetParam() method
   // of the TestWithParam<T> class:
   EXPECT_TRUE(foo.Blah(GetParam()));
   ...
 }
 
 TEST_P(FooTest, HasBlahBlah) {
   ...
 }
 ```
 
 Finally, you can use `INSTANTIATE_TEST_CASE_P` to instantiate the test case with
 any set of parameters you want. googletest defines a number of functions for
 generating test parameters. They return what we call (surprise!) *parameter
 generators*. Here is a summary of them, which are all in the `testing`
 namespace:
 
 | Parameter Generator                             | Behavior                                                                                                           |
 | ----------------------------------------------- | ------------------------------------------------------------------------------------------------------------------ |
 | `Range(begin, end [, step])`                    | Yields values `{begin, begin+step, begin+step+step, ...}`. The values do not include `end`. `step` defaults to 1.   |
 | `Values(v1, v2, ..., vN)`                       | Yields values `{v1, v2, ..., vN}`.                                                                                 |
 | `ValuesIn(container)` and `ValuesIn(begin,end)` | Yields values from a C-style array, an STL-style container, or an iterator range `[begin, end)`.                    |
 | `Bool()`                                        | Yields sequence `{false, true}`.                                                                                   |
 | `Combine(g1, g2, ..., gN)`                      | Yields all combinations (Cartesian product) as `std::tuple`s of the values generated by the `N` generators.         |
 
 For more details, see the comments at the definitions of these functions.
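 
 For example, `Combine()` yields `std::tuple`s, so the fixture's parameter type
 must be a matching tuple (a sketch with made-up names):
 
 ```c++
 #include <tuple>
 
 class HypotheticalCombineTest
     : public ::testing::TestWithParam<std::tuple<bool, int>> {};
 
 TEST_P(HypotheticalCombineTest, Works) {
   const bool flag = std::get<0>(GetParam());
   const int value = std::get<1>(GetParam());
   EXPECT_TRUE(flag || value > 0);
 }
 
 // Instantiates the test for every (flag, value) combination: 2 * 3 = 6 cases.
 INSTANTIATE_TEST_CASE_P(FlagsAndValues, HypotheticalCombineTest,
                         ::testing::Combine(::testing::Bool(),
                                            ::testing::Values(1, 2, 3)));
 ```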
 
 The following statement will instantiate tests from the `FooTest` test case each
 with parameter values `"meeny"`, `"miny"`, and `"moe"`.
 
 ```c++
 INSTANTIATE_TEST_CASE_P(InstantiationName,
                         FooTest,
                         ::testing::Values("meeny", "miny", "moe"));
 ```
 
 NOTE: The code above must be placed at global or namespace scope, not at
 function scope.
 
 NOTE: Don't forget this step! If you do forget, your test will silently pass,
 but none of its cases will ever run!
 
 To distinguish different instances of the pattern (yes, you can instantiate it
 more than once), the first argument to `INSTANTIATE_TEST_CASE_P` is a prefix
 that will be added to the actual test case name. Remember to pick unique
 prefixes for different instantiations. The tests from the instantiation above
 will have these names:
 
 *   `InstantiationName/FooTest.DoesBlah/0` for `"meeny"`
 *   `InstantiationName/FooTest.DoesBlah/1` for `"miny"`
 *   `InstantiationName/FooTest.DoesBlah/2` for `"moe"`
 *   `InstantiationName/FooTest.HasBlahBlah/0` for `"meeny"`
 *   `InstantiationName/FooTest.HasBlahBlah/1` for `"miny"`
 *   `InstantiationName/FooTest.HasBlahBlah/2` for `"moe"`
 
 You can use these names in [`--gtest_filter`](#TestFilter).
 
 This statement will instantiate all tests from `FooTest` again, each with
 parameter values `"cat"` and `"dog"`:
 
 ```c++
 const char* pets[] = {"cat", "dog"};
 INSTANTIATE_TEST_CASE_P(AnotherInstantiationName, FooTest,
                         ::testing::ValuesIn(pets));
 ```
 
 The tests from the instantiation above will have these names:
 
 *   `AnotherInstantiationName/FooTest.DoesBlah/0` for `"cat"`
 *   `AnotherInstantiationName/FooTest.DoesBlah/1` for `"dog"`
 *   `AnotherInstantiationName/FooTest.HasBlahBlah/0` for `"cat"`
 *   `AnotherInstantiationName/FooTest.HasBlahBlah/1` for `"dog"`
 
 Please note that `INSTANTIATE_TEST_CASE_P` will instantiate *all* tests in the
 given test case, whether their definitions come before or *after* the
 `INSTANTIATE_TEST_CASE_P` statement.
 
 You can see `sample7_unittest.cc` and `sample8_unittest.cc` for more examples.
 
 **Availability**: Linux, Windows (requires MSVC 8.0 or above), Mac
 
 ### Creating Value-Parameterized Abstract Tests
 
 In the above, we define and instantiate `FooTest` in the *same* source file.
 Sometimes you may want to define value-parameterized tests in a library and let
 other people instantiate them later. This pattern is known as *abstract tests*.
 As an example of its application, when you are designing an interface you can
 write a standard suite of abstract tests (perhaps using a factory function as
 the test parameter) that all implementations of the interface are expected to
 pass. When someone implements the interface, they can instantiate your suite to
 get all the interface-conformance tests for free.
 
 To define abstract tests, you should organize your code like this:
 
 1.  Put the definition of the parameterized test fixture class (e.g. `FooTest`)
     in a header file, say `foo_param_test.h`. Think of this as *declaring* your
     abstract tests.
 1.  Put the `TEST_P` definitions in `foo_param_test.cc`, which includes
     `foo_param_test.h`. Think of this as *implementing* your abstract tests.
 
 Once they are defined, you can instantiate them by including `foo_param_test.h`,
 invoking `INSTANTIATE_TEST_CASE_P()`, and depending on the library target that
 contains `foo_param_test.cc`. You can instantiate the same abstract test case
 multiple times, possibly in different source files.
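 
 For instance, an implementation's test file might look like this (file and
 parameter values are hypothetical):
 
 ```c++
 // my_impl_test.cc
 #include "foo_param_test.h"  // Declares FooTest and its TEST_P patterns.
 
 // Instantiates every TEST_P in FooTest with this implementation's parameters.
 INSTANTIATE_TEST_CASE_P(MyImplementation, FooTest,
                         ::testing::Values("config_a", "config_b"));
 ```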
 
 ### Specifying Names for Value-Parameterized Test Parameters
 
 The optional last argument to `INSTANTIATE_TEST_CASE_P()` allows the user to
 specify a function or functor that generates custom test name suffixes based on
 the test parameters. The function should accept one argument of type
 `testing::TestParamInfo<class ParamType>`, and return `std::string`.
 
 `testing::PrintToStringParamName` is a builtin test suffix generator that
 returns the value of `testing::PrintToString(GetParam())`. It does not work for
 `std::string` or C strings.
 
 NOTE: test names must be non-empty, unique, and may only contain ASCII
 alphanumeric characters. In particular, they
 [should not contain underscores](faq.md#no-underscores).
 
 ```c++
 class MyTestCase : public testing::TestWithParam<int> {};
 
 TEST_P(MyTestCase, MyTest) {
   std::cout << "Example Test Param: " << GetParam() << std::endl;
 }
 
 INSTANTIATE_TEST_CASE_P(MyGroup, MyTestCase, testing::Range(0, 10),
                         testing::PrintToStringParamName());
 ```
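 
 If the default suffixes don't suit you, you can supply your own generator. Here
 is a minimal sketch (the test case and function names are made up):
 
 ```c++
 #include <string>
 
 class QueueSizeTest : public testing::TestWithParam<int> {};
 
 TEST_P(QueueSizeTest, AcceptsSize) { EXPECT_GE(GetParam(), 0); }
 
 // Produces suffixes such as "Size16", so the tests are named e.g.
 // Sizes/QueueSizeTest.AcceptsSize/Size16.
 std::string QueueSizeName(const testing::TestParamInfo<int>& info) {
   return "Size" + std::to_string(info.param);
 }
 
 INSTANTIATE_TEST_CASE_P(Sizes, QueueSizeTest, testing::Values(1, 16, 256),
                         QueueSizeName);
 ```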
 
 ## Typed Tests
 
 Suppose you have multiple implementations of the same interface and want to make
 sure that all of them satisfy some common requirements. Or, you may have defined
 several types that are supposed to conform to the same "concept" and you want to
 verify it. In both cases, you want the same test logic repeated for different
 types.
 
 While you can write one `TEST` or `TEST_F` for each type you want to test (and
 you may even factor the test logic into a function template that you invoke from
 the `TEST`), it's tedious and doesn't scale: if you want `m` tests over `n`
 types, you'll end up writing `m*n` `TEST`s.
 
 *Typed tests* allow you to repeat the same test logic over a list of types. You
 only need to write the test logic once, although you must know the type list
 when writing typed tests. Here's how you do it:
 
 First, define a fixture class template. It should be parameterized by a type.
 Remember to derive it from `::testing::Test`:
 
 ```c++
 template <typename T>
 class FooTest : public ::testing::Test {
  public:
   ...
   typedef std::list<T> List;
   static T shared_;
   T value_;
 };
 ```
 
 Next, associate a list of types with the test case, which will be repeated for
 each type in the list:
 
 ```c++
 using MyTypes = ::testing::Types<char, int, unsigned int>;
 TYPED_TEST_CASE(FooTest, MyTypes);
 ```
 
 The type alias (`using` or `typedef`) is necessary for the `TYPED_TEST_CASE`
 macro to parse correctly. Otherwise the compiler will think that each comma in
 the type list introduces a new macro argument.
 
 Then, use `TYPED_TEST()` instead of `TEST_F()` to define a typed test for this
 test case. You can repeat this as many times as you want:
 
 ```c++
 TYPED_TEST(FooTest, DoesBlah) {
   // Inside a test, refer to the special name TypeParam to get the type
   // parameter.  Since we are inside a derived class template, C++ requires
   // us to visit the members of FooTest via 'this'.
   TypeParam n = this->value_;
 
   // To visit static members of the fixture, add the 'TestFixture::'
   // prefix.
   n += TestFixture::shared_;
 
   // To refer to typedefs in the fixture, add the 'typename TestFixture::'
   // prefix.  The 'typename' is required to satisfy the compiler.
   typename TestFixture::List values;
 
   values.push_back(n);
   ...
 }
 
 TYPED_TEST(FooTest, HasPropertyA) { ... }
 ```
 
 You can see `sample6_unittest.cc` for a complete example.
 
 **Availability**: Linux, Windows (requires MSVC 8.0 or above), Mac
 
 ## Type-Parameterized Tests
 
 *Type-parameterized tests* are like typed tests, except that they don't require
 you to know the list of types ahead of time. Instead, you can define the test
 logic first and instantiate it with different type lists later. You can even
 instantiate it more than once in the same program.
 
 If you are designing an interface or concept, you can define a suite of
 type-parameterized tests to verify properties that any valid implementation of
 the interface/concept should have. Then, the author of each implementation can
 just instantiate the test suite with their type to verify that it conforms to
 the requirements, without having to write similar tests repeatedly. Here's an
 example:
 
 First, define a fixture class template, as we did with typed tests:
 
 ```c++
 template <typename T>
 class FooTest : public ::testing::Test {
   ...
 };
 ```
 
 Next, declare that you will define a type-parameterized test case:
 
 ```c++
 TYPED_TEST_CASE_P(FooTest);
 ```
 
 Then, use `TYPED_TEST_P()` to define a type-parameterized test. You can repeat
 this as many times as you want:
 
 ```c++
 TYPED_TEST_P(FooTest, DoesBlah) {
   // Inside a test, refer to TypeParam to get the type parameter.
   TypeParam n = 0;
   ...
 }
 
 TYPED_TEST_P(FooTest, HasPropertyA) { ... }
 ```
 
 Now the tricky part: you need to register all test patterns using the
 `REGISTER_TYPED_TEST_CASE_P` macro before you can instantiate them. The first
 argument of the macro is the test case name; the rest are the names of the tests
 in this test case:
 
 ```c++
 REGISTER_TYPED_TEST_CASE_P(FooTest,
                            DoesBlah, HasPropertyA);
 ```
 
 Finally, you are free to instantiate the pattern with the types you want. If you
 put the above code in a header file, you can `#include` it in multiple C++
 source files and instantiate it multiple times.
 
 ```c++
 typedef ::testing::Types<char, int, unsigned int> MyTypes;
 INSTANTIATE_TYPED_TEST_CASE_P(My, FooTest, MyTypes);
 ```
 
 To distinguish different instances of the pattern, the first argument to the
 `INSTANTIATE_TYPED_TEST_CASE_P` macro is a prefix that will be added to the
 actual test case name. Remember to pick unique prefixes for different instances.
 
 In the special case where the type list contains only one type, you can write
 that type directly without `::testing::Types<...>`, like this:
 
 ```c++
 INSTANTIATE_TYPED_TEST_CASE_P(My, FooTest, int);
 ```
 
 You can see `sample6_unittest.cc` for a complete example.
 
 **Availability**: Linux, Windows (requires MSVC 8.0 or above), Mac
 
 ## Testing Private Code
 
 If you change your software's internal implementation, your tests should not
 break as long as the change is not observable by users. Therefore, **per the
 black-box testing principle, most of the time you should test your code through
 its public interfaces.**
 
 **If you still find yourself needing to test internal implementation code,
 consider if there's a better design.** The desire to test internal
 implementation is often a sign that the class is doing too much. Consider
 extracting an implementation class, and testing it. Then use that implementation
 class in the original class.
 
 If you absolutely have to test non-public interface code though, you can. There
 are two cases to consider:
 
 *   Static functions (*not* the same as static member functions!) or unnamed
     namespaces, and
 *   Private or protected class members
 
 To test them, we use the following special techniques:
 
 *   Both static functions and definitions/declarations in an unnamed namespace
     are only visible within the same translation unit. To test them, you can
     `#include` the entire `.cc` file being tested in your `*_test.cc` file.
     (#including `.cc` files is not a good way to reuse code - you should not do
     this in production code!)
 
     However, a better approach is to move the private code into the
     `foo::internal` namespace, where `foo` is the namespace your project
     normally uses, and put the private declarations in a `*-internal.h` file.
     Your production `.cc` files and your tests are allowed to include this
     internal header, but your clients are not. This way, you can fully test your
     internal implementation without leaking it to your clients.
 
 *   Private class members are only accessible from within the class or by
     friends. To access a class' private members, you can declare your test
     fixture as a friend to the class and define accessors in your fixture. Tests
     using the fixture can then access the private members of your production
     class via the accessors in the fixture. Note that even though your fixture
     is a friend to your production class, your tests are not automatically
     friends to it, as they are technically defined in sub-classes of the
     fixture.
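 
     For instance, here is a minimal sketch of this approach (the class `Foo`,
     its private member `secret_`, and the accessor name are hypothetical):
 
     ```c++
     // Production code.
     class Foo {
      private:
       friend class FooTest;  // Grants the test fixture access to private members.
       int secret_ = 42;      // Hypothetical private state.
     };
 
     // Test code.
     class FooTest : public ::testing::Test {
      protected:
       // Accessor defined in the fixture; tests reach Foo's private member
       // through it.
       static int GetSecret(const Foo& foo) { return foo.secret_; }
     };
 
     TEST_F(FooTest, SecretHasExpectedInitialValue) {
       Foo foo;
       EXPECT_EQ(42, GetSecret(foo));
     }
     ```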
 
     Another way to test private members is to refactor them into an
     implementation class, which is then declared in a `*-internal.h` file. Your
     clients aren't allowed to include this header, but your tests can. This is
     called the
     [Pimpl](https://www.gamedev.net/articles/programming/general-and-gameplay-programming/the-c-pimpl-r1794/)
     (Private Implementation) idiom.
 
     Or, you can declare an individual test as a friend of your class by adding
     this line in the class body:
 
     ```c++
         FRIEND_TEST(TestCaseName, TestName);
     ```
 
     For example,
 
     ```c++
     // foo.h
 
     #include "gtest/gtest_prod.h"
 
     class Foo {
       ...
      private:
       FRIEND_TEST(FooTest, BarReturnsZeroOnNull);
 
       int Bar(void* x);
     };
 
     // foo_test.cc
     ...
     TEST(FooTest, BarReturnsZeroOnNull) {
       Foo foo;
       EXPECT_EQ(0, foo.Bar(NULL));  // Uses Foo's private member Bar().
     }
     ```
 
     Pay special attention when your class is defined in a namespace, as you
     should define your test fixtures and tests in the same namespace if you want
     them to be friends of your class. For example, if the code to be tested
     looks like:
 
     ```c++
     namespace my_namespace {
 
     class Foo {
       friend class FooTest;
       FRIEND_TEST(FooTest, Bar);
       FRIEND_TEST(FooTest, Baz);
       ... definition of the class Foo ...
     };
 
     }  // namespace my_namespace
     ```
 
     Your test code should be something like:
 
     ```c++
     namespace my_namespace {
 
     class FooTest : public ::testing::Test {
      protected:
       ...
     };
 
     TEST_F(FooTest, Bar) { ... }
     TEST_F(FooTest, Baz) { ... }
 
     }  // namespace my_namespace
     ```
 
 
 ## "Catching" Failures
 
 If you are building a testing utility on top of googletest, you'll want to test
 your utility. What framework would you use to test it? googletest, of course.
 
 The challenge is to verify that your testing utility reports failures correctly.
 In frameworks that report a failure by throwing an exception, you could catch
 the exception and assert on it. But googletest doesn't use exceptions, so how do
 we test that a piece of code generates an expected failure?
 
 `"gtest/gtest-spi.h"` contains some constructs to do this. After `#include`-ing
 this header, you can use
 
 ```c++
   EXPECT_FATAL_FAILURE(statement, substring);
 ```
 
 to assert that `statement` generates a fatal (e.g. `ASSERT_*`) failure in the
 current thread whose message contains the given `substring`, or use
 
 ```c++
   EXPECT_NONFATAL_FAILURE(statement, substring);
 ```
 
 if you are expecting a non-fatal (e.g. `EXPECT_*`) failure.
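 
 For example, here is a minimal sketch of testing a failure-raising test utility
 (the helper `ExpectNonNegative()` is a hypothetical example):
 
 ```c++
 #include "gtest/gtest-spi.h"
 #include "gtest/gtest.h"
 
 // Hypothetical test utility that reports a non-fatal failure for negative input.
 void ExpectNonNegative(int n) {
   EXPECT_GE(n, 0) << "value must be non-negative";
 }
 
 TEST(MyTestUtilityTest, ReportsFailureOnNegativeInput) {
   // Passes only if the statement produces a non-fatal failure whose message
   // contains the given substring.
   EXPECT_NONFATAL_FAILURE(ExpectNonNegative(-1), "non-negative");
 }
 ```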
 
 Only failures in the current thread are checked to determine the result of this
 type of expectations. If `statement` creates new threads, failures in these
 threads are also ignored. If you want to catch failures in other threads as
 well, use one of the following macros instead:
 
 ```c++
   EXPECT_FATAL_FAILURE_ON_ALL_THREADS(statement, substring);
   EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(statement, substring);
 ```
 
 NOTE: Assertions from multiple threads are currently not supported on Windows.
 
 For technical reasons, there are some caveats:
 
 1.  You cannot stream a failure message to either macro.
 
 1.  `statement` in `EXPECT_FATAL_FAILURE{_ON_ALL_THREADS}()` cannot reference
     local non-static variables or non-static members of `this` object.
 
 1.  `statement` in `EXPECT_FATAL_FAILURE{_ON_ALL_THREADS}()` cannot return a
     value.
 
 
 ## Getting the Current Test's Name
 
 Sometimes a function may need to know the name of the currently running test.
 For example, you may be using the `SetUp()` method of your test fixture to set
 the golden file name based on which test is running. The `::testing::TestInfo`
 class has this information:
 
 ```c++
 namespace testing {
 
 class TestInfo {
  public:
   // Returns the test case name and the test name, respectively.
   //
   // Do NOT delete or free the return value - it's managed by the
   // TestInfo class.
   const char* test_case_name() const;
   const char* name() const;
 };
 
 }
 ```
 
 To obtain a `TestInfo` object for the currently running test, call
 `current_test_info()` on the `UnitTest` singleton object:
 
 ```c++
   // Gets information about the currently running test.
   // Do NOT delete the returned object - it's managed by the UnitTest class.
   const ::testing::TestInfo* const test_info =
     ::testing::UnitTest::GetInstance()->current_test_info();
 
   printf("We are in test %s of test case %s.\n",
          test_info->name(),
          test_info->test_case_name());
 ```
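 
 For example, here is a minimal sketch of a fixture that derives a per-test
 golden file name in its `SetUp()` method (the directory layout and naming
 scheme are assumptions):
 
 ```c++
 #include <string>
 
 #include "gtest/gtest.h"
 
 class GoldenFileTest : public ::testing::Test {
  protected:
   void SetUp() override {
     const ::testing::TestInfo* const test_info =
         ::testing::UnitTest::GetInstance()->current_test_info();
     // E.g. "testdata/GoldenFileTest.ParsesInput.golden" -- hypothetical layout.
     golden_path_ = std::string("testdata/") + test_info->test_case_name() +
                    "." + test_info->name() + ".golden";
   }
 
   std::string golden_path_;
 };
 ```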
 
 `current_test_info()` returns a null pointer if no test is running. In
 particular, you cannot find the test case name in `SetUpTestCase()`,
 `TearDownTestCase()` (where you know the test case name implicitly), or
 functions called from them.
 
 **Availability**: Linux, Windows, Mac.
 
 ## Extending googletest by Handling Test Events
 
 googletest provides an **event listener API** to let you receive notifications
 about the progress of a test program and test failures. The events you can
 listen to include the start and end of the test program, a test case, or a test
 method, among others. You may use this API to augment or replace the standard
 console output, replace the XML output, or provide a completely different form
 of output, such as a GUI or a database. You can also use test events as
 checkpoints to implement a resource leak checker, for example.
 
 **Availability**: Linux, Windows, Mac.
 
 ### Defining Event Listeners
 
 To define an event listener, you subclass either `testing::TestEventListener`
 or `testing::EmptyTestEventListener`. The former is an (abstract) interface,
 where *each pure virtual method can be overridden to handle a test event* (for
 example, when a test starts, the `OnTestStart()` method will be called). The
 latter provides an empty implementation of all methods in the interface, so that
 a subclass only needs to override the methods it cares about.
 
 When an event is fired, its context is passed to the handler function as an
 argument. The following argument types are used:
 
 *   `UnitTest` reflects the state of the entire test program,
 *   `TestCase` has information about a test case, which can contain one or more
     tests,
 *   `TestInfo` contains the state of a test, and
 *   `TestPartResult` represents the result of a test assertion.
 
 An event handler function can examine the argument it receives to find out
 interesting information about the event and the test program's state.
 
 Here's an example:
 
 ```c++
   class MinimalistPrinter : public ::testing::EmptyTestEventListener {
     // Called before a test starts.
     virtual void OnTestStart(const ::testing::TestInfo& test_info) {
       printf("*** Test %s.%s starting.\n",
              test_info.test_case_name(), test_info.name());
     }
 
     // Called after a failed assertion or a SUCCESS().
     virtual void OnTestPartResult(const ::testing::TestPartResult& test_part_result) {
       printf("%s in %s:%d\n%s\n",
              test_part_result.failed() ? "*** Failure" : "Success",
              test_part_result.file_name(),
              test_part_result.line_number(),
              test_part_result.summary());
     }
 
     // Called after a test ends.
     virtual void OnTestEnd(const ::testing::TestInfo& test_info) {
       printf("*** Test %s.%s ending.\n",
              test_info.test_case_name(), test_info.name());
     }
   };
 ```
 
 ### Using Event Listeners
 
 To use the event listener you have defined, add an instance of it to the
 googletest event listener list (represented by class `TestEventListeners` - note
 the "s" at the end of the name) in your `main()` function, before calling
 `RUN_ALL_TESTS()`:
 
 ```c++
 int main(int argc, char** argv) {
   ::testing::InitGoogleTest(&argc, argv);
   // Gets hold of the event listener list.
   ::testing::TestEventListeners& listeners =
         ::testing::UnitTest::GetInstance()->listeners();
   // Adds a listener to the end.  googletest takes the ownership.
   listeners.Append(new MinimalistPrinter);
   return RUN_ALL_TESTS();
 }
 ```
 
 There's only one problem: the default test result printer is still in effect, so
 its output will mingle with the output from your minimalist printer. To suppress
 the default printer, just release it from the event listener list and delete it.
 You can do so by adding one line:
 
 ```c++
   ...
   delete listeners.Release(listeners.default_result_printer());
   listeners.Append(new MinimalistPrinter);
   return RUN_ALL_TESTS();
 ```
 
 Now, sit back and enjoy a completely different output from your tests. For more
 details, see `sample9_unittest.cc`.
 
 You may append more than one listener to the list. When an `On*Start()` or
 `OnTestPartResult()` event is fired, the listeners will receive it in the order
 they appear in the list (since new listeners are added to the end of the list,
 the default text printer and the default XML generator will receive the event
 first). An `On*End()` event will be received by the listeners in the *reverse*
 order. This allows output by listeners added later to be framed by output from
 listeners added earlier.
 
 ### Generating Failures in Listeners
 
 You may use failure-raising macros (`EXPECT_*()`, `ASSERT_*()`, `FAIL()`, etc)
 when processing an event. There are some restrictions:
 
 1.  You cannot generate any failure in `OnTestPartResult()` (otherwise it will
     cause `OnTestPartResult()` to be called recursively).
 1.  A listener that handles `OnTestPartResult()` is not allowed to generate any
     failure.
 
 When you add listeners to the listener list, you should put listeners that
 handle `OnTestPartResult()` *before* listeners that can generate failures. This
 ensures that failures generated by the latter are attributed to the right test
 by the former.
 
 We have a sample of a failure-raising listener in `sample10_unittest.cc`.
 
 ## Running Test Programs: Advanced Options
 
 googletest test programs are ordinary executables. Once built, you can run them
 directly and affect their behavior via the following environment variables
 and/or command line flags. For the flags to work, your programs must call
 `::testing::InitGoogleTest()` before calling `RUN_ALL_TESTS()`.
 
 To see a list of supported flags and their usage, please run your test program
 with the `--help` flag. You can also use `-h`, `-?`, or `/?` for short.
 
 If an option is specified both by an environment variable and by a flag, the
 latter takes precedence.
 
 ### Selecting Tests
 
 #### Listing Test Names
 
 Sometimes it is necessary to list the available tests in a program before
 running them so that a filter may be applied if needed. Including the flag
 `--gtest_list_tests` overrides all other flags and lists tests in the following
 format:
 
 ```none
 TestCase1.
   TestName1
   TestName2
 TestCase2.
   TestName
 ```
 
 None of the tests listed are actually run if the flag is provided. There is no
 corresponding environment variable for this flag.
 
 **Availability**: Linux, Windows, Mac.
 
 #### Running a Subset of the Tests
 
 By default, a googletest program runs all tests the user has defined. Sometimes,
 you want to run only a subset of the tests (e.g. for debugging or quickly
 verifying a change). If you set the `GTEST_FILTER` environment variable or the
 `--gtest_filter` flag to a filter string, googletest will only run the tests
 whose full names (in the form of `TestCaseName.TestName`) match the filter.
 
 The format of a filter is a '`:`'-separated list of wildcard patterns (called
 the *positive patterns*) optionally followed by a '`-`' and another
 '`:`'-separated pattern list (called the *negative patterns*). A test matches
 the filter if and only if it matches any of the positive patterns but does not
 match any of the negative patterns.
 
 A pattern may contain `'*'` (matches any string) or `'?'` (matches any single
 character). For convenience, the filter `'*-NegativePatterns'` can also be
 written as `'-NegativePatterns'`.
 
 For example:
 
 *   `./foo_test` Has no flag, and thus runs all its tests.
 *   `./foo_test --gtest_filter=*` Also runs everything, due to the single
     match-everything `*` value.
 *   `./foo_test --gtest_filter=FooTest.*` Runs everything in test case `FooTest`
     .
 *   `./foo_test --gtest_filter=*Null*:*Constructor*` Runs any test whose full
     name contains either `"Null"` or `"Constructor"` .
 *   `./foo_test --gtest_filter=-*DeathTest.*` Runs all non-death tests.
 *   `./foo_test --gtest_filter=FooTest.*-FooTest.Bar` Runs everything in test
     case `FooTest` except `FooTest.Bar`.
 *   `./foo_test --gtest_filter=FooTest.*:BarTest.*-FooTest.Bar:BarTest.Foo` Runs
     everything in test case `FooTest` except `FooTest.Bar` and everything in
     test case `BarTest` except `BarTest.Foo`.
     
 #### Temporarily Disabling Tests
 
 If you have a broken test that you cannot fix right away, you can add the
 `DISABLED_` prefix to its name. This will exclude it from execution. This is
 better than commenting out the code or using `#if 0`, as disabled tests are
 still compiled (and thus won't rot).
 
 If you need to disable all tests in a test case, you can either add `DISABLED_`
 to the front of the name of each test, or alternatively add it to the front of
 the test case name.
 
 For example, the following tests won't be run by googletest, even though they
 will still be compiled:
 
 ```c++
 // Tests that Foo does Abc.
 TEST(FooTest, DISABLED_DoesAbc) { ... }
 
 class DISABLED_BarTest : public ::testing::Test { ... };
 
 // Tests that Bar does Xyz.
 TEST_F(DISABLED_BarTest, DoesXyz) { ... }
 ```
 
 NOTE: This feature should only be used for temporary pain-relief. You still have
 to fix the disabled tests at a later date. As a reminder, googletest will print
 a banner warning you if a test program contains any disabled tests.
 
 TIP: You can easily count the number of disabled tests you have using `gsearch`
 and/or `grep`. This number can be used as a metric for improving your test
 quality.
 
 **Availability**: Linux, Windows, Mac.
 
 #### Temporarily Enabling Disabled Tests
 
 To include disabled tests in test execution, just invoke the test program with
 the `--gtest_also_run_disabled_tests` flag or set the
 `GTEST_ALSO_RUN_DISABLED_TESTS` environment variable to a value other than `0`.
 You can combine this with the `--gtest_filter` flag to further select which
 disabled tests to run.
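 
 For example (mirroring the flag examples elsewhere in this document; `foo_test`
 and the test name are placeholders):
 
 ```none
 $ foo_test --gtest_also_run_disabled_tests
 Run all tests, including the DISABLED_ ones.
 
 $ foo_test --gtest_also_run_disabled_tests --gtest_filter=*DISABLED_DoesAbc*
 Run only the disabled test(s) whose full name contains DISABLED_DoesAbc.
 ```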
 
 **Availability**: Linux, Windows, Mac.
 
 ### Repeating the Tests
 
 Once in a while you'll run into a test whose result is hit-or-miss. Perhaps it
 will fail only 1% of the time, making it rather hard to reproduce the bug under
 a debugger. This can be a major source of frustration.
 
 The `--gtest_repeat` flag allows you to repeat all (or selected) test methods in
 a program many times. Hopefully, a flaky test will eventually fail and give you
 a chance to debug. Here's how to use it:
 
 ```none
 $ foo_test --gtest_repeat=1000
 Repeat foo_test 1000 times and don't stop at failures.
 
 $ foo_test --gtest_repeat=-1
 A negative count means repeating forever.
 
 $ foo_test --gtest_repeat=1000 --gtest_break_on_failure
 Repeat foo_test 1000 times, stopping at the first failure.  This
 is especially useful when running under a debugger: when the test
 fails, it will drop into the debugger and you can then inspect
 variables and stacks.
 
 $ foo_test --gtest_repeat=1000 --gtest_filter=FooBar.*
 Repeat the tests whose name matches the filter 1000 times.
 ```
 
 If your test program contains [global set-up/tear-down](#GlobalSetUp) code, it
 will be repeated in each iteration as well, as the flakiness may be in it. You
 can also specify the repeat count by setting the `GTEST_REPEAT` environment
 variable.
 
 **Availability**: Linux, Windows, Mac.
 
 ### Shuffling the Tests
 
 You can specify the `--gtest_shuffle` flag (or set the `GTEST_SHUFFLE`
 environment variable to `1`) to run the tests in a program in a random order.
 This helps to reveal bad dependencies between tests.
 
 By default, googletest uses a random seed calculated from the current time.
 Therefore you'll get a different order every time. The console output includes
 the random seed value, such that you can reproduce an order-related test failure
 later. To specify the random seed explicitly, use the `--gtest_random_seed=SEED`
 flag (or set the `GTEST_RANDOM_SEED` environment variable), where `SEED` is an
 integer in the range [0, 99999]. The seed value 0 is special: it tells
 googletest to do the default behavior of calculating the seed from the current
 time.
 
 If you combine this with `--gtest_repeat=N`, googletest will pick a different
 random seed and re-shuffle the tests in each iteration.
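 
 For example (following the same convention as the `--gtest_repeat` examples
 above; `foo_test` is a placeholder):
 
 ```none
 $ foo_test --gtest_shuffle
 Shuffle using a seed derived from the current time.
 
 $ foo_test --gtest_shuffle --gtest_random_seed=12345
 Shuffle using seed 12345, so an order-dependent failure can be reproduced later.
 ```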
 
 **Availability**: Linux, Windows, Mac.
 
 ### Controlling Test Output
 
 #### Colored Terminal Output
 
 googletest can use colors in its terminal output to make it easier to spot the
 important information:
 
 ...
 <span style="color:green">[----------]<span style="color:black"> 1 test from FooTest
 <span style="color:green">[ RUN      ]<span style="color:black"> FooTest.DoesAbc
 <span style="color:green">[       OK ]<span style="color:black"> FooTest.DoesAbc
 <span style="color:green">[----------]<span style="color:black"> 2 tests from BarTest
 <span style="color:green">[ RUN      ]<span style="color:black"> BarTest.HasXyzProperty
 <span style="color:green">[       OK ]<span style="color:black"> BarTest.HasXyzProperty
 <span style="color:green">[ RUN      ]<span style="color:black"> BarTest.ReturnsTrueOnSuccess
 ... some error messages ...
 <span   style="color:red">[  FAILED  ] <span style="color:black">BarTest.ReturnsTrueOnSuccess
 ...
 <span style="color:green">[==========]<span style="color:black"> 30 tests from 14 test cases ran.
 <span style="color:green">[  PASSED  ]<span style="color:black"> 28 tests.
 <span style="color:red">[  FAILED  ]<span style="color:black"> 2 tests, listed below:
 <span style="color:red">[  FAILED  ]<span style="color:black"> BarTest.ReturnsTrueOnSuccess
 <span style="color:red">[  FAILED  ]<span style="color:black"> AnotherTest.DoesXyz
 
   2 FAILED TESTS
 
 You can set the `GTEST_COLOR` environment variable or the `--gtest_color`
 command line flag to `yes`, `no`, or `auto` (the default) to enable colors,
 disable colors, or let googletest decide. When the value is `auto`, googletest
 will use colors if and only if the output goes to a terminal and (on non-Windows
 platforms) the `TERM` environment variable is set to `xterm` or `xterm-color`.
 
 **Availability**: Linux, Windows, Mac.
 
 #### Suppressing the Elapsed Time
 
 By default, googletest prints the time it takes to run each test. To disable
 that, run the test program with the `--gtest_print_time=0` command line flag, or
 set the `GTEST_PRINT_TIME` environment variable to `0`.
 
 **Availability**: Linux, Windows, Mac.
 
 #### Suppressing UTF-8 Text Output
 
 In case of assertion failures, googletest prints expected and actual values of
 type `string` both as hex-encoded strings as well as in readable UTF-8 text if
 they contain valid non-ASCII UTF-8 characters. If you want to suppress the UTF-8
 text because, for example, you don't have a UTF-8-compatible output medium, run
 the test program with `--gtest_print_utf8=0` or set the `GTEST_PRINT_UTF8`
 environment variable to `0`.
 
 **Availability**: Linux, Windows, Mac.
 
 
 #### Generating an XML Report
 
 googletest can emit a detailed XML report to a file in addition to its normal
 textual output. The report contains the duration of each test, and thus can help
 you identify slow tests.
 
 To generate the XML report, set the `GTEST_OUTPUT` environment variable or the
 `--gtest_output` flag to the string `"xml:path_to_output_file"`, which will
 create the file at the given location. You can also just use the string `"xml"`,
 in which case the output can be found in the `test_detail.xml` file in the
 current directory.
 
 If you specify a directory (for example, `"xml:output/directory/"` on Linux or
 `"xml:output\directory\"` on Windows), googletest will create the XML file in
 that directory, named after the test executable (e.g. `foo_test.xml` for test
 program `foo_test` or `foo_test.exe`). If the file already exists (perhaps left
 over from a previous run), googletest will pick a different name (e.g.
 `foo_test_1.xml`) to avoid overwriting it.
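 
 For example (the file and directory names are placeholders):
 
 ```none
 $ foo_test --gtest_output=xml
 Write the report to ./test_detail.xml.
 
 $ foo_test --gtest_output=xml:reports/foo_test_report.xml
 Write the report to the given file.
 
 $ foo_test --gtest_output=xml:reports/
 Write the report to reports/foo_test.xml (or foo_test_1.xml etc. if that
 file already exists).
 ```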
 
 
 The report is based on the `junitreport` Ant task. Since that format was
 originally intended for Java, a little interpretation is required to make it
 apply to googletest tests, as shown here:
 
 ```xml
 <testsuites name="AllTests" ...>
   <testsuite name="test_case_name" ...>
     <testcase    name="test_name" ...>
       <failure message="..."/>
       <failure message="..."/>
       <failure message="..."/>
     </testcase>
   </testsuite>
 </testsuites>
 ```
 
 *   The root `<testsuites>` element corresponds to the entire test program.
 *   `<testsuite>` elements correspond to googletest test cases.
 *   `<testcase>` elements correspond to googletest test functions.
 
 For instance, the following program
 
 ```c++
 TEST(MathTest, Addition) { ... }
 TEST(MathTest, Subtraction) { ... }
 TEST(LogicTest, NonContradiction) { ... }
 ```
 
 could generate this report:
 
 ```xml
 <?xml version="1.0" encoding="UTF-8"?>
 <testsuites tests="3" failures="1" errors="0" time="0.035" timestamp="2011-10-31T18:52:42" name="AllTests">
   <testsuite name="MathTest" tests="2" failures="1" errors="0" time="0.015">
     <testcase name="Addition" status="run" time="0.007" classname="">
       <failure message="Value of: add(1, 1)&#x0A;  Actual: 3&#x0A;Expected: 2" type="">...</failure>
       <failure message="Value of: add(1, -1)&#x0A;  Actual: 1&#x0A;Expected: 0" type="">...</failure>
     </testcase>
     <testcase name="Subtraction" status="run" time="0.005" classname="">
     </testcase>
   </testsuite>
   <testsuite name="LogicTest" tests="1" failures="0" errors="0" time="0.005">
     <testcase name="NonContradiction" status="run" time="0.005" classname="">
     </testcase>
   </testsuite>
 </testsuites>
 ```
 
 Things to note:
 
 *   The `tests` attribute of a `<testsuites>` or `<testsuite>` element tells how
     many test functions the googletest program or test case contains, while the
     `failures` attribute tells how many of them failed.
 
 *   The `time` attribute expresses the duration of the test, test case, or
     entire test program in seconds.
 
 *   The `timestamp` attribute records the local date and time of the test
     execution.
 
 *   Each `<failure>` element corresponds to a single failed googletest
     assertion.
 
 **Availability**: Linux, Windows, Mac.
 
 #### Generating a JSON Report
 
 googletest can also emit a JSON report as an alternative format to XML. To
 generate the JSON report, set the `GTEST_OUTPUT` environment variable or the
 `--gtest_output` flag to the string `"json:path_to_output_file"`, which will
 create the file at the given location. You can also just use the string
 `"json"`, in which case the output can be found in the `test_detail.json` file
 in the current directory.
 
 The report format conforms to the following JSON Schema:
 
 ```json
 {
   "$schema": "http://json-schema.org/schema#",
   "type": "object",
   "definitions": {
     "TestCase": {
       "type": "object",
       "properties": {
         "name": { "type": "string" },
         "tests": { "type": "integer" },
         "failures": { "type": "integer" },
         "disabled": { "type": "integer" },
         "time": { "type": "string" },
         "testsuite": {
           "type": "array",
           "items": {
             "$ref": "#/definitions/TestInfo"
           }
         }
       }
     },
     "TestInfo": {
       "type": "object",
       "properties": {
         "name": { "type": "string" },
         "status": {
           "type": "string",
           "enum": ["RUN", "NOTRUN"]
         },
         "time": { "type": "string" },
         "classname": { "type": "string" },
         "failures": {
           "type": "array",
           "items": {
             "$ref": "#/definitions/Failure"
           }
         }
       }
     },
     "Failure": {
       "type": "object",
       "properties": {
         "failures": { "type": "string" },
         "type": { "type": "string" }
       }
     }
   },
   "properties": {
     "tests": { "type": "integer" },
     "failures": { "type": "integer" },
     "disabled": { "type": "integer" },
     "errors": { "type": "integer" },
     "timestamp": {
       "type": "string",
       "format": "date-time"
     },
     "time": { "type": "string" },
     "name": { "type": "string" },
     "testsuites": {
       "type": "array",
       "items": {
         "$ref": "#/definitions/TestCase"
       }
     }
   }
 }
 ```
 
 Equivalently, the report conforms to the following Proto3 messages, rendered
 using the [JSON
 encoding](https://developers.google.com/protocol-buffers/docs/proto3#json):
 
 ```proto
 syntax = "proto3";
 
 package googletest;
 
 import "google/protobuf/timestamp.proto";
 import "google/protobuf/duration.proto";
 
 message UnitTest {
   int32 tests = 1;
   int32 failures = 2;
   int32 disabled = 3;
   int32 errors = 4;
   google.protobuf.Timestamp timestamp = 5;
   google.protobuf.Duration time = 6;
   string name = 7;
   repeated TestCase testsuites = 8;
 }
 
 message TestCase {
   string name = 1;
   int32 tests = 2;
   int32 failures = 3;
   int32 disabled = 4;
   int32 errors = 5;
   google.protobuf.Duration time = 6;
   repeated TestInfo testsuite = 7;
 }
 
 message TestInfo {
   string name = 1;
   enum Status {
     RUN = 0;
     NOTRUN = 1;
   }
   Status status = 2;
   google.protobuf.Duration time = 3;
   string classname = 4;
   message Failure {
     string failures = 1;
     string type = 2;
   }
   repeated Failure failures = 5;
 }
 ```
 
 For instance, the following program
 
 ```c++
 TEST(MathTest, Addition) { ... }
 TEST(MathTest, Subtraction) { ... }
 TEST(LogicTest, NonContradiction) { ... }
 ```
 
 could generate this report:
 
 ```json
 {
   "tests": 3,
   "failures": 1,
   "errors": 0,
   "time": "0.035s",
   "timestamp": "2011-10-31T18:52:42Z",
   "name": "AllTests",
   "testsuites": [
     {
       "name": "MathTest",
       "tests": 2,
       "failures": 1,
       "errors": 0,
       "time": "0.015s",
       "testsuite": [
         {
           "name": "Addition",
           "status": "RUN",
           "time": "0.007s",
           "classname": "",
           "failures": [
             {
               "message": "Value of: add(1, 1)\x0A  Actual: 3\x0AExpected: 2",
               "type": ""
             },
             {
               "message": "Value of: add(1, -1)\x0A  Actual: 1\x0AExpected: 0",
               "type": ""
             }
           ]
         },
         {
           "name": "Subtraction",
           "status": "RUN",
           "time": "0.005s",
           "classname": ""
         }
       ]
     },
     {
       "name": "LogicTest",
       "tests": 1,
       "failures": 0,
       "errors": 0,
       "time": "0.005s",
       "testsuite": [
         {
           "name": "NonContradiction",
           "status": "RUN",
           "time": "0.005s",
           "classname": ""
         }
       ]
     }
   ]
 }
 ```
 
 IMPORTANT: The exact format of the JSON document is subject to change.
 
 **Availability**: Linux, Windows, Mac.
 
 ### Controlling How Failures Are Reported
 
 #### Turning Assertion Failures into Break-Points
 
 When running test programs under a debugger, it's very convenient if the
 debugger can catch an assertion failure and automatically drop into interactive
 mode. googletest's *break-on-failure* mode supports this behavior.
 
 To enable it, set the `GTEST_BREAK_ON_FAILURE` environment variable to a value
 other than `0` . Alternatively, you can use the `--gtest_break_on_failure`
 command line flag.
 
 **Availability**: Linux, Windows, Mac.
 
 #### Disabling Catching Test-Thrown Exceptions
 
 googletest can be used either with or without exceptions enabled. If a test
 throws a C++ exception or (on Windows) a structured exception (SEH), by default
 googletest catches it, reports it as a test failure, and continues with the next
 test method. This maximizes the coverage of a test run. Also, on Windows an
 uncaught exception will cause a pop-up window, so catching the exceptions allows
 you to run the tests automatically.
 
 When debugging the test failures, however, you may instead want the exceptions
 to be handled by the debugger, such that you can examine the call stack when an
 exception is thrown. To achieve that, set the `GTEST_CATCH_EXCEPTIONS`
 environment variable to `0`, or use the `--gtest_catch_exceptions=0` flag when
 running the tests.
 
 **Availability**: Linux, Windows, Mac.
 
diff --git a/googletest/docs/faq.md b/googletest/docs/faq.md
index dad28369..d613f7ba 100644
--- a/googletest/docs/faq.md
+++ b/googletest/docs/faq.md
@@ -1,769 +1,769 @@
 # Googletest FAQ
 
 
 ## Why should test case names and test names not contain underscore?
 
 Underscore (`_`) is special, as C++ reserves the following to be used by the
 compiler and the standard library:
 
 1.  any identifier that starts with an `_` followed by an upper-case letter, and
 1.  any identifier that contains two consecutive underscores (i.e. `__`)
     *anywhere* in its name.
 
 User code is *prohibited* from using such identifiers.
 
 Now let's look at what this means for `TEST` and `TEST_F`.
 
 Currently `TEST(TestCaseName, TestName)` generates a class named
 `TestCaseName_TestName_Test`. What happens if `TestCaseName` or `TestName`
 contains `_`?
 
 1.  If `TestCaseName` starts with an `_` followed by an upper-case letter (say,
     `_Foo`), we end up with `_Foo_TestName_Test`, which is reserved and thus
     invalid.
 1.  If `TestCaseName` ends with an `_` (say, `Foo_`), we get
     `Foo__TestName_Test`, which is invalid.
 1.  If `TestName` starts with an `_` (say, `_Bar`), we get
     `TestCaseName__Bar_Test`, which is invalid.
 1.  If `TestName` ends with an `_` (say, `Bar_`), we get
     `TestCaseName_Bar__Test`, which is invalid.
 
 So clearly `TestCaseName` and `TestName` cannot start or end with `_`. (Actually,
 `TestCaseName` can start with `_`, as long as the `_` isn't followed by an
 upper-case letter, but that's getting complicated. So for simplicity we just say
 that it cannot start with `_`.)
 
 It may seem fine for `TestCaseName` and `TestName` to contain `_` in the middle.
 However, consider this:
 
 ```c++
 TEST(Time, Flies_Like_An_Arrow) { ... }
 TEST(Time_Flies, Like_An_Arrow) { ... }
 ```
 
 Now, the two `TEST`s will both generate the same class
 (`Time_Flies_Like_An_Arrow_Test`). That's not good.
 
 So for simplicity, we just ask the users to avoid `_` in `TestCaseName` and
 `TestName`. The rule is more constraining than necessary, but it's simple and
 easy to remember. It also gives googletest some wiggle room in case its
 implementation needs to change in the future.
 
 If you violate the rule, there may not be immediate consequences, but your test
 may (just may) break with a new compiler (or a new version of the compiler you
 are using) or with a new version of googletest. Therefore it's best to follow
 the rule.
 
 ## Why does googletest support `EXPECT_EQ(NULL, ptr)` and `ASSERT_EQ(NULL, ptr)` but not `EXPECT_NE(NULL, ptr)` and `ASSERT_NE(NULL, ptr)`?
 
 First of all, you can use `EXPECT_NE(nullptr, ptr)` and `ASSERT_NE(nullptr,
 ptr)`. This is the preferred syntax in the style guide because `nullptr` does not
 have the type problems that `NULL` does, which is why `NULL` does not work.
 
 Due to some peculiarity of C++, it requires some non-trivial template meta
 programming tricks to support using `NULL` as an argument of the `EXPECT_XX()`
 and `ASSERT_XX()` macros. Therefore we only do it where it's most needed
 (otherwise we make the implementation of googletest harder to maintain and more
 error-prone than necessary).
 
 The `EXPECT_EQ()` macro takes the *expected* value as its first argument and the
 *actual* value as the second. It's reasonable that someone wants to write
 `EXPECT_EQ(NULL, some_expression)`, and this indeed was requested several times.
 Therefore we implemented it.
 
 The need for `EXPECT_NE(NULL, ptr)` isn't nearly as strong. When the assertion
 fails, you already know that `ptr` must be `NULL`, so it doesn't add any
 information to print `ptr` in this case. That means `EXPECT_TRUE(ptr != NULL)`
 works just as well.
 
 If we were to support `EXPECT_NE(NULL, ptr)`, for consistency we'd have to
 support `EXPECT_NE(ptr, NULL)` as well, since unlike `EXPECT_EQ`, we don't have a
 convention on the order of the two arguments for `EXPECT_NE`. This means using
 the template meta programming tricks twice in the implementation, making it even
 harder to understand and maintain. We believe the benefit doesn't justify the
 cost.
 
 Finally, with the growth of the gMock matcher library, we are encouraging people
 to use the unified `EXPECT_THAT(value, matcher)` syntax more often in tests. One
 significant advantage of the matcher approach is that matchers can be easily
 combined to form new matchers, while the `EXPECT_NE`, etc, macros cannot be
 easily combined. Therefore we want to invest more in the matchers than in the
 `EXPECT_XX()` macros.
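 
 For example, the `NotNull()` matcher from gMock covers the `EXPECT_NE(NULL, ptr)`
 use case directly:
 
 ```c++
 #include "gmock/gmock.h"
 #include "gtest/gtest.h"
 
 using ::testing::NotNull;
 
 TEST(PointerTest, LookupReturnsNonNull) {
   int value = 42;
   int* ptr = &value;  // Stand-in for the pointer under test.
   // Fails with an informative message if ptr is null, and composes with
   // other matchers if needed.
   EXPECT_THAT(ptr, NotNull());
 }
 ```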
 
 ## I need to test that different implementations of an interface satisfy some common requirements. Should I use typed tests or value-parameterized tests?
 
 For testing various implementations of the same interface, either typed tests or
 value-parameterized tests can get it done. It's really up to you, the user, to
 decide which is more convenient for you, depending on your particular case. Some
 rough guidelines:
 
 *   Typed tests can be easier to write if instances of the different
     implementations can be created the same way, modulo the type. For example,
     if all these implementations have a public default constructor (such that
     you can write `new TypeParam`), or if their factory functions have the same
     form (e.g. `CreateInstance<TypeParam>()`).
 *   Value-parameterized tests can be easier to write if you need different code
     patterns to create different implementations' instances, e.g. `new Foo` vs
     `new Bar(5)`. To accommodate the differences, you can write factory
     function wrappers and pass these function pointers to the tests as their
     parameters.
 *   When a typed test fails, the output includes the name of the type, which can
     help you quickly identify which implementation is wrong. Value-parameterized
     tests cannot do this, so there you'll have to look at the iteration number
     to know which implementation the failure is from, which is less direct.
 *   If you make a mistake writing a typed test, the compiler errors can be
     harder to digest, as the code is templatized.
 *   When using typed tests, you need to make sure you are testing against the
     interface type, not the concrete types (in other words, you want to make
     sure `implicit_cast<MyInterface*>(my_concrete_impl)` works, not just that
     `my_concrete_impl` works). It's less likely to make mistakes in this area
     when using value-parameterized tests.
 
 I hope I didn't confuse you more. :-) If you don't mind, I'd suggest you give
 both approaches a try. Practice is a much better way to grasp the subtle
 differences between the two tools. Once you have some concrete experience, you
 can much more easily decide which one to use the next time.
 
 ## My death tests became very slow - what happened?
 
 In August 2008 we had to switch the default death test style from `fast` to
 `threadsafe`, as the former is no longer safe now that threaded logging is the
 default. This caused many death tests to slow down. Unfortunately this change
 was necessary.
 
 Please read [Fixing Failing Death Tests](death_test_styles.md) for what you can
 do.
 
 ## I got some run-time errors about invalid proto descriptors when using `ProtocolMessageEquals`. Help!
 
 **Note:** `ProtocolMessageEquals` and `ProtocolMessageEquiv` are *deprecated*
 now. Please use `EqualsProto`, etc instead.
 
 `ProtocolMessageEquals` and `ProtocolMessageEquiv` were redefined recently and
 are now less tolerant on invalid protocol buffer definitions. In particular, if
 you have a `foo.proto` that doesn't fully qualify the type of a protocol message
 it references (e.g. `message<Bar>` where it should be `message<blah.Bar>`), you
 will now get run-time errors like:
 
 ```
 ... descriptor.cc:...] Invalid proto descriptor for file "path/to/foo.proto":
 ... descriptor.cc:...]  blah.MyMessage.my_field: ".Bar" is not defined.
 ```
 
 If you see this, your `.proto` file is broken and needs to be fixed by making
 the types fully qualified. The new definition of `ProtocolMessageEquals` and
 `ProtocolMessageEquiv` just happen to reveal your bug.
 
 ## My death test modifies some state, but the change seems lost after the death test finishes. Why?
 
 Death tests (`EXPECT_DEATH`, etc) are executed in a sub-process so that the
 expected crash won't kill the test program (i.e. the parent process). As a
 result, any in-memory side effects they incur are observable in their respective
 sub-processes, but not in the parent process. You can think of them as running
 in a parallel universe, more or less.
 
-In particular, if you use [gMock](http://go/gmock) and the death test statement
+In particular, if you use [gMock](../../googlemock) and the death test statement
 invokes some mock methods, the parent process will think the calls have never
 occurred. Therefore, you may want to move your `EXPECT_CALL` statements inside
 the `EXPECT_DEATH` macro.
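 
 For instance, here is a minimal sketch of that arrangement (`MockFoo`, its
 method `DoThis()`, and the `Die()` function are hypothetical):
 
 ```c++
 TEST(MyDeathTest, MockCallIsObservedInsideTheDeathStatement) {
   MockFoo foo;
   EXPECT_DEATH({
     // Both the expectation and the call happen in the child process, so the
     // call is observed there.
     EXPECT_CALL(foo, DoThis());
     foo.DoThis();
     Die();  // Hypothetical function that crashes the process.
   }, "");
 }
 ```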
 
 ## EXPECT_EQ(htonl(blah), blah_blah) generates weird compiler errors in opt mode. Is this a googletest bug?
 
 Actually, the bug is in `htonl()`.
 
 According to `'man htonl'`, `htonl()` is a *function*, which means it's valid to
 use `htonl` as a function pointer. However, in opt mode `htonl()` is defined as
 a *macro*, which breaks this usage.
 
 Worse, the macro definition of `htonl()` uses a `gcc` extension and is *not*
 standard C++. That hacky implementation has some ad hoc limitations. In
 particular, it prevents you from writing `Foo<sizeof(htonl(x))>()`, where `Foo`
 is a template that has an integral argument.
 
 The implementation of `EXPECT_EQ(a, b)` uses `sizeof(... a ...)` inside a
 template argument, and thus doesn't compile in opt mode when `a` contains a call
 to `htonl()`. It is difficult to make `EXPECT_EQ` bypass the `htonl()` bug, as
 the solution must work with different compilers on various platforms.
 
 `htonl()` has some other problems as described in `//util/endian/endian.h`,
 which defines `ghtonl()` to replace it. `ghtonl()` does the same thing `htonl()`
 does, only without its problems. We suggest you use `ghtonl()` instead of
 `htonl()`, both in your tests and production code.
 
 `//util/endian/endian.h` also defines `ghtons()`, which solves similar problems
 in `htons()`.
 
 Don't forget to add `//util/endian` to the list of dependencies in the `BUILD`
 file wherever `ghtonl()` and `ghtons()` are used. The library consists of a
 single header file and will not bloat your binary.
 
 ## The compiler complains about "undefined references" to some static const member variables, but I did define them in the class body. What's wrong?
 
 If your class has a static data member:
 
 ```c++
 // foo.h
 class Foo {
   ...
   static const int kBar = 100;
 };
 ```
 
 You also need to define it *outside* of the class body in `foo.cc`:
 
 ```c++
 const int Foo::kBar;  // No initializer here.
 ```
 
 Otherwise your code is **invalid C++**, and may break in unexpected ways. In
 particular, using it in googletest comparison assertions (`EXPECT_EQ`, etc) will
 generate an "undefined reference" linker error. The fact that "it used to work"
 doesn't mean it's valid. It just means that you were lucky. :-)
 
 ## Can I derive a test fixture from another?
 
 Yes.
 
 Each test fixture has a corresponding and same-named test case. This means only
 one test case can use a particular fixture. Sometimes, however, multiple test
 cases may want to use the same or slightly different fixtures. For example, you
 may want to make sure that all of a GUI library's test cases don't leak
 important system resources like fonts and brushes.
 
 In googletest, you share a fixture among test cases by putting the shared logic
 in a base test fixture, then deriving from that base a separate fixture for each
 test case that wants to use this common logic. You then use `TEST_F()` to write
 tests using each derived fixture.
 
 Typically, your code looks like this:
 
 ```c++
 // Defines a base test fixture.
 class BaseTest : public ::testing::Test {
  protected:
   ...
 };
 
 // Derives a fixture FooTest from BaseTest.
 class FooTest : public BaseTest {
  protected:
   void SetUp() override {
     BaseTest::SetUp();  // Sets up the base fixture first.
     ... additional set-up work ...
   }
 
   void TearDown() override {
     ... clean-up work for FooTest ...
     BaseTest::TearDown();  // Remember to tear down the base fixture
                            // after cleaning up FooTest!
   }
 
   ... functions and variables for FooTest ...
 };
 
 // Tests that use the fixture FooTest.
 TEST_F(FooTest, Bar) { ... }
 TEST_F(FooTest, Baz) { ... }
 
 ... additional fixtures derived from BaseTest ...
 ```
 
 If necessary, you can continue to derive test fixtures from a derived fixture.
 googletest has no limit on how deep the hierarchy can be.
 
 For a complete example using derived test fixtures, see [googletest
 sample](https://github.com/google/googletest/blob/master/googletest/samples/sample5_unittest.cc)
 
 ## My compiler complains "void value not ignored as it ought to be." What does this mean?
 
 You're probably using an `ASSERT_*()` in a function that doesn't return `void`.
 `ASSERT_*()` can only be used in `void` functions, due to exceptions being
 disabled by our build system. Please see more details
 [here](advanced.md#assertion-placement).
 
 ## My death test hangs (or seg-faults). How do I fix it?
 
 In googletest, death tests are run in a child process and the way they work is
 delicate. To write death tests you really need to understand how they work.
 Please make sure you have read [this](advanced.md#how-it-works).
 
 In particular, death tests don't like having multiple threads in the parent
 process. So the first thing you can try is to eliminate creating threads outside
-of `EXPECT_DEATH()`. For example, you may want to use [mocks](http://go/gmock)
+of `EXPECT_DEATH()`. For example, you may want to use [mocks](../../googlemock)
 or fake objects instead of real ones in your tests.
 
 Sometimes this is impossible as some library you must use may be creating
 threads before `main()` is even reached. In this case, you can try to minimize
 the chance of conflicts by either moving as many activities as possible inside
 `EXPECT_DEATH()` (in the extreme case, you want to move everything inside), or
 leaving as few things as possible in it. Also, you can try to set the death test
 style to `"threadsafe"`, which is safer but slower, and see if it helps.
 
 If you go with thread-safe death tests, remember that they rerun the test
 program from the beginning in the child process. Therefore make sure your
 program can run side-by-side with itself and is deterministic.
 
 In the end, this boils down to good concurrent programming. You have to make
 sure that there are no race conditions or deadlocks in your program. No silver
 bullet - sorry!
 
 ## Should I use the constructor/destructor of the test fixture or SetUp()/TearDown()?
 
 The first thing to remember is that googletest does **not** reuse the same test
 fixture object across multiple tests. For each `TEST_F`, googletest will create
 a **fresh** test fixture object, immediately call `SetUp()`, run the test body,
 call `TearDown()`, and then delete the test fixture object.
 
 When you need to write per-test set-up and tear-down logic, you have the choice
 between using the test fixture constructor/destructor or `SetUp()/TearDown()`.
 The former is usually preferred, as it has the following benefits:
 
 *   By initializing a member variable in the constructor, we have the option to
     make it `const`, which helps prevent accidental changes to its value and
     makes the tests more obviously correct.
 *   In case we need to subclass the test fixture class, the subclass'
     constructor is guaranteed to call the base class' constructor *first*, and
     the subclass' destructor is guaranteed to call the base class' destructor
     *afterward*. With `SetUp()/TearDown()`, a subclass may make the mistake of
     forgetting to call the base class' `SetUp()/TearDown()` or call them at the
     wrong time.
 
 You may still want to use `SetUp()/TearDown()` in the following rare cases:
 
 *   In the body of a constructor (or destructor), it's not possible to use the
     `ASSERT_xx` macros. Therefore, if the set-up operation could cause a fatal
     test failure that should prevent the test from running, it's necessary to
     use a `CHECK` macro or to use `SetUp()` instead of a constructor.
 *   If the tear-down operation could throw an exception, you must use
     `TearDown()` as opposed to the destructor, as throwing in a destructor leads
     to undefined behavior and usually will kill your program right away. Note
     that many standard libraries (like STL) may throw when exceptions are
     enabled in the compiler. Therefore you should prefer `TearDown()` if you
     want to write portable tests that work with or without exceptions.
 *   The googletest team is considering making the assertion macros throw on
     platforms where exceptions are enabled (e.g. Windows, Mac OS, and Linux
     client-side), which will eliminate the need for the user to propagate
     failures from a subroutine to its caller. Therefore, you shouldn't use
     googletest assertions in a destructor if your code could run on such a
     platform.
 *   In a constructor or destructor, you cannot make a virtual function call on
     this object. (You can call a method declared as virtual, but it will be
     statically bound.) Therefore, if you need to call a method that will be
     overridden in a derived class, you have to use `SetUp()/TearDown()`.
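 
 For reference, here is a minimal sketch of the constructor/destructor style (the
 member and its value are hypothetical):
 
 ```c++
 #include <string>
 
 #include "gtest/gtest.h"
 
 class FooTest : public ::testing::Test {
  protected:
   // Initializing the member in the constructor lets it be const.
   FooTest() : golden_path_("testdata/foo.golden") {}
 
   // No explicit destructor needed; members clean themselves up.
 
   const std::string golden_path_;
 };
 
 TEST_F(FooTest, UsesTheConstInitializedMember) {
   EXPECT_FALSE(golden_path_.empty());
 }
 ```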
 
 
 ## The compiler complains "no matching function to call" when I use ASSERT_PRED*. How do I fix it?
 
 If the predicate function you use in `ASSERT_PRED*` or `EXPECT_PRED*` is
 overloaded or a template, the compiler will have trouble figuring out which
 overloaded version it should use. `ASSERT_PRED_FORMAT*` and
 `EXPECT_PRED_FORMAT*` don't have this problem.
 
 If you see this error, you might want to switch to
 `(ASSERT|EXPECT)_PRED_FORMAT*`, which will also give you a better failure
 message. If, however, that is not an option, you can resolve the problem by
 explicitly telling the compiler which version to pick.
 
 For example, suppose you have
 
 ```c++
 bool IsPositive(int n) {
   return n > 0;
 }
 
 bool IsPositive(double x) {
   return x > 0;
 }
 ```
 
 you will get a compiler error if you write
 
 ```c++
 EXPECT_PRED1(IsPositive, 5);
 ```
 
 However, this will work:
 
 ```c++
 EXPECT_PRED1(static_cast<bool (*)(int)>(IsPositive), 5);
 ```
 
 (The stuff inside the angled brackets for the `static_cast` operator is the type
 of the function pointer for the `int`-version of `IsPositive()`.)
 
 As another example, when you have a template function
 
 ```c++
 template <typename T>
 bool IsNegative(T x) {
   return x < 0;
 }
 ```
 
 you can use it in a predicate assertion like this:
 
 ```c++
 ASSERT_PRED1(IsNegative<int>, -5);
 ```
 
 Things are more interesting if your template has more than one parameter. The
 following won't compile:
 
 ```c++
 ASSERT_PRED2(GreaterThan<int, int>, 5, 0);
 ```
 
 as the C++ pre-processor thinks you are giving `ASSERT_PRED2` 4 arguments, which
 is one more than expected. The workaround is to wrap the predicate function in
 parentheses:
 
 ```c++
 ASSERT_PRED2((GreaterThan<int, int>), 5, 0);
 ```
 
 
 ## My compiler complains about "ignoring return value" when I call RUN_ALL_TESTS(). Why?
 
 Some people had been ignoring the return value of `RUN_ALL_TESTS()`. That is,
 instead of
 
 ```c++
   return RUN_ALL_TESTS();
 ```
 
 they write
 
 ```c++
   RUN_ALL_TESTS();
 ```
 
 This is **wrong and dangerous**. The testing service needs to see the return
 value of `RUN_ALL_TESTS()` in order to determine if a test has passed. If your
 `main()` function ignores it, your test will be considered successful even if it
 has a googletest assertion failure. Very bad.
 
 We have decided to fix this (thanks to Michael Chastain for the idea). Now, your
 code will no longer be able to ignore `RUN_ALL_TESTS()` when compiled with
 `gcc`. If you do so, you'll get a compiler error.
 
 If you see the compiler complaining about you ignoring the return value of
 `RUN_ALL_TESTS()`, the fix is simple: just make sure its value is used as the
 return value of `main()`.
 
 But how could we introduce a change that breaks existing tests? Well, in this
 case, the code was already broken in the first place, so we didn't break it. :-)
 
 ## My compiler complains that a constructor (or destructor) cannot return a value. What's going on?
 
 Due to a peculiarity of C++, in order to support the syntax for streaming
 messages to an `ASSERT_*`, e.g.
 
 ```c++
   ASSERT_EQ(1, Foo()) << "blah blah" << foo;
 ```
 
 we had to give up using `ASSERT*` and `FAIL*` (but not `EXPECT*` and
 `ADD_FAILURE*`) in constructors and destructors. The workaround is to move the
 content of your constructor/destructor to a private void member function, or
 switch to `EXPECT_*()` if that works. This
 [section](advanced.md#assertion-placement) in the user's guide explains it.
 
 ## My SetUp() function is not called. Why?
 
 C++ is case-sensitive. Did you spell it as `Setup()`?
 
 Similarly, sometimes people spell `SetUpTestCase()` as `SetupTestCase()` and
 wonder why it's never called.
 
 ## How do I jump to the line of a failure in Emacs directly?
 
 googletest's failure message format is understood by Emacs and many other IDEs,
 like Acme and Xcode. If a googletest message is in a compilation buffer in
 Emacs, then it's clickable.
 
 
 ## I have several test cases which share the same test fixture logic, do I have to define a new test fixture class for each of them? This seems pretty tedious.
 
 You don't have to. Instead of
 
 ```c++
 class FooTest : public BaseTest {};
 
 TEST_F(FooTest, Abc) { ... }
 TEST_F(FooTest, Def) { ... }
 
 class BarTest : public BaseTest {};
 
 TEST_F(BarTest, Abc) { ... }
 TEST_F(BarTest, Def) { ... }
 ```
 
 you can simply `typedef` the test fixtures:
 
 ```c++
 typedef BaseTest FooTest;
 
 TEST_F(FooTest, Abc) { ... }
 TEST_F(FooTest, Def) { ... }
 
 typedef BaseTest BarTest;
 
 TEST_F(BarTest, Abc) { ... }
 TEST_F(BarTest, Def) { ... }
 ```
 
 ## googletest output is buried in a whole bunch of LOG messages. What do I do?
 
 The googletest output is meant to be a concise and human-friendly report. If
 your test generates textual output itself, it will mix with the googletest
 output, making it hard to read. However, there is an easy solution to this
 problem.
 
 Since `LOG` messages go to stderr, we decided to let googletest output go to
 stdout. This way, you can easily separate the two using redirection. For
 example:
 
 ```shell
 $ ./my_test > gtest_output.txt
 ```
 
 
 ## Why should I prefer test fixtures over global variables?
 
 There are several good reasons:
 
 1.  It's likely your test needs to change the states of its global variables.
     This makes it difficult to keep side effects from escaping one test and
     contaminating others, making debugging difficult. By using fixtures, each
     test has a fresh set of variables that's different (but with the same
     names). Thus, tests are kept independent of each other.
 1.  Global variables pollute the global namespace.
 1.  Test fixtures can be reused via subclassing, which cannot be done easily
     with global variables. This is useful if many test cases have something in
     common.
 
 
 ## What can the statement argument in ASSERT_DEATH() be?
 
 `ASSERT_DEATH(*statement*, *regex*)` (or any death assertion macro) can be used
 wherever `*statement*` is valid. So basically `*statement*` can be any C++
 statement that makes sense in the current context. In particular, it can
 reference global and/or local variables, and can be:
 
 *   a simple function call (often the case),
 *   a complex expression, or
 *   a compound statement.
 
 Some examples are shown here:
 
 ```c++
 // A death test can be a simple function call.
 TEST(MyDeathTest, FunctionCall) {
   ASSERT_DEATH(Xyz(5), "Xyz failed");
 }
 
 // Or a complex expression that references variables and functions.
 TEST(MyDeathTest, ComplexExpression) {
   const bool c = Condition();
   ASSERT_DEATH((c ? Func1(0) : object2.Method("test")),
                "(Func1|Method) failed");
 }
 
 // Death assertions can be used anywhere in a function.  In
 // particular, they can be inside a loop.
 TEST(MyDeathTest, InsideLoop) {
   // Verifies that Foo(0), Foo(1), ..., and Foo(4) all die.
   for (int i = 0; i < 5; i++) {
     EXPECT_DEATH_M(Foo(i), "Foo has \\d+ errors",
                    ::testing::Message() << "where i is " << i);
   }
 }
 
 // A death assertion can contain a compound statement.
 TEST(MyDeathTest, CompoundStatement) {
   // Verifies that at least one of Bar(0), Bar(1), ..., and
   // Bar(4) dies.
   ASSERT_DEATH({
     for (int i = 0; i < 5; i++) {
       Bar(i);
     }
   },
   "Bar has \\d+ errors");
 }
 ```
 
 gtest-death-test_test.cc contains more examples if you are interested.
 
 ## I have a fixture class `FooTest`, but `TEST_F(FooTest, Bar)` gives me error ``"no matching function for call to `FooTest::FooTest()'"``. Why?
 
 Googletest needs to be able to create objects of your test fixture class, so it
 must have a default constructor. Normally the compiler will define one for you.
 However, there are cases where you have to define your own:
 
 *   If you explicitly declare a non-default constructor for class `FooTest`
     (`DISALLOW_EVIL_CONSTRUCTORS()` does this), then you need to define a
     default constructor, even if it would be empty.
 *   If `FooTest` has a const non-static data member, then you have to define the
     default constructor *and* initialize the const member in the initializer
     list of the constructor, as sketched below. (Early versions of `gcc` don't
     force you to initialize the const member. It's a bug that was fixed in
     `gcc 4`.)
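
 A short sketch of the const-member case above (the member name `max_retries_`
 is made up for illustration):
 
 ```c++
 #include "gtest/gtest.h"
 
 class FooTest : public ::testing::Test {
  protected:
   // The const member must be initialized in the constructor's initializer
   // list, so we have to write the default constructor ourselves.
   FooTest() : max_retries_(3) {}
 
   const int max_retries_;
 };
 
 TEST_F(FooTest, UsesConstMember) { EXPECT_EQ(max_retries_, 3); }
 ```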
 
 ## Why does ASSERT_DEATH complain about previous threads that were already joined?
 
 With the Linux pthread library, there is no turning back once you cross the line
 from single thread to multiple threads. The first time you create a thread, a
 manager thread is created in addition, so you get 3, not 2, threads. Later when
 the thread you create joins the main thread, the thread count decrements by 1,
 but the manager thread will never be killed, so you still have 2 threads, which
 means you cannot safely run a death test.
 
 The new NPTL thread library doesn't suffer from this problem, as it doesn't
 create a manager thread. However, if you don't control which machine your test
 runs on, you shouldn't depend on this.
 
 ## Why does googletest require the entire test case, instead of individual tests, to be named *DeathTest when it uses ASSERT_DEATH?
 
 googletest does not interleave tests from different test cases. That is, it runs
 all tests in one test case first, and then runs all tests in the next test case,
 and so on. googletest does this because it needs to set up a test case before
 the first test in it is run, and tear it down afterwards. Splitting up the test
 case would require multiple set-up and tear-down processes, which is inefficient
 and makes the semantics unclean.
 
 If we were to determine the order of tests based on test name instead of test
 case name, then we would have a problem with the following situation:
 
 ```c++
 TEST_F(FooTest, AbcDeathTest) { ... }
 TEST_F(FooTest, Uvw) { ... }
 
 TEST_F(BarTest, DefDeathTest) { ... }
 TEST_F(BarTest, Xyz) { ... }
 ```
 
 Since `FooTest.AbcDeathTest` needs to run before `BarTest.Xyz`, and we don't
 interleave tests from different test cases, we need to run all tests in the
 `FooTest` case before running any test in the `BarTest` case. This contradicts
 the requirement to run `BarTest.DefDeathTest` before `FooTest.Uvw`.
 
 ## But I don't like calling my entire test case \*DeathTest when it contains both death tests and non-death tests. What do I do?
 
 You don't have to, but if you like, you may split up the test case into
 `FooTest` and `FooDeathTest`, where the names make it clear that they are
 related:
 
 ```c++
 class FooTest : public ::testing::Test { ... };
 
 TEST_F(FooTest, Abc) { ... }
 TEST_F(FooTest, Def) { ... }
 
 using FooDeathTest = FooTest;
 
 TEST_F(FooDeathTest, Uvw) { ... EXPECT_DEATH(...) ... }
 TEST_F(FooDeathTest, Xyz) { ... ASSERT_DEATH(...) ... }
 ```
 
 ## googletest prints the LOG messages in a death test's child process only when the test fails. How can I see the LOG messages when the death test succeeds?
 
 Printing the LOG messages generated by the statement inside `EXPECT_DEATH()`
 makes it harder to search for real problems in the parent's log. Therefore,
 googletest only prints them when the death test has failed.
 
 If you really need to see such LOG messages, a workaround is to temporarily
 break the death test (e.g. by changing the regex pattern it is expected to
 match). Admittedly, this is a hack. We'll consider a more permanent solution
 after the fork-and-exec-style death tests are implemented.
 
 ## The compiler complains about "no match for 'operator<<'" when I use an assertion. What gives?
 
 If you use a user-defined type `FooType` in an assertion, you must make sure
 there is an `std::ostream& operator<<(std::ostream&, const FooType&)` function
 defined such that we can print a value of `FooType`.
 
 In addition, if `FooType` is declared in a namespace, the `<<` operator also
 needs to be defined in the *same* namespace. See go/totw/49 for details.
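
 For example, a sketch for a hypothetical `FooType` declared in namespace
 `foo`:
 
 ```c++
 #include <ostream>
 
 namespace foo {
 
 struct FooType {
   int value;
 };
 
 // Defined in the same namespace as FooType, so googletest's printer can
 // find it via argument-dependent lookup.
 inline std::ostream& operator<<(std::ostream& os, const FooType& obj) {
   return os << "FooType(" << obj.value << ")";
 }
 
 }  // namespace foo
 ```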
 
 ## How do I suppress the memory leak messages on Windows?
 
 Since the statically initialized googletest singleton requires allocations on
 the heap, the Visual C++ memory leak detector will report memory leaks at the
 end of the program run. The easiest way to avoid this is to use the
 `_CrtMemCheckpoint` and `_CrtMemDumpAllObjectsSince` calls to not report any
 statically initialized heap objects. See MSDN for more details and additional
 heap check/debug routines.
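
 A sketch of one way to wire those calls into your own main(), assuming a debug
 (`_DEBUG`) build that uses the Visual C++ CRT debug heap:
 
 ```c++
 #include <crtdbg.h>
 
 #include "gtest/gtest.h"
 
 int main(int argc, char **argv) {
   ::testing::InitGoogleTest(&argc, argv);
 
   // Snapshot the heap after googletest's singleton and flag parsing have
   // allocated, so those objects are excluded from the leak report.
   _CrtMemState checkpoint;
   _CrtMemCheckpoint(&checkpoint);
 
   const int result = RUN_ALL_TESTS();
 
   // Report only the objects allocated after the checkpoint.
   _CrtMemDumpAllObjectsSince(&checkpoint);
   return result;
 }
 ```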
 
 
 ## How can my code detect if it is running in a test?
 
 If you write code that sniffs whether it's running in a test and does different
 things accordingly, you are leaking test-only logic into production code and
 there is no easy way to ensure that the test-only code paths aren't run by
 mistake in production. Such cleverness also leads to
 [Heisenbugs](https://en.wikipedia.org/wiki/Heisenbug). Therefore we strongly
 advise against the practice, and googletest doesn't provide a way to do it.
 
 In general, the recommended way to cause the code to behave differently under
-test is [Dependency Injection](http://go/dependency-injection). You can inject
+test is [Dependency Injection](https://en.wikipedia.org/wiki/Dependency_injection). You can inject
 different functionality from the test and from the production code. Since your
 production code doesn't link in the for-test logic at all (the
 [`testonly`](http://go/testonly) attribute for BUILD targets helps to ensure
 that), there is no danger in accidentally running it.
 
 However, if you *really*, *really*, *really* have no choice, and if you follow
 the rule of ending your test program names with `_test`, you can use the
 *horrible* hack of sniffing your executable name (`argv[0]` in `main()`) to know
 whether the code is under test.
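
 If you really must, a sketch of that hack (the helper name is made up, and it
 relies on the `_test` naming convention mentioned above):
 
 ```c++
 #include <string>
 
 // Illustrative only -- see the caveats above before relying on this.
 bool LooksLikeTestBinary(const char* argv0) {
   const std::string name(argv0);
   const std::string suffix = "_test";
   return name.size() >= suffix.size() &&
          name.compare(name.size() - suffix.size(), suffix.size(), suffix) == 0;
 }
 ```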
 
 
 ## How do I temporarily disable a test?
 
 If you have a broken test that you cannot fix right away, you can add the
 `DISABLED_` prefix to its name. This will exclude it from execution. This is
 better than commenting out the code or using `#if 0`, as disabled tests are
 still compiled (and thus won't rot).
 
 To include disabled tests in test execution, just invoke the test program with
 the `--gtest_also_run_disabled_tests` flag.
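
 For example (the test and fixture names are illustrative):
 
 ```c++
 #include "gtest/gtest.h"
 
 // Temporarily disabled, but still compiled so it won't rot.
 TEST(FooTest, DISABLED_DoesAbc) { /* ... */ }
 
 // The same prefix on a fixture/test case name disables all of its tests.
 class DISABLED_BarTest : public ::testing::Test {};
 TEST_F(DISABLED_BarTest, DoesXyz) { /* ... */ }
 ```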
 
 ## Is it OK if I have two separate `TEST(Foo, Bar)` test methods defined in different namespaces?
 
 Yes.
 
 The rule is **all test methods in the same test case must use the same fixture
 class.** This means that the following is **allowed** because both tests use the
 same fixture class (`::testing::Test`).
 
 ```c++
 namespace foo {
 TEST(CoolTest, DoSomething) {
   SUCCEED();
 }
 }  // namespace foo
 
 namespace bar {
 TEST(CoolTest, DoSomething) {
   SUCCEED();
 }
 }  // namespace bar
 ```
 
 However, the following code is **not allowed** and will produce a runtime error
 from googletest because the test methods are using different test fixture
 classes with the same test case name.
 
 ```c++
 namespace foo {
 class CoolTest : public ::testing::Test {};  // Fixture: foo::CoolTest
 TEST_F(CoolTest, DoSomething) {
   SUCCEED();
 }
 }  // namespace foo
 
 namespace bar {
 class CoolTest : public ::testing::Test {};  // Fixture: bar::CoolTest
 TEST_F(CoolTest, DoSomething) {
   SUCCEED();
 }
 }  // namespace bar
 ```
diff --git a/googletest/docs/primer.md b/googletest/docs/primer.md
index 260d50b8..02dea424 100644
--- a/googletest/docs/primer.md
+++ b/googletest/docs/primer.md
@@ -1,569 +1,569 @@
 # Googletest Primer
 
 
 ## Introduction: Why googletest?
 
 *googletest* helps you write better C++ tests.
 
-googletest is a testing framework developed by the [Testing
-Technology](http://engdoc/eng/testing/TT/) team with Google's specific
+googletest is a testing framework developed by the Testing
+Technology team with Google's specific
 requirements and constraints in mind. No matter whether you work on Linux,
 Windows, or a Mac, if you write C++ code, googletest can help you. And it
 supports *any* kind of tests, not just unit tests.
 
 So what makes a good test, and how does googletest fit in? We believe:
 
 1.  Tests should be *independent* and *repeatable*. It's a pain to debug a test
     that succeeds or fails as a result of other tests. googletest isolates the
     tests by running each of them on a different object. When a test fails,
     googletest allows you to run it in isolation for quick debugging.
 1.  Tests should be well *organized* and reflect the structure of the tested
     code. googletest groups related tests into test cases that can share data
     and subroutines. This common pattern is easy to recognize and makes tests
     easy to maintain. Such consistency is especially helpful when people switch
     projects and start to work on a new code base.
 1.  Tests should be *portable* and *reusable*. Google has a lot of code that is
     platform-neutral; its tests should also be platform-neutral. googletest
     works on different OSes, with different compilers (gcc, icc, and MSVC), with
     or without exceptions, so googletest tests can easily work with a variety of
     configurations.
 1.  When tests fail, they should provide as much *information* about the problem
     as possible. googletest doesn't stop at the first test failure. Instead, it
     only stops the current test and continues with the next. You can also set up
     tests that report non-fatal failures after which the current test continues.
     Thus, you can detect and fix multiple bugs in a single run-edit-compile
     cycle.
 1.  The testing framework should liberate test writers from housekeeping chores
     and let them focus on the test *content*. googletest automatically keeps
     track of all tests defined, and doesn't require the user to enumerate them
     in order to run them.
 1.  Tests should be *fast*. With googletest, you can reuse shared resources
     across tests and pay for the set-up/tear-down only once, without making
     tests depend on each other.
 
 Since googletest is based on the popular xUnit architecture, you'll feel right
 at home if you've used JUnit or PyUnit before. If not, it will take you about 10
 minutes to learn the basics and get started. So let's go!
 
 ## Beware of the nomenclature
 
 _Note:_ There might be some confusion arising from different definitions of
 the terms _Test_, _Test Case_ and _Test Suite_, so beware of misunderstanding
 these.
 
 Historically, googletest started to use the term _Test Case_ for grouping
 related tests, whereas current publications including the International Software
 Testing Qualifications Board ([ISTQB](http://www.istqb.org/)) and various
 textbooks on Software Quality use the term _[Test
 Suite](http://glossary.istqb.org/search/test%20suite)_ for this.
 
 The related term _Test_, as it is used in googletest, corresponds to the term
 _[Test Case](http://glossary.istqb.org/search/test%20case)_ of ISTQB and
 others.
 
 The term _Test_ is commonly used in a sense broad enough to include ISTQB's
 definition of _Test Case_, so it's not much of a problem here. But the term
 _Test Case_ as used in Google Test contradicts that usage and is thus confusing.
 
 Unfortunately, replacing the term _Test Case_ with _Test Suite_ throughout
 googletest is not easy without breaking dependent projects, as `TestCase` is
 part of the public API in various places.
 
 So for the time being, please be aware of the different definitions of
 the terms:
 
 Meaning                                                                              | googletest Term                                                                                            | [ISTQB](http://www.istqb.org/) Term
 :----------------------------------------------------------------------------------- | :--------------------------------------------------------------------------------------------------------- | :----------------------------------
 Exercise a particular program path with specific input values and verify the results | [TEST()](#simple-tests)                                                                                    | [Test Case](http://glossary.istqb.org/search/test%20case)
-A set of several tests related to one component                                      | [TestCase](https://g3doc.corp.google.com/third_party/googletest/googletest/g3doc/primer.md#basic-concepts) | [TestSuite](http://glossary.istqb.org/search/test%20suite)
+A set of several tests related to one component                                      | [TestCase](#basic-concepts) | [TestSuite](http://glossary.istqb.org/search/test%20suite)
 
 ## Basic Concepts
 
 When using googletest, you start by writing *assertions*, which are statements
 that check whether a condition is true. An assertion's result can be *success*,
 *nonfatal failure*, or *fatal failure*. If a fatal failure occurs, it aborts the
 current function; otherwise the program continues normally.
 
 *Tests* use assertions to verify the tested code's behavior. If a test crashes
 or has a failed assertion, then it *fails*; otherwise it *succeeds*.
 
 A *test case* contains one or many tests. You should group your tests into test
 cases that reflect the structure of the tested code. When multiple tests in a
 test case need to share common objects and subroutines, you can put them into a
 *test fixture* class.
 
 A *test program* can contain multiple test cases.
 
 We'll now explain how to write a test program, starting at the individual
 assertion level and building up to tests and test cases.
 
 ## Assertions
 
 googletest assertions are macros that resemble function calls. You test a class
 or function by making assertions about its behavior. When an assertion fails,
 googletest prints the assertion's source file and line number location, along
 with a failure message. You may also supply a custom failure message which will
 be appended to googletest's message.
 
 The assertions come in pairs that test the same thing but have different effects
 on the current function. `ASSERT_*` versions generate fatal failures when they
 fail, and **abort the current function**. `EXPECT_*` versions generate nonfatal
 failures, which don't abort the current function. Usually `EXPECT_*` are
 preferred, as they allow more than one failure to be reported in a test.
 However, you should use `ASSERT_*` if it doesn't make sense to continue when the
 assertion in question fails.
 
 Since a failed `ASSERT_*` returns from the current function immediately,
 possibly skipping clean-up code that comes after it, it may cause a space leak.
 Depending on the nature of the leak, it may or may not be worth fixing - so keep
 this in mind if you get a heap checker error in addition to assertion errors.
 
 To provide a custom failure message, simply stream it into the macro using the
 `<<` operator, or a sequence of such operators. An example:
 
 ```c++
 ASSERT_EQ(x.size(), y.size()) << "Vectors x and y are of unequal length";
 
 for (int i = 0; i < x.size(); ++i) {
   EXPECT_EQ(x[i], y[i]) << "Vectors x and y differ at index " << i;
 }
 ```
 
 Anything that can be streamed to an `ostream` can be streamed to an assertion
 macro--in particular, C strings and `string` objects. If a wide string
 (`wchar_t*`, `TCHAR*` in `UNICODE` mode on Windows, or `std::wstring`) is
 streamed to an assertion, it will be translated to UTF-8 when printed.
 
 ### Basic Assertions
 
 These assertions do basic true/false condition testing.
 
 Fatal assertion            | Nonfatal assertion         | Verifies
 -------------------------- | -------------------------- | --------------------
 `ASSERT_TRUE(condition);`  | `EXPECT_TRUE(condition);`  | `condition` is true
 `ASSERT_FALSE(condition);` | `EXPECT_FALSE(condition);` | `condition` is false
 
 Remember, when they fail, `ASSERT_*` yields a fatal failure and returns from the
 current function, while `EXPECT_*` yields a nonfatal failure, allowing the
 function to continue running. In either case, an assertion failure means its
 containing test fails.
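
 A short sketch contrasting the two flavors (the conditions are arbitrary):
 
 ```c++
 #include "gtest/gtest.h"
 
 TEST(BasicAssertionsTest, TrueAndFalse) {
   // Nonfatal: the test keeps running even if this fails.
   EXPECT_TRUE(1 + 1 == 2);
 
   // Fatal: if this fails, the rest of this test body is skipped.
   ASSERT_FALSE(1 + 1 == 3);
 }
 ```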
 
 **Availability**: Linux, Windows, Mac.
 
 ### Binary Comparison
 
 This section describes assertions that compare two values.
 
 Fatal assertion          | Nonfatal assertion       | Verifies
 ------------------------ | ------------------------ | --------------
 `ASSERT_EQ(val1, val2);` | `EXPECT_EQ(val1, val2);` | `val1 == val2`
 `ASSERT_NE(val1, val2);` | `EXPECT_NE(val1, val2);` | `val1 != val2`
 `ASSERT_LT(val1, val2);` | `EXPECT_LT(val1, val2);` | `val1 < val2`
 `ASSERT_LE(val1, val2);` | `EXPECT_LE(val1, val2);` | `val1 <= val2`
 `ASSERT_GT(val1, val2);` | `EXPECT_GT(val1, val2);` | `val1 > val2`
 `ASSERT_GE(val1, val2);` | `EXPECT_GE(val1, val2);` | `val1 >= val2`
 
 Value arguments must be comparable by the assertion's comparison operator or
 you'll get a compiler error. We used to require the arguments to support the
 `<<` operator for streaming to an `ostream`, but it's no longer necessary. If
 `<<` is supported, it will be called to print the arguments when the assertion
 fails; otherwise googletest will attempt to print them in the best way it can.
 For more details and how to customize the printing of the arguments, see the
 gMock [recipe](../../googlemock/docs/CookBook.md#teaching-google-mock-how-to-print-your-values).
 
 These assertions can work with a user-defined type, but only if you define the
 corresponding comparison operator (e.g. `==`, `<`, etc). Since this is
 discouraged by the Google [C++ Style
 Guide](https://google.github.io/styleguide/cppguide.html#Operator_Overloading),
 you may need to use `ASSERT_TRUE()` or `EXPECT_TRUE()` to assert the equality of
 two objects of a user-defined type.
 
 However, when possible, `ASSERT_EQ(actual, expected)` is preferred to
 `ASSERT_TRUE(actual == expected)`, since it tells you `actual` and `expected`'s
 values on failure.
 
 Arguments are always evaluated exactly once. Therefore, it's OK for the
 arguments to have side effects. However, as with any ordinary C/C++ function,
 the arguments' evaluation order is undefined (i.e. the compiler is free to
 choose any order) and your code should not depend on any particular argument
 evaluation order.
 
 `ASSERT_EQ()` does pointer equality on pointers. If used on two C strings, it
 tests if they are in the same memory location, not if they have the same value.
 Therefore, if you want to compare C strings (e.g. `const char*`) by value, use
 `ASSERT_STREQ()`, which will be described later on. In particular, to assert
 that a C string is `NULL`, use `ASSERT_STREQ(c_string, NULL)`. Consider using
 `ASSERT_EQ(c_string, nullptr)` if C++11 is supported. To compare two `string`
 objects, you should use `ASSERT_EQ`.
 
 When doing pointer comparisons use `*_EQ(ptr, nullptr)` and `*_NE(ptr, nullptr)`
 instead of `*_EQ(ptr, NULL)` and `*_NE(ptr, NULL)`. This is because `nullptr` is
 typed while `NULL` is not. See [FAQ](faq.md#why-does-google-test-support-expect_eqnull-ptr-and-assert_eqnull-ptr-but-not-expect_nenull-ptr-and-assert_nenull-ptr)
 for more details.
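
 A sketch contrasting the cases above (the values are arbitrary):
 
 ```c++
 #include <string>
 
 #include "gtest/gtest.h"
 
 TEST(ComparisonTest, PointersVersusValues) {
   const char* c_str = "hello";
   const std::string s1 = "hello";
   const std::string s2 = "hello";
 
   EXPECT_STREQ(c_str, "hello");  // compares C string contents
   EXPECT_EQ(s1, s2);             // compares std::string objects by value
 
   const char* missing = nullptr;
   EXPECT_EQ(missing, nullptr);   // prefer nullptr over NULL for pointers
 }
 ```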
 
 If you're working with floating point numbers, you may want to use the floating
 point variations of some of these macros in order to avoid problems caused by
 rounding. See [Advanced googletest Topics](advanced.md) for details.
 
 Macros in this section work with both narrow and wide string objects (`string`
 and `wstring`).
 
 **Availability**: Linux, Windows, Mac.
 
 **Historical note**: Before February 2016, the convention was to call `*_EQ` as
 `ASSERT_EQ(expected, actual)`, so lots of existing code uses this order. Now
 `*_EQ` treats both parameters in the same way.
 
 ### String Comparison
 
 The assertions in this group compare two **C strings**. If you want to compare
 two `string` objects, use `EXPECT_EQ`, `EXPECT_NE`, etc. instead.
 
 | Fatal assertion                 | Nonfatal assertion              | Verifies                                                 |
 | ------------------------------- | ------------------------------- | -------------------------------------------------------- |
 | `ASSERT_STREQ(str1, str2);`     | `EXPECT_STREQ(str1, str2);`     | the two C strings have the same content                  |
 | `ASSERT_STRNE(str1, str2);`     | `EXPECT_STRNE(str1, str2);`     | the two C strings have different contents                |
 | `ASSERT_STRCASEEQ(str1, str2);` | `EXPECT_STRCASEEQ(str1, str2);` | the two C strings have the same content, ignoring case   |
 | `ASSERT_STRCASENE(str1, str2);` | `EXPECT_STRCASENE(str1, str2);` | the two C strings have different contents, ignoring case |
 
 Note that "CASE" in an assertion name means that case is ignored. A `NULL`
 pointer and an empty string are considered *different*.
 
 `*STREQ*` and `*STRNE*` also accept wide C strings (`wchar_t*`). If a comparison
 of two wide strings fails, their values will be printed as UTF-8 narrow strings.
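
 For instance (the values are arbitrary):
 
 ```c++
 #include "gtest/gtest.h"
 
 TEST(CStringAssertionsTest, CaseAndNull) {
   EXPECT_STRCASEEQ("hello", "HELLO");  // same content, ignoring case
   EXPECT_STRNE("hello", "world");      // different contents
 
   // A NULL pointer and an empty string are considered different.
   const char* null_str = nullptr;
   EXPECT_STRNE(null_str, "");
 }
 ```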
 
 **Availability**: Linux, Windows, Mac.
 
 **See also**: For more string comparison tricks (substring, prefix, suffix, and
 regular expression matching, for example), see
 [this](https://github.com/google/googletest/blob/master/googletest/docs/advanced.md)
 in the Advanced googletest Guide.
 
 ## Simple Tests
 
 To create a test:
 
 1.  Use the `TEST()` macro to define and name a test function. These are
     ordinary C++ functions that don't return a value.
 1.  In this function, along with any valid C++ statements you want to include,
     use the various googletest assertions to check values.
 1.  The test's result is determined by the assertions; if any assertion in the
     test fails (either fatally or non-fatally), or if the test crashes, the
     entire test fails. Otherwise, it succeeds.
 
 ```c++
 TEST(TestCaseName, TestName) {
   ... test body ...
 }
 ```
 
 `TEST()` arguments go from general to specific. The *first* argument is the name
 of the test case, and the *second* argument is the test's name within the test
 case. Both names must be valid C++ identifiers, and they should not contain
 underscore (`_`). A test's *full name* consists of its containing test case and
 its individual name. Tests from different test cases can have the same
 individual name.
 
 For example, let's take a simple integer function:
 
 ```c++
 int Factorial(int n);  // Returns the factorial of n
 ```
 
 A test case for this function might look like:
 
 ```c++
 // Tests factorial of 0.
 TEST(FactorialTest, HandlesZeroInput) {
   EXPECT_EQ(Factorial(0), 1);
 }
 
 // Tests factorial of positive numbers.
 TEST(FactorialTest, HandlesPositiveInput) {
   EXPECT_EQ(Factorial(1), 1);
   EXPECT_EQ(Factorial(2), 2);
   EXPECT_EQ(Factorial(3), 6);
   EXPECT_EQ(Factorial(8), 40320);
 }
 ```
 
 googletest groups the test results by test cases, so logically-related tests
 should be in the same test case; in other words, the first argument to their
 `TEST()` should be the same. In the above example, we have two tests,
 `HandlesZeroInput` and `HandlesPositiveInput`, that belong to the same test case
 `FactorialTest`.
 
 When naming your test cases and tests, you should follow the same convention as
 for [naming functions and
 classes](https://google.github.io/styleguide/cppguide.html#Function_Names).
 
 **Availability**: Linux, Windows, Mac.
 
 ## Test Fixtures: Using the Same Data Configuration for Multiple Tests
 
 If you find yourself writing two or more tests that operate on similar data, you
 can use a *test fixture*. It allows you to reuse the same configuration of
 objects for several different tests.
 
 To create a fixture:
 
 1.  Derive a class from `::testing::Test` . Start its body with `protected:` as
     we'll want to access fixture members from sub-classes.
 1.  Inside the class, declare any objects you plan to use.
 1.  If necessary, write a default constructor or `SetUp()` function to prepare
     the objects for each test. A common mistake is to spell `SetUp()` as
     **`Setup()`** with a small `u`. Use `override` in C++11 to make sure you
     spelled it correctly.
 1.  If necessary, write a destructor or `TearDown()` function to release any
     resources you allocated in `SetUp()` . To learn when you should use the
     constructor/destructor and when you should use `SetUp()/TearDown()`, read
     this [FAQ](faq.md#should-i-use-the-constructordestructor-of-the-test-fixture-or-the-set-uptear-down-function) entry.
 1.  If needed, define subroutines for your tests to share.
 
 When using a fixture, use `TEST_F()` instead of `TEST()` as it allows you to
 access objects and subroutines in the test fixture:
 
 ```c++
 TEST_F(TestCaseName, TestName) {
   ... test body ...
 }
 ```
 
 Like `TEST()`, the first argument is the test case name, but for `TEST_F()` this
 must be the name of the test fixture class. You've probably guessed: `_F` is for
 fixture.
 
 Unfortunately, the C++ macro system does not allow us to create a single macro
 that can handle both types of tests. Using the wrong macro causes a compiler
 error.
 
 Also, you must first define a test fixture class before using it in a
 `TEST_F()`, or you'll get the compiler error "`virtual outside class
 declaration`".
 
 For each test defined with `TEST_F()` , googletest will create a *fresh* test
 fixture at runtime, immediately initialize it via `SetUp()` , run the test,
 clean up by calling `TearDown()` , and then delete the test fixture. Note that
 different tests in the same test case have different test fixture objects, and
 googletest always deletes a test fixture before it creates the next one.
 googletest does **not** reuse the same test fixture for multiple tests. Any
 changes one test makes to the fixture do not affect other tests.
 
 As an example, let's write tests for a FIFO queue class named `Queue`, which has
 the following interface:
 
 ```c++
 template <typename E>  // E is the element type.
 class Queue {
  public:
   Queue();
   void Enqueue(const E& element);
   E* Dequeue();  // Returns NULL if the queue is empty.
   size_t size() const;
   ...
 };
 ```
 
 First, define a fixture class. By convention, you should give it the name
 `FooTest` where `Foo` is the class being tested.
 
 ```c++
 class QueueTest : public ::testing::Test {
  protected:
   void SetUp() override {
      q1_.Enqueue(1);
      q2_.Enqueue(2);
      q2_.Enqueue(3);
   }
 
   // void TearDown() override {}
 
   Queue<int> q0_;
   Queue<int> q1_;
   Queue<int> q2_;
 };
 ```
 
 In this case, `TearDown()` is not needed since we don't have to clean up after
 each test, other than what's already done by the destructor.
 
 Now we'll write tests using `TEST_F()` and this fixture.
 
 ```c++
 TEST_F(QueueTest, IsEmptyInitially) {
   EXPECT_EQ(q0_.size(), 0);
 }
 
 TEST_F(QueueTest, DequeueWorks) {
   int* n = q0_.Dequeue();
   EXPECT_EQ(n, nullptr);
 
   n = q1_.Dequeue();
   ASSERT_NE(n, nullptr);
   EXPECT_EQ(*n, 1);
   EXPECT_EQ(q1_.size(), 0);
   delete n;
 
   n = q2_.Dequeue();
   ASSERT_NE(n, nullptr);
   EXPECT_EQ(*n, 2);
   EXPECT_EQ(q2_.size(), 1);
   delete n;
 }
 ```
 
 The above uses both `ASSERT_*` and `EXPECT_*` assertions. The rule of thumb is
 to use `EXPECT_*` when you want the test to continue to reveal more errors after
 the assertion failure, and use `ASSERT_*` when continuing after failure doesn't
 make sense. For example, the second assertion in the `DequeueWorks` test is
 `ASSERT_NE(n, nullptr)`, as we need to dereference the pointer `n` later, which
 would lead to a segfault when `n` is `NULL`.
 
 When these tests run, the following happens:
 
 1.  googletest constructs a `QueueTest` object (let's call it `t1` ).
 1.  `t1.SetUp()` initializes `t1` .
 1.  The first test ( `IsEmptyInitially` ) runs on `t1` .
 1.  `t1.TearDown()` cleans up after the test finishes.
 1.  `t1` is destructed.
 1.  The above steps are repeated on another `QueueTest` object, this time
     running the `DequeueWorks` test.
 
 **Availability**: Linux, Windows, Mac.
 
 
 ## Invoking the Tests
 
 `TEST()` and `TEST_F()` implicitly register their tests with googletest. So,
 unlike with many other C++ testing frameworks, you don't have to re-list all
 your defined tests in order to run them.
 
 After defining your tests, you can run them with `RUN_ALL_TESTS()` , which
 returns `0` if all the tests are successful, or `1` otherwise. Note that
 `RUN_ALL_TESTS()` runs *all tests* in your link unit -- they can be from
 different test cases, or even different source files.
 
 When invoked, the `RUN_ALL_TESTS()` macro:
 
 *   Saves the state of all googletest flags.
 
 *   Creates a test fixture object for the first test.
 
 *   Initializes it via `SetUp()`.
 
 *   Runs the test on the fixture object.
 
 *   Cleans up the fixture via `TearDown()`.
 
 *   Deletes the fixture.
 
 *   Restores the state of all googletest flags.
 
 *   Repeats the above steps for the next test, until all tests have run.
 
 If a fatal failure happens, the subsequent steps will be skipped.
 
 > IMPORTANT: You must **not** ignore the return value of `RUN_ALL_TESTS()`, or
 > you will get a compiler error. The rationale for this design is that the
 > automated testing service determines whether a test has passed based on its
 > exit code, not on its stdout/stderr output; thus your `main()` function must
 > return the value of `RUN_ALL_TESTS()`.
 >
 > Also, you should call `RUN_ALL_TESTS()` only **once**. Calling it more than
 > once conflicts with some advanced googletest features (e.g. thread-safe [death
 > tests](advanced.md#death-tests)) and thus is not supported.
 
 **Availability**: Linux, Windows, Mac.
 
 ## Writing the main() Function
 
 In `google3`, the simplest approach is to use the default main() function
 provided by linking in `"//testing/base/public:gtest_main"`. If that doesn't
 cover what you need, you should write your own main() function, which should
 return the value of `RUN_ALL_TESTS()`. Link to `"//testing/base/public:gunit"`.
 You can start from this boilerplate:
 
 ```c++
 #include "this/package/foo.h"
 #include "gtest/gtest.h"
 
 namespace {
 
 // The fixture for testing class Foo.
 class FooTest : public ::testing::Test {
  protected:
   // You can remove any or all of the following functions if their bodies
   // are empty.
 
   FooTest() {
      // You can do set-up work for each test here.
   }
 
   ~FooTest() override {
      // You can do clean-up work that doesn't throw exceptions here.
   }
 
   // If the constructor and destructor are not enough for setting up
   // and cleaning up each test, you can define the following methods:
 
   void SetUp() override {
      // Code here will be called immediately after the constructor (right
      // before each test).
   }
 
   void TearDown() override {
      // Code here will be called immediately after each test (right
      // before the destructor).
   }
 
   // Objects declared here can be used by all tests in the test case for Foo.
 };
 
 // Tests that the Foo::Bar() method does Abc.
 TEST_F(FooTest, MethodBarDoesAbc) {
   const std::string input_filepath = "this/package/testdata/myinputfile.dat";
   const std::string output_filepath = "this/package/testdata/myoutputfile.dat";
   Foo f;
   EXPECT_EQ(f.Bar(input_filepath, output_filepath), 0);
 }
 
 // Tests that Foo does Xyz.
 TEST_F(FooTest, DoesXyz) {
   // Exercises the Xyz feature of Foo.
 }
 
 }  // namespace
 
 int main(int argc, char **argv) {
   ::testing::InitGoogleTest(&argc, argv);
   return RUN_ALL_TESTS();
 }
 ```
 
 
 The `::testing::InitGoogleTest()` function parses the command line for
 googletest flags, and removes all recognized flags. This allows the user to
 control a test program's behavior via various flags, which we'll cover in
 [AdvancedGuide](advanced.md). You **must** call this function before calling
 `RUN_ALL_TESTS()`, or the flags won't be properly initialized.
 
 On Windows, `InitGoogleTest()` also works with wide strings, so it can be used
 in programs compiled in `UNICODE` mode as well.
 
 But maybe you think that writing all those main() functions is too much work? We
 agree with you completely and that's why Google Test provides a basic
 implementation of main(). If it fits your needs, then just link your test with
 the `gtest_main` library and you are good to go.
 
 NOTE: `ParseGUnitFlags()` is deprecated in favor of `InitGoogleTest()`.
 
 
 ## Known Limitations
 
 *   Google Test is designed to be thread-safe. The implementation is thread-safe
     on systems where the `pthreads` library is available. It is currently
     _unsafe_ to use Google Test assertions from two threads concurrently on
     other systems (e.g. Windows). In most tests this is not an issue as usually
     the assertions are done in the main thread. If you want to help, you can
     volunteer to implement the necessary synchronization primitives in
     `gtest-port.h` for your platform.
diff --git a/googletest/include/gtest/internal/gtest-port.h b/googletest/include/gtest/internal/gtest-port.h
index e25ef720..95e3fd61 100644
--- a/googletest/include/gtest/internal/gtest-port.h
+++ b/googletest/include/gtest/internal/gtest-port.h
@@ -1,2685 +1,2685 @@
 // Copyright 2005, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 // Authors: wan@google.com (Zhanyong Wan)
 //
 // Low-level types and utilities for porting Google Test to various
 // platforms.  All macros ending with _ and symbols defined in an
 // internal namespace are subject to change without notice.  Code
 // outside Google Test MUST NOT USE THEM DIRECTLY.  Macros that don't
 // end with _ are part of Google Test's public API and can be used by
 // code outside Google Test.
 //
 // This file is fundamental to Google Test.  All other Google Test source
 // files are expected to #include this.  Therefore, it cannot #include
 // any other Google Test header.
 
 // GOOGLETEST_CM0001 DO NOT DELETE
 
 #ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PORT_H_
 #define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PORT_H_
 
 // Environment-describing macros
 // -----------------------------
 //
 // Google Test can be used in many different environments.  Macros in
 // this section tell Google Test what kind of environment it is being
 // used in, such that Google Test can provide environment-specific
 // features and implementations.
 //
 // Google Test tries to automatically detect the properties of its
 // environment, so users usually don't need to worry about these
 // macros.  However, the automatic detection is not perfect.
 // Sometimes it's necessary for a user to define some of the following
 // macros in the build script to override Google Test's decisions.
 //
 // If the user doesn't define a macro in the list, Google Test will
 // provide a default definition.  After this header is #included, all
 // macros in this list will be defined to either 1 or 0.
 //
 // Notes to maintainers:
 //   - Each macro here is a user-tweakable knob; do not grow the list
 //     lightly.
 //   - Use #if to key off these macros.  Don't use #ifdef or "#if
 //     defined(...)", which will not work as these macros are ALWAYS
 //     defined.
 //
 //   GTEST_HAS_CLONE          - Define it to 1/0 to indicate that clone(2)
 //                              is/isn't available.
 //   GTEST_HAS_EXCEPTIONS     - Define it to 1/0 to indicate that exceptions
 //                              are enabled.
 //   GTEST_HAS_GLOBAL_STRING  - Define it to 1/0 to indicate that ::string
 //                              is/isn't available
 //   GTEST_HAS_GLOBAL_WSTRING - Define it to 1/0 to indicate that ::wstring
 //                              is/isn't available
 //   GTEST_HAS_POSIX_RE       - Define it to 1/0 to indicate that POSIX regular
 //                              expressions are/aren't available.
 //   GTEST_HAS_PTHREAD        - Define it to 1/0 to indicate that <pthread.h>
 //                              is/isn't available.
 //   GTEST_HAS_RTTI           - Define it to 1/0 to indicate that RTTI is/isn't
 //                              enabled.
 //   GTEST_HAS_STD_WSTRING    - Define it to 1/0 to indicate that
 //                              std::wstring does/doesn't work (Google Test can
 //                              be used where std::wstring is unavailable).
 //   GTEST_HAS_TR1_TUPLE      - Define it to 1/0 to indicate tr1::tuple
 //                              is/isn't available.
 //   GTEST_HAS_SEH            - Define it to 1/0 to indicate whether the
 //                              compiler supports Microsoft's "Structured
 //                              Exception Handling".
 //   GTEST_HAS_STREAM_REDIRECTION
 //                            - Define it to 1/0 to indicate whether the
 //                              platform supports I/O stream redirection using
 //                              dup() and dup2().
 //   GTEST_USE_OWN_TR1_TUPLE  - Define it to 1/0 to indicate whether Google
 //                              Test's own tr1 tuple implementation should be
 //                              used.  Unused when the user sets
 //                              GTEST_HAS_TR1_TUPLE to 0.
 //   GTEST_LANG_CXX11         - Define it to 1/0 to indicate that Google Test
 //                              is building in C++11/C++98 mode.
 //   GTEST_LINKED_AS_SHARED_LIBRARY
 //                            - Define to 1 when compiling tests that use
 //                              Google Test as a shared library (known as
 //                              DLL on Windows).
 //   GTEST_CREATE_SHARED_LIBRARY
 //                            - Define to 1 when compiling Google Test itself
 //                              as a shared library.
 //   GTEST_DEFAULT_DEATH_TEST_STYLE
 //                            - The default value of --gtest_death_test_style.
 //                              The legacy default has been "fast" in the open
 //                              source version since 2008. The recommended value
 //                              is "threadsafe", and can be set in
 //                              custom/gtest-port.h.
 
 // Platform-indicating macros
 // --------------------------
 //
 // Macros indicating the platform on which Google Test is being used
 // (a macro is defined to 1 if compiled on the given platform;
 // otherwise UNDEFINED -- it's never defined to 0.).  Google Test
 // defines these macros automatically.  Code outside Google Test MUST
 // NOT define them.
 //
 //   GTEST_OS_AIX      - IBM AIX
 //   GTEST_OS_CYGWIN   - Cygwin
 //   GTEST_OS_FREEBSD  - FreeBSD
 //   GTEST_OS_FUCHSIA  - Fuchsia
 //   GTEST_OS_HPUX     - HP-UX
 //   GTEST_OS_LINUX    - Linux
 //     GTEST_OS_LINUX_ANDROID - Google Android
 //   GTEST_OS_MAC      - Mac OS X
 //     GTEST_OS_IOS    - iOS
 //   GTEST_OS_NACL     - Google Native Client (NaCl)
 //   GTEST_OS_NETBSD   - NetBSD
 //   GTEST_OS_OPENBSD  - OpenBSD
 //   GTEST_OS_QNX      - QNX
 //   GTEST_OS_SOLARIS  - Sun Solaris
 //   GTEST_OS_SYMBIAN  - Symbian
 //   GTEST_OS_WINDOWS  - Windows (Desktop, MinGW, or Mobile)
 //     GTEST_OS_WINDOWS_DESKTOP  - Windows Desktop
 //     GTEST_OS_WINDOWS_MINGW    - MinGW
 //     GTEST_OS_WINDOWS_MOBILE   - Windows Mobile
 //     GTEST_OS_WINDOWS_PHONE    - Windows Phone
 //     GTEST_OS_WINDOWS_RT       - Windows Store App/WinRT
 //   GTEST_OS_ZOS      - z/OS
 //
 // Among the platforms, Cygwin, Linux, Mac OS X, and Windows have the
 // most stable support.  Since core members of the Google Test project
 // don't have access to other platforms, support for them may be less
 // stable.  If you notice any problems on your platform, please notify
 // googletestframework@googlegroups.com (patches for fixing them are
 // even more welcome!).
 //
 // It is possible that none of the GTEST_OS_* macros are defined.
 
 // Feature-indicating macros
 // -------------------------
 //
 // Macros indicating which Google Test features are available (a macro
 // is defined to 1 if the corresponding feature is supported;
 // otherwise UNDEFINED -- it's never defined to 0.).  Google Test
 // defines these macros automatically.  Code outside Google Test MUST
 // NOT define them.
 //
 // These macros are public so that portable tests can be written.
 // Such tests typically surround code using a feature with an #if
 // which controls that code.  For example:
 //
 // #if GTEST_HAS_DEATH_TEST
 //   EXPECT_DEATH(DoSomethingDeadly());
 // #endif
 //
 //   GTEST_HAS_COMBINE      - the Combine() function (for value-parameterized
 //                            tests)
 //   GTEST_HAS_DEATH_TEST   - death tests
 //   GTEST_HAS_TYPED_TEST   - typed tests
 //   GTEST_HAS_TYPED_TEST_P - type-parameterized tests
 //   GTEST_IS_THREADSAFE    - Google Test is thread-safe.
 //   GTEST_USES_POSIX_RE    - enhanced POSIX regex is used. Do not confuse with
 //                            GTEST_HAS_POSIX_RE (see above) which users can
 //                            define themselves.
 //   GTEST_USES_SIMPLE_RE   - our own simple regex is used;
 //                            the above two are mutually exclusive.
 //   GTEST_CAN_COMPARE_NULL - accepts untyped NULL in EXPECT_EQ().
 
 // Misc public macros
 // ------------------
 //
 //   GTEST_FLAG(flag_name)  - references the variable corresponding to
 //                            the given Google Test flag.
 
 // Internal utilities
 // ------------------
 //
 // The following macros and utilities are for Google Test's INTERNAL
 // use only.  Code outside Google Test MUST NOT USE THEM DIRECTLY.
 //
 // Macros for basic C++ coding:
 //   GTEST_AMBIGUOUS_ELSE_BLOCKER_ - for disabling a gcc warning.
 //   GTEST_ATTRIBUTE_UNUSED_  - declares that a class' instances or a
 //                              variable don't have to be used.
 //   GTEST_DISALLOW_ASSIGN_   - disables operator=.
 //   GTEST_DISALLOW_COPY_AND_ASSIGN_ - disables copy ctor and operator=.
 //   GTEST_MUST_USE_RESULT_   - declares that a function's result must be used.
 //   GTEST_INTENTIONAL_CONST_COND_PUSH_ - start code section where MSVC C4127 is
 //                                        suppressed (constant conditional).
 //   GTEST_INTENTIONAL_CONST_COND_POP_  - finish code section where MSVC C4127
 //                                        is suppressed.
 //
 // C++11 feature wrappers:
 //
 //   testing::internal::forward - portability wrapper for std::forward.
 //   testing::internal::move  - portability wrapper for std::move.
 //
 // Synchronization:
 //   Mutex, MutexLock, ThreadLocal, GetThreadCount()
 //                            - synchronization primitives.
 //
 // Template meta programming:
 //   is_pointer     - as in TR1; needed on Symbian and IBM XL C/C++ only.
 //   IteratorTraits - partial implementation of std::iterator_traits, which
 //                    is not available in libCstd when compiled with Sun C++.
 //
 // Smart pointers:
 //   scoped_ptr     - as in TR2.
 //
 // Regular expressions:
 //   RE             - a simple regular expression class using the POSIX
 //                    Extended Regular Expression syntax on UNIX-like platforms
 //                    or a reduced regular expression syntax on other
 //                    platforms, including Windows.
 // Logging:
 //   GTEST_LOG_()   - logs messages at the specified severity level.
 //   LogToStderr()  - directs all log messages to stderr.
 //   FlushInfoLog() - flushes informational log messages.
 //
 // Stdout and stderr capturing:
 //   CaptureStdout()     - starts capturing stdout.
 //   GetCapturedStdout() - stops capturing stdout and returns the captured
 //                         string.
 //   CaptureStderr()     - starts capturing stderr.
 //   GetCapturedStderr() - stops capturing stderr and returns the captured
 //                         string.
 //
 // Integer types:
 //   TypeWithSize   - maps an integer to an int type.
 //   Int32, UInt32, Int64, UInt64, TimeInMillis
 //                  - integers of known sizes.
 //   BiggestInt     - the biggest signed integer type.
 //
 // Command-line utilities:
 //   GTEST_DECLARE_*()  - declares a flag.
 //   GTEST_DEFINE_*()   - defines a flag.
 //   GetInjectableArgvs() - returns the command line as a vector of strings.
 //
 // Environment variable utilities:
 //   GetEnv()             - gets the value of an environment variable.
 //   BoolFromGTestEnv()   - parses a bool environment variable.
 //   Int32FromGTestEnv()  - parses an Int32 environment variable.
 //   StringFromGTestEnv() - parses a string environment variable.
 
 #include <ctype.h>   // for isspace, etc
 #include <stddef.h>  // for ptrdiff_t
 #include <stdlib.h>
 #include <stdio.h>
 #include <string.h>
 #ifndef _WIN32_WCE
 # include <sys/types.h>
 # include <sys/stat.h>
 #endif  // !_WIN32_WCE
 
 #if defined __APPLE__
 # include <AvailabilityMacros.h>
 # include <TargetConditionals.h>
 #endif
 
 // Brings in the definition of HAS_GLOBAL_STRING.  This must be done
 // BEFORE we test HAS_GLOBAL_STRING.
 #include <string>  // NOLINT
 #include <algorithm>  // NOLINT
 #include <iostream>  // NOLINT
 #include <sstream>  // NOLINT
 #include <utility>
 #include <vector>  // NOLINT
 
 #include "gtest/internal/gtest-port-arch.h"
 #include "gtest/internal/custom/gtest-port.h"
 
 #if !defined(GTEST_DEV_EMAIL_)
 # define GTEST_DEV_EMAIL_ "googletestframework@@googlegroups.com"
 # define GTEST_FLAG_PREFIX_ "gtest_"
 # define GTEST_FLAG_PREFIX_DASH_ "gtest-"
 # define GTEST_FLAG_PREFIX_UPPER_ "GTEST_"
 # define GTEST_NAME_ "Google Test"
 # define GTEST_PROJECT_URL_ "https://github.com/google/googletest/"
 #endif  // !defined(GTEST_DEV_EMAIL_)
 
 #if !defined(GTEST_INIT_GOOGLE_TEST_NAME_)
 # define GTEST_INIT_GOOGLE_TEST_NAME_ "testing::InitGoogleTest"
 #endif  // !defined(GTEST_INIT_GOOGLE_TEST_NAME_)
 
 // Determines the version of gcc that is used to compile this.
 #ifdef __GNUC__
 // 40302 means version 4.3.2.
 # define GTEST_GCC_VER_ \
     (__GNUC__*10000 + __GNUC_MINOR__*100 + __GNUC_PATCHLEVEL__)
 #endif  // __GNUC__
 
 // Macros for disabling Microsoft Visual C++ warnings.
 //
 //   GTEST_DISABLE_MSC_WARNINGS_PUSH_(4800 4385)
 //   /* code that triggers warnings C4800 and C4385 */
 //   GTEST_DISABLE_MSC_WARNINGS_POP_()
 #if _MSC_VER >= 1400
 # define GTEST_DISABLE_MSC_WARNINGS_PUSH_(warnings) \
     __pragma(warning(push))                        \
     __pragma(warning(disable: warnings))
 # define GTEST_DISABLE_MSC_WARNINGS_POP_()          \
     __pragma(warning(pop))
 #else
 // Older versions of MSVC don't have __pragma.
 # define GTEST_DISABLE_MSC_WARNINGS_PUSH_(warnings)
 # define GTEST_DISABLE_MSC_WARNINGS_POP_()
 #endif
 
 #ifndef GTEST_LANG_CXX11
 // gcc and clang define __GXX_EXPERIMENTAL_CXX0X__ when
 // -std={c,gnu}++{0x,11} is passed.  The C++11 standard specifies a
 // value for __cplusplus, and recent versions of clang, gcc, and
 // probably other compilers set that too in C++11 mode.
 # if __GXX_EXPERIMENTAL_CXX0X__ || __cplusplus >= 201103L || _MSC_VER >= 1900
 // Compiling in at least C++11 mode.
 #  define GTEST_LANG_CXX11 1
 # else
 #  define GTEST_LANG_CXX11 0
 # endif
 #endif
 
 // Distinct from C++11 language support, some environments don't provide
 // proper C++11 library support. Notably, it's possible to build in
 // C++11 mode when targeting Mac OS X 10.6, which has an old libstdc++
 // with no C++11 support.
 //
 // libstdc++ has sufficient C++11 support as of GCC 4.6.0, __GLIBCXX__
 // 20110325, but maintenance releases in the 4.4 and 4.5 series followed
 // this date, so check for those versions by their date stamps.
 // https://gcc.gnu.org/onlinedocs/libstdc++/manual/abi.html#abi.versioning
 #if GTEST_LANG_CXX11 && \
     (!defined(__GLIBCXX__) || ( \
         __GLIBCXX__ >= 20110325ul &&  /* GCC >= 4.6.0 */ \
         /* Blacklist of patch releases of older branches: */ \
         __GLIBCXX__ != 20110416ul &&  /* GCC 4.4.6 */ \
         __GLIBCXX__ != 20120313ul &&  /* GCC 4.4.7 */ \
         __GLIBCXX__ != 20110428ul &&  /* GCC 4.5.3 */ \
         __GLIBCXX__ != 20120702ul))   /* GCC 4.5.4 */
 # define GTEST_STDLIB_CXX11 1
 #endif
 
 // Only use C++11 library features if the library provides them.
 #if GTEST_STDLIB_CXX11
 # define GTEST_HAS_STD_BEGIN_AND_END_ 1
 # define GTEST_HAS_STD_FORWARD_LIST_ 1
 # if !defined(_MSC_VER) || (_MSC_FULL_VER >= 190023824)
 // works only with VS2015U2 and better
 #   define GTEST_HAS_STD_FUNCTION_ 1
 # endif
 # define GTEST_HAS_STD_INITIALIZER_LIST_ 1
 # define GTEST_HAS_STD_MOVE_ 1
 # define GTEST_HAS_STD_UNIQUE_PTR_ 1
 # define GTEST_HAS_STD_SHARED_PTR_ 1
 # define GTEST_HAS_UNORDERED_MAP_ 1
 # define GTEST_HAS_UNORDERED_SET_ 1
 #endif
 
 // C++11 specifies that <tuple> provides std::tuple.
 // Some platforms still might not have it, however.
 #if GTEST_LANG_CXX11
 # define GTEST_HAS_STD_TUPLE_ 1
 # if defined(__clang__)
-// Inspired by http://clang.llvm.org/docs/LanguageExtensions.html#__has_include
+// Inspired by https://clang.llvm.org/docs/LanguageExtensions.html#include-file-checking-macros
 #  if defined(__has_include) && !__has_include(<tuple>)
 #   undef GTEST_HAS_STD_TUPLE_
 #  endif
 # elif defined(_MSC_VER)
 // Inspired by boost/config/stdlib/dinkumware.hpp
 #  if defined(_CPPLIB_VER) && _CPPLIB_VER < 520
 #   undef GTEST_HAS_STD_TUPLE_
 #  endif
 # elif defined(__GLIBCXX__)
 // Inspired by boost/config/stdlib/libstdcpp3.hpp,
 // http://gcc.gnu.org/gcc-4.2/changes.html and
-// http://gcc.gnu.org/onlinedocs/libstdc++/manual/bk01pt01ch01.html#manual.intro.status.standard.200x
+// https://web.archive.org/web/20140227044429/gcc.gnu.org/onlinedocs/libstdc++/manual/bk01pt01ch01.html#manual.intro.status.standard.200x
 #  if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 2)
 #   undef GTEST_HAS_STD_TUPLE_
 #  endif
 # endif
 #endif
 
 // Brings in definitions for functions used in the testing::internal::posix
 // namespace (read, write, close, chdir, isatty, stat). We do not currently
 // use them on Windows Mobile.
 #if GTEST_OS_WINDOWS
 # if !GTEST_OS_WINDOWS_MOBILE
 #  include <direct.h>
 #  include <io.h>
 # endif
 // In order to avoid having to include <windows.h>, use forward declaration
 #if GTEST_OS_WINDOWS_MINGW && !defined(__MINGW64_VERSION_MAJOR)
 // MinGW defined _CRITICAL_SECTION and _RTL_CRITICAL_SECTION as two
 // separate (equivalent) structs, instead of using typedef
 typedef struct _CRITICAL_SECTION GTEST_CRITICAL_SECTION;
 #else
 // Assume CRITICAL_SECTION is a typedef of _RTL_CRITICAL_SECTION.
 // This assumption is verified by
 // WindowsTypesTest.CRITICAL_SECTIONIs_RTL_CRITICAL_SECTION.
 typedef struct _RTL_CRITICAL_SECTION GTEST_CRITICAL_SECTION;
 #endif
 #else
 // This assumes that non-Windows OSes provide unistd.h. For OSes where this
 // is not the case, we need to include headers that provide the functions
 // mentioned above.
 # include <unistd.h>
 # include <strings.h>
 #endif  // GTEST_OS_WINDOWS
 
 #if GTEST_OS_LINUX_ANDROID
 // Used to define __ANDROID_API__ matching the target NDK API level.
 #  include <android/api-level.h>  // NOLINT
 #endif
 
 // Defines this to true iff Google Test can use POSIX regular expressions.
 #ifndef GTEST_HAS_POSIX_RE
 # if GTEST_OS_LINUX_ANDROID
 // On Android, <regex.h> is only available starting with Gingerbread.
 #  define GTEST_HAS_POSIX_RE (__ANDROID_API__ >= 9)
 # else
 #  define GTEST_HAS_POSIX_RE (!GTEST_OS_WINDOWS)
 # endif
 #endif
 
 #if GTEST_USES_PCRE
 // The appropriate headers have already been included.
 
 #elif GTEST_HAS_POSIX_RE
 
 // On some platforms, <regex.h> needs someone to define size_t, and
 // won't compile otherwise.  We can #include it here as we already
 // included <stdlib.h>, which is guaranteed to define size_t through
 // <stddef.h>.
 # include <regex.h>  // NOLINT
 
 # define GTEST_USES_POSIX_RE 1
 
 #elif GTEST_OS_WINDOWS
 
 // <regex.h> is not available on Windows.  Use our own simple regex
 // implementation instead.
 # define GTEST_USES_SIMPLE_RE 1
 
 #else
 
 // <regex.h> may not be available on this platform.  Use our own
 // simple regex implementation instead.
 # define GTEST_USES_SIMPLE_RE 1
 
 #endif  // GTEST_USES_PCRE
 
 #ifndef GTEST_HAS_EXCEPTIONS
 // The user didn't tell us whether exceptions are enabled, so we need
 // to figure it out.
 # if defined(_MSC_VER) && defined(_CPPUNWIND)
 // MSVC defines _CPPUNWIND to 1 iff exceptions are enabled.
 #  define GTEST_HAS_EXCEPTIONS 1
 # elif defined(__BORLANDC__)
 // C++Builder's implementation of the STL uses the _HAS_EXCEPTIONS
 // macro to enable exceptions, so we'll do the same.
 // Assumes that exceptions are enabled by default.
 #  ifndef _HAS_EXCEPTIONS
 #   define _HAS_EXCEPTIONS 1
 #  endif  // _HAS_EXCEPTIONS
 #  define GTEST_HAS_EXCEPTIONS _HAS_EXCEPTIONS
 # elif defined(__clang__)
 // clang defines __EXCEPTIONS iff exceptions are enabled before clang 220714,
 // but iff cleanups are enabled after that. In Obj-C++ files, there can be
 // cleanups for ObjC exceptions which also need cleanups, even if C++ exceptions
 // are disabled. clang has __has_feature(cxx_exceptions) which checks for C++
 // exceptions starting at clang r206352, but which checked for cleanups prior to
 // that. To reliably check for C++ exception availability with clang, check for
 // __EXCEPTIONS && __has_feature(cxx_exceptions).
 #  define GTEST_HAS_EXCEPTIONS (__EXCEPTIONS && __has_feature(cxx_exceptions))
 # elif defined(__GNUC__) && __EXCEPTIONS
 // gcc defines __EXCEPTIONS to 1 iff exceptions are enabled.
 #  define GTEST_HAS_EXCEPTIONS 1
 # elif defined(__SUNPRO_CC)
 // Sun Pro CC supports exceptions.  However, there is no compile-time way of
 // detecting whether they are enabled or not.  Therefore, we assume that
 // they are enabled unless the user tells us otherwise.
 #  define GTEST_HAS_EXCEPTIONS 1
 # elif defined(__IBMCPP__) && __EXCEPTIONS
 // xlC defines __EXCEPTIONS to 1 iff exceptions are enabled.
 #  define GTEST_HAS_EXCEPTIONS 1
 # elif defined(__HP_aCC)
 // Exception handling is in effect by default in the HP aCC compiler. It has
 // to be turned off by the +noeh compiler option if desired.
 #  define GTEST_HAS_EXCEPTIONS 1
 # else
 // For other compilers, we assume exceptions are disabled to be
 // conservative.
 #  define GTEST_HAS_EXCEPTIONS 0
 # endif  // defined(_MSC_VER) || defined(__BORLANDC__)
 #endif  // GTEST_HAS_EXCEPTIONS
 
 #if !defined(GTEST_HAS_STD_STRING)
 // Even though we don't use this macro any longer, we keep it in case
 // some clients still depend on it.
 # define GTEST_HAS_STD_STRING 1
 #elif !GTEST_HAS_STD_STRING
 // The user told us that ::std::string isn't available.
 # error "::std::string isn't available."
 #endif  // !defined(GTEST_HAS_STD_STRING)
 
 #ifndef GTEST_HAS_GLOBAL_STRING
 # define GTEST_HAS_GLOBAL_STRING 0
 #endif  // GTEST_HAS_GLOBAL_STRING
 
 #ifndef GTEST_HAS_STD_WSTRING
 // The user didn't tell us whether ::std::wstring is available, so we need
 // to figure it out.
 // TODO(wan@google.com): use autoconf to detect whether ::std::wstring
 //   is available.

 // Cygwin 1.7 and below don't support ::std::wstring.
 // Solaris' libc++ doesn't support it either.  Android has
 // no support for it at least as recently as Froyo (2.2).
 # define GTEST_HAS_STD_WSTRING \
     (!(GTEST_OS_LINUX_ANDROID || GTEST_OS_CYGWIN || GTEST_OS_SOLARIS))
 
 #endif  // GTEST_HAS_STD_WSTRING
 
 #ifndef GTEST_HAS_GLOBAL_WSTRING
 // The user didn't tell us whether ::wstring is available, so we need
 // to figure it out.
 # define GTEST_HAS_GLOBAL_WSTRING \
     (GTEST_HAS_STD_WSTRING && GTEST_HAS_GLOBAL_STRING)
 #endif  // GTEST_HAS_GLOBAL_WSTRING
 
 // Determines whether RTTI is available.
 #ifndef GTEST_HAS_RTTI
 // The user didn't tell us whether RTTI is enabled, so we need to
 // figure it out.
 
 # ifdef _MSC_VER
 
 #  ifdef _CPPRTTI  // MSVC defines this macro iff RTTI is enabled.
 #   define GTEST_HAS_RTTI 1
 #  else
 #   define GTEST_HAS_RTTI 0
 #  endif
 
 // Starting with version 4.3.2, gcc defines __GXX_RTTI iff RTTI is enabled.
 # elif defined(__GNUC__) && (GTEST_GCC_VER_ >= 40302)
 
 #  ifdef __GXX_RTTI
 // When building against STLport with the Android NDK and with
 // -frtti -fno-exceptions, the build fails at link time with undefined
 // references to __cxa_bad_typeid. It is not clear whether this is an STL or
 // toolchain bug, so disable RTTI when this combination is detected.
 #   if GTEST_OS_LINUX_ANDROID && defined(_STLPORT_MAJOR) && \
        !defined(__EXCEPTIONS)
 #    define GTEST_HAS_RTTI 0
 #   else
 #    define GTEST_HAS_RTTI 1
 #   endif  // GTEST_OS_LINUX_ANDROID && __STLPORT_MAJOR && !__EXCEPTIONS
 #  else
 #   define GTEST_HAS_RTTI 0
 #  endif  // __GXX_RTTI
 
 // Clang defines __GXX_RTTI starting with version 3.0, but its manual
 // recommends using __has_feature instead.  __has_feature(cxx_rtti) is
 // supported since 2.7, the first version with C++ support.
 # elif defined(__clang__)
 
 #  define GTEST_HAS_RTTI __has_feature(cxx_rtti)
 
 // Starting with version 9.0 IBM Visual Age defines __RTTI_ALL__ to 1 if
 // both the typeid and dynamic_cast features are present.
 # elif defined(__IBMCPP__) && (__IBMCPP__ >= 900)
 
 #  ifdef __RTTI_ALL__
 #   define GTEST_HAS_RTTI 1
 #  else
 #   define GTEST_HAS_RTTI 0
 #  endif
 
 # else
 
 // For all other compilers, we assume RTTI is enabled.
 #  define GTEST_HAS_RTTI 1
 
 # endif  // _MSC_VER
 
 #endif  // GTEST_HAS_RTTI
 
 // It's this header's responsibility to #include <typeinfo> when RTTI
 // is enabled.
 #if GTEST_HAS_RTTI
 # include <typeinfo>
 #endif
 
 // Determines whether Google Test can use the pthreads library.
 #ifndef GTEST_HAS_PTHREAD
 // The user didn't tell us explicitly, so we make reasonable assumptions about
 // which platforms have pthreads support.
 //
 // To disable threading support in Google Test, add -DGTEST_HAS_PTHREAD=0
 // to your compiler flags.
 #define GTEST_HAS_PTHREAD                                             \
   (GTEST_OS_LINUX || GTEST_OS_MAC || GTEST_OS_HPUX || GTEST_OS_QNX || \
    GTEST_OS_FREEBSD || GTEST_OS_NACL || GTEST_OS_NETBSD || GTEST_OS_FUCHSIA)
 #endif  // GTEST_HAS_PTHREAD
 
 #if GTEST_HAS_PTHREAD
 // gtest-port.h guarantees to #include <pthread.h> when GTEST_HAS_PTHREAD is
 // true.
 # include <pthread.h>  // NOLINT
 
 // For timespec and nanosleep, used below.
 # include <time.h>  // NOLINT
 #endif
 
 // Determines if hash_map/hash_set are available.
 // Only used for testing against those containers.
 #if !defined(GTEST_HAS_HASH_MAP_)
 # if defined(_MSC_VER) && (_MSC_VER < 1900)
 #  define GTEST_HAS_HASH_MAP_ 1  // Indicates that hash_map is available.
 #  define GTEST_HAS_HASH_SET_ 1  // Indicates that hash_set is available.
 # endif  // _MSC_VER
 #endif  // !defined(GTEST_HAS_HASH_MAP_)
 
 // Determines whether Google Test can use tr1/tuple.  You can define
 // this macro to 0 to prevent Google Test from using tuple (any
 // feature depending on tuple will be disabled in this mode).
 #ifndef GTEST_HAS_TR1_TUPLE
 # if GTEST_OS_LINUX_ANDROID && defined(_STLPORT_MAJOR)
 // STLport, provided with the Android NDK, has neither <tr1/tuple> nor <tuple>.
 #  define GTEST_HAS_TR1_TUPLE 0
 # elif defined(_MSC_VER) && (_MSC_VER >= 1910)
 // Prevent `warning C4996: 'std::tr1': warning STL4002:
 // The non-Standard std::tr1 namespace and TR1-only machinery
 // are deprecated and will be REMOVED.`
 #  define GTEST_HAS_TR1_TUPLE 0
 # elif GTEST_LANG_CXX11 && defined(_LIBCPP_VERSION)
 // libc++ doesn't support TR1.
 #  define GTEST_HAS_TR1_TUPLE 0
 # else
 // The user didn't tell us not to do it, so we assume it's OK.
 #  define GTEST_HAS_TR1_TUPLE 1
 # endif
 #endif  // GTEST_HAS_TR1_TUPLE
 
 // Determines whether Google Test's own tr1 tuple implementation
 // should be used.
 #ifndef GTEST_USE_OWN_TR1_TUPLE
 // We use our own tuple implementation on Symbian.
 # if GTEST_OS_SYMBIAN
 #  define GTEST_USE_OWN_TR1_TUPLE 1
 # else
 // The user didn't tell us, so we need to figure it out.
 
 // We use our own TR1 tuple if we aren't sure the user has an
 // implementation of it already.  At this time, libstdc++ 4.0.0+ and
 // MSVC 2010 are the only mainstream standard libraries that come
 // with a TR1 tuple implementation.  NVIDIA's CUDA NVCC compiler
 // pretends to be GCC by defining __GNUC__ and friends, but cannot
 // compile GCC's tuple implementation.  MSVC 2008 (9.0) provides TR1
 // tuple in a 323 MB Feature Pack download, which we cannot assume the
 // user has.  QNX's QCC compiler is a modified GCC but it doesn't
 // support TR1 tuple.  libc++ only provides std::tuple, in C++11 mode,
 // and it can be used with some compilers that define __GNUC__.
 # if (defined(__GNUC__) && !defined(__CUDACC__) && (GTEST_GCC_VER_ >= 40000) \
       && !GTEST_OS_QNX && !defined(_LIBCPP_VERSION)) \
       || (_MSC_VER >= 1600 && _MSC_VER < 1900)
 #  define GTEST_ENV_HAS_TR1_TUPLE_ 1
 # endif
 
 // C++11 specifies that <tuple> provides std::tuple. Use that if gtest is used
 // in C++11 mode and libstdc++ isn't very old (binaries targeting OS X 10.6
 // can build with clang but need to use gcc4.2's libstdc++).
 # if GTEST_LANG_CXX11 && (!defined(__GLIBCXX__) || __GLIBCXX__ > 20110325)
 #  define GTEST_ENV_HAS_STD_TUPLE_ 1
 # endif
 
 # if GTEST_ENV_HAS_TR1_TUPLE_ || GTEST_ENV_HAS_STD_TUPLE_
 #  define GTEST_USE_OWN_TR1_TUPLE 0
 # else
 #  define GTEST_USE_OWN_TR1_TUPLE 1
 # endif
 # endif  // GTEST_OS_SYMBIAN
 #endif  // GTEST_USE_OWN_TR1_TUPLE
 
 // To avoid conditional compilation we make it gtest-port.h's responsibility
 // to #include the header implementing tuple.
 #if GTEST_HAS_STD_TUPLE_
 # include <tuple>  // IWYU pragma: export
 # define GTEST_TUPLE_NAMESPACE_ ::std
 #endif  // GTEST_HAS_STD_TUPLE_
 
 // We include tr1::tuple even if std::tuple is available to define printers for
 // them.
 #if GTEST_HAS_TR1_TUPLE
 # ifndef GTEST_TUPLE_NAMESPACE_
 #  define GTEST_TUPLE_NAMESPACE_ ::std::tr1
 # endif  // GTEST_TUPLE_NAMESPACE_
 
 # if GTEST_USE_OWN_TR1_TUPLE
 #  include "gtest/internal/gtest-tuple.h"  // IWYU pragma: export  // NOLINT
 # elif GTEST_OS_SYMBIAN
 
 // On Symbian, BOOST_HAS_TR1_TUPLE causes Boost's TR1 tuple library to
 // use STLport's tuple implementation, which unfortunately doesn't
 // work as the copy of STLport distributed with Symbian is incomplete.
 // By making sure BOOST_HAS_TR1_TUPLE is undefined, we force Boost to
 // use its own tuple implementation.
 #  ifdef BOOST_HAS_TR1_TUPLE
 #   undef BOOST_HAS_TR1_TUPLE
 #  endif  // BOOST_HAS_TR1_TUPLE
 
 // This prevents <boost/tr1/detail/config.hpp>, which defines
 // BOOST_HAS_TR1_TUPLE, from being #included by Boost's <tuple>.
 #  define BOOST_TR1_DETAIL_CONFIG_HPP_INCLUDED
 #  include <tuple>  // IWYU pragma: export  // NOLINT
 
 # elif defined(__GNUC__) && (GTEST_GCC_VER_ >= 40000)
 // GCC 4.0+ implements tr1/tuple in the <tr1/tuple> header.  This does
 // not conform to the TR1 spec, which requires the header to be <tuple>.
 
 #  if !GTEST_HAS_RTTI && GTEST_GCC_VER_ < 40302
 // Until version 4.3.2, gcc has a bug that causes <tr1/functional>,
 // which is #included by <tr1/tuple>, to not compile when RTTI is
 // disabled.  _TR1_FUNCTIONAL is the header guard for
 // <tr1/functional>.  Hence the following #define is a hack to prevent
 // <tr1/functional> from being included.
 #   define _TR1_FUNCTIONAL 1
 #   include <tr1/tuple>
 #   undef _TR1_FUNCTIONAL  // Allows the user to #include
                         // <tr1/functional> if they choose to.
 #  else
 #   include <tr1/tuple>  // NOLINT
 #  endif  // !GTEST_HAS_RTTI && GTEST_GCC_VER_ < 40302
 
 // VS 2010 now has tr1 support.
 # elif _MSC_VER >= 1600
 #  include <tuple>  // IWYU pragma: export  // NOLINT
 
 # else  // GTEST_USE_OWN_TR1_TUPLE
 #  include <tr1/tuple>  // IWYU pragma: export  // NOLINT
 # endif  // GTEST_USE_OWN_TR1_TUPLE
 
 #endif  // GTEST_HAS_TR1_TUPLE
 
 // Determines whether clone(2) is supported.
 // Usually it will only be available on Linux, excluding
 // Linux on the Itanium architecture.
 // Also see http://linux.die.net/man/2/clone.
 #ifndef GTEST_HAS_CLONE
 // The user didn't tell us, so we need to figure it out.
 
 # if GTEST_OS_LINUX && !defined(__ia64__)
 #  if GTEST_OS_LINUX_ANDROID
 // On Android, clone() became available at different API levels for each 32-bit
 // architecture.
 #    if defined(__LP64__) || \
         (defined(__arm__) && __ANDROID_API__ >= 9) || \
         (defined(__mips__) && __ANDROID_API__ >= 12) || \
         (defined(__i386__) && __ANDROID_API__ >= 17)
 #     define GTEST_HAS_CLONE 1
 #    else
 #     define GTEST_HAS_CLONE 0
 #    endif
 #  else
 #   define GTEST_HAS_CLONE 1
 #  endif
 # else
 #  define GTEST_HAS_CLONE 0
 # endif  // GTEST_OS_LINUX && !defined(__ia64__)
 
 #endif  // GTEST_HAS_CLONE
 
 // Determines whether to support stream redirection. This is used to test
 // output correctness and to implement death tests.
 #ifndef GTEST_HAS_STREAM_REDIRECTION
 // By default, we assume that stream redirection is supported on all
 // platforms except known mobile ones.
 # if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_SYMBIAN || \
     GTEST_OS_WINDOWS_PHONE || GTEST_OS_WINDOWS_RT
 #  define GTEST_HAS_STREAM_REDIRECTION 0
 # else
 #  define GTEST_HAS_STREAM_REDIRECTION 1
 # endif  // !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_SYMBIAN
 #endif  // GTEST_HAS_STREAM_REDIRECTION
 
 // Determines whether to support death tests.
 // Google Test does not support death tests for VC 7.1 and earlier as
 // abort() in a VC 7.1 application compiled as GUI in debug config
 // pops up a dialog window that cannot be suppressed programmatically.
 #if (GTEST_OS_LINUX || GTEST_OS_CYGWIN || GTEST_OS_SOLARIS ||   \
      (GTEST_OS_MAC && !GTEST_OS_IOS) ||                         \
      (GTEST_OS_WINDOWS_DESKTOP && _MSC_VER >= 1400) ||          \
      GTEST_OS_WINDOWS_MINGW || GTEST_OS_AIX || GTEST_OS_HPUX || \
      GTEST_OS_OPENBSD || GTEST_OS_QNX || GTEST_OS_FREEBSD || \
      GTEST_OS_NETBSD || GTEST_OS_FUCHSIA)
 # define GTEST_HAS_DEATH_TEST 1
 #endif
 
 // Determines whether to support type-driven tests.
 
 // Typed tests need <typeinfo> and variadic macros, which GCC, VC++ 8.0,
 // Sun Pro CC, IBM Visual Age, and HP aCC support.
 #if defined(__GNUC__) || (_MSC_VER >= 1400) || defined(__SUNPRO_CC) || \
     defined(__IBMCPP__) || defined(__HP_aCC)
 # define GTEST_HAS_TYPED_TEST 1
 # define GTEST_HAS_TYPED_TEST_P 1
 #endif
 
 // Determines whether to support Combine(). This only makes sense when
 // value-parameterized tests are enabled.  The implementation doesn't
 // work on Sun Studio since it doesn't understand templated conversion
 // operators.
 #if (GTEST_HAS_TR1_TUPLE || GTEST_HAS_STD_TUPLE_) && !defined(__SUNPRO_CC)
 # define GTEST_HAS_COMBINE 1
 #endif
 
 // Determines whether the system compiler uses UTF-16 for encoding wide strings.
 #define GTEST_WIDE_STRING_USES_UTF16_ \
     (GTEST_OS_WINDOWS || GTEST_OS_CYGWIN || GTEST_OS_SYMBIAN || GTEST_OS_AIX)
 
 // Determines whether test results can be streamed to a socket.
 #if GTEST_OS_LINUX
 # define GTEST_CAN_STREAM_RESULTS_ 1
 #endif
 
 // Defines some utility macros.
 
 // The GNU compiler emits a warning if nested "if" statements are followed by
 // an "else" statement and braces are not used to explicitly disambiguate the
 // "else" binding.  This leads to problems with code like:
 //
 //   if (gate)
 //     ASSERT_*(condition) << "Some message";
 //
 // The "switch (0) case 0:" idiom is used to suppress this.
 #ifdef __INTEL_COMPILER
 # define GTEST_AMBIGUOUS_ELSE_BLOCKER_
 #else
 # define GTEST_AMBIGUOUS_ELSE_BLOCKER_ switch (0) case 0: default:  // NOLINT
 #endif
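
 // For illustration only (MY_ASSERT_ and ReportFailure are hypothetical, not
 // part of Google Test), a statement-like macro would use the blocker so that
 // it remains a single well-formed statement even when the caller follows it
 // with an "else":
 //
 //   #define MY_ASSERT_(condition) \
 //     GTEST_AMBIGUOUS_ELSE_BLOCKER_ \
 //     if (condition) \
 //       ; \
 //     else \
 //       ReportFailure(#condition)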
 
 // Use this annotation at the end of a struct/class definition to
 // prevent the compiler from optimizing away instances that are never
 // used.  This is useful when all interesting logic happens inside the
 // c'tor and / or d'tor.  Example:
 //
 //   struct Foo {
 //     Foo() { ... }
 //   } GTEST_ATTRIBUTE_UNUSED_;
 //
 // Also use it after a variable or parameter declaration to tell the
 // compiler the variable/parameter does not have to be used.
 #if defined(__GNUC__) && !defined(COMPILER_ICC)
 # define GTEST_ATTRIBUTE_UNUSED_ __attribute__ ((unused))
 #elif defined(__clang__)
 # if __has_attribute(unused)
 #  define GTEST_ATTRIBUTE_UNUSED_ __attribute__ ((unused))
 # endif
 #endif
 #ifndef GTEST_ATTRIBUTE_UNUSED_
 # define GTEST_ATTRIBUTE_UNUSED_
 #endif
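
 // A minimal sketch of the parameter form described above (OnEvent is a
 // hypothetical function, not part of Google Test):
 //
 //   void OnEvent(int code GTEST_ATTRIBUTE_UNUSED_) {}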
 
 #if GTEST_LANG_CXX11
 # define GTEST_CXX11_EQUALS_DELETE_ = delete
 #else  // GTEST_LANG_CXX11
 # define GTEST_CXX11_EQUALS_DELETE_
 #endif  // GTEST_LANG_CXX11
 
 // Use this annotation before a function that takes a printf format string.
 #if (defined(__GNUC__) || defined(__clang__)) && !defined(COMPILER_ICC)
 # if defined(__MINGW_PRINTF_FORMAT)
 // MinGW has two different printf implementations. Ensure the format macro
 // matches the selected implementation. See
 // https://sourceforge.net/p/mingw-w64/wiki2/gnu%20printf/.
 #  define GTEST_ATTRIBUTE_PRINTF_(string_index, first_to_check) \
        __attribute__((__format__(__MINGW_PRINTF_FORMAT, string_index, \
                                  first_to_check)))
 # else
 #  define GTEST_ATTRIBUTE_PRINTF_(string_index, first_to_check) \
        __attribute__((__format__(__printf__, string_index, first_to_check)))
 # endif
 #else
 # define GTEST_ATTRIBUTE_PRINTF_(string_index, first_to_check)
 #endif
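
 // For example (a sketch; LogFormatted is hypothetical), a printf-style
 // function whose format string is the 1st argument and whose variadic
 // arguments start at the 2nd would be annotated as:
 //
 //   void LogFormatted(const char* format, ...) GTEST_ATTRIBUTE_PRINTF_(1, 2);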
 
 
 // A macro to disallow operator=
 // This should be used in the private: declarations for a class.
 #define GTEST_DISALLOW_ASSIGN_(type) \
   void operator=(type const &) GTEST_CXX11_EQUALS_DELETE_
 
 // A macro to disallow copy constructor and operator=
 // This should be used in the private: declarations for a class.
 #define GTEST_DISALLOW_COPY_AND_ASSIGN_(type) \
   type(type const &) GTEST_CXX11_EQUALS_DELETE_; \
   GTEST_DISALLOW_ASSIGN_(type)
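
 // Illustrative sketch only (Widget is a hypothetical class):
 //
 //   class Widget {
 //    public:
 //     Widget() {}
 //    private:
 //     GTEST_DISALLOW_COPY_AND_ASSIGN_(Widget);
 //   };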
 
 // Tell the compiler to warn about unused return values for functions declared
 // with this macro.  The macro should be used on function declarations
 // following the argument list:
 //
 //   Sprocket* AllocateSprocket() GTEST_MUST_USE_RESULT_;
 #if defined(__GNUC__) && (GTEST_GCC_VER_ >= 30400) && !defined(COMPILER_ICC)
 # define GTEST_MUST_USE_RESULT_ __attribute__ ((warn_unused_result))
 #else
 # define GTEST_MUST_USE_RESULT_
 #endif  // __GNUC__ && (GTEST_GCC_VER_ >= 30400) && !COMPILER_ICC
 
 // The MS C++ compiler emits a warning when a conditional expression is a
 // compile-time constant. In some contexts this warning is a false positive
 // and needs to be suppressed. Use the following two macros in such cases:
 //
 // GTEST_INTENTIONAL_CONST_COND_PUSH_()
 // while (true) {
 // GTEST_INTENTIONAL_CONST_COND_POP_()
 // }
 # define GTEST_INTENTIONAL_CONST_COND_PUSH_() \
     GTEST_DISABLE_MSC_WARNINGS_PUSH_(4127)
 # define GTEST_INTENTIONAL_CONST_COND_POP_() \
     GTEST_DISABLE_MSC_WARNINGS_POP_()
 
 // Determine whether the compiler supports Microsoft's Structured Exception
 // Handling.  This is supported by several Windows compilers but generally
 // does not exist on any other system.
 #ifndef GTEST_HAS_SEH
 // The user didn't tell us, so we need to figure it out.
 
 # if defined(_MSC_VER) || defined(__BORLANDC__)
 // These two compilers are known to support SEH.
 #  define GTEST_HAS_SEH 1
 # else
 // Assume no SEH.
 #  define GTEST_HAS_SEH 0
 # endif

 #endif  // GTEST_HAS_SEH

 #define GTEST_IS_THREADSAFE \
     (GTEST_HAS_MUTEX_AND_THREAD_LOCAL_ \
      || (GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT) \
      || GTEST_HAS_PTHREAD)
 
 // GTEST_API_ qualifies all symbols that must be exported. The definitions below
 // are guarded by #ifndef to give embedders a chance to define GTEST_API_ in
 // gtest/internal/custom/gtest-port.h
 #ifndef GTEST_API_
 
 #ifdef _MSC_VER
 # if GTEST_LINKED_AS_SHARED_LIBRARY
 #  define GTEST_API_ __declspec(dllimport)
 # elif GTEST_CREATE_SHARED_LIBRARY
 #  define GTEST_API_ __declspec(dllexport)
 # endif
 #elif __GNUC__ >= 4 || defined(__clang__)
 # define GTEST_API_ __attribute__((visibility ("default")))
 #endif  // _MSC_VER
 
 #endif  // GTEST_API_
 
 #ifndef GTEST_API_
 # define GTEST_API_
 #endif  // GTEST_API_
 
 #ifndef GTEST_DEFAULT_DEATH_TEST_STYLE
 # define GTEST_DEFAULT_DEATH_TEST_STYLE  "fast"
 #endif  // GTEST_DEFAULT_DEATH_TEST_STYLE
 
 #ifdef __GNUC__
 // Ask the compiler to never inline a given function.
 # define GTEST_NO_INLINE_ __attribute__((noinline))
 #else
 # define GTEST_NO_INLINE_
 #endif
 
 // _LIBCPP_VERSION is defined by the libc++ library from the LLVM project.
 #if !defined(GTEST_HAS_CXXABI_H_)
 # if defined(__GLIBCXX__) || (defined(_LIBCPP_VERSION) && !defined(_MSC_VER))
 #  define GTEST_HAS_CXXABI_H_ 1
 # else
 #  define GTEST_HAS_CXXABI_H_ 0
 # endif
 #endif
 
 // A function level attribute to disable checking for use of uninitialized
 // memory when built with MemorySanitizer.
 #if defined(__clang__)
 # if __has_feature(memory_sanitizer)
 #  define GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_ \
        __attribute__((no_sanitize_memory))
 # else
 #  define GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_
 # endif  // __has_feature(memory_sanitizer)
 #else
 # define GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_
 #endif  // __clang__
 
 // A function level attribute to disable AddressSanitizer instrumentation.
 #if defined(__clang__)
 # if __has_feature(address_sanitizer)
 #  define GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_ \
        __attribute__((no_sanitize_address))
 # else
 #  define GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_
 # endif  // __has_feature(address_sanitizer)
 #else
 # define GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_
 #endif  // __clang__
 
 // A function level attribute to disable ThreadSanitizer instrumentation.
 #if defined(__clang__)
 # if __has_feature(thread_sanitizer)
 #  define GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_ \
        __attribute__((no_sanitize_thread))
 # else
 #  define GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_
 # endif  // __has_feature(thread_sanitizer)
 #else
 # define GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_
 #endif  // __clang__
 
 namespace testing {
 
 class Message;
 
 #if defined(GTEST_TUPLE_NAMESPACE_)
 // Import tuple and friends into the ::testing namespace.
 // They are part of our interface; having them in ::testing allows us to
 // change their types as needed.
 using GTEST_TUPLE_NAMESPACE_::get;
 using GTEST_TUPLE_NAMESPACE_::make_tuple;
 using GTEST_TUPLE_NAMESPACE_::tuple;
 using GTEST_TUPLE_NAMESPACE_::tuple_size;
 using GTEST_TUPLE_NAMESPACE_::tuple_element;
 #endif  // defined(GTEST_TUPLE_NAMESPACE_)
 
 namespace internal {
 
 // A secret type that Google Test users don't know about.  It has no
 // definition on purpose.  Therefore it's impossible to create a
 // Secret object, which is what we want.
 class Secret;
 
 // The GTEST_COMPILE_ASSERT_ macro can be used to verify that a compile time
 // expression is true. For example, you could use it to verify the
 // size of a static array:
 //
 //   GTEST_COMPILE_ASSERT_(GTEST_ARRAY_SIZE_(names) == NUM_NAMES,
 //                         names_incorrect_size);
 //
 // or to make sure a struct is smaller than a certain size:
 //
 //   GTEST_COMPILE_ASSERT_(sizeof(foo) < 128, foo_too_large);
 //
 // The second argument to the macro is the name of the variable. If
 // the expression is false, most compilers will issue a warning/error
 // containing the name of the variable.
 
 #if GTEST_LANG_CXX11
 # define GTEST_COMPILE_ASSERT_(expr, msg) static_assert(expr, #msg)
 #else  // !GTEST_LANG_CXX11
 template <bool>
   struct CompileAssert {
 };
 
 # define GTEST_COMPILE_ASSERT_(expr, msg) \
   typedef ::testing::internal::CompileAssert<(static_cast<bool>(expr))> \
       msg[static_cast<bool>(expr) ? 1 : -1] GTEST_ATTRIBUTE_UNUSED_
 #endif  // !GTEST_LANG_CXX11
 
 // Implementation details of GTEST_COMPILE_ASSERT_:
 //
 // (In C++11, we simply use static_assert instead of the following)
 //
 // - GTEST_COMPILE_ASSERT_ works by defining an array type that has -1
 //   elements (and thus is invalid) when the expression is false.
 //
 // - The simpler definition
 //
 //    #define GTEST_COMPILE_ASSERT_(expr, msg) typedef char msg[(expr) ? 1 : -1]
 //
 //   does not work, as gcc supports variable-length arrays whose sizes
 //   are determined at run-time (this is gcc's extension and not part
 //   of the C++ standard).  As a result, gcc fails to reject the
 //   following code with the simple definition:
 //
 //     int foo;
 //     GTEST_COMPILE_ASSERT_(foo, msg); // not supposed to compile as foo is
 //                                      // not a compile-time constant.
 //
 // - By using the type CompileAssert<(bool(expr))>, we ensure that
 //   expr is a compile-time constant.  (Template arguments must be
 //   determined at compile-time.)
 //
 // - The outer parentheses in CompileAssert<(bool(expr))> are necessary
 //   to work around a bug in gcc 3.4.4 and 4.0.1.  If we had written
 //
 //     CompileAssert<bool(expr)>
 //
 //   instead, these compilers will refuse to compile
 //
 //     GTEST_COMPILE_ASSERT_(5 > 0, some_message);
 //
 //   (They seem to think the ">" in "5 > 0" marks the end of the
 //   template argument list.)
 //
 // - The array size is (bool(expr) ? 1 : -1), instead of simply
 //
 //     ((expr) ? 1 : -1).
 //
 //   This is to avoid running into a bug in MS VC 7.1, which
 //   causes ((0.0) ? 1 : -1) to incorrectly evaluate to 1.
 
 // StaticAssertTypeEqHelper is used by StaticAssertTypeEq defined in gtest.h.
 //
 // This template is declared, but intentionally undefined.
 template <typename T1, typename T2>
 struct StaticAssertTypeEqHelper;
 
 template <typename T>
 struct StaticAssertTypeEqHelper<T, T> {
   enum { value = true };
 };
 
 // Same as std::is_same<>.
 template <typename T, typename U>
 struct IsSame {
   enum { value = false };
 };
 template <typename T>
 struct IsSame<T, T> {
   enum { value = true };
 };
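
 // A brief illustration of the template above:
 //
 //   IsSame<int, int>::value   // evaluates to true
 //   IsSame<int, char>::value  // evaluates to false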
 
 // Evaluates to the number of elements in 'array'.
 #define GTEST_ARRAY_SIZE_(array) (sizeof(array) / sizeof(array[0]))
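
 // For instance (illustrative only; kNames is a hypothetical array), combined
 // with GTEST_COMPILE_ASSERT_:
 //
 //   const char* const kNames[] = { "alice", "bob" };
 //   GTEST_COMPILE_ASSERT_(GTEST_ARRAY_SIZE_(kNames) == 2, kNames_wrong_size);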
 
 #if GTEST_HAS_GLOBAL_STRING
 typedef ::string string;
 #else
 typedef ::std::string string;
 #endif  // GTEST_HAS_GLOBAL_STRING
 
 #if GTEST_HAS_GLOBAL_WSTRING
 typedef ::wstring wstring;
 #elif GTEST_HAS_STD_WSTRING
 typedef ::std::wstring wstring;
 #endif  // GTEST_HAS_GLOBAL_WSTRING
 
 // A helper for suppressing warnings on constant conditions.  It just
 // returns 'condition'.
 GTEST_API_ bool IsTrue(bool condition);
 
 // Defines scoped_ptr.
 
 // This implementation of scoped_ptr is PARTIAL - it only contains
 // enough stuff to satisfy Google Test's need.
 template <typename T>
 class scoped_ptr {
  public:
   typedef T element_type;
 
   explicit scoped_ptr(T* p = NULL) : ptr_(p) {}
   ~scoped_ptr() { reset(); }
 
   T& operator*() const { return *ptr_; }
   T* operator->() const { return ptr_; }
   T* get() const { return ptr_; }
 
   T* release() {
     T* const ptr = ptr_;
     ptr_ = NULL;
     return ptr;
   }
 
   void reset(T* p = NULL) {
     if (p != ptr_) {
       if (IsTrue(sizeof(T) > 0)) {  // Makes sure T is a complete type.
         delete ptr_;
       }
       ptr_ = p;
     }
   }
 
   friend void swap(scoped_ptr& a, scoped_ptr& b) {
     using std::swap;
     swap(a.ptr_, b.ptr_);
   }
 
  private:
   T* ptr_;
 
   GTEST_DISALLOW_COPY_AND_ASSIGN_(scoped_ptr);
 };
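
 // A minimal usage sketch of the partial scoped_ptr above:
 //
 //   {
 //     scoped_ptr<int> value(new int(42));
 //     *value = 7;    // Behaves like a raw pointer for * and ->.
 //   }                // The owned int is deleted when value goes out of scope.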
 
 // Defines RE.
 
 #if GTEST_USES_PCRE
 // if used, PCRE is injected by custom/gtest-port.h
 #elif GTEST_USES_POSIX_RE || GTEST_USES_SIMPLE_RE
 
 // A simple C++ wrapper for <regex.h>.  It uses the POSIX Extended
 // Regular Expression syntax.
 class GTEST_API_ RE {
  public:
   // A copy constructor is required by the Standard to initialize object
   // references from r-values.
   RE(const RE& other) { Init(other.pattern()); }
 
   // Constructs an RE from a string.
   RE(const ::std::string& regex) { Init(regex.c_str()); }  // NOLINT
 
 # if GTEST_HAS_GLOBAL_STRING
 
   RE(const ::string& regex) { Init(regex.c_str()); }  // NOLINT
 
 # endif  // GTEST_HAS_GLOBAL_STRING
 
   RE(const char* regex) { Init(regex); }  // NOLINT
   ~RE();
 
   // Returns the string representation of the regex.
   const char* pattern() const { return pattern_; }
 
   // FullMatch(str, re) returns true iff regular expression re matches
   // the entire str.
   // PartialMatch(str, re) returns true iff regular expression re
   // matches a substring of str (including str itself).
   //
   // TODO(wan@google.com): make FullMatch() and PartialMatch() work
   // when str contains NUL characters.
   static bool FullMatch(const ::std::string& str, const RE& re) {
     return FullMatch(str.c_str(), re);
   }
   static bool PartialMatch(const ::std::string& str, const RE& re) {
     return PartialMatch(str.c_str(), re);
   }
 
 # if GTEST_HAS_GLOBAL_STRING
 
   static bool FullMatch(const ::string& str, const RE& re) {
     return FullMatch(str.c_str(), re);
   }
   static bool PartialMatch(const ::string& str, const RE& re) {
     return PartialMatch(str.c_str(), re);
   }
 
 # endif  // GTEST_HAS_GLOBAL_STRING
 
   static bool FullMatch(const char* str, const RE& re);
   static bool PartialMatch(const char* str, const RE& re);
 
  private:
   void Init(const char* regex);
 
   // We use a const char* instead of an std::string, as Google Test used to be
   // used where std::string is not available.  TODO(wan@google.com): change to
   // std::string.
   const char* pattern_;
   bool is_valid_;
 
 # if GTEST_USES_POSIX_RE
 
   regex_t full_regex_;     // For FullMatch().
   regex_t partial_regex_;  // For PartialMatch().
 
 # else  // GTEST_USES_SIMPLE_RE
 
   const char* full_pattern_;  // For FullMatch().
 
 # endif
 
   GTEST_DISALLOW_ASSIGN_(RE);
 };
 
 #endif  // GTEST_USES_PCRE
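
 // An illustrative example of the matching semantics described above:
 //
 //   const RE re("a.*z");
 //   RE::FullMatch("abcz", re);         // true: re matches the entire string.
 //   RE::FullMatch("xxabczxx", re);     // false.
 //   RE::PartialMatch("xxabczxx", re);  // true: re matches a substring.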
 
 // Formats a source file path and a line number as they would appear
 // in an error message from the compiler used to compile this code.
 GTEST_API_ ::std::string FormatFileLocation(const char* file, int line);
 
 // Formats a file location for compiler-independent XML output.
 // Although this function is not platform dependent, we put it next to
 // FormatFileLocation in order to contrast the two functions.
 GTEST_API_ ::std::string FormatCompilerIndependentFileLocation(const char* file,
                                                                int line);
 
 // Defines logging utilities:
 //   GTEST_LOG_(severity) - logs messages at the specified severity level. The
 //                          message itself is streamed into the macro.
 //   LogToStderr()  - directs all log messages to stderr.
 //   FlushInfoLog() - flushes informational log messages.
 
 enum GTestLogSeverity {
   GTEST_INFO,
   GTEST_WARNING,
   GTEST_ERROR,
   GTEST_FATAL
 };
 
 // Formats log entry severity, provides a stream object for streaming the
 // log message, and terminates the message with a newline when going out of
 // scope.
 class GTEST_API_ GTestLog {
  public:
   GTestLog(GTestLogSeverity severity, const char* file, int line);
 
   // Flushes the buffers and, if severity is GTEST_FATAL, aborts the program.
   ~GTestLog();
 
   ::std::ostream& GetStream() { return ::std::cerr; }
 
  private:
   const GTestLogSeverity severity_;
 
   GTEST_DISALLOW_COPY_AND_ASSIGN_(GTestLog);
 };
 
 #if !defined(GTEST_LOG_)
 
 # define GTEST_LOG_(severity) \
     ::testing::internal::GTestLog(::testing::internal::GTEST_##severity, \
                                   __FILE__, __LINE__).GetStream()
 
 inline void LogToStderr() {}
 inline void FlushInfoLog() { fflush(NULL); }
 
 #endif  // !defined(GTEST_LOG_)
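
 // For example (an illustrative sketch only):
 //
 //   GTEST_LOG_(WARNING) << "Unexpected value: " << 42;
 //
 // formats the severity and location, streams the message, and appends a
 // newline when the temporary GTestLog object goes out of scope.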
 
 #if !defined(GTEST_CHECK_)
 // INTERNAL IMPLEMENTATION - DO NOT USE.
 //
 // GTEST_CHECK_ is an all-mode assert. It aborts the program if the condition
 // is not satisfied.
 //  Synopsis:
 //    GTEST_CHECK_(boolean_condition);
 //     or
 //    GTEST_CHECK_(boolean_condition) << "Additional message";
 //
 //    This checks the condition, and if it is not satisfied, it prints a
 //    message about the condition violation, including the condition itself
 //    and any additional message streamed into it, and then aborts the
 //    program.  It aborts irrespective of whether the binary is built in
 //    debug mode or not.
 # define GTEST_CHECK_(condition) \
     GTEST_AMBIGUOUS_ELSE_BLOCKER_ \
     if (::testing::internal::IsTrue(condition)) \
       ; \
     else \
       GTEST_LOG_(FATAL) << "Condition " #condition " failed. "
 #endif  // !defined(GTEST_CHECK_)
 
 // An all-mode assert to verify that the given POSIX-style function
 // call returns 0 (indicating success).  Known limitation: this
 // doesn't expand to a balanced 'if' statement, so enclose the macro
 // in {} if you need to use it as the only statement in an 'if'
 // branch.
 #define GTEST_CHECK_POSIX_SUCCESS_(posix_call) \
   if (const int gtest_error = (posix_call)) \
     GTEST_LOG_(FATAL) << #posix_call << " failed with error " \
                       << gtest_error
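
 // A sketch of the braces recommended above when the macro is the only
 // statement in an "if" branch (need_lock and mu are hypothetical; mu is a
 // pthread mutex):
 //
 //   if (need_lock) {
 //     GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_lock(&mu));
 //   }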
 
 // Adds a reference to a type if it is not a reference type,
 // otherwise leaves it unchanged.  This is the same as
 // tr1::add_reference, which is not widely available yet.
 template <typename T>
 struct AddReference { typedef T& type; };  // NOLINT
 template <typename T>
 struct AddReference<T&> { typedef T& type; };  // NOLINT
 
 // A handy wrapper around AddReference that works when the argument T
 // depends on template parameters.
 #define GTEST_ADD_REFERENCE_(T) \
     typename ::testing::internal::AddReference<T>::type
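
 // For illustration, following the specializations above:
 //
 //   GTEST_ADD_REFERENCE_(int)   ==>  int&
 //   GTEST_ADD_REFERENCE_(int&)  ==>  int&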
 
 // Transforms "T" into "const T&" according to standard reference collapsing
 // rules (this is only needed as a backport for C++98 compilers that do not
 // support reference collapsing). Specifically, it transforms:
 //
 //   char         ==> const char&
 //   const char   ==> const char&
 //   char&        ==> char&
 //   const char&  ==> const char&
 //
 // Note that the non-const reference will not have "const" added. This is
 // standard, and necessary so that "T" can always bind to "const T&".
 template <typename T>
 struct ConstRef { typedef const T& type; };
 template <typename T>
 struct ConstRef<T&> { typedef T& type; };
 
 // The argument T must depend on some template parameters.
 #define GTEST_REFERENCE_TO_CONST_(T) \
   typename ::testing::internal::ConstRef<T>::type
 
 #if GTEST_HAS_STD_MOVE_
 using std::forward;
 using std::move;
 
 template <typename T>
 struct RvalueRef {
   typedef T&& type;
 };
 #else  // GTEST_HAS_STD_MOVE_
 template <typename T>
 const T& move(const T& t) {
   return t;
 }
 template <typename T>
 GTEST_ADD_REFERENCE_(T) forward(GTEST_ADD_REFERENCE_(T) t) { return t; }
 
 template <typename T>
 struct RvalueRef {
   typedef const T& type;
 };
 #endif  // GTEST_HAS_STD_MOVE_
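
 // A minimal sketch of how RvalueRef is meant to be used (Adopt is a
 // hypothetical function), a parameter declared as
 //
 //   template <typename T>
 //   void Adopt(typename RvalueRef<T>::type value);
 //
 // resolves to T&& when GTEST_HAS_STD_MOVE_ is set and to const T& otherwise.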
 
 // INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.
 //
 // Use ImplicitCast_ as a safe version of static_cast for upcasting in
 // the type hierarchy (e.g. casting a Foo* to a SuperclassOfFoo* or a
 // const Foo*).  When you use ImplicitCast_, the compiler checks that
 // the cast is safe.  Such explicit ImplicitCast_s are necessary in
 // surprisingly many situations where C++ demands an exact type match
 // instead of an argument type convertible to a target type.
 //
 // The syntax for using ImplicitCast_ is the same as for static_cast:
 //
 //   ImplicitCast_<ToType>(expr)
 //
 // ImplicitCast_ would have been part of the C++ standard library,
 // but the proposal was submitted too late.  It will probably make
 // its way into the language in the future.
 //
 // This relatively ugly name is intentional. It prevents clashes with
 // similar functions users may have (e.g., implicit_cast). The internal
 // namespace alone is not enough because the function can be found by ADL.
 template<typename To>
 inline To ImplicitCast_(To x) { return x; }
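
 // For example (Base and Derived are hypothetical types, Derived inheriting
 // from Base):
 //
 //   Derived d;
 //   Base* b = ImplicitCast_<Base*>(&d);  // OK: an upcast the compiler checks.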
 
 // When you upcast (that is, cast a pointer from type Foo to type
 // SuperclassOfFoo), it's fine to use ImplicitCast_<>, since upcasts
 // always succeed.  When you downcast (that is, cast a pointer from
 // type Foo to type SubclassOfFoo), static_cast<> isn't safe, because
 // how do you know the pointer is really of type SubclassOfFoo?  It
 // could be a bare Foo, or of type DifferentSubclassOfFoo.  Thus,
 // when you downcast, you should use this macro.  In debug mode, we
 // use dynamic_cast<> to double-check the downcast is legal (we die
 // if it's not).  In normal mode, we do the efficient static_cast<>
 // instead.  Thus, it's important to test in debug mode to make sure
 // the cast is legal!
 //    This is the only place in the code we should use dynamic_cast<>.
 // In particular, you SHOULDN'T be using dynamic_cast<> in order to
 // do RTTI (e.g. code like this:
 //    if (dynamic_cast<Subclass1*>(foo)) HandleASubclass1Object(foo);
 //    if (dynamic_cast<Subclass2*>(foo)) HandleASubclass2Object(foo);
 // ).
 // You should design the code some other way so as not to need this.
 //
 // This relatively ugly name is intentional. It prevents clashes with
 // similar functions users may have (e.g., down_cast). The internal
 // namespace alone is not enough because the function can be found by ADL.
 template<typename To, typename From>  // use like this: DownCast_<T*>(foo);
 inline To DownCast_(From* f) {  // so we only accept pointers
   // Ensures that To is a sub-type of From *.  This test is here only
   // for compile-time type checking, and has no overhead in an
   // optimized build at run-time, as it will be optimized away
   // completely.
   GTEST_INTENTIONAL_CONST_COND_PUSH_()
   if (false) {
   GTEST_INTENTIONAL_CONST_COND_POP_()
     const To to = NULL;
     ::testing::internal::ImplicitCast_<From*>(to);
   }
 
 #if GTEST_HAS_RTTI
   // RTTI: debug mode only!
   GTEST_CHECK_(f == NULL || dynamic_cast<To>(f) != NULL);
 #endif
   return static_cast<To>(f);
 }
 
 // Downcasts the pointer of type Base to Derived.
 // Derived must be a subclass of Base. The parameter MUST
 // point to a class of type Derived, not any subclass of it.
 // When RTTI is available, the function performs a runtime
 // check to enforce this.
 template <class Derived, class Base>
 Derived* CheckedDowncastToActualType(Base* base) {
 #if GTEST_HAS_RTTI
   GTEST_CHECK_(typeid(*base) == typeid(Derived));
 #endif
 
 #if GTEST_HAS_DOWNCAST_
   return ::down_cast<Derived*>(base);
 #elif GTEST_HAS_RTTI
   return dynamic_cast<Derived*>(base);  // NOLINT
 #else
   return static_cast<Derived*>(base);  // Poor man's downcast.
 #endif
 }
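
 // A usage sketch (Base and Derived are hypothetical types; base must point
 // to an object whose dynamic type is exactly Derived):
 //
 //   Base* base = new Derived;
 //   Derived* derived = CheckedDowncastToActualType<Derived>(base);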
 
 #if GTEST_HAS_STREAM_REDIRECTION
 
 // Defines the stdout/stderr capturers:
 //   CaptureStdout     - starts capturing stdout.
 //   GetCapturedStdout - stops capturing stdout and returns the captured string.
 //   CaptureStderr     - starts capturing stderr.
 //   GetCapturedStderr - stops capturing stderr and returns the captured string.
 //
 GTEST_API_ void CaptureStdout();
 GTEST_API_ std::string GetCapturedStdout();
 GTEST_API_ void CaptureStderr();
 GTEST_API_ std::string GetCapturedStderr();
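
 // A typical usage sketch in a test:
 //
 //   CaptureStdout();
 //   printf("hello\n");
 //   const std::string output = GetCapturedStdout();  // output == "hello\n"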
 
 #endif  // GTEST_HAS_STREAM_REDIRECTION

 // Returns the size (in bytes) of a file.
 GTEST_API_ size_t GetFileSize(FILE* file);
 
 // Reads the entire content of a file as a string.
 GTEST_API_ std::string ReadEntireFile(FILE* file);
 
 // All command line arguments.
 GTEST_API_ std::vector<std::string> GetArgvs();
 
 #if GTEST_HAS_DEATH_TEST
 
 std::vector<std::string> GetInjectableArgvs();
 // Deprecated: pass the args vector by value instead.
 void SetInjectableArgvs(const std::vector<std::string>* new_argvs);
 void SetInjectableArgvs(const std::vector<std::string>& new_argvs);
 #if GTEST_HAS_GLOBAL_STRING
 void SetInjectableArgvs(const std::vector< ::string>& new_argvs);
 #endif  // GTEST_HAS_GLOBAL_STRING
 void ClearInjectableArgvs();
 
 #endif  // GTEST_HAS_DEATH_TEST
 
 // Defines synchronization primitives.
 #if GTEST_IS_THREADSAFE
 # if GTEST_HAS_PTHREAD
 // Sleeps for (roughly) n milliseconds.  This function is only for testing
 // Google Test's own constructs.  Don't use it in user tests, either
 // directly or indirectly.
 inline void SleepMilliseconds(int n) {
   const timespec time = {
     0,                  // 0 seconds.
     n * 1000L * 1000L,  // And n ms.
   };
   nanosleep(&time, NULL);
 }
 # endif  // GTEST_HAS_PTHREAD
 
 # if GTEST_HAS_NOTIFICATION_
 // Notification has already been imported into the namespace.
 // Nothing to do here.
 
 # elif GTEST_HAS_PTHREAD
 // Allows a controller thread to pause execution of newly created
 // threads until notified.  Instances of this class must be created
 // and destroyed in the controller thread.
 //
 // This class is only for testing Google Test's own constructs. Do not
 // use it in user tests, either directly or indirectly.
 class Notification {
  public:
   Notification() : notified_(false) {
     GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_init(&mutex_, NULL));
   }
   ~Notification() {
     pthread_mutex_destroy(&mutex_);
   }
 
   // Notifies all threads created with this notification to start. Must
   // be called from the controller thread.
   void Notify() {
     pthread_mutex_lock(&mutex_);
     notified_ = true;
     pthread_mutex_unlock(&mutex_);
   }
 
   // Blocks until the controller thread notifies. Must be called from a test
   // thread.
   void WaitForNotification() {
     for (;;) {
       pthread_mutex_lock(&mutex_);
       const bool notified = notified_;
       pthread_mutex_unlock(&mutex_);
       if (notified)
         break;
       SleepMilliseconds(10);
     }
   }
 
  private:
   pthread_mutex_t mutex_;
   bool notified_;
 
   GTEST_DISALLOW_COPY_AND_ASSIGN_(Notification);
 };
 
 # elif GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT
 
 GTEST_API_ void SleepMilliseconds(int n);
 
 // Provides leak-safe Windows kernel handle ownership.
 // Used in death tests and in threading support.
 class GTEST_API_ AutoHandle {
  public:
   // Assume that Win32 HANDLE type is equivalent to void*. Doing so allows us to
   // avoid including <windows.h> in this header file. Including <windows.h> is
   // undesirable because it defines a lot of symbols and macros that tend to
   // conflict with client code. This assumption is verified by
   // WindowsTypesTest.HANDLEIsVoidStar.
   typedef void* Handle;
   AutoHandle();
   explicit AutoHandle(Handle handle);
 
   ~AutoHandle();
 
   Handle Get() const;
   void Reset();
   void Reset(Handle handle);
 
  private:
   // Returns true iff the handle is a valid handle object that can be closed.
   bool IsCloseable() const;
 
   Handle handle_;
 
   GTEST_DISALLOW_COPY_AND_ASSIGN_(AutoHandle);
 };
 
 // Allows a controller thread to pause execution of newly created
 // threads until notified.  Instances of this class must be created
 // and destroyed in the controller thread.
 //
 // This class is only for testing Google Test's own constructs. Do not
 // use it in user tests, either directly or indirectly.
 class GTEST_API_ Notification {
  public:
   Notification();
   void Notify();
   void WaitForNotification();
 
  private:
   AutoHandle event_;
 
   GTEST_DISALLOW_COPY_AND_ASSIGN_(Notification);
 };
 # endif  // GTEST_HAS_NOTIFICATION_
 
 // On MinGW, we can have both GTEST_OS_WINDOWS and GTEST_HAS_PTHREAD
 // defined, but we don't want to use MinGW's pthreads implementation, which
 // has conformance problems with some versions of the POSIX standard.
 # if GTEST_HAS_PTHREAD && !GTEST_OS_WINDOWS_MINGW
 
 // As a C-function, ThreadFuncWithCLinkage cannot be templated itself.
 // Consequently, it cannot select a correct instantiation of ThreadWithParam
 // in order to call its Run(). Introducing ThreadWithParamBase as a
 // non-templated base class for ThreadWithParam allows us to bypass this
 // problem.
 class ThreadWithParamBase {
  public:
   virtual ~ThreadWithParamBase() {}
   virtual void Run() = 0;
 };
 
 // pthread_create() accepts a pointer to a function type with C linkage.
 // According to the Standard (7.5/1), function types with different linkages
 // are different even if they are otherwise identical.  Some compilers (for
 // example, SunStudio) treat them as different types.  Since class methods
 // cannot be defined with C linkage, we need to define a free C-function to
 // pass into pthread_create().
 extern "C" inline void* ThreadFuncWithCLinkage(void* thread) {
   static_cast<ThreadWithParamBase*>(thread)->Run();
   return NULL;
 }
 
 // Helper class for testing Google Test's multi-threading constructs.
 // To use it, write:
 //
 //   void ThreadFunc(int param) { /* Do things with param */ }
 //   Notification thread_can_start;
 //   ...
 //   // The thread_can_start parameter is optional; you can supply NULL.
 //   ThreadWithParam<int> thread(&ThreadFunc, 5, &thread_can_start);
 //   thread_can_start.Notify();
 //
 // These classes are only for testing Google Test's own constructs. Do
 // not use them in user tests, either directly or indirectly.
 template <typename T>
 class ThreadWithParam : public ThreadWithParamBase {
  public:
   typedef void UserThreadFunc(T);
 
   ThreadWithParam(UserThreadFunc* func, T param, Notification* thread_can_start)
       : func_(func),
         param_(param),
         thread_can_start_(thread_can_start),
         finished_(false) {
     ThreadWithParamBase* const base = this;
     // The thread can be created only after all fields except thread_
     // have been initialized.
     GTEST_CHECK_POSIX_SUCCESS_(
         pthread_create(&thread_, 0, &ThreadFuncWithCLinkage, base));
   }
   ~ThreadWithParam() { Join(); }
 
   void Join() {
     if (!finished_) {
       GTEST_CHECK_POSIX_SUCCESS_(pthread_join(thread_, 0));
       finished_ = true;
     }
   }
 
   virtual void Run() {
     if (thread_can_start_ != NULL)
       thread_can_start_->WaitForNotification();
     func_(param_);
   }
 
  private:
   UserThreadFunc* const func_;  // User-supplied thread function.
   const T param_;  // User-supplied parameter to the thread function.
   // When non-NULL, used to block execution until the controller thread
   // notifies.
   Notification* const thread_can_start_;
   bool finished_;  // true iff we know that the thread function has finished.
   pthread_t thread_;  // The native thread object.
 
   GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadWithParam);
 };
 # endif  // GTEST_HAS_PTHREAD && !GTEST_OS_WINDOWS_MINGW
 
 # if GTEST_HAS_MUTEX_AND_THREAD_LOCAL_
 // Mutex and ThreadLocal have already been imported into the namespace.
 // Nothing to do here.
 
 # elif GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT
 
 // Mutex implements mutex on Windows platforms.  It is used in conjunction
 // with class MutexLock:
 //
 //   Mutex mutex;
 //   ...
 //   MutexLock lock(&mutex);  // Acquires the mutex and releases it at the
 //                            // end of the current scope.
 //
 // A static Mutex *must* be defined or declared using one of the following
 // macros:
 //   GTEST_DEFINE_STATIC_MUTEX_(g_some_mutex);
 //   GTEST_DECLARE_STATIC_MUTEX_(g_some_mutex);
 //
 // (A non-static Mutex is defined/declared in the usual way).
 class GTEST_API_ Mutex {
  public:
   enum MutexType { kStatic = 0, kDynamic = 1 };
   // We rely on kStaticMutex being 0, as that is what the linker initializes
   // type_ to in static mutexes.  critical_section_ will be initialized
   // lazily in ThreadSafeLazyInit().
   enum StaticConstructorSelector { kStaticMutex = 0 };
 
   // This constructor intentionally does nothing.  It relies on type_ being
   // statically initialized to 0 (effectively setting it to kStatic) and on
   // ThreadSafeLazyInit() to lazily initialize the rest of the members.
   explicit Mutex(StaticConstructorSelector /*dummy*/) {}
 
   Mutex();
   ~Mutex();
 
   void Lock();
 
   void Unlock();
 
   // Does nothing if the current thread holds the mutex. Otherwise, crashes
   // with high probability.
   void AssertHeld();
 
  private:
   // Initializes owner_thread_id_ and critical_section_ in static mutexes.
   void ThreadSafeLazyInit();
 
-  // Per http://blogs.msdn.com/b/oldnewthing/archive/2004/02/23/78395.aspx,
+  // Per https://blogs.msdn.microsoft.com/oldnewthing/20040223-00/?p=40503,
   // we assume that 0 is an invalid value for thread IDs.
   unsigned int owner_thread_id_;
 
   // For static mutexes, we rely on these members being initialized to zeros
   // by the linker.
   MutexType type_;
   long critical_section_init_phase_;  // NOLINT
   GTEST_CRITICAL_SECTION* critical_section_;
 
   GTEST_DISALLOW_COPY_AND_ASSIGN_(Mutex);
 };
 
 # define GTEST_DECLARE_STATIC_MUTEX_(mutex) \
     extern ::testing::internal::Mutex mutex
 
 # define GTEST_DEFINE_STATIC_MUTEX_(mutex) \
     ::testing::internal::Mutex mutex(::testing::internal::Mutex::kStaticMutex)
 
 // We cannot name this class MutexLock because the ctor declaration would
 // conflict with a macro named MutexLock, which is defined on some
 // platforms. That macro is used as a defensive measure to protect against
 // inadvertent misuses of MutexLock like "MutexLock(&mu)" rather than
 // "MutexLock l(&mu)".  Hence the typedef trick below.
 class GTestMutexLock {
  public:
   explicit GTestMutexLock(Mutex* mutex)
       : mutex_(mutex) { mutex_->Lock(); }
 
   ~GTestMutexLock() { mutex_->Unlock(); }
 
  private:
   Mutex* const mutex_;
 
   GTEST_DISALLOW_COPY_AND_ASSIGN_(GTestMutexLock);
 };
 
 typedef GTestMutexLock MutexLock;
 
 // Base class for ValueHolder<T>.  Allows a caller to hold and delete a value
 // without knowing its type.
 class ThreadLocalValueHolderBase {
  public:
   virtual ~ThreadLocalValueHolderBase() {}
 };
 
 // Provides a way for a thread to send notifications to a ThreadLocal
 // regardless of its parameter type.
 class ThreadLocalBase {
  public:
   // Creates a new ValueHolder<T> object holding a default value passed to
   // this ThreadLocal<T>'s constructor and returns it.  It is the caller's
   // responsibility not to call this when the ThreadLocal<T> instance already
   // has a value on the current thread.
   virtual ThreadLocalValueHolderBase* NewValueForCurrentThread() const = 0;
 
  protected:
   ThreadLocalBase() {}
   virtual ~ThreadLocalBase() {}
 
  private:
   GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadLocalBase);
 };
 
 // Maps a thread to a set of ThreadLocals that have values instantiated on that
 // thread and notifies them when the thread exits.  A ThreadLocal instance is
 // expected to persist until all threads it has values on have terminated.
 class GTEST_API_ ThreadLocalRegistry {
  public:
   // Registers thread_local_instance as having value on the current thread.
   // Returns a value that can be used to identify the thread from other threads.
   static ThreadLocalValueHolderBase* GetValueOnCurrentThread(
       const ThreadLocalBase* thread_local_instance);
 
   // Invoked when a ThreadLocal instance is destroyed.
   static void OnThreadLocalDestroyed(
       const ThreadLocalBase* thread_local_instance);
 };
 
 class GTEST_API_ ThreadWithParamBase {
  public:
   void Join();
 
  protected:
   class Runnable {
    public:
     virtual ~Runnable() {}
     virtual void Run() = 0;
   };
 
   ThreadWithParamBase(Runnable *runnable, Notification* thread_can_start);
   virtual ~ThreadWithParamBase();
 
  private:
   AutoHandle thread_;
 };
 
 // Helper class for testing Google Test's multi-threading constructs.
 template <typename T>
 class ThreadWithParam : public ThreadWithParamBase {
  public:
   typedef void UserThreadFunc(T);
 
   ThreadWithParam(UserThreadFunc* func, T param, Notification* thread_can_start)
       : ThreadWithParamBase(new RunnableImpl(func, param), thread_can_start) {
   }
   virtual ~ThreadWithParam() {}
 
  private:
   class RunnableImpl : public Runnable {
    public:
     RunnableImpl(UserThreadFunc* func, T param)
         : func_(func),
           param_(param) {
     }
     virtual ~RunnableImpl() {}
     virtual void Run() {
       func_(param_);
     }
 
    private:
     UserThreadFunc* const func_;
     const T param_;
 
     GTEST_DISALLOW_COPY_AND_ASSIGN_(RunnableImpl);
   };
 
   GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadWithParam);
 };
 
 // Implements thread-local storage on Windows systems.
 //
 //   // Thread 1
 //   ThreadLocal<int> tl(100);  // 100 is the default value for each thread.
 //
 //   // Thread 2
 //   tl.set(150);  // Changes the value for thread 2 only.
 //   EXPECT_EQ(150, tl.get());
 //
 //   // Thread 1
 //   EXPECT_EQ(100, tl.get());  // In thread 1, tl has the original value.
 //   tl.set(200);
 //   EXPECT_EQ(200, tl.get());
 //
 // The template type argument T must have a public copy constructor.
 // In addition, the default ThreadLocal constructor requires T to have
 // a public default constructor.
 //
 // The users of a ThreadLocal instance have to make sure that all but one of
 // the threads (including the main one) using that instance have exited
 // before destroying it. Otherwise, the per-thread objects managed for them
 // by the ThreadLocal instance are not guaranteed to be destroyed on all
 // platforms.
 //
 // Google Test only uses global ThreadLocal objects.  That means they
 // will die after main() has returned.  Therefore, no per-thread
 // object managed by Google Test will be leaked as long as all threads
 // using Google Test have exited when main() returns.
 template <typename T>
 class ThreadLocal : public ThreadLocalBase {
  public:
   ThreadLocal() : default_factory_(new DefaultValueHolderFactory()) {}
   explicit ThreadLocal(const T& value)
       : default_factory_(new InstanceValueHolderFactory(value)) {}
 
   ~ThreadLocal() { ThreadLocalRegistry::OnThreadLocalDestroyed(this); }
 
   T* pointer() { return GetOrCreateValue(); }
   const T* pointer() const { return GetOrCreateValue(); }
   const T& get() const { return *pointer(); }
   void set(const T& value) { *pointer() = value; }
 
  private:
   // Holds a value of T.  Can be deleted via its base class without the caller
   // knowing the type of T.
   class ValueHolder : public ThreadLocalValueHolderBase {
    public:
     ValueHolder() : value_() {}
     explicit ValueHolder(const T& value) : value_(value) {}
 
     T* pointer() { return &value_; }
 
    private:
     T value_;
     GTEST_DISALLOW_COPY_AND_ASSIGN_(ValueHolder);
   };
 
 
   T* GetOrCreateValue() const {
     return static_cast<ValueHolder*>(
         ThreadLocalRegistry::GetValueOnCurrentThread(this))->pointer();
   }
 
   virtual ThreadLocalValueHolderBase* NewValueForCurrentThread() const {
     return default_factory_->MakeNewHolder();
   }
 
   class ValueHolderFactory {
    public:
     ValueHolderFactory() {}
     virtual ~ValueHolderFactory() {}
     virtual ValueHolder* MakeNewHolder() const = 0;
 
    private:
     GTEST_DISALLOW_COPY_AND_ASSIGN_(ValueHolderFactory);
   };
 
   class DefaultValueHolderFactory : public ValueHolderFactory {
    public:
     DefaultValueHolderFactory() {}
     virtual ValueHolder* MakeNewHolder() const { return new ValueHolder(); }
 
    private:
     GTEST_DISALLOW_COPY_AND_ASSIGN_(DefaultValueHolderFactory);
   };
 
   class InstanceValueHolderFactory : public ValueHolderFactory {
    public:
     explicit InstanceValueHolderFactory(const T& value) : value_(value) {}
     virtual ValueHolder* MakeNewHolder() const {
       return new ValueHolder(value_);
     }
 
    private:
     const T value_;  // The value for each thread.
 
     GTEST_DISALLOW_COPY_AND_ASSIGN_(InstanceValueHolderFactory);
   };
 
   scoped_ptr<ValueHolderFactory> default_factory_;
 
   GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadLocal);
 };
 
 # elif GTEST_HAS_PTHREAD
 
 // MutexBase and Mutex implement mutex on pthreads-based platforms.
 class MutexBase {
  public:
   // Acquires this mutex.
   void Lock() {
     GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_lock(&mutex_));
     owner_ = pthread_self();
     has_owner_ = true;
   }
 
   // Releases this mutex.
   void Unlock() {
     // Since the lock is being released the owner_ field should no longer be
     // considered valid. We don't protect writing to has_owner_ here, as it's
     // the caller's responsibility to ensure that the current thread holds the
     // mutex when this is called.
     has_owner_ = false;
     GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_unlock(&mutex_));
   }
 
   // Does nothing if the current thread holds the mutex. Otherwise, crashes
   // with high probability.
   void AssertHeld() const {
     GTEST_CHECK_(has_owner_ && pthread_equal(owner_, pthread_self()))
         << "The current thread is not holding the mutex @" << this;
   }
 
   // A static mutex may be used before main() is entered.  It may even
   // be used before the dynamic initialization stage.  Therefore we
   // must be able to initialize a static mutex object at link time.
   // This means MutexBase has to be a POD and its member variables
   // have to be public.
  public:
   pthread_mutex_t mutex_;  // The underlying pthread mutex.
   // has_owner_ indicates whether the owner_ field below contains a valid thread
   // ID and is therefore safe to inspect (e.g., to use in pthread_equal()). All
   // accesses to the owner_ field should be protected by a check of this field.
   // An alternative might be to memset() owner_ to all zeros, but there's no
   // guarantee that a zero'd pthread_t is necessarily invalid or even different
   // from pthread_self().
   bool has_owner_;
   pthread_t owner_;  // The thread holding the mutex.
 };
 
 // Forward-declares a static mutex.
 #  define GTEST_DECLARE_STATIC_MUTEX_(mutex) \
      extern ::testing::internal::MutexBase mutex
 
 // Defines and statically (i.e. at link time) initializes a static mutex.
 // The initialization list here does not explicitly initialize each field,
 // instead relying on default initialization for the unspecified fields. In
 // particular, the owner_ field (a pthread_t) is not explicitly initialized.
 // This allows initialization to work whether pthread_t is a scalar or struct.
 // The flag -Wmissing-field-initializers must not be specified for this to work.
 #  define GTEST_DEFINE_STATIC_MUTEX_(mutex) \
      ::testing::internal::MutexBase mutex = { PTHREAD_MUTEX_INITIALIZER, false, 0 }
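
 // For illustration only (g_hypothetical_mutex is not a real Google Test
 // mutex):
 //
 //   GTEST_DEFINE_STATIC_MUTEX_(g_hypothetical_mutex);
 //   ...
 //   { MutexLock lock(&g_hypothetical_mutex); /* critical section */ }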
 
 // The Mutex class can only be used for mutexes created at runtime. It
 // shares its API with MutexBase otherwise.
 class Mutex : public MutexBase {
  public:
   Mutex() {
     GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_init(&mutex_, NULL));
     has_owner_ = false;
   }
   ~Mutex() {
     GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_destroy(&mutex_));
   }
 
  private:
   GTEST_DISALLOW_COPY_AND_ASSIGN_(Mutex);
 };
 
 // We cannot name this class MutexLock because the ctor declaration would
 // conflict with a macro named MutexLock, which is defined on some
 // platforms. That macro is used as a defensive measure to protect against
 // inadvertent misuses of MutexLock like "MutexLock(&mu)" rather than
 // "MutexLock l(&mu)".  Hence the typedef trick below.
 class GTestMutexLock {
  public:
   explicit GTestMutexLock(MutexBase* mutex)
       : mutex_(mutex) { mutex_->Lock(); }
 
   ~GTestMutexLock() { mutex_->Unlock(); }
 
  private:
   MutexBase* const mutex_;
 
   GTEST_DISALLOW_COPY_AND_ASSIGN_(GTestMutexLock);
 };
 
 typedef GTestMutexLock MutexLock;
 
 // Helpers for ThreadLocal.
 
 // pthread_key_create() requires DeleteThreadLocalValue() to have
 // C-linkage.  Therefore it cannot be templatized to access
 // ThreadLocal<T>.  Hence the need for class
 // ThreadLocalValueHolderBase.
 class ThreadLocalValueHolderBase {
  public:
   virtual ~ThreadLocalValueHolderBase() {}
 };
 
 // Called by pthread to delete thread-local data stored by
 // pthread_setspecific().
 extern "C" inline void DeleteThreadLocalValue(void* value_holder) {
   delete static_cast<ThreadLocalValueHolderBase*>(value_holder);
 }
 
 // Implements thread-local storage on pthreads-based systems.
 template <typename T>
 class GTEST_API_ ThreadLocal {
  public:
   ThreadLocal()
       : key_(CreateKey()), default_factory_(new DefaultValueHolderFactory()) {}
   explicit ThreadLocal(const T& value)
       : key_(CreateKey()),
         default_factory_(new InstanceValueHolderFactory(value)) {}
 
   ~ThreadLocal() {
     // Destroys the managed object for the current thread, if any.
     DeleteThreadLocalValue(pthread_getspecific(key_));
 
     // Releases resources associated with the key.  This will *not*
     // delete managed objects for other threads.
     GTEST_CHECK_POSIX_SUCCESS_(pthread_key_delete(key_));
   }
 
   T* pointer() { return GetOrCreateValue(); }
   const T* pointer() const { return GetOrCreateValue(); }
   const T& get() const { return *pointer(); }
   void set(const T& value) { *pointer() = value; }
 
  private:
   // Holds a value of type T.
   class ValueHolder : public ThreadLocalValueHolderBase {
    public:
     ValueHolder() : value_() {}
     explicit ValueHolder(const T& value) : value_(value) {}
 
     T* pointer() { return &value_; }
 
    private:
     T value_;
     GTEST_DISALLOW_COPY_AND_ASSIGN_(ValueHolder);
   };
 
   static pthread_key_t CreateKey() {
     pthread_key_t key;
     // When a thread exits, DeleteThreadLocalValue() will be called on
     // the object managed for that thread.
     GTEST_CHECK_POSIX_SUCCESS_(
         pthread_key_create(&key, &DeleteThreadLocalValue));
     return key;
   }
 
   T* GetOrCreateValue() const {
     ThreadLocalValueHolderBase* const holder =
         static_cast<ThreadLocalValueHolderBase*>(pthread_getspecific(key_));
     if (holder != NULL) {
       return CheckedDowncastToActualType<ValueHolder>(holder)->pointer();
     }
 
     ValueHolder* const new_holder = default_factory_->MakeNewHolder();
     ThreadLocalValueHolderBase* const holder_base = new_holder;
     GTEST_CHECK_POSIX_SUCCESS_(pthread_setspecific(key_, holder_base));
     return new_holder->pointer();
   }
 
   class ValueHolderFactory {
    public:
     ValueHolderFactory() {}
     virtual ~ValueHolderFactory() {}
     virtual ValueHolder* MakeNewHolder() const = 0;
 
    private:
     GTEST_DISALLOW_COPY_AND_ASSIGN_(ValueHolderFactory);
   };
 
   class DefaultValueHolderFactory : public ValueHolderFactory {
    public:
     DefaultValueHolderFactory() {}
     virtual ValueHolder* MakeNewHolder() const { return new ValueHolder(); }
 
    private:
     GTEST_DISALLOW_COPY_AND_ASSIGN_(DefaultValueHolderFactory);
   };
 
   class InstanceValueHolderFactory : public ValueHolderFactory {
    public:
     explicit InstanceValueHolderFactory(const T& value) : value_(value) {}
     virtual ValueHolder* MakeNewHolder() const {
       return new ValueHolder(value_);
     }
 
    private:
     const T value_;  // The value for each thread.
 
     GTEST_DISALLOW_COPY_AND_ASSIGN_(InstanceValueHolderFactory);
   };
 
   // A key pthreads uses for looking up per-thread values.
   const pthread_key_t key_;
   scoped_ptr<ValueHolderFactory> default_factory_;
 
   GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadLocal);
 };
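 // Illustrative usage sketch (counter and BumpCounter() are hypothetical
 // names, not part of Google Test): each thread that touches the ThreadLocal
 // lazily gets its own copy of the value, and that copy is destroyed when the
 // thread exits.
 //
 //   static ThreadLocal<int> counter(0);  // Every thread starts at 0.
 //
 //   void BumpCounter() {
 //     counter.set(counter.get() + 1);    // Affects only the calling thread.
 //   }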
 
 # endif  // GTEST_HAS_MUTEX_AND_THREAD_LOCAL_
 
 #else  // GTEST_IS_THREADSAFE
 
 // A dummy implementation of synchronization primitives (mutex, lock,
 // and thread-local variable).  Necessary for compiling Google Test where
 // mutexes are not supported; using Google Test in multiple threads is not
 // supported on such platforms.
 
 class Mutex {
  public:
   Mutex() {}
   void Lock() {}
   void Unlock() {}
   void AssertHeld() const {}
 };
 
 # define GTEST_DECLARE_STATIC_MUTEX_(mutex) \
   extern ::testing::internal::Mutex mutex
 
 # define GTEST_DEFINE_STATIC_MUTEX_(mutex) ::testing::internal::Mutex mutex
 
 // We cannot name this class MutexLock because the ctor declaration would
 // conflict with a macro named MutexLock, which is defined on some
 // platforms. That macro is used as a defensive measure to guard against
 // inadvertent misuses of MutexLock like "MutexLock(&mu)" rather than
 // "MutexLock l(&mu)".  Hence the typedef trick below.
 class GTestMutexLock {
  public:
   explicit GTestMutexLock(Mutex*) {}  // NOLINT
 };
 
 typedef GTestMutexLock MutexLock;
 
 template <typename T>
 class GTEST_API_ ThreadLocal {
  public:
   ThreadLocal() : value_() {}
   explicit ThreadLocal(const T& value) : value_(value) {}
   T* pointer() { return &value_; }
   const T* pointer() const { return &value_; }
   const T& get() const { return value_; }
   void set(const T& value) { value_ = value; }
  private:
   T value_;
 };
 
 #endif  // GTEST_IS_THREADSAFE
 
 // Returns the number of threads running in the process, or 0 to indicate that
 // we cannot detect it.
 GTEST_API_ size_t GetThreadCount();
 
 // Passing non-POD classes through ellipsis (...) crashes the ARM
 // compiler and generates a warning in Sun Studio before 12u4. The Nokia Symbian
 // and the IBM XL C/C++ compilers try to instantiate a copy constructor
 // for objects passed through ellipsis (...), failing for uncopyable
 // objects.  We define this to ensure that only POD is passed through
 // ellipsis on these systems.
 #if defined(__SYMBIAN32__) || defined(__IBMCPP__) || \
      (defined(__SUNPRO_CC) && __SUNPRO_CC < 0x5130)
 // We lose support for NULL detection where the compiler doesn't like
 // passing non-POD classes through ellipsis (...).
 # define GTEST_ELLIPSIS_NEEDS_POD_ 1
 #else
 # define GTEST_CAN_COMPARE_NULL 1
 #endif
 
 // The Nokia Symbian and IBM XL C/C++ compilers cannot decide between
 // const T& and const T* in a function template.  These compilers
 // _can_ decide between class template specializations for T and T*,
 // so a tr1::type_traits-like is_pointer works.
 #if defined(__SYMBIAN32__) || defined(__IBMCPP__)
 # define GTEST_NEEDS_IS_POINTER_ 1
 #endif
 
 template <bool bool_value>
 struct bool_constant {
   typedef bool_constant<bool_value> type;
   static const bool value = bool_value;
 };
 template <bool bool_value> const bool bool_constant<bool_value>::value;
 
 typedef bool_constant<false> false_type;
 typedef bool_constant<true> true_type;
 
 template <typename T, typename U>
 struct is_same : public false_type {};
 
 template <typename T>
 struct is_same<T, T> : public true_type {};
 
 
 template <typename T>
 struct is_pointer : public false_type {};
 
 template <typename T>
 struct is_pointer<T*> : public true_type {};
 
 template <typename Iterator>
 struct IteratorTraits {
   typedef typename Iterator::value_type value_type;
 };
 
 
 template <typename T>
 struct IteratorTraits<T*> {
   typedef T value_type;
 };
 
 template <typename T>
 struct IteratorTraits<const T*> {
   typedef T value_type;
 };
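 // Illustrative sketch of how these traits evaluate (the variables below are
 // examples only, not part of Google Test):
 //
 //   const bool p1 = is_pointer<int*>::value;  // true
 //   const bool p2 = is_pointer<int>::value;   // false
 //   const bool s  = is_same<IteratorTraits<const char*>::value_type,
 //                           char>::value;     // true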
 
 #if GTEST_OS_WINDOWS
 # define GTEST_PATH_SEP_ "\\"
 # define GTEST_HAS_ALT_PATH_SEP_ 1
 // The biggest signed integer type the compiler supports.
 typedef __int64 BiggestInt;
 #else
 # define GTEST_PATH_SEP_ "/"
 # define GTEST_HAS_ALT_PATH_SEP_ 0
 typedef long long BiggestInt;  // NOLINT
 #endif  // GTEST_OS_WINDOWS
 
 // Utilities for char.
 
 // isspace(int ch) and friends accept an unsigned char or EOF.  char
 // may be signed, depending on the compiler (or compiler flags).
 // Therefore we need to cast a char to unsigned char before calling
 // isspace(), etc.
 
 inline bool IsAlpha(char ch) {
   return isalpha(static_cast<unsigned char>(ch)) != 0;
 }
 inline bool IsAlNum(char ch) {
   return isalnum(static_cast<unsigned char>(ch)) != 0;
 }
 inline bool IsDigit(char ch) {
   return isdigit(static_cast<unsigned char>(ch)) != 0;
 }
 inline bool IsLower(char ch) {
   return islower(static_cast<unsigned char>(ch)) != 0;
 }
 inline bool IsSpace(char ch) {
   return isspace(static_cast<unsigned char>(ch)) != 0;
 }
 inline bool IsUpper(char ch) {
   return isupper(static_cast<unsigned char>(ch)) != 0;
 }
 inline bool IsXDigit(char ch) {
   return isxdigit(static_cast<unsigned char>(ch)) != 0;
 }
 inline bool IsXDigit(wchar_t ch) {
   const unsigned char low_byte = static_cast<unsigned char>(ch);
   return ch == low_byte && isxdigit(low_byte) != 0;
 }
 
 inline char ToLower(char ch) {
   return static_cast<char>(tolower(static_cast<unsigned char>(ch)));
 }
 inline char ToUpper(char ch) {
   return static_cast<char>(toupper(static_cast<unsigned char>(ch)));
 }
 
 inline std::string StripTrailingSpaces(std::string str) {
   std::string::iterator it = str.end();
   while (it != str.begin() && IsSpace(*--it))
     it = str.erase(it);
   return str;
 }
 
 // The testing::internal::posix namespace holds wrappers for common
 // POSIX functions.  These wrappers hide the differences between
 // Windows/MSVC and POSIX systems.  Since some compilers define these
 // standard functions as macros, the wrapper cannot have the same name
 // as the wrapped function.
 
 namespace posix {
 
 // Functions with a different name on Windows.
 
 #if GTEST_OS_WINDOWS
 
 typedef struct _stat StatStruct;
 
 # ifdef __BORLANDC__
 inline int IsATTY(int fd) { return isatty(fd); }
 inline int StrCaseCmp(const char* s1, const char* s2) {
   return stricmp(s1, s2);
 }
 inline char* StrDup(const char* src) { return strdup(src); }
 # else  // !__BORLANDC__
 #  if GTEST_OS_WINDOWS_MOBILE
 inline int IsATTY(int /* fd */) { return 0; }
 #  else
 inline int IsATTY(int fd) { return _isatty(fd); }
 #  endif  // GTEST_OS_WINDOWS_MOBILE
 inline int StrCaseCmp(const char* s1, const char* s2) {
   return _stricmp(s1, s2);
 }
 inline char* StrDup(const char* src) { return _strdup(src); }
 # endif  // __BORLANDC__
 
 # if GTEST_OS_WINDOWS_MOBILE
 inline int FileNo(FILE* file) { return reinterpret_cast<int>(_fileno(file)); }
 // Stat(), RmDir(), and IsDir() are not needed on Windows CE at this
 // time and thus not defined there.
 # else
 inline int FileNo(FILE* file) { return _fileno(file); }
 inline int Stat(const char* path, StatStruct* buf) { return _stat(path, buf); }
 inline int RmDir(const char* dir) { return _rmdir(dir); }
 inline bool IsDir(const StatStruct& st) {
   return (_S_IFDIR & st.st_mode) != 0;
 }
 # endif  // GTEST_OS_WINDOWS_MOBILE
 
 #else
 
 typedef struct stat StatStruct;
 
 inline int FileNo(FILE* file) { return fileno(file); }
 inline int IsATTY(int fd) { return isatty(fd); }
 inline int Stat(const char* path, StatStruct* buf) { return stat(path, buf); }
 inline int StrCaseCmp(const char* s1, const char* s2) {
   return strcasecmp(s1, s2);
 }
 inline char* StrDup(const char* src) { return strdup(src); }
 inline int RmDir(const char* dir) { return rmdir(dir); }
 inline bool IsDir(const StatStruct& st) { return S_ISDIR(st.st_mode); }
 
 #endif  // GTEST_OS_WINDOWS
 
 // Functions deprecated by MSVC 8.0.
 
 GTEST_DISABLE_MSC_WARNINGS_PUSH_(4996 /* deprecated function */)
 
 inline const char* StrNCpy(char* dest, const char* src, size_t n) {
   return strncpy(dest, src, n);
 }
 
 // ChDir(), FReopen(), FDOpen(), Read(), Write(), Close(), and
 // StrError() aren't needed on Windows CE at this time and thus not
 // defined there.
 
 #if !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT
 inline int ChDir(const char* dir) { return chdir(dir); }
 #endif
 inline FILE* FOpen(const char* path, const char* mode) {
   return fopen(path, mode);
 }
 #if !GTEST_OS_WINDOWS_MOBILE
 inline FILE *FReopen(const char* path, const char* mode, FILE* stream) {
   return freopen(path, mode, stream);
 }
 inline FILE* FDOpen(int fd, const char* mode) { return fdopen(fd, mode); }
 #endif
 inline int FClose(FILE* fp) { return fclose(fp); }
 #if !GTEST_OS_WINDOWS_MOBILE
 inline int Read(int fd, void* buf, unsigned int count) {
   return static_cast<int>(read(fd, buf, count));
 }
 inline int Write(int fd, const void* buf, unsigned int count) {
   return static_cast<int>(write(fd, buf, count));
 }
 inline int Close(int fd) { return close(fd); }
 inline const char* StrError(int errnum) { return strerror(errnum); }
 #endif
 inline const char* GetEnv(const char* name) {
 #if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_WINDOWS_PHONE || GTEST_OS_WINDOWS_RT
   // We are on Windows CE, which has no environment variables.
   static_cast<void>(name);  // To prevent 'unused argument' warning.
   return NULL;
 #elif defined(__BORLANDC__) || defined(__SunOS_5_8) || defined(__SunOS_5_9)
   // Environment variables which we programmatically clear will be set to the
   // empty string rather than unset (NULL).  Handle that case.
   const char* const env = getenv(name);
   return (env != NULL && env[0] != '\0') ? env : NULL;
 #else
   return getenv(name);
 #endif
 }
 
 GTEST_DISABLE_MSC_WARNINGS_POP_()
 
 #if GTEST_OS_WINDOWS_MOBILE
 // Windows CE has no C library. The abort() function is used in
 // several places in Google Test. This implementation provides a reasonable
 // imitation of standard behaviour.
 void Abort();
 #else
 inline void Abort() { abort(); }
 #endif  // GTEST_OS_WINDOWS_MOBILE
 
 }  // namespace posix
 
 // MSVC "deprecates" snprintf and issues warnings wherever it is used.  In
 // order to avoid these warnings, we need to use _snprintf or _snprintf_s on
 // MSVC-based platforms.  We map the GTEST_SNPRINTF_ macro to the appropriate
 // function in order to achieve that.  We use a macro definition here because
 // snprintf is a variadic function.
 #if _MSC_VER >= 1400 && !GTEST_OS_WINDOWS_MOBILE
 // MSVC 2005 and above support variadic macros.
 # define GTEST_SNPRINTF_(buffer, size, format, ...) \
      _snprintf_s(buffer, size, size, format, __VA_ARGS__)
 #elif defined(_MSC_VER)
 // Windows CE does not define _snprintf_s and MSVC prior to 2005 doesn't
 // complain about _snprintf.
 # define GTEST_SNPRINTF_ _snprintf
 #else
 # define GTEST_SNPRINTF_ snprintf
 #endif
 
 // The maximum number a BiggestInt can represent.  This definition
 // works whether BiggestInt is represented in one's complement or
 // two's complement.
 //
 // We cannot rely on numeric_limits in STL, as __int64 and long long
 // are not part of standard C++ and numeric_limits doesn't need to be
 // defined for them.
 const BiggestInt kMaxBiggestInt =
     ~(static_cast<BiggestInt>(1) << (8*sizeof(BiggestInt) - 1));
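 // For example, with an 8-byte BiggestInt the expression above evaluates to
 // ~(1 << 63), whose binary representation is a 0 followed by 63 ones, i.e.
 // 2^63 - 1, the largest value a 64-bit signed integer can hold.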
 
 // This template class serves as a compile-time function from size to
 // type.  It maps a size in bytes to a primitive type with that
 // size. e.g.
 //
 //   TypeWithSize<4>::UInt
 //
 // is typedef-ed to be unsigned int (unsigned integer made up of 4
 // bytes).
 //
 // Such functionality should belong to STL, but I cannot find it
 // there.
 //
 // Google Test uses this class in the implementation of floating-point
 // comparison.
 //
 // For now it only handles UInt (unsigned int) as that's all Google Test
 // needs.  Other types can be easily added in the future if need
 // arises.
 template <size_t size>
 class TypeWithSize {
  public:
   // This prevents the user from using TypeWithSize<N> with incorrect
   // values of N.
   typedef void UInt;
 };
 
 // The specialization for size 4.
 template <>
 class TypeWithSize<4> {
  public:
   // unsigned int has size 4 in both gcc and MSVC.
   //
   // As base/basictypes.h doesn't compile on Windows, we cannot use
   // uint32, uint64, etc. here.
   typedef int Int;
   typedef unsigned int UInt;
 };
 
 // The specialization for size 8.
 template <>
 class TypeWithSize<8> {
  public:
 #if GTEST_OS_WINDOWS
   typedef __int64 Int;
   typedef unsigned __int64 UInt;
 #else
   typedef long long Int;  // NOLINT
   typedef unsigned long long UInt;  // NOLINT
 #endif  // GTEST_OS_WINDOWS
 };
 
 // Integer types of known sizes.
 typedef TypeWithSize<4>::Int Int32;
 typedef TypeWithSize<4>::UInt UInt32;
 typedef TypeWithSize<8>::Int Int64;
 typedef TypeWithSize<8>::UInt UInt64;
 typedef TypeWithSize<8>::Int TimeInMillis;  // Represents time in milliseconds.
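 // Illustrative sketch (FloatBits and DoubleBits are hypothetical names, not
 // part of Google Test): the floating-point comparison code can use
 // TypeWithSize to pick an unsigned integer type exactly as wide as the
 // floating-point type it inspects, e.g.
 //
 //   typedef TypeWithSize<sizeof(float)>::UInt FloatBits;    // 32-bit
 //   typedef TypeWithSize<sizeof(double)>::UInt DoubleBits;  // 64-bit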
 
 // Utilities for command line flags and environment variables.
 
 // Macro for referencing flags.
 #if !defined(GTEST_FLAG)
 # define GTEST_FLAG(name) FLAGS_gtest_##name
 #endif  // !defined(GTEST_FLAG)
 
 #if !defined(GTEST_USE_OWN_FLAGFILE_FLAG_)
 # define GTEST_USE_OWN_FLAGFILE_FLAG_ 1
 #endif  // !defined(GTEST_USE_OWN_FLAGFILE_FLAG_)
 
 #if !defined(GTEST_DECLARE_bool_)
 # define GTEST_FLAG_SAVER_ ::testing::internal::GTestFlagSaver
 
 // Macros for declaring flags.
 # define GTEST_DECLARE_bool_(name) GTEST_API_ extern bool GTEST_FLAG(name)
 # define GTEST_DECLARE_int32_(name) \
     GTEST_API_ extern ::testing::internal::Int32 GTEST_FLAG(name)
 # define GTEST_DECLARE_string_(name) \
     GTEST_API_ extern ::std::string GTEST_FLAG(name)
 
 // Macros for defining flags.
 # define GTEST_DEFINE_bool_(name, default_val, doc) \
     GTEST_API_ bool GTEST_FLAG(name) = (default_val)
 # define GTEST_DEFINE_int32_(name, default_val, doc) \
     GTEST_API_ ::testing::internal::Int32 GTEST_FLAG(name) = (default_val)
 # define GTEST_DEFINE_string_(name, default_val, doc) \
     GTEST_API_ ::std::string GTEST_FLAG(name) = (default_val)
 
 #endif  // !defined(GTEST_DECLARE_bool_)
 
 // Thread annotations
 #if !defined(GTEST_EXCLUSIVE_LOCK_REQUIRED_)
 # define GTEST_EXCLUSIVE_LOCK_REQUIRED_(locks)
 # define GTEST_LOCK_EXCLUDED_(locks)
 #endif  // !defined(GTEST_EXCLUSIVE_LOCK_REQUIRED_)
 
 // Parses 'str' for a 32-bit signed integer.  If successful, writes the result
 // to *value and returns true; otherwise leaves *value unchanged and returns
 // false.
 // TODO(chandlerc): Find a better way to refactor flag and environment parsing
 // out of both gtest-port.cc and gtest.cc to avoid exporting this utility
 // function.
 bool ParseInt32(const Message& src_text, const char* str, Int32* value);
 
 // Parses a bool/Int32/string from the environment variable
 // corresponding to the given Google Test flag.
 bool BoolFromGTestEnv(const char* flag, bool default_val);
 GTEST_API_ Int32 Int32FromGTestEnv(const char* flag, Int32 default_val);
 std::string OutputFlagAlsoCheckEnvVar();
 const char* StringFromGTestEnv(const char* flag, const char* default_val);
 
 }  // namespace internal
 }  // namespace testing
 
 #endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PORT_H_
diff --git a/googletest/scripts/upload.py b/googletest/scripts/upload.py
index 81e8e04d..c852e4c9 100755
--- a/googletest/scripts/upload.py
+++ b/googletest/scripts/upload.py
@@ -1,1387 +1,1387 @@
 #!/usr/bin/env python
 #
 # Copyright 2007 Google Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
 #     http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
 """Tool for uploading diffs from a version control system to the codereview app.
 
 Usage summary: upload.py [options] [-- diff_options]
 
 Diff options are passed to the diff command of the underlying system.
 
 Supported version control systems:
   Git
   Mercurial
   Subversion
 
 It is important for Git/Mercurial users to specify a tree/node/branch to diff
 against by using the '--rev' option.
 """
 # This code is derived from appcfg.py in the App Engine SDK (open source),
 # and from ASPN recipe #146306.
 
 import cookielib
 import getpass
 import logging
 import md5
 import mimetypes
 import optparse
 import os
 import re
 import socket
 import subprocess
 import sys
 import urllib
 import urllib2
 import urlparse
 
 try:
   import readline
 except ImportError:
   pass
 
 # The logging verbosity:
 #  0: Errors only.
 #  1: Status messages.
 #  2: Info logs.
 #  3: Debug logs.
 verbosity = 1
 
 # Max size of patch or base file.
 MAX_UPLOAD_SIZE = 900 * 1024
 
 
 def GetEmail(prompt):
   """Prompts the user for their email address and returns it.
 
   The last used email address is saved to a file and offered up as a suggestion
  to the user. If the user presses enter without typing anything, the last
   used email address is used. If the user enters a new address, it is saved
   for next time we prompt.
 
   """
   last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
   last_email = ""
   if os.path.exists(last_email_file_name):
     try:
       last_email_file = open(last_email_file_name, "r")
       last_email = last_email_file.readline().strip("\n")
       last_email_file.close()
       prompt += " [%s]" % last_email
     except IOError, e:
       pass
   email = raw_input(prompt + ": ").strip()
   if email:
     try:
       last_email_file = open(last_email_file_name, "w")
       last_email_file.write(email)
       last_email_file.close()
     except IOError, e:
       pass
   else:
     email = last_email
   return email
 
 
 def StatusUpdate(msg):
   """Print a status message to stdout.
 
   If 'verbosity' is greater than 0, print the message.
 
   Args:
     msg: The string to print.
   """
   if verbosity > 0:
     print msg
 
 
 def ErrorExit(msg):
   """Print an error message to stderr and exit."""
   print >>sys.stderr, msg
   sys.exit(1)
 
 
 class ClientLoginError(urllib2.HTTPError):
   """Raised to indicate there was an error authenticating with ClientLogin."""
 
   def __init__(self, url, code, msg, headers, args):
     urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
     self.args = args
     self.reason = args["Error"]
 
 
 class AbstractRpcServer(object):
   """Provides a common interface for a simple RPC server."""
 
   def __init__(self, host, auth_function, host_override=None, extra_headers={},
                save_cookies=False):
     """Creates a new HttpRpcServer.
 
     Args:
       host: The host to send requests to.
       auth_function: A function that takes no arguments and returns an
         (email, password) tuple when called. Will be called if authentication
         is required.
       host_override: The host header to send to the server (defaults to host).
       extra_headers: A dict of extra headers to append to every request.
       save_cookies: If True, save the authentication cookies to local disk.
         If False, use an in-memory cookiejar instead.  Subclasses must
         implement this functionality.  Defaults to False.
     """
     self.host = host
     self.host_override = host_override
     self.auth_function = auth_function
     self.authenticated = False
     self.extra_headers = extra_headers
     self.save_cookies = save_cookies
     self.opener = self._GetOpener()
     if self.host_override:
       logging.info("Server: %s; Host: %s", self.host, self.host_override)
     else:
       logging.info("Server: %s", self.host)
 
   def _GetOpener(self):
     """Returns an OpenerDirector for making HTTP requests.
 
     Returns:
       A urllib2.OpenerDirector object.
     """
     raise NotImplementedError()
 
   def _CreateRequest(self, url, data=None):
     """Creates a new urllib request."""
     logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
     req = urllib2.Request(url, data=data)
     if self.host_override:
       req.add_header("Host", self.host_override)
     for key, value in self.extra_headers.iteritems():
       req.add_header(key, value)
     return req
 
   def _GetAuthToken(self, email, password):
     """Uses ClientLogin to authenticate the user, returning an auth token.
 
     Args:
       email:    The user's email address
       password: The user's password
 
     Raises:
       ClientLoginError: If there was an error authenticating with ClientLogin.
       HTTPError: If there was some other form of HTTP error.
 
     Returns:
       The authentication token returned by ClientLogin.
     """
     account_type = "GOOGLE"
     if self.host.endswith(".google.com"):
       # Needed for use inside Google.
       account_type = "HOSTED"
     req = self._CreateRequest(
         url="https://www.google.com/accounts/ClientLogin",
         data=urllib.urlencode({
             "Email": email,
             "Passwd": password,
             "service": "ah",
             "source": "rietveld-codereview-upload",
             "accountType": account_type,
         }),
     )
     try:
       response = self.opener.open(req)
       response_body = response.read()
       response_dict = dict(x.split("=")
                            for x in response_body.split("\n") if x)
       return response_dict["Auth"]
     except urllib2.HTTPError, e:
       if e.code == 403:
         body = e.read()
         response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
         raise ClientLoginError(req.get_full_url(), e.code, e.msg,
                                e.headers, response_dict)
       else:
         raise
 
   def _GetAuthCookie(self, auth_token):
     """Fetches authentication cookies for an authentication token.
 
     Args:
       auth_token: The authentication token returned by ClientLogin.
 
     Raises:
       HTTPError: If there was an error fetching the authentication cookies.
     """
     # This is a dummy value to allow us to identify when we're successful.
     continue_location = "http://localhost/"
     args = {"continue": continue_location, "auth": auth_token}
     req = self._CreateRequest("http://%s/_ah/login?%s" %
                               (self.host, urllib.urlencode(args)))
     try:
       response = self.opener.open(req)
     except urllib2.HTTPError, e:
       response = e
     if (response.code != 302 or
         response.info()["location"] != continue_location):
       raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
                               response.headers, response.fp)
     self.authenticated = True
 
   def _Authenticate(self):
     """Authenticates the user.
 
     The authentication process works as follows:
      1) We get a username and password from the user
      2) We use ClientLogin to obtain an AUTH token for the user
-        (see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
+        (see https://developers.google.com/identity/protocols/AuthForInstalledApps).
      3) We pass the auth token to /_ah/login on the server to obtain an
         authentication cookie. If login was successful, it tries to redirect
         us to the URL we provided.
 
     If we attempt to access the upload API without first obtaining an
     authentication cookie, it returns a 401 response and directs us to
     authenticate ourselves with ClientLogin.
     """
     for i in range(3):
       credentials = self.auth_function()
       try:
         auth_token = self._GetAuthToken(credentials[0], credentials[1])
       except ClientLoginError, e:
         if e.reason == "BadAuthentication":
           print >>sys.stderr, "Invalid username or password."
           continue
         if e.reason == "CaptchaRequired":
           print >>sys.stderr, (
               "Please go to\n"
               "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
               "and verify you are a human.  Then try again.")
           break
         if e.reason == "NotVerified":
           print >>sys.stderr, "Account not verified."
           break
         if e.reason == "TermsNotAgreed":
           print >>sys.stderr, "User has not agreed to TOS."
           break
         if e.reason == "AccountDeleted":
           print >>sys.stderr, "The user account has been deleted."
           break
         if e.reason == "AccountDisabled":
           print >>sys.stderr, "The user account has been disabled."
           break
         if e.reason == "ServiceDisabled":
           print >>sys.stderr, ("The user's access to the service has been "
                                "disabled.")
           break
         if e.reason == "ServiceUnavailable":
           print >>sys.stderr, "The service is not available; try again later."
           break
         raise
       self._GetAuthCookie(auth_token)
       return
 
   def Send(self, request_path, payload=None,
            content_type="application/octet-stream",
            timeout=None,
            **kwargs):
     """Sends an RPC and returns the response.
 
     Args:
       request_path: The path to send the request to, eg /api/appversion/create.
       payload: The body of the request, or None to send an empty request.
       content_type: The Content-Type header to use.
       timeout: timeout in seconds; default None i.e. no timeout.
         (Note: for large requests on OS X, the timeout doesn't work right.)
       kwargs: Any keyword arguments are converted into query string parameters.
 
     Returns:
       The response body, as a string.
     """
     # TODO: Don't require authentication.  Let the server say
     # whether it is necessary.
     if not self.authenticated:
       self._Authenticate()
 
     old_timeout = socket.getdefaulttimeout()
     socket.setdefaulttimeout(timeout)
     try:
       tries = 0
       while True:
         tries += 1
         args = dict(kwargs)
         url = "http://%s%s" % (self.host, request_path)
         if args:
           url += "?" + urllib.urlencode(args)
         req = self._CreateRequest(url=url, data=payload)
         req.add_header("Content-Type", content_type)
         try:
           f = self.opener.open(req)
           response = f.read()
           f.close()
           return response
         except urllib2.HTTPError, e:
           if tries > 3:
             raise
           elif e.code == 401:
             self._Authenticate()
 ##           elif e.code >= 500 and e.code < 600:
 ##             # Server Error - try again.
 ##             continue
           else:
             raise
     finally:
       socket.setdefaulttimeout(old_timeout)
 
 
 class HttpRpcServer(AbstractRpcServer):
   """Provides a simplified RPC-style interface for HTTP requests."""
 
   def _Authenticate(self):
     """Save the cookie jar after authentication."""
     super(HttpRpcServer, self)._Authenticate()
     if self.save_cookies:
       StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
       self.cookie_jar.save()
 
   def _GetOpener(self):
     """Returns an OpenerDirector that supports cookies and ignores redirects.
 
     Returns:
       A urllib2.OpenerDirector object.
     """
     opener = urllib2.OpenerDirector()
     opener.add_handler(urllib2.ProxyHandler())
     opener.add_handler(urllib2.UnknownHandler())
     opener.add_handler(urllib2.HTTPHandler())
     opener.add_handler(urllib2.HTTPDefaultErrorHandler())
     opener.add_handler(urllib2.HTTPSHandler())
     opener.add_handler(urllib2.HTTPErrorProcessor())
     if self.save_cookies:
       self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
       self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
       if os.path.exists(self.cookie_file):
         try:
           self.cookie_jar.load()
           self.authenticated = True
           StatusUpdate("Loaded authentication cookies from %s" %
                        self.cookie_file)
         except (cookielib.LoadError, IOError):
           # Failed to load cookies - just ignore them.
           pass
       else:
         # Create an empty cookie file with mode 600
         fd = os.open(self.cookie_file, os.O_CREAT, 0600)
         os.close(fd)
       # Always chmod the cookie file
       os.chmod(self.cookie_file, 0600)
     else:
      # Don't save cookies across runs of upload.py.
       self.cookie_jar = cookielib.CookieJar()
     opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
     return opener
 
 
 parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]")
 parser.add_option("-y", "--assume_yes", action="store_true",
                   dest="assume_yes", default=False,
                   help="Assume that the answer to yes/no questions is 'yes'.")
 # Logging
 group = parser.add_option_group("Logging options")
 group.add_option("-q", "--quiet", action="store_const", const=0,
                  dest="verbose", help="Print errors only.")
 group.add_option("-v", "--verbose", action="store_const", const=2,
                  dest="verbose", default=1,
                  help="Print info level logs (default).")
 group.add_option("--noisy", action="store_const", const=3,
                  dest="verbose", help="Print all logs.")
 # Review server
 group = parser.add_option_group("Review server options")
 group.add_option("-s", "--server", action="store", dest="server",
                  default="codereview.appspot.com",
                  metavar="SERVER",
                  help=("The server to upload to. The format is host[:port]. "
                        "Defaults to 'codereview.appspot.com'."))
 group.add_option("-e", "--email", action="store", dest="email",
                  metavar="EMAIL", default=None,
                  help="The username to use. Will prompt if omitted.")
 group.add_option("-H", "--host", action="store", dest="host",
                  metavar="HOST", default=None,
                  help="Overrides the Host header sent with all RPCs.")
 group.add_option("--no_cookies", action="store_false",
                  dest="save_cookies", default=True,
                  help="Do not save authentication cookies to local disk.")
 # Issue
 group = parser.add_option_group("Issue options")
 group.add_option("-d", "--description", action="store", dest="description",
                  metavar="DESCRIPTION", default=None,
                  help="Optional description when creating an issue.")
 group.add_option("-f", "--description_file", action="store",
                  dest="description_file", metavar="DESCRIPTION_FILE",
                  default=None,
                  help="Optional path of a file that contains "
                       "the description when creating an issue.")
 group.add_option("-r", "--reviewers", action="store", dest="reviewers",
                  metavar="REVIEWERS", default=None,
                  help="Add reviewers (comma separated email addresses).")
 group.add_option("--cc", action="store", dest="cc",
                  metavar="CC", default=None,
                  help="Add CC (comma separated email addresses).")
 # Upload options
 group = parser.add_option_group("Patch options")
 group.add_option("-m", "--message", action="store", dest="message",
                  metavar="MESSAGE", default=None,
                  help="A message to identify the patch. "
                       "Will prompt if omitted.")
 group.add_option("-i", "--issue", type="int", action="store",
                  metavar="ISSUE", default=None,
                  help="Issue number to which to add. Defaults to new issue.")
 group.add_option("--download_base", action="store_true",
                  dest="download_base", default=False,
                  help="Base files will be downloaded by the server "
                  "(side-by-side diffs may not work on files with CRs).")
 group.add_option("--rev", action="store", dest="revision",
                  metavar="REV", default=None,
                  help="Branch/tree/revision to diff against (used by DVCS).")
 group.add_option("--send_mail", action="store_true",
                  dest="send_mail", default=False,
                  help="Send notification email to reviewers.")
 
 
 def GetRpcServer(options):
   """Returns an instance of an AbstractRpcServer.
 
   Returns:
     A new AbstractRpcServer, on which RPC calls can be made.
   """
 
   rpc_server_class = HttpRpcServer
 
   def GetUserCredentials():
     """Prompts the user for a username and password."""
     email = options.email
     if email is None:
       email = GetEmail("Email (login for uploading to %s)" % options.server)
     password = getpass.getpass("Password for %s: " % email)
     return (email, password)
 
   # If this is the dev_appserver, use fake authentication.
   host = (options.host or options.server).lower()
   if host == "localhost" or host.startswith("localhost:"):
     email = options.email
     if email is None:
       email = "test@example.com"
       logging.info("Using debug user %s.  Override with --email" % email)
     server = rpc_server_class(
         options.server,
         lambda: (email, "password"),
         host_override=options.host,
         extra_headers={"Cookie":
                        'dev_appserver_login="%s:False"' % email},
         save_cookies=options.save_cookies)
     # Don't try to talk to ClientLogin.
     server.authenticated = True
     return server
 
   return rpc_server_class(options.server, GetUserCredentials,
                           host_override=options.host,
                           save_cookies=options.save_cookies)
 
 
 def EncodeMultipartFormData(fields, files):
   """Encode form fields for multipart/form-data.
 
   Args:
     fields: A sequence of (name, value) elements for regular form fields.
     files: A sequence of (name, filename, value) elements for data to be
            uploaded as files.
   Returns:
     (content_type, body) ready for an httplib.HTTP instance.
 
   Source:
-    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
+    https://web.archive.org/web/20160116052001/code.activestate.com/recipes/146306
   """
   BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
   CRLF = '\r\n'
   lines = []
   for (key, value) in fields:
     lines.append('--' + BOUNDARY)
     lines.append('Content-Disposition: form-data; name="%s"' % key)
     lines.append('')
     lines.append(value)
   for (key, filename, value) in files:
     lines.append('--' + BOUNDARY)
     lines.append('Content-Disposition: form-data; name="%s"; filename="%s"' %
              (key, filename))
     lines.append('Content-Type: %s' % GetContentType(filename))
     lines.append('')
     lines.append(value)
   lines.append('--' + BOUNDARY + '--')
   lines.append('')
   body = CRLF.join(lines)
   content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
   return content_type, body
 
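 # Illustrative sketch (the field and file values below are made up): encoding
 # one ordinary form field plus one file part yields the request body and the
 # matching Content-Type header, e.g.
 #
 #   ctype, body = EncodeMultipartFormData(
 #       [("subject", "Fix a typo")],
 #       [("data", "README.txt", "hello world\n")])
 #   # ctype == 'multipart/form-data; boundary=-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
 #   # body contains both parts, each introduced by the boundary marker and
 #   # followed by a final closing boundary.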
 
 def GetContentType(filename):
   """Helper to guess the content-type from the filename."""
   return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
 
 
 # Use a shell for subcommands on Windows to get a PATH search.
 use_shell = sys.platform.startswith("win")
 
 def RunShellWithReturnCode(command, print_output=False,
                            universal_newlines=True):
   """Executes a command and returns the output from stdout and the return code.
 
   Args:
     command: Command to execute.
     print_output: If True, the output is printed to stdout.
                   If False, both stdout and stderr are ignored.
     universal_newlines: Use universal_newlines flag (default: True).
 
   Returns:
     Tuple (output, return code)
   """
   logging.info("Running %s", command)
   p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                        shell=use_shell, universal_newlines=universal_newlines)
   if print_output:
     output_array = []
     while True:
       line = p.stdout.readline()
       if not line:
         break
       print line.strip("\n")
       output_array.append(line)
     output = "".join(output_array)
   else:
     output = p.stdout.read()
   p.wait()
   errout = p.stderr.read()
   if print_output and errout:
     print >>sys.stderr, errout
   p.stdout.close()
   p.stderr.close()
   return output, p.returncode
 
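 # Illustrative sketch (the command below is only an example): callers can use
 # the returned code to distinguish recoverable failures from fatal ones, e.g.
 #
 #   out, returncode = RunShellWithReturnCode(["svn", "info"])
 #   if returncode:
 #     ErrorExit("svn info failed:\n%s" % out)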
 
 def RunShell(command, silent_ok=False, universal_newlines=True,
              print_output=False):
   data, retcode = RunShellWithReturnCode(command, print_output,
                                          universal_newlines)
   if retcode:
     ErrorExit("Got error status from %s:\n%s" % (command, data))
   if not silent_ok and not data:
     ErrorExit("No output from %s" % command)
   return data
 
 
 class VersionControlSystem(object):
   """Abstract base class providing an interface to the VCS."""
 
   def __init__(self, options):
     """Constructor.
 
     Args:
       options: Command line options.
     """
     self.options = options
 
   def GenerateDiff(self, args):
     """Return the current diff as a string.
 
     Args:
       args: Extra arguments to pass to the diff command.
     """
     raise NotImplementedError(
         "abstract method -- subclass %s must override" % self.__class__)
 
   def GetUnknownFiles(self):
     """Return a list of files unknown to the VCS."""
     raise NotImplementedError(
         "abstract method -- subclass %s must override" % self.__class__)
 
   def CheckForUnknownFiles(self):
     """Show an "are you sure?" prompt if there are unknown files."""
     unknown_files = self.GetUnknownFiles()
     if unknown_files:
       print "The following files are not added to version control:"
       for line in unknown_files:
         print line
       prompt = "Are you sure you want to continue? (y/N) "
       answer = raw_input(prompt).strip()
       if answer != "y":
         ErrorExit("User aborted")
 
   def GetBaseFile(self, filename):
     """Get the content of the upstream version of a file.
 
     Returns:
       A tuple (base_content, new_content, is_binary, status)
         base_content: The contents of the base file.
         new_content: For text files, this is empty.  For binary files, this is
           the contents of the new file, since the diff output won't contain
           information to reconstruct the current file.
         is_binary: True iff the file is binary.
         status: The status of the file.
     """
 
     raise NotImplementedError(
         "abstract method -- subclass %s must override" % self.__class__)
 
 
   def GetBaseFiles(self, diff):
     """Helper that calls GetBaseFile for each file in the patch.
 
     Returns:
       A dictionary that maps from filename to GetBaseFile's tuple.  Filenames
       are retrieved based on lines that start with "Index:" or
       "Property changes on:".
     """
     files = {}
     for line in diff.splitlines(True):
       if line.startswith('Index:') or line.startswith('Property changes on:'):
         unused, filename = line.split(':', 1)
         # On Windows if a file has property changes its filename uses '\'
         # instead of '/'.
         filename = filename.strip().replace('\\', '/')
         files[filename] = self.GetBaseFile(filename)
     return files
 
 
   def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
                       files):
     """Uploads the base files (and if necessary, the current ones as well)."""
 
     def UploadFile(filename, file_id, content, is_binary, status, is_base):
       """Uploads a file to the server."""
       file_too_large = False
       if is_base:
         type = "base"
       else:
         type = "current"
       if len(content) > MAX_UPLOAD_SIZE:
         print ("Not uploading the %s file for %s because it's too large." %
                (type, filename))
         file_too_large = True
         content = ""
       checksum = md5.new(content).hexdigest()
       if options.verbose > 0 and not file_too_large:
         print "Uploading %s file for %s" % (type, filename)
       url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
       form_fields = [("filename", filename),
                      ("status", status),
                      ("checksum", checksum),
                      ("is_binary", str(is_binary)),
                      ("is_current", str(not is_base)),
                     ]
       if file_too_large:
         form_fields.append(("file_too_large", "1"))
       if options.email:
         form_fields.append(("user", options.email))
       ctype, body = EncodeMultipartFormData(form_fields,
                                             [("data", filename, content)])
       response_body = rpc_server.Send(url, body,
                                       content_type=ctype)
       if not response_body.startswith("OK"):
         StatusUpdate("  --> %s" % response_body)
         sys.exit(1)
 
     patches = dict()
     [patches.setdefault(v, k) for k, v in patch_list]
     for filename in patches.keys():
       base_content, new_content, is_binary, status = files[filename]
       file_id_str = patches.get(filename)
       if file_id_str.find("nobase") != -1:
         base_content = None
         file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
       file_id = int(file_id_str)
       if base_content != None:
         UploadFile(filename, file_id, base_content, is_binary, status, True)
       if new_content != None:
         UploadFile(filename, file_id, new_content, is_binary, status, False)
 
   def IsImage(self, filename):
     """Returns true if the filename has an image extension."""
     mimetype =  mimetypes.guess_type(filename)[0]
     if not mimetype:
       return False
     return mimetype.startswith("image/")
 
 
 class SubversionVCS(VersionControlSystem):
   """Implementation of the VersionControlSystem interface for Subversion."""
 
   def __init__(self, options):
     super(SubversionVCS, self).__init__(options)
     if self.options.revision:
       match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
       if not match:
         ErrorExit("Invalid Subversion revision %s." % self.options.revision)
       self.rev_start = match.group(1)
       self.rev_end = match.group(3)
     else:
       self.rev_start = self.rev_end = None
     # Cache output from "svn list -r REVNO dirname".
     # Keys: dirname, Values: 2-tuple (output for start rev and end rev).
     self.svnls_cache = {}
     # SVN base URL is required to fetch files deleted in an older revision.
     # Result is cached to avoid guessing it over and over again in GetBaseFile().
     required = self.options.download_base or self.options.revision is not None
     self.svn_base = self._GuessBase(required)
 
   def GuessBase(self, required):
     """Wrapper for _GuessBase."""
     return self.svn_base
 
   def _GuessBase(self, required):
     """Returns the SVN base URL.
 
     Args:
       required: If true, exits if the url can't be guessed, otherwise None is
         returned.
     """
     info = RunShell(["svn", "info"])
     for line in info.splitlines():
       words = line.split()
       if len(words) == 2 and words[0] == "URL:":
         url = words[1]
         scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
         username, netloc = urllib.splituser(netloc)
         if username:
           logging.info("Removed username from base URL")
         if netloc.endswith("svn.python.org"):
           if netloc == "svn.python.org":
             if path.startswith("/projects/"):
               path = path[9:]
           elif netloc != "pythondev@svn.python.org":
             ErrorExit("Unrecognized Python URL: %s" % url)
           base = "http://svn.python.org/view/*checkout*%s/" % path
           logging.info("Guessed Python base = %s", base)
         elif netloc.endswith("svn.collab.net"):
           if path.startswith("/repos/"):
             path = path[6:]
           base = "http://svn.collab.net/viewvc/*checkout*%s/" % path
           logging.info("Guessed CollabNet base = %s", base)
         elif netloc.endswith(".googlecode.com"):
           path = path + "/"
           base = urlparse.urlunparse(("http", netloc, path, params,
                                       query, fragment))
           logging.info("Guessed Google Code base = %s", base)
         else:
           path = path + "/"
           base = urlparse.urlunparse((scheme, netloc, path, params,
                                       query, fragment))
           logging.info("Guessed base = %s", base)
         return base
     if required:
       ErrorExit("Can't find URL in output from svn info")
     return None
 
   def GenerateDiff(self, args):
     cmd = ["svn", "diff"]
     if self.options.revision:
       cmd += ["-r", self.options.revision]
     cmd.extend(args)
     data = RunShell(cmd)
     count = 0
     for line in data.splitlines():
       if line.startswith("Index:") or line.startswith("Property changes on:"):
         count += 1
         logging.info(line)
     if not count:
       ErrorExit("No valid patches found in output from svn diff")
     return data
 
   def _CollapseKeywords(self, content, keyword_str):
     """Collapses SVN keywords."""
     # svn cat translates keywords but svn diff doesn't. As a result of this
     # behavior patching.PatchChunks() fails with a chunk mismatch error.
     # This part was originally written by the Review Board development team
-    # who had the same problem (http://reviews.review-board.org/r/276/).
+    # who had the same problem (https://reviews.reviewboard.org/r/276/).
     # Mapping of keywords to known aliases
     svn_keywords = {
       # Standard keywords
       'Date':                ['Date', 'LastChangedDate'],
       'Revision':            ['Revision', 'LastChangedRevision', 'Rev'],
       'Author':              ['Author', 'LastChangedBy'],
       'HeadURL':             ['HeadURL', 'URL'],
       'Id':                  ['Id'],
 
       # Aliases
       'LastChangedDate':     ['LastChangedDate', 'Date'],
       'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
       'LastChangedBy':       ['LastChangedBy', 'Author'],
       'URL':                 ['URL', 'HeadURL'],
     }
 
     def repl(m):
        if m.group(2):
          return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
        return "$%s$" % m.group(1)
     keywords = [keyword
                 for name in keyword_str.split(" ")
                 for keyword in svn_keywords.get(name, [])]
     return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)
 
   def GetUnknownFiles(self):
     status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
     unknown_files = []
     for line in status.split("\n"):
       if line and line[0] == "?":
         unknown_files.append(line)
     return unknown_files
 
   def ReadFile(self, filename):
     """Returns the contents of a file."""
     file = open(filename, 'rb')
     result = ""
     try:
       result = file.read()
     finally:
       file.close()
     return result
 
   def GetStatus(self, filename):
     """Returns the status of a file."""
     if not self.options.revision:
       status = RunShell(["svn", "status", "--ignore-externals", filename])
       if not status:
         ErrorExit("svn status returned no output for %s" % filename)
       status_lines = status.splitlines()
       # If the file is in a changelist, the output will begin with
       # "\n--- Changelist 'cl_name':\n".  See
-      # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
+      # https://web.archive.org/web/20090918234815/svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
       if (len(status_lines) == 3 and
           not status_lines[0] and
           status_lines[1].startswith("--- Changelist")):
         status = status_lines[2]
       else:
         status = status_lines[0]
     # If we have a revision to diff against we need to run "svn list"
     # for the old and the new revision and compare the results to get
     # the correct status for a file.
     else:
       dirname, relfilename = os.path.split(filename)
       if dirname not in self.svnls_cache:
         cmd = ["svn", "list", "-r", self.rev_start, dirname or "."]
         out, returncode = RunShellWithReturnCode(cmd)
         if returncode:
           ErrorExit("Failed to get status for %s." % filename)
         old_files = out.splitlines()
         args = ["svn", "list"]
         if self.rev_end:
           args += ["-r", self.rev_end]
         cmd = args + [dirname or "."]
         out, returncode = RunShellWithReturnCode(cmd)
         if returncode:
           ErrorExit("Failed to run command %s" % cmd)
         self.svnls_cache[dirname] = (old_files, out.splitlines())
       old_files, new_files = self.svnls_cache[dirname]
       if relfilename in old_files and relfilename not in new_files:
         status = "D   "
       elif relfilename in old_files and relfilename in new_files:
         status = "M   "
       else:
         status = "A   "
     return status
 
   def GetBaseFile(self, filename):
     status = self.GetStatus(filename)
     base_content = None
     new_content = None
 
     # If a file is copied its status will be "A  +", which signifies
     # "addition-with-history".  See "svn st" for more information.  We need to
     # upload the original file or else diff parsing will fail if the file was
     # edited.
     if status[0] == "A" and status[3] != "+":
       # We'll need to upload the new content if we're adding a binary file
       # since diff's output won't contain it.
       mimetype = RunShell(["svn", "propget", "svn:mime-type", filename],
                           silent_ok=True)
       base_content = ""
       is_binary = mimetype and not mimetype.startswith("text/")
       if is_binary and self.IsImage(filename):
         new_content = self.ReadFile(filename)
     elif (status[0] in ("M", "D", "R") or
           (status[0] == "A" and status[3] == "+") or  # Copied file.
           (status[0] == " " and status[1] == "M")):  # Property change.
       args = []
       if self.options.revision:
         url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
       else:
         # Don't change filename, it's needed later.
         url = filename
         args += ["-r", "BASE"]
       cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
       mimetype, returncode = RunShellWithReturnCode(cmd)
       if returncode:
         # File does not exist in the requested revision.
         # Reset mimetype, it contains an error message.
         mimetype = ""
       get_base = False
       is_binary = mimetype and not mimetype.startswith("text/")
       if status[0] == " ":
         # Empty base content just to force an upload.
         base_content = ""
       elif is_binary:
         if self.IsImage(filename):
           get_base = True
           if status[0] == "M":
             if not self.rev_end:
               new_content = self.ReadFile(filename)
             else:
               url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
               new_content = RunShell(["svn", "cat", url],
                                      universal_newlines=True, silent_ok=True)
         else:
           base_content = ""
       else:
         get_base = True
 
       if get_base:
         if is_binary:
           universal_newlines = False
         else:
           universal_newlines = True
         if self.rev_start:
           # "svn cat -r REV delete_file.txt" doesn't work. cat requires
           # the full URL with "@REV" appended instead of using "-r" option.
           url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
           base_content = RunShell(["svn", "cat", url],
                                   universal_newlines=universal_newlines,
                                   silent_ok=True)
         else:
           base_content = RunShell(["svn", "cat", filename],
                                   universal_newlines=universal_newlines,
                                   silent_ok=True)
         if not is_binary:
           args = []
           if self.rev_start:
             url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
           else:
             url = filename
             args += ["-r", "BASE"]
           cmd = ["svn"] + args + ["propget", "svn:keywords", url]
           keywords, returncode = RunShellWithReturnCode(cmd)
           if keywords and not returncode:
             base_content = self._CollapseKeywords(base_content, keywords)
     else:
       StatusUpdate("svn status returned unexpected output: %s" % status)
       sys.exit(1)
     return base_content, new_content, is_binary, status[0:5]
 
 
 class GitVCS(VersionControlSystem):
   """Implementation of the VersionControlSystem interface for Git."""
 
   def __init__(self, options):
     super(GitVCS, self).__init__(options)
     # Map of filename -> hash of base file.
     self.base_hashes = {}
 
   def GenerateDiff(self, extra_args):
     # This is more complicated than svn's GenerateDiff because we must convert
     # the diff output to include an svn-style "Index:" line as well as record
     # the hashes of the base files, so we can upload them along with our diff.
     if self.options.revision:
       extra_args = [self.options.revision] + extra_args
     gitdiff = RunShell(["git", "diff", "--full-index"] + extra_args)
     svndiff = []
     filecount = 0
     filename = None
     for line in gitdiff.splitlines():
       match = re.match(r"diff --git a/(.*) b/.*$", line)
       if match:
         filecount += 1
         filename = match.group(1)
         svndiff.append("Index: %s\n" % filename)
       else:
         # The "index" line in a git diff looks like this (long hashes elided):
         #   index 82c0d44..b2cee3f 100755
         # We want to save the left hash, as that identifies the base file.
         match = re.match(r"index (\w+)\.\.", line)
         if match:
           self.base_hashes[filename] = match.group(1)
       svndiff.append(line + "\n")
     if not filecount:
       ErrorExit("No valid patches found in output from git diff")
     return "".join(svndiff)
 
   def GetUnknownFiles(self):
     status = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
                       silent_ok=True)
     return status.splitlines()
 
   def GetBaseFile(self, filename):
     hash = self.base_hashes[filename]
     base_content = None
     new_content = None
     is_binary = False
     if hash == "0" * 40:  # All-zero hash indicates no base file.
       status = "A"
       base_content = ""
     else:
       status = "M"
       base_content, returncode = RunShellWithReturnCode(["git", "show", hash])
       if returncode:
         ErrorExit("Got error status from 'git show %s'" % hash)
     return (base_content, new_content, is_binary, status)
 
 
 class MercurialVCS(VersionControlSystem):
   """Implementation of the VersionControlSystem interface for Mercurial."""
 
   def __init__(self, options, repo_dir):
     super(MercurialVCS, self).__init__(options)
     # Absolute path to repository (we can be in a subdir)
     self.repo_dir = os.path.normpath(repo_dir)
     # Compute the subdir
     cwd = os.path.normpath(os.getcwd())
     assert cwd.startswith(self.repo_dir)
     self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
     if self.options.revision:
       self.base_rev = self.options.revision
     else:
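       # "hg parent -q" prints something like "123:abcdef012345"; keep only
       # the part after the colon (the short changeset hash).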
       self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()
 
   def _GetRelPath(self, filename):
     """Get relative path of a file according to the current directory,
     given its logical path in the repo."""
     assert filename.startswith(self.subdir), filename
     return filename[len(self.subdir):].lstrip(r"\/")
 
   def GenerateDiff(self, extra_args):
     # If no file specified, restrict to the current subdir
     extra_args = extra_args or ["."]
     cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
     data = RunShell(cmd, silent_ok=True)
     svndiff = []
     filecount = 0
     for line in data.splitlines():
       m = re.match(r"diff --git a/(\S+) b/(\S+)", line)
       if m:
         # Modify the line so it looks as if it came from svn diff.
         # With this modification no changes on the server side are required
         # to make upload.py work with Mercurial repos.
         # NOTE: for proper handling of moved/copied files, we have to use
         # the second filename.
         filename = m.group(2)
         svndiff.append("Index: %s" % filename)
         svndiff.append("=" * 67)
         filecount += 1
         logging.info(line)
       else:
         svndiff.append(line)
     if not filecount:
       ErrorExit("No valid patches found in output from hg diff")
     return "\n".join(svndiff) + "\n"
 
   def GetUnknownFiles(self):
     """Return a list of files unknown to the VCS."""
     args = []
     status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
         silent_ok=True)
     unknown_files = []
     for line in status.splitlines():
       st, fn = line.split(" ", 1)
       if st == "?":
         unknown_files.append(fn)
     return unknown_files
 
   def GetBaseFile(self, filename):
     # "hg status" and "hg cat" both take a path relative to the current subdir
     # rather than to the repo root, but "hg diff" has given us the full path
     # to the repo root.
     base_content = ""
     new_content = None
     is_binary = False
     oldrelpath = relpath = self._GetRelPath(filename)
     # "hg status -C" returns two lines for moved/copied files, one otherwise
     out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
     out = out.splitlines()
     # HACK: strip error message about missing file/directory if it isn't in
     # the working copy
     if out[0].startswith('%s: ' % relpath):
       out = out[1:]
     if len(out) > 1:
       # Moved/copied => considered as modified, use old filename to
       # retrieve base contents
       oldrelpath = out[1].strip()
       status = "M"
     else:
       status, _ = out[0].split(' ', 1)
     if status != "A":
       base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
         silent_ok=True)
       is_binary = "\0" in base_content  # Mercurial's heuristic
     if status != "R":
       new_content = open(relpath, "rb").read()
       is_binary = is_binary or "\0" in new_content
     if is_binary and base_content:
       # Fetch again without converting newlines
       base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
         silent_ok=True, universal_newlines=False)
     if not is_binary or not self.IsImage(relpath):
       new_content = None
     return base_content, new_content, is_binary, status
 
 
 # NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
 def SplitPatch(data):
   """Splits a patch into separate pieces for each file.
 
   Args:
     data: A string containing the output of svn diff.
 
   Returns:
     A list of 2-tuples (filename, text) where text is the svn diff output
       pertaining to filename.
   """
   patches = []
   filename = None
   diff = []
   for line in data.splitlines(True):
     new_filename = None
     if line.startswith('Index:'):
       unused, new_filename = line.split(':', 1)
       new_filename = new_filename.strip()
     elif line.startswith('Property changes on:'):
       unused, temp_filename = line.split(':', 1)
       # When a file is modified, paths use '/' between directories; however,
       # when a property is modified, '\' is used on Windows.  Make them
       # consistent, otherwise the file shows up twice.
       temp_filename = temp_filename.strip().replace('\\', '/')
       if temp_filename != filename:
         # File has property changes but no modifications, create a new diff.
         new_filename = temp_filename
     if new_filename:
       if filename and diff:
         patches.append((filename, ''.join(diff)))
       filename = new_filename
       diff = [line]
       continue
     if diff is not None:
       diff.append(line)
   if filename and diff:
     patches.append((filename, ''.join(diff)))
   return patches
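 
 # Illustrative example (not exercised by upload.py itself): given diff text
 #
 #   Index: foo.py
 #   ===================================================================
 #   --- foo.py ...
 #   Index: bar.py
 #   ...
 #
 # SplitPatch returns [("foo.py", <chunk starting at "Index: foo.py">),
 #                     ("bar.py", <chunk starting at "Index: bar.py">)].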
 
 
 def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
   """Uploads a separate patch for each file in the diff output.
 
   Returns a list of [patch_key, filename] for each file.
   """
   patches = SplitPatch(data)
   rv = []
   for patch in patches:
     if len(patch[1]) > MAX_UPLOAD_SIZE:
       print ("Not uploading the patch for " + patch[0] +
              " because the file is too large.")
       continue
     form_fields = [("filename", patch[0])]
     if not options.download_base:
       form_fields.append(("content_upload", "1"))
     files = [("data", "data.diff", patch[1])]
     ctype, body = EncodeMultipartFormData(form_fields, files)
     url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
     print "Uploading patch for " + patch[0]
     response_body = rpc_server.Send(url, body, content_type=ctype)
     lines = response_body.splitlines()
     if not lines or lines[0] != "OK":
       StatusUpdate("  --> %s" % response_body)
       sys.exit(1)
     rv.append([lines[1], patch[0]])
   return rv
 
 
 def GuessVCS(options):
   """Helper to guess the version control system.
 
   This examines the current directory, guesses which VersionControlSystem
   we're using, and returns an instance of the appropriate class.  Exit with an
   error if we can't figure it out.
 
   Returns:
     A VersionControlSystem instance. Exits if the VCS can't be guessed.
   """
   # Mercurial has a command to get the base directory of a repository
   # Try running it, but don't die if we don't have hg installed.
   # NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
   try:
     out, returncode = RunShellWithReturnCode(["hg", "root"])
     if returncode == 0:
       return MercurialVCS(options, out.strip())
   except OSError, (errno, message):
     if errno != 2:  # ENOENT -- they don't have hg installed.
       raise
 
   # Subversion has a .svn in all working directories.
   if os.path.isdir('.svn'):
     logging.info("Guessed VCS = Subversion")
     return SubversionVCS(options)
 
   # Git has a command to test if you're in a git tree.
   # Try running it, but don't die if we don't have git installed.
   try:
     out, returncode = RunShellWithReturnCode(["git", "rev-parse",
                                               "--is-inside-work-tree"])
     if returncode == 0:
       return GitVCS(options)
   except OSError, (errno, message):
     if errno != 2:  # ENOENT -- they don't have git installed.
       raise
 
   ErrorExit(("Could not guess version control system. "
              "Are you in a working copy directory?"))
 
 
 def RealMain(argv, data=None):
   """The real main function.
 
   Args:
     argv: Command line arguments.
     data: Diff contents. If None (default) the diff is generated by
       the VersionControlSystem implementation returned by GuessVCS().
 
   Returns:
     A 2-tuple (issue id, patchset id).
     The patchset id is None if the base files are not uploaded by this
     script (applies only to SVN checkouts).
   """
   logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
                               "%(lineno)s %(message)s "))
   os.environ['LC_ALL'] = 'C'
   options, args = parser.parse_args(argv[1:])
   global verbosity
   verbosity = options.verbose
   if verbosity >= 3:
     logging.getLogger().setLevel(logging.DEBUG)
   elif verbosity >= 2:
     logging.getLogger().setLevel(logging.INFO)
   vcs = GuessVCS(options)
   if isinstance(vcs, SubversionVCS):
     # base field is only allowed for Subversion.
     # Note: Fetching base files may become deprecated in future releases.
     base = vcs.GuessBase(options.download_base)
   else:
     base = None
   if not base and options.download_base:
     options.download_base = True
     logging.info("Enabled upload of base file")
   if not options.assume_yes:
     vcs.CheckForUnknownFiles()
   if data is None:
     data = vcs.GenerateDiff(args)
   files = vcs.GetBaseFiles(data)
   if verbosity >= 1:
     print "Upload server:", options.server, "(change with -s/--server)"
   if options.issue:
     prompt = "Message describing this patch set: "
   else:
     prompt = "New issue subject: "
   message = options.message or raw_input(prompt).strip()
   if not message:
     ErrorExit("A non-empty message is required")
   rpc_server = GetRpcServer(options)
   form_fields = [("subject", message)]
   if base:
     form_fields.append(("base", base))
   if options.issue:
     form_fields.append(("issue", str(options.issue)))
   if options.email:
     form_fields.append(("user", options.email))
   if options.reviewers:
     for reviewer in options.reviewers.split(','):
       if "@" in reviewer and not reviewer.split("@")[1].count(".") == 1:
         ErrorExit("Invalid email address: %s" % reviewer)
     form_fields.append(("reviewers", options.reviewers))
   if options.cc:
     for cc in options.cc.split(','):
       if "@" in cc and not cc.split("@")[1].count(".") == 1:
         ErrorExit("Invalid email address: %s" % cc)
     form_fields.append(("cc", options.cc))
   description = options.description
   if options.description_file:
     if options.description:
       ErrorExit("Can't specify description and description_file")
     file = open(options.description_file, 'r')
     description = file.read()
     file.close()
   if description:
     form_fields.append(("description", description))
   # Send a hash of all the base files so the server can determine if a copy
   # already exists in an earlier patchset.
   base_hashes = ""
   for file, info in files.iteritems():
     if not info[0] is None:
       checksum = md5.new(info[0]).hexdigest()
       if base_hashes:
         base_hashes += "|"
       base_hashes += checksum + ":" + file
   form_fields.append(("base_hashes", base_hashes))
   # If we're uploading base files, don't send the email before the uploads, so
   # that it contains the file status.
   if options.send_mail and options.download_base:
     form_fields.append(("send_mail", "1"))
   if not options.download_base:
     form_fields.append(("content_upload", "1"))
   if len(data) > MAX_UPLOAD_SIZE:
     print "Patch is large, so uploading file patches separately."
     uploaded_diff_file = []
     form_fields.append(("separate_patches", "1"))
   else:
     uploaded_diff_file = [("data", "data.diff", data)]
   ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
   response_body = rpc_server.Send("/upload", body, content_type=ctype)
   patchset = None
   if not options.download_base or not uploaded_diff_file:
     lines = response_body.splitlines()
     if len(lines) >= 2:
       msg = lines[0]
       patchset = lines[1].strip()
       patches = [x.split(" ", 1) for x in lines[2:]]
     else:
       msg = response_body
   else:
     msg = response_body
   StatusUpdate(msg)
   if not response_body.startswith("Issue created.") and \
   not response_body.startswith("Issue updated."):
     sys.exit(0)
   issue = msg[msg.rfind("/")+1:]
 
   if not uploaded_diff_file:
     result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
     if not options.download_base:
       patches = result
 
   if not options.download_base:
     vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
     if options.send_mail:
       rpc_server.Send("/" + issue + "/mail", payload="")
   return issue, patchset
 
 
 def main():
   try:
     RealMain(sys.argv)
   except KeyboardInterrupt:
     print
     StatusUpdate("Interrupted.")
     sys.exit(1)
 
 
 if __name__ == "__main__":
   main()
diff --git a/googletest/src/gtest-port.cc b/googletest/src/gtest-port.cc
index f8a0ad65..5fbb08b7 100644
--- a/googletest/src/gtest-port.cc
+++ b/googletest/src/gtest-port.cc
@@ -1,1277 +1,1277 @@
 // Copyright 2008, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 // Author: wan@google.com (Zhanyong Wan)
 
 #include "gtest/internal/gtest-port.h"
 
 #include <limits.h>
 #include <stdlib.h>
 #include <stdio.h>
 #include <string.h>
 #include <fstream>
 
 #if GTEST_OS_WINDOWS
 # include <windows.h>
 # include <io.h>
 # include <sys/stat.h>
 # include <map>  // Used in ThreadLocal.
 #else
 # include <unistd.h>
 #endif  // GTEST_OS_WINDOWS
 
 #if GTEST_OS_MAC
 # include <mach/mach_init.h>
 # include <mach/task.h>
 # include <mach/vm_map.h>
 #endif  // GTEST_OS_MAC
 
 #if GTEST_OS_QNX
 # include <devctl.h>
 # include <fcntl.h>
 # include <sys/procfs.h>
 #endif  // GTEST_OS_QNX
 
 #if GTEST_OS_AIX
 # include <procinfo.h>
 # include <sys/types.h>
 #endif  // GTEST_OS_AIX
 
 #if GTEST_OS_FUCHSIA
 # include <zircon/process.h>
 # include <zircon/syscalls.h>
 #endif  // GTEST_OS_FUCHSIA
 
 #include "gtest/gtest-spi.h"
 #include "gtest/gtest-message.h"
 #include "gtest/internal/gtest-internal.h"
 #include "gtest/internal/gtest-string.h"
 #include "src/gtest-internal-inl.h"
 
 namespace testing {
 namespace internal {
 
 #if defined(_MSC_VER) || defined(__BORLANDC__)
 // MSVC and C++Builder do not provide a definition of STDERR_FILENO.
 const int kStdOutFileno = 1;
 const int kStdErrFileno = 2;
 #else
 const int kStdOutFileno = STDOUT_FILENO;
 const int kStdErrFileno = STDERR_FILENO;
 #endif  // _MSC_VER
 
 #if GTEST_OS_LINUX
 
 namespace {
 template <typename T>
 T ReadProcFileField(const std::string& filename, int field) {
   std::string dummy;
   std::ifstream file(filename.c_str());
   while (field-- > 0) {
     file >> dummy;
   }
   T output = 0;
   file >> output;
   return output;
 }
 }  // namespace
 
 // Returns the number of active threads, or 0 when there is an error.
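 // /proc/<pid>/stat is a single line of space-separated fields; per proc(5)
 // field 20 is num_threads, so skipping 19 fields below lands on the thread
 // count (assuming the executable name in field 2 contains no spaces).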
 size_t GetThreadCount() {
   const std::string filename =
       (Message() << "/proc/" << getpid() << "/stat").GetString();
   return ReadProcFileField<int>(filename, 19);
 }
 
 #elif GTEST_OS_MAC
 
 size_t GetThreadCount() {
   const task_t task = mach_task_self();
   mach_msg_type_number_t thread_count;
   thread_act_array_t thread_list;
   const kern_return_t status = task_threads(task, &thread_list, &thread_count);
   if (status == KERN_SUCCESS) {
     // task_threads allocates resources in thread_list and we need to free them
     // to avoid leaks.
     vm_deallocate(task,
                   reinterpret_cast<vm_address_t>(thread_list),
                   sizeof(thread_t) * thread_count);
     return static_cast<size_t>(thread_count);
   } else {
     return 0;
   }
 }
 
 #elif GTEST_OS_QNX
 
 // Returns the number of threads running in the process, or 0 to indicate that
 // we cannot detect it.
 size_t GetThreadCount() {
   const int fd = open("/proc/self/as", O_RDONLY);
   if (fd < 0) {
     return 0;
   }
   procfs_info process_info;
   const int status =
       devctl(fd, DCMD_PROC_INFO, &process_info, sizeof(process_info), NULL);
   close(fd);
   if (status == EOK) {
     return static_cast<size_t>(process_info.num_threads);
   } else {
     return 0;
   }
 }
 
 #elif GTEST_OS_AIX
 
 size_t GetThreadCount() {
   struct procentry64 entry;
   pid_t pid = getpid();
   int status = getprocs64(&entry, sizeof(entry), NULL, 0, &pid, 1);
   if (status == 1) {
     return entry.pi_thcount;
   } else {
     return 0;
   }
 }
 
 #elif GTEST_OS_FUCHSIA
 
 size_t GetThreadCount() {
   int dummy_buffer;
   size_t avail;
   zx_status_t status = zx_object_get_info(
       zx_process_self(),
       ZX_INFO_PROCESS_THREADS,
       &dummy_buffer,
       0,
       nullptr,
       &avail);
   if (status == ZX_OK) {
     return avail;
   } else {
     return 0;
   }
 }
 
 #else
 
 size_t GetThreadCount() {
   // There's no portable way to detect the number of threads, so we just
   // return 0 to indicate that we cannot detect it.
   return 0;
 }
 
 #endif  // GTEST_OS_LINUX
 
 #if GTEST_IS_THREADSAFE && GTEST_OS_WINDOWS
 
 void SleepMilliseconds(int n) {
   ::Sleep(n);
 }
 
 AutoHandle::AutoHandle()
     : handle_(INVALID_HANDLE_VALUE) {}
 
 AutoHandle::AutoHandle(Handle handle)
     : handle_(handle) {}
 
 AutoHandle::~AutoHandle() {
   Reset();
 }
 
 AutoHandle::Handle AutoHandle::Get() const {
   return handle_;
 }
 
 void AutoHandle::Reset() {
   Reset(INVALID_HANDLE_VALUE);
 }
 
 void AutoHandle::Reset(HANDLE handle) {
   // Resetting with the same handle we already own is invalid.
   if (handle_ != handle) {
     if (IsCloseable()) {
       ::CloseHandle(handle_);
     }
     handle_ = handle;
   } else {
     GTEST_CHECK_(!IsCloseable())
         << "Resetting a valid handle to itself is likely a programmer error "
             "and thus not allowed.";
   }
 }
 
 bool AutoHandle::IsCloseable() const {
   // Different Windows APIs may use either of these values to represent an
   // invalid handle.
   return handle_ != NULL && handle_ != INVALID_HANDLE_VALUE;
 }
 
 Notification::Notification()
     : event_(::CreateEvent(NULL,   // Default security attributes.
                            TRUE,   // Do not reset automatically.
                            FALSE,  // Initially unset.
                            NULL)) {  // Anonymous event.
   GTEST_CHECK_(event_.Get() != NULL);
 }
 
 void Notification::Notify() {
   GTEST_CHECK_(::SetEvent(event_.Get()) != FALSE);
 }
 
 void Notification::WaitForNotification() {
   GTEST_CHECK_(
       ::WaitForSingleObject(event_.Get(), INFINITE) == WAIT_OBJECT_0);
 }
 
 Mutex::Mutex()
     : owner_thread_id_(0),
       type_(kDynamic),
       critical_section_init_phase_(0),
       critical_section_(new CRITICAL_SECTION) {
   ::InitializeCriticalSection(critical_section_);
 }
 
 Mutex::~Mutex() {
   // Static mutexes are leaked intentionally. It is not thread-safe to try
   // to clean them up.
   // TODO(yukawa): Switch to Slim Reader/Writer (SRW) Locks, which requires
   // nothing to clean it up but is available only on Vista and later.
-  // http://msdn.microsoft.com/en-us/library/windows/desktop/aa904937.aspx
+  // https://docs.microsoft.com/en-us/windows/desktop/Sync/slim-reader-writer--srw--locks
   if (type_ == kDynamic) {
     ::DeleteCriticalSection(critical_section_);
     delete critical_section_;
     critical_section_ = NULL;
   }
 }
 
 void Mutex::Lock() {
   ThreadSafeLazyInit();
   ::EnterCriticalSection(critical_section_);
   owner_thread_id_ = ::GetCurrentThreadId();
 }
 
 void Mutex::Unlock() {
   ThreadSafeLazyInit();
   // We don't protect writing to owner_thread_id_ here, as it's the
   // caller's responsibility to ensure that the current thread holds the
   // mutex when this is called.
   owner_thread_id_ = 0;
   ::LeaveCriticalSection(critical_section_);
 }
 
 // Does nothing if the current thread holds the mutex. Otherwise, crashes
 // with high probability.
 void Mutex::AssertHeld() {
   ThreadSafeLazyInit();
   GTEST_CHECK_(owner_thread_id_ == ::GetCurrentThreadId())
       << "The current thread is not holding the mutex @" << this;
 }
 
 // Initializes owner_thread_id_ and critical_section_ in static mutexes.
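 // critical_section_init_phase_ acts as a small state machine arbitrated via
 // InterlockedCompareExchange: 0 = not initialized, 1 = initialization in
 // progress, 2 = initialized (a summary of the switch below).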
 void Mutex::ThreadSafeLazyInit() {
   // Dynamic mutexes are initialized in the constructor.
   if (type_ == kStatic) {
     switch (
         ::InterlockedCompareExchange(&critical_section_init_phase_, 1L, 0L)) {
       case 0:
         // If critical_section_init_phase_ was 0 before the exchange, we
         // are the first to test it and need to perform the initialization.
         owner_thread_id_ = 0;
         critical_section_ = new CRITICAL_SECTION;
         ::InitializeCriticalSection(critical_section_);
         // Updates the critical_section_init_phase_ to 2 to signal
         // initialization complete.
         GTEST_CHECK_(::InterlockedCompareExchange(
                           &critical_section_init_phase_, 2L, 1L) ==
                       1L);
         break;
       case 1:
         // Somebody else is already initializing the mutex; spin until they
         // are done.
         while (::InterlockedCompareExchange(&critical_section_init_phase_,
                                             2L,
                                             2L) != 2L) {
           // Possibly yields the rest of the thread's time slice to other
           // threads.
           ::Sleep(0);
         }
         break;
 
       case 2:
         break;  // The mutex is already initialized and ready for use.
 
       default:
         GTEST_CHECK_(false)
             << "Unexpected value of critical_section_init_phase_ "
             << "while initializing a static mutex.";
     }
   }
 }
 
 namespace {
 
 class ThreadWithParamSupport : public ThreadWithParamBase {
  public:
   static HANDLE CreateThread(Runnable* runnable,
                              Notification* thread_can_start) {
     ThreadMainParam* param = new ThreadMainParam(runnable, thread_can_start);
     DWORD thread_id;
     // TODO(yukawa): Consider using _beginthreadex instead.
     HANDLE thread_handle = ::CreateThread(
         NULL,    // Default security.
         0,       // Default stack size.
         &ThreadWithParamSupport::ThreadMain,
         param,   // Parameter to ThreadMain.
         0x0,     // Default creation flags.
         &thread_id);  // Need a valid pointer for the call to work under Win98.
     GTEST_CHECK_(thread_handle != NULL) << "CreateThread failed with error "
                                         << ::GetLastError() << ".";
     if (thread_handle == NULL) {
       delete param;
     }
     return thread_handle;
   }
 
  private:
   struct ThreadMainParam {
     ThreadMainParam(Runnable* runnable, Notification* thread_can_start)
         : runnable_(runnable),
           thread_can_start_(thread_can_start) {
     }
     scoped_ptr<Runnable> runnable_;
     // Does not own.
     Notification* thread_can_start_;
   };
 
   static DWORD WINAPI ThreadMain(void* ptr) {
     // Transfers ownership.
     scoped_ptr<ThreadMainParam> param(static_cast<ThreadMainParam*>(ptr));
     if (param->thread_can_start_ != NULL)
       param->thread_can_start_->WaitForNotification();
     param->runnable_->Run();
     return 0;
   }
 
   // Prohibit instantiation.
   ThreadWithParamSupport();
 
   GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadWithParamSupport);
 };
 
 }  // namespace
 
 ThreadWithParamBase::ThreadWithParamBase(Runnable *runnable,
                                          Notification* thread_can_start)
       : thread_(ThreadWithParamSupport::CreateThread(runnable,
                                                      thread_can_start)) {
 }
 
 ThreadWithParamBase::~ThreadWithParamBase() {
   Join();
 }
 
 void ThreadWithParamBase::Join() {
   GTEST_CHECK_(::WaitForSingleObject(thread_.Get(), INFINITE) == WAIT_OBJECT_0)
       << "Failed to join the thread with error " << ::GetLastError() << ".";
 }
 
 // Maps a thread to the set of ThreadLocals that have values instantiated on
 // that thread and notifies them when the thread exits.  A ThreadLocal
 // instance is expected to persist until all threads it has values on have
 // terminated.
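 // Rough lifecycle (summary of the code below): GetValueOnCurrentThread()
 // lazily creates the calling thread's value and, the first time a thread is
 // seen, starts a watcher thread that blocks on the thread's handle; when the
 // watched thread exits, the watcher invokes OnThreadExit() to release that
 // thread's values.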
 class ThreadLocalRegistryImpl {
  public:
   // Registers thread_local_instance as having value on the current thread.
   // Returns a value that can be used to identify the thread from other threads.
   static ThreadLocalValueHolderBase* GetValueOnCurrentThread(
       const ThreadLocalBase* thread_local_instance) {
     DWORD current_thread = ::GetCurrentThreadId();
     MutexLock lock(&mutex_);
     ThreadIdToThreadLocals* const thread_to_thread_locals =
         GetThreadLocalsMapLocked();
     ThreadIdToThreadLocals::iterator thread_local_pos =
         thread_to_thread_locals->find(current_thread);
     if (thread_local_pos == thread_to_thread_locals->end()) {
       thread_local_pos = thread_to_thread_locals->insert(
           std::make_pair(current_thread, ThreadLocalValues())).first;
       StartWatcherThreadFor(current_thread);
     }
     ThreadLocalValues& thread_local_values = thread_local_pos->second;
     ThreadLocalValues::iterator value_pos =
         thread_local_values.find(thread_local_instance);
     if (value_pos == thread_local_values.end()) {
       value_pos =
           thread_local_values
               .insert(std::make_pair(
                   thread_local_instance,
                   linked_ptr<ThreadLocalValueHolderBase>(
                       thread_local_instance->NewValueForCurrentThread())))
               .first;
     }
     return value_pos->second.get();
   }
 
   static void OnThreadLocalDestroyed(
       const ThreadLocalBase* thread_local_instance) {
     std::vector<linked_ptr<ThreadLocalValueHolderBase> > value_holders;
     // Clean up the ThreadLocalValues data structure while holding the lock, but
     // defer the destruction of the ThreadLocalValueHolderBases.
     {
       MutexLock lock(&mutex_);
       ThreadIdToThreadLocals* const thread_to_thread_locals =
           GetThreadLocalsMapLocked();
       for (ThreadIdToThreadLocals::iterator it =
           thread_to_thread_locals->begin();
           it != thread_to_thread_locals->end();
           ++it) {
         ThreadLocalValues& thread_local_values = it->second;
         ThreadLocalValues::iterator value_pos =
             thread_local_values.find(thread_local_instance);
         if (value_pos != thread_local_values.end()) {
           value_holders.push_back(value_pos->second);
           thread_local_values.erase(value_pos);
           // This 'if' can only be successful at most once, so theoretically we
           // could break out of the loop here, but we don't bother doing so.
         }
       }
     }
     // Outside the lock, let the destructor for 'value_holders' deallocate the
     // ThreadLocalValueHolderBases.
   }
 
   static void OnThreadExit(DWORD thread_id) {
     GTEST_CHECK_(thread_id != 0) << ::GetLastError();
     std::vector<linked_ptr<ThreadLocalValueHolderBase> > value_holders;
     // Clean up the ThreadIdToThreadLocals data structure while holding the
     // lock, but defer the destruction of the ThreadLocalValueHolderBases.
     {
       MutexLock lock(&mutex_);
       ThreadIdToThreadLocals* const thread_to_thread_locals =
           GetThreadLocalsMapLocked();
       ThreadIdToThreadLocals::iterator thread_local_pos =
           thread_to_thread_locals->find(thread_id);
       if (thread_local_pos != thread_to_thread_locals->end()) {
         ThreadLocalValues& thread_local_values = thread_local_pos->second;
         for (ThreadLocalValues::iterator value_pos =
             thread_local_values.begin();
             value_pos != thread_local_values.end();
             ++value_pos) {
           value_holders.push_back(value_pos->second);
         }
         thread_to_thread_locals->erase(thread_local_pos);
       }
     }
     // Outside the lock, let the destructor for 'value_holders' deallocate the
     // ThreadLocalValueHolderBases.
   }
 
  private:
   // In a particular thread, maps a ThreadLocal object to its value.
   typedef std::map<const ThreadLocalBase*,
                    linked_ptr<ThreadLocalValueHolderBase> > ThreadLocalValues;
   // Stores, for each thread (indexed by its ID), the ThreadLocalValues
   // instantiated on that thread.
   typedef std::map<DWORD, ThreadLocalValues> ThreadIdToThreadLocals;
 
   // Holds the thread id and thread handle that we pass from
   // StartWatcherThreadFor to WatcherThreadFunc.
   typedef std::pair<DWORD, HANDLE> ThreadIdAndHandle;
 
   static void StartWatcherThreadFor(DWORD thread_id) {
     // The returned handle will be kept in thread_map and closed by
     // watcher_thread in WatcherThreadFunc.
     HANDLE thread = ::OpenThread(SYNCHRONIZE | THREAD_QUERY_INFORMATION,
                                  FALSE,
                                  thread_id);
     GTEST_CHECK_(thread != NULL);
     // We need to pass a valid thread ID pointer into CreateThread for it
     // to work correctly under Win98.
     DWORD watcher_thread_id;
     HANDLE watcher_thread = ::CreateThread(
         NULL,   // Default security.
         0,      // Default stack size
         &ThreadLocalRegistryImpl::WatcherThreadFunc,
         reinterpret_cast<LPVOID>(new ThreadIdAndHandle(thread_id, thread)),
         CREATE_SUSPENDED,
         &watcher_thread_id);
     GTEST_CHECK_(watcher_thread != NULL);
     // Give the watcher thread the same priority as ours to avoid being
     // blocked by it.
     ::SetThreadPriority(watcher_thread,
                         ::GetThreadPriority(::GetCurrentThread()));
     ::ResumeThread(watcher_thread);
     ::CloseHandle(watcher_thread);
   }
 
   // Monitors exit from a given thread and notifies those
   // ThreadIdToThreadLocals about thread termination.
   static DWORD WINAPI WatcherThreadFunc(LPVOID param) {
     const ThreadIdAndHandle* tah =
         reinterpret_cast<const ThreadIdAndHandle*>(param);
     GTEST_CHECK_(
         ::WaitForSingleObject(tah->second, INFINITE) == WAIT_OBJECT_0);
     OnThreadExit(tah->first);
     ::CloseHandle(tah->second);
     delete tah;
     return 0;
   }
 
   // Returns map of thread local instances.
   static ThreadIdToThreadLocals* GetThreadLocalsMapLocked() {
     mutex_.AssertHeld();
     static ThreadIdToThreadLocals* map = new ThreadIdToThreadLocals;
     return map;
   }
 
   // Protects access to GetThreadLocalsMapLocked() and its return value.
   static Mutex mutex_;
   // Protects access to GetThreadMapLocked() and its return value.
   static Mutex thread_map_mutex_;
 };
 
 Mutex ThreadLocalRegistryImpl::mutex_(Mutex::kStaticMutex);
 Mutex ThreadLocalRegistryImpl::thread_map_mutex_(Mutex::kStaticMutex);
 
 ThreadLocalValueHolderBase* ThreadLocalRegistry::GetValueOnCurrentThread(
       const ThreadLocalBase* thread_local_instance) {
   return ThreadLocalRegistryImpl::GetValueOnCurrentThread(
       thread_local_instance);
 }
 
 void ThreadLocalRegistry::OnThreadLocalDestroyed(
       const ThreadLocalBase* thread_local_instance) {
   ThreadLocalRegistryImpl::OnThreadLocalDestroyed(thread_local_instance);
 }
 
 #endif  // GTEST_IS_THREADSAFE && GTEST_OS_WINDOWS
 
 #if GTEST_USES_POSIX_RE
 
 // Implements RE.  Currently only needed for death tests.
 
 RE::~RE() {
   if (is_valid_) {
     // regfree'ing an invalid regex might crash because the content
     // of the regex is undefined. Since the regex's are essentially
     // the same, one cannot be valid (or invalid) without the other
     // being so too.
     regfree(&partial_regex_);
     regfree(&full_regex_);
   }
   free(const_cast<char*>(pattern_));
 }
 
 // Returns true iff regular expression re matches the entire str.
 bool RE::FullMatch(const char* str, const RE& re) {
   if (!re.is_valid_) return false;
 
   regmatch_t match;
   return regexec(&re.full_regex_, str, 1, &match, 0) == 0;
 }
 
 // Returns true iff regular expression re matches a substring of str
 // (including str itself).
 bool RE::PartialMatch(const char* str, const RE& re) {
   if (!re.is_valid_) return false;
 
   regmatch_t match;
   return regexec(&re.partial_regex_, str, 1, &match, 0) == 0;
 }
 
 // Initializes an RE from its string representation.
 void RE::Init(const char* regex) {
   pattern_ = posix::StrDup(regex);
 
   // Reserves enough bytes to hold the regular expression used for a
   // full match.
   const size_t full_regex_len = strlen(regex) + 10;
   char* const full_pattern = new char[full_regex_len];
 
   snprintf(full_pattern, full_regex_len, "^(%s)$", regex);
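   // For example, regex "a.c" yields the full-match pattern "^(a.c)$", so
   // FullMatch() is anchored at both ends while PartialMatch() uses the
   // original, unanchored pattern.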
   is_valid_ = regcomp(&full_regex_, full_pattern, REG_EXTENDED) == 0;
   // We want to call regcomp(&partial_regex_, ...) even if the
   // previous expression returns false.  Otherwise partial_regex_ may
   // not be properly initialized and may cause trouble when it's
   // freed.
   //
   // Some implementation of POSIX regex (e.g. on at least some
   // versions of Cygwin) doesn't accept the empty string as a valid
   // regex.  We change it to an equivalent form "()" to be safe.
   if (is_valid_) {
     const char* const partial_regex = (*regex == '\0') ? "()" : regex;
     is_valid_ = regcomp(&partial_regex_, partial_regex, REG_EXTENDED) == 0;
   }
   EXPECT_TRUE(is_valid_)
       << "Regular expression \"" << regex
       << "\" is not a valid POSIX Extended regular expression.";
 
   delete[] full_pattern;
 }
 
 #elif GTEST_USES_SIMPLE_RE
 
 // Returns true iff ch appears anywhere in str (excluding the
 // terminating '\0' character).
 bool IsInSet(char ch, const char* str) {
   return ch != '\0' && strchr(str, ch) != NULL;
 }
 
 // Returns true iff ch belongs to the given classification.  Unlike
 // similar functions in <ctype.h>, these aren't affected by the
 // current locale.
 bool IsAsciiDigit(char ch) { return '0' <= ch && ch <= '9'; }
 bool IsAsciiPunct(char ch) {
   return IsInSet(ch, "^-!\"#$%&'()*+,./:;<=>?@[\\]_`{|}~");
 }
 bool IsRepeat(char ch) { return IsInSet(ch, "?*+"); }
 bool IsAsciiWhiteSpace(char ch) { return IsInSet(ch, " \f\n\r\t\v"); }
 bool IsAsciiWordChar(char ch) {
   return ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z') ||
       ('0' <= ch && ch <= '9') || ch == '_';
 }
 
 // Returns true iff "\\c" is a supported escape sequence.
 bool IsValidEscape(char c) {
   return (IsAsciiPunct(c) || IsInSet(c, "dDfnrsStvwW"));
 }
 
 // Returns true iff the given atom (specified by escaped and pattern)
 // matches ch.  The result is undefined if the atom is invalid.
 bool AtomMatchesChar(bool escaped, char pattern_char, char ch) {
   if (escaped) {  // "\\p" where p is pattern_char.
     switch (pattern_char) {
       case 'd': return IsAsciiDigit(ch);
       case 'D': return !IsAsciiDigit(ch);
       case 'f': return ch == '\f';
       case 'n': return ch == '\n';
       case 'r': return ch == '\r';
       case 's': return IsAsciiWhiteSpace(ch);
       case 'S': return !IsAsciiWhiteSpace(ch);
       case 't': return ch == '\t';
       case 'v': return ch == '\v';
       case 'w': return IsAsciiWordChar(ch);
       case 'W': return !IsAsciiWordChar(ch);
     }
     return IsAsciiPunct(pattern_char) && pattern_char == ch;
   }
 
   return (pattern_char == '.' && ch != '\n') || pattern_char == ch;
 }
 
 // Helper function used by ValidateRegex() to format error messages.
 static std::string FormatRegexSyntaxError(const char* regex, int index) {
   return (Message() << "Syntax error at index " << index
           << " in simple regular expression \"" << regex << "\": ").GetString();
 }
 
 // Generates non-fatal failures and returns false if regex is invalid;
 // otherwise returns true.
 bool ValidateRegex(const char* regex) {
   if (regex == NULL) {
     // TODO(wan@google.com): fix the source file location in the
     // assertion failures to match where the regex is used in user
     // code.
     ADD_FAILURE() << "NULL is not a valid simple regular expression.";
     return false;
   }
 
   bool is_valid = true;
 
   // True iff ?, *, or + can follow the previous atom.
   bool prev_repeatable = false;
   for (int i = 0; regex[i]; i++) {
     if (regex[i] == '\\') {  // An escape sequence
       i++;
       if (regex[i] == '\0') {
         ADD_FAILURE() << FormatRegexSyntaxError(regex, i - 1)
                       << "'\\' cannot appear at the end.";
         return false;
       }
 
       if (!IsValidEscape(regex[i])) {
         ADD_FAILURE() << FormatRegexSyntaxError(regex, i - 1)
                       << "invalid escape sequence \"\\" << regex[i] << "\".";
         is_valid = false;
       }
       prev_repeatable = true;
     } else {  // Not an escape sequence.
       const char ch = regex[i];
 
       if (ch == '^' && i > 0) {
         ADD_FAILURE() << FormatRegexSyntaxError(regex, i)
                       << "'^' can only appear at the beginning.";
         is_valid = false;
       } else if (ch == '$' && regex[i + 1] != '\0') {
         ADD_FAILURE() << FormatRegexSyntaxError(regex, i)
                       << "'$' can only appear at the end.";
         is_valid = false;
       } else if (IsInSet(ch, "()[]{}|")) {
         ADD_FAILURE() << FormatRegexSyntaxError(regex, i)
                       << "'" << ch << "' is unsupported.";
         is_valid = false;
       } else if (IsRepeat(ch) && !prev_repeatable) {
         ADD_FAILURE() << FormatRegexSyntaxError(regex, i)
                       << "'" << ch << "' can only follow a repeatable token.";
         is_valid = false;
       }
 
       prev_repeatable = !IsInSet(ch, "^$?*+");
     }
   }
 
   return is_valid;
 }
 
 // Matches a repeated regex atom followed by a valid simple regular
 // expression.  The regex atom is defined as c if escaped is false,
 // or \c otherwise.  repeat is the repetition meta character (?, *,
 // or +).  The behavior is undefined if str contains too many
 // characters to be indexable by size_t, in which case the test will
 // probably time out anyway.  We are fine with this limitation as
 // std::string has it too.
 bool MatchRepetitionAndRegexAtHead(
     bool escaped, char c, char repeat, const char* regex,
     const char* str) {
   const size_t min_count = (repeat == '+') ? 1 : 0;
   const size_t max_count = (repeat == '?') ? 1 :
       static_cast<size_t>(-1) - 1;
   // We cannot call numeric_limits::max() as it conflicts with the
   // max() macro on Windows.
 
   for (size_t i = 0; i <= max_count; ++i) {
     // We know that the atom matches each of the first i characters in str.
     if (i >= min_count && MatchRegexAtHead(regex, str + i)) {
       // We have enough matches at the head, and the tail matches too.
       // Since we only care about *whether* the pattern matches str
       // (as opposed to *how* it matches), there is no need to find a
       // greedy match.
       return true;
     }
     if (str[i] == '\0' || !AtomMatchesChar(escaped, c, str[i]))
       return false;
   }
   return false;
 }
 
 // Returns true iff regex matches a prefix of str.  regex must be a
 // valid simple regular expression and not start with "^", or the
 // result is undefined.
 bool MatchRegexAtHead(const char* regex, const char* str) {
   if (*regex == '\0')  // An empty regex matches a prefix of anything.
     return true;
 
   // "$" only matches the end of a string.  Note that regex being
   // valid guarantees that there's nothing after "$" in it.
   if (*regex == '$')
     return *str == '\0';
 
   // Is the first thing in regex an escape sequence?
   const bool escaped = *regex == '\\';
   if (escaped)
     ++regex;
   if (IsRepeat(regex[1])) {
     // MatchRepetitionAndRegexAtHead() calls MatchRegexAtHead(), so
     // here's an indirect recursion.  It terminates as the regex gets
     // shorter in each recursion.
     return MatchRepetitionAndRegexAtHead(
         escaped, regex[0], regex[1], regex + 2, str);
   } else {
     // regex isn't empty, isn't "$", and doesn't start with a
     // repetition.  We match the first atom of regex with the first
     // character of str and recurse.
     return (*str != '\0') && AtomMatchesChar(escaped, *regex, *str) &&
         MatchRegexAtHead(regex + 1, str + 1);
   }
 }
 
 // Returns true iff regex matches any substring of str.  regex must be
 // a valid simple regular expression, or the result is undefined.
 //
 // The algorithm is recursive, but the recursion depth doesn't exceed
 // the regex length, so we won't need to worry about running out of
 // stack space normally.  In rare cases the time complexity can be
 // exponential with respect to the regex length + the string length,
 // but usually it's much faster (often close to linear).
 bool MatchRegexAnywhere(const char* regex, const char* str) {
   if (regex == NULL || str == NULL)
     return false;
 
   if (*regex == '^')
     return MatchRegexAtHead(regex + 1, str);
 
   // A successful match can be anywhere in str.
   do {
     if (MatchRegexAtHead(regex, str))
       return true;
   } while (*str++ != '\0');
   return false;
 }
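 
 // Worked example (illustrative): MatchRegexAnywhere("a?b", "xb") tries each
 // suffix of "xb".  The head match fails for "xb" itself (neither 'a' nor 'b'
 // matches 'x'), but for the suffix "b" MatchRepetitionAndRegexAtHead()
 // accepts zero occurrences of 'a' and then matches "b", so the overall
 // result is true.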
 
 // Implements the RE class.
 
 RE::~RE() {
   free(const_cast<char*>(pattern_));
   free(const_cast<char*>(full_pattern_));
 }
 
 // Returns true iff regular expression re matches the entire str.
 bool RE::FullMatch(const char* str, const RE& re) {
   return re.is_valid_ && MatchRegexAnywhere(re.full_pattern_, str);
 }
 
 // Returns true iff regular expression re matches a substring of str
 // (including str itself).
 bool RE::PartialMatch(const char* str, const RE& re) {
   return re.is_valid_ && MatchRegexAnywhere(re.pattern_, str);
 }
 
 // Initializes an RE from its string representation.
 void RE::Init(const char* regex) {
   pattern_ = full_pattern_ = NULL;
   if (regex != NULL) {
     pattern_ = posix::StrDup(regex);
   }
 
   is_valid_ = ValidateRegex(regex);
   if (!is_valid_) {
     // No need to calculate the full pattern when the regex is invalid.
     return;
   }
 
   const size_t len = strlen(regex);
   // Reserves enough bytes to hold the regular expression used for a
   // full match: we need space to prepend a '^', append a '$', and
   // terminate the string with '\0'.
   char* buffer = static_cast<char*>(malloc(len + 3));
   full_pattern_ = buffer;
 
   if (*regex != '^')
     *buffer++ = '^';  // Makes sure full_pattern_ starts with '^'.
 
   // We don't use snprintf or strncpy, as they trigger a warning when
   // compiled with VC++ 8.0.
   memcpy(buffer, regex, len);
   buffer += len;
 
   if (len == 0 || regex[len - 1] != '$')
     *buffer++ = '$';  // Makes sure full_pattern_ ends with '$'.
 
   *buffer = '\0';
 }
 
 #endif  // GTEST_USES_POSIX_RE
 
 const char kUnknownFile[] = "unknown file";
 
 // Formats a source file path and a line number as they would appear
 // in an error message from the compiler used to compile this code.
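 // For example, "foo.cc(42):" with MSVC and "foo.cc:42:" elsewhere.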
 GTEST_API_ ::std::string FormatFileLocation(const char* file, int line) {
   const std::string file_name(file == NULL ? kUnknownFile : file);
 
   if (line < 0) {
     return file_name + ":";
   }
 #ifdef _MSC_VER
   return file_name + "(" + StreamableToString(line) + "):";
 #else
   return file_name + ":" + StreamableToString(line) + ":";
 #endif  // _MSC_VER
 }
 
 // Formats a file location for compiler-independent XML output.
 // Although this function is not platform dependent, we put it next to
 // FormatFileLocation in order to contrast the two functions.
 // Note that FormatCompilerIndependentFileLocation() does NOT append colon
 // to the file location it produces, unlike FormatFileLocation().
 GTEST_API_ ::std::string FormatCompilerIndependentFileLocation(
     const char* file, int line) {
   const std::string file_name(file == NULL ? kUnknownFile : file);
 
   if (line < 0)
     return file_name;
   else
     return file_name + ":" + StreamableToString(line);
 }
 
 GTestLog::GTestLog(GTestLogSeverity severity, const char* file, int line)
     : severity_(severity) {
   const char* const marker =
       severity == GTEST_INFO ?    "[  INFO ]" :
       severity == GTEST_WARNING ? "[WARNING]" :
       severity == GTEST_ERROR ?   "[ ERROR ]" : "[ FATAL ]";
   GetStream() << ::std::endl << marker << " "
               << FormatFileLocation(file, line).c_str() << ": ";
 }
 
 // Flushes the buffers and, if severity is GTEST_FATAL, aborts the program.
 GTestLog::~GTestLog() {
   GetStream() << ::std::endl;
   if (severity_ == GTEST_FATAL) {
     fflush(stderr);
     posix::Abort();
   }
 }
 
 // Disable Microsoft deprecation warnings for POSIX functions called from
 // this class (creat, dup, dup2, and close)
 GTEST_DISABLE_MSC_WARNINGS_PUSH_(4996)
 
 #if GTEST_HAS_STREAM_REDIRECTION
 
 // Object that captures an output stream (stdout/stderr).
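 // Mechanism (summary of the code below): the constructor keeps a dup() of
 // the original fd and dup2()s the fd onto a temporary file so subsequent
 // writes land there; GetCapturedString() restores the original fd and reads
 // the temporary file back.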
 class CapturedStream {
  public:
   // The ctor redirects the stream to a temporary file.
   explicit CapturedStream(int fd) : fd_(fd), uncaptured_fd_(dup(fd)) {
 # if GTEST_OS_WINDOWS
     char temp_dir_path[MAX_PATH + 1] = { '\0' };  // NOLINT
     char temp_file_path[MAX_PATH + 1] = { '\0' };  // NOLINT
 
     ::GetTempPathA(sizeof(temp_dir_path), temp_dir_path);
     const UINT success = ::GetTempFileNameA(temp_dir_path,
                                             "gtest_redir",
                                             0,  // Generate unique file name.
                                             temp_file_path);
     GTEST_CHECK_(success != 0)
         << "Unable to create a temporary file in " << temp_dir_path;
     const int captured_fd = creat(temp_file_path, _S_IREAD | _S_IWRITE);
     GTEST_CHECK_(captured_fd != -1) << "Unable to open temporary file "
                                     << temp_file_path;
     filename_ = temp_file_path;
 # else
     // There's no guarantee that a test has write access to the current
     // directory, so we create the temporary file in the /tmp directory
     // instead. We use /tmp on most systems, and /sdcard on Android.
     // That's because Android doesn't have /tmp.
 #  if GTEST_OS_LINUX_ANDROID
     // Note: Android applications are expected to call the framework's
     // Context.getExternalStorageDirectory() method through JNI to get
     // the location of the world-writable SD Card directory. However,
     // this requires a Context handle, which cannot be retrieved
     // globally from native code. Doing so also precludes running the
     // code as part of a regular standalone executable, which doesn't
     // run in a Dalvik process (e.g. when running it through 'adb shell').
     //
     // The location /sdcard is directly accessible from native code
     // and is the only location (unofficially) supported by the Android
     // team. It's generally a symlink to the real SD Card mount point
     // which can be /mnt/sdcard, /mnt/sdcard0, /system/media/sdcard, or
     // other OEM-customized locations. Never rely on these, and always
     // use /sdcard.
     char name_template[] = "/sdcard/gtest_captured_stream.XXXXXX";
 #  else
     char name_template[] = "/tmp/captured_stream.XXXXXX";
 #  endif  // GTEST_OS_LINUX_ANDROID
     const int captured_fd = mkstemp(name_template);
     filename_ = name_template;
 # endif  // GTEST_OS_WINDOWS
     fflush(NULL);
     dup2(captured_fd, fd_);
     close(captured_fd);
   }
 
   ~CapturedStream() {
     remove(filename_.c_str());
   }
 
   std::string GetCapturedString() {
     if (uncaptured_fd_ != -1) {
       // Restores the original stream.
       fflush(NULL);
       dup2(uncaptured_fd_, fd_);
       close(uncaptured_fd_);
       uncaptured_fd_ = -1;
     }
 
     FILE* const file = posix::FOpen(filename_.c_str(), "r");
     const std::string content = ReadEntireFile(file);
     posix::FClose(file);
     return content;
   }
 
  private:
   const int fd_;  // A stream to capture.
   int uncaptured_fd_;
   // Name of the temporary file holding the captured output.
   ::std::string filename_;
 
   GTEST_DISALLOW_COPY_AND_ASSIGN_(CapturedStream);
 };
 
 GTEST_DISABLE_MSC_WARNINGS_POP_()
 
 static CapturedStream* g_captured_stderr = NULL;
 static CapturedStream* g_captured_stdout = NULL;
 
 // Starts capturing an output stream (stdout/stderr).
 static void CaptureStream(int fd, const char* stream_name,
                           CapturedStream** stream) {
   if (*stream != NULL) {
     GTEST_LOG_(FATAL) << "Only one " << stream_name
                       << " capturer can exist at a time.";
   }
   *stream = new CapturedStream(fd);
 }
 
 // Stops capturing the output stream and returns the captured string.
 static std::string GetCapturedStream(CapturedStream** captured_stream) {
   const std::string content = (*captured_stream)->GetCapturedString();
 
   delete *captured_stream;
   *captured_stream = NULL;
 
   return content;
 }
 
 // Starts capturing stdout.
 void CaptureStdout() {
   CaptureStream(kStdOutFileno, "stdout", &g_captured_stdout);
 }
 
 // Starts capturing stderr.
 void CaptureStderr() {
   CaptureStream(kStdErrFileno, "stderr", &g_captured_stderr);
 }
 
 // Stops capturing stdout and returns the captured string.
 std::string GetCapturedStdout() {
   return GetCapturedStream(&g_captured_stdout);
 }
 
 // Stops capturing stderr and returns the captured string.
 std::string GetCapturedStderr() {
   return GetCapturedStream(&g_captured_stderr);
 }
 
 #endif  // GTEST_HAS_STREAM_REDIRECTION
 
 
 
 
 
 size_t GetFileSize(FILE* file) {
   fseek(file, 0, SEEK_END);
   return static_cast<size_t>(ftell(file));
 }
 
 std::string ReadEntireFile(FILE* file) {
   const size_t file_size = GetFileSize(file);
   char* const buffer = new char[file_size];
 
   size_t bytes_last_read = 0;  // # of bytes read in the last fread()
   size_t bytes_read = 0;       // # of bytes read so far
 
   fseek(file, 0, SEEK_SET);
 
   // Keeps reading the file until we cannot read further or the
   // pre-determined file size is reached.
   do {
     bytes_last_read = fread(buffer+bytes_read, 1, file_size-bytes_read, file);
     bytes_read += bytes_last_read;
   } while (bytes_last_read > 0 && bytes_read < file_size);
 
   const std::string content(buffer, bytes_read);
   delete[] buffer;
 
   return content;
 }
 
 #if GTEST_HAS_DEATH_TEST
 static const std::vector<std::string>* g_injected_test_argvs = NULL;  // Owned.
 
 std::vector<std::string> GetInjectableArgvs() {
   if (g_injected_test_argvs != NULL) {
     return *g_injected_test_argvs;
   }
   return GetArgvs();
 }
 
 void SetInjectableArgvs(const std::vector<std::string>* new_argvs) {
   if (g_injected_test_argvs != new_argvs) delete g_injected_test_argvs;
   g_injected_test_argvs = new_argvs;
 }
 
 void SetInjectableArgvs(const std::vector<std::string>& new_argvs) {
   SetInjectableArgvs(
       new std::vector<std::string>(new_argvs.begin(), new_argvs.end()));
 }
 
 #if GTEST_HAS_GLOBAL_STRING
 void SetInjectableArgvs(const std::vector< ::string>& new_argvs) {
   SetInjectableArgvs(
       new std::vector<std::string>(new_argvs.begin(), new_argvs.end()));
 }
 #endif  // GTEST_HAS_GLOBAL_STRING
 
 void ClearInjectableArgvs() {
   delete g_injected_test_argvs;
   g_injected_test_argvs = NULL;
 }
 #endif  // GTEST_HAS_DEATH_TEST
 
 #if GTEST_OS_WINDOWS_MOBILE
 namespace posix {
 void Abort() {
   DebugBreak();
   TerminateProcess(GetCurrentProcess(), 1);
 }
 }  // namespace posix
 #endif  // GTEST_OS_WINDOWS_MOBILE
 
 // Returns the name of the environment variable corresponding to the
 // given flag.  For example, FlagToEnvVar("foo") will return
 // "GTEST_FOO" in the open-source version.
 static std::string FlagToEnvVar(const char* flag) {
   const std::string full_flag =
       (Message() << GTEST_FLAG_PREFIX_ << flag).GetString();
 
   Message env_var;
   for (size_t i = 0; i != full_flag.length(); i++) {
     env_var << ToUpper(full_flag.c_str()[i]);
   }
 
   return env_var.GetString();
 }
 
 // Parses 'str' for a 32-bit signed integer.  If successful, writes
 // the result to *value and returns true; otherwise leaves *value
 // unchanged and returns false.
 bool ParseInt32(const Message& src_text, const char* str, Int32* value) {
   // Parses the environment variable as a decimal integer.
   char* end = NULL;
   const long long_value = strtol(str, &end, 10);  // NOLINT
 
   // Has strtol() consumed all characters in the string?
   if (*end != '\0') {
     // No - an invalid character was encountered.
     Message msg;
     msg << "WARNING: " << src_text
         << " is expected to be a 32-bit integer, but actually"
         << " has value \"" << str << "\".\n";
     printf("%s", msg.GetString().c_str());
     fflush(stdout);
     return false;
   }
 
   // Is the parsed value in the range of an Int32?
   const Int32 result = static_cast<Int32>(long_value);
   if (long_value == LONG_MAX || long_value == LONG_MIN ||
       // The parsed value overflows as a long.  (strtol() returns
       // LONG_MAX or LONG_MIN when the input overflows.)
       result != long_value
       // The parsed value overflows as an Int32.
       ) {
     Message msg;
     msg << "WARNING: " << src_text
         << " is expected to be a 32-bit integer, but actually"
         << " has value " << str << ", which overflows.\n";
     printf("%s", msg.GetString().c_str());
     fflush(stdout);
     return false;
   }
 
   *value = result;
   return true;
 }
 
 // Reads and returns the Boolean environment variable corresponding to
 // the given flag; if it's not set, returns default_value.
 //
 // The value is considered true iff it's not "0".
 bool BoolFromGTestEnv(const char* flag, bool default_value) {
 #if defined(GTEST_GET_BOOL_FROM_ENV_)
   return GTEST_GET_BOOL_FROM_ENV_(flag, default_value);
 #else
   const std::string env_var = FlagToEnvVar(flag);
   const char* const string_value = posix::GetEnv(env_var.c_str());
   return string_value == NULL ?
       default_value : strcmp(string_value, "0") != 0;
 #endif  // defined(GTEST_GET_BOOL_FROM_ENV_)
 }
 
 // Reads and returns a 32-bit integer stored in the environment
 // variable corresponding to the given flag; if it isn't set or
 // doesn't represent a valid 32-bit integer, returns default_value.
 Int32 Int32FromGTestEnv(const char* flag, Int32 default_value) {
 #if defined(GTEST_GET_INT32_FROM_ENV_)
   return GTEST_GET_INT32_FROM_ENV_(flag, default_value);
 #else
   const std::string env_var = FlagToEnvVar(flag);
   const char* const string_value = posix::GetEnv(env_var.c_str());
   if (string_value == NULL) {
     // The environment variable is not set.
     return default_value;
   }
 
   Int32 result = default_value;
   if (!ParseInt32(Message() << "Environment variable " << env_var,
                   string_value, &result)) {
     printf("The default value %s is used.\n",
            (Message() << default_value).GetString().c_str());
     fflush(stdout);
     return default_value;
   }
 
   return result;
 #endif  // defined(GTEST_GET_INT32_FROM_ENV_)
 }
 
 // As a special case for the 'output' flag, if GTEST_OUTPUT is not
 // set, we look for XML_OUTPUT_FILE, which is set by the Bazel build
 // system.  The value of XML_OUTPUT_FILE is a filename without the
 // "xml:" prefix of GTEST_OUTPUT.
 // Note that this function is meant to be called at the call site, so it does
 // not check that the flag is 'output'.
 // In essence, this checks the environment variable XML_OUTPUT_FILE; if it is
 // set, we prepend "xml:" to its value, and if it is not set, we return "".
 std::string OutputFlagAlsoCheckEnvVar() {
   std::string default_value_for_output_flag = "";
   const char* xml_output_file_env = posix::GetEnv("XML_OUTPUT_FILE");
   if (NULL != xml_output_file_env) {
     default_value_for_output_flag = std::string("xml:") + xml_output_file_env;
   }
   return default_value_for_output_flag;
 }
 
 // Reads and returns the string environment variable corresponding to
 // the given flag; if it's not set, returns default_value.
 const char* StringFromGTestEnv(const char* flag, const char* default_value) {
 #if defined(GTEST_GET_STRING_FROM_ENV_)
   return GTEST_GET_STRING_FROM_ENV_(flag, default_value);
 #else
   const std::string env_var = FlagToEnvVar(flag);
   const char* const value = posix::GetEnv(env_var.c_str());
   return value == NULL ? default_value : value;
 #endif  // defined(GTEST_GET_STRING_FROM_ENV_)
 }
 
 }  // namespace internal
 }  // namespace testing
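
For orientation, the helpers above are what Google Test's flag definitions consult when a flag is not given on the command line. The sketch below shows that caller pattern; it assumes it is compiled inside Google Test's own sources (where the internal declarations from gtest-port.h are visible), and "my_feature" is a hypothetical flag name used purely for illustration, while "repeat" and "output" mirror existing flags:

    #include <string>

    #include "gtest/internal/gtest-port.h"  // *FromGTestEnv helper declarations.

    // Reads GTEST_MY_FEATURE; any value other than "0" counts as enabled.
    static const bool use_my_feature =
        ::testing::internal::BoolFromGTestEnv("my_feature", false);

    // Reads GTEST_REPEAT; falls back to 1 if the variable is unset or does not
    // hold a valid 32-bit integer.
    static const ::testing::internal::Int32 repeat_count =
        ::testing::internal::Int32FromGTestEnv("repeat", 1);

    // If GTEST_OUTPUT is unset but Bazel exported XML_OUTPUT_FILE=/tmp/report.xml,
    // the default output spec becomes "xml:/tmp/report.xml".
    static const std::string output_default =
        ::testing::internal::OutputFlagAlsoCheckEnvVar();
    static const std::string output_spec =
        ::testing::internal::StringFromGTestEnv("output", output_default.c_str());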
diff --git a/googletest/test/BUILD.bazel b/googletest/test/BUILD.bazel
index 405feee7..a930d65e 100644
--- a/googletest/test/BUILD.bazel
+++ b/googletest/test/BUILD.bazel
@@ -1,418 +1,527 @@
 # Copyright 2017 Google Inc.
 # All Rights Reserved.
 #
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following disclaimer
 # in the documentation and/or other materials provided with the
 # distribution.
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #
 # Author: misterg@google.com (Gennadiy Civil)
 #
 # Bazel BUILD for The Google C++ Testing Framework (Google Test)
 
 licenses(["notice"])
 
 config_setting(
     name = "windows",
     values = {"cpu": "x64_windows"},
 )
 
 config_setting(
     name = "windows_msvc",
     values = {"cpu": "x64_windows_msvc"},
 )
 
 config_setting(
     name = "has_absl",
     values = {"define": "absl=1"},
 )
 
-#on windows exclude gtest-tuple.h and gtest-tuple_test.cc
+# On Windows, exclude gtest-tuple.h and googletest-tuple-test.cc
 cc_test(
     name = "gtest_all_test",
     size = "small",
     srcs = glob(
         include = [
             "gtest-*.cc",
+            "googletest-*.cc",
             "*.h",
             "googletest/include/gtest/**/*.h",
         ],
         exclude = [
             "gtest-unittest-api_test.cc",
-            "gtest-tuple_test.cc",
+            "googletest-tuple-test.cc",
             "googletest/src/gtest-all.cc",
             "gtest_all_test.cc",
             "gtest-death-test_ex_test.cc",
             "gtest-listener_test.cc",
             "gtest-unittest-api_test.cc",
-            "gtest-param-test_test.cc",
+            "googletest-param-test-test.cc",
+            "googletest-catch-exceptions-test_.cc",
+            "googletest-color-test_.cc",
+            "googletest-env-var-test_.cc",
+            "googletest-filter-unittest_.cc",
+            "googletest-break-on-failure-unittest_.cc",
+            "googletest-listener-test.cc",
+            "googletest-output-test_.cc",
+            "googletest-list-tests-unittest_.cc",
+            "googletest-shuffle-test_.cc",
+            "googletest-uninitialized-test_.cc",
+            "googletest-death-test_ex_test.cc",
+            "googletest-throw-on-failure-test_.cc",
+            "googletest-param-test-invalid-name1-test_.cc",
+            "googletest-param-test-invalid-name2-test_.cc",
         ],
     ) + select({
         "//:windows": [],
         "//:windows_msvc": [],
         "//conditions:default": [
-            "gtest-tuple_test.cc",
+            "googletest-tuple-test.cc",
         ],
     }),
     copts = select({
         "//:windows": ["-DGTEST_USE_OWN_TR1_TUPLE=0"],
         "//:windows_msvc": ["-DGTEST_USE_OWN_TR1_TUPLE=0"],
         "//conditions:default": ["-DGTEST_USE_OWN_TR1_TUPLE=1"],
     }),
     includes = [
         "googletest",
         "googletest/include",
         "googletest/include/internal",
         "googletest/test",
     ],
     linkopts = select({
         "//:windows": [],
         "//:windows_msvc": [],
         "//conditions:default": [
             "-pthread",
         ],
     }),
     deps = ["//:gtest_main"],
 )
 
+
+# Tests death tests.
+cc_test(
+    name = "googletest-death-test-test",
+    size = "medium",
+    srcs = ["googletest-death-test-test.cc"],
+    deps = ["//:gtest_main"],
+)
+
+cc_test(
+    name = "gtest_test_macro_stack_footprint_test",
+    size = "small",
+    srcs = ["gtest_test_macro_stack_footprint_test.cc"],
+    deps = ["//:gtest"],
+)
+
 # These googletest tests have their own main()
 cc_test(
-    name = "gtest-listener_test",
+    name = "googletest-listener-test",
     size = "small",
-    srcs = [
-        "gtest-listener_test.cc",
-    ],
-    deps = [
-        "//:gtest",
-    ],
+    srcs = ["googletest-listener-test.cc"],
+    deps = ["//:gtest_main"],
 )
 
 cc_test(
     name = "gtest-unittest-api_test",
     size = "small",
     srcs = [
         "gtest-unittest-api_test.cc",
     ],
     deps = [
         "//:gtest",
     ],
 )
 
 cc_test(
-    name = "gtest-param-test_test",
+    name = "googletest-param-test-test",
     size = "small",
     srcs = [
-        "gtest-param-test2_test.cc",
-        "gtest-param-test_test.cc",
-        "gtest-param-test_test.h",
-    ],
-    deps = [
-        "//:gtest",
+        "googletest-param-test-test.cc",
+        "googletest-param-test-test.h",
+        "googletest-param-test2-test.cc",
     ],
+    deps = ["//:gtest"],
 )
 
 cc_test(
     name = "gtest_unittest",
     size = "small",
     srcs = ["gtest_unittest.cc"],
     args = ["--heap_check=strict"],
     shard_count = 2,
     deps = ["//:gtest_main"],
 )
 
 #  Py tests
 
 py_library(
     name = "gtest_test_utils",
     testonly = 1,
     srcs = ["gtest_test_utils.py"],
 )
 
 cc_binary(
     name = "gtest_help_test_",
     testonly = 1,
     srcs = ["gtest_help_test_.cc"],
     deps = ["//:gtest_main"],
 )
 
 py_test(
     name = "gtest_help_test",
     size = "small",
     srcs = ["gtest_help_test.py"],
     data = [":gtest_help_test_"],
     deps = [":gtest_test_utils"],
 )
 
 cc_binary(
-    name = "gtest_output_test_",
+    name = "googletest-output-test_",
     testonly = 1,
-    srcs = ["gtest_output_test_.cc"],
+    srcs = ["googletest-output-test_.cc"],
     deps = ["//:gtest"],
 )
 
+
 py_test(
-    name = "gtest_output_test",
+    name = "googletest-output-test",
     size = "small",
-    srcs = ["gtest_output_test.py"],
+    srcs = ["googletest-output-test.py"],
     args = select({
         ":has_absl": [],
         "//conditions:default": ["--no_stacktrace_support"],
     }),
     data = [
-        "gtest_output_test_golden_lin.txt",
-        ":gtest_output_test_",
+        "googletest-output-test-golden-lin.txt",
+        ":googletest-output-test_",
     ],
     deps = [":gtest_test_utils"],
 )
 
 cc_binary(
-    name = "gtest_color_test_",
+    name = "googletest-color-test_",
     testonly = 1,
-    srcs = ["gtest_color_test_.cc"],
+    srcs = ["googletest-color-test_.cc"],
     deps = ["//:gtest"],
 )
 
 py_test(
-    name = "gtest_color_test",
+    name = "googletest-color-test",
     size = "small",
-    srcs = ["gtest_color_test.py"],
-    data = [":gtest_color_test_"],
+    srcs = ["googletest-color-test.py"],
+    data = [":googletest-color-test_"],
     deps = [":gtest_test_utils"],
 )
 
 cc_binary(
-    name = "gtest_env_var_test_",
+    name = "googletest-env-var-test_",
     testonly = 1,
-    srcs = ["gtest_env_var_test_.cc"],
+    srcs = ["googletest-env-var-test_.cc"],
     deps = ["//:gtest"],
 )
 
 py_test(
-    name = "gtest_env_var_test",
-    size = "small",
-    srcs = ["gtest_env_var_test.py"],
-    data = [":gtest_env_var_test_"],
+    name = "googletest-env-var-test",
+    size = "medium",
+    srcs = ["googletest-env-var-test.py"],
+    data = [":googletest-env-var-test_"],
     deps = [":gtest_test_utils"],
 )
 
 cc_binary(
-    name = "gtest_filter_unittest_",
+    name = "googletest-filter-unittest_",
     testonly = 1,
-    srcs = ["gtest_filter_unittest_.cc"],
+    srcs = ["googletest-filter-unittest_.cc"],
     deps = ["//:gtest"],
 )
 
 py_test(
-    name = "gtest_filter_unittest",
-    size = "small",
-    srcs = ["gtest_filter_unittest.py"],
-    data = [":gtest_filter_unittest_"],
+    name = "googletest-filter-unittest",
+    size = "medium",
+    srcs = ["googletest-filter-unittest.py"],
+    data = [":googletest-filter-unittest_"],
     deps = [":gtest_test_utils"],
 )
 
+
 cc_binary(
-    name = "gtest_break_on_failure_unittest_",
+    name = "googletest-break-on-failure-unittest_",
     testonly = 1,
-    srcs = ["gtest_break_on_failure_unittest_.cc"],
+    srcs = ["googletest-break-on-failure-unittest_.cc"],
     deps = ["//:gtest"],
 )
 
+
+
 py_test(
-    name = "gtest_break_on_failure_unittest",
+    name = "googletest-break-on-failure-unittest",
     size = "small",
-    srcs = ["gtest_break_on_failure_unittest.py"],
-    data = [":gtest_break_on_failure_unittest_"],
+    srcs = ["googletest-break-on-failure-unittest.py"],
+    data = [":googletest-break-on-failure-unittest_"],
     deps = [":gtest_test_utils"],
 )
 
+
 cc_test(
     name = "gtest_assert_by_exception_test",
     size = "small",
     srcs = ["gtest_assert_by_exception_test.cc"],
     deps = ["//:gtest"],
 )
 
+
+
 cc_binary(
-    name = "gtest_throw_on_failure_test_",
+    name = "googletest-throw-on-failure-test_",
     testonly = 1,
-    srcs = ["gtest_throw_on_failure_test_.cc"],
+    srcs = ["googletest-throw-on-failure-test_.cc"],
     deps = ["//:gtest"],
 )
 
 py_test(
-    name = "gtest_throw_on_failure_test",
+    name = "googletest-throw-on-failure-test",
     size = "small",
-    srcs = ["gtest_throw_on_failure_test.py"],
-    data = [":gtest_throw_on_failure_test_"],
+    srcs = ["googletest-throw-on-failure-test.py"],
+    data = [":googletest-throw-on-failure-test_"],
     deps = [":gtest_test_utils"],
 )
 
+
 cc_binary(
-    name = "gtest_list_tests_unittest_",
+    name = "googletest-list-tests-unittest_",
     testonly = 1,
-    srcs = ["gtest_list_tests_unittest_.cc"],
+    srcs = ["googletest-list-tests-unittest_.cc"],
     deps = ["//:gtest"],
 )
 
 py_test(
-    name = "gtest_list_tests_unittest",
+    name = "googletest-list-tests-unittest",
     size = "small",
-    srcs = ["gtest_list_tests_unittest.py"],
-    data = [":gtest_list_tests_unittest_"],
+    srcs = ["googletest-list-tests-unittest.py"],
+    data = [":googletest-list-tests-unittest_"],
     deps = [":gtest_test_utils"],
 )
 
 cc_binary(
-    name = "gtest_shuffle_test_",
-    srcs = ["gtest_shuffle_test_.cc"],
+    name = "googletest-shuffle-test_",
+    srcs = ["googletest-shuffle-test_.cc"],
     deps = ["//:gtest"],
 )
 
 py_test(
-    name = "gtest_shuffle_test",
+    name = "googletest-shuffle-test",
     size = "small",
-    srcs = ["gtest_shuffle_test.py"],
-    data = [":gtest_shuffle_test_"],
+    srcs = ["googletest-shuffle-test.py"],
+    data = [":googletest-shuffle-test_"],
     deps = [":gtest_test_utils"],
 )
 
 cc_binary(
-    name = "gtest_catch_exceptions_no_ex_test_",
+    name = "googletest-catch-exceptions-no-ex-test_",
     testonly = 1,
-    srcs = ["gtest_catch_exceptions_test_.cc"],
+    srcs = ["googletest-catch-exceptions-test_.cc"],
     deps = ["//:gtest_main"],
 )
 
 cc_binary(
-    name = "gtest_catch_exceptions_ex_test_",
+    name = "googletest-catch-exceptions-ex-test_",
     testonly = 1,
-    srcs = ["gtest_catch_exceptions_test_.cc"],
+    srcs = ["googletest-catch-exceptions-test_.cc"],
     copts = ["-fexceptions"],
     deps = ["//:gtest_main"],
 )
 
 py_test(
-    name = "gtest_catch_exceptions_test",
+    name = "googletest-catch-exceptions-test",
     size = "small",
-    srcs = ["gtest_catch_exceptions_test.py"],
+    srcs = ["googletest-catch-exceptions-test.py"],
     data = [
-        ":gtest_catch_exceptions_ex_test_",
-        ":gtest_catch_exceptions_no_ex_test_",
+        ":googletest-catch-exceptions-ex-test_",
+        ":googletest-catch-exceptions-no-ex-test_",
     ],
     deps = [":gtest_test_utils"],
 )
 
 cc_binary(
     name = "gtest_xml_output_unittest_",
     testonly = 1,
     srcs = ["gtest_xml_output_unittest_.cc"],
     deps = ["//:gtest"],
 )
 
 cc_test(
     name = "gtest_no_test_unittest",
     size = "small",
     srcs = ["gtest_no_test_unittest.cc"],
     deps = ["//:gtest"],
 )
 
 py_test(
     name = "gtest_xml_output_unittest",
     size = "small",
     srcs = [
         "gtest_xml_output_unittest.py",
         "gtest_xml_test_utils.py",
     ],
     args = select({
         ":has_absl": [],
         "//conditions:default": ["--no_stacktrace_support"],
     }),
     data = [
         # We invoke gtest_no_test_unittest to verify the XML output
         # when the test program contains no test definition.
         ":gtest_no_test_unittest",
         ":gtest_xml_output_unittest_",
     ],
     deps = [":gtest_test_utils"],
 )
 
 cc_binary(
     name = "gtest_xml_outfile1_test_",
     testonly = 1,
     srcs = ["gtest_xml_outfile1_test_.cc"],
     deps = ["//:gtest_main"],
 )
 
 cc_binary(
     name = "gtest_xml_outfile2_test_",
     testonly = 1,
     srcs = ["gtest_xml_outfile2_test_.cc"],
     deps = ["//:gtest_main"],
 )
 
 py_test(
     name = "gtest_xml_outfiles_test",
     size = "small",
     srcs = [
         "gtest_xml_outfiles_test.py",
         "gtest_xml_test_utils.py",
     ],
     data = [
         ":gtest_xml_outfile1_test_",
         ":gtest_xml_outfile2_test_",
     ],
     deps = [":gtest_test_utils"],
 )
 
 cc_binary(
-    name = "gtest_uninitialized_test_",
+    name = "googletest-uninitialized-test_",
     testonly = 1,
-    srcs = ["gtest_uninitialized_test_.cc"],
+    srcs = ["googletest-uninitialized-test_.cc"],
     deps = ["//:gtest"],
 )
 
 py_test(
-    name = "gtest_uninitialized_test",
+    name = "googletest-uninitialized-test",
     size = "medium",
-    srcs = ["gtest_uninitialized_test.py"],
-    data = [":gtest_uninitialized_test_"],
+    srcs = ["googletest-uninitialized-test.py"],
+    data = ["googletest-uninitialized-test_"],
     deps = [":gtest_test_utils"],
 )
 
 cc_binary(
     name = "gtest_testbridge_test_",
     testonly = 1,
     srcs = ["gtest_testbridge_test_.cc"],
     deps = ["//:gtest_main"],
 )
 
 # Tests that filtering via testbridge works
 py_test(
     name = "gtest_testbridge_test",
     size = "small",
     srcs = ["gtest_testbridge_test.py"],
     data = [":gtest_testbridge_test_"],
     deps = [":gtest_test_utils"],
 )
+
+
+py_test(
+    name = "googletest-json-outfiles-test",
+    size = "small",
+    srcs = [
+        "googletest-json-outfiles-test.py",
+        "gtest_json_test_utils.py",
+    ],
+    data = [
+        ":gtest_xml_outfile1_test_",
+        ":gtest_xml_outfile2_test_",
+    ],
+    deps = [":gtest_test_utils"],
+)
+
+py_test(
+    name = "googletest-json-output-unittest",
+    size = "medium",
+    srcs = [
+        "googletest-json-output-unittest.py",
+        "gtest_json_test_utils.py",
+    ],
+    data = [
+        # We invoke gtest_no_test_unittest to verify the JSON output
+        # when the test program contains no test definition.
+        ":gtest_no_test_unittest",
+        ":gtest_xml_output_unittest_",
+    ],
+    args = select({
+        ":has_absl": [],
+        "//conditions:default": ["--no_stacktrace_support"],
+    }),
+    deps = [":gtest_test_utils"],
+)
+# Verifies interaction of death tests and exceptions.
+cc_test(
+    name = "googletest-death-test_ex_catch_test",
+    size = "medium",
+    srcs = ["googletest-death-test_ex_test.cc"],
+    copts = ["-fexceptions"],
+    defines = ["GTEST_ENABLE_CATCH_EXCEPTIONS_=1"],
+    deps = ["//:gtest"],
+)
+
+cc_binary(
+    name = "googletest-param-test-invalid-name1-test_",
+    testonly = 1,
+    srcs = ["googletest-param-test-invalid-name1-test_.cc"],
+    deps = ["//:gtest"],
+)
+
+cc_binary(
+    name = "googletest-param-test-invalid-name2-test_",
+    testonly = 1,
+    srcs = ["googletest-param-test-invalid-name2-test_.cc"],
+    deps = ["//:gtest"],
+)
+
+py_test(
+    name = "googletest-param-test-invalid-name1-test",
+    size = "small",
+    srcs = ["googletest-param-test-invalid-name1-test.py"],
+    data = [":googletest-param-test-invalid-name1-test_"],
+    deps = [":gtest_test_utils"],
+)
+
+py_test(
+    name = "googletest-param-test-invalid-name2-test",
+    size = "small",
+    srcs = ["googletest-param-test-invalid-name2-test.py"],
+    data = [":googletest-param-test-invalid-name2-test_"],
+    deps = [":gtest_test_utils"],
+)
diff --git a/googletest/test/gtest_break_on_failure_unittest.py b/googletest/test/googletest-break-on-failure-unittest.py
similarity index 96%
rename from googletest/test/gtest_break_on_failure_unittest.py
rename to googletest/test/googletest-break-on-failure-unittest.py
index 16e19dbc..cd77547d 100755
--- a/googletest/test/gtest_break_on_failure_unittest.py
+++ b/googletest/test/googletest-break-on-failure-unittest.py
@@ -1,210 +1,210 @@
 #!/usr/bin/env python
 #
 # Copyright 2006, Google Inc.
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following disclaimer
 # in the documentation and/or other materials provided with the
 # distribution.
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 """Unit test for Google Test's break-on-failure mode.
 
 A user can ask Google Test to seg-fault when an assertion fails, using
 either the GTEST_BREAK_ON_FAILURE environment variable or the
 --gtest_break_on_failure flag.  This script tests such functionality
-by invoking gtest_break_on_failure_unittest_ (a program written with
+by invoking googletest-break-on-failure-unittest_ (a program written with
 Google Test) with different environments and command line flags.
 """
 
 __author__ = 'wan@google.com (Zhanyong Wan)'
 
 import os
 import gtest_test_utils
 
 # Constants.
 
 IS_WINDOWS = os.name == 'nt'
 
 # The environment variable for enabling/disabling the break-on-failure mode.
 BREAK_ON_FAILURE_ENV_VAR = 'GTEST_BREAK_ON_FAILURE'
 
 # The command line flag for enabling/disabling the break-on-failure mode.
 BREAK_ON_FAILURE_FLAG = 'gtest_break_on_failure'
 
 # The environment variable for enabling/disabling the throw-on-failure mode.
 THROW_ON_FAILURE_ENV_VAR = 'GTEST_THROW_ON_FAILURE'
 
 # The environment variable for enabling/disabling the catch-exceptions mode.
 CATCH_EXCEPTIONS_ENV_VAR = 'GTEST_CATCH_EXCEPTIONS'
 
-# Path to the gtest_break_on_failure_unittest_ program.
+# Path to the googletest-break-on-failure-unittest_ program.
 EXE_PATH = gtest_test_utils.GetTestExecutablePath(
-    'gtest_break_on_failure_unittest_')
+    'googletest-break-on-failure-unittest_')
 
 
 environ = gtest_test_utils.environ
 SetEnvVar = gtest_test_utils.SetEnvVar
 
 # Tests in this file run a Google-Test-based test program and expect it
 # to terminate prematurely.  Therefore they are incompatible with
 # the premature-exit-file protocol by design.  Unset the
 # premature-exit filepath to prevent Google Test from creating
 # the file.
 SetEnvVar(gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)
 
 
 def Run(command):
   """Runs a command; returns 1 if it was killed by a signal, or 0 otherwise."""
 
   p = gtest_test_utils.Subprocess(command, env=environ)
   if p.terminated_by_signal:
     return 1
   else:
     return 0
 
 
 # The tests.
 
 
 class GTestBreakOnFailureUnitTest(gtest_test_utils.TestCase):
   """Tests using the GTEST_BREAK_ON_FAILURE environment variable or
   the --gtest_break_on_failure flag to turn assertion failures into
   segmentation faults.
   """
 
   def RunAndVerify(self, env_var_value, flag_value, expect_seg_fault):
-    """Runs gtest_break_on_failure_unittest_ and verifies that it does
+    """Runs googletest-break-on-failure-unittest_ and verifies that it does
     (or does not) have a seg-fault.
 
     Args:
       env_var_value:    value of the GTEST_BREAK_ON_FAILURE environment
                         variable; None if the variable should be unset.
       flag_value:       value of the --gtest_break_on_failure flag;
                         None if the flag should not be present.
       expect_seg_fault: 1 if the program is expected to generate a seg-fault;
                         0 otherwise.
     """
 
     SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, env_var_value)
 
     if env_var_value is None:
       env_var_value_msg = ' is not set'
     else:
       env_var_value_msg = '=' + env_var_value
 
     if flag_value is None:
       flag = ''
     elif flag_value == '0':
       flag = '--%s=0' % BREAK_ON_FAILURE_FLAG
     else:
       flag = '--%s' % BREAK_ON_FAILURE_FLAG
 
     command = [EXE_PATH]
     if flag:
       command.append(flag)
 
     if expect_seg_fault:
       should_or_not = 'should'
     else:
       should_or_not = 'should not'
 
     has_seg_fault = Run(command)
 
     SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, None)
 
     msg = ('when %s%s, an assertion failure in "%s" %s cause a seg-fault.' %
            (BREAK_ON_FAILURE_ENV_VAR, env_var_value_msg, ' '.join(command),
             should_or_not))
     self.assert_(has_seg_fault == expect_seg_fault, msg)
 
   def testDefaultBehavior(self):
     """Tests the behavior of the default mode."""
 
     self.RunAndVerify(env_var_value=None,
                       flag_value=None,
                       expect_seg_fault=0)
 
   def testEnvVar(self):
     """Tests using the GTEST_BREAK_ON_FAILURE environment variable."""
 
     self.RunAndVerify(env_var_value='0',
                       flag_value=None,
                       expect_seg_fault=0)
     self.RunAndVerify(env_var_value='1',
                       flag_value=None,
                       expect_seg_fault=1)
 
   def testFlag(self):
     """Tests using the --gtest_break_on_failure flag."""
 
     self.RunAndVerify(env_var_value=None,
                       flag_value='0',
                       expect_seg_fault=0)
     self.RunAndVerify(env_var_value=None,
                       flag_value='1',
                       expect_seg_fault=1)
 
   def testFlagOverridesEnvVar(self):
     """Tests that the flag overrides the environment variable."""
 
     self.RunAndVerify(env_var_value='0',
                       flag_value='0',
                       expect_seg_fault=0)
     self.RunAndVerify(env_var_value='0',
                       flag_value='1',
                       expect_seg_fault=1)
     self.RunAndVerify(env_var_value='1',
                       flag_value='0',
                       expect_seg_fault=0)
     self.RunAndVerify(env_var_value='1',
                       flag_value='1',
                       expect_seg_fault=1)
 
   def testBreakOnFailureOverridesThrowOnFailure(self):
     """Tests that gtest_break_on_failure overrides gtest_throw_on_failure."""
 
     SetEnvVar(THROW_ON_FAILURE_ENV_VAR, '1')
     try:
       self.RunAndVerify(env_var_value=None,
                         flag_value='1',
                         expect_seg_fault=1)
     finally:
       SetEnvVar(THROW_ON_FAILURE_ENV_VAR, None)
 
   if IS_WINDOWS:
     def testCatchExceptionsDoesNotInterfere(self):
       """Tests that gtest_catch_exceptions doesn't interfere."""
 
       SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, '1')
       try:
         self.RunAndVerify(env_var_value='1',
                           flag_value='1',
                           expect_seg_fault=1)
       finally:
         SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, None)
 
 
 if __name__ == '__main__':
   gtest_test_utils.Main()
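
To picture the mode this script exercises from the test program's side, here is a minimal sketch (the test name and file are hypothetical; link against gtest_main and run the binary with --gtest_break_on_failure, or with GTEST_BREAK_ON_FAILURE=1 in the environment, to see the failure turn into a debugger break / seg-fault rather than an ordinary failure report):

    #include "gtest/gtest.h"

    // A deliberately failing assertion.  In the default mode it is reported
    // normally and the program exits with a non-zero status; in
    // break-on-failure mode the same failure triggers a break / seg-fault.
    TEST(BreakOnFailureDemo, FailsOnPurpose) {
      EXPECT_EQ(1, 2);
    }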
diff --git a/googletest/test/gtest_break_on_failure_unittest_.cc b/googletest/test/googletest-break-on-failure-unittest_.cc
similarity index 100%
rename from googletest/test/gtest_break_on_failure_unittest_.cc
rename to googletest/test/googletest-break-on-failure-unittest_.cc
diff --git a/googletest/test/gtest_catch_exceptions_test.py b/googletest/test/googletest-catch-exceptions-test.py
similarity index 96%
rename from googletest/test/gtest_catch_exceptions_test.py
rename to googletest/test/googletest-catch-exceptions-test.py
index 760f914f..69dbadf3 100755
--- a/googletest/test/gtest_catch_exceptions_test.py
+++ b/googletest/test/googletest-catch-exceptions-test.py
@@ -1,235 +1,235 @@
 #!/usr/bin/env python
 #
 # Copyright 2010 Google Inc.  All Rights Reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following disclaimer
 # in the documentation and/or other materials provided with the
 # distribution.
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 """Tests Google Test's exception catching behavior.
 
-This script invokes gtest_catch_exceptions_test_ and
-gtest_catch_exceptions_ex_test_ (programs written with
+This script invokes googletest-catch-exceptions-test_ and
+googletest-catch-exceptions-ex-test_ (programs written with
 Google Test) and verifies their output.
 """
 
 __author__ = 'vladl@google.com (Vlad Losev)'
 
 import gtest_test_utils
 
 # Constants.
 FLAG_PREFIX = '--gtest_'
 LIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests'
 NO_CATCH_EXCEPTIONS_FLAG = FLAG_PREFIX + 'catch_exceptions=0'
 FILTER_FLAG = FLAG_PREFIX + 'filter'
 
-# Path to the gtest_catch_exceptions_ex_test_ binary, compiled with
+# Path to the googletest-catch-exceptions-ex-test_ binary, compiled with
 # exceptions enabled.
 EX_EXE_PATH = gtest_test_utils.GetTestExecutablePath(
-    'gtest_catch_exceptions_ex_test_')
+    'googletest-catch-exceptions-ex-test_')
 
-# Path to the gtest_catch_exceptions_test_ binary, compiled with
+# Path to the googletest-catch-exceptions-test_ binary, compiled with
 # exceptions disabled.
 EXE_PATH = gtest_test_utils.GetTestExecutablePath(
-    'gtest_catch_exceptions_no_ex_test_')
+    'googletest-catch-exceptions-no-ex-test_')
 
 environ = gtest_test_utils.environ
 SetEnvVar = gtest_test_utils.SetEnvVar
 
 # Tests in this file run a Google-Test-based test program and expect it
 # to terminate prematurely.  Therefore they are incompatible with
 # the premature-exit-file protocol by design.  Unset the
 # premature-exit filepath to prevent Google Test from creating
 # the file.
 SetEnvVar(gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)
 
 TEST_LIST = gtest_test_utils.Subprocess(
     [EXE_PATH, LIST_TESTS_FLAG], env=environ).output
 
 SUPPORTS_SEH_EXCEPTIONS = 'ThrowsSehException' in TEST_LIST
 
 if SUPPORTS_SEH_EXCEPTIONS:
   BINARY_OUTPUT = gtest_test_utils.Subprocess([EXE_PATH], env=environ).output
 
 EX_BINARY_OUTPUT = gtest_test_utils.Subprocess(
     [EX_EXE_PATH], env=environ).output
 
 
 # The tests.
 if SUPPORTS_SEH_EXCEPTIONS:
   # pylint:disable-msg=C6302
   class CatchSehExceptionsTest(gtest_test_utils.TestCase):
     """Tests exception-catching behavior."""
 
 
     def TestSehExceptions(self, test_output):
       self.assert_('SEH exception with code 0x2a thrown '
                    'in the test fixture\'s constructor'
                    in test_output)
       self.assert_('SEH exception with code 0x2a thrown '
                    'in the test fixture\'s destructor'
                    in test_output)
       self.assert_('SEH exception with code 0x2a thrown in SetUpTestCase()'
                    in test_output)
       self.assert_('SEH exception with code 0x2a thrown in TearDownTestCase()'
                    in test_output)
       self.assert_('SEH exception with code 0x2a thrown in SetUp()'
                    in test_output)
       self.assert_('SEH exception with code 0x2a thrown in TearDown()'
                    in test_output)
       self.assert_('SEH exception with code 0x2a thrown in the test body'
                    in test_output)
 
     def testCatchesSehExceptionsWithCxxExceptionsEnabled(self):
       self.TestSehExceptions(EX_BINARY_OUTPUT)
 
     def testCatchesSehExceptionsWithCxxExceptionsDisabled(self):
       self.TestSehExceptions(BINARY_OUTPUT)
 
 
 class CatchCxxExceptionsTest(gtest_test_utils.TestCase):
   """Tests C++ exception-catching behavior.
 
      Tests in this test case verify that:
      * C++ exceptions are caught and logged as C++ (not SEH) exceptions
      * Exceptions thrown affect the remainder of the test workflow in the
        expected manner.
   """
 
   def testCatchesCxxExceptionsInFixtureConstructor(self):
     self.assert_('C++ exception with description '
                  '"Standard C++ exception" thrown '
                  'in the test fixture\'s constructor'
                  in EX_BINARY_OUTPUT)
     self.assert_('unexpected' not in EX_BINARY_OUTPUT,
                  'This failure belongs in this test only if '
                  '"CxxExceptionInConstructorTest" (no quotes) '
                  'appears on the same line as words "called unexpectedly"')
 
   if ('CxxExceptionInDestructorTest.ThrowsExceptionInDestructor' in
       EX_BINARY_OUTPUT):
 
     def testCatchesCxxExceptionsInFixtureDestructor(self):
       self.assert_('C++ exception with description '
                    '"Standard C++ exception" thrown '
                    'in the test fixture\'s destructor'
                    in EX_BINARY_OUTPUT)
       self.assert_('CxxExceptionInDestructorTest::TearDownTestCase() '
                    'called as expected.'
                    in EX_BINARY_OUTPUT)
 
   def testCatchesCxxExceptionsInSetUpTestCase(self):
     self.assert_('C++ exception with description "Standard C++ exception"'
                  ' thrown in SetUpTestCase()'
                  in EX_BINARY_OUTPUT)
     self.assert_('CxxExceptionInConstructorTest::TearDownTestCase() '
                  'called as expected.'
                  in EX_BINARY_OUTPUT)
     self.assert_('CxxExceptionInSetUpTestCaseTest constructor '
                  'called as expected.'
                  in EX_BINARY_OUTPUT)
     self.assert_('CxxExceptionInSetUpTestCaseTest destructor '
                  'called as expected.'
                  in EX_BINARY_OUTPUT)
     self.assert_('CxxExceptionInSetUpTestCaseTest::SetUp() '
                  'called as expected.'
                  in EX_BINARY_OUTPUT)
     self.assert_('CxxExceptionInSetUpTestCaseTest::TearDown() '
                  'called as expected.'
                  in EX_BINARY_OUTPUT)
     self.assert_('CxxExceptionInSetUpTestCaseTest test body '
                  'called as expected.'
                  in EX_BINARY_OUTPUT)
 
   def testCatchesCxxExceptionsInTearDownTestCase(self):
     self.assert_('C++ exception with description "Standard C++ exception"'
                  ' thrown in TearDownTestCase()'
                  in EX_BINARY_OUTPUT)
 
   def testCatchesCxxExceptionsInSetUp(self):
     self.assert_('C++ exception with description "Standard C++ exception"'
                  ' thrown in SetUp()'
                  in EX_BINARY_OUTPUT)
     self.assert_('CxxExceptionInSetUpTest::TearDownTestCase() '
                  'called as expected.'
                  in EX_BINARY_OUTPUT)
     self.assert_('CxxExceptionInSetUpTest destructor '
                  'called as expected.'
                  in EX_BINARY_OUTPUT)
     self.assert_('CxxExceptionInSetUpTest::TearDown() '
                  'called as expected.'
                  in EX_BINARY_OUTPUT)
     self.assert_('unexpected' not in EX_BINARY_OUTPUT,
                  'This failure belongs in this test only if '
                  '"CxxExceptionInSetUpTest" (no quotes) '
                  'appears on the same line as words "called unexpectedly"')
 
   def testCatchesCxxExceptionsInTearDown(self):
     self.assert_('C++ exception with description "Standard C++ exception"'
                  ' thrown in TearDown()'
                  in EX_BINARY_OUTPUT)
     self.assert_('CxxExceptionInTearDownTest::TearDownTestCase() '
                  'called as expected.'
                  in EX_BINARY_OUTPUT)
     self.assert_('CxxExceptionInTearDownTest destructor '
                  'called as expected.'
                  in EX_BINARY_OUTPUT)
 
   def testCatchesCxxExceptionsInTestBody(self):
     self.assert_('C++ exception with description "Standard C++ exception"'
                  ' thrown in the test body'
                  in EX_BINARY_OUTPUT)
     self.assert_('CxxExceptionInTestBodyTest::TearDownTestCase() '
                  'called as expected.'
                  in EX_BINARY_OUTPUT)
     self.assert_('CxxExceptionInTestBodyTest destructor '
                  'called as expected.'
                  in EX_BINARY_OUTPUT)
     self.assert_('CxxExceptionInTestBodyTest::TearDown() '
                  'called as expected.'
                  in EX_BINARY_OUTPUT)
 
   def testCatchesNonStdCxxExceptions(self):
     self.assert_('Unknown C++ exception thrown in the test body'
                  in EX_BINARY_OUTPUT)
 
   def testUnhandledCxxExceptionsAbortTheProgram(self):
     # Filters out SEH exception tests on Windows. Unhandled SEH exceptions
     # cause tests to show pop-up windows there.
     FILTER_OUT_SEH_TESTS_FLAG = FILTER_FLAG + '=-*Seh*'
     # By default, Google Test doesn't catch the exceptions.
     uncaught_exceptions_ex_binary_output = gtest_test_utils.Subprocess(
         [EX_EXE_PATH,
          NO_CATCH_EXCEPTIONS_FLAG,
          FILTER_OUT_SEH_TESTS_FLAG],
         env=environ).output
 
     self.assert_('Unhandled C++ exception terminating the program'
                  in uncaught_exceptions_ex_binary_output)
     self.assert_('unexpected' not in uncaught_exceptions_ex_binary_output)
 
 
 if __name__ == '__main__':
   gtest_test_utils.Main()
diff --git a/googletest/test/gtest_catch_exceptions_test_.cc b/googletest/test/googletest-catch-exceptions-test_.cc
similarity index 99%
rename from googletest/test/gtest_catch_exceptions_test_.cc
rename to googletest/test/googletest-catch-exceptions-test_.cc
index c6d953c0..cb018f91 100644
--- a/googletest/test/gtest_catch_exceptions_test_.cc
+++ b/googletest/test/googletest-catch-exceptions-test_.cc
@@ -1,311 +1,311 @@
 // Copyright 2010, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 // Author: vladl@google.com (Vlad Losev)
 //
 // Tests for Google Test itself. Tests in this file throw C++ or SEH
-// exceptions, and the output is verified by gtest_catch_exceptions_test.py.
+// exceptions, and the output is verified by googletest-catch-exceptions-test.py.
 
 #include "gtest/gtest.h"
 
 #include <stdio.h>  // NOLINT
 #include <stdlib.h>  // For exit().
 
 #if GTEST_HAS_SEH
 # include <windows.h>
 #endif
 
 #if GTEST_HAS_EXCEPTIONS
 # include <exception>  // For set_terminate().
 # include <stdexcept>
 #endif
 
 using testing::Test;
 
 #if GTEST_HAS_SEH
 
 class SehExceptionInConstructorTest : public Test {
  public:
   SehExceptionInConstructorTest() { RaiseException(42, 0, 0, NULL); }
 };
 
 TEST_F(SehExceptionInConstructorTest, ThrowsExceptionInConstructor) {}
 
 class SehExceptionInDestructorTest : public Test {
  public:
   ~SehExceptionInDestructorTest() { RaiseException(42, 0, 0, NULL); }
 };
 
 TEST_F(SehExceptionInDestructorTest, ThrowsExceptionInDestructor) {}
 
 class SehExceptionInSetUpTestCaseTest : public Test {
  public:
   static void SetUpTestCase() { RaiseException(42, 0, 0, NULL); }
 };
 
 TEST_F(SehExceptionInSetUpTestCaseTest, ThrowsExceptionInSetUpTestCase) {}
 
 class SehExceptionInTearDownTestCaseTest : public Test {
  public:
   static void TearDownTestCase() { RaiseException(42, 0, 0, NULL); }
 };
 
 TEST_F(SehExceptionInTearDownTestCaseTest, ThrowsExceptionInTearDownTestCase) {}
 
 class SehExceptionInSetUpTest : public Test {
  protected:
   virtual void SetUp() { RaiseException(42, 0, 0, NULL); }
 };
 
 TEST_F(SehExceptionInSetUpTest, ThrowsExceptionInSetUp) {}
 
 class SehExceptionInTearDownTest : public Test {
  protected:
   virtual void TearDown() { RaiseException(42, 0, 0, NULL); }
 };
 
 TEST_F(SehExceptionInTearDownTest, ThrowsExceptionInTearDown) {}
 
 TEST(SehExceptionTest, ThrowsSehException) {
   RaiseException(42, 0, 0, NULL);
 }
 
 #endif  // GTEST_HAS_SEH
 
 #if GTEST_HAS_EXCEPTIONS
 
 class CxxExceptionInConstructorTest : public Test {
  public:
   CxxExceptionInConstructorTest() {
     // Without this macro VC++ complains about unreachable code at the end of
     // the constructor.
     GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(
         throw std::runtime_error("Standard C++ exception"));
   }
 
   static void TearDownTestCase() {
     printf("%s",
            "CxxExceptionInConstructorTest::TearDownTestCase() "
            "called as expected.\n");
   }
 
  protected:
   ~CxxExceptionInConstructorTest() {
     ADD_FAILURE() << "CxxExceptionInConstructorTest destructor "
                   << "called unexpectedly.";
   }
 
   virtual void SetUp() {
     ADD_FAILURE() << "CxxExceptionInConstructorTest::SetUp() "
                   << "called unexpectedly.";
   }
 
   virtual void TearDown() {
     ADD_FAILURE() << "CxxExceptionInConstructorTest::TearDown() "
                   << "called unexpectedly.";
   }
 };
 
 TEST_F(CxxExceptionInConstructorTest, ThrowsExceptionInConstructor) {
   ADD_FAILURE() << "CxxExceptionInConstructorTest test body "
                 << "called unexpectedly.";
 }
 
 // Exceptions in destructors are not supported in C++11.
 #if !GTEST_LANG_CXX11
 class CxxExceptionInDestructorTest : public Test {
  public:
   static void TearDownTestCase() {
     printf("%s",
            "CxxExceptionInDestructorTest::TearDownTestCase() "
            "called as expected.\n");
   }
 
  protected:
   ~CxxExceptionInDestructorTest() {
     GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(
         throw std::runtime_error("Standard C++ exception"));
   }
 };
 
 TEST_F(CxxExceptionInDestructorTest, ThrowsExceptionInDestructor) {}
 #endif  // C++11 mode
 
 class CxxExceptionInSetUpTestCaseTest : public Test {
  public:
   CxxExceptionInSetUpTestCaseTest() {
     printf("%s",
            "CxxExceptionInSetUpTestCaseTest constructor "
            "called as expected.\n");
   }
 
   static void SetUpTestCase() {
     throw std::runtime_error("Standard C++ exception");
   }
 
   static void TearDownTestCase() {
     printf("%s",
            "CxxExceptionInSetUpTestCaseTest::TearDownTestCase() "
            "called as expected.\n");
   }
 
  protected:
   ~CxxExceptionInSetUpTestCaseTest() {
     printf("%s",
            "CxxExceptionInSetUpTestCaseTest destructor "
            "called as expected.\n");
   }
 
   virtual void SetUp() {
     printf("%s",
            "CxxExceptionInSetUpTestCaseTest::SetUp() "
            "called as expected.\n");
   }
 
   virtual void TearDown() {
     printf("%s",
            "CxxExceptionInSetUpTestCaseTest::TearDown() "
            "called as expected.\n");
   }
 };
 
 TEST_F(CxxExceptionInSetUpTestCaseTest, ThrowsExceptionInSetUpTestCase) {
   printf("%s",
          "CxxExceptionInSetUpTestCaseTest test body "
          "called as expected.\n");
 }
 
 class CxxExceptionInTearDownTestCaseTest : public Test {
  public:
   static void TearDownTestCase() {
     throw std::runtime_error("Standard C++ exception");
   }
 };
 
 TEST_F(CxxExceptionInTearDownTestCaseTest, ThrowsExceptionInTearDownTestCase) {}
 
 class CxxExceptionInSetUpTest : public Test {
  public:
   static void TearDownTestCase() {
     printf("%s",
            "CxxExceptionInSetUpTest::TearDownTestCase() "
            "called as expected.\n");
   }
 
  protected:
   ~CxxExceptionInSetUpTest() {
     printf("%s",
            "CxxExceptionInSetUpTest destructor "
            "called as expected.\n");
   }
 
   virtual void SetUp() { throw std::runtime_error("Standard C++ exception"); }
 
   virtual void TearDown() {
     printf("%s",
            "CxxExceptionInSetUpTest::TearDown() "
            "called as expected.\n");
   }
 };
 
 TEST_F(CxxExceptionInSetUpTest, ThrowsExceptionInSetUp) {
   ADD_FAILURE() << "CxxExceptionInSetUpTest test body "
                 << "called unexpectedly.";
 }
 
 class CxxExceptionInTearDownTest : public Test {
  public:
   static void TearDownTestCase() {
     printf("%s",
            "CxxExceptionInTearDownTest::TearDownTestCase() "
            "called as expected.\n");
   }
 
  protected:
   ~CxxExceptionInTearDownTest() {
     printf("%s",
            "CxxExceptionInTearDownTest destructor "
            "called as expected.\n");
   }
 
   virtual void TearDown() {
     throw std::runtime_error("Standard C++ exception");
   }
 };
 
 TEST_F(CxxExceptionInTearDownTest, ThrowsExceptionInTearDown) {}
 
 class CxxExceptionInTestBodyTest : public Test {
  public:
   static void TearDownTestCase() {
     printf("%s",
            "CxxExceptionInTestBodyTest::TearDownTestCase() "
            "called as expected.\n");
   }
 
  protected:
   ~CxxExceptionInTestBodyTest() {
     printf("%s",
            "CxxExceptionInTestBodyTest destructor "
            "called as expected.\n");
   }
 
   virtual void TearDown() {
     printf("%s",
            "CxxExceptionInTestBodyTest::TearDown() "
            "called as expected.\n");
   }
 };
 
 TEST_F(CxxExceptionInTestBodyTest, ThrowsStdCxxException) {
   throw std::runtime_error("Standard C++ exception");
 }
 
 TEST(CxxExceptionTest, ThrowsNonStdCxxException) {
   throw "C-string";
 }
 
 // This terminate handler aborts the program using exit() rather than abort().
 // This avoids showing pop-ups on Windows systems and core dumps on Unix-like
 // ones.
 void TerminateHandler() {
   fprintf(stderr, "%s\n", "Unhandled C++ exception terminating the program.");
   fflush(NULL);
   exit(3);
 }
 
 #endif  // GTEST_HAS_EXCEPTIONS
 
 int main(int argc, char** argv) {
 #if GTEST_HAS_EXCEPTIONS
   std::set_terminate(&TerminateHandler);
 #endif
   testing::InitGoogleTest(&argc, argv);
   return RUN_ALL_TESTS();
 }
diff --git a/googletest/test/gtest_color_test.py b/googletest/test/googletest-color-test.py
similarity index 97%
rename from googletest/test/gtest_color_test.py
rename to googletest/test/googletest-color-test.py
index 49b8ed2d..875d4785 100755
--- a/googletest/test/gtest_color_test.py
+++ b/googletest/test/googletest-color-test.py
@@ -1,129 +1,129 @@
 #!/usr/bin/env python
 #
 # Copyright 2008, Google Inc.
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following disclaimer
 # in the documentation and/or other materials provided with the
 # distribution.
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 """Verifies that Google Test correctly determines whether to use colors."""
 
 __author__ = 'wan@google.com (Zhanyong Wan)'
 
 import os
 import gtest_test_utils
 
 IS_WINDOWS = os.name == 'nt'
 
 COLOR_ENV_VAR = 'GTEST_COLOR'
 COLOR_FLAG = 'gtest_color'
-COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_color_test_')
+COMMAND = gtest_test_utils.GetTestExecutablePath('googletest-color-test_')
 
 
 def SetEnvVar(env_var, value):
   """Sets the env variable to 'value'; unsets it when 'value' is None."""
 
   if value is not None:
     os.environ[env_var] = value
   elif env_var in os.environ:
     del os.environ[env_var]
 
 
 def UsesColor(term, color_env_var, color_flag):
-  """Runs gtest_color_test_ and returns its exit code."""
+  """Runs googletest-color-test_ and returns its exit code."""
 
   SetEnvVar('TERM', term)
   SetEnvVar(COLOR_ENV_VAR, color_env_var)
 
   if color_flag is None:
     args = []
   else:
     args = ['--%s=%s' % (COLOR_FLAG, color_flag)]
   p = gtest_test_utils.Subprocess([COMMAND] + args)
   return not p.exited or p.exit_code
 
 
 class GTestColorTest(gtest_test_utils.TestCase):
   def testNoEnvVarNoFlag(self):
     """Tests the case when there's neither GTEST_COLOR nor --gtest_color."""
 
     if not IS_WINDOWS:
       self.assert_(not UsesColor('dumb', None, None))
       self.assert_(not UsesColor('emacs', None, None))
       self.assert_(not UsesColor('xterm-mono', None, None))
       self.assert_(not UsesColor('unknown', None, None))
       self.assert_(not UsesColor(None, None, None))
     self.assert_(UsesColor('linux', None, None))
     self.assert_(UsesColor('cygwin', None, None))
     self.assert_(UsesColor('xterm', None, None))
     self.assert_(UsesColor('xterm-color', None, None))
     self.assert_(UsesColor('xterm-256color', None, None))
 
   def testFlagOnly(self):
     """Tests the case when there's --gtest_color but not GTEST_COLOR."""
 
     self.assert_(not UsesColor('dumb', None, 'no'))
     self.assert_(not UsesColor('xterm-color', None, 'no'))
     if not IS_WINDOWS:
       self.assert_(not UsesColor('emacs', None, 'auto'))
     self.assert_(UsesColor('xterm', None, 'auto'))
     self.assert_(UsesColor('dumb', None, 'yes'))
     self.assert_(UsesColor('xterm', None, 'yes'))
 
   def testEnvVarOnly(self):
     """Tests the case when there's GTEST_COLOR but not --gtest_color."""
 
     self.assert_(not UsesColor('dumb', 'no', None))
     self.assert_(not UsesColor('xterm-color', 'no', None))
     if not IS_WINDOWS:
       self.assert_(not UsesColor('dumb', 'auto', None))
     self.assert_(UsesColor('xterm-color', 'auto', None))
     self.assert_(UsesColor('dumb', 'yes', None))
     self.assert_(UsesColor('xterm-color', 'yes', None))
 
   def testEnvVarAndFlag(self):
     """Tests the case when there are both GTEST_COLOR and --gtest_color."""
 
     self.assert_(not UsesColor('xterm-color', 'no', 'no'))
     self.assert_(UsesColor('dumb', 'no', 'yes'))
     self.assert_(UsesColor('xterm-color', 'no', 'auto'))
 
   def testAliasesOfYesAndNo(self):
     """Tests using aliases in specifying --gtest_color."""
 
     self.assert_(UsesColor('dumb', None, 'true'))
     self.assert_(UsesColor('dumb', None, 'YES'))
     self.assert_(UsesColor('dumb', None, 'T'))
     self.assert_(UsesColor('dumb', None, '1'))
 
     self.assert_(not UsesColor('xterm', None, 'f'))
     self.assert_(not UsesColor('xterm', None, 'false'))
     self.assert_(not UsesColor('xterm', None, '0'))
     self.assert_(not UsesColor('xterm', None, 'unknown'))
 
 
 if __name__ == '__main__':
   gtest_test_utils.Main()
diff --git a/googletest/test/gtest_color_test_.cc b/googletest/test/googletest-color-test_.cc
similarity index 100%
rename from googletest/test/gtest_color_test_.cc
rename to googletest/test/googletest-color-test_.cc
diff --git a/googletest/test/gtest-death-test_test.cc b/googletest/test/googletest-death-test-test.cc
similarity index 99%
rename from googletest/test/gtest-death-test_test.cc
rename to googletest/test/googletest-death-test-test.cc
index 37261cb6..9d8f13c6 100644
--- a/googletest/test/gtest-death-test_test.cc
+++ b/googletest/test/googletest-death-test-test.cc
@@ -1,1424 +1,1424 @@
 // Copyright 2005, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 // Author: wan@google.com (Zhanyong Wan)
 //
 // Tests for death tests.
 
 #include "gtest/gtest-death-test.h"
 #include "gtest/gtest.h"
 #include "gtest/internal/gtest-filepath.h"
 
 using testing::internal::AlwaysFalse;
 using testing::internal::AlwaysTrue;
 
 #if GTEST_HAS_DEATH_TEST
 
 # if GTEST_OS_WINDOWS
 #  include <direct.h>          // For chdir().
 # else
 #  include <unistd.h>
 #  include <sys/wait.h>        // For waitpid.
 # endif  // GTEST_OS_WINDOWS
 
 # include <limits.h>
 # include <signal.h>
 # include <stdio.h>
 
 # if GTEST_OS_LINUX
 #  include <sys/time.h>
 # endif  // GTEST_OS_LINUX
 
 # include "gtest/gtest-spi.h"
 # include "src/gtest-internal-inl.h"
 
 namespace posix = ::testing::internal::posix;
 
 using testing::Message;
 using testing::internal::DeathTest;
 using testing::internal::DeathTestFactory;
 using testing::internal::FilePath;
 using testing::internal::GetLastErrnoDescription;
 using testing::internal::GetUnitTestImpl;
 using testing::internal::InDeathTestChild;
 using testing::internal::ParseNaturalNumber;
 
 namespace testing {
 namespace internal {
 
 // A helper class whose objects replace the death test factory for a
 // single UnitTest object during their lifetimes.
 class ReplaceDeathTestFactory {
  public:
   explicit ReplaceDeathTestFactory(DeathTestFactory* new_factory)
       : unit_test_impl_(GetUnitTestImpl()) {
     old_factory_ = unit_test_impl_->death_test_factory_.release();
     unit_test_impl_->death_test_factory_.reset(new_factory);
   }
 
   ~ReplaceDeathTestFactory() {
     unit_test_impl_->death_test_factory_.release();
     unit_test_impl_->death_test_factory_.reset(old_factory_);
   }
  private:
   // Prevents copying ReplaceDeathTestFactory objects.
   ReplaceDeathTestFactory(const ReplaceDeathTestFactory&);
   void operator=(const ReplaceDeathTestFactory&);
 
   UnitTestImpl* unit_test_impl_;
   DeathTestFactory* old_factory_;
 };
 
 }  // namespace internal
 }  // namespace testing
 
 void DieWithMessage(const ::std::string& message) {
   fprintf(stderr, "%s", message.c_str());
   fflush(stderr);  // Make sure the text is printed before the process exits.
 
   // We call _exit() instead of exit(), as the former is a direct
   // system call and thus safer in the presence of threads.  exit()
   // will invoke user-defined exit-hooks, which may do dangerous
   // things that conflict with death tests.
   //
   // Some compilers can recognize that _exit() never returns and issue the
   // 'unreachable code' warning for code following this function, unless
   // fooled by a fake condition.
   if (AlwaysTrue())
     _exit(1);
 }
 
 void DieInside(const ::std::string& function) {
   DieWithMessage("death inside " + function + "().");
 }
 
 // Tests that death tests work.
 
 class TestForDeathTest : public testing::Test {
  protected:
   TestForDeathTest() : original_dir_(FilePath::GetCurrentDir()) {}
 
   virtual ~TestForDeathTest() {
     posix::ChDir(original_dir_.c_str());
   }
 
   // A static member function that's expected to die.
   static void StaticMemberFunction() { DieInside("StaticMemberFunction"); }
 
   // A method of the test fixture that may die.
   void MemberFunction() {
     if (should_die_)
       DieInside("MemberFunction");
   }
 
   // True iff MemberFunction() should die.
   bool should_die_;
   const FilePath original_dir_;
 };
 
 // A class with a member function that may die.
 class MayDie {
  public:
   explicit MayDie(bool should_die) : should_die_(should_die) {}
 
   // A member function that may die.
   void MemberFunction() const {
     if (should_die_)
       DieInside("MayDie::MemberFunction");
   }
 
  private:
   // True iff MemberFunction() should die.
   bool should_die_;
 };
 
 // A global function that's expected to die.
 void GlobalFunction() { DieInside("GlobalFunction"); }
 
 // A non-void function that's expected to die.
 int NonVoidFunction() {
   DieInside("NonVoidFunction");
   return 1;
 }
 
 // A unary function that may die.
 void DieIf(bool should_die) {
   if (should_die)
     DieInside("DieIf");
 }
 
 // A binary function that may die.
 bool DieIfLessThan(int x, int y) {
   if (x < y) {
     DieInside("DieIfLessThan");
   }
   return true;
 }
 
 // Tests that ASSERT_DEATH can be used outside a TEST, TEST_F, or test fixture.
 void DeathTestSubroutine() {
   EXPECT_DEATH(GlobalFunction(), "death.*GlobalFunction");
   ASSERT_DEATH(GlobalFunction(), "death.*GlobalFunction");
 }
 
 // Death in dbg, not opt.
 int DieInDebugElse12(int* sideeffect) {
   if (sideeffect) *sideeffect = 12;
 
 # ifndef NDEBUG
 
   DieInside("DieInDebugElse12");
 
 # endif  // NDEBUG
 
   return 12;
 }
 
 # if GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA
 
 // Tests the ExitedWithCode predicate.
 TEST(ExitStatusPredicateTest, ExitedWithCode) {
   // On Windows, the process's exit code is the same as its exit status,
   // so the predicate simply compares its input with its parameter.
   EXPECT_TRUE(testing::ExitedWithCode(0)(0));
   EXPECT_TRUE(testing::ExitedWithCode(1)(1));
   EXPECT_TRUE(testing::ExitedWithCode(42)(42));
   EXPECT_FALSE(testing::ExitedWithCode(0)(1));
   EXPECT_FALSE(testing::ExitedWithCode(1)(0));
 }
 
 # else
 
 // Returns the exit status of a process that calls _exit(2) with a
 // given exit code.  This is a helper function for the
 // ExitStatusPredicateTest test suite.
 static int NormalExitStatus(int exit_code) {
   pid_t child_pid = fork();
   if (child_pid == 0) {
     _exit(exit_code);
   }
   int status;
   waitpid(child_pid, &status, 0);
   return status;
 }
 
 // Returns the exit status of a process that raises a given signal.
 // If the signal does not cause the process to die, then it returns
 // instead the exit status of a process that exits normally with exit
 // code 1.  This is a helper function for the ExitStatusPredicateTest
 // test suite.
 static int KilledExitStatus(int signum) {
   pid_t child_pid = fork();
   if (child_pid == 0) {
     raise(signum);
     _exit(1);
   }
   int status;
   waitpid(child_pid, &status, 0);
   return status;
 }
 
 // Tests the ExitedWithCode predicate.
 TEST(ExitStatusPredicateTest, ExitedWithCode) {
   const int status0  = NormalExitStatus(0);
   const int status1  = NormalExitStatus(1);
   const int status42 = NormalExitStatus(42);
   const testing::ExitedWithCode pred0(0);
   const testing::ExitedWithCode pred1(1);
   const testing::ExitedWithCode pred42(42);
   EXPECT_PRED1(pred0,  status0);
   EXPECT_PRED1(pred1,  status1);
   EXPECT_PRED1(pred42, status42);
   EXPECT_FALSE(pred0(status1));
   EXPECT_FALSE(pred42(status0));
   EXPECT_FALSE(pred1(status42));
 }
 
 // Tests the KilledBySignal predicate.
 TEST(ExitStatusPredicateTest, KilledBySignal) {
   const int status_segv = KilledExitStatus(SIGSEGV);
   const int status_kill = KilledExitStatus(SIGKILL);
   const testing::KilledBySignal pred_segv(SIGSEGV);
   const testing::KilledBySignal pred_kill(SIGKILL);
   EXPECT_PRED1(pred_segv, status_segv);
   EXPECT_PRED1(pred_kill, status_kill);
   EXPECT_FALSE(pred_segv(status_kill));
   EXPECT_FALSE(pred_kill(status_segv));
 }
 
 # endif  // GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA
 
 // Tests that the death test macros expand to code which may or may not
 // be followed by operator<<, and that in either case the complete text
 // comprises only a single C++ statement.
 TEST_F(TestForDeathTest, SingleStatement) {
   if (AlwaysFalse())
     // This would fail if executed; this is a compilation test only
     ASSERT_DEATH(return, "");
 
   if (AlwaysTrue())
     EXPECT_DEATH(_exit(1), "");
   else
     // This empty "else" branch is meant to ensure that EXPECT_DEATH
     // doesn't expand into an "if" statement without an "else"
     ;
 
   if (AlwaysFalse())
     ASSERT_DEATH(return, "") << "did not die";
 
   if (AlwaysFalse())
     ;
   else
     EXPECT_DEATH(_exit(1), "") << 1 << 2 << 3;
 }
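 
 // A minimal illustrative sketch (hypothetical user code, not exercised
 // here): the single-statement property checked above is what makes the
 // macros safe to use as the sole statement of a brace-less if/else:
 //
 //   if (ShouldCheckDeath())                 // hypothetical predicate
 //     EXPECT_DEATH(DoSomething(), "msg");   // expands to one statement
 //   else
 //     HandleOtherCase();                    // still binds to the outer "if"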
 
 void DieWithEmbeddedNul() {
   fprintf(stderr, "Hello%cmy null world.\n", '\0');
   fflush(stderr);
   _exit(1);
 }
 
 # if GTEST_USES_PCRE
 
 // Tests that EXPECT_DEATH and ASSERT_DEATH work when the error
 // message has a NUL character in it.
 TEST_F(TestForDeathTest, EmbeddedNulInMessage) {
   EXPECT_DEATH(DieWithEmbeddedNul(), "my null world");
   ASSERT_DEATH(DieWithEmbeddedNul(), "my null world");
 }
 
 # endif  // GTEST_USES_PCRE
 
 // Tests that death test macros expand to code which interacts well with switch
 // statements.
 TEST_F(TestForDeathTest, SwitchStatement) {
   // Microsoft compiler usually complains about switch statements without
   // case labels. We suppress that warning for this test.
   GTEST_DISABLE_MSC_WARNINGS_PUSH_(4065)
 
   switch (0)
     default:
       ASSERT_DEATH(_exit(1), "") << "exit in default switch handler";
 
   switch (0)
     case 0:
       EXPECT_DEATH(_exit(1), "") << "exit in switch case";
 
   GTEST_DISABLE_MSC_WARNINGS_POP_()
 }
 
 // Tests that a static member function can be used in a "fast" style
 // death test.
 TEST_F(TestForDeathTest, StaticMemberFunctionFastStyle) {
   testing::GTEST_FLAG(death_test_style) = "fast";
   ASSERT_DEATH(StaticMemberFunction(), "death.*StaticMember");
 }
 
 // Tests that a method of the test fixture can be used in a "fast"
 // style death test.
 TEST_F(TestForDeathTest, MemberFunctionFastStyle) {
   testing::GTEST_FLAG(death_test_style) = "fast";
   should_die_ = true;
   EXPECT_DEATH(MemberFunction(), "inside.*MemberFunction");
 }
 
 void ChangeToRootDir() { posix::ChDir(GTEST_PATH_SEP_); }
 
 // Tests that death tests work even if the current directory has been
 // changed.
 TEST_F(TestForDeathTest, FastDeathTestInChangedDir) {
   testing::GTEST_FLAG(death_test_style) = "fast";
 
   ChangeToRootDir();
   EXPECT_EXIT(_exit(1), testing::ExitedWithCode(1), "");
 
   ChangeToRootDir();
   ASSERT_DEATH(_exit(1), "");
 }
 
 # if GTEST_OS_LINUX
 void SigprofAction(int, siginfo_t*, void*) { /* no op */ }
 
 // Sets SIGPROF action and ITIMER_PROF timer (interval: 1us).
 void SetSigprofActionAndTimer() {
   struct itimerval timer;
   timer.it_interval.tv_sec = 0;
   timer.it_interval.tv_usec = 1;
   timer.it_value = timer.it_interval;
   ASSERT_EQ(0, setitimer(ITIMER_PROF, &timer, NULL));
   struct sigaction signal_action;
   memset(&signal_action, 0, sizeof(signal_action));
   sigemptyset(&signal_action.sa_mask);
   signal_action.sa_sigaction = SigprofAction;
   signal_action.sa_flags = SA_RESTART | SA_SIGINFO;
   ASSERT_EQ(0, sigaction(SIGPROF, &signal_action, NULL));
 }
 
 // Disables ITIMER_PROF timer and ignores SIGPROF signal.
 void DisableSigprofActionAndTimer(struct sigaction* old_signal_action) {
   struct itimerval timer;
   timer.it_interval.tv_sec = 0;
   timer.it_interval.tv_usec = 0;
   timer.it_value = timer.it_interval;
   ASSERT_EQ(0, setitimer(ITIMER_PROF, &timer, NULL));
   struct sigaction signal_action;
   memset(&signal_action, 0, sizeof(signal_action));
   sigemptyset(&signal_action.sa_mask);
   signal_action.sa_handler = SIG_IGN;
   ASSERT_EQ(0, sigaction(SIGPROF, &signal_action, old_signal_action));
 }
 
 // Tests that death tests work when SIGPROF handler and timer are set.
 TEST_F(TestForDeathTest, FastSigprofActionSet) {
   testing::GTEST_FLAG(death_test_style) = "fast";
   SetSigprofActionAndTimer();
   EXPECT_DEATH(_exit(1), "");
   struct sigaction old_signal_action;
   DisableSigprofActionAndTimer(&old_signal_action);
   EXPECT_TRUE(old_signal_action.sa_sigaction == SigprofAction);
 }
 
 TEST_F(TestForDeathTest, ThreadSafeSigprofActionSet) {
   testing::GTEST_FLAG(death_test_style) = "threadsafe";
   SetSigprofActionAndTimer();
   EXPECT_DEATH(_exit(1), "");
   struct sigaction old_signal_action;
   DisableSigprofActionAndTimer(&old_signal_action);
   EXPECT_TRUE(old_signal_action.sa_sigaction == SigprofAction);
 }
 # endif  // GTEST_OS_LINUX
 
 // Repeats a representative sample of death tests in the "threadsafe" style:
 
 TEST_F(TestForDeathTest, StaticMemberFunctionThreadsafeStyle) {
   testing::GTEST_FLAG(death_test_style) = "threadsafe";
   ASSERT_DEATH(StaticMemberFunction(), "death.*StaticMember");
 }
 
 TEST_F(TestForDeathTest, MemberFunctionThreadsafeStyle) {
   testing::GTEST_FLAG(death_test_style) = "threadsafe";
   should_die_ = true;
   EXPECT_DEATH(MemberFunction(), "inside.*MemberFunction");
 }
 
 TEST_F(TestForDeathTest, ThreadsafeDeathTestInLoop) {
   testing::GTEST_FLAG(death_test_style) = "threadsafe";
 
   for (int i = 0; i < 3; ++i)
     EXPECT_EXIT(_exit(i), testing::ExitedWithCode(i), "") << ": i = " << i;
 }
 
 TEST_F(TestForDeathTest, ThreadsafeDeathTestInChangedDir) {
   testing::GTEST_FLAG(death_test_style) = "threadsafe";
 
   ChangeToRootDir();
   EXPECT_EXIT(_exit(1), testing::ExitedWithCode(1), "");
 
   ChangeToRootDir();
   ASSERT_DEATH(_exit(1), "");
 }
 
 TEST_F(TestForDeathTest, MixedStyles) {
   testing::GTEST_FLAG(death_test_style) = "threadsafe";
   EXPECT_DEATH(_exit(1), "");
   testing::GTEST_FLAG(death_test_style) = "fast";
   EXPECT_DEATH(_exit(1), "");
 }
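 
 // A minimal illustrative sketch (hypothetical usage, not part of this
 // test): instead of setting the flag in code as above, the style can be
 // chosen when launching a test binary, e.g.:
 //
 //   ./my_test --gtest_death_test_style=threadsafe
 //   GTEST_DEATH_TEST_STYLE=threadsafe ./my_test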
 
 # if GTEST_HAS_CLONE && GTEST_HAS_PTHREAD
 
 namespace {
 
 bool pthread_flag;
 
 void SetPthreadFlag() {
   pthread_flag = true;
 }
 
 }  // namespace
 
 TEST_F(TestForDeathTest, DoesNotExecuteAtforkHooks) {
   if (!testing::GTEST_FLAG(death_test_use_fork)) {
     testing::GTEST_FLAG(death_test_style) = "threadsafe";
     pthread_flag = false;
     ASSERT_EQ(0, pthread_atfork(&SetPthreadFlag, NULL, NULL));
     ASSERT_DEATH(_exit(1), "");
     ASSERT_FALSE(pthread_flag);
   }
 }
 
 # endif  // GTEST_HAS_CLONE && GTEST_HAS_PTHREAD
 
 // Tests that a method of another class can be used in a death test.
 TEST_F(TestForDeathTest, MethodOfAnotherClass) {
   const MayDie x(true);
   ASSERT_DEATH(x.MemberFunction(), "MayDie\\:\\:MemberFunction");
 }
 
 // Tests that a global function can be used in a death test.
 TEST_F(TestForDeathTest, GlobalFunction) {
   EXPECT_DEATH(GlobalFunction(), "GlobalFunction");
 }
 
 // Tests that any value convertible to an RE works as a second
 // argument to EXPECT_DEATH.
 TEST_F(TestForDeathTest, AcceptsAnythingConvertibleToRE) {
   static const char regex_c_str[] = "GlobalFunction";
   EXPECT_DEATH(GlobalFunction(), regex_c_str);
 
   const testing::internal::RE regex(regex_c_str);
   EXPECT_DEATH(GlobalFunction(), regex);
 
 # if GTEST_HAS_GLOBAL_STRING
 
   const ::string regex_str(regex_c_str);
   EXPECT_DEATH(GlobalFunction(), regex_str);
 
 # endif  // GTEST_HAS_GLOBAL_STRING
 
 # if !GTEST_USES_PCRE
 
   const ::std::string regex_std_str(regex_c_str);
   EXPECT_DEATH(GlobalFunction(), regex_std_str);
 
 # endif  // !GTEST_USES_PCRE
 }
 
 // Tests that a non-void function can be used in a death test.
 TEST_F(TestForDeathTest, NonVoidFunction) {
   ASSERT_DEATH(NonVoidFunction(), "NonVoidFunction");
 }
 
 // Tests that functions that take parameter(s) can be used in a death test.
 TEST_F(TestForDeathTest, FunctionWithParameter) {
   EXPECT_DEATH(DieIf(true), "DieIf\\(\\)");
   EXPECT_DEATH(DieIfLessThan(2, 3), "DieIfLessThan");
 }
 
 // Tests that ASSERT_DEATH can be used outside a TEST, TEST_F, or test fixture.
 TEST_F(TestForDeathTest, OutsideFixture) {
   DeathTestSubroutine();
 }
 
 // Tests that death tests can be done inside a loop.
 TEST_F(TestForDeathTest, InsideLoop) {
   for (int i = 0; i < 5; i++) {
     EXPECT_DEATH(DieIfLessThan(-1, i), "DieIfLessThan") << "where i == " << i;
   }
 }
 
 // Tests that a compound statement can be used in a death test.
 TEST_F(TestForDeathTest, CompoundStatement) {
   EXPECT_DEATH({  // NOLINT
     const int x = 2;
     const int y = x + 1;
     DieIfLessThan(x, y);
   },
   "DieIfLessThan");
 }
 
 // Tests that code that doesn't die causes a death test to fail.
 TEST_F(TestForDeathTest, DoesNotDie) {
   EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(DieIf(false), "DieIf"),
                           "failed to die");
 }
 
 // Tests that a death test fails when the error message isn't expected.
 TEST_F(TestForDeathTest, ErrorMessageMismatch) {
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_DEATH(DieIf(true), "DieIfLessThan") << "End of death test message.";
   }, "died but not with expected error");
 }
 
 // On exit, *aborted will be true iff the EXPECT_DEATH() statement
 // aborted the function.
 void ExpectDeathTestHelper(bool* aborted) {
   *aborted = true;
   EXPECT_DEATH(DieIf(false), "DieIf");  // This assertion should fail.
   *aborted = false;
 }
 
 // Tests that EXPECT_DEATH doesn't abort the test on failure.
 TEST_F(TestForDeathTest, EXPECT_DEATH) {
   bool aborted = true;
   EXPECT_NONFATAL_FAILURE(ExpectDeathTestHelper(&aborted),
                           "failed to die");
   EXPECT_FALSE(aborted);
 }
 
 // Tests that ASSERT_DEATH does abort the test on failure.
 TEST_F(TestForDeathTest, ASSERT_DEATH) {
   static bool aborted;
   EXPECT_FATAL_FAILURE({  // NOLINT
     aborted = true;
     ASSERT_DEATH(DieIf(false), "DieIf");  // This assertion should fail.
     aborted = false;
   }, "failed to die");
   EXPECT_TRUE(aborted);
 }
 
 // Tests that EXPECT_DEATH evaluates the arguments exactly once.
 TEST_F(TestForDeathTest, SingleEvaluation) {
   int x = 3;
   EXPECT_DEATH(DieIf((++x) == 4), "DieIf");
 
   const char* regex = "DieIf";
   const char* regex_save = regex;
   EXPECT_DEATH(DieIfLessThan(3, 4), regex++);
   EXPECT_EQ(regex_save + 1, regex);
 }
 
 // Tests that run-away death tests are reported as failures.
 TEST_F(TestForDeathTest, RunawayIsFailure) {
   EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(static_cast<void>(0), "Foo"),
                           "failed to die.");
 }
 
 // Tests that death tests report executing 'return' in the statement as
 // a failure.
 TEST_F(TestForDeathTest, ReturnIsFailure) {
   EXPECT_FATAL_FAILURE(ASSERT_DEATH(return, "Bar"),
                        "illegal return in test statement.");
 }
 
 // Tests that EXPECT_DEBUG_DEATH works as expected, that is, you can stream a
 // message to it, and in debug mode it:
 // 1. Asserts on death.
 // 2. Has no side effect.
 //
 // And in opt mode, it:
 // 1.  Has side effects but does not assert.
 TEST_F(TestForDeathTest, TestExpectDebugDeath) {
   int sideeffect = 0;
 
   // Put the regex in a local variable to make sure we don't get an "unused"
   // warning in opt mode.
   const char* regex = "death.*DieInDebugElse12";
 
   EXPECT_DEBUG_DEATH(DieInDebugElse12(&sideeffect), regex)
       << "Must accept a streamed message";
 
 # ifdef NDEBUG
 
   // Checks that the assignment occurs in opt mode (sideeffect).
   EXPECT_EQ(12, sideeffect);
 
 # else
 
   // Checks that the assignment does not occur in dbg mode (no sideeffect).
   EXPECT_EQ(0, sideeffect);
 
 # endif
 }
 
 // Tests that ASSERT_DEBUG_DEATH works as expected, that is, you can stream a
 // message to it, and in debug mode it:
 // 1. Asserts on death.
 // 2. Has no side effect.
 //
 // And in opt mode, it:
 // 1.  Has side effects but does not assert.
 TEST_F(TestForDeathTest, TestAssertDebugDeath) {
   int sideeffect = 0;
 
   ASSERT_DEBUG_DEATH(DieInDebugElse12(&sideeffect), "death.*DieInDebugElse12")
       << "Must accept a streamed message";
 
 # ifdef NDEBUG
 
   // Checks that the assignment occurs in opt mode (sideeffect).
   EXPECT_EQ(12, sideeffect);
 
 # else
 
   // Checks that the assignment does not occur in dbg mode (no sideeffect).
   EXPECT_EQ(0, sideeffect);
 
 # endif
 }
 
 # ifndef NDEBUG
 
 void ExpectDebugDeathHelper(bool* aborted) {
   *aborted = true;
   EXPECT_DEBUG_DEATH(return, "") << "This is expected to fail.";
   *aborted = false;
 }
 
 #  if GTEST_OS_WINDOWS
 TEST(PopUpDeathTest, DoesNotShowPopUpOnAbort) {
   printf("This test should be considered failing if it shows "
          "any pop-up dialogs.\n");
   fflush(stdout);
 
   EXPECT_DEATH({
     testing::GTEST_FLAG(catch_exceptions) = false;
     abort();
   }, "");
 }
 #  endif  // GTEST_OS_WINDOWS
 
 // Tests that EXPECT_DEBUG_DEATH in debug mode does not abort
 // the function.
 TEST_F(TestForDeathTest, ExpectDebugDeathDoesNotAbort) {
   bool aborted = true;
   EXPECT_NONFATAL_FAILURE(ExpectDebugDeathHelper(&aborted), "");
   EXPECT_FALSE(aborted);
 }
 
 void AssertDebugDeathHelper(bool* aborted) {
   *aborted = true;
   GTEST_LOG_(INFO) << "Before ASSERT_DEBUG_DEATH";
   ASSERT_DEBUG_DEATH(GTEST_LOG_(INFO) << "In ASSERT_DEBUG_DEATH"; return, "")
       << "This is expected to fail.";
   GTEST_LOG_(INFO) << "After ASSERT_DEBUG_DEATH";
   *aborted = false;
 }
 
 // Tests that ASSERT_DEBUG_DEATH in debug mode aborts the function on
 // failure.
 TEST_F(TestForDeathTest, AssertDebugDeathAborts) {
   static bool aborted;
   aborted = false;
   EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), "");
   EXPECT_TRUE(aborted);
 }
 
 TEST_F(TestForDeathTest, AssertDebugDeathAborts2) {
   static bool aborted;
   aborted = false;
   EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), "");
   EXPECT_TRUE(aborted);
 }
 
 TEST_F(TestForDeathTest, AssertDebugDeathAborts3) {
   static bool aborted;
   aborted = false;
   EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), "");
   EXPECT_TRUE(aborted);
 }
 
 TEST_F(TestForDeathTest, AssertDebugDeathAborts4) {
   static bool aborted;
   aborted = false;
   EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), "");
   EXPECT_TRUE(aborted);
 }
 
 TEST_F(TestForDeathTest, AssertDebugDeathAborts5) {
   static bool aborted;
   aborted = false;
   EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), "");
   EXPECT_TRUE(aborted);
 }
 
 TEST_F(TestForDeathTest, AssertDebugDeathAborts6) {
   static bool aborted;
   aborted = false;
   EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), "");
   EXPECT_TRUE(aborted);
 }
 
 TEST_F(TestForDeathTest, AssertDebugDeathAborts7) {
   static bool aborted;
   aborted = false;
   EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), "");
   EXPECT_TRUE(aborted);
 }
 
 TEST_F(TestForDeathTest, AssertDebugDeathAborts8) {
   static bool aborted;
   aborted = false;
   EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), "");
   EXPECT_TRUE(aborted);
 }
 
 TEST_F(TestForDeathTest, AssertDebugDeathAborts9) {
   static bool aborted;
   aborted = false;
   EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), "");
   EXPECT_TRUE(aborted);
 }
 
 TEST_F(TestForDeathTest, AssertDebugDeathAborts10) {
   static bool aborted;
   aborted = false;
   EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), "");
   EXPECT_TRUE(aborted);
 }
 
 # endif  // NDEBUG
 
 // Tests the *_EXIT family of macros, using a variety of predicates.
 static void TestExitMacros() {
   EXPECT_EXIT(_exit(1),  testing::ExitedWithCode(1),  "");
   ASSERT_EXIT(_exit(42), testing::ExitedWithCode(42), "");
 
 # if GTEST_OS_WINDOWS
 
   // Of all signals' effects on the process exit code, only those of SIGABRT
   // are documented on Windows.
-  // See http://msdn.microsoft.com/en-us/library/dwwzkt4c(VS.71).aspx.
+  // See https://msdn.microsoft.com/en-us/query-bi/m/dwwzkt4c.
   EXPECT_EXIT(raise(SIGABRT), testing::ExitedWithCode(3), "") << "b_ar";
 
 # elif !GTEST_OS_FUCHSIA
 
   // Fuchsia has no unix signals.
   EXPECT_EXIT(raise(SIGKILL), testing::KilledBySignal(SIGKILL), "") << "foo";
   ASSERT_EXIT(raise(SIGUSR2), testing::KilledBySignal(SIGUSR2), "") << "bar";
 
   EXPECT_FATAL_FAILURE({  // NOLINT
     ASSERT_EXIT(_exit(0), testing::KilledBySignal(SIGSEGV), "")
       << "This failure is expected, too.";
   }, "This failure is expected, too.");
 
 # endif  // GTEST_OS_WINDOWS
 
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_EXIT(raise(SIGSEGV), testing::ExitedWithCode(0), "")
       << "This failure is expected.";
   }, "This failure is expected.");
 }
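 
 // A minimal illustrative sketch (hypothetical user code): the exit-status
 // predicate passed to EXPECT_EXIT/ASSERT_EXIT may be any callable taking
 // the raw exit status and returning bool, not only ExitedWithCode or
 // KilledBySignal.  For example, on POSIX:
 //
 //   bool ExitedCleanly(int exit_status) {          // hypothetical helper
 //     return WIFEXITED(exit_status) && WEXITSTATUS(exit_status) == 0;
 //   }
 //   EXPECT_EXIT(_exit(0), ExitedCleanly, "");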
 
 TEST_F(TestForDeathTest, ExitMacros) {
   TestExitMacros();
 }
 
 TEST_F(TestForDeathTest, ExitMacrosUsingFork) {
   testing::GTEST_FLAG(death_test_use_fork) = true;
   TestExitMacros();
 }
 
 TEST_F(TestForDeathTest, InvalidStyle) {
   testing::GTEST_FLAG(death_test_style) = "rococo";
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_DEATH(_exit(0), "") << "This failure is expected.";
   }, "This failure is expected.");
 }
 
 TEST_F(TestForDeathTest, DeathTestFailedOutput) {
   testing::GTEST_FLAG(death_test_style) = "fast";
   EXPECT_NONFATAL_FAILURE(
       EXPECT_DEATH(DieWithMessage("death\n"),
                    "expected message"),
       "Actual msg:\n"
       "[  DEATH   ] death\n");
 }
 
 TEST_F(TestForDeathTest, DeathTestUnexpectedReturnOutput) {
   testing::GTEST_FLAG(death_test_style) = "fast";
   EXPECT_NONFATAL_FAILURE(
       EXPECT_DEATH({
           fprintf(stderr, "returning\n");
           fflush(stderr);
           return;
         }, ""),
       "    Result: illegal return in test statement.\n"
       " Error msg:\n"
       "[  DEATH   ] returning\n");
 }
 
 TEST_F(TestForDeathTest, DeathTestBadExitCodeOutput) {
   testing::GTEST_FLAG(death_test_style) = "fast";
   EXPECT_NONFATAL_FAILURE(
       EXPECT_EXIT(DieWithMessage("exiting with rc 1\n"),
                   testing::ExitedWithCode(3),
                   "expected message"),
       "    Result: died but not with expected exit code:\n"
       "            Exited with exit status 1\n"
       "Actual msg:\n"
       "[  DEATH   ] exiting with rc 1\n");
 }
 
 TEST_F(TestForDeathTest, DeathTestMultiLineMatchFail) {
   testing::GTEST_FLAG(death_test_style) = "fast";
   EXPECT_NONFATAL_FAILURE(
       EXPECT_DEATH(DieWithMessage("line 1\nline 2\nline 3\n"),
                    "line 1\nxyz\nline 3\n"),
       "Actual msg:\n"
       "[  DEATH   ] line 1\n"
       "[  DEATH   ] line 2\n"
       "[  DEATH   ] line 3\n");
 }
 
 TEST_F(TestForDeathTest, DeathTestMultiLineMatchPass) {
   testing::GTEST_FLAG(death_test_style) = "fast";
   EXPECT_DEATH(DieWithMessage("line 1\nline 2\nline 3\n"),
                "line 1\nline 2\nline 3\n");
 }
 
 // A DeathTestFactory that returns MockDeathTests.
 class MockDeathTestFactory : public DeathTestFactory {
  public:
   MockDeathTestFactory();
   virtual bool Create(const char* statement,
                       const ::testing::internal::RE* regex,
                       const char* file, int line, DeathTest** test);
 
   // Sets the parameters for subsequent calls to Create.
   void SetParameters(bool create, DeathTest::TestRole role,
                      int status, bool passed);
 
   // Accessors.
   int AssumeRoleCalls() const { return assume_role_calls_; }
   int WaitCalls() const { return wait_calls_; }
   size_t PassedCalls() const { return passed_args_.size(); }
   bool PassedArgument(int n) const { return passed_args_[n]; }
   size_t AbortCalls() const { return abort_args_.size(); }
   DeathTest::AbortReason AbortArgument(int n) const {
     return abort_args_[n];
   }
   bool TestDeleted() const { return test_deleted_; }
 
  private:
   friend class MockDeathTest;
   // If true, Create will return a MockDeathTest; otherwise it returns
   // NULL.
   bool create_;
   // The value a MockDeathTest will return from its AssumeRole method.
   DeathTest::TestRole role_;
   // The value a MockDeathTest will return from its Wait method.
   int status_;
   // The value a MockDeathTest will return from its Passed method.
   bool passed_;
 
   // Number of times AssumeRole was called.
   int assume_role_calls_;
   // Number of times Wait was called.
   int wait_calls_;
   // The arguments to the calls to Passed since the last call to
   // SetParameters.
   std::vector<bool> passed_args_;
   // The arguments to the calls to Abort since the last call to
   // SetParameters.
   std::vector<DeathTest::AbortReason> abort_args_;
   // True if the last MockDeathTest returned by Create has been
   // deleted.
   bool test_deleted_;
 };
 
 
 // A DeathTest implementation useful in testing.  It returns values set
 // at its creation from its various inherited DeathTest methods, and
 // reports calls to those methods to its parent MockDeathTestFactory
 // object.
 class MockDeathTest : public DeathTest {
  public:
   MockDeathTest(MockDeathTestFactory *parent,
                 TestRole role, int status, bool passed) :
       parent_(parent), role_(role), status_(status), passed_(passed) {
   }
   virtual ~MockDeathTest() {
     parent_->test_deleted_ = true;
   }
   virtual TestRole AssumeRole() {
     ++parent_->assume_role_calls_;
     return role_;
   }
   virtual int Wait() {
     ++parent_->wait_calls_;
     return status_;
   }
   virtual bool Passed(bool exit_status_ok) {
     parent_->passed_args_.push_back(exit_status_ok);
     return passed_;
   }
   virtual void Abort(AbortReason reason) {
     parent_->abort_args_.push_back(reason);
   }
 
  private:
   MockDeathTestFactory* const parent_;
   const TestRole role_;
   const int status_;
   const bool passed_;
 };
 
 
 // MockDeathTestFactory constructor.
 MockDeathTestFactory::MockDeathTestFactory()
     : create_(true),
       role_(DeathTest::OVERSEE_TEST),
       status_(0),
       passed_(true),
       assume_role_calls_(0),
       wait_calls_(0),
       passed_args_(),
       abort_args_() {
 }
 
 
 // Sets the parameters for subsequent calls to Create.
 void MockDeathTestFactory::SetParameters(bool create,
                                          DeathTest::TestRole role,
                                          int status, bool passed) {
   create_ = create;
   role_ = role;
   status_ = status;
   passed_ = passed;
 
   assume_role_calls_ = 0;
   wait_calls_ = 0;
   passed_args_.clear();
   abort_args_.clear();
 }
 
 
 // Sets test to NULL (if create_ is false) or to the address of a new
 // MockDeathTest object with parameters taken from the last call
 // to SetParameters (if create_ is true).  Always returns true.
 bool MockDeathTestFactory::Create(const char* /*statement*/,
                                   const ::testing::internal::RE* /*regex*/,
                                   const char* /*file*/,
                                   int /*line*/,
                                   DeathTest** test) {
   test_deleted_ = false;
   if (create_) {
     *test = new MockDeathTest(this, role_, status_, passed_);
   } else {
     *test = NULL;
   }
   return true;
 }
 
 // A test fixture for testing the logic of the GTEST_DEATH_TEST_ macro.
 // It installs a MockDeathTestFactory that is used for the duration
 // of the test case.
 class MacroLogicDeathTest : public testing::Test {
  protected:
   static testing::internal::ReplaceDeathTestFactory* replacer_;
   static MockDeathTestFactory* factory_;
 
   static void SetUpTestCase() {
     factory_ = new MockDeathTestFactory;
     replacer_ = new testing::internal::ReplaceDeathTestFactory(factory_);
   }
 
   static void TearDownTestCase() {
     delete replacer_;
     replacer_ = NULL;
     delete factory_;
     factory_ = NULL;
   }
 
   // Runs a death test that breaks the rules by returning.  Such a death
   // test cannot be run directly from a test routine that uses a
   // MockDeathTest, or the remainder of the routine will not be executed.
   static void RunReturningDeathTest(bool* flag) {
     ASSERT_DEATH({  // NOLINT
       *flag = true;
       return;
     }, "");
   }
 };
 
 testing::internal::ReplaceDeathTestFactory* MacroLogicDeathTest::replacer_
     = NULL;
 MockDeathTestFactory* MacroLogicDeathTest::factory_ = NULL;
 
 
 // Test that nothing happens when the factory doesn't return a DeathTest:
 TEST_F(MacroLogicDeathTest, NothingHappens) {
   bool flag = false;
   factory_->SetParameters(false, DeathTest::OVERSEE_TEST, 0, true);
   EXPECT_DEATH(flag = true, "");
   EXPECT_FALSE(flag);
   EXPECT_EQ(0, factory_->AssumeRoleCalls());
   EXPECT_EQ(0, factory_->WaitCalls());
   EXPECT_EQ(0U, factory_->PassedCalls());
   EXPECT_EQ(0U, factory_->AbortCalls());
   EXPECT_FALSE(factory_->TestDeleted());
 }
 
 // Test that the parent process doesn't run the death test code,
 // and that the Passed method returns false when the (simulated)
 // child process exits with status 0:
 TEST_F(MacroLogicDeathTest, ChildExitsSuccessfully) {
   bool flag = false;
   factory_->SetParameters(true, DeathTest::OVERSEE_TEST, 0, true);
   EXPECT_DEATH(flag = true, "");
   EXPECT_FALSE(flag);
   EXPECT_EQ(1, factory_->AssumeRoleCalls());
   EXPECT_EQ(1, factory_->WaitCalls());
   ASSERT_EQ(1U, factory_->PassedCalls());
   EXPECT_FALSE(factory_->PassedArgument(0));
   EXPECT_EQ(0U, factory_->AbortCalls());
   EXPECT_TRUE(factory_->TestDeleted());
 }
 
 // Tests that the Passed method was given the argument "true" when
 // the (simulated) child process exits with status 1:
 TEST_F(MacroLogicDeathTest, ChildExitsUnsuccessfully) {
   bool flag = false;
   factory_->SetParameters(true, DeathTest::OVERSEE_TEST, 1, true);
   EXPECT_DEATH(flag = true, "");
   EXPECT_FALSE(flag);
   EXPECT_EQ(1, factory_->AssumeRoleCalls());
   EXPECT_EQ(1, factory_->WaitCalls());
   ASSERT_EQ(1U, factory_->PassedCalls());
   EXPECT_TRUE(factory_->PassedArgument(0));
   EXPECT_EQ(0U, factory_->AbortCalls());
   EXPECT_TRUE(factory_->TestDeleted());
 }
 
 // Tests that the (simulated) child process executes the death test
 // code, and is aborted with the correct AbortReason if it
 // executes a return statement.
 TEST_F(MacroLogicDeathTest, ChildPerformsReturn) {
   bool flag = false;
   factory_->SetParameters(true, DeathTest::EXECUTE_TEST, 0, true);
   RunReturningDeathTest(&flag);
   EXPECT_TRUE(flag);
   EXPECT_EQ(1, factory_->AssumeRoleCalls());
   EXPECT_EQ(0, factory_->WaitCalls());
   EXPECT_EQ(0U, factory_->PassedCalls());
   EXPECT_EQ(1U, factory_->AbortCalls());
   EXPECT_EQ(DeathTest::TEST_ENCOUNTERED_RETURN_STATEMENT,
             factory_->AbortArgument(0));
   EXPECT_TRUE(factory_->TestDeleted());
 }
 
 // Tests that the (simulated) child process is aborted with the
 // correct AbortReason if it does not die.
 TEST_F(MacroLogicDeathTest, ChildDoesNotDie) {
   bool flag = false;
   factory_->SetParameters(true, DeathTest::EXECUTE_TEST, 0, true);
   EXPECT_DEATH(flag = true, "");
   EXPECT_TRUE(flag);
   EXPECT_EQ(1, factory_->AssumeRoleCalls());
   EXPECT_EQ(0, factory_->WaitCalls());
   EXPECT_EQ(0U, factory_->PassedCalls());
   // This time there are two calls to Abort: one since the test didn't
   // die, and another from the ReturnSentinel when it's destroyed.  The
   // sentinel normally isn't destroyed if a test doesn't die, since
   // _exit(2) is called in that case by ForkingDeathTest, but not by
   // our MockDeathTest.
   ASSERT_EQ(2U, factory_->AbortCalls());
   EXPECT_EQ(DeathTest::TEST_DID_NOT_DIE,
             factory_->AbortArgument(0));
   EXPECT_EQ(DeathTest::TEST_ENCOUNTERED_RETURN_STATEMENT,
             factory_->AbortArgument(1));
   EXPECT_TRUE(factory_->TestDeleted());
 }
 
 // Tests that a successful death test does not register a successful
 // test part.
 TEST(SuccessRegistrationDeathTest, NoSuccessPart) {
   EXPECT_DEATH(_exit(1), "");
   EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());
 }
 
 TEST(StreamingAssertionsDeathTest, DeathTest) {
   EXPECT_DEATH(_exit(1), "") << "unexpected failure";
   ASSERT_DEATH(_exit(1), "") << "unexpected failure";
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_DEATH(_exit(0), "") << "expected failure";
   }, "expected failure");
   EXPECT_FATAL_FAILURE({  // NOLINT
     ASSERT_DEATH(_exit(0), "") << "expected failure";
   }, "expected failure");
 }
 
 // Tests that GetLastErrnoDescription returns an empty string when the
 // last error is 0 and non-empty string when it is non-zero.
 TEST(GetLastErrnoDescription, GetLastErrnoDescriptionWorks) {
   errno = ENOENT;
   EXPECT_STRNE("", GetLastErrnoDescription().c_str());
   errno = 0;
   EXPECT_STREQ("", GetLastErrnoDescription().c_str());
 }
 
 # if GTEST_OS_WINDOWS
 TEST(AutoHandleTest, AutoHandleWorks) {
   HANDLE handle = ::CreateEvent(NULL, FALSE, FALSE, NULL);
   ASSERT_NE(INVALID_HANDLE_VALUE, handle);
 
   // Tests that the AutoHandle is correctly initialized with a handle.
   testing::internal::AutoHandle auto_handle(handle);
   EXPECT_EQ(handle, auto_handle.Get());
 
   // Tests that Reset assigns INVALID_HANDLE_VALUE.
   // Note that this cannot verify whether the original handle is closed.
   auto_handle.Reset();
   EXPECT_EQ(INVALID_HANDLE_VALUE, auto_handle.Get());
 
   // Tests that Reset assigns the new handle.
   // Note that this cannot verify whether the original handle is closed.
   handle = ::CreateEvent(NULL, FALSE, FALSE, NULL);
   ASSERT_NE(INVALID_HANDLE_VALUE, handle);
   auto_handle.Reset(handle);
   EXPECT_EQ(handle, auto_handle.Get());
 
   // Tests that AutoHandle contains INVALID_HANDLE_VALUE by default.
   testing::internal::AutoHandle auto_handle2;
   EXPECT_EQ(INVALID_HANDLE_VALUE, auto_handle2.Get());
 }
 # endif  // GTEST_OS_WINDOWS
 
 # if GTEST_OS_WINDOWS
 typedef unsigned __int64 BiggestParsable;
 typedef signed __int64 BiggestSignedParsable;
 # else
 typedef unsigned long long BiggestParsable;
 typedef signed long long BiggestSignedParsable;
 # endif  // GTEST_OS_WINDOWS
 
 // We cannot use std::numeric_limits<T>::max() as it clashes with the
 // max() macro defined by <windows.h>.
 const BiggestParsable kBiggestParsableMax = ULLONG_MAX;
 const BiggestSignedParsable kBiggestSignedParsableMax = LLONG_MAX;
 
 TEST(ParseNaturalNumberTest, RejectsInvalidFormat) {
   BiggestParsable result = 0;
 
   // Rejects non-numbers.
   EXPECT_FALSE(ParseNaturalNumber("non-number string", &result));
 
   // Rejects numbers with whitespace prefix.
   EXPECT_FALSE(ParseNaturalNumber(" 123", &result));
 
   // Rejects negative numbers.
   EXPECT_FALSE(ParseNaturalNumber("-123", &result));
 
   // Rejects numbers starting with a plus sign.
   EXPECT_FALSE(ParseNaturalNumber("+123", &result));
   errno = 0;
 }
 
 TEST(ParseNaturalNumberTest, RejectsOverflownNumbers) {
   BiggestParsable result = 0;
 
   EXPECT_FALSE(ParseNaturalNumber("99999999999999999999999", &result));
 
   signed char char_result = 0;
   EXPECT_FALSE(ParseNaturalNumber("200", &char_result));
   errno = 0;
 }
 
 TEST(ParseNaturalNumberTest, AcceptsValidNumbers) {
   BiggestParsable result = 0;
 
   result = 0;
   ASSERT_TRUE(ParseNaturalNumber("123", &result));
   EXPECT_EQ(123U, result);
 
   // Check 0 as an edge case.
   result = 1;
   ASSERT_TRUE(ParseNaturalNumber("0", &result));
   EXPECT_EQ(0U, result);
 
   result = 1;
   ASSERT_TRUE(ParseNaturalNumber("00000", &result));
   EXPECT_EQ(0U, result);
 }
 
 TEST(ParseNaturalNumberTest, AcceptsTypeLimits) {
   Message msg;
   msg << kBiggestParsableMax;
 
   BiggestParsable result = 0;
   EXPECT_TRUE(ParseNaturalNumber(msg.GetString(), &result));
   EXPECT_EQ(kBiggestParsableMax, result);
 
   Message msg2;
   msg2 << kBiggestSignedParsableMax;
 
   BiggestSignedParsable signed_result = 0;
   EXPECT_TRUE(ParseNaturalNumber(msg2.GetString(), &signed_result));
   EXPECT_EQ(kBiggestSignedParsableMax, signed_result);
 
   Message msg3;
   msg3 << INT_MAX;
 
   int int_result = 0;
   EXPECT_TRUE(ParseNaturalNumber(msg3.GetString(), &int_result));
   EXPECT_EQ(INT_MAX, int_result);
 
   Message msg4;
   msg4 << UINT_MAX;
 
   unsigned int uint_result = 0;
   EXPECT_TRUE(ParseNaturalNumber(msg4.GetString(), &uint_result));
   EXPECT_EQ(UINT_MAX, uint_result);
 }
 
 TEST(ParseNaturalNumberTest, WorksForShorterIntegers) {
   short short_result = 0;
   ASSERT_TRUE(ParseNaturalNumber("123", &short_result));
   EXPECT_EQ(123, short_result);
 
   signed char char_result = 0;
   ASSERT_TRUE(ParseNaturalNumber("123", &char_result));
   EXPECT_EQ(123, char_result);
 }
 
 # if GTEST_OS_WINDOWS
 TEST(EnvironmentTest, HandleFitsIntoSizeT) {
   // TODO(vladl@google.com): Remove this test after this condition is verified
   // in a static assertion in gtest-death-test.cc in the function
   // GetStatusFileDescriptor.
   ASSERT_TRUE(sizeof(HANDLE) <= sizeof(size_t));
 }
 # endif  // GTEST_OS_WINDOWS
 
 // Tests that EXPECT_DEATH_IF_SUPPORTED/ASSERT_DEATH_IF_SUPPORTED trigger
 // failures when death tests are available on the system.
 TEST(ConditionalDeathMacrosDeathTest, ExpectsDeathWhenDeathTestsAvailable) {
   EXPECT_DEATH_IF_SUPPORTED(DieInside("CondDeathTestExpectMacro"),
                             "death inside CondDeathTestExpectMacro");
   ASSERT_DEATH_IF_SUPPORTED(DieInside("CondDeathTestAssertMacro"),
                             "death inside CondDeathTestAssertMacro");
 
   // An empty statement will not crash, so the death test must report a failure.
   EXPECT_NONFATAL_FAILURE(EXPECT_DEATH_IF_SUPPORTED(;, ""), "");
   EXPECT_FATAL_FAILURE(ASSERT_DEATH_IF_SUPPORTED(;, ""), "");
 }
 
 TEST(InDeathTestChildDeathTest, ReportsDeathTestCorrectlyInFastStyle) {
   testing::GTEST_FLAG(death_test_style) = "fast";
   EXPECT_FALSE(InDeathTestChild());
   EXPECT_DEATH({
     fprintf(stderr, InDeathTestChild() ? "Inside" : "Outside");
     fflush(stderr);
     _exit(1);
   }, "Inside");
 }
 
 TEST(InDeathTestChildDeathTest, ReportsDeathTestCorrectlyInThreadSafeStyle) {
   testing::GTEST_FLAG(death_test_style) = "threadsafe";
   EXPECT_FALSE(InDeathTestChild());
   EXPECT_DEATH({
     fprintf(stderr, InDeathTestChild() ? "Inside" : "Outside");
     fflush(stderr);
     _exit(1);
   }, "Inside");
 }
 
 #else  // !GTEST_HAS_DEATH_TEST follows
 
 using testing::internal::CaptureStderr;
 using testing::internal::GetCapturedStderr;
 
 // Tests that EXPECT_DEATH_IF_SUPPORTED/ASSERT_DEATH_IF_SUPPORTED are still
 // defined but do not trigger failures when death tests are not available on
 // the system.
 TEST(ConditionalDeathMacrosTest, WarnsWhenDeathTestsNotAvailable) {
   // An empty statement will not crash, but that should not trigger a failure
   // when death tests are not supported.
   CaptureStderr();
   EXPECT_DEATH_IF_SUPPORTED(;, "");
   std::string output = GetCapturedStderr();
   ASSERT_TRUE(NULL != strstr(output.c_str(),
                              "Death tests are not supported on this platform"));
   ASSERT_TRUE(NULL != strstr(output.c_str(), ";"));
 
   // The streamed message should not be printed as there is no test failure.
   CaptureStderr();
   EXPECT_DEATH_IF_SUPPORTED(;, "") << "streamed message";
   output = GetCapturedStderr();
   ASSERT_TRUE(NULL == strstr(output.c_str(), "streamed message"));
 
   CaptureStderr();
   ASSERT_DEATH_IF_SUPPORTED(;, "");  // NOLINT
   output = GetCapturedStderr();
   ASSERT_TRUE(NULL != strstr(output.c_str(),
                              "Death tests are not supported on this platform"));
   ASSERT_TRUE(NULL != strstr(output.c_str(), ";"));
 
   CaptureStderr();
   ASSERT_DEATH_IF_SUPPORTED(;, "") << "streamed message";  // NOLINT
   output = GetCapturedStderr();
   ASSERT_TRUE(NULL == strstr(output.c_str(), "streamed message"));
 }
 
 void FuncWithAssert(int* n) {
   ASSERT_DEATH_IF_SUPPORTED(return;, "");
   (*n)++;
 }
 
 // Tests that ASSERT_DEATH_IF_SUPPORTED does not return from the current
 // function (as ASSERT_DEATH does) if death tests are not supported.
 TEST(ConditionalDeathMacrosTest, AssertDeathDoesNotReturnIfUnsupported) {
   int n = 0;
   FuncWithAssert(&n);
   EXPECT_EQ(1, n);
 }
 
 #endif  // !GTEST_HAS_DEATH_TEST
 
 // Tests that the death test macros expand to code which may or may not
 // be followed by operator<<, and that in either case the complete text
 // comprises only a single C++ statement.
 //
 // The syntax should work whether death tests are available or not.
 TEST(ConditionalDeathMacrosSyntaxDeathTest, SingleStatement) {
   if (AlwaysFalse())
     // This would fail if executed; this is a compilation test only
     ASSERT_DEATH_IF_SUPPORTED(return, "");
 
   if (AlwaysTrue())
     EXPECT_DEATH_IF_SUPPORTED(_exit(1), "");
   else
     // This empty "else" branch is meant to ensure that EXPECT_DEATH
     // doesn't expand into an "if" statement without an "else"
     ;  // NOLINT
 
   if (AlwaysFalse())
     ASSERT_DEATH_IF_SUPPORTED(return, "") << "did not die";
 
   if (AlwaysFalse())
     ;  // NOLINT
   else
     EXPECT_DEATH_IF_SUPPORTED(_exit(1), "") << 1 << 2 << 3;
 }
 
 // Tests that conditional death test macros expand to code which interacts
 // well with switch statements.
 TEST(ConditionalDeathMacrosSyntaxDeathTest, SwitchStatement) {
   // Microsoft compiler usually complains about switch statements without
   // case labels. We suppress that warning for this test.
   GTEST_DISABLE_MSC_WARNINGS_PUSH_(4065)
 
   switch (0)
     default:
       ASSERT_DEATH_IF_SUPPORTED(_exit(1), "")
           << "exit in default switch handler";
 
   switch (0)
     case 0:
       EXPECT_DEATH_IF_SUPPORTED(_exit(1), "") << "exit in switch case";
 
   GTEST_DISABLE_MSC_WARNINGS_POP_()
 }
 
 // Tests that a test case whose name ends with "DeathTest" works fine
 // on Windows.
 TEST(NotADeathTest, Test) {
   SUCCEED();
 }
diff --git a/googletest/test/gtest-death-test_ex_test.cc b/googletest/test/googletest-death-test_ex_test.cc
similarity index 98%
rename from googletest/test/gtest-death-test_ex_test.cc
rename to googletest/test/googletest-death-test_ex_test.cc
index b50a13d5..33910742 100644
--- a/googletest/test/gtest-death-test_ex_test.cc
+++ b/googletest/test/googletest-death-test_ex_test.cc
@@ -1,93 +1,93 @@
 // Copyright 2010, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 // Author: vladl@google.com (Vlad Losev)
 //
 // Tests that verify interaction of exceptions and death tests.
 
 #include "gtest/gtest-death-test.h"
 #include "gtest/gtest.h"
 
 #if GTEST_HAS_DEATH_TEST
 
 # if GTEST_HAS_SEH
 #  include <windows.h>          // For RaiseException().
 # endif
 
 # include "gtest/gtest-spi.h"
 
 # if GTEST_HAS_EXCEPTIONS
 
 #  include <exception>  // For std::exception.
 
 // Tests that death tests report thrown exceptions as failures and that the
 // exceptions do not escape death test macros.
 TEST(CxxExceptionDeathTest, ExceptionIsFailure) {
   try {
     EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(throw 1, ""), "threw an exception");
   } catch (...) {  // NOLINT
     FAIL() << "An exception escaped a death test macro invocation "
            << "with catch_exceptions "
            << (testing::GTEST_FLAG(catch_exceptions) ? "enabled" : "disabled");
   }
 }
 
 class TestException : public std::exception {
  public:
   virtual const char* what() const throw() { return "exceptional message"; }
 };
 
 TEST(CxxExceptionDeathTest, PrintsMessageForStdExceptions) {
   // Verifies that the exception message is quoted in the failure text.
   EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(throw TestException(), ""),
                           "exceptional message");
   // Verifies that the location is mentioned in the failure text.
   EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(throw TestException(), ""),
-                          "gtest-death-test_ex_test.cc");
+                          "googletest-death-test_ex_test.cc");
 }
 # endif  // GTEST_HAS_EXCEPTIONS
 
 # if GTEST_HAS_SEH
 // Tests that enabling interception of SEH exceptions with the
 // catch_exceptions flag does not interfere with SEH exceptions being
 // treated as death by death tests.
 TEST(SehExceptionDeathTest, CatchExceptionsDoesNotInterfere) {
   EXPECT_DEATH(RaiseException(42, 0x0, 0, NULL), "")
       << "with catch_exceptions "
       << (testing::GTEST_FLAG(catch_exceptions) ? "enabled" : "disabled");
 }
 # endif
 
 #endif  // GTEST_HAS_DEATH_TEST
 
 int main(int argc, char** argv) {
   testing::InitGoogleTest(&argc, argv);
   testing::GTEST_FLAG(catch_exceptions) = GTEST_ENABLE_CATCH_EXCEPTIONS_ != 0;
   return RUN_ALL_TESTS();
 }
diff --git a/googletest/test/gtest_env_var_test.py b/googletest/test/googletest-env-var-test.py
similarity index 96%
rename from googletest/test/gtest_env_var_test.py
rename to googletest/test/googletest-env-var-test.py
index beb2a8b0..9c80e2a1 100755
--- a/googletest/test/gtest_env_var_test.py
+++ b/googletest/test/googletest-env-var-test.py
@@ -1,119 +1,119 @@
 #!/usr/bin/env python
 #
 # Copyright 2008, Google Inc.
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following disclaimer
 # in the documentation and/or other materials provided with the
 # distribution.
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 """Verifies that Google Test correctly parses environment variables."""
 
 __author__ = 'wan@google.com (Zhanyong Wan)'
 
 import os
 import gtest_test_utils
 
 
 IS_WINDOWS = os.name == 'nt'
 IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
 
-COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_env_var_test_')
+COMMAND = gtest_test_utils.GetTestExecutablePath('googletest-env-var-test_')
 
 environ = os.environ.copy()
 
 
 def AssertEq(expected, actual):
   if expected != actual:
     print 'Expected: %s' % (expected,)
     print '  Actual: %s' % (actual,)
     raise AssertionError
 
 
 def SetEnvVar(env_var, value):
   """Sets the env variable to 'value'; unsets it when 'value' is None."""
 
   if value is not None:
     environ[env_var] = value
   elif env_var in environ:
     del environ[env_var]
 
 
 def GetFlag(flag):
-  """Runs gtest_env_var_test_ and returns its output."""
+  """Runs googletest-env-var-test_ and returns its output."""
 
   args = [COMMAND]
   if flag is not None:
     args += [flag]
   return gtest_test_utils.Subprocess(args, env=environ).output
 
 
 def TestFlag(flag, test_val, default_val):
   """Verifies that the given flag is affected by the corresponding env var."""
 
   env_var = 'GTEST_' + flag.upper()
   SetEnvVar(env_var, test_val)
   AssertEq(test_val, GetFlag(flag))
   SetEnvVar(env_var, None)
   AssertEq(default_val, GetFlag(flag))
 
 
 class GTestEnvVarTest(gtest_test_utils.TestCase):
 
   def testEnvVarAffectsFlag(self):
     """Tests that environment variable should affect the corresponding flag."""
 
     TestFlag('break_on_failure', '1', '0')
     TestFlag('color', 'yes', 'auto')
     TestFlag('filter', 'FooTest.Bar', '*')
     SetEnvVar('XML_OUTPUT_FILE', None)  # For 'output' test
     TestFlag('output', 'xml:tmp/foo.xml', '')
     TestFlag('print_time', '0', '1')
     TestFlag('repeat', '999', '1')
     TestFlag('throw_on_failure', '1', '0')
     TestFlag('death_test_style', 'threadsafe', 'fast')
     TestFlag('catch_exceptions', '0', '1')
 
     if IS_LINUX:
       TestFlag('death_test_use_fork', '1', '0')
       TestFlag('stack_trace_depth', '0', '100')
 
 
   def testXmlOutputFile(self):
     """Tests that $XML_OUTPUT_FILE affects the output flag."""
 
     SetEnvVar('GTEST_OUTPUT', None)
     SetEnvVar('XML_OUTPUT_FILE', 'tmp/bar.xml')
     AssertEq('xml:tmp/bar.xml', GetFlag('output'))
 
   def testXmlOutputFileOverride(self):
     """Tests that $XML_OUTPUT_FILE is overridden by $GTEST_OUTPUT."""
 
     SetEnvVar('GTEST_OUTPUT', 'xml:tmp/foo.xml')
     SetEnvVar('XML_OUTPUT_FILE', 'tmp/bar.xml')
     AssertEq('xml:tmp/foo.xml', GetFlag('output'))
 
 if __name__ == '__main__':
   gtest_test_utils.Main()
diff --git a/googletest/test/gtest_env_var_test_.cc b/googletest/test/googletest-env-var-test_.cc
similarity index 98%
rename from googletest/test/gtest_env_var_test_.cc
rename to googletest/test/googletest-env-var-test_.cc
index 9b668dc0..74b95064 100644
--- a/googletest/test/gtest_env_var_test_.cc
+++ b/googletest/test/googletest-env-var-test_.cc
@@ -1,124 +1,124 @@
 // Copyright 2008, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 // Author: wan@google.com (Zhanyong Wan)
 
 // A helper program for testing that Google Test parses the environment
 // variables correctly.
 
 #include "gtest/gtest.h"
 
 #include <iostream>
 
 #include "src/gtest-internal-inl.h"
 
 using ::std::cout;
 
 namespace testing {
 
 // The purpose of this is to make the test more realistic by ensuring
 // that the UnitTest singleton is created before main() is entered.
 // We don't actually run the TEST itself.
 TEST(GTestEnvVarTest, Dummy) {
 }
 
 void PrintFlag(const char* flag) {
   if (strcmp(flag, "break_on_failure") == 0) {
     cout << GTEST_FLAG(break_on_failure);
     return;
   }
 
   if (strcmp(flag, "catch_exceptions") == 0) {
     cout << GTEST_FLAG(catch_exceptions);
     return;
   }
 
   if (strcmp(flag, "color") == 0) {
     cout << GTEST_FLAG(color);
     return;
   }
 
   if (strcmp(flag, "death_test_style") == 0) {
     cout << GTEST_FLAG(death_test_style);
     return;
   }
 
   if (strcmp(flag, "death_test_use_fork") == 0) {
     cout << GTEST_FLAG(death_test_use_fork);
     return;
   }
 
   if (strcmp(flag, "filter") == 0) {
     cout << GTEST_FLAG(filter);
     return;
   }
 
   if (strcmp(flag, "output") == 0) {
     cout << GTEST_FLAG(output);
     return;
   }
 
   if (strcmp(flag, "print_time") == 0) {
     cout << GTEST_FLAG(print_time);
     return;
   }
 
   if (strcmp(flag, "repeat") == 0) {
     cout << GTEST_FLAG(repeat);
     return;
   }
 
   if (strcmp(flag, "stack_trace_depth") == 0) {
     cout << GTEST_FLAG(stack_trace_depth);
     return;
   }
 
   if (strcmp(flag, "throw_on_failure") == 0) {
     cout << GTEST_FLAG(throw_on_failure);
     return;
   }
 
   cout << "Invalid flag name " << flag
        << ".  Valid names are break_on_failure, color, filter, etc.\n";
   exit(1);
 }
 
 }  // namespace testing
 
 int main(int argc, char** argv) {
   testing::InitGoogleTest(&argc, argv);
 
   if (argc != 2) {
-    cout << "Usage: gtest_env_var_test_ NAME_OF_FLAG\n";
+    cout << "Usage: googletest-env-var-test_ NAME_OF_FLAG\n";
     return 1;
   }
 
   testing::PrintFlag(argv[1]);
   return 0;
 }
diff --git a/googletest/test/gtest-filepath_test.cc b/googletest/test/googletest-filepath-test.cc
similarity index 100%
rename from googletest/test/gtest-filepath_test.cc
rename to googletest/test/googletest-filepath-test.cc
diff --git a/googletest/test/gtest_filter_unittest.py b/googletest/test/googletest-filter-unittest.py
similarity index 98%
rename from googletest/test/gtest_filter_unittest.py
rename to googletest/test/googletest-filter-unittest.py
index 92cc77c8..1e554d56 100755
--- a/googletest/test/gtest_filter_unittest.py
+++ b/googletest/test/googletest-filter-unittest.py
@@ -1,638 +1,638 @@
 #!/usr/bin/env python
 #
 # Copyright 2005 Google Inc. All Rights Reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following disclaimer
 # in the documentation and/or other materials provided with the
 # distribution.
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 """Unit test for Google Test test filters.
 
 A user can specify which test(s) in a Google Test program to run via either
 the GTEST_FILTER environment variable or the --gtest_filter flag.
 This script tests such functionality by invoking
-gtest_filter_unittest_ (a program written with Google Test) with different
+googletest-filter-unittest_ (a program written with Google Test) with different
 environments and command line flags.
 
 Note that test sharding may also influence which tests are filtered. Therefore,
 we test that here also.
 """
 
 __author__ = 'wan@google.com (Zhanyong Wan)'
 
 import os
 import re
 import sets
 import sys
 import gtest_test_utils
 
 # Constants.
 
 # Checks if this platform can pass empty environment variables to child
 # processes.  We set an env variable to an empty string and invoke a python
 # script in a subprocess to print whether the variable is STILL in
 # os.environ.  We then use 'eval' to parse the child's output so that an
 # exception is thrown if the input is anything other than 'True' or 'False'.
 CAN_PASS_EMPTY_ENV = False
 if sys.executable:
   os.environ['EMPTY_VAR'] = ''
   child = gtest_test_utils.Subprocess(
       [sys.executable, '-c', 'import os; print \'EMPTY_VAR\' in os.environ'])
   CAN_PASS_EMPTY_ENV = eval(child.output)
 
 
 # Check if this platform can unset environment variables in child processes.
 # We set an env variable to a non-empty string, unset it, and invoke
 # a python script in a subprocess to print whether the variable
 # is NO LONGER in os.environ.
 # We use 'eval' to parse the child's output so that an exception
 # is thrown if the input is neither 'True' nor 'False'.
 CAN_UNSET_ENV = False
 if sys.executable:
   os.environ['UNSET_VAR'] = 'X'
   del os.environ['UNSET_VAR']
   child = gtest_test_utils.Subprocess(
       [sys.executable, '-c', 'import os; print \'UNSET_VAR\' not in os.environ'
       ])
   CAN_UNSET_ENV = eval(child.output)
 
 
 # Checks if we should test with an empty filter. This doesn't
 # make sense on platforms that cannot pass empty env variables (Win32)
 # and on platforms that cannot unset variables (since we cannot tell
 # the difference between "" and NULL -- Borland and Solaris < 5.10)
 CAN_TEST_EMPTY_FILTER = (CAN_PASS_EMPTY_ENV and CAN_UNSET_ENV)
 
 
 # The environment variable for specifying the test filters.
 FILTER_ENV_VAR = 'GTEST_FILTER'
 
 # The environment variables for test sharding.
 TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
 SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
 SHARD_STATUS_FILE_ENV_VAR = 'GTEST_SHARD_STATUS_FILE'
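 # For example, setting GTEST_TOTAL_SHARDS=3 and GTEST_SHARD_INDEX=1 before
 # invoking the binary runs only the second of the three shards;
 # RunWithSharding() below injects exactly these two variables into the child
 # environment.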
 
 # The command line flag for specifying the test filters.
 FILTER_FLAG = 'gtest_filter'
 
 # The command line flag for including disabled tests.
 ALSO_RUN_DISABLED_TESTS_FLAG = 'gtest_also_run_disabled_tests'
 
-# Command to run the gtest_filter_unittest_ program.
-COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_filter_unittest_')
+# Command to run the googletest-filter-unittest_ program.
+COMMAND = gtest_test_utils.GetTestExecutablePath('googletest-filter-unittest_')
 
 # Regex for determining whether parameterized tests are enabled in the binary.
 PARAM_TEST_REGEX = re.compile(r'/ParamTest')
 
 # Regex for parsing test case names from Google Test's output.
 TEST_CASE_REGEX = re.compile(r'^\[\-+\] \d+ tests? from (\w+(/\w+)?)')
 
 # Regex for parsing test names from Google Test's output.
 TEST_REGEX = re.compile(r'^\[\s*RUN\s*\].*\.(\w+(/\w+)?)')
 
 # The command line flag to tell Google Test to output the list of tests it
 # will run.
 LIST_TESTS_FLAG = '--gtest_list_tests'
 
 # Indicates whether Google Test supports death tests.
 SUPPORTS_DEATH_TESTS = 'HasDeathTest' in gtest_test_utils.Subprocess(
     [COMMAND, LIST_TESTS_FLAG]).output
 
-# Full names of all tests in gtest_filter_unittests_.
+# Full names of all tests in googletest-filter-unittests_.
 PARAM_TESTS = [
     'SeqP/ParamTest.TestX/0',
     'SeqP/ParamTest.TestX/1',
     'SeqP/ParamTest.TestY/0',
     'SeqP/ParamTest.TestY/1',
     'SeqQ/ParamTest.TestX/0',
     'SeqQ/ParamTest.TestX/1',
     'SeqQ/ParamTest.TestY/0',
     'SeqQ/ParamTest.TestY/1',
     ]
 
 DISABLED_TESTS = [
     'BarTest.DISABLED_TestFour',
     'BarTest.DISABLED_TestFive',
     'BazTest.DISABLED_TestC',
     'DISABLED_FoobarTest.Test1',
     'DISABLED_FoobarTest.DISABLED_Test2',
     'DISABLED_FoobarbazTest.TestA',
     ]
 
 if SUPPORTS_DEATH_TESTS:
   DEATH_TESTS = [
     'HasDeathTest.Test1',
     'HasDeathTest.Test2',
     ]
 else:
   DEATH_TESTS = []
 
 # All the non-disabled tests.
 ACTIVE_TESTS = [
     'FooTest.Abc',
     'FooTest.Xyz',
 
     'BarTest.TestOne',
     'BarTest.TestTwo',
     'BarTest.TestThree',
 
     'BazTest.TestOne',
     'BazTest.TestA',
     'BazTest.TestB',
     ] + DEATH_TESTS + PARAM_TESTS
 
 param_tests_present = None
 
 # Utilities.
 
 environ = os.environ.copy()
 
 
 def SetEnvVar(env_var, value):
   """Sets the env variable to 'value'; unsets it when 'value' is None."""
 
   if value is not None:
     environ[env_var] = value
   elif env_var in environ:
     del environ[env_var]
 
 
 def RunAndReturnOutput(args = None):
   """Runs the test program and returns its output."""
 
   return gtest_test_utils.Subprocess([COMMAND] + (args or []),
                                      env=environ).output
 
 
 def RunAndExtractTestList(args = None):
   """Runs the test program and returns its exit code and a list of tests run."""
 
   p = gtest_test_utils.Subprocess([COMMAND] + (args or []), env=environ)
   tests_run = []
   test_case = ''
   test = ''
   for line in p.output.split('\n'):
     match = TEST_CASE_REGEX.match(line)
     if match is not None:
       test_case = match.group(1)
     else:
       match = TEST_REGEX.match(line)
       if match is not None:
         test = match.group(1)
         tests_run.append(test_case + '.' + test)
   return (tests_run, p.exit_code)
 
 
 def InvokeWithModifiedEnv(extra_env, function, *args, **kwargs):
   """Runs the given function and arguments in a modified environment."""
   try:
     original_env = environ.copy()
     environ.update(extra_env)
     return function(*args, **kwargs)
   finally:
     environ.clear()
     environ.update(original_env)
 
 
 def RunWithSharding(total_shards, shard_index, command):
   """Runs a test program shard and returns exit code and a list of tests run."""
 
   extra_env = {SHARD_INDEX_ENV_VAR: str(shard_index),
                TOTAL_SHARDS_ENV_VAR: str(total_shards)}
   return InvokeWithModifiedEnv(extra_env, RunAndExtractTestList, command)
 
 # The unit test.
 
 
 class GTestFilterUnitTest(gtest_test_utils.TestCase):
   """Tests the env variable or the command line flag to filter tests."""
 
   # Utilities.
 
   def AssertSetEqual(self, lhs, rhs):
     """Asserts that two sets are equal."""
 
     for elem in lhs:
       self.assert_(elem in rhs, '%s in %s' % (elem, rhs))
 
     for elem in rhs:
       self.assert_(elem in lhs, '%s in %s' % (elem, lhs))
 
   def AssertPartitionIsValid(self, set_var, list_of_sets):
     """Asserts that list_of_sets is a valid partition of set_var."""
 
     full_partition = []
     for slice_var in list_of_sets:
       full_partition.extend(slice_var)
     self.assertEqual(len(set_var), len(full_partition))
     self.assertEqual(sets.Set(set_var), sets.Set(full_partition))
 
   def AdjustForParameterizedTests(self, tests_to_run):
     """Adjust tests_to_run in case value parameterized tests are disabled."""
 
     global param_tests_present
     if not param_tests_present:
       return list(sets.Set(tests_to_run) - sets.Set(PARAM_TESTS))
     else:
       return tests_to_run
 
   def RunAndVerify(self, gtest_filter, tests_to_run):
     """Checks that the binary runs correct set of tests for a given filter."""
 
     tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
 
     # First, tests using the environment variable.
 
     # Windows removes empty variables from the environment when passing it
     # to a new process.  This means it is impossible to pass an empty filter
     # into a process using the environment variable.  However, we can still
     # test the case when the variable is not supplied (i.e., gtest_filter is
     # None).
     # pylint: disable-msg=C6403
     if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
       SetEnvVar(FILTER_ENV_VAR, gtest_filter)
       tests_run = RunAndExtractTestList()[0]
       SetEnvVar(FILTER_ENV_VAR, None)
       self.AssertSetEqual(tests_run, tests_to_run)
     # pylint: enable-msg=C6403
 
     # Next, tests using the command line flag.
 
     if gtest_filter is None:
       args = []
     else:
       args = ['--%s=%s' % (FILTER_FLAG, gtest_filter)]
 
     tests_run = RunAndExtractTestList(args)[0]
     self.AssertSetEqual(tests_run, tests_to_run)
 
   def RunAndVerifyWithSharding(self, gtest_filter, total_shards, tests_to_run,
                                args=None, check_exit_0=False):
     """Checks that binary runs correct tests for the given filter and shard.
 
-    Runs all shards of gtest_filter_unittest_ with the given filter, and
+    Runs all shards of googletest-filter-unittest_ with the given filter, and
     verifies that the right set of tests were run. The union of tests run
     on each shard should be identical to tests_to_run, without duplicates.
     If check_exit_0 is true, also verifies that every shard exits with code 0.
 
     Args:
       gtest_filter: A filter to apply to the tests.
       total_shards: A total number of shards to split test run into.
       tests_to_run: A set of tests expected to run.
       args: Arguments to pass to the test binary.
       check_exit_0: When set to a true value, make sure that all shards
                     return 0.
     """
 
     tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
 
     # Windows removes empty variables from the environment when passing it
     # to a new process.  This means it is impossible to pass an empty filter
     # into a process using the environment variable.  However, we can still
     # test the case when the variable is not supplied (i.e., gtest_filter is
     # None).
     # pylint: disable-msg=C6403
     if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
       SetEnvVar(FILTER_ENV_VAR, gtest_filter)
       partition = []
       for i in range(0, total_shards):
         (tests_run, exit_code) = RunWithSharding(total_shards, i, args)
         if check_exit_0:
           self.assertEqual(0, exit_code)
         partition.append(tests_run)
 
       self.AssertPartitionIsValid(tests_to_run, partition)
       SetEnvVar(FILTER_ENV_VAR, None)
     # pylint: enable-msg=C6403
 
   def RunAndVerifyAllowingDisabled(self, gtest_filter, tests_to_run):
     """Checks that the binary runs correct set of tests for the given filter.
 
-    Runs gtest_filter_unittest_ with the given filter, and enables
+    Runs googletest-filter-unittest_ with the given filter, and enables
     disabled tests. Verifies that the right set of tests were run.
 
     Args:
       gtest_filter: A filter to apply to the tests.
       tests_to_run: A set of tests expected to run.
     """
 
     tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
 
     # Construct the command line.
     args = ['--%s' % ALSO_RUN_DISABLED_TESTS_FLAG]
     if gtest_filter is not None:
       args.append('--%s=%s' % (FILTER_FLAG, gtest_filter))
 
     tests_run = RunAndExtractTestList(args)[0]
     self.AssertSetEqual(tests_run, tests_to_run)
 
   def setUp(self):
     """Sets up test case.
 
     Determines whether value-parameterized tests are enabled in the binary and
     sets the flags accordingly.
     """
 
     global param_tests_present
     if param_tests_present is None:
       param_tests_present = PARAM_TEST_REGEX.search(
           RunAndReturnOutput()) is not None
 
   def testDefaultBehavior(self):
     """Tests the behavior of not specifying the filter."""
 
     self.RunAndVerify(None, ACTIVE_TESTS)
 
   def testDefaultBehaviorWithShards(self):
     """Tests the behavior without the filter, with sharding enabled."""
 
     self.RunAndVerifyWithSharding(None, 1, ACTIVE_TESTS)
     self.RunAndVerifyWithSharding(None, 2, ACTIVE_TESTS)
     self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) - 1, ACTIVE_TESTS)
     self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS), ACTIVE_TESTS)
     self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) + 1, ACTIVE_TESTS)
 
   def testEmptyFilter(self):
     """Tests an empty filter."""
 
     self.RunAndVerify('', [])
     self.RunAndVerifyWithSharding('', 1, [])
     self.RunAndVerifyWithSharding('', 2, [])
 
   def testBadFilter(self):
     """Tests a filter that matches nothing."""
 
     self.RunAndVerify('BadFilter', [])
     self.RunAndVerifyAllowingDisabled('BadFilter', [])
 
   def testFullName(self):
     """Tests filtering by full name."""
 
     self.RunAndVerify('FooTest.Xyz', ['FooTest.Xyz'])
     self.RunAndVerifyAllowingDisabled('FooTest.Xyz', ['FooTest.Xyz'])
     self.RunAndVerifyWithSharding('FooTest.Xyz', 5, ['FooTest.Xyz'])
 
   def testUniversalFilters(self):
     """Tests filters that match everything."""
 
     self.RunAndVerify('*', ACTIVE_TESTS)
     self.RunAndVerify('*.*', ACTIVE_TESTS)
     self.RunAndVerifyWithSharding('*.*', len(ACTIVE_TESTS) - 3, ACTIVE_TESTS)
     self.RunAndVerifyAllowingDisabled('*', ACTIVE_TESTS + DISABLED_TESTS)
     self.RunAndVerifyAllowingDisabled('*.*', ACTIVE_TESTS + DISABLED_TESTS)
 
   def testFilterByTestCase(self):
     """Tests filtering by test case name."""
 
     self.RunAndVerify('FooTest.*', ['FooTest.Abc', 'FooTest.Xyz'])
 
     BAZ_TESTS = ['BazTest.TestOne', 'BazTest.TestA', 'BazTest.TestB']
     self.RunAndVerify('BazTest.*', BAZ_TESTS)
     self.RunAndVerifyAllowingDisabled('BazTest.*',
                                       BAZ_TESTS + ['BazTest.DISABLED_TestC'])
 
   def testFilterByTest(self):
     """Tests filtering by test name."""
 
     self.RunAndVerify('*.TestOne', ['BarTest.TestOne', 'BazTest.TestOne'])
 
   def testFilterDisabledTests(self):
     """Select only the disabled tests to run."""
 
     self.RunAndVerify('DISABLED_FoobarTest.Test1', [])
     self.RunAndVerifyAllowingDisabled('DISABLED_FoobarTest.Test1',
                                       ['DISABLED_FoobarTest.Test1'])
 
     self.RunAndVerify('*DISABLED_*', [])
     self.RunAndVerifyAllowingDisabled('*DISABLED_*', DISABLED_TESTS)
 
     self.RunAndVerify('*.DISABLED_*', [])
     self.RunAndVerifyAllowingDisabled('*.DISABLED_*', [
         'BarTest.DISABLED_TestFour',
         'BarTest.DISABLED_TestFive',
         'BazTest.DISABLED_TestC',
         'DISABLED_FoobarTest.DISABLED_Test2',
         ])
 
     self.RunAndVerify('DISABLED_*', [])
     self.RunAndVerifyAllowingDisabled('DISABLED_*', [
         'DISABLED_FoobarTest.Test1',
         'DISABLED_FoobarTest.DISABLED_Test2',
         'DISABLED_FoobarbazTest.TestA',
         ])
 
   def testWildcardInTestCaseName(self):
     """Tests using wildcard in the test case name."""
 
     self.RunAndVerify('*a*.*', [
         'BarTest.TestOne',
         'BarTest.TestTwo',
         'BarTest.TestThree',
 
         'BazTest.TestOne',
         'BazTest.TestA',
         'BazTest.TestB', ] + DEATH_TESTS + PARAM_TESTS)
 
   def testWildcardInTestName(self):
     """Tests using wildcard in the test name."""
 
     self.RunAndVerify('*.*A*', ['FooTest.Abc', 'BazTest.TestA'])
 
   def testFilterWithoutDot(self):
     """Tests a filter that has no '.' in it."""
 
     self.RunAndVerify('*z*', [
         'FooTest.Xyz',
 
         'BazTest.TestOne',
         'BazTest.TestA',
         'BazTest.TestB',
         ])
 
   def testTwoPatterns(self):
     """Tests filters that consist of two patterns."""
 
     self.RunAndVerify('Foo*.*:*A*', [
         'FooTest.Abc',
         'FooTest.Xyz',
 
         'BazTest.TestA',
         ])
 
     # An empty pattern + a non-empty one
     self.RunAndVerify(':*A*', ['FooTest.Abc', 'BazTest.TestA'])
 
   def testThreePatterns(self):
     """Tests filters that consist of three patterns."""
 
     self.RunAndVerify('*oo*:*A*:*One', [
         'FooTest.Abc',
         'FooTest.Xyz',
 
         'BarTest.TestOne',
 
         'BazTest.TestOne',
         'BazTest.TestA',
         ])
 
     # The 2nd pattern is empty.
     self.RunAndVerify('*oo*::*One', [
         'FooTest.Abc',
         'FooTest.Xyz',
 
         'BarTest.TestOne',
 
         'BazTest.TestOne',
         ])
 
     # The last 2 patterns are empty.
     self.RunAndVerify('*oo*::', [
         'FooTest.Abc',
         'FooTest.Xyz',
         ])
 
   def testNegativeFilters(self):
     self.RunAndVerify('*-BazTest.TestOne', [
         'FooTest.Abc',
         'FooTest.Xyz',
 
         'BarTest.TestOne',
         'BarTest.TestTwo',
         'BarTest.TestThree',
 
         'BazTest.TestA',
         'BazTest.TestB',
         ] + DEATH_TESTS + PARAM_TESTS)
 
     self.RunAndVerify('*-FooTest.Abc:BazTest.*', [
         'FooTest.Xyz',
 
         'BarTest.TestOne',
         'BarTest.TestTwo',
         'BarTest.TestThree',
         ] + DEATH_TESTS + PARAM_TESTS)
 
     self.RunAndVerify('BarTest.*-BarTest.TestOne', [
         'BarTest.TestTwo',
         'BarTest.TestThree',
         ])
 
     # Tests without leading '*'.
     self.RunAndVerify('-FooTest.Abc:FooTest.Xyz:BazTest.*', [
         'BarTest.TestOne',
         'BarTest.TestTwo',
         'BarTest.TestThree',
         ] + DEATH_TESTS + PARAM_TESTS)
 
     # Value parameterized tests.
     self.RunAndVerify('*/*', PARAM_TESTS)
 
     # Value parameterized tests filtering by the sequence name.
     self.RunAndVerify('SeqP/*', [
         'SeqP/ParamTest.TestX/0',
         'SeqP/ParamTest.TestX/1',
         'SeqP/ParamTest.TestY/0',
         'SeqP/ParamTest.TestY/1',
         ])
 
     # Value parameterized tests filtering by the test name.
     self.RunAndVerify('*/0', [
         'SeqP/ParamTest.TestX/0',
         'SeqP/ParamTest.TestY/0',
         'SeqQ/ParamTest.TestX/0',
         'SeqQ/ParamTest.TestY/0',
         ])
 
   def testFlagOverridesEnvVar(self):
     """Tests that the filter flag overrides the filtering env. variable."""
 
     SetEnvVar(FILTER_ENV_VAR, 'Foo*')
     args = ['--%s=%s' % (FILTER_FLAG, '*One')]
     tests_run = RunAndExtractTestList(args)[0]
     SetEnvVar(FILTER_ENV_VAR, None)
 
     self.AssertSetEqual(tests_run, ['BarTest.TestOne', 'BazTest.TestOne'])
 
   def testShardStatusFileIsCreated(self):
     """Tests that the shard file is created if specified in the environment."""
 
     shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
                                      'shard_status_file')
     self.assert_(not os.path.exists(shard_status_file))
 
     extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
     try:
       InvokeWithModifiedEnv(extra_env, RunAndReturnOutput)
     finally:
       self.assert_(os.path.exists(shard_status_file))
       os.remove(shard_status_file)
 
   def testShardStatusFileIsCreatedWithListTests(self):
     """Tests that the shard file is created with the "list_tests" flag."""
 
     shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
                                      'shard_status_file2')
     self.assert_(not os.path.exists(shard_status_file))
 
     extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
     try:
       output = InvokeWithModifiedEnv(extra_env,
                                      RunAndReturnOutput,
                                      [LIST_TESTS_FLAG])
     finally:
       # This assertion ensures that Google Test enumerated the tests as
       # opposed to running them.
       self.assert_('[==========]' not in output,
                    'Unexpected output during test enumeration.\n'
                    'Please ensure that LIST_TESTS_FLAG is assigned the\n'
                    'correct flag value for listing Google Test tests.')
 
       self.assert_(os.path.exists(shard_status_file))
       os.remove(shard_status_file)
 
   if SUPPORTS_DEATH_TESTS:
     def testShardingWorksWithDeathTests(self):
       """Tests integration with death tests and sharding."""
 
       gtest_filter = 'HasDeathTest.*:SeqP/*'
       expected_tests = [
           'HasDeathTest.Test1',
           'HasDeathTest.Test2',
 
           'SeqP/ParamTest.TestX/0',
           'SeqP/ParamTest.TestX/1',
           'SeqP/ParamTest.TestY/0',
           'SeqP/ParamTest.TestY/1',
           ]
 
       for flag in ['--gtest_death_test_style=threadsafe',
                    '--gtest_death_test_style=fast']:
         self.RunAndVerifyWithSharding(gtest_filter, 3, expected_tests,
                                       check_exit_0=True, args=[flag])
         self.RunAndVerifyWithSharding(gtest_filter, 5, expected_tests,
                                       check_exit_0=True, args=[flag])
 
 if __name__ == '__main__':
   gtest_test_utils.Main()
diff --git a/googletest/test/gtest_filter_unittest_.cc b/googletest/test/googletest-filter-unittest_.cc
similarity index 100%
rename from googletest/test/gtest_filter_unittest_.cc
rename to googletest/test/googletest-filter-unittest_.cc
diff --git a/googletest/test/gtest_json_outfiles_test.py b/googletest/test/googletest-json-outfiles-test.py
similarity index 100%
rename from googletest/test/gtest_json_outfiles_test.py
rename to googletest/test/googletest-json-outfiles-test.py
diff --git a/googletest/test/gtest_json_output_unittest.py b/googletest/test/googletest-json-output-unittest.py
similarity index 98%
rename from googletest/test/gtest_json_output_unittest.py
rename to googletest/test/googletest-json-output-unittest.py
index 12047c4f..57dcd5fa 100644
--- a/googletest/test/gtest_json_output_unittest.py
+++ b/googletest/test/googletest-json-output-unittest.py
@@ -1,611 +1,618 @@
 #!/usr/bin/env python
 # Copyright 2018, Google Inc.
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following disclaimer
 # in the documentation and/or other materials provided with the
 # distribution.
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 """Unit test for the gtest_json_output module."""
 
 import datetime
 import errno
 import json
 import os
 import re
 import sys
 
 import gtest_json_test_utils
 import gtest_test_utils
 
 GTEST_FILTER_FLAG = '--gtest_filter'
 GTEST_LIST_TESTS_FLAG = '--gtest_list_tests'
 GTEST_OUTPUT_FLAG = '--gtest_output'
 GTEST_DEFAULT_OUTPUT_FILE = 'test_detail.json'
 GTEST_PROGRAM_NAME = 'gtest_xml_output_unittest_'
 
-SUPPORTS_STACK_TRACES = False
+# The flag indicating stacktraces are not supported
+NO_STACKTRACE_SUPPORT_FLAG = '--no_stacktrace_support'
+
+SUPPORTS_STACK_TRACES = NO_STACKTRACE_SUPPORT_FLAG not in sys.argv
 
 if SUPPORTS_STACK_TRACES:
   STACK_TRACE_TEMPLATE = '\nStack trace:\n*'
 else:
   STACK_TRACE_TEMPLATE = ''
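 # With the flag above, a platform without stack-trace support can invoke this
 # script as, e.g.,
 #   python googletest-json-output-unittest.py --no_stacktrace_support
 # so that the expected failure messages below are built without the
 # "Stack trace:" suffix; the flag is stripped from sys.argv in __main__ before
 # unittest parses the arguments.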
 
 EXPECTED_NON_EMPTY = {
     u'tests': 23,
     u'failures': 4,
     u'disabled': 2,
     u'errors': 0,
     u'timestamp': u'*',
     u'time': u'*',
     u'ad_hoc_property': u'42',
     u'name': u'AllTests',
     u'testsuites': [
         {
             u'name': u'SuccessfulTest',
             u'tests': 1,
             u'failures': 0,
             u'disabled': 0,
             u'errors': 0,
             u'time': u'*',
             u'testsuite': [
                 {
                     u'name': u'Succeeds',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'SuccessfulTest'
                 }
             ]
         },
         {
             u'name': u'FailedTest',
             u'tests': 1,
             u'failures': 1,
             u'disabled': 0,
             u'errors': 0,
             u'time': u'*',
             u'testsuite': [
                 {
                     u'name': u'Fails',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'FailedTest',
                     u'failures': [
                         {
                             u'failure':
                                 u'gtest_xml_output_unittest_.cc:*\n'
                                 u'Expected equality of these values:\n'
                                 u'  1\n  2' + STACK_TRACE_TEMPLATE,
                             u'type': u''
                         }
                     ]
                 }
             ]
         },
         {
             u'name': u'DisabledTest',
             u'tests': 1,
             u'failures': 0,
             u'disabled': 1,
             u'errors': 0,
             u'time': u'*',
             u'testsuite': [
                 {
                     u'name': u'DISABLED_test_not_run',
                     u'status': u'NOTRUN',
                     u'time': u'*',
                     u'classname': u'DisabledTest'
                 }
             ]
         },
         {
             u'name': u'MixedResultTest',
             u'tests': 3,
             u'failures': 1,
             u'disabled': 1,
             u'errors': 0,
             u'time': u'*',
             u'testsuite': [
                 {
                     u'name': u'Succeeds',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'MixedResultTest'
                 },
                 {
                     u'name': u'Fails',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'MixedResultTest',
                     u'failures': [
                         {
                             u'failure':
                                 u'gtest_xml_output_unittest_.cc:*\n'
                                 u'Expected equality of these values:\n'
                                 u'  1\n  2' + STACK_TRACE_TEMPLATE,
                             u'type': u''
                         },
                         {
                             u'failure':
                                 u'gtest_xml_output_unittest_.cc:*\n'
                                 u'Expected equality of these values:\n'
                                 u'  2\n  3' + STACK_TRACE_TEMPLATE,
                             u'type': u''
                         }
                     ]
                 },
                 {
                     u'name': u'DISABLED_test',
                     u'status': u'NOTRUN',
                     u'time': u'*',
                     u'classname': u'MixedResultTest'
                 }
             ]
         },
         {
             u'name': u'XmlQuotingTest',
             u'tests': 1,
             u'failures': 1,
             u'disabled': 0,
             u'errors': 0,
             u'time': u'*',
             u'testsuite': [
                 {
                     u'name': u'OutputsCData',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'XmlQuotingTest',
                     u'failures': [
                         {
                             u'failure':
                                 u'gtest_xml_output_unittest_.cc:*\n'
                                 u'Failed\nXML output: <?xml encoding="utf-8">'
                                 u'<top><![CDATA[cdata text]]></top>' +
                                 STACK_TRACE_TEMPLATE,
                             u'type': u''
                         }
                     ]
                 }
             ]
         },
         {
             u'name': u'InvalidCharactersTest',
             u'tests': 1,
             u'failures': 1,
             u'disabled': 0,
             u'errors': 0,
             u'time': u'*',
             u'testsuite': [
                 {
                     u'name': u'InvalidCharactersInMessage',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'InvalidCharactersTest',
                     u'failures': [
                         {
                             u'failure':
                                 u'gtest_xml_output_unittest_.cc:*\n'
                                 u'Failed\nInvalid characters in brackets'
                                 u' [\x01\x02]' + STACK_TRACE_TEMPLATE,
                             u'type': u''
                         }
                     ]
                 }
             ]
         },
         {
             u'name': u'PropertyRecordingTest',
             u'tests': 4,
             u'failures': 0,
             u'disabled': 0,
             u'errors': 0,
             u'time': u'*',
             u'SetUpTestCase': u'yes',
             u'TearDownTestCase': u'aye',
             u'testsuite': [
                 {
                     u'name': u'OneProperty',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'PropertyRecordingTest',
                     u'key_1': u'1'
                 },
                 {
                     u'name': u'IntValuedProperty',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'PropertyRecordingTest',
                     u'key_int': u'1'
                 },
                 {
                     u'name': u'ThreeProperties',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'PropertyRecordingTest',
                     u'key_1': u'1',
                     u'key_2': u'2',
                     u'key_3': u'3'
                 },
                 {
                     u'name': u'TwoValuesForOneKeyUsesLastValue',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'PropertyRecordingTest',
                     u'key_1': u'2'
                 }
             ]
         },
         {
             u'name': u'NoFixtureTest',
             u'tests': 3,
             u'failures': 0,
             u'disabled': 0,
             u'errors': 0,
             u'time': u'*',
             u'testsuite': [
                 {
                     u'name': u'RecordProperty',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'NoFixtureTest',
                     u'key': u'1'
                 },
                 {
                     u'name': u'ExternalUtilityThatCallsRecordIntValuedProperty',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'NoFixtureTest',
                     u'key_for_utility_int': u'1'
                 },
                 {
                     u'name':
                         u'ExternalUtilityThatCallsRecordStringValuedProperty',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'NoFixtureTest',
                     u'key_for_utility_string': u'1'
                 }
             ]
         },
         {
             u'name': u'TypedTest/0',
             u'tests': 1,
             u'failures': 0,
             u'disabled': 0,
             u'errors': 0,
             u'time': u'*',
             u'testsuite': [
                 {
                     u'name': u'HasTypeParamAttribute',
                     u'type_param': u'int',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'TypedTest/0'
                 }
             ]
         },
         {
             u'name': u'TypedTest/1',
             u'tests': 1,
             u'failures': 0,
             u'disabled': 0,
             u'errors': 0,
             u'time': u'*',
             u'testsuite': [
                 {
                     u'name': u'HasTypeParamAttribute',
                     u'type_param': u'long',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'TypedTest/1'
                 }
             ]
         },
         {
             u'name': u'Single/TypeParameterizedTestCase/0',
             u'tests': 1,
             u'failures': 0,
             u'disabled': 0,
             u'errors': 0,
             u'time': u'*',
             u'testsuite': [
                 {
                     u'name': u'HasTypeParamAttribute',
                     u'type_param': u'int',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'Single/TypeParameterizedTestCase/0'
                 }
             ]
         },
         {
             u'name': u'Single/TypeParameterizedTestCase/1',
             u'tests': 1,
             u'failures': 0,
             u'disabled': 0,
             u'errors': 0,
             u'time': u'*',
             u'testsuite': [
                 {
                     u'name': u'HasTypeParamAttribute',
                     u'type_param': u'long',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'Single/TypeParameterizedTestCase/1'
                 }
             ]
         },
         {
             u'name': u'Single/ValueParamTest',
             u'tests': 4,
             u'failures': 0,
             u'disabled': 0,
             u'errors': 0,
             u'time': u'*',
             u'testsuite': [
                 {
                     u'name': u'HasValueParamAttribute/0',
                     u'value_param': u'33',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'Single/ValueParamTest'
                 },
                 {
                     u'name': u'HasValueParamAttribute/1',
                     u'value_param': u'42',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'Single/ValueParamTest'
                 },
                 {
                     u'name': u'AnotherTestThatHasValueParamAttribute/0',
                     u'value_param': u'33',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'Single/ValueParamTest'
                 },
                 {
                     u'name': u'AnotherTestThatHasValueParamAttribute/1',
                     u'value_param': u'42',
                     u'status': u'RUN',
                     u'time': u'*',
                     u'classname': u'Single/ValueParamTest'
                 }
             ]
         }
     ]
 }
 
 EXPECTED_FILTERED = {
     u'tests': 1,
     u'failures': 0,
     u'disabled': 0,
     u'errors': 0,
     u'time': u'*',
     u'timestamp': u'*',
     u'name': u'AllTests',
     u'ad_hoc_property': u'42',
     u'testsuites': [{
         u'name': u'SuccessfulTest',
         u'tests': 1,
         u'failures': 0,
         u'disabled': 0,
         u'errors': 0,
         u'time': u'*',
         u'testsuite': [{
             u'name': u'Succeeds',
             u'status': u'RUN',
             u'time': u'*',
             u'classname': u'SuccessfulTest',
         }]
     }],
 }
 
 EXPECTED_EMPTY = {
     u'tests': 0,
     u'failures': 0,
     u'disabled': 0,
     u'errors': 0,
     u'time': u'*',
     u'timestamp': u'*',
     u'name': u'AllTests',
     u'testsuites': [],
 }
 
 GTEST_PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)
 
 SUPPORTS_TYPED_TESTS = 'TypedTest' in gtest_test_utils.Subprocess(
     [GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False).output
 
 
 class GTestJsonOutputUnitTest(gtest_test_utils.TestCase):
   """Unit test for Google Test's JSON output functionality.
   """
 
   # This test currently breaks on platforms that do not support typed and
   # type-parameterized tests, so we don't run it under them.
   if SUPPORTS_TYPED_TESTS:
 
     def testNonEmptyJsonOutput(self):
       """Verifies JSON output for a Google Test binary with non-empty output.
 
       Runs a test program that generates a non-empty JSON output, and
       tests that the JSON output is expected.
       """
       self._TestJsonOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY, 1)
 
   def testEmptyJsonOutput(self):
     """Verifies JSON output for a Google Test binary without actual tests.
 
     Runs a test program that generates an empty JSON output, and
     tests that the JSON output is expected.
     """
 
     self._TestJsonOutput('gtest_no_test_unittest', EXPECTED_EMPTY, 0)
 
   def testTimestampValue(self):
     """Checks whether the timestamp attribute in the JSON output is valid.
 
     Runs a test program that generates an empty JSON output, and checks if
     the timestamp attribute in the testsuites tag is valid.
     """
     actual = self._GetJsonOutput('gtest_no_test_unittest', [], 0)
     date_time_str = actual['timestamp']
     # datetime.strptime() is only available in Python 2.5+ so we have to
     # parse the expected datetime manually.
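     # (On Python 2.5+, the equivalent call would be
     #  datetime.datetime.strptime(date_time_str[:19], '%Y-%m-%dT%H:%M:%S').)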
     match = re.match(r'(\d+)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)', date_time_str)
     self.assertTrue(
         match,
         'JSON datetime string %s has incorrect format' % date_time_str)
     date_time_from_json = datetime.datetime(
         year=int(match.group(1)), month=int(match.group(2)),
         day=int(match.group(3)), hour=int(match.group(4)),
         minute=int(match.group(5)), second=int(match.group(6)))
 
     time_delta = abs(datetime.datetime.now() - date_time_from_json)
     # timestamp value should be near the current local time
     self.assertTrue(time_delta < datetime.timedelta(seconds=600),
                     'time_delta is %s' % time_delta)
 
   def testDefaultOutputFile(self):
     """Verifies the default output file name.
 
     Confirms that Google Test produces a JSON output file with the expected
     default name if no name is explicitly specified.
     """
     output_file = os.path.join(gtest_test_utils.GetTempDir(),
                                GTEST_DEFAULT_OUTPUT_FILE)
     gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
         'gtest_no_test_unittest')
     try:
       os.remove(output_file)
     except OSError:
       e = sys.exc_info()[1]
       if e.errno != errno.ENOENT:
         raise
 
     p = gtest_test_utils.Subprocess(
         [gtest_prog_path, '%s=json' % GTEST_OUTPUT_FLAG],
         working_dir=gtest_test_utils.GetTempDir())
     self.assert_(p.exited)
     self.assertEquals(0, p.exit_code)
     self.assert_(os.path.isfile(output_file))
 
   def testSuppressedJsonOutput(self):
     """Verifies that no JSON output is generated.
 
     Tests that no JSON file is generated if the default JSON listener is
     shut down before RUN_ALL_TESTS is invoked.
     """
 
     json_path = os.path.join(gtest_test_utils.GetTempDir(),
                              GTEST_PROGRAM_NAME + 'out.json')
     if os.path.isfile(json_path):
       os.remove(json_path)
 
     command = [GTEST_PROGRAM_PATH,
                '%s=json:%s' % (GTEST_OUTPUT_FLAG, json_path),
                '--shut_down_xml']
     p = gtest_test_utils.Subprocess(command)
     if p.terminated_by_signal:
       # p.signal is available only if p.terminated_by_signal is True.
       self.assertFalse(
           p.terminated_by_signal,
           '%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal))
     else:
       self.assert_(p.exited)
       self.assertEquals(1, p.exit_code,
                         "'%s' exited with code %s, which doesn't match "
                         'the expected exit code %s.'
                         % (command, p.exit_code, 1))
 
     self.assert_(not os.path.isfile(json_path))
 
   def testFilteredTestJsonOutput(self):
     """Verifies JSON output when a filter is applied.
 
     Runs a test program that executes only some tests and verifies that
     non-selected tests do not show up in the JSON output.
     """
 
     self._TestJsonOutput(GTEST_PROGRAM_NAME, EXPECTED_FILTERED, 0,
                          extra_args=['%s=SuccessfulTest.*' % GTEST_FILTER_FLAG])
 
   def _GetJsonOutput(self, gtest_prog_name, extra_args, expected_exit_code):
     """Returns the JSON output generated by running the program gtest_prog_name.
 
     Furthermore, the program's exit code must be expected_exit_code.
 
     Args:
       gtest_prog_name: Google Test binary name.
       extra_args: extra arguments to binary invocation.
       expected_exit_code: program's exit code.
     """
     json_path = os.path.join(gtest_test_utils.GetTempDir(),
                              gtest_prog_name + 'out.json')
     gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)
 
     command = (
         [gtest_prog_path, '%s=json:%s' % (GTEST_OUTPUT_FLAG, json_path)] +
         extra_args
     )
     p = gtest_test_utils.Subprocess(command)
     if p.terminated_by_signal:
       self.assert_(False,
                    '%s was killed by signal %d' % (gtest_prog_name, p.signal))
     else:
       self.assert_(p.exited)
       self.assertEquals(expected_exit_code, p.exit_code,
                         "'%s' exited with code %s, which doesn't match "
                         'the expected exit code %s.'
                         % (command, p.exit_code, expected_exit_code))
     with open(json_path) as f:
       actual = json.load(f)
     return actual
 
   def _TestJsonOutput(self, gtest_prog_name, expected,
                       expected_exit_code, extra_args=None):
     """Checks the JSON output generated by the Google Test binary.
 
     Asserts that the JSON document generated by running the program
     gtest_prog_name matches expected, the expected JSON document given as
     a Python value.  Furthermore, the program's exit code must be
     expected_exit_code.
 
     Args:
       gtest_prog_name: Google Test binary name.
       expected: expected output.
       expected_exit_code: program's exit code.
       extra_args: extra arguments to binary invocation.
     """
 
     actual = self._GetJsonOutput(gtest_prog_name, extra_args or [],
                                  expected_exit_code)
     self.assertEqual(expected, gtest_json_test_utils.normalize(actual))
 
 
 if __name__ == '__main__':
+  if NO_STACKTRACE_SUPPORT_FLAG in sys.argv:
+    # unittest.main() can't handle unknown flags
+    sys.argv.remove(NO_STACKTRACE_SUPPORT_FLAG)
+
   os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'
   gtest_test_utils.Main()
diff --git a/googletest/test/gtest-linked_ptr_test.cc b/googletest/test/googletest-linked-ptr-test.cc
similarity index 100%
rename from googletest/test/gtest-linked_ptr_test.cc
rename to googletest/test/googletest-linked-ptr-test.cc
diff --git a/googletest/test/gtest_list_tests_unittest.py b/googletest/test/googletest-list-tests-unittest.py
similarity index 92%
rename from googletest/test/gtest_list_tests_unittest.py
rename to googletest/test/googletest-list-tests-unittest.py
index ebf1a3c9..a38073a1 100755
--- a/googletest/test/gtest_list_tests_unittest.py
+++ b/googletest/test/googletest-list-tests-unittest.py
@@ -1,207 +1,207 @@
 #!/usr/bin/env python
 #
 # Copyright 2006, Google Inc.
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following disclaimer
 # in the documentation and/or other materials provided with the
 # distribution.
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 """Unit test for Google Test's --gtest_list_tests flag.
 
 A user can ask Google Test to list all tests by specifying the
 --gtest_list_tests flag.  This script tests such functionality
-by invoking gtest_list_tests_unittest_ (a program written with
+by invoking googletest-list-tests-unittest_ (a program written with
 Google Test) with the command line flags.
 """
 
 __author__ = 'phanna@google.com (Patrick Hanna)'
 
 import re
 import gtest_test_utils
 
 # Constants.
 
 # The command line flag for enabling/disabling listing all tests.
 LIST_TESTS_FLAG = 'gtest_list_tests'
 
-# Path to the gtest_list_tests_unittest_ program.
-EXE_PATH = gtest_test_utils.GetTestExecutablePath('gtest_list_tests_unittest_')
+# Path to the googletest-list-tests-unittest_ program.
+EXE_PATH = gtest_test_utils.GetTestExecutablePath('googletest-list-tests-unittest_')
 
-# The expected output when running gtest_list_tests_unittest_ with
+# The expected output when running googletest-list-tests-unittest_ with
 # --gtest_list_tests
 EXPECTED_OUTPUT_NO_FILTER_RE = re.compile(r"""FooDeathTest\.
   Test1
 Foo\.
   Bar1
   Bar2
   DISABLED_Bar3
 Abc\.
   Xyz
   Def
 FooBar\.
   Baz
 FooTest\.
   Test1
   DISABLED_Test2
   Test3
 TypedTest/0\.  # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\.
   TestA
   TestB
 TypedTest/1\.  # TypeParam = int\s*\*( __ptr64)?
   TestA
   TestB
 TypedTest/2\.  # TypeParam = .*MyArray<bool,\s*42>
   TestA
   TestB
 My/TypeParamTest/0\.  # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\.
   TestA
   TestB
 My/TypeParamTest/1\.  # TypeParam = int\s*\*( __ptr64)?
   TestA
   TestB
 My/TypeParamTest/2\.  # TypeParam = .*MyArray<bool,\s*42>
   TestA
   TestB
 MyInstantiation/ValueParamTest\.
   TestA/0  # GetParam\(\) = one line
   TestA/1  # GetParam\(\) = two\\nlines
   TestA/2  # GetParam\(\) = a very\\nlo{241}\.\.\.
   TestB/0  # GetParam\(\) = one line
   TestB/1  # GetParam\(\) = two\\nlines
   TestB/2  # GetParam\(\) = a very\\nlo{241}\.\.\.
 """)
 
-# The expected output when running gtest_list_tests_unittest_ with
+# The expected output when running googletest-list-tests-unittest_ with
 # --gtest_list_tests and --gtest_filter=Foo*.
 EXPECTED_OUTPUT_FILTER_FOO_RE = re.compile(r"""FooDeathTest\.
   Test1
 Foo\.
   Bar1
   Bar2
   DISABLED_Bar3
 FooBar\.
   Baz
 FooTest\.
   Test1
   DISABLED_Test2
   Test3
 """)
 
 # Utilities.
 
 
 def Run(args):
-  """Runs gtest_list_tests_unittest_ and returns the list of tests printed."""
+  """Runs googletest-list-tests-unittest_ and returns the list of tests printed."""
 
   return gtest_test_utils.Subprocess([EXE_PATH] + args,
                                      capture_stderr=False).output
 
 
 # The unit test.
 
 
 class GTestListTestsUnitTest(gtest_test_utils.TestCase):
   """Tests using the --gtest_list_tests flag to list all tests."""
 
   def RunAndVerify(self, flag_value, expected_output_re, other_flag):
-    """Runs gtest_list_tests_unittest_ and verifies that it prints
+    """Runs googletest-list-tests-unittest_ and verifies that it prints
     the correct tests.
 
     Args:
       flag_value:         value of the --gtest_list_tests flag;
                           None if the flag should not be present.
       expected_output_re: regular expression that matches the expected
                           output after running the command;
       other_flag:         a different flag to be passed to the command
                           along with gtest_list_tests;
                           None if the flag should not be present.
     """
 
     if flag_value is None:
       flag = ''
       flag_expression = 'not set'
     elif flag_value == '0':
       flag = '--%s=0' % LIST_TESTS_FLAG
       flag_expression = '0'
     else:
       flag = '--%s' % LIST_TESTS_FLAG
       flag_expression = '1'
 
     args = [flag]
 
     if other_flag is not None:
       args += [other_flag]
 
     output = Run(args)
 
     if expected_output_re:
       self.assert_(
           expected_output_re.match(output),
           ('when %s is %s, the output of "%s" is "%s",\n'
            'which does not match regex "%s"' %
            (LIST_TESTS_FLAG, flag_expression, ' '.join(args), output,
             expected_output_re.pattern)))
     else:
       self.assert_(
           not EXPECTED_OUTPUT_NO_FILTER_RE.match(output),
           ('when %s is %s, the output of "%s" is "%s"'%
            (LIST_TESTS_FLAG, flag_expression, ' '.join(args), output)))
 
   def testDefaultBehavior(self):
     """Tests the behavior of the default mode."""
 
     self.RunAndVerify(flag_value=None,
                       expected_output_re=None,
                       other_flag=None)
 
   def testFlag(self):
     """Tests using the --gtest_list_tests flag."""
 
     self.RunAndVerify(flag_value='0',
                       expected_output_re=None,
                       other_flag=None)
     self.RunAndVerify(flag_value='1',
                       expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
                       other_flag=None)
 
   def testOverrideNonFilterFlags(self):
     """Tests that --gtest_list_tests overrides the non-filter flags."""
 
     self.RunAndVerify(flag_value='1',
                       expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
                       other_flag='--gtest_break_on_failure')
 
   def testWithFilterFlags(self):
     """Tests that --gtest_list_tests takes into account the
     --gtest_filter flag."""
 
     self.RunAndVerify(flag_value='1',
                       expected_output_re=EXPECTED_OUTPUT_FILTER_FOO_RE,
                       other_flag='--gtest_filter=Foo*')
 
 
 if __name__ == '__main__':
   gtest_test_utils.Main()
diff --git a/googletest/test/gtest_list_tests_unittest_.cc b/googletest/test/googletest-list-tests-unittest_.cc
similarity index 100%
rename from googletest/test/gtest_list_tests_unittest_.cc
rename to googletest/test/googletest-list-tests-unittest_.cc
diff --git a/googletest/test/gtest-listener_test.cc b/googletest/test/googletest-listener-test.cc
similarity index 100%
rename from googletest/test/gtest-listener_test.cc
rename to googletest/test/googletest-listener-test.cc
diff --git a/googletest/test/gtest-message_test.cc b/googletest/test/googletest-message-test.cc
similarity index 100%
rename from googletest/test/gtest-message_test.cc
rename to googletest/test/googletest-message-test.cc
diff --git a/googletest/test/gtest-options_test.cc b/googletest/test/googletest-options-test.cc
similarity index 98%
rename from googletest/test/gtest-options_test.cc
rename to googletest/test/googletest-options-test.cc
index 10cb1df7..e4c0d04f 100644
--- a/googletest/test/gtest-options_test.cc
+++ b/googletest/test/googletest-options-test.cc
@@ -1,213 +1,213 @@
 // Copyright 2008, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 //
 // Google Test UnitTestOptions tests
 //
 // This file tests classes and functions used internally by
 // Google Test.  They are subject to change without notice.
 //
 // This file is #included from gtest.cc, to avoid changing build or
 // make-files on Windows and other platforms. Do not #include this file
 // anywhere else!
 
 #include "gtest/gtest.h"
 
 #if GTEST_OS_WINDOWS_MOBILE
 # include <windows.h>
 #elif GTEST_OS_WINDOWS
 # include <direct.h>
 #endif  // GTEST_OS_WINDOWS_MOBILE
 
 #include "src/gtest-internal-inl.h"
 
 namespace testing {
 namespace internal {
 namespace {
 
 // Turns the given relative path into an absolute path.
 FilePath GetAbsolutePathOf(const FilePath& relative_path) {
   return FilePath::ConcatPaths(FilePath::GetCurrentDir(), relative_path);
 }
 
 // Testing UnitTestOptions::GetOutputFormat/GetOutputFile.
 
 TEST(XmlOutputTest, GetOutputFormatDefault) {
   GTEST_FLAG(output) = "";
   EXPECT_STREQ("", UnitTestOptions::GetOutputFormat().c_str());
 }
 
 TEST(XmlOutputTest, GetOutputFormat) {
   GTEST_FLAG(output) = "xml:filename";
   EXPECT_STREQ("xml", UnitTestOptions::GetOutputFormat().c_str());
 }
 
 TEST(XmlOutputTest, GetOutputFileDefault) {
   GTEST_FLAG(output) = "";
   EXPECT_EQ(GetAbsolutePathOf(FilePath("test_detail.xml")).string(),
             UnitTestOptions::GetAbsolutePathToOutputFile());
 }
 
 TEST(XmlOutputTest, GetOutputFileSingleFile) {
   GTEST_FLAG(output) = "xml:filename.abc";
   EXPECT_EQ(GetAbsolutePathOf(FilePath("filename.abc")).string(),
             UnitTestOptions::GetAbsolutePathToOutputFile());
 }
 
 TEST(XmlOutputTest, GetOutputFileFromDirectoryPath) {
   GTEST_FLAG(output) = "xml:path" GTEST_PATH_SEP_;
   const std::string expected_output_file =
       GetAbsolutePathOf(
           FilePath(std::string("path") + GTEST_PATH_SEP_ +
                    GetCurrentExecutableName().string() + ".xml")).string();
   const std::string& output_file =
       UnitTestOptions::GetAbsolutePathToOutputFile();
 #if GTEST_OS_WINDOWS
   EXPECT_STRCASEEQ(expected_output_file.c_str(), output_file.c_str());
 #else
   EXPECT_EQ(expected_output_file, output_file.c_str());
 #endif
 }
 
 TEST(OutputFileHelpersTest, GetCurrentExecutableName) {
   const std::string exe_str = GetCurrentExecutableName().string();
 #if GTEST_OS_WINDOWS
   const bool success =
-      _strcmpi("gtest-options_test", exe_str.c_str()) == 0 ||
+      _strcmpi("googletest-options-test", exe_str.c_str()) == 0 ||
       _strcmpi("gtest-options-ex_test", exe_str.c_str()) == 0 ||
       _strcmpi("gtest_all_test", exe_str.c_str()) == 0 ||
       _strcmpi("gtest_dll_test", exe_str.c_str()) == 0;
 #elif GTEST_OS_FUCHSIA
   const bool success = exe_str == "app";
 #else
   // TODO(wan@google.com): remove the hard-coded "lt-" prefix when
   //   Chandler Carruth's libtool replacement is ready.
   const bool success =
-      exe_str == "gtest-options_test" ||
+      exe_str == "googletest-options-test" ||
       exe_str == "gtest_all_test" ||
       exe_str == "lt-gtest_all_test" ||
       exe_str == "gtest_dll_test";
 #endif  // GTEST_OS_WINDOWS
   if (!success)
     FAIL() << "GetCurrentExecutableName() returns " << exe_str;
 }
 
 #if !GTEST_OS_FUCHSIA
 
 class XmlOutputChangeDirTest : public Test {
  protected:
   virtual void SetUp() {
     original_working_dir_ = FilePath::GetCurrentDir();
     posix::ChDir("..");
     // This will make the test fail if run from the root directory.
     EXPECT_NE(original_working_dir_.string(),
               FilePath::GetCurrentDir().string());
   }
 
   virtual void TearDown() {
     posix::ChDir(original_working_dir_.string().c_str());
   }
 
   FilePath original_working_dir_;
 };
 
 TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithDefault) {
   GTEST_FLAG(output) = "";
   EXPECT_EQ(FilePath::ConcatPaths(original_working_dir_,
                                   FilePath("test_detail.xml")).string(),
             UnitTestOptions::GetAbsolutePathToOutputFile());
 }
 
 TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithDefaultXML) {
   GTEST_FLAG(output) = "xml";
   EXPECT_EQ(FilePath::ConcatPaths(original_working_dir_,
                                   FilePath("test_detail.xml")).string(),
             UnitTestOptions::GetAbsolutePathToOutputFile());
 }
 
 TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithRelativeFile) {
   GTEST_FLAG(output) = "xml:filename.abc";
   EXPECT_EQ(FilePath::ConcatPaths(original_working_dir_,
                                   FilePath("filename.abc")).string(),
             UnitTestOptions::GetAbsolutePathToOutputFile());
 }
 
 TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithRelativePath) {
   GTEST_FLAG(output) = "xml:path" GTEST_PATH_SEP_;
   const std::string expected_output_file =
       FilePath::ConcatPaths(
           original_working_dir_,
           FilePath(std::string("path") + GTEST_PATH_SEP_ +
                    GetCurrentExecutableName().string() + ".xml")).string();
   const std::string& output_file =
       UnitTestOptions::GetAbsolutePathToOutputFile();
 #if GTEST_OS_WINDOWS
   EXPECT_STRCASEEQ(expected_output_file.c_str(), output_file.c_str());
 #else
   EXPECT_EQ(expected_output_file, output_file.c_str());
 #endif
 }
 
 TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithAbsoluteFile) {
 #if GTEST_OS_WINDOWS
   GTEST_FLAG(output) = "xml:c:\\tmp\\filename.abc";
   EXPECT_EQ(FilePath("c:\\tmp\\filename.abc").string(),
             UnitTestOptions::GetAbsolutePathToOutputFile());
 #else
   GTEST_FLAG(output) ="xml:/tmp/filename.abc";
   EXPECT_EQ(FilePath("/tmp/filename.abc").string(),
             UnitTestOptions::GetAbsolutePathToOutputFile());
 #endif
 }
 
 TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithAbsolutePath) {
 #if GTEST_OS_WINDOWS
   const std::string path = "c:\\tmp\\";
 #else
   const std::string path = "/tmp/";
 #endif
 
   GTEST_FLAG(output) = "xml:" + path;
   const std::string expected_output_file =
       path + GetCurrentExecutableName().string() + ".xml";
   const std::string& output_file =
       UnitTestOptions::GetAbsolutePathToOutputFile();
 
 #if GTEST_OS_WINDOWS
   EXPECT_STRCASEEQ(expected_output_file.c_str(), output_file.c_str());
 #else
   EXPECT_EQ(expected_output_file, output_file.c_str());
 #endif
 }
 
 #endif  // !GTEST_OS_FUCHSIA
 
 }  // namespace
 }  // namespace internal
 }  // namespace testing
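
The XmlOutputTest and XmlOutputChangeDirTest cases above pin down how the --gtest_output value is resolved to an output path: an empty value or a bare "xml" falls back to test_detail.xml in the current directory, "xml:FILE" names the file directly, and a value ending in a path separator names a directory to which the test executable's name plus ".xml" is appended. A minimal Python sketch of that mapping, consistent with the assertions above (output_file_for is an invented helper for illustration, not the UnitTestOptions API):

import os

DEFAULT_OUTPUT_FILE = "test_detail.xml"

def output_file_for(flag_value, executable_name="googletest-options-test"):
    # "" and a bare "xml" both fall back to the default file in the current
    # working directory.
    if ":" not in flag_value:
        path = DEFAULT_OUTPUT_FILE
    else:
        path = flag_value.split(":", 1)[1]
        # A value ending in a path separator names a directory; the file name
        # is derived from the test executable.
        if path.endswith(os.sep):
            path = os.path.join(path, executable_name + ".xml")
    # Relative paths are resolved against the current working directory,
    # mirroring GetAbsolutePathOf() in the tests above.
    return os.path.abspath(path)

if __name__ == "__main__":
    for flag in ("", "xml", "xml:filename.abc", "xml:path" + os.sep):
        print(repr(flag), "->", output_file_for(flag))
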
diff --git a/googletest/test/gtest_output_test_golden_lin.txt b/googletest/test/googletest-output-test-golden-lin.txt
similarity index 88%
rename from googletest/test/gtest_output_test_golden_lin.txt
rename to googletest/test/googletest-output-test-golden-lin.txt
index 02a77a8e..b0c51898 100644
--- a/googletest/test/gtest_output_test_golden_lin.txt
+++ b/googletest/test/googletest-output-test-golden-lin.txt
@@ -1,997 +1,997 @@
 The non-test part of the code is expected to have 2 failures.
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Value of: false
   Actual: false
 Expected: true
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   2
   3
 Stack trace: (omitted)
 
 [==========] Running 68 tests from 30 test cases.
 [----------] Global test environment set-up.
 FooEnvironment::SetUp() called.
 BarEnvironment::SetUp() called.
 [----------] 1 test from ADeathTest
 [ RUN      ] ADeathTest.ShouldRunFirst
 [       OK ] ADeathTest.ShouldRunFirst
 [----------] 1 test from ATypedDeathTest/0, where TypeParam = int
 [ RUN      ] ATypedDeathTest/0.ShouldRunFirst
 [       OK ] ATypedDeathTest/0.ShouldRunFirst
 [----------] 1 test from ATypedDeathTest/1, where TypeParam = double
 [ RUN      ] ATypedDeathTest/1.ShouldRunFirst
 [       OK ] ATypedDeathTest/1.ShouldRunFirst
 [----------] 1 test from My/ATypeParamDeathTest/0, where TypeParam = int
 [ RUN      ] My/ATypeParamDeathTest/0.ShouldRunFirst
 [       OK ] My/ATypeParamDeathTest/0.ShouldRunFirst
 [----------] 1 test from My/ATypeParamDeathTest/1, where TypeParam = double
 [ RUN      ] My/ATypeParamDeathTest/1.ShouldRunFirst
 [       OK ] My/ATypeParamDeathTest/1.ShouldRunFirst
 [----------] 2 tests from PassingTest
 [ RUN      ] PassingTest.PassingTest1
 [       OK ] PassingTest.PassingTest1
 [ RUN      ] PassingTest.PassingTest2
 [       OK ] PassingTest.PassingTest2
 [----------] 2 tests from NonfatalFailureTest
 [ RUN      ] NonfatalFailureTest.EscapesStringOperands
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   kGoldenString
     Which is: "\"Line"
   actual
     Which is: "actual \"string\""
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   golden
     Which is: "\"Line"
   actual
     Which is: "actual \"string\""
 Stack trace: (omitted)
 
 [  FAILED  ] NonfatalFailureTest.EscapesStringOperands
 [ RUN      ] NonfatalFailureTest.DiffForLongStrings
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   golden_str
     Which is: "\"Line\0 1\"\nLine 2"
   "Line 2"
 With diff:
 @@ -1,2 @@
 -\"Line\0 1\"
  Line 2
 
 Stack trace: (omitted)
 
 [  FAILED  ] NonfatalFailureTest.DiffForLongStrings
 [----------] 3 tests from FatalFailureTest
 [ RUN      ] FatalFailureTest.FatalFailureInSubroutine
 (expecting a failure that x should be 1)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   1
   x
     Which is: 2
 Stack trace: (omitted)
 
 [  FAILED  ] FatalFailureTest.FatalFailureInSubroutine
 [ RUN      ] FatalFailureTest.FatalFailureInNestedSubroutine
 (expecting a failure that x should be 1)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   1
   x
     Which is: 2
 Stack trace: (omitted)
 
 [  FAILED  ] FatalFailureTest.FatalFailureInNestedSubroutine
 [ RUN      ] FatalFailureTest.NonfatalFailureInSubroutine
 (expecting a failure on false)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Value of: false
   Actual: false
 Expected: true
 Stack trace: (omitted)
 
 [  FAILED  ] FatalFailureTest.NonfatalFailureInSubroutine
 [----------] 1 test from LoggingTest
 [ RUN      ] LoggingTest.InterleavingLoggingAndAssertions
 (expecting 2 failures on (3) >= (a[i]))
 i == 0
 i == 1
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected: (3) >= (a[i]), actual: 3 vs 9
 Stack trace: (omitted)
 
 i == 2
 i == 3
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected: (3) >= (a[i]), actual: 3 vs 6
 Stack trace: (omitted)
 
 [  FAILED  ] LoggingTest.InterleavingLoggingAndAssertions
 [----------] 7 tests from SCOPED_TRACETest
 [ RUN      ] SCOPED_TRACETest.AcceptedValues
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Just checking that all these values work fine.
 Google Test trace:
-gtest_output_test_.cc:#: (null)
-gtest_output_test_.cc:#: 1337
-gtest_output_test_.cc:#: std::string
-gtest_output_test_.cc:#: literal string
+googletest-output-test_.cc:#: (null)
+googletest-output-test_.cc:#: 1337
+googletest-output-test_.cc:#: std::string
+googletest-output-test_.cc:#: literal string
 Stack trace: (omitted)
 
 [  FAILED  ] SCOPED_TRACETest.AcceptedValues
 [ RUN      ] SCOPED_TRACETest.ObeysScopes
 (expected to fail)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 This failure is expected, and shouldn't have a trace.
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 This failure is expected, and should have a trace.
 Google Test trace:
-gtest_output_test_.cc:#: Expected trace
+googletest-output-test_.cc:#: Expected trace
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 This failure is expected, and shouldn't have a trace.
 Stack trace: (omitted)
 
 [  FAILED  ] SCOPED_TRACETest.ObeysScopes
 [ RUN      ] SCOPED_TRACETest.WorksInLoop
 (expected to fail)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   2
   n
     Which is: 1
 Google Test trace:
-gtest_output_test_.cc:#: i = 1
+googletest-output-test_.cc:#: i = 1
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   1
   n
     Which is: 2
 Google Test trace:
-gtest_output_test_.cc:#: i = 2
+googletest-output-test_.cc:#: i = 2
 Stack trace: (omitted)
 
 [  FAILED  ] SCOPED_TRACETest.WorksInLoop
 [ RUN      ] SCOPED_TRACETest.WorksInSubroutine
 (expected to fail)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   2
   n
     Which is: 1
 Google Test trace:
-gtest_output_test_.cc:#: n = 1
+googletest-output-test_.cc:#: n = 1
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   1
   n
     Which is: 2
 Google Test trace:
-gtest_output_test_.cc:#: n = 2
+googletest-output-test_.cc:#: n = 2
 Stack trace: (omitted)
 
 [  FAILED  ] SCOPED_TRACETest.WorksInSubroutine
 [ RUN      ] SCOPED_TRACETest.CanBeNested
 (expected to fail)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   1
   n
     Which is: 2
 Google Test trace:
-gtest_output_test_.cc:#: n = 2
-gtest_output_test_.cc:#: 
+googletest-output-test_.cc:#: n = 2
+googletest-output-test_.cc:#: 
 Stack trace: (omitted)
 
 [  FAILED  ] SCOPED_TRACETest.CanBeNested
 [ RUN      ] SCOPED_TRACETest.CanBeRepeated
 (expected to fail)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 This failure is expected, and should contain trace point A.
 Google Test trace:
-gtest_output_test_.cc:#: A
+googletest-output-test_.cc:#: A
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 This failure is expected, and should contain trace point A and B.
 Google Test trace:
-gtest_output_test_.cc:#: B
-gtest_output_test_.cc:#: A
+googletest-output-test_.cc:#: B
+googletest-output-test_.cc:#: A
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 This failure is expected, and should contain trace point A, B, and C.
 Google Test trace:
-gtest_output_test_.cc:#: C
-gtest_output_test_.cc:#: B
-gtest_output_test_.cc:#: A
+googletest-output-test_.cc:#: C
+googletest-output-test_.cc:#: B
+googletest-output-test_.cc:#: A
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 This failure is expected, and should contain trace point A, B, and D.
 Google Test trace:
-gtest_output_test_.cc:#: D
-gtest_output_test_.cc:#: B
-gtest_output_test_.cc:#: A
+googletest-output-test_.cc:#: D
+googletest-output-test_.cc:#: B
+googletest-output-test_.cc:#: A
 Stack trace: (omitted)
 
 [  FAILED  ] SCOPED_TRACETest.CanBeRepeated
 [ RUN      ] SCOPED_TRACETest.WorksConcurrently
 (expecting 6 failures)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #1 (in thread B, only trace B alive).
 Google Test trace:
-gtest_output_test_.cc:#: Trace B
+googletest-output-test_.cc:#: Trace B
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #2 (in thread A, trace A & B both alive).
 Google Test trace:
-gtest_output_test_.cc:#: Trace A
+googletest-output-test_.cc:#: Trace A
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #3 (in thread B, trace A & B both alive).
 Google Test trace:
-gtest_output_test_.cc:#: Trace B
+googletest-output-test_.cc:#: Trace B
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #4 (in thread B, only trace A alive).
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #5 (in thread A, only trace A alive).
 Google Test trace:
-gtest_output_test_.cc:#: Trace A
+googletest-output-test_.cc:#: Trace A
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #6 (in thread A, no trace alive).
 Stack trace: (omitted)
 
 [  FAILED  ] SCOPED_TRACETest.WorksConcurrently
 [----------] 1 test from ScopedTraceTest
 [ RUN      ] ScopedTraceTest.WithExplicitFileAndLine
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Check that the trace is attached to a particular location.
 Google Test trace:
 explicit_file.cc:123: expected trace message
 Stack trace: (omitted)
 
 [  FAILED  ] ScopedTraceTest.WithExplicitFileAndLine
 [----------] 1 test from NonFatalFailureInFixtureConstructorTest
 [ RUN      ] NonFatalFailureInFixtureConstructorTest.FailureInConstructor
 (expecting 5 failures)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #1, in the test fixture c'tor.
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #2, in SetUp().
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #3, in the test body.
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #4, in TearDown.
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #5, in the test fixture d'tor.
 Stack trace: (omitted)
 
 [  FAILED  ] NonFatalFailureInFixtureConstructorTest.FailureInConstructor
 [----------] 1 test from FatalFailureInFixtureConstructorTest
 [ RUN      ] FatalFailureInFixtureConstructorTest.FailureInConstructor
 (expecting 2 failures)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #1, in the test fixture c'tor.
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #2, in the test fixture d'tor.
 Stack trace: (omitted)
 
 [  FAILED  ] FatalFailureInFixtureConstructorTest.FailureInConstructor
 [----------] 1 test from NonFatalFailureInSetUpTest
 [ RUN      ] NonFatalFailureInSetUpTest.FailureInSetUp
 (expecting 4 failures)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #1, in SetUp().
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #2, in the test function.
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #3, in TearDown().
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #4, in the test fixture d'tor.
 Stack trace: (omitted)
 
 [  FAILED  ] NonFatalFailureInSetUpTest.FailureInSetUp
 [----------] 1 test from FatalFailureInSetUpTest
 [ RUN      ] FatalFailureInSetUpTest.FailureInSetUp
 (expecting 3 failures)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #1, in SetUp().
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #2, in TearDown().
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected failure #3, in the test fixture d'tor.
 Stack trace: (omitted)
 
 [  FAILED  ] FatalFailureInSetUpTest.FailureInSetUp
 [----------] 1 test from AddFailureAtTest
 [ RUN      ] AddFailureAtTest.MessageContainsSpecifiedFileAndLineNumber
 foo.cc:42: Failure
 Failed
 Expected failure in foo.cc
 Stack trace: (omitted)
 
 [  FAILED  ] AddFailureAtTest.MessageContainsSpecifiedFileAndLineNumber
 [----------] 4 tests from MixedUpTestCaseTest
 [ RUN      ] MixedUpTestCaseTest.FirstTestFromNamespaceFoo
 [       OK ] MixedUpTestCaseTest.FirstTestFromNamespaceFoo
 [ RUN      ] MixedUpTestCaseTest.SecondTestFromNamespaceFoo
 [       OK ] MixedUpTestCaseTest.SecondTestFromNamespaceFoo
 [ RUN      ] MixedUpTestCaseTest.ThisShouldFail
 gtest.cc:#: Failure
 Failed
 All tests in the same test case must use the same test fixture
 class.  However, in test case MixedUpTestCaseTest,
 you defined test FirstTestFromNamespaceFoo and test ThisShouldFail
 using two different test fixture classes.  This can happen if
 the two classes are from different namespaces or translation
 units and have the same name.  You should probably rename one
 of the classes to put the tests into different test cases.
 Stack trace: (omitted)
 
 [  FAILED  ] MixedUpTestCaseTest.ThisShouldFail
 [ RUN      ] MixedUpTestCaseTest.ThisShouldFailToo
 gtest.cc:#: Failure
 Failed
 All tests in the same test case must use the same test fixture
 class.  However, in test case MixedUpTestCaseTest,
 you defined test FirstTestFromNamespaceFoo and test ThisShouldFailToo
 using two different test fixture classes.  This can happen if
 the two classes are from different namespaces or translation
 units and have the same name.  You should probably rename one
 of the classes to put the tests into different test cases.
 Stack trace: (omitted)
 
 [  FAILED  ] MixedUpTestCaseTest.ThisShouldFailToo
 [----------] 2 tests from MixedUpTestCaseWithSameTestNameTest
 [ RUN      ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
 [       OK ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
 [ RUN      ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
 gtest.cc:#: Failure
 Failed
 All tests in the same test case must use the same test fixture
 class.  However, in test case MixedUpTestCaseWithSameTestNameTest,
 you defined test TheSecondTestWithThisNameShouldFail and test TheSecondTestWithThisNameShouldFail
 using two different test fixture classes.  This can happen if
 the two classes are from different namespaces or translation
 units and have the same name.  You should probably rename one
 of the classes to put the tests into different test cases.
 Stack trace: (omitted)
 
 [  FAILED  ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
 [----------] 2 tests from TEST_F_before_TEST_in_same_test_case
 [ RUN      ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTEST_F
 [       OK ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTEST_F
 [ RUN      ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail
 gtest.cc:#: Failure
 Failed
 All tests in the same test case must use the same test fixture
 class, so mixing TEST_F and TEST in the same test case is
 illegal.  In test case TEST_F_before_TEST_in_same_test_case,
 test DefinedUsingTEST_F is defined using TEST_F but
 test DefinedUsingTESTAndShouldFail is defined using TEST.  You probably
 want to change the TEST to TEST_F or move it to another test
 case.
 Stack trace: (omitted)
 
 [  FAILED  ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail
 [----------] 2 tests from TEST_before_TEST_F_in_same_test_case
 [ RUN      ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST
 [       OK ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST
 [ RUN      ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail
 gtest.cc:#: Failure
 Failed
 All tests in the same test case must use the same test fixture
 class, so mixing TEST_F and TEST in the same test case is
 illegal.  In test case TEST_before_TEST_F_in_same_test_case,
 test DefinedUsingTEST_FAndShouldFail is defined using TEST_F but
 test DefinedUsingTEST is defined using TEST.  You probably
 want to change the TEST to TEST_F or move it to another test
 case.
 Stack trace: (omitted)
 
 [  FAILED  ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail
 [----------] 8 tests from ExpectNonfatalFailureTest
 [ RUN      ] ExpectNonfatalFailureTest.CanReferenceGlobalVariables
 [       OK ] ExpectNonfatalFailureTest.CanReferenceGlobalVariables
 [ RUN      ] ExpectNonfatalFailureTest.CanReferenceLocalVariables
 [       OK ] ExpectNonfatalFailureTest.CanReferenceLocalVariables
 [ RUN      ] ExpectNonfatalFailureTest.SucceedsWhenThereIsOneNonfatalFailure
 [       OK ] ExpectNonfatalFailureTest.SucceedsWhenThereIsOneNonfatalFailure
 [ RUN      ] ExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure
 (expecting a failure)
 gtest.cc:#: Failure
 Expected: 1 non-fatal failure
   Actual: 0 failures
 Stack trace: (omitted)
 
 [  FAILED  ] ExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure
 [ RUN      ] ExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures
 (expecting a failure)
 gtest.cc:#: Failure
 Expected: 1 non-fatal failure
   Actual: 2 failures
-gtest_output_test_.cc:#: Non-fatal failure:
+googletest-output-test_.cc:#: Non-fatal failure:
 Failed
 Expected non-fatal failure 1.
 Stack trace: (omitted)
 
 
-gtest_output_test_.cc:#: Non-fatal failure:
+googletest-output-test_.cc:#: Non-fatal failure:
 Failed
 Expected non-fatal failure 2.
 Stack trace: (omitted)
 
 
 Stack trace: (omitted)
 
 [  FAILED  ] ExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures
 [ RUN      ] ExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure
 (expecting a failure)
 gtest.cc:#: Failure
 Expected: 1 non-fatal failure
   Actual:
-gtest_output_test_.cc:#: Fatal failure:
+googletest-output-test_.cc:#: Fatal failure:
 Failed
 Expected fatal failure.
 Stack trace: (omitted)
 
 
 Stack trace: (omitted)
 
 [  FAILED  ] ExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure
 [ RUN      ] ExpectNonfatalFailureTest.FailsWhenStatementReturns
 (expecting a failure)
 gtest.cc:#: Failure
 Expected: 1 non-fatal failure
   Actual: 0 failures
 Stack trace: (omitted)
 
 [  FAILED  ] ExpectNonfatalFailureTest.FailsWhenStatementReturns
 [ RUN      ] ExpectNonfatalFailureTest.FailsWhenStatementThrows
 (expecting a failure)
 gtest.cc:#: Failure
 Expected: 1 non-fatal failure
   Actual: 0 failures
 Stack trace: (omitted)
 
 [  FAILED  ] ExpectNonfatalFailureTest.FailsWhenStatementThrows
 [----------] 8 tests from ExpectFatalFailureTest
 [ RUN      ] ExpectFatalFailureTest.CanReferenceGlobalVariables
 [       OK ] ExpectFatalFailureTest.CanReferenceGlobalVariables
 [ RUN      ] ExpectFatalFailureTest.CanReferenceLocalStaticVariables
 [       OK ] ExpectFatalFailureTest.CanReferenceLocalStaticVariables
 [ RUN      ] ExpectFatalFailureTest.SucceedsWhenThereIsOneFatalFailure
 [       OK ] ExpectFatalFailureTest.SucceedsWhenThereIsOneFatalFailure
 [ RUN      ] ExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure
 (expecting a failure)
 gtest.cc:#: Failure
 Expected: 1 fatal failure
   Actual: 0 failures
 Stack trace: (omitted)
 
 [  FAILED  ] ExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure
 [ RUN      ] ExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures
 (expecting a failure)
 gtest.cc:#: Failure
 Expected: 1 fatal failure
   Actual: 2 failures
-gtest_output_test_.cc:#: Fatal failure:
+googletest-output-test_.cc:#: Fatal failure:
 Failed
 Expected fatal failure.
 Stack trace: (omitted)
 
 
-gtest_output_test_.cc:#: Fatal failure:
+googletest-output-test_.cc:#: Fatal failure:
 Failed
 Expected fatal failure.
 Stack trace: (omitted)
 
 
 Stack trace: (omitted)
 
 [  FAILED  ] ExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures
 [ RUN      ] ExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure
 (expecting a failure)
 gtest.cc:#: Failure
 Expected: 1 fatal failure
   Actual:
-gtest_output_test_.cc:#: Non-fatal failure:
+googletest-output-test_.cc:#: Non-fatal failure:
 Failed
 Expected non-fatal failure.
 Stack trace: (omitted)
 
 
 Stack trace: (omitted)
 
 [  FAILED  ] ExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure
 [ RUN      ] ExpectFatalFailureTest.FailsWhenStatementReturns
 (expecting a failure)
 gtest.cc:#: Failure
 Expected: 1 fatal failure
   Actual: 0 failures
 Stack trace: (omitted)
 
 [  FAILED  ] ExpectFatalFailureTest.FailsWhenStatementReturns
 [ RUN      ] ExpectFatalFailureTest.FailsWhenStatementThrows
 (expecting a failure)
 gtest.cc:#: Failure
 Expected: 1 fatal failure
   Actual: 0 failures
 Stack trace: (omitted)
 
 [  FAILED  ] ExpectFatalFailureTest.FailsWhenStatementThrows
 [----------] 2 tests from TypedTest/0, where TypeParam = int
 [ RUN      ] TypedTest/0.Success
 [       OK ] TypedTest/0.Success
 [ RUN      ] TypedTest/0.Failure
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   1
   TypeParam()
     Which is: 0
 Expected failure
 Stack trace: (omitted)
 
 [  FAILED  ] TypedTest/0.Failure, where TypeParam = int
 [----------] 2 tests from Unsigned/TypedTestP/0, where TypeParam = unsigned char
 [ RUN      ] Unsigned/TypedTestP/0.Success
 [       OK ] Unsigned/TypedTestP/0.Success
 [ RUN      ] Unsigned/TypedTestP/0.Failure
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   1U
     Which is: 1
   TypeParam()
     Which is: '\0'
 Expected failure
 Stack trace: (omitted)
 
 [  FAILED  ] Unsigned/TypedTestP/0.Failure, where TypeParam = unsigned char
 [----------] 2 tests from Unsigned/TypedTestP/1, where TypeParam = unsigned int
 [ RUN      ] Unsigned/TypedTestP/1.Success
 [       OK ] Unsigned/TypedTestP/1.Success
 [ RUN      ] Unsigned/TypedTestP/1.Failure
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   1U
     Which is: 1
   TypeParam()
     Which is: 0
 Expected failure
 Stack trace: (omitted)
 
 [  FAILED  ] Unsigned/TypedTestP/1.Failure, where TypeParam = unsigned int
 [----------] 4 tests from ExpectFailureTest
 [ RUN      ] ExpectFailureTest.ExpectFatalFailure
 (expecting 1 failure)
 gtest.cc:#: Failure
 Expected: 1 fatal failure
   Actual:
-gtest_output_test_.cc:#: Success:
+googletest-output-test_.cc:#: Success:
 Succeeded
 Stack trace: (omitted)
 
 
 Stack trace: (omitted)
 
 (expecting 1 failure)
 gtest.cc:#: Failure
 Expected: 1 fatal failure
   Actual:
-gtest_output_test_.cc:#: Non-fatal failure:
+googletest-output-test_.cc:#: Non-fatal failure:
 Failed
 Expected non-fatal failure.
 Stack trace: (omitted)
 
 
 Stack trace: (omitted)
 
 (expecting 1 failure)
 gtest.cc:#: Failure
 Expected: 1 fatal failure containing "Some other fatal failure expected."
   Actual:
-gtest_output_test_.cc:#: Fatal failure:
+googletest-output-test_.cc:#: Fatal failure:
 Failed
 Expected fatal failure.
 Stack trace: (omitted)
 
 
 Stack trace: (omitted)
 
 [  FAILED  ] ExpectFailureTest.ExpectFatalFailure
 [ RUN      ] ExpectFailureTest.ExpectNonFatalFailure
 (expecting 1 failure)
 gtest.cc:#: Failure
 Expected: 1 non-fatal failure
   Actual:
-gtest_output_test_.cc:#: Success:
+googletest-output-test_.cc:#: Success:
 Succeeded
 Stack trace: (omitted)
 
 
 Stack trace: (omitted)
 
 (expecting 1 failure)
 gtest.cc:#: Failure
 Expected: 1 non-fatal failure
   Actual:
-gtest_output_test_.cc:#: Fatal failure:
+googletest-output-test_.cc:#: Fatal failure:
 Failed
 Expected fatal failure.
 Stack trace: (omitted)
 
 
 Stack trace: (omitted)
 
 (expecting 1 failure)
 gtest.cc:#: Failure
 Expected: 1 non-fatal failure containing "Some other non-fatal failure."
   Actual:
-gtest_output_test_.cc:#: Non-fatal failure:
+googletest-output-test_.cc:#: Non-fatal failure:
 Failed
 Expected non-fatal failure.
 Stack trace: (omitted)
 
 
 Stack trace: (omitted)
 
 [  FAILED  ] ExpectFailureTest.ExpectNonFatalFailure
 [ RUN      ] ExpectFailureTest.ExpectFatalFailureOnAllThreads
 (expecting 1 failure)
 gtest.cc:#: Failure
 Expected: 1 fatal failure
   Actual:
-gtest_output_test_.cc:#: Success:
+googletest-output-test_.cc:#: Success:
 Succeeded
 Stack trace: (omitted)
 
 
 Stack trace: (omitted)
 
 (expecting 1 failure)
 gtest.cc:#: Failure
 Expected: 1 fatal failure
   Actual:
-gtest_output_test_.cc:#: Non-fatal failure:
+googletest-output-test_.cc:#: Non-fatal failure:
 Failed
 Expected non-fatal failure.
 Stack trace: (omitted)
 
 
 Stack trace: (omitted)
 
 (expecting 1 failure)
 gtest.cc:#: Failure
 Expected: 1 fatal failure containing "Some other fatal failure expected."
   Actual:
-gtest_output_test_.cc:#: Fatal failure:
+googletest-output-test_.cc:#: Fatal failure:
 Failed
 Expected fatal failure.
 Stack trace: (omitted)
 
 
 Stack trace: (omitted)
 
 [  FAILED  ] ExpectFailureTest.ExpectFatalFailureOnAllThreads
 [ RUN      ] ExpectFailureTest.ExpectNonFatalFailureOnAllThreads
 (expecting 1 failure)
 gtest.cc:#: Failure
 Expected: 1 non-fatal failure
   Actual:
-gtest_output_test_.cc:#: Success:
+googletest-output-test_.cc:#: Success:
 Succeeded
 Stack trace: (omitted)
 
 
 Stack trace: (omitted)
 
 (expecting 1 failure)
 gtest.cc:#: Failure
 Expected: 1 non-fatal failure
   Actual:
-gtest_output_test_.cc:#: Fatal failure:
+googletest-output-test_.cc:#: Fatal failure:
 Failed
 Expected fatal failure.
 Stack trace: (omitted)
 
 
 Stack trace: (omitted)
 
 (expecting 1 failure)
 gtest.cc:#: Failure
 Expected: 1 non-fatal failure containing "Some other non-fatal failure."
   Actual:
-gtest_output_test_.cc:#: Non-fatal failure:
+googletest-output-test_.cc:#: Non-fatal failure:
 Failed
 Expected non-fatal failure.
 Stack trace: (omitted)
 
 
 Stack trace: (omitted)
 
 [  FAILED  ] ExpectFailureTest.ExpectNonFatalFailureOnAllThreads
 [----------] 2 tests from ExpectFailureWithThreadsTest
 [ RUN      ] ExpectFailureWithThreadsTest.ExpectFatalFailure
 (expecting 2 failures)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected fatal failure.
 Stack trace: (omitted)
 
 gtest.cc:#: Failure
 Expected: 1 fatal failure
   Actual: 0 failures
 Stack trace: (omitted)
 
 [  FAILED  ] ExpectFailureWithThreadsTest.ExpectFatalFailure
 [ RUN      ] ExpectFailureWithThreadsTest.ExpectNonFatalFailure
 (expecting 2 failures)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected non-fatal failure.
 Stack trace: (omitted)
 
 gtest.cc:#: Failure
 Expected: 1 non-fatal failure
   Actual: 0 failures
 Stack trace: (omitted)
 
 [  FAILED  ] ExpectFailureWithThreadsTest.ExpectNonFatalFailure
 [----------] 1 test from ScopedFakeTestPartResultReporterTest
 [ RUN      ] ScopedFakeTestPartResultReporterTest.InterceptOnlyCurrentThread
 (expecting 2 failures)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected fatal failure.
 Stack trace: (omitted)
 
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected non-fatal failure.
 Stack trace: (omitted)
 
 [  FAILED  ] ScopedFakeTestPartResultReporterTest.InterceptOnlyCurrentThread
 [----------] 1 test from PrintingFailingParams/FailingParamTest
 [ RUN      ] PrintingFailingParams/FailingParamTest.Fails/0
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   1
   GetParam()
     Which is: 2
 Stack trace: (omitted)
 
 [  FAILED  ] PrintingFailingParams/FailingParamTest.Fails/0, where GetParam() = 2
 [----------] 2 tests from PrintingStrings/ParamTest
 [ RUN      ] PrintingStrings/ParamTest.Success/a
 [       OK ] PrintingStrings/ParamTest.Success/a
 [ RUN      ] PrintingStrings/ParamTest.Failure/a
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   "b"
   GetParam()
     Which is: "a"
 Expected failure
 Stack trace: (omitted)
 
 [  FAILED  ] PrintingStrings/ParamTest.Failure/a, where GetParam() = "a"
 [----------] Global test environment tear-down
 BarEnvironment::TearDown() called.
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected non-fatal failure.
 Stack trace: (omitted)
 
 FooEnvironment::TearDown() called.
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Failed
 Expected fatal failure.
 Stack trace: (omitted)
 
 [==========] 68 tests from 30 test cases ran.
 [  PASSED  ] 22 tests.
 [  FAILED  ] 46 tests, listed below:
 [  FAILED  ] NonfatalFailureTest.EscapesStringOperands
 [  FAILED  ] NonfatalFailureTest.DiffForLongStrings
 [  FAILED  ] FatalFailureTest.FatalFailureInSubroutine
 [  FAILED  ] FatalFailureTest.FatalFailureInNestedSubroutine
 [  FAILED  ] FatalFailureTest.NonfatalFailureInSubroutine
 [  FAILED  ] LoggingTest.InterleavingLoggingAndAssertions
 [  FAILED  ] SCOPED_TRACETest.AcceptedValues
 [  FAILED  ] SCOPED_TRACETest.ObeysScopes
 [  FAILED  ] SCOPED_TRACETest.WorksInLoop
 [  FAILED  ] SCOPED_TRACETest.WorksInSubroutine
 [  FAILED  ] SCOPED_TRACETest.CanBeNested
 [  FAILED  ] SCOPED_TRACETest.CanBeRepeated
 [  FAILED  ] SCOPED_TRACETest.WorksConcurrently
 [  FAILED  ] ScopedTraceTest.WithExplicitFileAndLine
 [  FAILED  ] NonFatalFailureInFixtureConstructorTest.FailureInConstructor
 [  FAILED  ] FatalFailureInFixtureConstructorTest.FailureInConstructor
 [  FAILED  ] NonFatalFailureInSetUpTest.FailureInSetUp
 [  FAILED  ] FatalFailureInSetUpTest.FailureInSetUp
 [  FAILED  ] AddFailureAtTest.MessageContainsSpecifiedFileAndLineNumber
 [  FAILED  ] MixedUpTestCaseTest.ThisShouldFail
 [  FAILED  ] MixedUpTestCaseTest.ThisShouldFailToo
 [  FAILED  ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
 [  FAILED  ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail
 [  FAILED  ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail
 [  FAILED  ] ExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure
 [  FAILED  ] ExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures
 [  FAILED  ] ExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure
 [  FAILED  ] ExpectNonfatalFailureTest.FailsWhenStatementReturns
 [  FAILED  ] ExpectNonfatalFailureTest.FailsWhenStatementThrows
 [  FAILED  ] ExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure
 [  FAILED  ] ExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures
 [  FAILED  ] ExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure
 [  FAILED  ] ExpectFatalFailureTest.FailsWhenStatementReturns
 [  FAILED  ] ExpectFatalFailureTest.FailsWhenStatementThrows
 [  FAILED  ] TypedTest/0.Failure, where TypeParam = int
 [  FAILED  ] Unsigned/TypedTestP/0.Failure, where TypeParam = unsigned char
 [  FAILED  ] Unsigned/TypedTestP/1.Failure, where TypeParam = unsigned int
 [  FAILED  ] ExpectFailureTest.ExpectFatalFailure
 [  FAILED  ] ExpectFailureTest.ExpectNonFatalFailure
 [  FAILED  ] ExpectFailureTest.ExpectFatalFailureOnAllThreads
 [  FAILED  ] ExpectFailureTest.ExpectNonFatalFailureOnAllThreads
 [  FAILED  ] ExpectFailureWithThreadsTest.ExpectFatalFailure
 [  FAILED  ] ExpectFailureWithThreadsTest.ExpectNonFatalFailure
 [  FAILED  ] ScopedFakeTestPartResultReporterTest.InterceptOnlyCurrentThread
 [  FAILED  ] PrintingFailingParams/FailingParamTest.Fails/0, where GetParam() = 2
 [  FAILED  ] PrintingStrings/ParamTest.Failure/a, where GetParam() = "a"
 
 46 FAILED TESTS
   YOU HAVE 1 DISABLED TEST
 
 Note: Google Test filter = FatalFailureTest.*:LoggingTest.*
 [==========] Running 4 tests from 2 test cases.
 [----------] Global test environment set-up.
 [----------] 3 tests from FatalFailureTest
 [ RUN      ] FatalFailureTest.FatalFailureInSubroutine
 (expecting a failure that x should be 1)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   1
   x
     Which is: 2
 Stack trace: (omitted)
 
 [  FAILED  ] FatalFailureTest.FatalFailureInSubroutine (? ms)
 [ RUN      ] FatalFailureTest.FatalFailureInNestedSubroutine
 (expecting a failure that x should be 1)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected equality of these values:
   1
   x
     Which is: 2
 Stack trace: (omitted)
 
 [  FAILED  ] FatalFailureTest.FatalFailureInNestedSubroutine (? ms)
 [ RUN      ] FatalFailureTest.NonfatalFailureInSubroutine
 (expecting a failure on false)
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Value of: false
   Actual: false
 Expected: true
 Stack trace: (omitted)
 
 [  FAILED  ] FatalFailureTest.NonfatalFailureInSubroutine (? ms)
 [----------] 3 tests from FatalFailureTest (? ms total)
 
 [----------] 1 test from LoggingTest
 [ RUN      ] LoggingTest.InterleavingLoggingAndAssertions
 (expecting 2 failures on (3) >= (a[i]))
 i == 0
 i == 1
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected: (3) >= (a[i]), actual: 3 vs 9
 Stack trace: (omitted)
 
 i == 2
 i == 3
-gtest_output_test_.cc:#: Failure
+googletest-output-test_.cc:#: Failure
 Expected: (3) >= (a[i]), actual: 3 vs 6
 Stack trace: (omitted)
 
 [  FAILED  ] LoggingTest.InterleavingLoggingAndAssertions (? ms)
 [----------] 1 test from LoggingTest (? ms total)
 
 [----------] Global test environment tear-down
 [==========] 4 tests from 2 test cases ran. (? ms total)
 [  PASSED  ] 0 tests.
 [  FAILED  ] 4 tests, listed below:
 [  FAILED  ] FatalFailureTest.FatalFailureInSubroutine
 [  FAILED  ] FatalFailureTest.FatalFailureInNestedSubroutine
 [  FAILED  ] FatalFailureTest.NonfatalFailureInSubroutine
 [  FAILED  ] LoggingTest.InterleavingLoggingAndAssertions
 
  4 FAILED TESTS
 Note: Google Test filter = *DISABLED_*
 [==========] Running 1 test from 1 test case.
 [----------] Global test environment set-up.
 [----------] 1 test from DisabledTestsWarningTest
 [ RUN      ] DisabledTestsWarningTest.DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning
 [       OK ] DisabledTestsWarningTest.DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning
 [----------] Global test environment tear-down
 [==========] 1 test from 1 test case ran.
 [  PASSED  ] 1 test.
 Note: Google Test filter = PassingTest.*
 Note: This is test shard 2 of 2.
 [==========] Running 1 test from 1 test case.
 [----------] Global test environment set-up.
 [----------] 1 test from PassingTest
 [ RUN      ] PassingTest.PassingTest2
 [       OK ] PassingTest.PassingTest2
 [----------] Global test environment tear-down
 [==========] 1 test from 1 test case ran.
 [  PASSED  ] 1 test.
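
The sharded run at the end of this golden file (shard 2 of 2 executing only PassingTest2) is driven by the GTEST_SHARD_INDEX and GTEST_TOTAL_SHARDS environment variables that the test script below sets for COMMAND_WITH_SHARDING. A hedged sketch of a round-robin shard assignment consistent with that output (the helper and the hard-coded test list are invented for illustration; Google Test's internal bookkeeping may differ):

import os

def tests_for_this_shard(test_names):
    # 0-based shard index; shard i keeps every total_shards-th test.
    shard_index = int(os.environ.get("GTEST_SHARD_INDEX", "0"))
    total_shards = int(os.environ.get("GTEST_TOTAL_SHARDS", "1"))
    return [name for i, name in enumerate(test_names)
            if i % total_shards == shard_index]

if __name__ == "__main__":
    os.environ["GTEST_SHARD_INDEX"] = "1"   # reported as "shard 2 of 2" above
    os.environ["GTEST_TOTAL_SHARDS"] = "2"
    # With two shards, shard 1 keeps every second test: PassingTest2 only.
    print(tests_for_this_shard(["PassingTest.PassingTest1",
                                "PassingTest.PassingTest2"]))
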
diff --git a/googletest/test/gtest_output_test.py b/googletest/test/googletest-output-test.py
similarity index 94%
rename from googletest/test/gtest_output_test.py
rename to googletest/test/googletest-output-test.py
index 63763b95..c1c36527 100755
--- a/googletest/test/gtest_output_test.py
+++ b/googletest/test/googletest-output-test.py
@@ -1,350 +1,350 @@
 #!/usr/bin/env python
 #
 # Copyright 2008, Google Inc.
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following disclaimer
 # in the documentation and/or other materials provided with the
 # distribution.
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 """Tests the text output of Google C++ Testing and Mocking Framework.
 
 
 SYNOPSIS
-       gtest_output_test.py --build_dir=BUILD/DIR --gengolden
-         # where BUILD/DIR contains the built gtest_output_test_ file.
-       gtest_output_test.py --gengolden
-       gtest_output_test.py
+       googletest-output-test.py --build_dir=BUILD/DIR --gengolden
+         # where BUILD/DIR contains the built googletest-output-test_ file.
+       googletest-output-test.py --gengolden
+       googletest-output-test.py
 """
 
 __author__ = 'wan@google.com (Zhanyong Wan)'
 
 import difflib
 import os
 import re
 import sys
 import gtest_test_utils
 
 
 # The flag for generating the golden file
 GENGOLDEN_FLAG = '--gengolden'
 CATCH_EXCEPTIONS_ENV_VAR_NAME = 'GTEST_CATCH_EXCEPTIONS'
 
 # The flag indicating stacktraces are not supported
 NO_STACKTRACE_SUPPORT_FLAG = '--no_stacktrace_support'
 
 IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
 IS_WINDOWS = os.name == 'nt'
 
 # TODO(vladl@google.com): remove the _lin suffix.
-GOLDEN_NAME = 'gtest_output_test_golden_lin.txt'
+GOLDEN_NAME = 'googletest-output-test-golden-lin.txt'
 
-PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_output_test_')
+PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('googletest-output-test_')
 
 # At least one command we exercise must not have the
 # 'internal_skip_environment_and_ad_hoc_tests' argument.
 COMMAND_LIST_TESTS = ({}, [PROGRAM_PATH, '--gtest_list_tests'])
 COMMAND_WITH_COLOR = ({}, [PROGRAM_PATH, '--gtest_color=yes'])
 COMMAND_WITH_TIME = ({}, [PROGRAM_PATH,
                           '--gtest_print_time',
                           'internal_skip_environment_and_ad_hoc_tests',
                           '--gtest_filter=FatalFailureTest.*:LoggingTest.*'])
 COMMAND_WITH_DISABLED = (
     {}, [PROGRAM_PATH,
          '--gtest_also_run_disabled_tests',
          'internal_skip_environment_and_ad_hoc_tests',
          '--gtest_filter=*DISABLED_*'])
 COMMAND_WITH_SHARDING = (
     {'GTEST_SHARD_INDEX': '1', 'GTEST_TOTAL_SHARDS': '2'},
     [PROGRAM_PATH,
      'internal_skip_environment_and_ad_hoc_tests',
      '--gtest_filter=PassingTest.*'])
 
 GOLDEN_PATH = os.path.join(gtest_test_utils.GetSourceDir(), GOLDEN_NAME)
 
 
 def ToUnixLineEnding(s):
   """Changes all Windows/Mac line endings in s to UNIX line endings."""
 
   return s.replace('\r\n', '\n').replace('\r', '\n')
 
 
 def RemoveLocations(test_output):
   """Removes all file location info from a Google Test program's output.
 
   Args:
        test_output:  the output of a Google Test program.
 
   Returns:
        output with all file location info (in the form of
        'DIRECTORY/FILE_NAME:LINE_NUMBER: ' or
        'DIRECTORY\\FILE_NAME(LINE_NUMBER): ') replaced by
        'FILE_NAME:#: '.
   """
 
-  return re.sub(r'.*[/\\]((gtest_output_test_|gtest).cc)(\:\d+|\(\d+\))\: ',
+  return re.sub(r'.*[/\\]((googletest-output-test_|gtest).cc)(\:\d+|\(\d+\))\: ',
                 r'\1:#: ', test_output)
 
 
 def RemoveStackTraceDetails(output):
   """Removes all stack traces from a Google Test program's output."""
 
   # *? means "find the shortest string that matches".
   return re.sub(r'Stack trace:(.|\n)*?\n\n',
                 'Stack trace: (omitted)\n\n', output)
 
 
 def RemoveStackTraces(output):
   """Removes all traces of stack traces from a Google Test program's output."""
 
   # *? means "find the shortest string that matches".
   return re.sub(r'Stack trace:(.|\n)*?\n\n', '', output)
 
 
 def RemoveTime(output):
   """Removes all time information from a Google Test program's output."""
 
   return re.sub(r'\(\d+ ms', '(? ms', output)
 
 
 def RemoveTypeInfoDetails(test_output):
   """Removes compiler-specific type info from Google Test program's output.
 
   Args:
        test_output:  the output of a Google Test program.
 
   Returns:
        output with type information normalized to canonical form.
   """
 
   # some compilers output the name of type 'unsigned int' as 'unsigned'
   return re.sub(r'unsigned int', 'unsigned', test_output)
 
 
 def NormalizeToCurrentPlatform(test_output):
   """Normalizes platform specific output details for easier comparison."""
 
   if IS_WINDOWS:
     # Removes the color information that is not present on Windows.
     test_output = re.sub('\x1b\\[(0;3\d)?m', '', test_output)
     # Changes failure message headers into the Windows format.
     test_output = re.sub(r': Failure\n', r': error: ', test_output)
     # Changes file(line_number) to file:line_number.
     test_output = re.sub(r'((\w|\.)+)\((\d+)\):', r'\1:\3:', test_output)
 
   return test_output
 
 
 def RemoveTestCounts(output):
   """Removes test counts from a Google Test program's output."""
 
   output = re.sub(r'\d+ tests?, listed below',
                   '? tests, listed below', output)
   output = re.sub(r'\d+ FAILED TESTS',
                   '? FAILED TESTS', output)
   output = re.sub(r'\d+ tests? from \d+ test cases?',
                   '? tests from ? test cases', output)
   output = re.sub(r'\d+ tests? from ([a-zA-Z_])',
                   r'? tests from \1', output)
   return re.sub(r'\d+ tests?\.', '? tests.', output)
 
 
 def RemoveMatchingTests(test_output, pattern):
   """Removes output of specified tests from a Google Test program's output.
 
   This function strips not only the beginning and the end of a test but also
   all output in between.
 
   Args:
     test_output:       A string containing the test output.
     pattern:           A regex string that matches names of test cases or
                        tests to remove.
 
   Returns:
     Contents of test_output with tests whose names match pattern removed.
   """
 
   test_output = re.sub(
       r'.*\[ RUN      \] .*%s(.|\n)*?\[(  FAILED  |       OK )\] .*%s.*\n' % (
           pattern, pattern),
       '',
       test_output)
   return re.sub(r'.*%s.*\n' % pattern, '', test_output)
 
 
 def NormalizeOutput(output):
-  """Normalizes output (the output of gtest_output_test_.exe)."""
+  """Normalizes output (the output of googletest-output-test_.exe)."""
 
   output = ToUnixLineEnding(output)
   output = RemoveLocations(output)
   output = RemoveStackTraceDetails(output)
   output = RemoveTime(output)
   return output
 
 
 def GetShellCommandOutput(env_cmd):
   """Runs a command in a sub-process, and returns its output in a string.
 
   Args:
     env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
              environment variables to set, and element 1 is a string with
              the command and any flags.
 
   Returns:
     A string with the command's combined standard and diagnostic output.
   """
 
   # Spawns cmd in a sub-process, and gets its standard I/O file objects.
   # Set and save the environment properly.
   environ = os.environ.copy()
   environ.update(env_cmd[0])
   p = gtest_test_utils.Subprocess(env_cmd[1], env=environ)
 
   return p.output
 
 
 def GetCommandOutput(env_cmd):
   """Runs a command and returns its output with all file location
   info stripped off.
 
   Args:
     env_cmd:  The shell command. A 2-tuple where element 0 is a dict of extra
               environment variables to set, and element 1 is a string with
               the command and any flags.
   """
 
   # Disables exception pop-ups on Windows.
   environ, cmdline = env_cmd
   environ = dict(environ)  # Ensures we are modifying a copy.
   environ[CATCH_EXCEPTIONS_ENV_VAR_NAME] = '1'
   return NormalizeOutput(GetShellCommandOutput((environ, cmdline)))
 
 
 def GetOutputOfAllCommands():
   """Returns concatenated output from several representative commands."""
 
   return (GetCommandOutput(COMMAND_WITH_COLOR) +
           GetCommandOutput(COMMAND_WITH_TIME) +
           GetCommandOutput(COMMAND_WITH_DISABLED) +
           GetCommandOutput(COMMAND_WITH_SHARDING))
 
 
 test_list = GetShellCommandOutput(COMMAND_LIST_TESTS)
 SUPPORTS_DEATH_TESTS = 'DeathTest' in test_list
 SUPPORTS_TYPED_TESTS = 'TypedTest' in test_list
 SUPPORTS_THREADS = 'ExpectFailureWithThreadsTest' in test_list
 SUPPORTS_STACK_TRACES = NO_STACKTRACE_SUPPORT_FLAG not in sys.argv
 
 CAN_GENERATE_GOLDEN_FILE = (SUPPORTS_DEATH_TESTS and
                             SUPPORTS_TYPED_TESTS and
                             SUPPORTS_THREADS and
                             SUPPORTS_STACK_TRACES)
 
 class GTestOutputTest(gtest_test_utils.TestCase):
   def RemoveUnsupportedTests(self, test_output):
     if not SUPPORTS_DEATH_TESTS:
       test_output = RemoveMatchingTests(test_output, 'DeathTest')
     if not SUPPORTS_TYPED_TESTS:
       test_output = RemoveMatchingTests(test_output, 'TypedTest')
       test_output = RemoveMatchingTests(test_output, 'TypedDeathTest')
       test_output = RemoveMatchingTests(test_output, 'TypeParamDeathTest')
     if not SUPPORTS_THREADS:
       test_output = RemoveMatchingTests(test_output,
                                         'ExpectFailureWithThreadsTest')
       test_output = RemoveMatchingTests(test_output,
                                         'ScopedFakeTestPartResultReporterTest')
       test_output = RemoveMatchingTests(test_output,
                                         'WorksConcurrently')
     if not SUPPORTS_STACK_TRACES:
       test_output = RemoveStackTraces(test_output)
 
     return test_output
 
   def testOutput(self):
     output = GetOutputOfAllCommands()
 
     golden_file = open(GOLDEN_PATH, 'rb')
     # A misconfigured source control system can cause \r to appear in EOL
     # sequences when we read the golden file, irrespective of the operating
     # system used. Therefore, we need to strip those \r's from newlines
     # unconditionally.
     golden = ToUnixLineEnding(golden_file.read())
     golden_file.close()
 
     # We want the test to pass regardless of certain features being
     # supported or not.
 
     # We still have to remove type name specifics in all cases.
     normalized_actual = RemoveTypeInfoDetails(output)
     normalized_golden = RemoveTypeInfoDetails(golden)
 
     if CAN_GENERATE_GOLDEN_FILE:
       self.assertEqual(normalized_golden, normalized_actual,
                        '\n'.join(difflib.unified_diff(
                            normalized_golden.split('\n'),
                            normalized_actual.split('\n'),
                            'golden', 'actual')))
     else:
       normalized_actual = NormalizeToCurrentPlatform(
           RemoveTestCounts(normalized_actual))
       normalized_golden = NormalizeToCurrentPlatform(
           RemoveTestCounts(self.RemoveUnsupportedTests(normalized_golden)))
 
       # This code is very handy when debugging golden file differences:
       if os.getenv('DEBUG_GTEST_OUTPUT_TEST'):
         open(os.path.join(
             gtest_test_utils.GetSourceDir(),
-            '_gtest_output_test_normalized_actual.txt'), 'wb').write(
+            '_googletest-output-test_normalized_actual.txt'), 'wb').write(
                 normalized_actual)
         open(os.path.join(
             gtest_test_utils.GetSourceDir(),
-            '_gtest_output_test_normalized_golden.txt'), 'wb').write(
+            '_googletest-output-test_normalized_golden.txt'), 'wb').write(
                 normalized_golden)
 
       self.assertEqual(normalized_golden, normalized_actual)
 
 
 if __name__ == '__main__':
   if NO_STACKTRACE_SUPPORT_FLAG in sys.argv:
     # unittest.main() can't handle unknown flags
     sys.argv.remove(NO_STACKTRACE_SUPPORT_FLAG)
 
   if GENGOLDEN_FLAG in sys.argv:
     if CAN_GENERATE_GOLDEN_FILE:
       output = GetOutputOfAllCommands()
       golden_file = open(GOLDEN_PATH, 'wb')
       golden_file.write(output)
       golden_file.close()
     else:
       message = (
           """Unable to write a golden file when compiled in an environment
 that does not support all the required features (death tests,
 typed tests, stack traces, and multiple threads).
 Please build this test and generate the golden file using Blaze on Linux.""")
 
       sys.stderr.write(message)
       sys.exit(1)
   else:
     gtest_test_utils.Main()
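
Apart from line-ending conversion, the renamed script keeps the same normalization pipeline: file locations are rewritten to 'FILE:#: ', stack traces collapse to 'Stack trace: (omitted)', and timings become '(? ms', which is exactly the form stored in the golden file. A small, self-contained sketch that reuses those substitutions on an invented sample (the sample text and path are made up for illustration):

import re

SAMPLE = (
    "/build/googletest/test/googletest-output-test_.cc:123: Failure\n"
    "Value of: false\n"
    "  Actual: false\n"
    "Expected: true\n"
    "Stack trace:\n"
    "  #0 0x0000 in TestBody()\n"
    "\n"
    "[  FAILED  ] FooTest.Bar (3 ms)\n"
)

# The same three substitutions the script applies before diffing against the
# golden file.
normalized = re.sub(
    r'.*[/\\]((googletest-output-test_|gtest).cc)(\:\d+|\(\d+\))\: ',
    r'\1:#: ', SAMPLE)
normalized = re.sub(r'Stack trace:(.|\n)*?\n\n',
                    'Stack trace: (omitted)\n\n', normalized)
normalized = re.sub(r'\(\d+ ms', '(? ms', normalized)

print(normalized)
# -> googletest-output-test_.cc:#: Failure ... Stack trace: (omitted) ... (? ms)
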
diff --git a/googletest/test/gtest_output_test_.cc b/googletest/test/googletest-output-test_.cc
similarity index 99%
rename from googletest/test/gtest_output_test_.cc
rename to googletest/test/googletest-output-test_.cc
index 9ae9dc60..29fc9931 100644
--- a/googletest/test/gtest_output_test_.cc
+++ b/googletest/test/googletest-output-test_.cc
@@ -1,1067 +1,1067 @@
 // Copyright 2005, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 // The purpose of this file is to generate Google Test output under
 // various conditions.  The output will then be verified by
-// gtest_output_test.py to ensure that Google Test generates the
+// googletest-output-test.py to ensure that Google Test generates the
 // desired messages.  Therefore, most tests in this file are MEANT TO
 // FAIL.
 //
 // Author: wan@google.com (Zhanyong Wan)
 
 #include "gtest/gtest-spi.h"
 #include "gtest/gtest.h"
 #include "src/gtest-internal-inl.h"
 
 #include <stdlib.h>
 
 #if GTEST_IS_THREADSAFE
 using testing::ScopedFakeTestPartResultReporter;
 using testing::TestPartResultArray;
 
 using testing::internal::Notification;
 using testing::internal::ThreadWithParam;
 #endif
 
 namespace posix = ::testing::internal::posix;
 
 // Tests catching fatal failures.
 
 // A subroutine used by the following test.
 void TestEq1(int x) {
   ASSERT_EQ(1, x);
 }
 
 // This function calls a test subroutine, catches the fatal failure it
 // generates, and then returns early.
 void TryTestSubroutine() {
   // Calls a subroutine that yields a fatal failure.
   TestEq1(2);
 
   // Catches the fatal failure and aborts the test.
   //
   // The testing::Test:: prefix is necessary when calling
   // HasFatalFailure() outside of a TEST, TEST_F, or test fixture.
   if (testing::Test::HasFatalFailure()) return;
 
   // If we get here, something is wrong.
   FAIL() << "This should never be reached.";
 }
 
 TEST(PassingTest, PassingTest1) {
 }
 
 TEST(PassingTest, PassingTest2) {
 }
 
 // Tests that parameters of failing parameterized tests are printed in the
 // failing test summary.
 class FailingParamTest : public testing::TestWithParam<int> {};
 
 TEST_P(FailingParamTest, Fails) {
   EXPECT_EQ(1, GetParam());
 }
 
 // This generates a test which will fail. Google Test is expected to print
 // its parameter when it outputs the list of all failed tests.
 INSTANTIATE_TEST_CASE_P(PrintingFailingParams,
                         FailingParamTest,
                         testing::Values(2));
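 // Illustrative note (not part of the original test program): when a
 // parameterized test fails, the failed-test summary is expected to name the
 // instantiation and the parameter value, with a line roughly like
 //
 //   [  FAILED  ] PrintingFailingParams/FailingParamTest.Fails/0, where GetParam() = 2
 //
 // which is the kind of line the golden file is expected to contain.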
 
 static const char kGoldenString[] = "\"Line\0 1\"\nLine 2";
 
 TEST(NonfatalFailureTest, EscapesStringOperands) {
   std::string actual = "actual \"string\"";
   EXPECT_EQ(kGoldenString, actual);
 
   const char* golden = kGoldenString;
   EXPECT_EQ(golden, actual);
 }
 
 TEST(NonfatalFailureTest, DiffForLongStrings) {
   std::string golden_str(kGoldenString, sizeof(kGoldenString) - 1);
   EXPECT_EQ(golden_str, "Line 2");
 }
 
 // Tests catching a fatal failure in a subroutine.
 TEST(FatalFailureTest, FatalFailureInSubroutine) {
   printf("(expecting a failure that x should be 1)\n");
 
   TryTestSubroutine();
 }
 
 // Tests catching a fatal failure in a nested subroutine.
 TEST(FatalFailureTest, FatalFailureInNestedSubroutine) {
   printf("(expecting a failure that x should be 1)\n");
 
   // Calls a subroutine that yields a fatal failure.
   TryTestSubroutine();
 
   // Catches the fatal failure and aborts the test.
   //
   // When calling HasFatalFailure() inside a TEST, TEST_F, or test
   // fixture, the testing::Test:: prefix is not needed.
   if (HasFatalFailure()) return;
 
   // If we get here, something is wrong.
   FAIL() << "This should never be reached.";
 }
 
 // Tests HasFatalFailure() after a failed EXPECT check.
 TEST(FatalFailureTest, NonfatalFailureInSubroutine) {
   printf("(expecting a failure on false)\n");
   EXPECT_TRUE(false);  // Generates a nonfatal failure
   ASSERT_FALSE(HasFatalFailure());  // This should succeed.
 }
 
 // Tests interleaving user logging and Google Test assertions.
 TEST(LoggingTest, InterleavingLoggingAndAssertions) {
   static const int a[4] = {
     3, 9, 2, 6
   };
 
   printf("(expecting 2 failures on (3) >= (a[i]))\n");
   for (int i = 0; i < static_cast<int>(sizeof(a)/sizeof(*a)); i++) {
     printf("i == %d\n", i);
     EXPECT_GE(3, a[i]);
   }
 }
 
 // Tests the SCOPED_TRACE macro.
 
 // A helper function for testing SCOPED_TRACE.
 void SubWithoutTrace(int n) {
   EXPECT_EQ(1, n);
   ASSERT_EQ(2, n);
 }
 
 // Another helper function for testing SCOPED_TRACE.
 void SubWithTrace(int n) {
   SCOPED_TRACE(testing::Message() << "n = " << n);
 
   SubWithoutTrace(n);
 }
 
 TEST(SCOPED_TRACETest, AcceptedValues) {
   SCOPED_TRACE("literal string");
   SCOPED_TRACE(std::string("std::string"));
   SCOPED_TRACE(1337);  // streamable type
   const char* null_value = NULL;
   SCOPED_TRACE(null_value);
 
   ADD_FAILURE() << "Just checking that all these values work fine.";
 }
 
 // Tests that SCOPED_TRACE() obeys lexical scopes.
 TEST(SCOPED_TRACETest, ObeysScopes) {
   printf("(expected to fail)\n");
 
   // There should be no trace before SCOPED_TRACE() is invoked.
   ADD_FAILURE() << "This failure is expected, and shouldn't have a trace.";
 
   {
     SCOPED_TRACE("Expected trace");
     // After SCOPED_TRACE(), a failure in the current scope should contain
     // the trace.
     ADD_FAILURE() << "This failure is expected, and should have a trace.";
   }
 
   // Once the control leaves the scope of the SCOPED_TRACE(), there
   // should be no trace again.
   ADD_FAILURE() << "This failure is expected, and shouldn't have a trace.";
 }
 
 // Tests that SCOPED_TRACE works inside a loop.
 TEST(SCOPED_TRACETest, WorksInLoop) {
   printf("(expected to fail)\n");
 
   for (int i = 1; i <= 2; i++) {
     SCOPED_TRACE(testing::Message() << "i = " << i);
 
     SubWithoutTrace(i);
   }
 }
 
 // Tests that SCOPED_TRACE works in a subroutine.
 TEST(SCOPED_TRACETest, WorksInSubroutine) {
   printf("(expected to fail)\n");
 
   SubWithTrace(1);
   SubWithTrace(2);
 }
 
 // Tests that SCOPED_TRACE can be nested.
 TEST(SCOPED_TRACETest, CanBeNested) {
   printf("(expected to fail)\n");
 
   SCOPED_TRACE("");  // A trace without a message.
 
   SubWithTrace(2);
 }
 
 // Tests that multiple SCOPED_TRACEs can be used in the same scope.
 TEST(SCOPED_TRACETest, CanBeRepeated) {
   printf("(expected to fail)\n");
 
   SCOPED_TRACE("A");
   ADD_FAILURE()
       << "This failure is expected, and should contain trace point A.";
 
   SCOPED_TRACE("B");
   ADD_FAILURE()
       << "This failure is expected, and should contain trace point A and B.";
 
   {
     SCOPED_TRACE("C");
     ADD_FAILURE() << "This failure is expected, and should "
                   << "contain trace point A, B, and C.";
   }
 
   SCOPED_TRACE("D");
   ADD_FAILURE() << "This failure is expected, and should "
                 << "contain trace point A, B, and D.";
 }
 
 #if GTEST_IS_THREADSAFE
 // Tests that SCOPED_TRACE()s can be used concurrently from multiple
 // threads.  Namely, an assertion should be affected by
 // SCOPED_TRACE()s in its own thread only.
 
 // Here's the sequence of actions that happen in the test:
 //
 //   Thread A (main)                | Thread B (spawned)
 //   ===============================|================================
 //   spawns thread B                |
 //   -------------------------------+--------------------------------
 //   waits for n1                   | SCOPED_TRACE("Trace B");
 //                                  | generates failure #1
 //                                  | notifies n1
 //   -------------------------------+--------------------------------
 //   SCOPED_TRACE("Trace A");       | waits for n2
 //   generates failure #2           |
 //   notifies n2                    |
 //   -------------------------------|--------------------------------
 //   waits for n3                   | generates failure #3
 //                                  | trace B dies
 //                                  | generates failure #4
 //                                  | notifies n3
 //   -------------------------------|--------------------------------
 //   generates failure #5           | finishes
 //   trace A dies                   |
 //   generates failure #6           |
 //   -------------------------------|--------------------------------
 //   waits for thread B to finish   |
 
 struct CheckPoints {
   Notification n1;
   Notification n2;
   Notification n3;
 };
 
 static void ThreadWithScopedTrace(CheckPoints* check_points) {
   {
     SCOPED_TRACE("Trace B");
     ADD_FAILURE()
         << "Expected failure #1 (in thread B, only trace B alive).";
     check_points->n1.Notify();
     check_points->n2.WaitForNotification();
 
     ADD_FAILURE()
         << "Expected failure #3 (in thread B, trace A & B both alive).";
   }  // Trace B dies here.
   ADD_FAILURE()
       << "Expected failure #4 (in thread B, only trace A alive).";
   check_points->n3.Notify();
 }
 
 TEST(SCOPED_TRACETest, WorksConcurrently) {
   printf("(expecting 6 failures)\n");
 
   CheckPoints check_points;
   ThreadWithParam<CheckPoints*> thread(&ThreadWithScopedTrace,
                                        &check_points,
                                        NULL);
   check_points.n1.WaitForNotification();
 
   {
     SCOPED_TRACE("Trace A");
     ADD_FAILURE()
         << "Expected failure #2 (in thread A, trace A & B both alive).";
     check_points.n2.Notify();
     check_points.n3.WaitForNotification();
 
     ADD_FAILURE()
         << "Expected failure #5 (in thread A, only trace A alive).";
   }  // Trace A dies here.
   ADD_FAILURE()
       << "Expected failure #6 (in thread A, no trace alive).";
   thread.Join();
 }
 #endif  // GTEST_IS_THREADSAFE
 
 // Tests basic functionality of the ScopedTrace utility (most of its features
 // are already tested in SCOPED_TRACETest).
 TEST(ScopedTraceTest, WithExplicitFileAndLine) {
   testing::ScopedTrace trace("explicit_file.cc", 123, "expected trace message");
   ADD_FAILURE() << "Check that the trace is attached to a particular location.";
 }
 
 TEST(DisabledTestsWarningTest,
      DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning) {
   // This test body is intentionally empty.  Its sole purpose is for
   // verifying that the --gtest_also_run_disabled_tests flag
   // suppresses the "YOU HAVE 12 DISABLED TESTS" warning at the end of
   // the test output.
 }
 
 // Tests using assertions outside of TEST and TEST_F.
 //
 // This function creates two failures intentionally.
 void AdHocTest() {
   printf("The non-test part of the code is expected to have 2 failures.\n\n");
   EXPECT_TRUE(false);
   EXPECT_EQ(2, 3);
 }
 
 // Runs all TESTs, all TEST_Fs, and the ad hoc test.
 int RunAllTests() {
   AdHocTest();
   return RUN_ALL_TESTS();
 }
 
 // Tests non-fatal failures in the fixture constructor.
 class NonFatalFailureInFixtureConstructorTest : public testing::Test {
  protected:
   NonFatalFailureInFixtureConstructorTest() {
     printf("(expecting 5 failures)\n");
     ADD_FAILURE() << "Expected failure #1, in the test fixture c'tor.";
   }
 
   ~NonFatalFailureInFixtureConstructorTest() {
     ADD_FAILURE() << "Expected failure #5, in the test fixture d'tor.";
   }
 
   virtual void SetUp() {
     ADD_FAILURE() << "Expected failure #2, in SetUp().";
   }
 
   virtual void TearDown() {
     ADD_FAILURE() << "Expected failure #4, in TearDown.";
   }
 };
 
 TEST_F(NonFatalFailureInFixtureConstructorTest, FailureInConstructor) {
   ADD_FAILURE() << "Expected failure #3, in the test body.";
 }
 
 // Tests fatal failures in the fixture constructor.
 class FatalFailureInFixtureConstructorTest : public testing::Test {
  protected:
   FatalFailureInFixtureConstructorTest() {
     printf("(expecting 2 failures)\n");
     Init();
   }
 
   ~FatalFailureInFixtureConstructorTest() {
     ADD_FAILURE() << "Expected failure #2, in the test fixture d'tor.";
   }
 
   virtual void SetUp() {
     ADD_FAILURE() << "UNEXPECTED failure in SetUp().  "
                   << "We should never get here, as the test fixture c'tor "
                   << "had a fatal failure.";
   }
 
   virtual void TearDown() {
     ADD_FAILURE() << "UNEXPECTED failure in TearDown().  "
                   << "We should never get here, as the test fixture c'tor "
                   << "had a fatal failure.";
   }
 
  private:
   void Init() {
     FAIL() << "Expected failure #1, in the test fixture c'tor.";
   }
 };
 
 TEST_F(FatalFailureInFixtureConstructorTest, FailureInConstructor) {
   ADD_FAILURE() << "UNEXPECTED failure in the test body.  "
                 << "We should never get here, as the test fixture c'tor "
                 << "had a fatal failure.";
 }
 
 // Tests non-fatal failures in SetUp().
 class NonFatalFailureInSetUpTest : public testing::Test {
  protected:
   virtual ~NonFatalFailureInSetUpTest() {
     Deinit();
   }
 
   virtual void SetUp() {
     printf("(expecting 4 failures)\n");
     ADD_FAILURE() << "Expected failure #1, in SetUp().";
   }
 
   virtual void TearDown() {
     FAIL() << "Expected failure #3, in TearDown().";
   }
  private:
   void Deinit() {
     FAIL() << "Expected failure #4, in the test fixture d'tor.";
   }
 };
 
 TEST_F(NonFatalFailureInSetUpTest, FailureInSetUp) {
   FAIL() << "Expected failure #2, in the test function.";
 }
 
 // Tests fatal failures in SetUp().
 class FatalFailureInSetUpTest : public testing::Test {
  protected:
   virtual ~FatalFailureInSetUpTest() {
     Deinit();
   }
 
   virtual void SetUp() {
     printf("(expecting 3 failures)\n");
     FAIL() << "Expected failure #1, in SetUp().";
   }
 
   virtual void TearDown() {
     FAIL() << "Expected failure #2, in TearDown().";
   }
  private:
   void Deinit() {
     FAIL() << "Expected failure #3, in the test fixture d'tor.";
   }
 };
 
 TEST_F(FatalFailureInSetUpTest, FailureInSetUp) {
   FAIL() << "UNEXPECTED failure in the test function.  "
          << "We should never get here, as SetUp() failed.";
 }
 
 TEST(AddFailureAtTest, MessageContainsSpecifiedFileAndLineNumber) {
   ADD_FAILURE_AT("foo.cc", 42) << "Expected failure in foo.cc";
 }
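 // Illustrative sketch only: a hypothetical validation helper (not part of
 // this test program) can use ADD_FAILURE_AT() to attribute a failure to a
 // location supplied by the caller instead of to the helper's own line.
 void CheckNonEmptyAt(const std::string& str, const char* file, int line) {
   if (str.empty()) {
     ADD_FAILURE_AT(file, line) << "expected a non-empty string";
   }
 }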
 
 #if GTEST_IS_THREADSAFE
 
 // A unary function that may die.
 void DieIf(bool should_die) {
   GTEST_CHECK_(!should_die) << " - death inside DieIf().";
 }
 
 // Tests running death tests in a multi-threaded context.
 
 // Used for coordination between the main and the spawn thread.
 struct SpawnThreadNotifications {
   SpawnThreadNotifications() {}
 
   Notification spawn_thread_started;
   Notification spawn_thread_ok_to_terminate;
 
  private:
   GTEST_DISALLOW_COPY_AND_ASSIGN_(SpawnThreadNotifications);
 };
 
 // The function to be executed in the thread spawned by the
 // DeathTestAndMultiThreadsTest fixture (below).
 static void ThreadRoutine(SpawnThreadNotifications* notifications) {
   // Signals the main thread that this thread has started.
   notifications->spawn_thread_started.Notify();
 
   // Waits for permission to finish from the main thread.
   notifications->spawn_thread_ok_to_terminate.WaitForNotification();
 }
 
 // This is a death-test test, but it's not named with a DeathTest
 // suffix.  It starts threads which might interfere with later
 // death tests, so it must run after all other death tests.
 class DeathTestAndMultiThreadsTest : public testing::Test {
  protected:
   // Starts a thread and waits for it to begin.
   virtual void SetUp() {
     thread_.reset(new ThreadWithParam<SpawnThreadNotifications*>(
         &ThreadRoutine, &notifications_, NULL));
     notifications_.spawn_thread_started.WaitForNotification();
   }
   // Tells the thread to finish, and reaps it.
   // Depending on the version of the thread library in use,
   // a manager thread might still be left running that will interfere
   // with later death tests.  This is unfortunate, but this class
   // cleans up after itself as best it can.
   virtual void TearDown() {
     notifications_.spawn_thread_ok_to_terminate.Notify();
   }
 
  private:
   SpawnThreadNotifications notifications_;
   testing::internal::scoped_ptr<ThreadWithParam<SpawnThreadNotifications*> >
       thread_;
 };
 
 #endif  // GTEST_IS_THREADSAFE
 
 // The MixedUpTestCaseTest test case verifies that Google Test will fail a
 // test if it uses a different fixture class than what other tests in
 // the same test case use.  It deliberately contains two fixture
 // classes with the same name but defined in different namespaces.
 
 // The MixedUpTestCaseWithSameTestNameTest test case verifies that
 // when the user defines two tests with the same test case name AND
 // same test name (but in different namespaces), the second test will
 // fail.
 
 namespace foo {
 
 class MixedUpTestCaseTest : public testing::Test {
 };
 
 TEST_F(MixedUpTestCaseTest, FirstTestFromNamespaceFoo) {}
 TEST_F(MixedUpTestCaseTest, SecondTestFromNamespaceFoo) {}
 
 class MixedUpTestCaseWithSameTestNameTest : public testing::Test {
 };
 
 TEST_F(MixedUpTestCaseWithSameTestNameTest,
        TheSecondTestWithThisNameShouldFail) {}
 
 }  // namespace foo
 
 namespace bar {
 
 class MixedUpTestCaseTest : public testing::Test {
 };
 
 // The following two tests are expected to fail.  We rely on the
 // golden file to check that Google Test generates the right error message.
 TEST_F(MixedUpTestCaseTest, ThisShouldFail) {}
 TEST_F(MixedUpTestCaseTest, ThisShouldFailToo) {}
 
 class MixedUpTestCaseWithSameTestNameTest : public testing::Test {
 };
 
 // Expected to fail.  We rely on the golden file to check that Google Test
 // generates the right error message.
 TEST_F(MixedUpTestCaseWithSameTestNameTest,
        TheSecondTestWithThisNameShouldFail) {}
 
 }  // namespace bar
 
 // The following two test cases verify that Google Test catches the user
 // error of mixing TEST and TEST_F in the same test case.  The first
 // test case checks the scenario where TEST_F appears before TEST, and
 // the second one checks where TEST appears before TEST_F.
 
 class TEST_F_before_TEST_in_same_test_case : public testing::Test {
 };
 
 TEST_F(TEST_F_before_TEST_in_same_test_case, DefinedUsingTEST_F) {}
 
 // Expected to fail.  We rely on the golden file to check that Google Test
 // generates the right error message.
 TEST(TEST_F_before_TEST_in_same_test_case, DefinedUsingTESTAndShouldFail) {}
 
 class TEST_before_TEST_F_in_same_test_case : public testing::Test {
 };
 
 TEST(TEST_before_TEST_F_in_same_test_case, DefinedUsingTEST) {}
 
 // Expected to fail.  We rely on the golden file to check that Google Test
 // generates the right error message.
 TEST_F(TEST_before_TEST_F_in_same_test_case, DefinedUsingTEST_FAndShouldFail) {
 }
 
 // Used for testing EXPECT_NONFATAL_FAILURE() and EXPECT_FATAL_FAILURE().
 int global_integer = 0;
 
 // Tests that EXPECT_NONFATAL_FAILURE() can reference global variables.
 TEST(ExpectNonfatalFailureTest, CanReferenceGlobalVariables) {
   global_integer = 0;
   EXPECT_NONFATAL_FAILURE({
     EXPECT_EQ(1, global_integer) << "Expected non-fatal failure.";
   }, "Expected non-fatal failure.");
 }
 
 // Tests that EXPECT_NONFATAL_FAILURE() can reference local variables
 // (static or not).
 TEST(ExpectNonfatalFailureTest, CanReferenceLocalVariables) {
   int m = 0;
   static int n;
   n = 1;
   EXPECT_NONFATAL_FAILURE({
     EXPECT_EQ(m, n) << "Expected non-fatal failure.";
   }, "Expected non-fatal failure.");
 }
 
 // Tests that EXPECT_NONFATAL_FAILURE() succeeds when there is exactly
 // one non-fatal failure and no fatal failure.
 TEST(ExpectNonfatalFailureTest, SucceedsWhenThereIsOneNonfatalFailure) {
   EXPECT_NONFATAL_FAILURE({
     ADD_FAILURE() << "Expected non-fatal failure.";
   }, "Expected non-fatal failure.");
 }
 
 // Tests that EXPECT_NONFATAL_FAILURE() fails when there is no
 // non-fatal failure.
 TEST(ExpectNonfatalFailureTest, FailsWhenThereIsNoNonfatalFailure) {
   printf("(expecting a failure)\n");
   EXPECT_NONFATAL_FAILURE({
   }, "");
 }
 
 // Tests that EXPECT_NONFATAL_FAILURE() fails when there are two
 // non-fatal failures.
 TEST(ExpectNonfatalFailureTest, FailsWhenThereAreTwoNonfatalFailures) {
   printf("(expecting a failure)\n");
   EXPECT_NONFATAL_FAILURE({
     ADD_FAILURE() << "Expected non-fatal failure 1.";
     ADD_FAILURE() << "Expected non-fatal failure 2.";
   }, "");
 }
 
 // Tests that EXPECT_NONFATAL_FAILURE() fails when there is one fatal
 // failure.
 TEST(ExpectNonfatalFailureTest, FailsWhenThereIsOneFatalFailure) {
   printf("(expecting a failure)\n");
   EXPECT_NONFATAL_FAILURE({
     FAIL() << "Expected fatal failure.";
   }, "");
 }
 
 // Tests that EXPECT_NONFATAL_FAILURE() fails when the statement being
 // tested returns.
 TEST(ExpectNonfatalFailureTest, FailsWhenStatementReturns) {
   printf("(expecting a failure)\n");
   EXPECT_NONFATAL_FAILURE({
     return;
   }, "");
 }
 
 #if GTEST_HAS_EXCEPTIONS
 
 // Tests that EXPECT_NONFATAL_FAILURE() fails when the statement being
 // tested throws.
 TEST(ExpectNonfatalFailureTest, FailsWhenStatementThrows) {
   printf("(expecting a failure)\n");
   try {
     EXPECT_NONFATAL_FAILURE({
       throw 0;
     }, "");
   } catch(int) {  // NOLINT
   }
 }
 
 #endif  // GTEST_HAS_EXCEPTIONS
 
 // Tests that EXPECT_FATAL_FAILURE() can reference global variables.
 TEST(ExpectFatalFailureTest, CanReferenceGlobalVariables) {
   global_integer = 0;
   EXPECT_FATAL_FAILURE({
     ASSERT_EQ(1, global_integer) << "Expected fatal failure.";
   }, "Expected fatal failure.");
 }
 
 // Tests that EXPECT_FATAL_FAILURE() can reference local static
 // variables.
 TEST(ExpectFatalFailureTest, CanReferenceLocalStaticVariables) {
   static int n;
   n = 1;
   EXPECT_FATAL_FAILURE({
     ASSERT_EQ(0, n) << "Expected fatal failure.";
   }, "Expected fatal failure.");
 }
 
 // Tests that EXPECT_FATAL_FAILURE() succeeds when there is exactly
 // one fatal failure and no non-fatal failure.
 TEST(ExpectFatalFailureTest, SucceedsWhenThereIsOneFatalFailure) {
   EXPECT_FATAL_FAILURE({
     FAIL() << "Expected fatal failure.";
   }, "Expected fatal failure.");
 }
 
 // Tests that EXPECT_FATAL_FAILURE() fails when there is no fatal
 // failure.
 TEST(ExpectFatalFailureTest, FailsWhenThereIsNoFatalFailure) {
   printf("(expecting a failure)\n");
   EXPECT_FATAL_FAILURE({
   }, "");
 }
 
 // A helper for generating a fatal failure.
 void FatalFailure() {
   FAIL() << "Expected fatal failure.";
 }
 
 // Tests that EXPECT_FATAL_FAILURE() fails when there are two
 // fatal failures.
 TEST(ExpectFatalFailureTest, FailsWhenThereAreTwoFatalFailures) {
   printf("(expecting a failure)\n");
   EXPECT_FATAL_FAILURE({
     FatalFailure();
     FatalFailure();
   }, "");
 }
 
 // Tests that EXPECT_FATAL_FAILURE() fails when there is one non-fatal
 // failure.
 TEST(ExpectFatalFailureTest, FailsWhenThereIsOneNonfatalFailure) {
   printf("(expecting a failure)\n");
   EXPECT_FATAL_FAILURE({
     ADD_FAILURE() << "Expected non-fatal failure.";
   }, "");
 }
 
 // Tests that EXPECT_FATAL_FAILURE() fails when the statement being
 // tested returns.
 TEST(ExpectFatalFailureTest, FailsWhenStatementReturns) {
   printf("(expecting a failure)\n");
   EXPECT_FATAL_FAILURE({
     return;
   }, "");
 }
 
 #if GTEST_HAS_EXCEPTIONS
 
 // Tests that EXPECT_FATAL_FAILURE() fails when the statement being
 // tested throws.
 TEST(ExpectFatalFailureTest, FailsWhenStatementThrows) {
   printf("(expecting a failure)\n");
   try {
     EXPECT_FATAL_FAILURE({
       throw 0;
     }, "");
   } catch(int) {  // NOLINT
   }
 }
 
 #endif  // GTEST_HAS_EXCEPTIONS
 
 // The following tests verify the output of value-parameterized tests.
 
 std::string ParamNameFunc(const testing::TestParamInfo<std::string>& info) {
   return info.param;
 }
 
 class ParamTest : public testing::TestWithParam<std::string> {
 };
 
 TEST_P(ParamTest, Success) {
   EXPECT_EQ("a", GetParam());
 }
 
 TEST_P(ParamTest, Failure) {
   EXPECT_EQ("b", GetParam()) << "Expected failure";
 }
 
 INSTANTIATE_TEST_CASE_P(PrintingStrings,
                         ParamTest,
                         testing::Values(std::string("a")),
                         ParamNameFunc);
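 // Illustrative note (not part of the original test program): with the name
 // functor above, the instantiated tests are expected to be reported with the
 // parameter text as the suffix, roughly
 //
 //   PrintingStrings/ParamTest.Success/a
 //   PrintingStrings/ParamTest.Failure/a
 //
 // For string parameters a custom functor like ParamNameFunc is needed;
 // testing::PrintToStringParamName() would typically add quotes around the
 // value, and quotes are not valid characters in a test name, as the
 // invalid-name tests elsewhere in this change demonstrate.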
 
 // This #ifdef block tests the output of typed tests.
 #if GTEST_HAS_TYPED_TEST
 
 template <typename T>
 class TypedTest : public testing::Test {
 };
 
 TYPED_TEST_CASE(TypedTest, testing::Types<int>);
 
 TYPED_TEST(TypedTest, Success) {
   EXPECT_EQ(0, TypeParam());
 }
 
 TYPED_TEST(TypedTest, Failure) {
   EXPECT_EQ(1, TypeParam()) << "Expected failure";
 }
 
 #endif  // GTEST_HAS_TYPED_TEST
 
 // This #ifdef block tests the output of type-parameterized tests.
 #if GTEST_HAS_TYPED_TEST_P
 
 template <typename T>
 class TypedTestP : public testing::Test {
 };
 
 TYPED_TEST_CASE_P(TypedTestP);
 
 TYPED_TEST_P(TypedTestP, Success) {
   EXPECT_EQ(0U, TypeParam());
 }
 
 TYPED_TEST_P(TypedTestP, Failure) {
   EXPECT_EQ(1U, TypeParam()) << "Expected failure";
 }
 
 REGISTER_TYPED_TEST_CASE_P(TypedTestP, Success, Failure);
 
 typedef testing::Types<unsigned char, unsigned int> UnsignedTypes;
 INSTANTIATE_TYPED_TEST_CASE_P(Unsigned, TypedTestP, UnsignedTypes);
 
 #endif  // GTEST_HAS_TYPED_TEST_P
 
 #if GTEST_HAS_DEATH_TEST
 
 // We rely on the golden file to verify that tests whose test case
 // name ends with DeathTest are run first.
 
 TEST(ADeathTest, ShouldRunFirst) {
 }
 
 # if GTEST_HAS_TYPED_TEST
 
 // We rely on the golden file to verify that typed tests whose test
 // case name ends with DeathTest are run first.
 
 template <typename T>
 class ATypedDeathTest : public testing::Test {
 };
 
 typedef testing::Types<int, double> NumericTypes;
 TYPED_TEST_CASE(ATypedDeathTest, NumericTypes);
 
 TYPED_TEST(ATypedDeathTest, ShouldRunFirst) {
 }
 
 # endif  // GTEST_HAS_TYPED_TEST
 
 # if GTEST_HAS_TYPED_TEST_P
 
 
 // We rely on the golden file to verify that type-parameterized tests
 // whose test case name ends with DeathTest are run first.
 
 template <typename T>
 class ATypeParamDeathTest : public testing::Test {
 };
 
 TYPED_TEST_CASE_P(ATypeParamDeathTest);
 
 TYPED_TEST_P(ATypeParamDeathTest, ShouldRunFirst) {
 }
 
 REGISTER_TYPED_TEST_CASE_P(ATypeParamDeathTest, ShouldRunFirst);
 
 INSTANTIATE_TYPED_TEST_CASE_P(My, ATypeParamDeathTest, NumericTypes);
 
 # endif  // GTEST_HAS_TYPED_TEST_P
 
 #endif  // GTEST_HAS_DEATH_TEST
 
 // Tests various failure conditions of
 // EXPECT_{,NON}FATAL_FAILURE{,_ON_ALL_THREADS}.
 class ExpectFailureTest : public testing::Test {
  public:  // Must be public and not protected due to a bug in g++ 3.4.2.
   enum FailureMode {
     FATAL_FAILURE,
     NONFATAL_FAILURE
   };
   static void AddFailure(FailureMode failure) {
     if (failure == FATAL_FAILURE) {
       FAIL() << "Expected fatal failure.";
     } else {
       ADD_FAILURE() << "Expected non-fatal failure.";
     }
   }
 };
 
 TEST_F(ExpectFailureTest, ExpectFatalFailure) {
   // Expected fatal failure, but succeeds.
   printf("(expecting 1 failure)\n");
   EXPECT_FATAL_FAILURE(SUCCEED(), "Expected fatal failure.");
   // Expected fatal failure, but got a non-fatal failure.
   printf("(expecting 1 failure)\n");
   EXPECT_FATAL_FAILURE(AddFailure(NONFATAL_FAILURE), "Expected non-fatal "
                        "failure.");
   // Wrong message.
   printf("(expecting 1 failure)\n");
   EXPECT_FATAL_FAILURE(AddFailure(FATAL_FAILURE), "Some other fatal failure "
                        "expected.");
 }
 
 TEST_F(ExpectFailureTest, ExpectNonFatalFailure) {
   // Expected non-fatal failure, but succeeds.
   printf("(expecting 1 failure)\n");
   EXPECT_NONFATAL_FAILURE(SUCCEED(), "Expected non-fatal failure.");
   // Expected non-fatal failure, but got a fatal failure.
   printf("(expecting 1 failure)\n");
   EXPECT_NONFATAL_FAILURE(AddFailure(FATAL_FAILURE), "Expected fatal failure.");
   // Wrong message.
   printf("(expecting 1 failure)\n");
   EXPECT_NONFATAL_FAILURE(AddFailure(NONFATAL_FAILURE), "Some other non-fatal "
                           "failure.");
 }
 
 #if GTEST_IS_THREADSAFE
 
 class ExpectFailureWithThreadsTest : public ExpectFailureTest {
  protected:
   static void AddFailureInOtherThread(FailureMode failure) {
     ThreadWithParam<FailureMode> thread(&AddFailure, failure, NULL);
     thread.Join();
   }
 };
 
 TEST_F(ExpectFailureWithThreadsTest, ExpectFatalFailure) {
   // We only intercept the current thread.
   printf("(expecting 2 failures)\n");
   EXPECT_FATAL_FAILURE(AddFailureInOtherThread(FATAL_FAILURE),
                        "Expected fatal failure.");
 }
 
 TEST_F(ExpectFailureWithThreadsTest, ExpectNonFatalFailure) {
   // We only intercept the current thread.
   printf("(expecting 2 failures)\n");
   EXPECT_NONFATAL_FAILURE(AddFailureInOtherThread(NONFATAL_FAILURE),
                           "Expected non-fatal failure.");
 }
 
 typedef ExpectFailureWithThreadsTest ScopedFakeTestPartResultReporterTest;
 
 // Tests that the ScopedFakeTestPartResultReporter only catches failures from
 // the current thread if it is instantiated with INTERCEPT_ONLY_CURRENT_THREAD.
 TEST_F(ScopedFakeTestPartResultReporterTest, InterceptOnlyCurrentThread) {
   printf("(expecting 2 failures)\n");
   TestPartResultArray results;
   {
     ScopedFakeTestPartResultReporter reporter(
         ScopedFakeTestPartResultReporter::INTERCEPT_ONLY_CURRENT_THREAD,
         &results);
     AddFailureInOtherThread(FATAL_FAILURE);
     AddFailureInOtherThread(NONFATAL_FAILURE);
   }
   // The two failures should not have been intercepted.
   EXPECT_EQ(0, results.size()) << "This shouldn't fail.";
 }
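 // Illustrative note (not part of the original test): constructing the
 // reporter with INTERCEPT_ALL_THREADS instead would also capture failures
 // raised in other threads, so the two failures above would be recorded in
 // 'results' rather than reported as real failures:
 //
 //   ScopedFakeTestPartResultReporter reporter(
 //       ScopedFakeTestPartResultReporter::INTERCEPT_ALL_THREADS, &results);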
 
 #endif  // GTEST_IS_THREADSAFE
 
 TEST_F(ExpectFailureTest, ExpectFatalFailureOnAllThreads) {
   // Expected fatal failure, but succeeds.
   printf("(expecting 1 failure)\n");
   EXPECT_FATAL_FAILURE_ON_ALL_THREADS(SUCCEED(), "Expected fatal failure.");
   // Expected fatal failure, but got a non-fatal failure.
   printf("(expecting 1 failure)\n");
   EXPECT_FATAL_FAILURE_ON_ALL_THREADS(AddFailure(NONFATAL_FAILURE),
                                       "Expected non-fatal failure.");
   // Wrong message.
   printf("(expecting 1 failure)\n");
   EXPECT_FATAL_FAILURE_ON_ALL_THREADS(AddFailure(FATAL_FAILURE),
                                       "Some other fatal failure expected.");
 }
 
 TEST_F(ExpectFailureTest, ExpectNonFatalFailureOnAllThreads) {
   // Expected non-fatal failure, but succeeds.
   printf("(expecting 1 failure)\n");
   EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(SUCCEED(), "Expected non-fatal "
                                          "failure.");
   // Expected non-fatal failure, but got a fatal failure.
   printf("(expecting 1 failure)\n");
   EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(AddFailure(FATAL_FAILURE),
                                          "Expected fatal failure.");
   // Wrong message.
   printf("(expecting 1 failure)\n");
   EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(AddFailure(NONFATAL_FAILURE),
                                          "Some other non-fatal failure.");
 }
 
 
 // Two test environments for testing testing::AddGlobalTestEnvironment().
 
 class FooEnvironment : public testing::Environment {
  public:
   virtual void SetUp() {
     printf("%s", "FooEnvironment::SetUp() called.\n");
   }
 
   virtual void TearDown() {
     printf("%s", "FooEnvironment::TearDown() called.\n");
     FAIL() << "Expected fatal failure.";
   }
 };
 
 class BarEnvironment : public testing::Environment {
  public:
   virtual void SetUp() {
     printf("%s", "BarEnvironment::SetUp() called.\n");
   }
 
   virtual void TearDown() {
     printf("%s", "BarEnvironment::TearDown() called.\n");
     ADD_FAILURE() << "Expected non-fatal failure.";
   }
 };
 
 // The main function.
 //
 // The idea is to use Google Test to run all the tests we have defined (some
 // of them are intended to fail), and then compare the test results
 // with the "golden" file.
 int main(int argc, char **argv) {
   testing::GTEST_FLAG(print_time) = false;
 
   // We just run the tests, knowing some of them are intended to fail.
   // We will use a separate Python script to compare the output of
   // this program with the golden file.
 
   // It's hard to test InitGoogleTest() directly, as it has many
   // global side effects.  The following line serves as a sanity test
   // for it.
   testing::InitGoogleTest(&argc, argv);
   bool internal_skip_environment_and_ad_hoc_tests =
       std::count(argv, argv + argc,
                  std::string("internal_skip_environment_and_ad_hoc_tests")) > 0;
 
 #if GTEST_HAS_DEATH_TEST
   if (testing::internal::GTEST_FLAG(internal_run_death_test) != "") {
     // Skip the usual output capturing if we're running as the child
     // process of a threadsafe-style death test.
 # if GTEST_OS_WINDOWS
     posix::FReopen("nul:", "w", stdout);
 # else
     posix::FReopen("/dev/null", "w", stdout);
 # endif  // GTEST_OS_WINDOWS
     return RUN_ALL_TESTS();
   }
 #endif  // GTEST_HAS_DEATH_TEST
 
   if (internal_skip_environment_and_ad_hoc_tests)
     return RUN_ALL_TESTS();
 
   // Registers two global test environments.
   // The golden file verifies that they are set up in the order they
   // are registered, and torn down in the reverse order.
   testing::AddGlobalTestEnvironment(new FooEnvironment);
   testing::AddGlobalTestEnvironment(new BarEnvironment);
 
   return RunAllTests();
 }
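 // Illustrative note (not part of the original program): given the
 // registration order above, the golden output is expected to show the
 // environments set up as FooEnvironment then BarEnvironment, and torn down
 // in reverse order (BarEnvironment then FooEnvironment) after all tests run.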
diff --git a/googletest/test/gtest_uninitialized_test.py b/googletest/test/googletest-param-test-invalid-name1-test.py
old mode 100755
new mode 100644
similarity index 75%
copy from googletest/test/gtest_uninitialized_test.py
copy to googletest/test/googletest-param-test-invalid-name1-test.py
index ae91f2aa..63be0439
--- a/googletest/test/gtest_uninitialized_test.py
+++ b/googletest/test/googletest-param-test-invalid-name1-test.py
@@ -1,69 +1,72 @@
 #!/usr/bin/env python
 #
-# Copyright 2008, Google Inc.
-# All rights reserved.
+# Copyright 2015 Google Inc. All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following disclaimer
 # in the documentation and/or other materials provided with the
 # distribution.
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 """Verifies that Google Test warns the user when not initialized properly."""
 
-__author__ = 'wan@google.com (Zhanyong Wan)'
+__author__ = 'jmadill@google.com (Jamie Madill)'
 
-import gtest_test_utils
+import os
 
-COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_uninitialized_test_')
+IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
 
+if IS_LINUX:
+  import gtest_test_utils
+else:
+  import gtest_test_utils
 
-def Assert(condition):
-  if not condition:
-    raise AssertionError
+binary_name = 'googletest-param-test-invalid-name1-test_'
+COMMAND = gtest_test_utils.GetTestExecutablePath(binary_name)
 
 
-def AssertEq(expected, actual):
-  if expected != actual:
-    print 'Expected: %s' % (expected,)
-    print '  Actual: %s' % (actual,)
+def Assert(condition):
+  if not condition:
     raise AssertionError
 
 
 def TestExitCodeAndOutput(command):
   """Runs the given command and verifies its exit code and output."""
 
-  # Verifies that 'command' exits with code 1.
+  err = ('Parameterized test name \'"InvalidWithQuotes"\' is invalid')
+
   p = gtest_test_utils.Subprocess(command)
-  if p.exited and p.exit_code == 0:
-    Assert('IMPORTANT NOTICE' in p.output);
-  Assert('InitGoogleTest' in p.output)
+  Assert(p.terminated_by_signal)
+
+  # Verify that the output contains the expected error message
+  Assert(err in p.output)
+
 
+class GTestParamTestInvalidName1Test(gtest_test_utils.TestCase):
 
-class GTestUninitializedTest(gtest_test_utils.TestCase):
   def testExitCodeAndOutput(self):
     TestExitCodeAndOutput(COMMAND)
 
 
 if __name__ == '__main__':
   gtest_test_utils.Main()
diff --git a/googletest/test/gtest_uninitialized_test_.cc b/googletest/test/googletest-param-test-invalid-name1-test_.cc
similarity index 76%
copy from googletest/test/gtest_uninitialized_test_.cc
copy to googletest/test/googletest-param-test-invalid-name1-test_.cc
index 2ba0e8b8..68dd4706 100644
--- a/googletest/test/gtest_uninitialized_test_.cc
+++ b/googletest/test/googletest-param-test-invalid-name1-test_.cc
@@ -1,43 +1,51 @@
-// Copyright 2008, Google Inc.
+// Copyright 2015, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
-// Author: wan@google.com (Zhanyong Wan)
+// Author: jmadill@google.com (Jamie Madill)
 
 #include "gtest/gtest.h"
 
-TEST(DummyTest, Dummy) {
-  // This test doesn't verify anything.  We just need it to create a
-  // realistic stage for testing the behavior of Google Test when
-  // RUN_ALL_TESTS() is called without
-  // testing::InitGoogleTest() being called first.
+namespace {
+class DummyTest : public ::testing::TestWithParam<const char *> {};
+
+TEST_P(DummyTest, Dummy) {
 }
 
-int main() {
+INSTANTIATE_TEST_CASE_P(InvalidTestName,
+                        DummyTest,
+                        ::testing::Values("InvalidWithQuotes"),
+                        ::testing::PrintToStringParamName());
+
+}  // namespace
+
+int main(int argc, char *argv[]) {
+  testing::InitGoogleTest(&argc, argv);
   return RUN_ALL_TESTS();
 }
+
diff --git a/googletest/test/gtest_uninitialized_test.py b/googletest/test/googletest-param-test-invalid-name2-test.py
old mode 100755
new mode 100644
similarity index 76%
copy from googletest/test/gtest_uninitialized_test.py
copy to googletest/test/googletest-param-test-invalid-name2-test.py
index ae91f2aa..b1a80c18
--- a/googletest/test/gtest_uninitialized_test.py
+++ b/googletest/test/googletest-param-test-invalid-name2-test.py
@@ -1,69 +1,71 @@
 #!/usr/bin/env python
 #
-# Copyright 2008, Google Inc.
-# All rights reserved.
+# Copyright 2015 Google Inc. All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following disclaimer
 # in the documentation and/or other materials provided with the
 # distribution.
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 """Verifies that Google Test warns the user when not initialized properly."""
 
-__author__ = 'wan@google.com (Zhanyong Wan)'
+__author__ = 'jmadill@google.com (Jamie Madill)'
 
-import gtest_test_utils
+import os
 
-COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_uninitialized_test_')
+IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
 
+if IS_LINUX:
+  import gtest_test_utils
+else:
+  import gtest_test_utils
 
-def Assert(condition):
-  if not condition:
-    raise AssertionError
+binary_name = 'googletest-param-test-invalid-name2-test_'
+COMMAND = gtest_test_utils.GetTestExecutablePath(binary_name)
 
 
-def AssertEq(expected, actual):
-  if expected != actual:
-    print 'Expected: %s' % (expected,)
-    print '  Actual: %s' % (actual,)
+def Assert(condition):
+  if not condition:
     raise AssertionError
 
 
 def TestExitCodeAndOutput(command):
   """Runs the given command and verifies its exit code and output."""
 
-  # Verifies that 'command' exits with code 1.
+  err = ('Duplicate parameterized test name \'a\'')
+
   p = gtest_test_utils.Subprocess(command)
-  if p.exited and p.exit_code == 0:
-    Assert('IMPORTANT NOTICE' in p.output);
-  Assert('InitGoogleTest' in p.output)
+  Assert(p.terminated_by_signal)
+
+  # Check for appropriate output
+  Assert(err in p.output)
 
 
-class GTestUninitializedTest(gtest_test_utils.TestCase):
+class GTestParamTestInvalidName2Test(gtest_test_utils.TestCase):
+
   def testExitCodeAndOutput(self):
     TestExitCodeAndOutput(COMMAND)
 
-
 if __name__ == '__main__':
   gtest_test_utils.Main()
diff --git a/googletest/test/gtest_uninitialized_test_.cc b/googletest/test/googletest-param-test-invalid-name2-test_.cc
similarity index 72%
copy from googletest/test/gtest_uninitialized_test_.cc
copy to googletest/test/googletest-param-test-invalid-name2-test_.cc
index 2ba0e8b8..9a8ec4eb 100644
--- a/googletest/test/gtest_uninitialized_test_.cc
+++ b/googletest/test/googletest-param-test-invalid-name2-test_.cc
@@ -1,43 +1,56 @@
-// Copyright 2008, Google Inc.
+// Copyright 2015, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
-// Author: wan@google.com (Zhanyong Wan)
+// Author: jmadill@google.com (Jamie Madill)
 
 #include "gtest/gtest.h"
 
-TEST(DummyTest, Dummy) {
-  // This test doesn't verify anything.  We just need it to create a
-  // realistic stage for testing the behavior of Google Test when
-  // RUN_ALL_TESTS() is called without
-  // testing::InitGoogleTest() being called first.
+namespace {
+class DummyTest : public ::testing::TestWithParam<const char *> {};
+
+std::string StringParamTestSuffix(
+    const testing::TestParamInfo<const char*>& info) {
+  return std::string(info.param);
+}
+
+TEST_P(DummyTest, Dummy) {
 }
 
-int main() {
+INSTANTIATE_TEST_CASE_P(DuplicateTestNames,
+                        DummyTest,
+                        ::testing::Values("a", "b", "a", "c"),
+                        StringParamTestSuffix);
+}  // namespace
+
+int main(int argc, char *argv[]) {
+  testing::InitGoogleTest(&argc, argv);
   return RUN_ALL_TESTS();
 }
+
+
diff --git a/googletest/test/gtest-param-test_test.cc b/googletest/test/googletest-param-test-test.cc
similarity index 99%
rename from googletest/test/gtest-param-test_test.cc
rename to googletest/test/googletest-param-test-test.cc
index adc4d1b5..893b132a 100644
--- a/googletest/test/gtest-param-test_test.cc
+++ b/googletest/test/googletest-param-test-test.cc
@@ -1,1110 +1,1110 @@
 // Copyright 2008, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 // Author: vladl@google.com (Vlad Losev)
 //
 // Tests for Google Test itself. This file verifies that the parameter
 // generators objects produce correct parameter sequences and that
 // Google Test runtime instantiates correct tests from those sequences.
 
 #include "gtest/gtest.h"
 
 # include <algorithm>
 # include <iostream>
 # include <list>
 # include <sstream>
 # include <string>
 # include <vector>
 
 # include "src/gtest-internal-inl.h"  // for UnitTestOptions
-# include "test/gtest-param-test_test.h"
+# include "test/googletest-param-test-test.h"
 
 using ::std::vector;
 using ::std::sort;
 
 using ::testing::AddGlobalTestEnvironment;
 using ::testing::Bool;
 using ::testing::Message;
 using ::testing::Range;
 using ::testing::TestWithParam;
 using ::testing::Values;
 using ::testing::ValuesIn;
 
 # if GTEST_HAS_COMBINE
 using ::testing::Combine;
 using ::testing::get;
 using ::testing::make_tuple;
 using ::testing::tuple;
 # endif  // GTEST_HAS_COMBINE
 
 using ::testing::internal::ParamGenerator;
 using ::testing::internal::UnitTestOptions;
 
 // Prints a value to a string.
 //
 // TODO(wan@google.com): remove PrintValue() when we move matchers and
 // EXPECT_THAT() from Google Mock to Google Test.  At that time, we
 // can write EXPECT_THAT(x, Eq(y)) to compare two tuples x and y, as
 // EXPECT_THAT() and the matchers know how to print tuples.
 template <typename T>
 ::std::string PrintValue(const T& value) {
   ::std::stringstream stream;
   stream << value;
   return stream.str();
 }
 
 # if GTEST_HAS_COMBINE
 
 // These overloads allow printing tuples in our tests.  We cannot
 // define an operator<< for tuples, as that definition needs to be in
 // the std namespace in order to be picked up by Google Test via
 // Argument-Dependent Lookup, yet defining anything in the std
 // namespace in non-STL code is undefined behavior.
 
 template <typename T1, typename T2>
 ::std::string PrintValue(const tuple<T1, T2>& value) {
   ::std::stringstream stream;
   stream << "(" << get<0>(value) << ", " << get<1>(value) << ")";
   return stream.str();
 }
 
 template <typename T1, typename T2, typename T3>
 ::std::string PrintValue(const tuple<T1, T2, T3>& value) {
   ::std::stringstream stream;
   stream << "(" << get<0>(value) << ", " << get<1>(value)
          << ", "<< get<2>(value) << ")";
   return stream.str();
 }
 
 template <typename T1, typename T2, typename T3, typename T4, typename T5,
           typename T6, typename T7, typename T8, typename T9, typename T10>
 ::std::string PrintValue(
     const tuple<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>& value) {
   ::std::stringstream stream;
   stream << "(" << get<0>(value) << ", " << get<1>(value)
          << ", "<< get<2>(value) << ", " << get<3>(value)
          << ", "<< get<4>(value) << ", " << get<5>(value)
          << ", "<< get<6>(value) << ", " << get<7>(value)
          << ", "<< get<8>(value) << ", " << get<9>(value) << ")";
   return stream.str();
 }
 
 # endif  // GTEST_HAS_COMBINE
 
 // Verifies that a sequence generated by the generator and accessed
 // via the iterator object matches the expected one using Google Test
 // assertions.
 template <typename T, size_t N>
 void VerifyGenerator(const ParamGenerator<T>& generator,
                      const T (&expected_values)[N]) {
   typename ParamGenerator<T>::iterator it = generator.begin();
   for (size_t i = 0; i < N; ++i) {
     ASSERT_FALSE(it == generator.end())
         << "At element " << i << " when accessing via an iterator "
         << "created with the copy constructor.\n";
     // We cannot use EXPECT_EQ() here as the values may be tuples,
     // which don't support <<.
     EXPECT_TRUE(expected_values[i] == *it)
         << "where i is " << i
         << ", expected_values[i] is " << PrintValue(expected_values[i])
         << ", *it is " << PrintValue(*it)
         << ", and 'it' is an iterator created with the copy constructor.\n";
     ++it;
   }
   EXPECT_TRUE(it == generator.end())
         << "At the presumed end of sequence when accessing via an iterator "
         << "created with the copy constructor.\n";
 
   // Test the iterator assignment. The following lines verify that
   // the sequence accessed via an iterator initialized via the
   // assignment operator (as opposed to a copy constructor) matches
   // just the same.
   it = generator.begin();
   for (size_t i = 0; i < N; ++i) {
     ASSERT_FALSE(it == generator.end())
         << "At element " << i << " when accessing via an iterator "
         << "created with the assignment operator.\n";
     EXPECT_TRUE(expected_values[i] == *it)
         << "where i is " << i
         << ", expected_values[i] is " << PrintValue(expected_values[i])
         << ", *it is " << PrintValue(*it)
         << ", and 'it' is an iterator created with the copy constructor.\n";
     ++it;
   }
   EXPECT_TRUE(it == generator.end())
         << "At the presumed end of sequence when accessing via an iterator "
         << "created with the assignment operator.\n";
 }
 
 template <typename T>
 void VerifyGeneratorIsEmpty(const ParamGenerator<T>& generator) {
   typename ParamGenerator<T>::iterator it = generator.begin();
   EXPECT_TRUE(it == generator.end());
 
   it = generator.begin();
   EXPECT_TRUE(it == generator.end());
 }
 
 // Generator tests. They test that each of the provided generator functions
 // generates an expected sequence of values. The general test pattern
 // instantiates a generator using one of the generator functions,
 // checks the sequence produced by the generator using its iterator API,
 // and then resets the iterator back to the beginning of the sequence
 // and checks the sequence again.
 
 // Tests that iterators produced by generator functions conform to the
 // ForwardIterator concept.
 TEST(IteratorTest, ParamIteratorConformsToForwardIteratorConcept) {
   const ParamGenerator<int> gen = Range(0, 10);
   ParamGenerator<int>::iterator it = gen.begin();
 
   // Verifies that iterator initialization works as expected.
   ParamGenerator<int>::iterator it2 = it;
   EXPECT_TRUE(*it == *it2) << "Initialized iterators must point to the "
                            << "element same as its source points to";
 
   // Verifies that iterator assignment works as expected.
   ++it;
   EXPECT_FALSE(*it == *it2);
   it2 = it;
   EXPECT_TRUE(*it == *it2) << "Assigned iterators must point to the "
                            << "element same as its source points to";
 
   // Verifies that prefix operator++() returns *this.
   EXPECT_EQ(&it, &(++it)) << "Result of the prefix operator++ must be "
                           << "refer to the original object";
 
   // Verifies that the result of the postfix operator++ points to the value
   // pointed to by the original iterator.
   int original_value = *it;  // Have to compute it outside of macro call to be
                              // unaffected by the parameter evaluation order.
   EXPECT_EQ(original_value, *(it++));
 
   // Verifies that prefix and postfix operator++() advance an iterator
   // all the same.
   it2 = it;
   ++it;
   ++it2;
   EXPECT_TRUE(*it == *it2);
 }
 
 // Tests that Range() generates the expected sequence.
 TEST(RangeTest, IntRangeWithDefaultStep) {
   const ParamGenerator<int> gen = Range(0, 3);
   const int expected_values[] = {0, 1, 2};
   VerifyGenerator(gen, expected_values);
 }
 
 // Edge case. Tests that Range() generates the single element sequence
 // as expected when provided with range limits that are equal.
 TEST(RangeTest, IntRangeSingleValue) {
   const ParamGenerator<int> gen = Range(0, 1);
   const int expected_values[] = {0};
   VerifyGenerator(gen, expected_values);
 }
 
 // Edge case. Tests that Range() generates an empty sequence when
 // supplied with an empty range.
 TEST(RangeTest, IntRangeEmpty) {
   const ParamGenerator<int> gen = Range(0, 0);
   VerifyGeneratorIsEmpty(gen);
 }
 
 // Tests that Range() with a custom step (greater than one) generates
 // the expected sequence.
 TEST(RangeTest, IntRangeWithCustomStep) {
   const ParamGenerator<int> gen = Range(0, 9, 3);
   const int expected_values[] = {0, 3, 6};
   VerifyGenerator(gen, expected_values);
 }
 
 // Tests that Range() with a custom step (greater than one) generates
 // the expected sequence when the last element does not fall on the
 // upper range limit. Sequences generated by Range() must not have
 // elements beyond the range limits.
 TEST(RangeTest, IntRangeWithCustomStepOverUpperBound) {
   const ParamGenerator<int> gen = Range(0, 4, 3);
   const int expected_values[] = {0, 3};
   VerifyGenerator(gen, expected_values);
 }
 
 // Verifies that Range works with user-defined types that define
 // copy constructor, operator=(), operator+(), and operator<().
 class DogAdder {
  public:
   explicit DogAdder(const char* a_value) : value_(a_value) {}
   DogAdder(const DogAdder& other) : value_(other.value_.c_str()) {}
 
   DogAdder operator=(const DogAdder& other) {
     if (this != &other)
       value_ = other.value_;
     return *this;
   }
   DogAdder operator+(const DogAdder& other) const {
     Message msg;
     msg << value_.c_str() << other.value_.c_str();
     return DogAdder(msg.GetString().c_str());
   }
   bool operator<(const DogAdder& other) const {
     return value_ < other.value_;
   }
   const std::string& value() const { return value_; }
 
  private:
   std::string value_;
 };
 
 TEST(RangeTest, WorksWithACustomType) {
   const ParamGenerator<DogAdder> gen =
       Range(DogAdder("cat"), DogAdder("catdogdog"), DogAdder("dog"));
   ParamGenerator<DogAdder>::iterator it = gen.begin();
 
   ASSERT_FALSE(it == gen.end());
   EXPECT_STREQ("cat", it->value().c_str());
 
   ASSERT_FALSE(++it == gen.end());
   EXPECT_STREQ("catdog", it->value().c_str());
 
   EXPECT_TRUE(++it == gen.end());
 }
 
 class IntWrapper {
  public:
   explicit IntWrapper(int a_value) : value_(a_value) {}
   IntWrapper(const IntWrapper& other) : value_(other.value_) {}
 
   IntWrapper operator=(const IntWrapper& other) {
     value_ = other.value_;
     return *this;
   }
   // operator+() adds a different type.
   IntWrapper operator+(int other) const { return IntWrapper(value_ + other); }
   bool operator<(const IntWrapper& other) const {
     return value_ < other.value_;
   }
   int value() const { return value_; }
 
  private:
   int value_;
 };
 
 TEST(RangeTest, WorksWithACustomTypeWithDifferentIncrementType) {
   const ParamGenerator<IntWrapper> gen = Range(IntWrapper(0), IntWrapper(2));
   ParamGenerator<IntWrapper>::iterator it = gen.begin();
 
   ASSERT_FALSE(it == gen.end());
   EXPECT_EQ(0, it->value());
 
   ASSERT_FALSE(++it == gen.end());
   EXPECT_EQ(1, it->value());
 
   EXPECT_TRUE(++it == gen.end());
 }
 
 // Tests that ValuesIn() with an array parameter generates
 // the expected sequence.
 TEST(ValuesInTest, ValuesInArray) {
   int array[] = {3, 5, 8};
   const ParamGenerator<int> gen = ValuesIn(array);
   VerifyGenerator(gen, array);
 }
 
 // Tests that ValuesIn() with a const array parameter generates
 // the expected sequence.
 TEST(ValuesInTest, ValuesInConstArray) {
   const int array[] = {3, 5, 8};
   const ParamGenerator<int> gen = ValuesIn(array);
   VerifyGenerator(gen, array);
 }
 
 // Edge case. Tests that ValuesIn() with an array parameter containing a
 // single element generates the single element sequence.
 TEST(ValuesInTest, ValuesInSingleElementArray) {
   int array[] = {42};
   const ParamGenerator<int> gen = ValuesIn(array);
   VerifyGenerator(gen, array);
 }
 
 // Tests that ValuesIn() generates the expected sequence for an STL
 // container (vector).
 TEST(ValuesInTest, ValuesInVector) {
   typedef ::std::vector<int> ContainerType;
   ContainerType values;
   values.push_back(3);
   values.push_back(5);
   values.push_back(8);
   const ParamGenerator<int> gen = ValuesIn(values);
 
   const int expected_values[] = {3, 5, 8};
   VerifyGenerator(gen, expected_values);
 }
 
 // Tests that ValuesIn() generates the expected sequence.
 TEST(ValuesInTest, ValuesInIteratorRange) {
   typedef ::std::vector<int> ContainerType;
   ContainerType values;
   values.push_back(3);
   values.push_back(5);
   values.push_back(8);
   const ParamGenerator<int> gen = ValuesIn(values.begin(), values.end());
 
   const int expected_values[] = {3, 5, 8};
   VerifyGenerator(gen, expected_values);
 }
 
 // Edge case. Tests that ValuesIn() provided with an iterator range specifying a
 // single value generates a single-element sequence.
 TEST(ValuesInTest, ValuesInSingleElementIteratorRange) {
   typedef ::std::vector<int> ContainerType;
   ContainerType values;
   values.push_back(42);
   const ParamGenerator<int> gen = ValuesIn(values.begin(), values.end());
 
   const int expected_values[] = {42};
   VerifyGenerator(gen, expected_values);
 }
 
 // Edge case. Tests that ValuesIn() provided with an empty iterator range
 // generates an empty sequence.
 TEST(ValuesInTest, ValuesInEmptyIteratorRange) {
   typedef ::std::vector<int> ContainerType;
   ContainerType values;
   const ParamGenerator<int> gen = ValuesIn(values.begin(), values.end());
 
   VerifyGeneratorIsEmpty(gen);
 }
 
 // Tests that Values() generates the expected sequence.
 TEST(ValuesTest, ValuesWorks) {
   const ParamGenerator<int> gen = Values(3, 5, 8);
 
   const int expected_values[] = {3, 5, 8};
   VerifyGenerator(gen, expected_values);
 }
 
 // Tests that Values() generates the expected sequence from elements of
 // different types convertible to ParamGenerator's parameter type.
 TEST(ValuesTest, ValuesWorksForValuesOfCompatibleTypes) {
   const ParamGenerator<double> gen = Values(3, 5.0f, 8.0);
 
   const double expected_values[] = {3.0, 5.0, 8.0};
   VerifyGenerator(gen, expected_values);
 }
 
 TEST(ValuesTest, ValuesWorksForMaxLengthList) {
   const ParamGenerator<int> gen = Values(
       10, 20, 30, 40, 50, 60, 70, 80, 90, 100,
       110, 120, 130, 140, 150, 160, 170, 180, 190, 200,
       210, 220, 230, 240, 250, 260, 270, 280, 290, 300,
       310, 320, 330, 340, 350, 360, 370, 380, 390, 400,
       410, 420, 430, 440, 450, 460, 470, 480, 490, 500);
 
   const int expected_values[] = {
       10, 20, 30, 40, 50, 60, 70, 80, 90, 100,
       110, 120, 130, 140, 150, 160, 170, 180, 190, 200,
       210, 220, 230, 240, 250, 260, 270, 280, 290, 300,
       310, 320, 330, 340, 350, 360, 370, 380, 390, 400,
       410, 420, 430, 440, 450, 460, 470, 480, 490, 500};
   VerifyGenerator(gen, expected_values);
 }
 
 // Edge case test. Tests that single-parameter Values() generates the sequence
 // with the single value.
 TEST(ValuesTest, ValuesWithSingleParameter) {
   const ParamGenerator<int> gen = Values(42);
 
   const int expected_values[] = {42};
   VerifyGenerator(gen, expected_values);
 }
 
 // Tests that Bool() generates sequence (false, true).
 TEST(BoolTest, BoolWorks) {
   const ParamGenerator<bool> gen = Bool();
 
   const bool expected_values[] = {false, true};
   VerifyGenerator(gen, expected_values);
 }
 
 # if GTEST_HAS_COMBINE
 
 // Tests that Combine() with two parameters generates the expected sequence.
 TEST(CombineTest, CombineWithTwoParameters) {
   const char* foo = "foo";
   const char* bar = "bar";
   const ParamGenerator<tuple<const char*, int> > gen =
       Combine(Values(foo, bar), Values(3, 4));
 
   tuple<const char*, int> expected_values[] = {
     make_tuple(foo, 3), make_tuple(foo, 4),
     make_tuple(bar, 3), make_tuple(bar, 4)};
   VerifyGenerator(gen, expected_values);
 }
 
 // Tests that Combine() with three parameters generates the expected sequence.
 TEST(CombineTest, CombineWithThreeParameters) {
   const ParamGenerator<tuple<int, int, int> > gen = Combine(Values(0, 1),
                                                             Values(3, 4),
                                                             Values(5, 6));
   tuple<int, int, int> expected_values[] = {
     make_tuple(0, 3, 5), make_tuple(0, 3, 6),
     make_tuple(0, 4, 5), make_tuple(0, 4, 6),
     make_tuple(1, 3, 5), make_tuple(1, 3, 6),
     make_tuple(1, 4, 5), make_tuple(1, 4, 6)};
   VerifyGenerator(gen, expected_values);
 }
 
 // Tests that Combine() with the first parameter generating a single-value
 // sequence generates a sequence with the number of elements equal to the
 // number of elements in the sequence generated by the second parameter.
 TEST(CombineTest, CombineWithFirstParameterSingleValue) {
   const ParamGenerator<tuple<int, int> > gen = Combine(Values(42),
                                                        Values(0, 1));
 
   tuple<int, int> expected_values[] = {make_tuple(42, 0), make_tuple(42, 1)};
   VerifyGenerator(gen, expected_values);
 }
 
 // Tests that Combine() with the second parameter generating a single-value
 // sequence generates a sequence with the number of elements equal to the
 // number of elements in the sequence generated by the first parameter.
 TEST(CombineTest, CombineWithSecondParameterSingleValue) {
   const ParamGenerator<tuple<int, int> > gen = Combine(Values(0, 1),
                                                        Values(42));
 
   tuple<int, int> expected_values[] = {make_tuple(0, 42), make_tuple(1, 42)};
   VerifyGenerator(gen, expected_values);
 }
 
 // Tests that when the first parameter produces an empty sequence,
 // Combine() produces an empty sequence, too.
 TEST(CombineTest, CombineWithFirstParameterEmptyRange) {
   const ParamGenerator<tuple<int, int> > gen = Combine(Range(0, 0),
                                                        Values(0, 1));
   VerifyGeneratorIsEmpty(gen);
 }
 
 // Tests that when the second parameter produces an empty sequence,
 // Combine() produces an empty sequence, too.
 TEST(CombineTest, CombineWithSecondParameterEmptyRange) {
   const ParamGenerator<tuple<int, int> > gen = Combine(Values(0, 1),
                                                        Range(1, 1));
   VerifyGeneratorIsEmpty(gen);
 }
 
 // Edge case. Tests that Combine() works with the maximum number
 // of parameters supported by Google Test (currently 10).
 TEST(CombineTest, CombineWithMaxNumberOfParameters) {
   const char* foo = "foo";
   const char* bar = "bar";
   const ParamGenerator<tuple<const char*, int, int, int, int, int, int, int,
                              int, int> > gen = Combine(Values(foo, bar),
                                                        Values(1), Values(2),
                                                        Values(3), Values(4),
                                                        Values(5), Values(6),
                                                        Values(7), Values(8),
                                                        Values(9));
 
   tuple<const char*, int, int, int, int, int, int, int, int, int>
       expected_values[] = {make_tuple(foo, 1, 2, 3, 4, 5, 6, 7, 8, 9),
                            make_tuple(bar, 1, 2, 3, 4, 5, 6, 7, 8, 9)};
   VerifyGenerator(gen, expected_values);
 }
 
 #if GTEST_LANG_CXX11
 
 class NonDefaultConstructAssignString {
  public:
   NonDefaultConstructAssignString(const std::string& s) : str_(s) {}
 
   const std::string& str() const { return str_; }
 
  private:
   std::string str_;
 
   // Not default constructible
   NonDefaultConstructAssignString();
   // Not assignable
   void operator=(const NonDefaultConstructAssignString&);
 };
 
 TEST(CombineTest, NonDefaultConstructAssign) {
   const ParamGenerator<tuple<int, NonDefaultConstructAssignString> > gen =
       Combine(Values(0, 1), Values(NonDefaultConstructAssignString("A"),
                                    NonDefaultConstructAssignString("B")));
 
   ParamGenerator<tuple<int, NonDefaultConstructAssignString> >::iterator it =
       gen.begin();
 
   EXPECT_EQ(0, std::get<0>(*it));
   EXPECT_EQ("A", std::get<1>(*it).str());
   ++it;
 
   EXPECT_EQ(0, std::get<0>(*it));
   EXPECT_EQ("B", std::get<1>(*it).str());
   ++it;
 
   EXPECT_EQ(1, std::get<0>(*it));
   EXPECT_EQ("A", std::get<1>(*it).str());
   ++it;
 
   EXPECT_EQ(1, std::get<0>(*it));
   EXPECT_EQ("B", std::get<1>(*it).str());
   ++it;
 
   EXPECT_TRUE(it == gen.end());
 }
 
 #endif   // GTEST_LANG_CXX11
 # endif  // GTEST_HAS_COMBINE
 
 // Tests that a generator produces the correct sequence after being
 // assigned from another generator.
 TEST(ParamGeneratorTest, AssignmentWorks) {
   ParamGenerator<int> gen = Values(1, 2);
   const ParamGenerator<int> gen2 = Values(3, 4);
   gen = gen2;
 
   const int expected_values[] = {3, 4};
   VerifyGenerator(gen, expected_values);
 }
 
 // This test verifies that the tests are expanded and run as specified:
 // one test per element from the sequence produced by the generator
 // specified in INSTANTIATE_TEST_CASE_P. It also verifies that the test's
 // fixture constructor, SetUp(), and TearDown() have run and have been
 // supplied with the correct parameters.
 
 // The use of an environment object allows detection of the case where no test
 // case functionality is run at all. In this case TestCaseTearDown will not
 // be able to detect missing tests, naturally.
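 // Environment::TearDown() runs inside RUN_ALL_TESTS() after every selected
 // test has finished, so by the time the checks below execute the counters
 // reflect all fixture constructor, SetUp(), TearDown(), and test body calls.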
 template <int kExpectedCalls>
 class TestGenerationEnvironment : public ::testing::Environment {
  public:
   static TestGenerationEnvironment* Instance() {
     static TestGenerationEnvironment* instance = new TestGenerationEnvironment;
     return instance;
   }
 
   void FixtureConstructorExecuted() { fixture_constructor_count_++; }
   void SetUpExecuted() { set_up_count_++; }
   void TearDownExecuted() { tear_down_count_++; }
   void TestBodyExecuted() { test_body_count_++; }
 
   virtual void TearDown() {
     // If all MultipleTestGenerationTest tests have been de-selected
     // by the filter flag, the following checks make no sense.
     bool perform_check = false;
 
     for (int i = 0; i < kExpectedCalls; ++i) {
       Message msg;
       msg << "TestsExpandedAndRun/" << i;
       if (UnitTestOptions::FilterMatchesTest(
              "TestExpansionModule/MultipleTestGenerationTest",
               msg.GetString().c_str())) {
         perform_check = true;
       }
     }
     if (perform_check) {
       EXPECT_EQ(kExpectedCalls, fixture_constructor_count_)
           << "Fixture constructor of ParamTestGenerationTest test case "
           << "has not been run as expected.";
       EXPECT_EQ(kExpectedCalls, set_up_count_)
           << "Fixture SetUp method of ParamTestGenerationTest test case "
           << "has not been run as expected.";
       EXPECT_EQ(kExpectedCalls, tear_down_count_)
           << "Fixture TearDown method of ParamTestGenerationTest test case "
           << "has not been run as expected.";
       EXPECT_EQ(kExpectedCalls, test_body_count_)
           << "Test in ParamTestGenerationTest test case "
           << "has not been run as expected.";
     }
   }
 
  private:
   TestGenerationEnvironment() : fixture_constructor_count_(0), set_up_count_(0),
                                 tear_down_count_(0), test_body_count_(0) {}
 
   int fixture_constructor_count_;
   int set_up_count_;
   int tear_down_count_;
   int test_body_count_;
 
   GTEST_DISALLOW_COPY_AND_ASSIGN_(TestGenerationEnvironment);
 };
 
 const int test_generation_params[] = {36, 42, 72};
 
 class TestGenerationTest : public TestWithParam<int> {
  public:
   enum {
     PARAMETER_COUNT =
         sizeof(test_generation_params)/sizeof(test_generation_params[0])
   };
 
   typedef TestGenerationEnvironment<PARAMETER_COUNT> Environment;
 
   TestGenerationTest() {
     Environment::Instance()->FixtureConstructorExecuted();
     current_parameter_ = GetParam();
   }
   virtual void SetUp() {
     Environment::Instance()->SetUpExecuted();
     EXPECT_EQ(current_parameter_, GetParam());
   }
   virtual void TearDown() {
     Environment::Instance()->TearDownExecuted();
     EXPECT_EQ(current_parameter_, GetParam());
   }
 
   static void SetUpTestCase() {
     bool all_tests_in_test_case_selected = true;
 
     for (int i = 0; i < PARAMETER_COUNT; ++i) {
       Message test_name;
       test_name << "TestsExpandedAndRun/" << i;
       if ( !UnitTestOptions::FilterMatchesTest(
                 "TestExpansionModule/MultipleTestGenerationTest",
                 test_name.GetString())) {
         all_tests_in_test_case_selected = false;
       }
     }
     EXPECT_TRUE(all_tests_in_test_case_selected)
         << "When running the TestGenerationTest test case all of its tests\n"
         << "must be selected by the filter flag for the test case to pass.\n"
         << "If not all of them are enabled, we can't reliably conclude\n"
         << "that the correct number of tests have been generated.";
 
     collected_parameters_.clear();
   }
 
   static void TearDownTestCase() {
     vector<int> expected_values(test_generation_params,
                                 test_generation_params + PARAMETER_COUNT);
     // Test execution order is not guaranteed by Google Test,
     // so the order of values in collected_parameters_ can be
     // different and we have to sort to compare.
     sort(expected_values.begin(), expected_values.end());
     sort(collected_parameters_.begin(), collected_parameters_.end());
 
     EXPECT_TRUE(collected_parameters_ == expected_values);
   }
 
  protected:
   int current_parameter_;
   static vector<int> collected_parameters_;
 
  private:
   GTEST_DISALLOW_COPY_AND_ASSIGN_(TestGenerationTest);
 };
 vector<int> TestGenerationTest::collected_parameters_;
 
 TEST_P(TestGenerationTest, TestsExpandedAndRun) {
   Environment::Instance()->TestBodyExecuted();
   EXPECT_EQ(current_parameter_, GetParam());
   collected_parameters_.push_back(GetParam());
 }
 INSTANTIATE_TEST_CASE_P(TestExpansionModule, TestGenerationTest,
                         ValuesIn(test_generation_params));
 
 // This test verifies that the element sequence (third parameter of
 // INSTANTIATE_TEST_CASE_P) is evaluated in InitGoogleTest() and neither at
 // the call site of INSTANTIATE_TEST_CASE_P nor in RUN_ALL_TESTS().  For
 // that, we declare param_value_ to be a static member of
 // GeneratorEvaluationTest and initialize it to 0.  We set it to 1 in
 // main(), just before invocation of InitGoogleTest().  After calling
 // InitGoogleTest(), we set the value to 2.  If the sequence is evaluated
 // before or after InitGoogleTest, INSTANTIATE_TEST_CASE_P will create a
 // test with parameter other than 1, and the test body will fail the
 // assertion.
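 //
 // In other words, the intended timeline is:
 //   1. param_value_ == 0 after static initialization;
 //   2. main() sets param_value_ to 1;
 //   3. InitGoogleTest() evaluates Values(param_value()) and captures 1;
 //   4. main() sets param_value_ to 2;
 //   5. the test body runs and expects GetParam() == 1.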
 class GeneratorEvaluationTest : public TestWithParam<int> {
  public:
   static int param_value() { return param_value_; }
   static void set_param_value(int param_value) { param_value_ = param_value; }
 
  private:
   static int param_value_;
 };
 int GeneratorEvaluationTest::param_value_ = 0;
 
 TEST_P(GeneratorEvaluationTest, GeneratorsEvaluatedInMain) {
   EXPECT_EQ(1, GetParam());
 }
 INSTANTIATE_TEST_CASE_P(GenEvalModule,
                         GeneratorEvaluationTest,
                         Values(GeneratorEvaluationTest::param_value()));
 
 // Tests that generators defined in a different translation unit are
 // functional. Generator extern_gen is defined in gtest-param-test_test2.cc.
 extern ParamGenerator<int> extern_gen;
 class ExternalGeneratorTest : public TestWithParam<int> {};
 TEST_P(ExternalGeneratorTest, ExternalGenerator) {
   // The sequence produced by extern_gen contains only a single value,
   // which we verify here.
   EXPECT_EQ(GetParam(), 33);
 }
 INSTANTIATE_TEST_CASE_P(ExternalGeneratorModule,
                         ExternalGeneratorTest,
                         extern_gen);
 
 // Tests that a parameterized test case can be defined in one translation
 // unit and instantiated in another. This test will be instantiated in
 // gtest-param-test_test2.cc. ExternalInstantiationTest fixture class is
 // defined in gtest-param-test_test.h.
 TEST_P(ExternalInstantiationTest, IsMultipleOf33) {
   EXPECT_EQ(0, GetParam() % 33);
 }
 
 // Tests that a parameterized test case can be instantiated with multiple
 // generators.
 class MultipleInstantiationTest : public TestWithParam<int> {};
 TEST_P(MultipleInstantiationTest, AllowsMultipleInstances) {
 }
 INSTANTIATE_TEST_CASE_P(Sequence1, MultipleInstantiationTest, Values(1, 2));
 INSTANTIATE_TEST_CASE_P(Sequence2, MultipleInstantiationTest, Range(3, 5));
 
 // Tests that a parameterized test case can be instantiated
 // in multiple translation units. This test will be instantiated
 // here and in gtest-param-test_test2.cc.
 // InstantiationInMultipleTranslationUnitsTest fixture class
 // is defined in gtest-param-test_test.h.
 TEST_P(InstantiationInMultipleTranslaionUnitsTest, IsMultipleOf42) {
   EXPECT_EQ(0, GetParam() % 42);
 }
 INSTANTIATE_TEST_CASE_P(Sequence1,
                         InstantiationInMultipleTranslaionUnitsTest,
                         Values(42, 42*2));
 
 // Tests that each iteration of parameterized test runs in a separate test
 // object.
 class SeparateInstanceTest : public TestWithParam<int> {
  public:
   SeparateInstanceTest() : count_(0) {}
 
   static void TearDownTestCase() {
     EXPECT_GE(global_count_, 2)
         << "If some (but not all) SeparateInstanceTest tests have been "
         << "filtered out this test will fail. Make sure that all "
         << "GeneratorEvaluationTest are selected or de-selected together "
         << "by the test filter.";
   }
 
  protected:
   int count_;
   static int global_count_;
 };
 int SeparateInstanceTest::global_count_ = 0;
 
 TEST_P(SeparateInstanceTest, TestsRunInSeparateInstances) {
   EXPECT_EQ(0, count_++);
   global_count_++;
 }
 INSTANTIATE_TEST_CASE_P(FourElemSequence, SeparateInstanceTest, Range(1, 4));
 
 // Tests that all instantiations of a test are named appropriately. A test
 // defined with TEST_P(TestCaseName, TestName) and instantiated with
 // INSTANTIATE_TEST_CASE_P(SequenceName, TestCaseName, generator) must be named
 // SequenceName/TestCaseName.TestName/i, where i is the 0-based index of the
 // sequence element used to instantiate the test.
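 //
 // For instance, the ZeroToFiveSequence instantiation below is expected to
 // produce five tests named
 //   ZeroToFiveSequence/NamingTest.TestsReportCorrectNamesAndParameters/0
 // through
 //   ZeroToFiveSequence/NamingTest.TestsReportCorrectNamesAndParameters/4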
 class NamingTest : public TestWithParam<int> {};
 
 TEST_P(NamingTest, TestsReportCorrectNamesAndParameters) {
   const ::testing::TestInfo* const test_info =
      ::testing::UnitTest::GetInstance()->current_test_info();
 
   EXPECT_STREQ("ZeroToFiveSequence/NamingTest", test_info->test_case_name());
 
   Message index_stream;
   index_stream << "TestsReportCorrectNamesAndParameters/" << GetParam();
   EXPECT_STREQ(index_stream.GetString().c_str(), test_info->name());
 
   EXPECT_EQ(::testing::PrintToString(GetParam()), test_info->value_param());
 }
 
 INSTANTIATE_TEST_CASE_P(ZeroToFiveSequence, NamingTest, Range(0, 5));
 
 // Tests that macros in test names are expanded correctly.
 class MacroNamingTest : public TestWithParam<int> {};
 
 #define PREFIX_WITH_FOO(test_name) Foo##test_name
 #define PREFIX_WITH_MACRO(test_name) Macro##test_name
 
 TEST_P(PREFIX_WITH_MACRO(NamingTest), PREFIX_WITH_FOO(SomeTestName)) {
   const ::testing::TestInfo* const test_info =
      ::testing::UnitTest::GetInstance()->current_test_info();
 
   EXPECT_STREQ("FortyTwo/MacroNamingTest", test_info->test_case_name());
   EXPECT_STREQ("FooSomeTestName", test_info->name());
 }
 
 INSTANTIATE_TEST_CASE_P(FortyTwo, MacroNamingTest, Values(42));
 
 // Tests the same thing for non-parametrized tests.
 class MacroNamingTestNonParametrized : public ::testing::Test {};
 
 TEST_F(PREFIX_WITH_MACRO(NamingTestNonParametrized),
        PREFIX_WITH_FOO(SomeTestName)) {
   const ::testing::TestInfo* const test_info =
      ::testing::UnitTest::GetInstance()->current_test_info();
 
   EXPECT_STREQ("MacroNamingTestNonParametrized", test_info->test_case_name());
   EXPECT_STREQ("FooSomeTestName", test_info->name());
 }
 
 // Tests that user-supplied custom parameter names work correctly.
 // Runs the test with a built-in helper method which uses PrintToString,
 // as well as a custom function and custom functor to ensure all possible
 // uses work correctly.
 class CustomFunctorNamingTest : public TestWithParam<std::string> {};
 TEST_P(CustomFunctorNamingTest, CustomTestNames) {}
 
 struct CustomParamNameFunctor {
   std::string operator()(const ::testing::TestParamInfo<std::string>& inf) {
     return inf.param;
   }
 };
 
 INSTANTIATE_TEST_CASE_P(CustomParamNameFunctor,
                         CustomFunctorNamingTest,
                         Values(std::string("FunctorName")),
                         CustomParamNameFunctor());
 
 INSTANTIATE_TEST_CASE_P(AllAllowedCharacters,
                         CustomFunctorNamingTest,
                         Values("abcdefghijklmnopqrstuvwxyz",
                                "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
                                "01234567890_"),
                         CustomParamNameFunctor());
 
 inline std::string CustomParamNameFunction(
     const ::testing::TestParamInfo<std::string>& inf) {
   return inf.param;
 }
 
 class CustomFunctionNamingTest : public TestWithParam<std::string> {};
 TEST_P(CustomFunctionNamingTest, CustomTestNames) {}
 
 INSTANTIATE_TEST_CASE_P(CustomParamNameFunction,
                         CustomFunctionNamingTest,
                         Values(std::string("FunctionName")),
                         CustomParamNameFunction);
 
 #if GTEST_LANG_CXX11
 
 // Test custom naming with a lambda
 
 class CustomLambdaNamingTest : public TestWithParam<std::string> {};
 TEST_P(CustomLambdaNamingTest, CustomTestNames) {}
 
 INSTANTIATE_TEST_CASE_P(CustomParamNameLambda, CustomLambdaNamingTest,
                         Values(std::string("LambdaName")),
                         [](const ::testing::TestParamInfo<std::string>& inf) {
                           return inf.param;
                         });
 
 #endif  // GTEST_LANG_CXX11
 
 TEST(CustomNamingTest, CheckNameRegistry) {
   ::testing::UnitTest* unit_test = ::testing::UnitTest::GetInstance();
   std::set<std::string> test_names;
   for (int case_num = 0;
        case_num < unit_test->total_test_case_count();
        ++case_num) {
     const ::testing::TestCase* test_case = unit_test->GetTestCase(case_num);
     for (int test_num = 0;
          test_num < test_case->total_test_count();
          ++test_num) {
       const ::testing::TestInfo* test_info = test_case->GetTestInfo(test_num);
       test_names.insert(std::string(test_info->name()));
     }
   }
   EXPECT_EQ(1u, test_names.count("CustomTestNames/FunctorName"));
   EXPECT_EQ(1u, test_names.count("CustomTestNames/FunctionName"));
 #if GTEST_LANG_CXX11
   EXPECT_EQ(1u, test_names.count("CustomTestNames/LambdaName"));
 #endif  // GTEST_LANG_CXX11
 }
 
 // Test a numeric name to ensure PrintToStringParamName works correctly.
 
 class CustomIntegerNamingTest : public TestWithParam<int> {};
 
 TEST_P(CustomIntegerNamingTest, TestsReportCorrectNames) {
   const ::testing::TestInfo* const test_info =
      ::testing::UnitTest::GetInstance()->current_test_info();
   Message test_name_stream;
   test_name_stream << "TestsReportCorrectNames/" << GetParam();
   EXPECT_STREQ(test_name_stream.GetString().c_str(), test_info->name());
 }
 
 INSTANTIATE_TEST_CASE_P(PrintToString,
                         CustomIntegerNamingTest,
                         Range(0, 5),
                         ::testing::PrintToStringParamName());
 
 // Test a custom struct with PrintToString.
 
 struct CustomStruct {
   explicit CustomStruct(int value) : x(value) {}
   int x;
 };
 
 std::ostream& operator<<(std::ostream& stream, const CustomStruct& val) {
   stream << val.x;
   return stream;
 }
 
 class CustomStructNamingTest : public TestWithParam<CustomStruct> {};
 
 TEST_P(CustomStructNamingTest, TestsReportCorrectNames) {
   const ::testing::TestInfo* const test_info =
      ::testing::UnitTest::GetInstance()->current_test_info();
   Message test_name_stream;
   test_name_stream << "TestsReportCorrectNames/" << GetParam();
   EXPECT_STREQ(test_name_stream.GetString().c_str(), test_info->name());
 }
 
 INSTANTIATE_TEST_CASE_P(PrintToString,
                         CustomStructNamingTest,
                         Values(CustomStruct(0), CustomStruct(1)),
                         ::testing::PrintToStringParamName());
 
 // Test that using a stateful parameter naming function works as expected.
 
 struct StatefulNamingFunctor {
   StatefulNamingFunctor() : sum(0) {}
   std::string operator()(const ::testing::TestParamInfo<int>& info) {
     int value = info.param + sum;
     sum += info.param;
     return ::testing::PrintToString(value);
   }
   int sum;
 };
 
 class StatefulNamingTest : public ::testing::TestWithParam<int> {
  protected:
   StatefulNamingTest() : sum_(0) {}
   int sum_;
 };
 
 TEST_P(StatefulNamingTest, TestsReportCorrectNames) {
   const ::testing::TestInfo* const test_info =
      ::testing::UnitTest::GetInstance()->current_test_info();
   sum_ += GetParam();
   Message test_name_stream;
   test_name_stream << "TestsReportCorrectNames/" << sum_;
   EXPECT_STREQ(test_name_stream.GetString().c_str(), test_info->name());
 }
 
 INSTANTIATE_TEST_CASE_P(StatefulNamingFunctor,
                         StatefulNamingTest,
                         Range(0, 5),
                         StatefulNamingFunctor());
 
 // Class that cannot be streamed into an ostream.  It needs to be copyable
 // (and, in the case of MSVC, also assignable) in order to be a test parameter
 // type.  Its default copy constructor and assignment operator do exactly
 // what we need.
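 // (For such a type, PrintToString() falls back to dumping the object's raw
 // bytes, e.g. something like "4-byte object <01-00 00-00>"; that fallback
 // text is what value_param() is expected to report below.)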
 class Unstreamable {
  public:
   explicit Unstreamable(int value) : value_(value) {}
 
  private:
   int value_;
 };
 
 class CommentTest : public TestWithParam<Unstreamable> {};
 
 TEST_P(CommentTest, TestsCorrectlyReportUnstreamableParams) {
   const ::testing::TestInfo* const test_info =
      ::testing::UnitTest::GetInstance()->current_test_info();
 
   EXPECT_EQ(::testing::PrintToString(GetParam()), test_info->value_param());
 }
 
 INSTANTIATE_TEST_CASE_P(InstantiationWithComments,
                         CommentTest,
                         Values(Unstreamable(1)));
 
 // Verify that we can create a hierarchy of test fixtures, where the base
 // class fixture is not parameterized and the derived class is. In this case
 // ParameterizedDerivedTest inherits from NonParameterizedBaseTest.  We
 // perform simple tests on both.
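 // (TEST_P only requires that the fixture ultimately derive from
 // ::testing::WithParamInterface<T>; TestWithParam<T> is just a convenience
 // class combining Test and WithParamInterface<T>.)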
 class NonParameterizedBaseTest : public ::testing::Test {
  public:
   NonParameterizedBaseTest() : n_(17) { }
  protected:
   int n_;
 };
 
 class ParameterizedDerivedTest : public NonParameterizedBaseTest,
                                  public ::testing::WithParamInterface<int> {
  protected:
   ParameterizedDerivedTest() : count_(0) { }
   int count_;
   static int global_count_;
 };
 
 int ParameterizedDerivedTest::global_count_ = 0;
 
 TEST_F(NonParameterizedBaseTest, FixtureIsInitialized) {
   EXPECT_EQ(17, n_);
 }
 
 TEST_P(ParameterizedDerivedTest, SeesSequence) {
   EXPECT_EQ(17, n_);
   EXPECT_EQ(0, count_++);
   EXPECT_EQ(GetParam(), global_count_++);
 }
 
 class ParameterizedDeathTest : public ::testing::TestWithParam<int> { };
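 // Calling GetParam() from a non-parameterized (TEST_F) test is expected to
 // abort; the death test below verifies the error message this produces.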
 
 TEST_F(ParameterizedDeathTest, GetParamDiesFromTestF) {
   EXPECT_DEATH_IF_SUPPORTED(GetParam(),
                             ".* value-parameterized test .*");
 }
 
 INSTANTIATE_TEST_CASE_P(RangeZeroToFive, ParameterizedDerivedTest, Range(0, 5));
 
 
 int main(int argc, char **argv) {
   // Used in TestGenerationTest test case.
   AddGlobalTestEnvironment(TestGenerationTest::Environment::Instance());
   // Used in GeneratorEvaluationTest test case. Tests that the updated value
   // will be picked up for instantiating tests in GeneratorEvaluationTest.
   GeneratorEvaluationTest::set_param_value(1);
 
   ::testing::InitGoogleTest(&argc, argv);
 
   // Used in GeneratorEvaluationTest test case. Tests that the value updated
   // here will NOT be used for instantiating tests in
   // GeneratorEvaluationTest.
   GeneratorEvaluationTest::set_param_value(2);
 
   return RUN_ALL_TESTS();
 }
diff --git a/googletest/test/gtest-param-test_test.h b/googletest/test/googletest-param-test-test.h
similarity index 100%
rename from googletest/test/gtest-param-test_test.h
rename to googletest/test/googletest-param-test-test.h
diff --git a/googletest/test/gtest-param-test2_test.cc b/googletest/test/googletest-param-test2-test.cc
similarity index 91%
copy from googletest/test/gtest-param-test2_test.cc
copy to googletest/test/googletest-param-test2-test.cc
index c3b2d189..c0a908bb 100644
--- a/googletest/test/gtest-param-test2_test.cc
+++ b/googletest/test/googletest-param-test2-test.cc
@@ -1,61 +1,61 @@
 // Copyright 2008, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 // Author: vladl@google.com (Vlad Losev)
 //
 // Tests for Google Test itself.  This verifies that the basic constructs of
 // Google Test work.
 
 #include "gtest/gtest.h"
-#include "gtest-param-test_test.h"
+#include "googletest-param-test-test.h"
 
 using ::testing::Values;
 using ::testing::internal::ParamGenerator;
 
 // Tests that generators defined in a different translation unit
 // are functional. The test using extern_gen is defined
-// in gtest-param-test_test.cc.
+// in googletest-param-test-test.cc.
 ParamGenerator<int> extern_gen = Values(33);
 
 // Tests that a parameterized test case can be defined in one translation unit
-// and instantiated in another. The test is defined in gtest-param-test_test.cc
+// and instantiated in another. The test is defined in googletest-param-test-test.cc
 // and ExternalInstantiationTest fixture class is defined in
 // gtest-param-test_test.h.
 INSTANTIATE_TEST_CASE_P(MultiplesOf33,
                         ExternalInstantiationTest,
                         Values(33, 66));
 
 // Tests that a parameterized test case can be instantiated
 // in multiple translation units. Another instantiation is defined
-// in gtest-param-test_test.cc and InstantiationInMultipleTranslaionUnitsTest
+// in googletest-param-test-test.cc and InstantiationInMultipleTranslaionUnitsTest
 // fixture is defined in gtest-param-test_test.h
 INSTANTIATE_TEST_CASE_P(Sequence2,
                         InstantiationInMultipleTranslaionUnitsTest,
                         Values(42*3, 42*4, 42*5));
 
diff --git a/googletest/test/gtest-port_test.cc b/googletest/test/googletest-port-test.cc
similarity index 99%
rename from googletest/test/gtest-port_test.cc
rename to googletest/test/googletest-port-test.cc
index 3801e5ee..15a629fc 100644
--- a/googletest/test/gtest-port_test.cc
+++ b/googletest/test/googletest-port-test.cc
@@ -1,1303 +1,1303 @@
 // Copyright 2008, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 // Authors: vladl@google.com (Vlad Losev), wan@google.com (Zhanyong Wan)
 //
 // This file tests the internal cross-platform support utilities.
 
 #include "gtest/internal/gtest-port.h"
 
 #include <stdio.h>
 
 #if GTEST_OS_MAC
 # include <time.h>
 #endif  // GTEST_OS_MAC
 
 #include <list>
 #include <utility>  // For std::pair and std::make_pair.
 #include <vector>
 
 #include "gtest/gtest.h"
 #include "gtest/gtest-spi.h"
 #include "src/gtest-internal-inl.h"
 
 using std::make_pair;
 using std::pair;
 
 namespace testing {
 namespace internal {
 
 TEST(IsXDigitTest, WorksForNarrowAscii) {
   EXPECT_TRUE(IsXDigit('0'));
   EXPECT_TRUE(IsXDigit('9'));
   EXPECT_TRUE(IsXDigit('A'));
   EXPECT_TRUE(IsXDigit('F'));
   EXPECT_TRUE(IsXDigit('a'));
   EXPECT_TRUE(IsXDigit('f'));
 
   EXPECT_FALSE(IsXDigit('-'));
   EXPECT_FALSE(IsXDigit('g'));
   EXPECT_FALSE(IsXDigit('G'));
 }
 
 TEST(IsXDigitTest, ReturnsFalseForNarrowNonAscii) {
   EXPECT_FALSE(IsXDigit(static_cast<char>('\x80')));
   EXPECT_FALSE(IsXDigit(static_cast<char>('0' | '\x80')));
 }
 
 TEST(IsXDigitTest, WorksForWideAscii) {
   EXPECT_TRUE(IsXDigit(L'0'));
   EXPECT_TRUE(IsXDigit(L'9'));
   EXPECT_TRUE(IsXDigit(L'A'));
   EXPECT_TRUE(IsXDigit(L'F'));
   EXPECT_TRUE(IsXDigit(L'a'));
   EXPECT_TRUE(IsXDigit(L'f'));
 
   EXPECT_FALSE(IsXDigit(L'-'));
   EXPECT_FALSE(IsXDigit(L'g'));
   EXPECT_FALSE(IsXDigit(L'G'));
 }
 
 TEST(IsXDigitTest, ReturnsFalseForWideNonAscii) {
   EXPECT_FALSE(IsXDigit(static_cast<wchar_t>(0x80)));
   EXPECT_FALSE(IsXDigit(static_cast<wchar_t>(L'0' | 0x80)));
   EXPECT_FALSE(IsXDigit(static_cast<wchar_t>(L'0' | 0x100)));
 }
 
 class Base {
  public:
   // Copy constructor and assignment operator do exactly what we need, so we
   // use them.
   Base() : member_(0) {}
   explicit Base(int n) : member_(n) {}
   virtual ~Base() {}
   int member() { return member_; }
 
  private:
   int member_;
 };
 
 class Derived : public Base {
  public:
   explicit Derived(int n) : Base(n) {}
 };
 
 TEST(ImplicitCastTest, ConvertsPointers) {
   Derived derived(0);
   EXPECT_TRUE(&derived == ::testing::internal::ImplicitCast_<Base*>(&derived));
 }
 
 TEST(ImplicitCastTest, CanUseInheritance) {
   Derived derived(1);
   Base base = ::testing::internal::ImplicitCast_<Base>(derived);
   EXPECT_EQ(derived.member(), base.member());
 }
 
 class Castable {
  public:
   explicit Castable(bool* converted) : converted_(converted) {}
   operator Base() {
     *converted_ = true;
     return Base();
   }
 
  private:
   bool* converted_;
 };
 
 TEST(ImplicitCastTest, CanUseNonConstCastOperator) {
   bool converted = false;
   Castable castable(&converted);
   Base base = ::testing::internal::ImplicitCast_<Base>(castable);
   EXPECT_TRUE(converted);
 }
 
 class ConstCastable {
  public:
   explicit ConstCastable(bool* converted) : converted_(converted) {}
   operator Base() const {
     *converted_ = true;
     return Base();
   }
 
  private:
   bool* converted_;
 };
 
 TEST(ImplicitCastTest, CanUseConstCastOperatorOnConstValues) {
   bool converted = false;
   const ConstCastable const_castable(&converted);
   Base base = ::testing::internal::ImplicitCast_<Base>(const_castable);
   EXPECT_TRUE(converted);
 }
 
 class ConstAndNonConstCastable {
  public:
   ConstAndNonConstCastable(bool* converted, bool* const_converted)
       : converted_(converted), const_converted_(const_converted) {}
   operator Base() {
     *converted_ = true;
     return Base();
   }
   operator Base() const {
     *const_converted_ = true;
     return Base();
   }
 
  private:
   bool* converted_;
   bool* const_converted_;
 };
 
 TEST(ImplicitCastTest, CanSelectBetweenConstAndNonConstCasrAppropriately) {
   bool converted = false;
   bool const_converted = false;
   ConstAndNonConstCastable castable(&converted, &const_converted);
   Base base = ::testing::internal::ImplicitCast_<Base>(castable);
   EXPECT_TRUE(converted);
   EXPECT_FALSE(const_converted);
 
   converted = false;
   const_converted = false;
   const ConstAndNonConstCastable const_castable(&converted, &const_converted);
   base = ::testing::internal::ImplicitCast_<Base>(const_castable);
   EXPECT_FALSE(converted);
   EXPECT_TRUE(const_converted);
 }
 
 class To {
  public:
   To(bool* converted) { *converted = true; }  // NOLINT
 };
 
 TEST(ImplicitCastTest, CanUseImplicitConstructor) {
   bool converted = false;
   To to = ::testing::internal::ImplicitCast_<To>(&converted);
   (void)to;
   EXPECT_TRUE(converted);
 }
 
 TEST(IteratorTraitsTest, WorksForSTLContainerIterators) {
   StaticAssertTypeEq<int,
       IteratorTraits< ::std::vector<int>::const_iterator>::value_type>();
   StaticAssertTypeEq<bool,
       IteratorTraits< ::std::list<bool>::iterator>::value_type>();
 }
 
 TEST(IteratorTraitsTest, WorksForPointerToNonConst) {
   StaticAssertTypeEq<char, IteratorTraits<char*>::value_type>();
   StaticAssertTypeEq<const void*, IteratorTraits<const void**>::value_type>();
 }
 
 TEST(IteratorTraitsTest, WorksForPointerToConst) {
   StaticAssertTypeEq<char, IteratorTraits<const char*>::value_type>();
   StaticAssertTypeEq<const void*,
       IteratorTraits<const void* const*>::value_type>();
 }
 
 // Tests that the element_type typedef is available in scoped_ptr and refers
 // to the parameter type.
 TEST(ScopedPtrTest, DefinesElementType) {
   StaticAssertTypeEq<int, ::testing::internal::scoped_ptr<int>::element_type>();
 }
 
 // TODO(vladl@google.com): Implement THE REST of scoped_ptr tests.
 
 TEST(GtestCheckSyntaxTest, BehavesLikeASingleStatement) {
   if (AlwaysFalse())
     GTEST_CHECK_(false) << "This should never be executed; "
                            "It's a compilation test only.";
 
   if (AlwaysTrue())
     GTEST_CHECK_(true);
   else
     ;  // NOLINT
 
   if (AlwaysFalse())
     ;  // NOLINT
   else
     GTEST_CHECK_(true) << "";
 }
 
 TEST(GtestCheckSyntaxTest, WorksWithSwitch) {
   switch (0) {
     case 1:
       break;
     default:
       GTEST_CHECK_(true);
   }
 
   switch (0)
     case 0:
       GTEST_CHECK_(true) << "Check failed in switch case";
 }
 
 // Verifies behavior of FormatFileLocation.
 TEST(FormatFileLocationTest, FormatsFileLocation) {
   EXPECT_PRED_FORMAT2(IsSubstring, "foo.cc", FormatFileLocation("foo.cc", 42));
   EXPECT_PRED_FORMAT2(IsSubstring, "42", FormatFileLocation("foo.cc", 42));
 }
 
 TEST(FormatFileLocationTest, FormatsUnknownFile) {
   EXPECT_PRED_FORMAT2(
       IsSubstring, "unknown file", FormatFileLocation(NULL, 42));
   EXPECT_PRED_FORMAT2(IsSubstring, "42", FormatFileLocation(NULL, 42));
 }
 
 TEST(FormatFileLocationTest, FormatsUknownLine) {
   EXPECT_EQ("foo.cc:", FormatFileLocation("foo.cc", -1));
 }
 
 TEST(FormatFileLocationTest, FormatsUknownFileAndLine) {
   EXPECT_EQ("unknown file:", FormatFileLocation(NULL, -1));
 }
 
 // Verifies behavior of FormatCompilerIndependentFileLocation.
 TEST(FormatCompilerIndependentFileLocationTest, FormatsFileLocation) {
   EXPECT_EQ("foo.cc:42", FormatCompilerIndependentFileLocation("foo.cc", 42));
 }
 
 TEST(FormatCompilerIndependentFileLocationTest, FormatsUknownFile) {
   EXPECT_EQ("unknown file:42",
             FormatCompilerIndependentFileLocation(NULL, 42));
 }
 
 TEST(FormatCompilerIndependentFileLocationTest, FormatsUknownLine) {
   EXPECT_EQ("foo.cc", FormatCompilerIndependentFileLocation("foo.cc", -1));
 }
 
 TEST(FormatCompilerIndependentFileLocationTest, FormatsUknownFileAndLine) {
   EXPECT_EQ("unknown file", FormatCompilerIndependentFileLocation(NULL, -1));
 }
 
 #if GTEST_OS_LINUX || GTEST_OS_MAC || GTEST_OS_QNX || GTEST_OS_FUCHSIA
 void* ThreadFunc(void* data) {
   internal::Mutex* mutex = static_cast<internal::Mutex*>(data);
   mutex->Lock();
   mutex->Unlock();
   return NULL;
 }
 
 TEST(GetThreadCountTest, ReturnsCorrectValue) {
   const size_t starting_count = GetThreadCount();
   pthread_t       thread_id;
 
   internal::Mutex mutex;
   {
     internal::MutexLock lock(&mutex);
     pthread_attr_t  attr;
     ASSERT_EQ(0, pthread_attr_init(&attr));
     ASSERT_EQ(0, pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE));
 
     const int status = pthread_create(&thread_id, &attr, &ThreadFunc, &mutex);
     ASSERT_EQ(0, pthread_attr_destroy(&attr));
     ASSERT_EQ(0, status);
     EXPECT_EQ(starting_count + 1, GetThreadCount());
   }
 
   void* dummy;
   ASSERT_EQ(0, pthread_join(thread_id, &dummy));
 
   // The OS may not immediately report the updated thread count after
   // joining a thread, causing flakiness in this test. To counter that, we
   // wait for up to .5 seconds for the OS to report the correct value.
   for (int i = 0; i < 5; ++i) {
     if (GetThreadCount() == starting_count)
       break;
 
     SleepMilliseconds(100);
   }
 
   EXPECT_EQ(starting_count, GetThreadCount());
 }
 #else
 TEST(GetThreadCountTest, ReturnsZeroWhenUnableToCountThreads) {
   EXPECT_EQ(0U, GetThreadCount());
 }
 #endif  // GTEST_OS_LINUX || GTEST_OS_MAC || GTEST_OS_QNX || GTEST_OS_FUCHSIA
 
 TEST(GtestCheckDeathTest, DiesWithCorrectOutputOnFailure) {
   const bool a_false_condition = false;
   const char regex[] =
 #ifdef _MSC_VER
-     "gtest-port_test\\.cc\\(\\d+\\):"
+     "googletest-port-test\\.cc\\(\\d+\\):"
 #elif GTEST_USES_POSIX_RE
-     "gtest-port_test\\.cc:[0-9]+"
+     "googletest-port-test\\.cc:[0-9]+"
 #else
-     "gtest-port_test\\.cc:\\d+"
+     "googletest-port-test\\.cc:\\d+"
 #endif  // _MSC_VER
      ".*a_false_condition.*Extra info.*";
 
   EXPECT_DEATH_IF_SUPPORTED(GTEST_CHECK_(a_false_condition) << "Extra info",
                             regex);
 }
 
 #if GTEST_HAS_DEATH_TEST
 
 TEST(GtestCheckDeathTest, LivesSilentlyOnSuccess) {
   EXPECT_EXIT({
       GTEST_CHECK_(true) << "Extra info";
       ::std::cerr << "Success\n";
       exit(0); },
       ::testing::ExitedWithCode(0), "Success");
 }
 
 #endif  // GTEST_HAS_DEATH_TEST
 
 // Verifies that Google Test chooses the regular expression engine appropriate
 // to the platform. The test will produce compiler errors in case of failure.
 // For simplicity, we only cover the most important platforms here.
 TEST(RegexEngineSelectionTest, SelectsCorrectRegexEngine) {
 #if !GTEST_USES_PCRE
 # if GTEST_HAS_POSIX_RE
 
   EXPECT_TRUE(GTEST_USES_POSIX_RE);
 
 # else
 
   EXPECT_TRUE(GTEST_USES_SIMPLE_RE);
 
 # endif
 #endif  // !GTEST_USES_PCRE
 }
 
 #if GTEST_USES_POSIX_RE
 
 # if GTEST_HAS_TYPED_TEST
 
 template <typename Str>
 class RETest : public ::testing::Test {};
 
 // Defines StringTypes as the list of all string types that class RE
 // supports.
 typedef testing::Types<
     ::std::string,
 #  if GTEST_HAS_GLOBAL_STRING
     ::string,
 #  endif  // GTEST_HAS_GLOBAL_STRING
     const char*> StringTypes;
 
 TYPED_TEST_CASE(RETest, StringTypes);
 
 // Tests RE's implicit constructors.
 TYPED_TEST(RETest, ImplicitConstructorWorks) {
   const RE empty(TypeParam(""));
   EXPECT_STREQ("", empty.pattern());
 
   const RE simple(TypeParam("hello"));
   EXPECT_STREQ("hello", simple.pattern());
 
   const RE normal(TypeParam(".*(\\w+)"));
   EXPECT_STREQ(".*(\\w+)", normal.pattern());
 }
 
 // Tests that RE's constructors reject invalid regular expressions.
 TYPED_TEST(RETest, RejectsInvalidRegex) {
   EXPECT_NONFATAL_FAILURE({
     const RE invalid(TypeParam("?"));
   }, "\"?\" is not a valid POSIX Extended regular expression.");
 }
 
 // Tests RE::FullMatch().
 TYPED_TEST(RETest, FullMatchWorks) {
   const RE empty(TypeParam(""));
   EXPECT_TRUE(RE::FullMatch(TypeParam(""), empty));
   EXPECT_FALSE(RE::FullMatch(TypeParam("a"), empty));
 
   const RE re(TypeParam("a.*z"));
   EXPECT_TRUE(RE::FullMatch(TypeParam("az"), re));
   EXPECT_TRUE(RE::FullMatch(TypeParam("axyz"), re));
   EXPECT_FALSE(RE::FullMatch(TypeParam("baz"), re));
   EXPECT_FALSE(RE::FullMatch(TypeParam("azy"), re));
 }
 
 // Tests RE::PartialMatch().
 TYPED_TEST(RETest, PartialMatchWorks) {
   const RE empty(TypeParam(""));
   EXPECT_TRUE(RE::PartialMatch(TypeParam(""), empty));
   EXPECT_TRUE(RE::PartialMatch(TypeParam("a"), empty));
 
   const RE re(TypeParam("a.*z"));
   EXPECT_TRUE(RE::PartialMatch(TypeParam("az"), re));
   EXPECT_TRUE(RE::PartialMatch(TypeParam("axyz"), re));
   EXPECT_TRUE(RE::PartialMatch(TypeParam("baz"), re));
   EXPECT_TRUE(RE::PartialMatch(TypeParam("azy"), re));
   EXPECT_FALSE(RE::PartialMatch(TypeParam("zza"), re));
 }
 
 # endif  // GTEST_HAS_TYPED_TEST
 
 #elif GTEST_USES_SIMPLE_RE
 
 TEST(IsInSetTest, NulCharIsNotInAnySet) {
   EXPECT_FALSE(IsInSet('\0', ""));
   EXPECT_FALSE(IsInSet('\0', "\0"));
   EXPECT_FALSE(IsInSet('\0', "a"));
 }
 
 TEST(IsInSetTest, WorksForNonNulChars) {
   EXPECT_FALSE(IsInSet('a', "Ab"));
   EXPECT_FALSE(IsInSet('c', ""));
 
   EXPECT_TRUE(IsInSet('b', "bcd"));
   EXPECT_TRUE(IsInSet('b', "ab"));
 }
 
 TEST(IsAsciiDigitTest, IsFalseForNonDigit) {
   EXPECT_FALSE(IsAsciiDigit('\0'));
   EXPECT_FALSE(IsAsciiDigit(' '));
   EXPECT_FALSE(IsAsciiDigit('+'));
   EXPECT_FALSE(IsAsciiDigit('-'));
   EXPECT_FALSE(IsAsciiDigit('.'));
   EXPECT_FALSE(IsAsciiDigit('a'));
 }
 
 TEST(IsAsciiDigitTest, IsTrueForDigit) {
   EXPECT_TRUE(IsAsciiDigit('0'));
   EXPECT_TRUE(IsAsciiDigit('1'));
   EXPECT_TRUE(IsAsciiDigit('5'));
   EXPECT_TRUE(IsAsciiDigit('9'));
 }
 
 TEST(IsAsciiPunctTest, IsFalseForNonPunct) {
   EXPECT_FALSE(IsAsciiPunct('\0'));
   EXPECT_FALSE(IsAsciiPunct(' '));
   EXPECT_FALSE(IsAsciiPunct('\n'));
   EXPECT_FALSE(IsAsciiPunct('a'));
   EXPECT_FALSE(IsAsciiPunct('0'));
 }
 
 TEST(IsAsciiPunctTest, IsTrueForPunct) {
   for (const char* p = "^-!\"#$%&'()*+,./:;<=>?@[\\]_`{|}~"; *p; p++) {
     EXPECT_PRED1(IsAsciiPunct, *p);
   }
 }
 
 TEST(IsRepeatTest, IsFalseForNonRepeatChar) {
   EXPECT_FALSE(IsRepeat('\0'));
   EXPECT_FALSE(IsRepeat(' '));
   EXPECT_FALSE(IsRepeat('a'));
   EXPECT_FALSE(IsRepeat('1'));
   EXPECT_FALSE(IsRepeat('-'));
 }
 
 TEST(IsRepeatTest, IsTrueForRepeatChar) {
   EXPECT_TRUE(IsRepeat('?'));
   EXPECT_TRUE(IsRepeat('*'));
   EXPECT_TRUE(IsRepeat('+'));
 }
 
 TEST(IsAsciiWhiteSpaceTest, IsFalseForNonWhiteSpace) {
   EXPECT_FALSE(IsAsciiWhiteSpace('\0'));
   EXPECT_FALSE(IsAsciiWhiteSpace('a'));
   EXPECT_FALSE(IsAsciiWhiteSpace('1'));
   EXPECT_FALSE(IsAsciiWhiteSpace('+'));
   EXPECT_FALSE(IsAsciiWhiteSpace('_'));
 }
 
 TEST(IsAsciiWhiteSpaceTest, IsTrueForWhiteSpace) {
   EXPECT_TRUE(IsAsciiWhiteSpace(' '));
   EXPECT_TRUE(IsAsciiWhiteSpace('\n'));
   EXPECT_TRUE(IsAsciiWhiteSpace('\r'));
   EXPECT_TRUE(IsAsciiWhiteSpace('\t'));
   EXPECT_TRUE(IsAsciiWhiteSpace('\v'));
   EXPECT_TRUE(IsAsciiWhiteSpace('\f'));
 }
 
 TEST(IsAsciiWordCharTest, IsFalseForNonWordChar) {
   EXPECT_FALSE(IsAsciiWordChar('\0'));
   EXPECT_FALSE(IsAsciiWordChar('+'));
   EXPECT_FALSE(IsAsciiWordChar('.'));
   EXPECT_FALSE(IsAsciiWordChar(' '));
   EXPECT_FALSE(IsAsciiWordChar('\n'));
 }
 
 TEST(IsAsciiWordCharTest, IsTrueForLetter) {
   EXPECT_TRUE(IsAsciiWordChar('a'));
   EXPECT_TRUE(IsAsciiWordChar('b'));
   EXPECT_TRUE(IsAsciiWordChar('A'));
   EXPECT_TRUE(IsAsciiWordChar('Z'));
 }
 
 TEST(IsAsciiWordCharTest, IsTrueForDigit) {
   EXPECT_TRUE(IsAsciiWordChar('0'));
   EXPECT_TRUE(IsAsciiWordChar('1'));
   EXPECT_TRUE(IsAsciiWordChar('7'));
   EXPECT_TRUE(IsAsciiWordChar('9'));
 }
 
 TEST(IsAsciiWordCharTest, IsTrueForUnderscore) {
   EXPECT_TRUE(IsAsciiWordChar('_'));
 }
 
 TEST(IsValidEscapeTest, IsFalseForNonPrintable) {
   EXPECT_FALSE(IsValidEscape('\0'));
   EXPECT_FALSE(IsValidEscape('\007'));
 }
 
 TEST(IsValidEscapeTest, IsFalseForDigit) {
   EXPECT_FALSE(IsValidEscape('0'));
   EXPECT_FALSE(IsValidEscape('9'));
 }
 
 TEST(IsValidEscapeTest, IsFalseForWhiteSpace) {
   EXPECT_FALSE(IsValidEscape(' '));
   EXPECT_FALSE(IsValidEscape('\n'));
 }
 
 TEST(IsValidEscapeTest, IsFalseForSomeLetter) {
   EXPECT_FALSE(IsValidEscape('a'));
   EXPECT_FALSE(IsValidEscape('Z'));
 }
 
 TEST(IsValidEscapeTest, IsTrueForPunct) {
   EXPECT_TRUE(IsValidEscape('.'));
   EXPECT_TRUE(IsValidEscape('-'));
   EXPECT_TRUE(IsValidEscape('^'));
   EXPECT_TRUE(IsValidEscape('$'));
   EXPECT_TRUE(IsValidEscape('('));
   EXPECT_TRUE(IsValidEscape(']'));
   EXPECT_TRUE(IsValidEscape('{'));
   EXPECT_TRUE(IsValidEscape('|'));
 }
 
 TEST(IsValidEscapeTest, IsTrueForSomeLetter) {
   EXPECT_TRUE(IsValidEscape('d'));
   EXPECT_TRUE(IsValidEscape('D'));
   EXPECT_TRUE(IsValidEscape('s'));
   EXPECT_TRUE(IsValidEscape('S'));
   EXPECT_TRUE(IsValidEscape('w'));
   EXPECT_TRUE(IsValidEscape('W'));
 }
 
 TEST(AtomMatchesCharTest, EscapedPunct) {
   EXPECT_FALSE(AtomMatchesChar(true, '\\', '\0'));
   EXPECT_FALSE(AtomMatchesChar(true, '\\', ' '));
   EXPECT_FALSE(AtomMatchesChar(true, '_', '.'));
   EXPECT_FALSE(AtomMatchesChar(true, '.', 'a'));
 
   EXPECT_TRUE(AtomMatchesChar(true, '\\', '\\'));
   EXPECT_TRUE(AtomMatchesChar(true, '_', '_'));
   EXPECT_TRUE(AtomMatchesChar(true, '+', '+'));
   EXPECT_TRUE(AtomMatchesChar(true, '.', '.'));
 }
 
 TEST(AtomMatchesCharTest, Escaped_d) {
   EXPECT_FALSE(AtomMatchesChar(true, 'd', '\0'));
   EXPECT_FALSE(AtomMatchesChar(true, 'd', 'a'));
   EXPECT_FALSE(AtomMatchesChar(true, 'd', '.'));
 
   EXPECT_TRUE(AtomMatchesChar(true, 'd', '0'));
   EXPECT_TRUE(AtomMatchesChar(true, 'd', '9'));
 }
 
 TEST(AtomMatchesCharTest, Escaped_D) {
   EXPECT_FALSE(AtomMatchesChar(true, 'D', '0'));
   EXPECT_FALSE(AtomMatchesChar(true, 'D', '9'));
 
   EXPECT_TRUE(AtomMatchesChar(true, 'D', '\0'));
   EXPECT_TRUE(AtomMatchesChar(true, 'D', 'a'));
   EXPECT_TRUE(AtomMatchesChar(true, 'D', '-'));
 }
 
 TEST(AtomMatchesCharTest, Escaped_s) {
   EXPECT_FALSE(AtomMatchesChar(true, 's', '\0'));
   EXPECT_FALSE(AtomMatchesChar(true, 's', 'a'));
   EXPECT_FALSE(AtomMatchesChar(true, 's', '.'));
   EXPECT_FALSE(AtomMatchesChar(true, 's', '9'));
 
   EXPECT_TRUE(AtomMatchesChar(true, 's', ' '));
   EXPECT_TRUE(AtomMatchesChar(true, 's', '\n'));
   EXPECT_TRUE(AtomMatchesChar(true, 's', '\t'));
 }
 
 TEST(AtomMatchesCharTest, Escaped_S) {
   EXPECT_FALSE(AtomMatchesChar(true, 'S', ' '));
   EXPECT_FALSE(AtomMatchesChar(true, 'S', '\r'));
 
   EXPECT_TRUE(AtomMatchesChar(true, 'S', '\0'));
   EXPECT_TRUE(AtomMatchesChar(true, 'S', 'a'));
   EXPECT_TRUE(AtomMatchesChar(true, 'S', '9'));
 }
 
 TEST(AtomMatchesCharTest, Escaped_w) {
   EXPECT_FALSE(AtomMatchesChar(true, 'w', '\0'));
   EXPECT_FALSE(AtomMatchesChar(true, 'w', '+'));
   EXPECT_FALSE(AtomMatchesChar(true, 'w', ' '));
   EXPECT_FALSE(AtomMatchesChar(true, 'w', '\n'));
 
   EXPECT_TRUE(AtomMatchesChar(true, 'w', '0'));
   EXPECT_TRUE(AtomMatchesChar(true, 'w', 'b'));
   EXPECT_TRUE(AtomMatchesChar(true, 'w', 'C'));
   EXPECT_TRUE(AtomMatchesChar(true, 'w', '_'));
 }
 
 TEST(AtomMatchesCharTest, Escaped_W) {
   EXPECT_FALSE(AtomMatchesChar(true, 'W', 'A'));
   EXPECT_FALSE(AtomMatchesChar(true, 'W', 'b'));
   EXPECT_FALSE(AtomMatchesChar(true, 'W', '9'));
   EXPECT_FALSE(AtomMatchesChar(true, 'W', '_'));
 
   EXPECT_TRUE(AtomMatchesChar(true, 'W', '\0'));
   EXPECT_TRUE(AtomMatchesChar(true, 'W', '*'));
   EXPECT_TRUE(AtomMatchesChar(true, 'W', '\n'));
 }
 
 TEST(AtomMatchesCharTest, EscapedWhiteSpace) {
   EXPECT_FALSE(AtomMatchesChar(true, 'f', '\0'));
   EXPECT_FALSE(AtomMatchesChar(true, 'f', '\n'));
   EXPECT_FALSE(AtomMatchesChar(true, 'n', '\0'));
   EXPECT_FALSE(AtomMatchesChar(true, 'n', '\r'));
   EXPECT_FALSE(AtomMatchesChar(true, 'r', '\0'));
   EXPECT_FALSE(AtomMatchesChar(true, 'r', 'a'));
   EXPECT_FALSE(AtomMatchesChar(true, 't', '\0'));
   EXPECT_FALSE(AtomMatchesChar(true, 't', 't'));
   EXPECT_FALSE(AtomMatchesChar(true, 'v', '\0'));
   EXPECT_FALSE(AtomMatchesChar(true, 'v', '\f'));
 
   EXPECT_TRUE(AtomMatchesChar(true, 'f', '\f'));
   EXPECT_TRUE(AtomMatchesChar(true, 'n', '\n'));
   EXPECT_TRUE(AtomMatchesChar(true, 'r', '\r'));
   EXPECT_TRUE(AtomMatchesChar(true, 't', '\t'));
   EXPECT_TRUE(AtomMatchesChar(true, 'v', '\v'));
 }
 
 TEST(AtomMatchesCharTest, UnescapedDot) {
   EXPECT_FALSE(AtomMatchesChar(false, '.', '\n'));
 
   EXPECT_TRUE(AtomMatchesChar(false, '.', '\0'));
   EXPECT_TRUE(AtomMatchesChar(false, '.', '.'));
   EXPECT_TRUE(AtomMatchesChar(false, '.', 'a'));
   EXPECT_TRUE(AtomMatchesChar(false, '.', ' '));
 }
 
 TEST(AtomMatchesCharTest, UnescapedChar) {
   EXPECT_FALSE(AtomMatchesChar(false, 'a', '\0'));
   EXPECT_FALSE(AtomMatchesChar(false, 'a', 'b'));
   EXPECT_FALSE(AtomMatchesChar(false, '$', 'a'));
 
   EXPECT_TRUE(AtomMatchesChar(false, '$', '$'));
   EXPECT_TRUE(AtomMatchesChar(false, '5', '5'));
   EXPECT_TRUE(AtomMatchesChar(false, 'Z', 'Z'));
 }
 
 TEST(ValidateRegexTest, GeneratesFailureAndReturnsFalseForInvalid) {
   EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(NULL)),
                           "NULL is not a valid simple regular expression");
   EXPECT_NONFATAL_FAILURE(
       ASSERT_FALSE(ValidateRegex("a\\")),
       "Syntax error at index 1 in simple regular expression \"a\\\": ");
   EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("a\\")),
                           "'\\' cannot appear at the end");
   EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("\\n\\")),
                           "'\\' cannot appear at the end");
   EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("\\s\\hb")),
                           "invalid escape sequence \"\\h\"");
   EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("^^")),
                           "'^' can only appear at the beginning");
   EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(".*^b")),
                           "'^' can only appear at the beginning");
   EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("$$")),
                           "'$' can only appear at the end");
   EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("^$a")),
                           "'$' can only appear at the end");
   EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("a(b")),
                           "'(' is unsupported");
   EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("ab)")),
                           "')' is unsupported");
   EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("[ab")),
                           "'[' is unsupported");
   EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("a{2")),
                           "'{' is unsupported");
   EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("?")),
                           "'?' can only follow a repeatable token");
   EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("^*")),
                           "'*' can only follow a repeatable token");
   EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("5*+")),
                           "'+' can only follow a repeatable token");
 }
 
 TEST(ValidateRegexTest, ReturnsTrueForValid) {
   EXPECT_TRUE(ValidateRegex(""));
   EXPECT_TRUE(ValidateRegex("a"));
   EXPECT_TRUE(ValidateRegex(".*"));
   EXPECT_TRUE(ValidateRegex("^a_+"));
   EXPECT_TRUE(ValidateRegex("^a\\t\\&?"));
   EXPECT_TRUE(ValidateRegex("09*$"));
   EXPECT_TRUE(ValidateRegex("^Z$"));
   EXPECT_TRUE(ValidateRegex("a\\^Z\\$\\(\\)\\|\\[\\]\\{\\}"));
 }
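
  // Taken together, the cases above pin down the "simple" regular expression
  // grammar: an atom is a single literal character, '.', an escaped
  // punctuation character, or one of the \d \D \w \W \s \S classes; '^' may
  // appear only at the beginning and '$' only at the end; '?', '*' and '+'
  // must follow a repeatable atom; '(', ')', '[', '{' and '|' are rejected
  // unless escaped.  An illustrative sketch (this pattern is our own, not
  // taken from the implementation) of a pattern that combines these rules and
  // would be accepted:
  //
  //   EXPECT_TRUE(ValidateRegex("^\\w+\\.\\d*$"));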
 
 TEST(MatchRepetitionAndRegexAtHeadTest, WorksForZeroOrOne) {
   EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, 'a', '?', "a", "ba"));
   // Repeating more than once.
   EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, 'a', '?', "b", "aab"));
 
   // Repeating zero times.
   EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, 'a', '?', "b", "ba"));
   // Repeating once.
   EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, 'a', '?', "b", "ab"));
   EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, '#', '?', ".", "##"));
 }
 
 TEST(MatchRepetitionAndRegexAtHeadTest, WorksForZeroOrMany) {
   EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, '.', '*', "a$", "baab"));
 
   // Repeating zero times.
   EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, '.', '*', "b", "bc"));
   // Repeating once.
   EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, '.', '*', "b", "abc"));
   // Repeating more than once.
   EXPECT_TRUE(MatchRepetitionAndRegexAtHead(true, 'w', '*', "-", "ab_1-g"));
 }
 
 TEST(MatchRepetitionAndRegexAtHeadTest, WorksForOneOrMany) {
   EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, '.', '+', "a$", "baab"));
   // Repeating zero times.
   EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, '.', '+', "b", "bc"));
 
   // Repeating once.
   EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, '.', '+', "b", "abc"));
   // Repeating more than once.
   EXPECT_TRUE(MatchRepetitionAndRegexAtHead(true, 'w', '+', "-", "ab_1-g"));
 }
 
 TEST(MatchRegexAtHeadTest, ReturnsTrueForEmptyRegex) {
   EXPECT_TRUE(MatchRegexAtHead("", ""));
   EXPECT_TRUE(MatchRegexAtHead("", "ab"));
 }
 
 TEST(MatchRegexAtHeadTest, WorksWhenDollarIsInRegex) {
   EXPECT_FALSE(MatchRegexAtHead("$", "a"));
 
   EXPECT_TRUE(MatchRegexAtHead("$", ""));
   EXPECT_TRUE(MatchRegexAtHead("a$", "a"));
 }
 
 TEST(MatchRegexAtHeadTest, WorksWhenRegexStartsWithEscapeSequence) {
   EXPECT_FALSE(MatchRegexAtHead("\\w", "+"));
   EXPECT_FALSE(MatchRegexAtHead("\\W", "ab"));
 
   EXPECT_TRUE(MatchRegexAtHead("\\sa", "\nab"));
   EXPECT_TRUE(MatchRegexAtHead("\\d", "1a"));
 }
 
 TEST(MatchRegexAtHeadTest, WorksWhenRegexStartsWithRepetition) {
   EXPECT_FALSE(MatchRegexAtHead(".+a", "abc"));
   EXPECT_FALSE(MatchRegexAtHead("a?b", "aab"));
 
   EXPECT_TRUE(MatchRegexAtHead(".*a", "bc12-ab"));
   EXPECT_TRUE(MatchRegexAtHead("a?b", "b"));
   EXPECT_TRUE(MatchRegexAtHead("a?b", "ab"));
 }
 
 TEST(MatchRegexAtHeadTest,
       WorksWhenRegexStartsWithRepetitionOfEscapeSequence) {
   EXPECT_FALSE(MatchRegexAtHead("\\.+a", "abc"));
   EXPECT_FALSE(MatchRegexAtHead("\\s?b", "  b"));
 
   EXPECT_TRUE(MatchRegexAtHead("\\(*a", "((((ab"));
   EXPECT_TRUE(MatchRegexAtHead("\\^?b", "^b"));
   EXPECT_TRUE(MatchRegexAtHead("\\\\?b", "b"));
   EXPECT_TRUE(MatchRegexAtHead("\\\\?b", "\\b"));
 }
 
 TEST(MatchRegexAtHeadTest, MatchesSequentially) {
   EXPECT_FALSE(MatchRegexAtHead("ab.*c", "acabc"));
 
   EXPECT_TRUE(MatchRegexAtHead("ab.*c", "ab-fsc"));
 }
 
 TEST(MatchRegexAnywhereTest, ReturnsFalseWhenStringIsNull) {
   EXPECT_FALSE(MatchRegexAnywhere("", NULL));
 }
 
 TEST(MatchRegexAnywhereTest, WorksWhenRegexStartsWithCaret) {
   EXPECT_FALSE(MatchRegexAnywhere("^a", "ba"));
   EXPECT_FALSE(MatchRegexAnywhere("^$", "a"));
 
   EXPECT_TRUE(MatchRegexAnywhere("^a", "ab"));
   EXPECT_TRUE(MatchRegexAnywhere("^", "ab"));
   EXPECT_TRUE(MatchRegexAnywhere("^$", ""));
 }
 
 TEST(MatchRegexAnywhereTest, ReturnsFalseWhenNoMatch) {
   EXPECT_FALSE(MatchRegexAnywhere("a", "bcde123"));
   EXPECT_FALSE(MatchRegexAnywhere("a.+a", "--aa88888888"));
 }
 
 TEST(MatchRegexAnywhereTest, ReturnsTrueWhenMatchingPrefix) {
   EXPECT_TRUE(MatchRegexAnywhere("\\w+", "ab1_ - 5"));
   EXPECT_TRUE(MatchRegexAnywhere(".*=", "="));
   EXPECT_TRUE(MatchRegexAnywhere("x.*ab?.*bc", "xaaabc"));
 }
 
 TEST(MatchRegexAnywhereTest, ReturnsTrueWhenMatchingNonPrefix) {
   EXPECT_TRUE(MatchRegexAnywhere("\\w+", "$$$ ab1_ - 5"));
   EXPECT_TRUE(MatchRegexAnywhere("\\.+=", "=  ...="));
 }
 
 // Tests RE's implicit constructors.
 TEST(RETest, ImplicitConstructorWorks) {
   const RE empty("");
   EXPECT_STREQ("", empty.pattern());
 
   const RE simple("hello");
   EXPECT_STREQ("hello", simple.pattern());
 }
 
 // Tests that RE's constructors reject invalid regular expressions.
 TEST(RETest, RejectsInvalidRegex) {
   EXPECT_NONFATAL_FAILURE({
     const RE normal(NULL);
   }, "NULL is not a valid simple regular expression");
 
   EXPECT_NONFATAL_FAILURE({
     const RE normal(".*(\\w+");
   }, "'(' is unsupported");
 
   EXPECT_NONFATAL_FAILURE({
     const RE invalid("^?");
   }, "'?' can only follow a repeatable token");
 }
 
 // Tests RE::FullMatch().
 TEST(RETest, FullMatchWorks) {
   const RE empty("");
   EXPECT_TRUE(RE::FullMatch("", empty));
   EXPECT_FALSE(RE::FullMatch("a", empty));
 
   const RE re1("a");
   EXPECT_TRUE(RE::FullMatch("a", re1));
 
   const RE re("a.*z");
   EXPECT_TRUE(RE::FullMatch("az", re));
   EXPECT_TRUE(RE::FullMatch("axyz", re));
   EXPECT_FALSE(RE::FullMatch("baz", re));
   EXPECT_FALSE(RE::FullMatch("azy", re));
 }
 
 // Tests RE::PartialMatch().
 TEST(RETest, PartialMatchWorks) {
   const RE empty("");
   EXPECT_TRUE(RE::PartialMatch("", empty));
   EXPECT_TRUE(RE::PartialMatch("a", empty));
 
   const RE re("a.*z");
   EXPECT_TRUE(RE::PartialMatch("az", re));
   EXPECT_TRUE(RE::PartialMatch("axyz", re));
   EXPECT_TRUE(RE::PartialMatch("baz", re));
   EXPECT_TRUE(RE::PartialMatch("azy", re));
   EXPECT_FALSE(RE::PartialMatch("zza", re));
 }
 
 #endif  // GTEST_USES_POSIX_RE
 
 #if !GTEST_OS_WINDOWS_MOBILE
 
 TEST(CaptureTest, CapturesStdout) {
   CaptureStdout();
   fprintf(stdout, "abc");
   EXPECT_STREQ("abc", GetCapturedStdout().c_str());
 
   CaptureStdout();
   fprintf(stdout, "def%cghi", '\0');
   EXPECT_EQ(::std::string("def\0ghi", 7), ::std::string(GetCapturedStdout()));
 }
 
 TEST(CaptureTest, CapturesStderr) {
   CaptureStderr();
   fprintf(stderr, "jkl");
   EXPECT_STREQ("jkl", GetCapturedStderr().c_str());
 
   CaptureStderr();
   fprintf(stderr, "jkl%cmno", '\0');
   EXPECT_EQ(::std::string("jkl\0mno", 7), ::std::string(GetCapturedStderr()));
 }
 
 // Tests that stdout and stderr capture don't interfere with each other.
 TEST(CaptureTest, CapturesStdoutAndStderr) {
   CaptureStdout();
   CaptureStderr();
   fprintf(stdout, "pqr");
   fprintf(stderr, "stu");
   EXPECT_STREQ("pqr", GetCapturedStdout().c_str());
   EXPECT_STREQ("stu", GetCapturedStderr().c_str());
 }
 
 TEST(CaptureDeathTest, CannotReenterStdoutCapture) {
   CaptureStdout();
   EXPECT_DEATH_IF_SUPPORTED(CaptureStdout(),
                             "Only one stdout capturer can exist at a time");
   GetCapturedStdout();
 
   // We cannot test stderr capturing using death tests as they use it
   // themselves.
 }
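
  // A minimal usage sketch of the capture API exercised above (LogGreeting()
  // is a hypothetical function that writes to stdout): every CaptureStdout()
  // must be balanced by a GetCapturedStdout(), since -- as the death test
  // above shows -- captures cannot be nested.
  //
  //   CaptureStdout();
  //   LogGreeting();                              // prints "hello\n"
  //   EXPECT_EQ("hello\n", GetCapturedStdout());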
 
 #endif  // !GTEST_OS_WINDOWS_MOBILE
 
 TEST(ThreadLocalTest, DefaultConstructorInitializesToDefaultValues) {
   ThreadLocal<int> t1;
   EXPECT_EQ(0, t1.get());
 
   ThreadLocal<void*> t2;
   EXPECT_TRUE(t2.get() == NULL);
 }
 
 TEST(ThreadLocalTest, SingleParamConstructorInitializesToParam) {
   ThreadLocal<int> t1(123);
   EXPECT_EQ(123, t1.get());
 
   int i = 0;
   ThreadLocal<int*> t2(&i);
   EXPECT_EQ(&i, t2.get());
 }
 
  class NoDefaultConstructor {
   public:
    explicit NoDefaultConstructor(const char*) {}
    NoDefaultConstructor(const NoDefaultConstructor&) {}
  };

  TEST(ThreadLocalTest, ValueDefaultConstructorIsNotRequiredForParamVersion) {
    ThreadLocal<NoDefaultConstructor> bar(NoDefaultConstructor("foo"));
    bar.pointer();
  }
 
 TEST(ThreadLocalTest, GetAndPointerReturnSameValue) {
   ThreadLocal<std::string> thread_local_string;
 
   EXPECT_EQ(thread_local_string.pointer(), &(thread_local_string.get()));
 
   // Verifies the condition still holds after calling set.
   thread_local_string.set("foo");
   EXPECT_EQ(thread_local_string.pointer(), &(thread_local_string.get()));
 }
 
 TEST(ThreadLocalTest, PointerAndConstPointerReturnSameValue) {
   ThreadLocal<std::string> thread_local_string;
   const ThreadLocal<std::string>& const_thread_local_string =
       thread_local_string;
 
   EXPECT_EQ(thread_local_string.pointer(), const_thread_local_string.pointer());
 
   thread_local_string.set("foo");
   EXPECT_EQ(thread_local_string.pointer(), const_thread_local_string.pointer());
 }
 
 #if GTEST_IS_THREADSAFE
 
 void AddTwo(int* param) { *param += 2; }
 
 TEST(ThreadWithParamTest, ConstructorExecutesThreadFunc) {
   int i = 40;
   ThreadWithParam<int*> thread(&AddTwo, &i, NULL);
   thread.Join();
   EXPECT_EQ(42, i);
 }
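
  // The third constructor argument of ThreadWithParam is a Notification*:
  // passing NULL, as above, lets the thread start right away, while passing a
  // real Notification makes the thread wait until Notify() is called (the
  // OnlyOneThreadCanLockAtATime test below uses this to release all of its
  // counting threads at once).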
 
 TEST(MutexDeathTest, AssertHeldShouldAssertWhenNotLocked) {
   // AssertHeld() is flaky only in the presence of multiple threads accessing
   // the lock. In this case, the test is robust.
   EXPECT_DEATH_IF_SUPPORTED({
     Mutex m;
     { MutexLock lock(&m); }
     m.AssertHeld();
   },
   "thread .*hold");
 }
 
 TEST(MutexTest, AssertHeldShouldNotAssertWhenLocked) {
   Mutex m;
   MutexLock lock(&m);
   m.AssertHeld();
 }
 
 class AtomicCounterWithMutex {
  public:
   explicit AtomicCounterWithMutex(Mutex* mutex) :
     value_(0), mutex_(mutex), random_(42) {}
 
   void Increment() {
     MutexLock lock(mutex_);
     int temp = value_;
     {
        // We need to put up a memory barrier to prevent reads and writes to
        // value_ from being reordered with the call to SleepMilliseconds when
        // observed from other threads.
 #if GTEST_HAS_PTHREAD
       // On POSIX, locking a mutex puts up a memory barrier.  We cannot use
       // Mutex and MutexLock here or rely on their memory barrier
       // functionality as we are testing them here.
       pthread_mutex_t memory_barrier_mutex;
       GTEST_CHECK_POSIX_SUCCESS_(
           pthread_mutex_init(&memory_barrier_mutex, NULL));
       GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_lock(&memory_barrier_mutex));
 
       SleepMilliseconds(random_.Generate(30));
 
       GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_unlock(&memory_barrier_mutex));
       GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_destroy(&memory_barrier_mutex));
 #elif GTEST_OS_WINDOWS
       // On Windows, performing an interlocked access puts up a memory barrier.
       volatile LONG dummy = 0;
       ::InterlockedIncrement(&dummy);
       SleepMilliseconds(random_.Generate(30));
       ::InterlockedIncrement(&dummy);
 #else
 # error "Memory barrier not implemented on this platform."
 #endif  // GTEST_HAS_PTHREAD
     }
     value_ = temp + 1;
   }
   int value() const { return value_; }
 
  private:
   volatile int value_;
   Mutex* const mutex_;  // Protects value_.
   Random       random_;
 };
 
 void CountingThreadFunc(pair<AtomicCounterWithMutex*, int> param) {
   for (int i = 0; i < param.second; ++i)
       param.first->Increment();
 }
 
  // Tests that the mutex lets only one thread at a time lock it.
 TEST(MutexTest, OnlyOneThreadCanLockAtATime) {
   Mutex mutex;
   AtomicCounterWithMutex locked_counter(&mutex);
 
   typedef ThreadWithParam<pair<AtomicCounterWithMutex*, int> > ThreadType;
   const int kCycleCount = 20;
   const int kThreadCount = 7;
   scoped_ptr<ThreadType> counting_threads[kThreadCount];
   Notification threads_can_start;
   // Creates and runs kThreadCount threads that increment locked_counter
   // kCycleCount times each.
   for (int i = 0; i < kThreadCount; ++i) {
     counting_threads[i].reset(new ThreadType(&CountingThreadFunc,
                                              make_pair(&locked_counter,
                                                        kCycleCount),
                                              &threads_can_start));
   }
   threads_can_start.Notify();
   for (int i = 0; i < kThreadCount; ++i)
     counting_threads[i]->Join();
 
    // If the mutex let more than one thread increment the counter at a
    // time, the threads would be likely to race and have some increments
    // overwritten, resulting in a lower-than-expected counter value.
   EXPECT_EQ(kCycleCount * kThreadCount, locked_counter.value());
 }
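
  // A sketch of the lost update the comment above refers to: without mutual
  // exclusion, two threads could interleave as
  //
  //   thread A: temp = value_;      // reads 5
  //   thread B: temp = value_;      // also reads 5
  //   thread A: value_ = temp + 1;  // writes 6
  //   thread B: value_ = temp + 1;  // overwrites with 6 -- one increment lost
  //
  // which is why a lower-than-expected final count indicates that the mutex
  // failed to serialize the threads.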
 
 template <typename T>
 void RunFromThread(void (func)(T), T param) {
   ThreadWithParam<T> thread(func, param, NULL);
   thread.Join();
 }
 
 void RetrieveThreadLocalValue(
     pair<ThreadLocal<std::string>*, std::string*> param) {
   *param.second = param.first->get();
 }
 
 TEST(ThreadLocalTest, ParameterizedConstructorSetsDefault) {
   ThreadLocal<std::string> thread_local_string("foo");
   EXPECT_STREQ("foo", thread_local_string.get().c_str());
 
   thread_local_string.set("bar");
   EXPECT_STREQ("bar", thread_local_string.get().c_str());
 
   std::string result;
   RunFromThread(&RetrieveThreadLocalValue,
                 make_pair(&thread_local_string, &result));
   EXPECT_STREQ("foo", result.c_str());
 }
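
  // Note on the last expectation above: the spawned thread never called
  // set(), so its copy of the value is lazily initialized from the
  // constructor argument ("foo"); the main thread's set("bar") affects only
  // the main thread's copy.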
 
  // Keeps track of whether destructors have been called on instances of
  // DestructorTracker.  On Windows, waits for the destructor call reports.
 class DestructorCall {
  public:
   DestructorCall() {
     invoked_ = false;
 #if GTEST_OS_WINDOWS
     wait_event_.Reset(::CreateEvent(NULL, TRUE, FALSE, NULL));
     GTEST_CHECK_(wait_event_.Get() != NULL);
 #endif
   }
 
   bool CheckDestroyed() const {
 #if GTEST_OS_WINDOWS
     if (::WaitForSingleObject(wait_event_.Get(), 1000) != WAIT_OBJECT_0)
       return false;
 #endif
     return invoked_;
   }
 
   void ReportDestroyed() {
     invoked_ = true;
 #if GTEST_OS_WINDOWS
     ::SetEvent(wait_event_.Get());
 #endif
   }
 
   static std::vector<DestructorCall*>& List() { return *list_; }
 
   static void ResetList() {
     for (size_t i = 0; i < list_->size(); ++i) {
       delete list_->at(i);
     }
     list_->clear();
   }
 
  private:
   bool invoked_;
 #if GTEST_OS_WINDOWS
   AutoHandle wait_event_;
 #endif
   static std::vector<DestructorCall*>* const list_;
 
   GTEST_DISALLOW_COPY_AND_ASSIGN_(DestructorCall);
 };
 
 std::vector<DestructorCall*>* const DestructorCall::list_ =
     new std::vector<DestructorCall*>;
 
 // DestructorTracker keeps track of whether its instances have been
 // destroyed.
 class DestructorTracker {
  public:
   DestructorTracker() : index_(GetNewIndex()) {}
   DestructorTracker(const DestructorTracker& /* rhs */)
       : index_(GetNewIndex()) {}
   ~DestructorTracker() {
     // We never access DestructorCall::List() concurrently, so we don't need
     // to protect this access with a mutex.
     DestructorCall::List()[index_]->ReportDestroyed();
   }
 
  private:
   static size_t GetNewIndex() {
     DestructorCall::List().push_back(new DestructorCall);
     return DestructorCall::List().size() - 1;
   }
   const size_t index_;
 
   GTEST_DISALLOW_ASSIGN_(DestructorTracker);
 };
 
 typedef ThreadLocal<DestructorTracker>* ThreadParam;
 
 void CallThreadLocalGet(ThreadParam thread_local_param) {
   thread_local_param->get();
 }
 
 // Tests that when a ThreadLocal object dies in a thread, it destroys
 // the managed object for that thread.
 TEST(ThreadLocalTest, DestroysManagedObjectForOwnThreadWhenDying) {
   DestructorCall::ResetList();
 
   {
     ThreadLocal<DestructorTracker> thread_local_tracker;
     ASSERT_EQ(0U, DestructorCall::List().size());
 
     // This creates another DestructorTracker object for the main thread.
     thread_local_tracker.get();
     ASSERT_EQ(1U, DestructorCall::List().size());
     ASSERT_FALSE(DestructorCall::List()[0]->CheckDestroyed());
   }
 
   // Now thread_local_tracker has died.
   ASSERT_EQ(1U, DestructorCall::List().size());
   EXPECT_TRUE(DestructorCall::List()[0]->CheckDestroyed());
 
   DestructorCall::ResetList();
 }
 
 // Tests that when a thread exits, the thread-local object for that
 // thread is destroyed.
 TEST(ThreadLocalTest, DestroysManagedObjectAtThreadExit) {
   DestructorCall::ResetList();
 
   {
     ThreadLocal<DestructorTracker> thread_local_tracker;
     ASSERT_EQ(0U, DestructorCall::List().size());
 
     // This creates another DestructorTracker object in the new thread.
     ThreadWithParam<ThreadParam> thread(
         &CallThreadLocalGet, &thread_local_tracker, NULL);
     thread.Join();
 
      // The thread has exited, and we should have a DestructorTracker
      // instance created for it. But it may not have been destroyed yet.
     ASSERT_EQ(1U, DestructorCall::List().size());
   }
 
   // The thread has exited and thread_local_tracker has died.
   ASSERT_EQ(1U, DestructorCall::List().size());
   EXPECT_TRUE(DestructorCall::List()[0]->CheckDestroyed());
 
   DestructorCall::ResetList();
 }
 
 TEST(ThreadLocalTest, ThreadLocalMutationsAffectOnlyCurrentThread) {
   ThreadLocal<std::string> thread_local_string;
   thread_local_string.set("Foo");
   EXPECT_STREQ("Foo", thread_local_string.get().c_str());
 
   std::string result;
   RunFromThread(&RetrieveThreadLocalValue,
                 make_pair(&thread_local_string, &result));
   EXPECT_TRUE(result.empty());
 }
 
 #endif  // GTEST_IS_THREADSAFE
 
 #if GTEST_OS_WINDOWS
 TEST(WindowsTypesTest, HANDLEIsVoidStar) {
   StaticAssertTypeEq<HANDLE, void*>();
 }
 
 #if GTEST_OS_WINDOWS_MINGW && !defined(__MINGW64_VERSION_MAJOR)
 TEST(WindowsTypesTest, _CRITICAL_SECTIONIs_CRITICAL_SECTION) {
   StaticAssertTypeEq<CRITICAL_SECTION, _CRITICAL_SECTION>();
 }
 #else
 TEST(WindowsTypesTest, CRITICAL_SECTIONIs_RTL_CRITICAL_SECTION) {
   StaticAssertTypeEq<CRITICAL_SECTION, _RTL_CRITICAL_SECTION>();
 }
 #endif
 
 #endif  // GTEST_OS_WINDOWS
 
 }  // namespace internal
 }  // namespace testing
diff --git a/googletest/test/gtest-printers_test.cc b/googletest/test/googletest-printers-test.cc
similarity index 99%
rename from googletest/test/gtest-printers_test.cc
rename to googletest/test/googletest-printers-test.cc
index 49b3bd46..1b1026e7 100644
--- a/googletest/test/gtest-printers_test.cc
+++ b/googletest/test/googletest-printers-test.cc
@@ -1,1737 +1,1737 @@
 // Copyright 2007, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 // Author: wan@google.com (Zhanyong Wan)
 
 // Google Test - The Google C++ Testing and Mocking Framework
 //
 // This file tests the universal value printer.
 
 #include "gtest/gtest-printers.h"
 
 #include <ctype.h>
 #include <limits.h>
 #include <string.h>
 #include <algorithm>
 #include <deque>
 #include <list>
 #include <map>
 #include <set>
 #include <sstream>
 #include <string>
 #include <utility>
 #include <vector>
 
 #include "gtest/gtest.h"
 
 #if GTEST_HAS_UNORDERED_MAP_
 # include <unordered_map>  // NOLINT
 #endif  // GTEST_HAS_UNORDERED_MAP_
 
 #if GTEST_HAS_UNORDERED_SET_
 # include <unordered_set>  // NOLINT
 #endif  // GTEST_HAS_UNORDERED_SET_
 
 #if GTEST_HAS_STD_FORWARD_LIST_
 # include <forward_list> // NOLINT
 #endif  // GTEST_HAS_STD_FORWARD_LIST_
 
 // Some user-defined types for testing the universal value printer.
 
 // An anonymous enum type.
 enum AnonymousEnum {
   kAE1 = -1,
   kAE2 = 1
 };
 
 // An enum without a user-defined printer.
 enum EnumWithoutPrinter {
   kEWP1 = -2,
   kEWP2 = 42
 };
 
 // An enum with a << operator.
 enum EnumWithStreaming {
   kEWS1 = 10
 };
 
 std::ostream& operator<<(std::ostream& os, EnumWithStreaming e) {
   return os << (e == kEWS1 ? "kEWS1" : "invalid");
 }
 
 // An enum with a PrintTo() function.
 enum EnumWithPrintTo {
   kEWPT1 = 1
 };
 
 void PrintTo(EnumWithPrintTo e, std::ostream* os) {
   *os << (e == kEWPT1 ? "kEWPT1" : "invalid");
 }
 
 // A class implicitly convertible to BiggestInt.
 class BiggestIntConvertible {
  public:
   operator ::testing::internal::BiggestInt() const { return 42; }
 };
 
 // A user-defined unprintable class template in the global namespace.
 template <typename T>
 class UnprintableTemplateInGlobal {
  public:
   UnprintableTemplateInGlobal() : value_() {}
  private:
   T value_;
 };
 
 // A user-defined streamable type in the global namespace.
 class StreamableInGlobal {
  public:
   virtual ~StreamableInGlobal() {}
 };
 
 inline void operator<<(::std::ostream& os, const StreamableInGlobal& /* x */) {
   os << "StreamableInGlobal";
 }
 
 void operator<<(::std::ostream& os, const StreamableInGlobal* /* x */) {
   os << "StreamableInGlobal*";
 }
 
 namespace foo {
 
 // A user-defined unprintable type in a user namespace.
 class UnprintableInFoo {
  public:
   UnprintableInFoo() : z_(0) { memcpy(xy_, "\xEF\x12\x0\x0\x34\xAB\x0\x0", 8); }
   double z() const { return z_; }
  private:
   char xy_[8];
   double z_;
 };
 
 // A user-defined printable type in a user-chosen namespace.
 struct PrintableViaPrintTo {
   PrintableViaPrintTo() : value() {}
   int value;
 };
 
 void PrintTo(const PrintableViaPrintTo& x, ::std::ostream* os) {
   *os << "PrintableViaPrintTo: " << x.value;
 }
 
 // A type with a user-defined << for printing its pointer.
 struct PointerPrintable {
 };
 
 ::std::ostream& operator<<(::std::ostream& os,
                            const PointerPrintable* /* x */) {
   return os << "PointerPrintable*";
 }
 
 // A user-defined printable class template in a user-chosen namespace.
 template <typename T>
 class PrintableViaPrintToTemplate {
  public:
   explicit PrintableViaPrintToTemplate(const T& a_value) : value_(a_value) {}
 
   const T& value() const { return value_; }
  private:
   T value_;
 };
 
 template <typename T>
 void PrintTo(const PrintableViaPrintToTemplate<T>& x, ::std::ostream* os) {
   *os << "PrintableViaPrintToTemplate: " << x.value();
 }
 
 // A user-defined streamable class template in a user namespace.
 template <typename T>
 class StreamableTemplateInFoo {
  public:
   StreamableTemplateInFoo() : value_() {}
 
   const T& value() const { return value_; }
  private:
   T value_;
 };
 
 template <typename T>
 inline ::std::ostream& operator<<(::std::ostream& os,
                                   const StreamableTemplateInFoo<T>& x) {
   return os << "StreamableTemplateInFoo: " << x.value();
 }
 
  // A user-defined streamable but recursively-defined container type in
  // a user namespace; it therefore mimics std::filesystem::path or
  // boost::filesystem::path.
 class PathLike {
  public:
   struct iterator {
     typedef PathLike value_type;
   };
 
   PathLike() {}
 
   iterator begin() const { return iterator(); }
   iterator end() const { return iterator(); }
 
   friend ::std::ostream& operator<<(::std::ostream& os, const PathLike&) {
     return os << "Streamable-PathLike";
   }
 };
 
 }  // namespace foo
 
 namespace testing {
 namespace gtest_printers_test {
 
 using ::std::deque;
 using ::std::list;
 using ::std::make_pair;
 using ::std::map;
 using ::std::multimap;
 using ::std::multiset;
 using ::std::pair;
 using ::std::set;
 using ::std::vector;
 using ::testing::PrintToString;
 using ::testing::internal::FormatForComparisonFailureMessage;
 using ::testing::internal::ImplicitCast_;
 using ::testing::internal::NativeArray;
 using ::testing::internal::RE;
 using ::testing::internal::RelationToSourceReference;
 using ::testing::internal::Strings;
 using ::testing::internal::UniversalPrint;
 using ::testing::internal::UniversalPrinter;
 using ::testing::internal::UniversalTersePrint;
 #if GTEST_HAS_TR1_TUPLE || GTEST_HAS_STD_TUPLE_
 using ::testing::internal::UniversalTersePrintTupleFieldsToStrings;
 #endif
 
 // Prints a value to a string using the universal value printer.  This
 // is a helper for testing UniversalPrinter<T>::Print() for various types.
 template <typename T>
 std::string Print(const T& value) {
   ::std::stringstream ss;
   UniversalPrinter<T>::Print(value, &ss);
   return ss.str();
 }
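
  // For orientation: user code normally reaches the same machinery through
  // the public ::testing::PrintToString() helper; Print() above calls
  // UniversalPrinter<T>::Print() directly so the tests can control the exact
  // type T.  A rough sketch of the public entry point (shown for illustration
  // only, not asserted by this file):
  //
  //   std::vector<int> v;
  //   v.push_back(1);
  //   v.push_back(2);
  //   EXPECT_EQ("{ 1, 2 }", ::testing::PrintToString(v));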
 
 // Prints a value passed by reference to a string, using the universal
 // value printer.  This is a helper for testing
 // UniversalPrinter<T&>::Print() for various types.
 template <typename T>
 std::string PrintByRef(const T& value) {
   ::std::stringstream ss;
   UniversalPrinter<T&>::Print(value, &ss);
   return ss.str();
 }
 
 // Tests printing various enum types.
 
 TEST(PrintEnumTest, AnonymousEnum) {
   EXPECT_EQ("-1", Print(kAE1));
   EXPECT_EQ("1", Print(kAE2));
 }
 
 TEST(PrintEnumTest, EnumWithoutPrinter) {
   EXPECT_EQ("-2", Print(kEWP1));
   EXPECT_EQ("42", Print(kEWP2));
 }
 
 TEST(PrintEnumTest, EnumWithStreaming) {
   EXPECT_EQ("kEWS1", Print(kEWS1));
   EXPECT_EQ("invalid", Print(static_cast<EnumWithStreaming>(0)));
 }
 
 TEST(PrintEnumTest, EnumWithPrintTo) {
   EXPECT_EQ("kEWPT1", Print(kEWPT1));
   EXPECT_EQ("invalid", Print(static_cast<EnumWithPrintTo>(0)));
 }
 
 // Tests printing a class implicitly convertible to BiggestInt.
 
 TEST(PrintClassTest, BiggestIntConvertible) {
   EXPECT_EQ("42", Print(BiggestIntConvertible()));
 }
 
 // Tests printing various char types.
 
 // char.
 TEST(PrintCharTest, PlainChar) {
   EXPECT_EQ("'\\0'", Print('\0'));
   EXPECT_EQ("'\\'' (39, 0x27)", Print('\''));
   EXPECT_EQ("'\"' (34, 0x22)", Print('"'));
   EXPECT_EQ("'?' (63, 0x3F)", Print('?'));
   EXPECT_EQ("'\\\\' (92, 0x5C)", Print('\\'));
   EXPECT_EQ("'\\a' (7)", Print('\a'));
   EXPECT_EQ("'\\b' (8)", Print('\b'));
   EXPECT_EQ("'\\f' (12, 0xC)", Print('\f'));
   EXPECT_EQ("'\\n' (10, 0xA)", Print('\n'));
   EXPECT_EQ("'\\r' (13, 0xD)", Print('\r'));
   EXPECT_EQ("'\\t' (9)", Print('\t'));
   EXPECT_EQ("'\\v' (11, 0xB)", Print('\v'));
   EXPECT_EQ("'\\x7F' (127)", Print('\x7F'));
   EXPECT_EQ("'\\xFF' (255)", Print('\xFF'));
   EXPECT_EQ("' ' (32, 0x20)", Print(' '));
   EXPECT_EQ("'a' (97, 0x61)", Print('a'));
 }
 
 // signed char.
 TEST(PrintCharTest, SignedChar) {
   EXPECT_EQ("'\\0'", Print(static_cast<signed char>('\0')));
   EXPECT_EQ("'\\xCE' (-50)",
             Print(static_cast<signed char>(-50)));
 }
 
 // unsigned char.
 TEST(PrintCharTest, UnsignedChar) {
   EXPECT_EQ("'\\0'", Print(static_cast<unsigned char>('\0')));
   EXPECT_EQ("'b' (98, 0x62)",
             Print(static_cast<unsigned char>('b')));
 }
 
 // Tests printing other simple, built-in types.
 
 // bool.
 TEST(PrintBuiltInTypeTest, Bool) {
   EXPECT_EQ("false", Print(false));
   EXPECT_EQ("true", Print(true));
 }
 
 // wchar_t.
 TEST(PrintBuiltInTypeTest, Wchar_t) {
   EXPECT_EQ("L'\\0'", Print(L'\0'));
   EXPECT_EQ("L'\\'' (39, 0x27)", Print(L'\''));
   EXPECT_EQ("L'\"' (34, 0x22)", Print(L'"'));
   EXPECT_EQ("L'?' (63, 0x3F)", Print(L'?'));
   EXPECT_EQ("L'\\\\' (92, 0x5C)", Print(L'\\'));
   EXPECT_EQ("L'\\a' (7)", Print(L'\a'));
   EXPECT_EQ("L'\\b' (8)", Print(L'\b'));
   EXPECT_EQ("L'\\f' (12, 0xC)", Print(L'\f'));
   EXPECT_EQ("L'\\n' (10, 0xA)", Print(L'\n'));
   EXPECT_EQ("L'\\r' (13, 0xD)", Print(L'\r'));
   EXPECT_EQ("L'\\t' (9)", Print(L'\t'));
   EXPECT_EQ("L'\\v' (11, 0xB)", Print(L'\v'));
   EXPECT_EQ("L'\\x7F' (127)", Print(L'\x7F'));
   EXPECT_EQ("L'\\xFF' (255)", Print(L'\xFF'));
   EXPECT_EQ("L' ' (32, 0x20)", Print(L' '));
   EXPECT_EQ("L'a' (97, 0x61)", Print(L'a'));
   EXPECT_EQ("L'\\x576' (1398)", Print(static_cast<wchar_t>(0x576)));
   EXPECT_EQ("L'\\xC74D' (51021)", Print(static_cast<wchar_t>(0xC74D)));
 }
 
 // Test that Int64 provides more storage than wchar_t.
 TEST(PrintTypeSizeTest, Wchar_t) {
   EXPECT_LT(sizeof(wchar_t), sizeof(testing::internal::Int64));
 }
 
 // Various integer types.
 TEST(PrintBuiltInTypeTest, Integer) {
   EXPECT_EQ("'\\xFF' (255)", Print(static_cast<unsigned char>(255)));  // uint8
   EXPECT_EQ("'\\x80' (-128)", Print(static_cast<signed char>(-128)));  // int8
   EXPECT_EQ("65535", Print(USHRT_MAX));  // uint16
   EXPECT_EQ("-32768", Print(SHRT_MIN));  // int16
   EXPECT_EQ("4294967295", Print(UINT_MAX));  // uint32
   EXPECT_EQ("-2147483648", Print(INT_MIN));  // int32
   EXPECT_EQ("18446744073709551615",
             Print(static_cast<testing::internal::UInt64>(-1)));  // uint64
   EXPECT_EQ("-9223372036854775808",
             Print(static_cast<testing::internal::Int64>(1) << 63));  // int64
 }
 
 // Size types.
 TEST(PrintBuiltInTypeTest, Size_t) {
   EXPECT_EQ("1", Print(sizeof('a')));  // size_t.
 #if !GTEST_OS_WINDOWS
   // Windows has no ssize_t type.
   EXPECT_EQ("-2", Print(static_cast<ssize_t>(-2)));  // ssize_t.
 #endif  // !GTEST_OS_WINDOWS
 }
 
 // Floating-points.
 TEST(PrintBuiltInTypeTest, FloatingPoints) {
   EXPECT_EQ("1.5", Print(1.5f));   // float
   EXPECT_EQ("-2.5", Print(-2.5));  // double
 }
 
 // Since ::std::stringstream::operator<<(const void *) formats the pointer
 // output differently with different compilers, we have to create the expected
 // output first and use it as our expectation.
 static std::string PrintPointer(const void* p) {
   ::std::stringstream expected_result_stream;
   expected_result_stream << p;
   return expected_result_stream.str();
 }
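
  // PrintPointer() lets the pointer tests below build their expected text in
  // whatever format the local iostream library uses for addresses.  An
  // illustrative sketch (not one of the checked cases):
  //
  //   int n = 0;
  //   int* p = &n;
  //   EXPECT_EQ(PrintPointer(p), Print(p));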
 
 // Tests printing C strings.
 
 // const char*.
 TEST(PrintCStringTest, Const) {
   const char* p = "World";
   EXPECT_EQ(PrintPointer(p) + " pointing to \"World\"", Print(p));
 }
 
 // char*.
 TEST(PrintCStringTest, NonConst) {
   char p[] = "Hi";
   EXPECT_EQ(PrintPointer(p) + " pointing to \"Hi\"",
             Print(static_cast<char*>(p)));
 }
 
 // NULL C string.
 TEST(PrintCStringTest, Null) {
   const char* p = NULL;
   EXPECT_EQ("NULL", Print(p));
 }
 
 // Tests that C strings are escaped properly.
 TEST(PrintCStringTest, EscapesProperly) {
   const char* p = "'\"?\\\a\b\f\n\r\t\v\x7F\xFF a";
   EXPECT_EQ(PrintPointer(p) + " pointing to \"'\\\"?\\\\\\a\\b\\f"
             "\\n\\r\\t\\v\\x7F\\xFF a\"",
             Print(p));
 }
 
  // The MSVC compiler can be configured to define wchar_t as a typedef
  // of unsigned short. Defining an overload for const wchar_t* in that case
  // would cause pointers to unsigned shorts to be printed as wide strings,
  // possibly accessing more memory than intended and causing invalid
  // memory accesses. MSVC defines the _NATIVE_WCHAR_T_DEFINED symbol when
  // wchar_t is implemented as a native type.
 #if !defined(_MSC_VER) || defined(_NATIVE_WCHAR_T_DEFINED)
 
 // const wchar_t*.
 TEST(PrintWideCStringTest, Const) {
   const wchar_t* p = L"World";
   EXPECT_EQ(PrintPointer(p) + " pointing to L\"World\"", Print(p));
 }
 
 // wchar_t*.
 TEST(PrintWideCStringTest, NonConst) {
   wchar_t p[] = L"Hi";
   EXPECT_EQ(PrintPointer(p) + " pointing to L\"Hi\"",
             Print(static_cast<wchar_t*>(p)));
 }
 
 // NULL wide C string.
 TEST(PrintWideCStringTest, Null) {
   const wchar_t* p = NULL;
   EXPECT_EQ("NULL", Print(p));
 }
 
 // Tests that wide C strings are escaped properly.
 TEST(PrintWideCStringTest, EscapesProperly) {
   const wchar_t s[] = {'\'', '"', '?', '\\', '\a', '\b', '\f', '\n', '\r',
                        '\t', '\v', 0xD3, 0x576, 0x8D3, 0xC74D, ' ', 'a', '\0'};
   EXPECT_EQ(PrintPointer(s) + " pointing to L\"'\\\"?\\\\\\a\\b\\f"
             "\\n\\r\\t\\v\\xD3\\x576\\x8D3\\xC74D a\"",
             Print(static_cast<const wchar_t*>(s)));
 }
 #endif  // native wchar_t
 
 // Tests printing pointers to other char types.
 
 // signed char*.
 TEST(PrintCharPointerTest, SignedChar) {
   signed char* p = reinterpret_cast<signed char*>(0x1234);
   EXPECT_EQ(PrintPointer(p), Print(p));
   p = NULL;
   EXPECT_EQ("NULL", Print(p));
 }
 
 // const signed char*.
 TEST(PrintCharPointerTest, ConstSignedChar) {
    const signed char* p = reinterpret_cast<const signed char*>(0x1234);
   EXPECT_EQ(PrintPointer(p), Print(p));
   p = NULL;
   EXPECT_EQ("NULL", Print(p));
 }
 
 // unsigned char*.
 TEST(PrintCharPointerTest, UnsignedChar) {
   unsigned char* p = reinterpret_cast<unsigned char*>(0x1234);
   EXPECT_EQ(PrintPointer(p), Print(p));
   p = NULL;
   EXPECT_EQ("NULL", Print(p));
 }
 
 // const unsigned char*.
 TEST(PrintCharPointerTest, ConstUnsignedChar) {
   const unsigned char* p = reinterpret_cast<const unsigned char*>(0x1234);
   EXPECT_EQ(PrintPointer(p), Print(p));
   p = NULL;
   EXPECT_EQ("NULL", Print(p));
 }
 
 // Tests printing pointers to simple, built-in types.
 
 // bool*.
 TEST(PrintPointerToBuiltInTypeTest, Bool) {
   bool* p = reinterpret_cast<bool*>(0xABCD);
   EXPECT_EQ(PrintPointer(p), Print(p));
   p = NULL;
   EXPECT_EQ("NULL", Print(p));
 }
 
 // void*.
 TEST(PrintPointerToBuiltInTypeTest, Void) {
   void* p = reinterpret_cast<void*>(0xABCD);
   EXPECT_EQ(PrintPointer(p), Print(p));
   p = NULL;
   EXPECT_EQ("NULL", Print(p));
 }
 
 // const void*.
 TEST(PrintPointerToBuiltInTypeTest, ConstVoid) {
   const void* p = reinterpret_cast<const void*>(0xABCD);
   EXPECT_EQ(PrintPointer(p), Print(p));
   p = NULL;
   EXPECT_EQ("NULL", Print(p));
 }
 
 // Tests printing pointers to pointers.
 TEST(PrintPointerToPointerTest, IntPointerPointer) {
   int** p = reinterpret_cast<int**>(0xABCD);
   EXPECT_EQ(PrintPointer(p), Print(p));
   p = NULL;
   EXPECT_EQ("NULL", Print(p));
 }
 
 // Tests printing (non-member) function pointers.
 
 void MyFunction(int /* n */) {}
 
 TEST(PrintPointerTest, NonMemberFunctionPointer) {
   // We cannot directly cast &MyFunction to const void* because the
   // standard disallows casting between pointers to functions and
   // pointers to objects, and some compilers (e.g. GCC 3.4) enforce
   // this limitation.
   EXPECT_EQ(
       PrintPointer(reinterpret_cast<const void*>(
           reinterpret_cast<internal::BiggestInt>(&MyFunction))),
       Print(&MyFunction));
   int (*p)(bool) = NULL;  // NOLINT
   EXPECT_EQ("NULL", Print(p));
 }
 
  // An assertion predicate determining whether one string is a prefix of
  // another.
 template <typename StringType>
 AssertionResult HasPrefix(const StringType& str, const StringType& prefix) {
   if (str.find(prefix, 0) == 0)
     return AssertionSuccess();
 
   const bool is_wide_string = sizeof(prefix[0]) > 1;
   const char* const begin_string_quote = is_wide_string ? "L\"" : "\"";
   return AssertionFailure()
       << begin_string_quote << prefix << "\" is not a prefix of "
       << begin_string_quote << str << "\"\n";
 }
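
  // A minimal sketch of how the predicate reads in an assertion (the strings
  // here are made up for illustration):
  //
  //   EXPECT_TRUE(HasPrefix(std::string("16-byte object <EF-12 ...>"),
  //                         std::string("16-byte object ")));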
 
 // Tests printing member variable pointers.  Although they are called
 // pointers, they don't point to a location in the address space.
 // Their representation is implementation-defined.  Thus they will be
 // printed as raw bytes.
 
 struct Foo {
  public:
   virtual ~Foo() {}
   int MyMethod(char x) { return x + 1; }
   virtual char MyVirtualMethod(int /* n */) { return 'a'; }
 
   int value;
 };
 
 TEST(PrintPointerTest, MemberVariablePointer) {
   EXPECT_TRUE(HasPrefix(Print(&Foo::value),
                         Print(sizeof(&Foo::value)) + "-byte object "));
-  int (Foo::*p) = NULL;  // NOLINT
+  int Foo::*p = NULL;  // NOLINT
   EXPECT_TRUE(HasPrefix(Print(p),
                         Print(sizeof(p)) + "-byte object "));
 }
 
 // Tests printing member function pointers.  Although they are called
 // pointers, they don't point to a location in the address space.
 // Their representation is implementation-defined.  Thus they will be
 // printed as raw bytes.
 TEST(PrintPointerTest, MemberFunctionPointer) {
   EXPECT_TRUE(HasPrefix(Print(&Foo::MyMethod),
                         Print(sizeof(&Foo::MyMethod)) + "-byte object "));
   EXPECT_TRUE(
       HasPrefix(Print(&Foo::MyVirtualMethod),
                 Print(sizeof((&Foo::MyVirtualMethod))) + "-byte object "));
   int (Foo::*p)(char) = NULL;  // NOLINT
   EXPECT_TRUE(HasPrefix(Print(p),
                         Print(sizeof(p)) + "-byte object "));
 }
 
 // Tests printing C arrays.
 
 // The difference between this and Print() is that it ensures that the
 // argument is a reference to an array.
 template <typename T, size_t N>
 std::string PrintArrayHelper(T (&a)[N]) {
   return Print(a);
 }
 
 // One-dimensional array.
 TEST(PrintArrayTest, OneDimensionalArray) {
   int a[5] = { 1, 2, 3, 4, 5 };
   EXPECT_EQ("{ 1, 2, 3, 4, 5 }", PrintArrayHelper(a));
 }
 
 // Two-dimensional array.
 TEST(PrintArrayTest, TwoDimensionalArray) {
   int a[2][5] = {
     { 1, 2, 3, 4, 5 },
     { 6, 7, 8, 9, 0 }
   };
   EXPECT_EQ("{ { 1, 2, 3, 4, 5 }, { 6, 7, 8, 9, 0 } }", PrintArrayHelper(a));
 }
 
 // Array of const elements.
 TEST(PrintArrayTest, ConstArray) {
   const bool a[1] = { false };
   EXPECT_EQ("{ false }", PrintArrayHelper(a));
 }
 
 // char array without terminating NUL.
 TEST(PrintArrayTest, CharArrayWithNoTerminatingNul) {
   // Array a contains '\0' in the middle and doesn't end with '\0'.
   char a[] = { 'H', '\0', 'i' };
   EXPECT_EQ("\"H\\0i\" (no terminating NUL)", PrintArrayHelper(a));
 }
 
 // const char array with terminating NUL.
 TEST(PrintArrayTest, ConstCharArrayWithTerminatingNul) {
   const char a[] = "\0Hi";
   EXPECT_EQ("\"\\0Hi\"", PrintArrayHelper(a));
 }
 
 // const wchar_t array without terminating NUL.
 TEST(PrintArrayTest, WCharArrayWithNoTerminatingNul) {
   // Array a contains '\0' in the middle and doesn't end with '\0'.
   const wchar_t a[] = { L'H', L'\0', L'i' };
   EXPECT_EQ("L\"H\\0i\" (no terminating NUL)", PrintArrayHelper(a));
 }
 
  // const wchar_t array with terminating NUL.
 TEST(PrintArrayTest, WConstCharArrayWithTerminatingNul) {
   const wchar_t a[] = L"\0Hi";
   EXPECT_EQ("L\"\\0Hi\"", PrintArrayHelper(a));
 }
 
 // Array of objects.
 TEST(PrintArrayTest, ObjectArray) {
   std::string a[3] = {"Hi", "Hello", "Ni hao"};
   EXPECT_EQ("{ \"Hi\", \"Hello\", \"Ni hao\" }", PrintArrayHelper(a));
 }
 
 // Array with many elements.
 TEST(PrintArrayTest, BigArray) {
   int a[100] = { 1, 2, 3 };
   EXPECT_EQ("{ 1, 2, 3, 0, 0, 0, 0, 0, ..., 0, 0, 0, 0, 0, 0, 0, 0 }",
             PrintArrayHelper(a));
 }
 
 // Tests printing ::string and ::std::string.
 
 #if GTEST_HAS_GLOBAL_STRING
 // ::string.
 TEST(PrintStringTest, StringInGlobalNamespace) {
   const char s[] = "'\"?\\\a\b\f\n\0\r\t\v\x7F\xFF a";
   const ::string str(s, sizeof(s));
   EXPECT_EQ("\"'\\\"?\\\\\\a\\b\\f\\n\\0\\r\\t\\v\\x7F\\xFF a\\0\"",
             Print(str));
 }
 #endif  // GTEST_HAS_GLOBAL_STRING
 
 // ::std::string.
 TEST(PrintStringTest, StringInStdNamespace) {
   const char s[] = "'\"?\\\a\b\f\n\0\r\t\v\x7F\xFF a";
   const ::std::string str(s, sizeof(s));
   EXPECT_EQ("\"'\\\"?\\\\\\a\\b\\f\\n\\0\\r\\t\\v\\x7F\\xFF a\\0\"",
             Print(str));
 }
 
  TEST(PrintStringTest, StringAmbiguousHex) {
    // "\x6BANANA" is ambiguous: it can be interpreted as starting with any of
    // '\x6', '\x6B', or '\x6BA'.
 
    // a hex escape sequence followed by a decimal digit
   EXPECT_EQ("\"0\\x12\" \"3\"", Print(::std::string("0\x12" "3")));
    // a hex escape sequence followed by a hex digit (lower-case)
   EXPECT_EQ("\"mm\\x6\" \"bananas\"", Print(::std::string("mm\x6" "bananas")));
    // a hex escape sequence followed by a hex digit (upper-case)
   EXPECT_EQ("\"NOM\\x6\" \"BANANA\"", Print(::std::string("NOM\x6" "BANANA")));
    // a hex escape sequence followed by a non-xdigit
   EXPECT_EQ("\"!\\x5-!\"", Print(::std::string("!\x5-!")));
 }
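
  // The printed forms above rely on C++ adjacent string-literal
  // concatenation: pasting "\"0\\x12\" \"3\"" back into source gives the two
  // literals "0\x12" "3", which concatenate to the original string without
  // the '3' being absorbed into the hex escape.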
 
 // Tests printing ::wstring and ::std::wstring.
 
 #if GTEST_HAS_GLOBAL_WSTRING
 // ::wstring.
 TEST(PrintWideStringTest, StringInGlobalNamespace) {
   const wchar_t s[] = L"'\"?\\\a\b\f\n\0\r\t\v\xD3\x576\x8D3\xC74D a";
   const ::wstring str(s, sizeof(s)/sizeof(wchar_t));
   EXPECT_EQ("L\"'\\\"?\\\\\\a\\b\\f\\n\\0\\r\\t\\v"
             "\\xD3\\x576\\x8D3\\xC74D a\\0\"",
             Print(str));
 }
 #endif  // GTEST_HAS_GLOBAL_WSTRING
 
 #if GTEST_HAS_STD_WSTRING
 // ::std::wstring.
 TEST(PrintWideStringTest, StringInStdNamespace) {
   const wchar_t s[] = L"'\"?\\\a\b\f\n\0\r\t\v\xD3\x576\x8D3\xC74D a";
   const ::std::wstring str(s, sizeof(s)/sizeof(wchar_t));
   EXPECT_EQ("L\"'\\\"?\\\\\\a\\b\\f\\n\\0\\r\\t\\v"
             "\\xD3\\x576\\x8D3\\xC74D a\\0\"",
             Print(str));
 }
 
 TEST(PrintWideStringTest, StringAmbiguousHex) {
   // same for wide strings.
   EXPECT_EQ("L\"0\\x12\" L\"3\"", Print(::std::wstring(L"0\x12" L"3")));
   EXPECT_EQ("L\"mm\\x6\" L\"bananas\"",
             Print(::std::wstring(L"mm\x6" L"bananas")));
   EXPECT_EQ("L\"NOM\\x6\" L\"BANANA\"",
             Print(::std::wstring(L"NOM\x6" L"BANANA")));
   EXPECT_EQ("L\"!\\x5-!\"", Print(::std::wstring(L"!\x5-!")));
 }
 #endif  // GTEST_HAS_STD_WSTRING
 
 // Tests printing types that support generic streaming (i.e. streaming
 // to std::basic_ostream<Char, CharTraits> for any valid Char and
 // CharTraits types).
 
 // Tests printing a non-template type that supports generic streaming.
 
 class AllowsGenericStreaming {};
 
 template <typename Char, typename CharTraits>
 std::basic_ostream<Char, CharTraits>& operator<<(
     std::basic_ostream<Char, CharTraits>& os,
     const AllowsGenericStreaming& /* a */) {
   return os << "AllowsGenericStreaming";
 }
 
 TEST(PrintTypeWithGenericStreamingTest, NonTemplateType) {
   AllowsGenericStreaming a;
   EXPECT_EQ("AllowsGenericStreaming", Print(a));
 }
 
 // Tests printing a template type that supports generic streaming.
 
 template <typename T>
 class AllowsGenericStreamingTemplate {};
 
 template <typename Char, typename CharTraits, typename T>
 std::basic_ostream<Char, CharTraits>& operator<<(
     std::basic_ostream<Char, CharTraits>& os,
     const AllowsGenericStreamingTemplate<T>& /* a */) {
   return os << "AllowsGenericStreamingTemplate";
 }
 
 TEST(PrintTypeWithGenericStreamingTest, TemplateType) {
   AllowsGenericStreamingTemplate<int> a;
   EXPECT_EQ("AllowsGenericStreamingTemplate", Print(a));
 }
 
 // Tests printing a type that supports generic streaming and can be
 // implicitly converted to another printable type.
 
 template <typename T>
 class AllowsGenericStreamingAndImplicitConversionTemplate {
  public:
   operator bool() const { return false; }
 };
 
 template <typename Char, typename CharTraits, typename T>
 std::basic_ostream<Char, CharTraits>& operator<<(
     std::basic_ostream<Char, CharTraits>& os,
     const AllowsGenericStreamingAndImplicitConversionTemplate<T>& /* a */) {
   return os << "AllowsGenericStreamingAndImplicitConversionTemplate";
 }
 
 TEST(PrintTypeWithGenericStreamingTest, TypeImplicitlyConvertible) {
   AllowsGenericStreamingAndImplicitConversionTemplate<int> a;
   EXPECT_EQ("AllowsGenericStreamingAndImplicitConversionTemplate", Print(a));
 }
 
 #if GTEST_HAS_ABSL
 
 // Tests printing ::absl::string_view.
 
 TEST(PrintStringViewTest, SimpleStringView) {
   const ::absl::string_view sp = "Hello";
   EXPECT_EQ("\"Hello\"", Print(sp));
 }
 
 TEST(PrintStringViewTest, UnprintableCharacters) {
   const char str[] = "NUL (\0) and \r\t";
   const ::absl::string_view sp(str, sizeof(str) - 1);
   EXPECT_EQ("\"NUL (\\0) and \\r\\t\"", Print(sp));
 }
 
 #endif  // GTEST_HAS_ABSL
 
 // Tests printing STL containers.
 
 TEST(PrintStlContainerTest, EmptyDeque) {
   deque<char> empty;
   EXPECT_EQ("{}", Print(empty));
 }
 
 TEST(PrintStlContainerTest, NonEmptyDeque) {
   deque<int> non_empty;
   non_empty.push_back(1);
   non_empty.push_back(3);
   EXPECT_EQ("{ 1, 3 }", Print(non_empty));
 }
 
 #if GTEST_HAS_UNORDERED_MAP_
 
 TEST(PrintStlContainerTest, OneElementHashMap) {
   ::std::unordered_map<int, char> map1;
   map1[1] = 'a';
   EXPECT_EQ("{ (1, 'a' (97, 0x61)) }", Print(map1));
 }
 
 TEST(PrintStlContainerTest, HashMultiMap) {
   ::std::unordered_multimap<int, bool> map1;
   map1.insert(make_pair(5, true));
   map1.insert(make_pair(5, false));
 
    // Elements of an unordered_multimap can be printed in any order.
   const std::string result = Print(map1);
   EXPECT_TRUE(result == "{ (5, true), (5, false) }" ||
               result == "{ (5, false), (5, true) }")
                   << " where Print(map1) returns \"" << result << "\".";
 }
 
 #endif  // GTEST_HAS_UNORDERED_MAP_
 
 #if GTEST_HAS_UNORDERED_SET_
 
 TEST(PrintStlContainerTest, HashSet) {
   ::std::unordered_set<int> set1;
   set1.insert(1);
   EXPECT_EQ("{ 1 }", Print(set1));
 }
 
 TEST(PrintStlContainerTest, HashMultiSet) {
   const int kSize = 5;
   int a[kSize] = { 1, 1, 2, 5, 1 };
   ::std::unordered_multiset<int> set1(a, a + kSize);
 
    // Elements of an unordered_multiset can be printed in any order.
   const std::string result = Print(set1);
   const std::string expected_pattern = "{ d, d, d, d, d }";  // d means a digit.
 
   // Verifies the result matches the expected pattern; also extracts
   // the numbers in the result.
   ASSERT_EQ(expected_pattern.length(), result.length());
   std::vector<int> numbers;
   for (size_t i = 0; i != result.length(); i++) {
     if (expected_pattern[i] == 'd') {
       ASSERT_NE(isdigit(static_cast<unsigned char>(result[i])), 0);
       numbers.push_back(result[i] - '0');
     } else {
       EXPECT_EQ(expected_pattern[i], result[i]) << " where result is "
                                                 << result;
     }
   }
 
   // Makes sure the result contains the right numbers.
   std::sort(numbers.begin(), numbers.end());
   std::sort(a, a + kSize);
   EXPECT_TRUE(std::equal(a, a + kSize, numbers.begin()));
 }
 
 #endif  //  GTEST_HAS_UNORDERED_SET_
 
 TEST(PrintStlContainerTest, List) {
   const std::string a[] = {"hello", "world"};
   const list<std::string> strings(a, a + 2);
   EXPECT_EQ("{ \"hello\", \"world\" }", Print(strings));
 }
 
 TEST(PrintStlContainerTest, Map) {
   map<int, bool> map1;
   map1[1] = true;
   map1[5] = false;
   map1[3] = true;
   EXPECT_EQ("{ (1, true), (3, true), (5, false) }", Print(map1));
 }
 
 TEST(PrintStlContainerTest, MultiMap) {
   multimap<bool, int> map1;
    // The make_pair template function would deduce the type as
    // pair<bool, int> here, and since the key part of a multimap has to
    // be constant, without a templated ctor in the pair class (as in
    // libCstd on Solaris) the make_pair call would fail to compile, as no
    // implicit conversion is found.  Thus an explicit pair type is used
    // here instead.
   map1.insert(pair<const bool, int>(true, 0));
   map1.insert(pair<const bool, int>(true, 1));
   map1.insert(pair<const bool, int>(false, 2));
   EXPECT_EQ("{ (false, 2), (true, 0), (true, 1) }", Print(map1));
 }
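
  // (With a standard-conforming library the shorter
  //  map1.insert(make_pair(true, 0)); would also compile, because std::pair
  //  has a converting constructor template; the explicit pair<const bool, int>
  //  spelling above is kept only for the non-conforming libCstd case the
  //  comment mentions.)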
 
 TEST(PrintStlContainerTest, Set) {
   const unsigned int a[] = { 3, 0, 5 };
   set<unsigned int> set1(a, a + 3);
   EXPECT_EQ("{ 0, 3, 5 }", Print(set1));
 }
 
 TEST(PrintStlContainerTest, MultiSet) {
   const int a[] = { 1, 1, 2, 5, 1 };
   multiset<int> set1(a, a + 5);
   EXPECT_EQ("{ 1, 1, 1, 2, 5 }", Print(set1));
 }
 
 #if GTEST_HAS_STD_FORWARD_LIST_
 // <slist> is available on Linux in the google3 mode, but not on
 // Windows or Mac OS X.
 
 TEST(PrintStlContainerTest, SinglyLinkedList) {
   int a[] = { 9, 2, 8 };
   const std::forward_list<int> ints(a, a + 3);
   EXPECT_EQ("{ 9, 2, 8 }", Print(ints));
 }
 #endif  // GTEST_HAS_STD_FORWARD_LIST_
 
 TEST(PrintStlContainerTest, Pair) {
   pair<const bool, int> p(true, 5);
   EXPECT_EQ("(true, 5)", Print(p));
 }
 
 TEST(PrintStlContainerTest, Vector) {
   vector<int> v;
   v.push_back(1);
   v.push_back(2);
   EXPECT_EQ("{ 1, 2 }", Print(v));
 }
 
 TEST(PrintStlContainerTest, LongSequence) {
   const int a[100] = { 1, 2, 3 };
   const vector<int> v(a, a + 100);
   EXPECT_EQ("{ 1, 2, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, "
             "0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ... }", Print(v));
 }
 
 TEST(PrintStlContainerTest, NestedContainer) {
   const int a1[] = { 1, 2 };
   const int a2[] = { 3, 4, 5 };
   const list<int> l1(a1, a1 + 2);
   const list<int> l2(a2, a2 + 3);
 
   vector<list<int> > v;
   v.push_back(l1);
   v.push_back(l2);
   EXPECT_EQ("{ { 1, 2 }, { 3, 4, 5 } }", Print(v));
 }
 
 TEST(PrintStlContainerTest, OneDimensionalNativeArray) {
   const int a[3] = { 1, 2, 3 };
   NativeArray<int> b(a, 3, RelationToSourceReference());
   EXPECT_EQ("{ 1, 2, 3 }", Print(b));
 }
 
 TEST(PrintStlContainerTest, TwoDimensionalNativeArray) {
   const int a[2][3] = { { 1, 2, 3 }, { 4, 5, 6 } };
   NativeArray<int[3]> b(a, 2, RelationToSourceReference());
   EXPECT_EQ("{ { 1, 2, 3 }, { 4, 5, 6 } }", Print(b));
 }
 
 // Tests that a class named iterator isn't treated as a container.
 
 struct iterator {
   char x;
 };
 
 TEST(PrintStlContainerTest, Iterator) {
   iterator it = {};
   EXPECT_EQ("1-byte object <00>", Print(it));
 }
 
 // Tests that a class named const_iterator isn't treated as a container.
 
 struct const_iterator {
   char x;
 };
 
 TEST(PrintStlContainerTest, ConstIterator) {
   const_iterator it = {};
   EXPECT_EQ("1-byte object <00>", Print(it));
 }
 
 #if GTEST_HAS_TR1_TUPLE
 // Tests printing ::std::tr1::tuples.
 
 // Tuples of various arities.
 TEST(PrintTr1TupleTest, VariousSizes) {
   ::std::tr1::tuple<> t0;
   EXPECT_EQ("()", Print(t0));
 
   ::std::tr1::tuple<int> t1(5);
   EXPECT_EQ("(5)", Print(t1));
 
   ::std::tr1::tuple<char, bool> t2('a', true);
   EXPECT_EQ("('a' (97, 0x61), true)", Print(t2));
 
   ::std::tr1::tuple<bool, int, int> t3(false, 2, 3);
   EXPECT_EQ("(false, 2, 3)", Print(t3));
 
   ::std::tr1::tuple<bool, int, int, int> t4(false, 2, 3, 4);
   EXPECT_EQ("(false, 2, 3, 4)", Print(t4));
 
   ::std::tr1::tuple<bool, int, int, int, bool> t5(false, 2, 3, 4, true);
   EXPECT_EQ("(false, 2, 3, 4, true)", Print(t5));
 
   ::std::tr1::tuple<bool, int, int, int, bool, int> t6(false, 2, 3, 4, true, 6);
   EXPECT_EQ("(false, 2, 3, 4, true, 6)", Print(t6));
 
   ::std::tr1::tuple<bool, int, int, int, bool, int, int> t7(
       false, 2, 3, 4, true, 6, 7);
   EXPECT_EQ("(false, 2, 3, 4, true, 6, 7)", Print(t7));
 
   ::std::tr1::tuple<bool, int, int, int, bool, int, int, bool> t8(
       false, 2, 3, 4, true, 6, 7, true);
   EXPECT_EQ("(false, 2, 3, 4, true, 6, 7, true)", Print(t8));
 
   ::std::tr1::tuple<bool, int, int, int, bool, int, int, bool, int> t9(
       false, 2, 3, 4, true, 6, 7, true, 9);
   EXPECT_EQ("(false, 2, 3, 4, true, 6, 7, true, 9)", Print(t9));
 
   const char* const str = "8";
    // VC++ 2010's implementation of C++0x tuples is deficient, requiring
    // an explicit type cast of NULL to be used.
   ::std::tr1::tuple<bool, char, short, testing::internal::Int32,  // NOLINT
                     testing::internal::Int64, float, double, const char*, void*,
                     std::string>
       t10(false, 'a', static_cast<short>(3), 4, 5, 1.5F, -2.5, str,  // NOLINT
           ImplicitCast_<void*>(NULL), "10");
   EXPECT_EQ("(false, 'a' (97, 0x61), 3, 4, 5, 1.5, -2.5, " + PrintPointer(str) +
             " pointing to \"8\", NULL, \"10\")",
             Print(t10));
 }
 
 // Nested tuples.
 TEST(PrintTr1TupleTest, NestedTuple) {
   ::std::tr1::tuple< ::std::tr1::tuple<int, bool>, char> nested(
       ::std::tr1::make_tuple(5, true), 'a');
   EXPECT_EQ("((5, true), 'a' (97, 0x61))", Print(nested));
 }
 
 #endif  // GTEST_HAS_TR1_TUPLE
 
 #if GTEST_HAS_STD_TUPLE_
 // Tests printing ::std::tuples.
 
 // Tuples of various arities.
 TEST(PrintStdTupleTest, VariousSizes) {
   ::std::tuple<> t0;
   EXPECT_EQ("()", Print(t0));
 
   ::std::tuple<int> t1(5);
   EXPECT_EQ("(5)", Print(t1));
 
   ::std::tuple<char, bool> t2('a', true);
   EXPECT_EQ("('a' (97, 0x61), true)", Print(t2));
 
   ::std::tuple<bool, int, int> t3(false, 2, 3);
   EXPECT_EQ("(false, 2, 3)", Print(t3));
 
   ::std::tuple<bool, int, int, int> t4(false, 2, 3, 4);
   EXPECT_EQ("(false, 2, 3, 4)", Print(t4));
 
   ::std::tuple<bool, int, int, int, bool> t5(false, 2, 3, 4, true);
   EXPECT_EQ("(false, 2, 3, 4, true)", Print(t5));
 
   ::std::tuple<bool, int, int, int, bool, int> t6(false, 2, 3, 4, true, 6);
   EXPECT_EQ("(false, 2, 3, 4, true, 6)", Print(t6));
 
   ::std::tuple<bool, int, int, int, bool, int, int> t7(
       false, 2, 3, 4, true, 6, 7);
   EXPECT_EQ("(false, 2, 3, 4, true, 6, 7)", Print(t7));
 
   ::std::tuple<bool, int, int, int, bool, int, int, bool> t8(
       false, 2, 3, 4, true, 6, 7, true);
   EXPECT_EQ("(false, 2, 3, 4, true, 6, 7, true)", Print(t8));
 
   ::std::tuple<bool, int, int, int, bool, int, int, bool, int> t9(
       false, 2, 3, 4, true, 6, 7, true, 9);
   EXPECT_EQ("(false, 2, 3, 4, true, 6, 7, true, 9)", Print(t9));
 
   const char* const str = "8";
    // VC++ 2010's implementation of C++0x tuples is deficient, requiring
    // an explicit type cast of NULL to be used.
   ::std::tuple<bool, char, short, testing::internal::Int32,  // NOLINT
                testing::internal::Int64, float, double, const char*, void*,
                std::string>
       t10(false, 'a', static_cast<short>(3), 4, 5, 1.5F, -2.5, str,  // NOLINT
           ImplicitCast_<void*>(NULL), "10");
   EXPECT_EQ("(false, 'a' (97, 0x61), 3, 4, 5, 1.5, -2.5, " + PrintPointer(str) +
             " pointing to \"8\", NULL, \"10\")",
             Print(t10));
 }
 
 // Nested tuples.
 TEST(PrintStdTupleTest, NestedTuple) {
   ::std::tuple< ::std::tuple<int, bool>, char> nested(
       ::std::make_tuple(5, true), 'a');
   EXPECT_EQ("((5, true), 'a' (97, 0x61))", Print(nested));
 }
 
 #endif  // GTEST_HAS_STD_TUPLE_
 
 #if GTEST_LANG_CXX11
 TEST(PrintNullptrT, Basic) {
   EXPECT_EQ("(nullptr)", Print(nullptr));
 }
 #endif  // GTEST_LANG_CXX11
 
 // Tests printing user-defined unprintable types.
 
 // Unprintable types in the global namespace.
 TEST(PrintUnprintableTypeTest, InGlobalNamespace) {
   EXPECT_EQ("1-byte object <00>",
             Print(UnprintableTemplateInGlobal<char>()));
 }
 
 // Unprintable types in a user namespace.
 TEST(PrintUnprintableTypeTest, InUserNamespace) {
   EXPECT_EQ("16-byte object <EF-12 00-00 34-AB 00-00 00-00 00-00 00-00 00-00>",
             Print(::foo::UnprintableInFoo()));
 }
 
 // Unprintable types that are too big to be printed completely.
 
 struct Big {
   Big() { memset(array, 0, sizeof(array)); }
   char array[257];
 };
 
 TEST(PrintUnprintableTypeTest, BigObject) {
   EXPECT_EQ("257-byte object <00-00 00-00 00-00 00-00 00-00 00-00 "
             "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 "
             "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 "
             "00-00 00-00 00-00 00-00 00-00 00-00 ... 00-00 00-00 00-00 "
             "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 "
             "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 "
             "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00>",
             Print(Big()));
 }
 
 // Tests printing user-defined streamable types.
 
 // Streamable types in the global namespace.
 TEST(PrintStreamableTypeTest, InGlobalNamespace) {
   StreamableInGlobal x;
   EXPECT_EQ("StreamableInGlobal", Print(x));
   EXPECT_EQ("StreamableInGlobal*", Print(&x));
 }
 
 // Printable template types in a user namespace.
 TEST(PrintStreamableTypeTest, TemplateTypeInUserNamespace) {
   EXPECT_EQ("StreamableTemplateInFoo: 0",
             Print(::foo::StreamableTemplateInFoo<int>()));
 }
 
 // Tests printing a user-defined recursive container type that has a <<
 // operator.
 TEST(PrintStreamableTypeTest, PathLikeInUserNamespace) {
   ::foo::PathLike x;
   EXPECT_EQ("Streamable-PathLike", Print(x));
   const ::foo::PathLike cx;
   EXPECT_EQ("Streamable-PathLike", Print(cx));
 }
 
 // Tests printing user-defined types that have a PrintTo() function.
 TEST(PrintPrintableTypeTest, InUserNamespace) {
   EXPECT_EQ("PrintableViaPrintTo: 0",
             Print(::foo::PrintableViaPrintTo()));
 }
 
 // Tests printing a pointer to a user-defined type that has a <<
 // operator for its pointer.
 TEST(PrintPrintableTypeTest, PointerInUserNamespace) {
   ::foo::PointerPrintable x;
   EXPECT_EQ("PointerPrintable*", Print(&x));
 }
 
 // Tests printing user-defined class template that have a PrintTo() function.
 TEST(PrintPrintableTypeTest, TemplateInUserNamespace) {
   EXPECT_EQ("PrintableViaPrintToTemplate: 5",
             Print(::foo::PrintableViaPrintToTemplate<int>(5)));
 }
 
 // Tests that the universal printer prints both the address and the
 // value of a reference.
 TEST(PrintReferenceTest, PrintsAddressAndValue) {
   int n = 5;
   EXPECT_EQ("@" + PrintPointer(&n) + " 5", PrintByRef(n));
 
   int a[2][3] = {
     { 0, 1, 2 },
     { 3, 4, 5 }
   };
   EXPECT_EQ("@" + PrintPointer(a) + " { { 0, 1, 2 }, { 3, 4, 5 } }",
             PrintByRef(a));
 
   const ::foo::UnprintableInFoo x;
   EXPECT_EQ("@" + PrintPointer(&x) + " 16-byte object "
             "<EF-12 00-00 34-AB 00-00 00-00 00-00 00-00 00-00>",
             PrintByRef(x));
 }
 
 // Tests that the universal printer prints a function pointer passed by
 // reference.
 TEST(PrintReferenceTest, HandlesFunctionPointer) {
   void (*fp)(int n) = &MyFunction;
   const std::string fp_pointer_string =
       PrintPointer(reinterpret_cast<const void*>(&fp));
   // We cannot directly cast &MyFunction to const void* because the
   // standard disallows casting between pointers to functions and
   // pointers to objects, and some compilers (e.g. GCC 3.4) enforce
   // this limitation.
   const std::string fp_string = PrintPointer(reinterpret_cast<const void*>(
       reinterpret_cast<internal::BiggestInt>(fp)));
   EXPECT_EQ("@" + fp_pointer_string + " " + fp_string,
             PrintByRef(fp));
 }
 
 // Tests that the universal printer prints a member function pointer
 // passed by reference.
 TEST(PrintReferenceTest, HandlesMemberFunctionPointer) {
   int (Foo::*p)(char ch) = &Foo::MyMethod;
   EXPECT_TRUE(HasPrefix(
       PrintByRef(p),
       "@" + PrintPointer(reinterpret_cast<const void*>(&p)) + " " +
           Print(sizeof(p)) + "-byte object "));
 
   char (Foo::*p2)(int n) = &Foo::MyVirtualMethod;
   EXPECT_TRUE(HasPrefix(
       PrintByRef(p2),
       "@" + PrintPointer(reinterpret_cast<const void*>(&p2)) + " " +
           Print(sizeof(p2)) + "-byte object "));
 }
 
 // Tests that the universal printer prints a member variable pointer
 // passed by reference.
 TEST(PrintReferenceTest, HandlesMemberVariablePointer) {
-  int (Foo::*p) = &Foo::value;  // NOLINT
+  int Foo::*p = &Foo::value;  // NOLINT
   EXPECT_TRUE(HasPrefix(
       PrintByRef(p),
       "@" + PrintPointer(&p) + " " + Print(sizeof(p)) + "-byte object "));
 }
 
 // Tests that FormatForComparisonFailureMessage(), which is used to print
 // an operand in a comparison assertion (e.g. ASSERT_EQ) when the assertion
 // fails, formats the operand in the desired way.
 
 // scalar
 TEST(FormatForComparisonFailureMessageTest, WorksForScalar) {
   EXPECT_STREQ("123",
                FormatForComparisonFailureMessage(123, 124).c_str());
 }
 
 // non-char pointer
 TEST(FormatForComparisonFailureMessageTest, WorksForNonCharPointer) {
   int n = 0;
   EXPECT_EQ(PrintPointer(&n),
             FormatForComparisonFailureMessage(&n, &n).c_str());
 }
 
 // non-char array
 TEST(FormatForComparisonFailureMessageTest, FormatsNonCharArrayAsPointer) {
   // In expression 'array == x', 'array' is compared by pointer.
   // Therefore we want to print an array operand as a pointer.
   int n[] = { 1, 2, 3 };
   EXPECT_EQ(PrintPointer(n),
             FormatForComparisonFailureMessage(n, n).c_str());
 }
 
 // Tests formatting a char pointer when it's compared with another pointer.
 // In this case we want to print it as a raw pointer, as the comparison is by
 // pointer.
 
 // char pointer vs pointer
 TEST(FormatForComparisonFailureMessageTest, WorksForCharPointerVsPointer) {
   // In expression 'p == x', where 'p' and 'x' are (const or not) char
   // pointers, the operands are compared by pointer.  Therefore we
   // want to print 'p' as a pointer instead of a C string (we don't
   // even know if it's supposed to point to a valid C string).
 
   // const char*
   const char* s = "hello";
   EXPECT_EQ(PrintPointer(s),
             FormatForComparisonFailureMessage(s, s).c_str());
 
   // char*
   char ch = 'a';
   EXPECT_EQ(PrintPointer(&ch),
             FormatForComparisonFailureMessage(&ch, &ch).c_str());
 }
 
 // wchar_t pointer vs pointer
 TEST(FormatForComparisonFailureMessageTest, WorksForWCharPointerVsPointer) {
   // In expression 'p == x', where 'p' and 'x' are (const or not) wchar_t
   // pointers, the operands are compared by pointer.  Therefore we
   // want to print 'p' as a pointer instead of a wide C string (we don't
   // even know if it's supposed to point to a valid wide C string).
 
   // const wchar_t*
   const wchar_t* s = L"hello";
   EXPECT_EQ(PrintPointer(s),
             FormatForComparisonFailureMessage(s, s).c_str());
 
   // wchar_t*
   wchar_t ch = L'a';
   EXPECT_EQ(PrintPointer(&ch),
             FormatForComparisonFailureMessage(&ch, &ch).c_str());
 }
 
 // Tests formatting a char pointer when it's compared to a string object.
 // In this case we want to print the char pointer as a C string.
 
 #if GTEST_HAS_GLOBAL_STRING
 // char pointer vs ::string
 TEST(FormatForComparisonFailureMessageTest, WorksForCharPointerVsString) {
   const char* s = "hello \"world";
   EXPECT_STREQ("\"hello \\\"world\"",  // The string content should be escaped.
                FormatForComparisonFailureMessage(s, ::string()).c_str());
 
   // char*
   char str[] = "hi\1";
   char* p = str;
   EXPECT_STREQ("\"hi\\x1\"",  // The string content should be escaped.
                FormatForComparisonFailureMessage(p, ::string()).c_str());
 }
 #endif
 
 // char pointer vs std::string
 TEST(FormatForComparisonFailureMessageTest, WorksForCharPointerVsStdString) {
   const char* s = "hello \"world";
   EXPECT_STREQ("\"hello \\\"world\"",  // The string content should be escaped.
                FormatForComparisonFailureMessage(s, ::std::string()).c_str());
 
   // char*
   char str[] = "hi\1";
   char* p = str;
   EXPECT_STREQ("\"hi\\x1\"",  // The string content should be escaped.
                FormatForComparisonFailureMessage(p, ::std::string()).c_str());
 }
 
 #if GTEST_HAS_GLOBAL_WSTRING
 // wchar_t pointer vs ::wstring
 TEST(FormatForComparisonFailureMessageTest, WorksForWCharPointerVsWString) {
   const wchar_t* s = L"hi \"world";
   EXPECT_STREQ("L\"hi \\\"world\"",  // The string content should be escaped.
                FormatForComparisonFailureMessage(s, ::wstring()).c_str());
 
   // wchar_t*
   wchar_t str[] = L"hi\1";
   wchar_t* p = str;
   EXPECT_STREQ("L\"hi\\x1\"",  // The string content should be escaped.
                FormatForComparisonFailureMessage(p, ::wstring()).c_str());
 }
 #endif
 
 #if GTEST_HAS_STD_WSTRING
 // wchar_t pointer vs std::wstring
 TEST(FormatForComparisonFailureMessageTest, WorksForWCharPointerVsStdWString) {
   const wchar_t* s = L"hi \"world";
   EXPECT_STREQ("L\"hi \\\"world\"",  // The string content should be escaped.
                FormatForComparisonFailureMessage(s, ::std::wstring()).c_str());
 
   // wchar_t*
   wchar_t str[] = L"hi\1";
   wchar_t* p = str;
   EXPECT_STREQ("L\"hi\\x1\"",  // The string content should be escaped.
                FormatForComparisonFailureMessage(p, ::std::wstring()).c_str());
 }
 #endif
 
 // Tests formatting a char array when it's compared with a pointer or array.
 // In this case we want to print the array as a raw pointer, as the comparison
 // is by pointer.
 
 // char array vs pointer
 TEST(FormatForComparisonFailureMessageTest, WorksForCharArrayVsPointer) {
   char str[] = "hi \"world\"";
   char* p = NULL;
   EXPECT_EQ(PrintPointer(str),
             FormatForComparisonFailureMessage(str, p).c_str());
 }
 
 // char array vs char array
 TEST(FormatForComparisonFailureMessageTest, WorksForCharArrayVsCharArray) {
   const char str[] = "hi \"world\"";
   EXPECT_EQ(PrintPointer(str),
             FormatForComparisonFailureMessage(str, str).c_str());
 }
 
 // wchar_t array vs pointer
 TEST(FormatForComparisonFailureMessageTest, WorksForWCharArrayVsPointer) {
   wchar_t str[] = L"hi \"world\"";
   wchar_t* p = NULL;
   EXPECT_EQ(PrintPointer(str),
             FormatForComparisonFailureMessage(str, p).c_str());
 }
 
 // wchar_t array vs wchar_t array
 TEST(FormatForComparisonFailureMessageTest, WorksForWCharArrayVsWCharArray) {
   const wchar_t str[] = L"hi \"world\"";
   EXPECT_EQ(PrintPointer(str),
             FormatForComparisonFailureMessage(str, str).c_str());
 }
 
 // Tests formatting a char array when it's compared with a string object.
 // In this case we want to print the array as a C string.
 
 #if GTEST_HAS_GLOBAL_STRING
 // char array vs string
 TEST(FormatForComparisonFailureMessageTest, WorksForCharArrayVsString) {
   const char str[] = "hi \"w\0rld\"";
   EXPECT_STREQ("\"hi \\\"w\"",  // The content should be escaped.
                                 // Embedded NUL terminates the string.
                FormatForComparisonFailureMessage(str, ::string()).c_str());
 }
 #endif
 
 // char array vs std::string
 TEST(FormatForComparisonFailureMessageTest, WorksForCharArrayVsStdString) {
   const char str[] = "hi \"world\"";
   EXPECT_STREQ("\"hi \\\"world\\\"\"",  // The content should be escaped.
                FormatForComparisonFailureMessage(str, ::std::string()).c_str());
 }
 
 #if GTEST_HAS_GLOBAL_WSTRING
 // wchar_t array vs wstring
 TEST(FormatForComparisonFailureMessageTest, WorksForWCharArrayVsWString) {
   const wchar_t str[] = L"hi \"world\"";
   EXPECT_STREQ("L\"hi \\\"world\\\"\"",  // The content should be escaped.
                FormatForComparisonFailureMessage(str, ::wstring()).c_str());
 }
 #endif
 
 #if GTEST_HAS_STD_WSTRING
 // wchar_t array vs std::wstring
 TEST(FormatForComparisonFailureMessageTest, WorksForWCharArrayVsStdWString) {
   const wchar_t str[] = L"hi \"w\0rld\"";
   EXPECT_STREQ(
       "L\"hi \\\"w\"",  // The content should be escaped.
                         // Embedded NUL terminates the string.
       FormatForComparisonFailureMessage(str, ::std::wstring()).c_str());
 }
 #endif
 
 // Useful for testing PrintToString().  We cannot use EXPECT_EQ()
 // there as its implementation uses PrintToString().  The caller must
 // ensure that 'value' has no side effect.
 #define EXPECT_PRINT_TO_STRING_(value, expected_string)         \
   EXPECT_TRUE(PrintToString(value) == (expected_string))        \
       << " where " #value " prints as " << (PrintToString(value))
 
 TEST(PrintToStringTest, WorksForScalar) {
   EXPECT_PRINT_TO_STRING_(123, "123");
 }
 
 TEST(PrintToStringTest, WorksForPointerToConstChar) {
   const char* p = "hello";
   EXPECT_PRINT_TO_STRING_(p, "\"hello\"");
 }
 
 TEST(PrintToStringTest, WorksForPointerToNonConstChar) {
   char s[] = "hello";
   char* p = s;
   EXPECT_PRINT_TO_STRING_(p, "\"hello\"");
 }
 
 TEST(PrintToStringTest, EscapesForPointerToConstChar) {
   const char* p = "hello\n";
   EXPECT_PRINT_TO_STRING_(p, "\"hello\\n\"");
 }
 
 TEST(PrintToStringTest, EscapesForPointerToNonConstChar) {
   char s[] = "hello\1";
   char* p = s;
   EXPECT_PRINT_TO_STRING_(p, "\"hello\\x1\"");
 }
 
 TEST(PrintToStringTest, WorksForArray) {
   int n[3] = { 1, 2, 3 };
   EXPECT_PRINT_TO_STRING_(n, "{ 1, 2, 3 }");
 }
 
 TEST(PrintToStringTest, WorksForCharArray) {
   char s[] = "hello";
   EXPECT_PRINT_TO_STRING_(s, "\"hello\"");
 }
 
 TEST(PrintToStringTest, WorksForCharArrayWithEmbeddedNul) {
   const char str_with_nul[] = "hello\0 world";
   EXPECT_PRINT_TO_STRING_(str_with_nul, "\"hello\\0 world\"");
 
   char mutable_str_with_nul[] = "hello\0 world";
   EXPECT_PRINT_TO_STRING_(mutable_str_with_nul, "\"hello\\0 world\"");
 }
 
 TEST(PrintToStringTest, ContainsNonLatin) {
   // Sanity test with valid UTF-8. Prints both in hex and as text.
   std::string non_ascii_str = ::std::string("오전 4:30");
   EXPECT_PRINT_TO_STRING_(non_ascii_str,
                           "\"\\xEC\\x98\\xA4\\xEC\\xA0\\x84 4:30\"\n"
                           "    As Text: \"오전 4:30\"");
   non_ascii_str = ::std::string("From ä — ẑ");
   EXPECT_PRINT_TO_STRING_(non_ascii_str,
                           "\"From \\xC3\\xA4 \\xE2\\x80\\x94 \\xE1\\xBA\\x91\""
                           "\n    As Text: \"From ä — ẑ\"");
 }
 
 TEST(IsValidUTF8Test, IllFormedUTF8) {
   // The following test strings are ill-formed UTF-8 and are printed
   // as hex only (or ASCII, in case of ASCII bytes) because IsValidUTF8() is
   // expected to fail, thus output does not contain "As Text:".
 
   static const char *const kTestdata[][2] = {
     // 2-byte lead byte followed by a single-byte character.
     {"\xC3\x74", "\"\\xC3t\""},
     // Valid 2-byte character followed by an orphan trail byte.
     {"\xC3\x84\xA4", "\"\\xC3\\x84\\xA4\""},
     // Lead byte without trail byte.
     {"abc\xC3", "\"abc\\xC3\""},
     // 3-byte lead byte, single-byte character, orphan trail byte.
     {"x\xE2\x70\x94", "\"x\\xE2p\\x94\""},
     // Truncated 3-byte character.
     {"\xE2\x80", "\"\\xE2\\x80\""},
     // Truncated 3-byte character followed by valid 2-byte char.
     {"\xE2\x80\xC3\x84", "\"\\xE2\\x80\\xC3\\x84\""},
     // Truncated 3-byte character followed by a single-byte character.
     {"\xE2\x80\x7A", "\"\\xE2\\x80z\""},
     // 3-byte lead byte followed by valid 3-byte character.
     {"\xE2\xE2\x80\x94", "\"\\xE2\\xE2\\x80\\x94\""},
     // 4-byte lead byte followed by valid 3-byte character.
     {"\xF0\xE2\x80\x94", "\"\\xF0\\xE2\\x80\\x94\""},
     // Truncated 4-byte character.
     {"\xF0\xE2\x80", "\"\\xF0\\xE2\\x80\""},
      // Invalid UTF-8 byte sequences embedded in other chars.
     {"abc\xE2\x80\x94\xC3\x74xyc", "\"abc\\xE2\\x80\\x94\\xC3txyc\""},
     {"abc\xC3\x84\xE2\x80\xC3\x84xyz",
      "\"abc\\xC3\\x84\\xE2\\x80\\xC3\\x84xyz\""},
     // Non-shortest UTF-8 byte sequences are also ill-formed.
     // The classics: xC0, xC1 lead byte.
     {"\xC0\x80", "\"\\xC0\\x80\""},
     {"\xC1\x81", "\"\\xC1\\x81\""},
     // Non-shortest sequences.
     {"\xE0\x80\x80", "\"\\xE0\\x80\\x80\""},
     {"\xf0\x80\x80\x80", "\"\\xF0\\x80\\x80\\x80\""},
     // Last valid code point before surrogate range, should be printed as text,
     // too.
     {"\xED\x9F\xBF", "\"\\xED\\x9F\\xBF\"\n    As Text: \"퟿\""},
     // Start of surrogate lead. Surrogates are not printed as text.
     {"\xED\xA0\x80", "\"\\xED\\xA0\\x80\""},
     // Last non-private surrogate lead.
     {"\xED\xAD\xBF", "\"\\xED\\xAD\\xBF\""},
     // First private-use surrogate lead.
     {"\xED\xAE\x80", "\"\\xED\\xAE\\x80\""},
     // Last private-use surrogate lead.
     {"\xED\xAF\xBF", "\"\\xED\\xAF\\xBF\""},
     // Mid-point of surrogate trail.
     {"\xED\xB3\xBF", "\"\\xED\\xB3\\xBF\""},
     // First valid code point after surrogate range, should be printed as text,
     // too.
     {"\xEE\x80\x80", "\"\\xEE\\x80\\x80\"\n    As Text: \"\""}
   };
 
   for (int i = 0; i < int(sizeof(kTestdata)/sizeof(kTestdata[0])); ++i) {
     EXPECT_PRINT_TO_STRING_(kTestdata[i][0], kTestdata[i][1]);
   }
 }
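
 // The entries above are rejected by the standard UTF-8 well-formedness
 // rules.  For illustration only, a self-contained validator for those rules
 // could look like the sketch below (a hypothetical helper written for this
 // comment; it is NOT Google Test's IsValidUTF8() and is not used by any
 // test in this file).
 inline bool SketchIsValidUtf8(const unsigned char* data, int len) {
   int i = 0;
   while (i < len) {
     const unsigned int lead = data[i];
     int n = 0;            // Expected length of this byte sequence.
     unsigned int cp = 0;  // Decoded code point.
     if (lead < 0x80) {
       n = 1; cp = lead;
     } else if ((lead & 0xE0) == 0xC0) {
       n = 2; cp = lead & 0x1F;
     } else if ((lead & 0xF0) == 0xE0) {
       n = 3; cp = lead & 0x0F;
     } else if ((lead & 0xF8) == 0xF0) {
       n = 4; cp = lead & 0x07;
     } else {
       return false;  // Orphan trail byte or invalid lead byte.
     }
     if (i + n > len) return false;  // Truncated sequence.
     for (int k = 1; k < n; ++k) {
       if ((data[i + k] & 0xC0) != 0x80) return false;  // Missing trail byte.
       cp = (cp << 6) | (data[i + k] & 0x3F);
     }
     // Reject non-shortest forms, UTF-16 surrogates, and out-of-range values.
     static const unsigned int kMinCodePoint[] = { 0, 0, 0x80, 0x800, 0x10000 };
     if (cp < kMinCodePoint[n] || cp > 0x10FFFF ||
         (0xD800 <= cp && cp <= 0xDFFF)) {
       return false;
     }
     i += n;
   }
   return true;
 }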
 
 #undef EXPECT_PRINT_TO_STRING_
 
 TEST(UniversalTersePrintTest, WorksForNonReference) {
   ::std::stringstream ss;
   UniversalTersePrint(123, &ss);
   EXPECT_EQ("123", ss.str());
 }
 
 TEST(UniversalTersePrintTest, WorksForReference) {
   const int& n = 123;
   ::std::stringstream ss;
   UniversalTersePrint(n, &ss);
   EXPECT_EQ("123", ss.str());
 }
 
 TEST(UniversalTersePrintTest, WorksForCString) {
   const char* s1 = "abc";
   ::std::stringstream ss1;
   UniversalTersePrint(s1, &ss1);
   EXPECT_EQ("\"abc\"", ss1.str());
 
   char* s2 = const_cast<char*>(s1);
   ::std::stringstream ss2;
   UniversalTersePrint(s2, &ss2);
   EXPECT_EQ("\"abc\"", ss2.str());
 
   const char* s3 = NULL;
   ::std::stringstream ss3;
   UniversalTersePrint(s3, &ss3);
   EXPECT_EQ("NULL", ss3.str());
 }
 
 TEST(UniversalPrintTest, WorksForNonReference) {
   ::std::stringstream ss;
   UniversalPrint(123, &ss);
   EXPECT_EQ("123", ss.str());
 }
 
 TEST(UniversalPrintTest, WorksForReference) {
   const int& n = 123;
   ::std::stringstream ss;
   UniversalPrint(n, &ss);
   EXPECT_EQ("123", ss.str());
 }
 
 TEST(UniversalPrintTest, WorksForCString) {
   const char* s1 = "abc";
   ::std::stringstream ss1;
   UniversalPrint(s1, &ss1);
   EXPECT_EQ(PrintPointer(s1) + " pointing to \"abc\"", std::string(ss1.str()));
 
   char* s2 = const_cast<char*>(s1);
   ::std::stringstream ss2;
   UniversalPrint(s2, &ss2);
   EXPECT_EQ(PrintPointer(s2) + " pointing to \"abc\"", std::string(ss2.str()));
 
   const char* s3 = NULL;
   ::std::stringstream ss3;
   UniversalPrint(s3, &ss3);
   EXPECT_EQ("NULL", ss3.str());
 }
 
 TEST(UniversalPrintTest, WorksForCharArray) {
   const char str[] = "\"Line\0 1\"\nLine 2";
   ::std::stringstream ss1;
   UniversalPrint(str, &ss1);
   EXPECT_EQ("\"\\\"Line\\0 1\\\"\\nLine 2\"", ss1.str());
 
   const char mutable_str[] = "\"Line\0 1\"\nLine 2";
   ::std::stringstream ss2;
   UniversalPrint(mutable_str, &ss2);
   EXPECT_EQ("\"\\\"Line\\0 1\\\"\\nLine 2\"", ss2.str());
 }
 
 #if GTEST_HAS_TR1_TUPLE
 
 TEST(UniversalTersePrintTupleFieldsToStringsTestWithTr1, PrintsEmptyTuple) {
   Strings result = UniversalTersePrintTupleFieldsToStrings(
       ::std::tr1::make_tuple());
   EXPECT_EQ(0u, result.size());
 }
 
 TEST(UniversalTersePrintTupleFieldsToStringsTestWithTr1, PrintsOneTuple) {
   Strings result = UniversalTersePrintTupleFieldsToStrings(
       ::std::tr1::make_tuple(1));
   ASSERT_EQ(1u, result.size());
   EXPECT_EQ("1", result[0]);
 }
 
 TEST(UniversalTersePrintTupleFieldsToStringsTestWithTr1, PrintsTwoTuple) {
   Strings result = UniversalTersePrintTupleFieldsToStrings(
       ::std::tr1::make_tuple(1, 'a'));
   ASSERT_EQ(2u, result.size());
   EXPECT_EQ("1", result[0]);
   EXPECT_EQ("'a' (97, 0x61)", result[1]);
 }
 
 TEST(UniversalTersePrintTupleFieldsToStringsTestWithTr1, PrintsTersely) {
   const int n = 1;
   Strings result = UniversalTersePrintTupleFieldsToStrings(
       ::std::tr1::tuple<const int&, const char*>(n, "a"));
   ASSERT_EQ(2u, result.size());
   EXPECT_EQ("1", result[0]);
   EXPECT_EQ("\"a\"", result[1]);
 }
 
 #endif  // GTEST_HAS_TR1_TUPLE
 
 #if GTEST_HAS_STD_TUPLE_
 
 TEST(UniversalTersePrintTupleFieldsToStringsTestWithStd, PrintsEmptyTuple) {
   Strings result = UniversalTersePrintTupleFieldsToStrings(::std::make_tuple());
   EXPECT_EQ(0u, result.size());
 }
 
 TEST(UniversalTersePrintTupleFieldsToStringsTestWithStd, PrintsOneTuple) {
   Strings result = UniversalTersePrintTupleFieldsToStrings(
       ::std::make_tuple(1));
   ASSERT_EQ(1u, result.size());
   EXPECT_EQ("1", result[0]);
 }
 
 TEST(UniversalTersePrintTupleFieldsToStringsTestWithStd, PrintsTwoTuple) {
   Strings result = UniversalTersePrintTupleFieldsToStrings(
       ::std::make_tuple(1, 'a'));
   ASSERT_EQ(2u, result.size());
   EXPECT_EQ("1", result[0]);
   EXPECT_EQ("'a' (97, 0x61)", result[1]);
 }
 
 TEST(UniversalTersePrintTupleFieldsToStringsTestWithStd, PrintsTersely) {
   const int n = 1;
   Strings result = UniversalTersePrintTupleFieldsToStrings(
       ::std::tuple<const int&, const char*>(n, "a"));
   ASSERT_EQ(2u, result.size());
   EXPECT_EQ("1", result[0]);
   EXPECT_EQ("\"a\"", result[1]);
 }
 
 #endif  // GTEST_HAS_STD_TUPLE_
 
 #if GTEST_HAS_ABSL
 
 TEST(PrintOptionalTest, Basic) {
   absl::optional<int> value;
   EXPECT_EQ("(nullopt)", PrintToString(value));
   value = {7};
   EXPECT_EQ("(7)", PrintToString(value));
   EXPECT_EQ("(1.1)", PrintToString(absl::optional<double>{1.1}));
   EXPECT_EQ("(\"A\")", PrintToString(absl::optional<std::string>{"A"}));
 }
 #endif  // GTEST_HAS_ABSL
 
 }  // namespace gtest_printers_test
 }  // namespace testing
diff --git a/googletest/test/gtest_shuffle_test.py b/googletest/test/googletest-shuffle-test.py
similarity index 98%
rename from googletest/test/gtest_shuffle_test.py
rename to googletest/test/googletest-shuffle-test.py
index 30d0303d..5ae96552 100755
--- a/googletest/test/gtest_shuffle_test.py
+++ b/googletest/test/googletest-shuffle-test.py
@@ -1,325 +1,325 @@
 #!/usr/bin/env python
 #
 # Copyright 2009 Google Inc. All Rights Reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following disclaimer
 # in the documentation and/or other materials provided with the
 # distribution.
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 """Verifies that test shuffling works."""
 
 __author__ = 'wan@google.com (Zhanyong Wan)'
 
 import os
 import gtest_test_utils
 
-# Command to run the gtest_shuffle_test_ program.
-COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_shuffle_test_')
+# Command to run the googletest-shuffle-test_ program.
+COMMAND = gtest_test_utils.GetTestExecutablePath('googletest-shuffle-test_')
 
 # The environment variables for test sharding.
 TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
 SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
 
 TEST_FILTER = 'A*.A:A*.B:C*'
 
 ALL_TESTS = []
 ACTIVE_TESTS = []
 FILTERED_TESTS = []
 SHARDED_TESTS = []
 
 SHUFFLED_ALL_TESTS = []
 SHUFFLED_ACTIVE_TESTS = []
 SHUFFLED_FILTERED_TESTS = []
 SHUFFLED_SHARDED_TESTS = []
 
 
 def AlsoRunDisabledTestsFlag():
   return '--gtest_also_run_disabled_tests'
 
 
 def FilterFlag(test_filter):
   return '--gtest_filter=%s' % (test_filter,)
 
 
 def RepeatFlag(n):
   return '--gtest_repeat=%s' % (n,)
 
 
 def ShuffleFlag():
   return '--gtest_shuffle'
 
 
 def RandomSeedFlag(n):
   return '--gtest_random_seed=%s' % (n,)
 
 
 def RunAndReturnOutput(extra_env, args):
   """Runs the test program and returns its output."""
 
   environ_copy = os.environ.copy()
   environ_copy.update(extra_env)
 
   return gtest_test_utils.Subprocess([COMMAND] + args, env=environ_copy).output
 
 
 def GetTestsForAllIterations(extra_env, args):
   """Runs the test program and returns a list of test lists.
 
   Args:
     extra_env: a map from environment variables to their values
-    args: command line flags to pass to gtest_shuffle_test_
+    args: command line flags to pass to googletest-shuffle-test_
 
   Returns:
     A list where the i-th element is the list of tests run in the i-th
     test iteration.
   """
 
   test_iterations = []
   for line in RunAndReturnOutput(extra_env, args).split('\n'):
     if line.startswith('----'):
       tests = []
       test_iterations.append(tests)
     elif line.strip():
       tests.append(line.strip())  # 'TestCaseName.TestName'
 
   return test_iterations
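
 # An illustrative example (not actual googletest-shuffle-test_ output): if
 # the program printed
 #
 #   ----
 #   A.T1
 #   A.T2
 #   ----
 #   B.T1
 #
 # then GetTestsForAllIterations would return [['A.T1', 'A.T2'], ['B.T1']].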
 
 
 def GetTestCases(tests):
   """Returns a list of test cases in the given full test names.
 
   Args:
     tests: a list of full test names
 
   Returns:
     A list of the test cases in 'tests', in the order of their first
     appearance.  Duplicates are removed (each test case is listed only once).
   """
 
   test_cases = []
   for test in tests:
     test_case = test.split('.')[0]
     if not test_case in test_cases:
       test_cases.append(test_case)
 
   return test_cases
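
 # For example (illustrative), GetTestCases(['A.T1', 'A.T2', 'B.T1'])
 # returns ['A', 'B'].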
 
 
 def CalculateTestLists():
   """Calculates the list of tests run under different flags."""
 
   if not ALL_TESTS:
     ALL_TESTS.extend(
         GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0])
 
   if not ACTIVE_TESTS:
     ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0])
 
   if not FILTERED_TESTS:
     FILTERED_TESTS.extend(
         GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0])
 
   if not SHARDED_TESTS:
     SHARDED_TESTS.extend(
         GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                   SHARD_INDEX_ENV_VAR: '1'},
                                  [])[0])
 
   if not SHUFFLED_ALL_TESTS:
     SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations(
         {}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0])
 
   if not SHUFFLED_ACTIVE_TESTS:
     SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations(
         {}, [ShuffleFlag(), RandomSeedFlag(1)])[0])
 
   if not SHUFFLED_FILTERED_TESTS:
     SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations(
         {}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0])
 
   if not SHUFFLED_SHARDED_TESTS:
     SHUFFLED_SHARDED_TESTS.extend(
         GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                   SHARD_INDEX_ENV_VAR: '1'},
                                  [ShuffleFlag(), RandomSeedFlag(1)])[0])
 
 
 class GTestShuffleUnitTest(gtest_test_utils.TestCase):
   """Tests test shuffling."""
 
   def setUp(self):
     CalculateTestLists()
 
   def testShufflePreservesNumberOfTests(self):
     self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS))
     self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS))
     self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS))
     self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))
 
   def testShuffleChangesTestOrder(self):
     self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
     self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)
     self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,
                  SHUFFLED_FILTERED_TESTS)
     self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,
                  SHUFFLED_SHARDED_TESTS)
 
   def testShuffleChangesTestCaseOrder(self):
     self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
                  GetTestCases(SHUFFLED_ALL_TESTS))
     self.assert_(
         GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),
         GetTestCases(SHUFFLED_ACTIVE_TESTS))
     self.assert_(
         GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),
         GetTestCases(SHUFFLED_FILTERED_TESTS))
     self.assert_(
         GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),
         GetTestCases(SHUFFLED_SHARDED_TESTS))
 
   def testShuffleDoesNotRepeatTest(self):
     for test in SHUFFLED_ALL_TESTS:
       self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),
                        '%s appears more than once' % (test,))
     for test in SHUFFLED_ACTIVE_TESTS:
       self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),
                        '%s appears more than once' % (test,))
     for test in SHUFFLED_FILTERED_TESTS:
       self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),
                        '%s appears more than once' % (test,))
     for test in SHUFFLED_SHARDED_TESTS:
       self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),
                        '%s appears more than once' % (test,))
 
   def testShuffleDoesNotCreateNewTest(self):
     for test in SHUFFLED_ALL_TESTS:
       self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))
     for test in SHUFFLED_ACTIVE_TESTS:
       self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
     for test in SHUFFLED_FILTERED_TESTS:
       self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
     for test in SHUFFLED_SHARDED_TESTS:
       self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))
 
   def testShuffleIncludesAllTests(self):
     for test in ALL_TESTS:
       self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
     for test in ACTIVE_TESTS:
       self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
     for test in FILTERED_TESTS:
       self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))
     for test in SHARDED_TESTS:
       self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))
 
   def testShuffleLeavesDeathTestsAtFront(self):
     non_death_test_found = False
     for test in SHUFFLED_ACTIVE_TESTS:
       if 'DeathTest.' in test:
         self.assert_(not non_death_test_found,
                      '%s appears after a non-death test' % (test,))
       else:
         non_death_test_found = True
 
   def _VerifyTestCasesDoNotInterleave(self, tests):
     test_cases = []
     for test in tests:
       [test_case, _] = test.split('.')
       if not test_cases or test_cases[-1] != test_case:
         test_cases.append(test_case)
         self.assertEqual(1, test_cases.count(test_case),
                          'Test case %s is not grouped together in %s' %
                          (test_case, tests))
 
   def testShuffleDoesNotInterleaveTestCases(self):
     self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)
     self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS)
     self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS)
     self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS)
 
   def testShuffleRestoresOrderAfterEachIteration(self):
     # Get the test lists in all 3 iterations, using random seed 1, 2,
     # and 3 respectively.  Google Test picks a different seed in each
     # iteration, and this test depends on the current implementation
     # picking successive numbers.  This dependency is not ideal, but
     # makes the test much easier to write.
     [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
         GetTestsForAllIterations(
             {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
 
     # Make sure running the tests with random seed 1 gets the same
     # order as in iteration 1 above.
     [tests_with_seed1] = GetTestsForAllIterations(
         {}, [ShuffleFlag(), RandomSeedFlag(1)])
     self.assertEqual(tests_in_iteration1, tests_with_seed1)
 
     # Make sure running the tests with random seed 2 gets the same
     # order as in iteration 2 above.  Success means that Google Test
     # correctly restores the test order before re-shuffling at the
     # beginning of iteration 2.
     [tests_with_seed2] = GetTestsForAllIterations(
         {}, [ShuffleFlag(), RandomSeedFlag(2)])
     self.assertEqual(tests_in_iteration2, tests_with_seed2)
 
     # Make sure running the tests with random seed 3 gets the same
     # order as in iteration 3 above.  Success means that Google Test
     # correctly restores the test order before re-shuffling at the
     # beginning of iteration 3.
     [tests_with_seed3] = GetTestsForAllIterations(
         {}, [ShuffleFlag(), RandomSeedFlag(3)])
     self.assertEqual(tests_in_iteration3, tests_with_seed3)
 
   def testShuffleGeneratesNewOrderInEachIteration(self):
     [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
         GetTestsForAllIterations(
             {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
 
     self.assert_(tests_in_iteration1 != tests_in_iteration2,
                  tests_in_iteration1)
     self.assert_(tests_in_iteration1 != tests_in_iteration3,
                  tests_in_iteration1)
     self.assert_(tests_in_iteration2 != tests_in_iteration3,
                  tests_in_iteration2)
 
   def testShuffleShardedTestsPreservesPartition(self):
     # If we run M tests on N shards, the same M tests should be run in
     # total, regardless of the random seeds used by the shards.
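     # For example (illustrative): concatenating the test lists of the three
     # shards and sorting the result must reproduce the sorted list of
     # ACTIVE_TESTS, no matter which random seed each shard used.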
     [tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                          SHARD_INDEX_ENV_VAR: '0'},
                                         [ShuffleFlag(), RandomSeedFlag(1)])
     [tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                          SHARD_INDEX_ENV_VAR: '1'},
                                         [ShuffleFlag(), RandomSeedFlag(20)])
     [tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                          SHARD_INDEX_ENV_VAR: '2'},
                                         [ShuffleFlag(), RandomSeedFlag(25)])
     sorted_sharded_tests = tests1 + tests2 + tests3
     sorted_sharded_tests.sort()
     sorted_active_tests = []
     sorted_active_tests.extend(ACTIVE_TESTS)
     sorted_active_tests.sort()
     self.assertEqual(sorted_active_tests, sorted_sharded_tests)
 
 if __name__ == '__main__':
   gtest_test_utils.Main()
diff --git a/googletest/test/gtest_shuffle_test_.cc b/googletest/test/googletest-shuffle-test_.cc
similarity index 100%
rename from googletest/test/gtest_shuffle_test_.cc
rename to googletest/test/googletest-shuffle-test_.cc
diff --git a/googletest/test/gtest-test-part_test.cc b/googletest/test/googletest-test-part-test.cc
similarity index 100%
rename from googletest/test/gtest-test-part_test.cc
rename to googletest/test/googletest-test-part-test.cc
diff --git a/googletest/test/gtest-param-test2_test.cc b/googletest/test/googletest-test2_test.cc
similarity index 87%
rename from googletest/test/gtest-param-test2_test.cc
rename to googletest/test/googletest-test2_test.cc
index c3b2d189..e50c259b 100644
--- a/googletest/test/gtest-param-test2_test.cc
+++ b/googletest/test/googletest-test2_test.cc
@@ -1,61 +1,61 @@
 // Copyright 2008, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 // Author: vladl@google.com (Vlad Losev)
 //
 // Tests for Google Test itself.  This verifies that the basic constructs of
 // Google Test work.
 
 #include "gtest/gtest.h"
-#include "gtest-param-test_test.h"
+#include "googletest-param-test-test.h"
 
 using ::testing::Values;
 using ::testing::internal::ParamGenerator;
 
 // Tests that generators defined in a different translation unit
-// are functional. The test using extern_gen is defined
-// in gtest-param-test_test.cc.
-ParamGenerator<int> extern_gen = Values(33);
+// are functional. The test using extern_gen_2 is defined
+// in googletest-param-test-test.cc.
+ParamGenerator<int> extern_gen_2 = Values(33);
 
 // Tests that a parameterized test case can be defined in one translation unit
-// and instantiated in another. The test is defined in gtest-param-test_test.cc
+// and instantiated in another. The test is defined in googletest-param-test-test.cc
 // and ExternalInstantiationTest fixture class is defined in
 // googletest-param-test-test.h.
 INSTANTIATE_TEST_CASE_P(MultiplesOf33,
                         ExternalInstantiationTest,
                         Values(33, 66));
 
 // Tests that a parameterized test case can be instantiated
 // in multiple translation units. Another instantiation is defined
-// in gtest-param-test_test.cc and InstantiationInMultipleTranslaionUnitsTest
+// in googletest-param-test-test.cc and InstantiationInMultipleTranslaionUnitsTest
 // fixture is defined in googletest-param-test-test.h
 INSTANTIATE_TEST_CASE_P(Sequence2,
                         InstantiationInMultipleTranslaionUnitsTest,
                         Values(42*3, 42*4, 42*5));
 
diff --git a/googletest/test/gtest_throw_on_failure_test.py b/googletest/test/googletest-throw-on-failure-test.py
similarity index 94%
rename from googletest/test/gtest_throw_on_failure_test.py
rename to googletest/test/googletest-throw-on-failure-test.py
index 5678ffea..26ba32b9 100755
--- a/googletest/test/gtest_throw_on_failure_test.py
+++ b/googletest/test/googletest-throw-on-failure-test.py
@@ -1,171 +1,171 @@
 #!/usr/bin/env python
 #
 # Copyright 2009, Google Inc.
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following disclaimer
 # in the documentation and/or other materials provided with the
 # distribution.
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 """Tests Google Test's throw-on-failure mode with exceptions disabled.
 
-This script invokes gtest_throw_on_failure_test_ (a program written with
+This script invokes googletest-throw-on-failure-test_ (a program written with
 Google Test) with different environments and command line flags.
 """
 
 __author__ = 'wan@google.com (Zhanyong Wan)'
 
 import os
 import gtest_test_utils
 
 
 # Constants.
 
 # The command line flag for enabling/disabling the throw-on-failure mode.
 THROW_ON_FAILURE = 'gtest_throw_on_failure'
 
-# Path to the gtest_throw_on_failure_test_ program, compiled with
+# Path to the googletest-throw-on-failure-test_ program, compiled with
 # exceptions disabled.
 EXE_PATH = gtest_test_utils.GetTestExecutablePath(
-    'gtest_throw_on_failure_test_')
+    'googletest-throw-on-failure-test_')
 
 
 # Utilities.
 
 
 def SetEnvVar(env_var, value):
   """Sets an environment variable to a given value; unsets it when the
   given value is None.
   """
 
   env_var = env_var.upper()
   if value is not None:
     os.environ[env_var] = value
   elif env_var in os.environ:
     del os.environ[env_var]
 
 
 def Run(command):
   """Runs a command; returns True/False if its exit code is/isn't 0."""
 
   print 'Running "%s". . .' % ' '.join(command)
   p = gtest_test_utils.Subprocess(command)
   return p.exited and p.exit_code == 0
 
 
 # The tests.  TODO(wan@google.com): refactor the class to share common
-# logic with code in gtest_break_on_failure_unittest.py.
+# logic with code in googletest-break-on-failure-unittest.py.
 class ThrowOnFailureTest(gtest_test_utils.TestCase):
   """Tests the throw-on-failure mode."""
 
   def RunAndVerify(self, env_var_value, flag_value, should_fail):
-    """Runs gtest_throw_on_failure_test_ and verifies that it does
+    """Runs googletest-throw-on-failure-test_ and verifies that it does
     (or does not) exit with a non-zero code.
 
     Args:
       env_var_value:    value of the GTEST_THROW_ON_FAILURE environment
                         variable; None if the variable should be unset.
       flag_value:       value of the --gtest_throw_on_failure flag;
                         None if the flag should not be present.
       should_fail:      True iff the program is expected to fail.
     """
 
     SetEnvVar(THROW_ON_FAILURE, env_var_value)
 
     if env_var_value is None:
       env_var_value_msg = ' is not set'
     else:
       env_var_value_msg = '=' + env_var_value
 
     if flag_value is None:
       flag = ''
     elif flag_value == '0':
       flag = '--%s=0' % THROW_ON_FAILURE
     else:
       flag = '--%s' % THROW_ON_FAILURE
 
     command = [EXE_PATH]
     if flag:
       command.append(flag)
 
     if should_fail:
       should_or_not = 'should'
     else:
       should_or_not = 'should not'
 
     failed = not Run(command)
 
     SetEnvVar(THROW_ON_FAILURE, None)
 
     msg = ('when %s%s, an assertion failure in "%s" %s cause a non-zero '
            'exit code.' %
            (THROW_ON_FAILURE, env_var_value_msg, ' '.join(command),
             should_or_not))
     self.assert_(failed == should_fail, msg)
 
   def testDefaultBehavior(self):
     """Tests the behavior of the default mode."""
 
     self.RunAndVerify(env_var_value=None, flag_value=None, should_fail=False)
 
   def testThrowOnFailureEnvVar(self):
     """Tests using the GTEST_THROW_ON_FAILURE environment variable."""
 
     self.RunAndVerify(env_var_value='0',
                       flag_value=None,
                       should_fail=False)
     self.RunAndVerify(env_var_value='1',
                       flag_value=None,
                       should_fail=True)
 
   def testThrowOnFailureFlag(self):
     """Tests using the --gtest_throw_on_failure flag."""
 
     self.RunAndVerify(env_var_value=None,
                       flag_value='0',
                       should_fail=False)
     self.RunAndVerify(env_var_value=None,
                       flag_value='1',
                       should_fail=True)
 
   def testThrowOnFailureFlagOverridesEnvVar(self):
     """Tests that --gtest_throw_on_failure overrides GTEST_THROW_ON_FAILURE."""
 
     self.RunAndVerify(env_var_value='0',
                       flag_value='0',
                       should_fail=False)
     self.RunAndVerify(env_var_value='0',
                       flag_value='1',
                       should_fail=True)
     self.RunAndVerify(env_var_value='1',
                       flag_value='0',
                       should_fail=False)
     self.RunAndVerify(env_var_value='1',
                       flag_value='1',
                       should_fail=True)
 
 
 if __name__ == '__main__':
   gtest_test_utils.Main()
diff --git a/googletest/test/gtest_throw_on_failure_test_.cc b/googletest/test/googletest-throw-on-failure-test_.cc
similarity index 97%
rename from googletest/test/gtest_throw_on_failure_test_.cc
rename to googletest/test/googletest-throw-on-failure-test_.cc
index 2b88fe3d..0617c27a 100644
--- a/googletest/test/gtest_throw_on_failure_test_.cc
+++ b/googletest/test/googletest-throw-on-failure-test_.cc
@@ -1,72 +1,72 @@
 // Copyright 2009, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 // Author: wan@google.com (Zhanyong Wan)
 
 // Tests Google Test's throw-on-failure mode with exceptions disabled.
 //
 // This program must be compiled with exceptions disabled.  It will be
-// invoked by gtest_throw_on_failure_test.py, and is expected to exit
+// invoked by googletest-throw-on-failure-test.py, and is expected to exit
 // with non-zero in the throw-on-failure mode or 0 otherwise.
 
 #include "gtest/gtest.h"
 
 #include <stdio.h>                      // for fflush, fprintf, NULL, etc.
 #include <stdlib.h>                     // for exit
 #include <exception>                    // for set_terminate
 
 // This terminate handler aborts the program using exit() rather than abort().
 // This avoids showing pop-ups on Windows systems and core dumps on Unix-like
 // ones.
 void TerminateHandler() {
   fprintf(stderr, "%s\n", "Unhandled C++ exception terminating the program.");
   fflush(NULL);
   exit(1);
 }
 
 int main(int argc, char** argv) {
 #if GTEST_HAS_EXCEPTIONS
   std::set_terminate(&TerminateHandler);
 #endif
   testing::InitGoogleTest(&argc, argv);
 
   // We want to ensure that people can use Google Test assertions in
   // other testing frameworks, as long as they initialize Google Test
   // properly and set the throw-on-failure mode.  Therefore, we don't
   // use Google Test's constructs for defining and running tests
   // (e.g. TEST and RUN_ALL_TESTS) here.
 
   // In the throw-on-failure mode with exceptions disabled, this
   // assertion will cause the program to exit with a non-zero code.
   EXPECT_EQ(2, 3);
 
   // When not in the throw-on-failure mode, the control will reach
   // here.
   return 0;
 }
diff --git a/googletest/test/gtest-tuple_test.cc b/googletest/test/googletest-tuple-test.cc
similarity index 100%
rename from googletest/test/gtest-tuple_test.cc
rename to googletest/test/googletest-tuple-test.cc
diff --git a/googletest/test/gtest_uninitialized_test.py b/googletest/test/googletest-uninitialized-test.py
similarity index 96%
rename from googletest/test/gtest_uninitialized_test.py
rename to googletest/test/googletest-uninitialized-test.py
index ae91f2aa..e3df5faf 100755
--- a/googletest/test/gtest_uninitialized_test.py
+++ b/googletest/test/googletest-uninitialized-test.py
@@ -1,69 +1,69 @@
 #!/usr/bin/env python
 #
 # Copyright 2008, Google Inc.
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following disclaimer
 # in the documentation and/or other materials provided with the
 # distribution.
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 """Verifies that Google Test warns the user when not initialized properly."""
 
 __author__ = 'wan@google.com (Zhanyong Wan)'
 
 import gtest_test_utils
 
-COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_uninitialized_test_')
+COMMAND = gtest_test_utils.GetTestExecutablePath('googletest-uninitialized-test_')
 
 
 def Assert(condition):
   if not condition:
     raise AssertionError
 
 
 def AssertEq(expected, actual):
   if expected != actual:
     print 'Expected: %s' % (expected,)
     print '  Actual: %s' % (actual,)
     raise AssertionError
 
 
 def TestExitCodeAndOutput(command):
   """Runs the given command and verifies its exit code and output."""
 
   # If the program exited cleanly, it must at least have printed the
   # 'IMPORTANT NOTICE' warning about the missing InitGoogleTest() call.
   p = gtest_test_utils.Subprocess(command)
   if p.exited and p.exit_code == 0:
     Assert('IMPORTANT NOTICE' in p.output)
   Assert('InitGoogleTest' in p.output)
 
 
 class GTestUninitializedTest(gtest_test_utils.TestCase):
   def testExitCodeAndOutput(self):
     TestExitCodeAndOutput(COMMAND)
 
 
 if __name__ == '__main__':
   gtest_test_utils.Main()
diff --git a/googletest/test/gtest_uninitialized_test_.cc b/googletest/test/googletest-uninitialized-test_.cc
similarity index 100%
rename from googletest/test/gtest_uninitialized_test_.cc
rename to googletest/test/googletest-uninitialized-test_.cc
diff --git a/googletest/test/gtest_all_test.cc b/googletest/test/gtest_all_test.cc
index e6c1b018..656066d9 100644
--- a/googletest/test/gtest_all_test.cc
+++ b/googletest/test/gtest_all_test.cc
@@ -1,47 +1,47 @@
 // Copyright 2009, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 // Author: wan@google.com (Zhanyong Wan)
 //
 // Tests for Google C++ Testing and Mocking Framework (Google Test)
 //
 // Sometimes it's desirable to build most of Google Test's own tests
 // by compiling a single file.  This file serves this purpose.
-#include "gtest-filepath_test.cc"
-#include "gtest-linked_ptr_test.cc"
-#include "gtest-message_test.cc"
-#include "gtest-options_test.cc"
-#include "gtest-port_test.cc"
+#include "googletest-filepath-test.cc"
+#include "googletest-linked-ptr-test.cc"
+#include "googletest-message-test.cc"
+#include "googletest-options-test.cc"
+#include "googletest-port-test.cc"
 #include "gtest_pred_impl_unittest.cc"
 #include "gtest_prod_test.cc"
-#include "gtest-test-part_test.cc"
+#include "googletest-test-part-test.cc"
 #include "gtest-typed-test_test.cc"
 #include "gtest-typed-test2_test.cc"
 #include "gtest_unittest.cc"
 #include "production.cc"
diff --git a/googletest/test/gtest_test_macro_stack_footprint_test.cc b/googletest/test/gtest_test_macro_stack_footprint_test.cc
new file mode 100644
index 00000000..48958b85
--- /dev/null
+++ b/googletest/test/gtest_test_macro_stack_footprint_test.cc
@@ -0,0 +1,90 @@
+// Copyright 2013, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: wan@google.com (Zhanyong Wan)
+//
+// Each TEST() expands to some static registration logic.  GCC puts all
+// such static initialization logic for a translation unit in a common,
+// internal function.  Since Google's build system restricts how much
+// stack space a function can use, there's a limit on how many TEST()s
+// one can put in a single C++ test file.  This test ensures that a large
+// number of TEST()s can be defined in the same translation unit.
+
+#include "gtest/gtest.h"
+
+// This macro defines 10 dummy tests.
+#define TEN_TESTS_(test_case_name) \
+  TEST(test_case_name, T0) {} \
+  TEST(test_case_name, T1) {} \
+  TEST(test_case_name, T2) {} \
+  TEST(test_case_name, T3) {} \
+  TEST(test_case_name, T4) {} \
+  TEST(test_case_name, T5) {} \
+  TEST(test_case_name, T6) {} \
+  TEST(test_case_name, T7) {} \
+  TEST(test_case_name, T8) {} \
+  TEST(test_case_name, T9) {}
+
+// This macro defines 100 dummy tests.
+#define HUNDRED_TESTS_(test_case_name_prefix) \
+  TEN_TESTS_(test_case_name_prefix ## 0) \
+  TEN_TESTS_(test_case_name_prefix ## 1) \
+  TEN_TESTS_(test_case_name_prefix ## 2) \
+  TEN_TESTS_(test_case_name_prefix ## 3) \
+  TEN_TESTS_(test_case_name_prefix ## 4) \
+  TEN_TESTS_(test_case_name_prefix ## 5) \
+  TEN_TESTS_(test_case_name_prefix ## 6) \
+  TEN_TESTS_(test_case_name_prefix ## 7) \
+  TEN_TESTS_(test_case_name_prefix ## 8) \
+  TEN_TESTS_(test_case_name_prefix ## 9)
+
+// This macro defines 1000 dummy tests.
+#define THOUSAND_TESTS_(test_case_name_prefix) \
+  HUNDRED_TESTS_(test_case_name_prefix ## 0) \
+  HUNDRED_TESTS_(test_case_name_prefix ## 1) \
+  HUNDRED_TESTS_(test_case_name_prefix ## 2) \
+  HUNDRED_TESTS_(test_case_name_prefix ## 3) \
+  HUNDRED_TESTS_(test_case_name_prefix ## 4) \
+  HUNDRED_TESTS_(test_case_name_prefix ## 5) \
+  HUNDRED_TESTS_(test_case_name_prefix ## 6) \
+  HUNDRED_TESTS_(test_case_name_prefix ## 7) \
+  HUNDRED_TESTS_(test_case_name_prefix ## 8) \
+  HUNDRED_TESTS_(test_case_name_prefix ## 9)
+
+// Ensures that we can define 1000 TEST()s in the same translation
+// unit.
+THOUSAND_TESTS_(T)
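+
+// Illustrative expansion (an explanatory note, not part of the original
+// file): a single TEN_TESTS_ invocation such as TEN_TESTS_(T00) expands to
+//   TEST(T00, T0) {} TEST(T00, T1) {} ... TEST(T00, T9) {}
+// so THOUSAND_TESTS_(T) above defines test cases T00 through T99, each
+// containing tests T0 through T9, for a total of 1000 TEST()s in this
+// translation unit.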
+
+int main(int argc, char **argv) {
+  testing::InitGoogleTest(&argc, argv);
+
+  // We don't actually need to run the dummy tests - the purpose is to
+  // ensure that they compile.
+  return 0;
+}
diff --git a/googletest/test/gtest_unittest.cc b/googletest/test/gtest_unittest.cc
index 8ebb6674..46eb7b2f 100644
--- a/googletest/test/gtest_unittest.cc
+++ b/googletest/test/gtest_unittest.cc
@@ -1,7797 +1,7797 @@
 // Copyright 2005, Google Inc.
 // All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following disclaimer
 // in the documentation and/or other materials provided with the
 // distribution.
 //     * Neither the name of Google Inc. nor the names of its
 // contributors may be used to endorse or promote products derived from
 // this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //
 // Author: wan@google.com (Zhanyong Wan)
 //
 // Tests for Google Test itself.  This verifies that the basic constructs of
 // Google Test work.
 
 #include "gtest/gtest.h"
 
 // Verifies that the command line flag variables can be accessed in
 // code once "gtest.h" has been #included.
 // Do not move it after other gtest #includes.
 TEST(CommandLineFlagsTest, CanBeAccessedInCodeOnceGTestHIsIncluded) {
   bool dummy = testing::GTEST_FLAG(also_run_disabled_tests)
       || testing::GTEST_FLAG(break_on_failure)
       || testing::GTEST_FLAG(catch_exceptions)
       || testing::GTEST_FLAG(color) != "unknown"
       || testing::GTEST_FLAG(filter) != "unknown"
       || testing::GTEST_FLAG(list_tests)
       || testing::GTEST_FLAG(output) != "unknown"
       || testing::GTEST_FLAG(print_time)
       || testing::GTEST_FLAG(random_seed)
       || testing::GTEST_FLAG(repeat) > 0
       || testing::GTEST_FLAG(show_internal_stack_frames)
       || testing::GTEST_FLAG(shuffle)
       || testing::GTEST_FLAG(stack_trace_depth) > 0
       || testing::GTEST_FLAG(stream_result_to) != "unknown"
       || testing::GTEST_FLAG(throw_on_failure);
   EXPECT_TRUE(dummy || !dummy);  // Suppresses warning that dummy is unused.
 }
 
 #include <limits.h>  // For INT_MAX.
 #include <stdlib.h>
 #include <string.h>
 #include <time.h>
 
 #include <map>
 #include <vector>
 #include <ostream>
 #if GTEST_LANG_CXX11
 #include <unordered_set>
 #endif  // GTEST_LANG_CXX11
 
 #include "gtest/gtest-spi.h"
 #include "src/gtest-internal-inl.h"
 
 namespace testing {
 namespace internal {
 
 #if GTEST_CAN_STREAM_RESULTS_
 
 class StreamingListenerTest : public Test {
  public:
   class FakeSocketWriter : public StreamingListener::AbstractSocketWriter {
    public:
     // Sends a string to the socket.
     virtual void Send(const std::string& message) { output_ += message; }
 
     std::string output_;
   };
 
   StreamingListenerTest()
       : fake_sock_writer_(new FakeSocketWriter),
         streamer_(fake_sock_writer_),
         test_info_obj_("FooTest", "Bar", NULL, NULL,
                        CodeLocation(__FILE__, __LINE__), 0, NULL) {}
 
  protected:
   std::string* output() { return &(fake_sock_writer_->output_); }
 
   FakeSocketWriter* const fake_sock_writer_;
   StreamingListener streamer_;
   UnitTest unit_test_;
   TestInfo test_info_obj_;  // The name test_info_ was taken by testing::Test.
 };
 
 TEST_F(StreamingListenerTest, OnTestProgramEnd) {
   *output() = "";
   streamer_.OnTestProgramEnd(unit_test_);
   EXPECT_EQ("event=TestProgramEnd&passed=1\n", *output());
 }
 
 TEST_F(StreamingListenerTest, OnTestIterationEnd) {
   *output() = "";
   streamer_.OnTestIterationEnd(unit_test_, 42);
   EXPECT_EQ("event=TestIterationEnd&passed=1&elapsed_time=0ms\n", *output());
 }
 
 TEST_F(StreamingListenerTest, OnTestCaseStart) {
   *output() = "";
   streamer_.OnTestCaseStart(TestCase("FooTest", "Bar", NULL, NULL));
   EXPECT_EQ("event=TestCaseStart&name=FooTest\n", *output());
 }
 
 TEST_F(StreamingListenerTest, OnTestCaseEnd) {
   *output() = "";
   streamer_.OnTestCaseEnd(TestCase("FooTest", "Bar", NULL, NULL));
   EXPECT_EQ("event=TestCaseEnd&passed=1&elapsed_time=0ms\n", *output());
 }
 
 TEST_F(StreamingListenerTest, OnTestStart) {
   *output() = "";
   streamer_.OnTestStart(test_info_obj_);
   EXPECT_EQ("event=TestStart&name=Bar\n", *output());
 }
 
 TEST_F(StreamingListenerTest, OnTestEnd) {
   *output() = "";
   streamer_.OnTestEnd(test_info_obj_);
   EXPECT_EQ("event=TestEnd&passed=1&elapsed_time=0ms\n", *output());
 }
 
 TEST_F(StreamingListenerTest, OnTestPartResult) {
   *output() = "";
   streamer_.OnTestPartResult(TestPartResult(
       TestPartResult::kFatalFailure, "foo.cc", 42, "failed=\n&%"));
 
   // Meta characters in the failure message should be properly escaped.
   EXPECT_EQ(
       "event=TestPartResult&file=foo.cc&line=42&message=failed%3D%0A%26%25\n",
       *output());
 }
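 
 // Illustrative note (not part of the original test): the escaping above
 // maps '=' to %3D, '\n' to %0A, '&' to %26, and '%' to %25, which is how
 // the message "failed=\n&%" becomes "failed%3D%0A%26%25".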
 
 #endif  // GTEST_CAN_STREAM_RESULTS_
 
 // Provides access to otherwise private parts of the TestEventListeners class
 // that are needed to test it.
 class TestEventListenersAccessor {
  public:
   static TestEventListener* GetRepeater(TestEventListeners* listeners) {
     return listeners->repeater();
   }
 
   static void SetDefaultResultPrinter(TestEventListeners* listeners,
                                       TestEventListener* listener) {
     listeners->SetDefaultResultPrinter(listener);
   }
   static void SetDefaultXmlGenerator(TestEventListeners* listeners,
                                      TestEventListener* listener) {
     listeners->SetDefaultXmlGenerator(listener);
   }
 
   static bool EventForwardingEnabled(const TestEventListeners& listeners) {
     return listeners.EventForwardingEnabled();
   }
 
   static void SuppressEventForwarding(TestEventListeners* listeners) {
     listeners->SuppressEventForwarding();
   }
 };
 
 class UnitTestRecordPropertyTestHelper : public Test {
  protected:
   UnitTestRecordPropertyTestHelper() {}
 
   // Forwards to UnitTest::RecordProperty() to bypass access controls.
   void UnitTestRecordProperty(const char* key, const std::string& value) {
     unit_test_.RecordProperty(key, value);
   }
 
   UnitTest unit_test_;
 };
 
 }  // namespace internal
 }  // namespace testing
 
 using testing::AssertionFailure;
 using testing::AssertionResult;
 using testing::AssertionSuccess;
 using testing::DoubleLE;
 using testing::EmptyTestEventListener;
 using testing::Environment;
 using testing::FloatLE;
 using testing::GTEST_FLAG(also_run_disabled_tests);
 using testing::GTEST_FLAG(break_on_failure);
 using testing::GTEST_FLAG(catch_exceptions);
 using testing::GTEST_FLAG(color);
 using testing::GTEST_FLAG(death_test_use_fork);
 using testing::GTEST_FLAG(filter);
 using testing::GTEST_FLAG(list_tests);
 using testing::GTEST_FLAG(output);
 using testing::GTEST_FLAG(print_time);
 using testing::GTEST_FLAG(random_seed);
 using testing::GTEST_FLAG(repeat);
 using testing::GTEST_FLAG(show_internal_stack_frames);
 using testing::GTEST_FLAG(shuffle);
 using testing::GTEST_FLAG(stack_trace_depth);
 using testing::GTEST_FLAG(stream_result_to);
 using testing::GTEST_FLAG(throw_on_failure);
 using testing::IsNotSubstring;
 using testing::IsSubstring;
 using testing::Message;
 using testing::ScopedFakeTestPartResultReporter;
 using testing::StaticAssertTypeEq;
 using testing::Test;
 using testing::TestCase;
 using testing::TestEventListeners;
 using testing::TestInfo;
 using testing::TestPartResult;
 using testing::TestPartResultArray;
 using testing::TestProperty;
 using testing::TestResult;
 using testing::TimeInMillis;
 using testing::UnitTest;
 using testing::internal::AddReference;
 using testing::internal::AlwaysFalse;
 using testing::internal::AlwaysTrue;
 using testing::internal::AppendUserMessage;
 using testing::internal::ArrayAwareFind;
 using testing::internal::ArrayEq;
 using testing::internal::CodePointToUtf8;
 using testing::internal::CompileAssertTypesEqual;
 using testing::internal::CopyArray;
 using testing::internal::CountIf;
 using testing::internal::EqFailure;
 using testing::internal::FloatingPoint;
 using testing::internal::ForEach;
 using testing::internal::FormatEpochTimeInMillisAsIso8601;
 using testing::internal::FormatTimeInMillisAsSeconds;
 using testing::internal::GTestFlagSaver;
 using testing::internal::GetCurrentOsStackTraceExceptTop;
 using testing::internal::GetElementOr;
 using testing::internal::GetNextRandomSeed;
 using testing::internal::GetRandomSeedFromFlag;
 using testing::internal::GetTestTypeId;
 using testing::internal::GetTimeInMillis;
 using testing::internal::GetTypeId;
 using testing::internal::GetUnitTestImpl;
 using testing::internal::ImplicitlyConvertible;
 using testing::internal::Int32;
 using testing::internal::Int32FromEnvOrDie;
 using testing::internal::IsAProtocolMessage;
 using testing::internal::IsContainer;
 using testing::internal::IsContainerTest;
 using testing::internal::IsNotContainer;
 using testing::internal::NativeArray;
 using testing::internal::OsStackTraceGetter;
 using testing::internal::OsStackTraceGetterInterface;
 using testing::internal::ParseInt32Flag;
 using testing::internal::RelationToSourceCopy;
 using testing::internal::RelationToSourceReference;
 using testing::internal::RemoveConst;
 using testing::internal::RemoveReference;
 using testing::internal::ShouldRunTestOnShard;
 using testing::internal::ShouldShard;
 using testing::internal::ShouldUseColor;
 using testing::internal::Shuffle;
 using testing::internal::ShuffleRange;
 using testing::internal::SkipPrefix;
 using testing::internal::StreamableToString;
 using testing::internal::String;
 using testing::internal::TestEventListenersAccessor;
 using testing::internal::TestResultAccessor;
 using testing::internal::UInt32;
 using testing::internal::UnitTestImpl;
 using testing::internal::WideStringToUtf8;
 using testing::internal::edit_distance::CalculateOptimalEdits;
 using testing::internal::edit_distance::CreateUnifiedDiff;
 using testing::internal::edit_distance::EditType;
 using testing::internal::kMaxRandomSeed;
 using testing::internal::kTestTypeIdInGoogleTest;
 using testing::kMaxStackTraceDepth;
 
 #if GTEST_HAS_STREAM_REDIRECTION
 using testing::internal::CaptureStdout;
 using testing::internal::GetCapturedStdout;
 #endif
 
 #if GTEST_IS_THREADSAFE
 using testing::internal::ThreadWithParam;
 #endif
 
 class TestingVector : public std::vector<int> {
 };
 
 ::std::ostream& operator<<(::std::ostream& os,
                            const TestingVector& vector) {
   os << "{ ";
   for (size_t i = 0; i < vector.size(); i++) {
     os << vector[i] << " ";
   }
   os << "}";
   return os;
 }
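 
 // Note (added for exposition, not part of the original file): the
 // operator<< above is what lets the shuffle tests below stream a
 // TestingVector into a failure message, e.g. via "<< vector_" in
 // ShufflesEntireVector or when a PRED assertion such as
 // ASSERT_PRED1(VectorIsNotCorrupt, vector_) fails.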
 
 // This line tests that we can define tests in an unnamed namespace.
 namespace {
 
 TEST(GetRandomSeedFromFlagTest, HandlesZero) {
   const int seed = GetRandomSeedFromFlag(0);
   EXPECT_LE(1, seed);
   EXPECT_LE(seed, static_cast<int>(kMaxRandomSeed));
 }
 
 TEST(GetRandomSeedFromFlagTest, PreservesValidSeed) {
   EXPECT_EQ(1, GetRandomSeedFromFlag(1));
   EXPECT_EQ(2, GetRandomSeedFromFlag(2));
   EXPECT_EQ(kMaxRandomSeed - 1, GetRandomSeedFromFlag(kMaxRandomSeed - 1));
   EXPECT_EQ(static_cast<int>(kMaxRandomSeed),
             GetRandomSeedFromFlag(kMaxRandomSeed));
 }
 
 TEST(GetRandomSeedFromFlagTest, NormalizesInvalidSeed) {
   const int seed1 = GetRandomSeedFromFlag(-1);
   EXPECT_LE(1, seed1);
   EXPECT_LE(seed1, static_cast<int>(kMaxRandomSeed));
 
   const int seed2 = GetRandomSeedFromFlag(kMaxRandomSeed + 1);
   EXPECT_LE(1, seed2);
   EXPECT_LE(seed2, static_cast<int>(kMaxRandomSeed));
 }
 
 TEST(GetNextRandomSeedTest, WorksForValidInput) {
   EXPECT_EQ(2, GetNextRandomSeed(1));
   EXPECT_EQ(3, GetNextRandomSeed(2));
   EXPECT_EQ(static_cast<int>(kMaxRandomSeed),
             GetNextRandomSeed(kMaxRandomSeed - 1));
   EXPECT_EQ(1, GetNextRandomSeed(kMaxRandomSeed));
 
   // We deliberately don't test GetNextRandomSeed() with invalid
   // inputs, as that requires death tests, which are expensive.  This
   // is fine as GetNextRandomSeed() is internal and has a
   // straightforward definition.
 }
 
 static void ClearCurrentTestPartResults() {
   TestResultAccessor::ClearTestPartResults(
       GetUnitTestImpl()->current_test_result());
 }
 
 // Tests GetTypeId.
 
 TEST(GetTypeIdTest, ReturnsSameValueForSameType) {
   EXPECT_EQ(GetTypeId<int>(), GetTypeId<int>());
   EXPECT_EQ(GetTypeId<Test>(), GetTypeId<Test>());
 }
 
 class SubClassOfTest : public Test {};
 class AnotherSubClassOfTest : public Test {};
 
 TEST(GetTypeIdTest, ReturnsDifferentValuesForDifferentTypes) {
   EXPECT_NE(GetTypeId<int>(), GetTypeId<const int>());
   EXPECT_NE(GetTypeId<int>(), GetTypeId<char>());
   EXPECT_NE(GetTypeId<int>(), GetTestTypeId());
   EXPECT_NE(GetTypeId<SubClassOfTest>(), GetTestTypeId());
   EXPECT_NE(GetTypeId<AnotherSubClassOfTest>(), GetTestTypeId());
   EXPECT_NE(GetTypeId<AnotherSubClassOfTest>(), GetTypeId<SubClassOfTest>());
 }
 
 // Verifies that GetTestTypeId() returns the same value whether it is
 // called from inside Google Test or outside of it.
 TEST(GetTestTypeIdTest, ReturnsTheSameValueInsideOrOutsideOfGoogleTest) {
   EXPECT_EQ(kTestTypeIdInGoogleTest, GetTestTypeId());
 }
 
 // Tests CanonicalizeForStdLibVersioning.
 
 using ::testing::internal::CanonicalizeForStdLibVersioning;
 
 TEST(CanonicalizeForStdLibVersioning, LeavesUnversionedNamesUnchanged) {
   EXPECT_EQ("std::bind", CanonicalizeForStdLibVersioning("std::bind"));
   EXPECT_EQ("std::_", CanonicalizeForStdLibVersioning("std::_"));
   EXPECT_EQ("std::__foo", CanonicalizeForStdLibVersioning("std::__foo"));
   EXPECT_EQ("gtl::__1::x", CanonicalizeForStdLibVersioning("gtl::__1::x"));
   EXPECT_EQ("__1::x", CanonicalizeForStdLibVersioning("__1::x"));
   EXPECT_EQ("::__1::x", CanonicalizeForStdLibVersioning("::__1::x"));
 }
 
 TEST(CanonicalizeForStdLibVersioning, ElidesDoubleUnderNames) {
   EXPECT_EQ("std::bind", CanonicalizeForStdLibVersioning("std::__1::bind"));
   EXPECT_EQ("std::_", CanonicalizeForStdLibVersioning("std::__1::_"));
 
   EXPECT_EQ("std::bind", CanonicalizeForStdLibVersioning("std::__g::bind"));
   EXPECT_EQ("std::_", CanonicalizeForStdLibVersioning("std::__g::_"));
 
   EXPECT_EQ("std::bind",
             CanonicalizeForStdLibVersioning("std::__google::bind"));
   EXPECT_EQ("std::_", CanonicalizeForStdLibVersioning("std::__google::_"));
 }
 
 // Tests FormatTimeInMillisAsSeconds().
 
 TEST(FormatTimeInMillisAsSecondsTest, FormatsZero) {
   EXPECT_EQ("0", FormatTimeInMillisAsSeconds(0));
 }
 
 TEST(FormatTimeInMillisAsSecondsTest, FormatsPositiveNumber) {
   EXPECT_EQ("0.003", FormatTimeInMillisAsSeconds(3));
   EXPECT_EQ("0.01", FormatTimeInMillisAsSeconds(10));
   EXPECT_EQ("0.2", FormatTimeInMillisAsSeconds(200));
   EXPECT_EQ("1.2", FormatTimeInMillisAsSeconds(1200));
   EXPECT_EQ("3", FormatTimeInMillisAsSeconds(3000));
 }
 
 TEST(FormatTimeInMillisAsSecondsTest, FormatsNegativeNumber) {
   EXPECT_EQ("-0.003", FormatTimeInMillisAsSeconds(-3));
   EXPECT_EQ("-0.01", FormatTimeInMillisAsSeconds(-10));
   EXPECT_EQ("-0.2", FormatTimeInMillisAsSeconds(-200));
   EXPECT_EQ("-1.2", FormatTimeInMillisAsSeconds(-1200));
   EXPECT_EQ("-3", FormatTimeInMillisAsSeconds(-3000));
 }
 
 // Tests FormatEpochTimeInMillisAsIso8601().  The correctness of conversion
 // for particular dates below was verified in Python using
 // datetime.datetime.utcfromtimestamp(<timestamp>/1000).
 
 // FormatEpochTimeInMillisAsIso8601 depends on the current timezone, so we
 // have to set up a particular timezone to obtain predictable results.
 class FormatEpochTimeInMillisAsIso8601Test : public Test {
  public:
   // On Cygwin, GCC doesn't allow unqualified integer literals to exceed
   // 32 bits, even when 64-bit integer types are available.  We have to
   // force the constants to have a 64-bit type here.
   static const TimeInMillis kMillisPerSec = 1000;
 
  private:
   virtual void SetUp() {
     saved_tz_ = NULL;
 
     GTEST_DISABLE_MSC_WARNINGS_PUSH_(4996 /* getenv, strdup: deprecated */)
     if (getenv("TZ"))
       saved_tz_ = strdup(getenv("TZ"));
     GTEST_DISABLE_MSC_WARNINGS_POP_()
 
     // Set up the time zone for FormatEpochTimeInMillisAsIso8601 to use.  We
     // cannot use the local time zone because the function's output depends
     // on the time zone.
     SetTimeZone("UTC+00");
   }
 
   virtual void TearDown() {
     SetTimeZone(saved_tz_);
     free(const_cast<char*>(saved_tz_));
     saved_tz_ = NULL;
   }
 
   static void SetTimeZone(const char* time_zone) {
     // tzset() distinguishes between the TZ variable being present and empty
     // and not being present, so we have to consider the case of time_zone
     // being NULL.
 #if _MSC_VER || GTEST_OS_WINDOWS_MINGW
     // ...Unless it's MSVC, whose standard library's _putenv doesn't
     // distinguish between an empty and a missing variable.
     const std::string env_var =
         std::string("TZ=") + (time_zone ? time_zone : "");
     _putenv(env_var.c_str());
     GTEST_DISABLE_MSC_WARNINGS_PUSH_(4996 /* deprecated function */)
     tzset();
     GTEST_DISABLE_MSC_WARNINGS_POP_()
 #else
     if (time_zone) {
       setenv(("TZ"), time_zone, 1);
     } else {
       unsetenv("TZ");
     }
     tzset();
 #endif
   }
 
   const char* saved_tz_;
 };
 
 const TimeInMillis FormatEpochTimeInMillisAsIso8601Test::kMillisPerSec;
 
 TEST_F(FormatEpochTimeInMillisAsIso8601Test, PrintsTwoDigitSegments) {
   EXPECT_EQ("2011-10-31T18:52:42",
             FormatEpochTimeInMillisAsIso8601(1320087162 * kMillisPerSec));
 }
 
 TEST_F(FormatEpochTimeInMillisAsIso8601Test, MillisecondsDoNotAffectResult) {
   EXPECT_EQ(
       "2011-10-31T18:52:42",
       FormatEpochTimeInMillisAsIso8601(1320087162 * kMillisPerSec + 234));
 }
 
 TEST_F(FormatEpochTimeInMillisAsIso8601Test, PrintsLeadingZeroes) {
   EXPECT_EQ("2011-09-03T05:07:02",
             FormatEpochTimeInMillisAsIso8601(1315026422 * kMillisPerSec));
 }
 
 TEST_F(FormatEpochTimeInMillisAsIso8601Test, Prints24HourTime) {
   EXPECT_EQ("2011-09-28T17:08:22",
             FormatEpochTimeInMillisAsIso8601(1317229702 * kMillisPerSec));
 }
 
 TEST_F(FormatEpochTimeInMillisAsIso8601Test, PrintsEpochStart) {
   EXPECT_EQ("1970-01-01T00:00:00", FormatEpochTimeInMillisAsIso8601(0));
 }
 
 #if GTEST_CAN_COMPARE_NULL
 
 # ifdef __BORLANDC__
 // Silences warnings: "Condition is always true", "Unreachable code"
 #  pragma option push -w-ccc -w-rch
 # endif
 
 // Tests that GTEST_IS_NULL_LITERAL_(x) is true when x is a null
 // pointer literal.
 TEST(NullLiteralTest, IsTrueForNullLiterals) {
   EXPECT_TRUE(GTEST_IS_NULL_LITERAL_(NULL));
   EXPECT_TRUE(GTEST_IS_NULL_LITERAL_(0));
   EXPECT_TRUE(GTEST_IS_NULL_LITERAL_(0U));
   EXPECT_TRUE(GTEST_IS_NULL_LITERAL_(0L));
 }
 
 // Tests that GTEST_IS_NULL_LITERAL_(x) is false when x is not a null
 // pointer literal.
 TEST(NullLiteralTest, IsFalseForNonNullLiterals) {
   EXPECT_FALSE(GTEST_IS_NULL_LITERAL_(1));
   EXPECT_FALSE(GTEST_IS_NULL_LITERAL_(0.0));
   EXPECT_FALSE(GTEST_IS_NULL_LITERAL_('a'));
   EXPECT_FALSE(GTEST_IS_NULL_LITERAL_(static_cast<void*>(NULL)));
 }
 
 # ifdef __BORLANDC__
 // Restores warnings after previous "#pragma option push" suppressed them.
 #  pragma option pop
 # endif
 
 #endif  // GTEST_CAN_COMPARE_NULL
 //
 // Tests CodePointToUtf8().
 
 // Tests that the NUL character L'\0' is encoded correctly.
 TEST(CodePointToUtf8Test, CanEncodeNul) {
   EXPECT_EQ("", CodePointToUtf8(L'\0'));
 }
 
 // Tests that ASCII characters are encoded correctly.
 TEST(CodePointToUtf8Test, CanEncodeAscii) {
   EXPECT_EQ("a", CodePointToUtf8(L'a'));
   EXPECT_EQ("Z", CodePointToUtf8(L'Z'));
   EXPECT_EQ("&", CodePointToUtf8(L'&'));
   EXPECT_EQ("\x7F", CodePointToUtf8(L'\x7F'));
 }
 
 // Tests that Unicode code-points that have 8 to 11 bits are encoded
 // as 110xxxxx 10xxxxxx.
 TEST(CodePointToUtf8Test, CanEncode8To11Bits) {
   // 000 1101 0011 => 110-00011 10-010011
   EXPECT_EQ("\xC3\x93", CodePointToUtf8(L'\xD3'));
 
   // 101 0111 0110 => 110-10101 10-110110
   // Some compilers (e.g., GCC on MinGW) cannot handle non-ASCII codepoints
   // in wide strings and wide chars. In order to accommodate them, we have to
   // introduce such character constants as integers.
   EXPECT_EQ("\xD5\xB6",
             CodePointToUtf8(static_cast<wchar_t>(0x576)));
 }
 
 // Tests that Unicode code-points that have 12 to 16 bits are encoded
 // as 1110xxxx 10xxxxxx 10xxxxxx.
 TEST(CodePointToUtf8Test, CanEncode12To16Bits) {
   // 0000 1000 1101 0011 => 1110-0000 10-100011 10-010011
   EXPECT_EQ("\xE0\xA3\x93",
             CodePointToUtf8(static_cast<wchar_t>(0x8D3)));
 
   // 1100 0111 0100 1101 => 1110-1100 10-011101 10-001101
   EXPECT_EQ("\xEC\x9D\x8D",
             CodePointToUtf8(static_cast<wchar_t>(0xC74D)));
 }
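 
 // Worked arithmetic for the two encodings above (added for exposition, not
 // part of the original test): an 8-to-11-bit code point c becomes the two
 // bytes (0xC0 | (c >> 6)) and (0x80 | (c & 0x3F)), and a 12-to-16-bit code
 // point c becomes the three bytes (0xE0 | (c >> 12)),
 // (0x80 | ((c >> 6) & 0x3F)), and (0x80 | (c & 0x3F)).  For example,
 // 0xD3 -> C3 93 and 0x8D3 -> E0 A3 93, matching the expectations.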
 
 #if !GTEST_WIDE_STRING_USES_UTF16_
 // Tests in this group require a wchar_t to hold > 16 bits, and thus
 // are skipped on Windows, Cygwin, and Symbian, where a wchar_t is
 // 16 bits wide. This code may not compile on those systems.
 
 // Tests that Unicode code-points that have 17 to 21 bits are encoded
 // as 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx.
 TEST(CodePointToUtf8Test, CanEncode17To21Bits) {
   // 0 0001 0000 1000 1101 0011 => 11110-000 10-010000 10-100011 10-010011
   EXPECT_EQ("\xF0\x90\xA3\x93", CodePointToUtf8(L'\x108D3'));
 
   // 0 0001 0000 0100 0000 0000 => 11110-000 10-010000 10-010000 10-000000
   EXPECT_EQ("\xF0\x90\x90\x80", CodePointToUtf8(L'\x10400'));
 
   // 1 0000 1000 0110 0011 0100 => 11110-100 10-001000 10-011000 10-110100
   EXPECT_EQ("\xF4\x88\x98\xB4", CodePointToUtf8(L'\x108634'));
 }
 
 // Tests that encoding an invalid code-point generates the expected result.
 TEST(CodePointToUtf8Test, CanEncodeInvalidCodePoint) {
   EXPECT_EQ("(Invalid Unicode 0x1234ABCD)", CodePointToUtf8(L'\x1234ABCD'));
 }
 
 #endif  // !GTEST_WIDE_STRING_USES_UTF16_
 
 // Tests WideStringToUtf8().
 
 // Tests that the NUL character L'\0' is encoded correctly.
 TEST(WideStringToUtf8Test, CanEncodeNul) {
   EXPECT_STREQ("", WideStringToUtf8(L"", 0).c_str());
   EXPECT_STREQ("", WideStringToUtf8(L"", -1).c_str());
 }
 
 // Tests that ASCII strings are encoded correctly.
 TEST(WideStringToUtf8Test, CanEncodeAscii) {
   EXPECT_STREQ("a", WideStringToUtf8(L"a", 1).c_str());
   EXPECT_STREQ("ab", WideStringToUtf8(L"ab", 2).c_str());
   EXPECT_STREQ("a", WideStringToUtf8(L"a", -1).c_str());
   EXPECT_STREQ("ab", WideStringToUtf8(L"ab", -1).c_str());
 }
 
 // Tests that Unicode code-points that have 8 to 11 bits are encoded
 // as 110xxxxx 10xxxxxx.
 TEST(WideStringToUtf8Test, CanEncode8To11Bits) {
   // 000 1101 0011 => 110-00011 10-010011
   EXPECT_STREQ("\xC3\x93", WideStringToUtf8(L"\xD3", 1).c_str());
   EXPECT_STREQ("\xC3\x93", WideStringToUtf8(L"\xD3", -1).c_str());
 
   // 101 0111 0110 => 110-10101 10-110110
   const wchar_t s[] = { 0x576, '\0' };
   EXPECT_STREQ("\xD5\xB6", WideStringToUtf8(s, 1).c_str());
   EXPECT_STREQ("\xD5\xB6", WideStringToUtf8(s, -1).c_str());
 }
 
 // Tests that Unicode code-points that have 12 to 16 bits are encoded
 // as 1110xxxx 10xxxxxx 10xxxxxx.
 TEST(WideStringToUtf8Test, CanEncode12To16Bits) {
   // 0000 1000 1101 0011 => 1110-0000 10-100011 10-010011
   const wchar_t s1[] = { 0x8D3, '\0' };
   EXPECT_STREQ("\xE0\xA3\x93", WideStringToUtf8(s1, 1).c_str());
   EXPECT_STREQ("\xE0\xA3\x93", WideStringToUtf8(s1, -1).c_str());
 
   // 1100 0111 0100 1101 => 1110-1100 10-011101 10-001101
   const wchar_t s2[] = { 0xC74D, '\0' };
   EXPECT_STREQ("\xEC\x9D\x8D", WideStringToUtf8(s2, 1).c_str());
   EXPECT_STREQ("\xEC\x9D\x8D", WideStringToUtf8(s2, -1).c_str());
 }
 
 // Tests that the conversion stops when the function encounters the \0 character.
 TEST(WideStringToUtf8Test, StopsOnNulCharacter) {
   EXPECT_STREQ("ABC", WideStringToUtf8(L"ABC\0XYZ", 100).c_str());
 }
 
 // Tests that the conversion stops when the function reaches the limit
 // specified by the 'length' parameter.
 TEST(WideStringToUtf8Test, StopsWhenLengthLimitReached) {
   EXPECT_STREQ("ABC", WideStringToUtf8(L"ABCDEF", 3).c_str());
 }
 
 #if !GTEST_WIDE_STRING_USES_UTF16_
 // Tests that Unicode code-points that have 17 to 21 bits are encoded
 // as 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx. This code may not compile
 // on the systems using UTF-16 encoding.
 TEST(WideStringToUtf8Test, CanEncode17To21Bits) {
   // 0 0001 0000 1000 1101 0011 => 11110-000 10-010000 10-100011 10-010011
   EXPECT_STREQ("\xF0\x90\xA3\x93", WideStringToUtf8(L"\x108D3", 1).c_str());
   EXPECT_STREQ("\xF0\x90\xA3\x93", WideStringToUtf8(L"\x108D3", -1).c_str());
 
   // 1 0000 1000 0110 0011 0100 => 11110-100 10-001000 10-011000 10-110100
   EXPECT_STREQ("\xF4\x88\x98\xB4", WideStringToUtf8(L"\x108634", 1).c_str());
   EXPECT_STREQ("\xF4\x88\x98\xB4", WideStringToUtf8(L"\x108634", -1).c_str());
 }
 
 // Tests that encoding an invalid code-point generates the expected result.
 TEST(WideStringToUtf8Test, CanEncodeInvalidCodePoint) {
   EXPECT_STREQ("(Invalid Unicode 0xABCDFF)",
                WideStringToUtf8(L"\xABCDFF", -1).c_str());
 }
 #else  // !GTEST_WIDE_STRING_USES_UTF16_
 // Tests that surrogate pairs are encoded correctly on the systems using
 // UTF-16 encoding in the wide strings.
 TEST(WideStringToUtf8Test, CanEncodeValidUtf16SurrogatePairs) {
   const wchar_t s[] = { 0xD801, 0xDC00, '\0' };
   EXPECT_STREQ("\xF0\x90\x90\x80", WideStringToUtf8(s, -1).c_str());
 }
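 
 // Worked example (added for exposition, not part of the original test): the
 // surrogate pair 0xD801 0xDC00 decodes to
 // 0x10000 + (0xD801 - 0xD800) * 0x400 + (0xDC00 - 0xDC00) = 0x10400,
 // whose UTF-8 encoding is F0 90 90 80, the same bytes expected for
 // L'\x10400' in CanEncode17To21Bits above.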
 
 // Tests that encoding an invalid UTF-16 surrogate pair
 // generates the expected result.
 TEST(WideStringToUtf8Test, CanEncodeInvalidUtf16SurrogatePair) {
   // Leading surrogate is at the end of the string.
   const wchar_t s1[] = { 0xD800, '\0' };
   EXPECT_STREQ("\xED\xA0\x80", WideStringToUtf8(s1, -1).c_str());
   // Leading surrogate is not followed by the trailing surrogate.
   const wchar_t s2[] = { 0xD800, 'M', '\0' };
   EXPECT_STREQ("\xED\xA0\x80M", WideStringToUtf8(s2, -1).c_str());
   // Trailing surrogate appears without a leading surrogate.
   const wchar_t s3[] = { 0xDC00, 'P', 'Q', 'R', '\0' };
   EXPECT_STREQ("\xED\xB0\x80PQR", WideStringToUtf8(s3, -1).c_str());
 }
 #endif  // !GTEST_WIDE_STRING_USES_UTF16_
 
 // Tests that codepoint concatenation works correctly.
 #if !GTEST_WIDE_STRING_USES_UTF16_
 TEST(WideStringToUtf8Test, ConcatenatesCodepointsCorrectly) {
   const wchar_t s[] = { 0x108634, 0xC74D, '\n', 0x576, 0x8D3, 0x108634, '\0'};
   EXPECT_STREQ(
       "\xF4\x88\x98\xB4"
           "\xEC\x9D\x8D"
           "\n"
           "\xD5\xB6"
           "\xE0\xA3\x93"
           "\xF4\x88\x98\xB4",
       WideStringToUtf8(s, -1).c_str());
 }
 #else
 TEST(WideStringToUtf8Test, ConcatenatesCodepointsCorrectly) {
   const wchar_t s[] = { 0xC74D, '\n', 0x576, 0x8D3, '\0'};
   EXPECT_STREQ(
       "\xEC\x9D\x8D" "\n" "\xD5\xB6" "\xE0\xA3\x93",
       WideStringToUtf8(s, -1).c_str());
 }
 #endif  // !GTEST_WIDE_STRING_USES_UTF16_
 
 // Tests the Random class.
 
 TEST(RandomDeathTest, GeneratesCrashesOnInvalidRange) {
   testing::internal::Random random(42);
   EXPECT_DEATH_IF_SUPPORTED(
       random.Generate(0),
       "Cannot generate a number in the range \\[0, 0\\)");
   EXPECT_DEATH_IF_SUPPORTED(
       random.Generate(testing::internal::Random::kMaxRange + 1),
       "Generation of a number in \\[0, 2147483649\\) was requested, "
       "but this can only generate numbers in \\[0, 2147483648\\)");
 }
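 
 // Note derived from the messages above (added for exposition, not part of
 // the original test): Random::kMaxRange is 2147483648 (2^31), so
 // Generate(range) only accepts ranges in (0, 2147483648].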
 
 TEST(RandomTest, GeneratesNumbersWithinRange) {
   const UInt32 kRange = 10000;
   testing::internal::Random random(12345);
   for (int i = 0; i < 10; i++) {
     EXPECT_LT(random.Generate(kRange), kRange) << " for iteration " << i;
   }
 
   testing::internal::Random random2(testing::internal::Random::kMaxRange);
   for (int i = 0; i < 10; i++) {
     EXPECT_LT(random2.Generate(kRange), kRange) << " for iteration " << i;
   }
 }
 
 TEST(RandomTest, RepeatsWhenReseeded) {
   const int kSeed = 123;
   const int kArraySize = 10;
   const UInt32 kRange = 10000;
   UInt32 values[kArraySize];
 
   testing::internal::Random random(kSeed);
   for (int i = 0; i < kArraySize; i++) {
     values[i] = random.Generate(kRange);
   }
 
   random.Reseed(kSeed);
   for (int i = 0; i < kArraySize; i++) {
     EXPECT_EQ(values[i], random.Generate(kRange)) << " for iteration " << i;
   }
 }
 
 // Tests STL container utilities.
 
 // Tests CountIf().
 
 static bool IsPositive(int n) { return n > 0; }
 
 TEST(ContainerUtilityTest, CountIf) {
   std::vector<int> v;
   EXPECT_EQ(0, CountIf(v, IsPositive));  // Works for an empty container.
 
   v.push_back(-1);
   v.push_back(0);
   EXPECT_EQ(0, CountIf(v, IsPositive));  // Works when no value satisfies.
 
   v.push_back(2);
   v.push_back(-10);
   v.push_back(10);
   EXPECT_EQ(2, CountIf(v, IsPositive));
 }
 
 // Tests ForEach().
 
 static int g_sum = 0;
 static void Accumulate(int n) { g_sum += n; }
 
 TEST(ContainerUtilityTest, ForEach) {
   std::vector<int> v;
   g_sum = 0;
   ForEach(v, Accumulate);
   EXPECT_EQ(0, g_sum);  // Works for an empty container.
 
   g_sum = 0;
   v.push_back(1);
   ForEach(v, Accumulate);
   EXPECT_EQ(1, g_sum);  // Works for a container with one element.
 
   g_sum = 0;
   v.push_back(20);
   v.push_back(300);
   ForEach(v, Accumulate);
   EXPECT_EQ(321, g_sum);
 }
 
 // Tests GetElementOr().
 TEST(ContainerUtilityTest, GetElementOr) {
   std::vector<char> a;
   EXPECT_EQ('x', GetElementOr(a, 0, 'x'));
 
   a.push_back('a');
   a.push_back('b');
   EXPECT_EQ('a', GetElementOr(a, 0, 'x'));
   EXPECT_EQ('b', GetElementOr(a, 1, 'x'));
   EXPECT_EQ('x', GetElementOr(a, -2, 'x'));
   EXPECT_EQ('x', GetElementOr(a, 2, 'x'));
 }
 
 TEST(ContainerUtilityDeathTest, ShuffleRange) {
   std::vector<int> a;
   a.push_back(0);
   a.push_back(1);
   a.push_back(2);
   testing::internal::Random random(1);
 
   EXPECT_DEATH_IF_SUPPORTED(
       ShuffleRange(&random, -1, 1, &a),
       "Invalid shuffle range start -1: must be in range \\[0, 3\\]");
   EXPECT_DEATH_IF_SUPPORTED(
       ShuffleRange(&random, 4, 4, &a),
       "Invalid shuffle range start 4: must be in range \\[0, 3\\]");
   EXPECT_DEATH_IF_SUPPORTED(
       ShuffleRange(&random, 3, 2, &a),
       "Invalid shuffle range finish 2: must be in range \\[3, 3\\]");
   EXPECT_DEATH_IF_SUPPORTED(
       ShuffleRange(&random, 3, 4, &a),
       "Invalid shuffle range finish 4: must be in range \\[3, 3\\]");
 }
 
 class VectorShuffleTest : public Test {
  protected:
   static const int kVectorSize = 20;
 
   VectorShuffleTest() : random_(1) {
     for (int i = 0; i < kVectorSize; i++) {
       vector_.push_back(i);
     }
   }
 
   static bool VectorIsCorrupt(const TestingVector& vector) {
     if (kVectorSize != static_cast<int>(vector.size())) {
       return true;
     }
 
     bool found_in_vector[kVectorSize] = { false };
     for (size_t i = 0; i < vector.size(); i++) {
       const int e = vector[i];
       if (e < 0 || e >= kVectorSize || found_in_vector[e]) {
         return true;
       }
       found_in_vector[e] = true;
     }
 
     // Vector size is correct, elements' range is correct, no
     // duplicate elements.  Therefore no corruption has occurred.
     return false;
   }
 
   static bool VectorIsNotCorrupt(const TestingVector& vector) {
     return !VectorIsCorrupt(vector);
   }
 
   static bool RangeIsShuffled(const TestingVector& vector, int begin, int end) {
     for (int i = begin; i < end; i++) {
       if (i != vector[i]) {
         return true;
       }
     }
     return false;
   }
 
   static bool RangeIsUnshuffled(
       const TestingVector& vector, int begin, int end) {
     return !RangeIsShuffled(vector, begin, end);
   }
 
   static bool VectorIsShuffled(const TestingVector& vector) {
     return RangeIsShuffled(vector, 0, static_cast<int>(vector.size()));
   }
 
   static bool VectorIsUnshuffled(const TestingVector& vector) {
     return !VectorIsShuffled(vector);
   }
 
   testing::internal::Random random_;
   TestingVector vector_;
 };  // class VectorShuffleTest
 
 const int VectorShuffleTest::kVectorSize;
 
 TEST_F(VectorShuffleTest, HandlesEmptyRange) {
   // Tests an empty range at the beginning...
   ShuffleRange(&random_, 0, 0, &vector_);
   ASSERT_PRED1(VectorIsNotCorrupt, vector_);
   ASSERT_PRED1(VectorIsUnshuffled, vector_);
 
   // ...in the middle...
   ShuffleRange(&random_, kVectorSize/2, kVectorSize/2, &vector_);
   ASSERT_PRED1(VectorIsNotCorrupt, vector_);
   ASSERT_PRED1(VectorIsUnshuffled, vector_);
 
   // ...at the end...
   ShuffleRange(&random_, kVectorSize - 1, kVectorSize - 1, &vector_);
   ASSERT_PRED1(VectorIsNotCorrupt, vector_);
   ASSERT_PRED1(VectorIsUnshuffled, vector_);
 
   // ...and past the end.
   ShuffleRange(&random_, kVectorSize, kVectorSize, &vector_);
   ASSERT_PRED1(VectorIsNotCorrupt, vector_);
   ASSERT_PRED1(VectorIsUnshuffled, vector_);
 }
 
 TEST_F(VectorShuffleTest, HandlesRangeOfSizeOne) {
   // Tests a size one range at the beginning...
   ShuffleRange(&random_, 0, 1, &vector_);
   ASSERT_PRED1(VectorIsNotCorrupt, vector_);
   ASSERT_PRED1(VectorIsUnshuffled, vector_);
 
   // ...in the middle...
   ShuffleRange(&random_, kVectorSize/2, kVectorSize/2 + 1, &vector_);
   ASSERT_PRED1(VectorIsNotCorrupt, vector_);
   ASSERT_PRED1(VectorIsUnshuffled, vector_);
 
   // ...and at the end.
   ShuffleRange(&random_, kVectorSize - 1, kVectorSize, &vector_);
   ASSERT_PRED1(VectorIsNotCorrupt, vector_);
   ASSERT_PRED1(VectorIsUnshuffled, vector_);
 }
 
 // Because we use our own random number generator and a fixed seed,
 // we can guarantee that the following "random" tests will succeed.
 
 TEST_F(VectorShuffleTest, ShufflesEntireVector) {
   Shuffle(&random_, &vector_);
   ASSERT_PRED1(VectorIsNotCorrupt, vector_);
   EXPECT_FALSE(VectorIsUnshuffled(vector_)) << vector_;
 
   // Tests the first and last elements in particular to ensure that
   // there are no off-by-one problems in our shuffle algorithm.
   EXPECT_NE(0, vector_[0]);
   EXPECT_NE(kVectorSize - 1, vector_[kVectorSize - 1]);
 }
 
 TEST_F(VectorShuffleTest, ShufflesStartOfVector) {
   const int kRangeSize = kVectorSize/2;
 
   ShuffleRange(&random_, 0, kRangeSize, &vector_);
 
   ASSERT_PRED1(VectorIsNotCorrupt, vector_);
   EXPECT_PRED3(RangeIsShuffled, vector_, 0, kRangeSize);
   EXPECT_PRED3(RangeIsUnshuffled, vector_, kRangeSize, kVectorSize);
 }
 
 TEST_F(VectorShuffleTest, ShufflesEndOfVector) {
   const int kRangeSize = kVectorSize / 2;
   ShuffleRange(&random_, kRangeSize, kVectorSize, &vector_);
 
   ASSERT_PRED1(VectorIsNotCorrupt, vector_);
   EXPECT_PRED3(RangeIsUnshuffled, vector_, 0, kRangeSize);
   EXPECT_PRED3(RangeIsShuffled, vector_, kRangeSize, kVectorSize);
 }
 
 TEST_F(VectorShuffleTest, ShufflesMiddleOfVector) {
   int kRangeSize = kVectorSize/3;
   ShuffleRange(&random_, kRangeSize, 2*kRangeSize, &vector_);
 
   ASSERT_PRED1(VectorIsNotCorrupt, vector_);
   EXPECT_PRED3(RangeIsUnshuffled, vector_, 0, kRangeSize);
   EXPECT_PRED3(RangeIsShuffled, vector_, kRangeSize, 2*kRangeSize);
   EXPECT_PRED3(RangeIsUnshuffled, vector_, 2*kRangeSize, kVectorSize);
 }
 
 TEST_F(VectorShuffleTest, ShufflesRepeatably) {
   TestingVector vector2;
   for (int i = 0; i < kVectorSize; i++) {
     vector2.push_back(i);
   }
 
   random_.Reseed(1234);
   Shuffle(&random_, &vector_);
   random_.Reseed(1234);
   Shuffle(&random_, &vector2);
 
   ASSERT_PRED1(VectorIsNotCorrupt, vector_);
   ASSERT_PRED1(VectorIsNotCorrupt, vector2);
 
   for (int i = 0; i < kVectorSize; i++) {
     EXPECT_EQ(vector_[i], vector2[i]) << " where i is " << i;
   }
 }
 
 // Tests the size of the AssertHelper class.
 
 TEST(AssertHelperTest, AssertHelperIsSmall) {
   // To avoid breaking clients that use lots of assertions in one
   // function, we cannot grow the size of AssertHelper.
   EXPECT_LE(sizeof(testing::internal::AssertHelper), sizeof(void*));
 }
 
 // Tests String::EndsWithCaseInsensitive().
 TEST(StringTest, EndsWithCaseInsensitive) {
   EXPECT_TRUE(String::EndsWithCaseInsensitive("foobar", "BAR"));
   EXPECT_TRUE(String::EndsWithCaseInsensitive("foobaR", "bar"));
   EXPECT_TRUE(String::EndsWithCaseInsensitive("foobar", ""));
   EXPECT_TRUE(String::EndsWithCaseInsensitive("", ""));
 
   EXPECT_FALSE(String::EndsWithCaseInsensitive("Foobar", "foo"));
   EXPECT_FALSE(String::EndsWithCaseInsensitive("foobar", "Foo"));
   EXPECT_FALSE(String::EndsWithCaseInsensitive("", "foo"));
 }
 
 // C++Builder's preprocessor is buggy; it fails to expand macros that
 // appear in macro parameters after wide char literals.  Provide an alias
 // for NULL as a workaround.
 static const wchar_t* const kNull = NULL;
 
 // Tests String::CaseInsensitiveWideCStringEquals
 TEST(StringTest, CaseInsensitiveWideCStringEquals) {
   EXPECT_TRUE(String::CaseInsensitiveWideCStringEquals(NULL, NULL));
   EXPECT_FALSE(String::CaseInsensitiveWideCStringEquals(kNull, L""));
   EXPECT_FALSE(String::CaseInsensitiveWideCStringEquals(L"", kNull));
   EXPECT_FALSE(String::CaseInsensitiveWideCStringEquals(kNull, L"foobar"));
   EXPECT_FALSE(String::CaseInsensitiveWideCStringEquals(L"foobar", kNull));
   EXPECT_TRUE(String::CaseInsensitiveWideCStringEquals(L"foobar", L"foobar"));
   EXPECT_TRUE(String::CaseInsensitiveWideCStringEquals(L"foobar", L"FOOBAR"));
   EXPECT_TRUE(String::CaseInsensitiveWideCStringEquals(L"FOOBAR", L"foobar"));
 }
 
 #if GTEST_OS_WINDOWS
 
 // Tests String::ShowWideCString().
 TEST(StringTest, ShowWideCString) {
   EXPECT_STREQ("(null)",
                String::ShowWideCString(NULL).c_str());
   EXPECT_STREQ("", String::ShowWideCString(L"").c_str());
   EXPECT_STREQ("foo", String::ShowWideCString(L"foo").c_str());
 }
 
 # if GTEST_OS_WINDOWS_MOBILE
 TEST(StringTest, AnsiAndUtf16Null) {
   EXPECT_EQ(NULL, String::AnsiToUtf16(NULL));
   EXPECT_EQ(NULL, String::Utf16ToAnsi(NULL));
 }
 
 TEST(StringTest, AnsiAndUtf16ConvertBasic) {
   const char* ansi = String::Utf16ToAnsi(L"str");
   EXPECT_STREQ("str", ansi);
   delete [] ansi;
   const WCHAR* utf16 = String::AnsiToUtf16("str");
   EXPECT_EQ(0, wcsncmp(L"str", utf16, 3));
   delete [] utf16;
 }
 
 TEST(StringTest, AnsiAndUtf16ConvertPathChars) {
   const char* ansi = String::Utf16ToAnsi(L".:\\ \"*?");
   EXPECT_STREQ(".:\\ \"*?", ansi);
   delete [] ansi;
   const WCHAR* utf16 = String::AnsiToUtf16(".:\\ \"*?");
   EXPECT_EQ(0, wcsncmp(L".:\\ \"*?", utf16, 3));
   delete [] utf16;
 }
 # endif  // GTEST_OS_WINDOWS_MOBILE
 
 #endif  // GTEST_OS_WINDOWS
 
 // Tests TestProperty construction.
 TEST(TestPropertyTest, StringValue) {
   TestProperty property("key", "1");
   EXPECT_STREQ("key", property.key());
   EXPECT_STREQ("1", property.value());
 }
 
 // Tests TestProperty replacing a value.
 TEST(TestPropertyTest, ReplaceStringValue) {
   TestProperty property("key", "1");
   EXPECT_STREQ("1", property.value());
   property.SetValue("2");
   EXPECT_STREQ("2", property.value());
 }
 
 // AddFatalFailure() and AddNonfatalFailure() must be stand-alone
 // functions (i.e. their definitions cannot be inlined at the call
 // sites), or C++Builder won't compile the code.
 static void AddFatalFailure() {
   FAIL() << "Expected fatal failure.";
 }
 
 static void AddNonfatalFailure() {
   ADD_FAILURE() << "Expected non-fatal failure.";
 }
 
 class ScopedFakeTestPartResultReporterTest : public Test {
  public:  // Must be public and not protected due to a bug in g++ 3.4.2.
   enum FailureMode {
     FATAL_FAILURE,
     NONFATAL_FAILURE
   };
   static void AddFailure(FailureMode failure) {
     if (failure == FATAL_FAILURE) {
       AddFatalFailure();
     } else {
       AddNonfatalFailure();
     }
   }
 };
 
 // Tests that ScopedFakeTestPartResultReporter intercepts test
 // failures.
 TEST_F(ScopedFakeTestPartResultReporterTest, InterceptsTestFailures) {
   TestPartResultArray results;
   {
     ScopedFakeTestPartResultReporter reporter(
         ScopedFakeTestPartResultReporter::INTERCEPT_ONLY_CURRENT_THREAD,
         &results);
     AddFailure(NONFATAL_FAILURE);
     AddFailure(FATAL_FAILURE);
   }
 
   EXPECT_EQ(2, results.size());
   EXPECT_TRUE(results.GetTestPartResult(0).nonfatally_failed());
   EXPECT_TRUE(results.GetTestPartResult(1).fatally_failed());
 }
 
 TEST_F(ScopedFakeTestPartResultReporterTest, DeprecatedConstructor) {
   TestPartResultArray results;
   {
     // Tests that the deprecated constructor still works.
     ScopedFakeTestPartResultReporter reporter(&results);
     AddFailure(NONFATAL_FAILURE);
   }
   EXPECT_EQ(1, results.size());
 }
 
 #if GTEST_IS_THREADSAFE
 
 class ScopedFakeTestPartResultReporterWithThreadsTest
   : public ScopedFakeTestPartResultReporterTest {
  protected:
   static void AddFailureInOtherThread(FailureMode failure) {
     ThreadWithParam<FailureMode> thread(&AddFailure, failure, NULL);
     thread.Join();
   }
 };
 
 TEST_F(ScopedFakeTestPartResultReporterWithThreadsTest,
        InterceptsTestFailuresInAllThreads) {
   TestPartResultArray results;
   {
     ScopedFakeTestPartResultReporter reporter(
         ScopedFakeTestPartResultReporter::INTERCEPT_ALL_THREADS, &results);
     AddFailure(NONFATAL_FAILURE);
     AddFailure(FATAL_FAILURE);
     AddFailureInOtherThread(NONFATAL_FAILURE);
     AddFailureInOtherThread(FATAL_FAILURE);
   }
 
   EXPECT_EQ(4, results.size());
   EXPECT_TRUE(results.GetTestPartResult(0).nonfatally_failed());
   EXPECT_TRUE(results.GetTestPartResult(1).fatally_failed());
   EXPECT_TRUE(results.GetTestPartResult(2).nonfatally_failed());
   EXPECT_TRUE(results.GetTestPartResult(3).fatally_failed());
 }
 
 #endif  // GTEST_IS_THREADSAFE
 
 // Tests EXPECT_FATAL_FAILURE{,ON_ALL_THREADS}.  Makes sure that they
 // work even if the failure is generated in a called function rather than
 // the current context.
 
 typedef ScopedFakeTestPartResultReporterTest ExpectFatalFailureTest;
 
 TEST_F(ExpectFatalFailureTest, CatchesFatalFailure) {
   EXPECT_FATAL_FAILURE(AddFatalFailure(), "Expected fatal failure.");
 }
 
 #if GTEST_HAS_GLOBAL_STRING
 TEST_F(ExpectFatalFailureTest, AcceptsStringObject) {
   EXPECT_FATAL_FAILURE(AddFatalFailure(), ::string("Expected fatal failure."));
 }
 #endif
 
 TEST_F(ExpectFatalFailureTest, AcceptsStdStringObject) {
   EXPECT_FATAL_FAILURE(AddFatalFailure(),
                        ::std::string("Expected fatal failure."));
 }
 
 TEST_F(ExpectFatalFailureTest, CatchesFatalFailureOnAllThreads) {
   // We have another test below to verify that the macro catches fatal
   // failures generated on another thread.
   EXPECT_FATAL_FAILURE_ON_ALL_THREADS(AddFatalFailure(),
                                       "Expected fatal failure.");
 }
 
 #ifdef __BORLANDC__
 // Silences warnings: "Condition is always true"
 # pragma option push -w-ccc
 #endif
 
 // Tests that EXPECT_FATAL_FAILURE() can be used in a non-void
 // function even when the statement in it contains ASSERT_*.
 
 int NonVoidFunction() {
   EXPECT_FATAL_FAILURE(ASSERT_TRUE(false), "");
   EXPECT_FATAL_FAILURE_ON_ALL_THREADS(FAIL(), "");
   return 0;
 }
 
 TEST_F(ExpectFatalFailureTest, CanBeUsedInNonVoidFunction) {
   NonVoidFunction();
 }
 
 // Tests that EXPECT_FATAL_FAILURE(statement, ...) doesn't abort the
 // current function even though 'statement' generates a fatal failure.
 
 void DoesNotAbortHelper(bool* aborted) {
   EXPECT_FATAL_FAILURE(ASSERT_TRUE(false), "");
   EXPECT_FATAL_FAILURE_ON_ALL_THREADS(FAIL(), "");
 
   *aborted = false;
 }
 
 #ifdef __BORLANDC__
 // Restores warnings after previous "#pragma option push" suppressed them.
 # pragma option pop
 #endif
 
 TEST_F(ExpectFatalFailureTest, DoesNotAbort) {
   bool aborted = true;
   DoesNotAbortHelper(&aborted);
   EXPECT_FALSE(aborted);
 }
 
 // Tests that the EXPECT_FATAL_FAILURE{,_ON_ALL_THREADS} accepts a
 // statement that contains a macro which expands to code containing an
 // unprotected comma.
 
 static int global_var = 0;
 #define GTEST_USE_UNPROTECTED_COMMA_ global_var++, global_var++
 
 TEST_F(ExpectFatalFailureTest, AcceptsMacroThatExpandsToUnprotectedComma) {
 #ifndef __BORLANDC__
   // ICEs (internal compiler errors) in C++Builder.
   EXPECT_FATAL_FAILURE({
     GTEST_USE_UNPROTECTED_COMMA_;
     AddFatalFailure();
   }, "");
 #endif
 
   EXPECT_FATAL_FAILURE_ON_ALL_THREADS({
     GTEST_USE_UNPROTECTED_COMMA_;
     AddFatalFailure();
   }, "");
 }
 
 // Tests EXPECT_NONFATAL_FAILURE{,ON_ALL_THREADS}.
 
 typedef ScopedFakeTestPartResultReporterTest ExpectNonfatalFailureTest;
 
 TEST_F(ExpectNonfatalFailureTest, CatchesNonfatalFailure) {
   EXPECT_NONFATAL_FAILURE(AddNonfatalFailure(),
                           "Expected non-fatal failure.");
 }
 
 #if GTEST_HAS_GLOBAL_STRING
 TEST_F(ExpectNonfatalFailureTest, AcceptsStringObject) {
   EXPECT_NONFATAL_FAILURE(AddNonfatalFailure(),
                           ::string("Expected non-fatal failure."));
 }
 #endif
 
 TEST_F(ExpectNonfatalFailureTest, AcceptsStdStringObject) {
   EXPECT_NONFATAL_FAILURE(AddNonfatalFailure(),
                           ::std::string("Expected non-fatal failure."));
 }
 
 TEST_F(ExpectNonfatalFailureTest, CatchesNonfatalFailureOnAllThreads) {
   // We have another test below to verify that the macro catches
   // non-fatal failures generated on another thread.
   EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(AddNonfatalFailure(),
                                          "Expected non-fatal failure.");
 }
 
 // Tests that the EXPECT_NONFATAL_FAILURE{,_ON_ALL_THREADS} accepts a
 // statement that contains a macro which expands to code containing an
 // unprotected comma.
 TEST_F(ExpectNonfatalFailureTest, AcceptsMacroThatExpandsToUnprotectedComma) {
   EXPECT_NONFATAL_FAILURE({
     GTEST_USE_UNPROTECTED_COMMA_;
     AddNonfatalFailure();
   }, "");
 
   EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS({
     GTEST_USE_UNPROTECTED_COMMA_;
     AddNonfatalFailure();
   }, "");
 }
 
 #if GTEST_IS_THREADSAFE
 
 typedef ScopedFakeTestPartResultReporterWithThreadsTest
     ExpectFailureWithThreadsTest;
 
 TEST_F(ExpectFailureWithThreadsTest, ExpectFatalFailureOnAllThreads) {
   EXPECT_FATAL_FAILURE_ON_ALL_THREADS(AddFailureInOtherThread(FATAL_FAILURE),
                                       "Expected fatal failure.");
 }
 
 TEST_F(ExpectFailureWithThreadsTest, ExpectNonFatalFailureOnAllThreads) {
   EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(
       AddFailureInOtherThread(NONFATAL_FAILURE), "Expected non-fatal failure.");
 }
 
 #endif  // GTEST_IS_THREADSAFE
 
 // Tests the TestProperty class.
 
 TEST(TestPropertyTest, ConstructorWorks) {
   const TestProperty property("key", "value");
   EXPECT_STREQ("key", property.key());
   EXPECT_STREQ("value", property.value());
 }
 
 TEST(TestPropertyTest, SetValue) {
   TestProperty property("key", "value_1");
   EXPECT_STREQ("key", property.key());
   property.SetValue("value_2");
   EXPECT_STREQ("key", property.key());
   EXPECT_STREQ("value_2", property.value());
 }
 
 // Tests the TestResult class
 
 // The test fixture for testing TestResult.
 class TestResultTest : public Test {
  protected:
   typedef std::vector<TestPartResult> TPRVector;
 
   // We make use of 2 TestPartResult objects,
   TestPartResult * pr1, * pr2;
 
   // ... and 3 TestResult objects.
   TestResult * r0, * r1, * r2;
 
   virtual void SetUp() {
     // pr1 is for success.
     pr1 = new TestPartResult(TestPartResult::kSuccess,
                              "foo/bar.cc",
                              10,
                              "Success!");
 
     // pr2 is for fatal failure.
     pr2 = new TestPartResult(TestPartResult::kFatalFailure,
                              "foo/bar.cc",
                              -1,  // This line number means "unknown"
                              "Failure!");
 
     // Creates the TestResult objects.
     r0 = new TestResult();
     r1 = new TestResult();
     r2 = new TestResult();
 
     // In order to test TestResult, we need to modify its internal
     // state, in particular the TestPartResult vector it holds.
     // test_part_results() returns a const reference to this vector.
     // We cast it to a non-const object so that it can be modified (yes,
     // this is a hack).
     TPRVector* results1 = const_cast<TPRVector*>(
         &TestResultAccessor::test_part_results(*r1));
     TPRVector* results2 = const_cast<TPRVector*>(
         &TestResultAccessor::test_part_results(*r2));
 
     // r0 is an empty TestResult.
 
     // r1 contains a single SUCCESS TestPartResult.
     results1->push_back(*pr1);
 
     // r2 contains a SUCCESS, and a FAILURE.
     results2->push_back(*pr1);
     results2->push_back(*pr2);
   }
 
   virtual void TearDown() {
     delete pr1;
     delete pr2;
 
     delete r0;
     delete r1;
     delete r2;
   }
 
   // Helper that compares two TestPartResults.
   static void CompareTestPartResult(const TestPartResult& expected,
                                     const TestPartResult& actual) {
     EXPECT_EQ(expected.type(), actual.type());
     EXPECT_STREQ(expected.file_name(), actual.file_name());
     EXPECT_EQ(expected.line_number(), actual.line_number());
     EXPECT_STREQ(expected.summary(), actual.summary());
     EXPECT_STREQ(expected.message(), actual.message());
     EXPECT_EQ(expected.passed(), actual.passed());
     EXPECT_EQ(expected.failed(), actual.failed());
     EXPECT_EQ(expected.nonfatally_failed(), actual.nonfatally_failed());
     EXPECT_EQ(expected.fatally_failed(), actual.fatally_failed());
   }
 };
 
 // Tests TestResult::total_part_count().
 TEST_F(TestResultTest, total_part_count) {
   ASSERT_EQ(0, r0->total_part_count());
   ASSERT_EQ(1, r1->total_part_count());
   ASSERT_EQ(2, r2->total_part_count());
 }
 
 // Tests TestResult::Passed().
 TEST_F(TestResultTest, Passed) {
   ASSERT_TRUE(r0->Passed());
   ASSERT_TRUE(r1->Passed());
   ASSERT_FALSE(r2->Passed());
 }
 
 // Tests TestResult::Failed().
 TEST_F(TestResultTest, Failed) {
   ASSERT_FALSE(r0->Failed());
   ASSERT_FALSE(r1->Failed());
   ASSERT_TRUE(r2->Failed());
 }
 
 // Tests TestResult::GetTestPartResult().
 
 typedef TestResultTest TestResultDeathTest;
 
 TEST_F(TestResultDeathTest, GetTestPartResult) {
   CompareTestPartResult(*pr1, r2->GetTestPartResult(0));
   CompareTestPartResult(*pr2, r2->GetTestPartResult(1));
   EXPECT_DEATH_IF_SUPPORTED(r2->GetTestPartResult(2), "");
   EXPECT_DEATH_IF_SUPPORTED(r2->GetTestPartResult(-1), "");
 }
 
 // Tests TestResult has no properties when none are added.
 TEST(TestResultPropertyTest, NoPropertiesFoundWhenNoneAreAdded) {
   TestResult test_result;
   ASSERT_EQ(0, test_result.test_property_count());
 }
 
 // Tests TestResult has the expected property when added.
 TEST(TestResultPropertyTest, OnePropertyFoundWhenAdded) {
   TestResult test_result;
   TestProperty property("key_1", "1");
   TestResultAccessor::RecordProperty(&test_result, "testcase", property);
   ASSERT_EQ(1, test_result.test_property_count());
   const TestProperty& actual_property = test_result.GetTestProperty(0);
   EXPECT_STREQ("key_1", actual_property.key());
   EXPECT_STREQ("1", actual_property.value());
 }
 
 // Tests TestResult has multiple properties when added.
 TEST(TestResultPropertyTest, MultiplePropertiesFoundWhenAdded) {
   TestResult test_result;
   TestProperty property_1("key_1", "1");
   TestProperty property_2("key_2", "2");
   TestResultAccessor::RecordProperty(&test_result, "testcase", property_1);
   TestResultAccessor::RecordProperty(&test_result, "testcase", property_2);
   ASSERT_EQ(2, test_result.test_property_count());
   const TestProperty& actual_property_1 = test_result.GetTestProperty(0);
   EXPECT_STREQ("key_1", actual_property_1.key());
   EXPECT_STREQ("1", actual_property_1.value());
 
   const TestProperty& actual_property_2 = test_result.GetTestProperty(1);
   EXPECT_STREQ("key_2", actual_property_2.key());
   EXPECT_STREQ("2", actual_property_2.value());
 }
 
 // Tests TestResult::RecordProperty() overrides values for duplicate keys.
 TEST(TestResultPropertyTest, OverridesValuesForDuplicateKeys) {
   TestResult test_result;
   TestProperty property_1_1("key_1", "1");
   TestProperty property_2_1("key_2", "2");
   TestProperty property_1_2("key_1", "12");
   TestProperty property_2_2("key_2", "22");
   TestResultAccessor::RecordProperty(&test_result, "testcase", property_1_1);
   TestResultAccessor::RecordProperty(&test_result, "testcase", property_2_1);
   TestResultAccessor::RecordProperty(&test_result, "testcase", property_1_2);
   TestResultAccessor::RecordProperty(&test_result, "testcase", property_2_2);
 
   ASSERT_EQ(2, test_result.test_property_count());
   const TestProperty& actual_property_1 = test_result.GetTestProperty(0);
   EXPECT_STREQ("key_1", actual_property_1.key());
   EXPECT_STREQ("12", actual_property_1.value());
 
   const TestProperty& actual_property_2 = test_result.GetTestProperty(1);
   EXPECT_STREQ("key_2", actual_property_2.key());
   EXPECT_STREQ("22", actual_property_2.value());
 }
 
 // Tests TestResult::GetTestProperty().
 TEST(TestResultPropertyTest, GetTestProperty) {
   TestResult test_result;
   TestProperty property_1("key_1", "1");
   TestProperty property_2("key_2", "2");
   TestProperty property_3("key_3", "3");
   TestResultAccessor::RecordProperty(&test_result, "testcase", property_1);
   TestResultAccessor::RecordProperty(&test_result, "testcase", property_2);
   TestResultAccessor::RecordProperty(&test_result, "testcase", property_3);
 
   const TestProperty& fetched_property_1 = test_result.GetTestProperty(0);
   const TestProperty& fetched_property_2 = test_result.GetTestProperty(1);
   const TestProperty& fetched_property_3 = test_result.GetTestProperty(2);
 
   EXPECT_STREQ("key_1", fetched_property_1.key());
   EXPECT_STREQ("1", fetched_property_1.value());
 
   EXPECT_STREQ("key_2", fetched_property_2.key());
   EXPECT_STREQ("2", fetched_property_2.value());
 
   EXPECT_STREQ("key_3", fetched_property_3.key());
   EXPECT_STREQ("3", fetched_property_3.value());
 
   EXPECT_DEATH_IF_SUPPORTED(test_result.GetTestProperty(3), "");
   EXPECT_DEATH_IF_SUPPORTED(test_result.GetTestProperty(-1), "");
 }
 
 // Tests the Test class.
 //
 // It's difficult to test every public method of this class (we are
 // already stretching the limit of Google Test by using it to test itself!).
 // Fortunately, we don't have to do that, as we are already testing
  // the functionality of the Test class extensively by using Google Test
 // alone.
 //
 // Therefore, this section only contains one test.
 
 // Tests that GTestFlagSaver works on Windows and Mac.
 
 class GTestFlagSaverTest : public Test {
  protected:
   // Saves the Google Test flags such that we can restore them later, and
   // then sets them to their default values.  This will be called
   // before the first test in this test case is run.
   static void SetUpTestCase() {
     saver_ = new GTestFlagSaver;
 
     GTEST_FLAG(also_run_disabled_tests) = false;
     GTEST_FLAG(break_on_failure) = false;
     GTEST_FLAG(catch_exceptions) = false;
     GTEST_FLAG(death_test_use_fork) = false;
     GTEST_FLAG(color) = "auto";
     GTEST_FLAG(filter) = "";
     GTEST_FLAG(list_tests) = false;
     GTEST_FLAG(output) = "";
     GTEST_FLAG(print_time) = true;
     GTEST_FLAG(random_seed) = 0;
     GTEST_FLAG(repeat) = 1;
     GTEST_FLAG(shuffle) = false;
     GTEST_FLAG(stack_trace_depth) = kMaxStackTraceDepth;
     GTEST_FLAG(stream_result_to) = "";
     GTEST_FLAG(throw_on_failure) = false;
   }
 
   // Restores the Google Test flags that the tests have modified.  This will
   // be called after the last test in this test case is run.
   static void TearDownTestCase() {
     delete saver_;
     saver_ = NULL;
   }
 
   // Verifies that the Google Test flags have their default values, and then
   // modifies each of them.
   void VerifyAndModifyFlags() {
     EXPECT_FALSE(GTEST_FLAG(also_run_disabled_tests));
     EXPECT_FALSE(GTEST_FLAG(break_on_failure));
     EXPECT_FALSE(GTEST_FLAG(catch_exceptions));
     EXPECT_STREQ("auto", GTEST_FLAG(color).c_str());
     EXPECT_FALSE(GTEST_FLAG(death_test_use_fork));
     EXPECT_STREQ("", GTEST_FLAG(filter).c_str());
     EXPECT_FALSE(GTEST_FLAG(list_tests));
     EXPECT_STREQ("", GTEST_FLAG(output).c_str());
     EXPECT_TRUE(GTEST_FLAG(print_time));
     EXPECT_EQ(0, GTEST_FLAG(random_seed));
     EXPECT_EQ(1, GTEST_FLAG(repeat));
     EXPECT_FALSE(GTEST_FLAG(shuffle));
     EXPECT_EQ(kMaxStackTraceDepth, GTEST_FLAG(stack_trace_depth));
     EXPECT_STREQ("", GTEST_FLAG(stream_result_to).c_str());
     EXPECT_FALSE(GTEST_FLAG(throw_on_failure));
 
     GTEST_FLAG(also_run_disabled_tests) = true;
     GTEST_FLAG(break_on_failure) = true;
     GTEST_FLAG(catch_exceptions) = true;
     GTEST_FLAG(color) = "no";
     GTEST_FLAG(death_test_use_fork) = true;
     GTEST_FLAG(filter) = "abc";
     GTEST_FLAG(list_tests) = true;
     GTEST_FLAG(output) = "xml:foo.xml";
     GTEST_FLAG(print_time) = false;
     GTEST_FLAG(random_seed) = 1;
     GTEST_FLAG(repeat) = 100;
     GTEST_FLAG(shuffle) = true;
     GTEST_FLAG(stack_trace_depth) = 1;
     GTEST_FLAG(stream_result_to) = "localhost:1234";
     GTEST_FLAG(throw_on_failure) = true;
   }
 
  private:
   // For saving Google Test flags during this test case.
   static GTestFlagSaver* saver_;
 };
 
 GTestFlagSaver* GTestFlagSaverTest::saver_ = NULL;
 
 // Google Test doesn't guarantee the order of tests.  The following two
 // tests are designed to work regardless of their order.
 
 // Modifies the Google Test flags in the test body.
 TEST_F(GTestFlagSaverTest, ModifyGTestFlags) {
   VerifyAndModifyFlags();
 }
 
 // Verifies that the Google Test flags in the body of the previous test were
 // restored to their original values.
 TEST_F(GTestFlagSaverTest, VerifyGTestFlags) {
   VerifyAndModifyFlags();
 }
 
 // Sets an environment variable with the given name to the given
 // value.  If the value argument is "", unsets the environment
 // variable.  The caller must ensure that both arguments are not NULL.
 static void SetEnv(const char* name, const char* value) {
 #if GTEST_OS_WINDOWS_MOBILE
   // Environment variables are not supported on Windows CE.
   return;
 #elif defined(__BORLANDC__) || defined(__SunOS_5_8) || defined(__SunOS_5_9)
   // C++Builder's putenv only stores a pointer to its parameter; we have to
   // ensure that the string remains valid as long as it might be needed.
   // We use an std::map to do so.
   static std::map<std::string, std::string*> added_env;
 
   // Because putenv stores a pointer to the string buffer, we can't delete the
   // previous string (if present) until after it's replaced.
   std::string *prev_env = NULL;
   if (added_env.find(name) != added_env.end()) {
     prev_env = added_env[name];
   }
   added_env[name] = new std::string(
       (Message() << name << "=" << value).GetString());
 
   // The standard signature of putenv accepts a 'char*' argument. Other
   // implementations, like C++Builder's, accept a 'const char*'.
   // We cast away the 'const' since that would work for both variants.
   putenv(const_cast<char*>(added_env[name]->c_str()));
   delete prev_env;
 #elif GTEST_OS_WINDOWS  // If we are on Windows proper.
   _putenv((Message() << name << "=" << value).GetString().c_str());
 #else
   if (*value == '\0') {
     unsetenv(name);
   } else {
     setenv(name, value, 1);
   }
 #endif  // GTEST_OS_WINDOWS_MOBILE
 }
 
 #if !GTEST_OS_WINDOWS_MOBILE
 // Environment variables are not supported on Windows CE.
 
 using testing::internal::Int32FromGTestEnv;
 
 // Tests Int32FromGTestEnv().
 
 // Tests that Int32FromGTestEnv() returns the default value when the
 // environment variable is not set.
 TEST(Int32FromGTestEnvTest, ReturnsDefaultWhenVariableIsNotSet) {
   SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "");
   EXPECT_EQ(10, Int32FromGTestEnv("temp", 10));
 }
 
 # if !defined(GTEST_GET_INT32_FROM_ENV_)
 
 // Tests that Int32FromGTestEnv() returns the default value when the
 // environment variable overflows as an Int32.
 TEST(Int32FromGTestEnvTest, ReturnsDefaultWhenValueOverflows) {
   printf("(expecting 2 warnings)\n");
 
   SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "12345678987654321");
   EXPECT_EQ(20, Int32FromGTestEnv("temp", 20));
 
   SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "-12345678987654321");
   EXPECT_EQ(30, Int32FromGTestEnv("temp", 30));
 }
 
 // Tests that Int32FromGTestEnv() returns the default value when the
 // environment variable does not represent a valid decimal integer.
 TEST(Int32FromGTestEnvTest, ReturnsDefaultWhenValueIsInvalid) {
   printf("(expecting 2 warnings)\n");
 
   SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "A1");
   EXPECT_EQ(40, Int32FromGTestEnv("temp", 40));
 
   SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "12X");
   EXPECT_EQ(50, Int32FromGTestEnv("temp", 50));
 }
 
 # endif  // !defined(GTEST_GET_INT32_FROM_ENV_)
 
 // Tests that Int32FromGTestEnv() parses and returns the value of the
 // environment variable when it represents a valid decimal integer in
 // the range of an Int32.
 TEST(Int32FromGTestEnvTest, ParsesAndReturnsValidValue) {
   SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "123");
   EXPECT_EQ(123, Int32FromGTestEnv("temp", 0));
 
   SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "-321");
   EXPECT_EQ(-321, Int32FromGTestEnv("temp", 0));
 }
 #endif  // !GTEST_OS_WINDOWS_MOBILE
 
 // Tests ParseInt32Flag().
 
 // Tests that ParseInt32Flag() returns false and doesn't change the
  // output value when the flag has the wrong format.
 TEST(ParseInt32FlagTest, ReturnsFalseForInvalidFlag) {
   Int32 value = 123;
   EXPECT_FALSE(ParseInt32Flag("--a=100", "b", &value));
   EXPECT_EQ(123, value);
 
   EXPECT_FALSE(ParseInt32Flag("a=100", "a", &value));
   EXPECT_EQ(123, value);
 }
 
 // Tests that ParseInt32Flag() returns false and doesn't change the
 // output value when the flag overflows as an Int32.
 TEST(ParseInt32FlagTest, ReturnsDefaultWhenValueOverflows) {
   printf("(expecting 2 warnings)\n");
 
   Int32 value = 123;
   EXPECT_FALSE(ParseInt32Flag("--abc=12345678987654321", "abc", &value));
   EXPECT_EQ(123, value);
 
   EXPECT_FALSE(ParseInt32Flag("--abc=-12345678987654321", "abc", &value));
   EXPECT_EQ(123, value);
 }
 
 // Tests that ParseInt32Flag() returns false and doesn't change the
 // output value when the flag does not represent a valid decimal
 // integer.
 TEST(ParseInt32FlagTest, ReturnsDefaultWhenValueIsInvalid) {
   printf("(expecting 2 warnings)\n");
 
   Int32 value = 123;
   EXPECT_FALSE(ParseInt32Flag("--abc=A1", "abc", &value));
   EXPECT_EQ(123, value);
 
   EXPECT_FALSE(ParseInt32Flag("--abc=12X", "abc", &value));
   EXPECT_EQ(123, value);
 }
 
 // Tests that ParseInt32Flag() parses the value of the flag and
 // returns true when the flag represents a valid decimal integer in
 // the range of an Int32.
 TEST(ParseInt32FlagTest, ParsesAndReturnsValidValue) {
   Int32 value = 123;
   EXPECT_TRUE(ParseInt32Flag("--" GTEST_FLAG_PREFIX_ "abc=456", "abc", &value));
   EXPECT_EQ(456, value);
 
   EXPECT_TRUE(ParseInt32Flag("--" GTEST_FLAG_PREFIX_ "abc=-789",
                              "abc", &value));
   EXPECT_EQ(-789, value);
 }
 
 // Tests that Int32FromEnvOrDie() parses the value of the var or
 // returns the correct default.
 // Environment variables are not supported on Windows CE.
 #if !GTEST_OS_WINDOWS_MOBILE
 TEST(Int32FromEnvOrDieTest, ParsesAndReturnsValidValue) {
   EXPECT_EQ(333, Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "UnsetVar", 333));
   SetEnv(GTEST_FLAG_PREFIX_UPPER_ "UnsetVar", "123");
   EXPECT_EQ(123, Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "UnsetVar", 333));
   SetEnv(GTEST_FLAG_PREFIX_UPPER_ "UnsetVar", "-123");
   EXPECT_EQ(-123, Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "UnsetVar", 333));
 }
 #endif  // !GTEST_OS_WINDOWS_MOBILE
 
 // Tests that Int32FromEnvOrDie() aborts with an error message
 // if the variable is not an Int32.
 TEST(Int32FromEnvOrDieDeathTest, AbortsOnFailure) {
   SetEnv(GTEST_FLAG_PREFIX_UPPER_ "VAR", "xxx");
   EXPECT_DEATH_IF_SUPPORTED(
       Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "VAR", 123),
       ".*");
 }
 
 // Tests that Int32FromEnvOrDie() aborts with an error message
 // if the variable cannot be represented by an Int32.
 TEST(Int32FromEnvOrDieDeathTest, AbortsOnInt32Overflow) {
   SetEnv(GTEST_FLAG_PREFIX_UPPER_ "VAR", "1234567891234567891234");
   EXPECT_DEATH_IF_SUPPORTED(
       Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "VAR", 123),
       ".*");
 }
 
 // Tests that ShouldRunTestOnShard() selects all tests
  // when there is only one shard.
 TEST(ShouldRunTestOnShardTest, IsPartitionWhenThereIsOneShard) {
   EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 0));
   EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 1));
   EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 2));
   EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 3));
   EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 4));
 }
 
 class ShouldShardTest : public testing::Test {
  protected:
   virtual void SetUp() {
     index_var_ = GTEST_FLAG_PREFIX_UPPER_ "INDEX";
     total_var_ = GTEST_FLAG_PREFIX_UPPER_ "TOTAL";
   }
 
   virtual void TearDown() {
     SetEnv(index_var_, "");
     SetEnv(total_var_, "");
   }
 
   const char* index_var_;
   const char* total_var_;
 };
 
  // Tests that sharding is disabled if neither of the environment variables
  // is set.
 TEST_F(ShouldShardTest, ReturnsFalseWhenNeitherEnvVarIsSet) {
   SetEnv(index_var_, "");
   SetEnv(total_var_, "");
 
   EXPECT_FALSE(ShouldShard(total_var_, index_var_, false));
   EXPECT_FALSE(ShouldShard(total_var_, index_var_, true));
 }
 
  // Tests that sharding is not enabled if total_shards == 1.
 TEST_F(ShouldShardTest, ReturnsFalseWhenTotalShardIsOne) {
   SetEnv(index_var_, "0");
   SetEnv(total_var_, "1");
   EXPECT_FALSE(ShouldShard(total_var_, index_var_, false));
   EXPECT_FALSE(ShouldShard(total_var_, index_var_, true));
 }
 
 // Tests that sharding is enabled if total_shards > 1 and
 // we are not in a death test subprocess.
 // Environment variables are not supported on Windows CE.
 #if !GTEST_OS_WINDOWS_MOBILE
 TEST_F(ShouldShardTest, WorksWhenShardEnvVarsAreValid) {
   SetEnv(index_var_, "4");
   SetEnv(total_var_, "22");
   EXPECT_TRUE(ShouldShard(total_var_, index_var_, false));
   EXPECT_FALSE(ShouldShard(total_var_, index_var_, true));
 
   SetEnv(index_var_, "8");
   SetEnv(total_var_, "9");
   EXPECT_TRUE(ShouldShard(total_var_, index_var_, false));
   EXPECT_FALSE(ShouldShard(total_var_, index_var_, true));
 
   SetEnv(index_var_, "0");
   SetEnv(total_var_, "9");
   EXPECT_TRUE(ShouldShard(total_var_, index_var_, false));
   EXPECT_FALSE(ShouldShard(total_var_, index_var_, true));
 }
 #endif  // !GTEST_OS_WINDOWS_MOBILE
 
 // Tests that we exit in error if the sharding values are not valid.
 
 typedef ShouldShardTest ShouldShardDeathTest;
 
 TEST_F(ShouldShardDeathTest, AbortsWhenShardingEnvVarsAreInvalid) {
   SetEnv(index_var_, "4");
   SetEnv(total_var_, "4");
   EXPECT_DEATH_IF_SUPPORTED(ShouldShard(total_var_, index_var_, false), ".*");
 
   SetEnv(index_var_, "4");
   SetEnv(total_var_, "-2");
   EXPECT_DEATH_IF_SUPPORTED(ShouldShard(total_var_, index_var_, false), ".*");
 
   SetEnv(index_var_, "5");
   SetEnv(total_var_, "");
   EXPECT_DEATH_IF_SUPPORTED(ShouldShard(total_var_, index_var_, false), ".*");
 
   SetEnv(index_var_, "");
   SetEnv(total_var_, "5");
   EXPECT_DEATH_IF_SUPPORTED(ShouldShard(total_var_, index_var_, false), ".*");
 }
 
  // Tests that ShouldRunTestOnShard() partitions the tests when 5
  // shards are used.
 TEST(ShouldRunTestOnShardTest, IsPartitionWhenThereAreFiveShards) {
   // Choose an arbitrary number of tests and shards.
   const int num_tests = 17;
   const int num_shards = 5;
 
   // Check partitioning: each test should be on exactly 1 shard.
   for (int test_id = 0; test_id < num_tests; test_id++) {
     int prev_selected_shard_index = -1;
     for (int shard_index = 0; shard_index < num_shards; shard_index++) {
       if (ShouldRunTestOnShard(num_shards, shard_index, test_id)) {
         if (prev_selected_shard_index < 0) {
           prev_selected_shard_index = shard_index;
         } else {
           ADD_FAILURE() << "Shard " << prev_selected_shard_index << " and "
             << shard_index << " are both selected to run test " << test_id;
         }
       }
     }
   }
 
   // Check balance: This is not required by the sharding protocol, but is a
   // desirable property for performance.
   for (int shard_index = 0; shard_index < num_shards; shard_index++) {
     int num_tests_on_shard = 0;
     for (int test_id = 0; test_id < num_tests; test_id++) {
       num_tests_on_shard +=
         ShouldRunTestOnShard(num_shards, shard_index, test_id);
     }
     EXPECT_GE(num_tests_on_shard, num_tests / num_shards);
   }
 }
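
  // For reference, a minimal conceptual sketch of a shard assignment that
  // would satisfy both the partition and the balance checks above.  This is
  // only an illustration (HypotheticalShardAssignment is a made-up name),
  // not necessarily how ShouldRunTestOnShard() is implemented:
  //
  //   bool HypotheticalShardAssignment(int total_shards, int shard_index,
  //                                    int test_id) {
  //     // Round-robin by test id: each test lands on exactly one shard, and
  //     // shard sizes differ by at most one test.
  //     return (test_id % total_shards) == shard_index;
  //   }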
 
 // For the same reason we are not explicitly testing everything in the
 // Test class, there are no separate tests for the following classes
 // (except for some trivial cases):
 //
 //   TestCase, UnitTest, UnitTestResultPrinter.
 //
 // Similarly, there are no separate tests for the following macros:
 //
 //   TEST, TEST_F, RUN_ALL_TESTS
 
 TEST(UnitTestTest, CanGetOriginalWorkingDir) {
   ASSERT_TRUE(UnitTest::GetInstance()->original_working_dir() != NULL);
   EXPECT_STRNE(UnitTest::GetInstance()->original_working_dir(), "");
 }
 
 TEST(UnitTestTest, ReturnsPlausibleTimestamp) {
   EXPECT_LT(0, UnitTest::GetInstance()->start_timestamp());
   EXPECT_LE(UnitTest::GetInstance()->start_timestamp(), GetTimeInMillis());
 }
 
  // When a property with a reserved key is supplied to this function, it
  // verifies that a non-fatal failure is added, that no fatal failure is
  // added, and that the property is not recorded.
 void ExpectNonFatalFailureRecordingPropertyWithReservedKey(
     const TestResult& test_result, const char* key) {
   EXPECT_NONFATAL_FAILURE(Test::RecordProperty(key, "1"), "Reserved key");
   ASSERT_EQ(0, test_result.test_property_count()) << "Property for key '" << key
                                                   << "' recorded unexpectedly.";
 }
 
 void ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(
     const char* key) {
   const TestInfo* test_info = UnitTest::GetInstance()->current_test_info();
   ASSERT_TRUE(test_info != NULL);
   ExpectNonFatalFailureRecordingPropertyWithReservedKey(*test_info->result(),
                                                         key);
 }
 
 void ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestCase(
     const char* key) {
   const TestCase* test_case = UnitTest::GetInstance()->current_test_case();
   ASSERT_TRUE(test_case != NULL);
   ExpectNonFatalFailureRecordingPropertyWithReservedKey(
       test_case->ad_hoc_test_result(), key);
 }
 
 void ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
     const char* key) {
   ExpectNonFatalFailureRecordingPropertyWithReservedKey(
       UnitTest::GetInstance()->ad_hoc_test_result(), key);
 }
 
  // Tests that the property-recording functions in UnitTest work correctly
  // outside of tests.  Creating a separate instance of UnitTest ensures it
  // is in a state similar to the UnitTest singleton's state between tests.
 class UnitTestRecordPropertyTest :
     public testing::internal::UnitTestRecordPropertyTestHelper {
  public:
   static void SetUpTestCase() {
     ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestCase(
         "disabled");
     ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestCase(
         "errors");
     ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestCase(
         "failures");
     ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestCase(
         "name");
     ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestCase(
         "tests");
     ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestCase(
         "time");
 
     Test::RecordProperty("test_case_key_1", "1");
     const TestCase* test_case = UnitTest::GetInstance()->current_test_case();
     ASSERT_TRUE(test_case != NULL);
 
     ASSERT_EQ(1, test_case->ad_hoc_test_result().test_property_count());
     EXPECT_STREQ("test_case_key_1",
                  test_case->ad_hoc_test_result().GetTestProperty(0).key());
     EXPECT_STREQ("1",
                  test_case->ad_hoc_test_result().GetTestProperty(0).value());
   }
 };
 
 // Tests TestResult has the expected property when added.
 TEST_F(UnitTestRecordPropertyTest, OnePropertyFoundWhenAdded) {
   UnitTestRecordProperty("key_1", "1");
 
   ASSERT_EQ(1, unit_test_.ad_hoc_test_result().test_property_count());
 
   EXPECT_STREQ("key_1",
                unit_test_.ad_hoc_test_result().GetTestProperty(0).key());
   EXPECT_STREQ("1",
                unit_test_.ad_hoc_test_result().GetTestProperty(0).value());
 }
 
 // Tests TestResult has multiple properties when added.
 TEST_F(UnitTestRecordPropertyTest, MultiplePropertiesFoundWhenAdded) {
   UnitTestRecordProperty("key_1", "1");
   UnitTestRecordProperty("key_2", "2");
 
   ASSERT_EQ(2, unit_test_.ad_hoc_test_result().test_property_count());
 
   EXPECT_STREQ("key_1",
                unit_test_.ad_hoc_test_result().GetTestProperty(0).key());
   EXPECT_STREQ("1", unit_test_.ad_hoc_test_result().GetTestProperty(0).value());
 
   EXPECT_STREQ("key_2",
                unit_test_.ad_hoc_test_result().GetTestProperty(1).key());
   EXPECT_STREQ("2", unit_test_.ad_hoc_test_result().GetTestProperty(1).value());
 }
 
 // Tests TestResult::RecordProperty() overrides values for duplicate keys.
 TEST_F(UnitTestRecordPropertyTest, OverridesValuesForDuplicateKeys) {
   UnitTestRecordProperty("key_1", "1");
   UnitTestRecordProperty("key_2", "2");
   UnitTestRecordProperty("key_1", "12");
   UnitTestRecordProperty("key_2", "22");
 
   ASSERT_EQ(2, unit_test_.ad_hoc_test_result().test_property_count());
 
   EXPECT_STREQ("key_1",
                unit_test_.ad_hoc_test_result().GetTestProperty(0).key());
   EXPECT_STREQ("12",
                unit_test_.ad_hoc_test_result().GetTestProperty(0).value());
 
   EXPECT_STREQ("key_2",
                unit_test_.ad_hoc_test_result().GetTestProperty(1).key());
   EXPECT_STREQ("22",
                unit_test_.ad_hoc_test_result().GetTestProperty(1).value());
 }
 
 TEST_F(UnitTestRecordPropertyTest,
        AddFailureInsideTestsWhenUsingTestCaseReservedKeys) {
   ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(
       "name");
   ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(
       "value_param");
   ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(
       "type_param");
   ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(
       "status");
   ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(
       "time");
   ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(
       "classname");
 }
 
 TEST_F(UnitTestRecordPropertyTest,
        AddRecordWithReservedKeysGeneratesCorrectPropertyList) {
   EXPECT_NONFATAL_FAILURE(
       Test::RecordProperty("name", "1"),
       "'classname', 'name', 'status', 'time', 'type_param', and 'value_param'"
       " are reserved");
 }
 
 class UnitTestRecordPropertyTestEnvironment : public Environment {
  public:
   virtual void TearDown() {
     ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
         "tests");
     ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
         "failures");
     ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
         "disabled");
     ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
         "errors");
     ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
         "name");
     ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
         "timestamp");
     ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
         "time");
     ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
         "random_seed");
   }
 };
 
 // This will test property recording outside of any test or test case.
 static Environment* record_property_env =
     AddGlobalTestEnvironment(new UnitTestRecordPropertyTestEnvironment);
 
 // This group of tests is for predicate assertions (ASSERT_PRED*, etc)
 // of various arities.  They do not attempt to be exhaustive.  Rather,
 // view them as smoke tests that can be easily reviewed and verified.
 // A more complete set of tests for predicate assertions can be found
 // in gtest_pred_impl_unittest.cc.
 
 // First, some predicates and predicate-formatters needed by the tests.
 
 // Returns true iff the argument is an even number.
 bool IsEven(int n) {
   return (n % 2) == 0;
 }
 
 // A functor that returns true iff the argument is an even number.
 struct IsEvenFunctor {
   bool operator()(int n) { return IsEven(n); }
 };
 
 // A predicate-formatter function that asserts the argument is an even
 // number.
 AssertionResult AssertIsEven(const char* expr, int n) {
   if (IsEven(n)) {
     return AssertionSuccess();
   }
 
   Message msg;
   msg << expr << " evaluates to " << n << ", which is not even.";
   return AssertionFailure(msg);
 }
 
 // A predicate function that returns AssertionResult for use in
 // EXPECT/ASSERT_TRUE/FALSE.
 AssertionResult ResultIsEven(int n) {
   if (IsEven(n))
     return AssertionSuccess() << n << " is even";
   else
     return AssertionFailure() << n << " is odd";
 }
 
 // A predicate function that returns AssertionResult but gives no
 // explanation why it succeeds. Needed for testing that
 // EXPECT/ASSERT_FALSE handles such functions correctly.
 AssertionResult ResultIsEvenNoExplanation(int n) {
   if (IsEven(n))
     return AssertionSuccess();
   else
     return AssertionFailure() << n << " is odd";
 }
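
  // A small usage sketch (an assumption based on how Google Test handles
  // AssertionResult-returning predicates in Boolean assertions):
  //
  //   EXPECT_TRUE(ResultIsEven(2));                // succeeds
  //   EXPECT_TRUE(ResultIsEven(3));                // fails, printing "3 is odd"
  //   EXPECT_FALSE(ResultIsEvenNoExplanation(3));  // succeeds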
 
 // A predicate-formatter functor that asserts the argument is an even
 // number.
 struct AssertIsEvenFunctor {
   AssertionResult operator()(const char* expr, int n) {
     return AssertIsEven(expr, n);
   }
 };
 
 // Returns true iff the sum of the arguments is an even number.
 bool SumIsEven2(int n1, int n2) {
   return IsEven(n1 + n2);
 }
 
 // A functor that returns true iff the sum of the arguments is an even
 // number.
 struct SumIsEven3Functor {
   bool operator()(int n1, int n2, int n3) {
     return IsEven(n1 + n2 + n3);
   }
 };
 
 // A predicate-formatter function that asserts the sum of the
 // arguments is an even number.
 AssertionResult AssertSumIsEven4(
     const char* e1, const char* e2, const char* e3, const char* e4,
     int n1, int n2, int n3, int n4) {
   const int sum = n1 + n2 + n3 + n4;
   if (IsEven(sum)) {
     return AssertionSuccess();
   }
 
   Message msg;
   msg << e1 << " + " << e2 << " + " << e3 << " + " << e4
       << " (" << n1 << " + " << n2 << " + " << n3 << " + " << n4
       << ") evaluates to " << sum << ", which is not even.";
   return AssertionFailure(msg);
 }
 
 // A predicate-formatter functor that asserts the sum of the arguments
 // is an even number.
 struct AssertSumIsEven5Functor {
   AssertionResult operator()(
       const char* e1, const char* e2, const char* e3, const char* e4,
       const char* e5, int n1, int n2, int n3, int n4, int n5) {
     const int sum = n1 + n2 + n3 + n4 + n5;
     if (IsEven(sum)) {
       return AssertionSuccess();
     }
 
     Message msg;
     msg << e1 << " + " << e2 << " + " << e3 << " + " << e4 << " + " << e5
         << " ("
         << n1 << " + " << n2 << " + " << n3 << " + " << n4 << " + " << n5
         << ") evaluates to " << sum << ", which is not even.";
     return AssertionFailure(msg);
   }
 };
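
  // A note on the formatter signatures above: Google Test invokes
  // *_PRED_FORMAT* formatters with the stringified argument expressions
  // (e1..e5) as well as the evaluated values (n1..n5), which is what lets
  // them name the offending expressions in their failure messages.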
 
 
 // Tests unary predicate assertions.
 
 // Tests unary predicate assertions that don't use a custom formatter.
 TEST(Pred1Test, WithoutFormat) {
   // Success cases.
   EXPECT_PRED1(IsEvenFunctor(), 2) << "This failure is UNEXPECTED!";
   ASSERT_PRED1(IsEven, 4);
 
   // Failure cases.
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_PRED1(IsEven, 5) << "This failure is expected.";
   }, "This failure is expected.");
   EXPECT_FATAL_FAILURE(ASSERT_PRED1(IsEvenFunctor(), 5),
                        "evaluates to false");
 }
 
 // Tests unary predicate assertions that use a custom formatter.
 TEST(Pred1Test, WithFormat) {
   // Success cases.
   EXPECT_PRED_FORMAT1(AssertIsEven, 2);
   ASSERT_PRED_FORMAT1(AssertIsEvenFunctor(), 4)
     << "This failure is UNEXPECTED!";
 
   // Failure cases.
   const int n = 5;
   EXPECT_NONFATAL_FAILURE(EXPECT_PRED_FORMAT1(AssertIsEvenFunctor(), n),
                           "n evaluates to 5, which is not even.");
   EXPECT_FATAL_FAILURE({  // NOLINT
     ASSERT_PRED_FORMAT1(AssertIsEven, 5) << "This failure is expected.";
   }, "This failure is expected.");
 }
 
  // Tests that unary predicate assertions evaluate their arguments
 // exactly once.
 TEST(Pred1Test, SingleEvaluationOnFailure) {
   // A success case.
   static int n = 0;
   EXPECT_PRED1(IsEven, n++);
   EXPECT_EQ(1, n) << "The argument is not evaluated exactly once.";
 
   // A failure case.
   EXPECT_FATAL_FAILURE({  // NOLINT
     ASSERT_PRED_FORMAT1(AssertIsEvenFunctor(), n++)
         << "This failure is expected.";
   }, "This failure is expected.");
   EXPECT_EQ(2, n) << "The argument is not evaluated exactly once.";
 }
 
 
 // Tests predicate assertions whose arity is >= 2.
 
 // Tests predicate assertions that don't use a custom formatter.
 TEST(PredTest, WithoutFormat) {
   // Success cases.
   ASSERT_PRED2(SumIsEven2, 2, 4) << "This failure is UNEXPECTED!";
   EXPECT_PRED3(SumIsEven3Functor(), 4, 6, 8);
 
   // Failure cases.
   const int n1 = 1;
   const int n2 = 2;
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_PRED2(SumIsEven2, n1, n2) << "This failure is expected.";
   }, "This failure is expected.");
   EXPECT_FATAL_FAILURE({  // NOLINT
     ASSERT_PRED3(SumIsEven3Functor(), 1, 2, 4);
   }, "evaluates to false");
 }
 
 // Tests predicate assertions that use a custom formatter.
 TEST(PredTest, WithFormat) {
   // Success cases.
   ASSERT_PRED_FORMAT4(AssertSumIsEven4, 4, 6, 8, 10) <<
     "This failure is UNEXPECTED!";
   EXPECT_PRED_FORMAT5(AssertSumIsEven5Functor(), 2, 4, 6, 8, 10);
 
   // Failure cases.
   const int n1 = 1;
   const int n2 = 2;
   const int n3 = 4;
   const int n4 = 6;
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_PRED_FORMAT4(AssertSumIsEven4, n1, n2, n3, n4);
   }, "evaluates to 13, which is not even.");
   EXPECT_FATAL_FAILURE({  // NOLINT
     ASSERT_PRED_FORMAT5(AssertSumIsEven5Functor(), 1, 2, 4, 6, 8)
         << "This failure is expected.";
   }, "This failure is expected.");
 }
 
  // Tests that predicate assertions evaluate their arguments
 // exactly once.
 TEST(PredTest, SingleEvaluationOnFailure) {
   // A success case.
   int n1 = 0;
   int n2 = 0;
   EXPECT_PRED2(SumIsEven2, n1++, n2++);
   EXPECT_EQ(1, n1) << "Argument 1 is not evaluated exactly once.";
   EXPECT_EQ(1, n2) << "Argument 2 is not evaluated exactly once.";
 
   // Another success case.
   n1 = n2 = 0;
   int n3 = 0;
   int n4 = 0;
   int n5 = 0;
   ASSERT_PRED_FORMAT5(AssertSumIsEven5Functor(),
                       n1++, n2++, n3++, n4++, n5++)
                         << "This failure is UNEXPECTED!";
   EXPECT_EQ(1, n1) << "Argument 1 is not evaluated exactly once.";
   EXPECT_EQ(1, n2) << "Argument 2 is not evaluated exactly once.";
   EXPECT_EQ(1, n3) << "Argument 3 is not evaluated exactly once.";
   EXPECT_EQ(1, n4) << "Argument 4 is not evaluated exactly once.";
   EXPECT_EQ(1, n5) << "Argument 5 is not evaluated exactly once.";
 
   // A failure case.
   n1 = n2 = n3 = 0;
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_PRED3(SumIsEven3Functor(), ++n1, n2++, n3++)
         << "This failure is expected.";
   }, "This failure is expected.");
   EXPECT_EQ(1, n1) << "Argument 1 is not evaluated exactly once.";
   EXPECT_EQ(1, n2) << "Argument 2 is not evaluated exactly once.";
   EXPECT_EQ(1, n3) << "Argument 3 is not evaluated exactly once.";
 
   // Another failure case.
   n1 = n2 = n3 = n4 = 0;
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_PRED_FORMAT4(AssertSumIsEven4, ++n1, n2++, n3++, n4++);
   }, "evaluates to 1, which is not even.");
   EXPECT_EQ(1, n1) << "Argument 1 is not evaluated exactly once.";
   EXPECT_EQ(1, n2) << "Argument 2 is not evaluated exactly once.";
   EXPECT_EQ(1, n3) << "Argument 3 is not evaluated exactly once.";
   EXPECT_EQ(1, n4) << "Argument 4 is not evaluated exactly once.";
 }
 
 
 // Some helper functions for testing using overloaded/template
 // functions with ASSERT_PREDn and EXPECT_PREDn.
 
 bool IsPositive(double x) {
   return x > 0;
 }
 
 template <typename T>
 bool IsNegative(T x) {
   return x < 0;
 }
 
 template <typename T1, typename T2>
 bool GreaterThan(T1 x1, T2 x2) {
   return x1 > x2;
 }
 
 // Tests that overloaded functions can be used in *_PRED* as long as
 // their types are explicitly specified.
 TEST(PredicateAssertionTest, AcceptsOverloadedFunction) {
   // C++Builder requires C-style casts rather than static_cast.
   EXPECT_PRED1((bool (*)(int))(IsPositive), 5);  // NOLINT
   ASSERT_PRED1((bool (*)(double))(IsPositive), 6.0);  // NOLINT
 }
 
 // Tests that template functions can be used in *_PRED* as long as
 // their types are explicitly specified.
 TEST(PredicateAssertionTest, AcceptsTemplateFunction) {
   EXPECT_PRED1(IsNegative<int>, -5);
   // Makes sure that we can handle templates with more than one
   // parameter.
   ASSERT_PRED2((GreaterThan<int, int>), 5, 0);
 }
 
 
 // Some helper functions for testing using overloaded/template
 // functions with ASSERT_PRED_FORMATn and EXPECT_PRED_FORMATn.
 
 AssertionResult IsPositiveFormat(const char* /* expr */, int n) {
   return n > 0 ? AssertionSuccess() :
       AssertionFailure(Message() << "Failure");
 }
 
 AssertionResult IsPositiveFormat(const char* /* expr */, double x) {
   return x > 0 ? AssertionSuccess() :
       AssertionFailure(Message() << "Failure");
 }
 
 template <typename T>
 AssertionResult IsNegativeFormat(const char* /* expr */, T x) {
   return x < 0 ? AssertionSuccess() :
       AssertionFailure(Message() << "Failure");
 }
 
 template <typename T1, typename T2>
 AssertionResult EqualsFormat(const char* /* expr1 */, const char* /* expr2 */,
                              const T1& x1, const T2& x2) {
   return x1 == x2 ? AssertionSuccess() :
       AssertionFailure(Message() << "Failure");
 }
 
 // Tests that overloaded functions can be used in *_PRED_FORMAT*
 // without explicitly specifying their types.
 TEST(PredicateFormatAssertionTest, AcceptsOverloadedFunction) {
   EXPECT_PRED_FORMAT1(IsPositiveFormat, 5);
   ASSERT_PRED_FORMAT1(IsPositiveFormat, 6.0);
 }
 
 // Tests that template functions can be used in *_PRED_FORMAT* without
 // explicitly specifying their types.
 TEST(PredicateFormatAssertionTest, AcceptsTemplateFunction) {
   EXPECT_PRED_FORMAT1(IsNegativeFormat, -5);
   ASSERT_PRED_FORMAT2(EqualsFormat, 3, 3);
 }
 
 
 // Tests string assertions.
 
 // Tests ASSERT_STREQ with non-NULL arguments.
 TEST(StringAssertionTest, ASSERT_STREQ) {
   const char * const p1 = "good";
   ASSERT_STREQ(p1, p1);
 
   // Let p2 have the same content as p1, but be at a different address.
   const char p2[] = "good";
   ASSERT_STREQ(p1, p2);
 
   EXPECT_FATAL_FAILURE(ASSERT_STREQ("bad", "good"),
                        "  \"bad\"\n  \"good\"");
 }
 
 // Tests ASSERT_STREQ with NULL arguments.
 TEST(StringAssertionTest, ASSERT_STREQ_Null) {
   ASSERT_STREQ(static_cast<const char *>(NULL), NULL);
   EXPECT_FATAL_FAILURE(ASSERT_STREQ(NULL, "non-null"),
                        "non-null");
 }
 
 // Tests ASSERT_STREQ with NULL arguments.
 TEST(StringAssertionTest, ASSERT_STREQ_Null2) {
   EXPECT_FATAL_FAILURE(ASSERT_STREQ("non-null", NULL),
                        "non-null");
 }
 
 // Tests ASSERT_STRNE.
 TEST(StringAssertionTest, ASSERT_STRNE) {
   ASSERT_STRNE("hi", "Hi");
   ASSERT_STRNE("Hi", NULL);
   ASSERT_STRNE(NULL, "Hi");
   ASSERT_STRNE("", NULL);
   ASSERT_STRNE(NULL, "");
   ASSERT_STRNE("", "Hi");
   ASSERT_STRNE("Hi", "");
   EXPECT_FATAL_FAILURE(ASSERT_STRNE("Hi", "Hi"),
                        "\"Hi\" vs \"Hi\"");
 }
 
 // Tests ASSERT_STRCASEEQ.
 TEST(StringAssertionTest, ASSERT_STRCASEEQ) {
   ASSERT_STRCASEEQ("hi", "Hi");
   ASSERT_STRCASEEQ(static_cast<const char *>(NULL), NULL);
 
   ASSERT_STRCASEEQ("", "");
   EXPECT_FATAL_FAILURE(ASSERT_STRCASEEQ("Hi", "hi2"),
                        "Ignoring case");
 }
 
 // Tests ASSERT_STRCASENE.
 TEST(StringAssertionTest, ASSERT_STRCASENE) {
   ASSERT_STRCASENE("hi1", "Hi2");
   ASSERT_STRCASENE("Hi", NULL);
   ASSERT_STRCASENE(NULL, "Hi");
   ASSERT_STRCASENE("", NULL);
   ASSERT_STRCASENE(NULL, "");
   ASSERT_STRCASENE("", "Hi");
   ASSERT_STRCASENE("Hi", "");
   EXPECT_FATAL_FAILURE(ASSERT_STRCASENE("Hi", "hi"),
                        "(ignoring case)");
 }
 
 // Tests *_STREQ on wide strings.
 TEST(StringAssertionTest, STREQ_Wide) {
   // NULL strings.
   ASSERT_STREQ(static_cast<const wchar_t *>(NULL), NULL);
 
   // Empty strings.
   ASSERT_STREQ(L"", L"");
 
   // Non-null vs NULL.
   EXPECT_NONFATAL_FAILURE(EXPECT_STREQ(L"non-null", NULL),
                           "non-null");
 
   // Equal strings.
   EXPECT_STREQ(L"Hi", L"Hi");
 
   // Unequal strings.
   EXPECT_NONFATAL_FAILURE(EXPECT_STREQ(L"abc", L"Abc"),
                           "Abc");
 
   // Strings containing wide characters.
   EXPECT_NONFATAL_FAILURE(EXPECT_STREQ(L"abc\x8119", L"abc\x8120"),
                           "abc");
 
   // The streaming variation.
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_STREQ(L"abc\x8119", L"abc\x8121") << "Expected failure";
   }, "Expected failure");
 }
 
 // Tests *_STRNE on wide strings.
 TEST(StringAssertionTest, STRNE_Wide) {
   // NULL strings.
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_STRNE(static_cast<const wchar_t *>(NULL), NULL);
   }, "");
 
   // Empty strings.
   EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L"", L""),
                           "L\"\"");
 
   // Non-null vs NULL.
   ASSERT_STRNE(L"non-null", NULL);
 
   // Equal strings.
   EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L"Hi", L"Hi"),
                           "L\"Hi\"");
 
   // Unequal strings.
   EXPECT_STRNE(L"abc", L"Abc");
 
   // Strings containing wide characters.
   EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L"abc\x8119", L"abc\x8119"),
                           "abc");
 
   // The streaming variation.
   ASSERT_STRNE(L"abc\x8119", L"abc\x8120") << "This shouldn't happen";
 }
 
 // Tests for ::testing::IsSubstring().
 
 // Tests that IsSubstring() returns the correct result when the input
 // argument type is const char*.
 TEST(IsSubstringTest, ReturnsCorrectResultForCString) {
   EXPECT_FALSE(IsSubstring("", "", NULL, "a"));
   EXPECT_FALSE(IsSubstring("", "", "b", NULL));
   EXPECT_FALSE(IsSubstring("", "", "needle", "haystack"));
 
   EXPECT_TRUE(IsSubstring("", "", static_cast<const char*>(NULL), NULL));
   EXPECT_TRUE(IsSubstring("", "", "needle", "two needles"));
 }
 
 // Tests that IsSubstring() returns the correct result when the input
 // argument type is const wchar_t*.
 TEST(IsSubstringTest, ReturnsCorrectResultForWideCString) {
   EXPECT_FALSE(IsSubstring("", "", kNull, L"a"));
   EXPECT_FALSE(IsSubstring("", "", L"b", kNull));
   EXPECT_FALSE(IsSubstring("", "", L"needle", L"haystack"));
 
   EXPECT_TRUE(IsSubstring("", "", static_cast<const wchar_t*>(NULL), NULL));
   EXPECT_TRUE(IsSubstring("", "", L"needle", L"two needles"));
 }
 
 // Tests that IsSubstring() generates the correct message when the input
 // argument type is const char*.
 TEST(IsSubstringTest, GeneratesCorrectMessageForCString) {
   EXPECT_STREQ("Value of: needle_expr\n"
                "  Actual: \"needle\"\n"
                "Expected: a substring of haystack_expr\n"
                "Which is: \"haystack\"",
                IsSubstring("needle_expr", "haystack_expr",
                            "needle", "haystack").failure_message());
 }
 
 // Tests that IsSubstring returns the correct result when the input
 // argument type is ::std::string.
 TEST(IsSubstringTest, ReturnsCorrectResultsForStdString) {
   EXPECT_TRUE(IsSubstring("", "", std::string("hello"), "ahellob"));
   EXPECT_FALSE(IsSubstring("", "", "hello", std::string("world")));
 }
 
 #if GTEST_HAS_STD_WSTRING
 // Tests that IsSubstring returns the correct result when the input
 // argument type is ::std::wstring.
 TEST(IsSubstringTest, ReturnsCorrectResultForStdWstring) {
   EXPECT_TRUE(IsSubstring("", "", ::std::wstring(L"needle"), L"two needles"));
   EXPECT_FALSE(IsSubstring("", "", L"needle", ::std::wstring(L"haystack")));
 }
 
 // Tests that IsSubstring() generates the correct message when the input
 // argument type is ::std::wstring.
 TEST(IsSubstringTest, GeneratesCorrectMessageForWstring) {
   EXPECT_STREQ("Value of: needle_expr\n"
                "  Actual: L\"needle\"\n"
                "Expected: a substring of haystack_expr\n"
                "Which is: L\"haystack\"",
                IsSubstring(
                    "needle_expr", "haystack_expr",
                    ::std::wstring(L"needle"), L"haystack").failure_message());
 }
 
 #endif  // GTEST_HAS_STD_WSTRING
 
 // Tests for ::testing::IsNotSubstring().
 
 // Tests that IsNotSubstring() returns the correct result when the input
 // argument type is const char*.
 TEST(IsNotSubstringTest, ReturnsCorrectResultForCString) {
   EXPECT_TRUE(IsNotSubstring("", "", "needle", "haystack"));
   EXPECT_FALSE(IsNotSubstring("", "", "needle", "two needles"));
 }
 
 // Tests that IsNotSubstring() returns the correct result when the input
 // argument type is const wchar_t*.
 TEST(IsNotSubstringTest, ReturnsCorrectResultForWideCString) {
   EXPECT_TRUE(IsNotSubstring("", "", L"needle", L"haystack"));
   EXPECT_FALSE(IsNotSubstring("", "", L"needle", L"two needles"));
 }
 
 // Tests that IsNotSubstring() generates the correct message when the input
 // argument type is const wchar_t*.
 TEST(IsNotSubstringTest, GeneratesCorrectMessageForWideCString) {
   EXPECT_STREQ("Value of: needle_expr\n"
                "  Actual: L\"needle\"\n"
                "Expected: not a substring of haystack_expr\n"
                "Which is: L\"two needles\"",
                IsNotSubstring(
                    "needle_expr", "haystack_expr",
                    L"needle", L"two needles").failure_message());
 }
 
 // Tests that IsNotSubstring returns the correct result when the input
 // argument type is ::std::string.
 TEST(IsNotSubstringTest, ReturnsCorrectResultsForStdString) {
   EXPECT_FALSE(IsNotSubstring("", "", std::string("hello"), "ahellob"));
   EXPECT_TRUE(IsNotSubstring("", "", "hello", std::string("world")));
 }
 
 // Tests that IsNotSubstring() generates the correct message when the input
 // argument type is ::std::string.
 TEST(IsNotSubstringTest, GeneratesCorrectMessageForStdString) {
   EXPECT_STREQ("Value of: needle_expr\n"
                "  Actual: \"needle\"\n"
                "Expected: not a substring of haystack_expr\n"
                "Which is: \"two needles\"",
                IsNotSubstring(
                    "needle_expr", "haystack_expr",
                    ::std::string("needle"), "two needles").failure_message());
 }
 
 #if GTEST_HAS_STD_WSTRING
 
 // Tests that IsNotSubstring returns the correct result when the input
 // argument type is ::std::wstring.
 TEST(IsNotSubstringTest, ReturnsCorrectResultForStdWstring) {
   EXPECT_FALSE(
       IsNotSubstring("", "", ::std::wstring(L"needle"), L"two needles"));
   EXPECT_TRUE(IsNotSubstring("", "", L"needle", ::std::wstring(L"haystack")));
 }
 
 #endif  // GTEST_HAS_STD_WSTRING
 
 // Tests floating-point assertions.
 
 template <typename RawType>
 class FloatingPointTest : public Test {
  protected:
   // Pre-calculated numbers to be used by the tests.
   struct TestValues {
     RawType close_to_positive_zero;
     RawType close_to_negative_zero;
     RawType further_from_negative_zero;
 
     RawType close_to_one;
     RawType further_from_one;
 
     RawType infinity;
     RawType close_to_infinity;
     RawType further_from_infinity;
 
     RawType nan1;
     RawType nan2;
   };
 
   typedef typename testing::internal::FloatingPoint<RawType> Floating;
   typedef typename Floating::Bits Bits;
 
   virtual void SetUp() {
     const size_t max_ulps = Floating::kMaxUlps;
 
     // The bits that represent 0.0.
     const Bits zero_bits = Floating(0).bits();
 
     // Makes some numbers close to 0.0.
     values_.close_to_positive_zero = Floating::ReinterpretBits(
         zero_bits + max_ulps/2);
     values_.close_to_negative_zero = -Floating::ReinterpretBits(
         zero_bits + max_ulps - max_ulps/2);
     values_.further_from_negative_zero = -Floating::ReinterpretBits(
         zero_bits + max_ulps + 1 - max_ulps/2);
 
     // The bits that represent 1.0.
     const Bits one_bits = Floating(1).bits();
 
     // Makes some numbers close to 1.0.
     values_.close_to_one = Floating::ReinterpretBits(one_bits + max_ulps);
     values_.further_from_one = Floating::ReinterpretBits(
         one_bits + max_ulps + 1);
 
     // +infinity.
     values_.infinity = Floating::Infinity();
 
     // The bits that represent +infinity.
     const Bits infinity_bits = Floating(values_.infinity).bits();
 
     // Makes some numbers close to infinity.
     values_.close_to_infinity = Floating::ReinterpretBits(
         infinity_bits - max_ulps);
     values_.further_from_infinity = Floating::ReinterpretBits(
         infinity_bits - max_ulps - 1);
 
      // Makes some NaNs.  Sets the most significant bit of the fraction so that
      // our NaNs are quiet; trying to process a signaling NaN would raise an
      // exception if our environment enables floating-point exceptions.
     values_.nan1 = Floating::ReinterpretBits(Floating::kExponentBitMask
         | (static_cast<Bits>(1) << (Floating::kFractionBitCount - 1)) | 1);
     values_.nan2 = Floating::ReinterpretBits(Floating::kExponentBitMask
         | (static_cast<Bits>(1) << (Floating::kFractionBitCount - 1)) | 200);
   }
 
   void TestSize() {
     EXPECT_EQ(sizeof(RawType), sizeof(Bits));
   }
 
   static TestValues values_;
 };
 
 template <typename RawType>
 typename FloatingPointTest<RawType>::TestValues
     FloatingPointTest<RawType>::values_;
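
  // Note: kMaxUlps is the maximum number of units in the last place (ULPs)
  // by which two values may differ and still be treated as equal by
  // *_FLOAT_EQ / *_DOUBLE_EQ.  The close_to_* / further_from_* values above
  // are constructed to sit just inside / just outside that tolerance for the
  // comparisons performed in the tests below.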
 
 // Instantiates FloatingPointTest for testing *_FLOAT_EQ.
 typedef FloatingPointTest<float> FloatTest;
 
 // Tests that the size of Float::Bits matches the size of float.
 TEST_F(FloatTest, Size) {
   TestSize();
 }
 
 // Tests comparing with +0 and -0.
 TEST_F(FloatTest, Zeros) {
   EXPECT_FLOAT_EQ(0.0, -0.0);
   EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(-0.0, 1.0),
                           "1.0");
   EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(0.0, 1.5),
                        "1.5");
 }
 
 // Tests comparing numbers close to 0.
 //
 // This ensures that *_FLOAT_EQ handles the sign correctly and no
 // overflow occurs when comparing numbers whose absolute value is very
 // small.
 TEST_F(FloatTest, AlmostZeros) {
   // In C++Builder, names within local classes (such as used by
   // EXPECT_FATAL_FAILURE) cannot be resolved against static members of the
   // scoping class.  Use a static local alias as a workaround.
   // We use the assignment syntax since some compilers, like Sun Studio,
   // don't allow initializing references using construction syntax
   // (parentheses).
   static const FloatTest::TestValues& v = this->values_;
 
   EXPECT_FLOAT_EQ(0.0, v.close_to_positive_zero);
   EXPECT_FLOAT_EQ(-0.0, v.close_to_negative_zero);
   EXPECT_FLOAT_EQ(v.close_to_positive_zero, v.close_to_negative_zero);
 
   EXPECT_FATAL_FAILURE({  // NOLINT
     ASSERT_FLOAT_EQ(v.close_to_positive_zero,
                     v.further_from_negative_zero);
   }, "v.further_from_negative_zero");
 }
 
 // Tests comparing numbers close to each other.
 TEST_F(FloatTest, SmallDiff) {
   EXPECT_FLOAT_EQ(1.0, values_.close_to_one);
   EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(1.0, values_.further_from_one),
                           "values_.further_from_one");
 }
 
 // Tests comparing numbers far apart.
 TEST_F(FloatTest, LargeDiff) {
   EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(2.5, 3.0),
                           "3.0");
 }
 
 // Tests comparing with infinity.
 //
 // This ensures that no overflow occurs when comparing numbers whose
 // absolute value is very large.
 TEST_F(FloatTest, Infinity) {
   EXPECT_FLOAT_EQ(values_.infinity, values_.close_to_infinity);
   EXPECT_FLOAT_EQ(-values_.infinity, -values_.close_to_infinity);
 #if !GTEST_OS_SYMBIAN
   // Nokia's STLport crashes if we try to output infinity or NaN.
   EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(values_.infinity, -values_.infinity),
                           "-values_.infinity");
 
   // This is interesting as the representations of infinity and nan1
    // are only 1 ULP apart.
   EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(values_.infinity, values_.nan1),
                           "values_.nan1");
 #endif  // !GTEST_OS_SYMBIAN
 }
 
 // Tests that comparing with NAN always returns false.
 TEST_F(FloatTest, NaN) {
 #if !GTEST_OS_SYMBIAN
 // Nokia's STLport crashes if we try to output infinity or NaN.
 
   // In C++Builder, names within local classes (such as used by
   // EXPECT_FATAL_FAILURE) cannot be resolved against static members of the
   // scoping class.  Use a static local alias as a workaround.
   // We use the assignment syntax since some compilers, like Sun Studio,
   // don't allow initializing references using construction syntax
   // (parentheses).
   static const FloatTest::TestValues& v = this->values_;
 
   EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(v.nan1, v.nan1),
                           "v.nan1");
   EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(v.nan1, v.nan2),
                           "v.nan2");
   EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(1.0, v.nan1),
                           "v.nan1");
 
   EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(v.nan1, v.infinity),
                        "v.infinity");
 #endif  // !GTEST_OS_SYMBIAN
 }
 
 // Tests that *_FLOAT_EQ are reflexive.
 TEST_F(FloatTest, Reflexive) {
   EXPECT_FLOAT_EQ(0.0, 0.0);
   EXPECT_FLOAT_EQ(1.0, 1.0);
   ASSERT_FLOAT_EQ(values_.infinity, values_.infinity);
 }
 
 // Tests that *_FLOAT_EQ are commutative.
 TEST_F(FloatTest, Commutative) {
   // We already tested EXPECT_FLOAT_EQ(1.0, values_.close_to_one).
   EXPECT_FLOAT_EQ(values_.close_to_one, 1.0);
 
   // We already tested EXPECT_FLOAT_EQ(1.0, values_.further_from_one).
   EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(values_.further_from_one, 1.0),
                           "1.0");
 }
 
 // Tests EXPECT_NEAR.
 TEST_F(FloatTest, EXPECT_NEAR) {
   EXPECT_NEAR(-1.0f, -1.1f, 0.2f);
   EXPECT_NEAR(2.0f, 3.0f, 1.0f);
   EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(1.0f,1.5f, 0.25f),  // NOLINT
                           "The difference between 1.0f and 1.5f is 0.5, "
                           "which exceeds 0.25f");
   // To work around a bug in gcc 2.95.0, there is intentionally no
   // space after the first comma in the previous line.
 }
 
 // Tests ASSERT_NEAR.
 TEST_F(FloatTest, ASSERT_NEAR) {
   ASSERT_NEAR(-1.0f, -1.1f, 0.2f);
   ASSERT_NEAR(2.0f, 3.0f, 1.0f);
   EXPECT_FATAL_FAILURE(ASSERT_NEAR(1.0f,1.5f, 0.25f),  // NOLINT
                        "The difference between 1.0f and 1.5f is 0.5, "
                        "which exceeds 0.25f");
   // To work around a bug in gcc 2.95.0, there is intentionally no
   // space after the first comma in the previous line.
 }
 
 // Tests the cases where FloatLE() should succeed.
 TEST_F(FloatTest, FloatLESucceeds) {
   EXPECT_PRED_FORMAT2(FloatLE, 1.0f, 2.0f);  // When val1 < val2,
   ASSERT_PRED_FORMAT2(FloatLE, 1.0f, 1.0f);  // val1 == val2,
 
    // or when val1 is greater than, but almost equal to, val2.
   EXPECT_PRED_FORMAT2(FloatLE, values_.close_to_positive_zero, 0.0f);
 }
 
 // Tests the cases where FloatLE() should fail.
 TEST_F(FloatTest, FloatLEFails) {
   // When val1 is greater than val2 by a large margin,
   EXPECT_NONFATAL_FAILURE(EXPECT_PRED_FORMAT2(FloatLE, 2.0f, 1.0f),
                           "(2.0f) <= (1.0f)");
 
   // or by a small yet non-negligible margin,
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_PRED_FORMAT2(FloatLE, values_.further_from_one, 1.0f);
   }, "(values_.further_from_one) <= (1.0f)");
 
 #if !GTEST_OS_SYMBIAN && !defined(__BORLANDC__)
   // Nokia's STLport crashes if we try to output infinity or NaN.
   // C++Builder gives bad results for ordered comparisons involving NaNs
   // due to compiler bugs.
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_PRED_FORMAT2(FloatLE, values_.nan1, values_.infinity);
   }, "(values_.nan1) <= (values_.infinity)");
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_PRED_FORMAT2(FloatLE, -values_.infinity, values_.nan1);
   }, "(-values_.infinity) <= (values_.nan1)");
   EXPECT_FATAL_FAILURE({  // NOLINT
     ASSERT_PRED_FORMAT2(FloatLE, values_.nan1, values_.nan1);
   }, "(values_.nan1) <= (values_.nan1)");
 #endif  // !GTEST_OS_SYMBIAN && !defined(__BORLANDC__)
 }
 
 // Instantiates FloatingPointTest for testing *_DOUBLE_EQ.
 typedef FloatingPointTest<double> DoubleTest;
 
 // Tests that the size of Double::Bits matches the size of double.
 TEST_F(DoubleTest, Size) {
   TestSize();
 }
 
 // Tests comparing with +0 and -0.
 TEST_F(DoubleTest, Zeros) {
   EXPECT_DOUBLE_EQ(0.0, -0.0);
   EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(-0.0, 1.0),
                           "1.0");
   EXPECT_FATAL_FAILURE(ASSERT_DOUBLE_EQ(0.0, 1.0),
                        "1.0");
 }
 
 // Tests comparing numbers close to 0.
 //
 // This ensures that *_DOUBLE_EQ handles the sign correctly and no
 // overflow occurs when comparing numbers whose absolute value is very
 // small.
 TEST_F(DoubleTest, AlmostZeros) {
   // In C++Builder, names within local classes (such as used by
   // EXPECT_FATAL_FAILURE) cannot be resolved against static members of the
   // scoping class.  Use a static local alias as a workaround.
   // We use the assignment syntax since some compilers, like Sun Studio,
   // don't allow initializing references using construction syntax
   // (parentheses).
   static const DoubleTest::TestValues& v = this->values_;
 
   EXPECT_DOUBLE_EQ(0.0, v.close_to_positive_zero);
   EXPECT_DOUBLE_EQ(-0.0, v.close_to_negative_zero);
   EXPECT_DOUBLE_EQ(v.close_to_positive_zero, v.close_to_negative_zero);
 
   EXPECT_FATAL_FAILURE({  // NOLINT
     ASSERT_DOUBLE_EQ(v.close_to_positive_zero,
                      v.further_from_negative_zero);
   }, "v.further_from_negative_zero");
 }
 
 // Tests comparing numbers close to each other.
 TEST_F(DoubleTest, SmallDiff) {
   EXPECT_DOUBLE_EQ(1.0, values_.close_to_one);
   EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(1.0, values_.further_from_one),
                           "values_.further_from_one");
 }
 
 // Tests comparing numbers far apart.
 TEST_F(DoubleTest, LargeDiff) {
   EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(2.0, 3.0),
                           "3.0");
 }
 
 // Tests comparing with infinity.
 //
 // This ensures that no overflow occurs when comparing numbers whose
 // absolute value is very large.
 TEST_F(DoubleTest, Infinity) {
   EXPECT_DOUBLE_EQ(values_.infinity, values_.close_to_infinity);
   EXPECT_DOUBLE_EQ(-values_.infinity, -values_.close_to_infinity);
 #if !GTEST_OS_SYMBIAN
   // Nokia's STLport crashes if we try to output infinity or NaN.
   EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(values_.infinity, -values_.infinity),
                           "-values_.infinity");
 
    // This is interesting as the representations of infinity and nan1
    // are only 1 ULP apart.
   EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(values_.infinity, values_.nan1),
                           "values_.nan1");
 #endif  // !GTEST_OS_SYMBIAN
 }
 
 // Tests that comparing with NAN always returns false.
 TEST_F(DoubleTest, NaN) {
 #if !GTEST_OS_SYMBIAN
   // In C++Builder, names within local classes (such as used by
   // EXPECT_FATAL_FAILURE) cannot be resolved against static members of the
   // scoping class.  Use a static local alias as a workaround.
   // We use the assignment syntax since some compilers, like Sun Studio,
   // don't allow initializing references using construction syntax
   // (parentheses).
   static const DoubleTest::TestValues& v = this->values_;
 
   // Nokia's STLport crashes if we try to output infinity or NaN.
   EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(v.nan1, v.nan1),
                           "v.nan1");
   EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(v.nan1, v.nan2), "v.nan2");
   EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(1.0, v.nan1), "v.nan1");
   EXPECT_FATAL_FAILURE(ASSERT_DOUBLE_EQ(v.nan1, v.infinity),
                        "v.infinity");
 #endif  // !GTEST_OS_SYMBIAN
 }
 
 // Tests that *_DOUBLE_EQ are reflexive.
 TEST_F(DoubleTest, Reflexive) {
   EXPECT_DOUBLE_EQ(0.0, 0.0);
   EXPECT_DOUBLE_EQ(1.0, 1.0);
 #if !GTEST_OS_SYMBIAN
   // Nokia's STLport crashes if we try to output infinity or NaN.
   ASSERT_DOUBLE_EQ(values_.infinity, values_.infinity);
 #endif  // !GTEST_OS_SYMBIAN
 }
 
 // Tests that *_DOUBLE_EQ are commutative.
 TEST_F(DoubleTest, Commutative) {
   // We already tested EXPECT_DOUBLE_EQ(1.0, values_.close_to_one).
   EXPECT_DOUBLE_EQ(values_.close_to_one, 1.0);
 
   // We already tested EXPECT_DOUBLE_EQ(1.0, values_.further_from_one).
   EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(values_.further_from_one, 1.0),
                           "1.0");
 }
 
 // Tests EXPECT_NEAR.
 TEST_F(DoubleTest, EXPECT_NEAR) {
   EXPECT_NEAR(-1.0, -1.1, 0.2);
   EXPECT_NEAR(2.0, 3.0, 1.0);
    EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(1.0,1.5, 0.25),  // NOLINT
                           "The difference between 1.0 and 1.5 is 0.5, "
                           "which exceeds 0.25");
   // To work around a bug in gcc 2.95.0, there is intentionally no
   // space after the first comma in the previous statement.
 }
 
 // Tests ASSERT_NEAR.
 TEST_F(DoubleTest, ASSERT_NEAR) {
   ASSERT_NEAR(-1.0, -1.1, 0.2);
   ASSERT_NEAR(2.0, 3.0, 1.0);
   EXPECT_FATAL_FAILURE(ASSERT_NEAR(1.0, 1.5, 0.25),  // NOLINT
                        "The difference between 1.0 and 1.5 is 0.5, "
                        "which exceeds 0.25");
   // To work around a bug in gcc 2.95.0, there is intentionally no
   // space after the first comma in the previous statement.
 }
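
  // For reference: the two assertion families above differ in how "close
  // enough" is defined.  *_DOUBLE_EQ uses a ULP-based bound (roughly 4 units
  // in the last place), while *_NEAR takes an explicit absolute error bound:
  //
  //   EXPECT_DOUBLE_EQ(expected, actual);   // ULP-based comparison
  //   EXPECT_NEAR(expected, actual, 1e-9);  // |expected - actual| <= 1e-9
  //
  // (The names expected/actual and the 1e-9 bound are illustrative only.)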
 
 // Tests the cases where DoubleLE() should succeed.
 TEST_F(DoubleTest, DoubleLESucceeds) {
   EXPECT_PRED_FORMAT2(DoubleLE, 1.0, 2.0);  // When val1 < val2,
   ASSERT_PRED_FORMAT2(DoubleLE, 1.0, 1.0);  // val1 == val2,
 
    // or when val1 is greater than, but almost equal to, val2.
   EXPECT_PRED_FORMAT2(DoubleLE, values_.close_to_positive_zero, 0.0);
 }
 
 // Tests the cases where DoubleLE() should fail.
 TEST_F(DoubleTest, DoubleLEFails) {
   // When val1 is greater than val2 by a large margin,
   EXPECT_NONFATAL_FAILURE(EXPECT_PRED_FORMAT2(DoubleLE, 2.0, 1.0),
                           "(2.0) <= (1.0)");
 
   // or by a small yet non-negligible margin,
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_PRED_FORMAT2(DoubleLE, values_.further_from_one, 1.0);
   }, "(values_.further_from_one) <= (1.0)");
 
 #if !GTEST_OS_SYMBIAN && !defined(__BORLANDC__)
   // Nokia's STLport crashes if we try to output infinity or NaN.
   // C++Builder gives bad results for ordered comparisons involving NaNs
   // due to compiler bugs.
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_PRED_FORMAT2(DoubleLE, values_.nan1, values_.infinity);
   }, "(values_.nan1) <= (values_.infinity)");
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_PRED_FORMAT2(DoubleLE, -values_.infinity, values_.nan1);
   }, " (-values_.infinity) <= (values_.nan1)");
   EXPECT_FATAL_FAILURE({  // NOLINT
     ASSERT_PRED_FORMAT2(DoubleLE, values_.nan1, values_.nan1);
   }, "(values_.nan1) <= (values_.nan1)");
 #endif  // !GTEST_OS_SYMBIAN && !defined(__BORLANDC__)
 }
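
  // For context, DoubleLE (exercised above via {EXPECT,ASSERT}_PRED_FORMAT2)
  // is a predicate-formatter: a function of the form
  //
  //   testing::AssertionResult PredFormat(const char* expr1, const char* expr2,
  //                                       double val1, double val2);
  //
  // that returns AssertionSuccess() when the relation holds and
  // AssertionFailure() with a streamed message when it doesn't; the
  // *_PRED_FORMAT2 macros hand it both the argument expressions and their
  // values.  (PredFormat is a placeholder name, not a symbol defined here.)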
 
 
 // Verifies that a test or test case whose name starts with DISABLED_ is
 // not run.
 
 // A test whose name starts with DISABLED_.
 // Should not run.
 TEST(DisabledTest, DISABLED_TestShouldNotRun) {
   FAIL() << "Unexpected failure: Disabled test should not be run.";
 }
 
 // A test whose name does not start with DISABLED_.
 // Should run.
 TEST(DisabledTest, NotDISABLED_TestShouldRun) {
   EXPECT_EQ(1, 1);
 }
 
 // A test case whose name starts with DISABLED_.
 // Should not run.
 TEST(DISABLED_TestCase, TestShouldNotRun) {
   FAIL() << "Unexpected failure: Test in disabled test case should not be run.";
 }
 
 // A test case and test whose names start with DISABLED_.
 // Should not run.
 TEST(DISABLED_TestCase, DISABLED_TestShouldNotRun) {
   FAIL() << "Unexpected failure: Test in disabled test case should not be run.";
 }
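
  // (Disabled tests and test cases such as these are reported as disabled but
  // skipped by default; they can still be forced to run with the
  // --gtest_also_run_disabled_tests flag or by setting
  // GTEST_ALSO_RUN_DISABLED_TESTS=1 in the environment.)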
 
 // Check that when all tests in a test case are disabled, SetUpTestCase() and
 // TearDownTestCase() are not called.
 class DisabledTestsTest : public Test {
  protected:
   static void SetUpTestCase() {
     FAIL() << "Unexpected failure: All tests disabled in test case. "
               "SetUpTestCase() should not be called.";
   }
 
   static void TearDownTestCase() {
     FAIL() << "Unexpected failure: All tests disabled in test case. "
               "TearDownTestCase() should not be called.";
   }
 };
 
 TEST_F(DisabledTestsTest, DISABLED_TestShouldNotRun_1) {
   FAIL() << "Unexpected failure: Disabled test should not be run.";
 }
 
 TEST_F(DisabledTestsTest, DISABLED_TestShouldNotRun_2) {
   FAIL() << "Unexpected failure: Disabled test should not be run.";
 }
 
 // Tests that disabled typed tests aren't run.
 
 #if GTEST_HAS_TYPED_TEST
 
 template <typename T>
 class TypedTest : public Test {
 };
 
 typedef testing::Types<int, double> NumericTypes;
 TYPED_TEST_CASE(TypedTest, NumericTypes);
 
 TYPED_TEST(TypedTest, DISABLED_ShouldNotRun) {
   FAIL() << "Unexpected failure: Disabled typed test should not run.";
 }
 
 template <typename T>
 class DISABLED_TypedTest : public Test {
 };
 
 TYPED_TEST_CASE(DISABLED_TypedTest, NumericTypes);
 
 TYPED_TEST(DISABLED_TypedTest, ShouldNotRun) {
   FAIL() << "Unexpected failure: Disabled typed test should not run.";
 }
 
 #endif  // GTEST_HAS_TYPED_TEST
 
 // Tests that disabled type-parameterized tests aren't run.
 
 #if GTEST_HAS_TYPED_TEST_P
 
 template <typename T>
 class TypedTestP : public Test {
 };
 
 TYPED_TEST_CASE_P(TypedTestP);
 
 TYPED_TEST_P(TypedTestP, DISABLED_ShouldNotRun) {
   FAIL() << "Unexpected failure: "
          << "Disabled type-parameterized test should not run.";
 }
 
 REGISTER_TYPED_TEST_CASE_P(TypedTestP, DISABLED_ShouldNotRun);
 
 INSTANTIATE_TYPED_TEST_CASE_P(My, TypedTestP, NumericTypes);
 
 template <typename T>
 class DISABLED_TypedTestP : public Test {
 };
 
 TYPED_TEST_CASE_P(DISABLED_TypedTestP);
 
 TYPED_TEST_P(DISABLED_TypedTestP, ShouldNotRun) {
   FAIL() << "Unexpected failure: "
          << "Disabled type-parameterized test should not run.";
 }
 
 REGISTER_TYPED_TEST_CASE_P(DISABLED_TypedTestP, ShouldNotRun);
 
 INSTANTIATE_TYPED_TEST_CASE_P(My, DISABLED_TypedTestP, NumericTypes);
 
 #endif  // GTEST_HAS_TYPED_TEST_P
 
 // Tests that assertion macros evaluate their arguments exactly once.
 
 class SingleEvaluationTest : public Test {
  public:  // Must be public and not protected due to a bug in g++ 3.4.2.
   // This helper function is needed by the FailedASSERT_STREQ test
   // below.  It's public to work around C++Builder's bug with scoping local
   // classes.
   static void CompareAndIncrementCharPtrs() {
     ASSERT_STREQ(p1_++, p2_++);
   }
 
   // This helper function is needed by the FailedASSERT_NE test below.  It's
   // public to work around C++Builder's bug with scoping local classes.
   static void CompareAndIncrementInts() {
     ASSERT_NE(a_++, b_++);
   }
 
  protected:
   SingleEvaluationTest() {
     p1_ = s1_;
     p2_ = s2_;
     a_ = 0;
     b_ = 0;
   }
 
   static const char* const s1_;
   static const char* const s2_;
   static const char* p1_;
   static const char* p2_;
 
   static int a_;
   static int b_;
 };
 
 const char* const SingleEvaluationTest::s1_ = "01234";
 const char* const SingleEvaluationTest::s2_ = "abcde";
 const char* SingleEvaluationTest::p1_;
 const char* SingleEvaluationTest::p2_;
 int SingleEvaluationTest::a_;
 int SingleEvaluationTest::b_;
 
 // Tests that when ASSERT_STREQ fails, it evaluates its arguments
 // exactly once.
 TEST_F(SingleEvaluationTest, FailedASSERT_STREQ) {
   EXPECT_FATAL_FAILURE(SingleEvaluationTest::CompareAndIncrementCharPtrs(),
                        "p2_++");
   EXPECT_EQ(s1_ + 1, p1_);
   EXPECT_EQ(s2_ + 1, p2_);
 }
 
 // Tests that string assertion arguments are evaluated exactly once.
 TEST_F(SingleEvaluationTest, ASSERT_STR) {
   // successful EXPECT_STRNE
   EXPECT_STRNE(p1_++, p2_++);
   EXPECT_EQ(s1_ + 1, p1_);
   EXPECT_EQ(s2_ + 1, p2_);
 
   // failed EXPECT_STRCASEEQ
   EXPECT_NONFATAL_FAILURE(EXPECT_STRCASEEQ(p1_++, p2_++),
                           "Ignoring case");
   EXPECT_EQ(s1_ + 2, p1_);
   EXPECT_EQ(s2_ + 2, p2_);
 }
 
 // Tests that when ASSERT_NE fails, it evaluates its arguments exactly
 // once.
 TEST_F(SingleEvaluationTest, FailedASSERT_NE) {
   EXPECT_FATAL_FAILURE(SingleEvaluationTest::CompareAndIncrementInts(),
                        "(a_++) != (b_++)");
   EXPECT_EQ(1, a_);
   EXPECT_EQ(1, b_);
 }
 
 // Tests that assertion arguments are evaluated exactly once.
 TEST_F(SingleEvaluationTest, OtherCases) {
   // successful EXPECT_TRUE
   EXPECT_TRUE(0 == a_++);  // NOLINT
   EXPECT_EQ(1, a_);
 
   // failed EXPECT_TRUE
   EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(-1 == a_++), "-1 == a_++");
   EXPECT_EQ(2, a_);
 
   // successful EXPECT_GT
   EXPECT_GT(a_++, b_++);
   EXPECT_EQ(3, a_);
   EXPECT_EQ(1, b_);
 
   // failed EXPECT_LT
   EXPECT_NONFATAL_FAILURE(EXPECT_LT(a_++, b_++), "(a_++) < (b_++)");
   EXPECT_EQ(4, a_);
   EXPECT_EQ(2, b_);
 
   // successful ASSERT_TRUE
   ASSERT_TRUE(0 < a_++);  // NOLINT
   EXPECT_EQ(5, a_);
 
   // successful ASSERT_GT
   ASSERT_GT(a_++, b_++);
   EXPECT_EQ(6, a_);
   EXPECT_EQ(3, b_);
 }
 
 #if GTEST_HAS_EXCEPTIONS
 
 void ThrowAnInteger() {
   throw 1;
 }
 
 // Tests that assertion arguments are evaluated exactly once.
 TEST_F(SingleEvaluationTest, ExceptionTests) {
   // successful EXPECT_THROW
   EXPECT_THROW({  // NOLINT
     a_++;
     ThrowAnInteger();
   }, int);
   EXPECT_EQ(1, a_);
 
   // failed EXPECT_THROW, throws different
   EXPECT_NONFATAL_FAILURE(EXPECT_THROW({  // NOLINT
     a_++;
     ThrowAnInteger();
   }, bool), "throws a different type");
   EXPECT_EQ(2, a_);
 
   // failed EXPECT_THROW, throws nothing
   EXPECT_NONFATAL_FAILURE(EXPECT_THROW(a_++, bool), "throws nothing");
   EXPECT_EQ(3, a_);
 
   // successful EXPECT_NO_THROW
   EXPECT_NO_THROW(a_++);
   EXPECT_EQ(4, a_);
 
   // failed EXPECT_NO_THROW
   EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW({  // NOLINT
     a_++;
     ThrowAnInteger();
   }), "it throws");
   EXPECT_EQ(5, a_);
 
   // successful EXPECT_ANY_THROW
   EXPECT_ANY_THROW({  // NOLINT
     a_++;
     ThrowAnInteger();
   });
   EXPECT_EQ(6, a_);
 
   // failed EXPECT_ANY_THROW
   EXPECT_NONFATAL_FAILURE(EXPECT_ANY_THROW(a_++), "it doesn't");
   EXPECT_EQ(7, a_);
 }
 
 #endif  // GTEST_HAS_EXCEPTIONS
 
 // Tests {ASSERT|EXPECT}_NO_FATAL_FAILURE.
 class NoFatalFailureTest : public Test {
  protected:
   void Succeeds() {}
   void FailsNonFatal() {
     ADD_FAILURE() << "some non-fatal failure";
   }
   void Fails() {
     FAIL() << "some fatal failure";
   }
 
   void DoAssertNoFatalFailureOnFails() {
     ASSERT_NO_FATAL_FAILURE(Fails());
     ADD_FAILURE() << "should not reach here.";
   }
 
   void DoExpectNoFatalFailureOnFails() {
     EXPECT_NO_FATAL_FAILURE(Fails());
     ADD_FAILURE() << "other failure";
   }
 };
 
 TEST_F(NoFatalFailureTest, NoFailure) {
   EXPECT_NO_FATAL_FAILURE(Succeeds());
   ASSERT_NO_FATAL_FAILURE(Succeeds());
 }
 
 TEST_F(NoFatalFailureTest, NonFatalIsNoFailure) {
   EXPECT_NONFATAL_FAILURE(
       EXPECT_NO_FATAL_FAILURE(FailsNonFatal()),
       "some non-fatal failure");
   EXPECT_NONFATAL_FAILURE(
       ASSERT_NO_FATAL_FAILURE(FailsNonFatal()),
       "some non-fatal failure");
 }
 
 TEST_F(NoFatalFailureTest, AssertNoFatalFailureOnFatalFailure) {
   TestPartResultArray gtest_failures;
   {
     ScopedFakeTestPartResultReporter gtest_reporter(&gtest_failures);
     DoAssertNoFatalFailureOnFails();
   }
   ASSERT_EQ(2, gtest_failures.size());
   EXPECT_EQ(TestPartResult::kFatalFailure,
             gtest_failures.GetTestPartResult(0).type());
   EXPECT_EQ(TestPartResult::kFatalFailure,
             gtest_failures.GetTestPartResult(1).type());
   EXPECT_PRED_FORMAT2(testing::IsSubstring, "some fatal failure",
                       gtest_failures.GetTestPartResult(0).message());
   EXPECT_PRED_FORMAT2(testing::IsSubstring, "it does",
                       gtest_failures.GetTestPartResult(1).message());
 }
 
 TEST_F(NoFatalFailureTest, ExpectNoFatalFailureOnFatalFailure) {
   TestPartResultArray gtest_failures;
   {
     ScopedFakeTestPartResultReporter gtest_reporter(&gtest_failures);
     DoExpectNoFatalFailureOnFails();
   }
   ASSERT_EQ(3, gtest_failures.size());
   EXPECT_EQ(TestPartResult::kFatalFailure,
             gtest_failures.GetTestPartResult(0).type());
   EXPECT_EQ(TestPartResult::kNonFatalFailure,
             gtest_failures.GetTestPartResult(1).type());
   EXPECT_EQ(TestPartResult::kNonFatalFailure,
             gtest_failures.GetTestPartResult(2).type());
   EXPECT_PRED_FORMAT2(testing::IsSubstring, "some fatal failure",
                       gtest_failures.GetTestPartResult(0).message());
   EXPECT_PRED_FORMAT2(testing::IsSubstring, "it does",
                       gtest_failures.GetTestPartResult(1).message());
   EXPECT_PRED_FORMAT2(testing::IsSubstring, "other failure",
                       gtest_failures.GetTestPartResult(2).message());
 }
 
 TEST_F(NoFatalFailureTest, MessageIsStreamable) {
   TestPartResultArray gtest_failures;
   {
     ScopedFakeTestPartResultReporter gtest_reporter(&gtest_failures);
     EXPECT_NO_FATAL_FAILURE(FAIL() << "foo") << "my message";
   }
   ASSERT_EQ(2, gtest_failures.size());
   EXPECT_EQ(TestPartResult::kNonFatalFailure,
             gtest_failures.GetTestPartResult(0).type());
   EXPECT_EQ(TestPartResult::kNonFatalFailure,
             gtest_failures.GetTestPartResult(1).type());
   EXPECT_PRED_FORMAT2(testing::IsSubstring, "foo",
                       gtest_failures.GetTestPartResult(0).message());
   EXPECT_PRED_FORMAT2(testing::IsSubstring, "my message",
                       gtest_failures.GetTestPartResult(1).message());
 }
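
  // (Typical use of these assertions outside this self-test is to wrap a
  // helper that itself contains ASSERT_* statements, so that a fatal failure
  // inside the helper also stops the calling test, e.g.
  //
  //   ASSERT_NO_FATAL_FAILURE(CheckInvariants(obj));
  //
  // where CheckInvariants is a hypothetical helper, not one defined here.)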
 
 // Tests non-string assertions.
 
 std::string EditsToString(const std::vector<EditType>& edits) {
   std::string out;
   for (size_t i = 0; i < edits.size(); ++i) {
     static const char kEdits[] = " +-/";
     out.append(1, kEdits[edits[i]]);
   }
   return out;
 }
 
 std::vector<size_t> CharsToIndices(const std::string& str) {
   std::vector<size_t> out;
   for (size_t i = 0; i < str.size(); ++i) {
     out.push_back(str[i]);
   }
   return out;
 }
 
 std::vector<std::string> CharsToLines(const std::string& str) {
   std::vector<std::string> out;
   for (size_t i = 0; i < str.size(); ++i) {
     out.push_back(str.substr(i, 1));
   }
   return out;
 }
 
 TEST(EditDistance, TestCases) {
   struct Case {
     int line;
     const char* left;
     const char* right;
     const char* expected_edits;
     const char* expected_diff;
   };
   static const Case kCases[] = {
       // No change.
       {__LINE__, "A", "A", " ", ""},
       {__LINE__, "ABCDE", "ABCDE", "     ", ""},
       // Simple adds.
       {__LINE__, "X", "XA", " +", "@@ +1,2 @@\n X\n+A\n"},
       {__LINE__, "X", "XABCD", " ++++", "@@ +1,5 @@\n X\n+A\n+B\n+C\n+D\n"},
       // Simple removes.
       {__LINE__, "XA", "X", " -", "@@ -1,2 @@\n X\n-A\n"},
       {__LINE__, "XABCD", "X", " ----", "@@ -1,5 @@\n X\n-A\n-B\n-C\n-D\n"},
       // Simple replaces.
       {__LINE__, "A", "a", "/", "@@ -1,1 +1,1 @@\n-A\n+a\n"},
       {__LINE__, "ABCD", "abcd", "////",
        "@@ -1,4 +1,4 @@\n-A\n-B\n-C\n-D\n+a\n+b\n+c\n+d\n"},
       // Path finding.
       {__LINE__, "ABCDEFGH", "ABXEGH1", "  -/ -  +",
        "@@ -1,8 +1,7 @@\n A\n B\n-C\n-D\n+X\n E\n-F\n G\n H\n+1\n"},
       {__LINE__, "AAAABCCCC", "ABABCDCDC", "- /   + / ",
        "@@ -1,9 +1,9 @@\n-A\n A\n-A\n+B\n A\n B\n C\n+D\n C\n-C\n+D\n C\n"},
       {__LINE__, "ABCDE", "BCDCD", "-   +/",
        "@@ -1,5 +1,5 @@\n-A\n B\n C\n D\n-E\n+C\n+D\n"},
       {__LINE__, "ABCDEFGHIJKL", "BCDCDEFGJKLJK", "- ++     --   ++",
        "@@ -1,4 +1,5 @@\n-A\n B\n+C\n+D\n C\n D\n"
        "@@ -6,7 +7,7 @@\n F\n G\n-H\n-I\n J\n K\n L\n+J\n+K\n"},
       {}};
   for (const Case* c = kCases; c->left; ++c) {
     EXPECT_TRUE(c->expected_edits ==
                 EditsToString(CalculateOptimalEdits(CharsToIndices(c->left),
                                                     CharsToIndices(c->right))))
         << "Left <" << c->left << "> Right <" << c->right << "> Edits <"
         << EditsToString(CalculateOptimalEdits(
                CharsToIndices(c->left), CharsToIndices(c->right))) << ">";
     EXPECT_TRUE(c->expected_diff == CreateUnifiedDiff(CharsToLines(c->left),
                                                       CharsToLines(c->right)))
         << "Left <" << c->left << "> Right <" << c->right << "> Diff <"
         << CreateUnifiedDiff(CharsToLines(c->left), CharsToLines(c->right))
         << ">";
   }
 }
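
  // (CalculateOptimalEdits and CreateUnifiedDiff are the internal helpers that
  // produce the "With diff:" section of *EQ* failure messages for multi-line
  // values, as exercised by EqFailureWithDiff below.)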
 
 // Tests EqFailure(), used for implementing *EQ* assertions.
 TEST(AssertionTest, EqFailure) {
   const std::string foo_val("5"), bar_val("6");
   const std::string msg1(
       EqFailure("foo", "bar", foo_val, bar_val, false)
       .failure_message());
   EXPECT_STREQ(
       "Expected equality of these values:\n"
       "  foo\n"
       "    Which is: 5\n"
       "  bar\n"
       "    Which is: 6",
       msg1.c_str());
 
   const std::string msg2(
       EqFailure("foo", "6", foo_val, bar_val, false)
       .failure_message());
   EXPECT_STREQ(
       "Expected equality of these values:\n"
       "  foo\n"
       "    Which is: 5\n"
       "  6",
       msg2.c_str());
 
   const std::string msg3(
       EqFailure("5", "bar", foo_val, bar_val, false)
       .failure_message());
   EXPECT_STREQ(
       "Expected equality of these values:\n"
       "  5\n"
       "  bar\n"
       "    Which is: 6",
       msg3.c_str());
 
   const std::string msg4(
       EqFailure("5", "6", foo_val, bar_val, false).failure_message());
   EXPECT_STREQ(
       "Expected equality of these values:\n"
       "  5\n"
       "  6",
       msg4.c_str());
 
   const std::string msg5(
       EqFailure("foo", "bar",
                 std::string("\"x\""), std::string("\"y\""),
                 true).failure_message());
   EXPECT_STREQ(
       "Expected equality of these values:\n"
       "  foo\n"
       "    Which is: \"x\"\n"
       "  bar\n"
       "    Which is: \"y\"\n"
       "Ignoring case",
       msg5.c_str());
 }
 
 TEST(AssertionTest, EqFailureWithDiff) {
   const std::string left(
       "1\\n2XXX\\n3\\n5\\n6\\n7\\n8\\n9\\n10\\n11\\n12XXX\\n13\\n14\\n15");
   const std::string right(
       "1\\n2\\n3\\n4\\n5\\n6\\n7\\n8\\n9\\n11\\n12\\n13\\n14");
   const std::string msg1(
       EqFailure("left", "right", left, right, false).failure_message());
   EXPECT_STREQ(
       "Expected equality of these values:\n"
       "  left\n"
       "    Which is: "
       "1\\n2XXX\\n3\\n5\\n6\\n7\\n8\\n9\\n10\\n11\\n12XXX\\n13\\n14\\n15\n"
       "  right\n"
       "    Which is: 1\\n2\\n3\\n4\\n5\\n6\\n7\\n8\\n9\\n11\\n12\\n13\\n14\n"
       "With diff:\n@@ -1,5 +1,6 @@\n 1\n-2XXX\n+2\n 3\n+4\n 5\n 6\n"
       "@@ -7,8 +8,6 @@\n 8\n 9\n-10\n 11\n-12XXX\n+12\n 13\n 14\n-15\n",
       msg1.c_str());
 }
 
 // Tests AppendUserMessage(), used for implementing the *EQ* macros.
 TEST(AssertionTest, AppendUserMessage) {
   const std::string foo("foo");
 
   Message msg;
   EXPECT_STREQ("foo",
                AppendUserMessage(foo, msg).c_str());
 
   msg << "bar";
   EXPECT_STREQ("foo\nbar",
                AppendUserMessage(foo, msg).c_str());
 }
 
 #ifdef __BORLANDC__
 // Silences warnings: "Condition is always true", "Unreachable code"
 # pragma option push -w-ccc -w-rch
 #endif
 
 // Tests ASSERT_TRUE.
 TEST(AssertionTest, ASSERT_TRUE) {
   ASSERT_TRUE(2 > 1);  // NOLINT
   EXPECT_FATAL_FAILURE(ASSERT_TRUE(2 < 1),
                        "2 < 1");
 }
 
 // Tests ASSERT_TRUE(predicate) for predicates returning AssertionResult.
 TEST(AssertionTest, AssertTrueWithAssertionResult) {
   ASSERT_TRUE(ResultIsEven(2));
 #ifndef __BORLANDC__
   // ICE's in C++Builder.
   EXPECT_FATAL_FAILURE(ASSERT_TRUE(ResultIsEven(3)),
                        "Value of: ResultIsEven(3)\n"
                        "  Actual: false (3 is odd)\n"
                        "Expected: true");
 #endif
   ASSERT_TRUE(ResultIsEvenNoExplanation(2));
   EXPECT_FATAL_FAILURE(ASSERT_TRUE(ResultIsEvenNoExplanation(3)),
                        "Value of: ResultIsEvenNoExplanation(3)\n"
                        "  Actual: false (3 is odd)\n"
                        "Expected: true");
 }
 
 // Tests ASSERT_FALSE.
 TEST(AssertionTest, ASSERT_FALSE) {
   ASSERT_FALSE(2 < 1);  // NOLINT
   EXPECT_FATAL_FAILURE(ASSERT_FALSE(2 > 1),
                        "Value of: 2 > 1\n"
                        "  Actual: true\n"
                        "Expected: false");
 }
 
 // Tests ASSERT_FALSE(predicate) for predicates returning AssertionResult.
 TEST(AssertionTest, AssertFalseWithAssertionResult) {
   ASSERT_FALSE(ResultIsEven(3));
 #ifndef __BORLANDC__
   // ICE's in C++Builder.
   EXPECT_FATAL_FAILURE(ASSERT_FALSE(ResultIsEven(2)),
                        "Value of: ResultIsEven(2)\n"
                        "  Actual: true (2 is even)\n"
                        "Expected: false");
 #endif
   ASSERT_FALSE(ResultIsEvenNoExplanation(3));
   EXPECT_FATAL_FAILURE(ASSERT_FALSE(ResultIsEvenNoExplanation(2)),
                        "Value of: ResultIsEvenNoExplanation(2)\n"
                        "  Actual: true\n"
                        "Expected: false");
 }
 
 #ifdef __BORLANDC__
 // Restores warnings after previous "#pragma option push" suppressed them
 # pragma option pop
 #endif
 
 // Tests using ASSERT_EQ on double values.  The purpose is to make
 // sure that the specialization we did for integer and anonymous enums
 // isn't used for double arguments.
 TEST(ExpectTest, ASSERT_EQ_Double) {
   // A success.
   ASSERT_EQ(5.6, 5.6);
 
   // A failure.
   EXPECT_FATAL_FAILURE(ASSERT_EQ(5.1, 5.2),
                        "5.1");
 }
 
 // Tests ASSERT_EQ.
 TEST(AssertionTest, ASSERT_EQ) {
   ASSERT_EQ(5, 2 + 3);
   EXPECT_FATAL_FAILURE(ASSERT_EQ(5, 2*3),
                        "Expected equality of these values:\n"
                        "  5\n"
                        "  2*3\n"
                        "    Which is: 6");
 }
 
 // Tests ASSERT_EQ(NULL, pointer).
 #if GTEST_CAN_COMPARE_NULL
 TEST(AssertionTest, ASSERT_EQ_NULL) {
   // A success.
   const char* p = NULL;
   // Some older GCC versions may issue a spurious warning in this or the next
   // assertion statement. This warning should not be suppressed with
   // static_cast since the test verifies the ability to use bare NULL as the
   // expected parameter to the macro.
   ASSERT_EQ(NULL, p);
 
   // A failure.
   static int n = 0;
   EXPECT_FATAL_FAILURE(ASSERT_EQ(NULL, &n),
                        "  &n\n    Which is:");
 }
 #endif  // GTEST_CAN_COMPARE_NULL
 
 // Tests ASSERT_EQ(0, non_pointer).  Since the literal 0 can be
 // treated as a null pointer by the compiler, we need to make sure
 // that ASSERT_EQ(0, non_pointer) isn't interpreted by Google Test as
 // ASSERT_EQ(static_cast<void*>(NULL), non_pointer).
 TEST(ExpectTest, ASSERT_EQ_0) {
   int n = 0;
 
   // A success.
   ASSERT_EQ(0, n);
 
   // A failure.
   EXPECT_FATAL_FAILURE(ASSERT_EQ(0, 5.6),
                        "  0\n  5.6");
 }
 
 // Tests ASSERT_NE.
 TEST(AssertionTest, ASSERT_NE) {
   ASSERT_NE(6, 7);
   EXPECT_FATAL_FAILURE(ASSERT_NE('a', 'a'),
                        "Expected: ('a') != ('a'), "
                        "actual: 'a' (97, 0x61) vs 'a' (97, 0x61)");
 }
 
 // Tests ASSERT_LE.
 TEST(AssertionTest, ASSERT_LE) {
   ASSERT_LE(2, 3);
   ASSERT_LE(2, 2);
   EXPECT_FATAL_FAILURE(ASSERT_LE(2, 0),
                        "Expected: (2) <= (0), actual: 2 vs 0");
 }
 
 // Tests ASSERT_LT.
 TEST(AssertionTest, ASSERT_LT) {
   ASSERT_LT(2, 3);
   EXPECT_FATAL_FAILURE(ASSERT_LT(2, 2),
                        "Expected: (2) < (2), actual: 2 vs 2");
 }
 
 // Tests ASSERT_GE.
 TEST(AssertionTest, ASSERT_GE) {
   ASSERT_GE(2, 1);
   ASSERT_GE(2, 2);
   EXPECT_FATAL_FAILURE(ASSERT_GE(2, 3),
                        "Expected: (2) >= (3), actual: 2 vs 3");
 }
 
 // Tests ASSERT_GT.
 TEST(AssertionTest, ASSERT_GT) {
   ASSERT_GT(2, 1);
   EXPECT_FATAL_FAILURE(ASSERT_GT(2, 2),
                        "Expected: (2) > (2), actual: 2 vs 2");
 }
 
 #if GTEST_HAS_EXCEPTIONS
 
 void ThrowNothing() {}
 
 // Tests ASSERT_THROW.
 TEST(AssertionTest, ASSERT_THROW) {
   ASSERT_THROW(ThrowAnInteger(), int);
 
 # ifndef __BORLANDC__
 
   // ICE's in C++Builder 2007 and 2009.
   EXPECT_FATAL_FAILURE(
       ASSERT_THROW(ThrowAnInteger(), bool),
       "Expected: ThrowAnInteger() throws an exception of type bool.\n"
       "  Actual: it throws a different type.");
 # endif
 
   EXPECT_FATAL_FAILURE(
       ASSERT_THROW(ThrowNothing(), bool),
       "Expected: ThrowNothing() throws an exception of type bool.\n"
       "  Actual: it throws nothing.");
 }
 
 // Tests ASSERT_NO_THROW.
 TEST(AssertionTest, ASSERT_NO_THROW) {
   ASSERT_NO_THROW(ThrowNothing());
   EXPECT_FATAL_FAILURE(ASSERT_NO_THROW(ThrowAnInteger()),
                        "Expected: ThrowAnInteger() doesn't throw an exception."
                        "\n  Actual: it throws.");
 }
 
 // Tests ASSERT_ANY_THROW.
 TEST(AssertionTest, ASSERT_ANY_THROW) {
   ASSERT_ANY_THROW(ThrowAnInteger());
   EXPECT_FATAL_FAILURE(
       ASSERT_ANY_THROW(ThrowNothing()),
       "Expected: ThrowNothing() throws an exception.\n"
       "  Actual: it doesn't.");
 }
 
 #endif  // GTEST_HAS_EXCEPTIONS
 
 // Makes sure we deal with the precedence of <<.  This test should
 // compile.
 TEST(AssertionTest, AssertPrecedence) {
   ASSERT_EQ(1 < 2, true);
   bool false_value = false;
   ASSERT_EQ(true && false_value, false);
 }
 
 // A subroutine used by the following test.
 void TestEq1(int x) {
   ASSERT_EQ(1, x);
 }
 
 // Tests calling a test subroutine that's not part of a fixture.
 TEST(AssertionTest, NonFixtureSubroutine) {
   EXPECT_FATAL_FAILURE(TestEq1(2),
                        "  x\n    Which is: 2");
 }
 
 // An uncopyable class.
 class Uncopyable {
  public:
   explicit Uncopyable(int a_value) : value_(a_value) {}
 
   int value() const { return value_; }
   bool operator==(const Uncopyable& rhs) const {
     return value() == rhs.value();
   }
  private:
   // This constructor deliberately has no implementation, as we don't
   // want this class to be copyable.
   Uncopyable(const Uncopyable&);  // NOLINT
 
   int value_;
 };
 
 ::std::ostream& operator<<(::std::ostream& os, const Uncopyable& value) {
   return os << value.value();
 }
 
 
 bool IsPositiveUncopyable(const Uncopyable& x) {
   return x.value() > 0;
 }
 
 // A subroutine used by the following test.
 void TestAssertNonPositive() {
   Uncopyable y(-1);
   ASSERT_PRED1(IsPositiveUncopyable, y);
 }
 // A subroutine used by the following test.
 void TestAssertEqualsUncopyable() {
   Uncopyable x(5);
   Uncopyable y(-1);
   ASSERT_EQ(x, y);
 }
 
 // Tests that uncopyable objects can be used in assertions.
 TEST(AssertionTest, AssertWorksWithUncopyableObject) {
   Uncopyable x(5);
   ASSERT_PRED1(IsPositiveUncopyable, x);
   ASSERT_EQ(x, x);
   EXPECT_FATAL_FAILURE(TestAssertNonPositive(),
     "IsPositiveUncopyable(y) evaluates to false, where\ny evaluates to -1");
   EXPECT_FATAL_FAILURE(TestAssertEqualsUncopyable(),
                        "Expected equality of these values:\n"
                        "  x\n    Which is: 5\n  y\n    Which is: -1");
 }
 
 // Tests that uncopyable objects can be used in expects.
 TEST(AssertionTest, ExpectWorksWithUncopyableObject) {
   Uncopyable x(5);
   EXPECT_PRED1(IsPositiveUncopyable, x);
   Uncopyable y(-1);
   EXPECT_NONFATAL_FAILURE(EXPECT_PRED1(IsPositiveUncopyable, y),
     "IsPositiveUncopyable(y) evaluates to false, where\ny evaluates to -1");
   EXPECT_EQ(x, x);
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(x, y),
                           "Expected equality of these values:\n"
                           "  x\n    Which is: 5\n  y\n    Which is: -1");
 }
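
  // (These cases work because the comparison and predicate helpers take their
  // arguments by const reference; an uncopyable type only needs the relevant
  // comparison operator and an operator<< for printing, both provided above.)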
 
 enum NamedEnum {
   kE1 = 0,
   kE2 = 1
 };
 
 TEST(AssertionTest, NamedEnum) {
   EXPECT_EQ(kE1, kE1);
   EXPECT_LT(kE1, kE2);
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(kE1, kE2), "Which is: 0");
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(kE1, kE2), "Which is: 1");
 }
 
  // The version of gcc used in Xcode 2.2 has a bug and doesn't allow
 // anonymous enums in assertions.  Therefore the following test is not
 // done on Mac.
 // Sun Studio and HP aCC also reject this code.
 #if !GTEST_OS_MAC && !defined(__SUNPRO_CC) && !defined(__HP_aCC)
 
 // Tests using assertions with anonymous enums.
 enum {
   kCaseA = -1,
 
 # if GTEST_OS_LINUX
 
   // We want to test the case where the size of the anonymous enum is
   // larger than sizeof(int), to make sure our implementation of the
   // assertions doesn't truncate the enums.  However, MSVC
   // (incorrectly) doesn't allow an enum value to exceed the range of
   // an int, so this has to be conditionally compiled.
   //
   // On Linux, kCaseB and kCaseA have the same value when truncated to
   // int size.  We want to test whether this will confuse the
   // assertions.
   kCaseB = testing::internal::kMaxBiggestInt,
 
 # else
 
   kCaseB = INT_MAX,
 
 # endif  // GTEST_OS_LINUX
 
   kCaseC = 42
 };
 
 TEST(AssertionTest, AnonymousEnum) {
 # if GTEST_OS_LINUX
 
   EXPECT_EQ(static_cast<int>(kCaseA), static_cast<int>(kCaseB));
 
 # endif  // GTEST_OS_LINUX
 
   EXPECT_EQ(kCaseA, kCaseA);
   EXPECT_NE(kCaseA, kCaseB);
   EXPECT_LT(kCaseA, kCaseB);
   EXPECT_LE(kCaseA, kCaseB);
   EXPECT_GT(kCaseB, kCaseA);
   EXPECT_GE(kCaseA, kCaseA);
   EXPECT_NONFATAL_FAILURE(EXPECT_GE(kCaseA, kCaseB),
                           "(kCaseA) >= (kCaseB)");
   EXPECT_NONFATAL_FAILURE(EXPECT_GE(kCaseA, kCaseC),
                           "-1 vs 42");
 
   ASSERT_EQ(kCaseA, kCaseA);
   ASSERT_NE(kCaseA, kCaseB);
   ASSERT_LT(kCaseA, kCaseB);
   ASSERT_LE(kCaseA, kCaseB);
   ASSERT_GT(kCaseB, kCaseA);
   ASSERT_GE(kCaseA, kCaseA);
 
 # ifndef __BORLANDC__
 
   // ICE's in C++Builder.
   EXPECT_FATAL_FAILURE(ASSERT_EQ(kCaseA, kCaseB),
                        "  kCaseB\n    Which is: ");
   EXPECT_FATAL_FAILURE(ASSERT_EQ(kCaseA, kCaseC),
                        "\n    Which is: 42");
 # endif
 
   EXPECT_FATAL_FAILURE(ASSERT_EQ(kCaseA, kCaseC),
                        "\n    Which is: -1");
 }
 
  #endif  // !GTEST_OS_MAC && !defined(__SUNPRO_CC) && !defined(__HP_aCC)
 
 #if GTEST_OS_WINDOWS
 
 static HRESULT UnexpectedHRESULTFailure() {
   return E_UNEXPECTED;
 }
 
 static HRESULT OkHRESULTSuccess() {
   return S_OK;
 }
 
 static HRESULT FalseHRESULTSuccess() {
   return S_FALSE;
 }
 
  // HRESULT assertion tests exercise both zero and non-zero
  // success codes, as well as the failure message for each.
 //
 // Windows CE doesn't support message texts.
 TEST(HRESULTAssertionTest, EXPECT_HRESULT_SUCCEEDED) {
   EXPECT_HRESULT_SUCCEEDED(S_OK);
   EXPECT_HRESULT_SUCCEEDED(S_FALSE);
 
   EXPECT_NONFATAL_FAILURE(EXPECT_HRESULT_SUCCEEDED(UnexpectedHRESULTFailure()),
     "Expected: (UnexpectedHRESULTFailure()) succeeds.\n"
     "  Actual: 0x8000FFFF");
 }
 
 TEST(HRESULTAssertionTest, ASSERT_HRESULT_SUCCEEDED) {
   ASSERT_HRESULT_SUCCEEDED(S_OK);
   ASSERT_HRESULT_SUCCEEDED(S_FALSE);
 
   EXPECT_FATAL_FAILURE(ASSERT_HRESULT_SUCCEEDED(UnexpectedHRESULTFailure()),
     "Expected: (UnexpectedHRESULTFailure()) succeeds.\n"
     "  Actual: 0x8000FFFF");
 }
 
 TEST(HRESULTAssertionTest, EXPECT_HRESULT_FAILED) {
   EXPECT_HRESULT_FAILED(E_UNEXPECTED);
 
   EXPECT_NONFATAL_FAILURE(EXPECT_HRESULT_FAILED(OkHRESULTSuccess()),
     "Expected: (OkHRESULTSuccess()) fails.\n"
     "  Actual: 0x0");
   EXPECT_NONFATAL_FAILURE(EXPECT_HRESULT_FAILED(FalseHRESULTSuccess()),
     "Expected: (FalseHRESULTSuccess()) fails.\n"
     "  Actual: 0x1");
 }
 
 TEST(HRESULTAssertionTest, ASSERT_HRESULT_FAILED) {
   ASSERT_HRESULT_FAILED(E_UNEXPECTED);
 
 # ifndef __BORLANDC__
 
   // ICE's in C++Builder 2007 and 2009.
   EXPECT_FATAL_FAILURE(ASSERT_HRESULT_FAILED(OkHRESULTSuccess()),
     "Expected: (OkHRESULTSuccess()) fails.\n"
     "  Actual: 0x0");
 # endif
 
   EXPECT_FATAL_FAILURE(ASSERT_HRESULT_FAILED(FalseHRESULTSuccess()),
     "Expected: (FalseHRESULTSuccess()) fails.\n"
     "  Actual: 0x1");
 }
 
 // Tests that streaming to the HRESULT macros works.
 TEST(HRESULTAssertionTest, Streaming) {
   EXPECT_HRESULT_SUCCEEDED(S_OK) << "unexpected failure";
   ASSERT_HRESULT_SUCCEEDED(S_OK) << "unexpected failure";
   EXPECT_HRESULT_FAILED(E_UNEXPECTED) << "unexpected failure";
   ASSERT_HRESULT_FAILED(E_UNEXPECTED) << "unexpected failure";
 
   EXPECT_NONFATAL_FAILURE(
       EXPECT_HRESULT_SUCCEEDED(E_UNEXPECTED) << "expected failure",
       "expected failure");
 
 # ifndef __BORLANDC__
 
   // ICE's in C++Builder 2007 and 2009.
   EXPECT_FATAL_FAILURE(
       ASSERT_HRESULT_SUCCEEDED(E_UNEXPECTED) << "expected failure",
       "expected failure");
 # endif
 
   EXPECT_NONFATAL_FAILURE(
       EXPECT_HRESULT_FAILED(S_OK) << "expected failure",
       "expected failure");
 
   EXPECT_FATAL_FAILURE(
       ASSERT_HRESULT_FAILED(S_OK) << "expected failure",
       "expected failure");
 }
 
 #endif  // GTEST_OS_WINDOWS
 
 #ifdef __BORLANDC__
 // Silences warnings: "Condition is always true", "Unreachable code"
 # pragma option push -w-ccc -w-rch
 #endif
 
 // Tests that the assertion macros behave like single statements.
 TEST(AssertionSyntaxTest, BasicAssertionsBehavesLikeSingleStatement) {
   if (AlwaysFalse())
     ASSERT_TRUE(false) << "This should never be executed; "
                           "It's a compilation test only.";
 
   if (AlwaysTrue())
     EXPECT_FALSE(false);
   else
     ;  // NOLINT
 
   if (AlwaysFalse())
     ASSERT_LT(1, 3);
 
   if (AlwaysFalse())
     ;  // NOLINT
   else
     EXPECT_GT(3, 2) << "";
 }
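
  // (These compile-only checks matter because every assertion macro must
  // expand to something that behaves as a single statement, usable even in an
  // unbraced if/else, while still accepting a trailing `<< message`.  The
  // implementation uses a `switch (0) case 0: default:` prefix
  // (GTEST_AMBIGUOUS_ELSE_BLOCKER_ in gtest-port.h) rather than the usual
  // do { ... } while (0) wrapper, which could not accept the trailing stream.)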
 
 #if GTEST_HAS_EXCEPTIONS
 // Tests that the compiler will not complain about unreachable code in the
 // EXPECT_THROW/EXPECT_ANY_THROW/EXPECT_NO_THROW macros.
 TEST(ExpectThrowTest, DoesNotGenerateUnreachableCodeWarning) {
   int n = 0;
 
   EXPECT_THROW(throw 1, int);
   EXPECT_NONFATAL_FAILURE(EXPECT_THROW(n++, int), "");
   EXPECT_NONFATAL_FAILURE(EXPECT_THROW(throw 1, const char*), "");
   EXPECT_NO_THROW(n++);
   EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(throw 1), "");
   EXPECT_ANY_THROW(throw 1);
   EXPECT_NONFATAL_FAILURE(EXPECT_ANY_THROW(n++), "");
 }
 
 TEST(AssertionSyntaxTest, ExceptionAssertionsBehavesLikeSingleStatement) {
   if (AlwaysFalse())
     EXPECT_THROW(ThrowNothing(), bool);
 
   if (AlwaysTrue())
     EXPECT_THROW(ThrowAnInteger(), int);
   else
     ;  // NOLINT
 
   if (AlwaysFalse())
     EXPECT_NO_THROW(ThrowAnInteger());
 
   if (AlwaysTrue())
     EXPECT_NO_THROW(ThrowNothing());
   else
     ;  // NOLINT
 
   if (AlwaysFalse())
     EXPECT_ANY_THROW(ThrowNothing());
 
   if (AlwaysTrue())
     EXPECT_ANY_THROW(ThrowAnInteger());
   else
     ;  // NOLINT
 }
 #endif  // GTEST_HAS_EXCEPTIONS
 
 TEST(AssertionSyntaxTest, NoFatalFailureAssertionsBehavesLikeSingleStatement) {
   if (AlwaysFalse())
     EXPECT_NO_FATAL_FAILURE(FAIL()) << "This should never be executed. "
                                     << "It's a compilation test only.";
   else
     ;  // NOLINT
 
   if (AlwaysFalse())
     ASSERT_NO_FATAL_FAILURE(FAIL()) << "";
   else
     ;  // NOLINT
 
   if (AlwaysTrue())
     EXPECT_NO_FATAL_FAILURE(SUCCEED());
   else
     ;  // NOLINT
 
   if (AlwaysFalse())
     ;  // NOLINT
   else
     ASSERT_NO_FATAL_FAILURE(SUCCEED());
 }
 
 // Tests that the assertion macros work well with switch statements.
 TEST(AssertionSyntaxTest, WorksWithSwitch) {
   switch (0) {
     case 1:
       break;
     default:
       ASSERT_TRUE(true);
   }
 
   switch (0)
     case 0:
       EXPECT_FALSE(false) << "EXPECT_FALSE failed in switch case";
 
   // Binary assertions are implemented using a different code path
   // than the Boolean assertions.  Hence we test them separately.
   switch (0) {
     case 1:
     default:
       ASSERT_EQ(1, 1) << "ASSERT_EQ failed in default switch handler";
   }
 
   switch (0)
     case 0:
       EXPECT_NE(1, 2);
 }
 
 #if GTEST_HAS_EXCEPTIONS
 
  void ThrowAString() {
    throw "std::string";
  }

  // Test that the exception assertion macros compile and work with the const
  // type qualifier.
  TEST(AssertionSyntaxTest, WorksWithConst) {
    ASSERT_THROW(ThrowAString(), const char*);

    EXPECT_THROW(ThrowAString(), const char*);
  }
 
 #endif  // GTEST_HAS_EXCEPTIONS
 
 }  // namespace
 
 namespace testing {
 
 // Tests that Google Test tracks SUCCEED*.
 TEST(SuccessfulAssertionTest, SUCCEED) {
   SUCCEED();
   SUCCEED() << "OK";
   EXPECT_EQ(2, GetUnitTestImpl()->current_test_result()->total_part_count());
 }
 
 // Tests that Google Test doesn't track successful EXPECT_*.
 TEST(SuccessfulAssertionTest, EXPECT) {
   EXPECT_TRUE(true);
   EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());
 }
 
 // Tests that Google Test doesn't track successful EXPECT_STR*.
 TEST(SuccessfulAssertionTest, EXPECT_STR) {
   EXPECT_STREQ("", "");
   EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());
 }
 
 // Tests that Google Test doesn't track successful ASSERT_*.
 TEST(SuccessfulAssertionTest, ASSERT) {
   ASSERT_TRUE(true);
   EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());
 }
 
 // Tests that Google Test doesn't track successful ASSERT_STR*.
 TEST(SuccessfulAssertionTest, ASSERT_STR) {
   ASSERT_STREQ("", "");
   EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());
 }
 
 }  // namespace testing
 
 namespace {
 
 // Tests the message streaming variation of assertions.
 
 TEST(AssertionWithMessageTest, EXPECT) {
   EXPECT_EQ(1, 1) << "This should succeed.";
   EXPECT_NONFATAL_FAILURE(EXPECT_NE(1, 1) << "Expected failure #1.",
                           "Expected failure #1");
   EXPECT_LE(1, 2) << "This should succeed.";
   EXPECT_NONFATAL_FAILURE(EXPECT_LT(1, 0) << "Expected failure #2.",
                           "Expected failure #2.");
   EXPECT_GE(1, 0) << "This should succeed.";
   EXPECT_NONFATAL_FAILURE(EXPECT_GT(1, 2) << "Expected failure #3.",
                           "Expected failure #3.");
 
   EXPECT_STREQ("1", "1") << "This should succeed.";
   EXPECT_NONFATAL_FAILURE(EXPECT_STRNE("1", "1") << "Expected failure #4.",
                           "Expected failure #4.");
   EXPECT_STRCASEEQ("a", "A") << "This should succeed.";
   EXPECT_NONFATAL_FAILURE(EXPECT_STRCASENE("a", "A") << "Expected failure #5.",
                           "Expected failure #5.");
 
   EXPECT_FLOAT_EQ(1, 1) << "This should succeed.";
   EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(1, 1.2) << "Expected failure #6.",
                           "Expected failure #6.");
   EXPECT_NEAR(1, 1.1, 0.2) << "This should succeed.";
 }
 
 TEST(AssertionWithMessageTest, ASSERT) {
   ASSERT_EQ(1, 1) << "This should succeed.";
   ASSERT_NE(1, 2) << "This should succeed.";
   ASSERT_LE(1, 2) << "This should succeed.";
   ASSERT_LT(1, 2) << "This should succeed.";
   ASSERT_GE(1, 0) << "This should succeed.";
   EXPECT_FATAL_FAILURE(ASSERT_GT(1, 2) << "Expected failure.",
                        "Expected failure.");
 }
 
 TEST(AssertionWithMessageTest, ASSERT_STR) {
   ASSERT_STREQ("1", "1") << "This should succeed.";
   ASSERT_STRNE("1", "2") << "This should succeed.";
   ASSERT_STRCASEEQ("a", "A") << "This should succeed.";
   EXPECT_FATAL_FAILURE(ASSERT_STRCASENE("a", "A") << "Expected failure.",
                        "Expected failure.");
 }
 
 TEST(AssertionWithMessageTest, ASSERT_FLOATING) {
   ASSERT_FLOAT_EQ(1, 1) << "This should succeed.";
   ASSERT_DOUBLE_EQ(1, 1) << "This should succeed.";
   EXPECT_FATAL_FAILURE(ASSERT_NEAR(1,1.2, 0.1) << "Expect failure.",  // NOLINT
                        "Expect failure.");
   // To work around a bug in gcc 2.95.0, there is intentionally no
   // space after the first comma in the previous statement.
 }
 
 // Tests using ASSERT_FALSE with a streamed message.
 TEST(AssertionWithMessageTest, ASSERT_FALSE) {
   ASSERT_FALSE(false) << "This shouldn't fail.";
   EXPECT_FATAL_FAILURE({  // NOLINT
     ASSERT_FALSE(true) << "Expected failure: " << 2 << " > " << 1
                        << " evaluates to " << true;
   }, "Expected failure");
 }
 
 // Tests using FAIL with a streamed message.
 TEST(AssertionWithMessageTest, FAIL) {
   EXPECT_FATAL_FAILURE(FAIL() << 0,
                        "0");
 }
 
 // Tests using SUCCEED with a streamed message.
 TEST(AssertionWithMessageTest, SUCCEED) {
   SUCCEED() << "Success == " << 1;
 }
 
 // Tests using ASSERT_TRUE with a streamed message.
 TEST(AssertionWithMessageTest, ASSERT_TRUE) {
   ASSERT_TRUE(true) << "This should succeed.";
   ASSERT_TRUE(true) << true;
   EXPECT_FATAL_FAILURE({  // NOLINT
     ASSERT_TRUE(false) << static_cast<const char *>(NULL)
                        << static_cast<char *>(NULL);
   }, "(null)(null)");
 }
 
 #if GTEST_OS_WINDOWS
 // Tests using wide strings in assertion messages.
 TEST(AssertionWithMessageTest, WideStringMessage) {
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_TRUE(false) << L"This failure is expected.\x8119";
   }, "This failure is expected.");
   EXPECT_FATAL_FAILURE({  // NOLINT
     ASSERT_EQ(1, 2) << "This failure is "
                     << L"expected too.\x8120";
   }, "This failure is expected too.");
 }
 #endif  // GTEST_OS_WINDOWS
 
 // Tests EXPECT_TRUE.
 TEST(ExpectTest, EXPECT_TRUE) {
   EXPECT_TRUE(true) << "Intentional success";
   EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(false) << "Intentional failure #1.",
                           "Intentional failure #1.");
   EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(false) << "Intentional failure #2.",
                           "Intentional failure #2.");
   EXPECT_TRUE(2 > 1);  // NOLINT
   EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(2 < 1),
                           "Value of: 2 < 1\n"
                           "  Actual: false\n"
                           "Expected: true");
   EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(2 > 3),
                           "2 > 3");
 }
 
 // Tests EXPECT_TRUE(predicate) for predicates returning AssertionResult.
 TEST(ExpectTest, ExpectTrueWithAssertionResult) {
   EXPECT_TRUE(ResultIsEven(2));
   EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(ResultIsEven(3)),
                           "Value of: ResultIsEven(3)\n"
                           "  Actual: false (3 is odd)\n"
                           "Expected: true");
   EXPECT_TRUE(ResultIsEvenNoExplanation(2));
   EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(ResultIsEvenNoExplanation(3)),
                           "Value of: ResultIsEvenNoExplanation(3)\n"
                           "  Actual: false (3 is odd)\n"
                           "Expected: true");
 }
 
 // Tests EXPECT_FALSE with a streamed message.
 TEST(ExpectTest, EXPECT_FALSE) {
   EXPECT_FALSE(2 < 1);  // NOLINT
   EXPECT_FALSE(false) << "Intentional success";
   EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(true) << "Intentional failure #1.",
                           "Intentional failure #1.");
   EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(true) << "Intentional failure #2.",
                           "Intentional failure #2.");
   EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(2 > 1),
                           "Value of: 2 > 1\n"
                           "  Actual: true\n"
                           "Expected: false");
   EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(2 < 3),
                           "2 < 3");
 }
 
 // Tests EXPECT_FALSE(predicate) for predicates returning AssertionResult.
 TEST(ExpectTest, ExpectFalseWithAssertionResult) {
   EXPECT_FALSE(ResultIsEven(3));
   EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(ResultIsEven(2)),
                           "Value of: ResultIsEven(2)\n"
                           "  Actual: true (2 is even)\n"
                           "Expected: false");
   EXPECT_FALSE(ResultIsEvenNoExplanation(3));
   EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(ResultIsEvenNoExplanation(2)),
                           "Value of: ResultIsEvenNoExplanation(2)\n"
                           "  Actual: true\n"
                           "Expected: false");
 }
 
 #ifdef __BORLANDC__
 // Restores warnings after previous "#pragma option push" suppressed them
 # pragma option pop
 #endif
 
 // Tests EXPECT_EQ.
 TEST(ExpectTest, EXPECT_EQ) {
   EXPECT_EQ(5, 2 + 3);
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(5, 2*3),
                           "Expected equality of these values:\n"
                           "  5\n"
                           "  2*3\n"
                           "    Which is: 6");
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(5, 2 - 3),
                           "2 - 3");
 }
 
 // Tests using EXPECT_EQ on double values.  The purpose is to make
 // sure that the specialization we did for integer and anonymous enums
 // isn't used for double arguments.
 TEST(ExpectTest, EXPECT_EQ_Double) {
   // A success.
   EXPECT_EQ(5.6, 5.6);
 
   // A failure.
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(5.1, 5.2),
                           "5.1");
 }
 
 #if GTEST_CAN_COMPARE_NULL
 // Tests EXPECT_EQ(NULL, pointer).
 TEST(ExpectTest, EXPECT_EQ_NULL) {
   // A success.
   const char* p = NULL;
   // Some older GCC versions may issue a spurious warning in this or the next
   // assertion statement. This warning should not be suppressed with
   // static_cast since the test verifies the ability to use bare NULL as the
   // expected parameter to the macro.
   EXPECT_EQ(NULL, p);
 
   // A failure.
   int n = 0;
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(NULL, &n),
                           "  &n\n    Which is:");
 }
 #endif  // GTEST_CAN_COMPARE_NULL
 
 // Tests EXPECT_EQ(0, non_pointer).  Since the literal 0 can be
 // treated as a null pointer by the compiler, we need to make sure
 // that EXPECT_EQ(0, non_pointer) isn't interpreted by Google Test as
 // EXPECT_EQ(static_cast<void*>(NULL), non_pointer).
 TEST(ExpectTest, EXPECT_EQ_0) {
   int n = 0;
 
   // A success.
   EXPECT_EQ(0, n);
 
   // A failure.
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(0, 5.6),
                           "  0\n  5.6");
 }
 
 // Tests EXPECT_NE.
 TEST(ExpectTest, EXPECT_NE) {
   EXPECT_NE(6, 7);
 
   EXPECT_NONFATAL_FAILURE(EXPECT_NE('a', 'a'),
                           "Expected: ('a') != ('a'), "
                           "actual: 'a' (97, 0x61) vs 'a' (97, 0x61)");
   EXPECT_NONFATAL_FAILURE(EXPECT_NE(2, 2),
                           "2");
   char* const p0 = NULL;
   EXPECT_NONFATAL_FAILURE(EXPECT_NE(p0, p0),
                           "p0");
    // The only way to get the Nokia compiler to compile the cast
    // is to have a separate void* variable first.  Putting
    // the two casts on the same line doesn't work, nor does
    // a direct C-style cast to char*.
   void* pv1 = (void*)0x1234;  // NOLINT
   char* const p1 = reinterpret_cast<char*>(pv1);
   EXPECT_NONFATAL_FAILURE(EXPECT_NE(p1, p1),
                           "p1");
 }
 
 // Tests EXPECT_LE.
 TEST(ExpectTest, EXPECT_LE) {
   EXPECT_LE(2, 3);
   EXPECT_LE(2, 2);
   EXPECT_NONFATAL_FAILURE(EXPECT_LE(2, 0),
                           "Expected: (2) <= (0), actual: 2 vs 0");
   EXPECT_NONFATAL_FAILURE(EXPECT_LE(1.1, 0.9),
                           "(1.1) <= (0.9)");
 }
 
 // Tests EXPECT_LT.
 TEST(ExpectTest, EXPECT_LT) {
   EXPECT_LT(2, 3);
   EXPECT_NONFATAL_FAILURE(EXPECT_LT(2, 2),
                           "Expected: (2) < (2), actual: 2 vs 2");
   EXPECT_NONFATAL_FAILURE(EXPECT_LT(2, 1),
                           "(2) < (1)");
 }
 
 // Tests EXPECT_GE.
 TEST(ExpectTest, EXPECT_GE) {
   EXPECT_GE(2, 1);
   EXPECT_GE(2, 2);
   EXPECT_NONFATAL_FAILURE(EXPECT_GE(2, 3),
                           "Expected: (2) >= (3), actual: 2 vs 3");
   EXPECT_NONFATAL_FAILURE(EXPECT_GE(0.9, 1.1),
                           "(0.9) >= (1.1)");
 }
 
 // Tests EXPECT_GT.
 TEST(ExpectTest, EXPECT_GT) {
   EXPECT_GT(2, 1);
   EXPECT_NONFATAL_FAILURE(EXPECT_GT(2, 2),
                           "Expected: (2) > (2), actual: 2 vs 2");
   EXPECT_NONFATAL_FAILURE(EXPECT_GT(2, 3),
                           "(2) > (3)");
 }
 
 #if GTEST_HAS_EXCEPTIONS
 
 // Tests EXPECT_THROW.
 TEST(ExpectTest, EXPECT_THROW) {
   EXPECT_THROW(ThrowAnInteger(), int);
   EXPECT_NONFATAL_FAILURE(EXPECT_THROW(ThrowAnInteger(), bool),
                           "Expected: ThrowAnInteger() throws an exception of "
                           "type bool.\n  Actual: it throws a different type.");
   EXPECT_NONFATAL_FAILURE(
       EXPECT_THROW(ThrowNothing(), bool),
       "Expected: ThrowNothing() throws an exception of type bool.\n"
       "  Actual: it throws nothing.");
 }
 
 // Tests EXPECT_NO_THROW.
 TEST(ExpectTest, EXPECT_NO_THROW) {
   EXPECT_NO_THROW(ThrowNothing());
   EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(ThrowAnInteger()),
                           "Expected: ThrowAnInteger() doesn't throw an "
                           "exception.\n  Actual: it throws.");
 }
 
 // Tests EXPECT_ANY_THROW.
 TEST(ExpectTest, EXPECT_ANY_THROW) {
   EXPECT_ANY_THROW(ThrowAnInteger());
   EXPECT_NONFATAL_FAILURE(
       EXPECT_ANY_THROW(ThrowNothing()),
       "Expected: ThrowNothing() throws an exception.\n"
       "  Actual: it doesn't.");
 }
 
 #endif  // GTEST_HAS_EXCEPTIONS
 
 // Make sure we deal with the precedence of <<.
 TEST(ExpectTest, ExpectPrecedence) {
   EXPECT_EQ(1 < 2, true);
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(true, true && false),
                           "  true && false\n    Which is: false");
 }
 
 
 // Tests the StreamableToString() function.
 
 // Tests using StreamableToString() on a scalar.
 TEST(StreamableToStringTest, Scalar) {
   EXPECT_STREQ("5", StreamableToString(5).c_str());
 }
 
 // Tests using StreamableToString() on a non-char pointer.
 TEST(StreamableToStringTest, Pointer) {
   int n = 0;
   int* p = &n;
   EXPECT_STRNE("(null)", StreamableToString(p).c_str());
 }
 
 // Tests using StreamableToString() on a NULL non-char pointer.
 TEST(StreamableToStringTest, NullPointer) {
   int* p = NULL;
   EXPECT_STREQ("(null)", StreamableToString(p).c_str());
 }
 
 // Tests using StreamableToString() on a C string.
 TEST(StreamableToStringTest, CString) {
   EXPECT_STREQ("Foo", StreamableToString("Foo").c_str());
 }
 
 // Tests using StreamableToString() on a NULL C string.
 TEST(StreamableToStringTest, NullCString) {
   char* p = NULL;
   EXPECT_STREQ("(null)", StreamableToString(p).c_str());
 }
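
  // (StreamableToString, exercised above, is the testing::internal helper that
  // formats any value supporting operator<< into a std::string; Google Test
  // builds its failure messages from such conversions, substituting "(null)"
  // for NULL char pointers as the tests above and below verify.)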
 
 // Tests using streamable values as assertion messages.
 
 // Tests using std::string as an assertion message.
 TEST(StreamableTest, string) {
   static const std::string str(
       "This failure message is a std::string, and is expected.");
   EXPECT_FATAL_FAILURE(FAIL() << str,
                        str.c_str());
 }
 
 // Tests that we can output strings containing embedded NULs.
  // Limited to Linux because we can only do this with std::strings.
 TEST(StreamableTest, stringWithEmbeddedNUL) {
   static const char char_array_with_nul[] =
       "Here's a NUL\0 and some more string";
   static const std::string string_with_nul(char_array_with_nul,
                                            sizeof(char_array_with_nul)
                                            - 1);  // drops the trailing NUL
   EXPECT_FATAL_FAILURE(FAIL() << string_with_nul,
                        "Here's a NUL\\0 and some more string");
 }
 
 // Tests that we can output a NUL char.
 TEST(StreamableTest, NULChar) {
   EXPECT_FATAL_FAILURE({  // NOLINT
     FAIL() << "A NUL" << '\0' << " and some more string";
   }, "A NUL\\0 and some more string");
 }
 
 // Tests using int as an assertion message.
 TEST(StreamableTest, int) {
   EXPECT_FATAL_FAILURE(FAIL() << 900913,
                        "900913");
 }
 
 // Tests using NULL char pointer as an assertion message.
 //
  // In MSVC, streaming a NULL char * causes an access violation.  Google Test
 // implemented a workaround (substituting "(null)" for NULL).  This
 // tests whether the workaround works.
 TEST(StreamableTest, NullCharPtr) {
   EXPECT_FATAL_FAILURE(FAIL() << static_cast<const char*>(NULL),
                        "(null)");
 }
 
 // Tests that basic IO manipulators (endl, ends, and flush) can be
 // streamed to testing::Message.
 TEST(StreamableTest, BasicIoManip) {
   EXPECT_FATAL_FAILURE({  // NOLINT
     FAIL() << "Line 1." << std::endl
            << "A NUL char " << std::ends << std::flush << " in line 2.";
   }, "Line 1.\nA NUL char \\0 in line 2.");
 }
 
 // Tests the macros that haven't been covered so far.
 
 void AddFailureHelper(bool* aborted) {
   *aborted = true;
   ADD_FAILURE() << "Intentional failure.";
   *aborted = false;
 }
 
 // Tests ADD_FAILURE.
 TEST(MacroTest, ADD_FAILURE) {
   bool aborted = true;
   EXPECT_NONFATAL_FAILURE(AddFailureHelper(&aborted),
                           "Intentional failure.");
   EXPECT_FALSE(aborted);
 }
 
 // Tests ADD_FAILURE_AT.
 TEST(MacroTest, ADD_FAILURE_AT) {
   // Verifies that ADD_FAILURE_AT does generate a nonfatal failure and
   // the failure message contains the user-streamed part.
   EXPECT_NONFATAL_FAILURE(ADD_FAILURE_AT("foo.cc", 42) << "Wrong!", "Wrong!");
 
   // Verifies that the user-streamed part is optional.
   EXPECT_NONFATAL_FAILURE(ADD_FAILURE_AT("foo.cc", 42), "Failed");
 
   // Unfortunately, we cannot verify that the failure message contains
   // the right file path and line number the same way, as
   // EXPECT_NONFATAL_FAILURE() doesn't get to see the file path and
-  // line number.  Instead, we do that in gtest_output_test_.cc.
+  // line number.  Instead, we do that in googletest-output-test_.cc.
 }
 
 // Tests FAIL.
 TEST(MacroTest, FAIL) {
   EXPECT_FATAL_FAILURE(FAIL(),
                        "Failed");
   EXPECT_FATAL_FAILURE(FAIL() << "Intentional failure.",
                        "Intentional failure.");
 }
 
 // Tests SUCCEED
 TEST(MacroTest, SUCCEED) {
   SUCCEED();
   SUCCEED() << "Explicit success.";
 }
 
 // Tests for EXPECT_EQ() and ASSERT_EQ().
 //
  // These tests fail *intentionally*, so that the failure messages can be
 // generated and tested.
 //
 // We have different tests for different argument types.
 
 // Tests using bool values in {EXPECT|ASSERT}_EQ.
 TEST(EqAssertionTest, Bool) {
   EXPECT_EQ(true,  true);
   EXPECT_FATAL_FAILURE({
       bool false_value = false;
       ASSERT_EQ(false_value, true);
     }, "  false_value\n    Which is: false\n  true");
 }
 
 // Tests using int values in {EXPECT|ASSERT}_EQ.
 TEST(EqAssertionTest, Int) {
   ASSERT_EQ(32, 32);
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(32, 33),
                           "  32\n  33");
 }
 
 // Tests using time_t values in {EXPECT|ASSERT}_EQ.
 TEST(EqAssertionTest, Time_T) {
   EXPECT_EQ(static_cast<time_t>(0),
             static_cast<time_t>(0));
   EXPECT_FATAL_FAILURE(ASSERT_EQ(static_cast<time_t>(0),
                                  static_cast<time_t>(1234)),
                        "1234");
 }
 
 // Tests using char values in {EXPECT|ASSERT}_EQ.
 TEST(EqAssertionTest, Char) {
   ASSERT_EQ('z', 'z');
   const char ch = 'b';
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ('\0', ch),
                           "  ch\n    Which is: 'b'");
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ('a', ch),
                           "  ch\n    Which is: 'b'");
 }
 
 // Tests using wchar_t values in {EXPECT|ASSERT}_EQ.
 TEST(EqAssertionTest, WideChar) {
   EXPECT_EQ(L'b', L'b');
 
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(L'\0', L'x'),
                           "Expected equality of these values:\n"
                           "  L'\0'\n"
                           "    Which is: L'\0' (0, 0x0)\n"
                           "  L'x'\n"
                           "    Which is: L'x' (120, 0x78)");
 
   static wchar_t wchar;
   wchar = L'b';
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(L'a', wchar),
                           "wchar");
   wchar = 0x8119;
   EXPECT_FATAL_FAILURE(ASSERT_EQ(static_cast<wchar_t>(0x8120), wchar),
                        "  wchar\n    Which is: L'");
 }
 
 // Tests using ::std::string values in {EXPECT|ASSERT}_EQ.
 TEST(EqAssertionTest, StdString) {
   // Compares a const char* to an std::string that has identical
   // content.
   ASSERT_EQ("Test", ::std::string("Test"));
 
   // Compares two identical std::strings.
   static const ::std::string str1("A * in the middle");
   static const ::std::string str2(str1);
   EXPECT_EQ(str1, str2);
 
   // Compares a const char* to an std::string that has different
   // content
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ("Test", ::std::string("test")),
                           "\"test\"");
 
   // Compares an std::string to a char* that has different content.
   char* const p1 = const_cast<char*>("foo");
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(::std::string("bar"), p1),
                           "p1");
 
    // Compares two std::strings that have different contents, one of
    // which has a NUL character in the middle.  This comparison should fail.
   static ::std::string str3(str1);
   str3.at(2) = '\0';
   EXPECT_FATAL_FAILURE(ASSERT_EQ(str1, str3),
                        "  str3\n    Which is: \"A \\0 in the middle\"");
 }
 
 #if GTEST_HAS_STD_WSTRING
 
 // Tests using ::std::wstring values in {EXPECT|ASSERT}_EQ.
 TEST(EqAssertionTest, StdWideString) {
   // Compares two identical std::wstrings.
   const ::std::wstring wstr1(L"A * in the middle");
   const ::std::wstring wstr2(wstr1);
   ASSERT_EQ(wstr1, wstr2);
 
   // Compares an std::wstring to a const wchar_t* that has identical
   // content.
   const wchar_t kTestX8119[] = { 'T', 'e', 's', 't', 0x8119, '\0' };
   EXPECT_EQ(::std::wstring(kTestX8119), kTestX8119);
 
   // Compares an std::wstring to a const wchar_t* that has different
   // content.
   const wchar_t kTestX8120[] = { 'T', 'e', 's', 't', 0x8120, '\0' };
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_EQ(::std::wstring(kTestX8119), kTestX8120);
   }, "kTestX8120");
 
   // Compares two std::wstrings that have different contents, one of
 // which has a NUL character in the middle.
   ::std::wstring wstr3(wstr1);
   wstr3.at(2) = L'\0';
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(wstr1, wstr3),
                           "wstr3");
 
   // Compares a wchar_t* to an std::wstring that has different
   // content.
   EXPECT_FATAL_FAILURE({  // NOLINT
     ASSERT_EQ(const_cast<wchar_t*>(L"foo"), ::std::wstring(L"bar"));
   }, "");
 }
 
 #endif  // GTEST_HAS_STD_WSTRING
 
 #if GTEST_HAS_GLOBAL_STRING
 // Tests using ::string values in {EXPECT|ASSERT}_EQ.
 TEST(EqAssertionTest, GlobalString) {
   // Compares a const char* to a ::string that has identical content.
   EXPECT_EQ("Test", ::string("Test"));
 
   // Compares two identical ::strings.
   const ::string str1("A * in the middle");
   const ::string str2(str1);
   ASSERT_EQ(str1, str2);
 
   // Compares a ::string to a const char* that has different content.
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(::string("Test"), "test"),
                           "test");
 
   // Compares two ::strings that have different contents, one of which
 // has a NUL character in the middle.
   ::string str3(str1);
   str3.at(2) = '\0';
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(str1, str3),
                           "str3");
 
   // Compares a ::string to a char* that has different content.
   EXPECT_FATAL_FAILURE({  // NOLINT
     ASSERT_EQ(::string("bar"), const_cast<char*>("foo"));
   }, "");
 }
 
 #endif  // GTEST_HAS_GLOBAL_STRING
 
 #if GTEST_HAS_GLOBAL_WSTRING
 
 // Tests using ::wstring values in {EXPECT|ASSERT}_EQ.
 TEST(EqAssertionTest, GlobalWideString) {
   // Compares two identical ::wstrings.
   static const ::wstring wstr1(L"A * in the middle");
   static const ::wstring wstr2(wstr1);
   EXPECT_EQ(wstr1, wstr2);
 
   // Compares a const wchar_t* to a ::wstring that has identical content.
   const wchar_t kTestX8119[] = { 'T', 'e', 's', 't', 0x8119, '\0' };
   ASSERT_EQ(kTestX8119, ::wstring(kTestX8119));
 
   // Compares a const wchar_t* to a ::wstring that has different
   // content.
   const wchar_t kTestX8120[] = { 'T', 'e', 's', 't', 0x8120, '\0' };
   EXPECT_NONFATAL_FAILURE({  // NOLINT
     EXPECT_EQ(kTestX8120, ::wstring(kTestX8119));
   }, "Test\\x8119");
 
   // Compares a wchar_t* to a ::wstring that has different content.
   wchar_t* const p1 = const_cast<wchar_t*>(L"foo");
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p1, ::wstring(L"bar")),
                           "bar");
 
   // Compares two ::wstrings that have different contents, one of which
 // has a NUL character in the middle.
   static ::wstring wstr3;
   wstr3 = wstr1;
   wstr3.at(2) = L'\0';
   EXPECT_FATAL_FAILURE(ASSERT_EQ(wstr1, wstr3),
                        "wstr3");
 }
 
 #endif  // GTEST_HAS_GLOBAL_WSTRING
 
 // Tests using char pointers in {EXPECT|ASSERT}_EQ.
 TEST(EqAssertionTest, CharPointer) {
   char* const p0 = NULL;
   // The only way to get the Nokia compiler to compile the cast
   // is to have a separate void* variable first.  Putting the two
   // casts on the same line doesn't work, nor does a direct
   // C-style cast to char*.
   void* pv1 = (void*)0x1234;  // NOLINT
   void* pv2 = (void*)0xABC0;  // NOLINT
   char* const p1 = reinterpret_cast<char*>(pv1);
   char* const p2 = reinterpret_cast<char*>(pv2);
   ASSERT_EQ(p1, p1);
 
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p0, p2),
                           "  p2\n    Which is:");
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p1, p2),
                           "  p2\n    Which is:");
   EXPECT_FATAL_FAILURE(ASSERT_EQ(reinterpret_cast<char*>(0x1234),
                                  reinterpret_cast<char*>(0xABC0)),
                        "ABC0");
 }
 
 // Tests using wchar_t pointers in {EXPECT|ASSERT}_EQ.
 TEST(EqAssertionTest, WideCharPointer) {
   wchar_t* const p0 = NULL;
   // The only way to get the Nokia compiler to compile the cast
   // is to have a separate void* variable first.  Putting the two
   // casts on the same line doesn't work, nor does a direct
   // C-style cast to wchar_t*.
   void* pv1 = (void*)0x1234;  // NOLINT
   void* pv2 = (void*)0xABC0;  // NOLINT
   wchar_t* const p1 = reinterpret_cast<wchar_t*>(pv1);
   wchar_t* const p2 = reinterpret_cast<wchar_t*>(pv2);
   EXPECT_EQ(p0, p0);
 
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p0, p2),
                           "  p2\n    Which is:");
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p1, p2),
                           "  p2\n    Which is:");
   void* pv3 = (void*)0x1234;  // NOLINT
   void* pv4 = (void*)0xABC0;  // NOLINT
   const wchar_t* p3 = reinterpret_cast<const wchar_t*>(pv3);
   const wchar_t* p4 = reinterpret_cast<const wchar_t*>(pv4);
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p3, p4),
                           "p4");
 }
 
 // Tests using other types of pointers in {EXPECT|ASSERT}_EQ.
 TEST(EqAssertionTest, OtherPointer) {
   ASSERT_EQ(static_cast<const int*>(NULL),
             static_cast<const int*>(NULL));
   EXPECT_FATAL_FAILURE(ASSERT_EQ(static_cast<const int*>(NULL),
                                  reinterpret_cast<const int*>(0x1234)),
                        "0x1234");
 }
 
 // A class that supports binary comparison operators but not streaming.
 class UnprintableChar {
  public:
   explicit UnprintableChar(char ch) : char_(ch) {}
 
   bool operator==(const UnprintableChar& rhs) const {
     return char_ == rhs.char_;
   }
   bool operator!=(const UnprintableChar& rhs) const {
     return char_ != rhs.char_;
   }
   bool operator<(const UnprintableChar& rhs) const {
     return char_ < rhs.char_;
   }
   bool operator<=(const UnprintableChar& rhs) const {
     return char_ <= rhs.char_;
   }
   bool operator>(const UnprintableChar& rhs) const {
     return char_ > rhs.char_;
   }
   bool operator>=(const UnprintableChar& rhs) const {
     return char_ >= rhs.char_;
   }
 
  private:
   char char_;
 };
 
 // Tests that ASSERT_EQ() and friends don't require the arguments to
 // be printable.
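 // (The expected substrings below come from Google Test's fallback printer:
 // since UnprintableChar has no operator<<, the object is shown as a raw byte
 // dump, and 0x78 / 0x79 are simply the ASCII codes of 'x' and 'y'.)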
 TEST(ComparisonAssertionTest, AcceptsUnprintableArgs) {
   const UnprintableChar x('x'), y('y');
   ASSERT_EQ(x, x);
   EXPECT_NE(x, y);
   ASSERT_LT(x, y);
   EXPECT_LE(x, y);
   ASSERT_GT(y, x);
   EXPECT_GE(x, x);
 
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(x, y), "1-byte object <78>");
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(x, y), "1-byte object <79>");
   EXPECT_NONFATAL_FAILURE(EXPECT_LT(y, y), "1-byte object <79>");
   EXPECT_NONFATAL_FAILURE(EXPECT_GT(x, y), "1-byte object <78>");
   EXPECT_NONFATAL_FAILURE(EXPECT_GT(x, y), "1-byte object <79>");
 
   // Code tested by EXPECT_FATAL_FAILURE cannot reference local
   // variables, so we have to write UnprintableChar('x') instead of x.
 #ifndef __BORLANDC__
   // ICEs (internal compiler errors) in C++Builder.
   EXPECT_FATAL_FAILURE(ASSERT_NE(UnprintableChar('x'), UnprintableChar('x')),
                        "1-byte object <78>");
   EXPECT_FATAL_FAILURE(ASSERT_LE(UnprintableChar('y'), UnprintableChar('x')),
                        "1-byte object <78>");
 #endif
   EXPECT_FATAL_FAILURE(ASSERT_LE(UnprintableChar('y'), UnprintableChar('x')),
                        "1-byte object <79>");
   EXPECT_FATAL_FAILURE(ASSERT_GE(UnprintableChar('x'), UnprintableChar('y')),
                        "1-byte object <78>");
   EXPECT_FATAL_FAILURE(ASSERT_GE(UnprintableChar('x'), UnprintableChar('y')),
                        "1-byte object <79>");
 }
 
 // Tests the FRIEND_TEST macro.
 
 // This class has a private member we want to test.  We will test it
 // both in a TEST and in a TEST_F.
 class Foo {
  public:
   Foo() {}
 
  private:
   int Bar() const { return 1; }
 
   // Declares the friend tests that can access the private member
   // Bar().
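   // Note: FRIEND_TEST(Suite, Name) befriends the test class that Google Test
   // generates for that test (Suite_Name_Test), so the names given here must
   // match the TEST/TEST_F definitions below.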
   FRIEND_TEST(FRIEND_TEST_Test, TEST);
   FRIEND_TEST(FRIEND_TEST_Test2, TEST_F);
 };
 
 // Tests that the FRIEND_TEST declaration allows a TEST to access a
 // class's private members.  This should compile.
 TEST(FRIEND_TEST_Test, TEST) {
   ASSERT_EQ(1, Foo().Bar());
 }
 
 // The fixture needed to test using FRIEND_TEST with TEST_F.
 class FRIEND_TEST_Test2 : public Test {
  protected:
   Foo foo;
 };
 
 // Tests that the FRIEND_TEST declaration allows a TEST_F to access a
 // class's private members.  This should compile.
 TEST_F(FRIEND_TEST_Test2, TEST_F) {
   ASSERT_EQ(1, foo.Bar());
 }
 
 // Tests the life cycle of Test objects.
 
 // The test fixture for testing the life cycle of Test objects.
 //
 // This class counts the number of live test objects that use this
 // fixture.
 class TestLifeCycleTest : public Test {
  protected:
   // Constructor.  Increments the number of test objects that use
   // this fixture.
   TestLifeCycleTest() { count_++; }
 
   // Destructor.  Decrements the number of test objects that use this
   // fixture.
   ~TestLifeCycleTest() { count_--; }
 
   // Returns the number of live test objects that use this fixture.
   int count() const { return count_; }
 
  private:
   static int count_;
 };
 
 int TestLifeCycleTest::count_ = 0;
 
 // Tests the life cycle of test objects.
 TEST_F(TestLifeCycleTest, Test1) {
   // There should be only one test object in this test case that's
   // currently alive.
   ASSERT_EQ(1, count());
 }
 
 // Tests the life cycle of test objects.
 TEST_F(TestLifeCycleTest, Test2) {
   // After Test1 is done and Test2 is started, there should still be
   // only one live test object, as the object for Test1 should've been
   // deleted.
   ASSERT_EQ(1, count());
 }
 
 }  // namespace
 
 // Tests that the copy constructor works when it is NOT optimized away by
 // the compiler.
 TEST(AssertionResultTest, CopyConstructorWorksWhenNotOptimized) {
   // Checks that the copy constructor doesn't try to dereference NULL pointers
   // in the source object.
   AssertionResult r1 = AssertionSuccess();
   AssertionResult r2 = r1;
   // The following line is added to prevent the compiler from optimizing
   // away the constructor call.
   r1 << "abc";
 
   AssertionResult r3 = r1;
   EXPECT_EQ(static_cast<bool>(r3), static_cast<bool>(r1));
   EXPECT_STREQ("abc", r1.message());
 }
 
 // Tests that AssertionSuccess and AssertionFailure construct
 // AssertionResult objects as expected.
 TEST(AssertionResultTest, ConstructionWorks) {
   AssertionResult r1 = AssertionSuccess();
   EXPECT_TRUE(r1);
   EXPECT_STREQ("", r1.message());
 
   AssertionResult r2 = AssertionSuccess() << "abc";
   EXPECT_TRUE(r2);
   EXPECT_STREQ("abc", r2.message());
 
   AssertionResult r3 = AssertionFailure();
   EXPECT_FALSE(r3);
   EXPECT_STREQ("", r3.message());
 
   AssertionResult r4 = AssertionFailure() << "def";
   EXPECT_FALSE(r4);
   EXPECT_STREQ("def", r4.message());
 
   AssertionResult r5 = AssertionFailure(Message() << "ghi");
   EXPECT_FALSE(r5);
   EXPECT_STREQ("ghi", r5.message());
 }
 
 // Tests that the negation flips the predicate result but keeps the message.
 TEST(AssertionResultTest, NegationWorks) {
   AssertionResult r1 = AssertionSuccess() << "abc";
   EXPECT_FALSE(!r1);
   EXPECT_STREQ("abc", (!r1).message());
 
   AssertionResult r2 = AssertionFailure() << "def";
   EXPECT_TRUE(!r2);
   EXPECT_STREQ("def", (!r2).message());
 }
 
 TEST(AssertionResultTest, StreamingWorks) {
   AssertionResult r = AssertionSuccess();
   r << "abc" << 'd' << 0 << true;
   EXPECT_STREQ("abcd0true", r.message());
 }
 
 TEST(AssertionResultTest, CanStreamOstreamManipulators) {
   AssertionResult r = AssertionSuccess();
   r << "Data" << std::endl << std::flush << std::ends << "Will be visible";
   EXPECT_STREQ("Data\n\\0Will be visible", r.message());
 }
 
 // The next test uses explicit conversion operators -- a C++11 feature.
 #if GTEST_LANG_CXX11
 
 TEST(AssertionResultTest, ConstructibleFromContextuallyConvertibleToBool) {
   struct ExplicitlyConvertibleToBool {
     explicit operator bool() const { return value; }
     bool value;
   };
   ExplicitlyConvertibleToBool v1 = {false};
   ExplicitlyConvertibleToBool v2 = {true};
   EXPECT_FALSE(v1);
   EXPECT_TRUE(v2);
 }
 
 #endif  // GTEST_LANG_CXX11
 
 struct ConvertibleToAssertionResult {
   operator AssertionResult() const { return AssertionResult(true); }
 };
 
 TEST(AssertionResultTest, ConstructibleFromImplicitlyConvertible) {
   ConvertibleToAssertionResult obj;
   EXPECT_TRUE(obj);
 }
 
 // Tests streaming a user type whose definition and operator<< are
 // both in the global namespace.
 class Base {
  public:
   explicit Base(int an_x) : x_(an_x) {}
   int x() const { return x_; }
  private:
   int x_;
 };
 std::ostream& operator<<(std::ostream& os,
                          const Base& val) {
   return os << val.x();
 }
 std::ostream& operator<<(std::ostream& os,
                          const Base* pointer) {
   return os << "(" << pointer->x() << ")";
 }
 
 TEST(MessageTest, CanStreamUserTypeInGlobalNameSpace) {
   Message msg;
   Base a(1);
 
   msg << a << &a;  // Uses ::operator<<.
   EXPECT_STREQ("1(1)", msg.GetString().c_str());
 }
 
 // Tests streaming a user type whose definition and operator<< are
 // both in an unnamed namespace.
 namespace {
 class MyTypeInUnnamedNameSpace : public Base {
  public:
   explicit MyTypeInUnnamedNameSpace(int an_x): Base(an_x) {}
 };
 std::ostream& operator<<(std::ostream& os,
                          const MyTypeInUnnamedNameSpace& val) {
   return os << val.x();
 }
 std::ostream& operator<<(std::ostream& os,
                          const MyTypeInUnnamedNameSpace* pointer) {
   return os << "(" << pointer->x() << ")";
 }
 }  // namespace
 
 TEST(MessageTest, CanStreamUserTypeInUnnamedNameSpace) {
   Message msg;
   MyTypeInUnnamedNameSpace a(1);
 
   msg << a << &a;  // Uses <unnamed_namespace>::operator<<.
   EXPECT_STREQ("1(1)", msg.GetString().c_str());
 }
 
 // Tests streaming a user type whose definition and operator<< are
 // both in a user namespace.
 namespace namespace1 {
 class MyTypeInNameSpace1 : public Base {
  public:
   explicit MyTypeInNameSpace1(int an_x): Base(an_x) {}
 };
 std::ostream& operator<<(std::ostream& os,
                          const MyTypeInNameSpace1& val) {
   return os << val.x();
 }
 std::ostream& operator<<(std::ostream& os,
                          const MyTypeInNameSpace1* pointer) {
   return os << "(" << pointer->x() << ")";
 }
 }  // namespace namespace1
 
 TEST(MessageTest, CanStreamUserTypeInUserNameSpace) {
   Message msg;
   namespace1::MyTypeInNameSpace1 a(1);
 
   msg << a << &a;  // Uses namespace1::operator<<.
   EXPECT_STREQ("1(1)", msg.GetString().c_str());
 }
 
 // Tests streaming a user type whose definition is in a user namespace
 // but whose operator<< is in the global namespace.
 namespace namespace2 {
 class MyTypeInNameSpace2 : public ::Base {
  public:
   explicit MyTypeInNameSpace2(int an_x): Base(an_x) {}
 };
 }  // namespace namespace2
 std::ostream& operator<<(std::ostream& os,
                          const namespace2::MyTypeInNameSpace2& val) {
   return os << val.x();
 }
 std::ostream& operator<<(std::ostream& os,
                          const namespace2::MyTypeInNameSpace2* pointer) {
   return os << "(" << pointer->x() << ")";
 }
 
 TEST(MessageTest, CanStreamUserTypeInUserNameSpaceWithStreamOperatorInGlobal) {
   Message msg;
   namespace2::MyTypeInNameSpace2 a(1);
 
   msg << a << &a;  // Uses ::operator<<.
   EXPECT_STREQ("1(1)", msg.GetString().c_str());
 }
 
 // Tests streaming NULL pointers to testing::Message.
 TEST(MessageTest, NullPointers) {
   Message msg;
   char* const p1 = NULL;
   unsigned char* const p2 = NULL;
   int* p3 = NULL;
   double* p4 = NULL;
   bool* p5 = NULL;
   Message* p6 = NULL;
 
   msg << p1 << p2 << p3 << p4 << p5 << p6;
   ASSERT_STREQ("(null)(null)(null)(null)(null)(null)",
                msg.GetString().c_str());
 }
 
 // Tests streaming wide strings to testing::Message.
 TEST(MessageTest, WideStrings) {
   // Streams a NULL of type const wchar_t*.
   const wchar_t* const_wstr = NULL;
   EXPECT_STREQ("(null)",
                (Message() << const_wstr).GetString().c_str());
 
   // Streams a NULL of type wchar_t*.
   wchar_t* wstr = NULL;
   EXPECT_STREQ("(null)",
                (Message() << wstr).GetString().c_str());
 
   // Streams a non-NULL of type const wchar_t*.
   const_wstr = L"abc\x8119";
   EXPECT_STREQ("abc\xe8\x84\x99",
                (Message() << const_wstr).GetString().c_str());
 
   // Streams a non-NULL of type wchar_t*.
   wstr = const_cast<wchar_t*>(const_wstr);
   EXPECT_STREQ("abc\xe8\x84\x99",
                (Message() << wstr).GetString().c_str());
 }
 
 
 // This line tests that we can define tests in the testing namespace.
 namespace testing {
 
 // Tests the TestInfo class.
 
 class TestInfoTest : public Test {
  protected:
   static const TestInfo* GetTestInfo(const char* test_name) {
     const TestCase* const test_case = GetUnitTestImpl()->
         GetTestCase("TestInfoTest", "", NULL, NULL);
 
     for (int i = 0; i < test_case->total_test_count(); ++i) {
       const TestInfo* const test_info = test_case->GetTestInfo(i);
       if (strcmp(test_name, test_info->name()) == 0)
         return test_info;
     }
     return NULL;
   }
 
   static const TestResult* GetTestResult(
       const TestInfo* test_info) {
     return test_info->result();
   }
 };
 
 // Tests TestInfo::test_case_name() and TestInfo::name().
 TEST_F(TestInfoTest, Names) {
   const TestInfo* const test_info = GetTestInfo("Names");
 
   ASSERT_STREQ("TestInfoTest", test_info->test_case_name());
   ASSERT_STREQ("Names", test_info->name());
 }
 
 // Tests TestInfo::result().
 TEST_F(TestInfoTest, result) {
   const TestInfo* const test_info = GetTestInfo("result");
 
   // Initially, there is no TestPartResult for this test.
   ASSERT_EQ(0, GetTestResult(test_info)->total_part_count());
 
   // After the previous assertion, there is still none.
   ASSERT_EQ(0, GetTestResult(test_info)->total_part_count());
 }
 
 #define VERIFY_CODE_LOCATION \
   const int expected_line = __LINE__ - 1; \
   const TestInfo* const test_info = GetUnitTestImpl()->current_test_info(); \
   ASSERT_TRUE(test_info); \
   EXPECT_STREQ(__FILE__, test_info->file()); \
   EXPECT_EQ(expected_line, test_info->line())
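 // Note: the macro records __LINE__ - 1 because it is meant to be placed on
 // the first line of a test body, i.e. one line below the TEST* macro whose
 // line number TestInfo::line() reports.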
 
 TEST(CodeLocationForTEST, Verify) {
   VERIFY_CODE_LOCATION;
 }
 
 class CodeLocationForTESTF : public Test {
 };
 
 TEST_F(CodeLocationForTESTF, Verify) {
   VERIFY_CODE_LOCATION;
 }
 
 class CodeLocationForTESTP : public TestWithParam<int> {
 };
 
 TEST_P(CodeLocationForTESTP, Verify) {
   VERIFY_CODE_LOCATION;
 }
 
 INSTANTIATE_TEST_CASE_P(, CodeLocationForTESTP, Values(0));
 
 template <typename T>
 class CodeLocationForTYPEDTEST : public Test {
 };
 
 TYPED_TEST_CASE(CodeLocationForTYPEDTEST, int);
 
 TYPED_TEST(CodeLocationForTYPEDTEST, Verify) {
   VERIFY_CODE_LOCATION;
 }
 
 template <typename T>
 class CodeLocationForTYPEDTESTP : public Test {
 };
 
 TYPED_TEST_CASE_P(CodeLocationForTYPEDTESTP);
 
 TYPED_TEST_P(CodeLocationForTYPEDTESTP, Verify) {
   VERIFY_CODE_LOCATION;
 }
 
 REGISTER_TYPED_TEST_CASE_P(CodeLocationForTYPEDTESTP, Verify);
 
 INSTANTIATE_TYPED_TEST_CASE_P(My, CodeLocationForTYPEDTESTP, int);
 
 #undef VERIFY_CODE_LOCATION
 
 // Tests setting up and tearing down a test case.
 
 class SetUpTestCaseTest : public Test {
  protected:
   // This will be called once before the first test in this test case
   // is run.
   static void SetUpTestCase() {
     printf("Setting up the test case . . .\n");
 
     // Initializes some shared resource.  In this simple example, we
     // just create a C string.  More complex stuff can be done if
     // desired.
     shared_resource_ = "123";
 
     // Increments the number of test cases that have been set up.
     counter_++;
 
     // SetUpTestCase() should be called only once.
     EXPECT_EQ(1, counter_);
   }
 
   // This will be called once after the last test in this test case is
   // run.
   static void TearDownTestCase() {
     printf("Tearing down the test case . . .\n");
 
     // Decrements the number of test cases that have been set up.
     counter_--;
 
     // TearDownTestCase() should be called only once.
     EXPECT_EQ(0, counter_);
 
     // Cleans up the shared resource.
     shared_resource_ = NULL;
   }
 
   // This will be called before each test in this test case.
   virtual void SetUp() {
     // SetUpTestCase() should be called only once, so counter_ should
     // always be 1.
     EXPECT_EQ(1, counter_);
   }
 
   // Number of test cases that have been set up.
   static int counter_;
 
   // Some resource to be shared by all tests in this test case.
   static const char* shared_resource_;
 };
 
 int SetUpTestCaseTest::counter_ = 0;
 const char* SetUpTestCaseTest::shared_resource_ = NULL;
 
 // A test that uses the shared resource.
 TEST_F(SetUpTestCaseTest, Test1) {
   EXPECT_STRNE(NULL, shared_resource_);
 }
 
 // Another test that uses the shared resource.
 TEST_F(SetUpTestCaseTest, Test2) {
   EXPECT_STREQ("123", shared_resource_);
 }
 
 
 // The ParseFlagsTest test case tests ParseGoogleTestFlagsOnly.
 
 // The Flags struct stores a copy of all Google Test flags.
 struct Flags {
   // Constructs a Flags struct where each flag has its default value.
   Flags() : also_run_disabled_tests(false),
             break_on_failure(false),
             catch_exceptions(false),
             death_test_use_fork(false),
             filter(""),
             list_tests(false),
             output(""),
             print_time(true),
             random_seed(0),
             repeat(1),
             shuffle(false),
             stack_trace_depth(kMaxStackTraceDepth),
             stream_result_to(""),
             throw_on_failure(false) {}
 
   // Factory methods.
 
   // Creates a Flags struct where the gtest_also_run_disabled_tests flag has
   // the given value.
   static Flags AlsoRunDisabledTests(bool also_run_disabled_tests) {
     Flags flags;
     flags.also_run_disabled_tests = also_run_disabled_tests;
     return flags;
   }
 
   // Creates a Flags struct where the gtest_break_on_failure flag has
   // the given value.
   static Flags BreakOnFailure(bool break_on_failure) {
     Flags flags;
     flags.break_on_failure = break_on_failure;
     return flags;
   }
 
   // Creates a Flags struct where the gtest_catch_exceptions flag has
   // the given value.
   static Flags CatchExceptions(bool catch_exceptions) {
     Flags flags;
     flags.catch_exceptions = catch_exceptions;
     return flags;
   }
 
   // Creates a Flags struct where the gtest_death_test_use_fork flag has
   // the given value.
   static Flags DeathTestUseFork(bool death_test_use_fork) {
     Flags flags;
     flags.death_test_use_fork = death_test_use_fork;
     return flags;
   }
 
   // Creates a Flags struct where the gtest_filter flag has the given
   // value.
   static Flags Filter(const char* filter) {
     Flags flags;
     flags.filter = filter;
     return flags;
   }
 
   // Creates a Flags struct where the gtest_list_tests flag has the
   // given value.
   static Flags ListTests(bool list_tests) {
     Flags flags;
     flags.list_tests = list_tests;
     return flags;
   }
 
   // Creates a Flags struct where the gtest_output flag has the given
   // value.
   static Flags Output(const char* output) {
     Flags flags;
     flags.output = output;
     return flags;
   }
 
   // Creates a Flags struct where the gtest_print_time flag has the given
   // value.
   static Flags PrintTime(bool print_time) {
     Flags flags;
     flags.print_time = print_time;
     return flags;
   }
 
   // Creates a Flags struct where the gtest_random_seed flag has the given
   // value.
   static Flags RandomSeed(Int32 random_seed) {
     Flags flags;
     flags.random_seed = random_seed;
     return flags;
   }
 
   // Creates a Flags struct where the gtest_repeat flag has the given
   // value.
   static Flags Repeat(Int32 repeat) {
     Flags flags;
     flags.repeat = repeat;
     return flags;
   }
 
   // Creates a Flags struct where the gtest_shuffle flag has the given
   // value.
   static Flags Shuffle(bool shuffle) {
     Flags flags;
     flags.shuffle = shuffle;
     return flags;
   }
 
   // Creates a Flags struct where the GTEST_FLAG(stack_trace_depth) flag has
   // the given value.
   static Flags StackTraceDepth(Int32 stack_trace_depth) {
     Flags flags;
     flags.stack_trace_depth = stack_trace_depth;
     return flags;
   }
 
   // Creates a Flags struct where the GTEST_FLAG(stream_result_to) flag has
   // the given value.
   static Flags StreamResultTo(const char* stream_result_to) {
     Flags flags;
     flags.stream_result_to = stream_result_to;
     return flags;
   }
 
   // Creates a Flags struct where the gtest_throw_on_failure flag has
   // the given value.
   static Flags ThrowOnFailure(bool throw_on_failure) {
     Flags flags;
     flags.throw_on_failure = throw_on_failure;
     return flags;
   }
 
   // These fields store the flag values.
   bool also_run_disabled_tests;
   bool break_on_failure;
   bool catch_exceptions;
   bool death_test_use_fork;
   const char* filter;
   bool list_tests;
   const char* output;
   bool print_time;
   Int32 random_seed;
   Int32 repeat;
   bool shuffle;
   Int32 stack_trace_depth;
   const char* stream_result_to;
   bool throw_on_failure;
 };
 
 // Fixture for testing ParseGoogleTestFlagsOnly().
 class ParseFlagsTest : public Test {
  protected:
   // Clears the flags before each test.
   virtual void SetUp() {
     GTEST_FLAG(also_run_disabled_tests) = false;
     GTEST_FLAG(break_on_failure) = false;
     GTEST_FLAG(catch_exceptions) = false;
     GTEST_FLAG(death_test_use_fork) = false;
     GTEST_FLAG(filter) = "";
     GTEST_FLAG(list_tests) = false;
     GTEST_FLAG(output) = "";
     GTEST_FLAG(print_time) = true;
     GTEST_FLAG(random_seed) = 0;
     GTEST_FLAG(repeat) = 1;
     GTEST_FLAG(shuffle) = false;
     GTEST_FLAG(stack_trace_depth) = kMaxStackTraceDepth;
     GTEST_FLAG(stream_result_to) = "";
     GTEST_FLAG(throw_on_failure) = false;
   }
 
   // Asserts that two narrow or wide string arrays are equal.
   template <typename CharType>
   static void AssertStringArrayEq(size_t size1, CharType** array1,
                                   size_t size2, CharType** array2) {
     ASSERT_EQ(size1, size2) << " Array sizes different.";
 
     for (size_t i = 0; i != size1; i++) {
       ASSERT_STREQ(array1[i], array2[i]) << " where i == " << i;
     }
   }
 
   // Verifies that the flag values match the expected values.
   static void CheckFlags(const Flags& expected) {
     EXPECT_EQ(expected.also_run_disabled_tests,
               GTEST_FLAG(also_run_disabled_tests));
     EXPECT_EQ(expected.break_on_failure, GTEST_FLAG(break_on_failure));
     EXPECT_EQ(expected.catch_exceptions, GTEST_FLAG(catch_exceptions));
     EXPECT_EQ(expected.death_test_use_fork, GTEST_FLAG(death_test_use_fork));
     EXPECT_STREQ(expected.filter, GTEST_FLAG(filter).c_str());
     EXPECT_EQ(expected.list_tests, GTEST_FLAG(list_tests));
     EXPECT_STREQ(expected.output, GTEST_FLAG(output).c_str());
     EXPECT_EQ(expected.print_time, GTEST_FLAG(print_time));
     EXPECT_EQ(expected.random_seed, GTEST_FLAG(random_seed));
     EXPECT_EQ(expected.repeat, GTEST_FLAG(repeat));
     EXPECT_EQ(expected.shuffle, GTEST_FLAG(shuffle));
     EXPECT_EQ(expected.stack_trace_depth, GTEST_FLAG(stack_trace_depth));
     EXPECT_STREQ(expected.stream_result_to,
                  GTEST_FLAG(stream_result_to).c_str());
     EXPECT_EQ(expected.throw_on_failure, GTEST_FLAG(throw_on_failure));
   }
 
   // Parses a command line (specified by argc1 and argv1), then
   // verifies that the flag values are expected and that the
   // recognized flags are removed from the command line.
   template <typename CharType>
   static void TestParsingFlags(int argc1, const CharType** argv1,
                                int argc2, const CharType** argv2,
                                const Flags& expected, bool should_print_help) {
     const bool saved_help_flag = ::testing::internal::g_help_flag;
     ::testing::internal::g_help_flag = false;
 
 # if GTEST_HAS_STREAM_REDIRECTION
     CaptureStdout();
 # endif
 
     // Parses the command line.
     internal::ParseGoogleTestFlagsOnly(&argc1, const_cast<CharType**>(argv1));
 
 # if GTEST_HAS_STREAM_REDIRECTION
     const std::string captured_stdout = GetCapturedStdout();
 # endif
 
     // Verifies the flag values.
     CheckFlags(expected);
 
     // Verifies that the recognized flags are removed from the command
     // line.
     AssertStringArrayEq(argc1 + 1, argv1, argc2 + 1, argv2);
 
     // ParseGoogleTestFlagsOnly should set g_help_flag and print the help
     // message if and only if help is expected (e.g. for a malformed Google
     // Test flag); flags it parses successfully must not trigger either.
     EXPECT_EQ(should_print_help, ::testing::internal::g_help_flag);
 
 # if GTEST_HAS_STREAM_REDIRECTION
     const char* const expected_help_fragment =
         "This program contains tests written using";
     if (should_print_help) {
       EXPECT_PRED_FORMAT2(IsSubstring, expected_help_fragment, captured_stdout);
     } else {
       EXPECT_PRED_FORMAT2(IsNotSubstring,
                           expected_help_fragment, captured_stdout);
     }
 # endif  // GTEST_HAS_STREAM_REDIRECTION
 
     ::testing::internal::g_help_flag = saved_help_flag;
   }
 
   // This macro wraps TestParsingFlags so that the user doesn't need
   // to specify the array sizes.
 
 # define GTEST_TEST_PARSING_FLAGS_(argv1, argv2, expected, should_print_help) \
   TestParsingFlags(sizeof(argv1)/sizeof(*argv1) - 1, argv1, \
                    sizeof(argv2)/sizeof(*argv2) - 1, argv2, \
                    expected, should_print_help)
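   // For example, with argv1 == {"foo.exe", "--gtest_filter=abc", NULL},
   // sizeof(argv1)/sizeof(*argv1) is 3, so the macro passes argc1 == 2,
   // excluding the trailing NULL sentinel; AssertStringArrayEq() adds the 1
   // back (argc1 + 1) so the terminating NULL is compared as well.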
 };
 
 // Tests parsing an empty command line.
 TEST_F(ParseFlagsTest, Empty) {
   const char* argv[] = {
     NULL
   };
 
   const char* argv2[] = {
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), false);
 }
 
 // Tests parsing a command line that has no flag.
 TEST_F(ParseFlagsTest, NoFlag) {
   const char* argv[] = {
     "foo.exe",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), false);
 }
 
 // Tests parsing a bad --gtest_filter flag.
 TEST_F(ParseFlagsTest, FilterBad) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_filter",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     "--gtest_filter",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter(""), true);
 }
 
 // Tests parsing an empty --gtest_filter flag.
 TEST_F(ParseFlagsTest, FilterEmpty) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_filter=",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter(""), false);
 }
 
 // Tests parsing a non-empty --gtest_filter flag.
 TEST_F(ParseFlagsTest, FilterNonEmpty) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_filter=abc",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter("abc"), false);
 }
 
 // Tests parsing --gtest_break_on_failure.
 TEST_F(ParseFlagsTest, BreakOnFailureWithoutValue) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_break_on_failure",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(true), false);
 }
 
 // Tests parsing --gtest_break_on_failure=0.
 TEST_F(ParseFlagsTest, BreakOnFailureFalse_0) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_break_on_failure=0",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(false), false);
 }
 
 // Tests parsing --gtest_break_on_failure=f.
 TEST_F(ParseFlagsTest, BreakOnFailureFalse_f) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_break_on_failure=f",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(false), false);
 }
 
 // Tests parsing --gtest_break_on_failure=F.
 TEST_F(ParseFlagsTest, BreakOnFailureFalse_F) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_break_on_failure=F",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(false), false);
 }
 
 // Tests parsing a --gtest_break_on_failure flag that has a "true"
 // definition.
 TEST_F(ParseFlagsTest, BreakOnFailureTrue) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_break_on_failure=1",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(true), false);
 }
 
 // Tests parsing --gtest_catch_exceptions.
 TEST_F(ParseFlagsTest, CatchExceptions) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_catch_exceptions",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::CatchExceptions(true), false);
 }
 
 // Tests parsing --gtest_death_test_use_fork.
 TEST_F(ParseFlagsTest, DeathTestUseFork) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_death_test_use_fork",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::DeathTestUseFork(true), false);
 }
 
 // Tests having the same flag twice with different values.  The
 // expected behavior is that the one coming last takes precedence.
 TEST_F(ParseFlagsTest, DuplicatedFlags) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_filter=a",
     "--gtest_filter=b",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter("b"), false);
 }
 
 // Tests having an unrecognized flag on the command line.
 TEST_F(ParseFlagsTest, UnrecognizedFlag) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_break_on_failure",
     "bar",  // Unrecognized by Google Test.
     "--gtest_filter=b",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     "bar",
     NULL
   };
 
   Flags flags;
   flags.break_on_failure = true;
   flags.filter = "b";
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, flags, false);
 }
 
 // Tests having a --gtest_list_tests flag
 TEST_F(ParseFlagsTest, ListTestsFlag) {
     const char* argv[] = {
       "foo.exe",
       "--gtest_list_tests",
       NULL
     };
 
     const char* argv2[] = {
       "foo.exe",
       NULL
     };
 
     GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(true), false);
 }
 
 // Tests having a --gtest_list_tests flag with a "true" value
 TEST_F(ParseFlagsTest, ListTestsTrue) {
     const char* argv[] = {
       "foo.exe",
       "--gtest_list_tests=1",
       NULL
     };
 
     const char* argv2[] = {
       "foo.exe",
       NULL
     };
 
     GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(true), false);
 }
 
 // Tests having a --gtest_list_tests flag with a "false" value
 TEST_F(ParseFlagsTest, ListTestsFalse) {
     const char* argv[] = {
       "foo.exe",
       "--gtest_list_tests=0",
       NULL
     };
 
     const char* argv2[] = {
       "foo.exe",
       NULL
     };
 
     GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(false), false);
 }
 
 // Tests parsing --gtest_list_tests=f.
 TEST_F(ParseFlagsTest, ListTestsFalse_f) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_list_tests=f",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(false), false);
 }
 
 // Tests parsing --gtest_list_tests=F.
 TEST_F(ParseFlagsTest, ListTestsFalse_F) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_list_tests=F",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(false), false);
 }
 
 // Tests parsing --gtest_output (invalid).
 TEST_F(ParseFlagsTest, OutputEmpty) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_output",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     "--gtest_output",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), true);
 }
 
 // Tests parsing --gtest_output=xml
 TEST_F(ParseFlagsTest, OutputXml) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_output=xml",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Output("xml"), false);
 }
 
 // Tests parsing --gtest_output=xml:file
 TEST_F(ParseFlagsTest, OutputXmlFile) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_output=xml:file",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Output("xml:file"), false);
 }
 
 // Tests parsing --gtest_output=xml:directory/path/
 TEST_F(ParseFlagsTest, OutputXmlDirectory) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_output=xml:directory/path/",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2,
                             Flags::Output("xml:directory/path/"), false);
 }
 
 // Tests having a --gtest_print_time flag
 TEST_F(ParseFlagsTest, PrintTimeFlag) {
     const char* argv[] = {
       "foo.exe",
       "--gtest_print_time",
       NULL
     };
 
     const char* argv2[] = {
       "foo.exe",
       NULL
     };
 
     GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(true), false);
 }
 
 // Tests having a --gtest_print_time flag with a "true" value
 TEST_F(ParseFlagsTest, PrintTimeTrue) {
     const char* argv[] = {
       "foo.exe",
       "--gtest_print_time=1",
       NULL
     };
 
     const char* argv2[] = {
       "foo.exe",
       NULL
     };
 
     GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(true), false);
 }
 
 // Tests having a --gtest_print_time flag with a "false" value
 TEST_F(ParseFlagsTest, PrintTimeFalse) {
     const char* argv[] = {
       "foo.exe",
       "--gtest_print_time=0",
       NULL
     };
 
     const char* argv2[] = {
       "foo.exe",
       NULL
     };
 
     GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(false), false);
 }
 
 // Tests parsing --gtest_print_time=f.
 TEST_F(ParseFlagsTest, PrintTimeFalse_f) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_print_time=f",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(false), false);
 }
 
 // Tests parsing --gtest_print_time=F.
 TEST_F(ParseFlagsTest, PrintTimeFalse_F) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_print_time=F",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(false), false);
 }
 
 // Tests parsing --gtest_random_seed=number
 TEST_F(ParseFlagsTest, RandomSeed) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_random_seed=1000",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::RandomSeed(1000), false);
 }
 
 // Tests parsing --gtest_repeat=number
 TEST_F(ParseFlagsTest, Repeat) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_repeat=1000",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Repeat(1000), false);
 }
 
 // Tests having a --gtest_also_run_disabled_tests flag
 TEST_F(ParseFlagsTest, AlsoRunDisabledTestsFlag) {
     const char* argv[] = {
       "foo.exe",
       "--gtest_also_run_disabled_tests",
       NULL
     };
 
     const char* argv2[] = {
       "foo.exe",
       NULL
     };
 
     GTEST_TEST_PARSING_FLAGS_(argv, argv2,
                               Flags::AlsoRunDisabledTests(true), false);
 }
 
 // Tests having a --gtest_also_run_disabled_tests flag with a "true" value
 TEST_F(ParseFlagsTest, AlsoRunDisabledTestsTrue) {
     const char* argv[] = {
       "foo.exe",
       "--gtest_also_run_disabled_tests=1",
       NULL
     };
 
     const char* argv2[] = {
       "foo.exe",
       NULL
     };
 
     GTEST_TEST_PARSING_FLAGS_(argv, argv2,
                               Flags::AlsoRunDisabledTests(true), false);
 }
 
 // Tests having a --gtest_also_run_disabled_tests flag with a "false" value
 TEST_F(ParseFlagsTest, AlsoRunDisabledTestsFalse) {
     const char* argv[] = {
       "foo.exe",
       "--gtest_also_run_disabled_tests=0",
       NULL
     };
 
     const char* argv2[] = {
       "foo.exe",
       NULL
     };
 
     GTEST_TEST_PARSING_FLAGS_(argv, argv2,
                               Flags::AlsoRunDisabledTests(false), false);
 }
 
 // Tests parsing --gtest_shuffle.
 TEST_F(ParseFlagsTest, ShuffleWithoutValue) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_shuffle",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Shuffle(true), false);
 }
 
 // Tests parsing --gtest_shuffle=0.
 TEST_F(ParseFlagsTest, ShuffleFalse_0) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_shuffle=0",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Shuffle(false), false);
 }
 
 // Tests parsing a --gtest_shuffle flag that has a "true" definition.
 TEST_F(ParseFlagsTest, ShuffleTrue) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_shuffle=1",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Shuffle(true), false);
 }
 
 // Tests parsing --gtest_stack_trace_depth=number.
 TEST_F(ParseFlagsTest, StackTraceDepth) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_stack_trace_depth=5",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::StackTraceDepth(5), false);
 }
 
 TEST_F(ParseFlagsTest, StreamResultTo) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_stream_result_to=localhost:1234",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(
       argv, argv2, Flags::StreamResultTo("localhost:1234"), false);
 }
 
 // Tests parsing --gtest_throw_on_failure.
 TEST_F(ParseFlagsTest, ThrowOnFailureWithoutValue) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_throw_on_failure",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ThrowOnFailure(true), false);
 }
 
 // Tests parsing --gtest_throw_on_failure=0.
 TEST_F(ParseFlagsTest, ThrowOnFailureFalse_0) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_throw_on_failure=0",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ThrowOnFailure(false), false);
 }
 
 // Tests parsing a --gtest_throw_on_failure flag that has a "true"
 // definition.
 TEST_F(ParseFlagsTest, ThrowOnFailureTrue) {
   const char* argv[] = {
     "foo.exe",
     "--gtest_throw_on_failure=1",
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ThrowOnFailure(true), false);
 }
 
 # if GTEST_OS_WINDOWS
 // Tests parsing wide strings.
 TEST_F(ParseFlagsTest, WideStrings) {
   const wchar_t* argv[] = {
     L"foo.exe",
     L"--gtest_filter=Foo*",
     L"--gtest_list_tests=1",
     L"--gtest_break_on_failure",
     L"--non_gtest_flag",
     NULL
   };
 
   const wchar_t* argv2[] = {
     L"foo.exe",
     L"--non_gtest_flag",
     NULL
   };
 
   Flags expected_flags;
   expected_flags.break_on_failure = true;
   expected_flags.filter = "Foo*";
   expected_flags.list_tests = true;
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, expected_flags, false);
 }
 # endif  // GTEST_OS_WINDOWS
 
 #if GTEST_USE_OWN_FLAGFILE_FLAG_
 class FlagfileTest : public ParseFlagsTest {
  public:
   virtual void SetUp() {
     ParseFlagsTest::SetUp();
 
     testdata_path_.Set(internal::FilePath(
         testing::TempDir() + internal::GetCurrentExecutableName().string() +
         "_flagfile_test"));
     testing::internal::posix::RmDir(testdata_path_.c_str());
     EXPECT_TRUE(testdata_path_.CreateFolder());
   }
 
   virtual void TearDown() {
     testing::internal::posix::RmDir(testdata_path_.c_str());
     ParseFlagsTest::TearDown();
   }
 
   internal::FilePath CreateFlagfile(const char* contents) {
     internal::FilePath file_path(internal::FilePath::GenerateUniqueFileName(
         testdata_path_, internal::FilePath("unique"), "txt"));
     FILE* f = testing::internal::posix::FOpen(file_path.c_str(), "w");
     fprintf(f, "%s", contents);
     fclose(f);
     return file_path;
   }
 
  private:
   internal::FilePath testdata_path_;
 };
 
 // Tests an empty flagfile.
 TEST_F(FlagfileTest, Empty) {
   internal::FilePath flagfile_path(CreateFlagfile(""));
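   // GTEST_FLAG_PREFIX_ expands to "gtest_", so the flag built below has the
   // form "--gtest_flagfile=<path>".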
   std::string flagfile_flag =
       std::string("--" GTEST_FLAG_PREFIX_ "flagfile=") + flagfile_path.c_str();
 
   const char* argv[] = {
     "foo.exe",
     flagfile_flag.c_str(),
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), false);
 }
 
 // Tests passing a non-empty --gtest_filter flag via --gtest_flagfile.
 TEST_F(FlagfileTest, FilterNonEmpty) {
   internal::FilePath flagfile_path(CreateFlagfile(
       "--"  GTEST_FLAG_PREFIX_  "filter=abc"));
   std::string flagfile_flag =
       std::string("--" GTEST_FLAG_PREFIX_ "flagfile=") + flagfile_path.c_str();
 
   const char* argv[] = {
     "foo.exe",
     flagfile_flag.c_str(),
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter("abc"), false);
 }
 
 // Tests passing several flags via --gtest_flagfile.
 TEST_F(FlagfileTest, SeveralFlags) {
   internal::FilePath flagfile_path(CreateFlagfile(
       "--"  GTEST_FLAG_PREFIX_  "filter=abc\n"
       "--"  GTEST_FLAG_PREFIX_  "break_on_failure\n"
       "--"  GTEST_FLAG_PREFIX_  "list_tests"));
   std::string flagfile_flag =
       std::string("--" GTEST_FLAG_PREFIX_ "flagfile=") + flagfile_path.c_str();
 
   const char* argv[] = {
     "foo.exe",
     flagfile_flag.c_str(),
     NULL
   };
 
   const char* argv2[] = {
     "foo.exe",
     NULL
   };
 
   Flags expected_flags;
   expected_flags.break_on_failure = true;
   expected_flags.filter = "abc";
   expected_flags.list_tests = true;
 
   GTEST_TEST_PARSING_FLAGS_(argv, argv2, expected_flags, false);
 }
 #endif  // GTEST_USE_OWN_FLAGFILE_FLAG_
 
 // Tests current_test_info() in UnitTest.
 class CurrentTestInfoTest : public Test {
  protected:
   // Tests that current_test_info() returns NULL before the first test in
   // the test case is run.
   static void SetUpTestCase() {
     // There should be no tests running at this point.
     const TestInfo* test_info =
       UnitTest::GetInstance()->current_test_info();
     EXPECT_TRUE(test_info == NULL)
         << "There should be no tests running at this point.";
   }
 
   // Tests that current_test_info() returns NULL after the last test in
   // the test case has run.
   static void TearDownTestCase() {
     const TestInfo* test_info =
       UnitTest::GetInstance()->current_test_info();
     EXPECT_TRUE(test_info == NULL)
         << "There should be no tests running at this point.";
   }
 };
 
 // Tests that current_test_info() returns TestInfo for currently running
 // test by checking the expected test name against the actual one.
 TEST_F(CurrentTestInfoTest, WorksForFirstTestInATestCase) {
   const TestInfo* test_info =
     UnitTest::GetInstance()->current_test_info();
   ASSERT_TRUE(NULL != test_info)
       << "There is a test running so we should have a valid TestInfo.";
   EXPECT_STREQ("CurrentTestInfoTest", test_info->test_case_name())
       << "Expected the name of the currently running test case.";
   EXPECT_STREQ("WorksForFirstTestInATestCase", test_info->name())
       << "Expected the name of the currently running test.";
 }
 
 // Tests that current_test_info() returns TestInfo for currently running
 // test by checking the expected test name against the actual one.  We
 // use this test to see that the TestInfo object actually changed from
 // the previous invocation.
 TEST_F(CurrentTestInfoTest, WorksForSecondTestInATestCase) {
   const TestInfo* test_info =
     UnitTest::GetInstance()->current_test_info();
   ASSERT_TRUE(NULL != test_info)
       << "There is a test running so we should have a valid TestInfo.";
   EXPECT_STREQ("CurrentTestInfoTest", test_info->test_case_name())
       << "Expected the name of the currently running test case.";
   EXPECT_STREQ("WorksForSecondTestInATestCase", test_info->name())
       << "Expected the name of the currently running test.";
 }
 
 }  // namespace testing
 
 
 // These two lines test that we can define tests in a namespace that
 // has the name "testing" and is nested in another namespace.
 namespace my_namespace {
 namespace testing {
 
 // Makes sure that TEST knows to use ::testing::Test instead of
 // ::my_namespace::testing::Test.
 class Test {};
 
 // Makes sure that an assertion knows to use ::testing::Message instead of
 // ::my_namespace::testing::Message.
 class Message {};
 
 // Makes sure that an assertion knows to use
 // ::testing::AssertionResult instead of
 // ::my_namespace::testing::AssertionResult.
 class AssertionResult {};
 
 // Tests that an assertion that should succeed works as expected.
 TEST(NestedTestingNamespaceTest, Success) {
   EXPECT_EQ(1, 1) << "This shouldn't fail.";
 }
 
 // Tests that an assertion that should fail works as expected.
 TEST(NestedTestingNamespaceTest, Failure) {
   EXPECT_FATAL_FAILURE(FAIL() << "This failure is expected.",
                        "This failure is expected.");
 }
 
 }  // namespace testing
 }  // namespace my_namespace
 
 // Tests that one can call superclass SetUp and TearDown methods--
 // that is, that they are not private.
 // No tests are based on this fixture; the test "passes" if it compiles
 // successfully.
 class ProtectedFixtureMethodsTest : public Test {
  protected:
   virtual void SetUp() {
     Test::SetUp();
   }
   virtual void TearDown() {
     Test::TearDown();
   }
 };
 
 // StreamingAssertionsTest tests the streaming versions of a representative
 // sample of assertions.
 TEST(StreamingAssertionsTest, Unconditional) {
   SUCCEED() << "expected success";
   EXPECT_NONFATAL_FAILURE(ADD_FAILURE() << "expected failure",
                           "expected failure");
   EXPECT_FATAL_FAILURE(FAIL() << "expected failure",
                        "expected failure");
 }
 
 #ifdef __BORLANDC__
 // Silences warnings: "Condition is always true", "Unreachable code"
 # pragma option push -w-ccc -w-rch
 #endif
 
 TEST(StreamingAssertionsTest, Truth) {
   EXPECT_TRUE(true) << "unexpected failure";
   ASSERT_TRUE(true) << "unexpected failure";
   EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(false) << "expected failure",
                           "expected failure");
   EXPECT_FATAL_FAILURE(ASSERT_TRUE(false) << "expected failure",
                        "expected failure");
 }
 
 TEST(StreamingAssertionsTest, Truth2) {
   EXPECT_FALSE(false) << "unexpected failure";
   ASSERT_FALSE(false) << "unexpected failure";
   EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(true) << "expected failure",
                           "expected failure");
   EXPECT_FATAL_FAILURE(ASSERT_FALSE(true) << "expected failure",
                        "expected failure");
 }
 
 #ifdef __BORLANDC__
 // Restores warnings after previous "#pragma option push" suppressed them
 # pragma option pop
 #endif
 
 TEST(StreamingAssertionsTest, IntegerEquals) {
   EXPECT_EQ(1, 1) << "unexpected failure";
   ASSERT_EQ(1, 1) << "unexpected failure";
   EXPECT_NONFATAL_FAILURE(EXPECT_EQ(1, 2) << "expected failure",
                           "expected failure");
   EXPECT_FATAL_FAILURE(ASSERT_EQ(1, 2) << "expected failure",
                        "expected failure");
 }
 
 TEST(StreamingAssertionsTest, IntegerLessThan) {
   EXPECT_LT(1, 2) << "unexpected failure";
   ASSERT_LT(1, 2) << "unexpected failure";
   EXPECT_NONFATAL_FAILURE(EXPECT_LT(2, 1) << "expected failure",
                           "expected failure");
   EXPECT_FATAL_FAILURE(ASSERT_LT(2, 1) << "expected failure",
                        "expected failure");
 }
 
 TEST(StreamingAssertionsTest, StringsEqual) {
   EXPECT_STREQ("foo", "foo") << "unexpected failure";
   ASSERT_STREQ("foo", "foo") << "unexpected failure";
   EXPECT_NONFATAL_FAILURE(EXPECT_STREQ("foo", "bar") << "expected failure",
                           "expected failure");
   EXPECT_FATAL_FAILURE(ASSERT_STREQ("foo", "bar") << "expected failure",
                        "expected failure");
 }
 
 TEST(StreamingAssertionsTest, StringsNotEqual) {
   EXPECT_STRNE("foo", "bar") << "unexpected failure";
   ASSERT_STRNE("foo", "bar") << "unexpected failure";
   EXPECT_NONFATAL_FAILURE(EXPECT_STRNE("foo", "foo") << "expected failure",
                           "expected failure");
   EXPECT_FATAL_FAILURE(ASSERT_STRNE("foo", "foo") << "expected failure",
                        "expected failure");
 }
 
 TEST(StreamingAssertionsTest, StringsEqualIgnoringCase) {
   EXPECT_STRCASEEQ("foo", "FOO") << "unexpected failure";
   ASSERT_STRCASEEQ("foo", "FOO") << "unexpected failure";
   EXPECT_NONFATAL_FAILURE(EXPECT_STRCASEEQ("foo", "bar") << "expected failure",
                           "expected failure");
   EXPECT_FATAL_FAILURE(ASSERT_STRCASEEQ("foo", "bar") << "expected failure",
                        "expected failure");
 }
 
 TEST(StreamingAssertionsTest, StringNotEqualIgnoringCase) {
   EXPECT_STRCASENE("foo", "bar") << "unexpected failure";
   ASSERT_STRCASENE("foo", "bar") << "unexpected failure";
   EXPECT_NONFATAL_FAILURE(EXPECT_STRCASENE("foo", "FOO") << "expected failure",
                           "expected failure");
   EXPECT_FATAL_FAILURE(ASSERT_STRCASENE("bar", "BAR") << "expected failure",
                        "expected failure");
 }
 
 TEST(StreamingAssertionsTest, FloatingPointEquals) {
   EXPECT_FLOAT_EQ(1.0, 1.0) << "unexpected failure";
   ASSERT_FLOAT_EQ(1.0, 1.0) << "unexpected failure";
   EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(0.0, 1.0) << "expected failure",
                           "expected failure");
   EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(0.0, 1.0) << "expected failure",
                        "expected failure");
 }
 
 #if GTEST_HAS_EXCEPTIONS
 
 TEST(StreamingAssertionsTest, Throw) {
   EXPECT_THROW(ThrowAnInteger(), int) << "unexpected failure";
   ASSERT_THROW(ThrowAnInteger(), int) << "unexpected failure";
   EXPECT_NONFATAL_FAILURE(EXPECT_THROW(ThrowAnInteger(), bool) <<
                           "expected failure", "expected failure");
   EXPECT_FATAL_FAILURE(ASSERT_THROW(ThrowAnInteger(), bool) <<
                        "expected failure", "expected failure");
 }
 
 TEST(StreamingAssertionsTest, NoThrow) {
   EXPECT_NO_THROW(ThrowNothing()) << "unexpected failure";
   ASSERT_NO_THROW(ThrowNothing()) << "unexpected failure";
   EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(ThrowAnInteger()) <<
                           "expected failure", "expected failure");
   EXPECT_FATAL_FAILURE(ASSERT_NO_THROW(ThrowAnInteger()) <<
                        "expected failure", "expected failure");
 }
 
 TEST(StreamingAssertionsTest, AnyThrow) {
   EXPECT_ANY_THROW(ThrowAnInteger()) << "unexpected failure";
   ASSERT_ANY_THROW(ThrowAnInteger()) << "unexpected failure";
   EXPECT_NONFATAL_FAILURE(EXPECT_ANY_THROW(ThrowNothing()) <<
                           "expected failure", "expected failure");
   EXPECT_FATAL_FAILURE(ASSERT_ANY_THROW(ThrowNothing()) <<
                        "expected failure", "expected failure");
 }
 
 #endif  // GTEST_HAS_EXCEPTIONS
 
 // Tests that Google Test correctly decides whether to use colors in the output.
 
 TEST(ColoredOutputTest, UsesColorsWhenGTestColorFlagIsYes) {
   GTEST_FLAG(color) = "yes";
 
   SetEnv("TERM", "xterm");  // TERM supports colors.
   EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.
   EXPECT_TRUE(ShouldUseColor(false));  // Stdout is not a TTY.
 
   SetEnv("TERM", "dumb");  // TERM doesn't support colors.
   EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.
   EXPECT_TRUE(ShouldUseColor(false));  // Stdout is not a TTY.
 }
 
 TEST(ColoredOutputTest, UsesColorsWhenGTestColorFlagIsAliasOfYes) {
   SetEnv("TERM", "dumb");  // TERM doesn't support colors.
 
   GTEST_FLAG(color) = "True";
   EXPECT_TRUE(ShouldUseColor(false));  // Stdout is not a TTY.
 
   GTEST_FLAG(color) = "t";
   EXPECT_TRUE(ShouldUseColor(false));  // Stdout is not a TTY.
 
   GTEST_FLAG(color) = "1";
   EXPECT_TRUE(ShouldUseColor(false));  // Stdout is not a TTY.
 }
 
 TEST(ColoredOutputTest, UsesNoColorWhenGTestColorFlagIsNo) {
   GTEST_FLAG(color) = "no";
 
   SetEnv("TERM", "xterm");  // TERM supports colors.
   EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.
   EXPECT_FALSE(ShouldUseColor(false));  // Stdout is not a TTY.
 
   SetEnv("TERM", "dumb");  // TERM doesn't support colors.
   EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.
   EXPECT_FALSE(ShouldUseColor(false));  // Stdout is not a TTY.
 }
 
 TEST(ColoredOutputTest, UsesNoColorWhenGTestColorFlagIsInvalid) {
   SetEnv("TERM", "xterm");  // TERM supports colors.
 
   GTEST_FLAG(color) = "F";
   EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.
 
   GTEST_FLAG(color) = "0";
   EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.
 
   GTEST_FLAG(color) = "unknown";
   EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.
 }
 
 TEST(ColoredOutputTest, UsesColorsWhenStdoutIsTty) {
   GTEST_FLAG(color) = "auto";
 
   SetEnv("TERM", "xterm");  // TERM supports colors.
   EXPECT_FALSE(ShouldUseColor(false));  // Stdout is not a TTY.
   EXPECT_TRUE(ShouldUseColor(true));    // Stdout is a TTY.
 }
 
 TEST(ColoredOutputTest, UsesColorsWhenTermSupportsColors) {
   GTEST_FLAG(color) = "auto";
 
 #if GTEST_OS_WINDOWS
   // On Windows, we ignore the TERM variable as it's usually not set.
 
   SetEnv("TERM", "dumb");
   EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.
 
   SetEnv("TERM", "");
   EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.
 
   SetEnv("TERM", "xterm");
   EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.
 #else
   // On non-Windows platforms, we rely on TERM to determine if the
   // terminal supports colors.
 
   SetEnv("TERM", "dumb");  // TERM doesn't support colors.
   EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.
 
   SetEnv("TERM", "emacs");  // TERM doesn't support colors.
   EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.
 
   SetEnv("TERM", "vt100");  // TERM doesn't support colors.
   EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.
 
   SetEnv("TERM", "xterm-mono");  // TERM doesn't support colors.
   EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.
 
   SetEnv("TERM", "xterm");  // TERM supports colors.
   EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.
 
   SetEnv("TERM", "xterm-color");  // TERM supports colors.
   EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.
 
   SetEnv("TERM", "xterm-256color");  // TERM supports colors.
   EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.
 
   SetEnv("TERM", "screen");  // TERM supports colors.
   EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.
 
   SetEnv("TERM", "screen-256color");  // TERM supports colors.
   EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.
 
   SetEnv("TERM", "tmux");  // TERM supports colors.
   EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.
 
   SetEnv("TERM", "tmux-256color");  // TERM supports colors.
   EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.
 
   SetEnv("TERM", "rxvt-unicode");  // TERM supports colors.
   EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.
 
   SetEnv("TERM", "rxvt-unicode-256color");  // TERM supports colors.
   EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.
 
   SetEnv("TERM", "linux");  // TERM supports colors.
   EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.
 
   SetEnv("TERM", "cygwin");  // TERM supports colors.
   EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.
 #endif  // GTEST_OS_WINDOWS
 }
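 
 // For reference only: outside of these tests, the same decision is driven by
 // the --gtest_color=(yes|no|auto) flag or the GTEST_COLOR environment
 // variable; "auto" falls back to the TTY and TERM checks exercised above.
 // Typical invocations (my_test is an illustrative binary name):
 //
 //   GTEST_COLOR=yes ./my_test       # force colored output
 //   ./my_test --gtest_color=no      # disable colored output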
 
 // Verifies that StaticAssertTypeEq works in a namespace scope.
 
 static bool dummy1 GTEST_ATTRIBUTE_UNUSED_ = StaticAssertTypeEq<bool, bool>();
 static bool dummy2 GTEST_ATTRIBUTE_UNUSED_ =
     StaticAssertTypeEq<const int, const int>();
 
 // Verifies that StaticAssertTypeEq works in a class.
 
 template <typename T>
 class StaticAssertTypeEqTestHelper {
  public:
   StaticAssertTypeEqTestHelper() { StaticAssertTypeEq<bool, T>(); }
 };
 
 TEST(StaticAssertTypeEqTest, WorksInClass) {
   StaticAssertTypeEqTestHelper<bool>();
 }
 
 // Verifies that StaticAssertTypeEq works inside a function.
 
 typedef int IntAlias;
 
 TEST(StaticAssertTypeEqTest, CompilesForEqualTypes) {
   StaticAssertTypeEq<int, IntAlias>();
   StaticAssertTypeEq<int*, IntAlias*>();
 }
 
 TEST(HasNonfatalFailureTest, ReturnsFalseWhenThereIsNoFailure) {
   EXPECT_FALSE(HasNonfatalFailure());
 }
 
 static void FailFatally() { FAIL(); }
 
 TEST(HasNonfatalFailureTest, ReturnsFalseWhenThereIsOnlyFatalFailure) {
   FailFatally();
   const bool has_nonfatal_failure = HasNonfatalFailure();
   ClearCurrentTestPartResults();
   EXPECT_FALSE(has_nonfatal_failure);
 }
 
 TEST(HasNonfatalFailureTest, ReturnsTrueWhenThereIsNonfatalFailure) {
   ADD_FAILURE();
   const bool has_nonfatal_failure = HasNonfatalFailure();
   ClearCurrentTestPartResults();
   EXPECT_TRUE(has_nonfatal_failure);
 }
 
 TEST(HasNonfatalFailureTest, ReturnsTrueWhenThereAreFatalAndNonfatalFailures) {
   FailFatally();
   ADD_FAILURE();
   const bool has_nonfatal_failure = HasNonfatalFailure();
   ClearCurrentTestPartResults();
   EXPECT_TRUE(has_nonfatal_failure);
 }
 
 // A wrapper for calling HasNonfatalFailure outside of a test body.
 static bool HasNonfatalFailureHelper() {
   return testing::Test::HasNonfatalFailure();
 }
 
 TEST(HasNonfatalFailureTest, WorksOutsideOfTestBody) {
   EXPECT_FALSE(HasNonfatalFailureHelper());
 }
 
 TEST(HasNonfatalFailureTest, WorksOutsideOfTestBody2) {
   ADD_FAILURE();
   const bool has_nonfatal_failure = HasNonfatalFailureHelper();
   ClearCurrentTestPartResults();
   EXPECT_TRUE(has_nonfatal_failure);
 }
 
 TEST(HasFailureTest, ReturnsFalseWhenThereIsNoFailure) {
   EXPECT_FALSE(HasFailure());
 }
 
 TEST(HasFailureTest, ReturnsTrueWhenThereIsFatalFailure) {
   FailFatally();
   const bool has_failure = HasFailure();
   ClearCurrentTestPartResults();
   EXPECT_TRUE(has_failure);
 }
 
 TEST(HasFailureTest, ReturnsTrueWhenThereIsNonfatalFailure) {
   ADD_FAILURE();
   const bool has_failure = HasFailure();
   ClearCurrentTestPartResults();
   EXPECT_TRUE(has_failure);
 }
 
 TEST(HasFailureTest, ReturnsTrueWhenThereAreFatalAndNonfatalFailures) {
   FailFatally();
   ADD_FAILURE();
   const bool has_failure = HasFailure();
   ClearCurrentTestPartResults();
   EXPECT_TRUE(has_failure);
 }
 
 // A wrapper for calling HasFailure outside of a test body.
 static bool HasFailureHelper() { return testing::Test::HasFailure(); }
 
 TEST(HasFailureTest, WorksOutsideOfTestBody) {
   EXPECT_FALSE(HasFailureHelper());
 }
 
 TEST(HasFailureTest, WorksOutsideOfTestBody2) {
   ADD_FAILURE();
   const bool has_failure = HasFailureHelper();
   ClearCurrentTestPartResults();
   EXPECT_TRUE(has_failure);
 }
 
 class TestListener : public EmptyTestEventListener {
  public:
   TestListener() : on_start_counter_(NULL), is_destroyed_(NULL) {}
   TestListener(int* on_start_counter, bool* is_destroyed)
       : on_start_counter_(on_start_counter),
         is_destroyed_(is_destroyed) {}
 
   virtual ~TestListener() {
     if (is_destroyed_)
       *is_destroyed_ = true;
   }
 
  protected:
   virtual void OnTestProgramStart(const UnitTest& /*unit_test*/) {
     if (on_start_counter_ != NULL)
       (*on_start_counter_)++;
   }
 
  private:
   int* on_start_counter_;
   bool* is_destroyed_;
 };
 
 // Tests the constructor.
 TEST(TestEventListenersTest, ConstructionWorks) {
   TestEventListeners listeners;
 
   EXPECT_TRUE(TestEventListenersAccessor::GetRepeater(&listeners) != NULL);
   EXPECT_TRUE(listeners.default_result_printer() == NULL);
   EXPECT_TRUE(listeners.default_xml_generator() == NULL);
 }
 
 // Tests that the TestEventListeners destructor deletes all the listeners it
 // owns.
 TEST(TestEventListenersTest, DestructionWorks) {
   bool default_result_printer_is_destroyed = false;
   bool default_xml_printer_is_destroyed = false;
   bool extra_listener_is_destroyed = false;
   TestListener* default_result_printer = new TestListener(
       NULL, &default_result_printer_is_destroyed);
   TestListener* default_xml_printer = new TestListener(
       NULL, &default_xml_printer_is_destroyed);
   TestListener* extra_listener = new TestListener(
       NULL, &extra_listener_is_destroyed);
 
   {
     TestEventListeners listeners;
     TestEventListenersAccessor::SetDefaultResultPrinter(&listeners,
                                                         default_result_printer);
     TestEventListenersAccessor::SetDefaultXmlGenerator(&listeners,
                                                        default_xml_printer);
     listeners.Append(extra_listener);
   }
   EXPECT_TRUE(default_result_printer_is_destroyed);
   EXPECT_TRUE(default_xml_printer_is_destroyed);
   EXPECT_TRUE(extra_listener_is_destroyed);
 }
 
 // Tests that a listener Append'ed to a TestEventListeners list starts
 // receiving events.
 TEST(TestEventListenersTest, Append) {
   int on_start_counter = 0;
   bool is_destroyed = false;
   TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);
   {
     TestEventListeners listeners;
     listeners.Append(listener);
     TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
         *UnitTest::GetInstance());
     EXPECT_EQ(1, on_start_counter);
   }
   EXPECT_TRUE(is_destroyed);
 }
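 
 // For reference only: the tests here drive TestEventListeners through the
 // internal accessor, while typical user code reaches the same list through
 // the public API.  A minimal sketch (MyListener is an illustrative name, not
 // something defined in this file):
 //
 //   class MyListener : public ::testing::EmptyTestEventListener {
 //     virtual void OnTestProgramStart(const ::testing::UnitTest&) {
 //       printf("Test program is starting.\n");
 //     }
 //   };
 //
 //   int main(int argc, char** argv) {
 //     ::testing::InitGoogleTest(&argc, argv);
 //     ::testing::UnitTest::GetInstance()->listeners().Append(
 //         new MyListener);  // The listener list takes ownership.
 //     return RUN_ALL_TESTS();
 //   }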
 
 // Tests that listeners receive events in the order they were appended to
 // the list, except for *End requests, which must be received in the reverse
 // order.
 class SequenceTestingListener : public EmptyTestEventListener {
  public:
   SequenceTestingListener(std::vector<std::string>* vector, const char* id)
       : vector_(vector), id_(id) {}
 
  protected:
   virtual void OnTestProgramStart(const UnitTest& /*unit_test*/) {
     vector_->push_back(GetEventDescription("OnTestProgramStart"));
   }
 
   virtual void OnTestProgramEnd(const UnitTest& /*unit_test*/) {
     vector_->push_back(GetEventDescription("OnTestProgramEnd"));
   }
 
   virtual void OnTestIterationStart(const UnitTest& /*unit_test*/,
                                     int /*iteration*/) {
     vector_->push_back(GetEventDescription("OnTestIterationStart"));
   }
 
   virtual void OnTestIterationEnd(const UnitTest& /*unit_test*/,
                                   int /*iteration*/) {
     vector_->push_back(GetEventDescription("OnTestIterationEnd"));
   }
 
  private:
   std::string GetEventDescription(const char* method) {
     Message message;
     message << id_ << "." << method;
     return message.GetString();
   }
 
   std::vector<std::string>* vector_;
   const char* const id_;
 
   GTEST_DISALLOW_COPY_AND_ASSIGN_(SequenceTestingListener);
 };
 
 TEST(EventListenerTest, AppendKeepsOrder) {
   std::vector<std::string> vec;
   TestEventListeners listeners;
   listeners.Append(new SequenceTestingListener(&vec, "1st"));
   listeners.Append(new SequenceTestingListener(&vec, "2nd"));
   listeners.Append(new SequenceTestingListener(&vec, "3rd"));
 
   TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
       *UnitTest::GetInstance());
   ASSERT_EQ(3U, vec.size());
   EXPECT_STREQ("1st.OnTestProgramStart", vec[0].c_str());
   EXPECT_STREQ("2nd.OnTestProgramStart", vec[1].c_str());
   EXPECT_STREQ("3rd.OnTestProgramStart", vec[2].c_str());
 
   vec.clear();
   TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramEnd(
       *UnitTest::GetInstance());
   ASSERT_EQ(3U, vec.size());
   EXPECT_STREQ("3rd.OnTestProgramEnd", vec[0].c_str());
   EXPECT_STREQ("2nd.OnTestProgramEnd", vec[1].c_str());
   EXPECT_STREQ("1st.OnTestProgramEnd", vec[2].c_str());
 
   vec.clear();
   TestEventListenersAccessor::GetRepeater(&listeners)->OnTestIterationStart(
       *UnitTest::GetInstance(), 0);
   ASSERT_EQ(3U, vec.size());
   EXPECT_STREQ("1st.OnTestIterationStart", vec[0].c_str());
   EXPECT_STREQ("2nd.OnTestIterationStart", vec[1].c_str());
   EXPECT_STREQ("3rd.OnTestIterationStart", vec[2].c_str());
 
   vec.clear();
   TestEventListenersAccessor::GetRepeater(&listeners)->OnTestIterationEnd(
       *UnitTest::GetInstance(), 0);
   ASSERT_EQ(3U, vec.size());
   EXPECT_STREQ("3rd.OnTestIterationEnd", vec[0].c_str());
   EXPECT_STREQ("2nd.OnTestIterationEnd", vec[1].c_str());
   EXPECT_STREQ("1st.OnTestIterationEnd", vec[2].c_str());
 }
 
 // Tests that a listener removed from a TestEventListeners list stops receiving
 // events and is not deleted when the list is destroyed.
 TEST(TestEventListenersTest, Release) {
   int on_start_counter = 0;
   bool is_destroyed = false;
   // Although Append passes the ownership of this object to the list,
   // the following calls release it, and we need to delete it before the
   // test ends.
   TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);
   {
     TestEventListeners listeners;
     listeners.Append(listener);
     EXPECT_EQ(listener, listeners.Release(listener));
     TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
         *UnitTest::GetInstance());
     EXPECT_TRUE(listeners.Release(listener) == NULL);
   }
   EXPECT_EQ(0, on_start_counter);
   EXPECT_FALSE(is_destroyed);
   delete listener;
 }
 
 // Tests that no events are forwarded when event forwarding is disabled.
 TEST(EventListenerTest, SuppressEventForwarding) {
   int on_start_counter = 0;
   TestListener* listener = new TestListener(&on_start_counter, NULL);
 
   TestEventListeners listeners;
   listeners.Append(listener);
   ASSERT_TRUE(TestEventListenersAccessor::EventForwardingEnabled(listeners));
   TestEventListenersAccessor::SuppressEventForwarding(&listeners);
   ASSERT_FALSE(TestEventListenersAccessor::EventForwardingEnabled(listeners));
   TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
       *UnitTest::GetInstance());
   EXPECT_EQ(0, on_start_counter);
 }
 
 // Tests that events generated by Google Test are not forwarded in
 // death test subprocesses.
 TEST(EventListenerDeathTest, EventsNotForwardedInDeathTestSubprocesses) {
   EXPECT_DEATH_IF_SUPPORTED({
       GTEST_CHECK_(TestEventListenersAccessor::EventForwardingEnabled(
           *GetUnitTestImpl()->listeners())) << "expected failure";},
       "expected failure");
 }
 
 // Tests that a listener installed via SetDefaultResultPrinter() starts
 // receiving events and is returned via default_result_printer() and that
 // the previous default_result_printer is removed from the list and deleted.
 TEST(EventListenerTest, default_result_printer) {
   int on_start_counter = 0;
   bool is_destroyed = false;
   TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);
 
   TestEventListeners listeners;
   TestEventListenersAccessor::SetDefaultResultPrinter(&listeners, listener);
 
   EXPECT_EQ(listener, listeners.default_result_printer());
 
   TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
       *UnitTest::GetInstance());
 
   EXPECT_EQ(1, on_start_counter);
 
   // Replacing default_result_printer with something else should remove it
   // from the list and destroy it.
   TestEventListenersAccessor::SetDefaultResultPrinter(&listeners, NULL);
 
   EXPECT_TRUE(listeners.default_result_printer() == NULL);
   EXPECT_TRUE(is_destroyed);
 
   // After broadcasting an event the counter is still the same, indicating
   // the listener is not in the list anymore.
   TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
       *UnitTest::GetInstance());
   EXPECT_EQ(1, on_start_counter);
 }
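 
 // For reference only: the usual way for user code to swap in its own result
 // printer goes through the public listener list rather than the internal
 // accessor used in these tests (MyPrinter is an illustrative name):
 //
 //   ::testing::TestEventListeners& listeners =
 //       ::testing::UnitTest::GetInstance()->listeners();
 //   delete listeners.Release(listeners.default_result_printer());
 //   listeners.Append(new MyPrinter);  // Google Test takes ownership.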
 
 // Tests that the default_result_printer listener stops receiving events
 // when removed via Release and that it is not owned by the list anymore.
 TEST(EventListenerTest, RemovingDefaultResultPrinterWorks) {
   int on_start_counter = 0;
   bool is_destroyed = false;
   // Although Append passes the ownership of this object to the list,
   // the following calls release it, and we need to delete it before the
   // test ends.
   TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);
   {
     TestEventListeners listeners;
     TestEventListenersAccessor::SetDefaultResultPrinter(&listeners, listener);
 
     EXPECT_EQ(listener, listeners.Release(listener));
     EXPECT_TRUE(listeners.default_result_printer() == NULL);
     EXPECT_FALSE(is_destroyed);
 
     // Broadcasting events now should not affect default_result_printer.
     TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
         *UnitTest::GetInstance());
     EXPECT_EQ(0, on_start_counter);
   }
 // Destroying the list should not affect the listener either.
   EXPECT_FALSE(is_destroyed);
   delete listener;
 }
 
 // Tests that a listener installed via SetDefaultXmlGenerator() starts
 // receiving events and is returned via default_xml_generator() and that
 // the previous default_xml_generator is removed from the list and deleted.
 TEST(EventListenerTest, default_xml_generator) {
   int on_start_counter = 0;
   bool is_destroyed = false;
   TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);
 
   TestEventListeners listeners;
   TestEventListenersAccessor::SetDefaultXmlGenerator(&listeners, listener);
 
   EXPECT_EQ(listener, listeners.default_xml_generator());
 
   TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
       *UnitTest::GetInstance());
 
   EXPECT_EQ(1, on_start_counter);
 
   // Replacing default_xml_generator with something else should remove it
   // from the list and destroy it.
   TestEventListenersAccessor::SetDefaultXmlGenerator(&listeners, NULL);
 
   EXPECT_TRUE(listeners.default_xml_generator() == NULL);
   EXPECT_TRUE(is_destroyed);
 
   // After broadcasting an event the counter is still the same, indicating
   // the listener is not in the list anymore.
   TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
       *UnitTest::GetInstance());
   EXPECT_EQ(1, on_start_counter);
 }
 
 // Tests that the default_xml_generator listener stops receiving events
 // when removed via Release and that it is not owned by the list anymore.
 TEST(EventListenerTest, RemovingDefaultXmlGeneratorWorks) {
   int on_start_counter = 0;
   bool is_destroyed = false;
   // Although Append passes the ownership of this object to the list,
   // the following calls release it, and we need to delete it before the
   // test ends.
   TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);
   {
     TestEventListeners listeners;
     TestEventListenersAccessor::SetDefaultXmlGenerator(&listeners, listener);
 
     EXPECT_EQ(listener, listeners.Release(listener));
     EXPECT_TRUE(listeners.default_xml_generator() == NULL);
     EXPECT_FALSE(is_destroyed);
 
     // Broadcasting events now should not affect default_xml_generator.
     TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(
         *UnitTest::GetInstance());
     EXPECT_EQ(0, on_start_counter);
   }
 // Destroying the list should not affect the listener either.
   EXPECT_FALSE(is_destroyed);
   delete listener;
 }
 
 // Sanity tests to ensure that the alternative, verbose spellings of
 // some of the macros work.  We don't test them thoroughly as that
 // would be quite involved.  Since their implementations are
 // straightforward, and they are rarely used, we'll just rely on the
 // users to tell us when they are broken.
 GTEST_TEST(AlternativeNameTest, Works) {  // GTEST_TEST is the same as TEST.
   GTEST_SUCCEED() << "OK";  // GTEST_SUCCEED is the same as SUCCEED.
 
   // GTEST_FAIL is the same as FAIL.
   EXPECT_FATAL_FAILURE(GTEST_FAIL() << "An expected failure",
                        "An expected failure");
 
   // GTEST_ASSERT_XY is the same as ASSERT_XY.
 
   GTEST_ASSERT_EQ(0, 0);
   EXPECT_FATAL_FAILURE(GTEST_ASSERT_EQ(0, 1) << "An expected failure",
                        "An expected failure");
   EXPECT_FATAL_FAILURE(GTEST_ASSERT_EQ(1, 0) << "An expected failure",
                        "An expected failure");
 
   GTEST_ASSERT_NE(0, 1);
   GTEST_ASSERT_NE(1, 0);
   EXPECT_FATAL_FAILURE(GTEST_ASSERT_NE(0, 0) << "An expected failure",
                        "An expected failure");
 
   GTEST_ASSERT_LE(0, 0);
   GTEST_ASSERT_LE(0, 1);
   EXPECT_FATAL_FAILURE(GTEST_ASSERT_LE(1, 0) << "An expected failure",
                        "An expected failure");
 
   GTEST_ASSERT_LT(0, 1);
   EXPECT_FATAL_FAILURE(GTEST_ASSERT_LT(0, 0) << "An expected failure",
                        "An expected failure");
   EXPECT_FATAL_FAILURE(GTEST_ASSERT_LT(1, 0) << "An expected failure",
                        "An expected failure");
 
   GTEST_ASSERT_GE(0, 0);
   GTEST_ASSERT_GE(1, 0);
   EXPECT_FATAL_FAILURE(GTEST_ASSERT_GE(0, 1) << "An expected failure",
                        "An expected failure");
 
   GTEST_ASSERT_GT(1, 0);
   EXPECT_FATAL_FAILURE(GTEST_ASSERT_GT(0, 1) << "An expected failure",
                        "An expected failure");
   EXPECT_FATAL_FAILURE(GTEST_ASSERT_GT(1, 1) << "An expected failure",
                        "An expected failure");
 }
 
 // Tests for internal utilities needed to implement universal printing.
 // TODO(vladl@google.com): Find a better home for them.
 
 class ConversionHelperBase {};
 class ConversionHelperDerived : public ConversionHelperBase {};
 
 // Tests that IsAProtocolMessage<T>::value is a compile-time constant.
 TEST(IsAProtocolMessageTest, ValueIsCompileTimeConstant) {
   GTEST_COMPILE_ASSERT_(IsAProtocolMessage<ProtocolMessage>::value,
                         const_true);
   GTEST_COMPILE_ASSERT_(!IsAProtocolMessage<int>::value, const_false);
 }
 
 // Tests that IsAProtocolMessage<T>::value is true when T is
 // proto2::Message or a sub-class of it.
 TEST(IsAProtocolMessageTest, ValueIsTrueWhenTypeIsAProtocolMessage) {
   EXPECT_TRUE(IsAProtocolMessage< ::proto2::Message>::value);
   EXPECT_TRUE(IsAProtocolMessage<ProtocolMessage>::value);
 }
 
 // Tests that IsAProtocolMessage<T>::value is false when T is neither
 // ProtocolMessage nor a sub-class of it.
 TEST(IsAProtocolMessageTest, ValueIsFalseWhenTypeIsNotAProtocolMessage) {
   EXPECT_FALSE(IsAProtocolMessage<int>::value);
   EXPECT_FALSE(IsAProtocolMessage<const ConversionHelperBase>::value);
 }
 
 // Tests that CompileAssertTypesEqual compiles when the type arguments are
 // equal.
 TEST(CompileAssertTypesEqual, CompilesWhenTypesAreEqual) {
   CompileAssertTypesEqual<void, void>();
   CompileAssertTypesEqual<int*, int*>();
 }
 
 // Tests that RemoveReference does not affect non-reference types.
 TEST(RemoveReferenceTest, DoesNotAffectNonReferenceType) {
   CompileAssertTypesEqual<int, RemoveReference<int>::type>();
   CompileAssertTypesEqual<const char, RemoveReference<const char>::type>();
 }
 
 // Tests that RemoveReference removes reference from reference types.
 TEST(RemoveReferenceTest, RemovesReference) {
   CompileAssertTypesEqual<int, RemoveReference<int&>::type>();
   CompileAssertTypesEqual<const char, RemoveReference<const char&>::type>();
 }
 
 // Tests GTEST_REMOVE_REFERENCE_.
 
 template <typename T1, typename T2>
 void TestGTestRemoveReference() {
   CompileAssertTypesEqual<T1, GTEST_REMOVE_REFERENCE_(T2)>();
 }
 
 TEST(RemoveReferenceTest, MacroVersion) {
   TestGTestRemoveReference<int, int>();
   TestGTestRemoveReference<const char, const char&>();
 }
 
 
 // Tests that RemoveConst does not affect non-const types.
 TEST(RemoveConstTest, DoesNotAffectNonConstType) {
   CompileAssertTypesEqual<int, RemoveConst<int>::type>();
   CompileAssertTypesEqual<char&, RemoveConst<char&>::type>();
 }
 
 // Tests that RemoveConst removes const from const types.
 TEST(RemoveConstTest, RemovesConst) {
   CompileAssertTypesEqual<int, RemoveConst<const int>::type>();
   CompileAssertTypesEqual<char[2], RemoveConst<const char[2]>::type>();
   CompileAssertTypesEqual<char[2][3], RemoveConst<const char[2][3]>::type>();
 }
 
 // Tests GTEST_REMOVE_CONST_.
 
 template <typename T1, typename T2>
 void TestGTestRemoveConst() {
   CompileAssertTypesEqual<T1, GTEST_REMOVE_CONST_(T2)>();
 }
 
 TEST(RemoveConstTest, MacroVersion) {
   TestGTestRemoveConst<int, int>();
   TestGTestRemoveConst<double&, double&>();
   TestGTestRemoveConst<char, const char>();
 }
 
 // Tests GTEST_REMOVE_REFERENCE_AND_CONST_.
 
 template <typename T1, typename T2>
 void TestGTestRemoveReferenceAndConst() {
   CompileAssertTypesEqual<T1, GTEST_REMOVE_REFERENCE_AND_CONST_(T2)>();
 }
 
 TEST(RemoveReferenceToConstTest, Works) {
   TestGTestRemoveReferenceAndConst<int, int>();
   TestGTestRemoveReferenceAndConst<double, double&>();
   TestGTestRemoveReferenceAndConst<char, const char>();
   TestGTestRemoveReferenceAndConst<char, const char&>();
   TestGTestRemoveReferenceAndConst<const char*, const char*>();
 }
 
 // Tests that AddReference does not affect reference types.
 TEST(AddReferenceTest, DoesNotAffectReferenceType) {
   CompileAssertTypesEqual<int&, AddReference<int&>::type>();
   CompileAssertTypesEqual<const char&, AddReference<const char&>::type>();
 }
 
 // Tests that AddReference adds reference to non-reference types.
 TEST(AddReferenceTest, AddsReference) {
   CompileAssertTypesEqual<int&, AddReference<int>::type>();
   CompileAssertTypesEqual<const char&, AddReference<const char>::type>();
 }
 
 // Tests GTEST_ADD_REFERENCE_.
 
 template <typename T1, typename T2>
 void TestGTestAddReference() {
   CompileAssertTypesEqual<T1, GTEST_ADD_REFERENCE_(T2)>();
 }
 
 TEST(AddReferenceTest, MacroVersion) {
   TestGTestAddReference<int&, int>();
   TestGTestAddReference<const char&, const char&>();
 }
 
 // Tests GTEST_REFERENCE_TO_CONST_.
 
 template <typename T1, typename T2>
 void TestGTestReferenceToConst() {
   CompileAssertTypesEqual<T1, GTEST_REFERENCE_TO_CONST_(T2)>();
 }
 
 TEST(GTestReferenceToConstTest, Works) {
   TestGTestReferenceToConst<const char&, char>();
   TestGTestReferenceToConst<const int&, const int>();
   TestGTestReferenceToConst<const double&, double>();
   TestGTestReferenceToConst<const std::string&, const std::string&>();
 }
 
 // Tests that ImplicitlyConvertible<T1, T2>::value is a compile-time constant.
 TEST(ImplicitlyConvertibleTest, ValueIsCompileTimeConstant) {
   GTEST_COMPILE_ASSERT_((ImplicitlyConvertible<int, int>::value), const_true);
   GTEST_COMPILE_ASSERT_((!ImplicitlyConvertible<void*, int*>::value),
                         const_false);
 }
 
 // Tests that ImplicitlyConvertible<T1, T2>::value is true when T1 can
 // be implicitly converted to T2.
 TEST(ImplicitlyConvertibleTest, ValueIsTrueWhenConvertible) {
   EXPECT_TRUE((ImplicitlyConvertible<int, double>::value));
   EXPECT_TRUE((ImplicitlyConvertible<double, int>::value));
   EXPECT_TRUE((ImplicitlyConvertible<int*, void*>::value));
   EXPECT_TRUE((ImplicitlyConvertible<int*, const int*>::value));
   EXPECT_TRUE((ImplicitlyConvertible<ConversionHelperDerived&,
                                      const ConversionHelperBase&>::value));
   EXPECT_TRUE((ImplicitlyConvertible<const ConversionHelperBase,
                                      ConversionHelperBase>::value));
 }
 
 // Tests that ImplicitlyConvertible<T1, T2>::value is false when T1
 // cannot be implicitly converted to T2.
 TEST(ImplicitlyConvertibleTest, ValueIsFalseWhenNotConvertible) {
   EXPECT_FALSE((ImplicitlyConvertible<double, int*>::value));
   EXPECT_FALSE((ImplicitlyConvertible<void*, int*>::value));
   EXPECT_FALSE((ImplicitlyConvertible<const int*, int*>::value));
   EXPECT_FALSE((ImplicitlyConvertible<ConversionHelperBase&,
                                       ConversionHelperDerived&>::value));
 }
 
 // Tests IsContainerTest.
 
 class NonContainer {};
 
 TEST(IsContainerTestTest, WorksForNonContainer) {
   EXPECT_EQ(sizeof(IsNotContainer), sizeof(IsContainerTest<int>(0)));
   EXPECT_EQ(sizeof(IsNotContainer), sizeof(IsContainerTest<char[5]>(0)));
   EXPECT_EQ(sizeof(IsNotContainer), sizeof(IsContainerTest<NonContainer>(0)));
 }
 
 TEST(IsContainerTestTest, WorksForContainer) {
   EXPECT_EQ(sizeof(IsContainer),
             sizeof(IsContainerTest<std::vector<bool> >(0)));
   EXPECT_EQ(sizeof(IsContainer),
             sizeof(IsContainerTest<std::map<int, double> >(0)));
 }
 
 #if GTEST_LANG_CXX11
 struct ConstOnlyContainerWithPointerIterator {
   using const_iterator = int*;
   const_iterator begin() const;
   const_iterator end() const;
 };
 
 struct ConstOnlyContainerWithClassIterator {
   struct const_iterator {
     const int& operator*() const;
     const_iterator& operator++(/* pre-increment */);
   };
   const_iterator begin() const;
   const_iterator end() const;
 };
 
 TEST(IsContainerTestTest, ConstOnlyContainer) {
   EXPECT_EQ(sizeof(IsContainer),
             sizeof(IsContainerTest<ConstOnlyContainerWithPointerIterator>(0)));
   EXPECT_EQ(sizeof(IsContainer),
             sizeof(IsContainerTest<ConstOnlyContainerWithClassIterator>(0)));
 }
 #endif  // GTEST_LANG_CXX11
 
 // Tests IsHashTable.
 struct AHashTable {
   typedef void hasher;
 };
 struct NotReallyAHashTable {
   typedef void hasher;
   typedef void reverse_iterator;
 };
 TEST(IsHashTable, Basic) {
   EXPECT_TRUE(testing::internal::IsHashTable<AHashTable>::value);
   EXPECT_FALSE(testing::internal::IsHashTable<NotReallyAHashTable>::value);
 #if GTEST_LANG_CXX11
   EXPECT_FALSE(testing::internal::IsHashTable<std::vector<int>>::value);
   EXPECT_TRUE(testing::internal::IsHashTable<std::unordered_set<int>>::value);
 #endif  // GTEST_LANG_CXX11
 #if GTEST_HAS_HASH_SET_
   EXPECT_TRUE(testing::internal::IsHashTable<__gnu_cxx::hash_set<int>>::value);
 #endif  // GTEST_HAS_HASH_SET_
 }
 
 // Tests ArrayEq().
 
 TEST(ArrayEqTest, WorksForDegeneratedArrays) {
   EXPECT_TRUE(ArrayEq(5, 5L));
   EXPECT_FALSE(ArrayEq('a', 0));
 }
 
 TEST(ArrayEqTest, WorksForOneDimensionalArrays) {
   // Note that a and b are distinct but compatible types.
   const int a[] = { 0, 1 };
   long b[] = { 0, 1 };
   EXPECT_TRUE(ArrayEq(a, b));
   EXPECT_TRUE(ArrayEq(a, 2, b));
 
   b[0] = 2;
   EXPECT_FALSE(ArrayEq(a, b));
   EXPECT_FALSE(ArrayEq(a, 1, b));
 }
 
 TEST(ArrayEqTest, WorksForTwoDimensionalArrays) {
   const char a[][3] = { "hi", "lo" };
   const char b[][3] = { "hi", "lo" };
   const char c[][3] = { "hi", "li" };
 
   EXPECT_TRUE(ArrayEq(a, b));
   EXPECT_TRUE(ArrayEq(a, 2, b));
 
   EXPECT_FALSE(ArrayEq(a, c));
   EXPECT_FALSE(ArrayEq(a, 2, c));
 }
 
 // Tests ArrayAwareFind().
 
 TEST(ArrayAwareFindTest, WorksForOneDimensionalArray) {
   const char a[] = "hello";
   EXPECT_EQ(a + 4, ArrayAwareFind(a, a + 5, 'o'));
   EXPECT_EQ(a + 5, ArrayAwareFind(a, a + 5, 'x'));
 }
 
 TEST(ArrayAwareFindTest, WorksForTwoDimensionalArray) {
   int a[][2] = { { 0, 1 }, { 2, 3 }, { 4, 5 } };
   const int b[2] = { 2, 3 };
   EXPECT_EQ(a + 1, ArrayAwareFind(a, a + 3, b));
 
   const int c[2] = { 6, 7 };
   EXPECT_EQ(a + 3, ArrayAwareFind(a, a + 3, c));
 }
 
 // Tests CopyArray().
 
 TEST(CopyArrayTest, WorksForDegeneratedArrays) {
   int n = 0;
   CopyArray('a', &n);
   EXPECT_EQ('a', n);
 }
 
 TEST(CopyArrayTest, WorksForOneDimensionalArrays) {
   const char a[3] = "hi";
   int b[3];
 #ifndef __BORLANDC__  // C++Builder cannot compile some array size deductions.
   CopyArray(a, &b);
   EXPECT_TRUE(ArrayEq(a, b));
 #endif
 
   int c[3];
   CopyArray(a, 3, c);
   EXPECT_TRUE(ArrayEq(a, c));
 }
 
 TEST(CopyArrayTest, WorksForTwoDimensionalArrays) {
   const int a[2][3] = { { 0, 1, 2 }, { 3, 4, 5 } };
   int b[2][3];
 #ifndef __BORLANDC__  // C++Builder cannot compile some array size deductions.
   CopyArray(a, &b);
   EXPECT_TRUE(ArrayEq(a, b));
 #endif
 
   int c[2][3];
   CopyArray(a, 2, c);
   EXPECT_TRUE(ArrayEq(a, c));
 }
 
 // Tests NativeArray.
 
 TEST(NativeArrayTest, ConstructorFromArrayWorks) {
   const int a[3] = { 0, 1, 2 };
   NativeArray<int> na(a, 3, RelationToSourceReference());
   EXPECT_EQ(3U, na.size());
   EXPECT_EQ(a, na.begin());
 }
 
 TEST(NativeArrayTest, CreatesAndDeletesCopyOfArrayWhenAskedTo) {
   typedef int Array[2];
   Array* a = new Array[1];
   (*a)[0] = 0;
   (*a)[1] = 1;
   NativeArray<int> na(*a, 2, RelationToSourceCopy());
   EXPECT_NE(*a, na.begin());
   delete[] a;
   EXPECT_EQ(0, na.begin()[0]);
   EXPECT_EQ(1, na.begin()[1]);
 
   // We rely on the heap checker to verify that na deletes the copy of
   // array.
 }
 
 TEST(NativeArrayTest, TypeMembersAreCorrect) {
   StaticAssertTypeEq<char, NativeArray<char>::value_type>();
   StaticAssertTypeEq<int[2], NativeArray<int[2]>::value_type>();
 
   StaticAssertTypeEq<const char*, NativeArray<char>::const_iterator>();
   StaticAssertTypeEq<const bool(*)[2], NativeArray<bool[2]>::const_iterator>();
 }
 
 TEST(NativeArrayTest, MethodsWork) {
   const int a[3] = { 0, 1, 2 };
   NativeArray<int> na(a, 3, RelationToSourceCopy());
   ASSERT_EQ(3U, na.size());
   EXPECT_EQ(3, na.end() - na.begin());
 
   NativeArray<int>::const_iterator it = na.begin();
   EXPECT_EQ(0, *it);
   ++it;
   EXPECT_EQ(1, *it);
   it++;
   EXPECT_EQ(2, *it);
   ++it;
   EXPECT_EQ(na.end(), it);
 
   EXPECT_TRUE(na == na);
 
   NativeArray<int> na2(a, 3, RelationToSourceReference());
   EXPECT_TRUE(na == na2);
 
   const int b1[3] = { 0, 1, 1 };
   const int b2[4] = { 0, 1, 2, 3 };
   EXPECT_FALSE(na == NativeArray<int>(b1, 3, RelationToSourceReference()));
   EXPECT_FALSE(na == NativeArray<int>(b2, 4, RelationToSourceCopy()));
 }
 
 TEST(NativeArrayTest, WorksForTwoDimensionalArray) {
   const char a[2][3] = { "hi", "lo" };
   NativeArray<char[3]> na(a, 2, RelationToSourceReference());
   ASSERT_EQ(2U, na.size());
   EXPECT_EQ(a, na.begin());
 }
 
 // Tests SkipPrefix().
 
 TEST(SkipPrefixTest, SkipsWhenPrefixMatches) {
   const char* const str = "hello";
 
   const char* p = str;
   EXPECT_TRUE(SkipPrefix("", &p));
   EXPECT_EQ(str, p);
 
   p = str;
   EXPECT_TRUE(SkipPrefix("hell", &p));
   EXPECT_EQ(str + 4, p);
 }
 
 TEST(SkipPrefixTest, DoesNotSkipWhenPrefixDoesNotMatch) {
   const char* const str = "world";
 
   const char* p = str;
   EXPECT_FALSE(SkipPrefix("W", &p));
   EXPECT_EQ(str, p);
 
   p = str;
   EXPECT_FALSE(SkipPrefix("world!", &p));
   EXPECT_EQ(str, p);
 }
 
 // Tests ad_hoc_test_result().
 
 class AdHocTestResultTest : public testing::Test {
  protected:
   static void SetUpTestCase() {
     FAIL() << "A failure happened inside SetUpTestCase().";
   }
 };
 
 TEST_F(AdHocTestResultTest, AdHocTestResultForTestCaseShowsFailure) {
   const testing::TestResult& test_result = testing::UnitTest::GetInstance()
                                                ->current_test_case()
                                                ->ad_hoc_test_result();
   EXPECT_TRUE(test_result.Failed());
 }
 
 TEST_F(AdHocTestResultTest, AdHocTestResultTestForUnitTestDoesNotShowFailure) {
   const testing::TestResult& test_result =
       testing::UnitTest::GetInstance()->ad_hoc_test_result();
   EXPECT_FALSE(test_result.Failed());
 }
diff --git a/googletest/xcode/Config/DebugProject.xcconfig b/googletest/xcode/Config/DebugProject.xcconfig
index 3d68157d..645701e2 100644
--- a/googletest/xcode/Config/DebugProject.xcconfig
+++ b/googletest/xcode/Config/DebugProject.xcconfig
@@ -1,30 +1,30 @@
 //
 //  DebugProject.xcconfig
 //
 //  These are Debug Configuration project settings for the gtest framework and
 //  examples. It is set in the "Based On:" dropdown in the "Project" info
 //  dialog.
 //  This file is based on the Xcode Configuration files in:
-//  http://code.google.com/p/google-toolbox-for-mac/
+//  https://github.com/google/google-toolbox-for-mac
 // 
 
 #include "General.xcconfig"
 
 // No optimization
 GCC_OPTIMIZATION_LEVEL = 0
 
 // Deployment postprocessing is what triggers Xcode to strip, so turn it off
 DEPLOYMENT_POSTPROCESSING = NO
 
 // Dead code stripping off
 DEAD_CODE_STRIPPING = NO
 
 // Debug symbols should be on obviously
 GCC_GENERATE_DEBUGGING_SYMBOLS = YES
 
 // Define the DEBUG macro in all debug builds
 OTHER_CFLAGS = $(OTHER_CFLAGS) -DDEBUG=1
 
 // These are turned off to avoid STL incompatibilities with client code
 // // Turns on special C++ STL checks to "encourage" good STL use
 // GCC_PREPROCESSOR_DEFINITIONS = $(GCC_PREPROCESSOR_DEFINITIONS) _GLIBCXX_DEBUG_PEDANTIC _GLIBCXX_DEBUG _GLIBCPP_CONCEPT_CHECKS
diff --git a/googletest/xcode/Config/FrameworkTarget.xcconfig b/googletest/xcode/Config/FrameworkTarget.xcconfig
index 357b1c8f..77081fbc 100644
--- a/googletest/xcode/Config/FrameworkTarget.xcconfig
+++ b/googletest/xcode/Config/FrameworkTarget.xcconfig
@@ -1,17 +1,17 @@
 //
 //  FrameworkTarget.xcconfig
 //
 //  These are Framework target settings for the gtest framework and examples. It
 //  is set in the "Based On:" dropdown in the "Target" info dialog.
 //  This file is based on the Xcode Configuration files in:
-//  http://code.google.com/p/google-toolbox-for-mac/
+//  https://github.com/google/google-toolbox-for-mac
 // 
 
 // Dynamic libs need to be position independent
 GCC_DYNAMIC_NO_PIC = NO
 
 // Dynamic libs should not have their external symbols stripped.
 STRIP_STYLE = non-global
 
 // Let the user install by specifying the $DSTROOT with xcodebuild
 SKIP_INSTALL = NO
diff --git a/googletest/xcode/Config/General.xcconfig b/googletest/xcode/Config/General.xcconfig
index f23e3222..1aba486f 100644
--- a/googletest/xcode/Config/General.xcconfig
+++ b/googletest/xcode/Config/General.xcconfig
@@ -1,41 +1,41 @@
 //
 //  General.xcconfig
 //
 //  These are General configuration settings for the gtest framework and
 //  examples.
 //  This file is based on the Xcode Configuration files in:
-//  http://code.google.com/p/google-toolbox-for-mac/
+//  https://github.com/google/google-toolbox-for-mac
 // 
 
 // Build for PPC and Intel, 32- and 64-bit
 ARCHS = i386 x86_64 ppc ppc64
 
 // Zerolink prevents link warnings so turn it off
 ZERO_LINK = NO
 
 // Prebinding considered unhelpful in 10.3 and later
 PREBINDING = NO
 
 // Strictest warning policy
 WARNING_CFLAGS = -Wall -Werror -Wendif-labels -Wnewline-eof -Wno-sign-compare -Wshadow
 
 // Work around Xcode bugs by using external strip. See:
 // http://lists.apple.com/archives/Xcode-users/2006/Feb/msg00050.html
 SEPARATE_STRIP = YES
 
 // Force C99 dialect
 GCC_C_LANGUAGE_STANDARD = c99
 
 // Not sure why Apple defaults this on, but it's pretty risky
 ALWAYS_SEARCH_USER_PATHS = NO
 
 // Turn on position dependent code for most cases (overridden where appropriate)
 GCC_DYNAMIC_NO_PIC = YES
 
 // Default SDK and minimum OS version is 10.4
 SDKROOT = $(DEVELOPER_SDK_DIR)/MacOSX10.4u.sdk
 MACOSX_DEPLOYMENT_TARGET = 10.4
 GCC_VERSION = 4.0
 
 // VERSIONING BUILD SETTINGS (used in Info.plist)
 GTEST_VERSIONINFO_ABOUT =  © 2008 Google Inc.
diff --git a/googletest/xcode/Config/ReleaseProject.xcconfig b/googletest/xcode/Config/ReleaseProject.xcconfig
index 5349f0a0..df9a38f8 100644
--- a/googletest/xcode/Config/ReleaseProject.xcconfig
+++ b/googletest/xcode/Config/ReleaseProject.xcconfig
@@ -1,32 +1,32 @@
 //
 //  ReleaseProject.xcconfig
 //
 //  These are Release Configuration project settings for the gtest framework
 //  and examples. It is set in the "Based On:" dropdown in the "Project" info
 //  dialog.
 //  This file is based on the Xcode Configuration files in:
-//  http://code.google.com/p/google-toolbox-for-mac/
+//  https://github.com/google/google-toolbox-for-mac
 // 
 
 #include "General.xcconfig"
 
 // subconfig/Release.xcconfig
 
 // Optimize for space and size (Apple recommendation)
 GCC_OPTIMIZATION_LEVEL = s
 
 // Deployment postprocessing is what triggers Xcode to strip
 DEPLOYMENT_POSTPROCESSING = YES
 
 // No symbols
 GCC_GENERATE_DEBUGGING_SYMBOLS = NO
 
 // Dead code strip does not affect ObjC code but can help for C
 DEAD_CODE_STRIPPING = YES
 
 // NDEBUG is used by things like assert.h, so define it for general compat.
 // ASSERT going away in release tends to create unused vars.
 OTHER_CFLAGS = $(OTHER_CFLAGS) -DNDEBUG=1 -Wno-unused-variable
 
 // When we strip we want to strip all symbols in release, but save externals.
 STRIP_STYLE = all
diff --git a/googletest/xcode/Config/StaticLibraryTarget.xcconfig b/googletest/xcode/Config/StaticLibraryTarget.xcconfig
index 3922fa51..d2424fe8 100644
--- a/googletest/xcode/Config/StaticLibraryTarget.xcconfig
+++ b/googletest/xcode/Config/StaticLibraryTarget.xcconfig
@@ -1,18 +1,18 @@
 //
 //  StaticLibraryTarget.xcconfig
 //
 //  These are static library target settings for libgtest.a. It
 //  is set in the "Based On:" dropdown in the "Target" info dialog.
 //  This file is based on the Xcode Configuration files in:
-//  http://code.google.com/p/google-toolbox-for-mac/
+//  https://github.com/google/google-toolbox-for-mac
 // 
 
 // Static libs can be included in bundles so make them position independent
 GCC_DYNAMIC_NO_PIC = NO
 
 // Static libs should not have their internal globals or external symbols
 // stripped.
 STRIP_STYLE = debugging
 
 // Let the user install by specifying the $DSTROOT with xcodebuild
 SKIP_INSTALL = NO