diff --git a/runtime/vm/compiler/assembler/assembler_arm.cc b/runtime/vm/compiler/assembler/assembler_arm.cc
index e35c3c1f24b43..1f56574a47e62 100644
--- a/runtime/vm/compiler/assembler/assembler_arm.cc
+++ b/runtime/vm/compiler/assembler/assembler_arm.cc
@@ -1825,6 +1825,12 @@ void Assembler::LoadClassById(Register result, Register class_id) {
   ldr(result, Address(result, class_id, LSL, kSizeOfClassPairLog2));
 }
 
+void Assembler::LoadClass(Register result, Register object, Register scratch) {
+  ASSERT(scratch != result);
+  LoadClassId(scratch, object);
+  LoadClassById(result, scratch);
+}
+
 void Assembler::CompareClassId(Register object,
                                intptr_t class_id,
                                Register scratch) {
diff --git a/runtime/vm/compiler/assembler/assembler_arm.h b/runtime/vm/compiler/assembler/assembler_arm.h
index 93da8c4e3bedc..e78943db958fe 100644
--- a/runtime/vm/compiler/assembler/assembler_arm.h
+++ b/runtime/vm/compiler/assembler/assembler_arm.h
@@ -860,6 +860,8 @@ class Assembler : public ValueObject {
 
   void LoadClassId(Register result, Register object, Condition cond = AL);
   void LoadClassById(Register result, Register class_id);
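+  // Loads the class of object into result; uses scratch, which must differ from result.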
+  void LoadClass(Register result, Register object, Register scratch);
   void CompareClassId(Register object, intptr_t class_id, Register scratch);
   void LoadClassIdMayBeSmi(Register result, Register object);
   void LoadTaggedClassIdMayBeSmi(Register result, Register object);
diff --git a/runtime/vm/compiler/assembler/assembler_arm64.cc b/runtime/vm/compiler/assembler/assembler_arm64.cc
index c258d9a57d69a..cdd45c870a4e9 100644
--- a/runtime/vm/compiler/assembler/assembler_arm64.cc
+++ b/runtime/vm/compiler/assembler/assembler_arm64.cc
@@ -1115,6 +1115,12 @@ void Assembler::LoadClassById(Register result, Register class_id) {
   ldr(result, Address(result, class_id, UXTX, Address::Scaled));
 }
 
+void Assembler::LoadClass(Register result, Register object) {
+  ASSERT(object != TMP);
+  LoadClassId(TMP, object);
+  LoadClassById(result, TMP);
+}
+
 void Assembler::CompareClassId(Register object,
                                intptr_t class_id,
                                Register scratch) {
diff --git a/runtime/vm/compiler/assembler/assembler_arm64.h b/runtime/vm/compiler/assembler/assembler_arm64.h
index 7807f87ac4ba5..785b0ac02fe13 100644
--- a/runtime/vm/compiler/assembler/assembler_arm64.h
+++ b/runtime/vm/compiler/assembler/assembler_arm64.h
@@ -1531,6 +1531,8 @@ class Assembler : public ValueObject {
   void LoadClassId(Register result, Register object);
   // Overwrites class_id register (it will be tagged afterwards).
   void LoadClassById(Register result, Register class_id);
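+  // Loads the class of object into result. Clobbers TMP; object must not be TMP.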
+  void LoadClass(Register result, Register object);
   void CompareClassId(Register object,
                       intptr_t class_id,
                       Register scratch = kNoRegister);
diff --git a/runtime/vm/compiler/assembler/assembler_ia32.cc b/runtime/vm/compiler/assembler/assembler_ia32.cc
index e2074e5c52d33..1f528e1803444 100644
--- a/runtime/vm/compiler/assembler/assembler_ia32.cc
+++ b/runtime/vm/compiler/assembler/assembler_ia32.cc
@@ -2434,6 +2434,12 @@ void Assembler::LoadClassById(Register result, Register class_id) {
   movl(result, Address(result, class_id, TIMES_8, 0));
 }
 
+void Assembler::LoadClass(Register result, Register object, Register scratch) {
+  ASSERT(scratch != result);
+  LoadClassId(scratch, object);
+  LoadClassById(result, scratch);
+}
+
 void Assembler::CompareClassId(Register object,
                                intptr_t class_id,
                                Register scratch) {
diff --git a/runtime/vm/compiler/assembler/assembler_ia32.h b/runtime/vm/compiler/assembler/assembler_ia32.h
index cb75d28c26c3e..c3b8492f56ce6 100644
--- a/runtime/vm/compiler/assembler/assembler_ia32.h
+++ b/runtime/vm/compiler/assembler/assembler_ia32.h
@@ -666,6 +666,9 @@ class Assembler : public ValueObject {
 
   void LoadClassById(Register result, Register class_id);
 
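+  // Loads the class of object into result; uses scratch, which must differ from result.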
+  void LoadClass(Register result, Register object, Register scratch);
+
   void CompareClassId(Register object, intptr_t class_id, Register scratch);
 
   void LoadClassIdMayBeSmi(Register result, Register object);
diff --git a/runtime/vm/compiler/assembler/assembler_x64.cc b/runtime/vm/compiler/assembler/assembler_x64.cc
index c91098b9b3b7c..d3df2c37019d5 100644
--- a/runtime/vm/compiler/assembler/assembler_x64.cc
+++ b/runtime/vm/compiler/assembler/assembler_x64.cc
@@ -1952,6 +1952,11 @@ void Assembler::LoadClassById(Register result, Register class_id) {
   movq(result, Address(result, class_id, TIMES_8, 0));
 }
 
+void Assembler::LoadClass(Register result, Register object) {
+  LoadClassId(TMP, object);
+  LoadClassById(result, TMP);
+}
+
 void Assembler::CompareClassId(Register object,
                                intptr_t class_id,
                                Register scratch) {
diff --git a/runtime/vm/compiler/assembler/assembler_x64.h b/runtime/vm/compiler/assembler/assembler_x64.h
index 6090c766d5b41..70f7d9e2784af 100644
--- a/runtime/vm/compiler/assembler/assembler_x64.h
+++ b/runtime/vm/compiler/assembler/assembler_x64.h
@@ -782,6 +782,9 @@ class Assembler : public ValueObject {
   // Overwrites class_id register (it will be tagged afterwards).
   void LoadClassById(Register result, Register class_id);
 
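+  // Loads the class of object into result. Clobbers TMP.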
+  void LoadClass(Register result, Register object);
+
   void CompareClassId(Register object,
                       intptr_t class_id,
                       Register scratch = kNoRegister);
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc b/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
index 729d9ffda7909..b6a070fe1a720 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc
@@ -248,14 +248,13 @@ FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest(
   ASSERT(type_class.NumTypeArguments() > 0);
   const Register kInstanceReg = R0;
   Error& bound_error = Error::Handle(zone());
-  const Type& smi_type = Type::Handle(zone(), Type::SmiType());
+  const Type& int_type = Type::Handle(zone(), Type::IntType());
   const bool smi_is_ok =
-      smi_type.IsSubtypeOf(type, &bound_error, NULL, Heap::kOld);
+      int_type.IsSubtypeOf(type, &bound_error, NULL, Heap::kOld);
   // Malformed type should have been handled at graph construction time.
   ASSERT(smi_is_ok || bound_error.IsNull());
   __ tst(kInstanceReg, Operand(kSmiTagMask));
   if (smi_is_ok) {
-    // Fast case for type = FutureOr<int/num/top-type>.
     __ b(is_instance_lbl, EQ);
   } else {
     __ b(is_not_instance_lbl, EQ);
@@ -287,7 +286,7 @@ FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest(
     ASSERT(!tp_argument.IsMalformed());
     if (tp_argument.IsType()) {
       ASSERT(tp_argument.HasResolvedTypeClass());
-      // Check if type argument is dynamic, Object, or void.
+      // Check if type argument is dynamic or Object.
       const Type& object_type = Type::Handle(zone(), Type::ObjectType());
       if (object_type.IsSubtypeOf(tp_argument, NULL, NULL, Heap::kOld)) {
         // Instance class test only necessary.
@@ -342,7 +341,6 @@ bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest(
   if (smi_class.IsSubtypeOf(Object::null_type_arguments(), type_class,
                             Object::null_type_arguments(), NULL, NULL,
                             Heap::kOld)) {
-    // Fast case for type = int/num/top-type.
     __ b(is_instance_lbl, EQ);
   } else {
     __ b(is_not_instance_lbl, EQ);
@@ -400,14 +398,7 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateSubtype1TestCacheLookup(
     Label* is_not_instance_lbl) {
   __ Comment("Subtype1TestCacheLookup");
   const Register kInstanceReg = R0;
-#if defined(DEBUG)
-  Label ok;
-  __ BranchIfNotSmi(kInstanceReg, &ok);
-  __ Breakpoint();
-  __ Bind(&ok);
-#endif
-  __ LoadClassId(R2, kInstanceReg);
-  __ LoadClassById(R1, R2);
+  __ LoadClass(R1, kInstanceReg, R2);
   // R1: instance class.
   // Check immediate superclass equality.
   __ ldr(R2, FieldAddress(R1, Class::super_type_offset()));
@@ -456,13 +447,12 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
     __ ldr(R3, FieldAddress(kTypeArgumentsReg,
                             TypeArguments::type_at_offset(type_param.index())));
     // R3: concrete type of type.
-    // Check if type argument is dynamic, Object, or void.
+    // Check if type argument is dynamic.
     __ CompareObject(R3, Object::dynamic_type());
     __ b(is_instance_lbl, EQ);
     __ CompareObject(R3, Type::ZoneHandle(zone(), Type::ObjectType()));
     __ b(is_instance_lbl, EQ);
-    __ CompareObject(R3, Object::void_type());
-    __ b(is_instance_lbl, EQ);
+    // TODO(regis): Optimize void type as well once allowed as type argument.
 
     // For Smi check quickly against int and num interfaces.
     Label not_smi;
@@ -472,8 +462,9 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
     __ b(is_instance_lbl, EQ);
     __ CompareObject(R3, Type::ZoneHandle(zone(), Type::Number()));
     __ b(is_instance_lbl, EQ);
-    // Smi can be handled by type test cache.
-    __ Bind(&not_smi);
+    // Smi must be handled in the runtime.
+    Label fall_through;
+    __ b(&fall_through);
 
     // If it's guaranteed, by type-parameter bound, that the type parameter will
     // never have a value of a function type, then we can safely do a 4-type
@@ -483,18 +474,17 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
                          ? kTestTypeSixArgs
                          : kTestTypeFourArgs;
 
+    __ Bind(&not_smi);
     const SubtypeTestCache& type_test_cache = SubtypeTestCache::ZoneHandle(
         zone(), GenerateCallSubtypeTestStub(
                     test_kind, kInstanceReg, kInstantiatorTypeArgumentsReg,
                     kFunctionTypeArgumentsReg, kTempReg, is_instance_lbl,
                     is_not_instance_lbl));
+    __ Bind(&fall_through);
     return type_test_cache.raw();
   }
   if (type.IsType()) {
-    // Smi is FutureOr<T>, when T is a top type or int or num.
-    if (!FLAG_strong || !Class::Handle(type.type_class()).IsFutureOrClass()) {
-      __ BranchIfSmi(kInstanceReg, is_not_instance_lbl);
-    }
+    __ BranchIfSmi(kInstanceReg, is_not_instance_lbl);
     __ ldm(IA, SP,
            (1 << kFunctionTypeArgumentsReg) |
                (1 << kInstantiatorTypeArgumentsReg));
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc b/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
index 4812ac496591b..eb23398a0e55b 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc
@@ -243,12 +243,11 @@ FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest(
   ASSERT(type_class.NumTypeArguments() > 0);
   const Register kInstanceReg = R0;
   Error& bound_error = Error::Handle(zone());
-  const Type& smi_type = Type::Handle(zone(), Type::SmiType());
+  const Type& int_type = Type::Handle(zone(), Type::IntType());
   const bool smi_is_ok =
-      smi_type.IsSubtypeOf(type, &bound_error, NULL, Heap::kOld);
+      int_type.IsSubtypeOf(type, &bound_error, NULL, Heap::kOld);
   // Malformed type should have been handled at graph construction time.
   ASSERT(smi_is_ok || bound_error.IsNull());
-  // Fast case for type = FutureOr<int/num/top-type>.
   __ BranchIfSmi(kInstanceReg,
                  smi_is_ok ? is_instance_lbl : is_not_instance_lbl);
 
@@ -279,7 +278,7 @@ FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest(
     ASSERT(!tp_argument.IsMalformed());
     if (tp_argument.IsType()) {
       ASSERT(tp_argument.HasResolvedTypeClass());
-      // Check if type argument is dynamic, Object, or void.
+      // Check if type argument is dynamic or Object.
       const Type& object_type = Type::Handle(zone(), Type::ObjectType());
       if (object_type.IsSubtypeOf(tp_argument, NULL, NULL, Heap::kOld)) {
         // Instance class test only necessary.
@@ -332,7 +331,6 @@ bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest(
   if (smi_class.IsSubtypeOf(Object::null_type_arguments(), type_class,
                             Object::null_type_arguments(), NULL, NULL,
                             Heap::kOld)) {
-    // Fast case for type = int/num/top-type.
     __ BranchIfSmi(kInstanceReg, is_instance_lbl);
   } else {
     __ BranchIfSmi(kInstanceReg, is_not_instance_lbl);
@@ -390,14 +388,7 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateSubtype1TestCacheLookup(
     Label* is_not_instance_lbl) {
   __ Comment("Subtype1TestCacheLookup");
   const Register kInstanceReg = R0;
-#if defined(DEBUG)
-  Label ok;
-  __ BranchIfNotSmi(kInstanceReg, &ok);
-  __ Breakpoint();
-  __ Bind(&ok);
-#endif
-  __ LoadClassId(TMP, kInstanceReg);
-  __ LoadClassById(R1, TMP);
+  __ LoadClass(R1, kInstanceReg);
   // R1: instance class.
   // Check immediate superclass equality.
   __ LoadFieldFromOffset(R2, R1, Class::super_type_offset());
@@ -444,13 +435,12 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
     __ LoadFieldFromOffset(R3, kTypeArgumentsReg,
                            TypeArguments::type_at_offset(type_param.index()));
     // R3: concrete type of type.
-    // Check if type argument is dynamic, Object, or void.
+    // Check if type argument is dynamic.
     __ CompareObject(R3, Object::dynamic_type());
     __ b(is_instance_lbl, EQ);
     __ CompareObject(R3, Type::ZoneHandle(zone(), Type::ObjectType()));
     __ b(is_instance_lbl, EQ);
-    __ CompareObject(R3, Object::void_type());
-    __ b(is_instance_lbl, EQ);
+    // TODO(regis): Optimize void type as well once allowed as type argument.
 
     // For Smi check quickly against int and num interfaces.
     Label not_smi;
@@ -459,8 +449,9 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
     __ b(is_instance_lbl, EQ);
     __ CompareObject(R3, Type::ZoneHandle(zone(), Type::Number()));
     __ b(is_instance_lbl, EQ);
-    // Smi can be handled by type test cache.
-    __ Bind(&not_smi);
+    // Smi must be handled in the runtime.
+    Label fall_through;
+    __ b(&fall_through);
 
     // If it's guaranteed, by type-parameter bound, that the type parameter will
     // never have a value of a function type, then we can safely do a 4-type
@@ -470,18 +461,17 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
                          ? kTestTypeSixArgs
                          : kTestTypeFourArgs;
 
+    __ Bind(&not_smi);
     const SubtypeTestCache& type_test_cache = SubtypeTestCache::ZoneHandle(
         zone(), GenerateCallSubtypeTestStub(
                     test_kind, kInstanceReg, kInstantiatorTypeArgumentsReg,
                     kFunctionTypeArgumentsReg, kTempReg, is_instance_lbl,
                     is_not_instance_lbl));
+    __ Bind(&fall_through);
     return type_test_cache.raw();
   }
   if (type.IsType()) {
-    // Smi is FutureOr<T>, when T is a top type or int or num.
-    if (!FLAG_strong || !Class::Handle(type.type_class()).IsFutureOrClass()) {
-      __ BranchIfSmi(kInstanceReg, is_not_instance_lbl);
-    }
+    __ BranchIfSmi(kInstanceReg, is_not_instance_lbl);
     __ ldp(kFunctionTypeArgumentsReg, kInstantiatorTypeArgumentsReg,
            Address(SP, 0 * kWordSize, Address::PairOffset));
     // Uninstantiated type class is known at compile time, but the type
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc b/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc
index f6dc95a399a2b..d8de557a05c0c 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc
@@ -256,14 +256,13 @@ FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest(
   ASSERT(type_class.NumTypeArguments() > 0);
   const Register kInstanceReg = EAX;
   Error& bound_error = Error::Handle(zone());
-  const Type& smi_type = Type::Handle(zone(), Type::SmiType());
+  const Type& int_type = Type::Handle(zone(), Type::IntType());
   const bool smi_is_ok =
-      smi_type.IsSubtypeOf(type, &bound_error, NULL, Heap::kOld);
+      int_type.IsSubtypeOf(type, &bound_error, NULL, Heap::kOld);
   // Malformed type should have been handled at graph construction time.
   ASSERT(smi_is_ok || bound_error.IsNull());
   __ testl(kInstanceReg, Immediate(kSmiTagMask));
   if (smi_is_ok) {
-    // Fast case for type = FutureOr<int/num/top-type>.
     __ j(ZERO, is_instance_lbl);
   } else {
     __ j(ZERO, is_not_instance_lbl);
@@ -295,7 +294,7 @@ FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest(
     ASSERT(!tp_argument.IsMalformed());
     if (tp_argument.IsType()) {
       ASSERT(tp_argument.HasResolvedTypeClass());
-      // Check if type argument is dynamic, Object, or void.
+      // Check if type argument is dynamic or Object.
       const Type& object_type = Type::Handle(zone(), Type::ObjectType());
       if (object_type.IsSubtypeOf(tp_argument, NULL, NULL, Heap::kOld)) {
         // Instance class test only necessary.
@@ -348,7 +347,6 @@ bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest(
   if (smi_class.IsSubtypeOf(Object::null_type_arguments(), type_class,
                             Object::null_type_arguments(), NULL, NULL,
                             Heap::kOld)) {
-    // Fast case for type = int/num/top-type.
     __ j(ZERO, is_instance_lbl);
   } else {
     __ j(ZERO, is_not_instance_lbl);
@@ -406,14 +404,7 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateSubtype1TestCacheLookup(
     Label* is_not_instance_lbl) {
   __ Comment("Subtype1TestCacheLookup");
   const Register kInstanceReg = EAX;
-#if defined(DEBUG)
-  Label ok;
-  __ BranchIfNotSmi(kInstanceReg, &ok);
-  __ Breakpoint();
-  __ Bind(&ok);
-#endif
-  __ LoadClassId(EDI, kInstanceReg);
-  __ LoadClassById(ECX, EDI);
+  __ LoadClass(ECX, kInstanceReg, EDI);
   // ECX: instance class.
   // Check immediate superclass equality.
   __ movl(EDI, FieldAddress(ECX, Class::super_type_offset()));
@@ -464,13 +455,12 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
     __ movl(EDI, FieldAddress(kTypeArgumentsReg, TypeArguments::type_at_offset(
                                                      type_param.index())));
     // EDI: concrete type of type.
-    // Check if type argument is dynamic, Object, or void.
+    // Check if type argument is dynamic.
     __ CompareObject(EDI, Object::dynamic_type());
     __ j(EQUAL, is_instance_lbl);
     __ CompareObject(EDI, Type::ZoneHandle(zone(), Type::ObjectType()));
     __ j(EQUAL, is_instance_lbl);
-    __ CompareObject(EDI, Object::void_type());
-    __ j(EQUAL, is_instance_lbl);
+    // TODO(regis): Optimize void type as well once allowed as type argument.
 
     // For Smi check quickly against int and num interfaces.
     Label not_smi;
@@ -480,8 +470,9 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
     __ j(EQUAL, is_instance_lbl);
     __ CompareObject(EDI, Type::ZoneHandle(zone(), Type::Number()));
     __ j(EQUAL, is_instance_lbl);
-    // Smi can be handled by type test cache.
-    __ Bind(&not_smi);
+    // Smi must be handled in the runtime.
+    Label fall_through;
+    __ jmp(&fall_through);
 
     // If it's guaranteed, by type-parameter bound, that the type parameter will
     // never have a value of a function type.
@@ -490,19 +481,18 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
                          ? kTestTypeSixArgs
                          : kTestTypeFourArgs;
 
+    __ Bind(&not_smi);
     const SubtypeTestCache& type_test_cache = SubtypeTestCache::ZoneHandle(
         zone(), GenerateCallSubtypeTestStub(
                     test_kind, kInstanceReg, kInstantiatorTypeArgumentsReg,
                     kFunctionTypeArgumentsReg, kTempReg, is_instance_lbl,
                     is_not_instance_lbl));
+    __ Bind(&fall_through);
     return type_test_cache.raw();
   }
   if (type.IsType()) {
-    // Smi is FutureOr<T>, when T is a top type or int or num.
-    if (!FLAG_strong || !Class::Handle(type.type_class()).IsFutureOrClass()) {
-      __ testl(kInstanceReg, Immediate(kSmiTagMask));  // Is instance Smi?
-      __ j(ZERO, is_not_instance_lbl);
-    }
+    __ testl(kInstanceReg, Immediate(kSmiTagMask));  // Is instance Smi?
+    __ j(ZERO, is_not_instance_lbl);
     __ movl(kInstantiatorTypeArgumentsReg, Address(ESP, 1 * kWordSize));
     __ movl(kFunctionTypeArgumentsReg, Address(ESP, 0 * kWordSize));
     // Uninstantiated type class is known at compile time, but the type
diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc b/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
index 2471b4e23b9ee..d3e58d8ea3b72 100644
--- a/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
+++ b/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc
@@ -248,14 +248,13 @@ FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest(
   ASSERT(type_class.NumTypeArguments() > 0);
   const Register kInstanceReg = RAX;
   Error& bound_error = Error::Handle(zone());
-  const Type& smi_type = Type::Handle(zone(), Type::SmiType());
+  const Type& int_type = Type::Handle(zone(), Type::IntType());
   const bool smi_is_ok =
-      smi_type.IsSubtypeOf(type, &bound_error, NULL, Heap::kOld);
+      int_type.IsSubtypeOf(type, &bound_error, NULL, Heap::kOld);
   // Malformed type should have been handled at graph construction time.
   ASSERT(smi_is_ok || bound_error.IsNull());
   __ testq(kInstanceReg, Immediate(kSmiTagMask));
   if (smi_is_ok) {
-    // Fast case for type = FutureOr<int/num/top-type>.
     __ j(ZERO, is_instance_lbl);
   } else {
     __ j(ZERO, is_not_instance_lbl);
@@ -288,7 +287,7 @@ FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest(
     ASSERT(!tp_argument.IsMalformed());
     if (tp_argument.IsType()) {
       ASSERT(tp_argument.HasResolvedTypeClass());
-      // Check if type argument is dynamic, Object, or void.
+      // Check if type argument is dynamic or Object.
       const Type& object_type = Type::Handle(zone(), Type::ObjectType());
       if (object_type.IsSubtypeOf(tp_argument, NULL, NULL, Heap::kOld)) {
         // Instance class test only necessary.
@@ -346,7 +345,6 @@ bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest(
   if (smi_class.IsSubtypeOf(Object::null_type_arguments(), type_class,
                             Object::null_type_arguments(), NULL, NULL,
                             Heap::kOld)) {
-    // Fast case for type = int/num/top-type.
     __ j(ZERO, is_instance_lbl);
   } else {
     __ j(ZERO, is_not_instance_lbl);
@@ -408,14 +406,7 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateSubtype1TestCacheLookup(
     Label* is_not_instance_lbl) {
   __ Comment("Subtype1TestCacheLookup");
   const Register kInstanceReg = RAX;
-#if defined(DEBUG)
-  Label ok;
-  __ BranchIfNotSmi(kInstanceReg, &ok);
-  __ Breakpoint();
-  __ Bind(&ok);
-#endif
-  __ LoadClassId(TMP, kInstanceReg);
-  __ LoadClassById(R10, TMP);
+  __ LoadClass(R10, kInstanceReg);
   // R10: instance class.
   // Check immediate superclass equality.
   __ movq(R13, FieldAddress(R10, Class::super_type_offset()));
@@ -467,14 +458,13 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
     __ movq(RDI, FieldAddress(kTypeArgumentsReg, TypeArguments::type_at_offset(
                                                      type_param.index())));
     // RDI: Concrete type of type.
-    // Check if type argument is dynamic, Object, or void.
+    // Check if type argument is dynamic.
     __ CompareObject(RDI, Object::dynamic_type());
     __ j(EQUAL, is_instance_lbl);
     const Type& object_type = Type::ZoneHandle(zone(), Type::ObjectType());
     __ CompareObject(RDI, object_type);
     __ j(EQUAL, is_instance_lbl);
-    __ CompareObject(RDI, Object::void_type());
-    __ j(EQUAL, is_instance_lbl);
+    // TODO(regis): Optimize void type as well once allowed as type argument.
 
     // For Smi check quickly against int and num interfaces.
     Label not_smi;
@@ -484,8 +474,9 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
     __ j(EQUAL, is_instance_lbl);
     __ CompareObject(RDI, Type::ZoneHandle(zone(), Type::Number()));
     __ j(EQUAL, is_instance_lbl);
-    // Smi can be handled by type test cache.
-    __ Bind(&not_smi);
+    // Smi must be handled in the runtime.
+    Label fall_through;
+    __ jmp(&fall_through);
 
     // If it's guaranteed, by type-parameter bound, that the type parameter will
     // never have a value of a function type, then we can safely do a 4-type
@@ -495,19 +486,18 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
                          ? kTestTypeSixArgs
                          : kTestTypeFourArgs;
 
+    __ Bind(&not_smi);
     const SubtypeTestCache& type_test_cache = SubtypeTestCache::ZoneHandle(
         zone(), GenerateCallSubtypeTestStub(
                     test_kind, kInstanceReg, kInstantiatorTypeArgumentsReg,
                     kFunctionTypeArgumentsReg, kTempReg, is_instance_lbl,
                     is_not_instance_lbl));
+    __ Bind(&fall_through);
     return type_test_cache.raw();
   }
   if (type.IsType()) {
-    // Smi is FutureOr<T>, when T is a top type or int or num.
-    if (!FLAG_strong || !Class::Handle(type.type_class()).IsFutureOrClass()) {
-      __ testq(kInstanceReg, Immediate(kSmiTagMask));  // Is instance Smi?
-      __ j(ZERO, is_not_instance_lbl);
-    }
+    __ testq(kInstanceReg, Immediate(kSmiTagMask));  // Is instance Smi?
+    __ j(ZERO, is_not_instance_lbl);
     // Uninstantiated type class is known at compile time, but the type
     // arguments are determined at runtime by the instantiator(s).
     return GenerateCallSubtypeTestStub(kTestTypeFourArgs, kInstanceReg,
diff --git a/runtime/vm/runtime_entry.cc b/runtime/vm/runtime_entry.cc
index 5994aed216714..ad00ea5842c00 100644
--- a/runtime/vm/runtime_entry.cc
+++ b/runtime/vm/runtime_entry.cc
@@ -651,17 +651,18 @@ static void UpdateTypeTestCache(
     }
     return;
   }
-  Class& instance_class = Class::Handle(zone);
   if (instance.IsSmi()) {
-    instance_class = Smi::Class();
-  } else {
-    instance_class = instance.clazz();
+    if (FLAG_trace_type_checks) {
+      OS::PrintErr("UpdateTypeTestCache: instance is Smi\n");
+    }
+    return;
   }
   // If the type is uninstantiated and refers to parent function type
   // parameters, the function_type_arguments have been canonicalized
   // when concatenated.
   ASSERT(function_type_arguments.IsNull() ||
          function_type_arguments.IsCanonical());
+  const Class& instance_class = Class::Handle(zone, instance.clazz());
   auto& instance_class_id_or_function = Object::Handle(zone);
   auto& instance_type_arguments = TypeArguments::Handle(zone);
   auto& instance_parent_function_type_arguments = TypeArguments::Handle(zone);
@@ -747,16 +748,14 @@ static void UpdateTypeTestCache(
         "  Updated test cache %p ix: %" Pd
         " with "
         "(cid-or-fun: %p, type-args: %p, i-type-args: %p, f-type-args: %p, "
-        "p-type-args: %p, d-type-args: %p, result: %s)\n"
+        "result: %s)\n"
         "    instance  [class: (%p '%s' cid: %" Pd
         "),    type-args: %p %s]\n"
         "    test-type [class: (%p '%s' cid: %" Pd
         "), i-type-args: %p %s, f-type-args: %p %s]\n",
         new_cache.raw(), len, instance_class_id_or_function.raw(),
         instance_type_arguments.raw(), instantiator_type_arguments.raw(),
-        function_type_arguments.raw(),
-        instance_parent_function_type_arguments.raw(),
-        instance_delayed_type_arguments.raw(), result.ToCString(),
+        function_type_arguments.raw(), result.ToCString(),
         instance_class.raw(), instance_class_name.ToCString(),
         instance_class.id(), instance_type_arguments.raw(),
         instance_type_arguments.ToCString(), type_class.raw(),
diff --git a/runtime/vm/stub_code_arm.cc b/runtime/vm/stub_code_arm.cc
index 2ddd1dc2f63e1..879101321fa90 100644
--- a/runtime/vm/stub_code_arm.cc
+++ b/runtime/vm/stub_code_arm.cc
@@ -2007,11 +2007,7 @@ static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) {
   __ AddImmediate(kCacheReg, Array::data_offset() - kHeapObjectTag);
 
   Label loop, not_closure;
-  if (n >= 4) {
-    __ LoadClassIdMayBeSmi(kInstanceCidOrFunction, kInstanceReg);
-  } else {
-    __ LoadClassId(kInstanceCidOrFunction, kInstanceReg);
-  }
+  __ LoadClassId(kInstanceCidOrFunction, kInstanceReg);
   __ CompareImmediate(kInstanceCidOrFunction, kClosureCid);
   __ b(&not_closure, NE);
 
@@ -2299,6 +2295,7 @@ void StubCode::GenerateLazySpecializeTypeTestStub(Assembler* assembler) {
 void StubCode::GenerateSlowTypeTestStub(Assembler* assembler) {
   Label done, call_runtime;
 
+  const Register kInstanceReg = R0;
   const Register kFunctionTypeArgumentsReg = R1;
   const Register kDstTypeReg = R8;
   const Register kSubtypeTestCacheReg = R3;
@@ -2306,7 +2303,6 @@ void StubCode::GenerateSlowTypeTestStub(Assembler* assembler) {
   __ EnterStubFrame();
 
 #ifdef DEBUG
-  const Register kInstanceReg = R0;
   // Guaranteed by caller.
   Label no_error;
   __ CompareObject(kInstanceReg, Object::null_object());
@@ -2315,6 +2311,10 @@ void StubCode::GenerateSlowTypeTestStub(Assembler* assembler) {
   __ Bind(&no_error);
 #endif
 
+  // Need to handle slow cases of [Smi]s here because the
+  // [SubtypeTestCache]-based stubs do not handle [Smi]s.
+  __ BranchIfSmi(kInstanceReg, &call_runtime);
+
   // If the subtype-cache is null, it needs to be lazily-created by the runtime.
   __ CompareObject(kSubtypeTestCacheReg, Object::null_object());
   __ BranchIf(EQUAL, &call_runtime);
diff --git a/runtime/vm/stub_code_arm64.cc b/runtime/vm/stub_code_arm64.cc
index fc8727942c581..8aa821379d84c 100644
--- a/runtime/vm/stub_code_arm64.cc
+++ b/runtime/vm/stub_code_arm64.cc
@@ -2271,11 +2271,7 @@ static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) {
   __ AddImmediate(kCacheReg, Array::data_offset() - kHeapObjectTag);
 
   Label loop, not_closure;
-  if (n >= 4) {
-    __ LoadClassIdMayBeSmi(kInstanceCidOrFunction, kInstanceReg);
-  } else {
-    __ LoadClassId(kInstanceCidOrFunction, kInstanceReg);
-  }
+  __ LoadClassId(kInstanceCidOrFunction, kInstanceReg);
   __ CompareImmediate(kInstanceCidOrFunction, kClosureCid);
   __ b(&not_closure, NE);
 
@@ -2571,6 +2567,7 @@ void StubCode::GenerateLazySpecializeTypeTestStub(Assembler* assembler) {
 void StubCode::GenerateSlowTypeTestStub(Assembler* assembler) {
   Label done, call_runtime;
 
+  const Register kInstanceReg = R0;
   const Register kInstantiatorTypeArgumentsReg = R1;
 
   const Register kSubtypeTestCacheReg = R3;
@@ -2579,7 +2576,6 @@ void StubCode::GenerateSlowTypeTestStub(Assembler* assembler) {
   __ EnterStubFrame();
 
 #ifdef DEBUG
-  const Register kInstanceReg = R0;
   // Guaranteed by caller.
   Label no_error;
   __ CompareObject(kInstanceReg, Object::null_object());
@@ -2588,6 +2584,10 @@ void StubCode::GenerateSlowTypeTestStub(Assembler* assembler) {
   __ Bind(&no_error);
 #endif
 
+  // Need to handle slow cases of [Smi]s here because the
+  // [SubtypeTestCache]-based stubs do not handle [Smi]s.
+  __ BranchIfSmi(kInstanceReg, &call_runtime);
+
   // If the subtype-cache is null, it needs to be lazily-created by the runtime.
   __ CompareObject(kSubtypeTestCacheReg, Object::null_object());
   __ BranchIf(EQUAL, &call_runtime);
diff --git a/runtime/vm/stub_code_ia32.cc b/runtime/vm/stub_code_ia32.cc
index d609e2a676c09..4535bad30b4df 100644
--- a/runtime/vm/stub_code_ia32.cc
+++ b/runtime/vm/stub_code_ia32.cc
@@ -1752,11 +1752,7 @@ static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) {
   __ addl(EDX, Immediate(Array::data_offset() - kHeapObjectTag));
 
   Label loop, not_closure;
-  if (n >= 4) {
-    __ LoadClassIdMayBeSmi(kInstanceCidOrFunction, kInstanceReg);
-  } else {
-    __ LoadClassId(kInstanceCidOrFunction, kInstanceReg);
-  }
+  __ LoadClassId(kInstanceCidOrFunction, kInstanceReg);
   __ cmpl(kInstanceCidOrFunction, Immediate(kClosureCid));
   __ j(NOT_EQUAL, &not_closure, Assembler::kNearJump);
 
diff --git a/runtime/vm/stub_code_x64.cc b/runtime/vm/stub_code_x64.cc
index 295d7ce427723..c8f42180b7c31 100644
--- a/runtime/vm/stub_code_x64.cc
+++ b/runtime/vm/stub_code_x64.cc
@@ -2291,12 +2291,8 @@ static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) {
   __ addq(RSI, Immediate(Array::data_offset() - kHeapObjectTag));
 
   Label loop, not_closure;
-  if (n >= 4) {
-    __ LoadClassIdMayBeSmi(kInstanceCidOrFunction, kInstanceReg);
-  } else {
-    __ LoadClassId(kInstanceCidOrFunction, kInstanceReg);
-  }
-  __ cmpq(kInstanceCidOrFunction, Immediate(kClosureCid));
+  __ LoadClassId(kInstanceCidOrFunction, kInstanceReg);
+  __ cmpq(kInstanceCidOrFunction, Immediate(kClosureCid));
   __ j(NOT_EQUAL, &not_closure, Assembler::kNearJump);
 
   // Closure handling.
@@ -2585,13 +2581,13 @@ void StubCode::GenerateLazySpecializeTypeTestStub(Assembler* assembler) {
 void StubCode::GenerateSlowTypeTestStub(Assembler* assembler) {
   Label done, call_runtime;
 
+  const Register kInstanceReg = RAX;
   const Register kDstTypeReg = RBX;
   const Register kSubtypeTestCacheReg = R9;
 
   __ EnterStubFrame();
 
 #ifdef DEBUG
-  const Register kInstanceReg = RAX;
   // Guaranteed by caller.
   Label no_error;
   __ CompareObject(kInstanceReg, Object::null_object());
@@ -2600,6 +2596,10 @@ void StubCode::GenerateSlowTypeTestStub(Assembler* assembler) {
   __ Bind(&no_error);
 #endif
 
+  // Need to handle slow cases of [Smi]s here because the
+  // [SubtypeTestCache]-based stubs do not handle [Smi]s.
+  __ BranchIfSmi(kInstanceReg, &call_runtime);
+
   // If the subtype-cache is null, it needs to be lazily-created by the runtime.
   __ CompareObject(kSubtypeTestCacheReg, Object::null_object());
   __ BranchIf(EQUAL, &call_runtime);