Clean up the time(2) implementation.

This is also slightly faster in the no-VDSO case (56ns vs 66ns; see the benchmark sketch below the commit message).

Bug: N/A
Test: ran tests, benchmarks
Change-Id: I2b0edd06ee6942eb57c32678279278a53ca5ee9b
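
The 56ns vs 66ns numbers above were presumably measured with a micro-benchmark in the style of the bionic benchmarks (Google Benchmark). The following is a minimal illustrative sketch of such a measurement; BM_time_time is a hypothetical name, not necessarily the exact benchmark that was run:

#include <time.h>
#include <benchmark/benchmark.h>

// Hypothetical micro-benchmark: measures a bare time(nullptr) call,
// which exercises either the vDSO path or the fallback path.
static void BM_time_time(benchmark::State& state) {
  for (auto _ : state) {
    benchmark::DoNotOptimize(time(nullptr));
  }
}
BENCHMARK(BM_time_time);

BENCHMARK_MAIN();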
diff --git a/libc/bionic/vdso.cpp b/libc/bionic/vdso.cpp
index 44899e7..c926a58 100644
--- a/libc/bionic/vdso.cpp
+++ b/libc/bionic/vdso.cpp
@@ -61,12 +61,16 @@
 }
 
 time_t time(time_t* t) {
-  auto vdso_time = reinterpret_cast<decltype(&time)>(
-    __libc_globals->vdso[VDSO_TIME].fn);
+  auto vdso_time = reinterpret_cast<decltype(&time)>(__libc_globals->vdso[VDSO_TIME].fn);
   if (__predict_true(vdso_time)) {
     return vdso_time(t);
   }
-  return __time(t);
+
+  // We can't fall back to the time(2) system call because it doesn't exist for most architectures.
+  timeval tv;
+  if (gettimeofday(&tv, nullptr) == -1) return -1;
+  if (t) *t = tv.tv_sec;
+  return tv.tv_sec;
 }
 
 void __libc_init_vdso(libc_globals* globals, KernelArgumentBlock& args) {
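
For reference, the new fallback path can be exercised in isolation with a sketch like the one below. fallback_time is a hypothetical standalone copy of the logic added in the diff (derive time(2) semantics from gettimeofday(2)), not code from the change itself:

#include <stdio.h>
#include <sys/time.h>
#include <time.h>

// Illustrative copy of the fallback: use gettimeofday(2) when neither a
// time() vDSO entry nor a time(2) system call is available.
static time_t fallback_time(time_t* t) {
  timeval tv;
  if (gettimeofday(&tv, nullptr) == -1) return -1;
  if (t) *t = tv.tv_sec;
  return tv.tv_sec;
}

int main() {
  time_t out = 0;
  time_t now = fallback_time(&out);
  printf("fallback_time() = %ld (out parameter = %ld)\n",
         static_cast<long>(now), static_cast<long>(out));
  // The return value and the out parameter must agree, matching time(2) semantics.
  return now == out ? 0 : 1;
}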