[cts]: Generate the case cache on CTS roll

Instead of generating it on every CTS test run.
The cache .json files are packed into a new './webgpu-cts/cache.tar.gz' binary file.

Cache generation is parallelized across the number of CPUs on the machine. This requires the CTS CL:
https://github.com/gpuweb/cts/commit/4619a2b1936f2ae34f7de4a028324bc60ed31670

Added a new tool, `./tools/run cts build-cache`, to locally rebuild the cache file and cache list.
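
A rough sketch of invoking the new tool (flag names and default paths are those
registered in build_cache.go below; paths are shown relative to the Dawn checkout root):

  # Rebuild the cache and file list with the defaults:
  ./tools/run cts build-cache

  # Or with explicit paths:
  ./tools/run cts build-cache \
      --cts third_party/webgpu-cts \
      --out-tar webgpu-cts/cache.tar.gz \
      --out-list third_party/gn/webgpu-cts/cache_list.txt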

Change-Id: I97df055f9bf3fe99ac3134a2bde6704a9020932e
Reviewed-on: https://dawn-review.googlesource.com/c/dawn/+/155140
Reviewed-by: dan sinclair <dsinclair@chromium.org>
Commit-Queue: Ben Clayton <bclayton@google.com>
Kokoro: Kokoro <noreply+kokoro@google.com>
diff --git a/third_party/gn/webgpu-cts/cache_list.txt b/third_party/gn/webgpu-cts/cache_list.txt
index 7b827cc..c557af7 100644
--- a/third_party/gn/webgpu-cts/cache_list.txt
+++ b/third_party/gn/webgpu-cts/cache_list.txt
@@ -1,3 +1,11 @@
+data/webgpu/shader/execution/case-cache/abs.json
+data/webgpu/shader/execution/case-cache/acos.json
+data/webgpu/shader/execution/case-cache/acosh.json
+data/webgpu/shader/execution/case-cache/asin.json
+data/webgpu/shader/execution/case-cache/asinh.json
+data/webgpu/shader/execution/case-cache/atan.json
+data/webgpu/shader/execution/case-cache/atan2.json
+data/webgpu/shader/execution/case-cache/atanh.json
 data/webgpu/shader/execution/case-cache/binary/af_addition.json
 data/webgpu/shader/execution/case-cache/binary/af_logical.json
 data/webgpu/shader/execution/case-cache/binary/af_matrix_addition.json
@@ -5,8 +13,8 @@
 data/webgpu/shader/execution/case-cache/binary/af_multiplication.json
 data/webgpu/shader/execution/case-cache/binary/af_subtraction.json
 data/webgpu/shader/execution/case-cache/binary/f16_addition.json
-data/webgpu/shader/execution/case-cache/binary/f16_logical.json
 data/webgpu/shader/execution/case-cache/binary/f16_division.json
+data/webgpu/shader/execution/case-cache/binary/f16_logical.json
 data/webgpu/shader/execution/case-cache/binary/f16_matrix_addition.json
 data/webgpu/shader/execution/case-cache/binary/f16_matrix_matrix_multiplication.json
 data/webgpu/shader/execution/case-cache/binary/f16_matrix_scalar_multiplication.json
@@ -16,8 +24,8 @@
 data/webgpu/shader/execution/case-cache/binary/f16_remainder.json
 data/webgpu/shader/execution/case-cache/binary/f16_subtraction.json
 data/webgpu/shader/execution/case-cache/binary/f32_addition.json
-data/webgpu/shader/execution/case-cache/binary/f32_logical.json
 data/webgpu/shader/execution/case-cache/binary/f32_division.json
+data/webgpu/shader/execution/case-cache/binary/f32_logical.json
 data/webgpu/shader/execution/case-cache/binary/f32_matrix_addition.json
 data/webgpu/shader/execution/case-cache/binary/f32_matrix_matrix_multiplication.json
 data/webgpu/shader/execution/case-cache/binary/f32_matrix_scalar_multiplication.json
@@ -30,14 +38,6 @@
 data/webgpu/shader/execution/case-cache/binary/i32_comparison.json
 data/webgpu/shader/execution/case-cache/binary/u32_arithmetic.json
 data/webgpu/shader/execution/case-cache/binary/u32_comparison.json
-data/webgpu/shader/execution/case-cache/abs.json
-data/webgpu/shader/execution/case-cache/acos.json
-data/webgpu/shader/execution/case-cache/acosh.json
-data/webgpu/shader/execution/case-cache/asin.json
-data/webgpu/shader/execution/case-cache/asinh.json
-data/webgpu/shader/execution/case-cache/atan.json
-data/webgpu/shader/execution/case-cache/atan2.json
-data/webgpu/shader/execution/case-cache/atanh.json
 data/webgpu/shader/execution/case-cache/bitcast.json
 data/webgpu/shader/execution/case-cache/ceil.json
 data/webgpu/shader/execution/case-cache/clamp.json
@@ -83,11 +83,6 @@
 data/webgpu/shader/execution/case-cache/tanh.json
 data/webgpu/shader/execution/case-cache/transpose.json
 data/webgpu/shader/execution/case-cache/trunc.json
-data/webgpu/shader/execution/case-cache/unpack2x16float.json
-data/webgpu/shader/execution/case-cache/unpack2x16snorm.json
-data/webgpu/shader/execution/case-cache/unpack2x16unorm.json
-data/webgpu/shader/execution/case-cache/unpack4x8snorm.json
-data/webgpu/shader/execution/case-cache/unpack4x8unorm.json
 data/webgpu/shader/execution/case-cache/unary/af_arithmetic.json
 data/webgpu/shader/execution/case-cache/unary/af_assignment.json
 data/webgpu/shader/execution/case-cache/unary/bool_conversion.json
@@ -100,3 +95,8 @@
 data/webgpu/shader/execution/case-cache/unary/i32_conversion.json
 data/webgpu/shader/execution/case-cache/unary/u32_complement.json
 data/webgpu/shader/execution/case-cache/unary/u32_conversion.json
+data/webgpu/shader/execution/case-cache/unpack2x16float.json
+data/webgpu/shader/execution/case-cache/unpack2x16snorm.json
+data/webgpu/shader/execution/case-cache/unpack2x16unorm.json
+data/webgpu/shader/execution/case-cache/unpack4x8snorm.json
+data/webgpu/shader/execution/case-cache/unpack4x8unorm.json
diff --git a/tools/src/cmd/cts/build_cache/build_cache.go b/tools/src/cmd/cts/build_cache/build_cache.go
new file mode 100644
index 0000000..07f9f9b
--- /dev/null
+++ b/tools/src/cmd/cts/build_cache/build_cache.go
@@ -0,0 +1,76 @@
+// Copyright 2023 The Dawn Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package build_cache
+
+import (
+	"context"
+	"flag"
+	"fmt"
+	"io/ioutil"
+	"path/filepath"
+	"strings"
+
+	"dawn.googlesource.com/dawn/tools/src/cmd/cts/common"
+	"dawn.googlesource.com/dawn/tools/src/fileutils"
+)
+
+func init() {
+	common.Register(&cmd{})
+}
+
+type cmd struct {
+	flags struct {
+		nodePath     string
+		ctsDir       string
+		tarGzOut     string
+		cacheListOut string
+	}
+}
+
+func (cmd) Name() string { return "build-cache" }
+
+func (cmd) Desc() string { return "builds the CTS test case cache.tar.gz file" }
+
+func (c *cmd) RegisterFlags(ctx context.Context, cfg common.Config) ([]string, error) {
+	dawnRoot := fileutils.DawnRoot()
+	ctsPath := filepath.Join(dawnRoot, "third_party", "webgpu-cts")
+	cacheTarGzPath := filepath.Join(dawnRoot, "webgpu-cts", "cache.tar.gz")
+	cacheListPath := filepath.Join(dawnRoot, "third_party", "gn", "webgpu-cts", "cache_list.txt")
+	flag.StringVar(&c.flags.nodePath, "node", fileutils.NodePath(), "path to node")
+	flag.StringVar(&c.flags.ctsDir, "cts", ctsPath, "path to CTS")
+	flag.StringVar(&c.flags.tarGzOut, "out-tar", cacheTarGzPath, "path to cache.tar.gz output file")
+	flag.StringVar(&c.flags.cacheListOut, "out-list", cacheListPath, "path to cache_list.txt output file")
+
+	return nil, nil
+}
+
+func (c *cmd) Run(ctx context.Context, cfg common.Config) error {
+	cache, err := common.BuildCache(ctx, c.flags.ctsDir, c.flags.nodePath)
+
+	if err != nil {
+		return fmt.Errorf("failed to build cache: %w", err)
+	}
+
+	if err := ioutil.WriteFile(c.flags.tarGzOut, cache.TarGz, 0666); err != nil {
+		return fmt.Errorf("failed to write cache to '%v': %w", c.flags.tarGzOut, err)
+	}
+
+	list := strings.Join(cache.FileList, "\n") + "\n"
+	if err := ioutil.WriteFile(c.flags.cacheListOut, []byte(list), 0666); err != nil {
+		return fmt.Errorf("failed to write cache to '%v': %w", c.flags.cacheListOut, err)
+	}
+
+	return nil
+}
diff --git a/tools/src/cmd/cts/common/cache.go b/tools/src/cmd/cts/common/cache.go
new file mode 100644
index 0000000..d674089
--- /dev/null
+++ b/tools/src/cmd/cts/common/cache.go
@@ -0,0 +1,161 @@
+// Copyright 2023 The Dawn Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package common
+
+import (
+	"archive/tar"
+	"bytes"
+	"compress/gzip"
+	"context"
+	"fmt"
+	"io"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"runtime"
+	"sort"
+	"sync"
+
+	"dawn.googlesource.com/dawn/tools/src/glob"
+)
+
+// Cache is the result of BuildCache()
+type Cache struct {
+	// The list of files
+	FileList []string
+	// The .tar.gz content
+	TarGz []byte
+}
+
+// BuildCache builds the CTS case cache
+func BuildCache(ctx context.Context, ctsDir, nodePath string) (*Cache, error) {
+	// Create a temporary directory for cache files
+	cacheDir, err := os.MkdirTemp("", "dawn-cts-cache")
+	if err != nil {
+		return nil, err
+	}
+	defer os.RemoveAll(cacheDir)
+
+	// Build the case cache .json files with numCPUs concurrent processes
+	errs := make(chan error, 8)
+	numCPUs := runtime.NumCPU()
+	wg := sync.WaitGroup{}
+	wg.Add(numCPUs)
+	for i := 0; i < numCPUs; i++ {
+		go func(i int) {
+			defer wg.Done()
+			// Run 'src/common/tools/gen_cache.ts' to build the case cache
+			cmd := exec.CommandContext(ctx, nodePath,
+				"-e", "require('./src/common/tools/setup-ts-in-node.js');require('./src/common/tools/gen_cache.ts');",
+				"--", // Start of arguments
+				// src/common/runtime/helper/sys.ts expects 'node file.js <args>'
+				// and slices away the first two arguments. When running with '-e', args
+				// start at 1, so just inject a placeholder argument.
+				"placeholder-arg",
+				cacheDir,
+				"src/webgpu",
+				"--verbose",
+				"--nth", fmt.Sprintf("%v/%v", i, numCPUs),
+			)
+			out := &bytes.Buffer{}
+			cmd.Stdout = io.MultiWriter(out, os.Stdout)
+			cmd.Stderr = out
+			cmd.Dir = ctsDir
+
+			if err := cmd.Run(); err != nil {
+				errs <- fmt.Errorf("failed to generate case cache: %w\n%v", err, out.String())
+			}
+		}(i)
+	}
+
+	go func() {
+		wg.Wait()
+		close(errs)
+	}()
+
+	for err := range errs {
+		return nil, err
+	}
+
+	files, err := glob.Glob(filepath.Join(cacheDir, "**.json"))
+	if err != nil {
+		return nil, fmt.Errorf("failed to glob cached files: %w", err)
+	}
+
+	// Absolute path -> relative path
+	for i, absPath := range files {
+		relPath, err := filepath.Rel(cacheDir, absPath)
+		if err != nil {
+			return nil, fmt.Errorf("failed to get relative path for '%v': %w", absPath, err)
+		}
+		files[i] = relPath
+	}
+
+	sort.Strings(files)
+
+	tarBuffer := &bytes.Buffer{}
+	t := tar.NewWriter(tarBuffer)
+
+	for _, relPath := range files {
+		absPath := filepath.Join(cacheDir, relPath)
+
+		fi, err := os.Stat(absPath)
+		if err != nil {
+			return nil, fmt.Errorf("failed to stat '%v': %w", relPath, err)
+		}
+
+		header, err := tar.FileInfoHeader(fi, relPath)
+		if err != nil {
+			return nil, fmt.Errorf("failed to create tar file info header for '%v': %w", relPath, err)
+		}
+
+		header.Name = relPath // Use the relative path
+
+		if err := t.WriteHeader(header); err != nil {
+			return nil, fmt.Errorf("failed to write tar header for '%v': %w", relPath, err)
+		}
+
+		file, err := os.Open(absPath)
+		if err != nil {
+			return nil, fmt.Errorf("failed to open '%v': %w", relPath, err)
+		}
+		defer file.Close()
+
+		if _, err := io.Copy(t, file); err != nil {
+			return nil, fmt.Errorf("failed to write '%v' to tar: %w", relPath, err)
+		}
+
+		if err := t.Flush(); err != nil {
+			return nil, fmt.Errorf("failed to flush tar for '%v': %w", relPath, err)
+		}
+	}
+
+	if err := t.Close(); err != nil {
+		return nil, fmt.Errorf("failed to close the tar: %w", err)
+	}
+
+	compressed := &bytes.Buffer{}
+	gz, err := gzip.NewWriterLevel(compressed, gzip.BestCompression)
+	if err != nil {
+		return nil, fmt.Errorf("failed to create a gzip writer: %w", err)
+	}
+	if _, err := gz.Write(tarBuffer.Bytes()); err != nil {
+		return nil, fmt.Errorf("failed to write to gzip writer: %w", err)
+	}
+	if err := gz.Close(); err != nil {
+		return nil, fmt.Errorf("failed to close the gzip writer: %w", err)
+	}
+	return &Cache{files, compressed.Bytes()}, nil
+}
diff --git a/tools/src/cmd/cts/main.go b/tools/src/cmd/cts/main.go
index e7e4eba..1ae4329 100644
--- a/tools/src/cmd/cts/main.go
+++ b/tools/src/cmd/cts/main.go
@@ -28,6 +28,7 @@
 	"dawn.googlesource.com/dawn/tools/src/subcmd"
 
 	// Register sub-commands
+	_ "dawn.googlesource.com/dawn/tools/src/cmd/cts/build_cache"
 	_ "dawn.googlesource.com/dawn/tools/src/cmd/cts/export"
 	_ "dawn.googlesource.com/dawn/tools/src/cmd/cts/format"
 	_ "dawn.googlesource.com/dawn/tools/src/cmd/cts/merge"
diff --git a/tools/src/cmd/cts/roll/roll.go b/tools/src/cmd/cts/roll/roll.go
index a013873..fc4070a 100644
--- a/tools/src/cmd/cts/roll/roll.go
+++ b/tools/src/cmd/cts/roll/roll.go
@@ -61,6 +61,7 @@
 	tsSourcesRelPath     = "third_party/gn/webgpu-cts/ts_sources.txt"
 	testListRelPath      = "third_party/gn/webgpu-cts/test_list.txt"
 	cacheListRelPath     = "third_party/gn/webgpu-cts/cache_list.txt"
+	cacheTarGz           = "webgpu-cts/cache.tar.gz"
 	resourceFilesRelPath = "third_party/gn/webgpu-cts/resource_files.txt"
 	webTestsPath         = "webgpu-cts/webtests"
 	refMain              = "refs/heads/main"
@@ -96,11 +97,10 @@
 func (c *cmd) RegisterFlags(ctx context.Context, cfg common.Config) ([]string, error) {
 	gitPath, _ := exec.LookPath("git")
 	npmPath, _ := exec.LookPath("npm")
-	nodePath, _ := exec.LookPath("node")
 	c.flags.auth.Register(flag.CommandLine, commonAuth.DefaultAuthOptions())
 	flag.StringVar(&c.flags.gitPath, "git", gitPath, "path to git")
 	flag.StringVar(&c.flags.npmPath, "npm", npmPath, "path to npm")
-	flag.StringVar(&c.flags.nodePath, "node", nodePath, "path to node")
+	flag.StringVar(&c.flags.nodePath, "node", fileutils.NodePath(), "path to node")
 	flag.StringVar(&c.flags.cacheDir, "cache", common.DefaultCacheDir, "path to the results cache")
 	flag.BoolVar(&c.flags.force, "force", false, "create a new roll, even if CTS is up to date")
 	flag.BoolVar(&c.flags.rebuild, "rebuild", false, "rebuild the expectation file from scratch")
@@ -683,11 +683,24 @@
 		}
 	}()
 
+	// Generate case cache
+	wg.Add(1)
+	go func() {
+		defer wg.Done()
+		if caseCache, err := common.BuildCache(ctx, r.ctsDir, r.flags.nodePath); err == nil {
+			mutex.Lock()
+			defer mutex.Unlock()
+			files[cacheListRelPath] = strings.Join(caseCache.FileList, "\n") + "\n"
+			files[cacheTarGz] = string(caseCache.TarGz)
+		} else {
+			errs <- fmt.Errorf("failed to create case cache: %v", err)
+		}
+	}()
+
 	// Generate typescript sources list, test list, resources file list.
 	for relPath, generator := range map[string]func(context.Context) (string, error){
 		tsSourcesRelPath:     r.genTSDepList,
 		testListRelPath:      r.genTestList,
-		cacheListRelPath:     r.genCacheList,
 		resourceFilesRelPath: r.genResourceFilesList,
 	} {
 		relPath, generator := relPath, generator // Capture values, not iterators
@@ -802,40 +815,6 @@
 	return strings.Join(tests, "\n"), nil
 }
 
-// genCacheList returns the file list of cached data
-func (r *roller) genCacheList(ctx context.Context) (string, error) {
-	// Run 'src/common/runtime/cmdline.ts' to obtain the full test list
-	cmd := exec.CommandContext(ctx, r.flags.nodePath,
-		"-e", "require('./src/common/tools/setup-ts-in-node.js');require('./src/common/tools/gen_cache.ts');",
-		"--", // Start of arguments
-		// src/common/runtime/helper/sys.ts expects 'node file.js <args>'
-		// and slices away the first two arguments. When running with '-e', args
-		// start at 1, so just inject a placeholder argument.
-		"placeholder-arg",
-		".",
-		"src/webgpu",
-		"--list",
-	)
-	cmd.Dir = r.ctsDir
-
-	stderr := bytes.Buffer{}
-	cmd.Stderr = &stderr
-
-	out, err := cmd.Output()
-	if err != nil {
-		return "", fmt.Errorf("failed to generate cache list: %w\n%v", err, stderr.String())
-	}
-
-	files := []string{}
-	for _, file := range strings.Split(string(out), "\n") {
-		if file != "" {
-			files = append(files, strings.TrimPrefix(file, "./"))
-		}
-	}
-
-	return strings.Join(files, "\n") + "\n", nil
-}
-
 // genResourceFilesList returns a list of resource files, for the CTS checkout at r.ctsDir
 // This list can be used to populate the resource_files.txt file.
 func (r *roller) genResourceFilesList(ctx context.Context) (string, error) {
diff --git a/tools/src/cmd/run-cts/main.go b/tools/src/cmd/run-cts/main.go
index 6b5092c..a113789 100644
--- a/tools/src/cmd/run-cts/main.go
+++ b/tools/src/cmd/run-cts/main.go
@@ -162,7 +162,7 @@
 	var flags dawnNodeFlags
 	flag.StringVar(&bin, "bin", defaultBinPath(), "path to the directory holding cts.js and dawn.node")
 	flag.StringVar(&cts, "cts", defaultCtsPath(), "root directory of WebGPU CTS")
-	flag.StringVar(&node, "node", defaultNodePath(), "path to node executable")
+	flag.StringVar(&node, "node", fileutils.NodePath(), "path to node executable")
 	flag.StringVar(&npx, "npx", "", "path to npx executable")
 	flag.StringVar(&resultsPath, "output", "", "path to write test results file")
 	flag.StringVar(&expectationsPath, "expect", "", "path to expectations file")
@@ -1359,37 +1359,6 @@
 	return nil
 }
 
-// defaultNodePath looks for the node binary, first in dawn's third_party
-// directory, falling back to PATH. This is used as the default for the --node
-// command line flag.
-func defaultNodePath() string {
-	if dawnRoot := fileutils.DawnRoot(); dawnRoot != "" {
-		node := filepath.Join(dawnRoot, "third_party/node")
-		if info, err := os.Stat(node); err == nil && info.IsDir() {
-			path := ""
-			switch fmt.Sprintf("%v/%v", runtime.GOOS, runtime.GOARCH) { // See `go tool dist list`
-			case "darwin/amd64":
-				path = filepath.Join(node, "node-darwin-x64/bin/node")
-			case "darwin/arm64":
-				path = filepath.Join(node, "node-darwin-arm64/bin/node")
-			case "linux/amd64":
-				path = filepath.Join(node, "node-linux-x64/bin/node")
-			case "windows/amd64":
-				path = filepath.Join(node, "node.exe")
-			}
-			if _, err := os.Stat(path); err == nil {
-				return path
-			}
-		}
-	}
-
-	if path, err := exec.LookPath("node"); err == nil {
-		return path
-	}
-
-	return ""
-}
-
 // defaultBinPath looks for the binary output directory at <dawn>/out/active.
 // This is used as the default for the --bin command line flag.
 func defaultBinPath() string {
diff --git a/tools/src/fileutils/paths.go b/tools/src/fileutils/paths.go
index 741d3b6..a89bb1b 100644
--- a/tools/src/fileutils/paths.go
+++ b/tools/src/fileutils/paths.go
@@ -17,6 +17,7 @@
 import (
 	"fmt"
 	"os"
+	"os/exec"
 	"path/filepath"
 	"runtime"
 	"strings"
@@ -75,3 +76,33 @@
 	}
 	return path
 }
+
+// NodePath looks for the node binary, first in dawn's third_party directory,
+// falling back to PATH.
+func NodePath() string {
+	if dawnRoot := DawnRoot(); dawnRoot != "" {
+		node := filepath.Join(dawnRoot, "third_party/node")
+		if info, err := os.Stat(node); err == nil && info.IsDir() {
+			path := ""
+			switch fmt.Sprintf("%v/%v", runtime.GOOS, runtime.GOARCH) { // See `go tool dist list`
+			case "darwin/amd64":
+				path = filepath.Join(node, "node-darwin-x64/bin/node")
+			case "darwin/arm64":
+				path = filepath.Join(node, "node-darwin-arm64/bin/node")
+			case "linux/amd64":
+				path = filepath.Join(node, "node-linux-x64/bin/node")
+			case "windows/amd64":
+				path = filepath.Join(node, "node.exe")
+			}
+			if _, err := os.Stat(path); err == nil {
+				return path
+			}
+		}
+	}
+
+	if path, err := exec.LookPath("node"); err == nil {
+		return path
+	}
+
+	return ""
+}
diff --git a/webgpu-cts/cache.tar.gz b/webgpu-cts/cache.tar.gz
new file mode 100644
index 0000000..09eb0e5
--- /dev/null
+++ b/webgpu-cts/cache.tar.gz
Binary files differ
diff --git a/webgpu-cts/scripts/gen_cache.py b/webgpu-cts/scripts/gen_cache.py
index 4f3f276..b7ba018 100644
--- a/webgpu-cts/scripts/gen_cache.py
+++ b/webgpu-cts/scripts/gen_cache.py
@@ -15,37 +15,32 @@
 # limitations under the License.
 
 import argparse
+import tarfile
+import datetime
 import os
 import sys
 
-from dir_paths import node_dir
+
+def gen_cache(out_dir):
+    script_directory = os.path.dirname(os.path.abspath(sys.argv[0]))
+    tar = tarfile.open(os.path.join(script_directory, '../cache.tar.gz'))
+    tar.extractall(out_dir)
+    # Update timestamps
+    now = datetime.datetime.now().timestamp()
+    for name in tar.getnames():
+        path = os.path.join(out_dir, name)
+        os.utime(path, (now, now))
+    tar.close()
 
 
-def gen_cache(js_script, out_dir):
-    old_sys_path = sys.path
-    try:
-        sys.path = old_sys_path + [node_dir]
-        from node import RunNode
-    finally:
-        sys.path = old_sys_path
-
-    # Save the cwd. gen_cache.js needs to be run from a specific directory.
-    cwd = os.getcwd()
-    cts_dir = os.path.realpath(
-        os.path.join(cwd, os.path.dirname(js_script), '..', '..', '..'))
-    os.chdir(cts_dir)
-    RunNode([
-        os.path.join(cwd, js_script),
-        os.path.join(cwd, out_dir),
-        os.path.join('src-node', 'webgpu')
-    ])
-
-
-# Generate a cache for CTS runs.
+# Extract the cache for CTS runs.
 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
+
+    # TODO(bclayton): Unused. Remove
     parser.add_argument('js_script', help='Path to gen_cache.js')
+
     parser.add_argument('out_dir', help='Output directory for the cache')
     args = parser.parse_args()
 
-    gen_cache(args.js_script, args.out_dir)
+    gen_cache(args.out_dir)