webgpu.h: use UINT32_MAX to define the WGPU_DEPTH_SLICE_UNDEFINED macro
Bug: dawn:2226
Change-Id: Id4bb8cca800d001883d40c792018282feb7061f3
Reviewed-on: https://dawn-review.googlesource.com/c/dawn/+/164962
Reviewed-by: Austin Eng <enga@chromium.org>
Kokoro: Kokoro <noreply+kokoro@google.com>
Commit-Queue: Austin Eng <enga@chromium.org>
Reviewed-by: Corentin Wallez <cwallez@chromium.org>
diff --git a/dawn.json b/dawn.json
index 9cb54c3..73394e7 100644
--- a/dawn.json
+++ b/dawn.json
@@ -3756,7 +3756,7 @@
"depth slice undefined" : {
"category": "constant",
"type": "uint32_t",
- "value": "(0xffffffffUL)"
+ "value": "UINT32_MAX"
},
"query set index undefined" : {
"category": "constant",