refactor(ext/kv): align error messages (#25500)

Towards https://github.com/denoland/deno/issues/25269
Ian Bull authored on 2024-09-19 00:50:09 -07:00, committed by GitHub
parent 3035cdddb8
commit d420829908
4 changed files with 58 additions and 54 deletions
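
The change sweeps the KV error strings into the sentence-cased wording used elsewhere in Deno and, where cheap, appends the offending value. Only the message text changes; the error classes (`TypeError`, `RangeError`, `type_error` on the Rust side) stay the same. Below is a minimal sketch (not part of this commit) of how the new wording surfaces to callers, assuming a build containing this commit and the unstable KV API enabled via `deno run --unstable-kv`:

```ts
// Hypothetical demo script: print the reworded KV messages.
const kv = await Deno.openKv(":memory:");

try {
  new Deno.KvU64(-1n); // now: "Value must be a positive bigint: received -1"
} catch (e) {
  console.log((e as RangeError).message);
}

try {
  await kv.enqueue("job", { delay: -1 }); // now: "Delay must be >= 0: received -1"
} catch (e) {
  console.log((e as TypeError).message);
}

kv.close();
```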

View File

@@ -60,13 +60,15 @@ const maxQueueDelay = 30 * 24 * 60 * 60 * 1000;
 function validateQueueDelay(delay: number) {
   if (delay < 0) {
-    throw new TypeError("delay cannot be negative");
+    throw new TypeError(`Delay must be >= 0: received ${delay}`);
   }
   if (delay > maxQueueDelay) {
-    throw new TypeError("delay cannot be greater than 30 days");
+    throw new TypeError(
+      `Delay cannot be greater than 30 days: received ${delay}`,
+    );
   }
   if (NumberIsNaN(delay)) {
-    throw new TypeError("delay cannot be NaN");
+    throw new TypeError("Delay cannot be NaN");
   }
 }
@@ -75,7 +77,7 @@ const maxQueueBackoffInterval = 60 * 60 * 1000;
 function validateBackoffSchedule(backoffSchedule: number[]) {
   if (backoffSchedule.length > maxQueueBackoffIntervals) {
-    throw new TypeError("invalid backoffSchedule");
+    throw new TypeError("Invalid backoffSchedule");
   }
   for (let i = 0; i < backoffSchedule.length; ++i) {
     const interval = backoffSchedule[i];
@@ -83,7 +85,7 @@ function validateBackoffSchedule(backoffSchedule: number[]) {
       interval < 0 || interval > maxQueueBackoffInterval ||
       NumberIsNaN(interval)
     ) {
-      throw new TypeError("invalid backoffSchedule");
+      throw new TypeError("Invalid backoffSchedule");
     }
   }
 }
@@ -115,7 +117,7 @@ class Kv {
   constructor(rid: number = undefined, symbol: symbol = undefined) {
     if (kvSymbol !== symbol) {
       throw new TypeError(
-        "Deno.Kv can not be constructed, use Deno.openKv instead.",
+        "Deno.Kv can not be constructed: use Deno.openKv instead",
       );
     }
     this.#rid = rid;
@@ -213,7 +215,7 @@ class Kv {
     } = { __proto__: null },
   ): KvListIterator {
     if (options.limit !== undefined && options.limit <= 0) {
-      throw new Error("limit must be positive");
+      throw new Error(`Limit must be positive: received ${options.limit}`);
     }
     let batchSize = options.batchSize ?? (options.limit ?? 100);
@@ -291,7 +293,7 @@ class Kv {
     handler: (message: unknown) => Promise<void> | void,
   ): Promise<void> {
     if (this.#isClosed) {
-      throw new Error("already closed");
+      throw new Error("Queue already closed");
     }
     const finishMessageOps = new SafeMap<number, Promise<void>>();
     while (true) {
@@ -368,7 +370,7 @@ class Kv {
         if (updates[i] === "unchanged") {
           if (lastEntries[i] === undefined) {
             throw new Error(
-              "watch: invalid unchanged update (internal error)",
+              "'watch': invalid unchanged update (internal error)",
             );
           }
           continue;
@@ -449,7 +451,7 @@ class AtomicOperation {
         case "delete":
           type = "delete";
          if (mutation.value) {
-            throw new TypeError("invalid mutation 'delete' with value");
+            throw new TypeError("Invalid mutation 'delete' with value");
          }
          break;
        case "set":
@@ -462,7 +464,7 @@ class AtomicOperation {
        case "max":
          type = mutation.type;
          if (!ObjectHasOwn(mutation, "value")) {
-            throw new TypeError(`invalid mutation '${type}' without value`);
+            throw new TypeError(`Invalid mutation '${type}' without value`);
          }
          value = serializeValue(mutation.value);
          break;
@@ -559,7 +561,7 @@ class AtomicOperation {
   then() {
     throw new TypeError(
-      "`Deno.AtomicOperation` is not a promise. Did you forget to call `commit()`?",
+      "'Deno.AtomicOperation' is not a promise: did you forget to call 'commit()'",
     );
   }
 }
@@ -572,13 +574,15 @@ class KvU64 {
   constructor(value: bigint) {
     if (typeof value !== "bigint") {
-      throw new TypeError("value must be a bigint");
+      throw new TypeError(`Value must be a bigint: received ${typeof value}`);
     }
     if (value < MIN_U64) {
-      throw new RangeError("value must be a positive bigint");
+      throw new RangeError(
+        `Value must be a positive bigint: received ${value}`,
+      );
     }
     if (value > MAX_U64) {
-      throw new RangeError("value must fit in a 64-bit unsigned integer");
+      throw new RangeError("Value must fit in a 64-bit unsigned integer");
     }
     this.value = value;
     ObjectFreeze(this);
@@ -709,7 +713,7 @@ class KvListIterator extends AsyncIterator
     if (prefix) {
       if (start && end) {
         throw new TypeError(
-          "Selector can not specify both 'start' and 'end' key when specifying 'prefix'.",
+          "Selector can not specify both 'start' and 'end' key when specifying 'prefix'",
        );
      }
      if (start) {
@@ -724,7 +728,7 @@ class KvListIterator extends AsyncIterator
       this.#selector = { start, end };
     } else {
       throw new TypeError(
-        "Selector must specify either 'prefix' or both 'start' and 'end' key.",
+        "Selector must specify either 'prefix' or both 'start' and 'end' key",
       );
     }
   }
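
These validators run in the JS bindings before any op is dispatched, so the new messages show up directly on the calling code path. A small hedged sketch exercising the `list()` limit check and the backoff-schedule check (again assuming `--unstable-kv`; `:memory:` is just a throwaway test database):

```ts
// Sketch: exercising the reworded JS-side validation errors.
const kv = await Deno.openKv(":memory:");

try {
  kv.list({ prefix: ["users"] }, { limit: 0 }); // "Limit must be positive: received 0"
} catch (e) {
  console.log((e as Error).message);
}

try {
  // 3_600_001 ms exceeds the one-hour per-interval cap, so: "Invalid backoffSchedule"
  await kv.enqueue("job", { backoffSchedule: [3_600_001] });
} catch (e) {
  console.log((e as TypeError).message);
}

kv.close();
```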

View File

@@ -279,7 +279,7 @@ where
   if ranges.len() > config.max_read_ranges {
     return Err(type_error(format!(
-      "too many ranges (max {})",
+      "Too many ranges (max {})",
       config.max_read_ranges
     )));
   }
@@ -309,7 +309,7 @@ where
   if total_entries > config.max_read_entries {
     return Err(type_error(format!(
-      "too many entries (max {})",
+      "Too many entries (max {})",
       config.max_read_entries
     )));
   }
@@ -391,7 +391,7 @@ where
   if keys.len() > config.max_watched_keys {
     return Err(type_error(format!(
-      "too many keys (max {})",
+      "Too many keys (max {})",
       config.max_watched_keys
     )));
   }
@@ -542,10 +542,10 @@ fn mutation_from_v8(
       MutationKind::SetSuffixVersionstampedKey(value.try_into()?)
     }
     (op, Some(_)) => {
-      return Err(type_error(format!("invalid mutation '{op}' with value")))
+      return Err(type_error(format!("Invalid mutation '{op}' with value")))
     }
     (op, None) => {
-      return Err(type_error(format!("invalid mutation '{op}' without value")))
+      return Err(type_error(format!("Invalid mutation '{op}' without value")))
     }
   };
   Ok(Mutation {
@@ -611,7 +611,7 @@ impl RawSelector {
       (Some(prefix), Some(start), None) => {
         if !start.starts_with(&prefix) || start.len() == prefix.len() {
           return Err(type_error(
-            "start key is not in the keyspace defined by prefix",
+            "Start key is not in the keyspace defined by prefix",
           ));
         }
         Ok(Self::Prefixed {
@@ -623,7 +623,7 @@ impl RawSelector {
       (Some(prefix), None, Some(end)) => {
         if !end.starts_with(&prefix) || end.len() == prefix.len() {
           return Err(type_error(
-            "end key is not in the keyspace defined by prefix",
+            "End key is not in the keyspace defined by prefix",
          ));
        }
        Ok(Self::Prefixed {
@@ -634,7 +634,7 @@ impl RawSelector {
       }
       (None, Some(start), Some(end)) => {
         if start > end {
-          return Err(type_error("start key is greater than end key"));
+          return Err(type_error("Start key is greater than end key"));
         }
         Ok(Self::Range { start, end })
       }
@@ -642,7 +642,7 @@ impl RawSelector {
        let end = start.iter().copied().chain(Some(0)).collect();
        Ok(Self::Range { start, end })
      }
-      _ => Err(type_error("invalid range")),
+      _ => Err(type_error("Invalid range")),
    }
  }
@@ -704,7 +704,7 @@ fn encode_cursor(
 ) -> Result<String, AnyError> {
   let common_prefix = selector.common_prefix();
   if !boundary_key.starts_with(common_prefix) {
-    return Err(type_error("invalid boundary key"));
+    return Err(type_error("Invalid boundary key"));
   }
   Ok(BASE64_URL_SAFE.encode(&boundary_key[common_prefix.len()..]))
 }
@@ -786,14 +786,14 @@ where
   if checks.len() > config.max_checks {
     return Err(type_error(format!(
-      "too many checks (max {})",
+      "Too many checks (max {})",
       config.max_checks
     )));
   }
   if mutations.len() + enqueues.len() > config.max_mutations {
     return Err(type_error(format!(
-      "too many mutations (max {})",
+      "Too many mutations (max {})",
       config.max_mutations
     )));
   }
@@ -807,7 +807,7 @@ where
     .into_iter()
     .map(|mutation| mutation_from_v8((mutation, current_timestamp)))
     .collect::<Result<Vec<Mutation>, AnyError>>()
-    .with_context(|| "invalid mutation")?;
+    .with_context(|| "Invalid mutation")?;
   let enqueues = enqueues
     .into_iter()
     .map(|e| enqueue_from_v8(e, current_timestamp))
@@ -848,14 +848,14 @@ where
   if total_payload_size > config.max_total_mutation_size_bytes {
     return Err(type_error(format!(
-      "total mutation size too large (max {} bytes)",
+      "Total mutation size too large (max {} bytes)",
       config.max_total_mutation_size_bytes
     )));
   }
   if total_key_size > config.max_total_key_size_bytes {
     return Err(type_error(format!(
-      "total key size too large (max {} bytes)",
+      "Total key size too large (max {} bytes)",
       config.max_total_key_size_bytes
     )));
   }
@@ -889,7 +889,7 @@ fn op_kv_encode_cursor(
 fn check_read_key_size(key: &[u8], config: &KvConfig) -> Result<(), AnyError> {
   if key.len() > config.max_read_key_size_bytes {
     Err(type_error(format!(
-      "key too large for read (max {} bytes)",
+      "Key too large for read (max {} bytes)",
       config.max_read_key_size_bytes
     )))
   } else {
@@ -903,7 +903,7 @@ fn check_write_key_size(
 ) -> Result<usize, AnyError> {
   if key.len() > config.max_write_key_size_bytes {
     Err(type_error(format!(
-      "key too large for write (max {} bytes)",
+      "Key too large for write (max {} bytes)",
       config.max_write_key_size_bytes
     )))
   } else {
@@ -923,7 +923,7 @@ fn check_value_size(
   if payload.len() > config.max_value_size_bytes {
     Err(type_error(format!(
-      "value too large (max {} bytes)",
+      "Value too large (max {} bytes)",
       config.max_value_size_bytes
     )))
   } else {
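
These Rust-side `type_error`s cross the op boundary unchanged and arrive in JavaScript as `TypeError`s, so code (and tests) that matched on the old lowercase text needs the same one-character update. A hedged sketch of what that looks like from user code; the `max 10` read-range limit is the value exercised by the tests below and may differ under a custom `KvConfig`:

```ts
// Sketch: a Rust-side limit error observed from JS (assumes --unstable-kv).
const kv = await Deno.openKv(":memory:");

const keys: Deno.KvKey[] = Array.from({ length: 11 }, (_, i) => ["k", i]);
try {
  await kv.getMany(keys); // exceeds the read-range limit
} catch (e) {
  // Previously "too many ranges (max 10)"; now "Too many ranges (max 10)".
  console.log(e instanceof TypeError, (e as TypeError).message);
}

kv.close();
```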

View File

@@ -8,6 +8,6 @@ Deno.test({}, async function queueTestDbClose() {
     await db.listenQueue(() => {});
     assertFalse(false);
   } catch (e) {
-    assertEquals((e as Error).message, "already closed");
+    assertEquals((e as Error).message, "Queue already closed");
   }
 });

View File

@@ -707,7 +707,7 @@ dbTest("list prefix with start equal to prefix", async (db) => {
   await assertRejects(
     async () => await collect(db.list({ prefix: ["a"], start: ["a"] })),
     TypeError,
-    "start key is not in the keyspace defined by prefix",
+    "Start key is not in the keyspace defined by prefix",
   );
 });
@@ -716,7 +716,7 @@ dbTest("list prefix with start out of bounds", async (db) => {
   await assertRejects(
     async () => await collect(db.list({ prefix: ["b"], start: ["a"] })),
     TypeError,
-    "start key is not in the keyspace defined by prefix",
+    "Start key is not in the keyspace defined by prefix",
   );
 });
@@ -740,7 +740,7 @@ dbTest("list prefix with end equal to prefix", async (db) => {
   await assertRejects(
     async () => await collect(db.list({ prefix: ["a"], end: ["a"] })),
     TypeError,
-    "end key is not in the keyspace defined by prefix",
+    "End key is not in the keyspace defined by prefix",
   );
 });
@@ -749,7 +749,7 @@ dbTest("list prefix with end out of bounds", async (db) => {
   await assertRejects(
     async () => await collect(db.list({ prefix: ["a"], end: ["b"] })),
     TypeError,
-    "end key is not in the keyspace defined by prefix",
+    "End key is not in the keyspace defined by prefix",
   );
 });
@@ -1061,7 +1061,7 @@ dbTest("list range with start greater than end", async (db) => {
   await assertRejects(
     async () => await collect(db.list({ start: ["b"], end: ["a"] })),
     TypeError,
-    "start key is greater than end key",
+    "Start key is greater than end key",
   );
 });
@@ -1177,13 +1177,13 @@ dbTest("key size limit", async (db) => {
   await assertRejects(
     async () => await db.set([firstInvalidKey], 1),
     TypeError,
-    "key too large for write (max 2048 bytes)",
+    "Key too large for write (max 2048 bytes)",
   );
   await assertRejects(
     async () => await db.get([firstInvalidKey]),
     TypeError,
-    "key too large for read (max 2049 bytes)",
+    "Key too large for read (max 2049 bytes)",
   );
 });
@@ -1201,7 +1201,7 @@ dbTest("value size limit", async (db) => {
   await assertRejects(
     async () => await db.set(["b"], firstInvalidValue),
     TypeError,
-    "value too large (max 65536 bytes)",
+    "Value too large (max 65536 bytes)",
   );
 });
@@ -1225,7 +1225,7 @@ dbTest("operation size limit", async (db) => {
   await assertRejects(
     async () => await db.getMany(firstInvalidKeys),
     TypeError,
-    "too many ranges (max 10)",
+    "Too many ranges (max 10)",
   );
   const res2 = await collect(db.list({ prefix: ["a"] }, { batchSize: 1000 }));
@@ -1234,7 +1234,7 @@ dbTest("operation size limit", async (db) => {
   await assertRejects(
     async () => await collect(db.list({ prefix: ["a"] }, { batchSize: 1001 })),
     TypeError,
-    "too many entries (max 1000)",
+    "Too many entries (max 1000)",
   );
   // when batchSize is not specified, limit is used but is clamped to 500
@@ -1271,7 +1271,7 @@ dbTest("operation size limit", async (db) => {
         .commit();
     },
     TypeError,
-    "too many checks (max 100)",
+    "Too many checks (max 100)",
   );
   const validMutateKeys: Deno.KvKey[] = new Array(1000).fill(0).map((
@@ -1311,7 +1311,7 @@ dbTest("operation size limit", async (db) => {
         .commit();
     },
     TypeError,
-    "too many mutations (max 1000)",
+    "Too many mutations (max 1000)",
   );
 });
@@ -1339,7 +1339,7 @@ dbTest("total mutation size limit", async (db) => {
       await atomic.commit();
     },
     TypeError,
-    "total mutation size too large (max 819200 bytes)",
+    "Total mutation size too large (max 819200 bytes)",
   );
 });
@@ -1354,7 +1354,7 @@ dbTest("total key size limit", async (db) => {
   await assertRejects(
     () => atomic.commit(),
     TypeError,
-    "total key size too large (max 81920 bytes)",
+    "Total key size too large (max 81920 bytes)",
   );
 });
@@ -1655,7 +1655,7 @@ queueTest("queue retries with backoffSchedule", async (db) => {
   let count = 0;
   const listener = db.listenQueue((_msg) => {
     count += 1;
-    throw new TypeError("dequeue error");
+    throw new TypeError("Dequeue error");
   });
   try {
     await db.enqueue("test", { backoffSchedule: [1] });
@@ -1945,20 +1945,20 @@ Deno.test({
   },
 });
-dbTest("invalid backoffSchedule", async (db) => {
+dbTest("Invalid backoffSchedule", async (db) => {
   await assertRejects(
     async () => {
       await db.enqueue("foo", { backoffSchedule: [1, 1, 1, 1, 1, 1] });
     },
     TypeError,
-    "invalid backoffSchedule",
+    "Invalid backoffSchedule",
   );
   await assertRejects(
     async () => {
       await db.enqueue("foo", { backoffSchedule: [3600001] });
     },
     TypeError,
-    "invalid backoffSchedule",
+    "Invalid backoffSchedule",
   );
 });