From 183bbe94cd55fec7c27431b7f4c1339a3186df3f Mon Sep 17 00:00:00 2001
From: daurnimator
Date: Sun, 12 Nov 2017 14:35:34 +1100
Subject: src/defs.js: the highest Unicode codepoint is U+10FFFF

UTF-8 dropped support for 5- and 6-byte sequences back in 2003 (RFC 3629).

As we're converting from JavaScript strings (which are UTF-16),
codepoints above U+10FFFF can't occur anyway.
---
 src/defs.js | 15 +--------------
 1 file changed, 1 insertion(+), 14 deletions(-)

diff --git a/src/defs.js b/src/defs.js
index f76471a..6fb7d3f 100644
--- a/src/defs.js
+++ b/src/defs.js
@@ -203,21 +203,8 @@ const to_luastring = function(str, cache) {
             outU8Array[outIdx++] = 0xE0 | (u >> 12);
             outU8Array[outIdx++] = 0x80 | ((u >> 6) & 63);
             outU8Array[outIdx++] = 0x80 | (u & 63);
-        } else if (u <= 0x1FFFFF) {
-            outU8Array[outIdx++] = 0xF0 | (u >> 18);
-            outU8Array[outIdx++] = 0x80 | ((u >> 12) & 63);
-            outU8Array[outIdx++] = 0x80 | ((u >> 6) & 63);
-            outU8Array[outIdx++] = 0x80 | (u & 63);
-        } else if (u <= 0x3FFFFFF) {
-            outU8Array[outIdx++] = 0xF8 | (u >> 24);
-            outU8Array[outIdx++] = 0x80 | ((u >> 18) & 63);
-            outU8Array[outIdx++] = 0x80 | ((u >> 12) & 63);
-            outU8Array[outIdx++] = 0x80 | ((u >> 6) & 63);
-            outU8Array[outIdx++] = 0x80 | (u & 63);
         } else {
-            outU8Array[outIdx++] = 0xFC | (u >> 30);
-            outU8Array[outIdx++] = 0x80 | ((u >> 24) & 63);
-            outU8Array[outIdx++] = 0x80 | ((u >> 18) & 63);
+            outU8Array[outIdx++] = 0xF0 | (u >> 18);
             outU8Array[outIdx++] = 0x80 | ((u >> 12) & 63);
             outU8Array[outIdx++] = 0x80 | ((u >> 6) & 63);
             outU8Array[outIdx++] = 0x80 | (u & 63);
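
For context, a minimal standalone sketch of why four bytes suffice (not
part of the patch; the helper name encodeAstral is made up for
illustration). A JavaScript string stores astral characters as UTF-16
surrogate pairs, and the largest codepoint a pair can express is
0x10000 + (0x3FF << 10 | 0x3FF) = 0x10FFFF, which fits in the 4-byte
UTF-8 form emitted by the patched else branch:

    // Hypothetical sketch (not from src/defs.js): combine a UTF-16
    // surrogate pair into a codepoint and emit the same 4-byte UTF-8
    // form the patch keeps.
    function encodeAstral(hi, lo) {
        // Surrogate pair -> codepoint; the maximum reachable value is
        // 0x10000 + (0x3FF << 10) + 0x3FF = 0x10FFFF.
        const u = 0x10000 + ((hi - 0xD800) << 10) + (lo - 0xDC00);
        return [
            0xF0 | (u >> 18),        // leading byte:      11110xxx
            0x80 | ((u >> 12) & 63), // continuation byte: 10xxxxxx
            0x80 | ((u >> 6) & 63),
            0x80 | (u & 63)
        ];
    }

    // U+1F4A9 appears in a JS string as the pair 0xD83D, 0xDCA9:
    const s = "\uD83D\uDCA9";
    console.log(encodeAstral(s.charCodeAt(0), s.charCodeAt(1)));
    // -> [ 240, 159, 146, 169 ], i.e. 0xF0 0x9F 0x92 0xA9

Since charCodeAt can never yield a codepoint above 0x10FFFF, the removed
5- and 6-byte branches (lead bytes 0xF8 and 0xFC) were unreachable.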