mir_server/Gateway/srvlib/3rd/lua/llex.c
/*
** $Id: llex.c 11549 2011-04-01 10:35:32Z miros $
** Lexical Analyzer
** See Copyright Notice in lua.h
*/
#include <ctype.h>
#include <locale.h>
#include <string.h>
#define llex_c
#define LUA_CORE
#include "lua.h"
#include "ldo.h"
#include "llex.h"
#include "lobject.h"
#include "lparser.h"
#include "lstate.h"
#include "lstring.h"
#include "ltable.h"
#include "lzio.h"
#define next(ls) (ls->current = zgetc(ls->z))
#define currIsNewline(ls) (ls->current == '\n' || ls->current == '\r')
/* ORDER RESERVED */
const char *const luaX_tokens [] = {
"and", "break", "do", "else", "elseif",
"end", "false", "for", "function", "if",
"in", "local", "nil", "not", "or", "repeat",
"return", "then", "true", "until", "while",
"..", "...", "==", ">=", "<=", "~=",
"<number>", "<name>", "<string>", "<eof>",
NULL
};
#define save_and_next(ls) (save(ls, ls->current), next(ls))
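/* append character `c' to the token buffer, doubling the buffer when it is full */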
static void save (LexState *ls, int c) {
Mbuffer *b = ls->buff;
if (b->n + 1 > b->buffsize) {
size_t newsize;
if (b->buffsize >= MAX_SIZET/2)
luaX_lexerror(ls, "lexical element too long", 0);
newsize = b->buffsize * 2;
luaZ_resizebuffer(ls->L, b, newsize);
}
b->buffer[b->n++] = cast(char, c);
}
void luaX_init (lua_State *L) {
int i;
for (i=0; i<NUM_RESERVED; i++) {
TString *ts = luaS_new(L, luaX_tokens[i]);
luaS_fix(ts); /* reserved words are never collected */
lua_assert(strlen(luaX_tokens[i])+1 <= TOKEN_LEN);
ts->tsv.reserved = cast_byte(i+1); /* reserved word */
}
}
#define MAXSRC 80
const char *luaX_token2str (LexState *ls, int token) {
if (token < FIRST_RESERVED) {
lua_assert(token == cast(unsigned char, token));
return (iscntrl(token)) ? luaO_pushfstring(ls->L, "char(%d)", token) :
luaO_pushfstring(ls->L, "%c", token);
}
else
return luaX_tokens[token-FIRST_RESERVED];
}
static const char *txtToken (LexState *ls, int token) {
switch (token) {
case TK_NAME:
case TK_STRING:
case TK_NUMBER:
save(ls, '\0');
return luaZ_buffer(ls->buff);
default:
return luaX_token2str(ls, token);
}
}
void luaX_lexerror (LexState *ls, const char *msg, int token) {
char buff[MAXSRC];
luaO_chunkid(buff, getstr(ls->source), MAXSRC);
msg = luaO_pushfstring(ls->L, "%s:%d: %s", buff, ls->linenumber, msg);
if (token)
luaO_pushfstring(ls->L, "%s near " LUA_QS, msg, txtToken(ls, token));
luaD_throw(ls->L, LUA_ERRSYNTAX);
}
void luaX_syntaxerror (LexState *ls, const char *msg) {
luaX_lexerror(ls, msg, ls->t.token);
}
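/* create a new string and anchor it in ls->fs->h so that it cannot be collected during compilation */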
TString *luaX_newstring (LexState *ls, const char *str, size_t l) {
lua_State *L = ls->L;
TString *ts = luaS_newlstr(L, str, l);
TValue *o = luaH_setstr(L, ls->fs->h, ts); /* entry for `str' */
if (ttisnil(o)) {
setbvalue(o, 1); /* make sure `str' will not be collected */
luaC_checkGC(L);
}
return ts;
}
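/* increment the line counter, skipping a '\n', '\r', '\r\n' or '\n\r' sequence */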
static void inclinenumber (LexState *ls) {
int old = ls->current;
lua_assert(currIsNewline(ls));
next(ls); /* skip `\n' or `\r' */
if (currIsNewline(ls) && ls->current != old)
next(ls); /* skip `\n\r' or `\r\n' */
if (++ls->linenumber >= MAX_INT)
luaX_syntaxerror(ls, "chunk has too many lines");
}
void luaX_setinput (lua_State *L, LexState *ls, ZIO *z, TString *source) {
ls->decpoint = '.';
ls->L = L;
ls->lookahead.token = TK_EOS; /* no look-ahead token */
ls->z = z;
ls->fs = NULL;
ls->linenumber = 1;
ls->lastline = 1;
ls->source = source;
ls->prevtks[0].token = TK_EOS; /* no prev token */
ls->prevtks[1].token = TK_EOS; /* no prev token */
luaZ_resizebuffer(ls->L, ls->buff, LUA_MINBUFFER); /* initialize buffer */
next(ls); /* read first char */
}
/*
** =======================================================
** LEXICAL ANALYZER
** =======================================================
*/
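/* if the current character is in `set', save it and advance, returning 1; otherwise return 0 */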
static int check_next (LexState *ls, const char *set) {
if (!strchr(set, ls->current))
return 0;
save_and_next(ls);
return 1;
}
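/* replace every occurrence of `from' by `to' in the token buffer */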
static void buffreplace (LexState *ls, char from, char to) {
size_t n = luaZ_bufflen(ls->buff);
char *p = luaZ_buffer(ls->buff);
while (n--)
if (p[n] == from) p[n] = to;
}
static void trydecpoint (LexState *ls, SemInfo *seminfo) {
/* format error: try to update decimal point separator */
struct lconv *cv = localeconv();
char old = ls->decpoint;
ls->decpoint = (cv ? cv->decimal_point[0] : '.');
buffreplace(ls, old, ls->decpoint); /* try updated decimal separator */
if (!luaO_str2d(luaZ_buffer(ls->buff), &seminfo->r)) {
/* format error with correct decimal point: no more options */
buffreplace(ls, ls->decpoint, '.'); /* undo change (for error message) */
luaX_lexerror(ls, "malformed number", TK_NUMBER);
}
}
/* LUA_NUMBER */
static void read_numeral (LexState *ls, SemInfo *seminfo) {
lua_assert(isdigit(ls->current));
do {
save_and_next(ls);
} while (isdigit(ls->current) || ls->current == '.');
if (check_next(ls, "Ee")) /* `E'? */
check_next(ls, "+-"); /* optional exponent sign */
while (isalnum(ls->current) || ls->current == '_')
save_and_next(ls);
save(ls, '\0');
buffreplace(ls, '.', ls->decpoint); /* follow locale for decimal point */
if (!luaO_str2d(luaZ_buffer(ls->buff), &seminfo->r)) /* format error? */
trydecpoint(ls, seminfo); /* try to update decimal point separator */
}
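/*
** skip a sequence `[=*[' or `]=*]'; if the sequence is well formed, return
** its number of `='s; otherwise return a negative number (-1 iff there are
** no `='s after the initial bracket)
*/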
static int skip_sep (LexState *ls) {
int count = 0;
int s = ls->current;
lua_assert(s == '[' || s == ']');
save_and_next(ls);
while (ls->current == '=') {
save_and_next(ls);
count++;
}
return (ls->current == s) ? count : (-count) - 1;
}
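/*
** read a long string or long comment delimited by `sep' `='s, e.g. the
** contents of [==[ ... ]==] have sep == 2; when `seminfo' is NULL the text
** is a comment and is discarded instead of being collected
*/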
static void read_long_string (LexState *ls, SemInfo *seminfo, int sep) {
int cont = 0;
(void)(cont); /* avoid warnings when `cont' is not used */
save_and_next(ls); /* skip 2nd `[' */
if (currIsNewline(ls)) /* string starts with a newline? */
inclinenumber(ls); /* skip it */
for (;;) {
switch (ls->current) {
case EOZ:
luaX_lexerror(ls, (seminfo) ? "unfinished long string" :
"unfinished long comment", TK_EOS);
break; /* to avoid warnings */
#if defined(LUA_COMPAT_LSTR)
case '[': {
if (skip_sep(ls) == sep) {
save_and_next(ls); /* skip 2nd `[' */
cont++;
#if LUA_COMPAT_LSTR == 1
if (sep == 0)
luaX_lexerror(ls, "nesting of [[...]] is deprecated", '[');
#endif
}
break;
}
#endif
case ']': {
if (skip_sep(ls) == sep) {
save_and_next(ls); /* skip 2nd `]' */
#if defined(LUA_COMPAT_LSTR) && LUA_COMPAT_LSTR == 2
cont--;
if (sep == 0 && cont >= 0) break;
#endif
goto endloop;
}
break;
}
case '\n':
case '\r': {
save(ls, '\n');
inclinenumber(ls);
if (!seminfo) luaZ_resetbuffer(ls->buff); /* avoid wasting space */
break;
}
default: {
if (seminfo) save_and_next(ls);
else next(ls);
}
}
} endloop:
if (seminfo)
seminfo->ts = luaX_newstring(ls, luaZ_buffer(ls->buff) + (2 + sep),
luaZ_bufflen(ls->buff) - 2*(2 + sep));
}
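/*
** treat any byte >= 0x7F as the lead byte of a multibyte (GB2312/GBK)
** character, so Chinese text in strings and identifiers is read two
** bytes at a time
*/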
#define isgb2312cnschr(c) (((unsigned char)(c)) >= 0x7F)
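/*
** read a quoted string (delimiter `del'), handling escape sequences such
** as \n and decimal escapes \ddd of up to three digits
*/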
static void read_string (LexState *ls, int del, SemInfo *seminfo) {
save_and_next(ls);
while (ls->current != del) {
switch (ls->current) {
case EOZ:
luaX_lexerror(ls, "unfinished string", TK_EOS);
continue; /* to avoid warnings */
case '\n':
case '\r':
luaX_lexerror(ls, "unfinished string", TK_STRING);
continue; /* to avoid warnings */
case '\\': {
int c;
next(ls); /* do not save the `\' */
switch (ls->current) {
case 'a': c = '\a'; break;
case 'b': c = '\b'; break;
case 'f': c = '\f'; break;
case 'n': c = '\n'; break;
case 'r': c = '\r'; break;
case 't': c = '\t'; break;
case 'v': c = '\v'; break;
case '\n': /* go through */
case '\r': save(ls, '\n'); inclinenumber(ls); continue;
case EOZ: continue; /* will raise an error next loop */
default: {
if (!isdigit(ls->current))
save_and_next(ls); /* handles \\, \", \', and \? */
else { /* \xxx */
int i = 0;
c = 0;
do {
c = 10*c + (ls->current-'0');
next(ls);
} while (++i<3 && isdigit(ls->current));
if (c > UCHAR_MAX)
luaX_lexerror(ls, "escape sequence too large", TK_STRING);
save(ls, c);
}
continue;
}
}
save(ls, c);
next(ls);
continue;
}
default:{
#ifdef _CP_FOR_GBK_
/* if the current char is a multibyte lead byte, read two chars */
if (isgb2312cnschr(ls->current))
save_and_next(ls);
#endif
save_and_next(ls);
}
}
}
save_and_next(ls); /* skip delimiter */
seminfo->ts = luaX_newstring(ls, luaZ_buffer(ls->buff) + 1,
luaZ_bufflen(ls->buff) - 2);
}
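/*
** read and return the next token; single-character tokens are returned as
** their own character code, everything else as a TK_* constant, with any
** semantic value stored in `seminfo'
*/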
static int llex (LexState *ls, SemInfo *seminfo) {
luaZ_resetbuffer(ls->buff);
for (;;) {
switch (ls->current) {
case '\n':
case '\r': {
inclinenumber(ls);
continue;
}
case '-': {
next(ls);
if (ls->current != '-') return '-';
/* else is a comment */
next(ls);
if (ls->current == '[') {
int sep = skip_sep(ls);
luaZ_resetbuffer(ls->buff); /* `skip_sep' may dirty the buffer */
if (sep >= 0) {
read_long_string(ls, NULL, sep); /* long comment */
luaZ_resetbuffer(ls->buff);
continue;
}
}
/* else short comment */
while (!currIsNewline(ls) && ls->current != EOZ)
next(ls);
continue;
}
case '[': {
int sep = skip_sep(ls);
if (sep >= 0) {
read_long_string(ls, seminfo, sep);
return TK_STRING;
}
else if (sep == -1) return '[';
else luaX_lexerror(ls, "invalid long string delimiter", TK_STRING);
}
case '=': {
next(ls);
if (ls->current != '=') return '=';
else { next(ls); return TK_EQ; }
}
case '<': {
next(ls);
if (ls->current != '=') return '<';
else { next(ls); return TK_LE; }
}
case '>': {
next(ls);
if (ls->current != '=') return '>';
else { next(ls); return TK_GE; }
}
case '~': {
next(ls);
if (ls->current != '=') return '~';
else { next(ls); return TK_NE; }
}
case '"':
case '\'': {
read_string(ls, ls->current, seminfo);
return TK_STRING;
}
case '.': {
save_and_next(ls);
if (check_next(ls, ".")) {
if (check_next(ls, "."))
return TK_DOTS; /* ... */
else return TK_CONCAT; /* .. */
}
else if (!isdigit(ls->current)) return '.';
else {
read_numeral(ls, seminfo);
return TK_NUMBER;
}
}
case EOZ: {
return TK_EOS;
}
default: {
if (isspace(ls->current)) {
lua_assert(!currIsNewline(ls));
next(ls);
continue;
}
else if (isdigit(ls->current)) {
read_numeral(ls, seminfo);
return TK_NUMBER;
}
/*
** the isgb2312cnschr macro is used here for
** Chinese identifier support, miros 2009-05-26
*/
else if (isalpha(ls->current) || ls->current == '_' || isgb2312cnschr(ls->current)) {
/* identifier or reserved word */
TString *ts;
do {
#ifdef _CP_FOR_GBK_
/* if the current char is a multibyte lead byte, read two chars */
if (isgb2312cnschr(ls->current))
save_and_next(ls);
#endif
save_and_next(ls);
} while (isalnum(ls->current) || ls->current == '_' || isgb2312cnschr(ls->current));
ts = luaX_newstring(ls, luaZ_buffer(ls->buff),
luaZ_bufflen(ls->buff));
if (ts->tsv.reserved > 0) /* reserved word? */
return ts->tsv.reserved - 1 + FIRST_RESERVED;
else {
seminfo->ts = ts;
return TK_NAME;
}
}
else {
int c = ls->current;
next(ls);
return c; /* single-char tokens (+ - / ...) */
}
}
}
}
}
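/*
** concatenate strings `a' and `b' into a new interned string; used by
** luaX_next below to fold constant string concatenations at lex time
*/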
TString* luaX_mergestring(LexState *ls, TString* a, TString* b)
{
char *newstr;
size_t size1, size2;
TString *ts;
size1 = a->tsv.len;
size2 = b->tsv.len;
/* allocate a temporary buffer for the concatenation */
newstr = luaM_malloc( ls->L, size1 + size2 + 1 );
/* concatenate the strings (memcpy, so embedded zeros are preserved) */
memcpy( newstr, getstr(a), size1 );
memcpy( &newstr[size1], getstr(b), size2 );
newstr[size1 + size2] = '\0'; /* set string terminating char */
/* intern the concatenation as a new string */
ts = luaX_newstring( ls, newstr, size1 + size2 );
/* free the temporary buffer with its real size, to keep GC accounting right */
luaM_freemem( ls->L, newstr, size1 + size2 + 1 );
return ts;
}
void luaX_next (LexState *ls) {
// miros 2009-05-26
// fold constant string concatenations at lex time:
// "abc" .. "def" --> "abcdef"
//ls->lastline = ls->linenumber;
//if (ls->lookahead.token != TK_EOS) { /* is there a look-ahead token? */
// ls->t = ls->lookahead; /* use this one */
// ls->lookahead.token = TK_EOS; /* and discharge it */
//}
//else
// ls->t.token = llex(ls, &ls->t.seminfo); /* read next token */
ls->lastline = ls->linenumber;
if (ls->lookahead.token != TK_EOS) /* is there a look-ahead token? */
{
ls->t = ls->lookahead; /* use this one */
ls->lookahead.token = TK_EOS; /* and discharge it */
}
else if ( ls->prevtks[0].token != TK_EOS )
{
ls->t = ls->prevtks[0]; /* use prev one */
ls->prevtks[0] = ls->prevtks[1];
ls->prevtks[1].token = TK_EOS;
}
else
{
ls->t.token = llex(ls, &ls->t.seminfo); /* read next token */
while ( ls->t.token == TK_STRING )
{
/* read the token after the string */
ls->prevtks[0].token = llex(ls, &ls->prevtks[0].seminfo);
/* stop unless the token after the string is the concat operator */
if ( ls->prevtks[0].token != TK_CONCAT )
break;
/* read the token after the concat operator */
ls->prevtks[1].token = llex(ls, &ls->prevtks[1].seminfo);
/* stop unless the token after the concat operator is another string */
if ( ls->prevtks[1].token != TK_STRING )
break;
/* merge into a new interned string (the old strings are left to the GC) */
ls->t.seminfo.ts = luaX_mergestring( ls, ls->t.seminfo.ts, ls->prevtks[1].seminfo.ts );
ls->prevtks[0].token = TK_EOS; /* discard the buffered tokens */
ls->prevtks[1].token = TK_EOS;
}
}
}
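/*
** note: the look-ahead token is read directly from llex and bypasses the
** prevtks queue used above for constant-string folding
*/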
void luaX_lookahead (LexState *ls) {
lua_assert(ls->lookahead.token == TK_EOS);
ls->lookahead.token = llex(ls, &ls->lookahead.seminfo);
}