author    gingerBill <bill@gingerbill.org>  2025-03-31 16:32:55 +0100
committer gingerBill <bill@gingerbill.org>  2025-03-31 16:32:55 +0100
commit    c1333d75ef9886fb4f1e89fc5dec6ad1a7bb523e
tree      1d97393d67163bbd64b3cdcab8df4fb8981d586c
parent    70ddb74e402fe5c2c1b139c3e7a66a9eaf566930
Add `##` and `##=` operators
-rw-r--r--  src/check_expr.cpp         6
-rw-r--r--  src/llvm_backend_expr.cpp  37
-rw-r--r--  src/parser.cpp             2
-rw-r--r--  src/tokenizer.cpp          11
4 files changed, 54 insertions, 2 deletions
diff --git a/src/check_expr.cpp b/src/check_expr.cpp
index 91d9e669f..72de88f61 100644
--- a/src/check_expr.cpp
+++ b/src/check_expr.cpp
@@ -1995,6 +1995,12 @@ gb_internal bool check_binary_op(CheckerContext *c, Operand *o, Token op) {
return false;
}
break;
+ case Token_Concat:
+ case Token_ConcatEq:
+ if (!is_type_integer(type)) {
+ error(op, "Operator '%.*s' is only allowed with integer expressions", LIT(op.string));
+ }
+ break;
case Token_Add:
if (is_type_string(type)) {
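Per this check, `##` and `##=` are accepted only when the operands are of an integer type; applying them to, say, a string or float expression reports the "Operator '##' is only allowed with integer expressions" error added here.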
diff --git a/src/llvm_backend_expr.cpp b/src/llvm_backend_expr.cpp
index 0c82180ec..29c690096 100644
--- a/src/llvm_backend_expr.cpp
+++ b/src/llvm_backend_expr.cpp
@@ -1427,6 +1427,38 @@ handle_op:;
return res;
}
break;
+
+
+ case Token_Concat:
+ {
+ lbValue left = lb_emit_conv(p, lhs, type);
+ lbValue right = lb_emit_conv(p, rhs, type);
+
+ lbValue ten = lb_const_int(p->module, type, 10);
+ lbAddr pow = lb_add_local_generated(p, type, false);
+ lb_addr_store(p, pow, ten);
+
+ lbBlock *loop = lb_create_block(p, "concat.loop");
+ lbBlock *body = lb_create_block(p, "concat.body");
+ lbBlock *done = lb_create_block(p, "concat.done");
+
+ lb_emit_jump(p, loop);
+ lb_start_block(p, loop);
+
+ lbValue cond = lb_emit_comp(p, Token_GtEq, right, lb_addr_load(p, pow));
+
+ lb_emit_if(p, cond, body, done);
+ lb_start_block(p, body);
+
+ lbValue pow_10 = lb_emit_arith(p, Token_Mul, lb_addr_load(p, pow), ten, type);
+ lb_addr_store(p, pow, pow_10);
+
+ lb_emit_jump(p, loop);
+ lb_start_block(p, done);
+
+ lbValue x = lb_emit_arith(p, Token_Mul, left, lb_addr_load(p, pow), type);
+ return lb_emit_arith(p, Token_Add, x, right, type);
+ }
}
GB_PANIC("unhandled operator of lb_emit_arith");
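In plain terms, the blocks emitted here find the smallest power of ten strictly greater than the right operand, multiply the left operand by it, and add the right operand, i.e. decimal digit concatenation: 12 ## 34 evaluates to 1234. A minimal C++ sketch of that scalar computation, for illustration only (concat_decimal is a hypothetical helper, not part of this commit; overflow and negative operands are not considered):

#include <cstdint>
#include <cstdio>

// Mirrors the concat.loop / concat.body / concat.done blocks above.
static int64_t concat_decimal(int64_t left, int64_t right) {
    int64_t pow = 10;          // the generated code stores the constant 10 in a local
    while (right >= pow) {     // concat.loop: continue while `right` still has more digits
        pow *= 10;             // concat.body: pow = pow * 10
    }
    return left * pow + right; // concat.done: shift `left` past `right`'s digits, then add
}

int main(void) {
    std::printf("%lld\n", (long long)concat_decimal(12, 34)); // 1234
    std::printf("%lld\n", (long long)concat_decimal(7, 0));   // 70 (0 < 10, so pow stays 10)
    return 0;
}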
@@ -1517,7 +1549,9 @@ gb_internal lbValue lb_build_binary_expr(lbProcedure *p, Ast *expr) {
case Token_And:
case Token_Or:
case Token_Xor:
- case Token_AndNot: {
+ case Token_AndNot:
+ case Token_Concat:
+ case Token_ConcatEq: {
Type *type = default_type(tv.type);
lbValue left = lb_build_expr(p, be->left);
lbValue right = lb_build_expr(p, be->right);
@@ -1604,6 +1638,7 @@ gb_internal lbValue lb_build_binary_expr(lbProcedure *p, Ast *expr) {
lbValue right = lb_build_expr(p, be->right);
return lb_build_binary_in(p, left, right, be->op.kind);
}
+
default:
GB_PANIC("Invalid binary expression");
break;
diff --git a/src/parser.cpp b/src/parser.cpp
index f38f79607..a38351220 100644
--- a/src/parser.cpp
+++ b/src/parser.cpp
@@ -3490,6 +3490,7 @@ gb_internal i32 token_precedence(AstFile *f, TokenKind t) {
case Token_AndNot:
case Token_Shl:
case Token_Shr:
+ case Token_Concat:
return 7;
}
return 0;
@@ -3778,6 +3779,7 @@ gb_internal Ast *parse_simple_stmt(AstFile *f, u32 flags) {
case Token_AndNotEq:
case Token_CmpAndEq:
case Token_CmpOrEq:
+ case Token_ConcatEq:
{
if (f->curr_proc == nullptr) {
syntax_error(f->curr_token, "You cannot use a simple statement in the file scope");
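As far as these hunks show, the parser treats the new tokens like their existing siblings. `##` joins the precedence-7 group alongside `&~`, `<<` and `>>`, so, assuming the additive operators sit in a lower group as they do relative to the rest of this group, `a + b ## c` groups as `a + (b ## c)`; for example, 1 + 2 ## 3 is 1 + 23 = 24 rather than (1 + 2) ## 3 = 33. Listing Token_ConcatEq with the other assignment operators lets `x ##= y` parse as an op-assign statement, presumably with the usual `x = x ## y` meaning.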
diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index 53f6135d0..3a5828a3a 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -33,6 +33,7 @@ TOKEN_KIND(Token__OperatorBegin, ""), \
TOKEN_KIND(Token_AndNot, "&~"), \
TOKEN_KIND(Token_Shl, "<<"), \
TOKEN_KIND(Token_Shr, ">>"), \
+ TOKEN_KIND(Token_Concat, "##"), \
TOKEN_KIND(Token_CmpAnd, "&&"), \
TOKEN_KIND(Token_CmpOr, "||"), \
\
@@ -49,6 +50,7 @@ TOKEN_KIND(Token__AssignOpBegin, ""), \
TOKEN_KIND(Token_AndNotEq, "&~="), \
TOKEN_KIND(Token_ShlEq, "<<="), \
TOKEN_KIND(Token_ShrEq, ">>="), \
+ TOKEN_KIND(Token_ConcatEq, "##="), \
TOKEN_KIND(Token_CmpAndEq, "&&="), \
TOKEN_KIND(Token_CmpOrEq, "||="), \
TOKEN_KIND(Token__AssignOpEnd, ""), \
@@ -937,7 +939,14 @@ gb_internal void tokenizer_get_token(Tokenizer *t, Token *token, int repeat=0) {
break;
case '#':
token->kind = Token_Hash;
- if (t->curr_rune == '!') {
+ if (t->curr_rune == '#') {
+ advance_to_next_rune(t);
+ token->kind = Token_Concat;
+ if (t->curr_rune == '=') {
+ advance_to_next_rune(t);
+ token->kind = Token_ConcatEq;
+ }
+ } else if (t->curr_rune == '!') {
token->kind = Token_Comment;
tokenizer_skip_line(t);
} else if (t->curr_rune == '+') {
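The tokenizer change follows the usual longest-match rule: after a '#', a second '#' upgrades the token from Token_Hash to Token_Concat, and a trailing '=' upgrades it again to Token_ConcatEq; otherwise the existing '#!' and '#+' handling runs as before. A self-contained C++ sketch of that decision, for illustration only (scan_hash and Kind are hypothetical names, not the compiler's Tokenizer API):

#include <cstddef>
#include <cstdio>
#include <string>

enum Kind { HASH, CONCAT, CONCAT_EQ };

// Hypothetical scanner: called with src[i] == '#', consumes the longest matching operator.
static Kind scan_hash(const std::string &src, std::size_t &i) {
    i++;                                       // '#'   -> Token_Hash by default
    if (i < src.size() && src[i] == '#') {
        i++;                                   // "##"  -> Token_Concat
        if (i < src.size() && src[i] == '=') {
            i++;                               // "##=" -> Token_ConcatEq
            return CONCAT_EQ;
        }
        return CONCAT;
    }
    return HASH;
}

int main(void) {
    const char *names[] = {"#", "##", "##="};
    for (const std::string s : {"#foo", "##", "##="}) {
        std::size_t i = 0;
        std::printf("%-4s -> %s\n", s.c_str(), names[scan_hash(s, i)]);
    }
    return 0;
}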