From 64375c16c8316b8381ad59fa3538ad84732d90b7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Hugo=20H=C3=B6rnquist?=
Date: Thu, 14 Jul 2022 23:44:03 +0200
Subject: work

---
 tests/test/cpp/lex2.scm          | 54 +++++++++++++++++++++++++++
 tests/test/cpp/preprocessor2.scm | 80 ++++++++++++++++++++++++++++++++++++----
 2 files changed, 126 insertions(+), 8 deletions(-)

(limited to 'tests')

diff --git a/tests/test/cpp/lex2.scm b/tests/test/cpp/lex2.scm
index 47bb4a16..b7087c3b 100644
--- a/tests/test/cpp/lex2.scm
+++ b/tests/test/cpp/lex2.scm
@@ -92,3 +92,57 @@
     (lexeme type: 'preprocessing-token body: '(punctuator "."))
     (lexeme type: 'preprocessing-token body: '(identifier "dir")))
   (lex "..\\listing.dir"))
+
+
+(test-equal "Proper H-string"
+  (list (lexeme type: 'preprocessing-token body: '(header-name (h-string "a"))))
+  (lex "<a>"))
+
+(test-equal "Unexpected h-string"
+  (list (lexeme type: 'preprocessing-token body: '(pp-number "1"))
+        (lexeme type: 'whitespace body: " ")
+        (lexeme type: 'preprocessing-token body: '(header-name (h-string " 2 ")))
+        (lexeme type: 'whitespace body: " ")
+        (lexeme type: 'preprocessing-token body: '(pp-number "3")))
+  (lex "1 < 2 > 3"))
+
+(test-equal "Quotation mark inside h-string"
+  (list (lexeme type: 'preprocessing-token body: '(header-name (h-string "a\"b"))))
+  (lex "<a\"b>"))
+
+(test-group "Interaction of h-strings and regular strings"
+  (test-equal "Less than string, not h-string"
+    (list (lexeme type: 'preprocessing-token body: '(pp-number "1"))
+          (lexeme type: 'preprocessing-token body: '(string-literal "<"))
+          (lexeme type: 'preprocessing-token body: '(punctuator ">")))
+    (lex "1\"<\">"))
+
+  (test-equal "H-string, not string"
+    (list (lexeme type: 'preprocessing-token body: '(pp-number "1"))
+          (lexeme type: 'preprocessing-token body: '(header-name (h-string "\"")))
+          (lexeme type: 'other body: "\""))
+    (lex "1<\">\"")))
+
+(test-equal "Q-strings are lexed as regular strings"
+  (list (lexeme type: 'preprocessing-token body: '(punctuator "#"))
+        (lexeme type: 'preprocessing-token body: '(identifier "include"))
+        (lexeme type: 'whitespace body: " ")
+        (lexeme type: 'preprocessing-token body: '(string-literal "test")))
+  ;; "# include" is written out here, since generated tokens could possibly
+  ;; depend on that context; regular strings are returned because the lexer
+  ;; doesn't check that context.
+  (lex "#include \"test\"")
+  )
+
+
+
+(test-group "Unicode"
+  (test-equal "In string literals"
+    (list (lexeme type: 'preprocessing-token body: '(string-literal "åäö")))
+    (lex "\"åäö\""))
+
+  (test-equal "Outside string literals"
+    (list (lexeme type: 'other body: "å")
+          (lexeme type: 'other body: "ä")
+          (lexeme type: 'other body: "ö"))
+    (lex "åäö")))
diff --git a/tests/test/cpp/preprocessor2.scm b/tests/test/cpp/preprocessor2.scm
index 9f308c9e..71e0c1a0 100644
--- a/tests/test/cpp/preprocessor2.scm
+++ b/tests/test/cpp/preprocessor2.scm
@@ -38,7 +38,7 @@
   :use-module (c lex2)
   )
 
-;; TODO Redefinition checking code isn't yet written
+;; TODO Redefinition checking code isn't yet written
 (test-skip "Example 6")
 
 ;; See (c preprocessor2) TODO#1
@@ -63,8 +63,11 @@
 (define parse-parameter-list (@@ (c preprocessor2) parse-parameter-list))
 (define resolve-define (@@ (c preprocessor2) resolve-define))
 (define resolve-token-stream (@@ (c preprocessor2) resolve-token-stream))
-(define tokenize (@@ (c preprocessor2) tokenize))
-
+;; (define tokenize (@@ (c preprocessor2) tokenize))
+(define resolve-h-file (@@ (c preprocessor2) resolve-h-file))
+(define resolve-q-file (@@ (c preprocessor2) resolve-q-file))
+(define resolve-header (@@ (c preprocessor2) resolve-header))
+;; (define include-header (@@ (c preprocessor2) include-header))
 
 
 ;; Remove the noexpand list from each token.
@@ -78,6 +81,16 @@
   (let ((env tokens (handle-preprocessing-tokens env (tokenize str))))
     (drop-whitespace-both (remove-noexpand tokens))))
 
+(define (call-with-tmp-header string proc)
+  (let* ((filename (string-copy "/tmp/headerfile-XXXXXXX"))
+         (port (mkstemp! filename)))
+    (with-output-to-port port
+      (lambda () (display string)
+        ))
+    (close-port port)
+    (proc filename)))
+
+
 
 (test-group "Tokens until End Of Line"
   (call-with-values
@@ -578,6 +591,7 @@
 
 
 
+
 (test-group "Line directive"
   (let ((e (make-environment)))
     (test-equal "#line "
@@ -595,10 +609,60 @@
       (lex "x"))))))
 
 
-;; TODO
-;; resolve-h-file
-;; resolve-q-file
-;; include
+;; NOTE these tests assume a "regular" Unix system
+(test-group "#include"
+  (test-group "Resolve header paths"
+    (test-equal "Find in path"
+      "/usr/include/stdio.h"
+      (resolve-h-file "stdio.h"))
+
+    (test-error "Fail if not in path"
+      'cpp-error
+      (resolve-h-file "This file doesn't exist"))
+
+    (test-equal "Q-string with absolute path"
+      "/dev/null"
+      (resolve-q-file "/dev/null"))
+    (test-error "Q-file fails for missing file"
+      'cpp-error (resolve-q-file "This file doesn't exist"))
+
+    (test-equal "Q-strings also look in path"
+      "/usr/include/stdio.h"
+      (resolve-q-file "stdio.h")))
+
+  (test-group "resolve-header returns paths from pp tokens (from #include directive)"
+    (test-equal "H-string"
+      "/usr/include/stdio.h"
+      (resolve-header (make-environment)
+                      (lex "<stdio.h>")))
+    (test-equal "Q-string"
+      "/usr/include/stdio.h"
+      (resolve-header (make-environment)
+                      (lex "\"stdio.h\""))))
+
+  ;; TODO #include is subject to macro expansion
+  ;; test it through resolve-header
+
+
+  (test-group "Actually including stuff"
+    (call-with-tmp-header "
+#define X 10
+int x;
+" (lambda (filename)
+    (test-equal "Include through #include"
+      (lex "int x;\n\n10")
+      (run (format #f "
+#include \"~a\"
+X
+" filename))))))
+
+  ;; NOTE should really be below "regular" __LINE__ tests
+  (call-with-tmp-header "__LINE__" (lambda (path)
+    (test-equal "__LINE__ in other file"
+      (lex "1")
+      (run (format #f "#include \"~a\"\n" path))))))
+
+
 
 
 
@@ -941,7 +1005,7 @@
 g(x+(3,4)-w) | h 5) & m
 p() i[q()] = { q(1), r(2,3), r(4,), r(,5), r(,) };
 char c[2][6] = { str(hello), str() };"))))
-  ;; TODO Example 4 skipped due to #include
+  ;; TODO Example 4 skipped due to #include in output
 
   (test-equal "Example 5"
     (unlex-aggressive (lex "int j[] = { 123, 45, 67, 89, 10, 11, 12, };"))
--
cgit v1.2.3