Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
S
sv2v
Overview
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
lvzhengyang
sv2v
Commits
cf4c2a54
Commit
cf4c2a54
authored
Jan 14, 2020
by
Zachary Snow
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
expanded support for macros in preprocessor directives
parent
6f0fa58a
Hide whitespace changes
Inline
Side-by-side
Showing
4 changed files
with
56 additions
and
24 deletions
+56
-24
src/Language/SystemVerilog/Parser/Lex.x
+48
-23
test/lex/line.sv
+5
-0
test/lex/line.v
+2
-1
test/lex/line.vh
+1
-0
No files found.
src/Language/SystemVerilog/Parser/Lex.x
View file @
cf4c2a54
...
...
@@ -32,6 +32,7 @@ module Language.SystemVerilog.Parser.Lex
import System.FilePath (dropFileName)
import System.Directory (findFile)
import System.IO.Unsafe (unsafePerformIO)
import Text.Read (readMaybe)
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import Data.List (span, elemIndex, dropWhileEnd)
...
...
@@ -732,21 +733,31 @@ dropWhitespace = do
else return()
[] -> return ()
-- | Consume and return a quoted string such as <foo.bar> or "foo.bar",
-- delimiters included. Leading spaces are skipped first.
takeQuotedString :: Alex String
takeQuotedString = do
    dropSpaces
    opener <- takeChar
    closer <- case opener of
        '"' -> return '"'
        '<' -> return '>'
        _ -> lexicalError $ "bad beginning of include arg: " ++ (show opener)
    remainder <- takeThrough closer
    let quoted = opener : remainder
    -- angle-bracket (library) includes are recognized but rejected
    if closer == '>'
        then lexicalError $ "library includes are not supported: " ++ quoted
        else return quoted
-- lex the remainder of the current line into tokens and return them directly,
-- rather than appending them to the token list in the lexer state
tokenizeLine :: Alex [Token]
tokenizeLine = do
    -- grab the rest of the current line
    line <- takeUntilNewline
    dropWhitespace
    -- snapshot the lexer state so it can be restored afterwards
    savedInput <- alexGetInput
    savedFile <- getCurrentFile
    savedToks <- gets lsToks
    -- scan the captured line in isolation (this includes macro processing)
    modify $ \s -> s { lsToks = [] }
    alexSetInput (alexStartPos, ' ', [], line)
    alexMonadScan
    lineToks <- gets lsToks
    -- restore the saved lexer state
    alexSetInput savedInput
    setCurrentFile savedFile
    modify $ \s -> s { lsToks = savedToks }
    -- drop macro boundary markers; lsToks accumulates in reverse, so flip back
    let keep (Token t _ _) = t /= MacroBoundary
    return $ reverse $ filter keep lineToks
-- removes and returns a decimal number
takeNumber :: Alex Int
...
...
@@ -772,9 +783,9 @@ takeNumber = do
-- | Return the next character of the input without consuming it.
-- Raises a lexical error at end of input rather than fabricating a
-- character, so directive parsers cannot silently run past the buffer.
-- NOTE(review): the scraped diff fused the old body (which returned '\n'
-- at EOF) with the new one; this is the post-commit version.
peekChar :: Alex Char
peekChar = do
    (_, _, _, str) <- alexGetInput
    if null str
        then lexicalError "unexpected end of input"
        else return $ head str
takeMacroDefinition :: Alex (String, [(String, Maybe String)])
takeMacroDefinition = do
...
...
@@ -856,6 +867,7 @@ findUnescapedQuote ('`' : '\\' : '`' : '"' : rest) = ('\\' : '"' : start, end)
findUnescapedQuote ('\\' : '"' : rest) = ('\\' : '"' : start, end)
where (start, end) = findUnescapedQuote rest
findUnescapedQuote ('"' : rest) = ("\"", rest)
findUnescapedQuote ('`' : '"' : rest) = ("\"", rest)
findUnescapedQuote (ch : rest) = (ch : start, end)
where (start, end) = findUnescapedQuote rest
...
...
@@ -939,7 +951,10 @@ handleDirective (posOrig, _, _, strOrig) len = do
"resetall" -> passThrough
"begin_keywords" -> do
quotedSpec <- takeQuotedString
toks <- tokenizeLine
quotedSpec <- case toks of
[Token Lit_string str _] -> return str
_ -> lexicalError $ "unexpected tokens following `begin_keywords: " ++ show toks
let spec = tail $ init quotedSpec
case Map.lookup spec specMap of
Nothing ->
...
...
@@ -973,9 +988,16 @@ handleDirective (posOrig, _, _, strOrig) len = do
alexMonadScan
"line" -> do
lineNumber <- takeNumber
quotedFilename <- takeQuotedString
levelNumber <- takeNumber -- level, ignored
toks <- tokenizeLine
(lineNumber, quotedFilename, levelNumber) <-
case toks of
[ Token Lit_number lineStr _,
Token Lit_string filename _,
Token Lit_number levelStr _] -> do
let Just line = readMaybe lineStr :: Maybe Int
let Just level = readMaybe levelStr :: Maybe Int
return (line, filename, level)
_ -> lexicalError $ "unexpected tokens following `line: " ++ show toks
let filename = init $ tail quotedFilename
setCurrentFile filename
(AlexPn f _ c, prev, _, str) <- alexGetInput
...
...
@@ -985,7 +1007,10 @@ handleDirective (posOrig, _, _, strOrig) len = do
else lexicalError "line directive invalid level number"
"include" -> do
quotedFilename <- takeQuotedString
toks <- tokenizeLine
quotedFilename <- case toks of
[Token Lit_string str _] -> return str
_ -> lexicalError $ "unexpected tokens following `include: " ++ show toks
inputFollow <- alexGetInput
fileFollow <- getCurrentFile
-- process the included file
...
...
test/lex/line.sv
View file @
cf4c2a54
...
...
@@ -3,5 +3,10 @@ module top;
$
display
(
`__FILE__
,
`__LINE__
)
;
`line
101
"fake.v"
1
$
display
(
`__FILE__
,
`__LINE__
)
;
`define
foo
(
filename
)
`"
filename
.
vh
`"
`include
`foo
(
line
)
`define
new_line_num 200
`line
`new_line_num
`foo
(
line
)
1
$
display
(
`__FILE__
,
`__LINE__
)
;
end
endmodule
test/lex/line.v
View file @
cf4c2a54
module
top
;
initial
begin
$
display
(
"line.sv"
,
`__LINE__
)
;
;
$
display
(
"fake.v"
,
102
)
;
$
display
(
"via include: "
,
"./line.vh"
,
1
)
;
$
display
(
"line.vh"
,
201
)
;
end
endmodule
test/lex/line.vh
0 → 100644
View file @
cf4c2a54
$display("via include: ", `__FILE__, `__LINE__);
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment