@@ -17,6 +17,7 @@ _end_
 require_relative "corinth/io.rb"
 require_relative "corinth/token.rb"
 require_relative "corinth/tokenstream.rb"
+require_relative "corinth/parsestate.rb"
 def common_main
   begin
@@ -61,12 +61,14 @@ class PosReader
   end
 end
-def read_until_from text, s = String.new
+def read_until_from text, s = ""
+  s = +s
   until yield c = text.next do s << c end
   -s
 end
-def read_while_from text, s = String.new
+def read_while_from text, s = ""
+  s = +s
   while yield text.peek do s << text.next end
   -s
 end
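A quick aside on the added s = +s lines: both the new "" default and a frozen caller-supplied seed get copied into a mutable buffer before << appends to it, which is what keeps these helpers working under frozen_string_literal. A rough, hypothetical usage sketch; the each_char enumerator stands in for the real character source, and the require path assumes the layout shown in the first hunk.

require_relative "corinth/io.rb"

# Any object with peek/next works as the character source; an Enumerator is
# the simplest stand-in for the tokenizer's PosReader-backed stream.
chars = "hello world".each_char

# The frozen "" default is fine now: +s copies it before appending.
word = read_while_from chars do |c| c =~ /\w/ end
word          # => "hello"
word.frozen?  # => true, because the helper still returns -s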
@@ -0,0 +1,45 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+## ---------------------------------------------------------------------------|
+##
+## Distributed under the CC0 public domain license.
+## By Alison G. Watson. Attribution is encouraged, though not required.
+## See licenses/cc0.txt for more information.
+##
+## ---------------------------------------------------------------------------|
+##
+## Parser state.
+##
+## ---------------------------------------------------------------------------|
+class ParseState
+  attr_reader :out_file, :base_dir
+  attr_accessor :tks
+  def initialize filename
+    @tks = tokenize filename
+    @out_file = String.new
+    @base_dir = File.dirname filename
+    @out = []
+  end
+  def with tks
+    new = self.clone
+    new.tks = tks
+    new
+  end
+  def write data
+    @out.append data
+  end
+  def each_out &block
+    if block_given?
+      @out.each &block
+    else
+      to_enum :each_out
+    end
+  end
+end
+## EOF
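A small usage sketch of the extracted class, to show how the pieces are meant to fit together. The file names here are hypothetical, and the umbrella require assumes corinth.rb is the file patched in the first hunk (it now pulls in parsestate.rb).

require_relative "corinth.rb"

state = ParseState.new "defs.txt"   # tokenizes the file, remembers its directory
state.write "first chunk"           # output is buffered in @out ...
state.write "second chunk"
state.each_out do |chunk| puts chunk end   # ... and drained later, in order

# with clones the state around a substituted token stream, e.g. for an include:
inner = state.with tokenize(File.join(state.base_dir, "other.txt"))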
@@ -12,6 +12,12 @@
 ##
 ## ---------------------------------------------------------------------------|
+class Array
+  def tk_to_s
+    self.map do |sy| sy.tk_to_s end.join ", "
+  end
+end
 class Symbol
   def tk_to_s
     case self
@@ -34,6 +40,11 @@ class Symbol
     else "unknown token"
     end
   end
+  def tk_is? other
+    other = [other] if other.is_a? Symbol
+    other.include? self
+  end
 end
 class Token
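The two monkey-patches read best side by side: Symbol#tk_is? accepts either one expected type or a list, and Array#tk_to_s joins the per-symbol descriptions for error messages. A sketch (the exact wording comes from Symbol#tk_to_s's table, which this patch leaves alone):

require_relative "corinth.rb"

:identi.tk_is? :identi               # => true,  single expected type
:number.tk_is? [:identi, :string]    # => false, list of expected types

# Joined description for diagnostics, e.g. something like "identifier, string".
[:identi, :string].tk_to_s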
@@ -64,10 +75,8 @@ class Token
   end
   def expect_in nx
-    nx = [nx] if nx.is_a? Symbol
-    unless nx.include? @type
-      nx = nx.map do |sy| sy.tk_to_s end.join ", "
-      raise "#{@pos}: #{yield nx}"
+    unless @type.tk_is? nx
+      raise "#{@pos}: #{yield nx.tk_to_s}"
     end
     self
   end
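Behaviour here is unchanged, only the plumbing moved into Symbol. A rough usage sketch, assuming state is the ParseState from the earlier sketch and that tokenize hands back a TokenStream:

tok = state.tks.peek
tok.expect_in [:identi, :string] do |expected|
  "expected one of #{expected} here"   # expected arrives already tk_to_s'd
end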
@@ -95,13 +104,10 @@ def tokenize_from read
   tok_s = lambda do |p, type, text| tokens.push Token.new p, type, -text end
   tok_1 = lambda do |p, type| tokens.push Token.new p, type end
   tok_2 = lambda do |p, nx, type_a, type_b|
     c = data.peek
-    if c == nx then data.next
-      tok_1.(p, type_a)
-    else
-      tok_1.(p, type_b)
+    if c == nx then data.next; tok_1.(p, type_a)
+    else tok_1.(p, type_b)
     end
   end
@@ -125,13 +131,13 @@ def tokenize_from read
       s = read_until_from data do |c| c == '"' end
       tok_s.(pos, :string, s)
     when Token::IDENS
-      c = "" if c == "\\"
+      c = data.next if c == "\\"
       pos = read.pos
-      s = read_while_from data, +c do |c| c =~ Token::IDENC end
+      s = read_while_from data, c do |c| c =~ Token::IDENC end
       tok_s.(pos, :identi, s)
     when Token::NUMRS
       pos = read.pos
-      s = read_while_from data, +c do |c| c =~ Token::NUMRC end
+      s = read_while_from data, c do |c| c =~ Token::NUMRC end
       tok_s.(pos, :number, s)
     else
       raise "#{read.pos}: invalid character `#{c}'"
@@ -35,7 +35,7 @@ class TokenStream
   end
   def peek_or type, default = nil
-    if self.peek.type == type
+    if self.peek.type.tk_is? type
       tok = self.next
       if block_given?
         yield tok
@@ -48,7 +48,7 @@ class TokenStream
   end
   def drop type
-    if self.peek.type == type
+    if self.peek.type.tk_is? type
       self.next
     else
       nil
@@ -59,6 +59,17 @@ class TokenStream
     drop(type) != nil
   end
+  def while_is type
+    if block_given?
+      loop do
+        break unless self.peek.type.tk_is? type
+        yield self.next
+      end
+    else
+      to_enum :while_is, type
+    end
+  end
   def while_drop type
     loop do
       yield
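while_is rounds out the peek_or / drop family: it keeps yielding tokens while the lookahead matches and falls back to an Enumerator when no block is given. Another hedged sketch, with state.tks assumed to be a TokenStream and :identi / :string used purely as example types:

# Block form: consume a run of identifiers.
state.tks.while_is :identi do |tok| puts tok end

# Enumerator form: same loop, collected lazily.
names = state.tks.while_is([:identi, :string]).to_a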
@@ -178,21 +178,21 @@ ninja
   end
 end
-TextEnt = Struct.new :i
-TEXT = [
-  TextEnt.new("Text.txt"),
+TxtcEnt = Struct.new :i
+TXTC = [
+  TxtcEnt.new("Text.txt"),
 ]
-def proc_text ctx
+def proc_txtc ctx
   ctx.fp << <<ninja
-rule text
-  command = $#{TOOLS}/compilefs.rb $in
-  description = CompileFS
+rule txtc
+  command = $#{TOOLS}/txtc.rb $in
+  description = TxtC
 ninja
-  each_fake_dep ctx, TEXT, "text_" do |ent|
+  each_fake_dep ctx, TXTC, "txtc_" do |ent|
     i = txt ent.i
     o = i + "_"
-    ctx.fp << "build #{o}: text #{i} | $#{TOOLS}/compilefs.rb\n"
+    ctx.fp << "build #{o}: txtc #{i} | $#{TOOLS}/txtc.rb\n"
     o
   end
 end
@@ -207,11 +207,12 @@ def proc_hsfs ctx
 rule hsfs
   command = $#{TOOLS}/hashfs.rb $out $in $#{DIR}
   description = HashFS
+build _fake_: phony
 ninja
   each_fake_dep ctx, HSFS, "hsfs_" do |ent|
     ctx.fp << <<ninja
-build #{ent.o}: hsfs | $#{TOOLS}/hashfs.rb
+build #{ent.o}: hsfs | _fake_ $#{TOOLS}/hashfs.rb
   #{DIR} = #{ent.p} #{ent.d}
 ninja
@@ -219,21 +220,21 @@ ninja
   end
 end
-SndsEnt = Struct.new :i
-SNDS = [
-  SndsEnt.new("Sounds.txt"),
+SndcEnt = Struct.new :i
+SNDC = [
+  SndcEnt.new("Sounds.txt"),
 ]
-def proc_snds ctx
+def proc_sndc ctx
   ctx.fp << <<ninja
-rule snds
-  command = $#{TOOLS}/compilesnd.rb $in
-  description = CompileSnd
+rule sndc
+  command = $#{TOOLS}/sndc.rb $in
+  description = SndC
 ninja
-  each_fake_dep ctx, SNDS, "snds_" do |ent|
+  each_fake_dep ctx, SNDC, "sndc_" do |ent|
     i = txt ent.i
     o = i + "_"
-    ctx.fp << "build #{o}: snds #{i} | $#{TOOLS}/compilesnd.rb\n"
+    ctx.fp << "build #{o}: sndc #{i} | $#{TOOLS}/sndc.rb\n"
     o
   end
 end
@@ -486,9 +487,9 @@ proc_wepc ctx
 proc_monc ctx
 proc_infc ctx
 proc_zcpp ctx
-proc_text ctx
+proc_txtc ctx
 proc_hsfs ctx
-proc_snds ctx
+proc_sndc ctx
 proc_libr ctx
 proc_srcs ctx
 proc_link ctx
@@ -60,17 +60,13 @@ class OutDbl < OutDir
   end
 end
-class ParseState
-  attr_reader :out_file, :snds_dir, :file_dir, :base_dir, :path
-  attr_accessor :tks
+class SndParseState < ParseState
+  attr_reader :snds_dir, :file_dir, :path
-  def initialize base, tks
-    @tks = tks
-    @out_file = String.new
+  def initialize filename
+    super
     @snds_dir = String.new
     @file_dir = String.new
-    @base_dir = base.dup
-    @out = []
     @ignore = []
     @path = []
   end
@@ -83,16 +79,6 @@ class ParseState
     @ignore.include? s
   end
-  def with tks
-    new = self.clone
-    new.tks = tks
-    new
-  end
-  def write data
-    @out.append data
-  end
   def prepend_file_dir s
     if @file_dir.empty?
       s.dup
@@ -100,14 +86,6 @@ class ParseState
       @file_dir + "/" + s
     end
   end
-  def each_out &block
-    if block_given?
-      @out.each &block
-    else
-      to_enum :each_out
-    end
-  end
 end
 def parse_path state, orig
@@ -305,20 +283,16 @@ def parse_statement state
 end
 def parse state
-  loop do
-    parse_statement state
-  end
+  loop do parse_statement state end
 end
 common_main do
   for filename in ARGV
-    base_dir = File.dirname filename
-    state = ParseState.new base_dir, tokenize(filename)
+    state = SndParseState.new filename
     parse state
     fp = open state.out_file, "wt"
-    fp.puts generated_header "compilesnd"
+    fp.puts generated_header "sndc"
     for out in state.each_out
       if out.is_a? OutDef
@@ -14,9 +14,9 @@
 require_relative "corinth.rb"
-Alias = Struct.new :name, :text
-Language = Struct.new :name, :data
-ParseState = Struct.new :cwd, :out, :langs
+Alias = Struct.new :name, :text
+Language = Struct.new :name, :data
+TxtParseState = Struct.new :cwd, :out, :langs
 def escape text
   text.gsub(/((?<m>\\)(?!c))|(?<m>")/, "\\\\\\k<m>").gsub("\n", "\\n")
@@ -130,12 +130,12 @@ end
 for filename in ARGV
   filename = split_name filename
-  state = ParseState.new [], nil, {}
+  state = TxtParseState.new [], nil, {}
   parse_file state, filename, nil
   out = open state.out, "w"
-  out.puts generated_header "compilefs"
+  out.puts generated_header "txtc"
   sorted = state.langs.sort_by do |k, v| k end
   for lnam, lang in sorted