Sped up compiling PBS files by about 5x; fixed a bug from an earlier commit when writing PBS lines with many optional values

Maruno17
2023-04-05 21:41:55 +01:00
parent cb4a1fd8af
commit 3d9d31621b
4 changed files with 224 additions and 60 deletions
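
In short: PBS compiling now uses get_csv_record, which splits each line once (split_csv_line) and casts each value by its schema character (cast_csv_value), instead of the character-consuming pbGetCsvRecord / csvfield! parsing, which the code below notes is roughly 10 times slower. A minimal sketch of the call-site change, taken from the encounter compiler in this diff (the schema array has the form [key, format string, extra arguments for enum fields]):

values = pbGetCsvRecord(line, line_no, [0, "vevV", nil, :Species])   # before
values = get_csv_record(line, [nil, "vevV", nil, :Species])          # after: no line number argument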


@@ -967,9 +967,9 @@ def pbPokemonEditor
if param_type.nil?
evo[2] = nil
elsif param_type == Integer
evo[2] = Compiler.csvPosInt!(evo[2])
evo[2] = Compiler.cast_csv_value(evo[2], "u")
elsif param_type != String
evo[2] = Compiler.csvEnumField!(evo[2], param_type, "Evolutions", species_hash[:id])
evo[2] = Compiler.cast_csv_value(evo[2], "e", param_type)
end
end
# Add species' data to records


@@ -248,9 +248,55 @@ module Compiler
end
end
#=============================================================================
# Splits a string containing comma-separated values into an array of those
# values.
#=============================================================================
def split_csv_line(string)
# Split the string into an array of values, using a comma as the separator
values = string.split(",")
# Check for quote marks in each value, as we may need to recombine some values
# to make proper results
(0...values.length).each do |i|
value = values[i]
next if !value || value.empty?
quote_count = value.count('"') #scan(/(?:^|\G|[^\\])(\\)*"/).length
if !quote_count.zero?
# Quote marks found in value
(i...(values.length - 1)).each do |j|
quote_count = values[i].count('"')
if quote_count == 2 && values[i].start_with?("\"") && values[i].end_with?("\"")
# Two quote marks around the whole value; remove them
values[i] = values[i][1..-2]
break
elsif quote_count.even?
break
end
# Odd number of quote marks in value; concatenate the next value to it and
# see if that's any better
values[i] += "," + values[j + 1]
values[j + 1] = nil
end
# Recheck for enclosing quote marks to remove
if quote_count != 2
if values[i].count('"') == 2 && values[i].start_with?("\"") && values[i].end_with?("\"")
values[i] = values[i][1..-2]
end
end
end
# Remove leading and trailing whitespace from value
values[i].strip!
end
# Remove nil values caused by concatenating values above
values.compact!
return values
end
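# Illustrative only (not part of this commit): assuming quoted values use plain
# double quotes around text that contains commas, a call would look like
#   split_csv_line('PIKACHU,"Red cheeks, yellow fur",25')
#   # => ["PIKACHU", "Red cheeks, yellow fur", "25"]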
#=============================================================================
# Convert a string to certain kinds of values
#=============================================================================
# Unused
# NOTE: This method is about 10 times slower than split_csv_line.
def csvfield!(str)
ret = ""
str.sub!(/^\s*/, "")
@@ -286,16 +332,15 @@ module Compiler
return ret
end
# Unused
def csvBoolean!(str, _line = -1)
field = csvfield!(str)
if field[/^1|[Tt][Rr][Uu][Ee]|[Yy][Ee][Ss]|[Yy]$/]
return true
elsif field[/^0|[Ff][Aa][Ll][Ss][Ee]|[Nn][Oo]|[Nn]$/]
return false
end
return true if field[/^(?:1|TRUE|YES|Y)$/i]
return false if field[/^(?:0|FALSE|NO|N)$/i]
raise _INTL("Field {1} is not a Boolean value (true, false, 1, 0)\r\n{2}", field, FileLineData.linereport)
end
# Unused
def csvInt!(str, _line = -1)
ret = csvfield!(str)
if !ret[/^\-?\d+$/]
@@ -304,6 +349,7 @@ module Compiler
return ret.to_i
end
# Unused
def csvPosInt!(str, _line = -1)
ret = csvfield!(str)
if !ret[/^\d+$/]
@@ -312,22 +358,83 @@ module Compiler
return ret.to_i
end
# Unused
def csvFloat!(str, _line = -1)
ret = csvfield!(str)
return Float(ret) rescue raise _INTL("Field {1} is not a number\r\n{2}", ret, FileLineData.linereport)
end
# Unused
def csvEnumField!(value, enumer, _key, _section)
ret = csvfield!(value)
return checkEnumField(ret, enumer)
end
# Unused
def csvEnumFieldOrInt!(value, enumer, _key, _section)
ret = csvfield!(value)
return ret.to_i if ret[/\-?\d+/]
return checkEnumField(ret, enumer)
end
# Turns a value (a string) into another data type as determined by the given
# schema.
# @param value [String]
# @param schema [String]
def cast_csv_value(value, schema, enumer = nil)
case schema.downcase
when "i" # Integer
if !value[/^\-?\d+$/]
raise _INTL("Field {1} is not an integer\r\n{2}", value, FileLineData.linereport)
end
return value.to_i
when "u" # Positive integer or zero
if !value[/^\d+$/]
raise _INTL("Field {1} is not a positive integer or 0\r\n{2}", value, FileLineData.linereport)
end
return value.to_i
when "v" # Positive integer
if !value[/^\d+$/]
raise _INTL("Field {1} is not a positive integer\r\n{2}", value, FileLineData.linereport)
end
if value.to_i == 0
raise _INTL("Field '{1}' must be greater than 0\r\n{2}", value, FileLineData.linereport)
end
return value.to_i
when "x" # Hexadecimal number
if !value[/^[A-F0-9]+$/i]
raise _INTL("Field '{1}' is not a hexadecimal number\r\n{2}", value, FileLineData.linereport)
end
return value.hex
when "f" # Floating point number
if !value[/^\-?\d*\.?\d*$/]
raise _INTL("Field {1} is not a number\r\n{2}", value, FileLineData.linereport)
end
return value.to_f
when "b" # Boolean
return true if value[/^(?:1|TRUE|YES|Y)$/i]
return false if value[/^(?:0|FALSE|NO|N)$/i]
raise _INTL("Field {1} is not a Boolean value (true, false, 1, 0)\r\n{2}", value, FileLineData.linereport)
when "n" # Name
if !value[/^(?![0-9])\w+$/]
raise _INTL("Field '{1}' must contain only letters, digits, and\r\nunderscores and can't begin with a number.\r\n{2}", value, FileLineData.linereport)
end
when "s" # String
when "q" # Unformatted text
when "m" # Symbol
if !value[/^(?![0-9])\w+$/]
raise _INTL("Field '{1}' must contain only letters, digits, and\r\nunderscores and can't begin with a number.\r\n{2}", value, FileLineData.linereport)
end
return value.to_sym
when "e" # Enumerable
return checkEnumField(value, enumer)
when "y" # Enumerable or integer
return value.to_i if value[/^\-?\d+$/]
return checkEnumField(value, enumer)
end
return value
end
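# Illustrative only (not part of this commit); a few sample conversions:
#   cast_csv_value("37", "u")        # => 37
#   cast_csv_value("yes", "b")       # => true
#   cast_csv_value("LEFTOVERS", "m") # => :LEFTOVERS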
def checkEnumField(ret, enumer)
case enumer
when Module
@@ -376,6 +483,7 @@ module Compiler
raise _INTL("Enumeration not defined\r\n{1}", FileLineData.linereport)
end
# Unused
def checkEnumFieldOrNil(ret, enumer)
case enumer
when Module
@@ -403,7 +511,10 @@ module Compiler
#=============================================================================
# Convert a string to values using a schema
#=============================================================================
# Unused
# @deprecated This method is slated to be removed in v22.
def pbGetCsvRecord(rec, lineno, schema)
Deprecation.warn_method("pbGetCsvRecord", "v22", "get_csv_record")
record = []
repeat = false
schema_length = schema[1].length
@@ -573,6 +684,58 @@ module Compiler
return (!repeat && schema_length == 1) ? record[0] : record
end
#=============================================================================
# Convert a string to values using a schema
#=============================================================================
def get_csv_record(rec, schema)
ret = []
repeat = false
start = 0
schema_length = schema[1].length
case schema[1][0, 1] # First character in schema
when "*"
repeat = true
start = 1
when "^"
start = 1
schema_length -= 1
end
subarrays = repeat && schema[1].length - start > 1 # Whether ret is an array of arrays
# Split the string on commas into an array of values to apply the schema to
values = split_csv_line(rec)
# Apply the schema to each value in the line
idx = -1 # Index of value to look at in values
loop do
record = []
(start...schema[1].length).each do |i|
idx += 1
sche = schema[1][i, 1]
if sche[/[A-Z]/] # Upper case = optional
if nil_or_empty?(values[idx])
record.push(nil)
next
end
end
if sche.downcase == "q" # Unformatted text
record.push(rec)
idx = values.length
break
else
record.push(cast_csv_value(values[idx], sche, schema[2 + i - start]))
end
end
if !record.empty?
if subarrays
ret.push(record)
else
ret.concat(record)
end
end
break if !repeat || idx >= values.length - 1
end
return (!repeat && schema_length == 1) ? ret[0] : ret
end
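# Illustrative only (not part of this commit); a few sample records:
#   get_csv_record("5,12", [:size, "uU"])   # => [5, 12]
#   get_csv_record("5", [:size, "uU"])      # => [5, nil]   (upper case = optional)
#   get_csv_record("1,2,3", [:ids, "*u"])   # => [1, 2, 3]  (leading * = repeating)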
#=============================================================================
# Write values to a file using a schema
#=============================================================================
@@ -584,9 +747,16 @@ module Compiler
(start...schema[1].length).each do |i|
index += 1
value = rec[index]
if schema[1][i, 1].upcase != schema[1][i, 1] || !value.nil?
file.write(",") if index > 0
if schema[1][i, 1][/[A-Z]/] # Optional
# Check the rest of the values for non-nil things
later_value_found = false
(index...rec.length).each do |j|
later_value_found = true if !rec[j].nil?
break if later_value_found
end
break if !later_value_found
end
file.write(",") if index > 0
if value.nil?
# do nothing
elsif value.is_a?(String)
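# Note (not part of this commit's code): with the optional-value handling above,
# a record whose later values are set keeps the separating commas for nil optional
# values in between (e.g. the town map line "Point = 18,17,Berth Island,,,,,51"
# further below), while records whose trailing optional values are all nil no
# longer get a trailing comma (e.g. "Point = 13,11,Route 1").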


@@ -31,7 +31,7 @@ module Compiler
schema.each_key do |key|
FileLineData.setSection(section_name, key, contents[key]) # For error reporting
if key == "SectionName"
data_hash[schema[key][0]] = pbGetCsvRecord(section_name, key, schema[key])
data_hash[schema[key][0]] = get_csv_record(section_name, schema[key])
next
end
# Skip empty properties
@@ -39,14 +39,14 @@ module Compiler
# Compile value for key
if schema[key][1][0] == "^"
contents[key].each do |val|
value = pbGetCsvRecord(val, key, schema[key])
value = get_csv_record(val, schema[key])
value = nil if value.is_a?(Array) && value.empty?
data_hash[schema[key][0]] ||= []
data_hash[schema[key][0]].push(value)
end
data_hash[schema[key][0]].compact!
else
value = pbGetCsvRecord(contents[key], key, schema[key])
value = get_csv_record(contents[key], schema[key])
value = nil if value.is_a?(Array) && value.empty?
data_hash[schema[key][0]] = value
end
@@ -102,28 +102,22 @@ module Compiler
# Compile map connections
#=============================================================================
def compile_connections(*paths)
records = []
paths.each do |path|
compile_pbs_file_message_start(path)
pbCompilerEachPreppedLine(path) do |line, lineno|
hashenum = {
"N" => "N", "North" => "N",
"E" => "E", "East" => "E",
"S" => "S", "South" => "S",
"W" => "W", "West" => "W"
}
record = []
thisline = line.dup
record.push(csvInt!(thisline, lineno))
record.push(csvEnumFieldOrInt!(thisline, hashenum, "", sprintf("(line %d)", lineno)))
record.push(csvInt!(thisline, lineno))
record.push(csvInt!(thisline, lineno))
record.push(csvEnumFieldOrInt!(thisline, hashenum, "", sprintf("(line %d)", lineno)))
record.push(csvInt!(thisline, lineno))
schema = [nil, "iyiiyi", nil, hashenum, nil, nil, hashenum]
records = []
paths.each do |path|
compile_pbs_file_message_start(path)
pbCompilerEachPreppedLine(path) do |line, lineno|
FileLineData.setLine(line, lineno)
record = get_csv_record(line, schema)
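# Illustrative only (not part of this commit): with the "iyiiyi" schema above, a
# line such as "5,North,0,28,South,0" would come back as [5, "N", 0, 28, "S", 0],
# since "y" fields are looked up in hashenum and "i" fields are cast to integers.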
if !pbRgssExists?(sprintf("Data/Map%03d.rxdata", record[0]))
print _INTL("Warning: Map {1}, as mentioned in the map connection data, was not found.\r\n{2}", record[0], FileLineData.linereport)
end
if !pbRgssExists?(sprintf("Data/Map%03d.rxdata", record[3]))
elsif !pbRgssExists?(sprintf("Data/Map%03d.rxdata", record[3]))
print _INTL("Warning: Map {1}, as mentioned in the map connection data, was not found.\r\n{2}", record[3], FileLineData.linereport)
end
case record[1]
@@ -333,21 +327,21 @@ module Compiler
FileLineData.setSection(species.id.to_s, "Offspring", nil) # For error reporting
offspring = species.offspring
offspring.each_with_index do |sp, i|
offspring[i] = csvEnumField!(sp, :Species, "Offspring", species.id)
offspring[i] = cast_csv_value(sp, "e", :Species)
end
end
# Enumerate all evolution species and parameters (this couldn't be done earlier)
GameData::Species.each do |species|
FileLineData.setSection(species.id.to_s, "Evolutions", nil) # For error reporting
species.evolutions.each do |evo|
evo[0] = csvEnumField!(evo[0], :Species, "Evolutions", species.id)
evo[0] = cast_csv_value(evo[0], "e", :Species)
param_type = GameData::Evolution.get(evo[1]).parameter
if param_type.nil?
evo[2] = nil
elsif param_type == Integer
evo[2] = csvPosInt!(evo[2])
evo[2] = cast_csv_value(evo[2], "u")
elsif param_type != String
evo[2] = csvEnumField!(evo[2], param_type, "Evolutions", species.id)
evo[2] = cast_csv_value(evo[2], "e", param_type)
end
end
end
@@ -407,7 +401,7 @@ module Compiler
schema.each_key do |key|
FileLineData.setSection(section_name, key, contents[key]) # For error reporting
if key == "SectionName"
data_hash[schema[key][0]] = pbGetCsvRecord(section_name, key, schema[key])
data_hash[schema[key][0]] = get_csv_record(section_name, schema[key])
next
end
# Skip empty properties
@@ -415,14 +409,14 @@ module Compiler
# Compile value for key
if schema[key][1][0] == "^"
contents[key].each do |val|
value = pbGetCsvRecord(val, key, schema[key])
value = get_csv_record(val, schema[key])
value = nil if value.is_a?(Array) && value.empty?
data_hash[schema[key][0]] ||= []
data_hash[schema[key][0]].push(value)
end
data_hash[schema[key][0]].compact!
else
value = pbGetCsvRecord(contents[key], key, schema[key])
value = get_csv_record(contents[key], schema[key])
value = nil if value.is_a?(Array) && value.empty?
data_hash[schema[key][0]] = value
end
@@ -655,7 +649,7 @@ module Compiler
raise _INTL("Expected a species entry line for encounter type {1} for map '{2}', got \"{3}\" instead.\r\n{4}",
GameData::EncounterType.get(current_type).real_name, encounter_hash[:map], line, FileLineData.linereport)
end
values = pbGetCsvRecord(line, line_no, [0, "vevV", nil, :Species])
values = get_csv_record(line, [nil, "vevV", nil, :Species])
values[3] = values[2] if !values[3]
if values[2] > max_level
raise _INTL("Level number {1} is not valid (max. {2}).\r\n{3}", values[2], max_level, FileLineData.linereport)
@@ -800,7 +794,7 @@ module Compiler
data_hash = {
:pbs_file_suffix => file_suffix
}
data_hash[schema["SectionName"][0]] = pbGetCsvRecord(section_name.clone, line_no, schema["SectionName"])
data_hash[schema["SectionName"][0]] = get_csv_record(section_name.clone, schema["SectionName"])
data_hash[schema["Pokemon"][0]] = []
current_pkmn = nil
elsif line[/^\s*(\w+)\s*=\s*(.*)$/]
@@ -810,7 +804,7 @@ module Compiler
end
key = $~[1]
if schema[key] # Property of the trainer
property_value = pbGetCsvRecord($~[2], line_no, schema[key])
property_value = get_csv_record($~[2], schema[key])
if key == "Pokemon"
current_pkmn = {
:species => property_value[0],
@@ -824,7 +818,7 @@ module Compiler
if !current_pkmn
raise _INTL("Pokémon hasn't been defined yet!\r\n{1}", FileLineData.linereport)
end
current_pkmn[sub_schema[key][0]] = pbGetCsvRecord($~[2], line_no, sub_schema[key])
current_pkmn[sub_schema[key][0]] = get_csv_record($~[2], sub_schema[key])
end
end
end
@@ -967,7 +961,7 @@ module Compiler
schema = btTrainersRequiredTypes[key]
next if key == "Challenges" && name == "DefaultTrainerList"
next if !schema
record = pbGetCsvRecord(section[key], 0, schema)
record = get_csv_record(section[key], schema)
rsection[schema[0]] = record
end
if !rsection[0]
@@ -1028,7 +1022,7 @@ module Compiler
FileLineData.setSection(name, key, section[key])
schema = requiredtypes[key]
next if !schema
record = pbGetCsvRecord(section[key], 0, schema)
record = get_csv_record(section[key], schema)
rsection[schema[0]] = record
end
trainernames.push(rsection[1])
@@ -1079,7 +1073,7 @@ module Compiler
schema.each_key do |key|
FileLineData.setSection(section_name, key, contents[key]) # For error reporting
if key == "SectionName"
data_hash[schema[key][0]] = pbGetCsvRecord(section_name, key, schema[key])
data_hash[schema[key][0]] = get_csv_record(section_name, schema[key])
next
end
# Skip empty properties
@@ -1087,14 +1081,14 @@ module Compiler
# Compile value for key
if schema[key][1][0] == "^"
contents[key].each do |val|
value = pbGetCsvRecord(val, key, schema[key])
value = get_csv_record(val, schema[key])
value = nil if value.is_a?(Array) && value.empty?
data_hash[schema[key][0]] ||= []
data_hash[schema[key][0]].push(value)
end
data_hash[schema[key][0]].compact!
else
value = pbGetCsvRecord(contents[key], key, schema[key])
value = get_csv_record(contents[key], schema[key])
value = nil if value.is_a?(Array) && value.empty?
data_hash[schema[key][0]] = value
end
@@ -1230,7 +1224,7 @@ module Compiler
hash[:id] = "default"
hash[:trainer_type] = hash[:id]
else
line_data = pbGetCsvRecord(hash[:id], -1, [0, "esU", :TrainerType])
line_data = get_csv_record(hash[:id], [nil, "esU", :TrainerType])
hash[:trainer_type] = line_data[0]
hash[:real_name] = line_data[1]
hash[:version] = line_data[2] || 0


@@ -4,33 +4,33 @@
Name = Essen
Filename = mapRegion0.png
Point = 13,12,Lappet Town,Oak's Lab,2,8,8
Point = 13,11,Route 1,
Point = 13,11,Route 1
Point = 13,10,Cedolan City,Cedolan Dept. Store,7,47,11
Point = 14,10,Cedolan City,,7,47,11
Point = 14,9,Route 2,
Point = 14,8,Route 2,
Point = 14,9,Route 2
Point = 14,8,Route 2
Point = 15,8,Lerucean Town,,23,11,15
Point = 16,8,Natural Park,
Point = 15,7,Route 3,
Point = 16,8,Natural Park
Point = 15,7,Route 3
Point = 15,6,Route 3,Ice Cave
Point = 14,6,Route 3,
Point = 14,6,Route 3
Point = 13,6,Ingido Plateau,,35,17,7
Point = 12,6,Route 4,
Point = 11,6,Route 4,
Point = 12,6,Route 4
Point = 11,6,Route 4
Point = 11,7,Route 5,Cycle Road
Point = 11,8,Route 5,Cycle Road
Point = 11,9,Route 5,Cycle Road
Point = 11,10,Route 6,
Point = 12,10,Route 6,
Point = 15,10,Route 7,
Point = 11,10,Route 6
Point = 12,10,Route 6
Point = 15,10,Route 7
Point = 16,10,Route 7,Rock Cave
Point = 17,10,Battle Frontier,,52,17,14
Point = 12,12,Safari Zone,
Point = 12,12,Safari Zone
Point = 13,13,Route 8,Diving area
Point = 18,17,Berth Island,,51
Point = 22,16,Faraday Island,,52
Point = 18,17,Berth Island,,,,,51
Point = 22,16,Faraday Island,,,,,52
#-------------------------------
[1]
Name = Tiall
Filename = mapRegion1.png
Point = 13,16,Here,
Point = 13,16,Here