mirror of
https://github.com/mwpenny/portal64-still-alive.git
synced 2024-10-19 22:27:36 -04:00
Generate cutscenes and logic from yaml files instead of blend files
This commit is contained in:
parent
6395378285
commit
445ad90eed
|
@ -10,14 +10,6 @@ local entities = require('tools.level_scripts.entities')
|
|||
local signals = require('tools.level_scripts.signals')
|
||||
local animation = require('tools.level_scripts.animation')
|
||||
local dynamic_collision = require('tools.level_scripts.dynamic_collision_export')
|
||||
local yaml_loader = require('tools.level_scripts.yaml_loader')
|
||||
|
||||
local jsonFile = {
|
||||
operators = signals.operator_json,
|
||||
cutscenes = trigger.cutscene_json,
|
||||
}
|
||||
|
||||
yaml_loader.dump_json(jsonFile)
|
||||
|
||||
sk_definition_writer.add_definition("level", "struct LevelDefinition", "_geo", {
|
||||
collisionQuads = sk_definition_writer.reference_to(collision_export.collision_objects, 1),
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
local sk_definition_writer = require('sk_definition_writer')
|
||||
local sk_scene = require('sk_scene')
|
||||
local yaml_loader = require('tools.level_scripts.yaml_loader')
|
||||
local util = require('tools.level_scripts.util')
|
||||
|
||||
local name_to_index = {}
|
||||
local signal_count = 0
|
||||
|
@ -17,74 +19,6 @@ local function signal_index_for_name(name)
|
|||
return result
|
||||
end
|
||||
|
||||
-- Recursively append operators[operation_index] to `result` in dependency
-- order: every operator that produces one of its input signals is emitted
-- before the operator itself.
-- `used_signals` marks already-emitted operator indices; because it is set
-- before recursing, it also terminates cyclic signal graphs.
-- `signal_producers` maps a signal name to the list of operator indices
-- whose output is that signal.
local function determine_signal_order(operators, result, used_signals, signal_producers, operation_index)
    -- check if the signal has already been added
    if used_signals[operation_index] then
        return
    end

    used_signals[operation_index] = true

    local input_signal = operators[operation_index]

    for _, signal_name in pairs(input_signal.input) do
        for _, producer_index in pairs(signal_producers[signal_name] or {}) do
            -- BUG FIX: recurse into the producer of this input signal.
            -- The original recursed with `operation_index`, which the guard
            -- above immediately rejects, so producers were never ordered
            -- ahead of their consumers.
            determine_signal_order(operators, result, used_signals, signal_producers, producer_index)
        end
    end

    table.insert(result, input_signal)
end
|
||||
|
||||
-- Topologically sort `operators` so every operator that produces a signal
-- appears before any operator that consumes it.
local function order_signals(operators)
    -- map each output signal name to every operator index producing it
    local signal_producers = {}

    for index, operation in pairs(operators) do
        local producers = signal_producers[operation.output]
        if producers then
            table.insert(producers, index)
        else
            signal_producers[operation.output] = {index}
        end
    end

    local ordered = {}
    local visited = {}

    for operation_index = 1, #operators do
        determine_signal_order(operators, ordered, visited, signal_producers, operation_index)
    end

    return ordered
end
|
||||
|
||||
local unordered_operators = {}
|
||||
|
||||
for _, and_operator in pairs(sk_scene.nodes_for_type('@and')) do
|
||||
table.insert(unordered_operators, {
|
||||
type = 'SignalOperatorTypeAnd',
|
||||
output = and_operator.arguments[1],
|
||||
input = {table.unpack(and_operator.arguments, 2)},
|
||||
})
|
||||
end
|
||||
|
||||
for _, or_operator in pairs(sk_scene.nodes_for_type('@or')) do
|
||||
table.insert(unordered_operators, {
|
||||
type = 'SignalOperatorTypeOr',
|
||||
output = or_operator.arguments[1],
|
||||
input = {table.unpack(or_operator.arguments, 2)},
|
||||
})
|
||||
end
|
||||
|
||||
for _, not_operator in pairs(sk_scene.nodes_for_type('@not')) do
|
||||
table.insert(unordered_operators, {
|
||||
type = 'SignalOperatorTypeNot',
|
||||
output = not_operator.arguments[1],
|
||||
input = {not_operator.arguments[2]},
|
||||
})
|
||||
end
|
||||
|
||||
local ordered_operators = order_signals(unordered_operators)
|
||||
|
||||
local function generate_operator_data(operator)
|
||||
return {
|
||||
sk_definition_writer.raw(operator.type),
|
||||
|
@ -102,28 +36,48 @@ local function generate_operator_data(operator)
|
|||
}
|
||||
end
|
||||
|
||||
-- Parse a textual signal operator into an operator table
-- {type = ..., output = ..., input = {...}}.
-- Accepted forms (exactly one '='):
--   out = not a
--   out = a and b
--   out = a or b
-- Raises an error for any other shape.
local function parse_operation(str_value)
    -- FIX: renamed from `pairs` to `parts` — the original local shadowed
    -- the Lua built-in `pairs`, a latent hazard for later edits.
    local parts = util.string_split(str_value, '=')

    if #parts ~= 2 then
        error('operators need to have a single equal sign')
    end

    local output = util.trim(parts[1])

    local inputs = util.string_split(util.trim(parts[2]), ' ')

    if inputs[1] == 'not' and #inputs == 2 then
        return {
            type = 'SignalOperatorTypeNot',
            output = output,
            input = {inputs[2]},
        }
    end

    if inputs[2] == 'and' and #inputs == 3 then
        return {
            type = 'SignalOperatorTypeAnd',
            output = output,
            input = {inputs[1], inputs[3]},
        }
    end

    if inputs[2] == 'or' and #inputs == 3 then
        return {
            type = 'SignalOperatorTypeOr',
            output = output,
            input = {inputs[1], inputs[3]},
        }
    end

    error('operator must be of the form not a, a and b, a or b')
end
|
||||
|
||||
local operators = {}
|
||||
|
||||
local operator_json = {}
|
||||
|
||||
for _, operation in pairs(ordered_operators) do
|
||||
table.insert(operators, generate_operator_data(operation))
|
||||
|
||||
table.insert(operator_json, format_operation(operation))
|
||||
for _, operation in pairs(yaml_loader.json_contents.operators or {}) do
|
||||
table.insert(operators, generate_operator_data(parse_operation(operation)))
|
||||
end
|
||||
|
||||
sk_definition_writer.add_definition('signal_operations', 'struct SignalOperator[]', '_geo', operators)
|
||||
|
@ -131,5 +85,4 @@ sk_definition_writer.add_definition('signal_operations', 'struct SignalOperator[
|
|||
return {
|
||||
signal_index_for_name = signal_index_for_name,
|
||||
operators = operators,
|
||||
operator_json = operator_json,
|
||||
}
|
|
@ -4,23 +4,11 @@ local sk_scene = require('sk_scene')
|
|||
local room_export = require('tools.level_scripts.room_export')
|
||||
local signals = require('tools.level_scripts.signals')
|
||||
local animation = require('tools.level_scripts.animation')
|
||||
local yaml_loader = require('tools.level_scripts.yaml_loader')
|
||||
local util = require('tools.level_scripts.util')
|
||||
|
||||
sk_definition_writer.add_header('"../build/src/audio/clips.h"')
|
||||
|
||||
|
||||
-- A step belongs to a cutscene when, expressed in the local frame of the
-- cutscene's first step, it lies on the non-negative z axis within a small
-- radial tolerance.
local function does_belong_to_cutscene(first_step, step)
    local relative = step.position - first_step.position
    local local_pos = first_step.rotation * relative

    local radial_sq = local_pos.x * local_pos.x + local_pos.y * local_pos.y
    return local_pos.z >= 0 and radial_sq < 0.1
end
|
||||
|
||||
-- Distance of `step` along the forward (local z) axis of `first_step`.
local function distance_from_start(first_step, step)
    local relative = step.position - first_step.position
    local local_pos = first_step.rotation * relative
    return local_pos.z
end
|
||||
|
||||
local function cutscene_index(cutscenes, name)
|
||||
for _, cutscene in pairs(cutscenes) do
|
||||
if cutscene.name == name then
|
||||
|
@ -247,62 +235,31 @@ local function generate_cutscene_step(cutscene_name, step, step_index, label_loc
|
|||
end
|
||||
|
||||
local function generate_cutscenes()
|
||||
local step_nodes = sk_scene.nodes_for_type("@cutscene")
|
||||
|
||||
local steps = {}
|
||||
local cutscenes = {}
|
||||
|
||||
for _, node_info in pairs(step_nodes) do
|
||||
if #node_info.arguments > 0 then
|
||||
local command = node_info.arguments[1]
|
||||
local args = {table.unpack(node_info.arguments, 2)}
|
||||
|
||||
local position, rotation, scale = node_info.node.transformation:decompose()
|
||||
|
||||
local step = {
|
||||
command = command,
|
||||
args = args,
|
||||
position = position,
|
||||
rotation = rotation:conjugate(),
|
||||
}
|
||||
|
||||
if command == "start" and #args >= 1 then
|
||||
table.insert(cutscenes, {
|
||||
name = args[1],
|
||||
steps = {step},
|
||||
macro = sk_definition_writer.raw(sk_definition_writer.add_macro("CUTSCENE_" .. args[1], #cutscenes)),
|
||||
})
|
||||
else
|
||||
table.insert(steps, step)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
for _, cutscene in pairs(cutscenes) do
|
||||
for _, step in pairs(steps) do
|
||||
if does_belong_to_cutscene(cutscene.steps[1], step) then
|
||||
table.insert(cutscene.steps, step)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
local cutscenes_result = {}
|
||||
local cutscene_data = {}
|
||||
|
||||
local cutscene_json = {}
|
||||
local cutscene_json = yaml_loader.json_contents.cutscenes or {}
|
||||
|
||||
local cutscenes = {}
|
||||
|
||||
for cutscene_name, cutscene_steps in pairs(cutscene_json) do
|
||||
table.insert(cutscenes, {
|
||||
name = cutscene_name,
|
||||
steps = cutscene_steps,
|
||||
macro = sk_definition_writer.raw(sk_definition_writer.add_macro("CUTSCENE_" .. cutscene_name, #cutscenes)),
|
||||
})
|
||||
end
|
||||
|
||||
for _, cutscene in pairs(cutscenes) do
|
||||
local first_step = cutscene.steps[1]
|
||||
local other_steps = {table.unpack(cutscene.steps, 2)}
|
||||
local other_steps = {}
|
||||
|
||||
table.sort(other_steps, function(a, b)
|
||||
return distance_from_start(first_step, a) < distance_from_start(first_step, b)
|
||||
end)
|
||||
for _, step_string in pairs(cutscene.steps) do
|
||||
local args = util.string_split(step_string, ' ')
|
||||
|
||||
local string_steps = {}
|
||||
|
||||
for step_index, step in pairs(other_steps) do
|
||||
table.insert(string_steps, step.command .. ' ' .. table.concat(step.args, ' '))
|
||||
table.insert(other_steps, {
|
||||
command = args[1],
|
||||
args = {table.unpack(args, 2)},
|
||||
})
|
||||
end
|
||||
|
||||
local label_locations = find_label_locations(other_steps)
|
||||
|
@ -313,23 +270,21 @@ local function generate_cutscenes()
|
|||
table.insert(steps, generate_cutscene_step(cutscene.name, step, step_index, label_locations, cutscenes))
|
||||
end
|
||||
|
||||
cutscene_json[cutscene.name] = string_steps
|
||||
|
||||
sk_definition_writer.add_definition(cutscene.name .. '_steps', 'struct CutsceneStep[]', '_geo', steps)
|
||||
|
||||
table.insert(cutscenes_result, {
|
||||
name = cutscene.name,
|
||||
steps = steps,
|
||||
macro = cutscene.macro,
|
||||
macro = sk_definition_writer.raw(sk_definition_writer.add_macro("CUTSCENE_" .. cutscene.name, #cutscenes_result)),
|
||||
})
|
||||
|
||||
table.insert(cutscene_data, {
|
||||
sk_definition_writer.reference_to(steps, 1),
|
||||
#cutscene.steps - 1,
|
||||
#steps,
|
||||
})
|
||||
end
|
||||
|
||||
return cutscenes_result, cutscene_data, cutscene_json
|
||||
return cutscenes_result, cutscene_data
|
||||
end
|
||||
|
||||
local function generate_triggers(cutscenes)
|
||||
|
@ -352,7 +307,7 @@ local function generate_triggers(cutscenes)
|
|||
|
||||
return result
|
||||
end
|
||||
local cutscenes, cutscene_data, cutscene_json = generate_cutscenes()
|
||||
local cutscenes, cutscene_data = generate_cutscenes()
|
||||
local triggers = generate_triggers(cutscenes)
|
||||
|
||||
sk_definition_writer.add_definition("triggers", "struct Trigger[]", "_geo", triggers)
|
||||
|
@ -365,7 +320,6 @@ end
|
|||
return {
|
||||
triggers = triggers,
|
||||
cutscene_data = cutscene_data,
|
||||
cutscene_json = cutscene_json,
|
||||
location_data = location_data,
|
||||
find_location_index = find_location_index,
|
||||
find_cutscene_index = find_cutscene_index,
|
||||
|
|
35
tools/level_scripts/util.lua
Normal file
35
tools/level_scripts/util.lua
Normal file
|
@ -0,0 +1,35 @@
|
|||
|
||||
-- Split `inputstr` on the separator character class `sep` (the body of a
-- Lua pattern set; defaults to whitespace, "%s"). Runs of separators act
-- as a single delimiter, so no empty fields are produced.
local function string_split(inputstr, sep)
    local separator = sep
    if separator == nil then
        separator = "%s"
    end

    local pieces = {}
    local pattern = "([^" .. separator .. "]+)"

    for piece in string.gmatch(inputstr, pattern) do
        pieces[#pieces + 1] = piece
    end

    return pieces
end
|
||||
|
||||
-- Strip leading and trailing whitespace from `inputstr`.
-- Generalized from the original space-only character loop to all
-- whitespace (%s — spaces, tabs, newlines), matching string_split's
-- default separator class, and rewritten as a single anchored pattern.
local function trim(inputstr)
    -- lazy capture between anchored whitespace runs; yields '' for
    -- empty or all-whitespace input
    return string.match(inputstr, "^%s*(.-)%s*$")
end
|
||||
|
||||
return {
|
||||
string_split = string_split,
|
||||
trim = trim,
|
||||
}
|
|
@ -7,17 +7,6 @@ local input_file = io.open(file_location, 'r')
|
|||
local json_contents = yaml.parse(input_file:read('a'))
|
||||
input_file:close()
|
||||
|
||||
-- Serialize `value` with yaml.stringify and write it back to the file at
-- `file_location` (module upvalue), replacing its previous contents.
local function dump_json(value)
    local file_contents = yaml.stringify(value)
    -- echo the generated contents and target path; presumably build-log
    -- output — NOTE(review): confirm these prints are intentional
    print(file_contents)
    print(file_location)

    -- FIX: io.open returns nil, err on failure; previously an unreadable
    -- path crashed later on file:write with an opaque message. assert
    -- surfaces the open error immediately.
    local file = assert(io.open(file_location, 'w'))
    file:write(file_contents)
    file:close()
end
|
||||
|
||||
return {
|
||||
dump_json = dump_json,
|
||||
json_contents = json_contents,
|
||||
}
|
Loading…
Reference in a new issue