fixed crash loading quake4 1.3 entity definitions

git-svn-id: svn://svn.icculus.org/gtkradiant/GtkRadiant/trunk@100 8a3a26a2-13c4-0310-b231-cf6edde360e5
This commit is contained in:
spog 2006-08-22 17:37:07 +00:00
parent 3e076b28de
commit 3f1cbdde23
3 changed files with 223 additions and 106 deletions

10
CHANGES
View File

@ -1,6 +1,16 @@
This is the changelog for developers, != changelog for the end user
that we distribute with the binaries. (see changelog)
22/08/2006
SPoG
- Added VFS support for locating the archive a file was loaded from.
- Changed Doom3 entity definition parser to gracefully handle parse errors.
- Fixed crash when loading entity definitions in quake4 1.3 point release.
13/08/2006
SPoG
- Disabled 'detail' content flag checkbox in quake2 Surface Inspector.
22/07/2006
SPoG
- Fixed doom3 func_static with model not appearing to move when dragged.

View File

@ -57,6 +57,7 @@ ArchiveModules& FileSystemQ3API_getArchiveModules();
#include "generic/callback.h" #include "generic/callback.h"
#include "string/string.h" #include "string/string.h"
#include "container/array.h"
#include "stream/stringstream.h" #include "stream/stringstream.h"
#include "os/path.h" #include "os/path.h"
#include "moduleobservers.h" #include "moduleobservers.h"
@ -141,7 +142,14 @@ static void InitPakFile (ArchiveModules& archiveModules, const char *filename)
if(table != 0) if(table != 0)
{ {
archive_entry_t entry; archive_entry_t entry;
entry.name = filename;
std::size_t length = string_length(filename);
Array<char> tmp(length + 2);
std::copy(filename, filename + length, tmp.begin());
tmp[length] = ':';
tmp[length + 1] = '\0';
entry.name = tmp.begin();
entry.archive = table->m_pfnOpenArchive(filename); entry.archive = table->m_pfnOpenArchive(filename);
entry.is_pakfile = true; entry.is_pakfile = true;
g_archives.push_back(entry); g_archives.push_back(entry);
@ -520,7 +528,7 @@ const char* FindFile(const char* relative)
{ {
for(archives_t::iterator i = g_archives.begin(); i != g_archives.end(); ++i) for(archives_t::iterator i = g_archives.begin(); i != g_archives.end(); ++i)
{ {
if(!(*i).is_pakfile && (*i).archive->containsFile(relative)) if((*i).archive->containsFile(relative))
{ {
return (*i).name.c_str(); return (*i).name.c_str();
} }
@ -533,7 +541,7 @@ const char* FindPath(const char* absolute)
{ {
for(archives_t::iterator i = g_archives.begin(); i != g_archives.end(); ++i) for(archives_t::iterator i = g_archives.begin(); i != g_archives.end(); ++i)
{ {
if(!(*i).is_pakfile && path_equal_n(absolute, (*i).name.c_str(), string_length((*i).name.c_str()))) if(path_equal_n(absolute, (*i).name.c_str(), string_length((*i).name.c_str())))
{ {
return (*i).name.c_str(); return (*i).name.c_str();
} }

View File

@ -86,21 +86,66 @@ void EntityClassDoom3_forEach(EntityClassVisitor& visitor)
} }
} }
void EntityClassDoom3_parseUnknown(Tokeniser& tokeniser) inline void printParseError(const char* message)
{
globalErrorStream() << message;
}
#define PARSE_RETURN_FALSE_IF_FAIL(expression) if(!(expression)) { printParseError(FILE_LINE "\nparse failed: " #expression "\n"); return false; } else
bool EntityClassDoom3_parseToken(Tokeniser& tokeniser)
{
const char* token = tokeniser.getToken();
PARSE_RETURN_FALSE_IF_FAIL(token != 0);
return true;
}
bool EntityClassDoom3_parseToken(Tokeniser& tokeniser, const char* string)
{
const char* token = tokeniser.getToken();
PARSE_RETURN_FALSE_IF_FAIL(token != 0);
return string_equal(token, string);
}
bool EntityClassDoom3_parseString(Tokeniser& tokeniser, const char*& s)
{
const char* token = tokeniser.getToken();
PARSE_RETURN_FALSE_IF_FAIL(token != 0);
s = token;
return true;
}
bool EntityClassDoom3_parseString(Tokeniser& tokeniser, CopiedString& s)
{
const char* token = tokeniser.getToken();
PARSE_RETURN_FALSE_IF_FAIL(token != 0);
s = token;
return true;
}
bool EntityClassDoom3_parseString(Tokeniser& tokeniser, StringOutputStream& s)
{
const char* token = tokeniser.getToken();
PARSE_RETURN_FALSE_IF_FAIL(token != 0);
s << token;
return true;
}
bool EntityClassDoom3_parseUnknown(Tokeniser& tokeniser)
{ {
//const char* name = //const char* name =
tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseToken(tokeniser));
//globalOutputStream() << "parsing unknown block " << makeQuoted(name) << "\n"; //globalOutputStream() << "parsing unknown block " << makeQuoted(name) << "\n";
const char* token = tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseToken(tokeniser, "{"));
ASSERT_MESSAGE(string_equal(token, "{"), "error parsing entity definition");
tokeniser.nextLine(); tokeniser.nextLine();
std::size_t depth = 1; std::size_t depth = 1;
for(;;) for(;;)
{ {
const char* token = tokeniser.getToken(); const char* token;
PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, token));
if(string_equal(token, "}")) if(string_equal(token, "}"))
{ {
if(--depth == 0) if(--depth == 0)
@ -115,6 +160,7 @@ void EntityClassDoom3_parseUnknown(Tokeniser& tokeniser)
} }
tokeniser.nextLine(); tokeniser.nextLine();
} }
return true;
} }
@ -159,19 +205,20 @@ void Model_resolveInheritance(const char* name, Model& model)
} }
} }
void EntityClassDoom3_parseModel(Tokeniser& tokeniser) bool EntityClassDoom3_parseModel(Tokeniser& tokeniser)
{ {
const char* name = tokeniser.getToken(); const char* name;
PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, name));
Model& model = g_models[name]; Model& model = g_models[name];
const char* token = tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseToken(tokeniser, "{"));
ASSERT_MESSAGE(string_equal(token, "{"), "error parsing model definition");
tokeniser.nextLine(); tokeniser.nextLine();
for(;;) for(;;)
{ {
const char* parameter = tokeniser.getToken(); const char* parameter;
PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, parameter));
if(string_equal(parameter, "}")) if(string_equal(parameter, "}"))
{ {
tokeniser.nextLine(); tokeniser.nextLine();
@ -179,38 +226,43 @@ void EntityClassDoom3_parseModel(Tokeniser& tokeniser)
} }
else if(string_equal(parameter, "inherit")) else if(string_equal(parameter, "inherit"))
{ {
model.m_parent = tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, model.m_parent));
tokeniser.nextLine();
} }
else if(string_equal(parameter, "remove")) else if(string_equal(parameter, "remove"))
{ {
//const char* remove = //const char* remove =
tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseToken(tokeniser));
tokeniser.nextLine();
} }
else if(string_equal(parameter, "mesh")) else if(string_equal(parameter, "mesh"))
{ {
model.m_mesh = tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, model.m_mesh));
tokeniser.nextLine();
} }
else if(string_equal(parameter, "skin")) else if(string_equal(parameter, "skin"))
{ {
model.m_skin = tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, model.m_skin));
tokeniser.nextLine();
} }
else if(string_equal(parameter, "offset")) else if(string_equal(parameter, "offset"))
{ {
tokeniser.getToken(); // ( PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseToken(tokeniser, "("));
tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseToken(tokeniser));
tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseToken(tokeniser));
tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseToken(tokeniser));
tokeniser.getToken(); // ) PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseToken(tokeniser, ")"));
tokeniser.nextLine(); tokeniser.nextLine();
} }
else if(string_equal(parameter, "channel")) else if(string_equal(parameter, "channel"))
{ {
//const char* channelName = //const char* channelName =
tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseToken(tokeniser));
tokeniser.getToken(); // ( PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseToken(tokeniser, "("));
for(;;) for(;;)
{ {
const char* end = tokeniser.getToken(); const char* end;
PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, end));
if(string_equal(end, ")")) if(string_equal(end, ")"))
{ {
tokeniser.nextLine(); tokeniser.nextLine();
@ -220,23 +272,27 @@ void EntityClassDoom3_parseModel(Tokeniser& tokeniser)
} }
else if(string_equal(parameter, "anim")) else if(string_equal(parameter, "anim"))
{ {
CopiedString animName(tokeniser.getToken()); CopiedString animName;
const char* animFile = tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, animName));
const char* animFile;
PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, animFile));
model.m_anims.insert(Model::Anims::value_type(animName, animFile)); model.m_anims.insert(Model::Anims::value_type(animName, animFile));
const char* token = tokeniser.getToken(); const char* token;
PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, token));
while(string_equal(token, ",")) while(string_equal(token, ","))
{ {
animFile = tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, animFile));
token = tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, token));
} }
if(string_equal(token, "{")) if(string_equal(token, "{"))
{ {
for(;;) for(;;)
{ {
const char* end = tokeniser.getToken(); const char* end;
PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, end));
if(string_equal(end, "}")) if(string_equal(end, "}"))
{ {
tokeniser.nextLine(); tokeniser.nextLine();
@ -252,10 +308,12 @@ void EntityClassDoom3_parseModel(Tokeniser& tokeniser)
} }
else else
{ {
ERROR_MESSAGE("unknown model parameter: " << makeQuoted(parameter)); globalErrorStream() << "unknown model parameter: " << makeQuoted(parameter) << "\n";
return false;
} }
tokeniser.nextLine(); tokeniser.nextLine();
} }
return true;
} }
inline bool char_isSpaceOrTab(char c) inline bool char_isSpaceOrTab(char c)
@ -292,15 +350,11 @@ inline const char* string_findFirstNonSpaceOrTab(const char* string)
} }
void EntityClassDoom3_parseEntityDef(Tokeniser& tokeniser) static bool EntityClass_parse(EntityClass& entityClass, Tokeniser& tokeniser)
{ {
EntityClass* entityClass = Eclass_Alloc(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, entityClass.m_name));
entityClass->free = &Eclass_Free;
entityClass->m_name = tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseToken(tokeniser, "{"));
const char* token = tokeniser.getToken();
ASSERT_MESSAGE(string_equal(token, "{"), "error parsing entity definition");
tokeniser.nextLine(); tokeniser.nextLine();
StringOutputStream usage(256); StringOutputStream usage(256);
@ -310,7 +364,8 @@ void EntityClassDoom3_parseEntityDef(Tokeniser& tokeniser)
for(;;) for(;;)
{ {
const char* key = tokeniser.getToken(); const char* key;
PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, key));
const char* last = string_findFirstSpaceOrTab(key); const char* last = string_findFirstSpaceOrTab(key);
CopiedString first(StringRange(key, last)); CopiedString first(StringRange(key, last));
@ -323,7 +378,8 @@ void EntityClassDoom3_parseEntityDef(Tokeniser& tokeniser)
if(currentString != 0 && string_equal(key, "\\")) if(currentString != 0 && string_equal(key, "\\"))
{ {
tokeniser.nextLine(); tokeniser.nextLine();
*currentString << " " << tokeniser.getToken(); *currentString << " ";
PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, *currentString));
continue; continue;
} }
@ -342,58 +398,62 @@ void EntityClassDoom3_parseEntityDef(Tokeniser& tokeniser)
} }
else if(string_equal(key, "model")) else if(string_equal(key, "model"))
{ {
entityClass->fixedsize = true; const char* token;
PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, token));
entityClass.fixedsize = true;
StringOutputStream buffer(256); StringOutputStream buffer(256);
buffer << PathCleaned(tokeniser.getToken()); buffer << PathCleaned(token);
entityClass->m_modelpath = buffer.c_str(); entityClass.m_modelpath = buffer.c_str();
} }
else if(string_equal(key, "editor_color")) else if(string_equal(key, "editor_color"))
{ {
const char* value = tokeniser.getToken(); const char* value;
PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, value));
if(!string_empty(value)) if(!string_empty(value))
{ {
entityClass->colorSpecified = true; entityClass.colorSpecified = true;
bool success = string_parse_vector3(value, entityClass->color); bool success = string_parse_vector3(value, entityClass.color);
ASSERT_MESSAGE(success, "editor_color: parse error"); ASSERT_MESSAGE(success, "editor_color: parse error");
} }
} }
else if(string_equal(key, "editor_ragdoll")) else if(string_equal(key, "editor_ragdoll"))
{ {
//bool ragdoll = atoi(tokeniser.getToken()) != 0; //bool ragdoll = atoi(tokeniser.getToken()) != 0;
tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseToken(tokeniser));
} }
else if(string_equal(key, "editor_mins")) else if(string_equal(key, "editor_mins"))
{ {
entityClass->sizeSpecified = true; entityClass.sizeSpecified = true;
const char* value = tokeniser.getToken(); const char* value;
PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, value));
if(!string_empty(value) && !string_equal(value, "?")) if(!string_empty(value) && !string_equal(value, "?"))
{ {
entityClass->fixedsize = true; entityClass.fixedsize = true;
bool success = string_parse_vector3(value, entityClass->mins); bool success = string_parse_vector3(value, entityClass.mins);
ASSERT_MESSAGE(success, "editor_mins: parse error"); ASSERT_MESSAGE(success, "editor_mins: parse error");
} }
} }
else if(string_equal(key, "editor_maxs")) else if(string_equal(key, "editor_maxs"))
{ {
entityClass->sizeSpecified = true; entityClass.sizeSpecified = true;
const char* value = tokeniser.getToken(); const char* value;
PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, value));
if(!string_empty(value) && !string_equal(value, "?")) if(!string_empty(value) && !string_equal(value, "?"))
{ {
entityClass->fixedsize = true; entityClass.fixedsize = true;
bool success = string_parse_vector3(value, entityClass->maxs); bool success = string_parse_vector3(value, entityClass.maxs);
ASSERT_MESSAGE(success, "editor_maxs: parse error"); ASSERT_MESSAGE(success, "editor_maxs: parse error");
} }
} }
else if(string_equal(key, "editor_usage")) else if(string_equal(key, "editor_usage"))
{ {
const char* value = tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, usage));
usage << value;
currentString = &usage; currentString = &usage;
} }
else if(string_equal_n(key, "editor_usage", 12)) else if(string_equal_n(key, "editor_usage", 12))
{ {
const char* value = tokeniser.getToken(); usage << "\n";
usage << "\n" << value; PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, usage));
currentString = &usage; currentString = &usage;
} }
else if(string_equal(key, "editor_rotatable") else if(string_equal(key, "editor_rotatable")
@ -405,139 +465,167 @@ void EntityClassDoom3_parseEntityDef(Tokeniser& tokeniser)
|| (!string_empty(last) && string_equal(first.c_str(), "editor_gui")) || (!string_empty(last) && string_equal(first.c_str(), "editor_gui"))
|| string_equal_n(key, "editor_copy", 11)) || string_equal_n(key, "editor_copy", 11))
{ {
tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseToken(tokeniser));
} }
else if(!string_empty(last) && (string_equal(first.c_str(), "editor_var") || string_equal(first.c_str(), "editor_string"))) else if(!string_empty(last) && (string_equal(first.c_str(), "editor_var") || string_equal(first.c_str(), "editor_string")))
{ {
EntityClassAttribute& attribute = EntityClass_insertAttribute(*entityClass, last).second; EntityClassAttribute& attribute = EntityClass_insertAttribute(entityClass, last).second;
attribute.m_type = "string"; attribute.m_type = "string";
currentDescription = &attribute.m_description; currentDescription = &attribute.m_description;
currentString = &description; currentString = &description;
description << tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, description));
} }
else if(!string_empty(last) && string_equal(first.c_str(), "editor_float")) else if(!string_empty(last) && string_equal(first.c_str(), "editor_float"))
{ {
EntityClassAttribute& attribute = EntityClass_insertAttribute(*entityClass, last).second; EntityClassAttribute& attribute = EntityClass_insertAttribute(entityClass, last).second;
attribute.m_type = "string"; attribute.m_type = "string";
currentDescription = &attribute.m_description; currentDescription = &attribute.m_description;
currentString = &description; currentString = &description;
description << tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, description));
} }
else if(!string_empty(last) && string_equal(first.c_str(), "editor_snd")) else if(!string_empty(last) && string_equal(first.c_str(), "editor_snd"))
{ {
EntityClassAttribute& attribute = EntityClass_insertAttribute(*entityClass, last).second; EntityClassAttribute& attribute = EntityClass_insertAttribute(entityClass, last).second;
attribute.m_type = "sound"; attribute.m_type = "sound";
currentDescription = &attribute.m_description; currentDescription = &attribute.m_description;
currentString = &description; currentString = &description;
description << tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, description));
} }
else if(!string_empty(last) && string_equal(first.c_str(), "editor_bool")) else if(!string_empty(last) && string_equal(first.c_str(), "editor_bool"))
{ {
EntityClassAttribute& attribute = EntityClass_insertAttribute(*entityClass, last).second; EntityClassAttribute& attribute = EntityClass_insertAttribute(entityClass, last).second;
attribute.m_type = "boolean"; attribute.m_type = "boolean";
currentDescription = &attribute.m_description; currentDescription = &attribute.m_description;
currentString = &description; currentString = &description;
description << tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, description));
} }
else if(!string_empty(last) && string_equal(first.c_str(), "editor_int")) else if(!string_empty(last) && string_equal(first.c_str(), "editor_int"))
{ {
EntityClassAttribute& attribute = EntityClass_insertAttribute(*entityClass, last).second; EntityClassAttribute& attribute = EntityClass_insertAttribute(entityClass, last).second;
attribute.m_type = "integer"; attribute.m_type = "integer";
currentDescription = &attribute.m_description; currentDescription = &attribute.m_description;
currentString = &description; currentString = &description;
description << tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, description));
} }
else if(!string_empty(last) && string_equal(first.c_str(), "editor_model")) else if(!string_empty(last) && string_equal(first.c_str(), "editor_model"))
{ {
EntityClassAttribute& attribute = EntityClass_insertAttribute(*entityClass, last).second; EntityClassAttribute& attribute = EntityClass_insertAttribute(entityClass, last).second;
attribute.m_type = "model"; attribute.m_type = "model";
currentDescription = &attribute.m_description; currentDescription = &attribute.m_description;
currentString = &description; currentString = &description;
description << tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, description));
} }
else if(!string_empty(last) && string_equal(first.c_str(), "editor_color")) else if(!string_empty(last) && string_equal(first.c_str(), "editor_color"))
{ {
EntityClassAttribute& attribute = EntityClass_insertAttribute(*entityClass, last).second; EntityClassAttribute& attribute = EntityClass_insertAttribute(entityClass, last).second;
attribute.m_type = "color"; attribute.m_type = "color";
currentDescription = &attribute.m_description; currentDescription = &attribute.m_description;
currentString = &description; currentString = &description;
description << tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, description));
} }
else if(!string_empty(last) && (string_equal(first.c_str(), "editor_material") || string_equal(first.c_str(), "editor_mat"))) else if(!string_empty(last) && (string_equal(first.c_str(), "editor_material") || string_equal(first.c_str(), "editor_mat")))
{ {
EntityClassAttribute& attribute = EntityClass_insertAttribute(*entityClass, last).second; EntityClassAttribute& attribute = EntityClass_insertAttribute(entityClass, last).second;
attribute.m_type = "shader"; attribute.m_type = "shader";
currentDescription = &attribute.m_description; currentDescription = &attribute.m_description;
currentString = &description; currentString = &description;
description << tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, description));
} }
else if(string_equal(key, "inherit")) else if(string_equal(key, "inherit"))
{ {
entityClass->inheritanceResolved = false; entityClass.inheritanceResolved = false;
ASSERT_MESSAGE(entityClass->m_parent.empty(), "only one 'inherit' supported per entityDef"); ASSERT_MESSAGE(entityClass.m_parent.empty(), "only one 'inherit' supported per entityDef");
entityClass->m_parent.push_back(tokeniser.getToken()); const char* token;
PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, token));
entityClass.m_parent.push_back(token);
} }
// begin quake4-specific keys // begin quake4-specific keys
else if(string_equal(key, "editor_targetonsel")) else if(string_equal(key, "editor_targetonsel"))
{ {
//const char* value = //const char* value =
tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseToken(tokeniser));
} }
else if(string_equal(key, "editor_menu")) else if(string_equal(key, "editor_menu"))
{ {
//const char* value = //const char* value =
tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseToken(tokeniser));
} }
else if(string_equal(key, "editor_ignore")) else if(string_equal(key, "editor_ignore"))
{ {
//const char* value = //const char* value =
tokeniser.getToken(); PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseToken(tokeniser));
} }
// end quake4-specific keys // end quake4-specific keys
else else
{ {
CopiedString tmp(key);
ASSERT_MESSAGE(!string_equal_n(key, "editor_", 7), "unsupported editor key: " << makeQuoted(key)); ASSERT_MESSAGE(!string_equal_n(key, "editor_", 7), "unsupported editor key: " << makeQuoted(key));
EntityClassAttribute& attribute = EntityClass_insertAttribute(*entityClass, key).second; EntityClassAttribute& attribute = EntityClass_insertAttribute(entityClass, key).second;
attribute.m_type = "string"; attribute.m_type = "string";
attribute.m_value = tokeniser.getToken(); const char* value;
PARSE_RETURN_FALSE_IF_FAIL(EntityClassDoom3_parseString(tokeniser, value));
if(string_equal(value, "}")) // hack for quake4 powerups.def bug
{
globalErrorStream() << "entityDef " << makeQuoted(entityClass.m_name.c_str()) << " key " << makeQuoted(tmp.c_str()) << " has no value\n";
break;
}
else
{
attribute.m_value = value;
}
} }
tokeniser.nextLine(); tokeniser.nextLine();
} }
entityClass->m_comments = usage.c_str(); entityClass.m_comments = usage.c_str();
if(string_equal(entityClass->m_name.c_str(), "light")) if(string_equal(entityClass.m_name.c_str(), "light"))
{ {
{ {
EntityClassAttribute& attribute = EntityClass_insertAttribute(*entityClass, "light_radius").second; EntityClassAttribute& attribute = EntityClass_insertAttribute(entityClass, "light_radius").second;
attribute.m_type = "vector3"; attribute.m_type = "vector3";
attribute.m_value = "300 300 300"; attribute.m_value = "300 300 300";
} }
{ {
EntityClassAttribute& attribute = EntityClass_insertAttribute(*entityClass, "light_center").second; EntityClassAttribute& attribute = EntityClass_insertAttribute(entityClass, "light_center").second;
attribute.m_type = "vector3"; attribute.m_type = "vector3";
} }
{ {
EntityClassAttribute& attribute = EntityClass_insertAttribute(*entityClass, "noshadows").second; EntityClassAttribute& attribute = EntityClass_insertAttribute(entityClass, "noshadows").second;
attribute.m_type = "boolean"; attribute.m_type = "boolean";
attribute.m_value = "0"; attribute.m_value = "0";
} }
{ {
EntityClassAttribute& attribute = EntityClass_insertAttribute(*entityClass, "nospecular").second; EntityClassAttribute& attribute = EntityClass_insertAttribute(entityClass, "nospecular").second;
attribute.m_type = "boolean"; attribute.m_type = "boolean";
attribute.m_value = "0"; attribute.m_value = "0";
} }
{ {
EntityClassAttribute& attribute = EntityClass_insertAttribute(*entityClass, "nodiffuse").second; EntityClassAttribute& attribute = EntityClass_insertAttribute(entityClass, "nodiffuse").second;
attribute.m_type = "boolean"; attribute.m_type = "boolean";
attribute.m_value = "0"; attribute.m_value = "0";
} }
{ {
EntityClassAttribute& attribute = EntityClass_insertAttribute(*entityClass, "falloff").second; EntityClassAttribute& attribute = EntityClass_insertAttribute(entityClass, "falloff").second;
attribute.m_type = "real"; attribute.m_type = "real";
} }
} }
return true;
}
bool EntityClassDoom3_parseEntityDef(Tokeniser& tokeniser)
{
EntityClass* entityClass = Eclass_Alloc();
entityClass->free = &Eclass_Free;
if(!EntityClass_parse(*entityClass, tokeniser))
{
eclass_capture_state(entityClass); // finish constructing the entity so that it can be destroyed cleanly.
entityClass->free(entityClass);
return false;
}
EntityClass* inserted = EntityClassDoom3_insertUnique(entityClass); EntityClass* inserted = EntityClassDoom3_insertUnique(entityClass);
if(inserted != entityClass) if(inserted != entityClass)
{ {
@ -545,9 +633,26 @@ void EntityClassDoom3_parseEntityDef(Tokeniser& tokeniser)
eclass_capture_state(entityClass); // finish constructing the entity so that it can be destroyed cleanly. eclass_capture_state(entityClass); // finish constructing the entity so that it can be destroyed cleanly.
entityClass->free(entityClass); entityClass->free(entityClass);
} }
return true;
} }
void EntityClassDoom3_parse(TextInputStream& inputStream) bool EntityClassDoom3_parseBlock(Tokeniser& tokeniser, const char* blockType)
{
if(string_equal(blockType, "entityDef"))
{
return EntityClassDoom3_parseEntityDef(tokeniser);
}
else if(string_equal(blockType, "model"))
{
return EntityClassDoom3_parseModel(tokeniser);
}
else
{
return EntityClassDoom3_parseUnknown(tokeniser);
}
}
bool EntityClassDoom3_parse(TextInputStream& inputStream, const char* filename)
{ {
Tokeniser& tokeniser = GlobalScriptLibrary().m_pfnNewScriptTokeniser(inputStream); Tokeniser& tokeniser = GlobalScriptLibrary().m_pfnNewScriptTokeniser(inputStream);
@ -558,19 +663,13 @@ void EntityClassDoom3_parse(TextInputStream& inputStream)
const char* blockType = tokeniser.getToken(); const char* blockType = tokeniser.getToken();
if(blockType == 0) if(blockType == 0)
{ {
break; return true;
} }
if(string_equal(blockType, "entityDef")) CopiedString tmp(blockType);
if(!EntityClassDoom3_parseBlock(tokeniser, tmp.c_str()))
{ {
EntityClassDoom3_parseEntityDef(tokeniser); globalErrorStream() << GlobalFileSystem().findFile(filename) << filename << ":" << tokeniser.getLine() << ": " << tmp.c_str() << " parse failed, skipping rest of file\n";
} return false;
else if(string_equal(blockType, "model"))
{
EntityClassDoom3_parseModel(tokeniser);
}
else
{
EntityClassDoom3_parseUnknown(tokeniser);
} }
} }
@ -588,7 +687,7 @@ void EntityClassDoom3_loadFile(const char* filename)
ArchiveTextFile* file = GlobalFileSystem().openTextFile(fullname.c_str()); ArchiveTextFile* file = GlobalFileSystem().openTextFile(fullname.c_str());
if(file != 0) if(file != 0)
{ {
EntityClassDoom3_parse(file->getInputStream()); EntityClassDoom3_parse(file->getInputStream(), fullname.c_str());
file->release(); file->release();
} }
} }