OgreCompiler2Pass.h

/*
-----------------------------------------------------------------------------
This source file is part of OGRE
(Object-oriented Graphics Rendering Engine)
For the latest info, see http://www.ogre3d.org

Copyright (c) 2000-2006 Torus Knot Software Ltd
Also see acknowledgements in Readme.html

This program is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.

This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place - Suite 330, Boston, MA 02111-1307, USA, or go to
http://www.gnu.org/copyleft/lesser.txt.

You may alternatively use this source under the terms of a specific version of
the OGRE Unrestricted License provided you have obtained such a license from
Torus Knot Software Ltd.
-----------------------------------------------------------------------------
*/


#ifndef __Compiler2Pass_H__
#define __Compiler2Pass_H__

#include "OgrePrerequisites.h"

namespace Ogre {

    /** Generic two-pass compiler: pass 1 tokenizes the source against a BNF
        grammar, pass 2 executes the actions associated with the tokens.
        Subclasses supply the token definitions, the grammar text and the
        token actions.
    */
    class _OgreExport Compiler2Pass : public ScriptTranslatorAlloc
    {

    protected:

        // BNF operation types
        enum OperationType {otUNKNOWN, otRULE, otAND, otOR, otOPTIONAL,
                            otREPEAT, otDATA, otNOT_TEST, otINSERT_TOKEN, otEND};
        /** structure used to build rule paths */
        struct TokenRule
        {
            OperationType operation;
            size_t tokenID;

            TokenRule(void) : operation(otUNKNOWN), tokenID(0) {}
            TokenRule(const OperationType ot, const size_t token)
                : operation(ot), tokenID(token) {}
        };

        typedef std::vector<TokenRule> TokenRuleContainer;
        typedef TokenRuleContainer::iterator TokenRuleIterator;

        static const size_t SystemTokenBase = 1000;
        enum SystemRuleToken {
            _no_token_ = SystemTokenBase,
            _character_,
            _value_,
            _no_space_skip_
        };

        // token IDs used when parsing the BNF grammar text itself
        enum BNF_ID {BNF_UNKOWN = 0,
            BNF_SYNTAX, BNF_RULE, BNF_IDENTIFIER, BNF_IDENTIFIER_RIGHT, BNF_IDENTIFIER_CHARACTERS, BNF_ID_BEGIN, BNF_ID_END,
            BNF_CONSTANT_BEGIN, BNF_SET_RULE, BNF_EXPRESSION,
            BNF_AND_TERM, BNF_OR_TERM, BNF_TERM, BNF_TERM_ID, BNF_CONSTANT, BNF_OR, BNF_TERMINAL_SYMBOL, BNF_TERMINAL_START,
            BNF_REPEAT_EXPRESSION, BNF_REPEAT_BEGIN, BNF_REPEAT_END, BNF_SET, BNF_SET_BEGIN, BNF_SET_END,
            BNF_NOT_TEST, BNF_NOT_TEST_BEGIN, BNF_CONDITIONAL_TOKEN_INSERT, BNF_OPTIONAL_EXPRESSION,
            BNF_NOT_EXPRESSION, BNF_NOT_CHK,
            BNF_OPTIONAL_BEGIN, BNF_OPTIONAL_END, BNF_NO_TOKEN_START, BNF_SINGLEQUOTE, BNF_SINGLE_QUOTE_EXC, BNF_SET_END_EXC,
            BNF_ANY_CHARACTER, BNF_SPECIAL_CHARACTERS1,
            BNF_SPECIAL_CHARACTERS2, BNF_WHITE_SPACE_CHK,

            BNF_LETTER, BNF_LETTER_DIGIT, BNF_DIGIT, BNF_WHITE_SPACE,
            BNF_ALPHA_SET, BNF_NUMBER_SET, BNF_SPECIAL_CHARACTER_SET1,
            BNF_SPECIAL_CHARACTER_SET2, BNF_SPECIAL_CHARACTER_SET3, BNF_NOT_CHARS,

            // do not remove - this indicates where manually defined tokens end and where auto-gen ones start
            BNF_AUTOTOKENSTART
        };

        /** structure used to build the lexeme token library */
        struct LexemeTokenDef
        {
            size_t ID;                   // token ID, index into the lexeme token definition container
            bool hasAction;              // true if an action is associated with this token
            bool isNonTerminal;          // true if the token is a non-terminal
            size_t ruleID;               // index into the rule base for non-terminal tokens
            bool isCaseSensitive;        // true if the lexeme is compared to the source case sensitively
            String lexeme;               // text representation of the token

            LexemeTokenDef(void) : ID(0), hasAction(false), isNonTerminal(false), ruleID(0), isCaseSensitive(false) {}
            LexemeTokenDef( const size_t ID, const String& lexeme, const bool hasAction = false, const bool caseSensitive = false )
                : ID(ID)
                , hasAction(hasAction)
                , isNonTerminal(false)
                , ruleID(0)
                , isCaseSensitive(caseSensitive)
                , lexeme(lexeme)
            {
            }

        };

        typedef std::vector<LexemeTokenDef> LexemeTokenDefContainer;
        typedef LexemeTokenDefContainer::iterator LexemeTokenDefIterator;

        typedef std::map<String, size_t> LexemeTokenMap;
        typedef LexemeTokenMap::iterator TokenKeyIterator;

        /** structure for token instructions generated during pass 1 */
        struct TokenInst
        {
            size_t NTTRuleID;           // non-terminal token rule ID that generated this token
            size_t tokenID;             // token ID found (or expected)
            size_t line;                // line number in the source where the token was found
            size_t pos;                 // character position in the source where the token was found
            bool found;                 // true if the expected token was found
        };

        typedef std::vector<TokenInst> TokenInstContainer;
        typedef TokenInstContainer::iterator TokenInstIterator;

        // token queue, definitions, rules
        struct TokenState
        {
            TokenInstContainer       tokenQue;
            LexemeTokenDefContainer  lexemeTokenDefinitions;
            TokenRuleContainer       rootRulePath;
            LexemeTokenMap           lexemeTokenMap;
        };

        // token state built from the client BNF grammar
        TokenState* mClientTokenState;

        // token state currently in use (either the BNF compiler's or the client's)
        TokenState* mActiveTokenState;
        // current position in the token queue during pass 2
        mutable size_t mPass2TokenQuePosition;
        // position in the queue of the previous token that had an action
        size_t mPreviousActionQuePosition;
        // position in the queue of the next token that has an action
        size_t mNextActionQuePosition;

        // source text being compiled
        const String* mSource;
        // name of the source being compiled
        String mSourceName;
        size_t mEndOfSource;

        size_t mCurrentLine;  // current line number in the source being tokenized
        size_t mCharPos;      // current character position in the source
        size_t mErrorCharPos; // character position where an error occurred

        // numeric constants extracted from the source
        std::map<size_t, float> mConstants;
        // string labels extracted from the source
        typedef std::map<size_t, String> LabelContainer;
        LabelContainer mLabels;
        // true while a label is being parsed
        bool mLabelIsActive;
        // key of the label currently being built
        size_t mActiveLabelKey;
        // label currently receiving characters
        String* mActiveLabel;
        // true if spaces should not be skipped while parsing a label
        bool mNoSpaceSkip;
        // true if the next terminal token should not be added to the token queue
        bool mNoTerminalToken;
        // ID of a token to be inserted into the token queue
        size_t mInsertTokenID;

        // active context bit pattern used in pass 1 to determine which tokens are valid
        uint mActiveContexts;

        /** perform pass 1 of the compile: tokenize the source using the
            active token definitions and rule paths
        */
        bool doPass1();

        /** perform pass 2 of the compile: step through the token queue and
            execute the actions associated with the tokens
        */
        bool doPass2();

        /** pure virtual: invoked during pass 2 for tokens that have an action */
        virtual void executeTokenAction(const size_t tokenID) = 0;
        /** pure virtual: return the token ID at which auto-generated token IDs begin */
        virtual size_t getAutoTokenIDStart() const = 0;
        /** pure virtual: set up the client token definitions */
        virtual void setupTokenDefinitions(void) = 0;
        const TokenInst& getNextToken(const size_t expectedTokenID = 0) const
        {
            skipToken();
            return getCurrentToken(expectedTokenID);
        }
        const TokenInst& getCurrentToken(const size_t expectedTokenID = 0) const;
        bool testNextTokenID(const size_t expectedTokenID) const;

        bool testCurrentTokenID(const size_t expectedTokenID) const
        {
            return mActiveTokenState->tokenQue[mPass2TokenQuePosition].tokenID == expectedTokenID;
        }
        void skipToken(void) const;
        void replaceToken(void);
        float getNextTokenValue(void) const
        {
            skipToken();
            return getCurrentTokenValue();
        }
        float getCurrentTokenValue(void) const;
        const String& getNextTokenLabel(void) const
        {
            skipToken();
            return getCurrentTokenLabel();
        }
        const String& getCurrentTokenLabel(void) const;
        size_t getNextTokenID(void) const { return getNextToken().tokenID; }
        size_t getCurrentTokenID(void) const { return getCurrentToken().tokenID; }
        const String& getNextTokenLexeme(void) const
        {
            skipToken();
            return getCurrentTokenLexeme();
        }
        const String& getCurrentTokenLexeme(void) const;
        size_t getPass2TokenQueCount(void) const;
        size_t getRemainingTokensForAction(void) const;
        void setPass2TokenQuePosition(size_t pos, const bool activateAction = false);
        size_t getPass2TokenQuePosition(void) const { return mPass2TokenQuePosition; }
        bool setNextActionQuePosition(size_t pos, const bool search = false);
        size_t addLexemeToken(const String& lexeme, const size_t token, const bool hasAction = false, const bool caseSensitive = false);

        void setClientBNFGrammer(void);

        void findEOL();

        bool isFloatValue(float& fvalue, size_t& charsize) const;

        bool isCharacterLabel(const size_t rulepathIDX);
        bool isLexemeMatch(const String& lexeme, const bool caseSensitive) const;
        bool isEndOfSource() const { return mCharPos >= mEndOfSource; }
        bool positionToNextLexeme();
        bool processRulePath( size_t rulepathIDX);

        void setActiveContexts(const uint contexts){ mActiveContexts = contexts; }

        void skipComments();

        void skipEOL();

        void skipWhiteSpace();

        bool ValidateToken(const size_t rulepathIDX, const size_t activeRuleID);

        void verifyTokenRuleLinks(const String& grammerName);
        void checkTokenActionTrigger(void);
        String getBNFGrammerTextFromRulePath(size_t ruleID, const size_t level = 0);

    private:
        // used for interpreting BNF script
        // keep it as static so that only one structure is created
        // no matter how many times this class is instantiated.
        static TokenState mBNFTokenState;
        // maintain a map of client token states keyed by grammar name
        typedef std::map<String, TokenState> TokenStateContainer;
        static TokenStateContainer mClientTokenStates;
        void activatePreviousTokenAction(void);
        void initBNFCompiler(void);
        void buildClientBNFRulePaths(void);
        void modifyLastRule(const OperationType pendingRuleOp, const size_t tokenID);
        size_t getClientLexemeTokenID(const String& lexeme, const bool isCaseSensitive = false);
        void extractNonTerminal(const OperationType pendingRuleOp);
        void extractTerminal(const OperationType pendingRuleOp, const bool notoken = false);
        void extractSet(const OperationType pendingRuleOp);
        void extractNumericConstant(const OperationType pendingRuleOp);
        void setConditionalTokenInsert(void);
        String getLexemeText(size_t& ruleID, const size_t level = 0);

    public:

        Compiler2Pass();
        virtual ~Compiler2Pass() {}

        /** compile the source: performs both passes and returns true if the
            source compiled without error
        */
        bool compile(const String& source, const String& sourceName);
        /** pure virtual: return the BNF grammar text describing the client script syntax */
        virtual const String& getClientBNFGrammer(void) const = 0;

        /** pure virtual: return the name of the client grammar */
        virtual const String& getClientGrammerName(void) const = 0;

    };

}

#endif
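
For orientation, the sketch below shows how a client script compiler might be derived from Compiler2Pass, based only on the pure virtual interface declared above. The class name SimpleScriptCompiler, the token IDs and the grammar text are hypothetical, and the BNF notation is only assumed to match what setClientBNFGrammer() accepts; a real client supplies its own grammar and token actions.

// Hypothetical subclass of Compiler2Pass; names, token IDs and grammar text
// are illustrative only and are not part of the OGRE sources.
#include "OgreCompiler2Pass.h"

class SimpleScriptCompiler : public Ogre::Compiler2Pass
{
public:
    SimpleScriptCompiler() : mLastCount(0.0f) {}

    // BNF description of the client script syntax (assumed notation).
    virtual const Ogre::String& getClientBNFGrammer(void) const
    {
        static const Ogre::String grammar =
            "<Script> ::= {<Statement>} \n"
            "<Statement> ::= 'count' <#count> \n";
        return grammar;
    }

    virtual const Ogre::String& getClientGrammerName(void) const
    {
        static const Ogre::String name = "SimpleScript";
        return name;
    }

protected:
    // Client token IDs; auto-generated token IDs start after the last
    // manually defined one.
    enum TokenID { ID_COUNT = 1, ID_AUTOTOKEN_START };

    virtual size_t getAutoTokenIDStart() const { return ID_AUTOTOKEN_START; }

    // Register the lexemes that have actions associated with them.
    virtual void setupTokenDefinitions(void)
    {
        addLexemeToken("count", ID_COUNT, true);
    }

    // Invoked during pass 2 for every token that has an action.
    virtual void executeTokenAction(const size_t tokenID)
    {
        switch (tokenID)
        {
        case ID_COUNT:
            // numeric constant following the 'count' lexeme
            mLastCount = getNextTokenValue();
            break;
        }
    }

private:
    float mLastCount;
};

// Usage:
//   SimpleScriptCompiler compiler;
//   bool ok = compiler.compile("count 42", "example.script");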