Skip to content

Commit

Permalink
Merge branch 'develop'
Browse files Browse the repository at this point in the history
  • Loading branch information
bruce-dunwiddie committed Apr 10, 2017
2 parents 7e44491 + 9914244 commit c3550a0
Show file tree
Hide file tree
Showing 73 changed files with 2,058 additions and 1,188 deletions.
6 changes: 4 additions & 2 deletions TSQL_Parser/TSQL_Parser/Clauses/Parsers/ITSQLClauseParser.cs
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,12 @@
using System.Text;
using System.Threading.Tasks;

using TSQL.Tokens;

namespace TSQL.Clauses.Parsers
{
	/// <summary>
	/// Common contract for clause parsers that consume tokens from a shared
	/// token enumerator (one parser per clause type, e.g. FROM, WHERE).
	/// </summary>
	// NOTE(review): the scraped diff contained both the pre-merge (public,
	// TSQLTokenizer-based) and post-merge declarations interleaved, which does
	// not compile; this is the resolved post-merge interface.
	internal interface ITSQLClauseParser
	{
		// Parses one clause starting at the enumerator's current token and
		// returns the clause with all of its tokens collected.
		TSQLClause Parse(IEnumerator<TSQLToken> tokenizer);
	}
}
254 changes: 127 additions & 127 deletions TSQL_Parser/TSQL_Parser/Clauses/Parsers/TSQLFromClauseParser.cs
Original file line number Diff line number Diff line change
@@ -1,127 +1,127 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

using TSQL.Statements;
using TSQL.Statements.Parsers;
using TSQL.Tokens;

namespace TSQL.Clauses.Parsers
{
// Parses the FROM clause of a T-SQL statement directly from a TSQLTokenizer.
// NOTE(review): this is the pre-merge version; the post-merge version takes an
// IEnumerator<TSQLToken> instead — confirm which one the build actually keeps.
public class TSQLFromClauseParser : ITSQLClauseParser
{
// Collects every token belonging to the FROM clause (table sources, joins,
// derived tables, TVFs) into a TSQLFromClause.
// Precondition: tokenizer.Current is positioned on the FROM keyword.
// Throws ApplicationException when the current token is not FROM.
public TSQLFromClause Parse(TSQLTokenizer tokenizer)
{
TSQLFromClause from = new TSQLFromClause();

// Guard: the caller must have already advanced the tokenizer onto FROM.
if (
tokenizer.Current == null ||
tokenizer.Current.Type != TSQLTokenType.Keyword ||
tokenizer.Current.AsKeyword.Keyword != TSQLKeywords.FROM)
{
throw new ApplicationException("FROM expected.");
}

from.Tokens.Add(tokenizer.Current);

// derived tables
// TVF
// Parenthesis nesting depth: keywords inside derived tables / TVF argument
// lists must not be mistaken for the end of the FROM clause.
int nestedLevel = 0;

// Consume tokens until one of the clause terminators is hit:
//   1. a semicolon;
//   2. an unmatched close parenthesis at the top level (end of an enclosing
//      derived table / subquery);
//   3. at the top level, any keyword that is not part of a FROM clause
//      (i.e. not in the allow-list below) — e.g. WHERE, GROUP, ORDER.
while (
tokenizer.Read() &&
!(
tokenizer.Current.Type == TSQLTokenType.Character &&
tokenizer.Current.AsCharacter.Character == TSQLCharacters.Semicolon
) &&
!(
nestedLevel == 0 &&
tokenizer.Current.Type == TSQLTokenType.Character &&
tokenizer.Current.AsCharacter.Character == TSQLCharacters.CloseParentheses
) &&
(
nestedLevel > 0 ||
tokenizer.Current.Type != TSQLTokenType.Keyword ||
(
// NOTE(review): this Type == Keyword check is redundant — the preceding
// ORed clause already guarantees it when evaluation reaches here.
tokenizer.Current.Type == TSQLTokenType.Keyword &&
tokenizer.Current.AsKeyword.Keyword.In
(
TSQLKeywords.JOIN,
TSQLKeywords.ON,
TSQLKeywords.INNER,
TSQLKeywords.LEFT,
TSQLKeywords.RIGHT,
TSQLKeywords.OUTER,
TSQLKeywords.CROSS,
TSQLKeywords.FULL,
TSQLKeywords.AS,
TSQLKeywords.PIVOT,
TSQLKeywords.UNPIVOT,
TSQLKeywords.WITH,
TSQLKeywords.MERGE,
TSQLKeywords.TABLESAMPLE,
TSQLKeywords.FOR,
TSQLKeywords.FROM, // FOR SYSTEM_TIME FROM
TSQLKeywords.BETWEEN,
TSQLKeywords.AND,
TSQLKeywords.IN,
TSQLKeywords.REPEATABLE,
TSQLKeywords.ALL
)
)
))
{
from.Tokens.Add(tokenizer.Current);

if (tokenizer.Current.Type == TSQLTokenType.Character)
{
TSQLCharacters character = tokenizer.Current.AsCharacter.Character;

if (character == TSQLCharacters.OpenParentheses)
{
// should we recurse for derived tables?
nestedLevel++;

if (tokenizer.Read())
{
if (
tokenizer.Current.Type == TSQLTokenType.Keyword &&
tokenizer.Current.AsKeyword.Keyword == TSQLKeywords.SELECT)
{
// Derived table: delegate the whole subquery to the SELECT statement
// parser and splice its tokens into this clause.
TSQLSelectStatement selectStatement = new TSQLSelectStatementParser().Parse(tokenizer);

from.Tokens.AddRange(selectStatement.Tokens);

// The statement parser stops on the subquery's closing paren;
// consume it and rebalance the nesting depth.
if (
tokenizer.Current != null &&
tokenizer.Current.Type == TSQLTokenType.Character &&
tokenizer.Current.AsCharacter.Character == TSQLCharacters.CloseParentheses)
{
nestedLevel--;
from.Tokens.Add(tokenizer.Current);
}
}
else
{
// Not a derived table (e.g. TVF arguments); keep the token and
// continue the normal loop.
from.Tokens.Add(tokenizer.Current);
}
}
}
else if (character == TSQLCharacters.CloseParentheses)
{
nestedLevel--;
}
}
}

return from;
}

// Explicit interface implementation forwarding to the typed overload.
TSQLClause ITSQLClauseParser.Parse(TSQLTokenizer tokenizer)
{
return Parse(tokenizer);
}
}
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

using TSQL.Statements;
using TSQL.Statements.Parsers;
using TSQL.Tokens;

namespace TSQL.Clauses.Parsers
{
	/// <summary>
	/// Parses the FROM clause of a T-SQL statement from a token enumerator,
	/// collecting table sources, joins, derived tables, and TVF tokens.
	/// </summary>
	internal class TSQLFromClauseParser : ITSQLClauseParser
	{
		/// <summary>
		/// Collects every token belonging to the FROM clause into a
		/// <see cref="TSQLFromClause"/>.
		/// Precondition: <paramref name="tokenizer"/>.Current is positioned on
		/// the FROM keyword.
		/// </summary>
		/// <param name="tokenizer">Token stream positioned on FROM.</param>
		/// <returns>The parsed FROM clause with all of its tokens.</returns>
		/// <exception cref="ApplicationException">
		/// Thrown when the current token is not FROM.
		/// (NOTE(review): ApplicationException is discouraged by the Framework
		/// Design Guidelines; kept because existing callers may catch it.)
		/// </exception>
		public TSQLFromClause Parse(IEnumerator<TSQLToken> tokenizer)
		{
			TSQLFromClause from = new TSQLFromClause();

			// Guard: the caller must have already advanced the enumerator onto FROM.
			if (
				tokenizer.Current == null ||
				tokenizer.Current.Type != TSQLTokenType.Keyword ||
				tokenizer.Current.AsKeyword.Keyword != TSQLKeywords.FROM)
			{
				throw new ApplicationException("FROM expected.");
			}

			from.Tokens.Add(tokenizer.Current);

			// derived tables
			// TVF
			// Parenthesis nesting depth: keywords inside derived tables / TVF
			// argument lists must not be mistaken for the end of the clause.
			int nestedLevel = 0;

			// Consume tokens until one of the clause terminators is hit:
			//   1. a semicolon;
			//   2. an unmatched close parenthesis at the top level (end of an
			//      enclosing derived table / subquery);
			//   3. at the top level, any keyword not in the FROM-clause
			//      allow-list below (e.g. WHERE, GROUP, ORDER).
			// FIX(review): removed a redundant inner
			// "Current.Type == TSQLTokenType.Keyword &&" check — the preceding
			// ORed clause already guarantees it when evaluation reaches the
			// keyword allow-list.
			while (
				tokenizer.MoveNext() &&
				!(
					tokenizer.Current.Type == TSQLTokenType.Character &&
					tokenizer.Current.AsCharacter.Character == TSQLCharacters.Semicolon
				) &&
				!(
					nestedLevel == 0 &&
					tokenizer.Current.Type == TSQLTokenType.Character &&
					tokenizer.Current.AsCharacter.Character == TSQLCharacters.CloseParentheses
				) &&
				(
					nestedLevel > 0 ||
					tokenizer.Current.Type != TSQLTokenType.Keyword ||
					tokenizer.Current.AsKeyword.Keyword.In
					(
						TSQLKeywords.JOIN,
						TSQLKeywords.ON,
						TSQLKeywords.INNER,
						TSQLKeywords.LEFT,
						TSQLKeywords.RIGHT,
						TSQLKeywords.OUTER,
						TSQLKeywords.CROSS,
						TSQLKeywords.FULL,
						TSQLKeywords.AS,
						TSQLKeywords.PIVOT,
						TSQLKeywords.UNPIVOT,
						TSQLKeywords.WITH,
						TSQLKeywords.MERGE,
						TSQLKeywords.TABLESAMPLE,
						TSQLKeywords.FOR,
						TSQLKeywords.FROM, // FOR SYSTEM_TIME FROM
						TSQLKeywords.BETWEEN,
						TSQLKeywords.AND,
						TSQLKeywords.IN,
						TSQLKeywords.REPEATABLE,
						TSQLKeywords.ALL
					)
				))
			{
				from.Tokens.Add(tokenizer.Current);

				if (tokenizer.Current.Type == TSQLTokenType.Character)
				{
					TSQLCharacters character = tokenizer.Current.AsCharacter.Character;

					if (character == TSQLCharacters.OpenParentheses)
					{
						// should we recurse for derived tables?
						nestedLevel++;

						if (tokenizer.MoveNext())
						{
							if (
								tokenizer.Current.Type == TSQLTokenType.Keyword &&
								tokenizer.Current.AsKeyword.Keyword == TSQLKeywords.SELECT)
							{
								// Derived table: delegate the whole subquery to the
								// SELECT statement parser and splice its tokens in.
								TSQLSelectStatement selectStatement = new TSQLSelectStatementParser().Parse(tokenizer);

								from.Tokens.AddRange(selectStatement.Tokens);

								// The statement parser stops on the subquery's closing
								// paren; consume it and rebalance the nesting depth.
								if (
									tokenizer.Current != null &&
									tokenizer.Current.Type == TSQLTokenType.Character &&
									tokenizer.Current.AsCharacter.Character == TSQLCharacters.CloseParentheses)
								{
									nestedLevel--;
									from.Tokens.Add(tokenizer.Current);
								}
							}
							else
							{
								// Not a derived table (e.g. TVF arguments); keep the
								// token and continue the normal loop.
								from.Tokens.Add(tokenizer.Current);
							}
						}
					}
					else if (character == TSQLCharacters.CloseParentheses)
					{
						nestedLevel--;
					}
				}
			}

			return from;
		}

		// Explicit interface implementation forwarding to the typed overload.
		TSQLClause ITSQLClauseParser.Parse(IEnumerator<TSQLToken> tokenizer)
		{
			return Parse(tokenizer);
		}
	}
}
Loading

0 comments on commit c3550a0

Please sign in to comment.