sqlode/query_analyzer/token_utils

Types

pub type EqualityMatch {
  EqualityMatch(
    column_name: String,
    table_qualifier: option.Option(String),
    placeholder: String,
  )
}

Constructors

  • EqualityMatch(
      column_name: String,
      table_qualifier: option.Option(String),
      placeholder: String,
    )

pub type InsertParts {
  InsertParts(
    table_name: String,
    columns: List(String),
    values: List(List(lexer.Token)),
  )
}

Constructors

  • InsertParts(
      table_name: String,
      columns: List(String),
      values: List(List(lexer.Token)),
    )

pub type TypeCast {
  TypeCast(placeholder: String, cast_type: String)
}

Constructors

  • TypeCast(placeholder: String, cast_type: String)

Values

pub fn collect_paren_contents(
  tokens: List(lexer.Token),
) -> #(List(lexer.Token), List(lexer.Token))

Collect tokens inside the next parenthesized group. Expects tokens starting right after the opening LParen. Returns #(inner_tokens, remaining_tokens_after_RParen).
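
A sketch of the expected shape, assuming the module is imported as sqlode/query_analyzer/token_utils and that tokens already holds the lexed text that follows an opening parenthesis (producing the token list is the lexer's job and is not shown here):

// tokens: lexed form of "a, b) FROM users", i.e. everything after the LParen.
let #(inner, rest) = token_utils.collect_paren_contents(tokens)
// inner -> tokens for "a, b"
// rest  -> tokens for "FROM users"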

pub fn extract_placeholders(
  tokens: List(lexer.Token),
) -> List(String)

Extract all placeholder token strings from a token list.
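
For example, on a query with two placeholders. Here and below, lexer.tokenize is a hypothetical stand-in for whatever produces List(lexer.Token); it is not part of this module's documented API:

// lexer.tokenize is a hypothetical helper, not a documented function.
let tokens = lexer.tokenize("SELECT * FROM users WHERE id = $1 AND status = $2")
token_utils.extract_placeholders(tokens)
// -> ["$1", "$2"], presumably in order of appearance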

pub fn extract_table_names(
  tokens: List(lexer.Token),
) -> List(String)

Extract all table names referenced in a token list (FROM, INTO, UPDATE, JOIN).
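
A sketch using the same hypothetical lexer.tokenize; the exact ordering and any de-duplication of repeated names are assumptions:

// lexer.tokenize is a hypothetical helper, not a documented function.
let tokens = lexer.tokenize(
  "SELECT * FROM orders o JOIN users u ON u.id = o.user_id",
)
token_utils.extract_table_names(tokens)
// -> ["orders", "users"]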

pub fn find_equality_patterns(
  tokens: List(lexer.Token),
) -> List(EqualityMatch)

Find all column-operator-placeholder patterns (a column compared against a placeholder, such as column = $1) in tokens.
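
For instance (hypothetical lexer.tokenize; exactly which operators besides = are matched is an assumption):

// lexer.tokenize is a hypothetical helper, not a documented function.
let tokens = lexer.tokenize("SELECT * FROM users u WHERE u.id = $1 AND age > $2")
token_utils.find_equality_patterns(tokens)
// -> [
//   EqualityMatch(column_name: "id", table_qualifier: option.Some("u"), placeholder: "$1"),
//   EqualityMatch(column_name: "age", table_qualifier: option.None, placeholder: "$2"),
// ]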

pub fn find_in_patterns(
  tokens: List(lexer.Token),
) -> List(EqualityMatch)

Find all column IN (placeholder) patterns in tokens.
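
A sketch (hypothetical lexer.tokenize; how IN lists with several placeholders are reported is an assumption):

// lexer.tokenize is a hypothetical helper, not a documented function.
let tokens = lexer.tokenize("SELECT * FROM users WHERE id IN ($1)")
token_utils.find_in_patterns(tokens)
// -> [EqualityMatch(column_name: "id", table_qualifier: option.None, placeholder: "$1")]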

pub fn find_insert_parts(
  tokens: List(lexer.Token),
) -> option.Option(InsertParts)

Find INSERT INTO table (columns) VALUES (values) structure in tokens.
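
The expected shape, sketched with the same hypothetical lexer.tokenize:

// lexer.tokenize is a hypothetical helper, not a documented function.
let tokens = lexer.tokenize("INSERT INTO users (name, email) VALUES ($1, $2)")
token_utils.find_insert_parts(tokens)
// -> option.Some(InsertParts(
//      table_name: "users",
//      columns: ["name", "email"],
//      values: two inner token lists, presumably one per value expression,
//    ))
// A token list that is not an INSERT presumably yields option.None.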

pub fn find_set_patterns(
  tokens: List(lexer.Token),
) -> List(EqualityMatch)

Find all column = placeholder patterns in SET clauses.
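
For example (hypothetical lexer.tokenize):

// lexer.tokenize is a hypothetical helper, not a documented function.
let tokens = lexer.tokenize("UPDATE users SET name = $1, email = $2 WHERE id = $3")
token_utils.find_set_patterns(tokens)
// -> matches for name/$1 and email/$2 only; the WHERE comparison is the
//    domain of find_equality_patterns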

pub fn find_type_casts(
  tokens: List(lexer.Token),
) -> List(TypeCast)

Find all $N::type patterns in tokens (PostgreSQL only).
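
A sketch (hypothetical lexer.tokenize; whether cast_type preserves the original casing is an assumption):

// lexer.tokenize is a hypothetical helper, not a documented function.
let tokens = lexer.tokenize("SELECT * FROM users WHERE id = $1::uuid AND meta = $2::jsonb")
token_utils.find_type_casts(tokens)
// -> [TypeCast(placeholder: "$1", cast_type: "uuid"),
//     TypeCast(placeholder: "$2", cast_type: "jsonb")]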

pub fn parse_placeholder_index(
  placeholder: String,
) -> Result(Int, Nil)

Parse a placeholder string like "$3" into its integer index.
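
For example:

token_utils.parse_placeholder_index("$3")
// -> Ok(3)
// Anything that is not "$" followed by an integer presumably yields Error(Nil).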

pub fn read_subquery_alias(
  tokens: List(lexer.Token),
) -> #(option.Option(String), List(lexer.Token))

Read an optional AS followed by an identifier alias, optionally followed by a parenthesized column list. Returns the alias and the token stream positioned after the alias (and column list, if any).
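
A sketch of the expected shape, with tokens positioned at a potential alias:

// tokens: lexed form of "AS t (id, name) WHERE ..."
let #(alias, rest) = token_utils.read_subquery_alias(tokens)
// alias -> option.Some("t")
// rest  -> tokens after the column list, i.e. "WHERE ..."
// With no alias present, alias is presumably option.None and rest is the input unchanged.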

pub fn read_table_name(
  tokens: List(lexer.Token),
) -> #(option.Option(String), List(lexer.Token))

Read a table name from the current token position, handling schema-qualified names (schema.table) and subqueries in parentheses.
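
For example (whether the schema qualifier is kept in the returned name is an assumption):

// tokens: lexed form of "public.users WHERE id = $1"
let #(name, rest) = token_utils.read_table_name(tokens)
// name -> option.Some("public.users")
// rest -> tokens for "WHERE id = $1"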

pub fn skip_parens(
  tokens: List(lexer.Token),
  depth: Int,
) -> List(lexer.Token)

Skip tokens until all parentheses at the given depth are closed.
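
A sketch, reading depth as the number of currently open parentheses:

// tokens: lexed form of "$1, ($2, $3)) AND x = $4"; one outer paren is still open.
token_utils.skip_parens(tokens, 1)
// -> tokens for "AND x = $4"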

pub fn split_on_commas(
  tokens: List(lexer.Token),
) -> List(List(lexer.Token))

Split tokens on top-level commas (depth 0).
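
For instance:

// tokens: lexed form of "$1, f($2, $3), $4"
token_utils.split_on_commas(tokens)
// -> [tokens for "$1", tokens for "f($2, $3)", tokens for "$4"]
// The comma inside the call sits at depth 1 and is not a split point.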

pub fn structure_tokens(
  tokens: List(lexer.Token),
) -> query_ir.SqlStatement

Build a SqlStatement from a token list. This function identifies the statement kind and decomposes it into its major clauses. Sub-expressions (WHERE predicates, etc.) remain as raw token lists — this is intentionally a thin IR that avoids building a full expression AST.
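
Tying the module together, a sketch of the top-level flow (lexer.tokenize is again a hypothetical stand-in for whatever produces the token list):

// lexer.tokenize is a hypothetical helper, not a documented function.
let tokens = lexer.tokenize("SELECT id FROM users WHERE id = $1")
let statement = token_utils.structure_tokens(tokens)
// statement is a query_ir.SqlStatement whose clauses (select list, FROM,
// WHERE, ...) still carry raw token lists rather than parsed expressions.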
