diff --git a/src/backend/libpq/hba.c b/src/backend/libpq/hba.c
index 07f046f..8148dc3 100644
--- a/src/backend/libpq/hba.c
+++ b/src/backend/libpq/hba.c
@@ -75,6 +75,13 @@ typedef struct HbaToken
 	bool		quoted;
 } HbaToken;
 
+typedef struct TokenizedLine
+{
+	List	   *tokens;			/* List of tokens in a line */
+	int			line_num;		/* Line number */
+	char	   *raw_line;		/* raw line */
+} TokenizedLine;
+
 /*
  * pre-parsed content of HBA config file: list of HbaLine structs.
  * parsed_hba_context is the memory context where it lives.
@@ -95,7 +102,7 @@ static MemoryContext parsed_ident_context = NULL;
 
 
 static MemoryContext tokenize_file(const char *filename, FILE *file,
-			  List **lines, List **line_nums, List **raw_lines);
+			  List **tok_lines);
 static List *tokenize_inc_file(List *tokens, const char *outer_filename,
 				   const char *inc_filename);
 static bool parse_hba_auth_opt(char *name, char *val, HbaLine *hbaline,
@@ -305,7 +312,6 @@ tokenize_inc_file(List *tokens,
 	char	   *inc_fullname;
 	FILE	   *inc_file;
 	List	   *inc_lines;
-	List	   *inc_line_nums;
 	ListCell   *inc_line;
 	MemoryContext linecxt;
 
@@ -337,14 +343,15 @@ tokenize_inc_file(List *tokens,
 	}
 
 	/* There is possible recursion here if the file contains @ */
-	linecxt = tokenize_file(inc_fullname, inc_file, &inc_lines, &inc_line_nums, NULL);
+	linecxt = tokenize_file(inc_fullname, inc_file, &inc_lines);
 
 	FreeFile(inc_file);
 	pfree(inc_fullname);
 
 	foreach(inc_line, inc_lines)
 	{
-		List	   *inc_fields = lfirst(inc_line);
+		TokenizedLine *tok_line = lfirst(inc_line);
+		List	   *inc_fields = lfirst(list_head(tok_line->tokens));
 		ListCell   *inc_field;
 
 		foreach(inc_field, inc_fields)
@@ -378,21 +385,21 @@ tokenize_inc_file(List *tokens,
  * this function.
  */
 static MemoryContext
-tokenize_file(const char *filename, FILE *file,
-			  List **lines, List **line_nums, List **raw_lines)
+tokenize_file(const char *filename, FILE *file, List **tok_lines)
 {
 	List	   *current_line = NIL;
 	List	   *current_field = NIL;
 	int			line_number = 1;
 	MemoryContext linecxt;
 	MemoryContext oldcxt;
+	TokenizedLine *tok_line = NULL;
 
 	linecxt = AllocSetContextCreate(CurrentMemoryContext,
 									"tokenize_file",
 									ALLOCSET_SMALL_SIZES);
 	oldcxt = MemoryContextSwitchTo(linecxt);
 
-	*lines = *line_nums = NIL;
+	*tok_lines = NIL;
 
 	while (!feof(file) && !ferror(file))
 	{
@@ -424,12 +431,15 @@ tokenize_file(const char *filename, FILE *file,
 		{
 			if (current_line == NIL)
 			{
-				/* make a new line List, record its line number */
+				tok_line = palloc0(sizeof(TokenizedLine));
+
+				/* make a new line tokens list */
 				current_line = lappend(current_line, current_field);
-				*lines = lappend(*lines, current_line);
-				*line_nums = lappend_int(*line_nums, line_number);
-				if (raw_lines)
-					*raw_lines = lappend(*raw_lines, pstrdup(rawline));
+				tok_line->tokens = lappend(tok_line->tokens, current_line);
+				tok_line->line_num = line_number;
+				tok_line->raw_line = pstrdup(rawline);
+
+				*tok_lines = lappend(*tok_lines, tok_line);
 			}
 			else
 			{
@@ -1769,11 +1779,7 @@ load_hba(void)
 {
 	FILE	   *file;
 	List	   *hba_lines = NIL;
-	List	   *hba_line_nums = NIL;
-	List	   *hba_raw_lines = NIL;
-	ListCell   *line,
-			   *line_num,
-			   *raw_line;
+	ListCell   *line;
 	List	   *new_parsed_lines = NIL;
 	bool		ok = true;
 	MemoryContext linecxt;
@@ -1790,7 +1796,7 @@ load_hba(void)
 		return false;
 	}
 
-	linecxt = tokenize_file(HbaFileName, file, &hba_lines, &hba_line_nums, &hba_raw_lines);
+	linecxt = tokenize_file(HbaFileName, file, &hba_lines);
 	FreeFile(file);
 
 	/* Now parse all the lines */
@@ -1799,11 +1805,12 @@
 								   "hba parser context",
 								   ALLOCSET_SMALL_SIZES);
 	oldcxt = MemoryContextSwitchTo(hbacxt);
-	forthree(line, hba_lines, line_num, hba_line_nums, raw_line, hba_raw_lines)
+	foreach(line, hba_lines)
 	{
 		HbaLine    *newline;
+		TokenizedLine *tok_line = lfirst(line);
 
-		if ((newline = parse_hba_line(lfirst(line), lfirst_int(line_num), lfirst(raw_line))) == NULL)
+		if ((newline = parse_hba_line(lfirst(list_head(tok_line->tokens)), tok_line->line_num, tok_line->raw_line)) == NULL)
 		{
 			/*
 			 * Parse error in the file, so indicate there's a problem.  NB: a
@@ -2144,9 +2151,7 @@ load_ident(void)
 {
 	FILE	   *file;
 	List	   *ident_lines = NIL;
-	List	   *ident_line_nums = NIL;
 	ListCell   *line_cell,
-			   *num_cell,
 			   *parsed_line_cell;
 	List	   *new_parsed_lines = NIL;
 	bool		ok = true;
@@ -2166,7 +2171,7 @@ load_ident(void)
 		return false;
 	}
 
-	linecxt = tokenize_file(IdentFileName, file, &ident_lines, &ident_line_nums, NULL);
+	linecxt = tokenize_file(IdentFileName, file, &ident_lines);
 	FreeFile(file);
 
 	/* Now parse all the lines */
@@ -2175,9 +2180,11 @@
 										  "ident parser context",
 										  ALLOCSET_SMALL_SIZES);
 	oldcxt = MemoryContextSwitchTo(ident_context);
-	forboth(line_cell, ident_lines, num_cell, ident_line_nums)
+	foreach(line_cell, ident_lines)
 	{
-		if ((newline = parse_ident_line(lfirst(line_cell), lfirst_int(num_cell))) == NULL)
+		TokenizedLine *tok_line = lfirst(line_cell);
+
+		if ((newline = parse_ident_line(lfirst(list_head(tok_line->tokens)), tok_line->line_num)) == NULL)
 		{
 			/*
 			 * Parse error in the file, so indicate there's a problem.  Free
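
A note on the shape of the change, for readers skimming the hunks: tokenize_file() used to return up to three parallel lists (token lists, line numbers, raw line text), which load_hba() and load_ident() then had to walk in lockstep with forthree()/forboth(). After this patch it returns a single list of TokenizedLine structs, so every caller gets by with an ordinary foreach(). The stand-alone C sketch below illustrates the same "one struct per line instead of parallel lists" idea; it is not PostgreSQL code, the SketchLine name and plain arrays are made up for the example, and the real TokenizedLine additionally carries the per-line token list.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/*
 * Hypothetical stand-in for TokenizedLine: everything known about one input
 * line travels in a single struct instead of three parallel containers.
 */
typedef struct SketchLine
{
	int			line_num;		/* 1-based line number in the input */
	char	   *raw_line;		/* copy of the raw line text */
} SketchLine;

int
main(void)
{
	const char *input[] = {
		"local   all   all                 trust",
		"host    all   all  127.0.0.1/32   md5",
	};
	int			nlines = (int) (sizeof(input) / sizeof(input[0]));
	SketchLine *lines = calloc(nlines, sizeof(SketchLine));

	/* build one struct per line while "tokenizing" the input */
	for (int i = 0; i < nlines; i++)
	{
		size_t		len = strlen(input[i]) + 1;

		lines[i].line_num = i + 1;
		lines[i].raw_line = malloc(len);
		memcpy(lines[i].raw_line, input[i], len);
	}

	/* a consumer (cf. load_hba/load_ident after the patch) needs one loop */
	for (int i = 0; i < nlines; i++)
		printf("line %d: %s\n", lines[i].line_num, lines[i].raw_line);

	for (int i = 0; i < nlines; i++)
		free(lines[i].raw_line);
	free(lines);
	return 0;
}

The practical benefit, assuming later work wants to attach more per-line information (an error message, say), is that it becomes a single new struct field rather than yet another List threaded through tokenize_file() and every one of its callers.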