Commit efc9816

Rework memory contexts in charge of HBA/ident tokenization
The list of TokenizedAuthLines generated when parsing the HBA and ident files is now stored in a static context called tokenize_context, where only the parsed tokens are stored. This context is created when opening the first authentication file of an HBA/ident set (hba_file or ident_file), and is cleaned up once all the work around it is done, through a new routine called free_auth_file(). One call of open_auth_file() should have one matching call of free_auth_file(); the creation and deletion of the tokenization context are controlled by the recursion depth of the tokenization.

Rather than having tokenize_auth_file() return a memory context that includes all the records, the tokenization logic now creates and deletes one memory context each time this function is called. This will simplify recursive calls to this routine for the upcoming inclusion-record logic.

While on it, rename tokenize_inc_file() to tokenize_expand_file(), as the old name would conflict with the upcoming patch that will add inclusion records for HBA/ident files. An '@' file has its tokens added to an existing list.

Reloading the HBA/ident configuration in a tight loop shows no leaks in the type of test done (with and without -DEXEC_BACKEND).

Author: Michael Paquier
Reviewed-by: Julien Rouhaud
Discussion: https://postgr.es/m/Y324HvGKiWxW2yxe@paquier.xyz
1 parent cee1209 · commit efc9816

File tree

3 files changed: +124 -51 lines changed

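To make the new contract concrete before reading the diff: the sketch below is not part of the commit. It shows how a caller is expected to pair open_auth_file() with free_auth_file() around tokenize_auth_file(), modeled on the load_hba() hunks further down. It assumes the PostgreSQL backend environment (postgres.h, libpq/hba.h, the HbaFileName GUC string), and it elides the parsing of each TokenizedAuthLine into an HbaLine that load_hba() actually performs.

/*
 * Illustrative sketch only (not part of this commit): the caller pattern
 * established by this change, modeled on the load_hba() hunks below.
 */
#include "postgres.h"

#include "libpq/hba.h"
#include "utils/guc.h"

static void
tokenize_hba_sketch(void)
{
    FILE       *file;
    List       *hba_lines = NIL;
    ListCell   *line;

    /* Depth 0: this call also creates the static tokenize_context. */
    file = open_auth_file(HbaFileName, LOG, 0, NULL);
    if (file == NULL)
        return;                 /* error already logged at elevel LOG */

    /* All TokenizedAuthLines and their tokens land in tokenize_context. */
    tokenize_auth_file(HbaFileName, file, &hba_lines, LOG, 0);

    foreach(line, hba_lines)
    {
        TokenizedAuthLine *tok_line = (TokenizedAuthLine *) lfirst(line);

        /* ... parse tok_line into an HbaLine, as load_hba() does ... */
        (void) tok_line;
    }

    /* Depth 0: closes the file and deletes tokenize_context. */
    free_auth_file(file, 0);
}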

src/backend/libpq/hba.c

Lines changed: 117 additions & 41 deletions
@@ -76,6 +76,13 @@ typedef struct
 #define token_is_keyword(t, k)  (!t->quoted && strcmp(t->string, k) == 0)
 #define token_matches(t, k)     (strcmp(t->string, k) == 0)
 
+/*
+ * Memory context holding the list of TokenizedAuthLines when parsing
+ * HBA or ident configuration files.  This is created when opening the first
+ * file (depth of 0).
+ */
+static MemoryContext tokenize_context = NULL;
+
 /*
  * pre-parsed content of HBA config file: list of HbaLine structs.
  * parsed_hba_context is the memory context where it lives.
@@ -121,9 +128,9 @@ static const char *const UserAuthName[] =
 };
 
 
-static List *tokenize_inc_file(List *tokens, const char *outer_filename,
-                               const char *inc_filename, int elevel,
-                               int depth, char **err_msg);
+static List *tokenize_expand_file(List *tokens, const char *outer_filename,
+                                  const char *inc_filename, int elevel,
+                                  int depth, char **err_msg);
 static bool parse_hba_auth_opt(char *name, char *val, HbaLine *hbaline,
                                int elevel, char **err_msg);
 static int  regcomp_auth_token(AuthToken *token, char *filename, int line_num,
@@ -437,43 +444,53 @@ next_field_expand(const char *filename, char **lineptr,
 
         /* Is this referencing a file? */
         if (!initial_quote && buf[0] == '@' && buf[1] != '\0')
-            tokens = tokenize_inc_file(tokens, filename, buf + 1,
-                                       elevel, depth + 1, err_msg);
+            tokens = tokenize_expand_file(tokens, filename, buf + 1,
+                                          elevel, depth + 1, err_msg);
         else
+        {
+            MemoryContext oldcxt;
+
+            /*
+             * lappend() may do its own allocations, so move to the context
+             * for the list of tokens.
+             */
+            oldcxt = MemoryContextSwitchTo(tokenize_context);
             tokens = lappend(tokens, make_auth_token(buf, initial_quote));
+            MemoryContextSwitchTo(oldcxt);
+        }
     } while (trailing_comma && (*err_msg == NULL));
 
     return tokens;
 }
 
 /*
- * tokenize_inc_file
+ * tokenize_expand_file
  *      Expand a file included from another file into an hba "field"
  *
  * Opens and tokenises a file included from another HBA config file with @,
 * and returns all values found therein as a flat list of AuthTokens.  If a
 * @-token is found, recursively expand it.  The newly read tokens are
 * appended to "tokens" (so that foo,bar,@baz does what you expect).
- * All new tokens are allocated in caller's memory context.
+ * All new tokens are allocated in the memory context dedicated to the
+ * list of TokenizedAuthLines, aka tokenize_context.
 *
 * In event of an error, log a message at ereport level elevel, and also
 * set *err_msg to a string describing the error.  Note that the result
 * may be non-NIL anyway, so *err_msg must be tested to determine whether
 * there was an error.
 */
 static List *
-tokenize_inc_file(List *tokens,
-                  const char *outer_filename,
-                  const char *inc_filename,
-                  int elevel,
-                  int depth,
-                  char **err_msg)
+tokenize_expand_file(List *tokens,
+                     const char *outer_filename,
+                     const char *inc_filename,
+                     int elevel,
+                     int depth,
+                     char **err_msg)
 {
     char       *inc_fullname;
     FILE       *inc_file;
     List       *inc_lines;
     ListCell   *inc_line;
-    MemoryContext linecxt;
 
     inc_fullname = AbsoluteConfigLocation(inc_filename, outer_filename);
     inc_file = open_auth_file(inc_fullname, elevel, depth, err_msg);
@@ -486,13 +503,15 @@ tokenize_inc_file(List *tokens,
     }
 
     /* There is possible recursion here if the file contains @ */
-    linecxt = tokenize_auth_file(inc_fullname, inc_file, &inc_lines, elevel,
-                                 depth);
+    tokenize_auth_file(inc_fullname, inc_file, &inc_lines, elevel,
+                       depth);
 
-    FreeFile(inc_file);
     pfree(inc_fullname);
 
-    /* Copy all tokens found in the file and append to the tokens list */
+    /*
+     * Move all the tokens found in the file to the tokens list.  These are
+     * already saved in tokenize_context.
+     */
     foreach(inc_line, inc_lines)
     {
         TokenizedAuthLine *tok_line = (TokenizedAuthLine *) lfirst(inc_line);
@@ -513,16 +532,40 @@ tokenize_inc_file(List *tokens,
             foreach(inc_token, inc_tokens)
             {
                 AuthToken  *token = lfirst(inc_token);
+                MemoryContext oldcxt;
 
-                tokens = lappend(tokens, copy_auth_token(token));
+                /*
+                 * lappend() may do its own allocations, so move to the
+                 * context for the list of tokens.
+                 */
+                oldcxt = MemoryContextSwitchTo(tokenize_context);
+                tokens = lappend(tokens, token);
+                MemoryContextSwitchTo(oldcxt);
             }
         }
     }
 
-    MemoryContextDelete(linecxt);
+    free_auth_file(inc_file, depth);
     return tokens;
 }
 
+/*
+ * free_auth_file
+ *      Free a file opened by open_auth_file().
+ */
+void
+free_auth_file(FILE *file, int depth)
+{
+    FreeFile(file);
+
+    /* If this is the last cleanup, remove the tokenization context */
+    if (depth == 0)
+    {
+        MemoryContextDelete(tokenize_context);
+        tokenize_context = NULL;
+    }
+}
+
 /*
  * open_auth_file
  *      Open the given file.
@@ -558,6 +601,22 @@ open_auth_file(const char *filename, int elevel, int depth,
         return NULL;
     }
 
+    /*
+     * When opening the top-level file, create the memory context used for the
+     * tokenization.  This will be closed with this file when coming back to
+     * this level of cleanup.
+     */
+    if (depth == 0)
+    {
+        /*
+         * A context may be present, but assume that it has been eliminated
+         * already.
+         */
+        tokenize_context = AllocSetContextCreate(CurrentMemoryContext,
+                                                 "tokenize_context",
+                                                 ALLOCSET_START_SMALL_SIZES);
+    }
+
     file = AllocateFile(filename, "r");
     if (file == NULL)
     {
@@ -570,6 +629,8 @@ open_auth_file(const char *filename, int elevel, int depth,
         if (err_msg)
             *err_msg = psprintf("could not open file \"%s\": %s",
                                 filename, strerror(save_errno));
+        /* the caller may care about some specific errno */
+        errno = save_errno;
         return NULL;
     }
 
@@ -593,32 +654,34 @@ tokenize_error_callback(void *arg)
 *      Tokenize the given file.
 *
 * The output is a list of TokenizedAuthLine structs; see the struct definition
- * in libpq/hba.h.
+ * in libpq/hba.h.  This is the central piece in charge of parsing the
+ * authentication files.  All the operations of this function happen in its own
+ * local memory context, easing the cleanup of anything allocated here.  This
+ * matters a lot when reloading authentication files in the postmaster.
 *
 * filename: the absolute path to the target file
 * file: the already-opened target file
- * tok_lines: receives output list
+ * tok_lines: receives output list, saved into tokenize_context
 * elevel: message logging level
 * depth: level of recursion when tokenizing the target file
 *
 * Errors are reported by logging messages at ereport level elevel and by
 * adding TokenizedAuthLine structs containing non-null err_msg fields to the
 * output list.
- *
- * Return value is a memory context which contains all memory allocated by
- * this function (it's a child of caller's context).
 */
-MemoryContext
+void
 tokenize_auth_file(const char *filename, FILE *file, List **tok_lines,
                    int elevel, int depth)
 {
     int         line_number = 1;
     StringInfoData buf;
     MemoryContext linecxt;
-    MemoryContext oldcxt;
+    MemoryContext funccxt;      /* context of this function's caller */
     ErrorContextCallback tokenerrcontext;
     tokenize_error_callback_arg callback_arg;
 
+    Assert(tokenize_context);
+
     callback_arg.filename = filename;
     callback_arg.linenum = line_number;
 
@@ -627,14 +690,19 @@ tokenize_auth_file(const char *filename, FILE *file, List **tok_lines,
     tokenerrcontext.previous = error_context_stack;
     error_context_stack = &tokenerrcontext;
 
+    /*
+     * Do all the local tokenization in its own context, to ease the cleanup
+     * of any memory allocated while tokenizing.
+     */
     linecxt = AllocSetContextCreate(CurrentMemoryContext,
                                     "tokenize_auth_file",
                                     ALLOCSET_SMALL_SIZES);
-    oldcxt = MemoryContextSwitchTo(linecxt);
+    funccxt = MemoryContextSwitchTo(linecxt);
 
     initStringInfo(&buf);
 
-    *tok_lines = NIL;
+    if (depth == 0)
+        *tok_lines = NIL;
 
     while (!feof(file) && !ferror(file))
     {
@@ -694,7 +762,17 @@ tokenize_auth_file(const char *filename, FILE *file, List **tok_lines,
                                              elevel, depth, &err_msg);
             /* add field to line, unless we are at EOL or comment start */
             if (current_field != NIL)
+            {
+                MemoryContext oldcxt;
+
+                /*
+                 * lappend() may do its own allocations, so move to the
+                 * context for the list of tokens.
+                 */
+                oldcxt = MemoryContextSwitchTo(tokenize_context);
                 current_line = lappend(current_line, current_field);
+                MemoryContextSwitchTo(oldcxt);
+            }
         }
 
         /*
@@ -703,25 +781,27 @@ tokenize_auth_file(const char *filename, FILE *file, List **tok_lines,
         if (current_line != NIL || err_msg != NULL)
         {
             TokenizedAuthLine *tok_line;
+            MemoryContext oldcxt;
 
+            oldcxt = MemoryContextSwitchTo(tokenize_context);
             tok_line = (TokenizedAuthLine *) palloc(sizeof(TokenizedAuthLine));
             tok_line->fields = current_line;
             tok_line->file_name = pstrdup(filename);
             tok_line->line_num = line_number;
             tok_line->raw_line = pstrdup(buf.data);
-            tok_line->err_msg = err_msg;
+            tok_line->err_msg = err_msg ? pstrdup(err_msg) : NULL;
             *tok_lines = lappend(*tok_lines, tok_line);
+            MemoryContextSwitchTo(oldcxt);
         }
 
         line_number += continuations + 1;
         callback_arg.linenum = line_number;
     }
 
-    MemoryContextSwitchTo(oldcxt);
+    MemoryContextSwitchTo(funccxt);
+    MemoryContextDelete(linecxt);
 
     error_context_stack = tokenerrcontext.previous;
-
-    return linecxt;
 }
 
 
@@ -2409,7 +2489,6 @@ load_hba(void)
     ListCell   *line;
     List       *new_parsed_lines = NIL;
     bool        ok = true;
-    MemoryContext linecxt;
     MemoryContext oldcxt;
     MemoryContext hbacxt;
 
@@ -2420,8 +2499,7 @@ load_hba(void)
         return false;
     }
 
-    linecxt = tokenize_auth_file(HbaFileName, file, &hba_lines, LOG, 0);
-    FreeFile(file);
+    tokenize_auth_file(HbaFileName, file, &hba_lines, LOG, 0);
 
     /* Now parse all the lines */
     Assert(PostmasterContext);
@@ -2472,7 +2550,7 @@ load_hba(void)
     }
 
     /* Free tokenizer memory */
-    MemoryContextDelete(linecxt);
+    free_auth_file(file, 0);
     MemoryContextSwitchTo(oldcxt);
 
     if (!ok)
@@ -2776,7 +2854,6 @@ load_ident(void)
                *parsed_line_cell;
     List       *new_parsed_lines = NIL;
     bool        ok = true;
-    MemoryContext linecxt;
     MemoryContext oldcxt;
     MemoryContext ident_context;
     IdentLine  *newline;
@@ -2789,8 +2866,7 @@ load_ident(void)
         return false;
     }
 
-    linecxt = tokenize_auth_file(IdentFileName, file, &ident_lines, LOG, 0);
-    FreeFile(file);
+    tokenize_auth_file(IdentFileName, file, &ident_lines, LOG, 0);
 
     /* Now parse all the lines */
     Assert(PostmasterContext);
@@ -2826,7 +2902,7 @@ load_ident(void)
     }
 
     /* Free tokenizer memory */
-    MemoryContextDelete(linecxt);
+    free_auth_file(file, 0);
     MemoryContextSwitchTo(oldcxt);
 
     if (!ok)
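One detail worth calling out, since it recurs in next_field_expand(), tokenize_expand_file(), and two places in tokenize_auth_file() above: lappend() allocates its ListCell in the current memory context, so every list that must survive until free_auth_file() runs is grown while tokenize_context is current. A minimal sketch of the idiom, reusing names from the diff (the surrounding declarations are assumed to exist as in the patched code):

    MemoryContext oldcxt;

    /*
     * Switch to tokenize_context so the new ListCell outlives the local
     * tokenization context and is only freed when free_auth_file() deletes
     * tokenize_context at depth 0.
     */
    oldcxt = MemoryContextSwitchTo(tokenize_context);
    tokens = lappend(tokens, make_auth_token(buf, initial_quote));
    MemoryContextSwitchTo(oldcxt);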
