OSDN Git Service

Add statistics of pg_store_plans
[pgstoreplans/pg_store_plans.git] / pgsp_json.c
index c39931f..e0731ec 100644 (file)
@@ -2,10 +2,10 @@
  *
  * pgsp_json.c: Plan handler for JSON/XML/YAML style plans
  *
- * Copyright (c) 2012-2016, NIPPON TELEGRAPH AND TELEPHONE CORPORATION
+ * Copyright (c) 2012-2021, NIPPON TELEGRAPH AND TELEPHONE CORPORATION
  *
  * IDENTIFICATION
- *       pg_store_plan/pgsp_json.c
+ *       pg_store_plans/pgsp_json.c
  *
  *-------------------------------------------------------------------------
  */
 #include "parser/gram.h"
 #include "utils/xml.h"
 #include "utils/json.h"
+#if PG_VERSION_NUM < 130000
 #include "utils/jsonapi.h"
-
+#else
+#include "common/jsonapi.h"
+#endif
 #include "pgsp_json.h"
 #include "pgsp_json_int.h"
 
@@ -97,10 +100,17 @@ word_table propfields[] =
        {P_GroupKey,            "-" ,"Group Key",                       NULL, true,  NULL,                              SETTER(group_key)},
        {P_GroupSets,           "=" ,"Grouping Sets",           NULL, true,  NULL,                              NULL},
        {P_GroupKeys,           "\\" ,"Group Keys",                     NULL, true,  NULL,                              SETTER(group_key)},
+
+       {P_HashKeys,            "~" ,"Hash Keys",                       NULL, true,  NULL,                              SETTER(hash_key)},
+       {P_HashKey,             "|" ,"Hash Key",                        NULL, true,  NULL,                              SETTER(hash_key)},
+
        {P_Parallel,            "`" ,"Parallel Aware",          NULL, true,  NULL,                              SETTER(parallel_aware)},
+       {P_PartialMode,         ">" ,"Partial Mode",            NULL, true,  conv_partialmode,SETTER(partial_mode)},
        {P_WorkersPlanned,      "{" ,"Workers Planned",         NULL, true,  NULL,                              SETTER(workers_planned)},
        {P_WorkersLaunched, "}" ,"Workers Launched",    NULL, true,  NULL,                              SETTER(workers_launched)},
-                                                                                                                 
+       {P_InnerUnique,         "?" ,"Inner Unique",            NULL, true,  NULL,                              SETTER(inner_unique)},
+       {P_AsyncCapable,        "ac", "Async Capable",          NULL, true,  NULL,                              SETTER(async_capable)},
+
        /* Values of these properties are ignored on normalization */
        {P_FunctionCall,        "y" ,"Function Call",           NULL, false, NULL,                              SETTER(func_call)},
        {P_StartupCost,         "1" ,"Startup Cost",            NULL, false, NULL,                              SETTER(startup_cost)},
@@ -145,11 +155,21 @@ word_table propfields[] =
        {P_ConfArbitIdx,    "@" ,"Conflict Arbiter Indexes",NULL, false,  NULL,                 SETTER(conflict_arbiter_indexes)},
        {P_TuplesInserted,  "^" ,"Tuples Inserted",             NULL, false,  NULL,                             SETTER(tuples_inserted)},
        {P_ConfTuples,          "+" ,"Conflicting Tuples",      NULL, false,  NULL,                             SETTER(conflicting_tuples)},
-       {P_SamplingMethod,  ""  ,"Sampling Method" ,    NULL, false,  NULL,                             SETTER(sampling_method)},
-       {P_SamplingParams,  ""  ,"Sampling Parameters" , NULL, false,  NULL,                    SETTER(sampling_params)},
-       {P_RepeatableSeed,  ""  ,"Repeatable Seed" ,    NULL, false,  NULL,                             SETTER(repeatable_seed)},
+       {P_SamplingMethod,  ":"  ,"Sampling Method" ,   NULL, false,  NULL,                             SETTER(sampling_method)},
+       {P_SamplingParams,  ";"  ,"Sampling Parameters" , NULL, false,  NULL,                   SETTER(sampling_params)},
+       {P_RepeatableSeed,  "<"  ,"Repeatable Seed" ,   NULL, false,  NULL,                             SETTER(repeatable_seed)},
        {P_Workers,             "[" ,"Workers",                         NULL, false,  NULL,                             NULL},
        {P_WorkerNumber,    "]" ,"Worker Number",               NULL, false,  NULL,                             SETTER(worker_number)},
+       {P_TableFuncName,   "aa" ,"Table Function Name",NULL, false,  NULL,                     SETTER(table_func_name)},
+
+       {P_PresortedKey,    "pk" ,"Presorted Key"          ,NULL, false,  NULL,                 SETTER(presorted_key)},
+       {P_FullsortGroups,  "fg" ,"Full-sort Groups"   ,NULL, false,  NULL,                     NULL},
+       {P_SortMethodsUsed, "su" ,"Sort Methods Used"  ,NULL, false,  NULL,                     SETTER(sortmethod_used)},
+       {P_SortSpaceMemory, "sm" ,"Sort Space Memory"  ,NULL, false,  NULL,                     SETTER(sortspace_mem)},
+       {P_GroupCount,          "gc" ,"Group Count"        ,NULL, false,  NULL,                 SETTER(group_count)},
+       {P_AvgSortSpcUsed,  "as" ,"Average Sort Space Used",NULL, false,  NULL,         SETTER(avg_sortspc_used)},
+       {P_PeakSortSpcUsed, "ps" ,"Peak Sort Space Used",NULL, false,  NULL,            SETTER(peak_sortspc_used)},
+       {P_PreSortedGroups, "pg" ,"Pre-sorted Groups"  ,NULL, false,  NULL,                     NULL},
 
        {P_Invalid, NULL, NULL, NULL, false, NULL, NULL}
 };
@@ -180,7 +200,7 @@ word_table nodetypes[] =
        {T_NestLoop,    "t" ,"Nested Loop",     NULL, false, NULL, NULL},
        {T_MergeJoin,   "u" ,"Merge Join",              "Merge", false, NULL, NULL},
        {T_HashJoin,    "v" ,"Hash Join",               "Hash", false, NULL, NULL},
-       {T_Material,    "w" ,"Materialize",     NULL, false, NULL, NULL},
+       {T_Material,    "w" ,"Materialize",             NULL, false, NULL, NULL},
        {T_Sort,                "x" ,"Sort",                    NULL, false, NULL, NULL},
        {T_Group,               "y" ,"Group",                   NULL, false, NULL, NULL},
        {T_Agg,                 "z" ,"Aggregate",               NULL, false, NULL, NULL},
@@ -190,9 +210,26 @@ word_table nodetypes[] =
        {T_SetOp,               "3" ,"SetOp",                   NULL, false, NULL, NULL},
        {T_LockRows,    "4" ,"LockRows",                NULL, false, NULL, NULL},
        {T_Limit,               "5" ,"Limit",                   NULL, false, NULL, NULL},
+#if PG_VERSION_NUM >= 90500
+       {T_SampleScan,  "B" ,"Sample Scan",             NULL, false, NULL, NULL},
+#endif
 #if PG_VERSION_NUM >= 90600
        {T_Gather,              "6" ,"Gather",                  NULL, false, NULL, NULL},
 #endif
+#if PG_VERSION_NUM >= 100000
+       {T_ProjectSet,  "7" ,"ProjectSet",              NULL, false, NULL, NULL},
+       {T_TableFuncScan,"8","Table Function Scan",     NULL, false, NULL, NULL},
+       {T_NamedTuplestoreScan,"9","Named Tuplestore Scan",     NULL, false, NULL, NULL},
+       {T_GatherMerge, "A" ,"Gather Merge",    NULL, false, NULL, NULL},
+#endif
+#if PG_VERSION_NUM >= 130000
+       {T_IncrementalSort,     "C" ,"Incremental Sort", NULL, false, NULL, NULL},
+#endif
+#if PG_VERSION_NUM >= 140000
+       {T_TidRangeScan,"D", "Tid Range Scan",  NULL, false, NULL, NULL},
+       {T_Memoize,             "E", "Memoize",                 NULL, false, NULL, NULL},
+#endif
+
        {T_Invalid,             NULL, NULL, NULL, false, NULL, NULL}
 };
 
@@ -220,6 +257,7 @@ word_table strategies[] =
        {S_Plain,       "p" ,"Plain", NULL, false, NULL, NULL},
        {S_Sorted,      "s" ,"Sorted", NULL, false, NULL, NULL},
        {S_Hashed,      "h" ,"Hashed", NULL, false, NULL, NULL},
+       {S_Mixed,       "m" ,"Mixed", NULL, false, NULL, NULL},
        {S_Invalid,     NULL, NULL, NULL, false, NULL, NULL}
 };
 
@@ -268,11 +306,20 @@ word_table sortspacetype[] =
        {T_Invalid, NULL, NULL, NULL, false, NULL, NULL}
 };
 
+word_table partialmode[] =
+{
+       {T_Invalid,  "p" ,"Partial",    NULL, false, NULL, NULL},
+       {T_Invalid,  "f" ,"Finalize",NULL, false, NULL, NULL},
+       {T_Invalid,  "s" ,"Simple",NULL, false, NULL, NULL},
+       {T_Invalid, NULL, NULL, NULL, false, NULL, NULL}
+};
+
+
 word_table *
 search_word_table(word_table *tbl, const char *word, int mode)
 {
        word_table *p;
-       
+
        bool longname =
                (mode == PGSP_JSON_SHORTEN || mode == PGSP_JSON_NORMALIZE);
 
@@ -372,6 +419,7 @@ conv_strategy(const char *src, pgsp_parser_mode mode)
                    tok == TRUE_P || tok == FALSE_P || \
                        tok == CURRENT_DATE || tok == CURRENT_TIME || \
                    tok == LOCALTIME || tok == LOCALTIMESTAMP)
+#define IS_INDENTED_ARRAY(v) ((v) == P_GroupKeys || (v) == P_HashKeys)
 
 /*
  * norm_yylex: core_yylex with replacing some tokens.
@@ -396,7 +444,7 @@ norm_yylex(char *str, core_YYSTYPE *yylval, YYLTYPE *yylloc, core_yyscan_t yysca
                return -1;
        }
        PG_END_TRY();
-       
+
        /*
         * '?' alone is assumed to be an IDENT.  If there's a real
         * operator '?', this should be confused but there's hardly be.
@@ -424,6 +472,11 @@ norm_yylex(char *str, core_YYSTYPE *yylval, YYLTYPE *yylloc, core_yyscan_t yysca
  * uniqueness, preserve_space puts one space for one existent whitespace for
  * more readability.
  */
+/* scanner interface is changed in PG12 */
+#if PG_VERSION_NUM < 120000
+#define ScanKeywords (*ScanKeywords)
+#define ScanKeywordTokens NumScanKeywords
+#endif
 void
 normalize_expr(char *expr, bool preserve_space)
 {
@@ -439,9 +492,16 @@ normalize_expr(char *expr, bool preserve_space)
        wp = expr;
        yyscanner = scanner_init(expr,
                                                         &yyextra,
-                                                        ScanKeywords,
-                                                        NumScanKeywords);
+                                                        &ScanKeywords,
+                                                        ScanKeywordTokens);
 
+       /*
+        * The warnings about nonstandard escape strings are already emitted in
+        * the core. Just silence them here.
+        */
+#if PG_VERSION_NUM >= 90500
+       yyextra.escape_string_warning = false;
+#endif
        lasttok = 0;
        lastloc = -1;
 
@@ -454,7 +514,7 @@ normalize_expr(char *expr, bool preserve_space)
                if (lastloc >= 0)
                {
                        int i, i2;
-                       
+
                        /* Skipping preceding whitespaces */
                        for(i = lastloc ; i < start && IS_WSCHAR(expr[i]) ; i++);
 
@@ -467,6 +527,18 @@ normalize_expr(char *expr, bool preserve_space)
                                memcpy(wp, expr + i, i2 - i);
                                wp += i2 - i;
                        }
+#if PG_VERSION_NUM >= 100000
+                       /*
+                        * Since PG10, pg_stat_statements doesn't store a trailing
+                        * semicolon in the column "query". Normalization is basically
+                        * useless in such versions but still useful to match utility
+                        * commands, so follow the behavior change.
+                        */
+                       else if (lasttok == ';')
+                       {
+                               /* Just do nothing */
+                       }
+#endif
                        else
                        {
                                /* Upcase keywords */
@@ -487,7 +559,7 @@ normalize_expr(char *expr, bool preserve_space)
                         */
                        if (tok > 0 &&
                                i2 < start &&
-                               (preserve_space || 
+                               (preserve_space ||
                                 (tok >= IDENT && lasttok >= IDENT &&
                                  !IS_CONST(tok) && !IS_CONST(lasttok))))
                                *wp++ = ' ';
@@ -508,7 +580,7 @@ normalize_expr(char *expr, bool preserve_space)
                 */
                if (tok == '-')
                        tok = norm_yylex(expr, &yylval, &yylloc, yyscanner);
-               
+
                /* Exit on parse error. */
                if (tok < 0)
                {
@@ -519,7 +591,7 @@ normalize_expr(char *expr, bool preserve_space)
                if (IS_CONST(tok))
                {
                        YYLTYPE end;
-                       
+
                        tok = norm_yylex(expr, &yylval, &end, yyscanner);
 
                        /* Exit on parse error. */
@@ -540,7 +612,8 @@ normalize_expr(char *expr, bool preserve_space)
                                end++;
                        }
 
-                       while (expr[end - 1] == ' ') end--;                     
+                       while (expr[end - 1] == ' ')
+                               end--;
 
                        *wp++ = '?';
                        yylloc = end;
@@ -600,6 +673,12 @@ conv_sortspacetype(const char *src, pgsp_parser_mode mode)
        return converter_core(sortspacetype, src, mode);
 }
 
+const char *
+conv_partialmode(const char *src, pgsp_parser_mode mode)
+{
+       return converter_core(partialmode, src, mode);
+}
+
 /**** Parser callbacks ****/
 
 /* JSON */
@@ -650,7 +729,7 @@ json_arrstart(void *state)
 {
        pgspParserContext *ctx = (pgspParserContext *)state;
 
-       if (ctx->current_list == P_GroupKeys)
+       if (IS_INDENTED_ARRAY(ctx->current_list))
                ctx->wlist_level++;
 
        appendStringInfoChar(ctx->dest, '[');
@@ -665,11 +744,11 @@ json_arrend(void *state)
 {
        pgspParserContext *ctx = (pgspParserContext *)state;
 
-       if (ctx->current_list == P_GroupKeys)
+       if (IS_INDENTED_ARRAY(ctx->current_list))
                ctx->wlist_level--;
 
        if (ctx->mode == PGSP_JSON_INFLATE &&
-               (ctx->current_list == P_GroupKeys ?
+               (IS_INDENTED_ARRAY(ctx->current_list) ?
                 ctx->wlist_level == 0 : ctx->last_elem_is_object))
        {
                appendStringInfoChar(ctx->dest, '\n');
@@ -694,7 +773,7 @@ json_ofstart(void *state, char *fname, bool isnull)
                ereport(DEBUG1,
                                (errmsg("JSON parser encoutered unknown field name: \"%s\".", fname),
                                 errdetail_log("INPUT: \"%s\"", ctx->org_string)));
-       }               
+       }
 
        ctx->remove = (ctx->mode == PGSP_JSON_NORMALIZE &&
                                   (!p || !p->normalize_use));
@@ -735,7 +814,7 @@ json_ofstart(void *state, char *fname, bool isnull)
        if (ctx->mode == PGSP_JSON_INFLATE)
                appendStringInfoChar(ctx->dest, ' ');
 
-       if (p && p->tag == P_GroupKeys)
+       if (p && IS_INDENTED_ARRAY(p->tag))
        {
                ctx->current_list = p->tag;
                ctx->list_fname = fname;
@@ -762,7 +841,7 @@ json_aestart(void *state, bool isnull)
        if (ctx->remove)
                return;
 
-       if (ctx->current_list == P_GroupKeys &&
+       if (IS_INDENTED_ARRAY(ctx->current_list) &&
                ctx->wlist_level == 1)
        {
                if (!bms_is_member(ctx->level, ctx->first))
@@ -876,7 +955,7 @@ yaml_ofstart(void *state, char *fname, bool isnull)
                ereport(DEBUG1,
                                (errmsg("Short JSON parser encoutered unknown field name: \"%s\".", fname),
                                 errdetail_log("INPUT: \"%s\"", ctx->org_string)));
-       }               
+       }
        s = (p ? p->longname : fname);
 
        if (!bms_is_member(ctx->level, ctx->first))
@@ -994,7 +1073,7 @@ xml_ofstart(void *state, char *fname, bool isnull)
                ereport(DEBUG1,
                                (errmsg("Short JSON parser encoutered unknown field name: \"%s\".", fname),
                                 errdetail_log("INPUT: \"%s\"", ctx->org_string)));
-       }               
+       }
        s = (p ? p->longname : fname);
 
        /*
@@ -1034,7 +1113,7 @@ xml_ofend(void *state, char *fname, bool isnull)
 
        p =     search_word_table(propfields, fname, ctx->mode);
        s = (p ? p->longname : fname);
-       
+
        appendStringInfoString(ctx->dest, "</");
        appendStringInfoString(ctx->dest, escape_xml(hyphenate_words(ctx, s)));
        appendStringInfoChar(ctx->dest, '>');
@@ -1130,6 +1209,9 @@ init_parser_context(pgspParserContext *ctx, int mode,
 bool
 run_pg_parse_json(JsonLexContext *lex, JsonSemAction *sem)
 {
+#if PG_VERSION_NUM >= 130000
+       return pg_parse_json(lex, sem) == JSON_SUCCESS;
+#else
        MemoryContext ccxt = CurrentMemoryContext;
        uint32 saved_IntrHoldoffCount;
 
@@ -1153,7 +1235,7 @@ run_pg_parse_json(JsonLexContext *lex, JsonSemAction *sem)
 
                ecxt = MemoryContextSwitchTo(ccxt);
                errdata = CopyErrorData();
-               
+
                if (errdata->sqlerrcode == ERRCODE_INVALID_TEXT_REPRESENTATION)
                {
                        FlushErrorState();
@@ -1168,6 +1250,7 @@ run_pg_parse_json(JsonLexContext *lex, JsonSemAction *sem)
        PG_END_TRY();
 
        return true;
+#endif
 }
 
 void
@@ -1242,7 +1325,7 @@ pgsp_json_inflate(char *json)
                if (ctx.dest->len > 0 &&
                        ctx.dest->data[ctx.dest->len - 1] != '\n')
                        appendStringInfoChar(ctx.dest, '\n');
-               
+
                if (ctx.dest->len == 0)
                        appendStringInfoString(ctx.dest, "<Input was not JSON>");
                else
@@ -1278,7 +1361,7 @@ pgsp_json_yamlize(char *json)
                if (ctx.dest->len > 0 &&
                        ctx.dest->data[ctx.dest->len - 1] != '\n')
                        appendStringInfoChar(ctx.dest, '\n');
-               
+
                if (ctx.dest->len == 0)
                        appendStringInfoString(ctx.dest, "<Input was not JSON>");
                else
@@ -1320,7 +1403,7 @@ pgsp_json_xmlize(char *json)
                if (ctx.dest->len > start_len &&
                        ctx.dest->data[ctx.dest->len - 1] != '\n')
                        appendStringInfoChar(ctx.dest, '\n');
-               
+
                if (ctx.dest->len == start_len)
                {
                        resetStringInfo(ctx.dest);