/* Copyright 2006 Joachim Zobel.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * This file implements a filter that builds a node tree from the SAX
 * buckets at EOS, executes a given transformation on the tree and
 * serializes the tree back into SAX buckets.
 */

#include <string.h>

#include <httpd.h>
#include <http_config.h>
#include <http_core.h>
#include <http_log.h>
#include <http_protocol.h>
#include <util_filter.h>

#include <apr_general.h>
#include <apr_pools.h>
#include <apr_tables.h>
#include <apr_buckets.h>

module AP_MODULE_DECLARE_DATA xml2_module;

//#include
#include <libxml/parser.h>
#include <libxml/SAX2.h>
#include <libxml/tree.h>
#include <libxml/xmlstring.h>

#include "frag_buffer.h"
#include "buckets_sax.h"
#include "sax_util.h"
#include "sxpath.h"
#include "tree_transform.h"

/*****************************************************************************
 * libxml2 DOM filter
 *****************************************************************************/

/*
 * Context for the xml2_xpath_filter
 */
typedef struct {
    // in CDATA section?
    int cdata;
    bucket_stack_t *newns;
    //apr_array_header_t *atts;
    xmlParserCtxtPtr parser;
    xml2_tree_transform transform;
    // This is passed to each call to
    // transform.
    void *param;
    // The sax context is needed to recreate
    // SAX buckets after transforming.
    // This has the transform filter as its
    // current filter.
    sax_ctx *sax;
    // The outbound brigade
    apr_bucket_brigade *bb_out;
} xml2_tree_ctx;

/*
 * transform_filter_y_connect
 */
void transform_filter_y_connect(ap_filter_t * trans_filter, ap_filter_t *f)
{
    trans_filter->next->next = f->next;
    ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r,
                  "Connecting the %s filter to the y leg.",
                  f->next->frec->name);
}

/*
 * transform_filter_create
 */
ap_filter_t *transform_filter_create(ap_filter_t * f, sax_ctx * sax,
                                     xml2_tree_transform transform,
                                     void *param)
{
    ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r,
                  "transform_filter_create called for %s.", f->frec->name);

    // Create the filter context
    xml2_tree_ctx *tctx = apr_palloc(f->r->pool, sizeof(xml2_tree_ctx));
    // and initialize it.
    memset(tctx, 0, sizeof(xml2_tree_ctx));
    // pass the transform function to the filter
    tctx->transform = transform;
    tctx->param = param;
    tctx->sax = sax;
    // with an outgoing brigade
    tctx->bb_out = apr_brigade_create(f->r->pool, f->c->bucket_alloc);

    // Create the pre_include filter struct
    ap_filter_t *pi_filter = apr_palloc(f->r->pool, sizeof(ap_filter_t));
    // and initialize it.
    pi_filter->ctx = NULL;
    // Retrieve the pre_include filter
    pi_filter->frec = ap_get_output_filter_handle("pre_include");
    pi_filter->r = f->r;
    pi_filter->c = f->c;

    // Retrieve the _transform filter
    ap_filter_rec_t *frec = ap_get_output_filter_handle("_transform");
    // Create the _transform filter struct
    ap_filter_t *trans_filter = apr_palloc(f->r->pool, sizeof(ap_filter_t));
    // and initialize it.
    trans_filter->ctx = tctx;
    trans_filter->frec = frec;
    trans_filter->r = f->r;
    trans_filter->c = f->c;

    // Set up the y chain
    trans_filter->next = pi_filter;
    transform_filter_y_connect(trans_filter, f);

    if (sax) {
        transform_filter_set_sax(trans_filter, sax);
    }

    // Call the filter_init function. This will not be called
    // automatically since this is a y-shaped filter chain
    // and this filter is not in the straight line.
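    // (Sketch of the resulting chain, as set up above and in
    //  transform_filter_y_connect:
    //      ... -> f -> f->next -> ...                    straight line
    //      trans_filter -> pi_filter -> f->next -> ...   y leg
    //  so output produced through the _transform filter rejoins the
    //  normal chain behind f.)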
    if (OK != frec->filter_init_func(trans_filter)) {
        ap_log_rerror(APLOG_MARK, APLOG_ERR, 0, f->r,
                      "filter_init_func for _transform failed.");
        return NULL;
    }

    return trans_filter;
}

/*
 * transform_filter_set_sax
 */
void transform_filter_set_sax(ap_filter_t * f, sax_ctx * sax)
{
    xml2_tree_ctx *tctx = f->ctx;
    // This is a possible streaming memory leak
    tctx->sax = apr_pcalloc(f->r->pool, sizeof(sax_ctx));
    sax_ctx_init_again(tctx->sax, &sax->bctx, sax->mctx, tctx->bb_out,
                       f, NULL);
}

/*
 * Technical declarations
 */
static apr_status_t xml2_tree2sax(sax_ctx * c, ap_filter_t * f,
                                  xmlDocPtr doc);

//typedef const xml_char_t *xml_char_cp;
//typedef xml_char_cp ns_t[2];
//typedef xml_char_cp at_t[5];
const apr_size_t SZ_AT = 5 * sizeof(const xml_char_t *);
const apr_size_t SZ_NS = 2 * sizeof(const xml_char_t *);

/*
 * xml2_tree_filter_init
 */
int xml2_tree_filter_init(ap_filter_t * f)
{
    xml2_tree_ctx *fctx = f->ctx;
    ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r,
                  "xml2_tree_filter_init called.");
    xml2_tree_log_filter_chain(APLOG_MARK, f);

    fctx->cdata = 0;
    fctx->newns = apr_array_make(f->r->pool, 5, SZ_NS);
    // fctx->atts = apr_array_make(f->r->pool, 10, sizeof(at_t));

    // Since we will not do any SAX parsing, we give neither
    // handler nor context.
    fctx->parser = xmlCreatePushParserCtxt(NULL, NULL, NULL, 0, NULL);
    if (!fctx->parser) {
        ap_log_rerror(APLOG_MARK, APLOG_ERR, 0, f->r,
                      "xmlCreatePushParserCtxt failed.");
    } else {
        fctx->parser->myDoc = xmlNewDoc(BAD_CAST "1.0");
    }
    apr_pool_cleanup_register(f->r->pool, fctx->parser,
                              (void *) xmlFreeParserCtxt,
                              apr_pool_cleanup_null);
    return OK;
}

/**
 * Dump a libxml2 document to the log.
 * @param file - the current source file (usually from APLOG_MARK)
 * @param line - the current source line
 * @param r - the request
 * @param doc - will be dumped
 */
static void xml2_tree_log_dump_doc(const char *file, int line,
                                   request_rec * r, xmlDocPtr doc)
{
    /*
    xml_char_t **buf;
    int sz;
    sax_check_pool(r->pool);
    // Corrupts request pool
    xmlDocDumpFormatMemory(doc, buf, &sz, 1);
    sax_check_pool(r->pool);
    ap_log_rerror(file, line, APLOG_DEBUG, 0, r, *buf);
    sax_check_pool(r->pool);
    xmlFree(*buf);
    sax_check_pool(r->pool);
    */
}

/*
 * xml2_tree_log_filter_chain
 */
void xml2_tree_log_filter_chain(const char *file, int line, ap_filter_t *f)
{
    request_rec *r = f ? f->r : NULL;
    while (f) {
        ap_log_rerror(file, line, APLOG_DEBUG, 0, r,
                      "Filter: %s", f->frec->name);
        f = f->next;
    }
}

/*
 * xml2_tree_filter
 */
int xml2_tree_filter(ap_filter_t * f, apr_bucket_brigade * bb)
{
    apr_bucket *b;
    xml2_tree_ctx *fctx = f->ctx;
    apr_bucket_brigade *bb_out = fctx->bb_out;
    apr_status_t rv = APR_SUCCESS;

    ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r,
                  "xml2_tree_filter called.");
    xml2_tree_log_filter_chain(APLOG_MARK, f);

    for (b = APR_BRIGADE_FIRST(bb);
         !APR_BRIGADE_EMPTY(bb);
         b = APR_BRIGADE_FIRST(bb)) {
        APR_BRIGADE_CHECK_CONSISTENCY(bb);
        if (!BUCKET_IS_SAX(b)) {
            if (APR_BUCKET_IS_EOS(b)) {
                ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r,
                              "EOS processing.");
                xmlSAX2EndDocument(fctx->parser);
                xmlDocPtr doc = fctx->parser->myDoc;

                // Call configured DOM handler
                ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r,
                              "Calling transform:");
                sax_check_pool(f->r->pool);
                xml2_tree_log_dump_doc(APLOG_MARK, f->r, doc);
                sax_check_pool(f->r->pool);
                rv = fctx->transform(fctx->param, doc);
                sax_check_pool(f->r->pool);

                // Clean up the brigade before refilling it.
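                // (The sax context was initialized with bb_out in
                //  transform_filter_set_sax, so the buckets produced by
                //  xml2_tree2sax below should end up in bb_out, which is
                //  passed downstream at the end of this function.)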
                apr_brigade_cleanup(bb_out);

                ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r,
                              "After transform:");
                xml2_tree_log_dump_doc(APLOG_MARK, f->r, doc);

                // doc => SAX
                // This converts the tree to sax buckets
                // that are appended to the SAX context's brigade.
                ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r,
                              "Converting transformed tree into SAX buckets.");
                rv = xml2_tree2sax(fctx->sax, f, doc);
                xmlClearParserCtxt(fctx->parser);
                if (rv != APR_SUCCESS) {
                    return rv;
                }
                ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r,
                              "Finished converting tree into SAX buckets.");
            } else {
                ap_log_rerror(APLOG_MARK, APLOG_WARNING, 0, f->r,
                              "Dropping non-SAX bucket in tree filter.");
            }
            apr_bucket_delete(b);
            continue;
        }

        ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r,
                      "Sending %s event to libXML.",
                      sax_event_which_to_str(sax_inspect_which(b)));

        switch (sax_inspect_which(b)) {
        case START_NS:
            {
                // Push ns onto the newns stack
                const xml_char_t **ns = apr_array_push(fctx->newns);
                start_ns_t *sn = sax_inspect_event(b);
                ns[0] = sn->prefix;
                ns[1] = sn->uri;
            }
            break;
        case END_NS:
            // ignored
            break;
        case START_ELT:
            {
                start_elt_t *se = sax_inspect_event(b);
                attr_t *attr = se->atts;
                apr_size_t sza = 0;
                // Count the attributes
                for (sza = 0; attr[sza].name.name; sza++) {
                }
                const apr_size_t at_sz = sza;
                const apr_size_t ns_sz = fctx->newns->nelts;
                char *mem = apr_bucket_alloc(at_sz * SZ_AT, b->list);
                const xml_char_t **at_par = (const xml_char_t **) mem;
                // Fill the attributes
                for (sza = 0; attr[sza].name.name; sza++) {
                    const apr_size_t start = 5 * sza;
                    at_par[start + 0] = attr[sza].name.name;
                    at_par[start + 1] = attr[sza].name.prefix;
                    at_par[start + 2] = attr[sza].name.uri;
                    at_par[start + 3] = attr[sza].value;
                    if (attr[sza].value)
                        at_par[start + 4] =
                            attr[sza].value + strlen(attr[sza].value);
                    else
                        // This is weird, but xmlSAX2AttributeNs
                        // dereferences valuend.
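                        // (Pointing valuend at an empty string keeps it
                        //  dereferenceable; the assumption is that libxml2
                        //  only uses it to determine the value length.)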
                        at_par[start + 4] = (const xml_char_t *) "";
                }
                const xml_char_t **ns_par =
                    (const xml_char_t **) fctx->newns->elts;
                /*
                ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r,
                              "at_par has length %d.", at_sz);
                ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r,
                              "ns_par has length %d.", ns_sz);
                ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r,
                              "at_par %s,%s,%s,%s,%s.",
                              at_par[0], at_par[1], at_par[2],
                              at_par[3], at_par[4]);
                ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r,
                              "ns_par %s,%s.", ns_par[0], ns_par[1]);
                */
                xmlSAX2StartElementNs(fctx->parser, se->name.name,
                                      se->name.prefix, se->name.uri,
                                      ns_sz, ns_par, at_sz, 0, at_par);
                // cleanup newns
                fctx->newns->nelts = 0;
                // cleanup at_par + ns_par
                apr_bucket_free(at_par);
            }
            break;
        case END_ELT:
            {
                end_elt_t *ee = sax_inspect_event(b);
                xmlSAX2EndElementNs(fctx->parser, ee->name.name,
                                    ee->name.prefix, ee->name.uri);
            }
            break;
        case XML_DECL:
            {
                xml_decl_t *xd = sax_inspect_event(b);
                // cleanup newns
                fctx->newns->nelts = 0;
                fctx->parser->version = xmlCharStrdup(xd->version);
                fctx->parser->encoding = xmlCharStrdup(xd->encoding);
                fctx->parser->standalone = xd->standalone;
                xmlSAX2StartDocument(fctx->parser);
                // Reset fctx
                fctx->cdata = 0;
                fctx->newns->nelts = 0;
                fctx->sax->starts->nelts = 0;
            }
            break;
        case COMMENT:
            {
                character_t *c = sax_inspect_event(b);
                xmlSAX2Comment(fctx->parser, c->text);
            }
            break;
        case START_CD:
            fctx->cdata = 1;
            break;
        case END_CD:
            fctx->cdata = 0;
            break;
        case WHITE:
        case CHARACTER:
            {
                character_t *c = sax_inspect_event(b);
                if (fctx->cdata) {
                    xmlSAX2CDataBlock(fctx->parser, c->text, c->len);
                } else {
                    xmlSAX2Characters(fctx->parser, c->text, c->len);
                }
            }
            break;
        case PROC_INSTR:
            {
                proc_instr_t *pi = sax_inspect_event(b);
                xmlSAX2ProcessingInstruction(fctx->parser, pi->target,
                                             pi->data);
            }
            break;
        }
        /*
        ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r,
                      "Deleting bucket %x. Sentinel %x, first %x, last %x. "
                      "Neighbours %x, %x",
                      b, APR_BRIGADE_SENTINEL(bb), APR_BRIGADE_FIRST(bb),
                      APR_BRIGADE_LAST(bb), APR_BUCKET_PREV(b),
                      APR_BUCKET_NEXT(b));
        */
        apr_bucket_delete(b);
        sax_check_pool(f->r->pool);
    }

    rv = ap_pass_brigade(f->next, bb_out);
    apr_brigade_cleanup(bb_out);
    return rv;
}

/*****************************************************************************
 * Turn XML2 xmlNodes back into SAX buckets
 *****************************************************************************/
static apr_status_t xml2_tree2sax_walk_children(sax_ctx * c, xmlNode * nd);
static apr_status_t xml2_tree2sax_process_node(sax_ctx * c, xmlNode * nd);

/**
 * Convert a tree into a stream of sax buckets.
 * @param c - The sax_ctx.
 * @param f - The current filter.
 * @param doc - The document tree to convert.
 * @return APR_SUCCESS on success.
 */
static apr_status_t xml2_tree2sax(sax_ctx * c, ap_filter_t * f,
                                  xmlDocPtr doc)
{
    apr_status_t rv = APR_SUCCESS;
    // Pass the bb as part of the sax_ctx. Bad.
    // apr_bucket_brigade *bb_org = c->bb;
    // c->bb = bb;
    sax_check_pool(c->r->pool);
    sax_bucket_append(c, sax_bucket_create_xml_decl(c, doc->version,
                                                    doc->encoding,
                                                    doc->standalone));
    xmlNode *root = xmlDocGetRootElement(doc);

    // This is a hack. We exclude all root node namespaces from
    // the conversion to sax buckets, because we assume they
    // have been added while creating the partial document.
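    // Temporarily detach the root namespace definitions and restore them
    // after processing (in both the error and the success path below).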
    xmlNs *rootNs = root->nsDef;
    root->nsDef = NULL;
    rv = xml2_tree2sax_process_node(c, root);
    if (rv != APR_SUCCESS) {
        ap_log_rerror(APLOG_MARK, APLOG_WARNING, 0, c->bctx.r_log,
                      "root node processing returned %d.", rv);
        root->nsDef = rootNs;
        return rv;
    }
    root->nsDef = rootNs;

    APR_BRIGADE_INSERT_TAIL(c->bb, apr_bucket_eos_create(c->list));
    sax_check_pool(c->r->pool);
    rv = sax_pass_buckets(c, 0);
    // restore bb
    // c->bb = bb_org;
    return rv;
}

typedef struct {
    // The number of attributes
    apr_size_t cnt;
    // The buffer size needed for all \0 terminated values.
    apr_size_t len_val;
    // The attributes array. This is allocated with bucket alloc.
    xml_char_t **attr;
} tree_attr_t;

/**
 * Do the attribute processing.
 * @param c - The sax context.
 * @param at - The attribute to process.
 * @return An array with 5 entries per attribute for use with
 * sax_bucket_create_elt_2 (localname/prefix/URI/value/end). This must be
 * freed with apr_bucket_free after use.
 */
#define xml2_tree2sax_process_attr(c, at) \
    (xml2_tree2sax_process_attr_rec(c, at, 0, 0))

/*
 * The actual implementation. This is done recursively because it makes it
 * easier to get along with one allocation.
 */
static tree_attr_t xml2_tree2sax_process_attr_rec(sax_ctx * c, xmlAttr * at,
                                                  apr_size_t len,
                                                  apr_size_t cnt_a)
{
    xmlNode *chld;
    apr_size_t loc_len = 0;

    if (!at) {
        tree_attr_t empty = { 0, 0, NULL };
        return empty;
    }

    // Walk the content text nodes to extract the value
    frag_buffer_t *frag_buf = c->mctx->frag_buf;
    for (chld = at->children; chld; chld = chld->next) {
        ap_assert(chld->type == XML_TEXT_NODE);
        const apr_size_t len_s = strlen(chld->content);
        loc_len += len_s;
        frag_write(frag_buf, chld->content, len_s);
    }
    // Terminate
    frag_write(frag_buf, "\0", 1);
    loc_len += 1;
    cnt_a += 1;
    len += loc_len;

    tree_attr_t tat;
    if (at->next) {
        tat = xml2_tree2sax_process_attr_rec(c, at->next, len, cnt_a);
    } else {
        // Now we allocate the needed memory
        tat.cnt = cnt_a;
        tat.len_val = len;
        tat.attr = apr_bucket_alloc(cnt_a * sizeof(const xml_char_t *) * 5
                                    + len, c->list);
        // and copy the value frag buffer into the upper part.
        const apr_size_t rv =
            frag_to_buffer(frag_buf, 0,
                           (xml_char_t *) (tat.attr + tat.cnt * 5), len);
        ap_assert(rv == 0);
        frag_clean(frag_buf);
    }

    // The current 5 attribute entries
    const xml_char_t **a5 = (const xml_char_t **) tat.attr + (cnt_a - 1) * 5;
    // The current value buffer
    const xml_char_t *buf = (const xml_char_t *) (tat.attr + tat.cnt * 5)
                            + tat.len_val - loc_len;
    a5[0] = at->name;
    a5[1] = at->ns ? at->ns->prefix : NULL;
    a5[2] = at->ns ? at->ns->href : NULL;
    a5[3] = buf;
    a5[4] = buf + loc_len;
    return tat;
}

/**
 * Do the element processing.
 * @param c - The sax context.
 * @param nd - The node to process.
 * @return APR_SUCCESS if no error occurred.
 */
static apr_status_t xml2_tree2sax_process_elt(sax_ctx * c, xmlNode * nd)
{
    apr_status_t rv = APR_SUCCESS;
    tree_attr_t tat = xml2_tree2sax_process_attr(c, nd->properties);
    bucket_sax *bs = sax_bucket_create_elt_2(c, nd->name,
                                             nd->ns->prefix, nd->ns->href,
                                             tat.cnt,
                                             (const xml_char_t **) tat.attr);
    if (tat.attr) {
        // apr_bucket_free should do this
        apr_bucket_free(tat.attr);
    }
    sax_event_set_start_id(c, bs->event);
    sax_bucket_append(c, bs);

    rv = xml2_tree2sax_walk_children(c, nd);
    if (rv != APR_SUCCESS) {
        return rv;
    }

    bs = sax_bucket_create_elt_2(c, nd->name, nd->ns->prefix, nd->ns->href,
                                 0, NULL);
    sax_bucket_set_which(bs, END_ELT);
    sax_event_set_end_id(c, bs->event);
    sax_bucket_append(c, bs);
    return rv;
}

/**
 * Do the namespace definition part for a given node. If there is no ns def.
 * left to process, xml2_tree2sax_process_elt is called.
 * @param c - The sax context.
 * @param nd - The node to process.
 * @return APR_SUCCESS if no error occurred.
 */
static apr_status_t xml2_tree2sax_process_ns(sax_ctx * c, xmlNode * nd,
                                             xmlNs * nsDef)
{
    apr_status_t rv = APR_SUCCESS;
    if (nsDef) {
        // Process namespace defs.
        // Create START_NS
        bucket_sax *bs = sax_bucket_create_ns(c, nsDef->prefix, nsDef->href);
        sax_event_set_start_id(c, bs->event);
        sax_bucket_append(c, bs);
        // copy the event on the namespaces stack
        start_ns_t *ns = apr_array_push(c->bctx.namespaces);
        *ns = *((start_ns_t *) bs->event);
        // recursion
        rv = xml2_tree2sax_process_ns(c, nd, nsDef->next);
        if (rv != APR_SUCCESS) {
            return rv;
        }
        // pop to forget
        apr_array_pop(c->bctx.namespaces);
        // Create END_NS
        bs = sax_bucket_create_ns(c, nsDef->prefix, nsDef->href);
        sax_bucket_set_which(bs, END_NS);
        sax_event_set_end_id(c, bs->event);
        sax_bucket_append(c, bs);
    } else {
        rv = xml2_tree2sax_process_elt(c, nd);
    }
    return rv;
}

/**
 * Do the text processing.
 * @param c - The sax context.
 * @param nd - The node to process.
 * @param encode - needs encoding on output
 * @return APR_SUCCESS if no error occurred.
 */
static apr_status_t xml2_tree2sax_process_text(sax_ctx * c, xmlNode * nd,
                                               int encode)
{
    bucket_sax *bs = sax_bucket_create_char(c, nd->content,
                                            strlen(nd->content), encode);
    sax_bucket_append(c, bs);
    return APR_SUCCESS;
}

/**
 * Do the comment processing.
 * @param c - The sax context.
 * @param nd - The node to process.
 * @return APR_SUCCESS if no error occurred.
 */
static apr_status_t xml2_tree2sax_process_comment(sax_ctx * c, xmlNode * nd)
{
    bucket_sax *bs = sax_bucket_create_char(c, nd->content,
                                            strlen(nd->content), 0);
    sax_bucket_set_which(bs, COMMENT);
    sax_bucket_append(c, bs);
    return APR_SUCCESS;
}

/**
 * Do the processing instruction processing.
 * @param c - The sax context.
 * @param nd - The node to process.
 * @return APR_SUCCESS if no error occurred.
 */
static apr_status_t xml2_tree2sax_process_proc_instr(sax_ctx * c,
                                                     xmlNode * nd)
{
    bucket_sax *bs = sax_bucket_create_proc_instr(c, nd->name, nd->content);
    sax_bucket_append(c, bs);
    return APR_SUCCESS;
}

/**
 * Turn the given node into SAX buckets. These are appended to the
 * context's brigade.
 * @param c - The sax context.
 * @param nd - The node to process.
 * @return APR_SUCCESS if no error occurred.
 */
static apr_status_t xml2_tree2sax_process_node(sax_ctx * c, xmlNode * nd)
{
    apr_status_t rv = APR_SUCCESS;
    ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, c->bctx.r_log,
                  "Processing xmlNode of type %d.", nd->type);

    if ((nd->type != XML_CDATA_SECTION_NODE) && c->is_cdata) {
        // End of CDATA section
        sax_bucket_append(c, sax_bucket_create_empty(c, END_CD));
        c->is_cdata = 0;
    }

    switch (nd->type) {
    case XML_ELEMENT_NODE:
        rv = xml2_tree2sax_process_ns(c, nd, nd->nsDef);
        break;
    case XML_CDATA_SECTION_NODE:
        if (!c->is_cdata) {
            sax_bucket_append(c, sax_bucket_create_empty(c, START_CD));
            c->is_cdata = 1;
        }
        // fall through
    case XML_TEXT_NODE:
        rv = xml2_tree2sax_process_text(c, nd, !c->is_cdata);
        break;
    case XML_COMMENT_NODE:
        rv = xml2_tree2sax_process_comment(c, nd);
        break;
    case XML_PI_NODE:
        rv = xml2_tree2sax_process_proc_instr(c, nd);
        break;
    default:
        ap_log_rerror(APLOG_MARK, APLOG_WARNING, 0, c->bctx.r_log,
                      "Ignoring xmlNode of type %d.", nd->type);
        break;
    }
    return rv;
}

/**
 * Walks the children and calls *_process_node on each.
 * @param c - The sax context.
 * @param nd - The node to process.
 * @return APR_SUCCESS if no error occurred.
 */
static apr_status_t xml2_tree2sax_walk_children(sax_ctx * c, xmlNode * nd)
{
    xmlNode *chld;
    apr_status_t rv = APR_SUCCESS;
    ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, c->bctx.r_log,
                  "Walking children.");
    for (chld = nd->children; chld; chld = chld->next) {
        rv = xml2_tree2sax_process_node(c, chld);
        if (rv != APR_SUCCESS) {
            return rv;
        }
    }
    return rv;
}

/*****************************************************************************
 * Helper functions for transform support
 *****************************************************************************/

/*
 * transform_start_faked_doc
 */
apr_status_t transform_start_faked_doc(sax_ctx * sax,
                                       apr_bucket_brigade * bb_out,
                                       apr_bucket * b_xml_decl,
                                       apr_array_header_t * namespaces)
{
    apr_bucket *b_xml_decl_new;
    request_rec *r = sax->r;
    ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, r,
                  "starting faked document.");

    apr_bucket_copy(b_xml_decl, &b_xml_decl_new);
    APR_BRIGADE_INSERT_TAIL(bb_out, b_xml_decl_new);

    int i;
    for (i = 0; i < namespaces->nelts; i++) {
        start_ns_t *sn = (start_ns_t *) (namespaces->elts
                                         + i * namespaces->elt_size);
        bucket_sax *bs = sax_bucket_create_ns(sax, sn->prefix, sn->uri);
        start_ns_t *sn_new = bs->event;
        // We need to know the se_id when the end_ns
        // bucket is created.
        sn_new->se_id = sn->se_id;
        apr_bucket *b_start_ns = sax_bucket_wrap(sax, bs);
        APR_BRIGADE_INSERT_TAIL(bb_out, b_start_ns);
    }
    return APR_SUCCESS;
}

/*
 * transform_end_faked_doc
 */
apr_status_t transform_end_faked_doc(sax_ctx * sax,
                                     apr_bucket_brigade * bb_out,
                                     apr_array_header_t * namespaces)
{
    request_rec *r = sax->r;
    ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, r,
                  "ending faked document.");

    // Walk down the namespaces
    int i;
    ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, r,
                  "Sending %d END_NS buckets.", namespaces->nelts);
    for (i = namespaces->nelts; i > 0; i--) {
        start_ns_t *sn = (start_ns_t *) (namespaces->elts
                                         + (i - 1) * namespaces->elt_size);
        // We create a matching end for each start bucket
        bucket_sax *bs = sax_bucket_create_ns(sax, sn->prefix, sn->uri);
        end_ns_t *en = bs->event;
        sax_bucket_set_which(bs, END_NS);
        en->se_id = -sn->se_id;
        apr_bucket *b_end_ns = sax_bucket_wrap(sax, bs);
        ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, r, "Done with %d.", i);
        // and append it.
        APR_BRIGADE_INSERT_TAIL(bb_out, b_end_ns);
    }
    APR_BRIGADE_INSERT_TAIL(bb_out,
                            apr_bucket_eos_create(bb_out->bucket_alloc));
    return APR_SUCCESS;
}
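
/*
 * Usage sketch (illustrative only, not compiled in): a calling filter
 * would provide a transform callback and create the y leg with
 * transform_filter_create. The callback signature below is inferred from
 * the call fctx->transform(fctx->param, doc) in xml2_tree_filter above;
 * the authoritative typedef is xml2_tree_transform in tree_transform.h.
 * The names my_transform and my_param are hypothetical.
 *
 *     static apr_status_t my_transform(void *param, xmlDocPtr doc)
 *     {
 *         // Modify the tree in place, e.g. with the sxpath /
 *         // tree_transform helpers, before it is serialized back
 *         // into SAX buckets.
 *         return APR_SUCCESS;
 *     }
 *
 *     // Inside the calling filter (with its sax_ctx *sax at hand):
 *     //     ap_filter_t *tf = transform_filter_create(f, sax,
 *     //                                               my_transform,
 *     //                                               my_param);
 *     //     if (!tf) {
 *     //         // filter_init for _transform failed
 *     //     }
 */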