public byte[] Bld(Xowe_wiki cur_wiki, Xoae_page page, Xow_popup_itm popup_itm, Bry_bfr wrdx_bfr) {
   // Builds the popup html for page: post-processes the rendered wikitext sitting in wrdx_bfr,
   // then merges it with page metadata (url, title, wiki, size, modified date) via fmtr_popup.
   // NOTE(review): Clean_bfr receives the `wiki` field while tidy uses cur_wiki — confirm intentional.
   if (output_js_clean) {
     cur_wiki.Html_mgr().Js_cleaner().Clean_bfr(wiki, page.Ttl(), wrdx_bfr, 0);
   }
   if (output_tidy) {
     cur_wiki.Html_mgr().Tidy_mgr().Exec_tidy(wrdx_bfr, Bool_.Y, page.Url_bry_safe());
   }
   byte[] hdom_bry = wrdx_bfr.To_bry_and_clear();

   // build the canonical page url; encode Ttl().Full_db() rather than page.Url().Raw(),
   // which fails for Special:Search; PAGE:en.w:Earth and "Quotations"; DATE:2014-06-29
   wrdx_bfr.Add(page.Wiki().Domain_bry());
   wrdx_bfr.Add(gplx.xowa.htmls.hrefs.Xoh_href_.Bry__wiki);
   wrdx_bfr.Add(gplx.langs.htmls.encoders.Gfo_url_encoder_.Href.Encode(page.Ttl().Full_db()));
   String page_url = wrdx_bfr.To_str_and_clear();

   // merge html + metadata into the popup skeleton; wrdx_bfr doubles as scratch for the sub-fmtrs
   fmtr_popup.Bld_bfr_many(
       wrdx_bfr,
       hdom_bry,
       wiki.Lang().Dir_ltr_bry(),
       page_url,
       String_.new_u8(page.Ttl().Full_txt_w_ttl_case()),
       popup_itm.Popup_id(),
       Xow_popup_html_bldr_.Bld_fmtr_wiki(fmtr_wiki, wrdx_bfr, cur_wiki.Domain_bry(), page.Wiki().Domain_bry()), // NOTE: use cur_wiki, not page_wiki; DATE:2014-06-28
       gplx.core.ios.Io_size_.To_str(page.Db().Text().Text_bry().length),
       page.Db().Page().Modified_on().XtoStr_fmt_yyyy_MM_dd_HH_mm_ss(),
       Xow_popup_html_bldr_.Bld_fmtr_viewed(fmtr_viewed, app, wiki, wrdx_bfr, page.Ttl()),
       app.Fsys_mgr().Root_dir().To_http_file_bry());
   return wrdx_bfr.To_bry_and_clear();
 }
 public void Bfr_arg__add(Bry_bfr bfr) {
   // Emits the toc-section fmt into bfr; skip entirely when there is no jdoc
   // (a wdoc without a jdoc occurs in at least one TEST).
   if (jdoc == null) {
     return;
   }
   // serialize the json root into the scratch bfr, then hand everything to the fmtr in one call
   jdoc.Root_nde().Print_as_json(tmp_bfr, 0);
   byte[] json_bry = tmp_bfr.To_bry_and_clear();
   fmtr.Bld_bfr_many(bfr, toc_data.Href(), toc_data.Text(), toggle_itm.Html_toggle_btn(), toggle_itm.Html_toggle_hdr(), json_bry);
 }
 public byte[] List_to_str(byte[][] segs_ary) {
   // Joins byte-array segments into a human-readable list: "", "a", "a and b", "a, b and c".
   int len = segs_ary.length;
   if (len == 0) return Bry_.Empty;          // nothing to join
   if (len == 1) return segs_ary[0];         // single segment; return as-is
   if (Msg_and == null) List_to_str_init();  // lazy-init localized joiner msgs on first use
   int last_idx = len - 1;
   // every segment except the last is comma-separated
   for (int idx = 0; idx < last_idx; ++idx) {
     if (idx != 0) tmp_bfr.Add(Msg_comma_separator);
     tmp_bfr.Add(segs_ary[idx]);
   }
   // final segment is attached with the localized "and" + word separator
   tmp_bfr.Add(Msg_and).Add(Msg_word_separator).Add(segs_ary[last_idx]);
   return tmp_bfr.To_bry_and_clear();
 }
 // Rewrites every css "url(...)" reference in src into a local form:
 // - each unique cleaned image url is added to list (presumably the download queue — confirm with caller)
 // - the url is re-emitted into the output, always quoted, with invalid chars replaced
 // Returns the rewritten css bytes; on any exception, warns and returns src unchanged.
 // rel_url_prefix is only passed through to Import_url_chk (and the failure log) here.
 public byte[] Convert_to_local_urls(byte[] rel_url_prefix, byte[] src, List_adp list) {
   try {
     int src_len = src.length;
     int prv_pos = 0; // position in src up to which bytes have already been copied to bfr
     Bry_bfr bfr = Bry_bfr_.New_w_size(src_len);
     Hash_adp img_hash = Hash_adp_bry.cs(); // case-sensitive set of urls already queued
     while (true) {
       // find next "url(" token; copy the tail and stop when no more remain
       int url_pos = Bry_find_.Find_fwd(src, Bry_url, prv_pos);
       if (url_pos == Bry_find_.Not_found) {
         bfr.Add_mid(src, prv_pos, src_len);
         break;
       } // no more "url("; exit;
       int bgn_pos = url_pos + Bry_url_len; // set bgn_pos after "url("
       byte bgn_byte = src[bgn_pos];
       byte end_byte = Byte_ascii.Null;
       boolean quoted = true;
       switch (bgn_byte) { // find end_byte
         case Byte_ascii.Quote:
         case Byte_ascii.Apos: // quoted; end_byte is ' or "
           end_byte = bgn_byte;
           ++bgn_pos;
           break;
         default: // not quoted; end byte is ")"
           end_byte = Byte_ascii.Paren_end;
           quoted = false;
           break;
       }
       int end_pos = Bry_find_.Find_fwd(src, end_byte, bgn_pos, src_len);
       if (end_pos
           == Bry_find_.Not_found) { // unclosed "url("; exit since nothing else will be found
         usr_dlg.Warn_many(
             GRP_KEY,
             "parse.invalid_url.end_missing",
             "could not find end_sequence for 'url(': bgn='~{0}' end='~{1}'",
             prv_pos,
             String_.new_u8__by_len(src, prv_pos, prv_pos + 25));
         bfr.Add_mid(src, prv_pos, src_len);
         break;
       }
       if (end_pos - bgn_pos == 0) { // empty; "url()"; ignore
         usr_dlg.Warn_many(
             GRP_KEY,
             "parse.invalid_url.empty",
             "'url(' is empty: bgn='~{0}' end='~{1}'",
             prv_pos,
             String_.new_u8__by_len(src, prv_pos, prv_pos + 25));
         bfr.Add_mid(src, prv_pos, bgn_pos);
         prv_pos = bgn_pos;
         continue;
       }
       // img_raw is the url text between the delimiters (quotes excluded)
       byte[] img_raw = Bry_.Mid(src, bgn_pos, end_pos);
       int img_raw_len = img_raw.length;
       if (Bry_.Has_at_bgn(img_raw, Bry_data_image, 0, img_raw_len)) { // base64
         bfr.Add_mid(src, prv_pos, end_pos); // nothing to download; just add entire String
         prv_pos = end_pos;
         continue;
       }
       int import_url_end =
           Import_url_chk(
               rel_url_prefix,
               src,
               src_len,
               prv_pos,
               url_pos,
               img_raw,
               bfr); // check for embedded stylesheets via @import tag
       if (import_url_end != Bry_find_.Not_found) {
         // @import handled by Import_url_chk (which also wrote to bfr); resume after it
         prv_pos = import_url_end;
         continue;
       }
       byte[] img_cleaned = Xob_url_fixer.Fix(wiki_domain, img_raw, img_raw_len);
       if (img_cleaned == null) { // could not clean img
         usr_dlg.Warn_many(
             GRP_KEY,
             "parse.invalid_url.clean_failed",
             "could not extract valid http src: bgn='~{0}' end='~{1}'",
             prv_pos,
             String_.new_u8(img_raw));
         bfr.Add_mid(src, prv_pos, bgn_pos);
         prv_pos = bgn_pos;
         continue;
       }
       if (!img_hash.Has(img_cleaned)) { // only add unique items for download;
         img_hash.Add_as_key_and_val(img_cleaned);
         list.Add(String_.new_u8(img_cleaned));
       }
       img_cleaned =
           Replace_invalid_chars(
               Bry_.Copy(
                   img_cleaned)); // NOTE: must call ByteAry.Copy else img_cleaned will change
                                  // *inside* hash
       // emit everything up to the url, then the cleaned url; add quotes if src had none
       bfr.Add_mid(src, prv_pos, bgn_pos);
       if (!quoted) bfr.Add_byte(Byte_ascii.Quote);
       bfr.Add(img_cleaned);
       if (!quoted) bfr.Add_byte(Byte_ascii.Quote);
       prv_pos = end_pos; // closing quote/paren from src is copied on the next Add_mid
     }
     return bfr.To_bry_and_clear();
   } catch (Exception e) {
     // best-effort: never fail the caller; log and fall back to the original css
     usr_dlg.Warn_many(
         "",
         "",
         "failed to convert local_urls: ~{0} ~{1}",
         String_.new_u8(rel_url_prefix),
         Err_.Message_gplx_full(e));
     return src;
   }
 }
 // Renders page into rv as html, dispatching on page_mode (edit / read / html).
 // Serialized on thread_lock_1 because it mutates the shared this.page / this.wiki /
 // this.app fields for the duration of the render.
 public void Write_page(Bry_bfr rv, Xoae_page page, Xop_ctx ctx) {
   synchronized (thread_lock_1) {
     this.page = page;
     this.wiki = page.Wikie();
     this.app = wiki.Appe();
     ctx.Page_(page); // HACK: must update page for toc_mgr; WHEN: Xoae_page rewrite
     Bry_fmtr fmtr = null;
     if (mgr.Html_capable()) {
       wdata_lang_wtr.Page_(page);
       // view_mode may diverge from page_mode: Tid_html renders with the read fmtr
       byte view_mode = page_mode;
       switch (page_mode) {
         case Xopg_page_.Tid_edit:
           fmtr = mgr.Page_edit_fmtr();
           break;
         case Xopg_page_.Tid_html:
           fmtr = mgr.Page_read_fmtr();
           view_mode = Xopg_page_.Tid_read;
           break; // set view_mode to read, so that "read" is highlighted in HTML
         case Xopg_page_.Tid_read:
           fmtr = mgr.Page_read_fmtr();
           // ctx.Page().Redlink_list().Clear();	// not sure if this is the best place to put it,
           // but redlinks (a) must only fire once; (b) must fire before html generation; (c)
           // cannot fire during edit (preview will handle separately); NOTE: probably put in to
           // handle reusable redlink lists; redlink lists are now instantiated per page, so clear
           // is not useful
           break;
       }
       Bry_bfr page_bfr =
           wiki.Utl__bfr_mkr()
               .Get_m001(); // NOTE: get separate page rv to output page; do not reuse tmp_bfr b/c
                            // it will be used inside Fmt_do
       Xoh_wtr_ctx hctx = null;
       if (page_mode == Xopg_page_.Tid_html
           && wiki.App().Api_root().Wiki().Hdump().Html_mode().Tid_is_hdump_save()) {
         // hdump-save path: render body, escape it, and wrap with the html fmtr
         // NOTE(review): this branch uses To_bry_and_clear without Mkr_rls, unlike the else
         // branch's To_bry_and_rls — confirm page_bfr is returned to its mkr pool here
         hctx = Xoh_wtr_ctx.Hdump;
         Write_body(page_bfr, ctx, hctx, page);
         Write_page_by_tid(
             ctx,
             hctx,
             page_mode,
             rv,
             mgr.Page_html_fmtr(),
             Gfh_utl.Escape_html_as_bry(page_bfr.To_bry_and_clear()));
       } else {
         hctx = Xoh_wtr_ctx.Basic;
         Write_body(page_bfr, ctx, hctx, page);
         Write_page_by_tid(ctx, hctx, view_mode, rv, fmtr, page_bfr.To_bry_and_rls());
         new gplx.xowa.addons.apps.scripts.Xoscript_mgr().Write(rv, wiki, page);
         // CAUTION: the if below is braceless; only the Write_page_by_tid call is conditional,
         // and wdata_lang_wtr.Page_(null) always runs despite the misleading indentation
         if (page_mode
             == Xopg_page_
                 .Tid_html) // if html, write page again, but wrap it in html skin this time
         Write_page_by_tid(
               ctx,
               hctx,
               page_mode,
               rv,
               mgr.Page_html_fmtr(),
               Gfh_utl.Escape_html_as_bry(rv.To_bry_and_clear()));
         wdata_lang_wtr.Page_(null);
       }
     } else Write_body(rv, ctx, Xoh_wtr_ctx.Basic, page);
     // release the shared page reference; wiki/app fields are intentionally left as-is
     this.page = null;
   }
 }
  // Writes the html body for a wikitext page into bfr. Pipeline:
  //   hdump short-circuit -> MediaWiki-msg short-circuit -> File header -> parsed wikitext
  //   (+toc) -> Category data -> tidy -> category pagebox -> language-variant conversion.
  // data_raw is only consumed by the MediaWiki-message branch; everything else renders from page.
  // NOTE(review): page_tid is not referenced anywhere in this method — confirm it is still needed.
  private void Write_body_wikitext(
      Bry_bfr bfr,
      Xoae_app app,
      Xowe_wiki wiki,
      byte[] data_raw,
      Xop_ctx ctx,
      Xoh_wtr_ctx hctx,
      Xoae_page page,
      byte page_tid,
      int ns_id) {
    // dump and exit if pre-generated html from html dumps
    byte[] hdump_data = page.Db().Html().Html_bry();
    if (Bry_.Len_gt_0(hdump_data)) {
      bfr.Add(hdump_data);
      return;
    }

    // dump and exit if MediaWiki message;
    if (ns_id
        == Xow_ns_
            .Tid__mediawiki) { // if MediaWiki and wikitext, must be a message; convert args back to
                               // php; DATE:2014-06-13
      bfr.Add(Gfs_php_converter.Xto_php(tmp_bfr, Bool_.N, data_raw));
      return;
    }

    // if [[File]], add boilerplate header; note that html is XOWA-generated so does not need to be
    // tidied
    if (ns_id == Xow_ns_.Tid__file)
      app.Ns_file_page_mgr()
          .Bld_html(wiki, ctx, page, bfr, page.Ttl(), wiki.Cfg_file_page(), page.File_queue());

    // get separate bfr; note that bfr already has <html> and <head> written to it, so this can't be
    // passed to tidy; DATE:2014-06-11
    Bry_bfr tidy_bfr = wiki.Utl__bfr_mkr().Get_m001();

    // write wikitext
    if (page.Html_data().Skip_parse()) {
      // page supplies a custom body verbatim; no parse needed
      tidy_bfr.Add(page.Html_data().Custom_body());
    } else {
      if (page.Root()
          != null) { // NOTE: will be null if blank; occurs for one test:
                     // Logo_has_correct_main_page; DATE:2015-09-29
        page.Html_data()
            .Toc_mgr()
            .Clear(); // NOTE: always clear tocs before writing html; toc_itms added when writing
                      // html_hdr; DATE:2016-07-17
        wiki.Html_mgr()
            .Html_wtr()
            .Write_doc(tidy_bfr, ctx, hctx, page.Root().Data_mid(), page.Root());
        if (wiki.Html_mgr().Html_wtr().Cfg().Toc__show())
          gplx.xowa.htmls.core.wkrs.tocs.Xoh_toc_wtr.Write_toc(tidy_bfr, page, hctx);
      }
    }

    // if [[Category]], add catpage data
    if (ns_id == Xow_ns_.Tid__category) tidy_bfr.Add_safe(page.Html_data().Catpage_data());
    // if (ns_id == Xow_ns_.Tid__category) wiki.Ctg__catpage_mgr().Write_catpage(tidy_bfr, page,
    // hctx);

    // tidy html
    wiki.Html_mgr().Tidy_mgr().Exec_tidy(tidy_bfr, !hctx.Mode_is_hdump(), page.Url_bry_safe());

    // add back to main bfr
    bfr.Add_bfr_and_clear(tidy_bfr);
    tidy_bfr.Mkr_rls();

    // handle Categories at bottom of page; note that html is XOWA-generated so does not need to be
    // tidied
    int ctgs_len = page.Wtxt().Ctgs__len();
    if (ctgs_enabled
        && ctgs_len > 0 // skip if no categories found while parsing wikitext
        && !wiki.Html_mgr()
            .Importing_ctgs() // do not show categories if importing categories, page will wait for
                              // category import to be done; DATE:2014-10-15
        && !hctx.Mode_is_hdump() // do not dump categories during hdump; DATE:2016-10-12
    ) {
      if (app.Mode().Tid_is_gui())
        app.Usr_dlg().Prog_many("", "", "loading categories: count=~{0}", ctgs_len);
      Xoctg_pagebox_itm[] pagebox_itms = wiki.Ctg__pagebox_wtr().Get_catlinks_by_page(wiki, page);
      boolean hidden_enabled = wiki.App().Api_root().Addon().Wikis__ctgs__hidden_enabled();
      wiki.Ctg__pagebox_wtr().Write_pagebox(hidden_enabled, bfr, wiki, page, pagebox_itms);
    }

    // translate if variants are enabled
    // NOTE: bfr.To_bry_and_clear() empties bfr, so the converted text replaces the
    // entire accumulated body, not just this method's portion
    Xol_vnt_mgr vnt_mgr = wiki.Lang().Vnt_mgr();
    if (vnt_mgr.Enabled())
      bfr.Add(
          vnt_mgr.Convert_lang()
              .Parse_page(vnt_mgr.Cur_itm(), page.Db().Page().Id(), bfr.To_bry_and_clear()));
  }