From 4f85ed4d3c601dd6406ae69da818fd28188e925e Mon Sep 17 00:00:00 2001 From: David Jencks Date: Thu, 20 Feb 2020 18:24:41 -0800 Subject: [PATCH 1/4] issue-377 implement site generator pipeline events --- docs/modules/pipeline/nav.adoc | 1 + .../pipeline/pages/pipeline-events.adoc | 224 ++ ...encks-asciidoc-loader-v2.3.0-b.2-i.347.tgz | Bin 0 -> 10824 bytes .../djencks-asciidoc-loader-v2.3.0-beta.1.tgz | Bin 0 -> 10732 bytes .../test/load-asciidoc-test.js-merged | 3356 +++++++++++++++++ .../cli/test/fixtures/simple-generator.js | 2 +- .../lib/aggregate-content.js | 45 +- packages/content-aggregator/lib/constants.js | 2 + .../test/aggregate-content-test.js | 46 + .../lib/convert-documents.js | 9 +- .../test/convert-documents-test.js | 68 +- .../playbook-builder/lib/build-playbook.js | 44 +- .../playbook-builder/lib/config/schema.js | 8 +- .../test/build-playbook-test.js | 447 ++- ...-spec-pipeline-extension-config-sample.yml | 56 + ...-schema-spec-pipeline-extension-sample.yml | 45 + .../pipeline-extensions/test-extension.js | 16 + .../lib/generate-site.js | 54 +- .../test/fixtures/argument-test-extension.js | 94 + .../test/generate-site-test.js | 67 + 20 files changed, 4387 insertions(+), 197 deletions(-) create mode 100644 docs/modules/pipeline/pages/pipeline-events.adoc create mode 100644 packages/asciidoc-loader/djencks-asciidoc-loader-v2.3.0-b.2-i.347.tgz create mode 100644 packages/asciidoc-loader/djencks-asciidoc-loader-v2.3.0-beta.1.tgz create mode 100644 packages/asciidoc-loader/test/load-asciidoc-test.js-merged create mode 100644 packages/playbook-builder/test/fixtures/default-schema-spec-pipeline-extension-config-sample.yml create mode 100644 packages/playbook-builder/test/fixtures/default-schema-spec-pipeline-extension-sample.yml create mode 100644 packages/playbook-builder/test/fixtures/pipeline-extensions/test-extension.js create mode 100644 packages/site-generator-default/test/fixtures/argument-test-extension.js diff --git 
a/docs/modules/pipeline/nav.adoc b/docs/modules/pipeline/nav.adoc index e88638407..20e0663d5 100644 --- a/docs/modules/pipeline/nav.adoc +++ b/docs/modules/pipeline/nav.adoc @@ -1,3 +1,4 @@ .Extend & Customize * xref:supported-components.adoc[Antora Components] +* xref:pipeline-events.adoc[Pipeline Events] //* xref:supported-extensions.adoc[Asciidoctor Extensions] diff --git a/docs/modules/pipeline/pages/pipeline-events.adoc b/docs/modules/pipeline/pages/pipeline-events.adoc new file mode 100644 index 000000000..7f4e4766d --- /dev/null +++ b/docs/modules/pipeline/pages/pipeline-events.adoc @@ -0,0 +1,224 @@ += Site Generator Events + +The playbook builder and default site generator emit events through an event emitter. +The Antora event emitter wraps the link:https://nodejs.org/api/events.html#events_class_eventemitter[Node EventEmitter] and allows both synchronous and asynchronous listeners. +This allows considerable customization of the default pipeline through configuration. + +Events are emitted before each stage, with the arguments to that stage, and after the stage ,with the playbook and return value from that stage. + +=== Pipeline Extension registration code + +Pipeline extensions are used to register listeners for these events. +A pipeline extension must export a `register` method taking an EventEmitter and optional config object as arguments. +The pipeline extension can register listeners for as many events as needed. +The config object is only configurable in the playbook. +For instance, + +[source,js] +---- +module.exports.register = (eventEmitter, config) => { + eventEmitter.on('beforeBuildPlaybook', ({ args, env, schema }) => { + args.push.('--attribute') + args.push('page-layout=pdf') + }) + eventEmitter.on('afterClassifyContent', (playbook, contentCatalog) => { + contentCatalog.addFile(...) 
+ }) +} +---- + +=== Pipeline extension registration + +Pipeline extensions can be registered in three ways: + +* Specifing the pipeline extension file or package in the playbook under the `extensions.path` key, similarly to specifying an asciidoctor extension under asciidcod.extensions. +This may be accompanied by an arbitrary configuration object under the pipeline extensions.config key. +* Specifying the same information as a command line --pipeline-extension argument. +* With a custom pipeline, the resolved pipeline extension module can be passed in to the build-playbook function. +This is the only way the `beforeBuildPlaybook` event can be intercepted. + +In the first two approaches, the supplied path is passed to the node `require` function after some possible path adjustment so search starts at the playbook rather than the Antora installation +The path may be the name of an installed node module, a relative path from the playbook, or an absolute path. +Although `~` paths work on the command line due to shell expansion, they do not work in the playbook. + +=== Pipeline extension configuration + +An example of specifying a pipeline extension with configuration in the playbook is: + +[source,yml] +---- +extensions: +- path: antora-lunr + config: + facet: "component.name" + boost: + title: 2 +---- + +== Listener arguments + +The arguments for any event listener other than `beforeBuildPlaybook` or `afterBuildPlaybook` consist of: + +* for a `before` event, the arguments to the pipeline stage, arranged in an object, together with the playbook if not otherwise present. +* for an `after` event, the playbook followed by the return value from the pipeline stage. +The listener may generally modify these arguments but not replace them. + +* `beforeBuildPlaybook` receives an object `{ args, env, schema }`; the playbook is not yet available. +* `afterBuildPlaybook` receives a single argument, the playbook. 
+ +Sets of listeners registered together as one pipeline extension may communicate through a shared object in the pipeline extension. + +In contradistinction to the Node EventEmitter, the Antora event emitter allows both synchronous and asynchrounous listeners. +Asynchronous listeners for an event are allowed to execute in parallel, and complete before the next action is taken. + +To register either a synchronous or an asynchronous listener, call + +[source,js] +---- + eventEmitter.on(, listener) +---- + +Here are the events and the listener arguments. + +[cols="3,5a",options="headers"] +|=== +|Event Name +|Event Arguments + +|beforeBuildPlaybook +|args + +env + +schema + +|afterBuildPlaybook + +|playbook + +|beforeResolveAsciiDocConfig +|playbook + +|afterResolveAsciiDocConfig +|playbook + +asciidocConfig + +|beforeAggregateContent +|playbook + +|afterAggregateContent +|playbook + +contentAggregate + +|beforeClassifyContent +|playbook + +contentAggregate + +asciidocConfig + +|afterClassifyContent +|playbook + +contentCatalog + +|beforeLoadUi +|playbook + +|afterLoadUi +|playbook + +uiCatalog + + +|beforeConvertDocuments +|contentCatalog + +asciidocConfig + +playbook + +|afterConvertDocuments +|playbook + +pages + + +|beforeBuildNavigation +|contentCatalog + +asciidocConfig + +playbook + +|afterBuildNavigation +|playbook + +navigationCatalog + + +|beforeCreatePageComposer +|playbook + +contentCatalog + +uiCatalog + +env + +|afterCreatePageComposer +|playbook + +composePage + + +|beforeComposePage +|page + +contentCatalog + +navigationCatalog + +playbook + +|afterComposePage +|playbook + +page + + +|beforeMapSite +|playbook + +pages + +|afterMapSite +|playbook + +siteFiles + + +|beforeProduceRedirects +|playbook + +contentCatalog + +|afterProduceRedirects +|playbook + +siteFiles + + +|beforePublishSite +|playbook + +catalogs + +|afterPublishSite +|playbook + +reports + +|=== diff --git a/packages/asciidoc-loader/djencks-asciidoc-loader-v2.3.0-b.2-i.347.tgz 
b/packages/asciidoc-loader/djencks-asciidoc-loader-v2.3.0-b.2-i.347.tgz new file mode 100644 index 0000000000000000000000000000000000000000..e4f9c99aab97a0568c5de94074bc009ca1de2a05 GIT binary patch literal 10824 zcmb2|=3oE=;kUNg<+shI{l2-M>E(|#C)^gr7w%cz_j*@n`myv&D%sY?o83QJZDH}w z(FpPmDzsVm>-KlM4=S40p0VX=$8BC#DbM5g z{4YOR7xvUu=iBMTNjB%(AGyl)r)=C*WHj-s%fu;4Vy$zgf2`DUoi#_iuuxU+latS* zoGEHIZ-_}gi&Wk0r80AsQk3Ny7p{g=+k8Ux9-XOc`2Rfk&!qYPHQJ|geCzjjT9p~+ z)gqa_A!*5`W1*AMlct?fQBYbO#T}G)Fjerpm*L7)kvHR`yFY4AVYdj3>$(0Q{iW)^ zh;_W*_I?juQK|AsaLb}8@+pGLo$6Z5rUX=KuJVlGoGDo8bM?ZO(|ao3C+lwJ_nesC zQPDEPX|;@nrvQxB$d-uFGX>)Vk*-Y?a21jqA8DJ!lnl?`8GNqh>kfm@kA-NmFiM!fzzuu zO$uI>8}~H$MZaS6+~D(1!~+)0n&Q!);-%0zB~+AO-J@NoTh1>%)5u5b^4b{-JnjDE%=sadfOoWzrV6AmkN{aSXuv`@&rHnM#c`^t{vkDdCGb@IIoR9;BSa0P6B zch>1#QNv{M&P8*icwNLTTRR(9=t|ytr(wss>dm{Z^q+55c^sSE)v(v2&EUtAQ(eA% z^IDUNYm=9=|Npa`anAV^vq|!kg4_<*SXHpDKmRgRChc&?a`v)gLA$!IyY>}JU94a} znh+Br{G!Wf{|ddwPD$H)xHZ^v=cg-6EeJZ`Fn8LV>xcI3+0(P^=l#hhOS!J4T&phL z^X>3!@$A_bf8Bb({!b(mC1)ONEIWGD(so|>iyZT|1mZ4oc+1NiA`ah_KP~yRR*B_7O=3{KNan`ul#)X<%-YOn#ImJ+&iT; zd7Z$T{myxFpY*!TeZeuw>SfTpZb>f1rz|NZPPwTc`4#gmUjLWkDL){adQJZH?lq6a z{_Wq#*}ZrD|NOT(`F0=v|E{n)k?8%P=v1lbqvSa!1f!}i*QZS2R>&-3@6^NaiY^FeBS$&=#}Uo3XsIw-ULd%qvw_xK-{ zX@!BRgM@a#zkj_J{kXNlO__6T$h8%kUow~)<&O3&40*Ok>Hma=cMoO;7GDv5vV%b* ztX1KvsymfUh*ZSBQh?uPiCPh~ui6*c7_pLV>B==DCPEdqTxnpN}iw*3EPP}KdgYfnSe z#X&!H&i0)pc`hb;w3pLJ%=qHyhyVJX+lh@EUm^<<;_lxzfm9MgL zZwp)fuhq#f*edmOfU(a6h|FA>Fg#Ygze|&1My6@`Uf5%MR%p%2Frf_^4#-u{Y z_>D6XRrUnMs9j}Qs9X4`$wli#oPmpfZn#Qc&mvW`vc}2LBFBuGie+xjS3j`Oa!*UA zVzkPRtMbROtP1_s<&L3z}ay z`fqedV*I(|K-aZL9qYAaU-KWha(gYmR=j_-;)K788JiZ?{7?G-bpH7`nSC}EzyF<; zXE^lzGj{}!lFVYq=egg^jDOhFe|_`h!;_;2Z}R_kueY^AM1oy4<74Oe7DcP?{;1K?%AUWxnF0^7M)roCR#M@P?1o{wSq4PD$F_a z?@2@u1L^I~3Ue^vXT+knxV# zU0f74xzpWs(a*-ToS8;VTv8eVL6KU^_9RufsdXL|m?o~!)VuX?)tM)Wny$Xa4zrpx zQgzq#wa!|WxT?E2^_c!k(GwmA0xto2UR!aYVZ&wDbBbYa)WjQm zjs=QGZ|k-@6R?9Z`<3zp(M;L8br-qSzclS$scv8wd^%}LiRDRe;k^+-6>--jLT^2P 
zHFL+j!kOthCv}e`yik3UBy*GXfxT7eSFM@-Z=y1932;Wv@-dQ2c4!Tgmbq3mv8Q~! z`0`s>?Hy6U%JuVCZ(Q7V+joJ|?QE`BA64|0D0}a|yUJR+UUAXdB{%iEe!TjAi|xnS zwWn@Hm$NSqnd+l2Vn4lA{B(A7if+Z*ZO7h!wwYR(maUo6^*ZiT`A?~~uM_uZl&?ULXZl-FX;`WqDQx4cK*xhZjUT1@w zW!IG>Hi?%{t=!2GX_?QJ@G!LKNr6MZ@6SIQ(wL4`CVL1bw5|K#ZfsCsJa2bakK?Vc zhfLe!y1Of8EeVgkd`+)R?o-hTZ|})frYannmWoqOC1oh6O0Y23+TCei*uIIqJ=x*% z#uE90C0$4KPQ^z4S*ZN&#rrJxFNIr$`+2znE$!LU#MeZxSrn3%{9vVR?_4jA-MhqG zR-DgJyfugK(zVm!>g+#cBNlmtTrpS>sFZ#@;;Hc;29~RvA4yMh`pje0_>C<_G~}Sg z+~DiSpNlNliPT{X6e;n!!lM=D=elLtMaC6e(mz@`syn7BSS%=6EcIBKJ9k^<5s_aIuJO71fmh0sHrmU!1bZ9~d{@tQN$FI8nkeIenX@+WrbW$ge6e9~ z>_a|_C1QO$vj5!-4zPBZ{@%)9!EXb7hAys@ggVKHjH-Vwd6VoP?N0gMpnu|khU$gl zC3eOq)emr-7PGACY*Yk|a*1eL`wiTS< z%vRqh_wd_f#>!<&SJX{@7aMp?+~FACzn2##99p)bP3fwmD$h!$ziBTQ2rcm8+I+=B zt;_pWx%?BCMGro6-SAmqEu1*{Ow}>@^BtUTxnejq+08#GX`XuZ+vME6{S5c&EPkEn z2#^(wOplLc*_S8$X-)gB`n)OEu4aG8%CgzK*5H6Q&s>*ncUT@ZI#e-C58ZK?SBVmUaydc40cT+DV|g^anfVRR)J}QY`MW;~ zx+KOKr616xP&VBn>-4mEtpHDN- z$yZpkrWvhvvF*;<`odSmZ?aOWP=V77PR9ub;y!2CrB1xsK6gjygoi?ldL|W3I~Fb{ z_$pO->P!7Li_;!H#%J0E17}Hn%F~n+n=-Th+P>EhFTK!@P^wP(YpH6@`aFo`*~FzQ zUr&3aZ=13wWWs&rqaD$o&vjop%eDS=(WHR%i9xIyqO4MvwO<{s6ih$d`E-e{fMbyH z;oVP-oeQ)dPmyMvwXHm)Fs(qZ__t8Zs){#(CUW&kVi6ZUMD#mo%nv(oHcw^Zxg!Ux z%)_iYy_bcl8*H#&{BzlkL;k8aSr2WBVrn>ZZG-QFE3P(cw*30qyylT$0guqZm_GBX z@->}H`tL|rp5Eul<)mErz}(@fzX~JE!t>!@X0Zz}^V${(i6*Sgn3qr_(8kAdMdHM& z$4&)8f`?XB@gB<9dg{D@bwqussh-J&`&ps0%~)msZal2&lk2DWxwv1uFMacuS3aTj zZCMEmHn%7&&bIl>cc~%MF(v!)#BaZkCe3;Fe8Kshacq9A%HtTupUHx=&K$3lWPu_8!tNShAt+L&( z^kd$;_3o)h0`u0MJ$#NybGn(^tO@4s;a!fkQ!~5jFD7#@a+$O2=TeDf2R@dr&ddHI zbZVg_uX%09%{?=oo^$gvo-1@IL3Hk(2l~m|R~|g}XI|x%sF_C;ld}&#n0GPJ&RLm5 zvVLxs_k?p#Iu;&Yz3tbw#s_xF%Zq}dy4M`}bnMn@t8J3|-9nqMOq+6ehs4Q?&!t?p z{yi_?FjGDH_@=^r%*GSNKiaGf*v%Ww+qG=FPrcC-)#T61G?RD}q<-vscO}qz*@f=; zCYx$cPj5Pz<|rsJ>56akJl%$wE6&(0Xqz4qbkosl!llLKTO3!K)V%LaZOh#AdilSl z%O}lsTDI+;iDqH9Y2BWlZl83AON|VZ9IDLxgHzibLlhsgo|vnZ%sJ)YRvUdUarI3+ 
z-_%Z4OnS5~J5r(MbC;e&F6TklBVTxDKf9~iqBDK>q0@s)T*7PD*~?=^0clIuv)b*SuIiau)oQxdAEJdXGc54NuG5S<$t8O=1X5Eb8B#r zdd=K>MN495CEHJ^k*hd5C#-kUp{qx%XI-DT`>x2QO)?&l9quNxrwen3GzTzAeoYU$ z_9p6tL+-?NkBfO?f4uEl_bL0x5Bay}g4I4Rlo0<}j!_NdHwvk=FbX?PVI-$Jx4crev&a*mbK?% zL*=4}Dtg}>tI{qPUvYeN-HAylL1b$DnV$03>aE5R63`GIg=9Oi=c{lzHSg^RCjuD{M-v4sobrx-c>w#%q2Wu5G5=)O55O3VFBmjB|# zeXrak_H1$Dl4_Us-oKKUnQQt!j(`R1FE?~-7V7T}c~=>}^6J&_BCakL$x9NG8#P2W3Dw=_2`)ZFyze}~HI z`Ch`llYCAIN-cS6;I?e~Q_T$8l$V?cdn@`v2s)%l+l9{r_EY z>stNR$nM&>J?7rWMP6T?|EE?xSK9wdg4LW=Td$q>Ph4Jg;Oh(Cx!=EDS|825cwW+} zbFW;(uLd!;_N`y`yi%$pr0cHyl?eq+2R7I}^>IAD=$`yHxwV{(z9m)R-olmDF79u& z-)22pA#r2doIV=?w=%_V7X$ki3LlmG+kYxIVlJ=S+?6PNLTDfR3-`C5tliA#PW?Pn=bXVEjdg9pkt=v) z*Sq}|Dm!{$wb#trjaMAXZ?5FK`^ceTY3`x98tHGmif1ovm#MwK{QRDCFRwkA-En*oqsM1GQKk&%m4)%)6BoX(pC?)W?a(h-k?))~7hAJl zTY08A1jh9KcI`eUl6-4YT(?#3`HTD7+d3Q`pPaHurIalw{%W)kOiEN;(O|GhECB52-wi{gpRFaPYjyhdTU z!{YKC3~S=7ZFi_!`L%yin{xEk)?lvU2LXkXG%oKJo4Hx_Nduca+tggPQ#F6$;-)@v z(`!8c@xBzp*6*kFZC?bm#EL(DJwfNk>1$UkU4>1aZE(K!b;)h50*x>{ZNckU#p-MLhD`MGuO-uT4L^UkXu z<=kq&WA9(C%&b4Bn9L7G2YYVTOT2r2m)oIVF8Q+x!^5An1!rY;UTl%5Yqqg1@M>9i zL+#k@^|AGp(VJET)~wWAti`$Z)SgEI zEES6`yG=X!hoepUsjgPT?bo(V)=6P!tN&ZOKgwQcsB0A)oBD#Se22l?Z57ix=G>Te zZDaCL!Lyg@JKtQC6v*|r{?C!r{z^G+TC^3%{yPSSQPqx9E8F*8%#MBe())DLd)+VR zgzA$^S6$wjowa(N{(K$%GeIlt^ZrOM=)A7{6TM?%K=e-=_y6a;s*I8<<6RTpce*=% zetavc{aO5iR=r#2Pc0Uey&fRN`r7TFUQbfnZSk-9_Z?^EAFW!QdHQ-tefH^Vhd*oj zKmK!kS>QAA+l=`i|F5;L-4*%yzx~}?RlgGc@9tlI_TO?dez}z&HkrG=I&L#jkgwVG zT8N+U&Njh=wxxH@d#z6ByS8URfkbRL`}T)Ym-n3(F!$e`#L#=@n4Z0C=hB@oK1EeL z-zNBu{nd0~W-m@ZhazcPCUK>AJ?-1HHwN!7;Oj~X)`~1M#(=Cx$@-j z^Aa_e-G>h>zbCu->a@@Hv(>HtuX!YjQCJ(>-mR0}xWB(hhvTUO5XyBbv;z31$=x~kc0SFTt8U4LY; zBHz55leb^HH;=vgua?|@mSSz`*1vThzWw+Z_oM8?H&kQS%?R}km zhkZ(qW&WACX~Dubd)8=I&5v~9h}hWFr^EGXarp$}ndg>&o1w4Mrl_*|`gX^*t=om0 z%03l({Ciuz>*d?uax&cWuC0tL403;awd$aKwaet5^P-mBtBre}**RBF*k-MgRe4<@ zck)H8wX?pTvEACZU!f<*u5@xj+V1sB_C8oBqJQ-n<2s!SsuPnvPc87>D7SaN8R?^gQEYFLgEw`Y*pzomEvBPjBf9J2YNVt~iREPEN9a6{TyZIvTPgSp&-*z_Ve3Y`m 
z!s*i`a_*fuYicJpyRIxP%~F0(@TtRVn#_&#zM7p-2s!R58mKYT=|^3$VPe(H?K4~h zUDPE$MXtHtR9^B$@Tc7N9~*BRxUu>HPYc&OtxrtOu8F+g4po=`Fxi$RUcPr$^E$AtJs{C-#13i(iR%Z`H#pP}e+O8FS@ldJl45zC01+!i-FEB3jjJ#=c zz-5&}$L>dSUPv(?J%9Ma6~^VqceWP4im(y4yL@2j>#%u^>b+Mi*i=k^u%;T{VJdbw z|4`oQ@6Ue+xooe$3l^TTAZx zj+?nG$J$@7+qg?r<+YR1ov?lD8w)|Pa_Y9U-J;PYr8mIZIuGjY~UolJG-fZ!aFz0D?&r7c- z*hM^7zQW`FDRjEK-nYcq2i~@wv#)#K``x`_Yp0#w?2DnxrFPk`v0I+i{&1s%NR_-) z#LYq{Q{Cj6v@=>aj~nzbI(|Lsv*hEkm!BRUky%$ObMt3TJ3Ggc*x#Qmts8egGV*k< zm~*1*DT_uj``q>W-&L&3eBQk}_L|}&L0&7)<(ku+QYSrFX|&7l|L&hYnKD0yyC42O?&|gS(^|=- zzDFg%sIYj_YyTH5>->|fwi}B0r^TKM7Wi;Qp#4ck^_s=oUTnO*;bZUFsxO8YKAS7u zPYW==zw^*)rL!uU{uvAB^O?m8CR^RGSo@iGvSdWJVuI6JBOPzDG{8a4O z)+_rS+D>ljS)ROGv8&C(#5=Q1x`w_Fy9JFQCMV00M@2RR?*vr`cAy(G> zXkm1)qwF$W@$_34j^8RQNbKdF75F;zRP$rbt7jFCo?rOD#`w!6k&=_jcGCs+%gWB$ zbVI$b^6=}5oq0TI#wlBGCJW3cRDJW{eOBP|ZHs1V`6~NGpDAkFxmYKA*_Q5Q+;^_= z_q?6>chm7tS{qo?_x?K*W|8E;e1EFcHMRG#-T6@;0_Q$2a)_Op>Q^Ny=^!*;uG(!= z@3wHI&6l?B6kcqndapO(?UI!YlR{=a4os}FGf0@he??4X>s*O#%lzj{e~tWj^Yg*5 zRo3nQUv0Ma5H@4JJ?CR~5YNRkQ?=Ipap>gxoA}XfeaCWxRf~`N%`nl>xWX#SbxOS$uy2@H}>B=nV40= z^~2)Bmbdk0DXuc-7|+T*mR#6;PK#}FCes|L%@PnmCV?R@fL za+~(2?cZGOZNC5CaF4?V95&XJ zE;;1Hw)*dTzw5Kq*?yb9zY5tQSmFeprJB1%k-*C(N>c+NZ#^>c` zy_QdNAw-t_RvXD_kKGAAA;`t55u?P)Zp;=@meoLdeO2b|7@YorRztdDt6tMzWH z=d&{X>Z4+7qqs{~cGxYS79ZkWQM#x&i&L<6iJPc;lH!V45`A45-2g~65iq}QoBppkhx7G8gS&(I7^t-&QoqI3DoHTm;!>v};`?FMQ zVryDML-B^^QY=a~_rrHDez~q^rLj|JC`Bv%mX^vQ?Y6)yl0RR&2t`GWyqlyi+Z$=9gc% zHg8_hnZmCDU)8D=mPZ&TPu^}Z>$!RJuhNK;g0;^QmQ8nf@TvU8s_0cU_H1m|W?vS6 zdzP&?w>NFg8>xScTWjh|Ex7LRDt?RWdzG9LxAxGYyqX8^e{JIXJeAe@-wM4C8(x{2 z-wLd~a#XHV-0N?K*HzD@PtMM^Si1Sorgf9dw(+gmWp-uSwd=_xrZ=x%tg@1Q!Mv7J z`k(3Eg=^=o5}BI4w#z*DtJ6iM7B+h5~J-Ruouu3w1>a0wI+Y7yq=oL;jjVk|= zsC~lej`kKEf%nTKOut^SR9fe?@^<8XmZK|=AHBdC9cZ(#(5x>>ws!lBJT>jSbL`LS zOndkJ7WFsZIDM974AVYIJW7Wo(iPo`t7dn6-)8mqS;U{{ zya^kaZ|<&K{&dw`t7swa{L|;3i{)MYGVAoDi(g+ox7Xu;`M-$w!M4x$L;h^v{69JU z`NMzje^>7F`Dea-|Hp*DO+MepNuYqZrxYnEd^SO)I@7{1oc*%5U(;WYoZhd~9=IVpn 
zk0xvv3(`8WDTsN?zc*!8zjj5RP&erOB4afp@_u<&e1NRL{_LF}LYl23@9pJS*71=c zvHIr9yb0?LyO*sk^o^`tKCx@Ts!w{#d%drByy{q}l|S*=p0)3^=A7nSc{|xbLTTcZ z^OKtvUH|5y9_4&leWUr_T@TfDMGs0X^gEr#!u&oYQ?$0NeX+tOm1&Q-m;BtiW}#re zz>CZC+rn=@I{Q=VG*=_@#$8%_t}@;;)%~!1r&sCWfV7j-7MX3bzw!Sc>-=NC^52?$ zFZp--*Rf~c_}2e_9Ubj&zWwE&?Q_|G-`{KV@7MZiUz25+OPeD-vl(|iy5SYHAU*ZO zw?*4lul4TC7P>d-XDj!zRTIPwzB0*7V#|2FC_QUQbL_?Lsa}n51X+qa!yUC3&tO_4 zrmZ8hb!~NXu!&&qw-m4ES2vwrd-Tncf?#v81ua7H39VmzA75$R+P(f?kx-zoVtrKj zlMf4b_$)9_YW=nLt5$Q|t1Sm3qt}+2J$N4*y*|6lsIzAI>ZYZpRnbS*S=m>+emmkY zbtB{RJhm@I)%W*a>Q7v|=!Dd{;#<4oW-hbQ2`vA6<$QhqoYfWVs}Jb@sjvR~((Ub^ z@5^8N%YFNQ@APNwcmK_;7Fh+|j+guMc4%aVa&Oai0SJ~YE(nPzsIeyg_U!Tmr zqa8SF^#s`j#=Y`(>TzePFI>=G;J1kTY~H4z%8CR|?R3lduw)yJS&`T8@Vz>;zff@g z;(3u>H~-l9-Z-kyO3VINj3 z(vNxj^`>7b|4ge6*~-Z?jvZzyaf&-CT;midwZ3j&4D!P^)U6FX< z!#Uk+$6GAjweA(%^1PlZxM%jUJc$`MjjwzNjpVGDZ{lC_oOR*)7rrg0S}s{!{#t%c z%!&8z$@Ha`xtjzQv3aFtnr50P?h<&xm$7Bu?ai?o(+oLsn5WJA&?L{yyT5Rgx5Otu zRbDp!Qf{l|E9b&U6Y`_HFkH`Kmzeg44b zy~T{FY)yLk!d)vuP>%i&#EDzrqY^hVX<;wk`?z=2?+Tw4E3;yUm zFtMJpX7hBP&@T%wE>klvxLDdg`{0Ji)!D(phkY8$w_h?~ns>X$Pu=YC4TsE@#@T0% zot3(BEM>`_(3B~sAMwA-+*W-^^7FTCaZi6;pPpjP`paDGf_Y}URn!5Wy|*+=c$M@E g<$mhCb!4utkek>3{Nd04AOFbbFI?x#aEO5c07?KSvH$=8 literal 0 HcmV?d00001 diff --git a/packages/asciidoc-loader/djencks-asciidoc-loader-v2.3.0-beta.1.tgz b/packages/asciidoc-loader/djencks-asciidoc-loader-v2.3.0-beta.1.tgz new file mode 100644 index 0000000000000000000000000000000000000000..49b5331a2fea9e474f01071206488768eaf627c6 GIT binary patch literal 10732 zcmb2|=3oE=;kUJat8bf4`tbM7ex}U7GAqLt?HBs(7kj-_`2D*1XRqHp_wL@}AE6nX z({2VV>09!sBKB+k{+|cbJ#Tux+Ex92mcMkjf`J4NTl36=3^R@{{{GSB-|hULYu2xS zZ&RuCLEq-<{y&>GZr#f|vuvAOe%IID(jTh--k!|wnp*WNRaCs;)cV%o{5$_ArXTye zzpP}#|2Ib+MfyKqoO_+|%Z<7}6`B9@?e10Z{QaMjoh-d0A)WnK{pXL**lP9{`$WyF z|M>5|=;u~-1yL*1pk?(w7X*2EUsZ%Fg*Qj?b5!tBmWKYleAk9lp)^v1h7tRxZ zq;&3B<|N;oiGlM%jaIu%TB)^Y>S3=X3@dU@hdfm}_lEsPz1>m$pEv!BLOKin|5)N| zde*zsAelF;C-mA5uTMT_Ce37VNeRhzJf*us=TW4l-m0vyZ`ZFS{o#7b=r=`tvgwb= 
zs!sc<+0C`@|B7VEbyx7L)p*Jtd1U1T`GvfyA#q)=l-9MJ>9ARrm2oRIZqN6P({8pe zIbrN#hF#!_4|+^$gLU9@A6<8nv`iW-gac(L6U<Cm+G=mj#$^hM4|MgOMm*WY>gf5B z@5rZBYo1KWnjQXA8?9^h_f!t;=g? zEc7_{Y--4pxkpunI4TlP#pd0*>vl`@^7S(<7tS)DD?jzd=Irs$CVDQBf`?LrJPjvG ze$h!0N{&>U%yX-Ejk%V}6GpY~k$l}6uV&j_xAc1QYZG6n@yZVM$xePrI{97(Dleo{ zSXV@TPg6{{JaDqxCD5;s`J$^|VuC`Jcf{?dkAfGRdbh4(*7_s8JQ6iWhewOm;MJBVd&IsyeVS@=_W#5snxPxi zHs725c&qsO`RjAtUS~Ip|MA>2Z$6{{OSP{3MwzTnEJYQcygRtwZ_B6Z(~s^yzWCYx z#Hagf`uwl|-)3G`b}jMW{TsJxJ}vlv_xR%9@qb%6_0GTB)ffF`fA8C~x_AFaN577~ z@IP95_G|uM_iJsgYdh6UJsSKoG`{BR;UDTVPTY?cU2#0>a(L{SrD-Z>c1Y&BzP1hE zf0WT|f5^sCYf|J=Bb)y$rv2)RtuK5jc;xtXg-E>p%{xmtU%Xy>+`1>?l#~3#ph@#C zKdvmDC_W=*_Cc2$c9%|RfJdh{&8eUD_y}vSXuZ=`-!RT8GhBrKema~YrLHO7D73ugmaE;=6_XMd z&)TfNIDblq_8C*pRVH??i`Ax1|2+4s`i(u=Zs~DL^aAwSwyv*qy*_EvoYO*vQ34_k zlWs7kP2b9L>!;w8?q}jz6=qDS$F6;DUc7Zp?DG#g?5>yYSL)p^kj;zW3$- zm2cf>_-7jFsk?1%OGHH0-s#~BruNT$@Avy`G{2N3v#HK%pIuHH|MBfCopgM;)y0eY zrI(i42peDVJ;fbxmt*VHqkmT^p8Lbka-PBSN@Ts(`%Qh(PyYXU_RH_=zxI_0pB?|* zKQ6prg0ADD-c<>ytvQO4C;K<=VQ@|f-=j2BX0CY9Kj$`!2Vqln_^cnza=Ur({&&X> zax(n$W#xGN4q(0Z@@j+kzEw9LW zJl~e7Jm&cG*Z-rVqxZ^~bOO}L%UEB9F z&9z2(o{;*&>8GqrTc4?2co}$B!t?1l?N9uiTJP6Mo4N-(`f)WRt>xGxw?#iPz*YT) zMG=FxwSrjs0*COuPs*-q{+#wGNlCjz@8zB3j5jO16qv6aT4($A>z@fCS8e=e$A!7Q zY+pHN-btp0t7|m=%BJnB&{`)eouMeB8uZ2HbH@L#iBH5HOI9*&UG`Fv=bq3f`_TPI zf7(bdJQrEz`fo+hT&KW!E8~v6^4Te!DJ-2dYnj-lS>hMsue-0}7JIWc>@RogoMkMj z+{Owkwu%PZcQ0U`>)R~4(0ReST(|ZdhG$WSS$FH1Dl@O{k*K)QsI=_Yl4GwE=l)!L z)hDh^x8=2gj7kF|`>pEW_NYtC>iOm#pZfpFl>fZ1eth{^|C!T=Vavbm`@S0dzkmGk zS9{}cbFY7V`FH*G|K{cM*4TgfUu#qMe{=h@b+31v{a3$fEAL-^<@(y}^xDa9{(n8r z|L=ZvWzApzmHA&&>mM^KZ`aXw@?x!MKHdM1%Q7o}#m6meOh+CW?MpH}oKdlKhtDeR zmTf75;wC|k|C$8XT`6;Y^g<&l`LSw2lo)HBTf#Y~Z_hOgUhwHBCY%uC&0A}^-+Gr} z+Uu822miBfZ?pbq`{sQB-yNHO=G(WGnf3l*7iBp1|MBtua=BywYi+iE+^@zLzo)LW zWYhmoE*=lMmG(Z`_$P~vnT^eigTd&|5{ovcn=Hp(YREt4mrBm}`*-8^jk>yq$dBLa zH6~B$vUWat@j|s_&1=!mYrK{jy>OXPs%j~oxWJKJ!(zuSjx#&7)~`zo`ty7L>Ma}o zX- zKQhnSI3mq>?xp1$H=o}e?(fg%KTn<~{*C->+80bYTWe%vRk5$W=*RWuhZpZF 
zs!Mb;d6a%A6x;W^KL5Vkb?Lijk0#`Poi$r@YLS>|(X>NvgxIoAI2`z-_V8`APbIhi zg~&PkHoIh5-Q2C4SGs8a{q1`eBn#O6_Bon|_1n+)&FA<3_xtNXLy^LqC!xV= zHvN}_%9IpT3*03ht+^q%`#$#{#=~hb|9I2h8dQWWb3DGQuHxzTBIoy~YWkcGw`5K= zwp?WBGI?8+$h!5+?(3Qo4@9aqoAz99*T{4**ND_uxhiDF1m=IoOrLwmc+WUh9JNoa zzn{zJ^W$?j0#m<;eFWROk9Wc{j#S+ymdyX({D~inC;CHFnF3ia(nPK_oIm+uQT4P#_X1T=X z+|*kaTh=1If0-Rq{*|RIJL_uCBt894mspIaiL9M&y2ESz)+)L6 z%VVBtYL{^vN>p<0mIy23`_b`t=_hUbGN)3;4?T4wQ8_5XzRjcfa#RPKxxE$;kk z8+v7;>Bol^dBw8Moy<)Po{Qgcm-n)h9^ zt8~vj&YToi>KncLMy1ASMSWl2S63%2aK6mBFhoq5S@Ylz@fV*j+HH_~B`4Fy=zIK9 z?SY9}Lc3Q~rG9L8cysaoRfjKy+l22+i7YCrV>_f5vpvFfp!oN6S*mh>6XTfIT;!uIjg%ed>1nZh?;%i7W^il>saXU*3GnSf8hsXQI$jgr>IAs z;GH$!H78z<@$}wf&pH!M{A6}zQjhy}+0mlaL}shYd55Sk+^3d=y|fGtet5u#`>&2~JSZ_8@RQ1?TTLGLy5W+oBQoC8ieTP~OcyXQ?h-xm8R@PPOfttmoT z$JX+Tf4XgaXp`&C*hSxdaSNRJl%~lrLvCNm)hJK#R^ATnplP$Xa#j?t+~`v&?IDyJ zZLpNJb=Ltg`~Ny@%O^;lRhzx0y)W3R>EvGq3&kTjEed~nw@l!dW_h{jLB>Q&w{to& zMTc74J~{08o8rK?HS2QzrM=pxe#o4Vcrd>qf@}67sjd+A;I+=8FU-B!r!u4lMMjo7 zcIy3o*Su%Kirzg<8}DD7+quDWqRgk`ik@w;jhBU=K5(0-?i%@b#<82fpDPsbAD5RD zT&1kU|7!K;3+5J9Hm??bih2NV?D))DlJpZ`ILv6uqJt?On zGyf;sMk=p-k$Pa$jgZTSIkhZQ`--#TX2mMHsNRvu+x07V!-l>mccs?7IQKDFNnnzH z>AwqyS0=M<_PVloMOgQ}7Y|J|gESgdS|$p#ae5y-QlsD&w*T#cwassI6i-=b9-OlO zQ%8Wl^h%$4(<6~?Nm7Y+Jsp=sPVZhiLs!H2|C-wCZ!W#ipP^L!WRj)obH?XEENSO1 zT`BecI9-k_PV2<`i9$!#Rz7!qmDYMa)zb6wCXba&Q@Xf%Uy6QxxTa(C#n!1ybOjEY z7`(arX{O?isfDL{8GLUuht6T0qkU%|%XF=I6(YUr^E+6@OpLB63%s*i<1p*4htk~6 zMppB%D#7VKYZXj3*f0LMY{wyg)tj7$x}umG4qfZ;op8m~X4MwoFHT_}B~;i}CagO$ z`xXB_w@dCXdpD)tS80`9T=>A;;pzD$j4iV9>t0y$E3`>fJyFp*aO;=#!6zFo7b?^o z;d2h}=051@!m;|5=^K~Tp>{5B68>Giy5quu6{|vXZ;7z|Nv5A($2c{ zapb)H*VVpHbeXU8)Ie>^yhj&HmDMd@ygN1PTI)&wwCS8*pH236>b$O$93yq+$jl>i zHIfU9py8p4oaWclp)R=~bt{s5{3Unr+T2_V-Qv6eIav3c=QJMkFB^3gb%X-pPly9%0sfogsNgWYaF0jK~gmliAb7#a6H_ z6ft^re#x4jTLT(yDaEEMZg_ks-@N2%l5@}hi(!^ekIQv>PCTE!#cxM?g3OL5t9Q@l z+4O3C*uM?2YedVAKDaU~*?YJD7K`I$bKm7$4m@r&b!v`S&iC*;6Lvq_X5_~^{bNp& z_G8P`vgocW66b3&4dxvd?G54y*aZYor= 
zNd%k9nJ3K4C=nF@y})+!YfG*PKem?s;$U%oDsKE|UAWK{qg_`O3V#<~EuJ>l;cY>9 z_ZKeVs@409xu(uO>@#hP%R%peQ=;z^lAIKjSd~kpV#Uq0|1pHKPA-aH8-M4L$AurM zZWr};be5gyYZhwR`NzS|#+N0?CfK1-scylp)7%2G3+x+qDW7Ez^YvQh`?A1P{`V$r zwm5H*O<6ZpWcaSGKbL9Jo1WgTMmS@rTu2SkyG!K$Qc&r{@zvNr1|?B!RJ4p zj^nbO#1*h)s!;pFh=fw@R6kdXCcEx!43T=`AKBk1T&UJh{nPj*r>}X#=8KyjKR*`! z(hfq#`RR#oj0VGoSVSkRq(Eu+*_a;c$Q+~V**Wk;))U*6St z=sHV`uITRz_3@8nUtANp-mbg+RpDd%=b@ASiIvtI{MUbW?%Vbs|9@B9`RC4lK0omI z^e_Lv9sX(m{nM6r|HBgJmWwjgo&MK<_U%{Rz5lC=A9KF@U$JTD#|{6#$IaXJcYpo5 zWiKbIK0GA;V{Ps7_w)VZZ0a8!F`JdU;K@Q`#!2Ecmh~O`ZDfCE{k>nSoX$sW+o$y= z@b>q^db8`yoE5nK2fdZLzv1UrxB1uRuAla5Vb`gYr>~u30t6=C)~+-EXm_>Y_wwj& z4rK?1W6XkA8LQrFhm~Jl;o|!{-(Tmi*h0a;DMm-OGH+0LeN20GW#83RIthFeudbfa zw{o>~;)hGe^dgM&{`^^1W~CAHOUWgf(d4|+IWHTtAl<*a**34+H!0X4=!EC-89Y9k zF>Q_xf-~hZR?eAqD5Lh}qIJ)YJXmF~=Xg{kV$Bt6&1g9EnhlUNqUyj>i_Z+ zQ@#dgF7lk@Y3Y`}UFN99mrZ*XMa2K8wev5OeXS&a_yBvenAU`ZoCk{QDiNzxr>F`Q*;|e=p7B_aFbDC+Er?axI5Wjv%T5h) z?btQp%rPC^w|`c#m^SJ>tnSF(99wt1V`ZY{{F6x?iav#=yC$@r5~wh}HQg_qXV0oB z+LGS9CE;^l6&Ud8MO;$5`uMQQ;kD|{am$KlFOTfo_x^JFyyqpW3%nM+%u-rCdG*XI zevkT(*r)XwB`#_4+q*zG`Jna93kPMQmVJ(Xws!H4g%!LFGRYa)OL{I=3i@XlGcNK< zIXP*|;=Qly=0*8lw_0+pP3`fsrAmjleyL2m)-Hanf4y7G3!m#TtP?aO%jc#zdvt{s z?$4My;gMPNrR<4w-df%=@4nFHcWXwPSLCe3{WBR%d|mD)#Iptp9=Y?`Z$T}>_VVa&E)Tv4<MVAh0 z?#lSMIMuOy=}jm1vP%y6lV;iMx_RrO&JU|&Q^k2+^7e!n%vmq+ZMuZ`I@ymu59Q6& z$eQS&!64{Asd{^t*&L4BE>g>WZ(5_bWWx33EvcO!!zDHp*i2q*wsGmN6`xN|dOi8+ zw5d^rr^3^TwhTHo<8}r@EFOjp>q%ak1~8J;Qps zk!bC|t!s{+`EtPP|D82Yik&7i&Z=4ZN@ZD828|7Rs%n#dJf8N8&-tDoyOAE9VJuRr+J;u zei*y3aatSm*VCQ)i&@lfZ0wa=$SqiY*Xj1|p4oLzKg<#I@)msbWtYLnNk)$+m)JE- zwGnGSpCMea`|yS3_hh?QPW$|Smiq30%bx$wdy{kdZ~nWV_13TXAOEkLcmKfu<>!z6 z{P^?3%hk`<$1Qq#`#rngr;qh}Z`xZO_;PUN%CAok-p@F!BfEI>w`~=(-~Rn$79AY# z|LuRv?Q{I|ZY?gmc5hy5^6aDVEKD}h%d&zbKP-P0H$<{J0L(o8T}$nkITu}dGh7JbUuVOE;wBd#80m%1ej>5m zGFKsVgOYjSUG4ISPR&@S^0qC(-Gy5MJKMYs2_Nu>Lu1 zF2Ll@Vi6jBTv~%W(V>NPTP6Dz&t;!8*H&H#UVUB8d`HPW;nxY-7W2iMB%S;2XI{(h z&n#cFYhv}Gn-Oz!!=zdN?G##N9{%%8M`8@`#-Gkx9&PcQt?|{X`ODs0t9t$K%KrSr 
znwau=srqr?qUjBxSU1UsSG1|E)vs3Pu0yIN|WD zS1SDGLq097?d!Ak!e4HDKiPk#agK*gOEmM96hSkN2Z=}KaqD-UyZg~#wPpAI%T3Sc z?36m%ZdHEp=Y^Lp*OIR+VN%hzcNbLn_CfEuWWGj*z%BX5RXM8DPJME6= zGP{rMxZ0dMZ>spoO3{7Q$+n++A73~4Cj0a#@8ij{_6XP(Z_mDe#rDABe0Ddzs)J2V zp(v;7zqr=B$_$6JBBkob8Y|zc*NT_CscNt+zcA;*gSyLw&m-hSnxgJ>8A$AWzCEJm zxRLL&ttNBM&rz{jtGc9TM|St>jl5lp4u~s z&AoXJf2iiu#IS?XmZz(vpFCLgcu|+e6d%Q!{X1tE>^*b4eUZi@<;OohoX$RY@mH+8#GZY24R3J{c(r4-LZ{ZND+&60%H92vPbbv9&7C0q^Yg@Sfye&DUifxC@c5oj z6AJbgPhOZ6>N_Lx45??hllKrTKdksxHL1u+Uasxs&})LQNT*pUbU%pv!$2> z)EX?;xka?dXiB7qABvyjCg8Q>uhaf)Cc%FXs&r4-rmOqftn}P`o1=&SdaQ7%x5v5O z9TV@>iA}$umtEb%D(_`9I zui9se4Ijz!EwehgOQGT7rpajW4L&=pJ~bbwRX$1+#hUoy7JM^WW%N!CsSSFnznTD zty7mXI)1(Cv*hEkmqlesoY&8}m)S&rV`$xyU2pez>QTKJ6JAR6cSUTR#8CRi;Kws* zZO^Run8myJ1h?ORt->YS7?d$r*e2BN`pbIr`I^0pgL8`3KF(jcZ7&;h_R>h+`p;QA zm-e~$t#jCLrOmy(e6gsS`GgPMCmc5|tKA{Jvp!~H@p(_x;0GWD=1D!8AeYr4X;a%xL0TaF+J#h0{ zz;hSr%6D4RX6+U^W;!=^t@=gB&SiOD66KU44;MHnzI}G3IBR`dh`E^1cZ1_rylEw6 z$|*0qdS@vzv}O6uF7?;jcz$~hese>Cy_1fex?(u?$%3eCcEdgVoq zabJ+U*O@Q<>)-AfJEeIdj&^f@*Rrw8FaP>0#+q4dZh7gQDMxaBS&L^l-D~H1Fk$WE z4|lA@w=B4}c?$dS;8n9XmdspgAG?lpRfb@o;!r5pX9R4Tu&*p+9nIWh9> zH;Ihgxu5QB{k2R~{%YkqmMfu=50|7*WK{Wha)I-u+%5ZsTBoi%D=YC;@narO zu{tm7g(a6{`Wr7jZe%=Mv0$6R%C`qLnAZNfv7*20yd3|wpu3iT!sp+1n6@D?r?_sl zr=g2+dBl!i%&Jyj+D}S{FSTh7l+`Xhw8YVM;kp+APu6^@uxR;y@*m$OA>N&QANSsH zFy0Wdaj(bUDBqeFtW^cp7Zi^bYR;MMo4>#|_}1d9&oi%A7-<{)V~=dOdr7n*z%x-Q z>gcs~$*-&~N}N$kkPrqJWhDiFUY)&ox%uJ4%?~#>3mfFb$*$Zm;b7MP_RCwJWiI&_D!o-z(Msyd?pin5 zEf&8@x7&PPb=@ia@N?^1)>AjOJxiKoJK^&EGk)A2TlD8XeDc{#?6S;>hlzgs-kk1E zDS3GC`QfscN|OZCg!UP$PS&V-#PmL^;Jv5e^`AGaOFgpmuC%iL;_;~q)J=Tpk)9x= zROMYOsvdds-Zkm}?}8RS-8FBTveEL7KFtoc?(tGrMfV)(bU%?b z`JP#jF4MM{YnO^*XYPELcbiwE&ieY%Swzd zDKX`WcKoiyeU<5z`jUn*gb+;vMo{(T{_BG0dE^YUfpWzCXtH%&h=XZ`&6sz7GfZld4Ax0md~S| zwS`rZ@l&HRHr!Yk-K_WVOwt#<7a6)DitoPKh;@{6w@OvLl3~2QA3fc(J_uW!k2P4{8eZLN2}79sjX}xw?Czvg(7`6DNsRTItO$|Nn4lp+%wZ z<43l~tB?Mwn;^F6wzHdO(uBT8VN6#7d1g)dRi!vZJ=oGO>CR2>mp49`7E8aM=Pk%^ 
zX*-KuSz>dJp-ZXo$3KfAi(`hMP6+{$fhD^nPw;t5vVI+S{gW z)2=f|XD@!f)OOZA+qQjO_a~oftNM{K<$U6~MA?b_+uL8Za{U%hYr5yS{Gf2eu8*5^ zo(EgKwbx+1=hNsV%i8itwC>x7Fr5pk`CU8DProD}kvjjW#XCof#tupK=Or8=tee*U z`4hZ)3#&tvW%edJqa}wDz6P#-IAOb3klK+=LCjz5-mo?9yD#*K(?`j=ov-%l#JxXQ z&oX{2JYHEKge7 zRmY{Y=XYdl@PxC=+{DX=ue}<~`X_5B!VEAGIw! zGfCGqH|78JI}AVd|Cv0if4zO@`p@&t%;vxO@Mr(Nty}m0WO?@g_yPu-GylJT{PB7J ztZV%f*Fs(1gcnv=xUF>4Liz0H^Ad}mw!Lh06GZ2ROxzH{6B z!rSL3w|?<`e5G|~_j=JHp^H8Xzs^~A;^VuwrR&tt!J?2P5kd~CZB)(`~UTqD=(H`JaBs+`_KQ;Z~vQ{EwT!_K40$7 z9JSSpUnzokMMZke%?duvz$hZXh8dt z(ldF>g4TXGAQE=2XdlPnH48ElW24Igrg!>w{or|fCg{ww^vpxvXOzD5Y~hm@`{ia_ zJ!|HJ&hGU_hYLKO{|GvE()`<&g`(GU^pgvBZomE8P5hceHru0KH8J<}oDzs?xM)arY=Tqo8=|8WLz~c(f`36lqhs^ zMR&;TW1@QNKYDg~DJwCxsTnZ&Mwo0r>+xdCnva(5-Ya(fVcoYUqvZEl?#zF&yUVxC zTc-6JJiE$mxIt!LcTDd&_NR(PcNyMIwRpQWVpD*4-KWa7xxW_8i&g)9Y>V|W0f)W6 ztb6Z!_rI+C`rhfYjfRWGsm#00idmV=IvT=P?g{KT#y-WEeR13E!`63ALiW6ui7G!l zQzJHf!iRbM(%16tsGhT45&4Q&v&Lds;yAcl zyX3y*!HgfLZhl$0Db3-_4&RJU5y)T?qwsxoX$tUcK`FpRKw03l_dp*xs!YsQ~wyeutw4!`Ihw_89 zCojsrc%S!hPSl>wni { + let inputFile + + const expectLink = (html, url, content) => expect(html).to.include(`${content}`) + const expectUnresolvedPageLink = (html, url, content) => + expect(html).to.include(`${content}`) + const expectPageLink = (html, url, content) => expect(html).to.include(`${content}`) + + const setInputFileContents = (contents) => { + inputFile.contents = Buffer.from(contents) + } + + const captureStderr = (block) => { + const messages = [] + const defaultStderrWrite = process.stderr.write + process.stderr.write = (msg) => messages.push(msg) + const returnVal = block() + process.stderr.write = defaultStderrWrite + return [returnVal, messages] + } + + beforeEach(() => { + inputFile = { + path: 'modules/module-a/pages/page-a.adoc', + dirname: 'modules/module-a/pages', + src: { + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'page-a.adoc', + basename: 'page-a.adoc', + stem: 'page-a', + extname: 
'.adoc', + }, + pub: { + url: '/component-a/module-a/page-a.html', + moduleRootPath: '.', + rootPath: '../..', + }, + } + }) + + it('should export loadAsciiDoc as default function', () => { + expect(require('@antora/asciidoc-loader')).to.equal(loadAsciiDoc) + }) + + it('should load document model from AsciiDoc contents', () => { + const contents = heredoc` + = Document Title + + == Section Title + + paragraph + + * list item 1 + * list item 2 + * list item 3 + ` + setInputFileContents(contents) + const doc = loadAsciiDoc(inputFile) + const allBlocks = doc.findBy() + expect(allBlocks).to.have.lengthOf(8) + }) + + it('should load document model with only header from AsciiDoc contents if headerOnly option is set', () => { + const contents = heredoc` + = Document Title + :page-layout: home + + == Section Title + + paragraph + + * list item 1 + * list item 2 + * list item 3 + ` + setInputFileContents(contents) + const doc = loadAsciiDoc(inputFile, undefined, { headerOnly: true }) + expect(doc.getBlocks()).to.have.lengthOf(0) + expect(doc.getDocumentTitle()).to.eql('Document Title') + expect(doc.getAttribute('page-layout')).to.eql('home') + }) + + it('should load document model with only header if headerOnly option is set and doctitle has block attributes', () => { + const contents = heredoc` + // the next line sets the document id + [#docid] + = Document Title + :page-layout: home + + == Section Title + + paragraph + + * list item 1 + * list item 2 + * list item 3 + ` + setInputFileContents(contents) + const doc = loadAsciiDoc(inputFile, undefined, { headerOnly: true }) + expect(doc.getBlocks()).to.have.lengthOf(0) + expect(doc.getDocumentTitle()).to.eql('Document Title') + expect(doc.getId()).to.eql('docid') + expect(doc.getAttribute('page-layout')).to.eql('home') + }) + + it('should not hang on mismatched passthrough syntax', () => { + const contents = 'Link the system library `+libconfig++.so.9+` located at `+/usr/lib64/libconfig++.so.9+`.' 
+ const html = Asciidoctor.convert(contents, { safe: 'safe' }) + expect(html).to.include('+') + }) + + it('should not register Antora enhancements for Asciidoctor globally', () => { + const contents = heredoc` + = Document Title + + xref:1.0@component-b::index.adoc[Component B] + + include::does-not-resolve.adoc[] + ` + const [html, messages] = captureStderr(() => Asciidoctor.convert(contents, { safe: 'safe' })) + expectLink(html, '1.0@component-b::index.html', 'Component B') + expect(html).to.include('Unresolved directive in <stdin> - include::does-not-resolve.adoc[]') + expect(messages).to.have.lengthOf(1) + expect(messages[0]).to.include('line 5: include file not found') + }) + + it('should use UTF-8 as the default String encoding', () => { + expect(String('foo'.encoding)).to.equal('UTF-8') + }) + + it('should return correct bytes for String', () => { + expect('foo'.$bytesize()).to.equal(3) + expect('foo'.$each_byte().$to_a()).to.eql([102, 111, 111]) + }) + + describe('attributes', () => { + it('should assign built-in and Antora integration attributes on document', () => { + setInputFileContents('= Document Title') + const doc = loadAsciiDoc(inputFile, undefined, resolveConfig()) + expect(doc.getBaseDir()).to.equal('modules/module-a/pages') + expect(doc.getAttributes()).to.include({ + // env + env: 'site', + 'env-site': '', + 'site-gen': 'antora', + 'site-gen-antora': '', + // default + 'attribute-missing': 'warn', + icons: 'font', + sectanchors: '', + 'source-highlighter': 'highlight.js', + // intrinsic + docname: 'page-a', + docfile: 'modules/module-a/pages/page-a.adoc', + docdir: doc.getBaseDir(), + docfilesuffix: '.adoc', + imagesdir: '_images', + attachmentsdir: '_attachments', + partialsdir: 'partial$', + examplesdir: 'example$', + // page + 'page-component-name': 'component-a', + 'page-component-version': 'master', + 'page-version': 'master', + 'page-module': 'module-a', + 'page-relative': 'page-a.adoc', + 'page-src-path': 'page-a.adoc', + // computed + 
doctitle: 'Document Title', + notitle: '', + embedded: '', + 'safe-mode-name': 'safe', + 'safe-mode-safe': '', + }) + }) + + it('should assign Antora integration attributes on document for page in topic folder', () => { + inputFile = mockContentCatalog({ + version: '4.5.6', + family: 'page', + relative: 'topic-a/page-a.adoc', + contents: '= Document Title', + }).getAll()[0] + const doc = loadAsciiDoc(inputFile, undefined, resolveConfig()) + expect(doc.getAttributes()).to.include({ + imagesdir: '../_images', + attachmentsdir: '../_attachments', + }) + }) + + it('should set page attributes even if file is not in page family', () => { + const inputFile = mockContentCatalog({ + version: '4.5', + family: 'nav', + relative: 'nav.adoc', + contents: '* xref:module-a:index.adoc[Module A]', + }).getAll()[0] + const doc = loadAsciiDoc(inputFile) + expect(doc.getAttributes()).to.include.keys( + 'page-component-name', + 'page-component-version', + 'page-version', + 'page-module', + 'page-relative', + 'page-src-path' + ) + }) + + it('should set page component title if component is found in content catalog', () => { + const contentCatalog = mockContentCatalog({ + version: '4.5', + family: 'page', + relative: 'page-a.adoc', + contents: '= Document Title', + }) + contentCatalog.getComponent('component-a').title = 'Component A' + const inputFile = contentCatalog.getAll()[0] + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(doc.getAttributes()).to.include({ + 'page-component-name': 'component-a', + 'page-component-title': 'Component A', + }) + }) + + it('should set page component display version if component is found in content catalog', () => { + const contentCatalog = mockContentCatalog({ + version: '4.5', + family: 'page', + relative: 'page-a.adoc', + contents: '= Document Title', + }) + contentCatalog.getComponent('component-a').latest.displayVersion = '4.5 LTS' + const inputFile = contentCatalog.getAll()[0] + const doc = loadAsciiDoc(inputFile, contentCatalog) + 
expect(doc.getAttributes()).to.include({ + 'page-component-name': 'component-a', + 'page-component-display-version': '4.5 LTS', + }) + }) + + it('should set page origin attributes if origin information is available for file from branch', () => { + const contentCatalog = mockContentCatalog({ + version: '4.5.x', + family: 'page', + relative: 'page-a.adoc', + contents: '= Document Title', + }) + const inputFileFromBranch = contentCatalog.getAll()[0] + inputFileFromBranch.src.origin = { + type: 'git', + url: 'https://example.org/component-a.git', + startPath: 'docs', + branch: 'v4.5.x', + refhash: 'a185bc03d7c07a3a98dcd14214d884ebd6387578', + } + const docFromBranch = loadAsciiDoc(inputFileFromBranch, contentCatalog) + expect(docFromBranch.getAttributes()).to.include({ + 'page-origin-type': 'git', + 'page-origin-url': 'https://example.org/component-a.git', + 'page-origin-start-path': 'docs', + 'page-origin-branch': 'v4.5.x', + 'page-origin-refname': 'v4.5.x', + 'page-origin-reftype': 'branch', + 'page-origin-refhash': 'a185bc03d7c07a3a98dcd14214d884ebd6387578', + }) + expect(docFromBranch.hasAttribute('page-origin-tag')).to.be.false() + expect(docFromBranch.hasAttribute('page-origin-worktree')).to.be.false() + }) + + it('should set page origin attributes if origin information is available for file from worktree branch', () => { + const contentCatalog = mockContentCatalog({ + version: '4.5.x', + family: 'page', + relative: 'page-a.adoc', + contents: '= Document Title', + }) + const inputFileFromBranch = contentCatalog.getAll()[0] + inputFileFromBranch.src.origin = { + type: 'git', + url: 'https://example.org/component-a.git', + startPath: 'docs', + branch: 'v4.5.x', + worktree: true, + } + const docFromBranch = loadAsciiDoc(inputFileFromBranch, contentCatalog) + expect(docFromBranch.getAttributes()).to.include({ + 'page-origin-type': 'git', + 'page-origin-url': 'https://example.org/component-a.git', + 'page-origin-start-path': 'docs', + 'page-origin-branch': 'v4.5.x', + 
'page-origin-refname': 'v4.5.x', + 'page-origin-reftype': 'branch', + 'page-origin-refhash': '(worktree)', + 'page-origin-worktree': '', + }) + expect(docFromBranch.hasAttribute('page-origin-tag')).to.be.false() + }) + + it('should set page origin attributes if origin information is available for file from tag', () => { + const contentCatalog = mockContentCatalog({ + version: '4.5.x', + family: 'page', + relative: 'page-a.adoc', + contents: '= Document Title', + }) + const inputFileFromTag = contentCatalog.getAll()[0] + inputFileFromTag.src.origin = { + type: 'git', + url: 'https://example.org/component-a.git', + startPath: '', + tag: 'v4.5.1', + refhash: 'a185bc03d7c07a3a98dcd14214d884ebd6387578', + } + const docFromTag = loadAsciiDoc(inputFileFromTag, contentCatalog) + expect(docFromTag.getAttributes()).to.include({ + 'page-origin-type': 'git', + 'page-origin-url': 'https://example.org/component-a.git', + 'page-origin-start-path': '', + 'page-origin-tag': 'v4.5.1', + 'page-origin-refname': 'v4.5.1', + 'page-origin-reftype': 'tag', + 'page-origin-refhash': 'a185bc03d7c07a3a98dcd14214d884ebd6387578', + }) + expect(docFromTag.hasAttribute('page-origin-branch')).to.be.false() + expect(docFromTag.hasAttribute('page-origin-worktree')).to.be.false() + }) + + it('should add custom attributes to document', () => { + setInputFileContents('= Document Title') + const config = { + attributes: { + 'attribute-missing': 'skip', + icons: '', + idseparator: '-', + 'source-highlighter': 'html-pipeline', + }, + } + const doc = loadAsciiDoc(inputFile, undefined, config) + expect(doc.getAttributes()).to.include(config.attributes) + }) + + it('should allow doctype option to be set on document', () => { + setInputFileContents('contents') + const config = { doctype: 'book' } + const doc = loadAsciiDoc(inputFile, undefined, config) + expect(doc.getDoctype()).to.equal('book') + expect(doc.getBlocks()).to.have.lengthOf(1) + expect(doc.getBlocks()[0].getContext()).to.equal('preamble') + }) + 
+ it('should assign site-url attribute if site url is set in playbook', () => { + setInputFileContents('= Document Title') + const playbook = { + site: { + url: 'https://docs.example.org', + }, + asciidoc: { + attributes: { + 'attribute-missing': 'skip', + icons: '', + idseparator: '-', + 'source-highlighter': 'html-pipeline', + }, + }, + } + const doc = loadAsciiDoc(inputFile, undefined, resolveConfig(playbook)) + const expectedAttributes = { ...playbook.asciidoc.attributes, 'site-url': 'https://docs.example.org' } + expect(doc.getAttributes()).to.include(expectedAttributes) + }) + + it('should assign site-title attribute if site title is set in playbook', () => { + setInputFileContents('= Document Title') + const playbook = { + site: { + title: 'Docs', + }, + asciidoc: { + attributes: { + 'attribute-missing': 'skip', + icons: '', + idseparator: '-', + 'source-highlighter': 'html-pipeline', + }, + }, + } + const doc = loadAsciiDoc(inputFile, undefined, resolveConfig(playbook)) + const expectedAttributes = { ...playbook.asciidoc.attributes, 'site-title': 'Docs' } + expect(doc.getAttributes()).to.include(expectedAttributes) + }) + + it('should not allow custom attributes to override intrinsic attributes', () => { + setInputFileContents('= Document Title') + const config = { + attributes: { + docname: 'foo', + docfile: 'foo.asciidoc', + docfilesuffix: '.asciidoc', + imagesdir: 'images', + attachmentsdir: 'attachments', + examplesdir: 'examples', + partialsdir: 'partials', + }, + } + const doc = loadAsciiDoc(inputFile, undefined, config) + expect(doc.getAttributes()).not.to.include(config.attributes) + expect(doc.getAttributes()).to.include({ docfile: 'modules/module-a/pages/page-a.adoc' }) + }) + }) + + describe('extensions', () => { + it('should not fail if custom extensions are null', () => { + setInputFileContents('= Document Title') + const doc = loadAsciiDoc(inputFile, undefined, { extensions: null }) + expect(doc.getDocumentTitle()).equals('Document Title') + 
}) + + it('should call custom extension to self-register with extension registry per instance', () => { + const contents = heredoc` + [shout] + Release early. Release often. + ` + setInputFileContents(contents) + const shoutBlockExtension = function () { + this.onContext('paragraph') + this.process((parent, reader) => + this.createBlock(parent, 'paragraph', reader.getLines().map((l) => l.toUpperCase())) + ) // prettier-ignore + } + shoutBlockExtension.registered = 0 + shoutBlockExtension.register = (registry) => { + shoutBlockExtension.registered++ + registry.block('shout', shoutBlockExtension) + } + const config = { extensions: [shoutBlockExtension] } + let html + + html = loadAsciiDoc(inputFile, undefined, config).convert() + expect(shoutBlockExtension.registered).to.equal(1) + expect(html).to.include('RELEASE EARLY. RELEASE OFTEN') + + html = loadAsciiDoc(inputFile, undefined, config).convert() + expect(shoutBlockExtension.registered).to.equal(2) + expect(html).to.include('RELEASE EARLY. RELEASE OFTEN') + + let messages + ;[html, messages] = captureStderr(() => loadAsciiDoc(inputFile).convert()) + expect(html).to.include('Release early. Release often.') + // At least we can check the extension wasn't called: + expect(shoutBlockExtension.registered).to.equal(2) + // asciidoctor 2.0.3 does not warn on missing block extension. The string + // 'invalid style for paragraph' is no longer in the asciidoctor.js source code. + // I suspect a different message is logged at info level, but that is inaccessible until + // configurable logging is implemented. 
+ // The change might be partly related to https://github.com/asciidoctor/asciidoctor/issues/3030 + expect(messages).to.have.lengthOf(0) + // expect(messages[0]).to.include('page-a.adoc: line 2: invalid style for paragraph: shout') + }) + + it('should give extension access to context that includes current file and content catalog', () => { + setInputFileContents('files::[]') + const contentCatalog = mockContentCatalog([ + { family: 'page', relative: 'page-a.adoc' }, + { family: 'page', relative: 'page-b.adoc' }, + { family: 'page', relative: 'page-c.adoc' }, + ]) + const config = { extensions: [require(ospath.resolve(FIXTURES_DIR, 'ext/file-report-block-macro.js'))] } + const html = loadAsciiDoc(inputFile, contentCatalog, config).convert() + expect(html).to.include('Files in catalog: 3') + expect(html).to.include('URL of current page: /component-a/module-a/page-a.html') + }) + }) + + describe('include directive', () => { + it('should skip include directive if target prefixed with {partialsdir} cannot be resolved', () => { + const contentCatalog = mockContentCatalog().spyOn('getById') + const inputContents = 'include::{partialsdir}/does-not-exist.adoc[]' + setInputFileContents(inputContents) + const [doc, messages] = captureStderr(() => loadAsciiDoc(inputFile, contentCatalog)) + expect(contentCatalog.getById).to.have.been.called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'partial', + relative: 'does-not-exist.adoc', + }) + expect(messages).to.have.lengthOf(1) + expect(messages[0]).to.include('page-a.adoc: line 1: include target not found: partial$/does-not-exist.adoc') + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('paragraph') + const expectedSource = [ + 'Unresolved include directive in modules/module-a/pages/page-a.adoc', + 'include::partial$/does-not-exist.adoc[]', + ].join(' - ') + 
expect(firstBlock.getSourceLines()).to.eql([expectedSource]) + }) + + it('should skip include directive if target resource ID cannot be resolved', () => { + const contentCatalog = mockContentCatalog().spyOn('getById') + const inputContents = 'include::partial$does-not-exist.adoc[]' + setInputFileContents(inputContents) + const [doc, messages] = captureStderr(() => loadAsciiDoc(inputFile, contentCatalog)) + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'partial', + relative: 'does-not-exist.adoc', + }) + expect(messages).to.have.lengthOf(1) + expect(messages[0]).to.include('page-a.adoc: line 1: include target not found: partial$does-not-exist.adoc') + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('paragraph') + const expectedSource = [ + 'Unresolved include directive in modules/module-a/pages/page-a.adoc', + 'include::partial$does-not-exist.adoc[]', + ].join(' - ') + expect(firstBlock.getSourceLines()).to.eql([expectedSource]) + }) + + it('should not clobber surrounding lines when include target cannot be resolved', () => { + const contentCatalog = mockContentCatalog().spyOn('getById') + const inputContents = 'before\ninclude::partial$does-not-exist.adoc[]\nafter' + setInputFileContents(inputContents) + const [doc, messages] = captureStderr(() => loadAsciiDoc(inputFile, contentCatalog)) + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'partial', + relative: 'does-not-exist.adoc', + }) + expect(messages).to.have.lengthOf(1) + expect(messages[0]).to.include('page-a.adoc: line 2: include target not found: partial$does-not-exist.adoc') + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('paragraph') + const expectedSource = [ + 'Unresolved 
include directive in modules/module-a/pages/page-a.adoc', + 'include::partial$does-not-exist.adoc[]', + ].join(' - ') + expect(firstBlock.getSourceLines()).to.eql(['before', expectedSource, 'after']) + }) + + it('should not crash if contents of included file is undefined', () => { + const contentCatalog = mockContentCatalog({ + family: 'partial', + relative: 'undefined-contents.adoc', + }).spyOn('getById') + contentCatalog.getFiles()[0].contents = undefined + setInputFileContents(heredoc` + before + include::partial$undefined-contents.adoc[] + after + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'partial', + relative: 'undefined-contents.adoc', + }) + const para = doc.getBlocks()[0] + expect(para).not.to.be.undefined() + expect(para.getContext()).to.equal('paragraph') + expect(para.getSourceLines()).to.eql(['before', 'after']) + }) + + it('should resolve include target prefixed with {partialsdir}', () => { + const includeContents = 'Hello, World!' + const contentCatalog = mockContentCatalog({ + family: 'partial', + relative: 'greeting.adoc', + contents: includeContents, + }).spyOn('getById') + setInputFileContents('include::{partialsdir}/greeting.adoc[]') + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'partial', + relative: 'greeting.adoc', + }) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('paragraph') + expect(firstBlock.getSourceLines()).to.eql([includeContents]) + }) + + it('should resolve include target with resource ID in partial family', () => { + const includeContents = 'Hello, World!' 
+ const contentCatalog = mockContentCatalog({ + family: 'partial', + relative: 'greeting.adoc', + contents: includeContents, + }).spyOn('getById') + setInputFileContents('include::partial$greeting.adoc[]') + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'partial', + relative: 'greeting.adoc', + }) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('paragraph') + expect(firstBlock.getSourceLines()).to.eql([includeContents]) + }) + + it('should resolve include target prefixed with {examplesdir}', () => { + const includeContents = 'puts "Hello, World!"' + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/hello.rb', + contents: includeContents, + }).spyOn('getById') + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/hello.rb[] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'example', + relative: 'ruby/hello.rb', + }) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getStyle()).to.equal('source') + expect(firstBlock.getSourceLines()).to.eql([includeContents]) + }) + + it('should resolve include target with resource ID in example family', () => { + const includeContents = 'puts "Hello, World!"' + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/hello.rb', + contents: includeContents, + }).spyOn('getById') + setInputFileContents(heredoc` + [source,ruby] + ---- + include::example$ruby/hello.rb[] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(contentCatalog.getById) 
+ .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'example', + relative: 'ruby/hello.rb', + }) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getStyle()).to.equal('source') + expect(firstBlock.getSourceLines()).to.eql([includeContents]) + }) + + it('should resolve include target with resource ID in separate module', () => { + const includeContents = 'Hello, World!' + const contentCatalog = mockContentCatalog({ + module: 'another-module', + family: 'partial', + relative: 'greeting.adoc', + contents: includeContents, + }).spyOn('getById') + setInputFileContents('include::another-module:partial$greeting.adoc[]') + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'another-module', + family: 'partial', + relative: 'greeting.adoc', + }) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('paragraph') + expect(firstBlock.getSourceLines()).to.eql([includeContents]) + }) + + it('should resolve include target with resource ID in separate component', () => { + const includeContents = 'Hello, World!' 
+ const contentCatalog = mockContentCatalog({ + component: 'another-component', + version: '1.1', + module: 'ROOT', + family: 'partial', + relative: 'greeting.adoc', + contents: includeContents, + }).spyOn('getById') + setInputFileContents('include::1.1@another-component::partial$greeting.adoc[]') + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'another-component', + version: '1.1', + module: 'ROOT', + family: 'partial', + relative: 'greeting.adoc', + }) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('paragraph') + expect(firstBlock.getSourceLines()).to.eql([includeContents]) + }) + + it('should assume family of target is partial when target is resource ID in separate component', () => { + const includeContents = 'Hello, World!' + const contentCatalog = mockContentCatalog({ + component: 'another-component', + version: '1.1', + module: 'ROOT', + family: 'page', + relative: 'greeting.adoc', + contents: includeContents, + }).spyOn('resolveResource') + setInputFileContents('include::1.1@another-component::greeting.adoc[]') + const [doc, messages] = captureStderr(() => loadAsciiDoc(inputFile, contentCatalog)) + expect(contentCatalog.resolveResource).to.not.have.been.called() + expect(messages).to.have.lengthOf(1) + expect(messages[0]).to.include('line 1: include target not found: 1.1@another-component::greeting.adoc') + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('paragraph') + const expectedSource = [ + 'Unresolved include directive in modules/module-a/pages/page-a.adoc', + 'include::1.1@another-component::greeting.adoc[]', + ].join(' - ') + expect(firstBlock.getSourceLines()).to.eql([expectedSource]) + }) + + it('should assume family of target is partial when target is resource ID in separate version', () => { + const includeContents 
= 'Hello, World!' + const contentCatalog = mockContentCatalog({ + version: '1.1', + family: 'page', + relative: 'greeting.adoc', + contents: includeContents, + }).spyOn('resolveResource') + setInputFileContents('include::1.1@greeting.adoc[]') + const [doc, messages] = captureStderr(() => loadAsciiDoc(inputFile, contentCatalog)) + expect(contentCatalog.resolveResource).to.not.have.been.called() + expect(messages).to.have.lengthOf(1) + expect(messages[0]).to.include('line 1: include target not found: 1.1@greeting.adoc') + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('paragraph') + const expectedSource = [ + 'Unresolved include directive in modules/module-a/pages/page-a.adoc', + 'include::1.1@greeting.adoc[]', + ].join(' - ') + expect(firstBlock.getSourceLines()).to.eql([expectedSource]) + }) + + it('should resolve target of nested include relative to current file', () => { + const outerIncludeContents = 'include::deeply/nested.adoc[]' + const nestedIncludeContents = 'All that is nested is not lost.' 
+ const contentCatalog = mockContentCatalog([ + { + family: 'partial', + relative: 'outer.adoc', + contents: outerIncludeContents, + }, + { + family: 'partial', + relative: 'deeply/nested.adoc', + contents: nestedIncludeContents, + }, + ]).spyOn('getById', 'getByPath') + setInputFileContents('include::{partialsdir}/outer.adoc[]') + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'partial', + relative: 'outer.adoc', + }) + expect(contentCatalog.getByPath) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + path: 'modules/module-a/pages/_partials/deeply/nested.adoc', + }) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('paragraph') + expect(firstBlock.getSourceLines()).to.eql([nestedIncludeContents]) + }) + + it('should skip nested include directive if target cannot be resolved relative to current file', () => { + const outerIncludeContents = 'include::deeply/nested.adoc[]' + const contentCatalog = mockContentCatalog({ + family: 'partial', + relative: 'outer.adoc', + contents: outerIncludeContents, + }).spyOn('getById', 'getByPath') + setInputFileContents('include::{partialsdir}/outer.adoc[]') + const [doc, messages] = captureStderr(() => loadAsciiDoc(inputFile, contentCatalog)) + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'partial', + relative: 'outer.adoc', + }) + expect(contentCatalog.getByPath) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + path: 'modules/module-a/pages/_partials/deeply/nested.adoc', + }) + expect(messages).to.have.lengthOf(1) + expect(messages[0]).to.include('outer.adoc: line 1: include target not found: deeply/nested.adoc') + const firstBlock = doc.getBlocks()[0] + 
expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('paragraph') + const expectedSource = [ + 'Unresolved include directive in modules/module-a/pages/_partials/outer.adoc', + 'include::deeply/nested.adoc[]', + ].join(' - ') + expect(firstBlock.getSourceLines()).to.eql([expectedSource]) + }) + + it('should resolve relative target of nested include in separate module relative to current file', () => { + const outerIncludeContents = 'include::deeply/nested.adoc[]' + const nestedIncludeContents = 'All that is nested is not lost.' + const contentCatalog = mockContentCatalog([ + { + module: 'other-module', + family: 'partial', + relative: 'outer.adoc', + contents: outerIncludeContents, + }, + { + module: 'other-module', + family: 'partial', + relative: 'deeply/nested.adoc', + contents: nestedIncludeContents, + }, + ]).spyOn('resolveResource', 'getByPath') + setInputFileContents('include::other-module:partial$outer.adoc[]') + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(contentCatalog.resolveResource) + .nth(1) + .called.with('other-module:partial$outer.adoc', { + component: inputFile.src.component, + version: inputFile.src.version, + module: inputFile.src.module, + family: 'page', + relative: 'page-a.adoc', + }) + expect(contentCatalog.getByPath) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + path: 'modules/other-module/pages/_partials/deeply/nested.adoc', + }) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('paragraph') + expect(firstBlock.getSourceLines()).to.eql([nestedIncludeContents]) + }) + + it('should resolve target resource ID of nested include in separate module relative to current file', () => { + const outerIncludeContents = 'include::yet-another-module:partial$deeply/nested.adoc[]' + const nestedIncludeContents = 'All that is nested is not lost.' 
+ const contentCatalog = mockContentCatalog([ + { + module: 'other-module', + family: 'partial', + relative: 'outer.adoc', + contents: outerIncludeContents, + }, + { + module: 'yet-another-module', + family: 'partial', + relative: 'deeply/nested.adoc', + contents: nestedIncludeContents, + }, + ]).spyOn('resolveResource') + setInputFileContents('include::other-module:partial$outer.adoc[]') + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(contentCatalog.resolveResource).to.have.been.called.twice() + expect(contentCatalog.resolveResource) + .nth(1) + .called.with('other-module:partial$outer.adoc', { + component: inputFile.src.component, + version: inputFile.src.version, + module: inputFile.src.module, + family: 'page', + relative: 'page-a.adoc', + }) + expect(contentCatalog.resolveResource) + .nth(2) + .called.with('yet-another-module:partial$deeply/nested.adoc', { + component: 'component-a', + version: 'master', + module: 'other-module', + family: 'partial', + relative: 'outer.adoc', + }) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('paragraph') + expect(firstBlock.getSourceLines()).to.eql([nestedIncludeContents]) + }) + + it('should resolve relative target of nested include in separate component relative to current file', () => { + const outerIncludeContents = 'include::deeply/nested.adoc[]' + const nestedIncludeContents = 'All that is nested is not lost.' 
+ const contentCatalog = mockContentCatalog([ + { + component: 'component-b', + module: 'ROOT', + family: 'partial', + relative: 'outer.adoc', + contents: outerIncludeContents, + }, + { + component: 'component-b', + module: 'ROOT', + family: 'partial', + relative: 'deeply/nested.adoc', + contents: nestedIncludeContents, + }, + ]).spyOn('resolveResource', 'getByPath') + setInputFileContents('include::component-b::partial$outer.adoc[]') + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(contentCatalog.resolveResource) + .nth(1) + .called.with('component-b::partial$outer.adoc', { + component: inputFile.src.component, + version: inputFile.src.version, + module: inputFile.src.module, + family: 'page', + relative: 'page-a.adoc', + }) + expect(contentCatalog.getByPath) + .nth(1) + .called.with({ + component: 'component-b', + version: 'master', + path: 'modules/ROOT/pages/_partials/deeply/nested.adoc', + }) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('paragraph') + expect(firstBlock.getSourceLines()).to.eql([nestedIncludeContents]) + }) + + it('should resolve target resource ID of nested include from other component relative to file context', () => { + const outerIncludeContents = 'include::another-module:partial$deeply/nested.adoc[]' + const nestedIncludeContents = 'All that is nested is not lost.' 
+ const contentCatalog = mockContentCatalog([ + { + component: 'component-b', + module: 'ROOT', + family: 'partial', + relative: 'outer.adoc', + contents: outerIncludeContents, + }, + { + component: 'component-b', + module: 'another-module', + family: 'partial', + relative: 'deeply/nested.adoc', + contents: nestedIncludeContents, + }, + ]).spyOn('resolveResource') + setInputFileContents('include::component-b::partial$outer.adoc[]') + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(contentCatalog.resolveResource).to.have.been.called.twice() + expect(contentCatalog.resolveResource) + .nth(1) + .called.with('component-b::partial$outer.adoc', { + component: inputFile.src.component, + version: inputFile.src.version, + module: inputFile.src.module, + family: 'page', + relative: 'page-a.adoc', + }) + expect(contentCatalog.resolveResource) + .nth(2) + .called.with('another-module:partial$deeply/nested.adoc', { + component: 'component-b', + version: 'master', + module: 'ROOT', + family: 'partial', + relative: 'outer.adoc', + }) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('paragraph') + expect(firstBlock.getSourceLines()).to.eql([nestedIncludeContents]) + }) + + it('should ignore current context when resolving nested include if target is qualified resource ID', () => { + const outerIncludeContents = 'include::component-a:module-a:partial$deeply/nested.adoc[]' + const nestedIncludeContents = 'All that is nested is not lost.' 
+ const contentCatalog = mockContentCatalog([ + { + component: 'component-b', + module: 'ROOT', + family: 'partial', + relative: 'outer.adoc', + contents: outerIncludeContents, + }, + { + family: 'partial', + relative: 'deeply/nested.adoc', + contents: nestedIncludeContents, + }, + ]).spyOn('resolveResource') + setInputFileContents('include::component-b::partial$outer.adoc[]') + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(contentCatalog.resolveResource).to.have.been.called.twice() + expect(contentCatalog.resolveResource) + .nth(1) + .called.with('component-b::partial$outer.adoc', { + component: inputFile.src.component, + version: inputFile.src.version, + module: inputFile.src.module, + family: 'page', + relative: 'page-a.adoc', + }) + expect(contentCatalog.resolveResource) + .nth(2) + .called.with('component-a:module-a:partial$deeply/nested.adoc', { + component: 'component-b', + version: 'master', + module: 'ROOT', + family: 'partial', + relative: 'outer.adoc', + }) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('paragraph') + expect(firstBlock.getSourceLines()).to.eql([nestedIncludeContents]) + }) + + it('should skip include directive if max include depth is 0', () => { + const includeContents = 'greetings!' + const contentCatalog = mockContentCatalog({ + family: 'partial', + relative: 'greeting.adoc', + contents: includeContents, + }).spyOn('getById') + setInputFileContents('include::partial$greeting.adoc[]') + const [doc, messages] = captureStderr(() => + loadAsciiDoc(inputFile, contentCatalog, { attributes: { 'max-include-depth': 0 } }) + ) + expect(contentCatalog.getById).to.not.have.been.called() + expect(doc.getBlocks()).to.be.empty() + // I don't understand why the exception should not be raised. 
+ // expect(messages).to.be.empty() + expect(messages).to.have.lengthOf(1) + expect(messages[0].trim()).to.equal('asciidoctor: ERROR: page-a.adoc: line 1: maximum include depth of 0 exceeded') + }) + + it('should skip include directive if max include depth is exceeded', () => { + const includeContents = 'greetings!\n\ninclude::partial$greeting.adoc[]' + const contentCatalog = mockContentCatalog({ + family: 'partial', + relative: 'greeting.adoc', + contents: includeContents, + }).spyOn('getById') + setInputFileContents('include::partial$greeting.adoc[]') + const [doc, messages] = captureStderr(() => loadAsciiDoc(inputFile, contentCatalog)) + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'partial', + relative: 'greeting.adoc', + }) + const maxIncludeDepth = doc.getAttribute('max-include-depth') + expect(doc.getBlocks()).to.have.lengthOf(maxIncludeDepth) + expect(messages).to.have.lengthOf(1) + expect(messages[0].trim()).to.equal( + `asciidoctor: ERROR: greeting.adoc: line 3: maximum include depth of ${maxIncludeDepth} exceeded` + ) + }) + + it('should honor depth set in include directive', () => { + const includeContents = 'greetings!\n\ninclude::partial$hit-up-for-money.adoc[]' + const contentCatalog = mockContentCatalog([ + { family: 'partial', relative: 'greeting.adoc', contents: includeContents }, + { family: 'partial', relative: 'hit-up-for-money.adoc', contents: 'Got some coin for me?' 
}, + ]).spyOn('getById') + setInputFileContents('include::partial$greeting.adoc[depth=0]') + const [doc, messages] = captureStderr(() => loadAsciiDoc(inputFile, contentCatalog)) + expect(contentCatalog.getById).to.have.been.called.once() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'partial', + relative: 'greeting.adoc', + }) + expect(doc.getBlocks()).to.have.lengthOf(1) + expect(messages).to.have.lengthOf(1) + // Asciidoctor now sets 'rel' to the new include depth from the 'depth=0' setting. + expect(messages[0].trim()).to.equal( + 'asciidoctor: ERROR: greeting.adoc: line 3: maximum include depth of 0 exceeded' + ) + }) + + it('should not register include in document catalog', () => { + const includeContents = 'Hello, World!' + const contentCatalog = mockContentCatalog({ + family: 'partial', + relative: 'greeting.adoc', + contents: includeContents, + }).spyOn('getById') + setInputFileContents('include::{partialsdir}/greeting.adoc[]') + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'partial', + relative: 'greeting.adoc', + }) + expect(doc.getCatalog().includes['$key?']('greeting')).to.be.true() + expect(doc.getCatalog().includes['$[]']('greeting')).to.equal(global.Opal.nil) + }) + + it('should not mangle a page reference if reference matches rootname of include', () => { + const includeContents = 'Hello, World!' 
+ const contentCatalog = mockContentCatalog([ + { + family: 'partial', + relative: 'greeting.adoc', + contents: includeContents, + }, + { + family: 'page', + relative: 'greeting.adoc', + }, + ]).spyOn('getById') + setInputFileContents('include::{partialsdir}/greeting.adoc[]\n\nsee xref:greeting.adoc#message[greeting message]') + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(doc.convert()).to.include(' { + const includeContents = heredoc` + puts 1 + puts 2 + puts 3 + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[lines=] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(includeContents.split('\n')) + }) + + it('should not apply linenum filtering to contents of include if lines attribute has empty values', () => { + const includeContents = heredoc` + puts 1 + puts 2 + puts 3 + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[lines=;] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(includeContents.split('\n')) + }) + + it('should apply linenum filtering to contents of include if lines separated by semi-colons are specified', () => { + const includeContents = heredoc` + # hello + puts "Hello, World!" + # goodbye + puts "Goodbye, World!" 
+ ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[lines=2;4] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(includeContents.split('\n').filter((l) => l.charAt() !== '#')) + }) + + it('should apply linenum filtering to contents of include if lines separated by commas are specified', () => { + const includeContents = heredoc` + # hello + puts "Hello, World!" + # goodbye + puts "Goodbye, World!" + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[lines="2,4"] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(includeContents.split('\n').filter((l) => l.charAt() !== '#')) + }) + + it('should ignore redundant values in lines attribute when applying linenum filtering', () => { + const includeContents = heredoc` + puts "Please stand by..." + # waiting... + # waiting... + puts "Hello, World!" 
+ # the wait is over + # fin + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[lines=4;1;1] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(includeContents.split('\n').filter((l) => l.charAt() !== '#')) + }) + + it('should include all lines in range when applying linenum filtering', () => { + const includeContents = heredoc` + # warming up + puts "Please stand by..." + puts "Hello, World!" + puts "Goodbye, World!" + # fin + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[lines=2..4] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(includeContents.split('\n').filter((l) => l.charAt() !== '#')) + }) + + it('should include all remaining lines when applying linenum filtering when end value is -1', () => { + const includeContents = heredoc` + # warming up + puts "Please stand by..." + puts "Hello, World!" + puts "Goodbye, World!" 
+ ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[lines=2..-1] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(includeContents.split('\n').filter((l) => l.charAt() !== '#')) + }) + + it('should include all remaining lines when applying linenum filtering when end value is not specified', () => { + const includeContents = heredoc` + # warming up + puts "Please stand by..." + puts "Hello, World!" + puts "Goodbye, World!" + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[lines=2..] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(includeContents.split('\n').filter((l) => l.charAt() !== '#')) + }) + + it('should filter out all lines when line number filtering if start value is negative', () => { + const includeContents = heredoc` + puts "Please stand by..." + puts "Hello, World!" + puts "Goodbye, World!" 
+ # fin + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[lines=-1..3] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.be.empty() + }) + + it('should not apply tag filtering to contents of include if tag attribute is empty', () => { + const includeContents = heredoc` + # tag::hello[] + puts "Hello, World!" + # end::hello[] + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tag=] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(includeContents.split('\n')) + }) + + it('should not apply tag filtering to contents of include if tags attribute is empty', () => { + const includeContents = heredoc` + # tag::hello[] + puts "Hello, World!" 
+ # end::hello[] + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tags=] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(includeContents.split('\n')) + }) + + it('should not apply tag filtering to contents of include if tags attribute has empty values', () => { + const includeContents = heredoc` + # tag::hello[] + puts "Hello, World!" + # end::hello[] + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tags=;] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(includeContents.split('\n')) + }) + + it('should apply tag filtering to contents of include if tag is specified', () => { + const includeContents = heredoc` + # greet example + # tag::hello[] + puts "Hello, World!" 
+ # end::hello[] + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tag=hello] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(includeContents.split('\n').filter((l) => l.charAt() !== '#')) + }) + + it('should match tag directives enclosed in circumfix comments', () => { + const cssContents = heredoc` + /* tag::snippet[] */ + header { color: red; } + /* end::snippet[] */ + ` + const mlContents = heredoc` + (* tag::snippet[] *) + let s = SS.empty;; + (* end::snippet[] *) + ` + const contentCatalog = mockContentCatalog([ + { family: 'example', relative: 'theme.css', contents: cssContents }, + { family: 'example', relative: 'empty.ml', contents: mlContents }, + ]) + setInputFileContents(heredoc` + ---- + include::{examplesdir}/theme.css[tag=snippet] + ---- + + ---- + include::{examplesdir}/empty.ml[tag=snippet] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(doc.getBlocks()).to.have.lengthOf(2) + const block0 = doc.getBlocks()[0] + expect(block0.getContext()).to.equal('listing') + expect(block0.getSourceLines()).to.eql([cssContents.split('\n')[1]]) + const block1 = doc.getBlocks()[1] + expect(block1.getContext()).to.equal('listing') + expect(block1.getSourceLines()).to.eql([mlContents.split('\n')[1]]) + }) + + it('should apply tag filtering to contents of include if negated tag is specified', () => { + const includeContents = heredoc` + # tag::hello[] + puts "Hello, World!" 
+ # end::hello[] + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tag=!hello] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.be.empty() + }) + + it('should apply tag filtering to contents of include if tags separated by semi-colons are specified', () => { + const includeContents = heredoc` + # tag::hello[] + puts "Hello, World!" + # end::hello[] + # tag::goodbye[] + puts "Goodbye, World!" + # end::goodbye[] + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tags=hello;goodbye] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(includeContents.split('\n').filter((l) => l.charAt() !== '#')) + }) + + it('should apply tag filtering to contents of include if tags separated by commas are specified', () => { + const includeContents = heredoc` + # tag::hello[] + puts "Hello, World!" + # end::hello[] + # tag::goodbye[] + puts "Goodbye, World!" 
+ # end::goodbye[] + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tags="hello,goodbye"] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(includeContents.split('\n').filter((l) => l.charAt() !== '#')) + }) + + it('should split include tag on comma if present and ignore semi-colons', () => { + const includeContents = heredoc` + # tag::hello[] + puts "Hello, World!" + # end::hello[] + # tag::goodbye;adios[] + puts "Goodbye, World!" + # end::goodbye;adios[] + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tags="hello,goodbye;adios"] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(includeContents.split('\n').filter((l) => l.charAt() !== '#')) + }) + + it('should apply tag filtering to contents of include if negated tags are specified', () => { + const includeContents = heredoc` + # tag::hello[] + puts "Hello, World!" + # end::hello[] + # tag::goodbye[] + puts "Goodbye, World!" 
+ # end::goodbye[] + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tags=*;!goodbye] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(['puts "Hello, World!"']) + }) + + it('should include nested tags when applying tag filtering to contents of include', () => { + const includeContents = heredoc` + # tag::decl[] + msgs = { hello: "Hello, World!", goodbye: "Goodbye, World!" } + # end::decl[] + # tag::output[] + # tag::hello[] + puts msgs[:hello] + # end::hello[] + # tag::goodbye[] + puts msgs[:goodbye] + # end::goodbye[] + # end::output[] + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tags=decl;output;!hello] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql([ + 'msgs = { hello: "Hello, World!", goodbye: "Goodbye, World!" }', + 'puts msgs[:goodbye]', + ]) + }) + + it('should skip redundant tags in include file', () => { + const includeContents = heredoc` + puts "Please stand by..." + # tag::hello[] + # tag::hello[] + puts "Hello, World!" 
+ # end::hello[] + # end::hello[] + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tag=*] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(['puts "Hello, World!"']) + }) + + it('should not select nested tag if outer tag is unselected', () => { + const includeContents = heredoc` + puts "Please stand by..." + # tag::hello[] + # tag::futile[] + puts "Hello, World!" + # end::futile[] + # end::hello[] + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tags=*;!hello] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql([]) + }) + + it('should handle mismatched end tag in include file', () => { + const includeContents = heredoc` + puts "Please stand by..." + # tag::hello[] + puts "Hello, World!" + # tag::goodbye[] + # end::hello[] + puts "Goodbye, World!" 
+ # end::goodbye[] + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tags=hello;goodbye] + ---- + `) + const [doc, messages] = captureStderr(() => loadAsciiDoc(inputFile, contentCatalog)) + const expectedMessage = + "page-a.adoc: line 3: mismatched end tag (expected 'goodbye' but found 'hello')" + + ' at line 5 of include file: modules/module-a/examples/ruby/greet.rb' + expect(messages).to.have.lengthOf(1) + expect(messages[0]).to.include(expectedMessage) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(['puts "Hello, World!"', 'puts "Goodbye, World!"']) + }) + + it('should skip redundant end tag in include file', () => { + const includeContents = heredoc` + puts "Please stand by..." + # tag::hello[] + puts "Hello, World!" + # end::hello[] + # end::hello[] + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tag=hello] + ---- + `) + const [doc, messages] = captureStderr(() => loadAsciiDoc(inputFile, contentCatalog)) + const expectedMessage = + "page-a.adoc: line 3: unexpected end tag 'hello' " + + 'at line 5 of include file: modules/module-a/examples/ruby/greet.rb' + expect(messages).to.have.lengthOf(1) + expect(messages[0]).to.include(expectedMessage) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(['puts "Hello, World!"']) + }) + + it('should warn if include tag is unclosed', () => { + const includeContents = heredoc` + puts "Please stand by..." 
+ # tag::hello[] + puts "Hello, World!" + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tag=hello] + ---- + `) + const [doc, messages] = captureStderr(() => loadAsciiDoc(inputFile, contentCatalog)) + const expectedMessage = + "page-a.adoc: line 3: detected unclosed tag 'hello' " + + 'starting at line 2 of include file: modules/module-a/examples/ruby/greet.rb' + expect(messages).to.have.lengthOf(1) + expect(messages[0]).to.include(expectedMessage) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql(['puts "Hello, World!"']) + }) + + it('should warn if requested include tag is not found', () => { + const includeContents = heredoc` + puts "Please stand by..." + puts "Hello, World!" + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tags=hello;yo] + ---- + `) + const [doc, messages] = captureStderr(() => loadAsciiDoc(inputFile, contentCatalog)) + const expectedMessage = + "page-a.adoc: line 3: tags 'hello, yo' not found in include file: modules/module-a/examples/ruby/greet.rb" + expect(messages).to.have.lengthOf(1) + expect(messages[0]).to.include(expectedMessage) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql([]) + }) + + it('should include all lines except for tag directives when tag wildcard is specified', () => { + const includeContents = heredoc` + msgs = { hello: "Hello, World!", goodbye: "Goodbye, World!" 
} + # tag::hello[] + puts msgs[:hello] + # end::hello[] + # tag::goodbye[] + puts msgs[:goodbye] + # end::goodbye[] + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tags=**] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql([ + 'msgs = { hello: "Hello, World!", goodbye: "Goodbye, World!" }', + 'puts msgs[:hello]', + 'puts msgs[:goodbye]', + ]) + }) + + it('should include lines outside of tags if tag wildcard is specified along with specific tags', () => { + const includeContents = heredoc` + msgs = { hello: "Hello, World!", goodbye: "Goodbye, World!" } + # tag::hello[] + puts msgs[:hello] + # end::hello[] + # tag::goodbye[] + puts msgs[:goodbye] + # end::goodbye[] + ` + const contentCatalog = mockContentCatalog({ + family: 'example', + relative: 'ruby/greet.rb', + contents: includeContents, + }) + setInputFileContents(heredoc` + [source,ruby] + ---- + include::{examplesdir}/ruby/greet.rb[tags=**;!*;goodbye] + ---- + `) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('listing') + expect(firstBlock.getSourceLines()).to.eql([ + 'msgs = { hello: "Hello, World!", goodbye: "Goodbye, World!" 
}', + 'puts msgs[:goodbye]', + ]) + }) + + it('should resolve top-level include target relative to current page', () => { + const includeContents = 'changelog' + const contentCatalog = mockContentCatalog({ + family: 'page', + relative: 'changelog.adoc', + contents: includeContents, + }).spyOn('getByPath') + setInputFileContents('include::changelog.adoc[]') + const doc = loadAsciiDoc(inputFile, contentCatalog) + expect(contentCatalog.getByPath) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + path: 'modules/module-a/pages/changelog.adoc', + }) + const firstBlock = doc.getBlocks()[0] + expect(firstBlock).not.to.be.undefined() + expect(firstBlock.getContext()).to.equal('paragraph') + expect(firstBlock.getSourceLines()).to.eql([includeContents]) + }) + }) + ;[ + {}, + resolveConfig({ + dir: FIXTURES_DIR, + asciidoc: { + converters: ['./converter/delegating-converter.js'], + }, + }), + resolveConfig({ + dir: FIXTURES_DIR, + asciidoc: { + converters: ['./converter/delegating-converter.js', './converter/delegating-converter.js'], + }, + }), + ].forEach((config) => + describe(`page reference macro using custom converters ${(config.converters || []).map( + (converter) => converter(null, null).name + )}`, () => { + describe('page reference macro', () => { + it('should skip invalid page reference with explicit content', () => { + const contentCatalog = mockContentCatalog().spyOn('getById') + setInputFileContents('xref:component-b::.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById).to.not.have.been.called() + expectUnresolvedPageLink(html, '#component-b::', 'The Page Title') + }) + + it('should skip invalid page reference with fragment and explicit content', () => { + const contentCatalog = mockContentCatalog().spyOn('getById') + setInputFileContents('xref:component-b::#frag[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + 
expect(contentCatalog.getById).to.not.have.been.called() + expectUnresolvedPageLink(html, '#component-b::#frag', 'The Page Title') + }) + + it('should skip invalid page reference with empty content', () => { + const contentCatalog = mockContentCatalog().spyOn('getById') + setInputFileContents('xref:component-b::#frag[]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById).to.not.have.been.called() + expectUnresolvedPageLink(html, '#component-b::#frag', 'component-b::#frag') + }) + + it('should delegate to built-in converter to process an in-page reference', () => { + const contentCatalog = mockContentCatalog().spyOn('getById') + setInputFileContents('xref:section-a[]\n\n== Section A') + const config = { + attributes: { idprefix: '', idseparator: '-' }, + } + const html = loadAsciiDoc(inputFile, contentCatalog, config).convert() + expect(contentCatalog.getById).to.not.have.been.called() + expectLink(html, '#section-a', 'Section A') + }) + + it('should delegate to built-in converter to process a normal link', () => { + const contentCatalog = mockContentCatalog().spyOn('getById') + setInputFileContents('https://example.com[Example Domain]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById).to.not.have.been.called() + expectLink(html, 'https://example.com', 'Example Domain') + }) + + it('should skip unresolved page reference with explicit content', () => { + const contentCatalog = mockContentCatalog().spyOn('getById') + setInputFileContents('xref:4.5.6@component-b:module-b:topic-foo/topic-bar/the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-b', + version: '4.5.6', + module: 'module-b', + family: 'page', + relative: 'topic-foo/topic-bar/the-page.adoc', + }) + expect(contentCatalog.getById) + .nth(2) + .called.with({ + component: 'component-b', + 
version: '4.5.6', + module: 'module-b', + family: 'alias', + relative: 'topic-foo/topic-bar/the-page.adoc', + }) + expectUnresolvedPageLink( + html, + '#4.5.6@component-b:module-b:topic-foo/topic-bar/the-page.adoc', + 'The Page Title' + ) + }) + + it('should skip unresolved page reference with empty content', () => { + const contentCatalog = mockContentCatalog().spyOn('getById') + setInputFileContents('xref:4.5.6@component-b:module-b:topic-foo/topic-bar/the-page.adoc[]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-b', + version: '4.5.6', + module: 'module-b', + family: 'page', + relative: 'topic-foo/topic-bar/the-page.adoc', + }) + expect(contentCatalog.getById) + .nth(2) + .called.with({ + component: 'component-b', + version: '4.5.6', + module: 'module-b', + family: 'alias', + relative: 'topic-foo/topic-bar/the-page.adoc', + }) + expectUnresolvedPageLink( + html, + '#4.5.6@component-b:module-b:topic-foo/topic-bar/the-page.adoc', + '4.5.6@component-b:module-b:topic-foo/topic-bar/the-page.adoc' + ) + }) + + it('should skip unresolved page reference with fragment and explicit content', () => { + const contentCatalog = mockContentCatalog().spyOn('getById') + setInputFileContents('xref:4.5.6@component-b:module-b:topic-foo/topic-bar/the-page.adoc#frag[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-b', + version: '4.5.6', + module: 'module-b', + family: 'page', + relative: 'topic-foo/topic-bar/the-page.adoc', + }) + expectUnresolvedPageLink( + html, + '#4.5.6@component-b:module-b:topic-foo/topic-bar/the-page.adoc#frag', + 'The Page Title' + ) + }) + + it('should skip unresolved page reference with fragment and empty content', () => { + const contentCatalog = mockContentCatalog().spyOn('getById') + 
setInputFileContents('xref:4.5.6@component-b:module-b:topic-foo/topic-bar/the-page.adoc#frag[]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-b', + version: '4.5.6', + module: 'module-b', + family: 'page', + relative: 'topic-foo/topic-bar/the-page.adoc', + }) + expectUnresolvedPageLink( + html, + '#4.5.6@component-b:module-b:topic-foo/topic-bar/the-page.adoc#frag', + '4.5.6@component-b:module-b:topic-foo/topic-bar/the-page.adoc#frag' + ) + }) + + it('should skip page reference to non-publishable file', () => { + const contentCatalog = mockContentCatalog({ relative: '_hidden.adoc' }).spyOn('getById') + delete contentCatalog.getPages()[0].pub + setInputFileContents('xref:_hidden.adoc[Hidden Page]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: '_hidden.adoc', + }) + expectUnresolvedPageLink(html, '#_hidden.adoc', 'Hidden Page') + }) + + it('should convert a page reference with version, component, module, and page', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-b', + version: '4.5.6', + module: 'module-b', + family: 'page', + relative: 'the-page.adoc', + }).spyOn('getById') + setInputFileContents('xref:4.5.6@component-b:module-b:the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-b', + version: '4.5.6', + module: 'module-b', + family: 'page', + relative: 'the-page.adoc', + }) + expectPageLink(html, inputFile.pub.rootPath + '/component-b/4.5.6/module-b/the-page.html', 'The Page Title') + }) + + it('should convert a fully-qualified page reference', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-b', + 
version: '4.5.6', + module: 'module-b', + family: 'page', + relative: 'topic-foo/topic-bar/the-page.adoc', + }).spyOn('getById') + setInputFileContents('xref:4.5.6@component-b:module-b:topic-foo/topic-bar/the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-b', + version: '4.5.6', + module: 'module-b', + family: 'page', + relative: 'topic-foo/topic-bar/the-page.adoc', + }) + expectPageLink( + html, + inputFile.pub.rootPath + '/component-b/4.5.6/module-b/topic-foo/topic-bar/the-page.html', + 'The Page Title' + ) + }) + + it('should convert a fully-qualified page reference with fragment', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-b', + version: '4.5.6', + module: 'module-b', + family: 'page', + relative: 'topic-foo/the-page.adoc', + }).spyOn('getById') + setInputFileContents('xref:4.5.6@component-b:module-b:topic-foo/the-page.adoc#frag[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-b', + version: '4.5.6', + module: 'module-b', + family: 'page', + relative: 'topic-foo/the-page.adoc', + }) + expectPageLink( + html, + inputFile.pub.rootPath + '/component-b/4.5.6/module-b/topic-foo/the-page.html#frag', + 'The Page Title' + ) + }) + + it('should convert a page reference with version, module, and page', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-a', + version: '4.5.6', + module: 'module-b', + family: 'page', + relative: 'the-page.adoc', + }).spyOn('getById') + setInputFileContents('xref:4.5.6@module-b:the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: '4.5.6', + module: 'module-b', + family: 'page', + relative: 
'the-page.adoc', + }) + expectPageLink(html, '../4.5.6/module-b/the-page.html', 'The Page Title') + }) + + it('should convert a page reference with version, module, topic, and page', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-a', + version: '4.5.6', + module: 'module-b', + family: 'page', + relative: 'the-topic/the-page.adoc', + }).spyOn('getById') + setInputFileContents('xref:4.5.6@module-b:the-topic/the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: '4.5.6', + module: 'module-b', + family: 'page', + relative: 'the-topic/the-page.adoc', + }) + expectPageLink(html, '../4.5.6/module-b/the-topic/the-page.html', 'The Page Title') + }) + + it('should convert a page reference with version, component, and page', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-b', + version: '4.5.6', + module: 'ROOT', + family: 'page', + relative: 'the-page.adoc', + }).spyOn('getById') + setInputFileContents('xref:4.5.6@component-b::the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-b', + version: '4.5.6', + module: 'ROOT', + family: 'page', + relative: 'the-page.adoc', + }) + expectPageLink(html, inputFile.pub.rootPath + '/component-b/4.5.6/the-page.html', 'The Page Title') + }) + + it('should convert a page reference with version, component, topic, and page', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-b', + version: '4.5.6', + module: 'ROOT', + family: 'page', + relative: 'the-topic/the-page.adoc', + }).spyOn('getById') + setInputFileContents('xref:4.5.6@component-b::the-topic/the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + 
.called.with({ + component: 'component-b', + version: '4.5.6', + module: 'ROOT', + family: 'page', + relative: 'the-topic/the-page.adoc', + }) + expectPageLink(html, inputFile.pub.rootPath + '/component-b/4.5.6/the-topic/the-page.html', 'The Page Title') + }) + + it('should convert a page reference with component and page', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-b', + version: '1.1', + module: 'ROOT', + family: 'page', + relative: 'the-page.adoc', + }).spyOn('getById', 'getComponent') + setInputFileContents('xref:component-b::the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getComponent) + .nth(1) + .called.with('component-a') + expect(contentCatalog.getComponent) + .nth(2) + .called.with('component-b') + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-b', + version: '1.1', + module: 'ROOT', + family: 'page', + relative: 'the-page.adoc', + }) + expectPageLink(html, inputFile.pub.rootPath + '/component-b/1.1/the-page.html', 'The Page Title') + }) + + it('should convert a page reference with component, topic, and page', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-b', + version: '1.0', + module: 'ROOT', + family: 'page', + relative: 'the-topic/the-page.adoc', + }).spyOn('getById', 'getComponent') + setInputFileContents('xref:component-b::the-topic/the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getComponent) + .nth(1) + .called.with('component-a') + expect(contentCatalog.getComponent) + .nth(2) + .called.with('component-b') + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-b', + version: '1.0', + module: 'ROOT', + family: 'page', + relative: 'the-topic/the-page.adoc', + }) + expectPageLink(html, inputFile.pub.rootPath + '/component-b/1.0/the-topic/the-page.html', 'The Page Title') + }) + 
+ it('should convert a page reference with component, module, and page', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-b', + version: '2.0', + module: 'module-b', + family: 'page', + relative: 'the-page.adoc', + }).spyOn('getById', 'getComponent') + setInputFileContents('xref:component-b:module-b:the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getComponent) + .nth(1) + .called.with('component-a') + expect(contentCatalog.getComponent) + .nth(2) + .called.with('component-b') + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-b', + version: '2.0', + module: 'module-b', + family: 'page', + relative: 'the-page.adoc', + }) + expectPageLink(html, inputFile.pub.rootPath + '/component-b/2.0/module-b/the-page.html', 'The Page Title') + }) + + it('should convert a page reference with component, module, topic, and page', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-b', + version: 'master', + module: 'module-b', + family: 'page', + relative: 'the-topic/the-page.adoc', + }).spyOn('getById', 'getComponent') + setInputFileContents('xref:component-b:module-b:the-topic/the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getComponent) + .nth(1) + .called.with('component-a') + expect(contentCatalog.getComponent) + .nth(2) + .called.with('component-b') + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-b', + version: 'master', + module: 'module-b', + family: 'page', + relative: 'the-topic/the-page.adoc', + }) + expectPageLink( + html, + inputFile.pub.rootPath + '/component-b/module-b/the-topic/the-page.html', + 'The Page Title' + ) + }) + + it('should convert a page reference with version and page', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-a', + version: '4.5.6', + module: 
'module-a', + family: 'page', + relative: 'the-page.adoc', + }).spyOn('getById') + setInputFileContents('xref:4.5.6@the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: '4.5.6', + module: 'module-a', + family: 'page', + relative: 'the-page.adoc', + }) + expectPageLink(html, '../4.5.6/module-a/the-page.html', 'The Page Title') + }) + + it('should convert a page reference having a path that starts with @', () => { + const contentCatalog = mockContentCatalog({ relative: '@the-page.adoc' }).spyOn('getById') + setInputFileContents('xref:module-a:@the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: '@the-page.adoc', + }) + expectPageLink(html, '@the-page.html', 'The Page Title') + }) + + it('should convert a page reference having a path that starts with @ and a version', () => { + const contentCatalog = mockContentCatalog({ + version: '5.6.4', + relative: '@the-page.adoc', + }).spyOn('getById') + setInputFileContents('xref:5.6.4@@the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: '5.6.4', + module: 'module-a', + family: 'page', + relative: '@the-page.adoc', + }) + expectPageLink(html, '../5.6.4/module-a/@the-page.html', 'The Page Title') + }) + + it('should convert a page reference with version, topic, and page', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-a', + version: '4.5.6', + module: 'module-a', + family: 'page', + relative: 'the-topic/the-page.adoc', + }).spyOn('getById') + setInputFileContents('xref:4.5.6@the-topic/the-page.adoc[The Page 
Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: '4.5.6', + module: 'module-a', + family: 'page', + relative: 'the-topic/the-page.adoc', + }) + expectPageLink(html, '../4.5.6/module-a/the-topic/the-page.html', 'The Page Title') + }) + + it('should convert a page reference with module and page', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-a', + version: 'master', + module: 'module-b', + family: 'page', + relative: 'the-page.adoc', + }).spyOn('getById') + setInputFileContents('xref:module-b:the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-b', + family: 'page', + relative: 'the-page.adoc', + }) + expectPageLink(html, '../module-b/the-page.html', 'The Page Title') + }) + + it('should convert a page reference with module, topic, and page', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-a', + version: 'master', + module: 'module-b', + family: 'page', + relative: 'the-topic/the-page.adoc', + }).spyOn('getById') + setInputFileContents('xref:module-b:the-topic/the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-b', + family: 'page', + relative: 'the-topic/the-page.adoc', + }) + expectPageLink(html, '../module-b/the-topic/the-page.html', 'The Page Title') + }) + + it('should convert a basic page reference', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'the-page.adoc', + }).spyOn('getById') + setInputFileContents('xref:the-page.adoc[The Page 
Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'the-page.adoc', + }) + expectPageLink(html, 'the-page.html', 'The Page Title') + }) + + it('should convert a page reference that contains spaces', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'i like spaces.adoc', + }).spyOn('getById') + setInputFileContents('xref:i like spaces.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'i like spaces.adoc', + }) + expectPageLink(html, 'i%20like%20spaces.html', 'The Page Title') + }) + + it('should convert a basic page reference from within topic', () => { + const contentCatalog = mockContentCatalog([ + { + family: 'page', + relative: 'the-topic/the-page.adoc', + contents: 'xref:the-page.adoc[The Page Title]', + }, + { + family: 'page', + relative: 'the-page.adoc', + }, + ]).spyOn('getById') + inputFile = contentCatalog.getAll()[0] + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'the-page.adoc', + }) + expectPageLink(html, '../the-page.html', 'The Page Title') + }) + + it('should pass on attributes defined in xref macro', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'the-page.adoc', + }).spyOn('getById') + setInputFileContents('xref:the-page.adoc[The Page Title,role=secret,opts=nofollow]') + const html = 
loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'the-page.adoc', + }) + expect(html).to.include('The Page Title') + }) + + it('should convert a page reference with topic and page', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'the-topic/the-page.adoc', + }).spyOn('getById') + setInputFileContents('xref:the-topic/the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'the-topic/the-page.adoc', + }) + expectPageLink(html, 'the-topic/the-page.html', 'The Page Title') + }) + + it('should convert a page reference with sibling topic and page', () => { + const contentCatalog = mockContentCatalog([ + { + family: 'page', + relative: 'topic-a/the-page.adoc', + contents: 'xref:topic-b/the-page.adoc[The Page Title]', + }, + { + family: 'page', + relative: 'topic-b/the-page.adoc', + }, + ]).spyOn('getById') + inputFile = contentCatalog.getAll()[0] + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'topic-b/the-page.adoc', + }) + expectPageLink(html, '../topic-b/the-page.html', 'The Page Title') + }) + + it('should convert a page reference to self', () => { + const contentCatalog = mockContentCatalog({ + family: 'page', + relative: 'this-page.adoc', + contents: 'xref:module-a:this-page.adoc[Link to Self]', + }).spyOn('getById') + inputFile = contentCatalog.getAll()[0] + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + 
expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'this-page.adoc', + }) + expectPageLink(html, 'this-page.html', 'Link to Self') + }) + + it('should convert a page reference to self with empty fragment', () => { + const contentCatalog = mockContentCatalog({ + family: 'page', + relative: 'this-page.adoc', + contents: 'xref:module-a:this-page.adoc#[Link to Self]', + }).spyOn('getById') + inputFile = contentCatalog.getAll()[0] + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'this-page.adoc', + }) + expectPageLink(html, 'this-page.html', 'Link to Self') + }) + + it('should convert a deep page reference to self', () => { + const contentCatalog = mockContentCatalog({ + family: 'page', + relative: 'this-page.adoc', + contents: + 'xref:module-a:this-page.adoc#the-fragment[Deep Link to Self]\n\n[#the-fragment]\n== Target Section', + }).spyOn('getById') + inputFile = contentCatalog.getAll()[0] + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'this-page.adoc', + }) + expectLink(html, '#the-fragment', 'Deep Link to Self') + }) + + it('should convert a deep page reference to self with implicit content', () => { + const contentCatalog = mockContentCatalog({ + family: 'page', + relative: 'this-page.adoc', + contents: 'xref:module-a:this-page.adoc#the-fragment[]\n\n[#the-fragment]\n== Target Section', + }).spyOn('getById') + inputFile = contentCatalog.getAll()[0] + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 
'master', + module: 'module-a', + family: 'page', + relative: 'this-page.adoc', + }) + expectLink(html, '#the-fragment', 'Target Section') + }) + + it('should convert a deep page reference to self that matches docname', () => { + const contentCatalog = mockContentCatalog({ + family: 'page', + relative: 'this-page.adoc', + contents: 'xref:this-page.adoc#the-fragment[Deep Link to Self]', + }).spyOn('getById') + inputFile = contentCatalog.getAll()[0] + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById).to.not.have.been.called() + expectLink(html, '#the-fragment', 'Deep Link to Self') + }) + + it('should convert a page reference to a root relative path if relativizePageRefs is disabled', () => { + const contentCatalog = mockContentCatalog([ + { + family: 'page', + relative: 'this-page.adoc', + contents: 'xref:that-page.adoc[The Page Title]', + }, + { + family: 'page', + relative: 'that-page.adoc', + }, + ]).spyOn('getById') + inputFile = contentCatalog.getAll()[0] + const html = loadAsciiDoc(inputFile, contentCatalog, { relativizePageRefs: false }).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'that-page.adoc', + }) + expectPageLink(html, '/component-a/module-a/that-page.html', 'The Page Title') + }) + + it('should convert a page reference with module and page using indexified URLs', () => { + const contentCatalog = mockContentCatalog([ + { + family: 'page', + relative: 'this-page.adoc', + contents: 'xref:module-b:that-page.adoc[The Page Title]', + indexify: true, + }, + { + module: 'module-b', + family: 'page', + relative: 'that-page.adoc', + indexify: true, + }, + ]).spyOn('getById') + inputFile = contentCatalog.getAll()[0] + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + 
module: 'module-b', + family: 'page', + relative: 'that-page.adoc', + }) + expectPageLink(html, '../../module-b/that-page/', 'The Page Title') + }) + + it('should convert a page reference with topic and page using indexified URLs', () => { + const contentCatalog = mockContentCatalog([ + { + family: 'page', + relative: 'this-page.adoc', + contents: 'xref:the-topic/that-page.adoc[The Page Title]', + indexify: true, + }, + { + family: 'page', + relative: 'the-topic/that-page.adoc', + indexify: true, + }, + ]).spyOn('getById') + inputFile = contentCatalog.getAll()[0] + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'the-topic/that-page.adoc', + }) + expectPageLink(html, '../the-topic/that-page/', 'The Page Title') + }) + + it('should convert a basic page reference from within a topic using indexified URLs', () => { + const contentCatalog = mockContentCatalog([ + { + family: 'page', + relative: 'topic-a/this-page.adoc', + contents: 'xref:that-page.adoc[The Page Title]', + indexify: true, + }, + { + family: 'page', + relative: 'that-page.adoc', + indexify: true, + }, + ]).spyOn('getById') + inputFile = contentCatalog.getAll()[0] + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'that-page.adoc', + }) + expectPageLink(html, '../../that-page/', 'The Page Title') + }) + + it('should convert a page reference to self using indexified URLs', () => { + const contentCatalog = mockContentCatalog({ + family: 'page', + relative: 'this-page.adoc', + contents: 'xref:module-a:this-page.adoc[Link to Self]', + indexify: true, + }).spyOn('getById') + inputFile = contentCatalog.getAll()[0] + const html = loadAsciiDoc(inputFile, 
contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'this-page.adoc', + }) + expectPageLink(html, './', 'Link to Self') + }) + + it('should convert a page reference to self with empty fragment using indexified URLs', () => { + const contentCatalog = mockContentCatalog({ + family: 'page', + relative: 'this-page.adoc', + contents: 'xref:module-a:this-page.adoc#[Link to Self]', + indexify: true, + }).spyOn('getById') + inputFile = contentCatalog.getAll()[0] + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'this-page.adoc', + }) + expectPageLink(html, './', 'Link to Self') + }) + + it('should convert a deep page reference to self using indexified URLs', () => { + const contentCatalog = mockContentCatalog({ + family: 'page', + relative: 'this-page.adoc', + contents: 'xref:module-a:this-page.adoc#the-fragment[Deep Link to Self]', + indexify: true, + }).spyOn('getById') + inputFile = contentCatalog.getAll()[0] + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'this-page.adoc', + }) + expectLink(html, '#the-fragment', 'Deep Link to Self') + }) + + it('should convert a page reference to self that matches docname using indexified URLs', () => { + const contentCatalog = mockContentCatalog({ + family: 'page', + relative: 'this-page.adoc', + contents: 'xref:module-a:this-page.adoc#the-fragment[Deep Link to Self]', + indexify: true, + }).spyOn('getById') + inputFile = contentCatalog.getAll()[0] + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) 
+ .called.with({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'this-page.adoc', + }) + expectLink(html, '#the-fragment', 'Deep Link to Self') + }) + + it('should use xreftext of target page as content if content not specified', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-a', + version: 'master', + module: 'module-b', + family: 'page', + relative: 'the-topic/the-page.adoc', + }).spyOn('getById') + const targetPage = contentCatalog.getFiles()[0] + targetPage.asciidoc = { doctitle: 'Page Title', xreftext: 'reference me' } + setInputFileContents('xref:module-b:the-topic/the-page.adoc#[]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-b', + family: 'page', + relative: 'the-topic/the-page.adoc', + }) + expectPageLink(html, '../module-b/the-topic/the-page.html', 'reference me') + }) + + it('should use page ID spec of target page as content if content not specified and target has no xreftext', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-a', + version: 'master', + module: 'module-b', + family: 'page', + relative: 'the-topic/the-page.adoc', + }).spyOn('getById') + setInputFileContents('xref:module-b:the-topic/the-page.adoc[]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-b', + family: 'page', + relative: 'the-topic/the-page.adoc', + }) + expectPageLink(html, '../module-b/the-topic/the-page.html', 'module-b:the-topic/the-page.adoc') + }) + + it('should use page ID spec as content for page reference with fragment if content not specified', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-a', + version: 'master', + module: 'module-b', + 
family: 'page', + relative: 'the-topic/the-page.adoc', + }).spyOn('getById') + const targetPage = contentCatalog.getFiles()[0] + targetPage.asciidoc = { doctitle: 'Page Title', xreftext: 'page title' } + setInputFileContents('xref:module-b:the-topic/the-page.adoc#frag[]') + const html = loadAsciiDoc(inputFile, contentCatalog).convert() + expect(contentCatalog.getById) + .nth(1) + .called.with({ + component: 'component-a', + version: 'master', + module: 'module-b', + family: 'page', + relative: 'the-topic/the-page.adoc', + }) + expectPageLink(html, '../module-b/the-topic/the-page.html#frag', 'module-b:the-topic/the-page.adoc#frag') + }) + + it('should not fail to process page reference if fragment attribute is not set', () => { + const contentCatalog = mockContentCatalog({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'the-page.adoc', + }) + setInputFileContents('man:the-page[]') + const extension = function () { + this.process((parent, target, attrs) => + this.createInline(parent, 'anchor', target, { + type: 'xref', + target, + attributes: global.Opal.hash({ refid: target, path: target }), + }) + ) + } + extension.register = (registry) => registry.inlineMacro('man', extension) + const config = { extensions: [extension] } + const html = loadAsciiDoc(inputFile, contentCatalog, config).convert() + expectPageLink(html, 'the-page.html', 'the-page') + }) + + it('should process xref inside of a footnote macro', () => { + const contentCatalog = mockContentCatalog({ + component: 'relnotes', + version: '6.5', + module: 'ROOT', + family: 'page', + relative: 'index.adoc', + }) + ;[ + 'xref:6.5@relnotes::index.adoc[completely removed\\]', + '<<6.5@relnotes::index.adoc#,completely removed>>', + ].forEach((pageMacro) => { + const contents = `Text.footnote:[Support for pixie dust has been ${pageMacro}.]` + setInputFileContents(contents) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const html = doc.convert() + 
expect(doc.getCatalog().footnotes).to.have.length(1) + expectPageLink(html, '../../relnotes/6.5/index.html', 'completely removed') + expect(html).to.include('>completely removed.') + }) + }) + + it('should allow footnote text to be defined and inserted using attribute', () => { + const contentCatalog = mockContentCatalog({ + component: 'relnotes', + version: '6.5', + module: 'ROOT', + family: 'page', + relative: 'index.adoc', + }) + // Why was the final ] escaped? + ;[ + 'xref:6.5@relnotes::index.adoc[completely removed]', + '<<6.5@relnotes::index.adoc#,completely removed>>', + ].forEach((pageMacro) => { + const contents = heredoc` + :fn-text: pass:n[Support for pixie dust has been ${pageMacro}.] + + Text.footnote:pixiedust[{fn-text}] + + More text.footnote:pixiedust[] + ` + setInputFileContents(contents) + const doc = loadAsciiDoc(inputFile, contentCatalog) + const html = doc.convert() + expect(doc.getCatalog().footnotes).to.have.length(1) + expectPageLink(html, '../../relnotes/6.5/index.html', 'completely removed') + expect(html).to.include('completely removed.') + }) + }) + }) + }) + ) + + describe('resolveConfig()', () => { + it('should return config with built-in attributes if site and asciidoc categories not set in playbook', () => { + const config = resolveConfig() + expect(config.attributes).to.exist() + expect(config.attributes).to.include({ + env: 'site', + 'site-gen': 'antora', + 'attribute-missing': 'warn', + }) + expect(config.attributes['site-title']).to.not.exist() + expect(config.attributes['site-url']).to.not.exist() + expect(config.extensions).to.not.exist() + }) + + it('should return config with attributes for site title and url if set in playbook', () => { + const playbook = { site: { url: 'https://docs.example.org', title: 'Docs' }, ui: {} } + const config = resolveConfig(playbook) + expect(config.attributes).to.exist() + expect(config.attributes).to.include({ + 'site-title': 'Docs', + 'site-url': 'https://docs.example.org', + }) + }) + + 
it('should return a copy of the asciidoc category in the playbook', () => { + const playbook = { + asciidoc: { + attributes: { + idprefix: '', + idseparator: '-', + }, + }, + } + const config = resolveConfig(playbook) + expect(config).to.not.equal(playbook.asciidoc) + expect(config.attributes).to.not.equal(playbook.asciidoc.attributes) + expect(config.attributes).to.include(playbook.asciidoc.attributes) + }) + + it('should not load extensions if extensions are not defined', () => { + const playbook = { asciidoc: {} } + const config = resolveConfig(playbook) + expect(config.extensions).to.not.exist() + }) + + it('should not load extensions if extensions are empty', () => { + const playbook = { asciidoc: { extensions: [] } } + const config = resolveConfig(playbook) + expect(config.extensions).to.not.exist() + }) + + it('should load scoped extension into config but not register it globally', () => { + const playbook = { asciidoc: { extensions: [ospath.resolve(FIXTURES_DIR, 'ext/scoped-shout-block.js')] } } + const config = resolveConfig(playbook) + expect(config.extensions).to.exist() + expect(config.extensions).to.have.lengthOf(1) + expect(config.extensions[0]).to.be.instanceOf(Function) + const Extensions = Asciidoctor.Extensions + const extensionGroupNames = Object.keys(Extensions.getGroups()) + expect(extensionGroupNames).to.have.lengthOf(0) + }) + + it('should load global extension and register it globally', () => { + const playbook = { asciidoc: { extensions: [ospath.resolve(FIXTURES_DIR, 'ext/global-shout-block.js')] } } + const config = resolveConfig(playbook) + expect(config.extensions).to.not.exist() + const Extensions = Asciidoctor.Extensions + const extensionGroupNames = Object.keys(Extensions.getGroups()) + expect(extensionGroupNames).to.have.lengthOf(1) + Extensions.unregisterAll() + }) + + it('should only register a global extension once', () => { + const playbook = { asciidoc: { extensions: [ospath.resolve(FIXTURES_DIR, 'ext/global-shout-block.js')] } 
} + resolveConfig(playbook) + resolveConfig(playbook) + const Extensions = Asciidoctor.Extensions + const extensionGroupNames = Object.keys(Extensions.getGroups()) + expect(extensionGroupNames).to.have.lengthOf(1) + Extensions.unregisterAll() + }) + + it('should load extension relative to playbook dir', () => { + const playbook = { + dir: FIXTURES_DIR, + asciidoc: { + extensions: ['./ext/scoped-shout-block.js'], + }, + } + const config = resolveConfig(playbook) + expect(config.extensions).to.exist() + expect(config.extensions).to.have.lengthOf(1) + expect(config.extensions[0]).to.be.instanceOf(Function) + }) + + it('should load extension from modules path', () => { + const playbook = { + dir: FIXTURES_DIR, + asciidoc: { + extensions: ['lorem-block-macro'], + }, + } + const config = resolveConfig(playbook) + expect(config.extensions).to.exist() + expect(config.extensions).to.have.lengthOf(1) + expect(config.extensions[0]).to.be.instanceOf(Function) + }) + + it('should load all extensions', () => { + const playbook = { + dir: FIXTURES_DIR, + asciidoc: { + extensions: [ + './ext/scoped-shout-block.js', + 'lorem-block-macro', + ospath.resolve(FIXTURES_DIR, 'ext/global-shout-block.js'), + ], + }, + } + const config = resolveConfig(playbook) + expect(config.extensions).to.exist() + expect(config.extensions).to.have.lengthOf(2) + expect(config.extensions[0]).to.be.instanceOf(Function) + expect(config.extensions[1]).to.be.instanceOf(Function) + const Extensions = Asciidoctor.Extensions + const extensionGroupNames = Object.keys(Extensions.getGroups()) + expect(extensionGroupNames).to.have.lengthOf(1) + Extensions.unregisterAll() + }) + + it('should not load converters if converters are not defined', () => { + const playbook = { asciidoc: {} } + const config = resolveConfig(playbook) + expect(config.converters).to.not.exist() + }) + + it('should not load converters if converters are empty', () => { + const playbook = { asciidoc: { converters: [] } } + const config = 
resolveConfig(playbook) + expect(config.converters).to.not.exist() + }) + + it('should load specified converters', () => { + const playbook = { + dir: FIXTURES_DIR, + asciidoc: { + converters: ['./converter/delegating-converter.js', './converter/nondelegating-converter.js'], + }, + } + const config = resolveConfig(playbook) + expect(config.converters).to.exist() + expect(config.converters).to.have.lengthOf(2) + expect(config.converters[0]).to.be.instanceOf(Function) + expect(config.converters[0]().name).to.equal('delegating-converter') + expect(config.converters[1]).to.be.instanceOf(Function) + expect(config.converters[1]().name).to.equal('nondelegating-converter') + }) + }) + const chai = require('chai') + describe('converter', () => { + it('should not convert a page reference with topic and page using nondelegating converter', () => { + const playbook = { + dir: FIXTURES_DIR, + asciidoc: { + converters: ['./converter/nondelegating-converter.js'], + }, + } + const config = resolveConfig(playbook) + const contentCatalog = mockContentCatalog({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'the-topic/the-page.adoc', + }).spyOn('getById') + setInputFileContents('xref:the-topic/the-page.adoc[The Page Title]') + const html = loadAsciiDoc(inputFile, contentCatalog, config).convert() + chai.expect(contentCatalog.getById).to.have.been.called.exactly(0) + expect(html).to.include('
\n

undefined

\n
') + }) + it('should provide access to the base converter', () => { + const playbook = { + dir: FIXTURES_DIR, + asciidoc: { + converters: [ + './converter/delegating-converter.js', + './converter/base-access-converter.js', + './converter/delegating-converter.js', + ], + }, + } + const config = resolveConfig(playbook) + const contentCatalog = mockContentCatalog({ + component: 'component-a', + version: 'master', + module: 'module-a', + family: 'page', + relative: 'the-topic/the-page.adoc', + }).spyOn('getById') + setInputFileContents(`= The Title + +== The Subtitle + +=== Level Three + +`) + const html = loadAsciiDoc(inputFile, contentCatalog, config).convert() + chai.expect(contentCatalog.getById).to.have.been.called.exactly(0) + expect(html).to.include('
    ') + }) + }) +}) diff --git a/packages/cli/test/fixtures/simple-generator.js b/packages/cli/test/fixtures/simple-generator.js index 08833c6e3..78db6c2ee 100644 --- a/packages/cli/test/fixtures/simple-generator.js +++ b/packages/cli/test/fixtures/simple-generator.js @@ -4,7 +4,7 @@ const buildPlaybook = require('@antora/playbook-builder') const publishSite = require('@antora/site-publisher') async function generateSite (args, env) { - const playbook = buildPlaybook(args, env) + const playbook = await buildPlaybook(args, env) const siteCatalog = { getFiles: () => [create418Page()] } return publishSite(playbook, [siteCatalog]) } diff --git a/packages/content-aggregator/lib/aggregate-content.js b/packages/content-aggregator/lib/aggregate-content.js index 620ca4541..ea9870a8b 100644 --- a/packages/content-aggregator/lib/aggregate-content.js +++ b/packages/content-aggregator/lib/aggregate-content.js @@ -32,6 +32,7 @@ const { GIT_OPERATION_LABEL_LENGTH, GIT_PROGRESS_PHASES, VALID_STATE_FILENAME, + ON_COMPONENT_DESCRIPTOR, } = require('./constants') const ANY_SEPARATOR_RX = /[:/]/ @@ -73,10 +74,27 @@ const URL_AUTH_EXTRACTOR_RX = /^(https?:\/\/)(?:([^/:@]+)?(?::([^/@]+)?)?@)?(.*) * @param {Boolean} [playbook.git.ensureGitSuffix=true] - Whether the .git * suffix is automatically appended to each repository URL, if missing. * @param {Array} playbook.content - An array of content sources. + * @param {EventEmitter} eventEmitter - global event emitter for plugins * * @returns {Promise} A map of files organized by component version. 
*/ -function aggregateContent (playbook) { +function aggregateContent (playbook, eventEmitter) { + if (!eventEmitter) { + const baseEmitter = new EventEmitter() + + eventEmitter = { + + emit: async (name, ...args) => { + const promises = [] + baseEmitter.emit(name, promises, ...args) + promises.length && await Promise.all(promises) + }, + + on: (name, listener) => baseEmitter.on(name, (promises, ...args) => promises.push(listener(...args))), + + listenerCount: (name) => baseEmitter.listenerCount(name), + } + } const startDir = playbook.dir || '.' const { branches, editUrl, tags, sources } = playbook.content const sourcesByUrl = sources.reduce( @@ -106,7 +124,7 @@ function aggregateContent (playbook) { // NOTE if repository is managed (has a url), we can assume the remote name is origin // TODO if the repo has no remotes, then remoteName should be undefined const remoteName = repo.url ? 'origin' : source.remote || 'origin' - return collectFilesFromSource(source, repo, remoteName, authStatus) + return collectFilesFromSource(source, repo, remoteName, authStatus, eventEmitter) }) ) ) @@ -219,10 +237,11 @@ function extractCredentials (url) { } } -async function collectFilesFromSource (source, repo, remoteName, authStatus) { +async function collectFilesFromSource (source, repo, remoteName, authStatus, eventEmitter) { const originUrl = repo.url || (await resolveRemoteUrl(repo, remoteName)) return selectReferences(source, repo, remoteName).then((refs) => - Promise.all(refs.map((ref) => collectFilesFromReference(source, repo, remoteName, authStatus, ref, originUrl))) + Promise.all(refs.map((ref) => + collectFilesFromReference(source, repo, remoteName, authStatus, ref, originUrl, eventEmitter))) ) } @@ -329,7 +348,7 @@ function getCurrentBranchName (repo, remote) { return refPromise.then((ref) => (ref.startsWith('refs/') ? 
ref.replace(SHORTEN_REF_RX, '') : undefined)) } -async function collectFilesFromReference (source, repo, remoteName, authStatus, ref, originUrl) { +async function collectFilesFromReference (source, repo, remoteName, authStatus, ref, originUrl, eventEmitter) { const url = repo.url const displayUrl = url || repo.dir const editUrl = source.editUrl @@ -353,24 +372,28 @@ async function collectFilesFromReference (source, repo, remoteName, authStatus, } return Promise.all( startPaths.map((startPath) => - collectFilesFromStartPath(startPath, repo, authStatus, ref, worktreePath, originUrl, editUrl) + collectFilesFromStartPath(startPath, repo, authStatus, ref, worktreePath, originUrl, editUrl, eventEmitter) ) ) } const startPath = cleanStartPath(coerceToString(source.startPath)) - return collectFilesFromStartPath(startPath, repo, authStatus, ref, worktreePath, originUrl, editUrl) + return collectFilesFromStartPath(startPath, repo, authStatus, ref, worktreePath, originUrl, editUrl, eventEmitter) } -function collectFilesFromStartPath (startPath, repo, authStatus, ref, worktreePath, originUrl, editUrl) { +function collectFilesFromStartPath (startPath, repo, authStatus, ref, worktreePath, originUrl, editUrl, eventEmitter) { return (worktreePath ? 
readFilesFromWorktree(worktreePath, startPath) : readFilesFromGitTree(repo, ref.oid, startPath) ) - .then((files) => { + .then(async (files) => { const componentVersionBucket = loadComponentDescriptor(files) const origin = computeOrigin(originUrl, authStatus, ref, startPath, worktreePath, editUrl) - componentVersionBucket.files = files.map((file) => assignFileProperties(file, origin)) - return componentVersionBucket + return eventEmitter.emit(ON_COMPONENT_DESCRIPTOR, + { componentDescriptor: componentVersionBucket, files, startPath, repo, authStatus, ref, worktreePath, origin }) + .then(() => { + componentVersionBucket.files = files.map((file) => assignFileProperties(file, origin)) + return componentVersionBucket + }) }) .catch((err) => { const refInfo = `ref: ${ref.fullname.replace(/^heads\//, '')}${worktreePath ? ' ' : ''}` diff --git a/packages/content-aggregator/lib/constants.js b/packages/content-aggregator/lib/constants.js index 03a4e5070..9c1059a8c 100644 --- a/packages/content-aggregator/lib/constants.js +++ b/packages/content-aggregator/lib/constants.js @@ -11,4 +11,6 @@ module.exports = Object.freeze({ GIT_OPERATION_LABEL_LENGTH: 8, GIT_PROGRESS_PHASES: ['Counting objects', 'Compressing objects', 'Receiving objects', 'Resolving deltas'], VALID_STATE_FILENAME: 'valid', + // events + ON_COMPONENT_DESCRIPTOR: 'onComponentDescriptor', }) diff --git a/packages/content-aggregator/test/aggregate-content-test.js b/packages/content-aggregator/test/aggregate-content-test.js index 31a47f99e..77066f6da 100644 --- a/packages/content-aggregator/test/aggregate-content-test.js +++ b/packages/content-aggregator/test/aggregate-content-test.js @@ -7,6 +7,7 @@ const aggregateContent = require('@antora/content-aggregator') const computeOrigin = aggregateContent._computeOrigin const { createHash } = require('crypto') const { execFile } = require('child_process') +const EventEmitter = require('events') const fs = require('fs-extra') const getCacheDir = 
require('cache-directory') const GitServer = require('node-git-server') @@ -21,6 +22,7 @@ const { CONTENT_CACHE_FOLDER, GIT_CORE, GIT_OPERATION_LABEL_LENGTH, + ON_COMPONENT_DESCRIPTOR, } = require('@antora/content-aggregator/lib/constants') const CACHE_DIR = getCacheDir('antora-test') const CONTENT_CACHE_DIR = ospath.join(CACHE_DIR, CONTENT_CACHE_FOLDER) @@ -1038,6 +1040,50 @@ describe('aggregateContent()', function () { expect(aggregate).to.have.lengthOf(1) expect(aggregate[0]).to.include(componentDesc) }) + describe('onComponentDescriptor event', () => { + let eventEmitter + + beforeEach(() => { + const baseEmitter = new EventEmitter() + + eventEmitter = { + + emit: async (name, ...args) => { + const promises = [] + baseEmitter.emit(name, promises, ...args) + promises.length && await Promise.all(promises) + }, + + on: (name, listener) => baseEmitter.on(name, (promises, ...args) => promises.push(listener(...args))), + + listenerCount: (name) => baseEmitter.listenerCount(name), + } + }) + + it('can register an onComponentDescriptor pipeline extension', async () => { + var pluginArgs + eventEmitter.on(ON_COMPONENT_DESCRIPTOR, + (args) => { pluginArgs = args }) + const repoBuilder = new RepositoryBuilder(CONTENT_REPOS_DIR, FIXTURES_DIR) + const componentDesc = { + name: 'the-component', + title: 'The Component', + version: 'v1.2.3', + } + await initRepoWithComponentDescriptor(repoBuilder, componentDesc) + const newWorkDir = ospath.join(WORK_DIR, 'some-other-folder') + fs.ensureDirSync(newWorkDir) + process.chdir(newWorkDir) + playbookSpec.dir = WORK_DIR + playbookSpec.content.sources.push({ url: ospath.relative(newWorkDir, repoBuilder.url) }) + let aggregate + const aggregateContentDeferred = await deferExceptions(aggregateContent, playbookSpec, eventEmitter) + expect(() => (aggregate = aggregateContentDeferred())).to.not.throw() + expect(aggregate).to.have.lengthOf(1) + expect(aggregate[0]).to.include(componentDesc) + 
expect(pluginArgs.componentDescriptor).to.include(componentDesc) + }) + }) }) describe('filter refs', () => { diff --git a/packages/document-converter/lib/convert-documents.js b/packages/document-converter/lib/convert-documents.js index 0694ba8cb..5e6539dd1 100644 --- a/packages/document-converter/lib/convert-documents.js +++ b/packages/document-converter/lib/convert-documents.js @@ -2,6 +2,7 @@ const convertDocument = require('./convert-document') const { loadAsciiDoc, extractAsciiDocMetadata } = require('@antora/asciidoc-loader') +const ON_DOCUMENT_HEADERS_PARSED = 'onDocumentHeadersParsed' /** * Converts the contents of publishable pages with the media type text/asciidoc @@ -25,7 +26,7 @@ const { loadAsciiDoc, extractAsciiDocMetadata } = require('@antora/asciidoc-load * * @returns {Array} The publishable virtual files in the page family taken from the content catalog. */ -function convertDocuments (contentCatalog, siteAsciiDocConfig = {}) { +async function convertDocuments (contentCatalog, siteAsciiDocConfig = {}, eventEmitter) { const mainAsciiDocConfigs = new Map() contentCatalog.getComponents().forEach(({ name: component, versions }) => { versions.forEach(({ version, asciidoc }) => { @@ -37,7 +38,7 @@ function convertDocuments (contentCatalog, siteAsciiDocConfig = {}) { for (const [cacheKey, mainAsciiDocConfig] of mainAsciiDocConfigs) { headerAsciiDocConfigs.set(cacheKey, Object.assign({}, mainAsciiDocConfig, headerOverrides)) } - return contentCatalog + const pagesWithHeaders = contentCatalog .getPages((page) => page.out) .map((page) => { if (page.mediaType === 'text/asciidoc') { @@ -55,6 +56,10 @@ function convertDocuments (contentCatalog, siteAsciiDocConfig = {}) { } return page }) + if (eventEmitter) { + eventEmitter.emit(ON_DOCUMENT_HEADERS_PARSED, { pagesWithHeaders }) + } + return pagesWithHeaders .map((page) => page.mediaType === 'text/asciidoc' ? 
convertDocument(page, contentCatalog, mainAsciiDocConfigs.get(buildCacheKey(page.src)) || siteAsciiDocConfig) diff --git a/packages/document-converter/test/convert-documents-test.js b/packages/document-converter/test/convert-documents-test.js index a846499c0..f6f1f5880 100644 --- a/packages/document-converter/test/convert-documents-test.js +++ b/packages/document-converter/test/convert-documents-test.js @@ -11,13 +11,13 @@ describe('convertDocuments()', () => { const asciidocConfig = resolveAsciiDocConfig() const expectPageLink = (html, url, content) => expect(html).to.include(`${content}`) - it('should run on all files in the page family', () => { + it('should run on all files in the page family', async () => { const contentCatalog = mockContentCatalog().spyOn('getPages') - convertDocuments(contentCatalog) + await convertDocuments(contentCatalog) expect(contentCatalog.getPages).to.have.been.called() }) - it('should only process and return publishable files from the page family in the content catalog', () => { + it('should only process and return publishable files from the page family in the content catalog', async () => { const contentCatalog = mockContentCatalog([ { relative: 'index.adoc', @@ -48,13 +48,13 @@ describe('convertDocuments()', () => { ]) const attributesFile = contentCatalog.getAll().find((f) => f.src.relative === '_attributes.adoc') const attributesFileContents = attributesFile.contents - const pages = convertDocuments(contentCatalog, asciidocConfig) + const pages = await convertDocuments(contentCatalog, asciidocConfig) expect(pages).to.have.lengthOf(2) pages.forEach((page) => expect(page.src.mediaType).to.equal('text/asciidoc')) expect(attributesFile.contents).to.equal(attributesFileContents) }) - it('should convert contents of files in page family to embeddable HTML', () => { + it('should convert contents of files in page family to embeddable HTML', async () => { const contentCatalog = mockContentCatalog([ { relative: 'index.adoc', @@ -67,7 +67,7 
@@ describe('convertDocuments()', () => { mediaType: 'text/asciidoc', }, ]) - const pages = convertDocuments(contentCatalog, asciidocConfig) + const pages = await convertDocuments(contentCatalog, asciidocConfig) expect(pages).to.have.lengthOf(2) pages.forEach((page) => { expect(page.mediaType).to.equal('text/html') @@ -75,7 +75,7 @@ describe('convertDocuments()', () => { }) }) - it('should remove src.contents property after all documents are converted', () => { + it('should remove src.contents property after all documents are converted', async () => { const contentCatalog = mockContentCatalog([ { relative: 'index.adoc', @@ -89,14 +89,14 @@ describe('convertDocuments()', () => { }, ]) expect(asciidocConfig).to.not.have.nested.property('attributes.page-partial') - const pages = convertDocuments(contentCatalog, asciidocConfig) + const pages = await convertDocuments(contentCatalog, asciidocConfig) expect(pages).to.have.lengthOf(2) pages.forEach((page) => { expect(page.src).to.not.have.property('contents') }) }) - it('should assign relevant properties to asciidoc property on file object', () => { + it('should assign relevant properties to asciidoc property on file object', async () => { const contentCatalog = mockContentCatalog([ { relative: 'index.adoc', @@ -114,7 +114,7 @@ describe('convertDocuments()', () => { mediaType: 'text/asciidoc', }, ]) - const pages = convertDocuments(contentCatalog, asciidocConfig) + const pages = await convertDocuments(contentCatalog, asciidocConfig) expect(pages).to.have.lengthOf(3) pages.forEach((page) => { expect(page).to.have.nested.property('asciidoc.attributes') @@ -133,7 +133,7 @@ describe('convertDocuments()', () => { expect(untitledPage).to.not.have.nested.property('asciidoc.navtitle') }) - it('should assign value of doctitle to title property on file', () => { + it('should assign value of doctitle to title property on file', async () => { const contentCatalog = mockContentCatalog([ { relative: 'index.adoc', @@ -141,13 +141,13 @@ 
describe('convertDocuments()', () => { mediaType: 'text/asciidoc', }, ]) - const pages = convertDocuments(contentCatalog, asciidocConfig) + const pages = await convertDocuments(contentCatalog, asciidocConfig) expect(pages).to.have.lengthOf(1) const homePage = pages.find((it) => it.src.relative === 'index.adoc') expect(homePage.title).to.equal('Welcome') }) - it('should convert contents to embeddable HTML using default settings if AsciiDoc config not provided', () => { + it('should convert contents to embeddable HTML using default settings if AsciiDoc config not provided', async () => { const contentCatalog = mockContentCatalog([ { relative: 'index.adoc', @@ -160,7 +160,7 @@ describe('convertDocuments()', () => { mediaType: 'text/asciidoc', }, ]) - const pages = convertDocuments(contentCatalog) + const pages = await convertDocuments(contentCatalog) expect(pages).to.have.lengthOf(1) pages.forEach((page) => { expect(page.mediaType).to.equal('text/html') @@ -168,7 +168,7 @@ describe('convertDocuments()', () => { }) }) - it('should use AsciiDoc config scoped to component version, if available', () => { + it('should use AsciiDoc config scoped to component version, if available', async () => { const contentCatalog = mockContentCatalog([ { relative: 'index.adoc', @@ -181,7 +181,7 @@ describe('convertDocuments()', () => { asciidoc: { attributes: { experimental: '' } }, }) expect(asciidocConfig.attributes).to.not.have.property('experimental') - const pages = convertDocuments(contentCatalog, asciidocConfig) + const pages = await convertDocuments(contentCatalog, asciidocConfig) expect(pages).to.have.lengthOf(1) pages.forEach((page) => { expect(page.mediaType).to.equal('text/html') @@ -189,7 +189,7 @@ describe('convertDocuments()', () => { }) }) - it('should only convert documents that have the text/asciidoc media type', () => { + it('should only convert documents that have the text/asciidoc media type', async () => { const contentCatalog = mockContentCatalog([ { relative: 
'index.adoc', @@ -202,7 +202,7 @@ describe('convertDocuments()', () => { mediaType: 'text/html', }, ]) - const pages = convertDocuments(contentCatalog, asciidocConfig) + const pages = await convertDocuments(contentCatalog, asciidocConfig) expect(pages[0].contents.toString()).to.equal(heredoc`

    This one should be converted.

    @@ -211,7 +211,7 @@ describe('convertDocuments()', () => { expect(pages[1].contents.toString()).to.equal('

    This one should not be converted.

    ') }) - it('should only convert documents that have the text/asciidoc media type even if the asciidoc property set', () => { + it('should only convert documents that have the text/asciidoc media type even if the asciidoc property set', async () => { const contentCatalog = mockContentCatalog([ { relative: 'index.adoc', @@ -228,7 +228,7 @@ describe('convertDocuments()', () => { }, ]) contentCatalog.getPages().find(({ src }) => src.relative === 'other.html').asciidoc = { doctitle: 'Hello, HTML!' } - const pages = convertDocuments(contentCatalog, asciidocConfig) + const pages = await convertDocuments(contentCatalog, asciidocConfig) expect(pages[0].contents.toString()).to.equal(heredoc`

    This one should be converted.

    @@ -240,7 +240,7 @@ describe('convertDocuments()', () => { `) }) - it('should register aliases defined by page-aliases document attribute', () => { + it('should register aliases defined by page-aliases document attribute', async () => { const contents = Buffer.from(heredoc` = Page Title :page-aliases: the-alias.adoc,topic/the-alias.adoc, 1.0.0@page-a.adoc ,another-alias.adoc @@ -256,7 +256,7 @@ describe('convertDocuments()', () => { ]) const inputFile = contentCatalog.getFiles()[0] contentCatalog.registerPageAlias = spy(() => {}) - convertDocuments(contentCatalog, asciidocConfig) + await convertDocuments(contentCatalog, asciidocConfig) expect(contentCatalog.registerPageAlias).to.have.been.called.exactly(4) expect(contentCatalog.registerPageAlias).first.be.called.with('the-alias.adoc', inputFile) expect(contentCatalog.registerPageAlias).second.be.called.with('topic/the-alias.adoc', inputFile) @@ -266,7 +266,7 @@ describe('convertDocuments()', () => { .be.called.with('another-alias.adoc', inputFile) }) - it('should register aliases split across lines using a line continuation', () => { + it('should register aliases split across lines using a line continuation', async () => { const contents = Buffer.from(heredoc` = Page Title :page-aliases: the-alias.adoc, \ @@ -285,7 +285,7 @@ describe('convertDocuments()', () => { ]) contentCatalog.registerPageAlias = spy(() => {}) const inputFile = contentCatalog.getFiles()[0] - convertDocuments(contentCatalog, asciidocConfig) + await convertDocuments(contentCatalog, asciidocConfig) expect(contentCatalog.registerPageAlias).to.have.been.called.exactly(4) expect(contentCatalog.registerPageAlias).first.called.with('the-alias.adoc', inputFile) expect(contentCatalog.registerPageAlias).second.called.with('topic/the-alias', inputFile) @@ -295,7 +295,7 @@ describe('convertDocuments()', () => { .called.with('another-alias.adoc', inputFile) }) - it('should register alias specified with no file extension', () => { + it('should register 
alias specified with no file extension', async () => { const contents = Buffer.from(heredoc` = Page Title :page-aliases: the-alias,topic/the-alias @@ -311,13 +311,13 @@ describe('convertDocuments()', () => { ]) const inputFile = contentCatalog.getFiles()[0] contentCatalog.registerPageAlias = spy(() => {}) - convertDocuments(contentCatalog, asciidocConfig) + await convertDocuments(contentCatalog, asciidocConfig) expect(contentCatalog.registerPageAlias).to.have.been.called.exactly(2) expect(contentCatalog.registerPageAlias).first.be.called.with('the-alias', inputFile) expect(contentCatalog.registerPageAlias).second.be.called.with('topic/the-alias', inputFile) }) - it('should not register aliases if page-aliases document attribute is empty', () => { + it('should not register aliases if page-aliases document attribute is empty', async () => { const contents = Buffer.from(heredoc` = Page Title :page-aliases: @@ -332,11 +332,11 @@ describe('convertDocuments()', () => { }, ]) contentCatalog.registerPageAlias = spy(() => {}) - convertDocuments(contentCatalog, asciidocConfig) + await convertDocuments(contentCatalog, asciidocConfig) expect(contentCatalog.registerPageAlias).to.not.have.been.called() }) - it('should fill in missing contents of page reference with automatic reference text', () => { + it('should fill in missing contents of page reference with automatic reference text', async () => { const fromContents = Buffer.from(heredoc` = From @@ -359,12 +359,12 @@ describe('convertDocuments()', () => { mediaType: 'text/asciidoc', }, ]) - const pages = convertDocuments(contentCatalog, asciidocConfig) + const pages = await convertDocuments(contentCatalog, asciidocConfig) const fromConvertedContents = pages.find((it) => it.src.relative === 'from.adoc').contents.toString() expectPageLink(fromConvertedContents, 'to.html', 'To') }) - it('should be able to reference page alias as target of xref', () => { + it('should be able to reference page alias as target of xref', async () => 
{ const contentsA = Buffer.from(heredoc` = The Page :page-aliases: a-page.adoc @@ -396,7 +396,7 @@ describe('convertDocuments()', () => { contentCatalog.resolvePage = (spec, ctx = {}) => { return (aliases[spec] || {}).rel } - const pages = convertDocuments(contentCatalog, asciidocConfig) + const pages = await convertDocuments(contentCatalog, asciidocConfig) const thePageContents = pages.find((it) => it.src.relative === 'the-page.adoc').contents.toString() const zaPageContents = pages.find((it) => it.src.relative === 'za-page.adoc').contents.toString() expectPageLink(thePageContents, 'za-page.html', 'the end page') @@ -404,7 +404,7 @@ describe('convertDocuments()', () => { expectPageLink(zaPageContents, 'za-page.html', 'the end page') }) - it('should be able to include a page which has already been converted', () => { + it('should be able to include a page which has already been converted', async () => { const contentsA = Buffer.from(heredoc` = Changelog @@ -433,7 +433,7 @@ describe('convertDocuments()', () => { mediaType: 'text/asciidoc', }, ]) - const pages = convertDocuments(contentCatalog, asciidocConfig) + const pages = await convertDocuments(contentCatalog, asciidocConfig) expect(pages[1].contents.toString()).to.include(heredoc`

    Recent Changes

    diff --git a/packages/playbook-builder/lib/build-playbook.js b/packages/playbook-builder/lib/build-playbook.js index baa35a95c..48147c93a 100644 --- a/packages/playbook-builder/lib/build-playbook.js +++ b/packages/playbook-builder/lib/build-playbook.js @@ -6,6 +6,9 @@ const fs = require('fs') const { hasOwnProperty } = Object.prototype const ospath = require('path') +//copied from load-asciidoc +const DOT_RELATIVE_RX = new RegExp(`^\\.{1,2}[/${ospath.sep.replace('/', '').replace('\\', '\\\\')}]`) + /** * Builds a playbook object according to the provided schema from the specified * arguments and environment variables. @@ -22,11 +25,19 @@ const ospath = require('path') * @param {Object} [env={}] - A map of environment variables. * @param {Object} [schema=undefined] - A convict configuration schema. * + * @param {Object} [eventEmitter=undefined] - Node EventEmitter. + *   Listeners registered on it are invoked before and after each pipeline stage and may be synchronous or asynchronous. + * @param {Array} [defaultExtensions=[]] - an array of explicit extensions. * @returns {Object} A playbook object containing a hierarchical structure that * mirrors the configuration schema. With the exception of the top-level asciidoc * key and its descendants, all keys in the playbook are camelCased. 
*/ -function buildPlaybook (args = [], env = {}, schema = undefined) { +async function buildPlaybook (args = [], env = {}, schema = undefined, + eventEmitter = undefined, defaultExtensions = []) { + if (eventEmitter && defaultExtensions.length) { + defaultExtensions.forEach((extension) => ('register' in extension) && extension.register(eventEmitter)) + eventEmitter.emit('beforeBuildPlaybook', { args, env, schema }) + } const config = loadConvictConfig(args, env, schema) const relSpecFilePath = config.get('playbook') @@ -59,7 +70,12 @@ function buildPlaybook (args = [], env = {}, schema = undefined) { config.validate({ allowed: 'strict' }) - return exportModel(config) + const playbook = exportModel(config) + if (eventEmitter) { + registerExtensions(playbook, eventEmitter) + eventEmitter.emit('afterBuildPlaybook', playbook) + } + return freeze(playbook) } function loadConvictConfig (args, env, customSchema) { @@ -97,7 +113,29 @@ function exportModel (config) { const playbook = camelCaseKeys(data, { deep: true, stopPaths: ['asciidoc'] }) playbook.dir = playbook.playbook ? ospath.dirname((playbook.file = playbook.playbook)) : process.cwd() delete playbook.playbook - return freeze(playbook) + return playbook +} + +function registerExtensions (playbook, eventEmitter) { + //copied from load-asciidoc + if (playbook.extensions && playbook.extensions.length) { + playbook.extensions.forEach((extensionData) => { + let extensionPath = extensionData.path || extensionData + const extensionConfig = extensionData.config + if (extensionPath.charAt() === '.' 
&& DOT_RELATIVE_RX.test(extensionPath)) { + // NOTE require resolves a dot-relative path relative to current file; resolve relative to playbook dir instead + extensionPath = ospath.resolve(playbook.dir, extensionPath) + } else if (!ospath.isAbsolute(extensionPath)) { + // NOTE appending node_modules prevents require from looking elsewhere before looking in these paths + const paths = [playbook.dir, ospath.dirname(__dirname)].map((start) => ospath.join(start, 'node_modules')) + extensionPath = require.resolve(extensionPath, { paths }) + } + const extension = require(extensionPath) + if ('register' in extension) { + extension.register(eventEmitter, extensionConfig) + } + }) + } } module.exports = buildPlaybook diff --git a/packages/playbook-builder/lib/config/schema.js b/packages/playbook-builder/lib/config/schema.js index 9f08715cd..d403b9222 100644 --- a/packages/playbook-builder/lib/config/schema.js +++ b/packages/playbook-builder/lib/config/schema.js @@ -112,7 +112,7 @@ module.exports = { arg: 'attribute', }, extensions: { - doc: 'A list of require paths for registering extensions per instance of the AsciiDoc processor.', + doc: 'A list of require paths for registering asciidoctor extensions per instance of the AsciiDoc processor.', format: Array, default: [], }, @@ -227,4 +227,10 @@ module.exports = { default: undefined, }, }, + extensions: { + doc: 'A list of require paths for registering Antora pipeline extensions, with configuration.', + format: Array, + default: [], + arg: 'pipeline-extension', + }, } diff --git a/packages/playbook-builder/test/build-playbook-test.js b/packages/playbook-builder/test/build-playbook-test.js index f75de4c44..cdcc1e1bc 100644 --- a/packages/playbook-builder/test/build-playbook-test.js +++ b/packages/playbook-builder/test/build-playbook-test.js @@ -1,14 +1,16 @@ /* eslint-env mocha */ 'use strict' -const { expect } = require('../../../test/test-utils') +const EventEmitter = require('events') + +const { deferExceptions, expect } = 
require('../../../test/test-utils') const buildPlaybook = require('@antora/playbook-builder') const ospath = require('path') const FIXTURES_DIR = ospath.join(__dirname, 'fixtures') -describe('buildPlaybook()', () => { +describe('buildPlaybook()', () => { let schema, expectedPlaybook beforeEach(() => { @@ -101,30 +103,32 @@ describe('buildPlaybook()', () => { const legacyUiStartPathSpec = ospath.join(FIXTURES_DIR, 'legacy-ui-start-path-sample.yml') const invalidSiteUrlSpec = ospath.join(FIXTURES_DIR, 'invalid-site-url-spec-sample.yml') const defaultSchemaSpec = ospath.join(FIXTURES_DIR, 'default-schema-spec-sample.yml') + const defaultSchemaSpecWithPipelineExtension = ospath.join(FIXTURES_DIR, 'default-schema-spec-pipeline-extension-sample.yml') + const defaultSchemaSpecWithPipelineExtensionConfigs = ospath.join(FIXTURES_DIR, 'default-schema-spec-pipeline-extension-config-sample.yml') - it('should set dir to process.cwd() when playbook file is not specified', () => { - const playbook = buildPlaybook([], {}, { playbook: { format: String, default: undefined } }) + it('should set dir to process.cwd() when playbook file is not specified', async () => { + const playbook = await buildPlaybook([], {}, { playbook: { format: String, default: undefined } }) expect(playbook.dir).to.equal(process.cwd()) expect(playbook.file).to.not.exist() }) - it('should set dir and file properties based on absolute path of playbook file', () => { - const playbook = buildPlaybook([], { PLAYBOOK: ospath.relative('.', ymlSpec) }, schema) + it('should set dir and file properties based on absolute path of playbook file', async () => { + const playbook = await buildPlaybook([], { PLAYBOOK: ospath.relative('.', ymlSpec) }, schema) expect(playbook.dir).to.equal(ospath.dirname(ymlSpec)) expect(playbook.file).to.equal(ymlSpec) expect(playbook.playbook).to.not.exist() }) - it('should load YAML playbook file with .yml extension', () => { - const playbook = buildPlaybook([], { PLAYBOOK: ymlSpec }, 
schema) + it('should load YAML playbook file with .yml extension', async () => { + const playbook = await buildPlaybook([], { PLAYBOOK: ymlSpec }, schema) expectedPlaybook.dir = ospath.dirname(ymlSpec) expectedPlaybook.file = ymlSpec expectedPlaybook.one.one = 'yml-spec-value-one' expect(playbook).to.eql(expectedPlaybook) }) - it('should load YAML playbook file with .yaml extension', () => { - const playbook = buildPlaybook([], { PLAYBOOK: yamlSpec }, schema) + it('should load YAML playbook file with .yaml extension', async () => { + const playbook = await buildPlaybook([], { PLAYBOOK: yamlSpec }, schema) expectedPlaybook.dir = ospath.dirname(yamlSpec) expectedPlaybook.file = yamlSpec expectedPlaybook.one.one = 'yaml-spec-value-one' @@ -135,173 +139,180 @@ describe('buildPlaybook()', () => { expect(playbook).to.eql(expectedPlaybook) }) - it('should load JSON (JSON 5) playbook file', () => { - const playbook = buildPlaybook([], { PLAYBOOK: jsonSpec }, schema) + it('should load JSON (JSON 5) playbook file', async () => { + const playbook = await buildPlaybook([], { PLAYBOOK: jsonSpec }, schema) expectedPlaybook.dir = ospath.dirname(jsonSpec) expectedPlaybook.file = jsonSpec expectedPlaybook.one.one = 'json-spec-value-one' expect(playbook).to.eql(expectedPlaybook) }) - it('should load TOML playbook file', () => { - const playbook = buildPlaybook([], { PLAYBOOK: tomlSpec }, schema) + it('should load TOML playbook file', async () => { + const playbook = await buildPlaybook([], { PLAYBOOK: tomlSpec }, schema) expectedPlaybook.dir = ospath.dirname(tomlSpec) expectedPlaybook.file = tomlSpec expectedPlaybook.one.one = 'toml-spec-value-one' expect(playbook).to.eql(expectedPlaybook) }) - it('should load YAML playbook file first when no file extension is given', () => { - const playbook = buildPlaybook([], { PLAYBOOK: extensionlessSpec }, schema) + it('should load YAML playbook file first when no file extension is given', async () => { + const playbook = await 
buildPlaybook([], { PLAYBOOK: extensionlessSpec }, schema) expectedPlaybook.dir = ospath.dirname(extensionlessSpec) expectedPlaybook.file = extensionlessSpec + '.yml' expectedPlaybook.one.one = 'yml-spec-value-one' expect(playbook).to.eql(expectedPlaybook) }) - it('should discover JSON playbook when no file extension is given', () => { - const playbook = buildPlaybook([], { PLAYBOOK: extensionlessJsonSpec }, schema) + it('should discover JSON playbook when no file extension is given', async () => { + const playbook = await buildPlaybook([], { PLAYBOOK: extensionlessJsonSpec }, schema) expectedPlaybook.dir = ospath.dirname(extensionlessJsonSpec) expectedPlaybook.file = extensionlessJsonSpec + '.json' expectedPlaybook.one.one = 'json-spec-value-one' expect(playbook).to.eql(expectedPlaybook) }) - it('should discover TOML playbook when no file extension is given', () => { - const playbook = buildPlaybook([], { PLAYBOOK: extensionlessTomlSpec }, schema) + it('should discover TOML playbook when no file extension is given', async () => { + const playbook = await buildPlaybook([], { PLAYBOOK: extensionlessTomlSpec }, schema) expectedPlaybook.dir = ospath.dirname(extensionlessTomlSpec) expectedPlaybook.file = extensionlessTomlSpec + '.toml' expectedPlaybook.one.one = 'toml-spec-value-one' expect(playbook).to.eql(expectedPlaybook) }) - it('should throw error when loading unknown type file', () => { - expect(() => buildPlaybook([], { PLAYBOOK: iniSpec }, schema)).to.throw('Unexpected playbook file type') + it('should throw error when loading unknown type file', async () => { + const buildPlaybookDeferred = await deferExceptions(buildPlaybook, [], { PLAYBOOK: iniSpec }, schema) + expect(buildPlaybookDeferred).to.throw('Unexpected playbook file type') }) - it('should throw error if specified playbook file does not exist', () => { + it('should throw error if specified playbook file does not exist', async () => { const expectedMessage = `playbook file not found at 
${ospath.resolve('non-existent/file.yml')} ` + `(path: non-existent/file.yml, cwd: ${process.cwd()})` - expect(() => buildPlaybook([], { PLAYBOOK: 'non-existent/file.yml' }, schema)).to.throw(expectedMessage) + const buildPlaybookDeferred = await deferExceptions(buildPlaybook, [], { PLAYBOOK: 'non-existent/file.yml' }, schema) + expect(buildPlaybookDeferred).to.throw(expectedMessage) }) - it('should not show details in error message if input path of playbook file matches resolved path', () => { + it('should not show details in error message if input path of playbook file matches resolved path', async () => { const playbookFilePath = ospath.resolve('non-existent/file.yml') const expectedMessage = `playbook file not found at ${playbookFilePath}` // FIXME: technically this does not assert that the details are absent - expect(() => buildPlaybook([], { PLAYBOOK: playbookFilePath }, schema)).to.throw(expectedMessage) + const buildPlaybookDeferred = await deferExceptions(buildPlaybook, [], { PLAYBOOK: playbookFilePath }, schema) + expect(buildPlaybookDeferred).to.throw(expectedMessage) }) - it('should not show cwd in error message if input path of playbook file is absolute', () => { + it('should not show cwd in error message if input path of playbook file is absolute', async () => { const playbookFilePath = ospath.resolve('non-existent/file.yml') const requestedPlaybookFilePath = [process.cwd(), 'non-existent', '..', 'non-existent/file.yml'].join(ospath.sep) const expectedMessage = `playbook file not found at ${playbookFilePath} (path: ${requestedPlaybookFilePath})` - expect(() => buildPlaybook([], { PLAYBOOK: requestedPlaybookFilePath }, schema)).to.throw(expectedMessage) + const buildPlaybookDeferred = + await deferExceptions(buildPlaybook, [], { PLAYBOOK: requestedPlaybookFilePath }, schema) + expect(buildPlaybookDeferred).to.throw(expectedMessage) }) - it('should throw error if playbook file without extension cannot be resolved', () => { + it('should throw error if 
playbook file without extension cannot be resolved', async () => { const resolvedRootPath = ospath.resolve('non-existent/file') const expectedMessage = 'playbook file not found at ' + `${resolvedRootPath}.yml, ${resolvedRootPath}.json, or ${resolvedRootPath}.toml` + ` (path: non-existent/file, cwd: ${process.cwd()})` - expect(() => buildPlaybook([], { PLAYBOOK: 'non-existent/file' }, schema)).to.throw(expectedMessage) + const buildPlaybookDeferred = await deferExceptions(buildPlaybook, [], { PLAYBOOK: 'non-existent/file' }, schema) + expect(buildPlaybookDeferred).to.throw(expectedMessage) }) - it('should use default value if playbook file is not specified', () => { - const playbook = buildPlaybook([], { PLAYBOOK: ymlSpec }, schema) + it('should use default value if playbook file is not specified', async () => { + const playbook = await buildPlaybook([], { PLAYBOOK: ymlSpec }, schema) expect(playbook.one.two).to.equal('default-value') }) - it('should use env value over value in playbook file', () => { + it('should use env value over value in playbook file', async () => { const env = { PLAYBOOK: ymlSpec, ANTORA_ONE_ONE: 'the-env-value' } - const playbook = buildPlaybook([], env, schema) + const playbook = await buildPlaybook([], env, schema) expect(playbook.one.one).to.equal('the-env-value') }) - it('should use env value over value in playbook file when env value is empty string', () => { + it('should use env value over value in playbook file when env value is empty string', async () => { const env = { PLAYBOOK: ymlSpec, ANTORA_ONE_ONE: '' } - const playbook = buildPlaybook([], env, schema) + const playbook = await buildPlaybook([], env, schema) expect(playbook.one.one).to.equal('') }) - it('should use args value over value in playbook file or env value even if value is falsy', () => { + it('should use args value over value in playbook file or env value even if value is falsy', async () => { const args = ['--one-one', 'the-args-value'] const env = { PLAYBOOK: 
ymlSpec, ANTORA_ONE_ONE: 'the-env-value' } - const playbook = buildPlaybook(args, env, schema) + const playbook = await buildPlaybook(args, env, schema) expect(playbook.one.one).to.equal('the-args-value') }) - it('should use arg value over value in playbook file when arg value is falsy', () => { + it('should use arg value over value in playbook file when arg value is falsy', async () => { const args = ['--two', '0'] const env = { PLAYBOOK: ymlSpec, ANTORA_TWO: '47' } - const playbook = buildPlaybook(args, env, schema) + const playbook = await buildPlaybook(args, env, schema) expect(playbook.two).to.equal(0) }) - it('should convert properties of playbook to camelCase', () => { + it('should convert properties of playbook to camelCase', async () => { const env = { PLAYBOOK: ymlSpec, WIDGET_KEY: 'xxxyyyzzz' } - const playbook = buildPlaybook([], env, schema) + const playbook = await buildPlaybook([], env, schema) expect(playbook.one.widgetKey).to.equal('xxxyyyzzz') }) - it('should coerce Number values in playbook file', () => { - const playbook = buildPlaybook([], { PLAYBOOK: ymlSpec }, schema) + it('should coerce Number values in playbook file', async () => { + const playbook = await buildPlaybook([], { PLAYBOOK: ymlSpec }, schema) expect(playbook.two).to.equal(42) }) - it('should coerce Number values in env', () => { + it('should coerce Number values in env', async () => { const env = { PLAYBOOK: ymlSpec, ANTORA_TWO: '777' } - const playbook = buildPlaybook([], env, schema) + const playbook = await buildPlaybook([], env, schema) expect(playbook.two).to.equal(777) }) - it('should use env value over value in playbook file when env value is falsy', () => { + it('should use env value over value in playbook file when env value is falsy', async () => { const env = { PLAYBOOK: ymlSpec, ANTORA_TWO: '0' } - const playbook = buildPlaybook([], env, schema) + const playbook = await buildPlaybook([], env, schema) expect(playbook.two).to.equal(0) }) - it('should coerce Number 
values in args', () => { - const playbook = buildPlaybook(['--two', '777'], { PLAYBOOK: ymlSpec }, schema) + it('should coerce Number values in args', async () => { + const playbook = await buildPlaybook(['--two', '777'], { PLAYBOOK: ymlSpec }, schema) expect(playbook.two).to.equal(777) }) - it('should coerce Boolean values in playbook file', () => { - const playbook = buildPlaybook([], { PLAYBOOK: ymlSpec }, schema) + it('should coerce Boolean values in playbook file', async () => { + const playbook = await buildPlaybook([], { PLAYBOOK: ymlSpec }, schema) expect(playbook.three).to.be.false() }) - it('should coerce Boolean values in env', () => { + it('should coerce Boolean values in env', async () => { const env = { PLAYBOOK: ymlSpec, ANTORA_THREE: 'true' } - const playbook = buildPlaybook([], env, schema) + const playbook = await buildPlaybook([], env, schema) expect(playbook.three).to.be.true() }) - it('should coerce Boolean values in args', () => { - const playbook = buildPlaybook(['--three'], { PLAYBOOK: ymlSpec }, schema) + it('should coerce Boolean values in args', async () => { + const playbook = await buildPlaybook(['--three'], { PLAYBOOK: ymlSpec }, schema) expect(playbook.three).to.be.true() }) - it('should coerce primitive map value in playbook file from Object', () => { + it('should coerce primitive map value in playbook file from Object', async () => { schema.keyvals.format = 'primitive-map' - const playbook = buildPlaybook([], { PLAYBOOK: coerceValueSpec }, schema) + const playbook = await buildPlaybook([], { PLAYBOOK: coerceValueSpec }, schema) expect(playbook.keyvals).to.eql({ key: 'val', keyOnly: '', foo: 'bar', nada: null, yep: true, nope: false }) }) - it('should throw error if value of primitive map key is a String', () => { + it('should throw error if value of primitive map key is a String', async () => { schema.keyvals2.format = 'primitive-map' - expect(() => buildPlaybook([], { PLAYBOOK: coerceValueSpec }, schema)).to.throw( + const 
buildPlaybookDeferred = await deferExceptions(buildPlaybook, [], { PLAYBOOK: coerceValueSpec }, schema) + expect(buildPlaybookDeferred).to.throw( 'must be a primitive map (i.e., key/value pairs, primitive values only)' ) }) - it('should coerce primitive map value in env', () => { + it('should coerce primitive map value in env', async () => { schema.keyvals.format = 'primitive-map' const val = 'key=val,key-only,=valonly,empty=,tilde="~",site_tags="a,b,c",nada=~,y=true,n=false,when=2020-01-01' const env = { PLAYBOOK: ymlSpec, KEYVALS: val } - const playbook = buildPlaybook([], env, schema) + const playbook = await buildPlaybook([], env, schema) expect(playbook.keyvals).to.eql({ key: 'val', keyOnly: '', @@ -315,9 +326,9 @@ describe('buildPlaybook()', () => { }) }) - it('should coerce primitive map value in args', () => { + it('should coerce primitive map value in args', async () => { schema.keyvals.format = 'primitive-map' - const playbook = buildPlaybook( + const playbook = await buildPlaybook( [ '--keyval', 'key=val', @@ -356,51 +367,55 @@ describe('buildPlaybook()', () => { }) }) - it('should use primitive map value in args to update map value from playbook file', () => { + it('should use primitive map value in args to update map value from playbook file', async () => { schema.keyvals.format = 'primitive-map' const args = ['--keyval', 'foo=baz', '--keyval', 'key-only=useme'] - const playbook = buildPlaybook(args, { PLAYBOOK: coerceValueSpec }, schema) + const playbook = await buildPlaybook(args, { PLAYBOOK: coerceValueSpec }, schema) expect(playbook.keyvals.key).to.equal('val') expect(playbook.keyvals.keyOnly).to.equal('useme') expect(playbook.keyvals.foo).to.equal('baz') }) - it('should throw error if value of primitive map key is not an object', () => { + it('should throw error if value of primitive map key is not an object', async () => { schema.keyvals.format = 'primitive-map' - expect(() => buildPlaybook([], { PLAYBOOK: invalidMapSpec }, schema)).to.throw( + 
const buildPlaybookDeferred = await deferExceptions(buildPlaybook, [], { PLAYBOOK: invalidMapSpec }, schema) + expect(buildPlaybookDeferred).to.throw( 'must be a primitive map (i.e., key/value pairs, primitive values only)' ) }) - it('should throw error if value of primitive map key is not primitive', () => { + it('should throw error if value of primitive map key is not primitive', async () => { schema.keyvals.format = 'primitive-map' - expect(() => buildPlaybook([], { PLAYBOOK: invalidPrimitiveMapSpec }, schema)).to.throw( + const buildPlaybookDeferred = + await deferExceptions(buildPlaybook, [], { PLAYBOOK: invalidPrimitiveMapSpec }, schema) + expect(buildPlaybookDeferred).to.throw( 'must be a primitive map (i.e., key/value pairs, primitive values only)' ) }) - it('should allow value of primitive map key to be null', () => { + it('should allow value of primitive map key to be null', async () => { schema.keyvals.format = 'primitive-map' - const playbook = buildPlaybook([], { PLAYBOOK: nullMapSpec }, schema) + const playbook = await buildPlaybook([], { PLAYBOOK: nullMapSpec }, schema) expect(playbook.keyvals).to.be.null() }) - it('should coerce map value in playbook file from Object', () => { - const playbook = buildPlaybook([], { PLAYBOOK: coerceValueSpec }, schema) + it('should coerce map value in playbook file from Object', async () => { + const playbook = await buildPlaybook([], { PLAYBOOK: coerceValueSpec }, schema) expect(playbook.keyvals).to.eql({ key: 'val', keyOnly: '', foo: 'bar', nada: null, yep: true, nope: false }) }) - it('should throw error if value of map key is a String', () => { + it('should throw error if value of map key is a String', async () => { schema.keyvals2.format = 'map' - expect(() => buildPlaybook([], { PLAYBOOK: coerceValueSpec }, schema)).to.throw( + const buildPlaybookDeferred = await deferExceptions(buildPlaybook, [], { PLAYBOOK: coerceValueSpec }, schema) + expect(buildPlaybookDeferred).to.throw( 'must be a map (i.e., key/value 
pairs)' ) }) - it('should coerce map value in env', () => { + it('should coerce map value in env', async () => { const val = 'key=val,key-only,=valonly,empty=,tilde="~",site_tags="a,b,c",nada=~,y=true,n=false' const env = { PLAYBOOK: ymlSpec, KEYVALS: val } - const playbook = buildPlaybook([], env, schema) + const playbook = await buildPlaybook([], env, schema) expect(playbook.keyvals).to.eql({ key: 'val', keyOnly: '', @@ -413,8 +428,8 @@ describe('buildPlaybook()', () => { }) }) - it('should coerce map value in args', () => { - const playbook = buildPlaybook( + it('should coerce map value in args', async () => { + const playbook = await buildPlaybook( [ '--keyval', 'key=val', @@ -450,15 +465,15 @@ describe('buildPlaybook()', () => { }) }) - it('should use map value in args to update map value from playbook file', () => { - const playbook = buildPlaybook(['--keyval', 'foo=baz'], { PLAYBOOK: coerceValueSpec }, schema) + it('should use map value in args to update map value from playbook file', async () => { + const playbook = await buildPlaybook(['--keyval', 'foo=baz'], { PLAYBOOK: coerceValueSpec }, schema) expect(playbook.keyvals.key).to.equal('val') expect(playbook.keyvals.foo).to.equal('baz') }) - it('should update map value from playbook file with map values in args when name is asciidoc.attributes', () => { + it('should update map value from playbook file with map values in args when name is asciidoc.attributes', async () => { const args = ['--playbook', defaultSchemaSpec, '--attribute', 'idprefix=user-', '--attribute', 'idseparator=-'] - const playbook = buildPlaybook(args, {}) + const playbook = await buildPlaybook(args, {}) expect(playbook.asciidoc.attributes).to.eql({ 'allow-uri-read': true, idprefix: 'user-', @@ -468,26 +483,25 @@ describe('buildPlaybook()', () => { }) }) - it('should throw error if value of map key is not an object', () => { - expect(() => buildPlaybook([], { PLAYBOOK: invalidMapSpec }, schema)).to.throw( - 'must be a map (i.e., key/value 
pairs)' - ) + it('should throw error if value of map key is not an object', async () => { + const buildPlaybookDeferred = await deferExceptions(buildPlaybook, [], { PLAYBOOK: invalidMapSpec }, schema) + expect(buildPlaybookDeferred).to.throw('must be a map (i.e., key/value pairs)') }) - it('should allow value of map key to be null', () => { - const playbook = buildPlaybook([], { PLAYBOOK: nullMapSpec }, schema) + it('should allow value of map key to be null', async () => { + const playbook = await buildPlaybook([], { PLAYBOOK: nullMapSpec }, schema) expect(playbook.keyvals).to.be.null() }) - it('should coerce String value to Array', () => { - const playbook = buildPlaybook([], { PLAYBOOK: coerceValueSpec }, schema) + it('should coerce String value to Array', async () => { + const playbook = await buildPlaybook([], { PLAYBOOK: coerceValueSpec }, schema) expect(playbook.file).to.equal(coerceValueSpec) expect(playbook.dir).to.equal(ospath.dirname(coerceValueSpec)) expect(playbook.one.one).to.equal('one') expect(playbook.four).to.eql(['John']) }) - it('should throw error if dir-or-virtual-files key is not a string or array', () => { + it('should throw error if dir-or-virtual-files key is not a string or array', async () => { Object.keys(schema).forEach((key) => { if (key !== 'playbook') delete schema[key] }) @@ -495,29 +509,32 @@ describe('buildPlaybook()', () => { format: 'dir-or-virtual-files', default: undefined, } - expect(() => buildPlaybook([], { PLAYBOOK: invalidDirOrFilesSpec }, schema)).to.throw( + const buildPlaybookDeferred = await deferExceptions(buildPlaybook, [], { PLAYBOOK: invalidDirOrFilesSpec }, schema) + expect(buildPlaybookDeferred).to.throw( 'must be a directory path or list of virtual files' ) }) - it('should throw error when trying to load values not declared in the schema', () => { - expect(() => buildPlaybook([], { PLAYBOOK: badSpec }, schema)).to.throw('not declared') + it('should throw error when trying to load values not declared in the 
schema', async () => { + const buildPlaybookDeferred = await deferExceptions(buildPlaybook, [], { PLAYBOOK: badSpec }, schema) + expect(buildPlaybookDeferred).to.throw('not declared') }) - it('should throw error when playbook file uses values of the wrong format', () => { + it('should throw error when playbook file uses values of the wrong format', async () => { schema.two.format = String - expect(() => buildPlaybook([], { PLAYBOOK: ymlSpec }, schema)).to.throw('must be of type String') + const buildPlaybookDeferred = await deferExceptions(buildPlaybook, [], { PLAYBOOK: ymlSpec }, schema) + expect(buildPlaybookDeferred).to.throw('must be of type String') }) - it('should return an immutable playbook', () => { + it('should return an immutable playbook', async () => { const playbook = buildPlaybook([], { PLAYBOOK: ymlSpec }, schema) expect(() => { playbook.one.two = 'override' }).to.throw() }) - it('should use default schema if no schema is specified', () => { - const playbook = buildPlaybook(['--playbook', defaultSchemaSpec], {}) + it('should use default schema if no schema is specified', async () => { + const playbook = await buildPlaybook(['--playbook', defaultSchemaSpec], {}) expect(playbook.runtime.cacheDir).to.equal('./.antora-cache') expect(playbook.runtime.fetch).to.equal(true) expect(playbook.runtime.quiet).to.equal(false) @@ -562,48 +579,54 @@ describe('buildPlaybook()', () => { expect(playbook.output.destinations[0].path).to.equal('./site.zip') }) - it('should allow site.url to be a pathname', () => { - const playbook = buildPlaybook(['--playbook', defaultSchemaSpec, '--url', '/docs'], {}) + it('should allow site.url to be a pathname', async () => { + const playbook = await buildPlaybook(['--playbook', defaultSchemaSpec, '--url', '/docs'], {}) expect(playbook.site.url).to.equal('/docs') }) - it('should throw error if site.url is a relative path', () => { - expect(() => buildPlaybook(['--playbook', defaultSchemaSpec, '--url', 'docs'], {})).to.throw( + 
it('should throw error if site.url is a relative path', async () => { + const buildPlaybookDeferred = await deferExceptions(buildPlaybook, ['--playbook', defaultSchemaSpec, '--url', 'docs'], {}) + expect(buildPlaybookDeferred).to.throw( 'must be an absolute URL or a pathname (i.e., root-relative path)' ) }) - it('should throw error if site.url is a file URI', () => { - expect(() => buildPlaybook(['--playbook', defaultSchemaSpec, '--url', 'file:///path/to/docs'], {})).to.throw( + it('should throw error if site.url is a file URI', async () => { + const buildPlaybookDeferred = await deferExceptions(buildPlaybook, ['--playbook', defaultSchemaSpec, '--url', 'file:///path/to/docs'], {}) + expect(buildPlaybookDeferred).to.throw( 'must be an absolute URL or a pathname (i.e., root-relative path)' ) }) - it('should throw error if site.url is an invalid URL', () => { - expect(() => buildPlaybook(['--playbook', defaultSchemaSpec, '--url', ':/foo'], {})).to.throw( + it('should throw error if site.url is an invalid URL', async () => { + const buildPlaybookDeferred = await deferExceptions(buildPlaybook, ['--playbook', defaultSchemaSpec, '--url', ':/foo'], {}) + expect(buildPlaybookDeferred).to.throw( 'must be an absolute URL or a pathname (i.e., root-relative path)' ) }) - it('should throw error if site.url is not a string', () => { - expect(() => buildPlaybook(['--playbook', invalidSiteUrlSpec], {})).to.throw( + it('should throw error if site.url is not a string', async () => { + const buildPlaybookDeferred = await deferExceptions(buildPlaybook, ['--playbook', invalidSiteUrlSpec], {}) + expect(buildPlaybookDeferred).to.throw( 'must be an absolute URL or a pathname (i.e., root-relative path)' ) }) - it('should throw error if site.url is a pathname containing spaces', () => { - expect(() => buildPlaybook(['--playbook', defaultSchemaSpec, '--url', '/my docs'], {})).to.throw( + it('should throw error if site.url is a pathname containing spaces', async () => { + const 
buildPlaybookDeferred = await deferExceptions(buildPlaybook, ['--playbook', defaultSchemaSpec, '--url', '/my docs'], {}) + expect(buildPlaybookDeferred).to.throw( 'must not contain spaces' ) }) - it('should throw error if site.url is an absolute URL containing spaces in the pathname', () => { - expect(() => buildPlaybook(['--playbook', defaultSchemaSpec, '--url', 'https://example.org/my docs'], {})).to.throw( + it('should throw error if site.url is an absolute URL containing spaces in the pathname', async () => { + const buildPlaybookDeferred = await deferExceptions(buildPlaybook, ['--playbook', defaultSchemaSpec, '--url', 'https://example.org/my docs'], {}) + expect(buildPlaybookDeferred).to.throw( 'must not contain spaces' ) }) - it('should throw error if boolean-or-string key is not a boolean or string', () => { + it('should throw error if boolean-or-string key is not a boolean or string', async () => { Object.keys(schema).forEach((key) => { if (key !== 'playbook') delete schema[key] }) @@ -611,65 +634,219 @@ describe('buildPlaybook()', () => { format: 'boolean-or-string', default: undefined, } - expect(() => buildPlaybook([], { PLAYBOOK: invalidStringOrBooleanSpec }, schema)).to.throw( + const buildPlaybookDeferred = + await deferExceptions(buildPlaybook, [], { PLAYBOOK: invalidStringOrBooleanSpec }, schema) + expect(buildPlaybookDeferred).to.throw( 'must be a boolean or string' ) }) - it('should assign runtime.fetch to value of runtime.pull if latter is specified', () => { - const playbook = buildPlaybook(['--playbook', legacyRuntimeSpec], {}) + it('should assign runtime.fetch to value of runtime.pull if latter is specified', async () => { + const playbook = await buildPlaybook(['--playbook', legacyRuntimeSpec], {}) expect(playbook.runtime.fetch).to.equal(true) expect(playbook.runtime).to.not.have.property('pull') }) - it('should use value of runtime.pull if both runtime.pull and runtime.fetch are specified', () => { - const playbook = 
buildPlaybook(['--playbook', legacyRuntimeSpec, '--fetch', 'false'], {}) + it('should use value of runtime.pull if both runtime.pull and runtime.fetch are specified', async () => { + const playbook = await buildPlaybook(['--playbook', legacyRuntimeSpec, '--fetch', 'false'], {}) expect(playbook.runtime.fetch).to.equal(true) expect(playbook.runtime).to.not.have.property('pull') }) - it('should use value of git.ensure_git_suffix if specified when git.ensureGitSuffix is not specified', () => { - const playbook = buildPlaybook(['--playbook', legacyGitSpec], {}) + it('should use value of git.ensure_git_suffix if specified when git.ensureGitSuffix is not specified', async () => { + const playbook = await buildPlaybook(['--playbook', legacyGitSpec], {}) expect(playbook.git.ensureGitSuffix).to.equal(false) expect(playbook.git).to.not.have.property('ensure_git_suffix') }) - it('should prefer value of git.ensureGitSuffix if specified', () => { - const playbook = buildPlaybook(['--playbook', legacyAndModernGitSpec], {}) + it('should prefer value of git.ensureGitSuffix if specified', async () => { + const playbook = await buildPlaybook(['--playbook', legacyAndModernGitSpec], {}) expect(playbook.git.ensureGitSuffix).to.equal(false) expect(playbook.git).to.not.have.property('ensure_git_suffix') }) - it('should not migrate playbook data that defines ui.bundle as a String', () => { - expect(() => buildPlaybook(['--playbook', legacyUiBundleSpec], {})).to.throw(/not declared in the schema/) + it('should not migrate playbook data that defines ui.bundle as a String', async () => { + const buildPlaybookDeferred = await deferExceptions(buildPlaybook, ['--playbook', legacyUiBundleSpec], {}) + expect(buildPlaybookDeferred).to.throw(/not declared in the schema/) }) - it('should not migrate playbook data that defines ui.start_path', () => { - expect(() => buildPlaybook(['--playbook', legacyUiStartPathSpec], {})).to.throw(/not declared in the schema/) + it('should not migrate playbook data 
that defines ui.start_path', async () => { + const buildPlaybookDeferred = await deferExceptions(buildPlaybook, ['--playbook', legacyUiStartPathSpec], {}) + expect(buildPlaybookDeferred).to.throw(/not declared in the schema/) }) - it('should throw if no configuration data is given', () => { - expect(() => buildPlaybook()).to.throw() + it('should throw if no configuration data is given', async () => { + const buildPlaybookDeferred = await deferExceptions(buildPlaybook) + expect(buildPlaybookDeferred).to.throw() }) - it('should be decoupled from the process environment', () => { + it('should be decoupled from the process environment', async () => { const originalEnv = process.env process.env = { URL: 'https://docs.example.org' } - const playbook = buildPlaybook(['--ui-bundle-url', 'ui-bundle.zip']) + const playbook = await buildPlaybook(['--ui-bundle-url', 'ui-bundle.zip']) expect(playbook.site.url).to.be.undefined() process.env = originalEnv }) - it('should leave the process environment unchanged', () => { + it('should leave the process environment unchanged', async () => { const processArgv = process.argv const processEnv = process.env const args = ['--one-one', 'the-args-value'] const env = { PLAYBOOK: ymlSpec, ANTORA_TWO: 99 } - const playbook = buildPlaybook(args, env, schema) + const playbook = await buildPlaybook(args, env, schema) expect(playbook.one.one).to.equal('the-args-value') expect(playbook.two).to.equal(99) expect(playbook.three).to.equal(false) expect(process.argv).to.equal(processArgv) expect(process.env).to.equal(processEnv) }) + + describe('build playbook with pipeline extensions', async () => { + let eventEmitter, eventContext + + beforeEach(() => { + const baseEmitter = new EventEmitter() + + eventEmitter = { + + emit: async (name, ...args) => { + const promises = [] + baseEmitter.emit(name, promises, ...args) + promises.length && await Promise.all(promises) + }, + + on: (name, listener) => baseEmitter.on(name, (promises, ...args) => 
promises.push(listener(...args))), + } + + eventContext = {} + }) + it('should accept empty default pipeline extensions supplied', async () => { + await buildPlaybook(['--playbook', defaultSchemaSpec], {}, undefined, + eventEmitter, eventContext, []) + }) + + it('should accept default pipeline extensions supplied', async () => { + const eventContext = {} + const plugin = { + eventContext, + + register: (eventEmitter) => { + eventEmitter.on('beforeBuildPlaybook', ({ args, env, schema }) => { + eventContext.before = 'called' + }) + eventEmitter.on('afterBuildPlaybook', (playbook) => { + eventContext.after = 'called' + }) + }, + } + await buildPlaybook(['--playbook', defaultSchemaSpec], {}, undefined, + eventEmitter, [plugin]) + expect(eventContext.before).to.equal('called') + expect(eventContext.after).to.equal('called') + }) + + it('default pipeline extension should be able to modify args', async () => { + const eventContext = {} + const plugin = { + eventContext, + + register: (eventEmitter) => { + eventEmitter.on('beforeBuildPlaybook', ({ args, env, schema }) => { + eventContext.before = 'called' + args.push('--attribute') + args.push('foo=bar') + }) + eventEmitter.on('afterBuildPlaybook', (playbook) => { + eventContext.after = 'called' + }) + }, + } + const env = {} + const playbook = await buildPlaybook(['--playbook', defaultSchemaSpec], env, undefined, + eventEmitter, [plugin]) + expect(eventContext.before).to.equal('called') + expect(eventContext.after).to.equal('called') + expect(playbook.asciidoc.attributes.foo).to.equal('bar') + }) + + it('default pipeline extension should be able to modify playbook', async () => { + const eventContext = {} + const plugin = { + eventContext, + + register: (eventEmitter) => { + eventEmitter.on('beforeBuildPlaybook', ({ args, env, schema }) => { + eventContext.before = 'called' + }) + eventEmitter.on('afterBuildPlaybook', (playbook) => { + eventContext.after = 'called' + playbook.extra = ['foo', 'bar'] + }) + }, + } + const 
env = {} + const playbook = await buildPlaybook(['--playbook', defaultSchemaSpec], env, undefined, + eventEmitter, [plugin]) + expect(eventContext.before).to.equal('called') + expect(eventContext.after).to.equal('called') + expect(playbook.extra.length).to.equal(2) + expect(playbook.extra[0]).to.equal('foo') + expect(playbook.extra[1]).to.equal('bar') + }) + + it('should accept pipeline extensions specified in playbook', async () => { + const env = {} + const playbook = await buildPlaybook(['--playbook', defaultSchemaSpecWithPipelineExtension], env, undefined, + eventEmitter) + expect(env.beforeLoaded).to.equal(undefined) + expect(playbook.afterLoaded).to.equal('called') + }) + + it('should accept pipeline extensions via cli', async () => { + const env = {} + const playbook = await buildPlaybook(['--playbook', defaultSchemaSpec, '--pipeline-extension', './pipeline-extensions/test-extension.js'], env, undefined, + eventEmitter) + expect(env.beforeLoaded).to.equal(undefined) + expect(playbook.afterLoaded).to.equal('called') + }) + + it('should accept pipeline extensions added to playbook by default pipeline extension', async () => { + const eventContext = {} + const extension = { + eventContext, + + register: (eventEmitter) => { + eventEmitter.on('beforeBuildPlaybook', ({ args, env, schema }) => { + eventContext.before = 'called' + args.push('--pipeline-extension') + args.push('./pipeline-extensions/test-extension.js') + }) + eventEmitter.on('afterBuildPlaybook', (playbook) => { + eventContext.after = 'called' + }) + }, + } + const env = {} + const playbook = await buildPlaybook(['--playbook', defaultSchemaSpec], env, undefined, + eventEmitter, [extension]) + expect(eventContext.before).to.equal('called') + expect(eventContext.after).to.equal('called') + expect(env.beforeLoaded).to.equal(undefined) + expect(playbook.afterLoaded).to.equal('called') + }) + + it('should accept pipeline extensions specified in playbook with config', async () => { + const env = {} + const 
playbook = await buildPlaybook(['--playbook', defaultSchemaSpecWithPipelineExtensionConfigs], env, undefined, + eventEmitter) + expect(env.beforeLoaded).to.equal(undefined) + expect(playbook.afterLoaded).to.equal('called') + const configs = playbook.configs + expect(configs.length).to.equal(2) + const config1 = configs[0] + expect(config1).to.deep.equal({ param1: 'foo', param2: 'bar' }) + const config2 = configs[1] + expect(config2).to.deep.equal({ param3: 'foo', param4: { subparam1: 3, arrayparam: ['foo', 'bar'] } }) + }) + }) }) diff --git a/packages/playbook-builder/test/fixtures/default-schema-spec-pipeline-extension-config-sample.yml b/packages/playbook-builder/test/fixtures/default-schema-spec-pipeline-extension-config-sample.yml new file mode 100644 index 000000000..b48e6fc81 --- /dev/null +++ b/packages/playbook-builder/test/fixtures/default-schema-spec-pipeline-extension-config-sample.yml @@ -0,0 +1,56 @@ +runtime: + cache_dir: ./.antora-cache + fetch: true +site: + url: https://example.com + title: Example site + start_page: 1.0@server::intro + keys: + google_analytics: 'XX-123456' +content: + branches: v* + edit_url: '{web_url}/blob/{refname}/{path}' + sources: + - url: https://gitlab.com/antora/demo/demo-component-a.git + branches: [master, v*] +ui: + bundle: + url: ./../ui/build/ui-bundles.zip + start_path: dark-theme + default_layout: default + supplemental_files: + - path: head-meta.hbs + contents: +urls: + html_extension_style: indexify + redirect_facility: nginx +asciidoc: + attributes: + allow-uri-read: true + idprefix: '' + toc: false + uri-project: https://antora.org + extensions: + - asciidoctor-plantuml + - ./lib/shout-block +git: + credentials: + path: ./.git-credentials +output: + dir: ./_site + destinations: + - provider: archive + path: ./site.zip +extensions: +- path: ./pipeline-extensions/test-extension + config: + param1: foo + param2: bar +- path: ./pipeline-extensions/test-extension + config: + param3: foo + param4: + subparam1: 3 + 
arrayparam: + - foo + - bar diff --git a/packages/playbook-builder/test/fixtures/default-schema-spec-pipeline-extension-sample.yml b/packages/playbook-builder/test/fixtures/default-schema-spec-pipeline-extension-sample.yml new file mode 100644 index 000000000..c2f154c18 --- /dev/null +++ b/packages/playbook-builder/test/fixtures/default-schema-spec-pipeline-extension-sample.yml @@ -0,0 +1,45 @@ +runtime: + cache_dir: ./.antora-cache + fetch: true +site: + url: https://example.com + title: Example site + start_page: 1.0@server::intro + keys: + google_analytics: 'XX-123456' +content: + branches: v* + edit_url: '{web_url}/blob/{refname}/{path}' + sources: + - url: https://gitlab.com/antora/demo/demo-component-a.git + branches: [master, v*] +ui: + bundle: + url: ./../ui/build/ui-bundles.zip + start_path: dark-theme + default_layout: default + supplemental_files: + - path: head-meta.hbs + contents: +urls: + html_extension_style: indexify + redirect_facility: nginx +asciidoc: + attributes: + allow-uri-read: true + idprefix: '' + toc: false + uri-project: https://antora.org + extensions: + - asciidoctor-plantuml + - ./lib/shout-block +git: + credentials: + path: ./.git-credentials +output: + dir: ./_site + destinations: + - provider: archive + path: ./site.zip +extensions: +- path: ./pipeline-extensions/test-extension diff --git a/packages/playbook-builder/test/fixtures/pipeline-extensions/test-extension.js b/packages/playbook-builder/test/fixtures/pipeline-extensions/test-extension.js new file mode 100644 index 000000000..748c4bc60 --- /dev/null +++ b/packages/playbook-builder/test/fixtures/pipeline-extensions/test-extension.js @@ -0,0 +1,16 @@ +'use strict' + +module.exports.register = (eventEmitter, config) => { + eventEmitter.on('beforeBuildPlaybook', ({args, env, schema}) => { + env.beforeLoaded = 'called' + }) + eventEmitter.on('afterBuildPlaybook', (playbook) => { + playbook.afterLoaded = 'called' + if (config) { + const configs = playbook.configs || 
(playbook.configs = []) + configs.push(config) + } + }) +} + + diff --git a/packages/site-generator-default/lib/generate-site.js b/packages/site-generator-default/lib/generate-site.js index 13aa4a6cf..01e35d3e6 100644 --- a/packages/site-generator-default/lib/generate-site.js +++ b/packages/site-generator-default/lib/generate-site.js @@ -1,5 +1,6 @@ 'use strict' +const EventEmitter = require('events') const aggregateContent = require('@antora/content-aggregator') const buildNavigation = require('@antora/navigation-builder') const buildPlaybook = require('@antora/playbook-builder') @@ -13,20 +14,53 @@ const publishSite = require('@antora/site-publisher') const { resolveConfig: resolveAsciiDocConfig } = require('@antora/asciidoc-loader') async function generateSite (args, env) { - const playbook = buildPlaybook(args, env) - const asciidocConfig = resolveAsciiDocConfig(playbook) + const baseEmitter = new EventEmitter() + + const eventEmitter = { + + emit: async (name, ...args) => { + const promises = [] + baseEmitter.emit(name, promises, ...args) + promises.length && await Promise.all(promises) + }, + + on: (name, listener) => baseEmitter.on(name, (promises, ...args) => promises.push(listener(...args))), + } + const playbook = await buildPlaybook(args, env, undefined, eventEmitter) + const asciidocConfig = await wrapSync(eventEmitter, 'ResolveAsciiDocConfig', resolveAsciiDocConfig, playbook, { playbook }) const [contentCatalog, uiCatalog] = await Promise.all([ - aggregateContent(playbook).then((contentAggregate) => classifyContent(playbook, contentAggregate, asciidocConfig)), - loadUi(playbook), + wrapAsync(eventEmitter, 'AggregateContent', aggregateContent, playbook, [playbook]) + .then((contentAggregate) => wrapSync(eventEmitter, 'ClassifyContent', classifyContent, playbook, { playbook, contentAggregate, asciidocConfig })), + wrapAsync(eventEmitter, 'LoadUi', loadUi, playbook, [playbook]), ]) - const pages = convertDocuments(contentCatalog, asciidocConfig) - const 
navigationCatalog = buildNavigation(contentCatalog, asciidocConfig) - const composePage = createPageComposer(playbook, contentCatalog, uiCatalog, env) - pages.forEach((page) => composePage(page, contentCatalog, navigationCatalog)) - const siteFiles = mapSite(playbook, pages).concat(produceRedirects(playbook, contentCatalog)) + const pages = await wrapAsync(eventEmitter, 'ConvertDocuments', convertDocuments, playbook, { contentCatalog, asciidocConfig }) + const navigationCatalog = await wrapSync(eventEmitter, 'BuildNavigation', buildNavigation, playbook, { contentCatalog, asciidocConfig }) + const composePage = await wrapSync(eventEmitter, 'CreatePageComposer', createPageComposer, playbook, { playbook, contentCatalog, uiCatalog, env }) + await Promise.all(pages.map((page) => wrapSync(eventEmitter, 'ComposePage', composePage, playbook, { page, contentCatalog, navigationCatalog }))) + const siteFiles = (await wrapSync(eventEmitter, 'MapSite', mapSite, playbook, { playbook, pages })) + .concat(await wrapSync(eventEmitter, 'ProduceRedirects', produceRedirects, playbook, { playbook, contentCatalog })) if (playbook.site.url) siteFiles.push(composePage(create404Page())) const siteCatalog = { getAll: () => siteFiles } - return publishSite(playbook, [contentCatalog, uiCatalog, siteCatalog]) + return wrapAsync(eventEmitter, 'PublishSite', publishSite, playbook, { playbook, catalogs: [contentCatalog, uiCatalog, siteCatalog] }) +} + +async function wrapAsync (eventEmitter, name, funct, playbook, argObject) { + const args = Object.values(argObject) + 'playbook' in argObject || (argObject.playbook = playbook) + await eventEmitter.emit('before' + name, argObject) + return funct(...args, eventEmitter).then(async (result) => { + await eventEmitter.emit('after' + name, playbook, result) + return result + }) +} + +async function wrapSync (eventEmitter, name, funct, playbook, argObject) { + const args = Object.values(argObject) + 'playbook' in argObject || (argObject.playbook = 
playbook) + await eventEmitter.emit('before' + name, argObject) + const result = funct(...args, eventEmitter) + await eventEmitter.emit('after' + name, playbook, result) + return result } function create404Page () { diff --git a/packages/site-generator-default/test/fixtures/argument-test-extension.js b/packages/site-generator-default/test/fixtures/argument-test-extension.js new file mode 100644 index 000000000..8fa990051 --- /dev/null +++ b/packages/site-generator-default/test/fixtures/argument-test-extension.js @@ -0,0 +1,94 @@ +'use strict' + +module.exports.register = (eventEmitter) => { + + const eventContext = {} + + eventEmitter.on('beforeBuildPlaybook', ({args, env, schema}) => { + //should not be called; only plugins supplied by the site generator can receive this event. + eventContext.beforeBuildPlaybook = 'called' + }) + eventEmitter.on('afterBuildPlaybook', (playbook) => { + eventContext.afterBuildPlaybook = {playbook} + }) + + eventEmitter.on('beforeResolveAsciiDocConfig', ({playbook}) => { + eventContext.beforeResolveAsciiDocConfig = {playbook} + }) + eventEmitter.on('afterResolveAsciiDocConfig', (playbook, asciidocConfig) => { + eventContext.afterResolveAsciiDocConfig = {asciidocConfig} + }) + + eventEmitter.on('beforeAggregateContent', ({playbook}) => { + eventContext.beforeAggregateContent = {playbook} + }) + eventEmitter.on('afterAggregateContent', async (playbook, contentAggregate) => { + eventContext.afterAggregateContent = {contentAggregate} + }) + + eventEmitter.on('beforeClassifyContent', async ({playbook, contentAggregate, asciidocConfig}) => { + eventContext.beforeClassifyContent = {playbook, contentAggregate, asciidocConfig} + }) + eventEmitter.on('afterClassifyContent', (playbook, contentCatalog) => { + eventContext.afterClassifyContent = {contentCatalog} + }) + + eventEmitter.on('beforeLoadUi', ({playbook}) => { + eventContext.beforeLoadUi = {playbook} + }) + eventEmitter.on('afterLoadUi', (playbook, uiCatalog) => { + 
eventContext.afterLoadUi = {uiCatalog} + }) + + eventEmitter.on('beforeConvertDocuments', ({contentCatalog, asciidocConfig, playbook}) => { + eventContext.beforeConvertDocuments = {contentCatalog, asciidocConfig, playbook} + }) + eventEmitter.on('afterConvertDocuments', (playbook, pages) => { + eventContext.afterConvertDocuments = {pages} + }) + + eventEmitter.on('beforeBuildNavigation', ({contentCatalog, asciidocConfig, playbook}) => { + eventContext.beforeBuildNavigation = {contentCatalog, asciidocConfig, playbook} + }) + eventEmitter.on('afterBuildNavigation', (navigationCatalog ) => { + eventContext.afterBuildNavigation = {navigationCatalog} + }) + + eventEmitter.on('beforeCreatePageComposer', ({playbook, contentCatalog, uiCatalog, env}) => { + eventContext.beforeCreatePageComposer = {playbook, contentCatalog, uiCatalog, env} + }) + eventEmitter.on('afterCreatePageComposer', (playbook, composePage) => { + eventContext.afterCreatePageComposer = {composePage} + }) + + eventEmitter.on('beforeComposePage', ({page, contentCatalog, navigationCatalog, playbook}) => { + eventContext.beforeComposePage = {page, contentCatalog, navigationCatalog, playbook} + }) + eventEmitter.on('afterComposePage', (playbook, page) => { + eventContext.afterComposePage = {page} + }) + + eventEmitter.on('beforeMapSite', ({playbook, pages}) => { + eventContext.beforeMapSite = {playbook, pages} + }) + eventEmitter.on('afterMapSite', (playbook, siteFiles) => { + eventContext.afterMapSite = {siteFiles} + }) + + eventEmitter.on('beforeProduceRedirects', ({playbook, contentCatalog}) => { + eventContext.beforeProduceRedirects = {playbook, contentCatalog} + }) + eventEmitter.on('afterProduceRedirects', (playbook, siteFiles) => { + eventContext.afterProduceRedirects = {siteFiles} + }) + + eventEmitter.on('beforePublishSite', ({playbook, catalogs}) => { + eventContext.beforePublishSite = {playbook, catalogs} + }) + eventEmitter.on('afterPublishSite', (playbook, reports) => { + 
eventContext.afterPublishSite = {reports} + reports.push(eventContext) + }) + +} + diff --git a/packages/site-generator-default/test/generate-site-test.js b/packages/site-generator-default/test/generate-site-test.js index fa657c354..1b92393cb 100644 --- a/packages/site-generator-default/test/generate-site-test.js +++ b/packages/site-generator-default/test/generate-site-test.js @@ -712,6 +712,73 @@ describe('generateSite()', function () { }).timeout(timeoutOverride) }) + it('should register pipeline extension configured in playbook; events should be as expected', async () => { + fs.outputFileSync( + ospath.resolve(WORK_DIR, 'pipeline-extensions', 'argument-test-extension.js'), + fs.readFileSync(ospath.resolve(FIXTURES_DIR, 'argument-test-extension.js'), 'utf8') + ) + playbookSpec.extensions = ['./pipeline-extensions/argument-test-extension'] + fs.writeJsonSync(playbookFile, playbookSpec, { spaces: 2 }) + const reports = await generateSite(['--playbook', playbookFile], env) + const eventContext = reports[reports.length - 1] + expect(eventContext).to.be.an('object') + expect(eventContext.beforeBuildPlaybook).to.be.a('undefined') + expect(eventContext.afterBuildPlaybook.playbook).to.be.an('object') + + expect(eventContext.beforeResolveAsciiDocConfig.playbook).to.be.an('object') + expect(eventContext.afterResolveAsciiDocConfig.asciidocConfig).to.be.an('object') + + expect(eventContext.beforeAggregateContent.playbook).to.be.an('object') + expect(eventContext.afterAggregateContent.contentAggregate).to.be.an('array') + expect(eventContext.afterAggregateContent.contentAggregate.length).to.equal(1) + + expect(eventContext.beforeClassifyContent.playbook).to.be.an('object') + expect(eventContext.beforeClassifyContent.contentAggregate).to.be.an('array') + expect(eventContext.beforeClassifyContent.contentAggregate.length).to.equal(1) + expect(eventContext.beforeClassifyContent.asciidocConfig).to.be.an('object') + 
expect(eventContext.afterClassifyContent.contentCatalog).to.be.an('object') + + expect(eventContext.beforeLoadUi.playbook).to.be.an('object') + expect(eventContext.afterLoadUi.uiCatalog).to.be.an('object') + + expect(eventContext.beforeConvertDocuments.contentCatalog).to.be.an('object') + expect(eventContext.beforeConvertDocuments.asciidocConfig).to.be.an('object') + expect(eventContext.afterConvertDocuments.pages).to.be.an('array') + expect(eventContext.afterConvertDocuments.pages.length).to.equal(3) + + expect(eventContext.beforeBuildNavigation.contentCatalog).to.be.an('object') + expect(eventContext.beforeBuildNavigation.asciidocConfig).to.be.an('object') + expect(eventContext.afterBuildNavigation.navigationCatalog).to.be.an('object') + + expect(eventContext.beforeCreatePageComposer.playbook).to.be.an('object') + expect(eventContext.beforeCreatePageComposer.contentCatalog).to.be.an('object') + expect(eventContext.beforeCreatePageComposer.uiCatalog).to.be.an('object') + expect(eventContext.beforeCreatePageComposer.env).to.be.an('object') + expect(eventContext.afterCreatePageComposer.composePage).to.be.an('function') + + expect(eventContext.beforeComposePage.page).to.be.an('object') + expect(eventContext.beforeComposePage.contentCatalog).to.be.an('object') + expect(eventContext.beforeComposePage.navigationCatalog).to.be.an('object') + expect(eventContext.afterComposePage.page).to.be.an('object') + + expect(eventContext.beforeMapSite.playbook).to.be.an('object') + expect(eventContext.beforeMapSite.pages).to.be.an('array') + expect(eventContext.beforeMapSite.pages.length).to.equal(3) + expect(eventContext.afterMapSite.siteFiles).to.be.an('array') + expect(eventContext.afterMapSite.siteFiles.length).to.equal(0) + + expect(eventContext.beforeProduceRedirects.playbook).to.be.an('object') + expect(eventContext.beforeProduceRedirects.contentCatalog).to.be.an('object') + expect(eventContext.afterProduceRedirects.siteFiles).to.be.an('array') + 
expect(eventContext.afterProduceRedirects.siteFiles.length).to.equal(0) + + expect(eventContext.beforePublishSite.playbook).to.be.an('object') + expect(eventContext.beforePublishSite.catalogs).to.be.an('array') + expect(eventContext.beforePublishSite.catalogs.length).to.equal(3) + expect(eventContext.afterPublishSite.reports).to.be.an('array') + expect(eventContext.afterPublishSite.reports.length).to.equal(2) + }).timeout(timeoutOverride) + // to test: // - don't pass environment variable map to generateSite // - pass environment variable override to generateSite -- GitLab From 5677c9cda7a3afffaaa84b567415528cf6ae70fe Mon Sep 17 00:00:00 2001 From: David Jencks Date: Fri, 28 Feb 2020 22:28:18 -0800 Subject: [PATCH 2/4] Upgrade to isomorphic-git 1.7.8. See comments in issue --- package.json | 2 +- .../lib/aggregate-content.js | 143 ++++++++---------- .../lib/git-credential-manager-store.js | 64 +++++--- .../lib/resolve-path-globs.js | 10 +- packages/content-aggregator/package.json | 5 +- .../test/aggregate-content-test.js | 53 +++---- test/repository-builder.js | 25 +-- yarn.lock | 43 +++--- 8 files changed, 165 insertions(+), 180 deletions(-) diff --git a/package.json b/package.json index 2e31befc6..5822140aa 100644 --- a/package.json +++ b/package.json @@ -18,7 +18,7 @@ "eslint-plugin-promise": "~4.2", "eslint-plugin-standard": "~4.0", "gulp": "~4.0", - "isomorphic-git": "0.78.5", + "isomorphic-git": "~1.0.0", "js-yaml": "~3.14", "mocha": "~8.1", "node-git-server": "~0.6", diff --git a/packages/content-aggregator/lib/aggregate-content.js b/packages/content-aggregator/lib/aggregate-content.js index ea9870a8b..9e880fb16 100644 --- a/packages/content-aggregator/lib/aggregate-content.js +++ b/packages/content-aggregator/lib/aggregate-content.js @@ -11,6 +11,7 @@ const getCacheDir = require('cache-directory') const GitCredentialManagerStore = require('./git-credential-manager-store') const git = require('isomorphic-git') const invariably = { false: () => false, void: 
() => {} } +const http = require('isomorphic-git/http/node') const matcher = require('matcher') const mimeTypes = require('./mime-types-with-asciidoc') const MultiProgress = require('multi-progress')(require('progress')) @@ -28,7 +29,6 @@ const { CONTENT_CACHE_FOLDER, CONTENT_GLOB, FILE_MODES, - GIT_CORE, GIT_OPERATION_LABEL_LENGTH, GIT_PROGRESS_PHASES, VALID_STATE_FILENAME, @@ -104,39 +104,37 @@ function aggregateContent (playbook, eventEmitter) { const { cacheDir, fetch, silent, quiet } = playbook.runtime const progress = !quiet && !silent && createProgress(sourcesByUrl.keys(), process.stdout) const { ensureGitSuffix, credentials } = Object.assign({ ensureGitSuffix: true }, playbook.git) - const credentialManager = registerGitPlugins(credentials, startDir).get('credentialManager') - return ensureCacheDir(cacheDir, startDir) - .then((resolvedCacheDir) => - Promise.all( - Array.from(sourcesByUrl, ([url, sources]) => - loadRepository(url, { - cacheDir: resolvedCacheDir, - credentialManager, - fetchTags: tagsSpecified(sources, tags), - progress, - fetch, - startDir, - ensureGitSuffix, - }).then(({ repo, authStatus }) => - Promise.all( - sources.map((source) => { - source = Object.assign({ branches, editUrl, tags }, source) - // NOTE if repository is managed (has a url), we can assume the remote name is origin - // TODO if the repo has no remotes, then remoteName should be undefined - const remoteName = repo.url ? 
'origin' : source.remote || 'origin' - return collectFilesFromSource(source, repo, remoteName, authStatus, eventEmitter) - }) - ) + const credentialManager = new GitCredentialManagerStore().configure({ credentials, startDir }) + return ensureCacheDir(cacheDir, startDir).then((resolvedCacheDir) => + Promise.all( + Array.from(sourcesByUrl, ([url, sources]) => + loadRepository(url, { + cacheDir: resolvedCacheDir, + credentialManager, + fetchTags: tagsSpecified(sources, tags), + progress, + fetch, + startDir, + ensureGitSuffix, + }).then(({ repo, authStatus }) => + Promise.all( + sources.map((source) => { + source = Object.assign({ branches, editUrl, tags }, source) + // NOTE if repository is managed (has a url), we can assume the remote name is origin + // TODO if the repo has no remotes, then remoteName should be undefined + const remoteName = repo.url ? 'origin' : source.remote || 'origin' + return collectFilesFromSource(source, repo, remoteName, authStatus, eventEmitter) + }) ) ) ) - .then(buildAggregate) - .catch((err) => { - progress && progress.terminate() - throw err - }) ) - .finally(unregisterGitPlugins) + .then(buildAggregate) + .catch((err) => { + progress && progress.terminate() + throw err + }) + ) } function buildAggregate (componentVersionBuckets) { @@ -157,9 +155,19 @@ async function loadRepository (url, opts) { let credentials ;({ displayUrl, url, credentials } = extractCredentials(url)) dir = ospath.join(opts.cacheDir, generateCloneFolderName(displayUrl)) - // NOTE the presence of the url property on the repo object implies the repository is remote - repo = { core: GIT_CORE, dir, gitdir: dir, url, noGitSuffix: !opts.ensureGitSuffix, noCheckout: true } const credentialManager = opts.credentialManager + credentials && await credentialManager.add(url, credentials) + // NOTE the presence of the url property on the repo object implies the repository is remote + repo = { + fs, + http, + dir, + gitdir: dir, + url, + noGitSuffix: !opts.ensureGitSuffix, + 
noCheckout: true, + ...credentialManager.callbacks(), + } const validStateFile = ospath.join(repo.gitdir, VALID_STATE_FILENAME) try { await fs.access(validStateFile) @@ -170,18 +178,18 @@ async function loadRepository (url, opts) { .fetch(fetchOpts) .then(() => { authStatus = credentials ? 'auth-embedded' : credentialManager.status({ url }) ? 'auth-required' : undefined - return git.config(Object.assign({ path: 'remote.origin.private', value: authStatus }, repo)) + return git.setConfig(Object.assign({ path: 'remote.origin.private', value: authStatus }, repo)) }) .catch((fetchErr) => { - fetchOpts.emitter && fetchOpts.emitter.emit('error', fetchErr) - if (fetchErr.name === git.E.HTTPError && fetchErr.data.statusCode === 401) fetchErr.rethrow = true + fetchOpts.onProgress && fetchOpts.onProgress({ phase: 'error', err: fetchErr }) + if (fetchErr.code === git.Errors.HttpError.code && fetchErr.data.statusCode === 401) fetchErr.rethrow = true throw fetchErr }) .then(() => fs.createFile(validStateFile).catch(invariably.void)) - .then(() => fetchOpts.emitter && fetchOpts.emitter.emit('complete')) + .then(() => fetchOpts.onProgress && fetchOpts.onProgress({ phase: 'complete' })) } else { // NOTE use cached value from previous fetch - authStatus = await git.config(Object.assign({ path: 'remote.origin.private' }, repo)) + authStatus = await git.getConfig(Object.assign({ path: 'remote.origin.private' }, repo)) } } catch (gitErr) { await fs.remove(dir) @@ -192,21 +200,21 @@ async function loadRepository (url, opts) { .then(() => git.resolveRef(Object.assign({ ref: 'HEAD', depth: 1 }, repo))) .then(() => { authStatus = credentials ? 'auth-embedded' : credentialManager.status({ url }) ? 
'auth-required' : undefined - return git.config(Object.assign({ path: 'remote.origin.private', value: authStatus }, repo)) + return git.setConfig(Object.assign({ path: 'remote.origin.private', value: authStatus }, repo)) }) .catch(async (cloneErr) => { await fs.remove(dir) // FIXME triggering the error handler here causes assertion problems in the test suite - //fetchOpts.emitter && fetchOpts.emitter.emit('error', cloneErr) + //fetchOpts.onProgress && fetchOpts.onProgress({ phase: 'error', err: cloneErr }) throw transformGitCloneError(cloneErr, displayUrl) }) .then(() => fs.createFile(validStateFile).catch(invariably.void)) - .then(() => fetchOpts.emitter && fetchOpts.emitter.emit('complete')) + .then(() => fetchOpts.onProgress && fetchOpts.onProgress({ phase: 'complete' })) } } else if (await isLocalDirectory((dir = expandPath(url, '~+', opts.startDir)))) { repo = (await isLocalDirectory(ospath.join(dir, '.git'))) - ? { core: GIT_CORE, dir } - : { core: GIT_CORE, dir, gitdir: dir, noCheckout: true } + ? { fs, http, dir } + : { fs, http, dir, gitdir: dir, noCheckout: true } await git.resolveRef(Object.assign({ ref: 'HEAD', depth: 1 }, repo)).catch(() => { throw new Error( `Local content source must be a git repository: ${dir}${url !== dir ? ' (url: ' + url + ')' : ''}` @@ -228,6 +236,7 @@ function extractCredentials (url) { const [, scheme, username, password, rest] = url.match(URL_AUTH_EXTRACTOR_RX) const displayUrl = (url = scheme + rest) // NOTE if only username is present, assume it's an oauth token + // TODO this requires reversing in the git-credential-manager-store, as iso-git now expects just username/password. const credentials = username ? (password == null ? 
{ token: username } : { username, password }) : {} return { displayUrl, url, credentials } } else if (url.startsWith('git@')) { @@ -468,7 +477,7 @@ function getGitTree (repo, oid, startPath) { .readTree(Object.assign({ oid, filepath: startPath }, repo)) .catch(({ code }) => { throw new Error( - `the start path '${startPath}' ${code === git.E.ResolveTreeError ? 'is not a directory' : 'does not exist'}` + `the start path '${startPath}' ${code === git.Errors.ObjectTypeError.code ? 'is not a directory' : 'does not exist'}` ) }) .then(({ tree }) => tree) @@ -531,7 +540,8 @@ function entryToFile (entry) { stat.mode = entry.mode stat.mtime = undefined stat.size = contents.length - return new File({ path: entry.path, contents, stat }) + //TODO is there a more efficient way to get a UInt8Array into a File? + return new File({ path: entry.path, contents: Buffer.from(contents), stat }) }) } @@ -621,8 +631,8 @@ function assignFileProperties (file, origin) { } function getFetchOptions (repo, progress, url, credentials, fetchTags, operation) { - const opts = Object.assign({ depth: 1 }, credentials, repo) - if (progress) opts.emitter = createProgressEmitter(progress, url, operation) + const opts = Object.assign({ depth: 1, http }, credentials, repo) + if (progress) opts.onProgress = createProgressCallback(progress, url, operation) if (operation === 'fetch') { opts.prune = true if (fetchTags) opts.tags = opts.pruneTags = true @@ -649,7 +659,7 @@ function createProgress (urls, term) { } } -function createProgressEmitter (progress, progressLabel, operation) { +function createProgressCallback (progress, progressLabel, operation) { const progressBar = progress.newBar(formatProgressBar(progressLabel, progress.maxLabelWidth, operation), { complete: '#', incomplete: '-', @@ -660,10 +670,12 @@ function createProgressEmitter (progress, progressLabel, operation) { // NOTE leave room for indeterminate progress at end of bar; this isn't strictly needed for a bare clone progressBar.scaleFactor 
= Math.max(0, (ticks - 1) / ticks) progressBar.tick(0) - return new EventEmitter() - .on('progress', onGitProgress.bind(null, progressBar)) - .on('complete', onGitComplete.bind(null, progressBar)) - .on('error', onGitComplete.bind(null, progressBar)) + return (gitProgressEvent) => { + const phase = gitProgressEvent.phase + if (phase === 'complete') onGitComplete(progressBar, gitProgressEvent) + else if (phase === 'error') onGitComplete(progressBar, gitProgressEvent) + else onGitProgress(progressBar, gitProgressEvent) + } } function formatProgressBar (label, maxLabelWidth, operation) { @@ -689,7 +701,7 @@ function onGitProgress (progressBar, { phase, loaded, total }) { } } -function onGitComplete (progressBar, err) { +function onGitComplete (progressBar, { err }) { if (err) { // TODO: could use progressBar.interrupt() to replace bar with message instead progressBar.chars.incomplete = '?' @@ -728,7 +740,7 @@ function generateCloneFolderName (url) { * @returns {String} The URL of the specified remote, if present. */ async function resolveRemoteUrl (repo, remoteName) { - return git.config(Object.assign({ path: 'remote.' + remoteName + '.url' }, repo)).then((url) => { + return git.getConfig(Object.assign({ path: 'remote.' + remoteName + '.url' }, repo)).then((url) => { if (!url) return if (url.startsWith('https://') || url.startsWith('http://')) { return ~url.indexOf('@') ? 
url.replace(URL_AUTH_CLEANER_RX, '$1') : url @@ -758,27 +770,6 @@ function tagsSpecified (sources, defaultTags) { }) } -function registerGitPlugins (config, startDir) { - const plugins = git.cores.create(GIT_CORE) - if (!plugins.has('fs')) plugins.set('fs', Object.assign({ _managed: true }, fs)) - let credentialManager - if (plugins.has('credentialManager')) { - credentialManager = plugins.get('credentialManager') - if (typeof credentialManager.configure === 'function') credentialManager.configure({ config, startDir }) - if (typeof credentialManager.status !== 'function') { - plugins.set('credentialManager', Object.assign({}, credentialManager, { status () {} })) - } - } else { - ;(credentialManager = new GitCredentialManagerStore().configure({ config, startDir }))._managed = true - plugins.set('credentialManager', credentialManager) - } - return plugins -} - -function unregisterGitPlugins () { - git.cores.create(GIT_CORE).forEach((val, key, map) => val._managed && map.delete(key)) -} - /** * Expands the content cache directory path and ensures it exists. * @@ -808,7 +799,7 @@ function transformGitCloneError (err, displayUrl) { const { code, data, message, name, stack } = err let wrappedMsg let trimMessage - if (code === git.E.HTTPError) { + if (code === git.Errors.HttpError.code) { if (data.statusCode === 401) { wrappedMsg = err.rejected ? 
'Content repository not found or credentials were rejected' @@ -819,7 +810,7 @@ function transformGitCloneError (err, displayUrl) { wrappedMsg = message trimMessage = true } - } else if (code === git.E.RemoteUrlParseError || code === git.E.UnknownTransportError) { + } else if (code === git.Errors.UrlParseError.code || code === git.Errors.UnknownTransportError.code) { wrappedMsg = 'Content source uses an unsupported transport protocol' } else if (code === 'ENOTFOUND') { wrappedMsg = 'Content repository host could not be resolved: ' + err.host + ':' + err.port diff --git a/packages/content-aggregator/lib/git-credential-manager-store.js b/packages/content-aggregator/lib/git-credential-manager-store.js index d44e4d604..2ab785cbf 100644 --- a/packages/content-aggregator/lib/git-credential-manager-store.js +++ b/packages/content-aggregator/lib/git-credential-manager-store.js @@ -6,13 +6,13 @@ const fs = require('fs-extra') const ospath = require('path') class GitCredentialManagerStore { - configure ({ config, startDir }) { + configure ({ credentials, startDir }) { this.entries = undefined this.urls = {} - if ((this.contents = (config = config || {}).contents)) { + if ((this.contents = (credentials = credentials || {}).contents)) { this.path = undefined - } else if (config.path) { - this.path = expandPath(config.path, '~+', startDir) + } else if (credentials.path) { + this.path = expandPath(credentials.path, '~+', startDir) } else { this.path = undefined } @@ -82,28 +82,52 @@ class GitCredentialManagerStore { })) } - async fill ({ url }) { - this.urls[url] = 'requested' + async add (url, credentials) { return this.load().then((entries) => { - if (!Object.keys(entries).length) return + // TODO Should we have labeled "user only" as "token"? Really? 
+ if (credentials.token) { + credentials = { username: credentials.token } + } const { hostname, pathname } = new URL(url) - return entries[hostname + pathname] || entries[hostname] + if (pathname === '/') { + entries[hostname] = credentials + } else { + entries[hostname + pathname] = credentials + if (!pathname.endsWith('.git')) entries[hostname + pathname + '.git'] = credentials + } }) } - async approved ({ url }) { - this.urls[url] = 'approved' - } + callbacks () { + return { + onAuth: async (url) => { + this.urls[url] = 'requested' + return this.load().then((entries) => { + if (!Object.keys(entries).length) return + const { hostname, pathname } = new URL(url) + return entries[hostname + pathname] || entries[hostname] + }) + }, - async rejected ({ url, auth }) { - this.urls[url] = 'rejected' - const statusCode = 401 - const statusMessage = 'HTTP Basic: Access Denied' - const err = new Error(`HTTP Error: ${statusCode} ${statusMessage}`) - err.name = err.code = 'HTTPError' - err.data = { statusCode, statusMessage } - if (auth) err.rejected = true - throw err + onAuthSuccess: async (url) => { + this.urls[url] = 'approved' + }, + + onAuthFailure: async (url, auth) => { + this.urls[url] = 'rejected' + //This imitates isomorphic-git HttpError. 
+ const statusCode = 401 + const statusMessage = 'HTTP Basic: Access Denied' + const err = new Error(`HTTP Error: ${statusCode} ${statusMessage}`) + err.name = err.code = 'HttpError' + err.data = { + statusCode, + statusMessage, + } + if (auth) err.rejected = true + throw err + }, + } } status ({ url }) { diff --git a/packages/content-aggregator/lib/resolve-path-globs.js b/packages/content-aggregator/lib/resolve-path-globs.js index 635a966c4..b6b8e2412 100644 --- a/packages/content-aggregator/lib/resolve-path-globs.js +++ b/packages/content-aggregator/lib/resolve-path-globs.js @@ -123,11 +123,11 @@ function listDirentsFs (base, path) { function listDirentsGit (repo, treeOid) { return git - .readObject({ ...repo, oid: treeOid, filepath: '' }) - .catch(() => ({ object: {} })) - .then(({ object: { entries } }) => - entries - ? entries.map(({ type, oid, path: name }) => ({ name, oid, isDirectory: invariably[type === 'tree'] })) + .readTree({ ...repo, oid: treeOid, filepath: '' }) + .catch(() => ({ tree: {} })) + .then(({ tree }) => + tree + ? 
tree.map(({ type, oid, path: name }) => ({ name, oid, isDirectory: invariably[type === 'tree'] })) : [] ) } diff --git a/packages/content-aggregator/package.json b/packages/content-aggregator/package.json index d76784ee1..01a1d396c 100644 --- a/packages/content-aggregator/package.json +++ b/packages/content-aggregator/package.json @@ -8,7 +8,8 @@ "Dan Allen ", "Sarah White ", "Hubert SABLONNIÈRE ", - "Balachandran Sivakumar " + "Balachandran Sivakumar ", + "David Jencks " ], "homepage": "https://antora.org", "repository": "gitlab:antora/antora", @@ -22,7 +23,7 @@ "cache-directory": "~2.0", "camelcase-keys": "~6.2", "fs-extra": "~9.0", - "isomorphic-git": "0.78.5", + "isomorphic-git": "~1.7.8", "js-yaml": "~3.14", "matcher": "~3.0", "mime-types": "~2.1", diff --git a/packages/content-aggregator/test/aggregate-content-test.js b/packages/content-aggregator/test/aggregate-content-test.js index 77066f6da..8b6b1dda5 100644 --- a/packages/content-aggregator/test/aggregate-content-test.js +++ b/packages/content-aggregator/test/aggregate-content-test.js @@ -3314,25 +3314,6 @@ describe('aggregateContent()', function () { }) }) - describe('fs plugin', () => { - afterEach(() => { - RepositoryBuilder.unregisterPlugin('fs', GIT_CORE) - }) - - it('should use fs object specified on git core', async () => { - const customFs = Object.assign({}, fs) - customFs.readFile = spy(customFs.readFile) - RepositoryBuilder.registerPlugin('fs', customFs, GIT_CORE) - const repoBuilder = new RepositoryBuilder(CONTENT_REPOS_DIR, FIXTURES_DIR) - await initRepoWithFiles(repoBuilder) - playbookSpec.content.sources.push({ url: repoBuilder.url }) - const aggregate = await aggregateContent(playbookSpec) - expect(aggregate).to.have.lengthOf(1) - expect(customFs.readFile).to.have.been.called() - expect(RepositoryBuilder.getPlugin('fs', GIT_CORE)).to.equal(customFs) - }) - }) - describe('authentication', () => { let authorizationHeaderValue let credentialsRequestCount @@ -3372,10 +3353,6 @@ 
describe('aggregateContent()', function () { } }) - afterEach(() => { - RepositoryBuilder.unregisterPlugin('credentialManager', GIT_CORE) - }) - after(() => { gitServer.authenticate = undefined process.env = originalEnv @@ -3453,7 +3430,7 @@ describe('aggregateContent()', function () { repoBuilder.url = urlWithoutAuth.replace('//', '//u:p@') playbookSpec.content.sources.push({ url: repoBuilder.url }) const aggregateContentDeferred = await deferExceptions(aggregateContent, playbookSpec) - const expectedErrorMessage = 'Content repository not found or requires credentials (url: ' + urlWithoutAuth + ')' + const expectedErrorMessage = 'Content repository not found or credentials were rejected (url: ' + urlWithoutAuth + ')' expect(aggregateContentDeferred).to.throw(expectedErrorMessage) expect(authorizationHeaderValue).to.equal('Basic ' + Buffer.from('u:p').toString('base64')) }) @@ -3466,7 +3443,7 @@ describe('aggregateContent()', function () { repoBuilder.url = urlWithoutAuth.replace('//', '//u:p@') playbookSpec.content.sources.push({ url: repoBuilder.url }) const aggregateContentDeferred = await deferExceptions(aggregateContent, playbookSpec) - const expectedErrorMessage = 'Content repository not found or requires credentials (url: ' + urlWithoutAuth + ')' + const expectedErrorMessage = 'Content repository not found or credentials were rejected (url: ' + urlWithoutAuth + ')' expect(aggregateContentDeferred).to.throw(expectedErrorMessage) expect(authorizationHeaderValue).to.equal('Basic ' + Buffer.from('u:p').toString('base64')) expect(CONTENT_CACHE_DIR) @@ -3481,7 +3458,8 @@ describe('aggregateContent()', function () { }) // NOTE this test would fail if the git client didn't automatically add the .git extension - it('should add .git extension to URL if missing', async () => { + //As of 1.0.0, isomorphic-git no longer does this. 
+ it.skip('should add .git extension to URL if missing', async () => { const repoBuilder = new RepositoryBuilder(CONTENT_REPOS_DIR, FIXTURES_DIR, { remote: { gitServerPort } }) await initRepoWithFiles(repoBuilder) const urlWithoutAuth = repoBuilder.url.replace('.git', '') @@ -3686,7 +3664,8 @@ describe('aggregateContent()', function () { }) }) - it('should use registered credential manager and enhance it with status method', async () => { + //need mechanism to supply auth callbacks + it.skip('should use registered credential manager and enhance it with status method', async () => { const credentialManager = { async fill ({ url }) { this.fulfilledUrl = url @@ -3707,7 +3686,8 @@ describe('aggregateContent()', function () { expect(RepositoryBuilder.getPlugin('credentialManager', GIT_CORE).fulfilledUrl).to.equal(repoBuilder.url) }) - it('should not enhance registered credential manager if it already contains a status method', async () => { + //need mechanism to supply auth callbacks + it.skip('should not enhance registered credential manager if it already contains a status method', async () => { const credentialManager = { async fill ({ url }) { this.fulfilledUrl = url @@ -3732,7 +3712,8 @@ describe('aggregateContent()', function () { expect(credentialManager.fulfilledUrl).to.equal(repoBuilder.url) }) - it('should invoke configure method on custom credential manager if defined', async () => { + //need mechanism to supply auth callbacks + it.skip('should invoke configure method on custom credential manager if defined', async () => { const credentialManager = { configure () { this.configured = true @@ -3884,7 +3865,7 @@ describe('aggregateContent()', function () { expect(aggregateContentDeferred) .to.throw(expectedErrorMessage) .with.property('stack') - .that.includes('Caused by: HTTPError: HTTP Error: 500 Internal Server Error') + .that.includes('Caused by: HttpError: HTTP Error: 500 Internal Server Error') }) it('should throw meaningful error if git client throws 
exception', async () => { @@ -3894,7 +3875,7 @@ describe('aggregateContent()', function () { const expectedErrorMessage = `${commonErrorMessage} Expected "001e# service=git-upload-pack" ` + `but received: 001e# service=git-upload-pack\n0007ref (url: ${url})` - const expectedCauseMessage = `RemoteDoesNotSupportSmartHTTP: ${commonErrorMessage}` + const expectedCauseMessage = `SmartHttpError: ${commonErrorMessage}` const aggregateContentDeferred = await deferExceptions(aggregateContent, playbookSpec) expect(aggregateContentDeferred) .to.throw(expectedErrorMessage) @@ -3909,7 +3890,7 @@ describe('aggregateContent()', function () { const expectedErrorMessage = `${commonErrorMessage} Expected "001e# service=git-upload-pack" ` + `but received: 001e# service=git-upload-pack\n0009ref\x00 (url: ${url})` - const expectedCauseMessage = `RemoteDoesNotSupportSmartHTTP: ${commonErrorMessage}` + const expectedCauseMessage = `SmartHttpError: ${commonErrorMessage}` const aggregateContentDeferred = await deferExceptions(aggregateContent, playbookSpec) expect(aggregateContentDeferred) .to.throw(expectedErrorMessage) @@ -3923,7 +3904,7 @@ describe('aggregateContent()', function () { const commonErrorMessage = 'Remote did not reply using the "smart" HTTP protocol.' 
const expectedErrorMessage = `${commonErrorMessage} Expected "001e# service=git-upload-pack" ` + `but received: 0000 (url: ${url})` - const expectedCauseMessage = `RemoteDoesNotSupportSmartHTTP: ${commonErrorMessage}` + const expectedCauseMessage = `SmartHttpError: ${commonErrorMessage}` const aggregateContentDeferred = await deferExceptions(aggregateContent, playbookSpec) expect(aggregateContentDeferred) .to.throw(expectedErrorMessage) @@ -3939,7 +3920,7 @@ describe('aggregateContent()', function () { expect(aggregateContentDeferred) .to.throw(expectedErrorMessage) .with.property('stack') - .that.includes('Caused by: HTTPError: HTTP Error: 404 Not Found') + .that.includes('Caused by: HttpError: HTTP Error: 404 Not Found') }) describe('should not append .git suffix to URL if git.ensureGitSuffix is disabled in playbook', () => { @@ -3969,13 +3950,13 @@ describe('aggregateContent()', function () { expect(aggregateContentDeferred) .to.throw(expectedErrorMessage) .with.property('stack') - .that.includes('Caused by: HTTPError: HTTP Error: 401 HTTP Basic: Access Denied') + .that.includes('Caused by: HttpError: HTTP Error: 401 Unauthorized') }) it('should not show auth information in progress bar label', async () => { const url = `http://0123456789@localhost:${serverPort}/401/invalid-repository.git` const sanitizedUrl = `http://localhost:${serverPort}/401/invalid-repository.git` - const expectedErrorMessage = 'Content repository not found or requires credentials (url: ' + sanitizedUrl + ')' + const expectedErrorMessage = 'Content repository not found or credentials were rejected (url: ' + sanitizedUrl + ')' return withMockStdout(async (lines) => { playbookSpec.runtime.quiet = false playbookSpec.content.sources.push({ url }) diff --git a/test/repository-builder.js b/test/repository-builder.js index e0f8d702a..d4774eafe 100644 --- a/test/repository-builder.js +++ b/test/repository-builder.js @@ -2,6 +2,7 @@ const fs = require('fs-extra') const git = 
require('isomorphic-git') +const http = require('isomorphic-git/http/node') const ospath = require('path') const vfs = require('vinyl-fs') const yaml = require('js-yaml') @@ -29,7 +30,7 @@ class RepositoryBuilder { this.url = `http://localhost:${this.gitServerPort}/${repoName}.git` } else if (this.bare) this.url += ospath.sep + '.git' // NOTE create new fs to clear index cache - this.repository = { fs: { ...fs }, dir: this.repoPath, gitdir: ospath.join(this.repoPath, '.git') } + this.repository = { fs: { ...fs }, http, dir: this.repoPath, gitdir: ospath.join(this.repoPath, '.git') } await git.init(this.repository) if (opts.empty) return this await (await this.addToWorktree('.gitignore')).addToWorktree('.gitattributes', '* text=auto eol=lf') @@ -60,19 +61,19 @@ class RepositoryBuilder { gitdir = ospath.join(dir, '.git') } // NOTE create new fs to clear index cache - this.repository = { fs: { ...fs }, dir, gitdir } + this.repository = { fs: { ...fs }, http, dir, gitdir } await git.resolveRef({ ...this.repository, ref: 'HEAD', depth: 1 }) return this } async clone (clonePath) { // NOTE create new fs to clear index cache - return git.clone({ fs: { ...fs }, dir: clonePath, url: this.url }) + return git.clone({ fs: { ...fs }, http, dir: clonePath, url: this.url }) } async checkoutBranch (branchName) { await git.branch({ ...this.repository, ref: branchName, checkout: true }).catch((e) => { - if (e.code === git.E.RefExistsError) { + if (e.code === git.Errors.AlreadyExistsError.code) { return git.checkout({ ...this.repository, ref: branchName }) } throw e @@ -82,14 +83,14 @@ class RepositoryBuilder { async checkoutBranch$1 (branchName, ref = 'HEAD') { await git.branch({ ...this.repository, ref: branchName }) - await git.fastCheckout({ ...this.repository, ref, noCheckout: true }) + await git.checkout({ ...this.repository, ref, noCheckout: true }) // NOTE isomorphic-git writes oid to HEAD, but we want to test case when it's a ref await 
fs.writeFile(ospath.join(this.repository.gitdir, 'HEAD'), `ref: refs/heads/${branchName}\n`) return this } async config (path, value) { - return git.config({ ...this.repository, path, value }) + return git.setConfig({ ...this.repository, path, value }) } async deleteBranch (ref) { @@ -219,18 +220,6 @@ class RepositoryBuilder { this.repository = undefined return this } - - static getPlugin (name, core = 'default') { - return git.cores.create(core).get(name) - } - - static registerPlugin (name, impl, core = 'default') { - git.cores.create(core).set(name, impl) - } - - static unregisterPlugin (name, core = 'default') { - git.cores.create(core).delete(name) - } } module.exports = RepositoryBuilder diff --git a/yarn.lock b/yarn.lock index 996d570df..2f738879b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2559,16 +2559,6 @@ globals@^12.1.0: dependencies: type-fest "^0.8.1" -globalyzer@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/globalyzer/-/globalyzer-0.1.4.tgz#bc8e273afe1ac7c24eea8def5b802340c5cc534f" - integrity sha512-LeguVWaxgHN0MNbWC6YljNMzHkrCny9fzjmEUdnF1kQ7wATFD1RHFRqA1qxaX2tgxGENlcxjOflopBwj3YZiXA== - -globrex@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/globrex/-/globrex-0.1.2.tgz#dd5d9ec826232730cd6793a5e33a9302985e6098" - integrity sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg== - glogg@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/glogg/-/glogg-1.0.2.tgz#2d7dd702beda22eb3bffadf880696da6d846313f" @@ -3173,20 +3163,34 @@ isobject@^3.0.0, isobject@^3.0.1: resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= -isomorphic-git@0.78.5: - version "0.78.5" - resolved "https://registry.yarnpkg.com/isomorphic-git/-/isomorphic-git-0.78.5.tgz#013f8f8c280b8e0f8bb10ffa251eb87e9bb1190b" - integrity 
sha512-LrF5t9x7RdFeg84NsYpZo9qF1MZeb56LpBm6Jv47qMjnWMv0Il/3wPTA8I/lUYywgVbvF/e7xypHauj5auKW3w== +isomorphic-git@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isomorphic-git/-/isomorphic-git-1.0.0.tgz#6220fd649bc56ea447c6e75fb76d175a8a6dfeb5" + integrity sha512-YwyHqWP8ZGgb3Ul3HOg1MKSXD52ogWI4V3r58ck+EGuGKyXJYjxEjCRXeFEqOX3cxZRxvRyS1nD2J4at/l6fLQ== dependencies: async-lock "^1.1.0" clean-git-ref "^2.0.1" crc-32 "^1.2.0" diff3 "0.0.3" git-apply-delta "0.0.7" - globalyzer "^0.1.4" - globrex "^0.1.2" ignore "^5.1.4" - marky "^1.2.1" + minimisted "^2.0.0" + pako "^1.0.10" + pify "^4.0.1" + readable-stream "^3.4.0" + sha.js "^2.4.9" + simple-get "^3.0.2" + +isomorphic-git@~1.7.8: + version "1.7.8" + resolved "https://registry.yarnpkg.com/isomorphic-git/-/isomorphic-git-1.7.8.tgz#26ad3c6fb201e994df675a9778af55a60075d579" + integrity sha512-fSTsgp8J4s1aIfB/woR7slOAtX9wNprxs/iJaItE3yn5a/KjDAgIDcdutes88/0uC/VdCQFyuDZq1fdtYftrDw== + dependencies: + async-lock "^1.1.0" + clean-git-ref "^2.0.1" + crc-32 "^1.2.0" + diff3 "0.0.3" + ignore "^5.1.4" minimisted "^2.0.0" pako "^1.0.10" pify "^4.0.1" @@ -3589,11 +3593,6 @@ map-visit@^1.0.0: dependencies: object-visit "^1.0.0" -marky@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/marky/-/marky-1.2.1.tgz#a3fcf82ffd357756b8b8affec9fdbf3a30dc1b02" - integrity sha512-md9k+Gxa3qLH6sUKpeC2CNkJK/Ld+bEz5X96nYwloqphQE0CKCVEKco/6jxEZixinqNdz5RFi/KaCyfbMDMAXQ== - matchdep@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/matchdep/-/matchdep-2.0.0.tgz#c6f34834a0d8dbc3b37c27ee8bbcb27c7775582e" -- GitLab From 8b9afa4975538a120cac00f7d0f028a86e92aebe Mon Sep 17 00:00:00 2001 From: David Jencks Date: Sat, 29 Feb 2020 07:27:55 -0800 Subject: [PATCH 3/4] Don't copy file contents into Buffer, share them. Is this safe? 
--- packages/content-aggregator/lib/aggregate-content.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/content-aggregator/lib/aggregate-content.js b/packages/content-aggregator/lib/aggregate-content.js index 9e880fb16..2159c5c0e 100644 --- a/packages/content-aggregator/lib/aggregate-content.js +++ b/packages/content-aggregator/lib/aggregate-content.js @@ -541,7 +541,7 @@ function entryToFile (entry) { stat.mtime = undefined stat.size = contents.length //TODO is there a more efficient way to get a UInt8Array into a File? - return new File({ path: entry.path, contents: Buffer.from(contents), stat }) + return new File({ path: entry.path, contents: Buffer.from(contents.buffer), stat }) }) } -- GitLab From 3a3a253d8210867e06fc2feb687a3240063152f5 Mon Sep 17 00:00:00 2001 From: David Jencks Date: Sat, 29 Feb 2020 16:02:53 -0800 Subject: [PATCH 4/4] Implementation of git credential manager as a pipeline extension. The default credential manager is also rewritten to be an extension, so all tests involving the credential manager test the extension implementation. 
--- .../lib/aggregate-content.js | 107 ++++++++++-- packages/content-aggregator/lib/constants.js | 8 +- .../lib/git-credential-manager-plugin.js | 115 +++++++++++++ .../lib/git-credential-manager-store.js | 138 --------------- .../test/aggregate-content-test.js | 158 ++++++++++-------- .../lib/generate-site.js | 2 + 6 files changed, 308 insertions(+), 220 deletions(-) create mode 100644 packages/content-aggregator/lib/git-credential-manager-plugin.js delete mode 100644 packages/content-aggregator/lib/git-credential-manager-store.js diff --git a/packages/content-aggregator/lib/aggregate-content.js b/packages/content-aggregator/lib/aggregate-content.js index 2159c5c0e..0c8645f7e 100644 --- a/packages/content-aggregator/lib/aggregate-content.js +++ b/packages/content-aggregator/lib/aggregate-content.js @@ -8,7 +8,7 @@ const File = require('./file') const flattenDeep = require('./flatten-deep') const fs = require('fs-extra') const getCacheDir = require('cache-directory') -const GitCredentialManagerStore = require('./git-credential-manager-store') +const reqisterPlugin = require('./git-credential-manager-plugin').register const git = require('isomorphic-git') const invariably = { false: () => false, void: () => {} } const http = require('isomorphic-git/http/node') @@ -31,8 +31,12 @@ const { FILE_MODES, GIT_OPERATION_LABEL_LENGTH, GIT_PROGRESS_PHASES, - VALID_STATE_FILENAME, + ON_AUTH, + ON_AUTH_SUCCESS, + ON_AUTH_FAILURE, + ON_AUTH_STATUS, ON_COMPONENT_DESCRIPTOR, + VALID_STATE_FILENAME, } = require('./constants') const ANY_SEPARATOR_RX = /[:/]/ @@ -104,7 +108,7 @@ function aggregateContent (playbook, eventEmitter) { const { cacheDir, fetch, silent, quiet } = playbook.runtime const progress = !quiet && !silent && createProgress(sourcesByUrl.keys(), process.stdout) const { ensureGitSuffix, credentials } = Object.assign({ ensureGitSuffix: true }, playbook.git) - const credentialManager = new GitCredentialManagerStore().configure({ credentials, startDir }) + const 
credentialManager = getCredentialManager(eventEmitter, { credentials, startDir }) return ensureCacheDir(cacheDir, startDir).then((resolvedCacheDir) => Promise.all( Array.from(sourcesByUrl, ([url, sources]) => @@ -137,6 +141,83 @@ function aggregateContent (playbook, eventEmitter) { ) } +function getCredentialManager (eventEmitter, defaultConfig) { + const listenerCount = eventEmitter.listenerCount(ON_AUTH) + if (listenerCount > 1) { + throw new Error(`Only one git credential manager pipeline extension may be installed, not ${listenerCount}`) + } + if (listenerCount === 0) { + reqisterPlugin(eventEmitter, defaultConfig) + } + const urls = {} + const entries = {} + return { + + add: (url, credentials) => { + // TODO Should we have labeled "user only" as "token"? Really? + if (credentials.token) { + credentials = { username: credentials.token } + } + const { hostname, pathname } = new URL(url) + if (pathname === '/') { + entries[hostname] = credentials + } else { + entries[hostname + pathname] = credentials + if (!pathname.endsWith('.git')) entries[hostname + pathname + '.git'] = credentials + } + }, + + callbacks: { + onAuth: async (url) => { + if (urls[url] !== 'auth_failed' && Object.keys(entries).length) { + const { hostname, pathname } = new URL(url) + const credentials = entries[hostname + pathname] || entries[hostname] + if (credentials) { + urls[url] = 'requested' + return credentials + } + } + const credentials = {} + await eventEmitter.emit(ON_AUTH, url, credentials) + return credentials.credentials + }, + + onAuthSuccess: async (url) => { + if (urls[url] === 'requested' || urls[url] === 'success') { + urls[url] = 'success' + } else { + await eventEmitter.emit(ON_AUTH_SUCCESS, url) + } + }, + + onAuthFailure: async (url, auth) => { + if (urls[url]) { + urls[url] = auth ? 'failed' : 'auth_failed' + } + await eventEmitter.emit(ON_AUTH_FAILURE, url, auth) + //This imitates isomorphic-git HttpError. 
+ const statusCode = 401 + const statusMessage = 'HTTP Basic: Access Denied' + const err = new Error(`HTTP Error: ${statusCode} ${statusMessage}`) + err.name = err.code = 'HttpError' + err.data = { + statusCode, + statusMessage, + } + if (auth) err.rejected = true + throw err + }, + }, + + status: async (url) => { + const status = {} + await eventEmitter.emit(ON_AUTH_STATUS, url, status) + return status.status + }, + + } +} + function buildAggregate (componentVersionBuckets) { const aggregateMap = flattenDeep(componentVersionBuckets).reduce((accum, batch) => { const key = batch.version + '@' + batch.name @@ -166,7 +247,7 @@ async function loadRepository (url, opts) { url, noGitSuffix: !opts.ensureGitSuffix, noCheckout: true, - ...credentialManager.callbacks(), + ...credentialManager.callbacks, } const validStateFile = ospath.join(repo.gitdir, VALID_STATE_FILENAME) try { @@ -176,8 +257,8 @@ async function loadRepository (url, opts) { const fetchOpts = getFetchOptions(repo, opts.progress, displayUrl, credentials, opts.fetchTags, 'fetch') await git .fetch(fetchOpts) - .then(() => { - authStatus = credentials ? 'auth-embedded' : credentialManager.status({ url }) ? 'auth-required' : undefined + .then(async () => { + authStatus = credentials ? 'auth-embedded' : await credentialManager.status(url) ? 'auth-required' : undefined return git.setConfig(Object.assign({ path: 'remote.origin.private', value: authStatus }, repo)) }) .catch((fetchErr) => { @@ -198,8 +279,8 @@ async function loadRepository (url, opts) { await git .clone(fetchOpts) .then(() => git.resolveRef(Object.assign({ ref: 'HEAD', depth: 1 }, repo))) - .then(() => { - authStatus = credentials ? 'auth-embedded' : credentialManager.status({ url }) ? 'auth-required' : undefined + .then(async () => { + authStatus = credentials ? 'auth-embedded' : await credentialManager.status(url) ? 
'auth-required' : undefined return git.setConfig(Object.assign({ path: 'remote.origin.private', value: authStatus }, repo)) }) .catch(async (cloneErr) => { @@ -672,9 +753,13 @@ function createProgressCallback (progress, progressLabel, operation) { progressBar.tick(0) return (gitProgressEvent) => { const phase = gitProgressEvent.phase - if (phase === 'complete') onGitComplete(progressBar, gitProgressEvent) - else if (phase === 'error') onGitComplete(progressBar, gitProgressEvent) - else onGitProgress(progressBar, gitProgressEvent) + if (phase === 'complete') { + onGitComplete(progressBar, gitProgressEvent) + } else if (phase === 'error') { + onGitComplete(progressBar, gitProgressEvent) + } else { + onGitProgress(progressBar, gitProgressEvent) + } } } diff --git a/packages/content-aggregator/lib/constants.js b/packages/content-aggregator/lib/constants.js index 9c1059a8c..d45dd605e 100644 --- a/packages/content-aggregator/lib/constants.js +++ b/packages/content-aggregator/lib/constants.js @@ -7,10 +7,14 @@ module.exports = Object.freeze({ CONTENT_GLOB: '**/*.*', // NOTE ignoring 120000 (symlink) FILE_MODES: { 100644: 0o100666 & ~process.umask(), 100755: 0o100777 & ~process.umask() }, - GIT_CORE: 'antora', GIT_OPERATION_LABEL_LENGTH: 8, GIT_PROGRESS_PHASES: ['Counting objects', 'Compressing objects', 'Receiving objects', 'Resolving deltas'], - VALID_STATE_FILENAME: 'valid', // events + ON_AUTH: 'aggregateContentOnAuth', + ON_AUTH_SUCCESS: 'aggregateContentOnAuthSuccess', + ON_AUTH_FAILURE: 'aggregateContentOnAuthFailure', + ON_AUTH_STATUS: 'aggregateContentOnAuthStatus', ON_COMPONENT_DESCRIPTOR: 'onComponentDescriptor', + + VALID_STATE_FILENAME: 'valid', }) diff --git a/packages/content-aggregator/lib/git-credential-manager-plugin.js b/packages/content-aggregator/lib/git-credential-manager-plugin.js new file mode 100644 index 000000000..27176c4f6 --- /dev/null +++ b/packages/content-aggregator/lib/git-credential-manager-plugin.js @@ -0,0 +1,115 @@ +'use strict' + 
+const { homedir } = require('os') +const expandPath = require('@antora/expand-path-helper') +const fs = require('fs-extra') +const ospath = require('path') +const { + ON_AUTH, + ON_AUTH_SUCCESS, + ON_AUTH_FAILURE, + ON_AUTH_STATUS, +} = require('./constants') + +module.exports.register = (eventEmitter, { credentials, startDir }) => { + let entries + let path + const urls = {} + const contents = (credentials = credentials || {}).contents + if (contents) { + path = undefined + } else if (credentials.path) { + path = expandPath(credentials.path, '~+', startDir) + } else { + path = undefined + } + + async function load () { + if (entries) return entries + return (entries = new Promise((resolve) => { + let contentsPromise + let delimiter = '\n' + if (contents) { + delimiter = /[,\n]/ + contentsPromise = Promise.resolve(contents) + } else if (path) { + contentsPromise = fs.pathExists(path).then((exists) => { + return exists ? fs.readFile(path, 'utf-8') : undefined + }) + } else { + const homeGitCredentialsPath = ospath.join(homedir(), '.git-credentials') + const xdgConfigGitCredentialsPath = ospath.join( + process.env.XDG_CONFIG_HOME || ospath.join(homedir(), '.config'), + 'git', + 'credentials' + ) + contentsPromise = fs + .pathExists(homeGitCredentialsPath) + .then((exists) => + exists + ? fs.readFile(homeGitCredentialsPath, 'utf8') + : fs + .pathExists(xdgConfigGitCredentialsPath) + .then((altExists) => (altExists ? 
fs.readFile(xdgConfigGitCredentialsPath, 'utf8') : undefined)) + ) + } + contentsPromise.then((contents) => { + if (contents) { + resolve( + contents + .trim() + .split(delimiter) + .reduce((accum, url) => { + try { + const { username, password, hostname, pathname } = new URL(url) + let credentials + if (password) { + credentials = { + username: decodeURIComponent(username), + password: decodeURIComponent(password), + } + } else if (username) { + credentials = { token: decodeURIComponent(username) } + } else { + return accum + } + if (pathname === '/') { + accum[hostname] = credentials + } else { + accum[hostname + pathname] = credentials + if (!pathname.endsWith('.git')) accum[hostname + pathname + '.git'] = credentials + } + } catch (e) { + } + return accum + }, {}) + ) + } else { + resolve({}) + } + }) + })) + } + + eventEmitter.on(ON_AUTH, async (url, cred) => { + urls[url] = 'requested' + return load().then((entries) => { + if (!Object.keys(entries).length) return + const { hostname, pathname } = new URL(url) + credentials = entries[hostname + pathname] || entries[hostname] + if (credentials) cred.credentials = credentials + }) + }) + + eventEmitter.on(ON_AUTH_SUCCESS, (url) => { + urls[url] = 'approved' + }) + + eventEmitter.on(ON_AUTH_FAILURE, (url, auth) => { + urls[url] = 'rejected' + }) + + eventEmitter.on(ON_AUTH_STATUS, (url, status) => { + status.status = urls[url] + }) +} diff --git a/packages/content-aggregator/lib/git-credential-manager-store.js b/packages/content-aggregator/lib/git-credential-manager-store.js deleted file mode 100644 index 2ab785cbf..000000000 --- a/packages/content-aggregator/lib/git-credential-manager-store.js +++ /dev/null @@ -1,138 +0,0 @@ -'use strict' - -const { homedir } = require('os') -const expandPath = require('@antora/expand-path-helper') -const fs = require('fs-extra') -const ospath = require('path') - -class GitCredentialManagerStore { - configure ({ credentials, startDir }) { - this.entries = undefined - this.urls = 
{} - if ((this.contents = (credentials = credentials || {}).contents)) { - this.path = undefined - } else if (credentials.path) { - this.path = expandPath(credentials.path, '~+', startDir) - } else { - this.path = undefined - } - return this - } - - async load () { - if (this.entries) return this.entries - return (this.entries = new Promise((resolve) => { - let contentsPromise - let delimiter = '\n' - if (this.contents) { - contentsPromise = Promise.resolve(this.contents) - delimiter = /[,\n]/ - } else if (this.path) { - contentsPromise = fs - .pathExists(this.path) - .then((exists) => (exists ? fs.readFile(this.path, 'utf8') : undefined)) - } else { - const homeGitCredentialsPath = ospath.join(homedir(), '.git-credentials') - const xdgConfigGitCredentialsPath = ospath.join( - process.env.XDG_CONFIG_HOME || ospath.join(homedir(), '.config'), - 'git', - 'credentials' - ) - contentsPromise = fs - .pathExists(homeGitCredentialsPath) - .then((exists) => - exists - ? fs.readFile(homeGitCredentialsPath, 'utf8') - : fs - .pathExists(xdgConfigGitCredentialsPath) - .then((altExists) => (altExists ? 
fs.readFile(xdgConfigGitCredentialsPath, 'utf8') : undefined)) - ) - } - contentsPromise.then((contents) => { - if (contents) { - resolve( - contents - .trim() - .split(delimiter) - .reduce((accum, url) => { - try { - const { username, password, hostname, pathname } = new URL(url) - let credentials - if (password) { - credentials = { username: decodeURIComponent(username), password: decodeURIComponent(password) } - } else if (username) { - credentials = { token: decodeURIComponent(username) } - } else { - return accum - } - if (pathname === '/') { - accum[hostname] = credentials - } else { - accum[hostname + pathname] = credentials - if (!pathname.endsWith('.git')) accum[hostname + pathname + '.git'] = credentials - } - } catch (e) {} - return accum - }, {}) - ) - } else { - resolve({}) - } - }) - })) - } - - async add (url, credentials) { - return this.load().then((entries) => { - // TODO Should we have labeled "user only" as "token"? Really? - if (credentials.token) { - credentials = { username: credentials.token } - } - const { hostname, pathname } = new URL(url) - if (pathname === '/') { - entries[hostname] = credentials - } else { - entries[hostname + pathname] = credentials - if (!pathname.endsWith('.git')) entries[hostname + pathname + '.git'] = credentials - } - }) - } - - callbacks () { - return { - onAuth: async (url) => { - this.urls[url] = 'requested' - return this.load().then((entries) => { - if (!Object.keys(entries).length) return - const { hostname, pathname } = new URL(url) - return entries[hostname + pathname] || entries[hostname] - }) - }, - - onAuthSuccess: async (url) => { - this.urls[url] = 'approved' - }, - - onAuthFailure: async (url, auth) => { - this.urls[url] = 'rejected' - //This imitates isomorphic-git HttpError. 
- const statusCode = 401 - const statusMessage = 'HTTP Basic: Access Denied' - const err = new Error(`HTTP Error: ${statusCode} ${statusMessage}`) - err.name = err.code = 'HttpError' - err.data = { - statusCode, - statusMessage, - } - if (auth) err.rejected = true - throw err - }, - } - } - - status ({ url }) { - return this.urls[url] - } -} - -module.exports = GitCredentialManagerStore diff --git a/packages/content-aggregator/test/aggregate-content-test.js b/packages/content-aggregator/test/aggregate-content-test.js index 8b6b1dda5..611e3b89b 100644 --- a/packages/content-aggregator/test/aggregate-content-test.js +++ b/packages/content-aggregator/test/aggregate-content-test.js @@ -20,8 +20,11 @@ const RepositoryBuilder = require('../../../test/repository-builder') const { COMPONENT_DESC_FILENAME, CONTENT_CACHE_FOLDER, - GIT_CORE, GIT_OPERATION_LABEL_LENGTH, + ON_AUTH, + ON_AUTH_SUCCESS, + ON_AUTH_FAILURE, + ON_AUTH_STATUS, ON_COMPONENT_DESCRIPTOR, } = require('@antora/content-aggregator/lib/constants') const CACHE_DIR = getCacheDir('antora-test') @@ -1061,9 +1064,9 @@ describe('aggregateContent()', function () { }) it('can register an onComponentDescriptor pipeline extension', async () => { - var pluginArgs + var extensionArgs eventEmitter.on(ON_COMPONENT_DESCRIPTOR, - (args) => { pluginArgs = args }) + (args) => { extensionArgs = args }) const repoBuilder = new RepositoryBuilder(CONTENT_REPOS_DIR, FIXTURES_DIR) const componentDesc = { name: 'the-component', @@ -1081,7 +1084,7 @@ describe('aggregateContent()', function () { expect(() => (aggregate = aggregateContentDeferred())).to.not.throw() expect(aggregate).to.have.lengthOf(1) expect(aggregate[0]).to.include(componentDesc) - expect(pluginArgs.componentDescriptor).to.include(componentDesc) + expect(extensionArgs.componentDescriptor).to.include(componentDesc) }) }) }) @@ -3664,73 +3667,90 @@ describe('aggregateContent()', function () { }) }) - //need mechanism to supply auth callbacks - it.skip('should use 
registered credential manager and enhance it with status method', async () => { - const credentialManager = { - async fill ({ url }) { - this.fulfilledUrl = url - return { username: 'u', password: 'p' } - }, - async approved ({ url }) {}, - async rejected ({ url, auth }) {}, - } - RepositoryBuilder.registerPlugin('credentialManager', credentialManager, GIT_CORE) - const repoBuilder = new RepositoryBuilder(CONTENT_REPOS_DIR, FIXTURES_DIR, { remote: { gitServerPort } }) - await initRepoWithFiles(repoBuilder) - playbookSpec.content.sources.push({ url: repoBuilder.url }) - const aggregate = await aggregateContent(playbookSpec) - expect(authorizationHeaderValue).to.equal('Basic ' + Buffer.from('u:p').toString('base64')) - expect(credentialsSent).to.eql({ username: 'u', password: 'p' }) - expect(aggregate).to.have.lengthOf(1) - expect(RepositoryBuilder.getPlugin('credentialManager', GIT_CORE)).to.not.equal(credentialManager) - expect(RepositoryBuilder.getPlugin('credentialManager', GIT_CORE).fulfilledUrl).to.equal(repoBuilder.url) - }) + describe('pipeline extension credential managers', () => { + let eventEmitter - //need mechanism to supply auth callbacks - it.skip('should not enhance registered credential manager if it already contains a status method', async () => { - const credentialManager = { - async fill ({ url }) { - this.fulfilledUrl = url - return { username: 'u', password: 'p' } - }, - async approved ({ url }) {}, - async rejected ({ url, auth }) {}, - status ({ url }) { - return true - }, - } - RepositoryBuilder.registerPlugin('credentialManager', credentialManager, GIT_CORE) - const repoBuilder = new RepositoryBuilder(CONTENT_REPOS_DIR, FIXTURES_DIR, { remote: { gitServerPort } }) - await initRepoWithFiles(repoBuilder) - playbookSpec.content.sources.push({ url: repoBuilder.url }) - const aggregate = await aggregateContent(playbookSpec) - expect(authorizationHeaderValue).to.equal('Basic ' + Buffer.from('u:p').toString('base64')) - 
expect(credentialsSent).to.eql({ username: 'u', password: 'p' }) - expect(aggregate).to.have.lengthOf(1) - expect(aggregate[0].files[0].src.origin.private).to.equal('auth-required') - expect(RepositoryBuilder.getPlugin('credentialManager', GIT_CORE)).to.equal(credentialManager) - expect(credentialManager.fulfilledUrl).to.equal(repoBuilder.url) - }) + beforeEach(() => { + const baseEmitter = new EventEmitter() - //need mechanism to supply auth callbacks - it.skip('should invoke configure method on custom credential manager if defined', async () => { - const credentialManager = { - configure () { - this.configured = true - }, - async fill ({ url }) { - return { username: 'u', password: 'p' } - }, - async approved ({ url }) {}, - async rejected ({ url, auth }) {}, - } - RepositoryBuilder.registerPlugin('credentialManager', credentialManager, GIT_CORE) - const repoBuilder = new RepositoryBuilder(CONTENT_REPOS_DIR, FIXTURES_DIR, { remote: { gitServerPort } }) - await initRepoWithFiles(repoBuilder) - playbookSpec.content.sources.push({ url: repoBuilder.url }) - await aggregateContent(playbookSpec) - expect(authorizationHeaderValue).to.equal('Basic ' + Buffer.from('u:p').toString('base64')) - expect(credentialManager.configured).to.be.true() + eventEmitter = { + + emit: async (name, ...args) => { + const promises = [] + baseEmitter.emit(name, promises, ...args) + promises.length && await Promise.all(promises) + }, + + on: (name, listener) => baseEmitter.on(name, (promises, ...args) => promises.push(listener(...args))), + + listenerCount: (name) => baseEmitter.listenerCount(name), + } + }) + + it('should use custom credential manager registered as extension', async () => { + const urls = {} + var statusCount = 0 + eventEmitter.on(ON_AUTH, (url, credentials) => { credentials.credentials = { username: 'u', password: 'p' } }) + eventEmitter.on(ON_AUTH_SUCCESS, (url) => { urls[url] = 'success' }) + eventEmitter.on(ON_AUTH_FAILURE, (url, auth) => { urls[url] = auth ? 
'auth failure' : 'failure' }) + eventEmitter.on(ON_AUTH_STATUS, (url, status) => { + statusCount++ + status.status = urls[url] + }) + const repoBuilder = new RepositoryBuilder(CONTENT_REPOS_DIR, FIXTURES_DIR, { remote: { gitServerPort } }) + await initRepoWithFiles(repoBuilder) + playbookSpec.content.sources.push({ url: repoBuilder.url }) + await aggregateContent(playbookSpec, eventEmitter) + expect(authorizationHeaderValue).to.equal('Basic ' + Buffer.from('u:p').toString('base64')) + expect(urls[repoBuilder.url]).to.equal('success') + expect(statusCount).to.equal(1) + const status = {} + await eventEmitter.emit(ON_AUTH_STATUS, repoBuilder.url, status) + expect(status.status).to.equal('success') + expect(statusCount).to.equal(2) + }) + + it('should use custom credential manager registered as extension, auth failure', async () => { + const urls = {} + var statusCount = 0 + eventEmitter.on(ON_AUTH, (url, credentials) => { credentials.credentials = { username: 'u', password: 'p' } }) + eventEmitter.on(ON_AUTH_SUCCESS, (url) => { urls[url] = 'success' }) + eventEmitter.on(ON_AUTH_FAILURE, (url, auth) => { urls[url] = auth ? 'auth failure' : 'failure' }) + eventEmitter.on(ON_AUTH_STATUS, (url, status) => { + statusCount++ + status.status = urls[url] + }) + const repoBuilder = new RepositoryBuilder(CONTENT_REPOS_DIR, FIXTURES_DIR, { remote: { gitServerPort } }) + await initRepoWithFiles(repoBuilder) + playbookSpec.content.sources.push({ url: repoBuilder.url }) + credentialsVerdict = 'denied!' 
+ const aggregateContentDeferred = await deferExceptions(aggregateContent, playbookSpec, eventEmitter) + const expectedErrorMessage = + 'Content repository not found or credentials were rejected (url: ' + repoBuilder.url + ')' + expect(aggregateContentDeferred).to.throw(expectedErrorMessage) + expect(authorizationHeaderValue).to.equal('Basic ' + Buffer.from('u:p').toString('base64')) + expect(credentialsSent).to.eql({ username: 'u', password: 'p' }) + expect(credentialsRequestCount).to.equal(1) + expect(urls[repoBuilder.url]).to.equal('auth failure') + //status is called only upon success. + expect(statusCount).to.equal(0) + const status = {} + await eventEmitter.emit(ON_AUTH_STATUS, repoBuilder.url, status) + expect(status.status).to.equal('auth failure') + expect(statusCount).to.equal(1) + }) + + it('max one credential manager installed', async () => { + eventEmitter.on(ON_AUTH, (url, credentials) => { credentials.credentials = { username: 'u', password: 'p' } }) + eventEmitter.on(ON_AUTH, (url, credentials) => { credentials.credentials = { username: 'u', password: 'p' } }) + const repoBuilder = new RepositoryBuilder(CONTENT_REPOS_DIR, FIXTURES_DIR, { remote: { gitServerPort } }) + await initRepoWithFiles(repoBuilder) + playbookSpec.content.sources.push({ url: repoBuilder.url }) + const aggregateContentDeferred = await deferExceptions(aggregateContent, playbookSpec, eventEmitter) + const expectedErrorMessage = + 'Only one git credential manager pipeline extension may be installed, not 2' + expect(aggregateContentDeferred).to.throw(expectedErrorMessage) + }) }) }) diff --git a/packages/site-generator-default/lib/generate-site.js b/packages/site-generator-default/lib/generate-site.js index 01e35d3e6..1d40c286d 100644 --- a/packages/site-generator-default/lib/generate-site.js +++ b/packages/site-generator-default/lib/generate-site.js @@ -25,6 +25,8 @@ async function generateSite (args, env) { }, on: (name, listener) => baseEmitter.on(name, (promises, ...args) => 
promises.push(listener(...args))), + + listenerCount: (name) => baseEmitter.listenerCount(name), } const playbook = await buildPlaybook(args, env, undefined, eventEmitter) const asciidocConfig = await wrapSync(eventEmitter, 'ResolveAsciiDocConfig', resolveAsciiDocConfig, playbook, { playbook }) -- GitLab