site: wiki pages: get links from Home; prepend page name headings
[ci skip]
parent aeb7d58c31
commit d0996213cd

Shake.hs (37 lines changed)
@@ -115,7 +115,7 @@ main = do
   commandmds <- filter (".md" `isSuffixOf`) . map (commandsdir </>) <$> S.getDirectoryContents commandsdir
   let commandtxts = map (-<.> "txt") commandmds
   let wikidir = "wiki"
-  wikipagenames <- map dropExtension . filter (".md" `isSuffixOf`) <$> S.getDirectoryContents wikidir
+  wikipagefilenames <- map dropExtension . filter (".md" `isSuffixOf`) <$> S.getDirectoryContents wikidir
 
   shakeArgs
     shakeOptions{
@@ -221,7 +221,7 @@ main = do
       )
 
       -- website pages kept in the wiki: cookbook content
-      wikipageshtml = map (normalise . ("site/_site" </>) . (<.> ".html")) wikipagenames
+      wikipageshtml = map (normalise . ("site/_site" </>) . (<.> ".html")) wikipagefilenames
 
       -- manuals rendered to markdown and combined, ready for web rendering
       webmancombined = "site/manual.md"
@@ -379,12 +379,12 @@ main = do
 
    -- embed the wiki's table of contents into the main site's home page
    "site/index.md" %> \out -> do
-     wikicontent <- readFile' "wiki/_Sidebar.md"
+     wikicontent <- dropWhile (not . ("#" `isPrefixOf`)) . lines <$> readFile' "wiki/Home.md"
      old <- liftIO $ readFileStrictly "site/index.md"
      let (startmarker, endmarker) = ("<!-- WIKICONTENT -->", "<!-- ENDWIKICONTENT -->")
          (before, after') = break (startmarker `isPrefixOf`) $ lines old
          (_, after) = break (endmarker `isPrefixOf`) $ after'
-         new = unlines $ concat [before, [startmarker], lines wikicontent, after]
+         new = unlines $ concat [before, [startmarker], wikicontent, after]
      liftIO $ writeFile out new
 
    -- render all web pages from the main repo (manuals, home, download, relnotes etc) as html, saved in site/_site/
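Note on the hunk above: wikicontent is now a list of lines, taken from wiki/Home.md starting at its first markdown heading, rather than the whole _Sidebar.md string; that is why the splice drops the extra lines call (lines wikicontent became just wikicontent). A minimal GHCi-style sketch of the new expression, using a made-up Home.md body purely for illustration:

  >>> import Data.List (isPrefixOf)
  >>> let home = "Welcome to the wiki\n\n## Getting started\n- [[Some page]]\n"
  >>> dropWhile (not . ("#" `isPrefixOf`)) (lines home)
  ["## Getting started","- [[Some page]]"]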
@@ -398,20 +398,21 @@ main = do
    -- In case it's a wiki page, we capture pandoc's output for final processing,
    -- and hyperlink any github-style wikilinks.
    "site/_site//*.html" %> \out -> do
-     let name = takeBaseName out
-         iswikipage = name `elem` wikipagenames
+     let filename = takeBaseName out
+         pagename = fileNameToPageName filename
+         iswikipage = filename `elem` wikipagefilenames
          source
-           | iswikipage = "wiki" </> name <.> "md"
-           | otherwise = "site" </> name <.> "md"
+           | iswikipage = "wiki" </> filename <.> "md"
+           | otherwise = "site" </> filename <.> "md"
          template = "site/site.tmpl"
          siteRoot = if "site/_site/doc//*" ?== out then "../.." else "."
      need [source, template]
      -- read markdown source, link any wikilinks, pipe it to pandoc, write html out
-     Stdin . wikify <$> (readFile' source) >>=
+     Stdin . wikify (if iswikipage then Just (fileNameToPageName filename) else Nothing) <$> (readFile' source) >>=
       (cmd Shell pandoc fromsrcmd "-t html"
        "--template" template
        ("--metadata=siteRoot:" ++ siteRoot)
-       ("--metadata=title:" ++ name)
+       ("--metadata=\"title:" ++ pagename ++ "\"")
        "--lua-filter=tools/pandoc-site.lua"
        "-o" out )
 
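Note on the html rule above: the page title passed to pandoc is now derived with fileNameToPageName (defined further down), so it can contain spaces; since the command runs through cmd Shell, the title value is wrapped in literal double quotes to keep it a single shell word. A small sketch of the assembled argument, using a hypothetical file name "Some-page":

  >>> import Data.List.Split (splitOn)
  >>> let pagename = unwords (splitOn "-" "Some-page")   -- what fileNameToPageName does
  >>> putStrLn ("--metadata=\"title:" ++ pagename ++ "\"")
  --metadata="title:Some page"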
@@ -687,9 +688,13 @@ getCurrentDay = do
   t <- getZonedTime
   return $ localDay (zonedTimeToLocalTime t)
 
+type Markdown = String
+
 -- | Convert Github-style wikilinks to hledger website links.
-wikify :: String -> String
-wikify =
+-- If a heading is provided, prepend that as a top-level markdown heading.
+wikify :: Maybe String -> Markdown -> Markdown
+wikify mheading =
+  maybe id ((++).(++"\n\n").("# "++)) mheading .
   replaceBy wikilinkre wikilinkReplace .
   replaceBy labelledwikilinkre labelledwikilinkReplace
 
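Note on wikify above: the added maybe id ((++).(++"\n\n").("# "++)) mheading step prepends "# <heading>" plus a blank line when a heading is supplied, and is the identity otherwise. An equivalent, more explicit sketch of just that step (prependHeading is an illustrative name, not part of the change):

  prependHeading :: Maybe String -> Markdown -> Markdown
  prependHeading Nothing  md = md
  prependHeading (Just h) md = "# " ++ h ++ "\n\n" ++ md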
@@ -705,16 +710,18 @@ wikilinkReplace _ _ Capture{capturedText} =
   Just $ "["++name++"]("++uri++")"
   where
     name = init $ init $ drop 2 capturedText
-    uri = nameToUri name
+    uri = pageNameToUri name
 
 -- labelledwikilinkReplace _ loc@RELocation{locationCapture} cap@Capture{capturedText} =
 labelledwikilinkReplace _ _ Capture{capturedText} =
   Just $ "["++label++"]("++uri++")"
   where
     [label,name] = take 2 $ (splitOn "|" $ init $ init $ drop 2 capturedText) ++ [""]
-    uri = nameToUri name
+    uri = pageNameToUri name
 
-nameToUri = (++".html") . intercalate "-" . words
+pageNameToUri = (++".html") . intercalate "-" . words
 
+fileNameToPageName = unwords . splitOn "-"
+
 -- | Easier regex replace helper. Replaces each occurrence of a
 -- regular expression in src, by transforming each matched text with
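Note on the two renamed/added helpers above: pageNameToUri turns a wiki page name into its site URL, and fileNameToPageName reverses the dash joining to recover a human-readable page name. A quick GHCi-style sketch with an illustrative page name:

  >>> import Data.List (intercalate)
  >>> import Data.List.Split (splitOn)
  >>> ((++".html") . intercalate "-" . words) "Example page"   -- pageNameToUri
  "Example-page.html"
  >>> (unwords . splitOn "-") "Example-page"                   -- fileNameToPageName
  "Example page"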