Merge pull request #1 from cryogen-project/master
Update cryogen-core to latest version
Commit ca2513f954
14 changed files with 628 additions and 201 deletions

project.clj (15 changes)
@@ -1,16 +1,17 @@
(defproject cryogen-core "0.1.24"
(defproject cryogen-core "0.1.43"
:description "Cryogen's compiler"
:url "https://github.com/cryogen-project/cryogen-core"
:license {:name "Eclipse Public License"
:url "http://www.eclipse.org/legal/epl-v10.html"}
:dependencies [[org.clojure/clojure "1.7.0"]
[clj-rss "0.1.9"]
:dependencies [[org.clojure/clojure "1.8.0"]
[clj-rss "0.2.3"]
[me.raynes/fs "1.4.6"]
[crouton "0.1.2"]
[cheshire "5.5.0"]
[cheshire "5.6.3"]
[clj-text-decoration "0.0.3"]
[io.aviso/pretty "0.1.18"]
[io.aviso/pretty "0.1.33"]
[hiccup "1.0.5"]
[selmer "0.8.5"]
[pandect "0.5.2"]
[selmer "1.10.1"]
[pandect "0.6.1"]
[hawk "0.2.11"]
[clj-tagsoup "0.3.0" :exclusions [org.clojure/clojure]]])

resources/.gitkeep (new empty file)
@@ -11,51 +11,65 @@
[cryogen-core.sass :as sass]
[cryogen-core.markup :as m]
[cryogen-core.io :refer
[get-resource find-assets create-folder wipe-public-folder copy-resources
copy-resources-from-theme]]
[get-resource find-assets create-folder create-file-recursive create-file wipe-public-folder
copy-resources copy-resources-from-theme path]]
[cryogen-core.sitemap :as sitemap]
[cryogen-core.rss :as rss]))
[cryogen-core.rss :as rss])
(:import java.util.Locale))

(cache-off!)

(def public "resources/public")

(defn root-path
"Creates the root path for posts, tags and pages"
[config k]
(if-let [root (k config)]
(str "/" root "/") "/"))

(defn root-uri
"Creates the uri for posts and pages. Returns root-path by default"
[k config]
(if-let [uri (k config)]
uri
(config (-> k (name) (s/replace #"-uri$" "") (keyword)))))
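;; Illustrative sketch (not part of the diff): how root-uri falls back to the
;; plain root key when the *-uri key is missing. Config values are hypothetical.
(comment
  (root-uri :post-root-uri {:post-root "posts"})                    ;=> "posts"
  (root-uri :post-root-uri {:post-root "posts" :post-root-uri "p"}) ;=> "p"
  )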

(defn re-pattern-from-ext
"Creates a properly quoted regex pattern for the given file extension"
[ext]
(re-pattern (str (s/replace ext "." "\\.") "$")))
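;; Illustrative sketch (not part of the diff): the generated pattern escapes the
;; dot and anchors on the end of the file name.
(comment
  (re-pattern-from-ext ".md")                     ;=> #"\.md$"
  (re-find (re-pattern-from-ext ".md") "post.md") ;=> ".md"
  )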

(defn find-entries
"Returns a list of files under the templates directory according to the
implemented Markup protocol and specified root directory. It defaults to
looking under the implemented protocol's subdirectory, but fallsback to look
at the templates directory."
[root mu ignored-files]
(let [assets (find-assets (path "templates" (m/dir mu) root)
(m/ext mu)
ignored-files)]
(if (seq assets)
assets
(find-assets (path "templates" root)
(m/ext mu)
ignored-files))))

(defn find-posts
"Returns a list of markdown files representing posts under the post root in templates/md"
"Returns a list of markdown files representing posts under the post root."
[{:keys [post-root ignored-files]} mu]
(find-assets (str "templates/" (m/dir mu) post-root) (m/ext mu) ignored-files))
(find-entries post-root mu ignored-files))

(defn find-pages
"Returns a list of markdown files representing pages under the page root in templates/md"
"Returns a list of markdown files representing pages under the page root."
[{:keys [page-root ignored-files]} mu]
(find-assets (str "templates/" (m/dir mu) page-root) (m/ext mu) ignored-files))
(find-entries page-root mu ignored-files))

(defn parse-post-date
"Parses the post date from the post's file name and returns the corresponding java date object"
[file-name date-fmt]
[^String file-name date-fmt]
(let [fmt (java.text.SimpleDateFormat. date-fmt)]
(.parse fmt (.substring file-name 0 10))))
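;; Illustrative sketch (not part of the diff): only the first ten characters of
;; the file name are parsed with the configured date format.
(comment
  (parse-post-date "2016-01-15-hello-world.md" "yyyy-MM-dd")
  ;;=> a java.util.Date for 2016-01-15
  )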

(defn post-uri
"Creates a post uri from the post file name"
[file-name {:keys [blog-prefix post-root]} mu]
(str blog-prefix post-root (s/replace file-name (re-pattern-from-ext (m/ext mu)) ".html")))

(defn page-uri
"Creates a page uri from the page file name"
[page-name {:keys [blog-prefix page-root]} mu]
(str blog-prefix page-root (s/replace page-name (re-pattern-from-ext (m/ext mu)) ".html")))
"Creates a URI from file name. `uri-type` is any of the uri types specified in config, e.g., `:post-root-uri`."
([file-name params]
(page-uri file-name nil params))
([file-name uri-type {:keys [blog-prefix clean-urls?] :as params}]
(let [page-uri (params uri-type)
uri-end (if clean-urls? (s/replace file-name #"(index)?\.html" "/") file-name)]
(path "/" blog-prefix page-uri uri-end))))
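;; Illustrative sketch (not part of the diff): the new multi-arity page-uri with
;; a hypothetical config map, with and without clean URLs.
(comment
  (page-uri "about.html" :page-root-uri {:blog-prefix "/blog" :page-root-uri "pages"})
  ;;=> "/blog/pages/about.html"
  (page-uri "about.html" :page-root-uri {:blog-prefix "/blog" :page-root-uri "pages" :clean-urls? true})
  ;;=> "/blog/pages/about/"
  )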

(defn read-page-meta
"Returns the clojure map from the top of a markdown page/post"
@@ -68,9 +82,10 @@
(defn page-content
"Returns a map with the given page's file-name, metadata and content parsed from
the file with the given markup."
[page config markup]
[^java.io.File page config markup]
(with-open [rdr (java.io.PushbackReader. (reader page))]
(let [page-name (.getName page)
(let [re-root (re-pattern (str "^.*?(" (:page-root config) "|" (:post-root config) ")/"))
page-name (s/replace (str page) re-root "")
file-name (s/replace page-name (re-pattern-from-ext (m/ext markup)) ".html")
page-meta (read-page-meta page-name rdr)
content ((m/render-fn markup) rdr config)]
@@ -85,7 +100,8 @@
(update-in page-meta [:layout] #(str (name %) ".html"))
{:file-name file-name
:content content
:toc (if (:toc page-meta) (generate-toc content))}))
:toc (if-let [toc (:toc page-meta)]
(generate-toc content :list-type toc))}))

(defn parse-page
"Parses a page/post and returns a map of the content, uri, date etc."
@@ -93,7 +109,7 @@
(let [{:keys [file-name page-meta content]} (page-content page config markup)]
(merge
(merge-meta-and-content file-name page-meta content)
{:uri (page-uri file-name config markup)
{:uri (page-uri file-name :page-root-uri config)
:page-index (:page-index page-meta)})))

(defn parse-post
@@ -102,13 +118,15 @@
(let [{:keys [file-name page-meta content]} (page-content page config markup)]
(merge
(merge-meta-and-content file-name page-meta content)
(let [date (parse-post-date file-name (:post-date-format config))
archive-fmt (java.text.SimpleDateFormat. "yyyy MMMM" (java.util.Locale. "en"))
(let [date (if (:date page-meta)
(.parse (java.text.SimpleDateFormat. (:post-date-format config)) (:date page-meta))
(parse-post-date file-name (:post-date-format config)))
archive-fmt (java.text.SimpleDateFormat. (get config :archive-group-format "yyyy MMMM") (Locale/getDefault))
formatted-group (.format archive-fmt date)]
{:date date
:formatted-archive-group formatted-group
:parsed-archive-group (.parse archive-fmt formatted-group)
:uri (post-uri file-name config markup)
:uri (page-uri file-name :post-root-uri config)
:tags (set (:tags page-meta))}))))

(defn read-posts
@@ -119,7 +137,8 @@
(fn [mu]
(->>
(find-posts config mu)
(map #(parse-post % config mu))))
(pmap #(parse-post % config mu))
(remove #(= (:draft? %) true))))
(m/markups))
(sort-by :date)
reverse))

@@ -140,7 +159,7 @@
"Adds the uri and title of a post to the list of posts under each of its tags"
[tags post]
(reduce (fn [tags tag]
(update-in tags [tag] (fnil conj []) (select-keys post [:uri :title :content :date])))
(update-in tags [tag] (fnil conj []) (select-keys post [:uri :title :content :date :enclosure])))
tags (:tags post)))

(defn group-by-tags
@@ -161,11 +180,22 @@
(sort-by :parsed-group)
reverse))

(defn group-for-author
"Groups the posts by author. If no post author if found defaults `default-author`."
[posts default-author]
(->> posts
(map #(select-keys % [:title :uri :date :formatted-archive-group :parsed-archive-group :author]))
(map #(update % :author (fn [author] (or author default-author))))
(group-by :author)
(map (fn [[author posts]]
{:author author
:posts posts}))))

(defn tag-info
"Returns a map containing the name and uri of the specified tag"
[{:keys [blog-prefix tag-root]} tag]
[config tag]
{:name (name tag)
:uri (str blog-prefix tag-root (name tag) ".html")})
:uri (page-uri (str (name tag) ".html") :tag-root-uri config)})
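;; Illustrative sketch (not part of the diff): tag-info now routes through
;; page-uri, so the tag URI honours blog-prefix and :tag-root-uri. The config
;; values here are hypothetical.
(comment
  (tag-info {:blog-prefix "/blog" :tag-root-uri "tags"} :clojure)
  ;;=> {:name "clojure" :uri "/blog/tags/clojure.html"}
  )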

(defn add-prev-next
"Adds a :prev and :next key to the page/post data containing the title and uri of the prev/next
@@ -184,66 +214,92 @@
sidebar-pages false} (group-by #(boolean (:navbar? %)) pages)]
(map (partial sort-by :page-index) [navbar-pages sidebar-pages])))

(defn write-html
"When `clean-urls?` is set, appends `/index.html` before spit; otherwise just spits."
[file-uri {:keys [clean-urls?]} data]
(if clean-urls?
(create-file-recursive (path file-uri "index.html") data)
(create-file file-uri data)))
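;; Illustrative sketch (not part of the diff): where write-html ends up writing
;; for a hypothetical URI (paths are relative to resources/public).
(comment
  (write-html "/blog/about/" {:clean-urls? true} "<html>...</html>")
  ;; writes resources/public/blog/about/index.html
  (write-html "/blog/about.html" {:clean-urls? false} "<html>...</html>")
  ;; writes resources/public/blog/about.html
  )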

(defn compile-pages
"Compiles all the pages into html and spits them out into the public folder"
[{:keys [blog-prefix page-root] :as params} pages]
[{:keys [blog-prefix page-root-uri] :as params} pages]
(when-not (empty? pages)
(println (blue "compiling pages"))
(create-folder (str blog-prefix page-root))
(create-folder (path "/" blog-prefix page-root-uri))
(doseq [{:keys [uri] :as page} pages]
(println "\t-->" (cyan uri))
(spit (str public uri)
(write-html uri
params
(render-file (str "/html/" (:layout page))
(merge params
{:servlet-context "../"
{:active-page "pages"
:servlet-context (path "/" blog-prefix "/")
:page page
:uri uri}))))))

(defn compile-posts
"Compiles all the posts into html and spits them out into the public folder"
[{:keys [blog-prefix post-root disqus-shortname] :as params} posts]
[{:keys [blog-prefix post-root-uri disqus-shortname] :as params} posts]
(when-not (empty? posts)
(println (blue "compiling posts"))
(create-folder (str blog-prefix post-root))
(create-folder (path "/" blog-prefix post-root-uri))
(doseq [post posts]
(println "\t-->" (cyan (:uri post)))
(spit (str public (:uri post))
(write-html (:uri post)
params
(render-file (str "/html/" (:layout post))
(merge params
{:servlet-context "../"
{:active-page "posts"
:servlet-context (path "/" blog-prefix "/")
:post post
:disqus-shortname disqus-shortname
:uri (:uri post)}))))))

(defn compile-tags
"Compiles all the tag pages into html and spits them out into the public folder"
[{:keys [blog-prefix tag-root] :as params} posts-by-tag]
[{:keys [blog-prefix tag-root-uri] :as params} posts-by-tag]
(when-not (empty? posts-by-tag)
(println (blue "compiling tags"))
(create-folder (str blog-prefix tag-root))
(create-folder (path "/" blog-prefix tag-root-uri))
(doseq [[tag posts] posts-by-tag]
(let [{:keys [name uri]} (tag-info params tag)]
(println "\t-->" (cyan uri))
(spit (str public uri)
(write-html uri
params
(render-file "/html/tag.html"
(merge params
{:servlet-context "../"
{:active-page "tags"
:servlet-context (path "/" blog-prefix "/")
:name name
:posts posts
:uri uri})))))))

(defn compile-tags-page [{:keys [blog-prefix] :as params}]
(println (blue "compiling tags page"))
(spit (str public blog-prefix "/tags.html")
(let [uri (page-uri "tags.html" params)]
(write-html uri
params
(render-file "/html/tags.html"
(merge params
{:uri (str blog-prefix "/tags.html")}))))
{:active-page "tags"
:servlet-context (path "/" blog-prefix "/")
:uri uri})))))

(defn content-until-more-marker
[^String content]
(let [index (.indexOf content "<!--more-->")]
(if (pos? index)
(let [s (subs content 0 index)]
(->> ((tagsoup/parse-string s) 2)
(drop 2)
hiccup/html)))))

(defn create-preview
"Creates a single post preview"
[blocks-per-preview post]
(merge (select-keys post [:title :author :date :uri])
{:content (or (re-find #".+?(?=<!--more-->)" (:content post))
(merge post
{:content (or (content-until-more-marker (:content post))
(->> ((tagsoup/parse-string (:content post)) 2)
(drop 2)
(take blocks-per-preview)
@@ -253,18 +309,18 @@
"Returns a sequence of vectors, each containing a set of post previews"
[posts-per-page blocks-per-preview posts]
(->> posts
(reduce (fn [v post] (conj v (create-preview blocks-per-preview post))) [])
(map #(create-preview blocks-per-preview %))
(partition-all posts-per-page)
(map-indexed (fn [i v] {:index (inc i) :posts v}))))

(defn create-preview-links
"Turn each vector of previews into a map with :prev and :next keys that contain the uri of the
prev/next preview page"
[previews blog-prefix]
[previews params]
(mapv (fn [[prev target next]]
(merge target
{:prev (if prev (str blog-prefix "/p/" (:index prev)) nil)
:next (if next (str blog-prefix "/p/" (:index next)) nil)}))
{:prev (if prev (page-uri (path "p" (str (:index prev) ".html")) params) nil)
:next (if next (page-uri (path "p" (str (:index next) ".html")) params) nil)}))
(partition 3 1 (flatten [nil previews nil]))))

(defn compile-preview-pages
@@ -272,54 +328,95 @@
[{:keys [blog-prefix posts-per-page blocks-per-preview] :as params} posts]
(when-not (empty? posts)
(let [previews (-> (create-previews posts-per-page blocks-per-preview posts)
(create-preview-links blog-prefix))
previews (if (> (count previews) 1) (assoc-in previews [1 :prev] (str blog-prefix "/index.html")) previews)]
(create-folder (str blog-prefix "/p/"))
(doseq [{:keys [index posts prev next]} previews]
(spit (if (= 1 index) (str public blog-prefix "/index.html") (str public blog-prefix "/p/" index))
(create-preview-links params))
previews (if (> (count previews) 1) (assoc-in previews [1 :prev] (page-uri "index.html" params)) previews)]
(create-folder (path "/" blog-prefix "p"))
(doseq [{:keys [index posts prev next]} previews
:let [index-page? (= 1 index)]]
(write-html (if index-page? (page-uri "index.html" params) (page-uri (path "p" (str index ".html")) params))
params
(render-file "/html/previews.html"
(merge params
{:servlet-context (if (= 1 index) "" "../")
{:active-page "preview"
:home (when index-page? true)
:servlet-context (path "/" blog-prefix "/")
:posts posts
:prev-uri prev
:next-uri next})))))))

(defn compile-index
"Compiles the index page into html and spits it out into the public folder"
[{:keys [blog-prefix disqus?] :as params}]
[{:keys [disqus?] :as params}]
(println (blue "compiling index"))
(spit (str public blog-prefix "/index.html")
(let [uri (page-uri "index.html" params)]
(write-html uri
params
(render-file "/html/home.html"
(merge params
{:home true
{:active-page "home"
:home true
:disqus? disqus?
:post (get-in params [:latest-posts 0])
:uri (str blog-prefix "/index.html")}))))
:uri uri})))))

(defn compile-archives
"Compiles the archives page into html and spits it out into the public folder"
[{:keys [blog-prefix] :as params} posts]
(println (blue "compiling archives"))
(spit (str public blog-prefix "/archives.html")
(let [uri (page-uri "archives.html" params)]
(write-html uri
params
(render-file "/html/archives.html"
(merge params
{:archives true
{:active-page "archives"
:archives true
:groups (group-for-archive posts)
:uri (str blog-prefix "/archives.html")}))))
:servlet-context (path "/" blog-prefix "/")
:uri uri})))))

(defn compile-authors
"For each author, creates a page with filtered posts."
[{:keys [blog-prefix author-root-uri author] :as params} posts]
(println (blue "compiling authors"))
(create-folder (path "/" blog-prefix author-root-uri))
;; if the post author is empty defaults to config's :author
(doseq [{:keys [author posts]} (group-for-author posts author)]
(let [uri (page-uri (str author ".html") :author-root-uri params)]
(println "\t-->" (cyan uri))
(write-html uri
params
(render-file "/html/author.html"
(merge params
{:author author
:groups (group-for-archive posts)
:servlet-context (path "/" blog-prefix "/")
:uri uri}))))))

(defn tag-posts
"Converts the tags in each post into links"
[posts config]
(map #(update-in % [:tags] (partial map (partial tag-info config))) posts))

(defn- template-dir?
"Checks that the dir exists in the templates directory."
[dir]
(.isDirectory (file (str "resources/templates/" dir))))

(defn- markup-entries [post-root page-root]
(let [entries (for [mu (m/markups)
t (distinct [post-root page-root])]
[(str (m/dir mu) "/" t) t])]
(apply concat entries)))

(defn copy-resources-from-markup-folders
"Copy resources from markup folders"
[config]
"Copy resources from markup folders. This does not copy the markup entries."
[{:keys [post-root page-root] :as config}]
(let [folders (->> (markup-entries post-root page-root)
(filter template-dir?))]
(copy-resources
(merge config
{:resources (for [mu (m/markups)
t ["posts" "pages"]] (str (m/dir mu) "/" t))
:ignored-files (map #(re-pattern-from-ext (m/ext %)) (m/markups))})))
{:resources folders
:ignored-files (map #(re-pattern-from-ext (m/ext %)) (m/markups))}))))

(defn read-config
"Reads the config file"
@@ -330,6 +427,9 @@
slurp
read-string
(update-in [:blog-prefix] (fnil str ""))
(update-in [:page-root] (fnil str ""))
(update-in [:post-root] (fnil str ""))
(update-in [:tag-root-uri] (fnil str ""))
(update-in [:rss-name] (fnil str "rss.xml"))
(update-in [:rss-filters] (fnil seq []))
(update-in [:sass-src] (fnil str "css"))
@@ -339,9 +439,8 @@
(update-in [:ignored-files] (fnil seq [#"^\.#.*" #".*\.swp$"])))]
(merge
config
{:page-root (root-path :page-root config)
:post-root (root-path :post-root config)
:tag-root (root-path :tag-root config)}))
{:page-root-uri (root-uri :page-root-uri config)
:post-root-uri (root-uri :post-root-uri config)}))
(catch Exception _
(throw (IllegalArgumentException. "Failed to parse config.edn")))))

@@ -349,22 +448,24 @@
"Generates all the html and copies over resources specified in the config"
[]
(println (green "compiling assets..."))
(let [{:keys [site-url blog-prefix rss-name recent-posts sass-src sass-dest keep-files ignored-files previews?] :as config} (read-config)
(let [{:keys [^String site-url blog-prefix rss-name recent-posts sass-src sass-dest keep-files ignored-files previews? author-root-uri] :as config} (read-config)
posts (add-prev-next (read-posts config))
pages (add-prev-next (read-pages config))
[navbar-pages sidebar-pages] (group-pages pages)
posts-by-tag (group-by-tags posts)
posts (tag-posts posts config)
params (merge config
{:title (:site-title config)
{:today (java.util.Date.)
:title (:site-title config)
:active-page "home"
:tags (map (partial tag-info config) (keys posts-by-tag))
:latest-posts (->> posts (take recent-posts) vec)
:navbar-pages navbar-pages
:sidebar-pages sidebar-pages
:archives-uri (str blog-prefix "/archives.html")
:index-uri (str blog-prefix "/index.html")
:tags-uri (str blog-prefix "/tags.html")
:rss-uri (str blog-prefix "/" rss-name)
:archives-uri (page-uri "archives.html" config)
:index-uri (page-uri "index.html" config)
:tags-uri (page-uri "tags.html" config)
:rss-uri (path "/" blog-prefix rss-name)
:site-url (if (.endsWith site-url "/") (.substring site-url 0 (dec (count site-url))) site-url)
:theme-path (str "file:resources/templates/themes/" (:theme config))})]

@@ -383,16 +484,19 @@
(compile-preview-pages params posts)
(compile-index params))
(compile-archives params posts)
(when author-root-uri
(println (blue "generating authors views"))
(compile-authors params posts))
(println (blue "generating site map"))
(spit (str public blog-prefix "/sitemap.xml") (sitemap/generate site-url ignored-files))
(create-file (path "/" blog-prefix "sitemap.xml") (sitemap/generate site-url ignored-files))
(println (blue "generating main rss"))
(spit (str public blog-prefix "/" rss-name) (rss/make-channel config posts))
(create-file (path "/" blog-prefix rss-name) (rss/make-channel config posts))
(println (blue "generating filtered rss"))
(rss/make-filtered-channels public config posts-by-tag)
(rss/make-filtered-channels config posts-by-tag)
(println (blue "compiling sass"))
(sass/compile-sass->css!
{:src-sass sass-src
:dest-sass (str "../public" blog-prefix "/" sass-dest)
:dest-sass (path ".." "public" blog-prefix sass-dest)
:ignored-files ignored-files
:base-dir "resources/templates/"})))

@@ -25,7 +25,7 @@
(catch Exception e {:error (.getMessage e)}))]
(when-not (:error git-resp)
(if-let [git-src (json/parse-string git-resp)]
{:content (String. (Base64/decodeBase64 (get git-src "content")) "UTF-8")
{:content (String. ^bytes (Base64/decodeBase64 ^String (get git-src "content")) "UTF-8")
:name (get git-src "name")
:uri (get (get git-src "_links") "html")}))))

@@ -1,9 +1,18 @@
(ns cryogen-core.io
(:require [clojure.java.io :as io]
[clojure.string :as s]
[me.raynes.fs :as fs]))

(def public "resources/public")

(defn path
"Creates path from given parts, ignore empty elements"
[& path-parts]
(->> path-parts
(remove s/blank?)
(s/join "/")
(#(s/replace % #"/+" "/"))))
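;; Illustrative sketch (not part of the diff): blank segments are dropped and
;; repeated slashes collapse to one.
(comment
  (path "/" "blog" "" "p" "2.html")            ;=> "/blog/p/2.html"
  (path "resources/public" "/blog/" "rss.xml") ;=> "resources/public/blog/rss.xml"
  )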

(defn re-filter [bool-fn re & other-res]
(let [res (conj other-res re)]
(reify java.io.FilenameFilter
@@ -17,7 +26,7 @@
(-> resource io/resource io/file))

(defn ignore [ignored-files]
(fn [file]
(fn [^java.io.File file]
(let [name (.getName file)
matches (map #(re-find % name) ignored-files)]
(not (some seq matches)))))

@@ -27,19 +36,26 @@
extension (ext) ignoring any files that match the given (ignored-files).
First make sure that the root directory exists, if yes: process as normal;
if no, return empty vector."
[f ext ignored-files]
[f ^String ext ignored-files]
(if-let [root (get-resource f)]
(->> (get-resource f)
file-seq
(filter (ignore ignored-files))
(filter (fn [file] (-> file .getName (.endsWith ext)))))
(filter (fn [^java.io.File file] (-> file .getName (.endsWith ext)))))
[]))

(defn create-folder [folder]
(let [loc (io/file (str public folder))]
(let [loc (io/file (path public folder))]
(when-not (.exists loc)
(.mkdirs loc))))

(defn create-file [file data]
(spit (path public file) data))

(defn create-file-recursive [file data]
(create-folder (.getParent (io/file file)))
(create-file file data))

(defn wipe-public-folder [keep-files]
(let [filenamefilter (reify java.io.FilenameFilter (accept [this _ filename] (not (some #{filename} keep-files))))]
(doseq [path (.listFiles (io/file public) filenamefilter)]
@@ -47,9 +63,9 @@

(defn copy-dir [src target ignored-files]
(fs/mkdirs target)
(let [filename-filter (apply reject-re-filter ignored-files)
(let [^java.io.FilenameFilter filename-filter (apply reject-re-filter ignored-files)
files (.listFiles (io/file src) filename-filter)]
(doseq [f files]
(doseq [^java.io.File f files]
(let [out (io/file target (.getName f))]
(if (.isDirectory f)
(copy-dir f out ignored-files)
@@ -58,7 +74,7 @@
(defn copy-resources [{:keys [blog-prefix resources ignored-files]}]
(doseq [resource resources]
(let [src (str "resources/templates/" resource)
target (str public blog-prefix "/" (fs/base-name resource))]
target (path public blog-prefix (fs/base-name resource))]
(cond
(not (.exists (io/file src)))
(throw (IllegalArgumentException. (str "resource " src " not found")))

@@ -1,4 +1,5 @@
(ns cryogen-core.markup
(:require [clojure.string :as s])
(:import java.util.Collections))

(defonce markup-registry (atom []))

@@ -13,10 +14,11 @@

(defn rewrite-hrefs
"Injects the blog prefix in front of any local links

ex. <img src='/img/cryogen.png'/> becomes <img src='/blog/img/cryogen.png'/>"
[blog-prefix text]
(clojure.string/replace text #"href=.?/|src=.?/" #(str (subs % 0 (dec (count %))) blog-prefix "/")))
(if (s/blank? blog-prefix)
text
(clojure.string/replace text #"href=.?/|src=.?/" #(str (subs % 0 (dec (count %))) blog-prefix "/"))))
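;; Illustrative sketch (not part of the diff): with a blank prefix the text now
;; passes through untouched; otherwise local hrefs/srcs get the prefix.
(comment
  (rewrite-hrefs "" "<a href=\"/archives.html\">")      ;=> "<a href=\"/archives.html\">"
  (rewrite-hrefs "/blog" "<a href=\"/archives.html\">") ;=> "<a href=\"/blog/archives.html\">"
  )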

(defn markups
"Return a vector of Markup implementations. This is the primary entry point
@@ -24,3 +26,13 @@
Markups."
[]
@markup-registry)

(defn register-markup
"Add a Markup implementation to the registry."
[mu]
(swap! markup-registry conj mu))

(defn clear-registry
"Reset the Markup registry."
[]
(reset! markup-registry []))

@@ -4,7 +4,7 @@
[clojure.string :as s]
[text-decoration.core :refer :all]))

(defn load-plugin [url]
(defn load-plugin [^java.net.URL url]
(let [{:keys [description init]} (edn/read-string (slurp url))]
(println (green (str "loading module: " description)))
(-> init str (s/split #"/") first symbol require)
@@ -12,7 +12,5 @@

(defn load-plugins []
(let [plugins (.getResources (ClassLoader/getSystemClassLoader) "plugin.edn")]
(loop []
(load-plugin (.. plugins nextElement openStream))
(when (.hasMoreElements plugins)
(recur)))))
(doseq [plugin (enumeration-seq plugins)]
(load-plugin (. ^java.net.URL plugin openStream)))))

@@ -1,18 +1,21 @@
(ns cryogen-core.rss
(:require [clj-rss.core :as rss]
[clojure.xml :refer [emit]]
[text-decoration.core :refer :all])
[text-decoration.core :refer :all]
[cryogen-core.io :refer [create-file path]])
(:import java.util.Date))

(defn posts-to-items [site-url posts]
(defn posts-to-items [^String site-url posts]
(map
(fn [{:keys [uri title content date]}]
(let [link (str (if (.endsWith site-url "/") (apply str (butlast site-url)) site-url) uri)]
(fn [{:keys [uri title content date enclosure author description]}]
(let [link (str (if (.endsWith site-url "/") (apply str (butlast site-url)) site-url) uri)
enclosure (if (nil? enclosure) "" enclosure)]
{:guid link
:link link
:title title
:description content
:description (or description content)
:author author
:enclosure enclosure
:pubDate date}))
posts))

@@ -26,8 +29,8 @@
:lastBuildDate (Date.)})
(posts-to-items (:site-url config) posts)))

(defn make-filtered-channels [public {:keys [rss-filters blog-prefix] :as config} posts-by-tag]
(defn make-filtered-channels [{:keys [rss-filters blog-prefix] :as config} posts-by-tag]
(doseq [filter rss-filters]
(let [uri (str public blog-prefix "/" (name filter) ".xml")]
(let [uri (path "/" blog-prefix (str (name filter) ".xml"))]
(println "\t-->" (cyan uri))
(spit uri (make-channel config (get posts-by-tag filter))))))
(create-file uri (make-channel config (get posts-by-tag filter))))))

@@ -17,17 +17,20 @@
(defn compass-installed?
"Checks for the installation of Compass."
[]
(= 0 (:exit (sh "compass" "--version"))))
(try
(= 0 (:exit (sh "compass" "--version")))
(catch java.io.IOException _
false)))

(defn find-sass-files
"Given a Diretory, gets files, Filtered to those having scss or sass
extention. Ignores files matching any ignored regexps."
[base-dir dir ignored-files]
(let [filename-filter (match-re-filter #"(?i:s[ca]ss$)")]
(let [^java.io.FilenameFilter filename-filter (match-re-filter #"(?i:s[ca]ss$)")]
(->> (.listFiles (io/file base-dir dir) filename-filter)
(filter #(not (.isDirectory %)))
(filter #(not (.isDirectory ^java.io.File %)))
(filter (ignore ignored-files))
(map #(.getName %)))))
(map #(.getName ^java.io.File %)))))

(defn compile-sass-file!
"Given a sass file which might be in src-sass directory,
@@ -37,10 +40,9 @@
dest-sass
base-dir]}]
(shell/with-sh-dir base-dir
(sh "sass"
"--update"
(when (compass-installed?) "--compass")
(str src-sass ":" dest-sass))))
(if (compass-installed?)
(sh "sass" "--compass" "--update" (str src-sass ":" dest-sass))
(sh "sass" "--update" (str src-sass ":" dest-sass)))))

(defn compile-sass->css!
"Given a directory src-sass, looks for all sass files and compiles them into
@@ -55,7 +57,7 @@ the command. Shows you any problems it comes across when compiling.
;; I found sass files,
;; If sass is installed
(do
(println "Compiling Sass Files:" src-sass dest-sass)
(println "\t" (cyan src-sass) "-->" (cyan dest-sass))
(let [result (compile-sass-file! opts)]
(if (zero? (:exit result))
;; no problems in sass compilation

@@ -10,7 +10,7 @@
(let [fmt (java.text.SimpleDateFormat. "yyyy-MM-dd")]
(.format fmt date)))

(defn loc [f]
(defn loc [^java.io.File f]
(-> f (.getAbsolutePath) (.split "resources/public/") second))

(defn generate [site-url ignored-files]
@@ -19,7 +19,7 @@
{:tag :urlset
:attrs {:xmlns "http://www.sitemaps.org/schemas/sitemap/0.9"}
:content
(for [f (find-assets "public" ".html" ignored-files)]
(for [^java.io.File f (find-assets "public" ".html" ignored-files)]
{:tag :url
:content
[{:tag :loc

@@ -1,9 +1,11 @@
(ns cryogen-core.toc
(:require [crouton.html :as html]
(:require [clojure.zip :as z]
[crouton.html :as html]
[hiccup.core :as hiccup]))

(def _h [:h1 :h2 :h3 :h4 :h5 :h6])
(defn- compare_index [i1 i2] (- (.indexOf _h i2) (.indexOf _h i1)))

(defn- compare-index [i1 i2] (- (.indexOf ^clojure.lang.APersistentVector _h i2) (.indexOf ^clojure.lang.APersistentVector _h i1)))

(defn- get-headings
"Turn a body of html content into a vector of elements whose tags are
@@ -18,35 +20,96 @@
headings)))
[] content))

(defn make-links
"Create a table of contents from the given headings. This function will look
for either:
(1) headings with a child anchor with a non-nil name attribute, e.g.
<h1><a name=\"reference\">Reference Title</a></h1>
or
(2) headings with an id attribute, e.g. <h1 id=\"reference\">Reference Title</h1>
In both cases above, the anchor reference becomes \"#reference\" and the
anchor text is \"Reference Title\"."
(defn- zip-toc-tree-to-insertion-point
"Given a toc-tree zipper and a header level, navigate
the zipper to the appropriate parent of the level for that header
to be inserted and return the zipper."
[toctree h-tag]
(if-let [current-tag (-> toctree first :value :tag)]
(let [direction (compare-index h-tag current-tag)]
(cond (zero? direction) (z/up toctree) ; Tag belongs at current level
(neg? direction) toctree ; Tag belongs below this level
(pos? direction) (recur (z/up toctree) h-tag))) ; Keep looking up
; This level is the root list, return it
toctree))

(defn- insert-toc-tree-entry
"Inserts a toc-tree (zipper) entry for the given entry at the appropriate place.
Obeys the invariant that the toc-tree (zipper) is always moved to the inserted loc."
[tree entry]
(let [{htag :tag} entry
tree (zip-toc-tree-to-insertion-point tree htag)]
(-> tree (z/append-child {:children [] :value entry}) z/down z/rightmost)))

(defn- build-toc-tree
"Given a sequence of header nodes, build a toc tree using zippers
and return it."
[headings]
(loop [items headings acc nil _last nil]
(loop [zp (z/zipper
map?
:children
(fn [node children] (assoc node :children (apply vector children)))
{:value :root :children []})
items headings]
(if-let [{tag :tag {id :id} :attrs [{{name :name} :attrs} title :as htext] :content} (first items)]
(let [anchor (or id name)]
(if (nil? anchor)
(recur (rest items) acc nil)
(let [entry [:li [:a {:href (str "#" anchor)} (or title (first htext))]]
jump (compare_index _last tag)]
(cond (> jump 0) (recur (rest items) (str acc "<ol>" (hiccup/html entry)) tag)
(= jump 0) (recur (rest items) (str acc (hiccup/html entry)) tag)
(< jump 0) (recur (rest items) (str acc (apply str (repeat (* -1 jump) "</ol>"))
(hiccup/html entry)) tag)))))
(str acc "</ol>"))))
(recur zp (rest items))
(recur (insert-toc-tree-entry zp
{:tag tag
:anchor anchor
:text (or
(if (string? title) title (-> title :content first))
(first htext))})
(rest items))))
(z/root zp))))

(defn generate-toc [html]

(defn- make-toc-entry
"Given an anchor link and some text, construct a toc entry
consisting of link to the anchor using the given text, wrapped
in an <li> tag."
[anchor text]
(when (and anchor text)
[:li [:a {:href (str "#" anchor)} text]]))

(defn- build-toc
"Given the root of a toc tree and either :ol or :ul,
generate the table of contents and return it as a hiccup tree."
[toc-tree list-open & {:keys [outer-list?] :or {outer-list? true}}]
(let [{:keys [children], {:keys [anchor text]} :value} toc-tree
li (make-toc-entry anchor text)
first-list-open (if outer-list?
(keyword (str (name list-open) ".content"))
list-open)]
; Create hiccup sequence of :ol/:ul tag and sequence of :li tags
(if (seq children)
(let [sublist [first-list-open (map build-toc children
(repeat list-open)
(repeat :outer-list?)
(repeat false))]]
(if-let [li li] ; The root element has nothing so ignore it
(seq [li sublist]) ; Use seq to lazily concat the li with the sublists
sublist))
li))) ; Or just return the naked :li tag

(defn generate-toc
"Reads an HTML string and parses it for headers, then returns a list of links
to them.

Optionally, a map of :list-type can be provided with value :ul, :ol, or true.
:ol and true will result in an ordered list being generated for the table of
contents, while :ul will result in an unordered list. The default is an
ordered list."
[^String html & {:keys [list-type] :or {list-type :ol}}]
(let [list-type (if (true? list-type) :ol list-type)]
(-> html
(.getBytes "UTF-8")
(java.io.ByteArrayInputStream.)
(html/parse)
:content
(get-headings)
(make-links)
(clojure.string/replace-first #"ol" "ol class=\"contents\"")))
(build-toc-tree)
(build-toc list-type)
(hiccup/html))))

@@ -1,14 +1,15 @@
(ns cryogen-core.watcher
(:require [clojure.java.io :refer [file]]
[cryogen-core.io :refer [ignore]]
[pandect.core :refer [md5]]
[pandect.algo.md5 :refer [md5]]
[hawk.core :as hawk]
[clojure.set :as set]))

(defn get-assets [path ignored-files]
(->> path
file
file-seq
(filter #(not (.isDirectory %)))
(filter #(not (.isDirectory ^java.io.File %)))
(filter (ignore ignored-files))))

(defn checksums [path ignored-files]
@@ -21,15 +22,15 @@
(when-some [changes (set/difference new-sum-set old-sum-set)]
(vals (select-keys new-sums changes)))))

(defn watch-assets [root ignored-files action]
(loop [sums (checksums root ignored-files)]
(Thread/sleep 300)
(defn watch-assets [sums root ignored-files action]
(let [new-sums (checksums root ignored-files)]
(when (find-changes sums new-sums)
(action))
(recur new-sums))))
(when (find-changes @sums new-sums)
(action)
(reset! sums new-sums))))

(defn start-watcher! [root ignored-files action]
(doto (Thread. #(watch-assets root ignored-files action))
(.setDaemon true)
(.start)))
(let [sums (atom (checksums root ignored-files))
handler (fn [ctx e]
(watch-assets sums root ignored-files action))]
(hawk/watch! [{:paths [root]
:handler handler}])))
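;; Illustrative sketch (not part of the diff): the watcher is now driven by hawk
;; instead of a polling thread. A hypothetical caller might look like:
(comment
  (start-watcher! "resources/templates"
                  [#"^\.#.*" #".*\.swp$"]
                  #(println "change detected, recompiling")))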

test/cryogen_core/compiler_test.clj (new file, 118 lines)
@@ -0,0 +1,118 @@
(ns cryogen-core.compiler-test
(:require [clojure.test :refer :all]
[cryogen-core.compiler :refer :all]
[cryogen-core.io :refer [path]]
[cryogen-core.markup :as m]
[me.raynes.fs :as fs])
(:import [java.io File]))

; Test that the content-until-more-marker return nil or correct html text.
(deftest test-content-until-more-marker
; text without more marker, return nil
(is (nil? (content-until-more-marker "<div id=\"post\">
<div class=\"post-content\">
this post does not have more marker
</div>
</div>")))
; text with more marker, return text before more marker with closing tags.
(is (= (content-until-more-marker "<div id='post'>
<div class='post-content'>
this post has more marker
<!--more-->
and more content.
</div>
</div>")
"<div id=\"post\"><div class=\"post-content\">
this post has more marker
</div></div>")))

(defn- markdown []
(reify m/Markup
(dir [this] "md")
(ext [this] ".md")))

(defn- asciidoc []
(reify m/Markup
(dir [this] "asc")
(ext [this] ".asc")))

(defn- create-entry [dir file]
(fs/mkdirs (File. dir))
(fs/create (File. (str dir File/separator file))))

(defn- reset-resources []
(fs/delete-dir "resources")
(create-entry "resources" ".gitkeep"))

(defn- check-for-pages [mu]
(find-pages {:page-root "pages"} mu))

(defn- check-for-posts [mu]
(find-posts {:post-root "posts"} mu))

(deftest test-find-entries
(reset-resources)
(let [mu (markdown)]
(testing "Finds no files"
(is (empty? (check-for-posts mu))
(is (empty? (check-for-pages mu))))

(let [dir->file
[[check-for-posts "resources/templates/md/posts" "post.md"]
[check-for-posts "resources/templates/posts" "post.md"]
[check-for-pages "resources/templates/md/pages" "page.md"]
[check-for-pages "resources/templates/pages" "page.md"]]]
(doseq [[check-fn dir file] dir->file]
(testing (str "Finds files in " dir)
(create-entry dir file)
(let [entries (check-fn mu)]
(is (= 1 (count entries)))
(is (= (.getAbsolutePath (File. (str dir File/separator file)))
(.getAbsolutePath (first entries)))))
(reset-resources)))))))

(defmacro with-markup [mu & body]
`(do
(m/register-markup ~mu)
(try
~@body
(finally
(m/clear-registry)))))

(defn- copy-and-check-markup-folders
"Create entries in the markup folders. If `with-dir?` is set to true, include
the Markup implementation's `dir` in the path. Check that the folders exist
in the output folder."
[[pages-root posts-root :as dirs] mu with-dir?]
(doseq [dir dirs]
(let [path (if with-dir?
(str (m/dir mu) "/" dir)
dir)]
(create-entry (str "resources/templates/" path)
(str "entry" (m/ext mu)))))
(with-markup mu
(copy-resources-from-markup-folders
{:post-root posts-root
:page-root pages-root
:blog-prefix "/blog"}))
(doseq [dir dirs]
(is (.isDirectory (File. (str "resources/public/blog/" dir))))))

(deftest test-copy-resources-from-markup-folders
(reset-resources)
(testing "No pages or posts nothing to copy"
(copy-resources-from-markup-folders
{:post-root "pages"
:page-root "posts"
:blog-prefix "/blog"})
(is (not (.isDirectory (File. (str "resources/public/blog/pages")))))
(is (not (.isDirectory (File. (str "resources/public/blog/posts"))))))

(reset-resources)
(doseq [mu [(markdown) (asciidoc)]]
(testing (str "Test copy from markup folders (" (m/dir mu) ")")
(let [dirs ["pages" "posts"]]
(copy-and-check-markup-folders dirs mu true)
(reset-resources)
(copy-and-check-markup-folders dirs mu false))))
(reset-resources))

test/cryogen_core/toc_test.clj (new file, 109 lines)
@@ -0,0 +1,109 @@
(ns cryogen-core.toc-test
(:require [clojure.test :refer :all]
[clojure.string :refer [join]]
[clojure.zip :as z]
[crouton.html :as html]
[hiccup.core :as hiccup]
[cryogen-core.toc :refer :all]))

; Reimport private functions
(def get-headings #'cryogen-core.toc/get-headings)
(def make-toc-entry #'cryogen-core.toc/make-toc-entry)
(def zip-toc-tree-to-insertion-point #'cryogen-core.toc/zip-toc-tree-to-insertion-point)
(def insert-toc-tree-entry #'cryogen-core.toc/insert-toc-tree-entry)
(def build-toc-tree #'cryogen-core.toc/build-toc-tree)
(def build-toc #'cryogen-core.toc/build-toc)

(defn parse-to-headings
[hiccup-seq]
(-> hiccup-seq hiccup/html html/parse-string :content get-headings))

; Test that the get-headings function properly filters non-headers
(deftest test-get-headings
(let [noisy-headers [:div [:h1 "First H1"]
[:p "Ignore..."]
[:h2 "First H2"]]]
(is (= (parse-to-headings noisy-headers)
[{:tag :h1 :attrs nil :content ["First H1"]}
{:tag :h2 :attrs nil :content ["First H2"]}]))))

; Test that the make-toc-entry ignores invalid input
(deftest test-make-toc-entry
(is (nil?
(make-toc-entry nil "Text")))
(is (nil?
(make-toc-entry "anchor" nil)))
(is (= [:li [:a {:href "#anchor"} "Text"]]
(make-toc-entry "anchor" "Text"))))

; Test that the built table of contents always treats later
; headers as being at the same level as earlier headers, even
; if the later headers are strictly greater in value.
; E.G.
; * h2
; * h3
; * h1
(deftest test-build-toc
(let [simplest-header [:div [:h2 [:a {:name "test"}] "Test"]]
no-headers [:div [:p "This is not a header"]]
closing-header-larger-than-opening-1
[:div [:h2 [:a {:name "starting_low"}]
"Starting Low"]
[:h1 [:a {:name "finishing_high"}]
"Finishing High"]]
closing-header-larger-than-opening-2
[:div [:h2 [:a {:name "starting_low"}]
"Starting Low"]
[:h4 [:a {:name "jumping_in"}]
"Jumping Right In"]
[:h3 [:a {:name "pulling_back"}]
"But then pull back"]
[:h2 [:a {:name "to_the_top"}]
"To the top"]]]
(is (= [:ol.content (seq [[:li [:a {:href "#test"} "Test"]]])]
(-> simplest-header parse-to-headings build-toc-tree
(build-toc :ol))))
(is (nil?
(-> no-headers parse-to-headings build-toc-tree
(build-toc :ol))))
(is (= [:ol.content (seq [[:li [:a {:href "#starting_low"} "Starting Low"]]
[:li [:a {:href "#finishing_high"} "Finishing High"]]])]
(-> closing-header-larger-than-opening-1
parse-to-headings
build-toc-tree
(build-toc :ol)))
"No outer header should be less indented than the first header tag.")
(is (= [:ul.content
(seq [
(seq [
[:li [:a {:href "#starting_low"} "Starting Low"]]
[:ul
(seq [
[:li [:a {:href "#jumping_in"} "Jumping Right In"]]
[:li [:a {:href "#pulling_back"} "But then pull back"]]
])
] ])
[:li [:a {:href "#to_the_top"} "To the top"]]
])]
(-> closing-header-larger-than-opening-2
parse-to-headings
build-toc-tree
(build-toc :ul)))
(join "" ["Inner headers can be more indented, "
"but outer headers cannot be less indented "
"than the original header."]))
))

(deftest test-generate-toc
(let [htmlString "<div><h2><a name=\"test\"></a>Test</h2></div>"]
(is (= "<ol class=\"content\"><li><a href=\"#test\">Test</a></li></ol>"
(generate-toc htmlString)))
(is (= "<ol class=\"content\"><li><a href=\"#test\">Test</a></li></ol>"
(generate-toc htmlString :list-type true)))
(is (= "<ol class=\"content\"><li><a href=\"#test\">Test</a></li></ol>"
(generate-toc htmlString :list-type :ol)))
(is (= "<ul class=\"content\"><li><a href=\"#test\">Test</a></li></ul>"
(generate-toc htmlString :list-type :ul)))))