Compare commits

...

15 Commits
v0.2.0 ... main

@@ -12,14 +12,17 @@ finished, it was only a proof of concept. But now it works for creating an entire

Next will be,

- * Some Tests
+ * Some more Tests
+ * Standalone, handle Webpack only as wrapper

Maybe later,

* Integrate Eta.js and LiquidJS
* Hooks for handling generic content

+ ## Images for Tests
+ No Dogs were harmed during the Tests, all Images are under CC0 and from [pexels.com](https://www.pexels.com/creative-commons-images/).

## Additional Packages

[@site-o-mat/webpack-plugin](https://gitea.node001.net/site-o-mat/webpack-plugin) - Wrapper for Core to use as Webpack Plugin
@@ -95,7 +98,7 @@ After parsing Markdown-Files a Page always has these values,

| hidden | {Boolean} | false | If hidden, file will not be written |
| blocks | {Object} | Object | Blocks of Directory |
| content | {String} | empty | Parsed Markdown |
| view | {String} | page.njk | Template to render file |

## Nesting

@@ -148,6 +151,78 @@ can be used like this,

{% endFor %}
```
## Media
Image Files can be added to the Markdown-Structure and will be processed by [Sharp](https://sharp.pixelplumbing.com/).
```
recipes
└ index.md
_images
└ dog.jpg
```
In Fields, all keys named "src" are handled as paths to Images. Files are searched for first
in the current Directory of the page; if nothing is found there, the Root-Directory of the
Markdown-Structure is searched. Blocks can also have their own Images.
```
---
title: "health goth DIY tattooed"
view: "home.njk"
meta:
    description: "La"
media:
    teaser:
        src: "_images/dog.jpg"
        alt: "cold-pressed"
---
```
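The search order described above (Directory of the page first, then the Root-Directory of the Markdown-Structure) corresponds roughly to the sketch below; `findImage`, `sourceRoot` and `pageDir` are illustrative names for this sketch, not identifiers from the codebase.
```
const fs = require('fs')
const path = require('path')

// try the directory of the current page first, then fall back
// to the root of the markdown structure
function findImage(sourceRoot, pageDir, src) {
    const inPageDir = path.join(sourceRoot, pageDir, src)

    if (fs.existsSync(inPageDir)) {
        return inPageDir
    }

    return path.join(sourceRoot, src)
}
```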
It is also possible to add multiple Sizes. For more, check [Sharp](https://sharp.pixelplumbing.com/).
```
---
title: "health goth DIY tattooed"
view: "home.njk"
meta:
    description: "La"
media:
    teaser:
        src:
            src: '_images/dog.jpg'
            sizes:
                - width: 300
                - width: 500
                  height: 100
        alt: "cold-pressed"
---
```
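Judging by the tests further down, a "src" with multiple Sizes resolves to an object of generated asset paths keyed by size, while a plain "src" resolves to a single string. Roughly:
```
// rough shape of a resolved multi-size src; the hash segments of the
// real paths are derived from the content of the image file
const resolvedSrc = {
    '300': '/assets/<hash>/dog300.webp',
    '500x100': '/assets/<hash>/dog500x100.webp',
    'full': '/assets/<hash>/dog.webp'
}
```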
Options from Sharp can be added in two different ways: as "options" for all Sizes, or
directly on a single Size, in which case the main options are ignored for that Size.
```
---
title: "health goth DIY tattooed"
view: "home.njk"
meta:
    description: "La"
media:
    teaser:
        src:
            src: '_images/dog.jpg'
            sizes:
                - width: 300
                - width: 500
                  height: 100
                  position: 'left'
        alt: "cold-pressed"
        options:
            position: 'right'
---
```
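A condensed sketch of that rule, based on the Media factory further down (`resolveResizeOptions` is an illustrative helper, not part of the codebase):
```
// a size that only sets width/height inherits the main options,
// a size that carries its own options is used as-is
function resolveResizeOptions(size, mainOptions = {}) {
    const onlyDimensions = Object.keys(size)
        .every((key) => key === 'width' || key === 'height')

    if (onlyDimensions) {
        return Object.assign({}, mainOptions, size)
    }

    return size
}
```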
## Queries

Queries can be used in Templates to get Pages.

@@ -207,25 +282,6 @@ This function handles the manifest-File from [Laravel-Mix](https://laravel-mix.com/).

<script src="{{ asset('js/app.js') }}"></script>
```
- #### Filters
- ##### resize
- The Filter uses [sharp](https://github.com/lovell/sharp) to crop, resize and
- optimize Images. The Filter needs a relative Path in the File Structure.
- Basic Usage:
- ```
- {% page.teaser.src | resize({ 'width': '300' }) %}
- ```
- Add options:
- ```
- {% page.teaser.src | resize({ 'width': '300' }, { sigma: 2 }) %}
- ```
## Json

Results from PageQuery can also be created as json-File. They can be used with a

@ -1,6 +1,6 @@
{ {
"name": "@site-o-mat/core", "name": "@site-o-mat/core",
"version": "0.2.0", "version": "0.5.0",
"build": "webpack", "build": "webpack",
"author": "Björn Hase <me@herr-hase.wtf>", "author": "Björn Hase <me@herr-hase.wtf>",
"main": "index.js", "main": "index.js",

Binary file not shown (added image, 41 KiB).

@@ -1,5 +1,7 @@
---
title: "Data"
+ media:
+     src: '_images/dog-tired.webp'
---

## Normcore cold-pressed ramps DSA

Binary file not shown (added image, 2.3 MiB).

Binary file not shown (added image, 1.5 MiB).

@@ -3,6 +3,8 @@ title: "article"
view: "article.njk"
meta:
    description: "DSA yes plz hot chicken green juice"
+ media:
+     src: '_images/dog.jpg'
---

## Normcore cold-pressed ramps DSA

@@ -3,6 +3,13 @@ title: "blog"
view: "page.njk"
meta:
    description: "DSA yes plz hot chicken green juice"
+ media:
+     src:
+         src: '_images/dog.jpg'
+         sizes:
+             - width: 300
+             - width: 500
+               height: 100
---

## Normcore cold-pressed ramps DSA

@@ -3,6 +3,14 @@ title: "health goth DIY tattooed"
view: "page.njk"
meta:
    description: "DSA yes plz hot chicken green juice"
+     robots: "noindex"
+ media:
+     src:
+         src: '_images/dog.jpg'
+         sizes:
+             - width: 300
+             - width: 500
+               height: 100
---

## Normcore cold-pressed ramps DSA

@@ -14,4 +22,6 @@ bicycle rights sartorial godard slow-carb thundercats art party cray JOMO. Truff

## Bitters kale chips chambray activated charcoal

wolf keffiyeh hell of selfies. Wolf readymade shoreditch flexitarian venmo single-origin coffee, knausgaard fit actually street art cold-pressed iPhone gatekeep. Migas bruh adaptogen semiotics marfa pickled yuccie. Locavore normcore lomo, shoreditch fashion axe actually glossier iPhone photo booth blue bottle DIY XOXO williamsburg. Pinterest whatever taxidermy, kale chips prism XOXO schlitz twee tote bag woke swag. Wayfarers fashion axe heirloom humblebrag synth. Whatever succulents PBR&B, pop-up enamel pin echo park tonx stumptown taiyaki.

+ ![_images/dog.jpg](_images/dog.jpg)

@@ -0,0 +1,4 @@
title: "Test"
language: "de"
domain: "test.lan"
https: true

@@ -37,11 +37,6 @@ class ConfigStore {
     * @return {String|Object}
     */
    get(key) {
-         if (!this._data?.[key]) {
-             throw new Error(key + ' not found in ConfigStore!')
-         }
        return this._data[key]
    }
}

@@ -49,4 +44,4 @@ class ConfigStore {

// create instance
const instance = new ConfigStore();

module.exports = instance
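With the guard removed, `get()` simply returns `undefined` for unknown keys instead of throwing. A minimal usage sketch following the pattern in the tests (the require path assumes a script in the project root):
```
const configStore = require('./src/config.js')

configStore.set('source', './ressources')
configStore.set('destination', './dist')

// no longer throws, just returns undefined
const missing = configStore.get('does-not-exist')
```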

@@ -1,7 +1,6 @@
const path = require('path')
const fs = require('fs')
+ const configStore = require('./../config.js')
- const Media = require('./../factories/media.js')

/**
 * asset - checks manifest.json for given path and return

@@ -18,7 +17,7 @@ function asset(staticPath) {
    let result = staticPath

    // path to mix-manifest
-     const file = path.join(path.resolve()) + 'mix-manifest.json'
+     const file = path.join(configStore.get('destination')) + '/mix-manifest.json'

    if (fs.existsSync(file)) {

@@ -33,21 +32,4 @@ function asset(staticPath) {
    return result
}

- /**
-  * asset - checks manifest.json for given path and return
-  * file path with id for cache busting
-  *
-  *
-  * @param {String} publicPath
-  *
-  */
- async function resize(src, sizes, options, done)
- {
-     const media = new Media()
-
-     src = await media.resize(src, sizes, options)
-
-     done(null, src)
- }
-
- module.exports = { asset, resize }
+ module.exports = { asset }
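The idea behind `asset()` after this change, as a simplified sketch rather than the exact implementation (`destination` stands in for `configStore.get('destination')`):
```
const fs = require('fs')
const path = require('path')

// look the path up in mix-manifest.json inside the destination directory
// and return the hashed path for cache busting if it is listed there
function assetSketch(destination, staticPath) {
    const manifestFile = path.join(destination, 'mix-manifest.json')

    if (!fs.existsSync(manifestFile)) {
        return staticPath
    }

    const manifest = JSON.parse(fs.readFileSync(manifestFile, 'utf8'))

    return manifest[staticPath] || staticPath
}
```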

@@ -4,7 +4,7 @@ const assign = require('assign-deep')
const { minify } = require('html-minifier')
const configStore = require('./../config.js')
- const { asset, resize } = require('./helpers.js')
+ const { asset } = require('./helpers.js')
const PageQuery = require('./../queries/pages.js')
const dayjs = require('dayjs')

@@ -37,14 +37,6 @@ class Engine {
        this.nunjucks = nunjucks.configure(views, this._options)

-         // add filter: resize
-         this.nunjucks.addFilter('resize', (...args) => {
-             const done = args.pop()
-             const options = args?.[2] ? {} : args[2]
-
-             resize(args[0], args[1], options, done)
-         }, true)

        // adding defaults for view, data from site.yml, functions and pageQuery
        this._defaults = assign(this._options.defaults, {
            site: site,

@@ -1,6 +1,5 @@
const path = require('path')
const parseMarkdownFile = require('./../parsers/markdown.js')
const assign = require('assign-deep')

/**

@@ -23,10 +22,12 @@ class Block {
     * @param {string} fileString
     *
     */
-     constructor(fileString) {
+     constructor(fileString, dirPath) {

        // parse string of file
-         const parsedFile = parseMarkdownFile(fileString)
+         const parsedFile = parseMarkdownFile(fileString, dirPath)
+
+         this._dirPath = dirPath

        // getting parsed data
        this._content = parsedFile.content

@@ -46,4 +47,4 @@ class Block {
    }
}

module.exports = Block

@@ -4,20 +4,30 @@ const sharp = require('sharp')
const mkdirp = require('mkdirp')
const crypto = require('crypto')
const slugify = require('slugify')
+ const assign = require('assign-deep')
const configStore = require('./../config.js')

/**
+  * Media
 *
+  * change size, optimize and copy media to assets
+  *
+  * @author Björn Hase <me@herr-hase.wtf>
+  * @license http://opensource.org/licenses/MIT The MIT License
+  * @link https://gitea.node001.net/HerrHase/siteomat-webpack-plugin.git
 *
 */
class Media {

-     constructor() {
+     constructor(dirPath = null) {
+         this._path = dirPath
        this._DIR_ASSETS = '/assets/'
    }

    /**
+      * resolve media
     *
     * @param {string} src
     * @param {object} sizes
@@ -25,75 +35,221 @@ class Media {
     * @return {string}
     *
     */
-     async resize(src, sizes, options = {}) {
-         this._extension = path.extname(src)
-         this._filename = slugify(path.basename(src, this._extension))
-
-         this._process = await sharp(configStore.get('source') + '/' + src)
-
-         // resize without options and with options
-         if (Object.getOwnPropertyNames(options).length === 0) {
-             await this._process
-                 .resize(sizes)
-         } else {
-             this._process
-                 .resize(sizes, options)
-         }
-
-         // optimize
-         this._optimize()
-
-         const fileBuffer = await this._process
-             .toBuffer()
-
-         const relativeDestinationPath = this._DIR_ASSETS + this._resolveRelativeDestinationPath(fileBuffer)
-
-         // create directories and write file
-         mkdirp.sync(configStore.get('destination') + relativeDestinationPath)
-         fs.writeFileSync(configStore.get('destination') + relativeDestinationPath + '/' + this._filename + this._extension, fileBuffer)
-
-         return relativeDestinationPath + '/' + this._filename + this._extension
-     }
-
-     /**
-      * @TODO much nicer to add a hook system so behavior can be change
-      *
-      * @param {string} extension
-      *
-      */
-     _optimize() {
-         if (this._extension === '.gif') {
-             this._process
-                 .gif({
-                     reoptimise: true
-                 })
-         } else {
-
-             // change extension
-             this._extension = '.webp'
-
-             this._process
-                 .webp({
-                     lossless: true
-                 })
-         }
-     }
-
-     /**
-      * resolve path to write file, hash will be get from fileBuffer and
-      *
-      * @param {object} fileBuffer
-      * @return {string}
-      *
-      */
-     _resolveRelativeDestinationPath(fileBuffer) {
-         const hash = crypto.createHash('sha1')
-         hash.update(fileBuffer)
-
-         return hash.digest('hex').match(new RegExp('.{1,8}', 'g')).join('/')
-     }
+     resolve(src, sizes = {}, options = {}) {
+         let extension = path.extname(src)
+         let sourcePath
+
+         const filename = slugify(path.basename(src, extension))
+
+         // check for images in path
+         sourcePath = this._getSourcePath(src)
+
+         // getting sharp
+         const process = sharp(sourcePath)
+
+         if (extension === '.gif') {
+             process
+                 .gif({
+                     reoptimise: true
+                 })
+         } else {
+
+             // change extension
+             extension = '.webp'
+
+             process
+                 .webp({
+                     lossless: true
+                 })
+         }
+
+         // destination
+         const destinationPath = this._getDestinationPath(sourcePath)
+
+         // create files to write
+         const filesToWrite = this._getFilesToWrite(filename, extension, destinationPath, sizes)
+
+         // results contains only path as strings
+         const results = {}
+
+         // create path if not exists
+         if (!fs.existsSync(configStore.get('destination') + destinationPath)) {
+             mkdirp.sync(configStore.get('destination') + destinationPath)
+         }
+
+         filesToWrite.forEach((file) => {
+             if (!fs.existsSync(configStore.get('destination') + file.path)) {
+                 this._writeFile(file, process, options)
+             }
+
+             results[file.name] = file.path
+         })
+
+         return this._reduce(results)
+     }
+
+     /**
+      * get source path
+      *
+      * @param {String} src
+      * @return {String}
+      *
+      */
+     _getSourcePath(src) {
+         let sourcePath = configStore.get('source') + '/' + src
+
+         if (Array.isArray(this._path)) {
+             if (fs.existsSync(configStore.get('source') + this._path[0] + '/' + src)) {
+                 sourcePath = configStore.get('source') + this._path[0] + '/' + src
+             } else if (fs.existsSync(configStore.get('source') + this._path[1] + '/' + src)) {
+                 sourcePath = configStore.get('source') + this._path[1] + '/' + src
+             }
+         } else if (this._path && fs.existsSync(configStore.get('source') + this._path + '/' + src)) {
+             sourcePath = configStore.get('source') + this._path + '/' + src
+         }
+
+         return sourcePath
+     }
+
+     /**
+      * if only full is in results, reduce object to string
+      *
+      * @param {Object} results
+      * @return {mixed}
+      *
+      */
+     _reduce(results) {
+         if (Object.getOwnPropertyNames(results).length === 1) {
+             results = results['full']
+         }
+
+         return results
+     }
+
+     /**
+      * getting files to write
+      *
+      * @param {string} src
+      * @param {string} extension
+      * @param {Object} sizes
+      * @return {string}
+      */
+     _getFilesToWrite(filename, extension, destinationPath, sizes) {
+         const results = []
+
+         // add original
+         results.push(this._getFile(filename, destinationPath, extension))
+
+         // check for sizes
+         if (typeof sizes === 'object' && !Array.isArray(sizes)) {
+             results.push(this._getFile(filename, destinationPath, extension, sizes))
+         } else if (Array.isArray(sizes)) {
+             sizes.forEach((size) => {
+                 results.push(this._getFile(filename, destinationPath, extension, size))
+             })
+         }
+
+         return results
+     }
+
+     /**
+      * write files to destination
+      *
+      * @param {string} file
+      * @param {object} process
+      * @param {Object} options
+      */
+     _writeFile(file, process, options) {
+
+         // if sizes have height or width with no optional parameters then check for merge of options
+         if (file.sizes &&
+             (((!file.sizes.height || !file.sizes.width) && Object.getOwnPropertyNames(file.sizes).length === 1) ||
+             (file.sizes.height && file.sizes.width && Object.getOwnPropertyNames(file.sizes).length === 2))) {
+             process.resize(this._mergeOptions(file.sizes, options))
+
+         // if already options in sizes ignore options
+         } else if (file.sizes) {
+             process.resize(file.sizes)
+         }
+
+         process.toFile(configStore.get('destination') + file.path)
+     }
+
+     /**
+      * if options exist merge them with sizes
+      *
+      * @param {object} sizes
+      * @param {Object} options
+      */
+     _mergeOptions(sizes, options) {
+         if (Object.getOwnPropertyNames(options).length > 0) {
+             sizes = assign(options, sizes)
+         }
+
+         return sizes
+     }
+
+     /**
+      * generate destination path from hash of file
+      *
+      * @param {string} sourcePath
+      * @return {string}
+      */
+     _getDestinationPath(sourcePath) {
+         const hash = crypto.createHash('sha1')
+         const file = fs.readFileSync(sourcePath)
+
+         // getting hash from file
+         hash.update(file)
+
+         return this._DIR_ASSETS + hash.digest('hex').match(new RegExp('.{1,8}', 'g')).join('/')
+     }
+
+     /**
+      * create file as object, adding path, name with sizes
+      *
+      * @param {string} filename
+      * @param {object} destinationPath
+      * @param {Object} extension
+      * @param {Object} sizes
+      * @return {object}
+      */
+     _getFile(filename, destinationPath, extension, sizes = undefined) {
+         let file = {
+             name: ''
+         }
+
+         let prefix = ''
+
+         // check for sizes
+         if (sizes && sizes.width) {
+             file.name += sizes.width
+         }
+
+         if (sizes && sizes.height) {
+             if (sizes.width) {
+                 prefix = 'x'
+             }
+
+             file.name += prefix + sizes.height
+         }
+
+         // create path before name is set to original as fallback
+         file.path = destinationPath + '/' + filename + file.name + extension
+
+         if (!file.name) {
+             file.name = 'full'
+         }
+
+         if (sizes) {
+             file.sizes = sizes
+         }
+
+         return file
+     }
}

module.exports = Media
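A minimal usage sketch of the new `resolve()` API (require paths assume a script in the project root; `source` and `destination` are set on the configStore as in the tests):
```
const configStore = require('./src/config.js')
const Media = require('./src/factories/media.js')

configStore.set('source', './ressources')
configStore.set('destination', './dist')

// dirPath points at the directory of the page inside the source
const media = new Media('/blog')

// a plain src resolves to a single asset path ...
const single = media.resolve('_images/dog.jpg')

// ... with sizes it resolves to an object of paths keyed by size
const sized = media.resolve('_images/dog.jpg', [{ width: 300 }, { width: 500, height: 100 }])
```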

@@ -3,11 +3,14 @@ const slugify = require('slugify')
const merge = require('lodash.merge')
const nunjucks = require('nunjucks')
const assign = require('assign-deep')
+ const fs = require('fs')
+ const Media = require('./../factories/media.js')
const parseMarkdownFile = require('./../parsers/markdown.js')
+ const configStore = require('./../config.js')

/**
-  * Page
+  * Page - building from markdown file
 *
 *
 * @author Björn Hase <me@herr-hase.wtf>

@@ -29,8 +32,11 @@ class Page {
     */
    constructor(file, parent, fileString, blocks = {}) {

+         // getting dirPath for files for page
+         this._dirPath = this._resolvePath(parent)

        // parse file
-         const result = parseMarkdownFile(fileString)
+         const result = parseMarkdownFile(fileString, this._dirPath)

        // fields merge by default values
        this._fields = merge({
@@ -45,17 +51,40 @@

        // adding filename for html as pathname and relative path in structure
        this._filename = this._resolveFilename(file)
        this._slug = this._resolveSlug(this._filename)
-         this._dirPath = this._resolvePath(parent)

        this._permalink = this._dirPath

        if (this._slug) {
            this._permalink = this._dirPath + '/' + this._slug
        }

+         // check if page is in subdirectory
+         if (fs.existsSync(configStore.get('source') + this._permalink) && this._slug) {
+             this._dirPath += '/' + this._slug
+             this._filename = 'index'
+         }

        this._filename += '.' + this._fields.extensions

        this._content = result.content
        this._blocks = blocks

+         // check for fields and resolve media
+         if (this._fields) {
+             this._fields = this._resolveMedia(this._fields, this._dirPath)
+         }

+         // check for blocks and resolve media
+         if (this._blocks) {
+             for (const key of Object.keys(this._blocks)) {
+                 if (Array.isArray(this._blocks[key])) {
+                     this._blocks[key].forEach((fields, index) => {
+                         this._blocks[key][index] = this._resolveMedia(fields, this._dirPath + '/_blocks')
+                     })
+                 } else {
+                     this._blocks[key] = this._resolveMedia(this._blocks[key], this._dirPath + '/_blocks')
+                 }
+             }
+         }
    }

    /**
@@ -76,6 +105,47 @@ class Page {

        }, this._fields)
    }

+     /**
+      *
+      *
+      */
+     _resolveMedia(fields, dirPath) {
+         for (const key of Object.keys(fields)) {
+             if (key === 'src') {
+                 fields[key] = this._resolveMediaSrc(fields[key], dirPath)
+             }
+
+             if (toString.call(fields[key]) === '[object Object]') {
+                 fields[key] = this._resolveMedia(fields[key], dirPath)
+             }
+         }
+
+         return fields
+     }
+
+     /**
+      *
+      *
+      */
+     _resolveMediaSrc(field, dirPath) {
+         const media = new Media(dirPath)
+
+         if (typeof field === 'string' || field instanceof String) {
+             field = media.resolve(field)
+         }
+
+         if (typeof field === 'object' || field instanceof Object) {
+             if (field.options) {
+                 field = media.resolve(field.src, field.sizes, field.options)
+             } else {
+                 field = media.resolve(field.src, field.sizes)
+             }
+         }
+
+         return field
+     }

    /**
     *
     *

@@ -1,5 +1,6 @@
const { XMLParser, XMLBuilder, XMLValidator} = require('fast-xml-parser')
const dayjs = require('dayjs')
+ const assign = require('assign-deep')

/**
 *

@@ -19,7 +20,11 @@
     *
     */
    constructor(site) {
-         this._site = site
+         this._site = assign({
+             'sitemap': {
+                 'use_permalinks': true
+             }
+         }, site)

        this._urls = []
    }

@@ -31,8 +36,15 @@
     */
    addPage(page) {
        if (this._isValid(page)) {

+             let path = page.permalink
+
+             if (this._site.sitemap.use_permalinks === false) {
+                 path = page.path
+             }

            this._urls.push({
-                 loc: 'https://' + this._site.domain + page.pathname + '/' + page.filename,
+                 loc: 'https://' + this._site.domain + path,
                lastmod: dayjs().format()
            })
        }

@@ -59,17 +71,11 @@
        let result = true

-         if (page.meta) {
-             page.meta = Object.entries(page.meta)
-             page.meta.forEach((meta) => {
-                 if (meta['name'] === 'robots' && meta['content'].includes('noindex')) {
-                     result = false
-                     return;
-                 }
-             })
+         if (page.meta && page.meta.robots && page.meta.robots.includes('noindex')) {
+             result = false
        }

-         if (page.type !== 'html') {
+         if (page.extensions !== 'html') {
            result = false
        }

@@ -108,4 +114,4 @@
    }
}

module.exports = Sitemap
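For reference, the shape of the site configuration the Sitemap consumes after this change, sketched as a plain object with values from the test fixture and the new default applied:
```
// parsed site.yml; sitemap.use_permalinks defaults to true and can be
// set to false to build <loc> from page.path instead of page.permalink
const site = {
    title: 'Test',
    language: 'de',
    domain: 'test.lan',
    https: true,
    sitemap: {
        use_permalinks: true
    }
}
```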

@@ -0,0 +1,112 @@
const { marked } = require('marked')
const configStore = require('./../config.js')
const Media = require('./../factories/media.js')
/**
*
*
*/
// copy from @marked/src/helpers.js, no export possible
function cleanUrl(sanitize, base, href) {
if (sanitize) {
let prot
try {
prot = decodeURIComponent(unescape(href))
.replace(nonWordAndColonTest, '')
.toLowerCase()
} catch (e) {
return null
}
if (prot.indexOf('javascript:') === 0 || prot.indexOf('vbscript:') === 0 || prot.indexOf('data:') === 0) {
return null
}
}
if (base && !originIndependentUrl.test(href)) {
href = resolveUrl(base, href)
}
try {
href = encodeURI(href).replace(/%25/g, '%')
} catch (e) {
return null
}
return href
}
const renderer = {
/**
*
* @param {string} href
* @param {string} title
* @param {string} text
*/
link(href, title, text) {
href = cleanUrl(this.options.sanitize, this.options.baseUrl, href)
if (href === null) {
return text
}
let out = '<a href="' + href + '"'
if (title) {
out += ' title="' + title + '"'
}
// check if url is external and add target
if (href.match(/^(http|https):\/\//)) {
out += ' target="_blank" rel="noopener" '
}
out += '>' + text + '</a>'
return out
},
/**
*
* @param {string} href
* @param {string} title
* @param {string} text
*/
image(href, title, text) {
href = cleanUrl(this.options.sanitize, this.options.baseUrl, href)
if (href === null) {
return text
}
// check if href for image is relative
if (!href.match(/^(http|https):\/\//)) {
const markedDirPath = configStore.get('markedDirPath')
// check if dirPath are exists from options
if (markedDirPath || markedDirPath === '') {
const media = new Media(markedDirPath)
href = media.resolve(href)
}
}
let out = `<img src="${href}" alt="${text}"`
if (title) {
out += ` title="${title}"`
}
out += this.options.xhtml ? '/>' : '>'
return out
}
}
module.exports = renderer

@@ -1,6 +1,9 @@
const yaml = require('js-yaml')
const { marked } = require('marked')
+ const configStore = require('./../config.js')
+ const renderer = require('./../marked/renderer.js')

/**
 * parse string of file, parse yaml and parse markdown
 *

@@ -10,7 +13,7 @@ const { marked } = require('marked')
 *
 */
- function parseMarkdownFile(fileString) {
+ function parseMarkdownFile(fileString, dirPath = '') {

    // regex get yaml section and markdown
    // thanks to, https://github.com/getgrav/grav

@@ -33,10 +36,20 @@ function parseMarkdownFile(fileString) {

    // if markdown section exists parse it to html
    if (matches?.[3]) {

+         // reset configStore
+         configStore.set('markedDirPath', false)
+
+         // check for dirPath and set it to configStore for marked/renderer.js
+         if (dirPath || dirPath === '') {
+             configStore.set('markedDirPath', dirPath)
+         }
+
+         marked.use({ renderer })

        result.content = marked.parse(matches[3])
    }

    return result
}

module.exports = parseMarkdownFile
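A rough sketch of how the parser is called now (the require path assumes a script in the project root; the commented expectations follow the fixtures and tests):
```
const parseMarkdownFile = require('./src/parsers/markdown.js')

const fileString = [
    '---',
    'title: "Data"',
    '---',
    '## Normcore cold-pressed ramps DSA'
].join('\n')

// the second argument is the directory of the page; it is handed to the
// image renderer so relative src paths can be resolved
const result = parseMarkdownFile(fileString, '/blog')

// result.fields -> { title: 'Data' }
// result.content -> rendered html, e.g. an <h2> heading
```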

@@ -43,7 +43,6 @@ class Blocks {
     * @return {array}
     */
    find() {
        if (fs.existsSync(this._dirPath)) {
            this._findFiles(this._dirPath)
        }

@@ -72,11 +71,6 @@
            return
        }

-         // if directory going deep
-         if (file.isDirectory()) {
-             this._findFiles(dirPath, parent + '/' + file.name)
-         }

        // get file
        const fileString = this._getFile(file, dirPath + parent)

@@ -86,7 +80,7 @@
        }

        // create page object and add to page
-         const block = new BlockFactory(fileString)
+         const block = new BlockFactory(fileString, this._dirPath)
        const blockname = this._parseBlockname(file.name)

        if (this._isArray(file.name)) {

@@ -150,4 +144,4 @@
    }
}

- module.exports =Blocks
+ module.exports = Blocks

@@ -135,7 +135,7 @@ class Pages {

        // check for filters and skip
        if (this._filter && !this._filter.validate(page.get())) {
-             return;
+             return
        }

        this._results.push(page.get())

@@ -12,7 +12,7 @@ const PagesQuery = require('./queries/pages.js')
const parseYamlFile = require('./parsers/yaml.js')

/**
-  * Siteomat
+  * Site-O-Mat
 *
 *
 *

@@ -1,8 +1,13 @@
const { assert } = require('chai')
const fs = require('fs')

+ const configStore = require('./../src/config.js')

describe('Blocks / Array', function () {

+     configStore.set('source', './ressources')
+     configStore.set('destination', './dist')

    // get function parseMarkdownFile
    const BlocksQuery = require('./../src/queries/blocks.js')

@@ -25,6 +30,9 @@ describe('Blocks / Array', function () {

describe('Blocks / Single', function () {

+     configStore.set('source', './ressources')
+     configStore.set('destination', './dist')

    // get function parseMarkdownFile
    const BlocksQuery = require('./../src/queries/blocks.js')

@@ -2,9 +2,13 @@ const { assert } = require('chai')
const fs = require('fs')

const PagesQuery = require('./../src/queries/pages.js')
+ const configStore = require('./../src/config.js')

describe('Page /index.md', function () {

+     configStore.set('source', './ressources')
+     configStore.set('destination', './dist')

    const query = new PagesQuery('./ressources');
    const results = query.find()

@@ -25,10 +29,17 @@ describe('Page /index.md', function () {

    it('permalink', function() {
        assert.equal(page.permalink, '')
    })

+     it('parsed content contains image', () => {
+         assert.match(page.content, /<img src="\/assets\/88c010ea\/4ca9b5f5\/6024c57d\/05899fae\/a33d9a45\/dog.webp" alt="_images\/dog.jpg">/)
+     })
})

describe('Page /blog/index.md', function () {

+     configStore.set('source', './ressources')
+     configStore.set('destination', './dist')

    const query = new PagesQuery('./ressources');
    const results = query.find()

@@ -43,16 +54,27 @@ describe('Page /blog/index.md', function () {
    })

    it('path', function() {
-         assert.equal(page.path, '/blog.html')
+         assert.equal(page.path, '/blog/index.html')
    })

    it('permalink', function() {
        assert.equal(page.permalink, '/blog')
    })

+     it('fields has media src', function() {
+         assert.deepEqual(page.media.src, {
+             "300": "/assets/a6c45d17/11bf0a4e/a2b1d75d/dc85ca56/71c63294/dog300.webp",
+             "500x100": "/assets/a6c45d17/11bf0a4e/a2b1d75d/dc85ca56/71c63294/dog500x100.webp",
+             "full": "/assets/a6c45d17/11bf0a4e/a2b1d75d/dc85ca56/71c63294/dog.webp"
+         })
+     })
})

describe('Page /blog/article.md', function () {

+     configStore.set('source', './ressources')
+     configStore.set('destination', './dist')

    const query = new PagesQuery('./ressources');
    const results = query.find()

@@ -69,4 +91,8 @@ describe('Page /blog/article.md', function () {

    it('permalink', function() {
        assert.equal(page.permalink, '/blog/article')
    })

+     it('fields has media src', function() {
+         assert.equal(page.media.src, '/assets/a6c45d17/11bf0a4e/a2b1d75d/dc85ca56/71c63294/dog.webp')
+     })
})

@@ -23,10 +23,24 @@ describe('Parser Markdown', function () {

    it('fields are valid', function() {
        assert.deepEqual(result.fields, {
+             media: {
+                 src: {
+                     sizes: [{
+                         width: 300
+                     },
+                     {
+                         height: 100,
+                         width: 500
+                     }],
+                     src: '_images/dog.jpg'
+                 }
+             },
            title: 'health goth DIY tattooed',
            view: 'page.njk',
            meta: {
-                 description: 'DSA yes plz hot chicken green juice'
+                 description: 'DSA yes plz hot chicken green juice',
+                 robots: 'noindex'
            }
        })
    })

@@ -0,0 +1,34 @@
const { assert } = require('chai')
const fs = require('fs')
const PagesQuery = require('./../src/queries/pages.js')
const configStore = require('./../src/config.js')
const parseYamlFile = require('./../src/parsers/yaml.js')
const Sitemap = require('./../src/factories/sitemap.js')
describe('Sitemap', function () {
configStore.set('source', './ressources')
configStore.set('destination', './dist')
const file = fs.readFileSync('./ressources/site.yml', 'utf8')
const siteConfig = parseYamlFile(file)
const query = new PagesQuery('./ressources')
const results = query.find()
const sitemap = new Sitemap(siteConfig)
results.forEach((page) => {
sitemap.addPage(page)
})
// check results
it('loc-tag with url', function() {
assert.match(sitemap.getXmlAsString(), /<loc>https:\/\/test.lan\/blog\/article<\/loc>/)
})
it('loc-tag for page with robots:noindex is missing', function() {
assert.notMatch(sitemap.getXmlAsString(), /<loc>https:\/\/test.lan\/<\/loc>/)
})
})