Add overwrite:false file config and better parser matching #323

Merged 2 commits on Jul 7, 2023
2 changes: 1 addition & 1 deletion lib/apply/apply-files.js
@@ -9,7 +9,7 @@ const run = async (dir, files, options) => {
await rmEach(dir, rm, options, (f) => fs.rm(f))

log.verbose('apply-files', 'add', add)
await parseEach(dir, add, options, (p) => p.applyWrite())
await parseEach(dir, add, options, {}, (p) => p.applyWrite())
}

module.exports = [{
3 changes: 2 additions & 1 deletion lib/check/check-apply.js
@@ -12,7 +12,8 @@ const run = async (type, dir, files, options) => {
const { add: addFiles, rm: rmFiles } = files

const rm = await rmEach(dir, rmFiles, options, (f) => rel(f))
const [add, update] = partition(await parseEach(dir, addFiles, options, async (p) => {
const parseOpts = { allowMultipleSources: false }
const [add, update] = partition(await parseEach(dir, addFiles, options, parseOpts, async (p) => {
const diff = await p.applyDiff()
const target = rel(p.target)
if (diff === null) {
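Both call sites now pass a parse-options object between `options` and the callback. A minimal sketch of the two shapes (not part of the diff; the default value is assumed from lib/util/files.js below):

// Sketch only — hypothetical wrapper showing the new parseEach call shape.
const { parseEach } = require('./lib/util/files.js')

const demo = async (dir, files, options) => {
  // apply: keep every layered source (defaults to allowMultipleSources: true)
  await parseEach(dir, files, options, {}, (p) => p.applyWrite())
  // check: only diff the last layer of any overwrite:false stack
  await parseEach(dir, files, options, { allowMultipleSources: false }, (p) => p.applyDiff())
}
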
8 changes: 4 additions & 4 deletions lib/config.js
@@ -4,8 +4,8 @@ const semver = require('semver')
const parseCIVersions = require('./util/parse-ci-versions.js')
const getGitUrl = require('./util/get-git-url.js')
const gitignore = require('./util/gitignore.js')
const { withArrays } = require('./util/merge.js')
const { FILE_KEYS, parseConfig: parseFiles, getAddedFiles } = require('./util/files.js')
const { mergeWithArrays } = require('./util/merge.js')
const { FILE_KEYS, parseConfig: parseFiles, getAddedFiles, mergeFiles } = require('./util/files.js')

const CONFIG_KEY = 'templateOSS'
const getPkgConfig = (pkg) => pkg[CONFIG_KEY] || {}
@@ -14,7 +14,7 @@ const { name: NAME, version: LATEST_VERSION } = require('../package.json')
const MERGE_KEYS = [...FILE_KEYS, 'defaultContent', 'content']
const DEFAULT_CONTENT = require.resolve(NAME)

const merge = withArrays('branches', 'distPaths', 'allowPaths', 'ignorePaths')
const merge = mergeWithArrays('branches', 'distPaths', 'allowPaths', 'ignorePaths')

const makePosix = (v) => v.split(win32.sep).join(posix.sep)
const deglob = (v) => makePosix(v).replace(/[/*]+$/, '')
@@ -120,7 +120,7 @@ const getFullConfig = async ({
// Files get merged in from the default content (that template-oss provides) as well
// as any content paths provided from the root or the workspace
const fileDirs = uniq([useDefault && defaultDir, rootDir, pkgDir].filter(Boolean))
const files = merge(useDefault && defaultFiles, rootFiles, pkgFiles)
const files = mergeFiles(useDefault && defaultFiles, rootFiles, pkgFiles)
const repoFiles = isRoot ? files.rootRepo : files.workspaceRepo
const moduleFiles = isRoot ? files.rootModule : files.workspaceModule

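The rename separates the two merge helpers config.js now uses: `mergeWithArrays` for plain config (concatenating only the listed array keys) and `mergeFiles` for file configs (with the overwrite handling defined in lib/util/files.js below). A minimal sketch of the former, with made-up config values:

// Sketch only — made-up values; 'requiredChecks' is a hypothetical key.
const { mergeWithArrays } = require('./lib/util/merge.js')

const merge = mergeWithArrays('branches')
merge(
  { branches: ['main'], requiredChecks: ['lint'] },
  { branches: ['release/*'], requiredChecks: ['test'] }
)
// → { branches: ['main', 'release/*'],  // listed key: arrays concatenate
//     requiredChecks: ['test'] }        // unlisted key: the last array wins
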
74 changes: 55 additions & 19 deletions lib/util/files.js
@@ -1,27 +1,62 @@
const { join } = require('path')
const { defaultsDeep } = require('lodash')
const merge = require('./merge.js')
const { defaultsDeep, omit } = require('lodash')
const deepMapValues = require('just-deep-map-values')
const { glob } = require('glob')
const { mergeWithCustomizers, customizers } = require('./merge.js')
const Parser = require('./parser.js')
const template = require('./template.js')

const ADD_KEY = 'add'
const RM_KEY = 'rm'
const FILE_KEYS = ['rootRepo', 'rootModule', 'workspaceRepo', 'workspaceModule']

const globify = pattern => pattern.split('\\').join('/')

const fileEntries = (dir, files, options) => Object.entries(files)
// remove any false values
.filter(([_, v]) => v !== false)
// target paths need to be joined with dir and templated
.map(([k, source]) => {
const target = join(dir, template(k, options))
return [target, source]
})
const mergeFiles = mergeWithCustomizers((value, srcValue, key, target, source, stack) => {
// This merges all files normally, except when the src file has overwrite:false.
// In that case the files are turned into an array so they can be applied on
// top of each other in the parser.
if (
stack[0] === ADD_KEY &&
FILE_KEYS.includes(stack[1]) &&
value?.file &&
srcValue?.overwrite === false
) {
return [value, omit(srcValue, 'overwrite')]
}
}, customizers.overwriteArrays)

const fileEntries = (dir, files, options, { allowMultipleSources = true } = {}) => {
const results = []

for (const [key, source] of Object.entries(files)) {
// remove any false values first since that means those targets are skipped
if (source === false) {
continue
}

// target paths need to be joined with dir and templated
const target = join(dir, template(key, options))

if (Array.isArray(source)) {
// When turning an object of files into all its entries, we allow
// multiples when applying changes, but not when checking for changes
// since earlier files would always return as needing an update. So we
// either allow multiples and return the array or only return the last
// source file in the array.
const sources = allowMultipleSources ? source : source.slice(-1)
results.push(...sources.map(s => [target, s]))
} else {
results.push([target, source])
}
}

return results
}

// given an obj of files, return the full target/source paths and associated parser
const getParsers = (dir, files, options) => {
const parsers = fileEntries(dir, files, options).map(([target, source]) => {
const getParsers = (dir, files, options, parseOptions) => {
const parsers = fileEntries(dir, files, options, parseOptions).map(([target, source]) => {
const { file, parser, filter, clean: shouldClean } = source

if (typeof filter === 'function' && !filter(options)) {
@@ -35,7 +70,7 @@ const getParsers = (dir, files, options) => {
return new (parser(Parser.Parsers))(target, file, options, { clean })
}

return new (Parser(file))(target, file, options, { clean })
return new (Parser(target))(target, file, options, { clean })
})

return parsers.filter(Boolean)
@@ -62,17 +97,17 @@ const rmEach = async (dir, files, options, fn) => {
return res.filter(Boolean)
}

const parseEach = async (dir, files, options, fn) => {
const parseEach = async (dir, files, options, parseOptions, fn) => {
const res = []
for (const parser of getParsers(dir, files, options)) {
for (const parser of getParsers(dir, files, options, parseOptions)) {
res.push(await fn(parser))
}
return res.filter(Boolean)
}

const parseConfig = (files, dir, overrides) => {
const normalizeFiles = (v) => deepMapValues(v, (value, key) => {
if (key === 'rm' && Array.isArray(value)) {
if (key === RM_KEY && Array.isArray(value)) {
return value.reduce((acc, k) => {
acc[k] = true
return acc
@@ -88,21 +123,22 @@ const parseConfig = (files, dir, overrides) => {
return value
})

const merged = merge(normalizeFiles(files), normalizeFiles(overrides))
const merged = mergeFiles(normalizeFiles(files), normalizeFiles(overrides))
const withDefaults = defaultsDeep(merged, FILE_KEYS.reduce((acc, k) => {
acc[k] = { add: {}, rm: {} }
acc[k] = { [ADD_KEY]: {}, [RM_KEY]: {} }
return acc
}, {}))

return withDefaults
}

const getAddedFiles = (files) => files ? Object.keys(files.add || {}) : []
const getAddedFiles = (files) => files ? Object.keys(files[ADD_KEY] || {}) : []

module.exports = {
rmEach,
parseEach,
FILE_KEYS,
parseConfig,
getAddedFiles,
mergeFiles,
}
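
To make the new behavior concrete, here is a rough sketch (not part of the diff; the config shapes and file names are simplified assumptions) of what `mergeFiles` does with an `overwrite: false` source and how the two `allowMultipleSources` modes treat the result:

// Sketch only — simplified shapes, using the mergeFiles export defined above.
const { mergeFiles } = require('./lib/util/files.js')

const defaults = {
  rootRepo: { add: { CODEOWNERS: { file: 'codeowners' } } },
}
const overrides = {
  rootRepo: { add: { CODEOWNERS: { file: 'extra-codeowners', overwrite: false } } },
}

const files = mergeFiles(defaults, overrides)
// files.rootRepo.add.CODEOWNERS === [{ file: 'codeowners' }, { file: 'extra-codeowners' }]
// (the overwrite flag is stripped by omit() and both sources are kept as layers)
//
// apply uses { allowMultipleSources: true } (the default), so fileEntries yields
// both layers and they are written on top of each other in order; check uses
// { allowMultipleSources: false }, so only the last layer is diffed and the
// earlier layers don't always report as needing an update.
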
75 changes: 63 additions & 12 deletions lib/util/merge.js
@@ -1,21 +1,72 @@
const { mergeWith } = require('lodash')
const { mergeWith: _mergeWith } = require('lodash')

const merge = (...objects) => mergeWith({}, ...objects, (value, srcValue, key) => {
if (Array.isArray(srcValue)) {
// Don't merge arrays, last array wins
return srcValue
}
})
// Adapted from https://github.com/lodash/lodash/issues/3901#issuecomment-517983996
// Allows us to keep track of the current key during each merge so a customizer
// can make different merges based on the parent keys.
const mergeWith = (...args) => {
const customizer = args.pop()
const objects = args
const sourceStack = []
const keyStack = []
return _mergeWith({}, ...objects, (value, srcValue, key, target, source) => {
let currentKeys
while (true) {
if (!sourceStack.length) {
sourceStack.push(source)
keyStack.push([])
}
if (source === sourceStack[sourceStack.length - 1]) {
currentKeys = keyStack[keyStack.length - 1].concat(key)
sourceStack.push(srcValue)
keyStack.push(currentKeys)
break
}
sourceStack.pop()
keyStack.pop()
}
// Remove the last key since that is the current one and reverse the whole
// array so that the first entry is the parent, 2nd grandparent, etc
return customizer(value, srcValue, key, target, source, currentKeys.slice(0, -1).reverse())
})
}

// Create a merge function that will run a set of customizer functions
const mergeWithCustomizers = (...customizers) => {
return (...objects) => mergeWith({}, ...objects, (...args) => {
for (const customizer of customizers) {
const result = customizer(...args)
// undefined means the customizer will defer to the next one
// the default behavior of undefined in lodash is to merge
if (result !== undefined) {
return result
}
}
})
}

const mergeWithArrays = (...keys) =>
(...objects) => mergeWith({}, ...objects, (value, srcValue, key) => {
const customizers = {
// Don't merge arrays, last array wins
overwriteArrays: (value, srcValue) => {
if (Array.isArray(srcValue)) {
return srcValue
}
},
// Merge arrays if their key matches one of the passed in keys
mergeArrays: (...keys) => (value, srcValue, key) => {
if (Array.isArray(srcValue)) {
if (keys.includes(key)) {
return (Array.isArray(value) ? value : []).concat(srcValue)
}
return srcValue
}
})
},
}

module.exports = merge
module.exports.withArrays = mergeWithArrays
module.exports = {
// default merge is to overwrite arrays
merge: mergeWithCustomizers(customizers.overwriteArrays),
mergeWithArrays: (...keys) => mergeWithCustomizers(customizers.mergeArrays(...keys)),
mergeWithCustomizers,
mergeWith,
customizers,
}
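
The key-stack bookkeeping above is easier to see with a concrete call. A minimal sketch (made-up objects, using the `mergeWith` export above):

// Sketch only — showing the reversed parent-key stack a customizer receives.
const { mergeWith } = require('./lib/util/merge.js')

const merged = mergeWith(
  { rootRepo: { add: { 'file.txt': { file: 'a' } } } },
  { rootRepo: { add: { 'file.txt': { file: 'b' } } } },
  (value, srcValue, key, target, source, stack) => {
    if (key === 'file.txt') {
      // while merging the second object, value === { file: 'a' } and
      // srcValue === { file: 'b' }, and
      // stack === ['add', 'rootRepo'] — parent first, then grandparent
    }
    // returning undefined falls through to lodash's default deep merge
  }
)
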
48 changes: 31 additions & 17 deletions lib/util/parser.js
@@ -1,14 +1,15 @@
const fs = require('fs/promises')
const { basename, extname, dirname } = require('path')
const { dirname } = require('path')
const yaml = require('yaml')
const NpmPackageJson = require('@npmcli/package-json')
const jsonParse = require('json-parse-even-better-errors')
const Diff = require('diff')
const { unset } = require('lodash')
const ini = require('ini')
const { minimatch } = require('minimatch')
const template = require('./template.js')
const jsonDiff = require('./json-diff')
const merge = require('./merge.js')
const { merge } = require('./merge.js')

const setFirst = (first, rest) => ({ ...first, ...rest })

@@ -167,17 +168,17 @@ class Base {
}

class Gitignore extends Base {
static types = ['codeowners', 'gitignore']
static types = ['codeowners', '.gitignore']
comment = (c) => `# ${c}`
}

class Js extends Base {
static types = ['js']
static types = ['*.js']
comment = (c) => `/* ${c} */`
}

class Ini extends Base {
static types = ['ini']
static types = ['*.ini']
comment = (c) => `; ${c}`

toString (s) {
@@ -202,17 +203,17 @@
}

class IniMerge extends Ini {
static types = ['npmrc']
static types = ['.npmrc']
merge = (t, s) => merge(t, s)
}

class Markdown extends Base {
static types = ['md']
static types = ['*.md']
comment = (c) => `<!-- ${c} -->`
}

class Yml extends Base {
static types = ['yml']
static types = ['*.yml']
comment = (c) => ` ${c}`

toString (s) {
@@ -274,7 +275,7 @@
}

class Json extends Base {
static types = ['json']
static types = ['*.json']
// it's a json comment! not really, but we do add a special key
// to json objects
comment = (c) => ({ [`//${this.options.config.__NAME__}`]: c })
@@ -306,7 +307,7 @@
}

class PackageJson extends JsonMerge {
static types = ['pkg.json']
static types = ['package.json']

async prepare (s, t) {
// merge new source with current pkg content
@@ -348,15 +349,28 @@ const Parsers = {
PackageJson,
}

const parserLookup = Object.values(Parsers)
// Create an order to look up parsers based on filename. The only important
// part of the ordering is that exact type matches come before globs, so we
// always sort globs to the bottom
const parserLookup = []
for (const parser of Object.values(Parsers)) {
for (const type of parser.types) {
const parserEntry = [type, parser]
if (type.includes('*')) {
parserLookup.push(parserEntry)
} else {
parserLookup.unshift(parserEntry)
}
}
}

const getParser = (file) => {
const base = basename(file).toLowerCase()
const ext = extname(file).slice(1).toLowerCase()

return parserLookup.find((p) => p.types.includes(base))
|| parserLookup.find((p) => p.types.includes(ext))
|| Parsers.Base
for (const [type, parser] of parserLookup) {
if (minimatch(file, type, { nocase: true, dot: true, matchBase: true })) {
return parser
}
}
return Parsers.Base
}

module.exports = getParser
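A rough sketch of how the new lookup behaves (illustrative paths, not part of the diff, assuming the minimatch options shown above): exact types like 'package.json' and '.npmrc' are checked before glob types like '*.json', and matchBase matches bare patterns against the basename of a nested path.

// Sketch only — expected matches for some illustrative paths.
const getParser = require('./lib/util/parser.js')

getParser('/repo/package.json')              // → PackageJson ('package.json' wins over '*.json')
getParser('/repo/.npmrc')                    // → IniMerge ('.npmrc' wins over '*.ini')
getParser('/repo/.github/workflows/ci.yml')  // → Yml ('*.yml' matches the basename via matchBase)
getParser('/repo/docs/README.md')            // → Markdown ('*.md')
getParser('/repo/LICENSE')                   // → Base (nothing matches, so it falls back)
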
1 change: 1 addition & 0 deletions package.json
@@ -50,6 +50,7 @@
"just-deep-map-values": "^1.1.1",
"just-diff": "^6.0.0",
"lodash": "^4.17.21",
"minimatch": "^9.0.2",
"npm-package-arg": "^10.0.0",
"proc-log": "^3.0.0",
"release-please": "npm:@npmcli/release-please@^14.2.6",