Extract out file transformer (#591)
* Extract out synchronizer
* Alter codeowner of synchronizer
* Move agnostic input to pipeline
* Fix up manifest writing
* Abstract away Manifest ready output
* Extract rules from pipeline
* Move rules to live within server as they are blitz-specific business concerns and depend on next.js
* Fix up auto import error
* Fix up incorrect Manifest type path
* Create DEBUG logger
* Pass configuration to rules not synchronizer
* Rename configureRules function
* Tidy up file transformer API
* Simplify configuration
* synchronizer -> file-pipeline
* Rule -> Stage
* Use nullish coalescing now we have it in TS
* Add documentation for file-pipeline
* Update docs
* Move image and remove outdated diagram
* Update docs
* Refactor: remove pipeline folder
* Update readme with a PR suggestion
* Update wait-on scripts
* Add documentation
* Update docs
.github/CODEOWNERS (vendored)
@@ -4,6 +4,7 @@
*.md @merelinguist

packages/server/**/* @ryardley
packages/file-pipeline/**/* @ryardley
packages/cli/**/* @aem
packages/generator/**/* @aem
packages/installer/**/* @aem
@@ -117,6 +117,14 @@ const variable = (val: any) => {
  return chalk.cyan.bold(`${val}`)
}

/**
 * If the DEBUG env var is set this will write to the console
 * @param str msg
 */
const debug = (str: string) => {
  process.env.DEBUG && console.log(str)
}

export const log = {
  withBrand,
  withWarning,
@@ -133,4 +141,5 @@ export const log = {
  success,
  variable,
  info,
  debug,
}
@@ -1,6 +1,6 @@
{
  "extends": "../../tsconfig.json",
  "include": ["src", "types", "test"],
  "include": ["src"],
  "exclude": ["node_modules"],
  "compilerOptions": {
    "baseUrl": "./",
packages/file-pipeline/.gitignore (vendored, new file)
@@ -0,0 +1,4 @@
*.log
.DS_Store
node_modules
dist
packages/file-pipeline/README.md (new file)
@@ -0,0 +1,288 @@
# `@blitzjs/file-pipeline`

## File Transformation Pipeline

### Design goals

- Stages: related logic needs to live together
- Everything is a stream
- Efficiency
- Parallel processing
- Cleaner architecture for dirty-restart functionality
- Agnostic input (file watcher / glob)
- Simpler tests

This package provides a gulp-based pipeline for transforming files from a source to a destination across a series of stages.
```ts
import {transformFiles} from '@blitzjs/file-pipeline'
import through from 'through2'
import File from 'vinyl'

// This is an example of a stage that does nothing to the
// files as they pass through the pipeline
const myStage = () => ({
  stream: through.obj((file: File, _, next) => {
    // Normally transformation will take place here
    next(null, file)
  }),
})

const mySecondStage = () => ({
  stream: through.obj((file: File, _, next) => {
    // Normally transformation will take place here
    next(null, file)
  }),
})

// Files start off at the source
const src = '/path/to/src'
// Pass through the stages one by one
const stages = [
  myStage,
  mySecondStage,
]
// Then end up at the destination
const dest = '/path/to/dest'

// We can set various options; they are all optional
const options = {
  // This indicates if the file watcher will be turned on
  watch: true,
  // This is a list of source file globs to ignore
  ignore: [],
  // This is a list of source file globs to include
  include: ['**/*'],
  // Pass in an optional transform stream that will be used as an event bus
  bus: someTransformStream,
}

// Run the transform
transformFiles(src, stages, dest, options)
```
## Stages

Stages are how you provide special behaviour to your file-pipeline.

The anatomy of a stage looks like this:
```ts
function myStage({
  // Stage config holds the basic info you need for the stage
  config: {
    // src folder
    src,
    // dest folder
    dest,
    // current working directory
    cwd,
    // include globs
    include,
    // ignore globs
    ignore,
    // if we are in watch mode
    watch,
  },
  // Input writable stream - use input.write(file) to send a file to the input of the pipeline
  input,
  // Event bus stream - use this to send events to listeners within and outside of the pipeline
  bus,
  // Get the input cache.
  // This is an object that contains cached entries for all the files ingested.
  // Use this for things that require lists of files
  getInputCache,
}: StageArgs) {
  // Create some kind of transform stream
  const stream = createSomeKindOfTransformStream()

  // Ready is an object that will be merged with those of all other
  // stages and returned in a promise by transformFiles()
  const ready = {foo: 'This will appear in the object returned by the transformFiles() promise'}

  // Export the stream and the ready info
  return {stream, ready}
}
```
# Why Streams?

Initially, Blitz will be used by people with small projects, but as the number of files and the throughput increase we will need an architecture that allows for large parallel throughput with low memory consumption. Node is built on streams as a primitive, so it makes sense to utilize what is available. The Gulp ecosystem provides several tools for managing streams of files, so it makes sense to use those tools where available. Because refactoring to streams later would be extremely difficult and painful, not starting with streams would be a design mistake.
# Why not RxJS?

RxJS could be a good match for streaming architectures and introduces some really powerful tools for managing stream operations. As we are using object streams, it could also simplify some of our boilerplate. However, certain operators in RxJS can be unapproachable for newer developers and tend to encourage too much abstraction. It is also an extra dependency that increases the learning surface of the codebase, and since we need to understand basic Node streams in any case, it makes sense to avoid RxJS until absolutely necessary.
## File Transform Pipeline

<img src="diagram-file-transform.png" />
# Stream helpers

Node streams are a little incompatible across older versions of Node, so there are a few compatibility libs we use to help us work with streams.

https://www.freecodecamp.org/news/rxjs-and-node-8f4e0acebc7c/

Helper libs:

- Pipe - [pump](https://npmjs.com/package/pump)
- Pipeline - [pumpify](https://npmjs.com/package/pumpify)
- Through - [through2](https://npmjs.com/package/through2)
- Concat - [concat-stream](https://npmjs.com/package/concat-stream)
- Parallel - [parallel-transform](https://npmjs.com/package/parallel-transform)
- Node compat - [readable-stream](https://npmjs.com/package/readable-stream)
# A good way to work with streams

A pattern we have found works well is a constructor function that accepts connectors or config and returns a stream, along with any shared data you need to provide to other components' connectors. You will see this a lot around the file-pipeline.

```ts
type CreatorFn = (connectionsOrConfig: ConnectionsOrConfig) => StreamAsWellAsSharedData
```

An example might look like this:

```ts
// Config -> Merged Glob && FileWatcher
const source = agnosticSource({cwd: src, include, ignore, watch})

// You can then pipe the stream to a pipeline
pipe(source.stream, fileTransformPipeline)
```

The reason we don't just return a stream is that we often need to return other data and share it elsewhere. For example, to analyze the input file structure in the pages stage we use a file cache:

```ts
const fileCache = createFileCache(config)
const pageStage = createPageStage(fileCache.cache)

pipeline(
  fileCache.stream, // manages the fileCache to be used by other streams
  // ...
  pageStage.stream, // has access to the fileCache
)
```
# View rendering and error handling

The CLI view is provided by a stream which accepts events, manages them, and renders them to the console.

If you pass an Error to a transform stream's callback, `next(new Error(...))`, an error event will be sent over the event bus. This can then be handled by event-handling logic in your stage.
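For instance, a listener on the bus can pick those error events out. This is a minimal sketch modelled on the `handleErrors` helper used by the server package's pages stage; the function name here is illustrative:

```ts
import {Writable} from 'stream'
import {ERROR_THROWN} from '@blitzjs/file-pipeline'

// Sketch: react to error events travelling over the event bus
export function logErrorEvents(bus: Writable) {
  bus.on('data', (event: {type: string; payload: Error}) => {
    if (event.type !== ERROR_THROWN) return
    console.error(event.payload.message)
  })
}
```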
# Evented Vinyl Files

Evented Vinyl files are [Vinyl files](https://github.com/gulpjs/vinyl) with file events attached to them:

```ts
const isDelete = (file) => file.isNull() && file.event === 'unlink'

// The input file at '/path/to/foo' was deleted
// This can be transformed during the process phase
return new Vinyl({
  path: '/path/to/foo',
  contents: null,
  event: 'unlink',
})
```

```ts
// Add file at '/path/to/foo'
new Vinyl({
  path: '/path/to/foo',
  contents: someContentStream,
})
```
# Input agnostic

The pipeline is input agnostic, i.e. it should not matter whether a file comes from a file watcher or a folder glob. To help with that we have created an agnostic input stream that takes a glob config and returns a file stream. It consumes input from both chokidar and vinyl-fs.
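As a rough illustration of the watcher half of that idea, here is a sketch of converting chokidar events into evented Vinyl files. This is a simplified approximation, not the actual `agnosticSource` implementation, and `watchSource` is a made-up name:

```ts
import {join} from 'path'
import {Readable} from 'stream'
import chokidar from 'chokidar'
import File from 'vinyl'

// Turn chokidar events into evented Vinyl files so downstream stages
// never need to know whether a file came from a watcher or a glob
export function watchSource(include: string[], cwd: string): Readable {
  const stream = new Readable({objectMode: true, read() {}})

  chokidar.watch(include, {cwd}).on('all', (event, relativePath) => {
    // 'add' | 'change' | 'unlink' | ... travels with the file as file.event
    // (contents are left null here; a real implementation would read the
    // file from disk for add/change events)
    stream.push(new File({path: join(cwd, relativePath), contents: null, event}))
  })

  return stream
}
```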
# Optimization

The input stage manages the ingestion of evented Vinyl files. Files that have already been processed, or are currently being processed, should not be processed again. To that end we maintain a running list of files to work on, keyed by a hash of each file's name and mtime.
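A minimal sketch of that guard (the real logic lives in `src/helpers/work-optimizer.ts`; the names here are illustrative):

```ts
import crypto from 'crypto'
import File from 'vinyl'

const seen = new Set<string>()

// Skip files whose (path, mtime) combination has already entered the pipeline
export function shouldProcess(file: File) {
  const hash = crypto
    .createHash('md5')
    .update(file.path + String(file.stat?.mtime))
    .digest('hex')

  if (seen.has(hash)) return false // already done or currently in flight
  seen.add(hash)
  return true
}
```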
# Analysis

Some types of analysis need a list of all the files; other types do not.

Analysis needs to be done in-stream as new information comes in. E.g. when someone renames a file, that file goes to the analysis engine, which works out invariants as they occur without requiring a sweep of the entire file system.

For this, we can create file caches which act as projections of the file system and update based on input file events.
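A sketch of such a projection (the real one is `src/helpers/file-cache.ts`; this simplified version only tracks paths):

```ts
import File from 'vinyl'

type FileCacheEntry = {path: string}

// An in-memory projection of the file system, kept up to date by
// replaying evented Vinyl files into it
export function createSimpleFileCache() {
  const entries = new Map<string, FileCacheEntry>()

  return {
    update(file: File & {event?: string}) {
      if (file.event === 'unlink' || file.event === 'unlinkDir') {
        entries.delete(file.path)
      } else {
        entries.set(file.path, {path: file.path})
      }
    },
    // Analysis code can then ask questions over the whole projection,
    // e.g. "is there a next.config.js anywhere in the source?"
    filter(test: (entry: FileCacheEntry) => boolean) {
      return [...entries.values()].filter(test)
    },
  }
}
```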
# Stages

Stage streams represent the specific things we need the `file-pipeline` to do.

Possible things a stage can do:

- Change a file's path or contents
- Drop the file from further processing - don't copy it
- Add new files to the input stream, associating the new files with the original
- Write an error to the error stream
```ts
import {resolve} from 'path'
import {pathExistsSync} from 'fs-extra'
import File from 'vinyl'
import {through} from './streams'
import {Stage} from './types'

// Typical Stage
export const myStage: Stage = ({config, input, getInputCache}) => {
  const service = createSomeService()

  // This is an incremental file cache that
  // gets built as files are read
  const cache = getInputCache()

  // You should not write to the file system here - use input.write() instead
  // to send a new file down the pipeline
  if (!pathExistsSync(resolve(config.src, 'blitz.config.js'))) {
    input.write(
      new File({
        path: resolve(config.src, 'blitz.config.js'),
        contents: Buffer.from('Hello World'),
      }),
    )
  }

  const stream = through.obj(function (file, enc, next) {
    // You can test for changes in the input cache
    if (cache.filter((entry) => /next\.config\.js$/.test(entry.path)).length > 0) {
      const err = new Error('Cannot have next config!')
      err.name = 'NextConfigError'
      return next(err) // Will cause an error event to be fired on the event bus
    }

    // Process the file in some way
    file.path = file.path.toUpperCase()

    // You can push to the stream output (note you cannot use arrow fns for this)
    this.push(file)

    // You can push new files to the output too, but this is a little dodgy.
    // One issue here is that this file will not be registered as a piece
    // of work, and that may have unintended consequences.
    // This is still ok in many situations, but it is safer to write to the input.
    // (note input.write vs this.push)
    this.push(
      new File({
        path: '/path/to/foo',
        contents: Buffer.from('This is a file'),
      }),
    )

    // Or send the file onwards to be written - this does the same thing as this.push()
    next(null, file)
  })

  // Provide an object that will be merged with the other stages' ready
  // objects to form the resolved value of the transformFiles() promise
  const ready = {
    foo: 'I am foo',
  }

  return {
    stream,
    ready,
    service, // provide the service to consumers outside the stream
  }
}
```
diagram-file-transform.png (moved, 41 KiB)
packages/file-pipeline/package.json (new file)
@@ -0,0 +1,57 @@
{
  "name": "@blitzjs/file-pipeline",
  "version": "0.12.0",
  "description": "File transform pipeline for Blitz",
  "homepage": "https://github.com/blitz-js/blitz#readme",
  "license": "MIT",
  "scripts": {
    "dev": "tsdx watch --verbose",
    "build": "tsdx build",
    "test": "tsdx test",
    "test:watch": "tsdx test --watch",
    "lint": "tsdx lint"
  },
  "author": {
    "name": "Brandon Bayer",
    "email": "b@bayer.ws",
    "url": "https://twitter.com/flybayer"
  },
  "main": "dist/index.js",
  "module": "dist/file-pipeline.esm.js",
  "types": "dist/packages/file-pipeline/src/index.d.ts",
  "files": [
    "dist"
  ],
  "husky": {
    "hooks": {
      "pre-commit": "tsdx lint"
    }
  },
  "keywords": [
    "blitz",
    "file-pipeline"
  ],
  "repository": {
    "type": "git",
    "url": "git+https://github.com/blitz-js/blitz.git"
  },
  "dependencies": {
    "chalk": "4.0.0",
    "chokidar": "3.4.0",
    "flush-write-stream": "2.0.0",
    "from2": "2.3.0",
    "fs-extra": "9.0.0",
    "gulp-if": "3.0.0",
    "merge-stream": "2.0.0",
    "ora": "4.0.4",
    "parallel-transform": "1.2.0",
    "pump": "3.0.0",
    "pumpify": "2.0.1",
    "slash": "3.0.0",
    "through2": "3.0.1",
    "vinyl": "2.2.0",
    "vinyl-file": "3.0.0",
    "vinyl-fs": "3.0.3"
  },
  "devDependencies": {}
}
packages/file-pipeline/src/helpers/README.md (new file)
@@ -0,0 +1,92 @@
# Future thinking - work-optimizer

One future issue we have been trying to account for here is how to solve the dirty-sync problem with streams. Basically, we want Blitz to do as little work as possible. At this point we blow away the Blitz folders on every start, but it would be smarter to analyze the source and destination folders and only manipulate the files that actually need to change. This is not required as of now, but it will be a consideration as we try to make this thing faster and faster to live up to its name. To prepare for this we have set up a work optimizer that checks the hash of the input file and guards against new work being done.

The following is a rough plan for how to do this. (Likely to change/improve at a later point.)

- Encode vinyl files + stats:

```ts
const hash = crypto
  .createHash('md5')
  .update(file.path + file.stats.mtime)
  .digest('hex')

file.hash = hash
```

- Use those hashes to index file details in the following structures:

```ts
// built up as the first step during input
const input = {abc123def456: '/foo/bar/baz', def456abc123: '/foo/bar/bop'}

// built up as the last step just before file write
const complete = {
  abc123def456: {
    input: '/foo/bar/baz',
    output: ['/bas/boop/blop', '/bas/boop/ding', '/bas/boop/bar'],
  },
  def456abc123: {
    input: '/foo/bar/bing',
    output: ['/bas/boop/ping', '/bas/boop/foo', '/bas/boop/fawn'],
  },
  cbd123aef456: {
    input: '/foo/bar/bop',
    output: ['/bas/boop/thing'],
  },
}
```

Has this file hash been processed?

```ts
const hasBeenProcessed = (hash: string) => !!complete[hash]
```

Which files do I need to delete based on input?

```ts
const deleteHashes = Object.keys(complete).filter((hash) => !input[hash])
```

- Output can also be indexed by filetype to keep going with our hacky error mapping (eventually this should probably be a sourcemap):

```json
{
  "/bas/boop/bar": "/foo/bar/baz",
  "/bas/boop/blop": "/foo/bar/baz",
  "/bas/boop/ding": "/foo/bar/baz",
  "/bas/boop/fawn": "/foo/bar/bing",
  "/bas/boop/foo": "/foo/bar/bing",
  "/bas/boop/ping": "/foo/bar/bing",
  "/bas/boop/thing": "/foo/bar/bop"
}
```

Does my output match my input, i.e. am I in a stable state? Or, in our case, can we resolve the promise?

```ts
function isStable(input: Record<string, unknown>, output: Record<string, unknown>) {
  if (!input || !output) {
    return false // We are not stable if we don't have both an input and an output
  }

  const inputKeys = Object.keys(input)
  const outputKeys = Object.keys(output)

  if (inputKeys.length !== outputKeys.length) {
    return false
  }

  for (let i = 0; i < inputKeys.length; i++) {
    if (outputKeys[i] !== inputKeys[i]) {
      return false
    }
  }
  return true
}
```
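Hypothetically, the pipeline could then gate its promise on this check; everything below is illustrative wiring, not code from this PR:

```ts
declare const input: Record<string, string>
declare const complete: Record<string, unknown>

// Sketch: poll until the input and output indexes agree, then continue
async function waitUntilStable() {
  while (!isStable(input, complete)) {
    await new Promise((resolve) => setTimeout(resolve, 50))
  }
}
```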
@@ -1,4 +1,4 @@
import {through} from '../../streams'
import {through} from '../streams'
import vfs from 'vinyl-fs'
import mergeStream from 'merge-stream'

@@ -42,7 +42,7 @@ export const watch = (includePaths: string[] | string, options: chokidar.WatchOp
type SourceConfig = {cwd: string; include: string[]; ignore: string[]; watch: boolean}

/**
 * A rule that will provide agnostic file input based on a set of globs.
 * A stage that will provide agnostic file input based on a set of globs.
 * Initially it will start as a vinyl stream and if the watch config is
 * set to true it will also provide a file watcher.
 * @param config Config object
@@ -1,8 +1,8 @@
import {through} from '../../streams'
import {through} from '../streams'
import crypto from 'crypto'

/**
 * Returns a rule that prepares files coming into the stream
 * Returns a stage that prepares files coming into the stream
 * with correct event information as well as hash information
 * This is used by the work optimizer and elsewhere to manage the
 * way files are handled and optimized
@@ -1,4 +1,4 @@
import {through} from '../../streams'
import {through} from '../streams'
import File from 'vinyl'

type FileCacheEntry = {path: string}
@@ -1,6 +1,6 @@
import {createIdleHandler} from './idle-handler'

import {to, pipeline, through} from '../../streams'
import {to, pipeline, through} from '../streams'

const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms))

@@ -1,5 +1,5 @@
import {through} from '../../streams'
import {READY, IDLE} from '../../events'
import {through} from '../streams'
import {READY, IDLE} from '../events'
import {Writable} from 'stream'

/**
@@ -1,7 +1,7 @@
// Mostly concerned with solving the Dirty Sync problem

import {through} from '../../streams'

import {through} from '../streams'
import {log} from '@blitzjs/display'
import File from 'vinyl'

/**
@@ -16,14 +16,20 @@ export function createWorkOptimizer() {
  const stats = {todo, done}

  const reportComplete = through({objectMode: true}, (file: File, _, next) => {
    done.push(file.hash)
    if (file.hash) {
      done.push(file.hash)
    }
    next(null, file)
  })

  const triage = through({objectMode: true}, function (file: File, _, next) {
    if (!file.hash) {
      log.debug('File does not have hash! ' + file.path)
      return next()
    }
    // Dont send files that have already been done or have already been added
    if (done.includes(file.hash) || todo.includes(file.hash)) {
      process.env.DEBUG && console.log('Rejecting because this job has been done before: ' + file.path)
      log.debug('Rejecting because this job has been done before: ' + file.path)
      return next()
    }

@@ -1,17 +1,17 @@
import {through, pipeline} from '../../../streams'
import {through, pipeline} from '../streams'
import gulpIf from 'gulp-if'
import {unlink} from '../../helpers/unlink'
import {unlink} from './unlink'
import {dest} from 'vinyl-fs'
import File from 'vinyl'
import {Rule} from '../../../types'
import {FILE_WRITTEN} from '../../../events'
import {FILE_WRITTEN} from '../events'
import {Writable} from 'stream'

/**
 * Returns a Rule that writes files to the destination path
 * Returns a Stage that writes files to the destination path
 */
export const createRuleWrite: Rule = ({config, reporter}) => {
export const createWrite = (destination: string, reporter: Writable) => {
  const stream = pipeline(
    gulpIf(isUnlinkFile, unlink(config.dest), dest(config.dest)),
    gulpIf(isUnlinkFile, unlink(destination), dest(destination)),
    through({objectMode: true}, (file: File, _, next) => {
      reporter.write({type: FILE_WRITTEN, payload: file})
      next(null, file)
packages/file-pipeline/src/index.ts (new file)
@@ -0,0 +1,3 @@
export {transformFiles} from './transform-files'
export {Stage} from './types'
export * from './events'
packages/file-pipeline/src/pipeline.ts (new file)
@@ -0,0 +1,66 @@
import {Writable} from 'stream'
import File from 'vinyl'
import {pipeline, through} from './streams'
import {Stage, StageArgs, StageConfig} from './types'
import {agnosticSource} from './helpers/agnostic-source'
import {createEnrichFiles} from './helpers/enrich-files'
import {createFileCache} from './helpers/file-cache'
import {createIdleHandler} from './helpers/idle-handler'
import {createWorkOptimizer} from './helpers/work-optimizer'
import {createWrite} from './helpers/writer'

export function isSourceFile(file: File) {
  return file.hash.indexOf(':') === -1
}

/**
 * Creates a pipeline stream that transforms files.
 * @param config Config object containing basic information for the file pipeline
 * @param stages Array of stages to apply to each file
 * @param bus Stream to pipe events to
 */
export function createPipeline(config: StageConfig, stages: Stage[], bus: Writable) {
  // Helper streams that don't account for business stages
  const source = agnosticSource(config)
  const input = through({objectMode: true}, (f, _, next) => next(null, f))
  const optimizer = createWorkOptimizer()
  const enrichFiles = createEnrichFiles()
  const srcCache = createFileCache(isSourceFile)
  const idleHandler = createIdleHandler(bus)
  const writer = createWrite(config.dest, bus)

  // Send this object to every stage
  const api: StageArgs = {
    config,
    input,
    bus,
    getInputCache: () => srcCache.cache,
  }

  // Initialize each stage
  const initializedStages = stages.map((stage) => stage(api))

  const stream = pipeline(
    source.stream, // files coming from the file system
    input, // files coming via the internal API

    // Prepare files
    enrichFiles.stream,
    srcCache.stream,
    optimizer.triage,

    // Run business stages
    ...initializedStages.map((stage) => stage.stream),

    // Tidy up
    writer.stream,
    optimizer.reportComplete,

    idleHandler.stream,
  )

  const ready = Object.assign({}, ...initializedStages.map((stage) => stage.ready))

  return {stream, ready}
}
packages/file-pipeline/src/transform-files.ts (new file)
@@ -0,0 +1,82 @@
import {pipe} from './streams'
import {createPipeline} from './pipeline'
import {pathExists, ensureDir, remove} from 'fs-extra'
import {through} from './streams'
import {createDisplay} from './display'
import {READY, ERROR_THROWN} from './events'
import {Stage} from './types'
import {Transform} from 'stream'

type SynchronizeFilesOptions = {
  ignore?: string[]
  include?: string[]
  watch?: boolean
  bus?: Transform
}

const defaultBus = through({objectMode: true}, (event, __, next) => {
  next(null, event)
})

/**
 * Assembles a file transform pipeline to convert blitz source code to something that
 * can run in NextJS.
 * @param config Configuration object
 */
export async function transformFiles(
  src: string,
  stages: Stage[],
  dest: string,
  options: SynchronizeFilesOptions,
): Promise<any> {
  const {
    // default options
    ignore = [],
    include = [],
    watch = false,
    bus = defaultBus,
  } = options

  // HACK: cleaning the dev folder on every restart means we do more work than necessary
  // TODO: remove this clean and devise a way to resolve differences in stream
  await clean(dest)

  // const errors = createErrorsStream(reporter.stream)
  const display = createDisplay()
  return new Promise((resolve, reject) => {
    const config = {
      cwd: src,
      src,
      dest,
      include,
      ignore,
      watch,
    }

    bus.on('data', ({type}) => {
      if (type === READY) {
        resolve(fileTransformPipeline.ready)
      }
    })

    const fileTransformPipeline = createPipeline(config, stages, bus)

    // Send source to fileTransformPipeline
    fileTransformPipeline.stream.on('error', (err) => {
      bus.write({type: ERROR_THROWN, payload: err})
      if (err) reject(err)
    })

    // Send reporter events to display
    pipe(bus, display.stream, (err) => {
      if (err) reject(err)
    })
  })
}

async function clean(path: string) {
  if (await pathExists(path)) {
    await remove(path)
  }
  return await ensureDir(path)
}
packages/file-pipeline/src/types.ts (new file)
@@ -0,0 +1,34 @@
import {Readable, Writable} from 'stream'
import {FileCache} from './helpers/file-cache'

/**
 * Configuration for Stages
 */
export type StageConfig = {
  src: string
  dest: string
  cwd: string
  include: string[]
  ignore: string[]
  watch: boolean
}

/**
 * Arguments object for Stages
 */
export type StageArgs = {
  config: StageConfig
  input: Writable
  bus: Writable
  getInputCache: () => FileCache
}

/**
 * Basic template for Pipeline Stage modules
 */
export type Stage = (
  a: StageArgs,
) => {
  stream: Readable
  ready?: Record<string, any>
} & Record<string, any>
packages/file-pipeline/tsconfig.json (new file)
@@ -0,0 +1,13 @@
{
  "extends": "../../tsconfig.json",
  "include": ["src"],
  "exclude": ["node_modules"],
  "compilerOptions": {
    "baseUrl": "./",
    "declarationDir": "./dist",
    "downlevelIteration": true,
    "paths": {
      "*": ["src/*", "node_modules/*"]
    }
  }
}
packages/server/README.md (new file)
@@ -0,0 +1,27 @@
# `@blitzjs/server`

Responsible for managing the development and production servers for Blitz.

The server package exposes some key functions for controlling Blitz.

## `dev()`

Starts the development server in watch mode.

```ts
import {dev} from '@blitzjs/server'

await dev(serverConfig)
```

## `prod()`

Starts the production server.

```ts
import {prod} from '@blitzjs/server'

await prod(serverConfig)
```
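Both functions take a `ServerConfig` object (see `src/config.ts`). A minimal example; the port value here is illustrative:

```ts
import {dev} from '@blitzjs/server'

// rootFolder and port are the required ServerConfig fields; options such as
// devFolder, buildFolder, and watch fall back to defaults via normalize()
await dev({
  rootFolder: process.cwd(),
  port: 3000,
})
```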
_This readme needs more work. If you want to help out please submit a PR_
@@ -13,7 +13,9 @@
  ],
  "scripts": {
    "clean": "rimraf dist",
    "predev": "wait-on ../core/dist/packages/core/src/index.d.ts",
    "wait:file-pipeline": "wait-on ../file-pipeline/dist/packages/file-pipeline/src/index.d.ts",
    "wait:core": "wait-on ../core/dist/packages/core/src/index.d.ts",
    "predev": "yarn wait:core && yarn wait:file-pipeline",
    "dev": "tsdx watch --verbose",
    "build": "tsdx build",
    "test": "tsdx test",
@@ -33,6 +35,7 @@
  "types": "dist/packages/server/src/index.d.ts",
  "dependencies": {
    "@blitzjs/display": "0.12.0",
    "@blitzjs/file-pipeline": "0.12.0",
    "cross-spawn": "7.0.2",
    "detect-port": "1.3.0",
    "fast-glob": "3.2.2",
@@ -1,32 +1,32 @@
import {resolve} from 'path'
import {synchronizeFiles} from './synchronizer'
import {move, remove, pathExists} from 'fs-extra'
import {ServerConfig, enhance} from './config'
import {ServerConfig, normalize} from './config'
import {nextBuild} from './next-utils'
import {saveBuild} from './build-hash'
import {configureStages} from './stages'

export async function build(config: ServerConfig) {
  const {
    rootFolder,
    transformFiles,
    buildFolder,
    nextBin,
    ignoredPaths,
    manifestPath,
    writeManifestFile,
    includePaths,
    watch = false,
  } = await enhance(config)

  await synchronizeFiles({
    src: rootFolder,
    dest: buildFolder,
    ignore,
    include,
    watch,
    manifestPath,
    writeManifestFile,
    ignoredPaths,
    includePaths,
  })
    ...stageConfig
  } = await normalize(config)

  const src = rootFolder
  const stages = configureStages(stageConfig)
  const dest = buildFolder
  const options = {
    ignore,
    include,
    watch,
  }

  await transformFiles(src, stages, dest, options)
  await nextBuild(nextBin, buildFolder)

  const rootNextFolder = resolve(rootFolder, '.next')
@@ -1,7 +0,0 @@
export function ciLog(name: string, obj: any) {
  if (process.env.CI && process.env.JEST_WORKER_ID !== undefined) {
    console.log('JEST_WORKER_ID:', process.env.JEST_WORKER_ID)
    console.log(name + '\n' + JSON.stringify(obj, null, 2) + '\n')
  }
  return obj
}
@@ -1,9 +1,10 @@
import {resolve} from 'path'
import {ciLog} from './ci-log'
import {resolveBinAsync} from './resolve-bin-async'
import {synchronizeFiles} from './synchronizer'
import {transformFiles} from '@blitzjs/file-pipeline'
import {parseChokidarRulesFromGitignore} from './parse-chokidar-rules-from-gitignore'

type Synchronizer = typeof transformFiles

export type ServerConfig = {
  rootFolder: string
  port: number
@@ -11,10 +12,20 @@ export type ServerConfig = {
  interceptNextErrors?: boolean
  devFolder?: string
  buildFolder?: string
  manifestPath?: string
  writeManifestFile?: boolean
  watch?: boolean
  synchronizer?: typeof synchronizeFiles
  transformFiles?: Synchronizer
}

type NormalizedConfig = Omit<ServerConfig, 'interceptNextErrors'> & {
  ignore: string[]
  include: string[]
  nextBin: string
  devFolder: string
  buildFolder: string
  transformFiles: Synchronizer
  writeManifestFile: boolean
  watch: boolean
}

const defaults = {
@@ -37,41 +48,23 @@ const defaults = {
  devFolder: '.blitz/caches/dev',
  buildFolder: '.blitz/caches/build',
  nextBinPatched: './node_modules/.bin/next-patched',
  manifestPath: '_manifest.json',
  writeManifestFile: true,
}

export async function enhance(config: ServerConfig) {
  const devFolder = resolve(config.rootFolder, config.devFolder || defaults.devFolder)
  const buildFolder = resolve(config.rootFolder, config.buildFolder || defaults.buildFolder)
  const manifestPath = resolve(devFolder, config.manifestPath || defaults.manifestPath)
  const writeManifestFile =
    typeof config.writeManifestFile === 'undefined' ? defaults.writeManifestFile : config.writeManifestFile

export async function normalize(config: ServerConfig): Promise<NormalizedConfig> {
  const nextBinOrig = await resolveBinAsync('next')
  const nextBinPatched = await resolveBinAsync('@blitzjs/server', 'next-patched')
  const git = parseChokidarRulesFromGitignore(resolve(process.cwd(), config.rootFolder))

  const nextBin = resolve(config.rootFolder, config.interceptNextErrors ? nextBinPatched : nextBinOrig)

  const {ignoredPaths: gitIgnoredPaths, includePaths: gitIncludePaths} = parseChokidarRulesFromGitignore(
    resolve(process.cwd(), config.rootFolder),
  )

  return ciLog(
    `
Logging the following to understand what is happening in our CI environment
and investigate why we have been getting random CI test failures.
This will be temporary.
`,
    {
      ...config,
      ignoredPaths: defaults.ignoredPaths.concat(gitIgnoredPaths),
      includePaths: defaults.includePaths.concat(gitIncludePaths),
      manifestPath,
      nextBin,
      buildFolder,
      devFolder,
      writeManifestFile,
    },
  )
  return {
    ...config,
    buildFolder: resolve(config.rootFolder, config.buildFolder ?? defaults.buildFolder),
    devFolder: resolve(config.rootFolder, config.devFolder ?? defaults.devFolder),
    ignore: defaults.ignoredPaths.concat(git.ignoredPaths),
    include: defaults.includePaths.concat(git.includePaths),
    nextBin: resolve(config.rootFolder, config.interceptNextErrors ? nextBinPatched : nextBinOrig),
    transformFiles: config.transformFiles ?? transformFiles,
    watch: config.watch ?? false,
    writeManifestFile: config.writeManifestFile ?? defaults.writeManifestFile,
  }
}
@@ -1,36 +1,38 @@
import {resolve} from 'path'
import {synchronizeFiles as defaultSynchronizer} from './synchronizer'
import {ServerConfig, enhance} from './config'
import {ServerConfig, normalize} from './config'
import {nextStartDev} from './next-utils'
import {configureStages} from './stages'

export async function dev(config: ServerConfig, readyForNextDev: Promise<any> = Promise.resolve()) {
export async function dev(
  {watch = true, ...config}: ServerConfig,
  readyForNextDev: Promise<any> = Promise.resolve(),
) {
  const {
    //
    rootFolder,
    transformFiles,
    nextBin,
    devFolder,
    ignoredPaths,
    manifestPath,
    writeManifestFile,
    includePaths,
    synchronizer: synchronizeFiles = defaultSynchronizer,
    watch = true,
  } = await enhance({
    ignore,
    include,
    ...stagesConfig
  } = await normalize({
    ...config,
    interceptNextErrors: true,
  })

  const src = resolve(rootFolder)
  const stages = configureStages(stagesConfig)
  const dest = resolve(rootFolder, devFolder)
  const options = {
    ignore,
    include,
    watch,
  }

  const [{manifest}] = await Promise.all([
    synchronizeFiles({
      src,
      dest,
      watch,
      ignoredPaths,
      includePaths,
      manifestPath,
      writeManifestFile,
    }),
    transformFiles(src, stages, dest, options),
    // Ensure next does not start until parallel processing completes
    readyForNextDev,
  ])
@@ -2,8 +2,7 @@ export {withBlitz} from './with-blitz'
export {build} from './build'
export {dev} from './dev'
export {prod} from './prod'
export {Manifest} from './synchronizer/pipeline/rules/manifest'
export {ManifestLoader} from './synchronizer/manifest-loader'
export {enhance} from './config'
export {normalize} from './config'
export {resolveBinAsync} from './resolve-bin-async'
export {ManifestLoader} from './manifest-loader'
export * from './rpc'
@@ -1,5 +1,5 @@
import {readFile} from 'fs'
import {Manifest} from './pipeline/rules/manifest'
import {Manifest} from './stages/manifest'

export const ManifestLoader = {
  async load(filename: string) {
@@ -1,7 +1,7 @@
import {spawn} from 'cross-spawn'
import detect from 'detect-port'
import {Manifest} from './synchronizer/pipeline/rules/manifest'
import {through} from './synchronizer/streams'
import {Manifest} from './stages/manifest'
import {through} from './streams'
import {ServerConfig} from 'config'

function createOutputTransformer(manifest: Manifest, devFolder: string) {
@@ -1,10 +1,10 @@
import {ServerConfig, enhance} from './config'
import {ServerConfig, normalize} from './config'
import {nextStart} from './next-utils'
import {build} from './build'
import {alreadyBuilt} from './build-hash'

export async function prod(config: ServerConfig) {
  const {rootFolder, buildFolder, nextBin} = await enhance(config)
  const {rootFolder, buildFolder, nextBin} = await normalize(config)
  if (!(await alreadyBuilt(buildFolder))) {
    await build(config)
  }
packages/server/src/stages/README.md (new file)
@@ -0,0 +1,9 @@
# Pipeline Stages for Blitz

These are the business-rule transformations that are run on source files while in transit during a Blitz start or a Blitz build.

Each folder represents a business rule; the rules should be separately applicable and should not depend on each other.

These live here, rather than in the pipeline package, because of their dependencies and to keep [@blitzjs/file-pipeline](../../../file-pipeline/README.md) a general utility.

For more information see [@blitzjs/file-pipeline](../../../file-pipeline/README.md)
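For orientation, the smallest possible stage module would look something like this - a hypothetical no-op stage, not one of the stages in this folder:

```ts
import {Stage} from '@blitzjs/file-pipeline'
import {through} from '../streams'

// Pass every file through unchanged
export const createStageNoop: Stage = () => {
  const stream = through({objectMode: true}, (file, _, next) => next(null, file))
  return {stream}
}
```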
@@ -2,15 +2,15 @@ import {pathExistsSync} from 'fs-extra'
import {resolve} from 'path'
import File from 'vinyl'

import {through} from '../../../streams'
import {Rule} from '../../../types'
import {through} from '../../streams'
import {Stage} from '@blitzjs/file-pipeline'

const isNextConfigPath = (p: string) => /next\.config\.(js|ts)/.test(p)
const isNowBuild = () => process.env.NOW_BUILDER || process.env.VERCEL_BUILDER
/**
 * Returns a Rule that manages converting from blitz.config.js to next.config.js
 * Returns a Stage that manages converting from blitz.config.js to next.config.js
 */
export const createRuleConfig: Rule = ({config, input}) => {
export const createStageConfig: Stage = ({config, input}) => {
  // Preconditions
  const hasNextConfig = pathExistsSync(resolve(config.src, 'next.config.js'))
  const hasBlitzConfig = pathExistsSync(resolve(config.src, 'blitz.config.js'))
packages/server/src/stages/index.ts (new file)
@@ -0,0 +1,16 @@
import {createStageRelative} from './relative'
import {createStagePages} from './pages'
import {createStageRpc} from './rpc'
import {createStageConfig} from './config'
import {createStageManifest} from './manifest'

// These create the pipeline stages that are run as the business rules for Blitz
// Read this folder's README for more information
export const configureStages = (config: {writeManifestFile: boolean}) => [
  // Order is important
  createStageRelative,
  createStagePages,
  createStageRpc,
  createStageConfig,
  createStageManifest(config.writeManifestFile),
]
packages/server/src/stages/manifest/index.ts (new file)
@@ -0,0 +1,108 @@
import File from 'vinyl'
import {through} from '../../streams'
import {Stage} from '@blitzjs/file-pipeline'

type ManifestVO = {
  keys: {[k: string]: string}
  values: {[k: string]: string}
}

export class Manifest {
  private keys: {[k: string]: string} = {}
  private values: {[k: string]: string} = {}
  private events: string[] = []

  constructor(obj?: ManifestVO) {
    if (obj) {
      this.keys = obj.keys
      this.values = obj.values
    }
  }

  getByKey(key: string) {
    return this.keys[key]
  }

  getByValue(value: string) {
    return this.values[value]
  }

  setEntry(key: string, dest: string) {
    this.keys[key] = dest
    this.values[dest] = key
    this.events.push(`set:${dest}`)
  }

  removeKey(key: string) {
    const dest = this.getByKey(key)
    if (!dest) {
      throw new Error(`Key "${key}" not found`)
    }
    delete this.values[dest]
    delete this.keys[key]
    this.events.push(`del:${key}`)
    return dest
  }

  getEvents() {
    return this.events
  }

  toJson(compact = false) {
    return JSON.stringify(this.toObject(), null, compact ? undefined : 2)
  }

  toObject() {
    return {
      keys: this.keys,
      values: this.values,
    }
  }

  static create(obj?: ManifestVO) {
    return new Manifest(obj)
  }
}

/**
 * Returns a stage to create and write the file error manifest so we can
 * link to the correct files on a NextJS browser error.
 */
export const createStageManifest = (
  writeManifestFile: boolean = true,
  manifestPath: string = '_manifest.json',
) => {
  const stage: Stage = () => {
    const manifest = Manifest.create()

    const stream = through({objectMode: true}, function (file: File, _, next) {
      this.push(file) // Send file on through to be written

      const [origin] = file.history
      const dest = file.path

      if (file.event === 'add' || file.event === 'change') {
        manifest.setEntry(origin, dest)
      }

      if (file.event === 'unlink' || file.event === 'unlinkDir') {
        manifest.removeKey(origin)
      }

      if (writeManifestFile) {
        this.push(
          new File({
            // NOTE: no need for a hash because this is a manifest
            // and doesn't count as work
            path: manifestPath,
            contents: Buffer.from(manifest.toJson(false)),
          }),
        )
      }
      next()
    })

    return {stream, ready: {manifest}}
  }
  return stage
}
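A quick illustration of the `Manifest` API added above - the paths are made up, and only methods shown in this diff are used:

```ts
const manifest = Manifest.create()

// Map an origin file to the destination it was written to
manifest.setEntry('app/pages/index.tsx', 'pages/index.tsx')

manifest.getByKey('app/pages/index.tsx') // => 'pages/index.tsx'
manifest.getByValue('pages/index.tsx') // => 'app/pages/index.tsx'

manifest.removeKey('app/pages/index.tsx')
manifest.getEvents() // => ['set:pages/index.tsx', 'del:app/pages/index.tsx']
```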
@@ -1,33 +1,25 @@
import {log} from '@blitzjs/display'
import {through} from './streams'
import {Writable} from 'stream'
import {ERROR_THROWN} from './events'
import {ERROR_THROWN} from '@blitzjs/file-pipeline'

export type Event<T> = {type: string; payload: T}

type Error = DuplicatePathError | NestedRouteError

/**
 * Returns an object with a stream that takes operational errors and prepares them for the console.
 */
export function createErrorsStream(reporter: Writable) {
  const stream = through({objectMode: true}, (err: Error, _, next) => {
    reporter.write({type: ERROR_THROWN, payload: err})

export function handleErrors(bus: Writable) {
  bus.on('data', (event: Event<Error>) => {
    if (event.type !== ERROR_THROWN) return
    const err = event.payload as Error
    if (err instanceof DuplicatePathError) {
      renderDuplicatePathError(err)
      return next()
      return
    }

    if (err instanceof NestedRouteError) {
      renderNestedRouteError(err)
      return next()
      return
    }

    next(err)
  })

  return {stream}
}

export class DuplicatePathError extends Error {
@@ -1,17 +1,19 @@
import {join} from 'path'
import File from 'vinyl'
import {through} from '../../../streams'
import {getDuplicatePaths, absolutePathTransform} from '../../utils'
import {RuleArgs} from '../../../types'
import {DuplicatePathError, NestedRouteError} from '../../../errors'
import {getDuplicatePaths, absolutePathTransform} from '../utils'
import {through} from '../../streams'
import {Stage} from '@blitzjs/file-pipeline'
import {handleErrors, DuplicatePathError, NestedRouteError} from './errors'

/**
 * Returns a Rule to assemble NextJS `/pages` folder from within
 * Returns a Stage to assemble NextJS `/pages` folder from within
 * the BlitzJS folder structure
 */
export const createRulePages = ({config, errors, getInputCache}: RuleArgs) => {
export const createStagePages: Stage = ({config, bus, getInputCache}) => {
  const {src} = config

  handleErrors(bus)

  const pagesTransformer = absolutePathTransform(src)(pagesPathTransformer)
  const apiTransformer = absolutePathTransform(src)(apiPathTransformer)

@@ -27,7 +29,6 @@ export const createRulePages = ({config, errors, getInputCache}: RuleArgs) => {
    duplicatePages,
  )

  errors.write(err)
  return next(err)
}

@@ -40,7 +41,6 @@ export const createRulePages = ({config, errors, getInputCache}: RuleArgs) => {
    duplicateApi,
  )

  errors.write(err)
  return next(err)
}

@@ -58,7 +58,6 @@ export const createRulePages = ({config, errors, getInputCache}: RuleArgs) => {

  const err = new NestedRouteError(message, secondary, nestedApiRoutes)

  errors.write(err)
  return next(err)
}
@@ -1,13 +1,13 @@
import {through} from '../../../streams'
import File from 'vinyl'
import {Rule} from '../../../types'
import {through} from '../../streams'
import {Stage} from '@blitzjs/file-pipeline'
import path from 'path'
import slash from 'slash'

/**
 * Returns a Rule that converts relative file paths to absolute
 * Returns a Stage that converts relative file paths to absolute
 */
export const createRuleRelative: Rule = () => {
export const createStageRelative: Stage = () => {
  const stream = through({objectMode: true}, (file: File, _, next) => {
    const cwd = process.cwd()
    const filecontents = file.contents
@@ -1,14 +1,14 @@
import File from 'vinyl'
import slash from 'slash'
import {absolutePathTransform} from '../../utils'
import {absolutePathTransform} from '../utils'
import {relative} from 'path'
import {through} from '../../../streams'
import {Rule} from '../../../types'
import {through} from '../../streams'
import {Stage} from '@blitzjs/file-pipeline'

/**
 * Returns a Rule that manages generating the internal RPC commands and handlers
 * Returns a Stage that manages generating the internal RPC commands and handlers
 */
export const createRuleRpc: Rule = function configure({config: {src}}) {
export const createStageRpc: Stage = function configure({config: {src}}) {
  const fileTransformer = absolutePathTransform(src)

  const getRpcPath = fileTransformer(rpcPath)
@@ -1,4 +1,3 @@
import File from 'vinyl'
import {relative, resolve} from 'path'

// Transform an absolute path with a relative path transformer
@@ -10,10 +9,6 @@ export const absolutePathTransform = (sourceFolder = '') => (relativeTransformer
  return resolve(sourceFolder, transformedPath)
}

export function isSourceFile(file: File) {
  return file.hash.indexOf(':') === -1
}

export const getDuplicatePaths = (entries: string[], type: string) => {
  const allRoutes = entries.filter((route) => route.includes(type))
  const cleanRoutes = allRoutes.map((route) => route.split(`${type}`)[1])
packages/server/src/streams.ts (new file)
@@ -0,0 +1,39 @@
// The following are a loose collection of stream
// helpers based on the mississippi library

import {Stream} from 'stream'

// Avoids the error "'default' is imported from external module 'pump' but never used"
// import pipe from 'pump'
// export {pipe}

import through from 'through2'
export {through}

export {default as parallel} from 'parallel-transform'

// Fix issues with interop
import from2 from 'from2'
type From2 = typeof from2
const from: From2 = require('from2')
export {from}

// Fix issues with interop
import flushWriteStream from 'flush-write-stream'
type FlushWriteStream = typeof flushWriteStream
const to: FlushWriteStream = require('flush-write-stream')
export {to}

import pumpify from 'pumpify'

// Bad types
type PumpifyFn = (...streams: Stream[]) => pumpify
// const pipeline = (pumpifyFn as any) as PumpifyFn & {obj: PumpifyFn}
const pipeline = (pumpify.ctor({
  autoDestroy: false,
  destroy: false,
  objectMode: true,
  highWaterMark: 160,
}) as any) as PumpifyFn

export {pipeline}
@@ -1,315 +0,0 @@
|
||||
# Synchronizer
|
||||
|
||||
Design goals
|
||||
|
||||
- Rules: Related logic needs to live together
|
||||
- Everything is a rule
|
||||
- Efficiency
|
||||
- Parallel processing
|
||||
- Cleaner Architecture for Dirty restart functionality
|
||||
- Agnostic input file watcher / glob
|
||||
- Simplify tests
|
||||
|
||||
# Why Streams?
|
||||
|
||||
Initially, Blitz will be used by people with small projects however as the number files and throughput increases we will need to use an architecture that allows for large parallel throughput with low memory consumption. Node is built on streams as a primitive so it makes sense to utilize what is available. The Gulp ecosystems provide several tools for managing streams of files so that makes sense to use those tools when available. Because refactoring to streams later would be extremely difficult and painful not starting with streams would be a design mistake.
|
||||
|
||||
# Why not RxJS?
|
||||
|
||||
RxJS could be a good match for streaming architectures and introduces some really powerful tools for managing stream operations. As we are using object streams it would also possibly simplify some of the boilerplate using RxJS. However, certain operators in RxJS can be inapproachable for newer developers and tend to encourage too much abstraction. It is also an extra dependency that increases the learning surface of the codebase and as we are stuck with understanding basic node streams, in any case, it makes sense to avoid RxJS until absolutely necessary.
|
||||
|
||||
# Broad Architecture
|
||||
|
||||
Our architecture is a big file transform pipeline. Every business concern is colocated in a rule which basically exports a stream. There are rules for general business concerns such as:
|
||||
|
||||
- Blitz Config
|
||||
- Compiling Routes
|
||||
- RPC Generation
|
||||
- File lookup table generation
|
||||
|
||||
## File Transform Pipeline
|
||||
|
||||
<img src="diagram-file-transform.png" />
|
||||
|
||||
# Stream helpers
|
||||
|
||||
So Node streams are a little incompatible on old versions of Node and there are a few compatibility libs we are using to help us work with streams.
|
||||
|
||||
https://www.freecodecamp.org/news/rxjs-and-node-8f4e0acebc7c/
|
||||
|
||||
Helper Libs
|
||||
|
||||
- Pipe - [pump](https://npmjs.com/package/pump)
|
||||
- Pipeline - [pumpify](https://npmjs.com/package/pumpify)
|
||||
- Through - [through2](https://npmjs.com/package/through2)
|
||||
- Concat - [concat-stream](https://npmjs.com/package/concat-stream)
|
||||
- Parallel - [parallel-transform](https://npmjs.com/package/parallel-transform)
|
||||
- Node Compat - [readable-stream](https://npmjs.com/package/readable-stream)
|
||||
|
||||
# A good way to work with streams
|
||||
|
||||
A pattern we have found that works well is using a constructor function to accept connectors and return a stream as well as any shared data you need to provide to other components connectors. You will see this a lot around the synchronizer.
|
||||
|
||||
```ts
|
||||
type CreatorFn = ConnectionsOrConfig => StreamAsWellAsSharedData
|
||||
```
|
||||
|
||||
An example might look like this:
|
||||
|
||||
```ts
|
||||
// Config -> Merged Glob && FileWatcher
|
||||
const source = agnosticSource({cwd: src, include, ignore, watch})
|
||||
|
||||
// you can then pipe the stream to a pipeline
|
||||
pipe(source.stream, fileTransformPipeline)
|
||||
```
|
||||
|
||||
The reason we don't just return a stream is that often we need to return other data and share it elsewhere, for example, to analyze input file structure in the pages rule we use a file cache.
|
||||
|
||||
```ts
|
||||
// Here
|
||||
const fileCache = createFileCache(config)
|
||||
const pageRule = createPageRule(fileCache.cache)
|
||||
|
||||
pipeline(
|
||||
fileCache.stream, // manages the fileCache to be used by other streams
|
||||
// ...
|
||||
pageRule.stream, // has access to the fileCache
|
||||
)
|
||||
```
|
||||
|
||||
# View rendering and error handling

The cli view is provided by a [reporter](./reporter) stream which accepts Events that it manages and displays. This is responsible for rendering everything to the view.

Secondly, there is an [errors](./errors) stream which works in a similar way but for Operational Errors.

It is important to differentiate between Operational Errors and Exceptions. Exceptions are probably programmer errors, whereas Operational Errors are more likely the result of the user providing us with the wrong input/files.

In this architecture, we write Operational Errors to the error stream and catch Exceptions in stream error handlers. We could simply drop Exceptions on the floor, but by attaching a view to the end of the error stream we can print nice messages for our users.

Because everything is streams, we can get pretty creative with how we present things to the user and give them realtime updates. This might make it easier to integrate with reactive cli view frameworks at a later point.

<img src="diagram-error-and-view.png" />
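A minimal sketch of the distinction (an assumed example; the `errors` stream here mirrors the `errors: Writable` that is passed to every rule, as shown later in this README):

```ts
import through from 'through2'
import File from 'vinyl'
import {Writable} from 'stream'

// Hypothetical stage: `errors` is the operational error stream
const createStage = (errors: Writable) => {
  const stream = through.obj((file: File, _, next) => {
    if (file.basename === 'next.config.js') {
      // Operational error: bad user input, report it and keep the pipeline alive
      errors.write(new Error('Cannot have a next.config.js file'))
      return next() // drop the file, keep streaming
    }
    next(null, file)
  })

  // Exception: a programmer error surfaces via the stream's error event
  stream.on('error', (err) => {
    console.error('Unexpected exception in stage:', err)
  })

  return {stream}
}
```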
# Synchronizer Event Pipeline

The main element within the file pipeline is the [file transform pipeline](./pipeline/index.ts).

This is a big stream pipeline that handles the transformation and writing of files. The concept is that you can write an [evented vinyl file object](#evented-vinyl-files) to it and it will run a series of transformations on that file, then write it to disk or delete it at the end.
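As a rough usage sketch (hypothetical; `createPipeline`, `config`, `errors` and `reporter` are the names used by the pipeline factory shown later in this commit):

```ts
import File from 'vinyl'

const {stream} = createPipeline(config, errors.stream, reporter.stream)

// An 'add' event: the file will be transformed and written to the destination
const added = new File({path: '/path/to/foo.ts', contents: Buffer.from('export {}')})
;(added as any).event = 'add'
stream.write(added)

// An 'unlink' event: the corresponding output will be deleted
const removed = new File({path: '/path/to/foo.ts', contents: null})
;(removed as any).event = 'unlink'
stream.write(removed)
```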
# Evented Vinyl Files

Evented Vinyl Files are [Vinyl Files](https://github.com/gulpjs/vinyl) with events attached to them.
```ts
const isDelete = (file: File) => file.isNull() && file.event === 'unlink'

// The input file at '/path/to/foo' was deleted
// This can be transformed during the process phase
return new Vinyl({
  path: '/path/to/foo',
  contents: null,
  event: 'unlink',
})
```
```ts
// Add file at '/path/to/foo'
new Vinyl({
  path: '/path/to/foo',
  contents: someContentStream,
})
```
# Input agnostic

The pipeline should be input agnostic, i.e. it should not matter whether input comes from a file watcher or a folder glob. To help with that we have created an agnostic input stream that takes glob config and returns a file stream. It consumes input from both chokidar and vinyl-fs.
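The real stream lives in this package; the following is only a rough sketch of the idea, with the config shape inferred from the `agnosticSource({cwd, include, ignore, watch})` call shown earlier:

```ts
import chokidar from 'chokidar'
import {src} from 'vinyl-fs'
import File from 'vinyl'
import {resolve} from 'path'
import {PassThrough} from 'stream'

type SourceConfig = {cwd: string; include: string[]; ignore: string[]; watch: boolean}

const agnosticSource = ({cwd, include, ignore, watch}: SourceConfig) => {
  const stream = new PassThrough({objectMode: true})

  if (watch) {
    // Watch mode: translate chokidar events into evented vinyl files
    // (a real implementation would also read contents for add/change events)
    chokidar.watch(include, {cwd, ignored: ignore}).on('all', (event, path) => {
      const file = new File({path: resolve(cwd, path), contents: null})
      ;(file as any).event = event
      stream.write(file)
    })
  } else {
    // Glob mode: vinyl-fs already produces vinyl files; tag each one as an "add"
    src(include, {cwd, ignore})
      .on('data', (file: File) => {
        ;(file as any).event = 'add'
        stream.write(file)
      })
      .on('end', () => stream.end())
  }

  return {stream}
}
```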
# Optimization

The input stage manages the intake of evented vinyl files. Files that have already been processed, or are currently being processed, should not be processed again. Here we try to maintain a running list of files to work on, keyed by a hash of their filename and mtime.
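A rough sketch of such a guard (assumed shape; the `triage`/`reportComplete` pair mirrors the work optimizer used by the pipeline shown later in this commit):

```ts
import crypto from 'crypto'
import through from 'through2'
import File from 'vinyl'

// Sketch of a work optimizer: skip files we have already seen in this form
const createWorkOptimizer = () => {
  const seen = new Set<string>()

  // key each file by its original path + mtime
  const hashOf = (file: File) =>
    crypto.createHash('md5').update(file.history[0] + String(file.stat?.mtime)).digest('hex')

  // triage runs early and drops files that are in flight or already done
  const triage = through.obj((file: File, _, next) => {
    const hash = hashOf(file)
    if (seen.has(hash)) return next() // already processed: drop
    seen.add(hash)
    next(null, file)
  })

  // reportComplete runs last; a real implementation could persist the
  // completed hashes here to support dirty restarts
  const reportComplete = through.obj((file: File, _, next) => {
    next(null, file)
  })

  return {triage, reportComplete}
}
```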
# Analysis

Some types of analysis need a list of all the files; other types do not.

Analysis needs to be done in-stream as new information comes in. E.g. when someone renames a file, that file goes to the analysis engine, which works out invariants as they occur without requiring a sweep of the entire file system.

For this, we can create file caches which represent projections of the file system and update based on input file events.
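As a sketch (assumed shape; the real helper is `createFileCache` in this package), such a cache stage might look like:

```ts
import through from 'through2'
import File from 'vinyl'

// A file cache: a projection of the file system kept up to date by events
const createFileCache = (filter: (file: File) => boolean) => {
  const cache = new Map<string, File>()

  const stream = through.obj((file: File, _, next) => {
    if (filter(file)) {
      const event = (file as any).event as string | undefined
      if (event === 'unlink' || event === 'unlinkDir') cache.delete(file.path)
      else cache.set(file.path, file)
    }
    next(null, file) // always pass the file along untouched
  })

  return {stream, cache}
}
```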
# Rules

Rule streams represent the Blitz-specific rules we need the file pipeline to apply.

Possible things a rule can do:
- Change a file's path or contents
- Drop the file from further processing, i.e. don't copy it
- Add new files to the input stream, associating the new files with the original
- Write an error to the error stream

Rules can create a new file to add to the head of the queue.

They can hold state in a closure.

They should be managed in a list.

The entire chain can be a list of streams.
# Examples

Some code examples:
```ts
// Rules represent business rules
const rulePages = createRulePages(api)
const ruleRpc = createRuleRpc(api)
const ruleConfig = createRuleConfig(api)
const ruleWrite = createRuleWrite(api)

const stream = pipeline(
  // They can then be used in the pipeline
  input,
  rulePages.stream,
  ruleRpc.stream,
  ruleConfig.stream,
  ruleWrite.stream,
)
```
```ts
import {resolve} from 'path'
import {pathExistsSync} from 'fs-extra'
import {through} from './streams'

// Typical Rule
export default ({config, input, errors, getInputCache}) => {
  const service = createSomeService()

  // This is an incremental file cache that
  // gets built as Files are read
  const cache = getInputCache()

  // Probing sync methods are probably ok here as this is effectively synchronous,
  // could be considered bootstrapping, and runs first, but you should not write
  // to the file system. Use input.write() instead.
  if (!pathExistsSync(resolve(config.src, 'blitz.config.js'))) {
    input.write(resolve(config.src, 'blitz.config.js'), 'Hello World')
  }

  const stream = through.obj(function (file, enc, next) {
    // You can test for changes in the input cache
    if (cache.filter((path) => /next\.config\.js$/.test(path)).length > 0) {
      const err = new Error('Cannot have next config!')
      err.name = 'NextConfigError'
      errors.write(err)
    }

    // process file in some way
    file.path = file.path.toUpperCase()

    // you can push to the stream output (note you cannot use arrow fns if you rely on `this`)
    this.push(file)

    // or send the file onwards to be written; this does the same thing as this.push()
    next(null, file)
  })

  return {
    stream,
    service, // provide that service to consumers outside the stream
  }
}
```
# Future thinking

One future issue we have been trying to account for here is how to solve the dirty sync problem with streams. Basically, we want Blitz to do as little work as possible. At this point, we blow away Blitz folders when we start, but it would be smarter to analyze the source and destination folders and only manipulate the files that actually need to change. This is not required right now, but it will be a consideration as we try to make this thing faster and faster to live up to its name. To prepare for this we have set up a work optimizer that checks the hash of the input file and guards against duplicate work being done.

The following is a rough plan for how to do this. (Likely to change/improve at a later point.)
- Encode vinyl files + stats

```ts
const hash = crypto
  .createHash('md5')
  .update(file.path + file.stat.mtime)
  .digest('hex')

file.hash = hash
```
- Use those hashes to index file details in the following structures:
```ts
// built up as the first step during input
const input = {abc123def456: '/foo/bar/baz', def456abc123: '/foo/bar/bop'}

// built up as the last step, just before the file write
const complete = {
  abc123def456: {
    input: '/foo/bar/baz',
    output: ['/bas/boop/blop', '/bas/boop/ding', '/bas/boop/bar'],
  },
  def456abc123: {
    input: '/foo/bar/bing',
    output: ['/bas/boop/ping', '/bas/boop/foo', '/bas/boop/fawn'],
  },
  cbd123aef456: {
    input: '/foo/bar/bop',
    output: ['/bas/boop/thing'],
  },
}
```
Has this file hash been processed?

```ts
const hasBeenProcessed = (hash) => !!output[hash]
```
Which files do I need to delete based on input?

```ts
// output hashes that no longer have a matching input need their files deleted
const deleteHashes = Object.keys(output).filter((hash) => !input[hash])
```
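Conversely, a one-liner (an assumed complement to the snippet above, using the same structures) for the inputs that still need work:

```ts
// input hashes with no recorded output yet still need processing
const pendingHashes = Object.keys(input).filter((hash) => !output[hash])
```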
- Output can also be indexed by file path to keep going with our hacky error mapping (eventually this should probably be a sourcemap)
```json
{
  "/bas/boop/bar": "/foo/bar/baz",
  "/bas/boop/blop": "/foo/bar/baz",
  "/bas/boop/ding": "/foo/bar/baz",
  "/bas/boop/fawn": "/foo/bar/bing",
  "/bas/boop/foo": "/foo/bar/bing",
  "/bas/boop/ping": "/foo/bar/bing",
  "/bas/boop/thing": "/foo/bar/bop"
}
```
Does my output match my input, i.e. am I in a stable state? Or, in our case, can we resolve the promise?
```ts
function isStable(input, output) {
  if (!input || !output) {
    return false // We are not stable if we don't have both an input and an output
  }

  const inputKeys = Object.keys(input)
  const outputKeys = Object.keys(output)

  if (inputKeys.length !== outputKeys.length) {
    return false
  }

  for (let i = 0; i < inputKeys.length; i++) {
    if (outputKeys[i] !== inputKeys[i]) {
      return false
    }
  }

  return true
}
```
Binary file not shown.
Before Width: | Height: | Size: 25 KiB
@@ -1,90 +0,0 @@
import {Manifest} from './pipeline/rules/manifest'
import {pipe} from './streams'
import {createPipeline} from './pipeline'
import {agnosticSource} from './pipeline/helpers/agnostic-source'
import {pathExists, ensureDir, remove} from 'fs-extra'
import {through} from './streams'
import {createDisplay} from './display'
import {createErrorsStream} from './errors'
import {READY} from './events'

type SynchronizeFilesInput = {
  src: string
  dest: string
  watch: boolean
  manifestPath: string
  ignoredPaths: string[]
  includePaths: string[]
  writeManifestFile: boolean
}

type SynchronizeFilesOutput = {
  manifest: Manifest
}

/**
 * Assembles a file transform pipeline to convert blitz source code to something that
 * can run in NextJS.
 * @param config Configuration object
 */
export async function synchronizeFiles({
  dest,
  src,
  manifestPath,
  watch,
  includePaths: include,
  ignoredPaths: ignore,
  writeManifestFile,
}: SynchronizeFilesInput): Promise<SynchronizeFilesOutput> {
  // HACK: cleaning the dev folder on every restart means we do more work than necessary
  // TODO: remove this clean and devise a way to resolve differences in stream
  await clean(dest)

  const reporter = {
    stream: through({objectMode: true}, (event, __, next) => {
      next(null, event)
    }),
  }

  const errors = createErrorsStream(reporter.stream)
  const display = createDisplay()
  return new Promise((resolve, reject) => {
    const config = {
      cwd: src,
      src: src,
      dest: dest,
      manifest: {
        path: manifestPath,
        write: writeManifestFile,
      },
    }

    reporter.stream.on('data', ({type}) => {
      if (type === READY) {
        resolve({
          manifest: fileTransformer.manifest,
        })
      }
    })

    const catchErrors = (err: any) => {
      if (err) reject(err)
    }

    const source = agnosticSource({cwd: src, include, ignore, watch})
    const fileTransformer = createPipeline(config, errors.stream, reporter.stream)

    // Send source to fileTransformer
    pipe(source.stream, fileTransformer.stream, catchErrors)

    // Send reporter events to display
    pipe(reporter.stream, display.stream, catchErrors)
  })
}

async function clean(path: string) {
  if (await pathExists(path)) {
    await remove(path)
  }
  return await ensureDir(path)
}
@@ -1,74 +0,0 @@
import {pipeline, through} from '../streams'
import {RuleConfig, RuleArgs} from '../types'
import {createEnrichFiles} from './helpers/enrich-files'
import {createFileCache} from './helpers/file-cache'
import {createIdleHandler} from './helpers/idle-handler'
import {createWorkOptimizer} from './helpers/work-optimizer'
import {createRuleConfig} from './rules/config'
import {createRuleManifest} from './rules/manifest'
import {createRuleRelative} from './rules/relative'
import {createRulePages} from './rules/pages'
import {createRuleRpc} from './rules/rpc'
import {createRuleWrite} from './rules/write'
import {isSourceFile} from './utils'
import {Writable} from 'stream'

const input = through({objectMode: true}, (f, _, next) => next(null, f))

/**
 * Creates a pipeline stream that transforms files.
 * @param config Config object containing basic information for the file pipeline
 * @param errors Stream that takes care of all operational error rendering
 * @param reporter Stream that takes care of all view rendering
 */
export function createPipeline(config: RuleConfig, errors: Writable, reporter: Writable) {
  // Helper streams don't account for business rules
  const optimizer = createWorkOptimizer()
  const enrichFiles = createEnrichFiles()
  const srcCache = createFileCache(isSourceFile)
  const idleHandler = createIdleHandler(reporter)

  // Send this DI object to every rule
  const api: RuleArgs = {
    config,
    input,
    reporter,
    errors,
    getInputCache: () => srcCache.cache,
  }

  // Rules represent business rules
  // Perhaps if it makes sense we can iterate over rules passed in
  const rulePages = createRulePages(api)
  const ruleRpc = createRuleRpc(api)
  const ruleConfig = createRuleConfig(api)
  const ruleRelative = createRuleRelative(api)
  const ruleWrite = createRuleWrite(api)
  const ruleManifest = createRuleManifest(api)

  const stream = pipeline(
    input,

    // Preparing files
    enrichFiles.stream,
    srcCache.stream,
    optimizer.triage,

    // Run business rules
    ruleRelative.stream,
    rulePages.stream,
    ruleRpc.stream,
    ruleConfig.stream,
    ruleWrite.stream,

    // Tidy up
    optimizer.reportComplete,

    // TODO: try and move this up to business rules section
    ruleManifest.stream,

    idleHandler.stream,
  )

  return {stream, manifest: ruleManifest.manifest}
}
@@ -1,113 +0,0 @@
import File from 'vinyl'
import {through, pipeline} from '../../../streams'
import {dest} from 'vinyl-fs'
import gulpIf from 'gulp-if'
import {resolve} from 'path'
import {Rule} from '../../../types'

type ManifestVO = {
  keys: {[k: string]: string}
  values: {[k: string]: string}
}

export class Manifest {
  private keys: {[k: string]: string} = {}
  private values: {[k: string]: string} = {}
  private events: string[] = []

  constructor(obj?: ManifestVO) {
    if (obj) {
      this.keys = obj.keys
      this.values = obj.values
    }
  }

  getByKey(key: string) {
    return this.keys[key]
  }

  getByValue(value: string) {
    return this.values[value]
  }

  setEntry(key: string, dest: string) {
    this.keys[key] = dest
    this.values[dest] = key
    this.events.push(`set:${dest}`)
  }

  removeKey(key: string) {
    const dest = this.getByKey(key)
    if (!dest) {
      throw new Error(`Key "${key}" returns no destination`)
    }
    delete this.values[dest]
    delete this.keys[key]
    this.events.push(`del:${key}`)
    return dest
  }

  getEvents() {
    return this.events
  }

  toJson(compact = false) {
    return JSON.stringify(this.toObject(), null, compact ? undefined : 2)
  }

  toObject() {
    return {
      keys: this.keys,
      values: this.values,
    }
  }

  static create(obj?: ManifestVO) {
    return new Manifest(obj)
  }
}

const setManifestEntry = (manifest: Manifest) => {
  const stream = through({objectMode: true}, (file: File, _, next) => {
    const [origin] = file.history
    const dest = file.path
    if (file.event === 'add' || file.event === 'change') {
      manifest.setEntry(origin, dest)
    }

    if (file.event === 'unlink' || file.event === 'unlinkDir') {
      manifest.removeKey(origin)
    }

    next(null, file)
  })
  return {stream}
}

const createManifestFile = (manifest: Manifest, fileName: string, compact: boolean = false) => {
  const stream = through({objectMode: true}, (_, __, next) => {
    const manifestFile = new File({
      path: fileName,
      contents: Buffer.from(manifest.toJson(compact)),
    })

    next(null, manifestFile)
  })
  return {stream}
}

/**
 * Returns a rule to create and write the file error manifest so we can
 * link to the correct files on a NextJS browser error.
 */
// TODO: Offload the file writing to later and write with all the other file writing
export const createRuleManifest: Rule = ({config}) => {
  const manifest = Manifest.create()
  const stream = pipeline(
    setManifestEntry(manifest).stream,
    createManifestFile(manifest, resolve(config.cwd, config.manifest.path)).stream,
    gulpIf(config.manifest.write, dest(config.src)),
  )

  return {stream, manifest}
}
@@ -1,35 +0,0 @@
import {Readable, Writable} from 'stream'
import {FileCache} from './pipeline/helpers/file-cache'

/**
 * Configuration for Rules
 */
export type RuleConfig = {
  src: string
  dest: string
  cwd: string
  manifest: {
    path: string
    write: boolean
  }
}

/**
 * Arguments object for Rules
 */
export type RuleArgs = {
  config: RuleConfig
  input: Writable
  reporter: Writable
  errors: Writable
  getInputCache: () => FileCache
}

/**
 * Basic template for Business rules modules
 */
export type Rule = (
  a: RuleArgs,
) => {
  stream: Readable
} & Record<any, any>
@@ -22,7 +22,7 @@ import {directoryTree} from './utils/tree-utils'
describe('Dev command', () => {
  const rootFolder = resolve('')
  const buildFolder = resolve(rootFolder, '.blitz-build')
  const devFolder = resolve(rootFolder, '.blitz-rules')
  const devFolder = resolve(rootFolder, '.blitz-stages')

  beforeEach(async () => {
    mocks.mockFs({
@@ -55,7 +55,7 @@ describe('Dev command', () => {

  it('should copy the correct files to the dev folder', async () => {
    expect(directoryTree(devFolder)).toEqual({
      name: '.blitz-rules',
      name: '.blitz-stages',
      children: [
        {name: 'blitz.config.js'},
        {name: 'next.config.js'},
@@ -17,7 +17,7 @@ const mocks = multiMock(

// Import with mocks applied
import {dev} from '../src/dev'
import {Manifest} from '../src/synchronizer/pipeline/rules/manifest/index'
import {Manifest} from '../src/stages/manifest'
import {directoryTree} from './utils/tree-utils'

const originalLog = console.log
@@ -47,11 +47,11 @@ describe('Dev command', () => {
  })

  it('should blow up', (done) => {
    const mockSynchronizer = () => Promise.resolve({manifest: Manifest.create()})
    const transformFiles = () => Promise.resolve({manifest: Manifest.create()})
    ;(async () => {
      try {
        await dev({
          synchronizer: mockSynchronizer,
          transformFiles,
          rootFolder: '',
          writeManifestFile: false,
          watch: false,
@@ -22,7 +22,7 @@ import {directoryTree} from './utils/tree-utils'
describe('Dev command', () => {
  const rootFolder = resolve('')
  const buildFolder = resolve(rootFolder, '.blitz-build')
  const devFolder = resolve(rootFolder, '.blitz-rules')
  const devFolder = resolve(rootFolder, '.blitz-stages')

  beforeEach(async () => {
    mocks.mockFs({
@@ -47,7 +47,7 @@ describe('Dev command', () => {

  it('should copy the correct files to the dev folder', async () => {
    expect(directoryTree(devFolder)).toEqual({
      name: '.blitz-rules',
      name: '.blitz-stages',
      children: [
        {name: 'blitz.config.js'},
        {name: 'next.config.js'},
@@ -4,7 +4,6 @@ describe('withBlitz', () => {
  it('alters the webpack config as expected', () => {
    const nextConfigFn = withBlitz({})
    const newNext = nextConfigFn('', {defaultConfig: {}})
    // const newWebpack = newNext.webpack({module: {rules: []}}, {})
    const newNextWithoutWebpack = Object.assign({}, newNext, {webpack: null})

    expect(newNextWithoutWebpack).toStrictEqual({
18
yarn.lock
@@ -5047,7 +5047,7 @@ chokidar@3.3.1:
  optionalDependencies:
    fsevents "~2.1.2"

chokidar@^3.3.0, chokidar@^3.4.0:
chokidar@3.4.0, chokidar@^3.3.0, chokidar@^3.4.0:
  version "3.4.0"
  resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.4.0.tgz#b30611423ce376357c765b9b8f904b9fba3c0be8"
  integrity sha512-aXAaho2VJtisB/1fg1+3nlLJqGOuewTzQpd/Tz0yTg2R0e4IGtshYvtjowyEumcBv2z+y4+kc75Mz7j5xJskcQ==
@@ -13241,18 +13241,18 @@ public-encrypt@^4.0.0:
    randombytes "^2.0.1"
    safe-buffer "^5.1.2"

pump@^2.0.0:
  version "2.0.1"
  resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909"
  integrity sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==
pump@3.0.0, pump@^3.0.0:
  version "3.0.0"
  resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
  integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==
  dependencies:
    end-of-stream "^1.1.0"
    once "^1.3.1"

pump@^3.0.0:
  version "3.0.0"
  resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
  integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==
pump@^2.0.0:
  version "2.0.1"
  resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909"
  integrity sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==
  dependencies:
    end-of-stream "^1.1.0"
    once "^1.3.1"