482 changed files with 86994 additions and 74 deletions
-
51app/assets/css/input.css
-
204app/assets/css/output.css
-
49app/assets/stylesheets/application.css
-
1app/views/layouts/application.html.erb
-
26app/views/livres/index.html.erb
-
1node_modules/.bin/tailwindcss
-
391node_modules/.package-lock.json
-
21node_modules/@parcel/watcher-linux-x64-glibc/LICENSE
-
1node_modules/@parcel/watcher-linux-x64-glibc/README.md
-
33node_modules/@parcel/watcher-linux-x64-glibc/package.json
-
BINnode_modules/@parcel/watcher-linux-x64-glibc/watcher.node
-
21node_modules/@parcel/watcher-linux-x64-musl/LICENSE
-
1node_modules/@parcel/watcher-linux-x64-musl/README.md
-
33node_modules/@parcel/watcher-linux-x64-musl/package.json
-
BINnode_modules/@parcel/watcher-linux-x64-musl/watcher.node
-
21node_modules/@parcel/watcher/LICENSE
-
136node_modules/@parcel/watcher/README.md
-
109node_modules/@parcel/watcher/binding.gyp
-
49node_modules/@parcel/watcher/index.d.ts
-
42node_modules/@parcel/watcher/index.js
-
48node_modules/@parcel/watcher/index.js.flow
-
201node_modules/@parcel/watcher/node_modules/detect-libc/LICENSE
-
163node_modules/@parcel/watcher/node_modules/detect-libc/README.md
-
14node_modules/@parcel/watcher/node_modules/detect-libc/index.d.ts
-
313node_modules/@parcel/watcher/node_modules/detect-libc/lib/detect-libc.js
-
39node_modules/@parcel/watcher/node_modules/detect-libc/lib/elf.js
-
51node_modules/@parcel/watcher/node_modules/detect-libc/lib/filesystem.js
-
24node_modules/@parcel/watcher/node_modules/detect-libc/lib/process.js
-
44node_modules/@parcel/watcher/node_modules/detect-libc/package.json
-
21node_modules/@parcel/watcher/node_modules/picomatch/LICENSE
-
738node_modules/@parcel/watcher/node_modules/picomatch/README.md
-
17node_modules/@parcel/watcher/node_modules/picomatch/index.js
-
180node_modules/@parcel/watcher/node_modules/picomatch/lib/constants.js
-
1085node_modules/@parcel/watcher/node_modules/picomatch/lib/parse.js
-
341node_modules/@parcel/watcher/node_modules/picomatch/lib/picomatch.js
-
391node_modules/@parcel/watcher/node_modules/picomatch/lib/scan.js
-
72node_modules/@parcel/watcher/node_modules/picomatch/lib/utils.js
-
83node_modules/@parcel/watcher/node_modules/picomatch/package.json
-
3node_modules/@parcel/watcher/node_modules/picomatch/posix.js
-
88node_modules/@parcel/watcher/package.json
-
13node_modules/@parcel/watcher/scripts/build-from-source.js
-
186node_modules/@parcel/watcher/src/Backend.cc
-
37node_modules/@parcel/watcher/src/Backend.hh
-
113node_modules/@parcel/watcher/src/Debounce.cc
-
49node_modules/@parcel/watcher/src/Debounce.hh
-
164node_modules/@parcel/watcher/src/DirTree.cc
-
50node_modules/@parcel/watcher/src/DirTree.hh
-
109node_modules/@parcel/watcher/src/Event.hh
-
22node_modules/@parcel/watcher/src/Glob.cc
-
34node_modules/@parcel/watcher/src/Glob.hh
-
101node_modules/@parcel/watcher/src/PromiseRunner.hh
-
46node_modules/@parcel/watcher/src/Signal.hh
-
241node_modules/@parcel/watcher/src/Watcher.cc
-
73node_modules/@parcel/watcher/src/Watcher.hh
-
268node_modules/@parcel/watcher/src/binding.cc
-
306node_modules/@parcel/watcher/src/kqueue/KqueueBackend.cc
-
35node_modules/@parcel/watcher/src/kqueue/KqueueBackend.hh
-
236node_modules/@parcel/watcher/src/linux/InotifyBackend.cc
-
34node_modules/@parcel/watcher/src/linux/InotifyBackend.hh
-
338node_modules/@parcel/watcher/src/macos/FSEventsBackend.cc
-
20node_modules/@parcel/watcher/src/macos/FSEventsBackend.hh
-
41node_modules/@parcel/watcher/src/shared/BruteForceBackend.cc
-
25node_modules/@parcel/watcher/src/shared/BruteForceBackend.hh
-
50node_modules/@parcel/watcher/src/unix/fts.cc
-
77node_modules/@parcel/watcher/src/unix/legacy.cc
-
132node_modules/@parcel/watcher/src/wasm/WasmBackend.cc
-
34node_modules/@parcel/watcher/src/wasm/WasmBackend.hh
-
74node_modules/@parcel/watcher/src/wasm/include.h
-
302node_modules/@parcel/watcher/src/watchman/BSER.cc
-
69node_modules/@parcel/watcher/src/watchman/BSER.hh
-
175node_modules/@parcel/watcher/src/watchman/IPC.hh
-
342node_modules/@parcel/watcher/src/watchman/WatchmanBackend.cc
-
35node_modules/@parcel/watcher/src/watchman/WatchmanBackend.hh
-
282node_modules/@parcel/watcher/src/windows/WindowsBackend.cc
-
18node_modules/@parcel/watcher/src/windows/WindowsBackend.hh
-
44node_modules/@parcel/watcher/src/windows/win_utils.cc
-
11node_modules/@parcel/watcher/src/windows/win_utils.hh
-
74node_modules/@parcel/watcher/wrapper.js
-
21node_modules/@tailwindcss/cli/LICENSE
-
36node_modules/@tailwindcss/cli/README.md
-
9node_modules/@tailwindcss/cli/dist/index.mjs
-
40node_modules/@tailwindcss/cli/package.json
-
21node_modules/@tailwindcss/node/LICENSE
-
36node_modules/@tailwindcss/node/README.md
-
5node_modules/@tailwindcss/node/dist/esm-cache.loader.d.mts
-
1node_modules/@tailwindcss/node/dist/esm-cache.loader.mjs
-
255node_modules/@tailwindcss/node/dist/index.d.mts
-
255node_modules/@tailwindcss/node/dist/index.d.ts
-
18node_modules/@tailwindcss/node/dist/index.js
-
18node_modules/@tailwindcss/node/dist/index.mjs
-
3node_modules/@tailwindcss/node/dist/require-cache.d.ts
-
1node_modules/@tailwindcss/node/dist/require-cache.js
-
1node_modules/@tailwindcss/node/node_modules/.bin/jiti
-
19node_modules/@tailwindcss/node/node_modules/@jridgewell/gen-mapping/LICENSE
-
227node_modules/@tailwindcss/node/node_modules/@jridgewell/gen-mapping/README.md
-
292node_modules/@tailwindcss/node/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs
-
6node_modules/@tailwindcss/node/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs.map
-
358node_modules/@tailwindcss/node/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js
-
6node_modules/@tailwindcss/node/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js.map
-
88node_modules/@tailwindcss/node/node_modules/@jridgewell/gen-mapping/dist/types/gen-mapping.d.ts
@ -0,0 +1,51 @@ |
|||
@import "tailwindcss"; |
|||
|
|||
.table { |
|||
text-shadow: 1px 1px 1px rgba(0, 0, 0, 0.2); |
|||
display: flex; |
|||
flex-direction: column; |
|||
width: 40%; |
|||
border: 1px solid #e0e0e0; |
|||
border-radius: 6px; |
|||
margin-top: 30px; |
|||
background-color: #fafafa; |
|||
} |
|||
|
|||
.table-header, |
|||
.table-row { |
|||
display: flex; |
|||
padding: 12px; |
|||
border-bottom: 1px solid #eaeaea; |
|||
} |
|||
|
|||
.cell { |
|||
display: flex; |
|||
flex: 1; |
|||
padding: 8px 12px; |
|||
align-items: center; |
|||
} |
|||
|
|||
.table-header { |
|||
background-color: #f5f7fa; |
|||
font-weight: 600; |
|||
color: #a51010; |
|||
} |
|||
|
|||
.btn { |
|||
text-shadow: 1px 1px 1px rgba(0, 0, 0, 0.2); |
|||
display: inline-block; |
|||
background-color: #b7e1ff; |
|||
color: #0f0f0f; |
|||
padding: 1px 10px; |
|||
margin-right: 8px; |
|||
border: none; |
|||
border-radius: 6px; |
|||
text-decoration: none; |
|||
font-size: 14px; |
|||
cursor: pointer; |
|||
transition: background-color 0.3s ease; |
|||
} |
|||
|
|||
.btn:hover { |
|||
background-color: #f6fafe; |
|||
} |
|||
@ -0,0 +1,204 @@ |
|||
/*! tailwindcss v4.1.18 | MIT License | https://tailwindcss.com */ |
|||
@layer theme, base, components, utilities; |
|||
@layer theme { |
|||
:root, :host { |
|||
--font-sans: ui-sans-serif, system-ui, sans-serif, "Apple Color Emoji", |
|||
"Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"; |
|||
--font-mono: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", |
|||
"Courier New", monospace; |
|||
--default-font-family: var(--font-sans); |
|||
--default-mono-font-family: var(--font-mono); |
|||
} |
|||
} |
|||
@layer base { |
|||
*, ::after, ::before, ::backdrop, ::file-selector-button { |
|||
box-sizing: border-box; |
|||
margin: 0; |
|||
padding: 0; |
|||
border: 0 solid; |
|||
} |
|||
html, :host { |
|||
line-height: 1.5; |
|||
-webkit-text-size-adjust: 100%; |
|||
tab-size: 4; |
|||
font-family: var(--default-font-family, ui-sans-serif, system-ui, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"); |
|||
font-feature-settings: var(--default-font-feature-settings, normal); |
|||
font-variation-settings: var(--default-font-variation-settings, normal); |
|||
-webkit-tap-highlight-color: transparent; |
|||
} |
|||
hr { |
|||
height: 0; |
|||
color: inherit; |
|||
border-top-width: 1px; |
|||
} |
|||
abbr:where([title]) { |
|||
-webkit-text-decoration: underline dotted; |
|||
text-decoration: underline dotted; |
|||
} |
|||
h1, h2, h3, h4, h5, h6 { |
|||
font-size: inherit; |
|||
font-weight: inherit; |
|||
} |
|||
a { |
|||
color: inherit; |
|||
-webkit-text-decoration: inherit; |
|||
text-decoration: inherit; |
|||
} |
|||
b, strong { |
|||
font-weight: bolder; |
|||
} |
|||
code, kbd, samp, pre { |
|||
font-family: var(--default-mono-font-family, ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace); |
|||
font-feature-settings: var(--default-mono-font-feature-settings, normal); |
|||
font-variation-settings: var(--default-mono-font-variation-settings, normal); |
|||
font-size: 1em; |
|||
} |
|||
small { |
|||
font-size: 80%; |
|||
} |
|||
sub, sup { |
|||
font-size: 75%; |
|||
line-height: 0; |
|||
position: relative; |
|||
vertical-align: baseline; |
|||
} |
|||
sub { |
|||
bottom: -0.25em; |
|||
} |
|||
sup { |
|||
top: -0.5em; |
|||
} |
|||
table { |
|||
text-indent: 0; |
|||
border-color: inherit; |
|||
border-collapse: collapse; |
|||
} |
|||
:-moz-focusring { |
|||
outline: auto; |
|||
} |
|||
progress { |
|||
vertical-align: baseline; |
|||
} |
|||
summary { |
|||
display: list-item; |
|||
} |
|||
ol, ul, menu { |
|||
list-style: none; |
|||
} |
|||
img, svg, video, canvas, audio, iframe, embed, object { |
|||
display: block; |
|||
vertical-align: middle; |
|||
} |
|||
img, video { |
|||
max-width: 100%; |
|||
height: auto; |
|||
} |
|||
button, input, select, optgroup, textarea, ::file-selector-button { |
|||
font: inherit; |
|||
font-feature-settings: inherit; |
|||
font-variation-settings: inherit; |
|||
letter-spacing: inherit; |
|||
color: inherit; |
|||
border-radius: 0; |
|||
background-color: transparent; |
|||
opacity: 1; |
|||
} |
|||
:where(select:is([multiple], [size])) optgroup { |
|||
font-weight: bolder; |
|||
} |
|||
:where(select:is([multiple], [size])) optgroup option { |
|||
padding-inline-start: 20px; |
|||
} |
|||
::file-selector-button { |
|||
margin-inline-end: 4px; |
|||
} |
|||
::placeholder { |
|||
opacity: 1; |
|||
} |
|||
@supports (not (-webkit-appearance: -apple-pay-button)) or (contain-intrinsic-size: 1px) { |
|||
::placeholder { |
|||
color: currentcolor; |
|||
@supports (color: color-mix(in lab, red, red)) { |
|||
color: color-mix(in oklab, currentcolor 50%, transparent); |
|||
} |
|||
} |
|||
} |
|||
textarea { |
|||
resize: vertical; |
|||
} |
|||
::-webkit-search-decoration { |
|||
-webkit-appearance: none; |
|||
} |
|||
::-webkit-date-and-time-value { |
|||
min-height: 1lh; |
|||
text-align: inherit; |
|||
} |
|||
::-webkit-datetime-edit { |
|||
display: inline-flex; |
|||
} |
|||
::-webkit-datetime-edit-fields-wrapper { |
|||
padding: 0; |
|||
} |
|||
::-webkit-datetime-edit, ::-webkit-datetime-edit-year-field, ::-webkit-datetime-edit-month-field, ::-webkit-datetime-edit-day-field, ::-webkit-datetime-edit-hour-field, ::-webkit-datetime-edit-minute-field, ::-webkit-datetime-edit-second-field, ::-webkit-datetime-edit-millisecond-field, ::-webkit-datetime-edit-meridiem-field { |
|||
padding-block: 0; |
|||
} |
|||
::-webkit-calendar-picker-indicator { |
|||
line-height: 1; |
|||
} |
|||
:-moz-ui-invalid { |
|||
box-shadow: none; |
|||
} |
|||
button, input:where([type="button"], [type="reset"], [type="submit"]), ::file-selector-button { |
|||
appearance: button; |
|||
} |
|||
::-webkit-inner-spin-button, ::-webkit-outer-spin-button { |
|||
height: auto; |
|||
} |
|||
[hidden]:where(:not([hidden="until-found"])) { |
|||
display: none !important; |
|||
} |
|||
} |
|||
@layer utilities { |
|||
.visible { |
|||
visibility: visible; |
|||
} |
|||
.absolute { |
|||
position: absolute; |
|||
} |
|||
.fixed { |
|||
position: fixed; |
|||
} |
|||
.static { |
|||
position: static; |
|||
} |
|||
.container { |
|||
width: 100%; |
|||
@media (width >= 40rem) { |
|||
max-width: 40rem; |
|||
} |
|||
@media (width >= 48rem) { |
|||
max-width: 48rem; |
|||
} |
|||
@media (width >= 64rem) { |
|||
max-width: 64rem; |
|||
} |
|||
@media (width >= 80rem) { |
|||
max-width: 80rem; |
|||
} |
|||
@media (width >= 96rem) { |
|||
max-width: 96rem; |
|||
} |
|||
} |
|||
.contents { |
|||
display: contents; |
|||
} |
|||
.inline { |
|||
display: inline; |
|||
} |
|||
.table { |
|||
display: table; |
|||
} |
|||
.table-row { |
|||
display: table-row; |
|||
} |
|||
} |
|||
@ -1,49 +0,0 @@ |
|||
.table { |
|||
text-shadow: 1px 1px 1px rgba(0, 0, 0, 0.2); |
|||
display: flex; |
|||
flex-direction: column; |
|||
width: 40%; |
|||
border: 1px solid #e0e0e0; |
|||
border-radius: 6px; |
|||
margin-top: 30px; |
|||
background-color: #fafafa; |
|||
} |
|||
|
|||
.table-header, |
|||
.table-row { |
|||
display: flex; |
|||
padding: 12px; |
|||
border-bottom: 1px solid #eaeaea; |
|||
} |
|||
|
|||
.cell { |
|||
display: flex; |
|||
flex: 1; |
|||
padding: 8px 12px; |
|||
align-items: center; |
|||
} |
|||
|
|||
.table-header { |
|||
background-color: #f5f7fa; |
|||
font-weight: 600; |
|||
color: #a51010; |
|||
} |
|||
|
|||
.btn { |
|||
text-shadow: 1px 1px 1px rgba(0, 0, 0, 0.2); |
|||
display: inline-block; |
|||
background-color: #b7e1ff; |
|||
color: #0f0f0f; |
|||
padding: 1px 10px; |
|||
margin-right: 8px; |
|||
border: none; |
|||
border-radius: 6px; |
|||
text-decoration: none; |
|||
font-size: 14px; |
|||
cursor: pointer; |
|||
transition: background-color 0.3s ease; |
|||
} |
|||
|
|||
.btn:hover { |
|||
background-color: #f6fafe; |
|||
} |
|||
@ -0,0 +1 @@ |
|||
../@tailwindcss/cli/dist/index.mjs |
|||
@ -0,0 +1,391 @@ |
|||
{ |
|||
"name": "livres", |
|||
"version": "1.0.0", |
|||
"lockfileVersion": 3, |
|||
"requires": true, |
|||
"packages": { |
|||
"node_modules/@parcel/watcher": { |
|||
"version": "2.5.6", |
|||
"resolved": "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.5.6.tgz", |
|||
"integrity": "sha512-tmmZ3lQxAe/k/+rNnXQRawJ4NjxO2hqiOLTHvWchtGZULp4RyFeh6aU4XdOYBFe2KE1oShQTv4AblOs2iOrNnQ==", |
|||
"hasInstallScript": true, |
|||
"dependencies": { |
|||
"detect-libc": "^2.0.3", |
|||
"is-glob": "^4.0.3", |
|||
"node-addon-api": "^7.0.0", |
|||
"picomatch": "^4.0.3" |
|||
}, |
|||
"engines": { |
|||
"node": ">= 10.0.0" |
|||
}, |
|||
"funding": { |
|||
"type": "opencollective", |
|||
"url": "https://opencollective.com/parcel" |
|||
}, |
|||
"optionalDependencies": { |
|||
"@parcel/watcher-android-arm64": "2.5.6", |
|||
"@parcel/watcher-darwin-arm64": "2.5.6", |
|||
"@parcel/watcher-darwin-x64": "2.5.6", |
|||
"@parcel/watcher-freebsd-x64": "2.5.6", |
|||
"@parcel/watcher-linux-arm-glibc": "2.5.6", |
|||
"@parcel/watcher-linux-arm-musl": "2.5.6", |
|||
"@parcel/watcher-linux-arm64-glibc": "2.5.6", |
|||
"@parcel/watcher-linux-arm64-musl": "2.5.6", |
|||
"@parcel/watcher-linux-x64-glibc": "2.5.6", |
|||
"@parcel/watcher-linux-x64-musl": "2.5.6", |
|||
"@parcel/watcher-win32-arm64": "2.5.6", |
|||
"@parcel/watcher-win32-ia32": "2.5.6", |
|||
"@parcel/watcher-win32-x64": "2.5.6" |
|||
} |
|||
}, |
|||
"node_modules/@parcel/watcher-linux-x64-glibc": { |
|||
"version": "2.5.6", |
|||
"resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-glibc/-/watcher-linux-x64-glibc-2.5.6.tgz", |
|||
"integrity": "sha512-kbT5wvNQlx7NaGjzPFu8nVIW1rWqV780O7ZtkjuWaPUgpv2NMFpjYERVi0UYj1msZNyCzGlaCWEtzc+exjMGbQ==", |
|||
"cpu": [ |
|||
"x64" |
|||
], |
|||
"optional": true, |
|||
"os": [ |
|||
"linux" |
|||
], |
|||
"engines": { |
|||
"node": ">= 10.0.0" |
|||
}, |
|||
"funding": { |
|||
"type": "opencollective", |
|||
"url": "https://opencollective.com/parcel" |
|||
} |
|||
}, |
|||
"node_modules/@parcel/watcher-linux-x64-musl": { |
|||
"version": "2.5.6", |
|||
"resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-musl/-/watcher-linux-x64-musl-2.5.6.tgz", |
|||
"integrity": "sha512-1JRFeC+h7RdXwldHzTsmdtYR/Ku8SylLgTU/reMuqdVD7CtLwf0VR1FqeprZ0eHQkO0vqsbvFLXUmYm/uNKJBg==", |
|||
"cpu": [ |
|||
"x64" |
|||
], |
|||
"optional": true, |
|||
"os": [ |
|||
"linux" |
|||
], |
|||
"engines": { |
|||
"node": ">= 10.0.0" |
|||
}, |
|||
"funding": { |
|||
"type": "opencollective", |
|||
"url": "https://opencollective.com/parcel" |
|||
} |
|||
}, |
|||
"node_modules/@parcel/watcher/node_modules/detect-libc": { |
|||
"version": "2.1.2", |
|||
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", |
|||
"integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", |
|||
"engines": { |
|||
"node": ">=8" |
|||
} |
|||
}, |
|||
"node_modules/@parcel/watcher/node_modules/picomatch": { |
|||
"version": "4.0.3", |
|||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", |
|||
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", |
|||
"engines": { |
|||
"node": ">=12" |
|||
}, |
|||
"funding": { |
|||
"url": "https://github.com/sponsors/jonschlinkert" |
|||
} |
|||
}, |
|||
"node_modules/@tailwindcss/cli": { |
|||
"version": "4.1.18", |
|||
"resolved": "https://registry.npmjs.org/@tailwindcss/cli/-/cli-4.1.18.tgz", |
|||
"integrity": "sha512-sMZ+lZbDyxwjD2E0L7oRUjJ01Ffjtme5OtjvvnC+cV4CEDcbqzbp25TCpxHj6kWLU9+DlqJOiNgSOgctC2aZmg==", |
|||
"dependencies": { |
|||
"@parcel/watcher": "^2.5.1", |
|||
"@tailwindcss/node": "4.1.18", |
|||
"@tailwindcss/oxide": "4.1.18", |
|||
"enhanced-resolve": "^5.18.3", |
|||
"mri": "^1.2.0", |
|||
"picocolors": "^1.1.1", |
|||
"tailwindcss": "4.1.18" |
|||
}, |
|||
"bin": { |
|||
"tailwindcss": "dist/index.mjs" |
|||
} |
|||
}, |
|||
"node_modules/@tailwindcss/node": { |
|||
"version": "4.1.18", |
|||
"resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.18.tgz", |
|||
"integrity": "sha512-DoR7U1P7iYhw16qJ49fgXUlry1t4CpXeErJHnQ44JgTSKMaZUdf17cfn5mHchfJ4KRBZRFA/Coo+MUF5+gOaCQ==", |
|||
"dependencies": { |
|||
"@jridgewell/remapping": "^2.3.4", |
|||
"enhanced-resolve": "^5.18.3", |
|||
"jiti": "^2.6.1", |
|||
"lightningcss": "1.30.2", |
|||
"magic-string": "^0.30.21", |
|||
"source-map-js": "^1.2.1", |
|||
"tailwindcss": "4.1.18" |
|||
} |
|||
}, |
|||
"node_modules/@tailwindcss/node/node_modules/@jridgewell/gen-mapping": { |
|||
"version": "0.3.13", |
|||
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", |
|||
"integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", |
|||
"dependencies": { |
|||
"@jridgewell/sourcemap-codec": "^1.5.0", |
|||
"@jridgewell/trace-mapping": "^0.3.24" |
|||
} |
|||
}, |
|||
"node_modules/@tailwindcss/node/node_modules/@jridgewell/remapping": { |
|||
"version": "2.3.5", |
|||
"resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", |
|||
"integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", |
|||
"dependencies": { |
|||
"@jridgewell/gen-mapping": "^0.3.5", |
|||
"@jridgewell/trace-mapping": "^0.3.24" |
|||
} |
|||
}, |
|||
"node_modules/@tailwindcss/node/node_modules/@jridgewell/resolve-uri": { |
|||
"version": "3.1.2", |
|||
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", |
|||
"integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", |
|||
"engines": { |
|||
"node": ">=6.0.0" |
|||
} |
|||
}, |
|||
"node_modules/@tailwindcss/node/node_modules/@jridgewell/sourcemap-codec": { |
|||
"version": "1.5.5", |
|||
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", |
|||
"integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==" |
|||
}, |
|||
"node_modules/@tailwindcss/node/node_modules/@jridgewell/trace-mapping": { |
|||
"version": "0.3.31", |
|||
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", |
|||
"integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", |
|||
"dependencies": { |
|||
"@jridgewell/resolve-uri": "^3.1.0", |
|||
"@jridgewell/sourcemap-codec": "^1.4.14" |
|||
} |
|||
}, |
|||
"node_modules/@tailwindcss/node/node_modules/detect-libc": { |
|||
"version": "2.1.2", |
|||
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", |
|||
"integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", |
|||
"engines": { |
|||
"node": ">=8" |
|||
} |
|||
}, |
|||
"node_modules/@tailwindcss/node/node_modules/jiti": { |
|||
"version": "2.6.1", |
|||
"resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", |
|||
"integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", |
|||
"bin": { |
|||
"jiti": "lib/jiti-cli.mjs" |
|||
} |
|||
}, |
|||
"node_modules/@tailwindcss/node/node_modules/lightningcss": { |
|||
"version": "1.30.2", |
|||
"resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.2.tgz", |
|||
"integrity": "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ==", |
|||
"dependencies": { |
|||
"detect-libc": "^2.0.3" |
|||
}, |
|||
"engines": { |
|||
"node": ">= 12.0.0" |
|||
}, |
|||
"funding": { |
|||
"type": "opencollective", |
|||
"url": "https://opencollective.com/parcel" |
|||
}, |
|||
"optionalDependencies": { |
|||
"lightningcss-android-arm64": "1.30.2", |
|||
"lightningcss-darwin-arm64": "1.30.2", |
|||
"lightningcss-darwin-x64": "1.30.2", |
|||
"lightningcss-freebsd-x64": "1.30.2", |
|||
"lightningcss-linux-arm-gnueabihf": "1.30.2", |
|||
"lightningcss-linux-arm64-gnu": "1.30.2", |
|||
"lightningcss-linux-arm64-musl": "1.30.2", |
|||
"lightningcss-linux-x64-gnu": "1.30.2", |
|||
"lightningcss-linux-x64-musl": "1.30.2", |
|||
"lightningcss-win32-arm64-msvc": "1.30.2", |
|||
"lightningcss-win32-x64-msvc": "1.30.2" |
|||
} |
|||
}, |
|||
"node_modules/@tailwindcss/node/node_modules/lightningcss-linux-x64-gnu": { |
|||
"version": "1.30.2", |
|||
"resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.2.tgz", |
|||
"integrity": "sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w==", |
|||
"cpu": [ |
|||
"x64" |
|||
], |
|||
"optional": true, |
|||
"os": [ |
|||
"linux" |
|||
], |
|||
"engines": { |
|||
"node": ">= 12.0.0" |
|||
}, |
|||
"funding": { |
|||
"type": "opencollective", |
|||
"url": "https://opencollective.com/parcel" |
|||
} |
|||
}, |
|||
"node_modules/@tailwindcss/node/node_modules/lightningcss-linux-x64-musl": { |
|||
"version": "1.30.2", |
|||
"resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.2.tgz", |
|||
"integrity": "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA==", |
|||
"cpu": [ |
|||
"x64" |
|||
], |
|||
"optional": true, |
|||
"os": [ |
|||
"linux" |
|||
], |
|||
"engines": { |
|||
"node": ">= 12.0.0" |
|||
}, |
|||
"funding": { |
|||
"type": "opencollective", |
|||
"url": "https://opencollective.com/parcel" |
|||
} |
|||
}, |
|||
"node_modules/@tailwindcss/node/node_modules/magic-string": { |
|||
"version": "0.30.21", |
|||
"resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", |
|||
"integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", |
|||
"dependencies": { |
|||
"@jridgewell/sourcemap-codec": "^1.5.5" |
|||
} |
|||
}, |
|||
"node_modules/@tailwindcss/node/node_modules/source-map-js": { |
|||
"version": "1.2.1", |
|||
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", |
|||
"integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", |
|||
"engines": { |
|||
"node": ">=0.10.0" |
|||
} |
|||
}, |
|||
"node_modules/@tailwindcss/oxide": { |
|||
"version": "4.1.18", |
|||
"resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.18.tgz", |
|||
"integrity": "sha512-EgCR5tTS5bUSKQgzeMClT6iCY3ToqE1y+ZB0AKldj809QXk1Y+3jB0upOYZrn9aGIzPtUsP7sX4QQ4XtjBB95A==", |
|||
"engines": { |
|||
"node": ">= 10" |
|||
}, |
|||
"optionalDependencies": { |
|||
"@tailwindcss/oxide-android-arm64": "4.1.18", |
|||
"@tailwindcss/oxide-darwin-arm64": "4.1.18", |
|||
"@tailwindcss/oxide-darwin-x64": "4.1.18", |
|||
"@tailwindcss/oxide-freebsd-x64": "4.1.18", |
|||
"@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.18", |
|||
"@tailwindcss/oxide-linux-arm64-gnu": "4.1.18", |
|||
"@tailwindcss/oxide-linux-arm64-musl": "4.1.18", |
|||
"@tailwindcss/oxide-linux-x64-gnu": "4.1.18", |
|||
"@tailwindcss/oxide-linux-x64-musl": "4.1.18", |
|||
"@tailwindcss/oxide-wasm32-wasi": "4.1.18", |
|||
"@tailwindcss/oxide-win32-arm64-msvc": "4.1.18", |
|||
"@tailwindcss/oxide-win32-x64-msvc": "4.1.18" |
|||
} |
|||
}, |
|||
"node_modules/@tailwindcss/oxide/node_modules/@tailwindcss/oxide-linux-x64-gnu": { |
|||
"version": "4.1.18", |
|||
"resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.1.18.tgz", |
|||
"integrity": "sha512-v3gyT0ivkfBLoZGF9LyHmts0Isc8jHZyVcbzio6Wpzifg/+5ZJpDiRiUhDLkcr7f/r38SWNe7ucxmGW3j3Kb/g==", |
|||
"cpu": [ |
|||
"x64" |
|||
], |
|||
"optional": true, |
|||
"os": [ |
|||
"linux" |
|||
], |
|||
"engines": { |
|||
"node": ">= 10" |
|||
} |
|||
}, |
|||
"node_modules/@tailwindcss/oxide/node_modules/@tailwindcss/oxide-linux-x64-musl": { |
|||
"version": "4.1.18", |
|||
"resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.1.18.tgz", |
|||
"integrity": "sha512-bhJ2y2OQNlcRwwgOAGMY0xTFStt4/wyU6pvI6LSuZpRgKQwxTec0/3Scu91O8ir7qCR3AuepQKLU/kX99FouqQ==", |
|||
"cpu": [ |
|||
"x64" |
|||
], |
|||
"optional": true, |
|||
"os": [ |
|||
"linux" |
|||
], |
|||
"engines": { |
|||
"node": ">= 10" |
|||
} |
|||
}, |
|||
"node_modules/enhanced-resolve": { |
|||
"version": "5.18.4", |
|||
"resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.4.tgz", |
|||
"integrity": "sha512-LgQMM4WXU3QI+SYgEc2liRgznaD5ojbmY3sb8LxyguVkIg5FxdpTkvk72te2R38/TGKxH634oLxXRGY6d7AP+Q==", |
|||
"dependencies": { |
|||
"graceful-fs": "^4.2.4", |
|||
"tapable": "^2.2.0" |
|||
}, |
|||
"engines": { |
|||
"node": ">=10.13.0" |
|||
} |
|||
}, |
|||
"node_modules/enhanced-resolve/node_modules/graceful-fs": { |
|||
"version": "4.2.11", |
|||
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", |
|||
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" |
|||
}, |
|||
"node_modules/enhanced-resolve/node_modules/tapable": { |
|||
"version": "2.3.0", |
|||
"resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz", |
|||
"integrity": "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==", |
|||
"engines": { |
|||
"node": ">=6" |
|||
}, |
|||
"funding": { |
|||
"type": "opencollective", |
|||
"url": "https://opencollective.com/webpack" |
|||
} |
|||
}, |
|||
"node_modules/is-extglob": { |
|||
"version": "2.1.1", |
|||
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", |
|||
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", |
|||
"engines": { |
|||
"node": ">=0.10.0" |
|||
} |
|||
}, |
|||
"node_modules/is-glob": { |
|||
"version": "4.0.3", |
|||
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", |
|||
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", |
|||
"dependencies": { |
|||
"is-extglob": "^2.1.1" |
|||
}, |
|||
"engines": { |
|||
"node": ">=0.10.0" |
|||
} |
|||
}, |
|||
"node_modules/mri": { |
|||
"version": "1.2.0", |
|||
"resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", |
|||
"integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==", |
|||
"engines": { |
|||
"node": ">=4" |
|||
} |
|||
}, |
|||
"node_modules/node-addon-api": { |
|||
"version": "7.1.1", |
|||
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz", |
|||
"integrity": "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==" |
|||
}, |
|||
"node_modules/picocolors": { |
|||
"version": "1.1.1", |
|||
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", |
|||
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==" |
|||
}, |
|||
"node_modules/tailwindcss": { |
|||
"version": "4.1.18", |
|||
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.18.tgz", |
|||
"integrity": "sha512-4+Z+0yiYyEtUVCScyfHCxOYP06L5Ne+JiHhY2IjR2KWMIWhJOYZKLSGZaP5HkZ8+bY0cxfzwDE5uOmzFXyIwxw==" |
|||
} |
|||
} |
|||
} |
|||
@ -0,0 +1,21 @@ |
|||
MIT License |
|||
|
|||
Copyright (c) 2017-present Devon Govett |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to deal |
|||
in the Software without restriction, including without limitation the rights |
|||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in all |
|||
copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|||
SOFTWARE. |
|||
@ -0,0 +1 @@ |
|||
This is the linux-x64-glibc build of @parcel/watcher. See https://github.com/parcel-bundler/watcher for details. |
|||
@ -0,0 +1,33 @@ |
|||
{ |
|||
"name": "@parcel/watcher-linux-x64-glibc", |
|||
"version": "2.5.6", |
|||
"main": "watcher.node", |
|||
"repository": { |
|||
"type": "git", |
|||
"url": "https://github.com/parcel-bundler/watcher.git" |
|||
}, |
|||
"description": "A native C++ Node module for querying and subscribing to filesystem events. Used by Parcel 2.", |
|||
"license": "MIT", |
|||
"publishConfig": { |
|||
"access": "public" |
|||
}, |
|||
"funding": { |
|||
"type": "opencollective", |
|||
"url": "https://opencollective.com/parcel" |
|||
}, |
|||
"files": [ |
|||
"watcher.node" |
|||
], |
|||
"engines": { |
|||
"node": ">= 10.0.0" |
|||
}, |
|||
"os": [ |
|||
"linux" |
|||
], |
|||
"cpu": [ |
|||
"x64" |
|||
], |
|||
"libc": [ |
|||
"glibc" |
|||
] |
|||
} |
|||
@ -0,0 +1,21 @@ |
|||
MIT License |
|||
|
|||
Copyright (c) 2017-present Devon Govett |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to deal |
|||
in the Software without restriction, including without limitation the rights |
|||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in all |
|||
copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|||
SOFTWARE. |
|||
@ -0,0 +1 @@ |
|||
This is the linux-x64-musl build of @parcel/watcher. See https://github.com/parcel-bundler/watcher for details. |
|||
@ -0,0 +1,33 @@ |
|||
{ |
|||
"name": "@parcel/watcher-linux-x64-musl", |
|||
"version": "2.5.6", |
|||
"main": "watcher.node", |
|||
"repository": { |
|||
"type": "git", |
|||
"url": "https://github.com/parcel-bundler/watcher.git" |
|||
}, |
|||
"description": "A native C++ Node module for querying and subscribing to filesystem events. Used by Parcel 2.", |
|||
"license": "MIT", |
|||
"publishConfig": { |
|||
"access": "public" |
|||
}, |
|||
"funding": { |
|||
"type": "opencollective", |
|||
"url": "https://opencollective.com/parcel" |
|||
}, |
|||
"files": [ |
|||
"watcher.node" |
|||
], |
|||
"engines": { |
|||
"node": ">= 10.0.0" |
|||
}, |
|||
"os": [ |
|||
"linux" |
|||
], |
|||
"cpu": [ |
|||
"x64" |
|||
], |
|||
"libc": [ |
|||
"musl" |
|||
] |
|||
} |
|||
@ -0,0 +1,21 @@ |
|||
MIT License |
|||
|
|||
Copyright (c) 2017-present Devon Govett |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to deal |
|||
in the Software without restriction, including without limitation the rights |
|||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in all |
|||
copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|||
SOFTWARE. |
|||
@ -0,0 +1,136 @@ |
|||
# @parcel/watcher |
|||
|
|||
A native C++ Node module for querying and subscribing to filesystem events. Used by [Parcel 2](https://github.com/parcel-bundler/parcel). |
|||
|
|||
## Features |
|||
|
|||
- **Watch** - subscribe to realtime recursive directory change notifications when files or directories are created, updated, or deleted. |
|||
- **Query** - performantly query for historical change events in a directory, even when your program is not running. |
|||
- **Native** - implemented in C++ for performance and low-level integration with the operating system. |
|||
- **Cross platform** - includes backends for macOS, Linux, Windows, FreeBSD, and Watchman. |
|||
- **Performant** - events are throttled in C++ so the JavaScript thread is not overwhelmed during large filesystem changes (e.g. `git checkout` or `npm install`). |
|||
- **Scalable** - tens of thousands of files can be watched or queried at once with good performance. |
|||
|
|||
## Example |
|||
|
|||
```javascript |
|||
const watcher = require('@parcel/watcher'); |
|||
const path = require('path'); |
|||
|
|||
// Subscribe to events |
|||
let subscription = await watcher.subscribe(process.cwd(), (err, events) => { |
|||
console.log(events); |
|||
}); |
|||
|
|||
// later on... |
|||
await subscription.unsubscribe(); |
|||
|
|||
// Get events since some saved snapshot in the past |
|||
let snapshotPath = path.join(process.cwd(), 'snapshot.txt'); |
|||
let events = await watcher.getEventsSince(process.cwd(), snapshotPath); |
|||
|
|||
// Save a snapshot for later |
|||
await watcher.writeSnapshot(process.cwd(), snapshotPath); |
|||
``` |
|||
|
|||
## Watching |
|||
|
|||
`@parcel/watcher` supports subscribing to realtime notifications of changes in a directory. It works recursively, so changes in sub-directories will also be emitted. |
|||
|
|||
Events are throttled and coalesced for performance during large changes like `git checkout` or `npm install`, and a single notification will be emitted with all of the events at the end. |
|||
|
|||
Only one notification will be emitted per file. For example, if a file was both created and updated since the last event, you'll get only a `create` event. If a file is both created and deleted, you will not be notified of that file. Renames cause two events: a `delete` for the old name, and a `create` for the new name. |
|||
|
|||
```javascript |
|||
let subscription = await watcher.subscribe(process.cwd(), (err, events) => { |
|||
console.log(events); |
|||
}); |
|||
``` |
|||
|
|||
Events have two properties: |
|||
|
|||
- `type` - the event type: `create`, `update`, or `delete`. |
|||
- `path` - the absolute path to the file or directory. |
|||
|
|||
To unsubscribe from change notifications, call the `unsubscribe` method on the returned subscription object. |
|||
|
|||
```javascript |
|||
await subscription.unsubscribe(); |
|||
``` |
|||
|
|||
`@parcel/watcher` has the following watcher backends, listed in priority order: |
|||
|
|||
- [FSEvents](https://developer.apple.com/documentation/coreservices/file_system_events) on macOS |
|||
- [Watchman](https://facebook.github.io/watchman/) if installed |
|||
- [inotify](http://man7.org/linux/man-pages/man7/inotify.7.html) on Linux |
|||
- [ReadDirectoryChangesW](https://msdn.microsoft.com/en-us/library/windows/desktop/aa365465%28v%3Dvs.85%29.aspx) on Windows |
|||
- [kqueue](https://man.freebsd.org/cgi/man.cgi?kqueue) on FreeBSD, or as an alternative to FSEvents on macOS |
|||
|
|||
You can specify the exact backend you wish to use by passing the `backend` option. If that backend is not available on the current platform, the default backend will be used instead. See below for the list of backend names that can be passed to the options. |
|||
|
|||
## Querying |
|||
|
|||
`@parcel/watcher` also supports querying for historical changes made in a directory, even when your program is not running. This makes it easy to invalidate a cache and re-build only the files that have changed, for example. It can be **significantly** faster than traversing the entire filesystem to determine what files changed, depending on the platform. |
|||
|
|||
In order to query for historical changes, you first need a previous snapshot to compare to. This can be saved to a file with the `writeSnapshot` function, e.g. just before your program exits. |
|||
|
|||
```javascript |
|||
await watcher.writeSnapshot(dirPath, snapshotPath); |
|||
``` |
|||
|
|||
When your program starts up, you can query for changes that have occurred since that snapshot using the `getEventsSince` function. |
|||
|
|||
```javascript |
|||
let events = await watcher.getEventsSince(dirPath, snapshotPath); |
|||
``` |
|||
|
|||
The events returned are exactly the same as the events that would be passed to the `subscribe` callback (see above). |
|||
|
|||
`@parcel/watcher` has the following watcher backends, listed in priority order: |
|||
|
|||
- [FSEvents](https://developer.apple.com/documentation/coreservices/file_system_events) on macOS |
|||
- [Watchman](https://facebook.github.io/watchman/) if installed |
|||
- [fts](http://man7.org/linux/man-pages/man3/fts.3.html) (brute force) on Linux and FreeBSD |
|||
- [FindFirstFile](https://docs.microsoft.com/en-us/windows/desktop/api/fileapi/nf-fileapi-findfirstfilea) (brute force) on Windows |
|||
|
|||
The FSEvents (macOS) and Watchman backends are significantly more performant than the brute force backends used by default on Linux and Windows, for example returning results in milliseconds instead of seconds for large directory trees. This is because a background daemon monitoring filesystem changes on those platforms allows us to query cached data rather than traversing the filesystem manually (brute force). |
|||
|
|||
macOS has good performance with FSEvents by default. For the best performance on other platforms, install [Watchman](https://facebook.github.io/watchman/) and it will be used by `@parcel/watcher` automatically. |
|||
|
|||
You can specify the exact backend you wish to use by passing the `backend` option. If that backend is not available on the current platform, the default backend will be used instead. See below for the list of backend names that can be passed to the options. |
|||
|
|||
## Options |
|||
|
|||
All of the APIs in `@parcel/watcher` support the following options, which are passed as an object as the last function argument. |
|||
|
|||
- `ignore` - an array of paths or glob patterns to ignore. uses [`is-glob`](https://github.com/micromatch/is-glob) to distinguish paths from globs. glob patterns are parsed with [`picomatch`](https://github.com/micromatch/picomatch) (see [features](https://github.com/micromatch/picomatch#globbing-features)). |
|||
- paths can be relative or absolute and can either be files or directories. No events will be emitted about these files or directories or their children. |
|||
- glob patterns match on relative paths from the root that is watched. No events will be emitted for matching paths. |
|||
- `backend` - the name of an explicitly chosen backend to use. Allowed options are `"fs-events"`, `"watchman"`, `"inotify"`, `"kqueue"`, `"windows"`, or `"brute-force"` (only for querying). If the specified backend is not available on the current platform, the default backend will be used instead. |
|||
|
|||
## WASM |
|||
|
|||
The `@parcel/watcher-wasm` package can be used in place of `@parcel/watcher` on unsupported platforms. It relies on the Node `fs` module, so in non-Node environments such as browsers, an `fs` polyfill will be needed. |
|||
|
|||
**Note**: the WASM implementation is significantly less efficient than the native implementations because it must crawl the file system to watch each directory individually. Use the native `@parcel/watcher` package wherever possible. |
|||
|
|||
```js |
|||
import {subscribe} from '@parcel/watcher-wasm'; |
|||
|
|||
// Use the module as documented above. |
|||
subscribe(/* ... */); |
|||
``` |
|||
|
|||
## Who is using this? |
|||
|
|||
- [Parcel 2](https://parceljs.org/) |
|||
- [VSCode](https://code.visualstudio.com/updates/v1_62#_file-watching-changes) |
|||
- [Tailwind CSS Intellisense](https://github.com/tailwindlabs/tailwindcss-intellisense) |
|||
- [Gatsby Cloud](https://twitter.com/chatsidhartha/status/1435647412828196867) |
|||
- [Nx](https://nx.dev) |
|||
- [Nuxt](https://nuxt.com) |
|||
- [Meteor](https://github.com/meteor/meteor) |
|||
|
|||
## License |
|||
|
|||
MIT |
|||
@ -0,0 +1,109 @@ |
|||
{ |
|||
"targets": [ |
|||
{ |
|||
"target_name": "watcher", |
|||
"defines": [ "NAPI_DISABLE_CPP_EXCEPTIONS" ], |
|||
"sources": [ "src/binding.cc", "src/Watcher.cc", "src/Backend.cc", "src/DirTree.cc", "src/Glob.cc", "src/Debounce.cc" ], |
|||
"include_dirs" : ["<!(node -p \"require('node-addon-api').include_dir\")"], |
|||
'cflags!': [ '-fno-exceptions', '-std=c++17' ], |
|||
'cflags_cc!': [ '-fno-exceptions', '-std=c++17' ], |
|||
'cflags': [ '-fstack-protector-strong' ], |
|||
"conditions": [ |
|||
['OS=="mac"', { |
|||
"sources": [ |
|||
"src/watchman/BSER.cc", |
|||
"src/watchman/WatchmanBackend.cc", |
|||
"src/shared/BruteForceBackend.cc", |
|||
"src/unix/fts.cc", |
|||
"src/macos/FSEventsBackend.cc", |
|||
"src/kqueue/KqueueBackend.cc" |
|||
], |
|||
"link_settings": { |
|||
"libraries": ["CoreServices.framework"] |
|||
}, |
|||
"defines": [ |
|||
"WATCHMAN", |
|||
"BRUTE_FORCE", |
|||
"FS_EVENTS", |
|||
"KQUEUE" |
|||
], |
|||
"xcode_settings": { |
|||
"GCC_ENABLE_CPP_EXCEPTIONS": "YES" |
|||
} |
|||
}], |
|||
['OS=="mac" and target_arch=="arm64"', { |
|||
"xcode_settings": { |
|||
"ARCHS": ["arm64"] |
|||
} |
|||
}], |
|||
['OS=="linux" or OS=="android"', { |
|||
"sources": [ |
|||
"src/watchman/BSER.cc", |
|||
"src/watchman/WatchmanBackend.cc", |
|||
"src/shared/BruteForceBackend.cc", |
|||
"src/linux/InotifyBackend.cc", |
|||
"src/unix/legacy.cc" |
|||
], |
|||
"defines": [ |
|||
"WATCHMAN", |
|||
"INOTIFY", |
|||
"BRUTE_FORCE" |
|||
] |
|||
}], |
|||
['OS=="win"', { |
|||
"sources": [ |
|||
"src/watchman/BSER.cc", |
|||
"src/watchman/WatchmanBackend.cc", |
|||
"src/shared/BruteForceBackend.cc", |
|||
"src/windows/WindowsBackend.cc", |
|||
"src/windows/win_utils.cc" |
|||
], |
|||
"defines": [ |
|||
"WATCHMAN", |
|||
"WINDOWS", |
|||
"BRUTE_FORCE" |
|||
], |
|||
"msvs_settings": { |
|||
"VCCLCompilerTool": { |
|||
"ExceptionHandling": 1, # /EHsc |
|||
"AdditionalOptions": [ |
|||
"-std:c++17", |
|||
"/guard:cf", |
|||
"/W3", |
|||
"/we4146", |
|||
"/w34244", |
|||
"/we4267", |
|||
"/sdl", |
|||
"/ZH:SHA_256" |
|||
] |
|||
}, |
|||
"VCLinkerTool": { |
|||
"AdditionalOptions": [ |
|||
"/DYNAMICBASE", |
|||
"/guard:cf" |
|||
] |
|||
} |
|||
} |
|||
}], |
|||
['OS=="freebsd"', { |
|||
"sources": [ |
|||
"src/watchman/BSER.cc", |
|||
"src/watchman/WatchmanBackend.cc", |
|||
"src/shared/BruteForceBackend.cc", |
|||
"src/unix/fts.cc", |
|||
"src/kqueue/KqueueBackend.cc" |
|||
], |
|||
"defines": [ |
|||
"WATCHMAN", |
|||
"BRUTE_FORCE", |
|||
"KQUEUE" |
|||
] |
|||
}] |
|||
] |
|||
} |
|||
], |
|||
"variables": { |
|||
"openssl_fips": "", |
|||
"node_use_dtrace": "false" |
|||
} |
|||
} |
|||
@ -0,0 +1,49 @@ |
|||
declare type FilePath = string; |
|||
declare type GlobPattern = string; |
|||
|
|||
declare namespace ParcelWatcher { |
|||
export type BackendType = |
|||
| 'fs-events' |
|||
| 'watchman' |
|||
| 'inotify' |
|||
| 'windows' |
|||
| 'brute-force'; |
|||
export type EventType = 'create' | 'update' | 'delete'; |
|||
export interface Options { |
|||
ignore?: (FilePath|GlobPattern)[]; |
|||
backend?: BackendType; |
|||
} |
|||
export type SubscribeCallback = ( |
|||
err: Error | null, |
|||
events: Event[] |
|||
) => unknown; |
|||
export interface AsyncSubscription { |
|||
unsubscribe(): Promise<void>; |
|||
} |
|||
export interface Event { |
|||
path: FilePath; |
|||
type: EventType; |
|||
} |
|||
export function getEventsSince( |
|||
dir: FilePath, |
|||
snapshot: FilePath, |
|||
opts?: Options |
|||
): Promise<Event[]>; |
|||
export function subscribe( |
|||
dir: FilePath, |
|||
fn: SubscribeCallback, |
|||
opts?: Options |
|||
): Promise<AsyncSubscription>; |
|||
export function unsubscribe( |
|||
dir: FilePath, |
|||
fn: SubscribeCallback, |
|||
opts?: Options |
|||
): Promise<void>; |
|||
export function writeSnapshot( |
|||
dir: FilePath, |
|||
snapshot: FilePath, |
|||
opts?: Options |
|||
): Promise<FilePath>; |
|||
} |
|||
|
|||
export = ParcelWatcher; |
|||
@ -0,0 +1,42 @@ |
|||
const {createWrapper} = require('./wrapper'); |
|||
|
|||
let name = `@parcel/watcher-${process.platform}-${process.arch}`; |
|||
if (process.platform === 'linux') { |
|||
const { MUSL, familySync } = require('detect-libc'); |
|||
const family = familySync(); |
|||
if (family === MUSL) { |
|||
name += '-musl'; |
|||
} else { |
|||
name += '-glibc'; |
|||
} |
|||
} |
|||
|
|||
let binding; |
|||
try { |
|||
binding = require(name); |
|||
} catch (err) { |
|||
handleError(err); |
|||
try { |
|||
binding = require('./build/Release/watcher.node'); |
|||
} catch (err) { |
|||
handleError(err); |
|||
try { |
|||
binding = require('./build/Debug/watcher.node'); |
|||
} catch (err) { |
|||
handleError(err); |
|||
throw new Error(`No prebuild or local build of @parcel/watcher found. Tried ${name}. Please ensure it is installed (don't use --no-optional when installing with npm). Otherwise it is possible we don't support your platform yet. If this is the case, please report an issue to https://github.com/parcel-bundler/watcher.`); |
|||
} |
|||
} |
|||
} |
|||
|
|||
function handleError(err) { |
|||
if (err?.code !== 'MODULE_NOT_FOUND') { |
|||
throw err; |
|||
} |
|||
} |
|||
|
|||
const wrapper = createWrapper(binding); |
|||
exports.writeSnapshot = wrapper.writeSnapshot; |
|||
exports.getEventsSince = wrapper.getEventsSince; |
|||
exports.subscribe = wrapper.subscribe; |
|||
exports.unsubscribe = wrapper.unsubscribe; |
|||
@ -0,0 +1,48 @@ |
|||
// @flow |
|||
declare type FilePath = string; |
|||
declare type GlobPattern = string; |
|||
|
|||
export type BackendType = |
|||
| 'fs-events' |
|||
| 'watchman' |
|||
| 'inotify' |
|||
| 'windows' |
|||
| 'brute-force'; |
|||
export type EventType = 'create' | 'update' | 'delete'; |
|||
export interface Options { |
|||
ignore?: Array<FilePath | GlobPattern>, |
|||
backend?: BackendType |
|||
} |
|||
export type SubscribeCallback = ( |
|||
err: ?Error, |
|||
events: Array<Event> |
|||
) => mixed; |
|||
export interface AsyncSubscription { |
|||
unsubscribe(): Promise<void> |
|||
} |
|||
export interface Event { |
|||
path: FilePath, |
|||
type: EventType |
|||
} |
|||
declare module.exports: { |
|||
getEventsSince( |
|||
dir: FilePath, |
|||
snapshot: FilePath, |
|||
opts?: Options |
|||
): Promise<Array<Event>>, |
|||
subscribe( |
|||
dir: FilePath, |
|||
fn: SubscribeCallback, |
|||
opts?: Options |
|||
): Promise<AsyncSubscription>, |
|||
unsubscribe( |
|||
dir: FilePath, |
|||
fn: SubscribeCallback, |
|||
opts?: Options |
|||
): Promise<void>, |
|||
writeSnapshot( |
|||
dir: FilePath, |
|||
snapshot: FilePath, |
|||
opts?: Options |
|||
): Promise<FilePath> |
|||
} |
|||
@ -0,0 +1,201 @@ |
|||
Apache License |
|||
Version 2.0, January 2004 |
|||
http://www.apache.org/licenses/ |
|||
|
|||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION |
|||
|
|||
1. Definitions. |
|||
|
|||
"License" shall mean the terms and conditions for use, reproduction, |
|||
and distribution as defined by Sections 1 through 9 of this document. |
|||
|
|||
"Licensor" shall mean the copyright owner or entity authorized by |
|||
the copyright owner that is granting the License. |
|||
|
|||
"Legal Entity" shall mean the union of the acting entity and all |
|||
other entities that control, are controlled by, or are under common |
|||
control with that entity. For the purposes of this definition, |
|||
"control" means (i) the power, direct or indirect, to cause the |
|||
direction or management of such entity, whether by contract or |
|||
otherwise, or (ii) ownership of fifty percent (50%) or more of the |
|||
outstanding shares, or (iii) beneficial ownership of such entity. |
|||
|
|||
"You" (or "Your") shall mean an individual or Legal Entity |
|||
exercising permissions granted by this License. |
|||
|
|||
"Source" form shall mean the preferred form for making modifications, |
|||
including but not limited to software source code, documentation |
|||
source, and configuration files. |
|||
|
|||
"Object" form shall mean any form resulting from mechanical |
|||
transformation or translation of a Source form, including but |
|||
not limited to compiled object code, generated documentation, |
|||
and conversions to other media types. |
|||
|
|||
"Work" shall mean the work of authorship, whether in Source or |
|||
Object form, made available under the License, as indicated by a |
|||
copyright notice that is included in or attached to the work |
|||
(an example is provided in the Appendix below). |
|||
|
|||
"Derivative Works" shall mean any work, whether in Source or Object |
|||
form, that is based on (or derived from) the Work and for which the |
|||
editorial revisions, annotations, elaborations, or other modifications |
|||
represent, as a whole, an original work of authorship. For the purposes |
|||
of this License, Derivative Works shall not include works that remain |
|||
separable from, or merely link (or bind by name) to the interfaces of, |
|||
the Work and Derivative Works thereof. |
|||
|
|||
"Contribution" shall mean any work of authorship, including |
|||
the original version of the Work and any modifications or additions |
|||
to that Work or Derivative Works thereof, that is intentionally |
|||
submitted to Licensor for inclusion in the Work by the copyright owner |
|||
or by an individual or Legal Entity authorized to submit on behalf of |
|||
the copyright owner. For the purposes of this definition, "submitted" |
|||
means any form of electronic, verbal, or written communication sent |
|||
to the Licensor or its representatives, including but not limited to |
|||
communication on electronic mailing lists, source code control systems, |
|||
and issue tracking systems that are managed by, or on behalf of, the |
|||
Licensor for the purpose of discussing and improving the Work, but |
|||
excluding communication that is conspicuously marked or otherwise |
|||
designated in writing by the copyright owner as "Not a Contribution." |
|||
|
|||
"Contributor" shall mean Licensor and any individual or Legal Entity |
|||
on behalf of whom a Contribution has been received by Licensor and |
|||
subsequently incorporated within the Work. |
|||
|
|||
2. Grant of Copyright License. Subject to the terms and conditions of |
|||
this License, each Contributor hereby grants to You a perpetual, |
|||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable |
|||
copyright license to reproduce, prepare Derivative Works of, |
|||
publicly display, publicly perform, sublicense, and distribute the |
|||
Work and such Derivative Works in Source or Object form. |
|||
|
|||
3. Grant of Patent License. Subject to the terms and conditions of |
|||
this License, each Contributor hereby grants to You a perpetual, |
|||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable |
|||
(except as stated in this section) patent license to make, have made, |
|||
use, offer to sell, sell, import, and otherwise transfer the Work, |
|||
where such license applies only to those patent claims licensable |
|||
by such Contributor that are necessarily infringed by their |
|||
Contribution(s) alone or by combination of their Contribution(s) |
|||
with the Work to which such Contribution(s) was submitted. If You |
|||
institute patent litigation against any entity (including a |
|||
cross-claim or counterclaim in a lawsuit) alleging that the Work |
|||
or a Contribution incorporated within the Work constitutes direct |
|||
or contributory patent infringement, then any patent licenses |
|||
granted to You under this License for that Work shall terminate |
|||
as of the date such litigation is filed. |
|||
|
|||
4. Redistribution. You may reproduce and distribute copies of the |
|||
Work or Derivative Works thereof in any medium, with or without |
|||
modifications, and in Source or Object form, provided that You |
|||
meet the following conditions: |
|||
|
|||
(a) You must give any other recipients of the Work or |
|||
Derivative Works a copy of this License; and |
|||
|
|||
(b) You must cause any modified files to carry prominent notices |
|||
stating that You changed the files; and |
|||
|
|||
(c) You must retain, in the Source form of any Derivative Works |
|||
that You distribute, all copyright, patent, trademark, and |
|||
attribution notices from the Source form of the Work, |
|||
excluding those notices that do not pertain to any part of |
|||
the Derivative Works; and |
|||
|
|||
(d) If the Work includes a "NOTICE" text file as part of its |
|||
distribution, then any Derivative Works that You distribute must |
|||
include a readable copy of the attribution notices contained |
|||
within such NOTICE file, excluding those notices that do not |
|||
pertain to any part of the Derivative Works, in at least one |
|||
of the following places: within a NOTICE text file distributed |
|||
as part of the Derivative Works; within the Source form or |
|||
documentation, if provided along with the Derivative Works; or, |
|||
within a display generated by the Derivative Works, if and |
|||
wherever such third-party notices normally appear. The contents |
|||
of the NOTICE file are for informational purposes only and |
|||
do not modify the License. You may add Your own attribution |
|||
notices within Derivative Works that You distribute, alongside |
|||
or as an addendum to the NOTICE text from the Work, provided |
|||
that such additional attribution notices cannot be construed |
|||
as modifying the License. |
|||
|
|||
You may add Your own copyright statement to Your modifications and |
|||
may provide additional or different license terms and conditions |
|||
for use, reproduction, or distribution of Your modifications, or |
|||
for any such Derivative Works as a whole, provided Your use, |
|||
reproduction, and distribution of the Work otherwise complies with |
|||
the conditions stated in this License. |
|||
|
|||
5. Submission of Contributions. Unless You explicitly state otherwise, |
|||
any Contribution intentionally submitted for inclusion in the Work |
|||
by You to the Licensor shall be under the terms and conditions of |
|||
this License, without any additional terms or conditions. |
|||
Notwithstanding the above, nothing herein shall supersede or modify |
|||
the terms of any separate license agreement you may have executed |
|||
with Licensor regarding such Contributions. |
|||
|
|||
6. Trademarks. This License does not grant permission to use the trade |
|||
names, trademarks, service marks, or product names of the Licensor, |
|||
except as required for reasonable and customary use in describing the |
|||
origin of the Work and reproducing the content of the NOTICE file. |
|||
|
|||
7. Disclaimer of Warranty. Unless required by applicable law or |
|||
agreed to in writing, Licensor provides the Work (and each |
|||
Contributor provides its Contributions) on an "AS IS" BASIS, |
|||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or |
|||
implied, including, without limitation, any warranties or conditions |
|||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A |
|||
PARTICULAR PURPOSE. You are solely responsible for determining the |
|||
appropriateness of using or redistributing the Work and assume any |
|||
risks associated with Your exercise of permissions under this License. |
|||
|
|||
8. Limitation of Liability. In no event and under no legal theory, |
|||
whether in tort (including negligence), contract, or otherwise, |
|||
unless required by applicable law (such as deliberate and grossly |
|||
negligent acts) or agreed to in writing, shall any Contributor be |
|||
liable to You for damages, including any direct, indirect, special, |
|||
incidental, or consequential damages of any character arising as a |
|||
result of this License or out of the use or inability to use the |
|||
Work (including but not limited to damages for loss of goodwill, |
|||
work stoppage, computer failure or malfunction, or any and all |
|||
other commercial damages or losses), even if such Contributor |
|||
has been advised of the possibility of such damages. |
|||
|
|||
9. Accepting Warranty or Additional Liability. While redistributing |
|||
the Work or Derivative Works thereof, You may choose to offer, |
|||
and charge a fee for, acceptance of support, warranty, indemnity, |
|||
or other liability obligations and/or rights consistent with this |
|||
License. However, in accepting such obligations, You may act only |
|||
on Your own behalf and on Your sole responsibility, not on behalf |
|||
of any other Contributor, and only if You agree to indemnify, |
|||
defend, and hold each Contributor harmless for any liability |
|||
incurred by, or claims asserted against, such Contributor by reason |
|||
of your accepting any such warranty or additional liability. |
|||
|
|||
END OF TERMS AND CONDITIONS |
|||
|
|||
APPENDIX: How to apply the Apache License to your work. |
|||
|
|||
To apply the Apache License to your work, attach the following |
|||
boilerplate notice, with the fields enclosed by brackets "{}" |
|||
replaced with your own identifying information. (Don't include |
|||
the brackets!) The text should be enclosed in the appropriate |
|||
comment syntax for the file format. We also recommend that a |
|||
file or class name and description of purpose be included on the |
|||
same "printed page" as the copyright notice for easier |
|||
identification within third-party archives. |
|||
|
|||
Copyright {yyyy} {name of copyright owner} |
|||
|
|||
Licensed under the Apache License, Version 2.0 (the "License"); |
|||
you may not use this file except in compliance with the License. |
|||
You may obtain a copy of the License at |
|||
|
|||
http://www.apache.org/licenses/LICENSE-2.0 |
|||
|
|||
Unless required by applicable law or agreed to in writing, software |
|||
distributed under the License is distributed on an "AS IS" BASIS, |
|||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|||
See the License for the specific language governing permissions and |
|||
limitations under the License. |
|||
@ -0,0 +1,163 @@ |
|||
# detect-libc |
|||
|
|||
Node.js module to detect details of the C standard library (libc) |
|||
implementation provided by a given Linux system. |
|||
|
|||
Currently supports detection of GNU glibc and MUSL libc. |
|||
|
|||
Provides asynchronous and synchronous functions for the |
|||
family (e.g. `glibc`, `musl`) and version (e.g. `1.23`, `1.2.3`). |
|||
|
|||
The version numbers of libc implementations |
|||
are not guaranteed to be semver-compliant. |
|||
|
|||
For previous v1.x releases, please see the |
|||
[v1](https://github.com/lovell/detect-libc/tree/v1) branch. |
|||
|
|||
## Install |
|||
|
|||
```sh |
|||
npm install detect-libc |
|||
``` |
|||
|
|||
## API |
|||
|
|||
### GLIBC |
|||
|
|||
```ts |
|||
const GLIBC: string = 'glibc'; |
|||
``` |
|||
|
|||
A String constant containing the value `glibc`. |
|||
|
|||
### MUSL |
|||
|
|||
```ts |
|||
const MUSL: string = 'musl'; |
|||
``` |
|||
|
|||
A String constant containing the value `musl`. |
|||
|
|||
### family |
|||
|
|||
```ts |
|||
function family(): Promise<string | null>; |
|||
``` |
|||
|
|||
Resolves asynchronously with: |
|||
|
|||
* `glibc` or `musl` when the libc family can be determined |
|||
* `null` when the libc family cannot be determined |
|||
* `null` when run on a non-Linux platform |
|||
|
|||
```js |
|||
const { family, GLIBC, MUSL } = require('detect-libc'); |
|||
|
|||
switch (await family()) { |
|||
case GLIBC: ... |
|||
case MUSL: ... |
|||
case null: ... |
|||
} |
|||
``` |
|||
|
|||
### familySync |
|||
|
|||
```ts |
|||
function familySync(): string | null; |
|||
``` |
|||
|
|||
Synchronous version of `family()`. |
|||
|
|||
```js |
|||
const { familySync, GLIBC, MUSL } = require('detect-libc'); |
|||
|
|||
switch (familySync()) { |
|||
case GLIBC: ... |
|||
case MUSL: ... |
|||
case null: ... |
|||
} |
|||
``` |
|||
|
|||
### version |
|||
|
|||
```ts |
|||
function version(): Promise<string | null>; |
|||
``` |
|||
|
|||
Resolves asynchronously with: |
|||
|
|||
* The version when it can be determined |
|||
* `null` when the libc family cannot be determined |
|||
* `null` when run on a non-Linux platform |
|||
|
|||
```js |
|||
const { version } = require('detect-libc'); |
|||
|
|||
const v = await version(); |
|||
if (v) { |
|||
const [major, minor, patch] = v.split('.'); |
|||
} |
|||
``` |
|||
|
|||
### versionSync |
|||
|
|||
```ts |
|||
function versionSync(): string | null; |
|||
``` |
|||
|
|||
Synchronous version of `version()`. |
|||
|
|||
```js |
|||
const { versionSync } = require('detect-libc'); |
|||
|
|||
const v = versionSync(); |
|||
if (v) { |
|||
const [major, minor, patch] = v.split('.'); |
|||
} |
|||
``` |
|||
|
|||
### isNonGlibcLinux |
|||
|
|||
```ts |
|||
function isNonGlibcLinux(): Promise<boolean>; |
|||
``` |
|||
|
|||
Resolves asynchronously with: |
|||
|
|||
* `false` when the libc family is `glibc` |
|||
* `true` when the libc family is not `glibc` |
|||
* `false` when run on a non-Linux platform |
|||
|
|||
```js |
|||
const { isNonGlibcLinux } = require('detect-libc'); |
|||
|
|||
if (await isNonGlibcLinux()) { ... } |
|||
``` |
|||
|
|||
### isNonGlibcLinuxSync |
|||
|
|||
```ts |
|||
function isNonGlibcLinuxSync(): boolean; |
|||
``` |
|||
|
|||
Synchronous version of `isNonGlibcLinux()`. |
|||
|
|||
```js |
|||
const { isNonGlibcLinuxSync } = require('detect-libc'); |
|||
|
|||
if (isNonGlibcLinuxSync()) { ... } |
|||
``` |
|||
|
|||
## Licensing |
|||
|
|||
Copyright 2017 Lovell Fuller and others. |
|||
|
|||
Licensed under the Apache License, Version 2.0 (the "License"); |
|||
you may not use this file except in compliance with the License. |
|||
You may obtain a copy of the License at [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0.html) |
|||
|
|||
Unless required by applicable law or agreed to in writing, software |
|||
distributed under the License is distributed on an "AS IS" BASIS, |
|||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|||
See the License for the specific language governing permissions and |
|||
limitations under the License. |
|||
@ -0,0 +1,14 @@ |
|||
// Copyright 2017 Lovell Fuller and others.
|
|||
// SPDX-License-Identifier: Apache-2.0
|
|||
|
|||
/** The string constant `glibc`. */
export const GLIBC: 'glibc';
/** The string constant `musl`. */
export const MUSL: 'musl';

/** Resolves with the libc family (`glibc` or `musl`) or `null` when it cannot be determined (including on non-Linux platforms). */
export function family(): Promise<string | null>;
/** Synchronous version of `family()`. */
export function familySync(): string | null;

/** Resolves `true` only when the platform is Linux and the libc family is not glibc. */
export function isNonGlibcLinux(): Promise<boolean>;
/** Synchronous version of `isNonGlibcLinux()`. */
export function isNonGlibcLinuxSync(): boolean;

/** Resolves with the libc version string or `null` when it cannot be determined. */
export function version(): Promise<string | null>;
/** Synchronous version of `version()`. */
export function versionSync(): string | null;
|||
@ -0,0 +1,313 @@ |
|||
// Copyright 2017 Lovell Fuller and others.
|
|||
// SPDX-License-Identifier: Apache-2.0
|
|||
|
|||
'use strict';

const childProcess = require('child_process');
const { isLinux, getReport } = require('./process');
const { LDD_PATH, SELF_PATH, readFile, readFileSync } = require('./filesystem');
const { interpreterPath } = require('./elf');

// Module-level caches: each detection strategy runs at most once per process.
let cachedFamilyInterpreter;
let cachedFamilyFilesystem;
let cachedVersionFilesystem;

// Last-resort probe: runs `getconf GNU_LIBC_VERSION` and `ldd --version`,
// capturing stderr; `|| true` keeps the overall exit status zero.
const command = 'getconf GNU_LIBC_VERSION 2>&1 || true; ldd --version 2>&1 || true';
// Cached combined output of `command`; a single space marks a failed run
// (truthy, so the command is not retried — see safeCommand).
let commandOut = '';
|||
|
|||
/**
 * Run the probe `command` once and cache its combined output.
 * First call returns a Promise of the output; later calls return the
 * cached string directly (callers always `await`, so the mixed
 * sync/async return type is transparent to them).
 * A failed exec is cached as a single space: truthy (prevents retries)
 * while matching nothing in the parsers.
 * @returns {string|Promise<string>}
 */
const safeCommand = () => {
  if (!commandOut) {
    return new Promise((resolve) => {
      childProcess.exec(command, (err, out) => {
        // ' ' on failure keeps the cache truthy without false matches.
        commandOut = err ? ' ' : out;
        resolve(commandOut);
      });
    });
  }
  return commandOut;
};
|||
|
|||
/**
 * Synchronous version of `safeCommand()`: run the probe `command` once
 * and cache its combined output. A failed exec is cached as a single
 * space — truthy (prevents retries) while matching nothing.
 * @returns {string}
 */
const safeCommandSync = () => {
  if (!commandOut) {
    try {
      commandOut = childProcess.execSync(command, { encoding: 'utf8' });
    } catch (_err) {
      commandOut = ' ';
    }
  }
  return commandOut;
};
|||
|
|||
/**
 * A String constant containing the value `glibc`.
 * @type {string}
 * @public
 */
const GLIBC = 'glibc';

/**
 * A RegExp used to extract a glibc version number (e.g. `2.31`) from
 * libc-related text such as the contents of the `ldd` script; the
 * version is captured in group 1.
 * @type {RegExp}
 */
const RE_GLIBC_VERSION = /LIBC[a-z0-9 \-).]*?(\d+\.\d+)/i;

/**
 * A String constant containing the value `musl`.
 * @type {string}
 * @public
 */
const MUSL = 'musl';
|||
|
|||
// True when a shared-object path carries a musl marker (libc.musl- / ld-musl-).
const isFileMusl = (f) => ['libc.musl-', 'ld-musl-'].some((needle) => f.includes(needle));
|||
|
|||
/**
 * Infer the libc family from the Node.js process report: glibc via the
 * report header, musl via the loaded shared-object paths.
 * @returns {?string} GLIBC, MUSL or null when undetermined.
 */
const familyFromReport = () => {
  const { header, sharedObjects } = getReport();
  if (header && header.glibcVersionRuntime) {
    return GLIBC;
  }
  if (Array.isArray(sharedObjects) && sharedObjects.some(isFileMusl)) {
    return MUSL;
  }
  return null;
};
|||
|
|||
/**
 * Infer the libc family from the cached probe-command output.
 * The first line is the `getconf GNU_LIBC_VERSION` result; the second
 * is the first line of `ldd --version`.
 * @param {string} out combined command output
 * @returns {?string} GLIBC, MUSL or null when undetermined.
 */
const familyFromCommand = (out) => {
  const lines = out.split(/[\r\n]+/);
  const getconfLine = lines[0];
  const lddFirstLine = lines[1];
  if (getconfLine && getconfLine.includes(GLIBC)) {
    return GLIBC;
  }
  if (lddFirstLine && lddFirstLine.includes(MUSL)) {
    return MUSL;
  }
  return null;
};
|||
|
|||
/**
 * Map an ELF interpreter (dynamic loader) path to a libc family.
 * @param {?string} path e.g. `/lib/ld-musl-x86_64.so.1`
 * @returns {?string} GLIBC, MUSL or null when undetermined.
 */
const familyFromInterpreterPath = (path) => {
  if (!path) {
    return null;
  }
  if (path.includes('/ld-musl-')) {
    return MUSL;
  }
  return path.includes('/ld-linux-') ? GLIBC : null;
};
|||
|
|||
/**
 * Infer the libc family from the contents of the `ldd` script.
 * @param {Buffer|string} content raw bytes read from LDD_PATH
 * @returns {?string} GLIBC, MUSL or null when undetermined.
 */
const getFamilyFromLddContent = (content) => {
  const text = content.toString();
  if (text.includes('musl')) {
    return MUSL;
  }
  return text.includes('GNU C Library') ? GLIBC : null;
};
|||
|
|||
/**
 * Detect the libc family by reading the `ldd` script from disk.
 * The result (including null) is cached for the life of the process.
 * Read failures (e.g. file absent) are swallowed and yield null.
 * @returns {Promise<?string>}
 */
const familyFromFilesystem = async () => {
  if (cachedFamilyFilesystem !== undefined) {
    return cachedFamilyFilesystem;
  }
  cachedFamilyFilesystem = null;
  try {
    const lddContent = await readFile(LDD_PATH);
    cachedFamilyFilesystem = getFamilyFromLddContent(lddContent);
  } catch (e) {}
  return cachedFamilyFilesystem;
};
|||
|
|||
/**
 * Synchronous version of `familyFromFilesystem()`: detect the libc
 * family from the on-disk `ldd` script, sharing the same cache.
 * Read failures are swallowed and yield null.
 * @returns {?string}
 */
const familyFromFilesystemSync = () => {
  if (cachedFamilyFilesystem !== undefined) {
    return cachedFamilyFilesystem;
  }
  cachedFamilyFilesystem = null;
  try {
    const lddContent = readFileSync(LDD_PATH);
    cachedFamilyFilesystem = getFamilyFromLddContent(lddContent);
  } catch (e) {}
  return cachedFamilyFilesystem;
};
|||
|
|||
/**
 * Detect the libc family from the ELF interpreter path of the running
 * executable (read via /proc/self/exe). The result (including null) is
 * cached; read/parse failures are swallowed and yield null.
 * @returns {Promise<?string>}
 */
const familyFromInterpreter = async () => {
  if (cachedFamilyInterpreter !== undefined) {
    return cachedFamilyInterpreter;
  }
  cachedFamilyInterpreter = null;
  try {
    const selfContent = await readFile(SELF_PATH);
    const path = interpreterPath(selfContent);
    cachedFamilyInterpreter = familyFromInterpreterPath(path);
  } catch (e) {}
  return cachedFamilyInterpreter;
};
|||
|
|||
/**
 * Synchronous version of `familyFromInterpreter()`, sharing the same
 * cache. Read/parse failures are swallowed and yield null.
 * @returns {?string}
 */
const familyFromInterpreterSync = () => {
  if (cachedFamilyInterpreter !== undefined) {
    return cachedFamilyInterpreter;
  }
  cachedFamilyInterpreter = null;
  try {
    const selfContent = readFileSync(SELF_PATH);
    const path = interpreterPath(selfContent);
    cachedFamilyInterpreter = familyFromInterpreterPath(path);
  } catch (e) {}
  return cachedFamilyInterpreter;
};
|||
|
|||
/**
 * Resolves with the libc family when it can be determined, `null` otherwise.
 * Strategies are tried in order: ELF interpreter of the running
 * executable, then the on-disk ldd script, then the process report,
 * and finally the getconf/ldd probe command. Non-Linux platforms
 * resolve with null without running any strategy.
 * @returns {Promise<?string>}
 */
const family = async () => {
  let family = null;
  if (isLinux()) {
    family = await familyFromInterpreter();
    if (!family) {
      family = await familyFromFilesystem();
      if (!family) {
        family = familyFromReport();
      }
      if (!family) {
        const out = await safeCommand();
        family = familyFromCommand(out);
      }
    }
  }
  return family;
};
|||
|
|||
/**
 * Returns the libc family when it can be determined, `null` otherwise.
 * Synchronous mirror of `family()`: same strategy order (interpreter,
 * filesystem, report, probe command), sync variants throughout.
 * @returns {?string}
 */
const familySync = () => {
  let family = null;
  if (isLinux()) {
    family = familyFromInterpreterSync();
    if (!family) {
      family = familyFromFilesystemSync();
      if (!family) {
        family = familyFromReport();
      }
      if (!family) {
        const out = safeCommandSync();
        family = familyFromCommand(out);
      }
    }
  }
  return family;
};
|||
|
|||
/**
 * Resolves `true` only when the platform is Linux and the libc family is not `glibc`.
 * @returns {Promise<boolean>}
 */
const isNonGlibcLinux = async () => {
  if (!isLinux()) {
    return false;
  }
  return (await family()) !== GLIBC;
};
|||
|
|||
/**
 * Returns `true` only when the platform is Linux and the libc family is not `glibc`.
 * @returns {boolean}
 */
const isNonGlibcLinuxSync = () => {
  if (!isLinux()) {
    return false;
  }
  return familySync() !== GLIBC;
};
|||
|
|||
/**
 * Extract the glibc version from the on-disk `ldd` script using
 * RE_GLIBC_VERSION. The result (including null) is cached for the life
 * of the process; read failures are swallowed and yield null.
 * @returns {Promise<?string>}
 */
const versionFromFilesystem = async () => {
  if (cachedVersionFilesystem !== undefined) {
    return cachedVersionFilesystem;
  }
  cachedVersionFilesystem = null;
  try {
    const lddContent = await readFile(LDD_PATH);
    const versionMatch = lddContent.match(RE_GLIBC_VERSION);
    if (versionMatch) {
      cachedVersionFilesystem = versionMatch[1];
    }
  } catch (e) {}
  return cachedVersionFilesystem;
};
|||
|
|||
/**
 * Synchronous version of `versionFromFilesystem()`, sharing the same
 * cache. Read failures are swallowed and yield null.
 * @returns {?string}
 */
const versionFromFilesystemSync = () => {
  if (cachedVersionFilesystem !== undefined) {
    return cachedVersionFilesystem;
  }
  cachedVersionFilesystem = null;
  try {
    const lddContent = readFileSync(LDD_PATH);
    const versionMatch = lddContent.match(RE_GLIBC_VERSION);
    if (versionMatch) {
      cachedVersionFilesystem = versionMatch[1];
    }
  } catch (e) {}
  return cachedVersionFilesystem;
};
|||
|
|||
/**
 * Read the glibc runtime version from the Node.js process report.
 * @returns {?string} version string, or null when the report has none.
 */
const versionFromReport = () => {
  const { header } = getReport();
  return (header && header.glibcVersionRuntime) || null;
};
|||
|
|||
// Second whitespace-separated word of a trimmed line, e.g. '2.31' from 'glibc 2.31'.
const versionSuffix = (s) => {
  const words = s.trim().split(/\s+/);
  return words[1];
};
|||
|
|||
/**
 * Extract the libc version from the cached probe-command output.
 * The first line is the getconf result; when the first ldd line
 * mentions musl, the version is taken from the line that follows it.
 * @param {string} out combined command output
 * @returns {?string} version string or null when undetermined.
 */
const versionFromCommand = (out) => {
  const lines = out.split(/[\r\n]+/);
  const [getconfLine, lddLine1, lddLine2] = lines;
  if (getconfLine && getconfLine.includes(GLIBC)) {
    return versionSuffix(getconfLine);
  }
  return (lddLine1 && lddLine2 && lddLine1.includes(MUSL)) ? versionSuffix(lddLine2) : null;
};
|||
|
|||
/**
 * Resolves with the libc version when it can be determined, `null` otherwise.
 * Strategies in order: the on-disk ldd script, the process report
 * (glibc only), then the getconf/ldd probe command. Non-Linux
 * platforms resolve with null without running any strategy.
 * @returns {Promise<?string>}
 */
const version = async () => {
  let version = null;
  if (isLinux()) {
    version = await versionFromFilesystem();
    if (!version) {
      version = versionFromReport();
    }
    if (!version) {
      const out = await safeCommand();
      version = versionFromCommand(out);
    }
  }
  return version;
};
|||
|
|||
/**
 * Returns the libc version when it can be determined, `null` otherwise.
 * Synchronous mirror of `version()`: same strategy order (filesystem,
 * report, probe command), sync variants throughout.
 * @returns {?string}
 */
const versionSync = () => {
  let version = null;
  if (isLinux()) {
    version = versionFromFilesystemSync();
    if (!version) {
      version = versionFromReport();
    }
    if (!version) {
      const out = safeCommandSync();
      version = versionFromCommand(out);
    }
  }
  return version;
};
|||
|
|||
// Public API: family constants plus async/sync detection helpers.
module.exports = {
  GLIBC,
  MUSL,
  family,
  familySync,
  isNonGlibcLinux,
  isNonGlibcLinuxSync,
  version,
  versionSync
};
|||
@ -0,0 +1,39 @@ |
|||
// Copyright 2017 Lovell Fuller and others.
|
|||
// SPDX-License-Identifier: Apache-2.0
|
|||
|
|||
'use strict'; |
|||
|
|||
/**
 * Extract the PT_INTERP (dynamic loader) path from an ELF image.
 * Only little-endian 64-bit ELF is handled; anything else yields null.
 * Field offsets follow the ELF-64 layout. NOTE(review): the 64-bit
 * fields e_phoff, p_offset and p_filesz are read as 32-bit values,
 * i.e. only their low 4 bytes — assumes they fit in 32 bits.
 * @param {Buffer} elf leading bytes of the executable
 * @returns {?string} interpreter path, or null when not found
 */
const interpreterPath = (elf) => {
  if (elf.length < 64) {
    // Too short to hold a complete ELF-64 file header.
    return null;
  }
  if (elf.readUInt32BE(0) !== 0x7F454C46) {
    // Unexpected magic bytes
    return null;
  }
  if (elf.readUInt8(4) !== 2) {
    // Not a 64-bit ELF
    return null;
  }
  if (elf.readUInt8(5) !== 1) {
    // Not little-endian
    return null;
  }
  const offset = elf.readUInt32LE(32); // e_phoff: program header table offset
  const size = elf.readUInt16LE(54); // e_phentsize: bytes per program header
  const count = elf.readUInt16LE(56); // e_phnum: number of program headers
  for (let i = 0; i < count; i++) {
    const headerOffset = offset + (i * size);
    const type = elf.readUInt32LE(headerOffset); // p_type
    if (type === 3) { // PT_INTERP: interpreter path segment
      const fileOffset = elf.readUInt32LE(headerOffset + 8); // p_offset
      const fileSize = elf.readUInt32LE(headerOffset + 32); // p_filesz
      // Drop the NUL terminator and anything after it.
      return elf.subarray(fileOffset, fileOffset + fileSize).toString().replace(/\0.*$/g, '');
    }
  }
  return null;
};
|||
|
|||
// Public API of this module.
module.exports = {
  interpreterPath
};
|||
@ -0,0 +1,51 @@ |
|||
// Copyright 2017 Lovell Fuller and others.
|
|||
// SPDX-License-Identifier: Apache-2.0
|
|||
|
|||
'use strict';

const fs = require('fs');

// The ldd script, whose text is inspected to identify the libc flavour.
const LDD_PATH = '/usr/bin/ldd';
// The currently running executable, via procfs.
const SELF_PATH = '/proc/self/exe';
// At most this many leading bytes of a file are ever read.
const MAX_LENGTH = 2048;
|||
|
|||
/**
 * Read up to MAX_LENGTH bytes from the start of a file, synchronously.
 *
 * The descriptor is closed in a `finally` block so it cannot leak when
 * fs.readSync throws, and it is closed synchronously to match the
 * synchronous contract of this helper (previously an async fs.close
 * with a no-op callback ran outside any error handling).
 *
 * @param {string} path
 * @returns {Buffer} the bytes actually read (at most MAX_LENGTH)
 * @throws when the file cannot be opened or read
 */
const readFileSync = (path) => {
  const fd = fs.openSync(path, 'r');
  try {
    const buffer = Buffer.alloc(MAX_LENGTH);
    const bytesRead = fs.readSync(fd, buffer, 0, MAX_LENGTH, 0);
    return buffer.subarray(0, bytesRead);
  } finally {
    fs.closeSync(fd);
  }
};
|||
|
|||
/**
 * Read up to MAX_LENGTH bytes from the start of a file.
 *
 * Read errors are now propagated as a rejection; previously the error
 * argument of fs.read was ignored, so a failed read resolved with the
 * entire zero-filled buffer (`subarray(0, undefined)` returns the whole
 * buffer). All callers in this package wrap this helper in try/catch,
 * so the rejection is handled.
 *
 * @param {string} path
 * @returns {Promise<Buffer>} the bytes actually read (at most MAX_LENGTH)
 */
const readFile = (path) => new Promise((resolve, reject) => {
  fs.open(path, 'r', (openErr, fd) => {
    if (openErr) {
      reject(openErr);
      return;
    }
    const buffer = Buffer.alloc(MAX_LENGTH);
    fs.read(fd, buffer, 0, MAX_LENGTH, 0, (readErr, bytesRead) => {
      // Close regardless of outcome; best-effort, close errors ignored.
      fs.close(fd, () => {});
      if (readErr) {
        reject(readErr);
      } else {
        resolve(buffer.subarray(0, bytesRead));
      }
    });
  });
});
|||
|
|||
// Paths and bounded file readers shared by the detection strategies.
module.exports = {
  LDD_PATH,
  SELF_PATH,
  readFileSync,
  readFile
};
|||
@ -0,0 +1,24 @@ |
|||
// Copyright 2017 Lovell Fuller and others.
|
|||
// SPDX-License-Identifier: Apache-2.0
|
|||
|
|||
'use strict'; |
|||
|
|||
/**
 * Whether the current platform is Linux.
 * @returns {boolean}
 */
const isLinux = () => {
  return process.platform === 'linux';
};
|||
|
|||
// Lazily generated, process-lifetime cache of the diagnostic report
// (an empty object when report support is unavailable or non-Linux).
let report = null;
const getReport = () => {
  if (!report) {
    /* istanbul ignore next */
    if (isLinux() && process.report) {
      // Temporarily set excludeNetwork — network information is not
      // needed for libc detection — then restore the caller's setting.
      const orig = process.report.excludeNetwork;
      process.report.excludeNetwork = true;
      report = process.report.getReport();
      process.report.excludeNetwork = orig;
    } else {
      report = {};
    }
  }
  return report;
};
|||
|
|||
// Platform predicate and cached process-report accessor.
module.exports = { isLinux, getReport };
|||
@ -0,0 +1,44 @@ |
|||
{ |
|||
"name": "detect-libc", |
|||
"version": "2.1.2", |
|||
"description": "Node.js module to detect the C standard library (libc) implementation family and version", |
|||
"main": "lib/detect-libc.js", |
|||
"files": [ |
|||
"lib/", |
|||
"index.d.ts" |
|||
], |
|||
"scripts": { |
|||
"test": "semistandard && nyc --reporter=text --check-coverage --branches=100 ava test/unit.js", |
|||
"changelog": "conventional-changelog -i CHANGELOG.md -s", |
|||
"bench": "node benchmark/detect-libc", |
|||
"bench:calls": "node benchmark/call-familySync.js && sleep 1 && node benchmark/call-isNonGlibcLinuxSync.js && sleep 1 && node benchmark/call-versionSync.js" |
|||
}, |
|||
"repository": { |
|||
"type": "git", |
|||
"url": "git://github.com/lovell/detect-libc.git" |
|||
}, |
|||
"keywords": [ |
|||
"libc", |
|||
"glibc", |
|||
"musl" |
|||
], |
|||
"author": "Lovell Fuller <npm@lovell.info>", |
|||
"contributors": [ |
|||
"Niklas Salmoukas <niklas@salmoukas.com>", |
|||
"Vinícius Lourenço <vinyygamerlol@gmail.com>" |
|||
], |
|||
"license": "Apache-2.0", |
|||
"devDependencies": { |
|||
"ava": "^2.4.0", |
|||
"benchmark": "^2.1.4", |
|||
"conventional-changelog-cli": "^5.0.0", |
|||
"eslint-config-standard": "^13.0.1", |
|||
"nyc": "^15.1.0", |
|||
"proxyquire": "^2.1.3", |
|||
"semistandard": "^14.2.3" |
|||
}, |
|||
"engines": { |
|||
"node": ">=8" |
|||
}, |
|||
"types": "index.d.ts" |
|||
} |
|||
@ -0,0 +1,21 @@ |
|||
The MIT License (MIT) |
|||
|
|||
Copyright (c) 2017-present, Jon Schlinkert. |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to deal |
|||
in the Software without restriction, including without limitation the rights |
|||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in |
|||
all copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN |
|||
THE SOFTWARE. |
|||
@ -0,0 +1,738 @@ |
|||
<h1 align="center">Picomatch</h1> |
|||
|
|||
<p align="center"> |
|||
<a href="https://npmjs.org/package/picomatch"> |
|||
<img src="https://img.shields.io/npm/v/picomatch.svg" alt="version"> |
|||
</a> |
|||
<a href="https://github.com/micromatch/picomatch/actions?workflow=Tests"> |
|||
<img src="https://github.com/micromatch/picomatch/workflows/Tests/badge.svg" alt="test status"> |
|||
</a> |
|||
<a href="https://coveralls.io/github/micromatch/picomatch"> |
|||
<img src="https://img.shields.io/coveralls/github/micromatch/picomatch/master.svg" alt="coverage status"> |
|||
</a> |
|||
<a href="https://npmjs.org/package/picomatch"> |
|||
<img src="https://img.shields.io/npm/dm/picomatch.svg" alt="downloads"> |
|||
</a> |
|||
</p> |
|||
|
|||
<br> |
|||
<br> |
|||
|
|||
<p align="center"> |
|||
<strong>Blazing fast and accurate glob matcher written in JavaScript.</strong></br> |
|||
<em>No dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.</em> |
|||
</p> |
|||
|
|||
<br> |
|||
<br> |
|||
|
|||
## Why picomatch? |
|||
|
|||
* **Lightweight** - No dependencies |
|||
* **Minimal** - Tiny API surface. Main export is a function that takes a glob pattern and returns a matcher function. |
|||
* **Fast** - Loads in about 2ms (that's several times faster than a [single frame of a HD movie](http://www.endmemo.com/sconvert/framespersecondframespermillisecond.php) at 60fps) |
|||
* **Performant** - Use the returned matcher function to speed up repeat matching (like when watching files) |
|||
* **Accurate matching** - Using wildcards (`*` and `?`), globstars (`**`) for nested directories, [advanced globbing](#advanced-globbing) with extglobs, braces, and POSIX brackets, and support for escaping special characters with `\` or quotes. |
|||
* **Well tested** - Thousands of unit tests |
|||
|
|||
See the [library comparison](#library-comparisons) to other libraries. |
|||
|
|||
<br> |
|||
<br> |
|||
|
|||
## Table of Contents |
|||
|
|||
<details><summary> Click to expand </summary> |
|||
|
|||
- [Install](#install) |
|||
- [Usage](#usage) |
|||
- [API](#api) |
|||
* [picomatch](#picomatch) |
|||
* [.test](#test) |
|||
* [.matchBase](#matchbase) |
|||
* [.isMatch](#ismatch) |
|||
* [.parse](#parse) |
|||
* [.scan](#scan) |
|||
* [.compileRe](#compilere) |
|||
* [.makeRe](#makere) |
|||
* [.toRegex](#toregex) |
|||
- [Options](#options) |
|||
* [Picomatch options](#picomatch-options) |
|||
* [Scan Options](#scan-options) |
|||
* [Options Examples](#options-examples) |
|||
- [Globbing features](#globbing-features) |
|||
* [Basic globbing](#basic-globbing) |
|||
* [Advanced globbing](#advanced-globbing) |
|||
* [Braces](#braces) |
|||
* [Matching special characters as literals](#matching-special-characters-as-literals) |
|||
- [Library Comparisons](#library-comparisons) |
|||
- [Benchmarks](#benchmarks) |
|||
- [Philosophies](#philosophies) |
|||
- [About](#about) |
|||
* [Author](#author) |
|||
* [License](#license) |
|||
|
|||
_(TOC generated by [verb](https://github.com/verbose/verb) using [markdown-toc](https://github.com/jonschlinkert/markdown-toc))_ |
|||
|
|||
</details> |
|||
|
|||
<br> |
|||
<br> |
|||
|
|||
## Install |
|||
|
|||
Install with [npm](https://www.npmjs.com/): |
|||
|
|||
```sh |
|||
npm install --save picomatch |
|||
``` |
|||
|
|||
<br> |
|||
|
|||
## Usage |
|||
|
|||
The main export is a function that takes a glob pattern and an options object and returns a function for matching strings. |
|||
|
|||
```js |
|||
const pm = require('picomatch'); |
|||
const isMatch = pm('*.js'); |
|||
|
|||
console.log(isMatch('abcd')); //=> false |
|||
console.log(isMatch('a.js')); //=> true |
|||
console.log(isMatch('a.md')); //=> false |
|||
console.log(isMatch('a/b.js')); //=> false |
|||
``` |
|||
|
|||
<br> |
|||
|
|||
## API |
|||
|
|||
### [picomatch](lib/picomatch.js#L31) |
|||
|
|||
Creates a matcher function from one or more glob patterns. The returned function takes a string to match as its first argument, and returns true if the string is a match. The returned matcher function also takes a boolean as the second argument that, when true, returns an object with additional information. |
|||
|
|||
**Params** |
|||
|
|||
* `globs` **{String|Array}**: One or more glob patterns. |
|||
* `options` **{Object=}** |
|||
* `returns` **{Function=}**: Returns a matcher function. |
|||
|
|||
**Example** |
|||
|
|||
```js |
|||
const picomatch = require('picomatch'); |
|||
// picomatch(glob[, options]); |
|||
|
|||
const isMatch = picomatch('*.!(*a)'); |
|||
console.log(isMatch('a.a')); //=> false |
|||
console.log(isMatch('a.b')); //=> true |
|||
``` |
|||
|
|||
**Example without node.js** |
|||
|
|||
For environments without `node.js`, `picomatch/posix` provides you a dependency-free matcher, without automatic OS detection. |
|||
|
|||
```js |
|||
const picomatch = require('picomatch/posix'); |
|||
// the same API, defaulting to posix paths |
|||
const isMatch = picomatch('a/*'); |
|||
console.log(isMatch('a\\b')); //=> false |
|||
console.log(isMatch('a/b')); //=> true |
|||
|
|||
// you can still configure the matcher function to accept windows paths |
|||
const isMatch = picomatch('a/*', { options: windows }); |
|||
console.log(isMatch('a\\b')); //=> true |
|||
console.log(isMatch('a/b')); //=> true |
|||
``` |
|||
|
|||
### [.test](lib/picomatch.js#L116) |
|||
|
|||
Test `input` with the given `regex`. This is used by the main `picomatch()` function to test the input string. |
|||
|
|||
**Params** |
|||
|
|||
* `input` **{String}**: String to test. |
|||
* `regex` **{RegExp}** |
|||
* `returns` **{Object}**: Returns an object with matching info. |
|||
|
|||
**Example** |
|||
|
|||
```js |
|||
const picomatch = require('picomatch'); |
|||
// picomatch.test(input, regex[, options]); |
|||
|
|||
console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); |
|||
// { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } |
|||
``` |
|||
|
|||
### [.matchBase](lib/picomatch.js#L160) |
|||
|
|||
Match the basename of a filepath. |
|||
|
|||
**Params** |
|||
|
|||
* `input` **{String}**: String to test. |
|||
* `glob` **{RegExp|String}**: Glob pattern or regex created by [.makeRe](#makeRe). |
|||
* `returns` **{Boolean}** |
|||
|
|||
**Example** |
|||
|
|||
```js |
|||
const picomatch = require('picomatch'); |
|||
// picomatch.matchBase(input, glob[, options]); |
|||
console.log(picomatch.matchBase('foo/bar.js', '*.js')); // true |
|||
``` |
|||
|
|||
### [.isMatch](lib/picomatch.js#L182) |
|||
|
|||
Returns true if **any** of the given glob `patterns` match the specified `string`. |
|||
|
|||
**Params** |
|||
|
|||
* **{String|Array}**: str The string to test. |
|||
* **{String|Array}**: patterns One or more glob patterns to use for matching. |
|||
* **{Object}**: See available [options](#options). |
|||
* `returns` **{Boolean}**: Returns true if any patterns match `str` |
|||
|
|||
**Example** |
|||
|
|||
```js |
|||
const picomatch = require('picomatch'); |
|||
// picomatch.isMatch(string, patterns[, options]); |
|||
|
|||
console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true |
|||
console.log(picomatch.isMatch('a.a', 'b.*')); //=> false |
|||
``` |
|||
|
|||
### [.parse](lib/picomatch.js#L198) |
|||
|
|||
Parse a glob pattern to create the source string for a regular expression. |
|||
|
|||
**Params** |
|||
|
|||
* `pattern` **{String}** |
|||
* `options` **{Object}** |
|||
* `returns` **{Object}**: Returns an object with useful properties and output to be used as a regex source string. |
|||
|
|||
**Example** |
|||
|
|||
```js |
|||
const picomatch = require('picomatch'); |
|||
const result = picomatch.parse(pattern[, options]); |
|||
``` |
|||
|
|||
### [.scan](lib/picomatch.js#L230) |
|||
|
|||
Scan a glob pattern to separate the pattern into segments. |
|||
|
|||
**Params** |
|||
|
|||
* `input` **{String}**: Glob pattern to scan. |
|||
* `options` **{Object}** |
|||
* `returns` **{Object}**: Returns an object with |
|||
|
|||
**Example** |
|||
|
|||
```js |
|||
const picomatch = require('picomatch'); |
|||
// picomatch.scan(input[, options]); |
|||
|
|||
const result = picomatch.scan('!./foo/*.js'); |
|||
console.log(result); |
|||
{ prefix: '!./', |
|||
input: '!./foo/*.js', |
|||
start: 3, |
|||
base: 'foo', |
|||
glob: '*.js', |
|||
isBrace: false, |
|||
isBracket: false, |
|||
isGlob: true, |
|||
isExtglob: false, |
|||
isGlobstar: false, |
|||
negated: true } |
|||
``` |
|||
|
|||
### [.compileRe](lib/picomatch.js#L244) |
|||
|
|||
Compile a regular expression from the `state` object returned by the |
|||
[parse()](#parse) method. |
|||
|
|||
**Params** |
|||
|
|||
* `state` **{Object}** |
|||
* `options` **{Object}** |
|||
* `returnOutput` **{Boolean}**: Intended for implementors, this argument allows you to return the raw output from the parser. |
|||
* `returnState` **{Boolean}**: Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. |
|||
* `returns` **{RegExp}** |
|||
|
|||
### [.makeRe](lib/picomatch.js#L285) |
|||
|
|||
Create a regular expression from a parsed glob pattern. |
|||
|
|||
**Params** |
|||
|
|||
* `state` **{String}**: The object returned from the `.parse` method. |
|||
* `options` **{Object}** |
|||
* `returnOutput` **{Boolean}**: Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. |
|||
* `returnState` **{Boolean}**: Implementors may use this argument to return the state from the parsed glob with the returned regular expression. |
|||
* `returns` **{RegExp}**: Returns a regex created from the given pattern. |
|||
|
|||
**Example** |
|||
|
|||
```js |
|||
const picomatch = require('picomatch'); |
|||
const state = picomatch.parse('*.js'); |
|||
// picomatch.compileRe(state[, options]); |
|||
|
|||
console.log(picomatch.compileRe(state)); |
|||
//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ |
|||
``` |
|||
|
|||
### [.toRegex](lib/picomatch.js#L320) |
|||
|
|||
Create a regular expression from the given regex source string. |
|||
|
|||
**Params** |
|||
|
|||
* `source` **{String}**: Regular expression source string. |
|||
* `options` **{Object}** |
|||
* `returns` **{RegExp}** |
|||
|
|||
**Example** |
|||
|
|||
```js |
|||
const picomatch = require('picomatch'); |
|||
// picomatch.toRegex(source[, options]); |
|||
|
|||
const { output } = picomatch.parse('*.js'); |
|||
console.log(picomatch.toRegex(output)); |
|||
//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ |
|||
``` |
|||
|
|||
<br> |
|||
|
|||
## Options |
|||
|
|||
### Picomatch options |
|||
|
|||
The following options may be used with the main `picomatch()` function or any of the methods on the picomatch API. |
|||
|
|||
| **Option** | **Type** | **Default value** | **Description** | |
|||
| --- | --- | --- | --- | |
|||
| `basename` | `boolean` | `false` | If set, then patterns without slashes will be matched against the basename of the path if it contains slashes. For example, `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. | |
|||
| `bash` | `boolean` | `false` | Follow bash matching rules more strictly - disallows backslashes as escape characters, and treats single stars as globstars (`**`). | |
|||
| `capture` | `boolean` | `undefined` | Return regex matches in supporting methods. | |
|||
| `contains` | `boolean` | `undefined` | Allows glob to match any part of the given string(s). | |
|||
| `cwd` | `string` | `process.cwd()` | Current working directory. Used by `picomatch.split()` | |
|||
| `debug` | `boolean` | `undefined` | Debug regular expressions when an error is thrown. | |
|||
| `dot` | `boolean` | `false` | Enable dotfile matching. By default, dotfiles are ignored unless a `.` is explicitly defined in the pattern, or `options.dot` is true | |
|||
| `expandRange` | `function` | `undefined` | Custom function for expanding ranges in brace patterns, such as `{a..z}`. The function receives the range values as two arguments, and it must return a string to be used in the generated regex. It's recommended that returned strings be wrapped in parentheses. | |
|||
| `failglob` | `boolean` | `false` | Throws an error if no matches are found. Based on the bash option of the same name. | |
|||
| `fastpaths` | `boolean` | `true` | To speed up processing, full parsing is skipped for a handful common glob patterns. Disable this behavior by setting this option to `false`. | |
|||
| `flags` | `string` | `undefined` | Regex flags to use in the generated regex. If defined, the `nocase` option will be overridden. | |
|||
| [format](#optionsformat) | `function` | `undefined` | Custom function for formatting the returned string. This is useful for removing leading slashes, converting Windows paths to Posix paths, etc. | |
|||
| `ignore` | `array\|string` | `undefined` | One or more glob patterns for excluding strings that should not be matched from the result. | |
|||
| `keepQuotes` | `boolean` | `false` | Retain quotes in the generated regex, since quotes may also be used as an alternative to backslashes. | |
|||
| `literalBrackets` | `boolean` | `undefined` | When `true`, brackets in the glob pattern will be escaped so that only literal brackets will be matched. | |
|||
| `matchBase` | `boolean` | `false` | Alias for `basename` | |
|||
| `maxLength` | `number` | `65536` | Limit the max length of the input string. An error is thrown if the input string is longer than this value. | |
|||
| `nobrace` | `boolean` | `false` | Disable brace matching, so that `{a,b}` and `{1..3}` would be treated as literal characters. | |
|||
| `nobracket` | `boolean` | `undefined` | Disable matching with regex brackets. | |
|||
| `nocase` | `boolean` | `false` | Make matching case-insensitive. Equivalent to the regex `i` flag. Note that this option is overridden by the `flags` option. | |
|||
| `nodupes` | `boolean` | `true` | Deprecated, use `nounique` instead. This option will be removed in a future major release. By default duplicates are removed. Disable uniquification by setting this option to false. | |
|||
| `noext` | `boolean` | `false` | Alias for `noextglob` | |
|||
| `noextglob` | `boolean` | `false` | Disable support for matching with extglobs (like `+(a\|b)`) | |
|||
| `noglobstar` | `boolean` | `false` | Disable support for matching nested directories with globstars (`**`) | |
|||
| `nonegate` | `boolean` | `false` | Disable support for negating with leading `!` | |
|||
| `noquantifiers` | `boolean` | `false` | Disable support for regex quantifiers (like `a{1,2}`) and treat them as brace patterns to be expanded. | |
|||
| [onIgnore](#optionsonIgnore) | `function` | `undefined` | Function to be called on ignored items. | |
|||
| [onMatch](#optionsonMatch) | `function` | `undefined` | Function to be called on matched items. | |
|||
| [onResult](#optionsonResult) | `function` | `undefined` | Function to be called on all items, regardless of whether or not they are matched or ignored. | |
|||
| `posix` | `boolean` | `false` | Support POSIX character classes ("posix brackets"). | |
|||
| `posixSlashes` | `boolean` | `undefined` | Convert all slashes in file paths to forward slashes. This does not convert slashes in the glob pattern itself | |
|||
| `prepend` | `boolean` | `undefined` | String to prepend to the generated regex used for matching. | |
|||
| `regex` | `boolean` | `false` | Use regular expression rules for `+` (instead of matching literal `+`), and for stars that follow closing parentheses or brackets (as in `)*` and `]*`). | |
|||
| `strictBrackets` | `boolean` | `undefined` | Throw an error if brackets, braces, or parens are imbalanced. | |
|||
| `strictSlashes` | `boolean` | `undefined` | When true, picomatch won't match trailing slashes with single stars. | |
|||
| `unescape` | `boolean` | `undefined` | Remove backslashes preceding escaped characters in the glob pattern. By default, backslashes are retained. | |
|||
| `unixify` | `boolean` | `undefined` | Alias for `posixSlashes`, for backwards compatibility. | |
|||
| `windows` | `boolean` | `false` | Also accept backslashes as the path separator. | |
|||
|
|||
### Scan Options |
|||
|
|||
In addition to the main [picomatch options](#picomatch-options), the following options may also be used with the [.scan](#scan) method. |
|||
|
|||
| **Option** | **Type** | **Default value** | **Description** | |
|||
| --- | --- | --- | --- | |
|||
| `tokens` | `boolean` | `false` | When `true`, the returned object will include an array of tokens (objects), representing each path "segment" in the scanned glob pattern | |
|||
| `parts` | `boolean` | `false` | When `true`, the returned object will include an array of strings representing each path "segment" in the scanned glob pattern. This is automatically enabled when `options.tokens` is true | |
|||
|
|||
**Example** |
|||
|
|||
```js |
|||
const picomatch = require('picomatch'); |
|||
const result = picomatch.scan('!./foo/*.js', { tokens: true }); |
|||
console.log(result); |
|||
// { |
|||
// prefix: '!./', |
|||
// input: '!./foo/*.js', |
|||
// start: 3, |
|||
// base: 'foo', |
|||
// glob: '*.js', |
|||
// isBrace: false, |
|||
// isBracket: false, |
|||
// isGlob: true, |
|||
// isExtglob: false, |
|||
// isGlobstar: false, |
|||
// negated: true, |
|||
// maxDepth: 2, |
|||
// tokens: [ |
|||
// { value: '!./', depth: 0, isGlob: false, negated: true, isPrefix: true }, |
|||
// { value: 'foo', depth: 1, isGlob: false }, |
|||
// { value: '*.js', depth: 1, isGlob: true } |
|||
// ], |
|||
// slashes: [ 2, 6 ], |
|||
// parts: [ 'foo', '*.js' ] |
|||
// } |
|||
``` |
|||
|
|||
<br> |
|||
|
|||
### Options Examples |
|||
|
|||
#### options.expandRange |
|||
|
|||
**Type**: `function` |
|||
|
|||
**Default**: `undefined` |
|||
|
|||
Custom function for expanding ranges in brace patterns. The [fill-range](https://github.com/jonschlinkert/fill-range) library is ideal for this purpose, or you can use custom code to do whatever you need. |
|||
|
|||
**Example** |
|||
|
|||
The following example shows how to create a glob that matches a folder |
|||
|
|||
```js |
|||
const fill = require('fill-range'); |
|||
const regex = pm.makeRe('foo/{01..25}/bar', { |
|||
expandRange(a, b) { |
|||
return `(${fill(a, b, { toRegex: true })})`; |
|||
} |
|||
}); |
|||
|
|||
console.log(regex); |
|||
//=> /^(?:foo\/((?:0[1-9]|1[0-9]|2[0-5]))\/bar)$/ |
|||
|
|||
console.log(regex.test('foo/00/bar')) // false |
|||
console.log(regex.test('foo/01/bar')) // true |
|||
console.log(regex.test('foo/10/bar')) // true |
|||
console.log(regex.test('foo/22/bar')) // true |
|||
console.log(regex.test('foo/25/bar')) // true |
|||
console.log(regex.test('foo/26/bar')) // false |
|||
``` |
|||
|
|||
#### options.format |
|||
|
|||
**Type**: `function` |
|||
|
|||
**Default**: `undefined` |
|||
|
|||
Custom function for formatting strings before they're matched. |
|||
|
|||
**Example** |
|||
|
|||
```js |
|||
// strip leading './' from strings |
|||
const format = str => str.replace(/^\.\//, ''); |
|||
const isMatch = picomatch('foo/*.js', { format }); |
|||
console.log(isMatch('./foo/bar.js')); //=> true |
|||
``` |
|||
|
|||
#### options.onMatch |
|||
|
|||
```js |
|||
const onMatch = ({ glob, regex, input, output }) => { |
|||
console.log({ glob, regex, input, output }); |
|||
}; |
|||
|
|||
const isMatch = picomatch('*', { onMatch }); |
|||
isMatch('foo'); |
|||
isMatch('bar'); |
|||
isMatch('baz'); |
|||
``` |
|||
|
|||
#### options.onIgnore |
|||
|
|||
```js |
|||
const onIgnore = ({ glob, regex, input, output }) => { |
|||
console.log({ glob, regex, input, output }); |
|||
}; |
|||
|
|||
const isMatch = picomatch('*', { onIgnore, ignore: 'f*' }); |
|||
isMatch('foo'); |
|||
isMatch('bar'); |
|||
isMatch('baz'); |
|||
``` |
|||
|
|||
#### options.onResult |
|||
|
|||
```js |
|||
const onResult = ({ glob, regex, input, output }) => { |
|||
console.log({ glob, regex, input, output }); |
|||
}; |
|||
|
|||
const isMatch = picomatch('*', { onResult, ignore: 'f*' }); |
|||
isMatch('foo'); |
|||
isMatch('bar'); |
|||
isMatch('baz'); |
|||
``` |
|||
|
|||
<br> |
|||
<br> |
|||
|
|||
## Globbing features |
|||
|
|||
* [Basic globbing](#basic-globbing) (Wildcard matching) |
|||
* [Advanced globbing](#advanced-globbing) (extglobs, posix brackets, brace matching) |
|||
|
|||
### Basic globbing |
|||
|
|||
| **Character** | **Description** | |
|||
| --- | --- | |
|||
| `*` | Matches any character zero or more times, excluding path separators. Does _not match_ path separators or hidden files or directories ("dotfiles"), unless explicitly enabled by setting the `dot` option to `true`. | |
|||
| `**` | Matches any character zero or more times, including path separators. Note that `**` will only match path separators (`/`, and `\\` with the `windows` option) when they are the only characters in a path segment. Thus, `foo**/bar` is equivalent to `foo*/bar`, and `foo/a**b/bar` is equivalent to `foo/a*b/bar`, and _more than two_ consecutive stars in a glob path segment are regarded as _a single star_. Thus, `foo/***/bar` is equivalent to `foo/*/bar`. | |
|||
| `?` | Matches any character excluding path separators one time. Does _not match_ path separators or leading dots. | |
|||
| `[abc]` | Matches any characters inside the brackets. For example, `[abc]` would match the characters `a`, `b` or `c`, and nothing else. | |
|||
|
|||
#### Matching behavior vs. Bash |
|||
|
|||
Picomatch's matching features and expected results in unit tests are based on Bash's unit tests and the Bash 4.3 specification, with the following exceptions: |
|||
|
|||
* Bash will match `foo/bar/baz` with `*`. Picomatch only matches nested directories with `**`. |
|||
* Bash greedily matches with negated extglobs. For example, Bash 4.3 says that `!(foo)*` should match `foo` and `foobar`, since the trailing `*` backtracks to match the preceding pattern. This is very memory-inefficient, and IMHO, also incorrect. Picomatch would return `false` for both `foo` and `foobar`. |
|||
|
|||
<br> |
|||
|
|||
### Advanced globbing |
|||
|
|||
* [extglobs](#extglobs) |
|||
* [POSIX brackets](#posix-brackets) |
|||
* [Braces](#brace-expansion) |
|||
|
|||
#### Extglobs |
|||
|
|||
| **Pattern** | **Description** | |
|||
| --- | --- | |
|||
| `@(pattern)` | Match _only one_ consecutive occurrence of `pattern` | |
|||
| `*(pattern)` | Match _zero or more_ consecutive occurrences of `pattern` | |
|||
| `+(pattern)` | Match _one or more_ consecutive occurrences of `pattern` | |
|||
| `?(pattern)` | Match _zero or **one**_ consecutive occurrences of `pattern` | |
|||
| `!(pattern)` | Match _anything but_ `pattern` | |
|||
|
|||
**Examples** |
|||
|
|||
```js |
|||
const pm = require('picomatch'); |
|||
|
|||
// *(pattern) matches ZERO or more of "pattern" |
|||
console.log(pm.isMatch('a', 'a*(z)')); // true |
|||
console.log(pm.isMatch('az', 'a*(z)')); // true |
|||
console.log(pm.isMatch('azzz', 'a*(z)')); // true |
|||
|
|||
// +(pattern) matches ONE or more of "pattern" |
|||
console.log(pm.isMatch('a', 'a+(z)')); // false |
|||
console.log(pm.isMatch('az', 'a+(z)')); // true |
|||
console.log(pm.isMatch('azzz', 'a+(z)')); // true |
|||
|
|||
// supports multiple extglobs |
|||
console.log(pm.isMatch('foo.bar', '!(foo).!(bar)')); // false |
|||
|
|||
// supports nested extglobs |
|||
console.log(pm.isMatch('foo.bar', '!(!(foo)).!(!(bar))')); // true |
|||
``` |
|||
|
|||
#### POSIX brackets |
|||
|
|||
POSIX classes are disabled by default. Enable this feature by setting the `posix` option to true. |
|||
|
|||
**Enable POSIX bracket support** |
|||
|
|||
```js |
|||
console.log(pm.makeRe('[[:word:]]+', { posix: true })); |
|||
//=> /^(?:(?=.)[A-Za-z0-9_]+\/?)$/ |
|||
``` |
|||
|
|||
**Supported POSIX classes** |
|||
|
|||
The following named POSIX bracket expressions are supported: |
|||
|
|||
* `[:alnum:]` - Alphanumeric characters, equivalent to `[a-zA-Z0-9]`. |
|||
* `[:alpha:]` - Alphabetical characters, equivalent to `[a-zA-Z]`. |
|||
* `[:ascii:]` - ASCII characters, equivalent to `[\\x00-\\x7F]`. |
|||
* `[:blank:]` - Space and tab characters, equivalent to `[ \\t]`. |
|||
* `[:cntrl:]` - Control characters, equivalent to `[\\x00-\\x1F\\x7F]`. |
|||
* `[:digit:]` - Numerical digits, equivalent to `[0-9]`. |
|||
* `[:graph:]` - Graph characters, equivalent to `[\\x21-\\x7E]`. |
|||
* `[:lower:]` - Lowercase letters, equivalent to `[a-z]`. |
|||
* `[:print:]` - Print characters, equivalent to `[\\x20-\\x7E ]`. |
|||
* `[:punct:]` - Punctuation and symbols, equivalent to `[\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~]`. |
|||
* `[:space:]` - Extended space characters, equivalent to `[ \\t\\r\\n\\v\\f]`. |
|||
* `[:upper:]` - Uppercase letters, equivalent to `[A-Z]`. |
|||
* `[:word:]` - Word characters (letters, numbers and underscores), equivalent to `[A-Za-z0-9_]`. |
|||
* `[:xdigit:]` - Hexadecimal digits, equivalent to `[A-Fa-f0-9]`. |
|||
|
|||
See the [Bash Reference Manual](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html) for more information. |
|||
|
|||
### Braces |
|||
|
|||
Picomatch does not do brace expansion. For [brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html) and advanced matching with braces, use [micromatch](https://github.com/micromatch/micromatch) instead. Picomatch has very basic support for braces. |
|||
|
|||
### Matching special characters as literals |
|||
|
|||
If you wish to match the following special characters in a filepath, and you want to use these characters in your glob pattern, they must be escaped with backslashes or quotes: |
|||
|
|||
**Special Characters** |
|||
|
|||
Some characters that are used for matching in regular expressions are also regarded as valid file path characters on some platforms. |
|||
|
|||
To match any of the following characters as literals, they must be escaped: `$ ^ * + ? ( ) [ ]` |
|||
|
|||
Examples: |
|||
|
|||
```js |
|||
console.log(pm.makeRe('foo/bar \\(1\\)')); |
|||
console.log(pm.makeRe('foo/bar \\(1\\)')); |
|||
``` |
|||
|
|||
<br> |
|||
<br> |
|||
|
|||
## Library Comparisons |
|||
|
|||
The following table shows which features are supported by [minimatch](https://github.com/isaacs/minimatch), [micromatch](https://github.com/micromatch/micromatch), [picomatch](https://github.com/micromatch/picomatch), [nanomatch](https://github.com/micromatch/nanomatch), [extglob](https://github.com/micromatch/extglob), [braces](https://github.com/micromatch/braces), and [expand-brackets](https://github.com/micromatch/expand-brackets). |
|||
|
|||
| **Feature** | `minimatch` | `micromatch` | `picomatch` | `nanomatch` | `extglob` | `braces` | `expand-brackets` | |
|||
| --- | --- | --- | --- | --- | --- | --- | --- | |
|||
| Wildcard matching (`*?+`) | ✔ | ✔ | ✔ | ✔ | - | - | - | |
|||
| Advanced globbing | ✔ | ✔ | ✔ | - | - | - | - | |
|||
| Brace _matching_ | ✔ | ✔ | ✔ | - | - | ✔ | - | |
|||
| Brace _expansion_ | ✔ | ✔ | - | - | - | ✔ | - | |
|||
| Extglobs | partial | ✔ | ✔ | - | ✔ | - | - | |
|||
| Posix brackets | - | ✔ | ✔ | - | - | - | ✔ | |
|||
| Regular expression syntax | - | ✔ | ✔ | ✔ | ✔ | - | ✔ | |
|||
| File system operations | - | - | - | - | - | - | - | |
|||
|
|||
<br> |
|||
<br> |
|||
|
|||
## Benchmarks |
|||
|
|||
Performance comparison of picomatch and minimatch. |
|||
|
|||
_(Pay special attention to the last three benchmarks. Minimatch freezes on long ranges.)_ |
|||
|
|||
``` |
|||
# .makeRe star (*) |
|||
picomatch x 4,449,159 ops/sec ±0.24% (97 runs sampled) |
|||
minimatch x 632,772 ops/sec ±0.14% (98 runs sampled) |
|||
|
|||
# .makeRe star; dot=true (*) |
|||
picomatch x 3,500,079 ops/sec ±0.26% (99 runs sampled) |
|||
minimatch x 564,916 ops/sec ±0.23% (96 runs sampled) |
|||
|
|||
# .makeRe globstar (**) |
|||
picomatch x 3,261,000 ops/sec ±0.27% (98 runs sampled) |
|||
minimatch x 1,664,766 ops/sec ±0.20% (100 runs sampled) |
|||
|
|||
# .makeRe globstars (**/**/**) |
|||
picomatch x 3,284,469 ops/sec ±0.18% (97 runs sampled) |
|||
minimatch x 1,435,880 ops/sec ±0.34% (95 runs sampled) |
|||
|
|||
# .makeRe with leading star (*.txt) |
|||
picomatch x 3,100,197 ops/sec ±0.35% (99 runs sampled) |
|||
minimatch x 428,347 ops/sec ±0.42% (94 runs sampled) |
|||
|
|||
# .makeRe - basic braces ({a,b,c}*.txt) |
|||
picomatch x 443,578 ops/sec ±1.33% (89 runs sampled) |
|||
minimatch x 107,143 ops/sec ±0.35% (94 runs sampled) |
|||
|
|||
# .makeRe - short ranges ({a..z}*.txt) |
|||
picomatch x 415,484 ops/sec ±0.76% (96 runs sampled) |
|||
minimatch x 14,299 ops/sec ±0.26% (96 runs sampled) |
|||
|
|||
# .makeRe - medium ranges ({1..100000}*.txt) |
|||
picomatch x 395,020 ops/sec ±0.87% (89 runs sampled) |
|||
minimatch x 2 ops/sec ±4.59% (10 runs sampled) |
|||
|
|||
# .makeRe - long ranges ({1..10000000}*.txt) |
|||
picomatch x 400,036 ops/sec ±0.83% (90 runs sampled) |
|||
minimatch (FATAL ERROR: Ineffective mark-compacts near heap limit Allocation failed - JavaScript heap out of memory) |
|||
``` |
|||
|
|||
<br> |
|||
<br> |
|||
|
|||
## Philosophies |
|||
|
|||
The goal of this library is to be blazing fast, without compromising on accuracy. |
|||
|
|||
**Accuracy** |
|||
|
|||
The number one goal of this library is accuracy. However, it's not unusual for different glob implementations to have different rules for matching behavior, even with simple wildcard matching. It gets increasingly more complicated when combinations of different features are combined, like when extglobs are combined with globstars, braces, slashes, and so on: `!(**/{a,b,*/c})`. |
|||
|
|||
Thus, given that there is no canonical glob specification to use as a single source of truth when differences of opinion arise regarding behavior, sometimes we have to implement our best judgement and rely on feedback from users to make improvements. |
|||
|
|||
**Performance** |
|||
|
|||
Although this library performs well in benchmarks, and in most cases it's faster than other popular libraries we benchmarked against, we will always choose accuracy over performance. It's not helpful to anyone if our library is faster at returning the wrong answer. |
|||
|
|||
<br> |
|||
<br> |
|||
|
|||
## About |
|||
|
|||
<details> |
|||
<summary><strong>Contributing</strong></summary> |
|||
|
|||
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). |
|||
|
|||
Please read the [contributing guide](.github/contributing.md) for advice on opening issues, pull requests, and coding standards. |
|||
|
|||
</details> |
|||
|
|||
<details> |
|||
<summary><strong>Running Tests</strong></summary> |
|||
|
|||
Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: |
|||
|
|||
```sh |
|||
npm install && npm test |
|||
``` |
|||
|
|||
</details> |
|||
|
|||
<details> |
|||
<summary><strong>Building docs</strong></summary> |
|||
|
|||
_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ |
|||
|
|||
To generate the readme, run the following command: |
|||
|
|||
```sh |
|||
npm install -g verbose/verb#dev verb-generate-readme && verb |
|||
``` |
|||
|
|||
</details> |
|||
|
|||
### Author |
|||
|
|||
**Jon Schlinkert** |
|||
|
|||
* [GitHub Profile](https://github.com/jonschlinkert) |
|||
* [Twitter Profile](https://twitter.com/jonschlinkert) |
|||
* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) |
|||
|
|||
### License |
|||
|
|||
Copyright © 2017-present, [Jon Schlinkert](https://github.com/jonschlinkert). |
|||
Released under the [MIT License](LICENSE). |
|||
@ -0,0 +1,17 @@ |
|||
'use strict'; |
|||
|
|||
const pico = require('./lib/picomatch'); |
|||
const utils = require('./lib/utils'); |
|||
|
|||
/**
 * Entry point: delegates to lib/picomatch after defaulting the `windows`
 * option to the current platform when the caller left it unset.
 */
function picomatch(glob, options, returnState = false) {
  // `== null` matches both null and undefined ("unset").
  if (options && options.windows == null) {
    // Clone rather than mutate the caller's options object.
    options = { ...options, windows: utils.isWindows() };
  }

  return pico(glob, options, returnState);
}
|||
|
|||
Object.assign(picomatch, pico); |
|||
module.exports = picomatch; |
|||
@ -0,0 +1,180 @@ |
|||
'use strict'; |
|||
|
|||
// On Windows both backslash and forward slash act as path separators.
const WIN_SLASH = '\\\\/';
const WIN_NO_SLASH = `[^${WIN_SLASH}]`;

/**
 * Posix glob regex
 */

const DOT_LITERAL = '\\.';
const PLUS_LITERAL = '\\+';
const QMARK_LITERAL = '\\?';
const SLASH_LITERAL = '\\/';
const ONE_CHAR = '(?=.)'; // lookahead: require at least one character
const QMARK = '[^/]';
const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`;
const START_ANCHOR = `(?:^|${SLASH_LITERAL})`;
const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`; // "." or ".." segment
const NO_DOT = `(?!${DOT_LITERAL})`;
const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`;
const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`;
const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`;
const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`;
const STAR = `${QMARK}*?`; // non-greedy "any run of non-separators"
const SEP = '/';

const POSIX_CHARS = {
  DOT_LITERAL,
  PLUS_LITERAL,
  QMARK_LITERAL,
  SLASH_LITERAL,
  ONE_CHAR,
  QMARK,
  END_ANCHOR,
  DOTS_SLASH,
  NO_DOT,
  NO_DOTS,
  NO_DOT_SLASH,
  NO_DOTS_SLASH,
  QMARK_NO_DOT,
  STAR,
  START_ANCHOR,
  SEP
};

/**
 * Windows glob regex: same shape as POSIX_CHARS, with every
 * slash-sensitive fragment widened to accept `\` as well as `/`.
 */

const WINDOWS_CHARS = {
  ...POSIX_CHARS,

  SLASH_LITERAL: `[${WIN_SLASH}]`,
  QMARK: WIN_NO_SLASH,
  STAR: `${WIN_NO_SLASH}*?`,
  DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`,
  NO_DOT: `(?!${DOT_LITERAL})`,
  NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
  NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`,
  NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
  QMARK_NO_DOT: `[^.${WIN_SLASH}]`,
  START_ANCHOR: `(?:^|[${WIN_SLASH}])`,
  END_ANCHOR: `(?:[${WIN_SLASH}]|$)`,
  SEP: '\\'
};

/**
 * POSIX Bracket Regex — character-class bodies for `[[:name:]]` expressions.
 */

const POSIX_REGEX_SOURCE = {
  alnum: 'a-zA-Z0-9',
  alpha: 'a-zA-Z',
  ascii: '\\x00-\\x7F',
  blank: ' \\t',
  cntrl: '\\x00-\\x1F\\x7F',
  digit: '0-9',
  graph: '\\x21-\\x7E',
  lower: 'a-z',
  print: '\\x20-\\x7E ',
  punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~',
  space: ' \\t\\r\\n\\v\\f',
  upper: 'A-Z',
  word: 'A-Za-z0-9_',
  xdigit: 'A-Fa-f0-9'
};

module.exports = {
  // Maximum allowed length of an input pattern (64 KiB).
  MAX_LENGTH: 1024 * 64,
  POSIX_REGEX_SOURCE,

  // regular expressions
  REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g,
  REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/,
  REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/,
  REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g,
  REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g,
  REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g,

  // Replace globs with equivalent patterns to reduce parsing time.
  REPLACEMENTS: {
    // null prototype so pattern keys can't collide with Object.prototype.
    __proto__: null,
    '***': '*',
    '**/**': '**',
    '**/**/**': '**'
  },

  // Digits
  CHAR_0: 48, /* 0 */
  CHAR_9: 57, /* 9 */

  // Alphabet chars.
  CHAR_UPPERCASE_A: 65, /* A */
  CHAR_LOWERCASE_A: 97, /* a */
  CHAR_UPPERCASE_Z: 90, /* Z */
  CHAR_LOWERCASE_Z: 122, /* z */

  CHAR_LEFT_PARENTHESES: 40, /* ( */
  CHAR_RIGHT_PARENTHESES: 41, /* ) */

  CHAR_ASTERISK: 42, /* * */

  // Non-alphabetic chars.
  CHAR_AMPERSAND: 38, /* & */
  CHAR_AT: 64, /* @ */
  CHAR_BACKWARD_SLASH: 92, /* \ */
  CHAR_CARRIAGE_RETURN: 13, /* \r */
  CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */
  CHAR_COLON: 58, /* : */
  CHAR_COMMA: 44, /* , */
  CHAR_DOT: 46, /* . */
  CHAR_DOUBLE_QUOTE: 34, /* " */
  CHAR_EQUAL: 61, /* = */
  CHAR_EXCLAMATION_MARK: 33, /* ! */
  CHAR_FORM_FEED: 12, /* \f */
  CHAR_FORWARD_SLASH: 47, /* / */
  CHAR_GRAVE_ACCENT: 96, /* ` */
  CHAR_HASH: 35, /* # */
  CHAR_HYPHEN_MINUS: 45, /* - */
  CHAR_LEFT_ANGLE_BRACKET: 60, /* < */
  CHAR_LEFT_CURLY_BRACE: 123, /* { */
  CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */
  CHAR_LINE_FEED: 10, /* \n */
  CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */
  CHAR_PERCENT: 37, /* % */
  CHAR_PLUS: 43, /* + */
  CHAR_QUESTION_MARK: 63, /* ? */
  CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */
  CHAR_RIGHT_CURLY_BRACE: 125, /* } */
  CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */
  CHAR_SEMICOLON: 59, /* ; */
  CHAR_SINGLE_QUOTE: 39, /* ' */
  CHAR_SPACE: 32, /*   */
  CHAR_TAB: 9, /* \t */
  CHAR_UNDERSCORE: 95, /* _ */
  CHAR_VERTICAL_LINE: 124, /* | */
  CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */

  /**
   * Create EXTGLOB_CHARS — the open/close regex fragments used when
   * translating extglob operators (`!`, `?`, `+`, `*`, `@`).
   */

  extglobChars(chars) {
    return {
      '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` },
      '?': { type: 'qmark', open: '(?:', close: ')?' },
      '+': { type: 'plus', open: '(?:', close: ')+' },
      '*': { type: 'star', open: '(?:', close: ')*' },
      '@': { type: 'at', open: '(?:', close: ')' }
    };
  },

  /**
   * Create GLOB_CHARS — select the platform-specific fragment table.
   */

  globChars(win32) {
    return win32 === true ? WINDOWS_CHARS : POSIX_CHARS;
  }
};
|||
1085
node_modules/@parcel/watcher/node_modules/picomatch/lib/parse.js
File diff suppressed because it is too large
View File
File diff suppressed because it is too large
View File
@ -0,0 +1,341 @@ |
|||
'use strict'; |
|||
|
|||
const scan = require('./scan'); |
|||
const parse = require('./parse'); |
|||
const utils = require('./utils'); |
|||
const constants = require('./constants'); |
|||
const isObject = val => val && typeof val === 'object' && !Array.isArray(val); |
|||
|
|||
/** |
|||
* Creates a matcher function from one or more glob patterns. The |
|||
* returned function takes a string to match as its first argument, |
|||
* and returns true if the string is a match. The returned matcher |
|||
* function also takes a boolean as the second argument that, when true, |
|||
* returns an object with additional information. |
|||
* |
|||
* ```js
|
|||
* const picomatch = require('picomatch'); |
|||
* // picomatch(glob[, options]);
|
|||
* |
|||
* const isMatch = picomatch('*.!(*a)'); |
|||
* console.log(isMatch('a.a')); //=> false
|
|||
* console.log(isMatch('a.b')); //=> true
|
|||
* ```
|
|||
* @name picomatch |
|||
* @param {String|Array} `globs` One or more glob patterns. |
|||
* @param {Object=} `options` |
|||
* @return {Function=} Returns a matcher function. |
|||
* @api public |
|||
*/ |
|||
|
|||
const picomatch = (glob, options, returnState = false) => {
  // An array of patterns produces one matcher per pattern; the composite
  // matcher returns the first truthy per-pattern result, or false.
  if (Array.isArray(glob)) {
    const fns = glob.map(input => picomatch(input, options, returnState));
    const arrayMatcher = str => {
      for (const isMatch of fns) {
        const state = isMatch(str);
        if (state) return state;
      }
      return false;
    };
    return arrayMatcher;
  }

  // `glob` may also be a pre-parsed state object from `.parse()`.
  const isState = isObject(glob) && glob.tokens && glob.input;

  if (glob === '' || (typeof glob !== 'string' && !isState)) {
    throw new TypeError('Expected pattern to be a non-empty string');
  }

  const opts = options || {};
  // NOTE(review): despite the name, this holds the `windows` flag; `.test()`
  // uses it to decide whether to normalize backslashes to forward slashes.
  const posix = opts.windows;
  const regex = isState
    ? picomatch.compileRe(glob, options)
    : picomatch.makeRe(glob, options, false, true);

  // makeRe/compileRe attach the parser state to the regex; detach it so the
  // regex handed to `.test()` stays a plain RegExp.
  const state = regex.state;
  delete regex.state;

  // Secondary matcher for `options.ignore`. Callbacks are stripped so that
  // ignore matching doesn't re-trigger the user's handlers.
  let isIgnored = () => false;
  if (opts.ignore) {
    const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };
    isIgnored = picomatch(opts.ignore, ignoreOpts, returnState);
  }

  const matcher = (input, returnObject = false) => {
    const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix });
    const result = { glob, state, regex, posix, input, output, match, isMatch };

    // onResult fires for every input, matched, ignored, or not.
    if (typeof opts.onResult === 'function') {
      opts.onResult(result);
    }

    if (isMatch === false) {
      result.isMatch = false;
      return returnObject ? result : false;
    }

    // Matched the pattern but excluded by the ignore pattern.
    if (isIgnored(input)) {
      if (typeof opts.onIgnore === 'function') {
        opts.onIgnore(result);
      }
      result.isMatch = false;
      return returnObject ? result : false;
    }

    if (typeof opts.onMatch === 'function') {
      opts.onMatch(result);
    }
    return returnObject ? result : true;
  };

  // Expose parser state on the matcher for implementors/debugging.
  if (returnState) {
    matcher.state = state;
  }

  return matcher;
};
|||
|
|||
/** |
|||
* Test `input` with the given `regex`. This is used by the main |
|||
* `picomatch()` function to test the input string. |
|||
* |
|||
* ```js
|
|||
* const picomatch = require('picomatch'); |
|||
* // picomatch.test(input, regex[, options]);
|
|||
* |
|||
* console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); |
|||
* // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' }
|
|||
* ```
|
|||
* @param {String} `input` String to test. |
|||
* @param {RegExp} `regex` |
|||
* @return {Object} Returns an object with matching info. |
|||
* @api public |
|||
*/ |
|||
|
|||
picomatch.test = (input, regex, options, { glob, posix } = {}) => {
  if (typeof input !== 'string') {
    throw new TypeError('Expected input to be a string');
  }

  // An empty string can never match anything.
  if (input === '') {
    return { isMatch: false, output: '' };
  }

  const opts = options || {};
  const format = opts.format || (posix ? utils.toPosixSlashes : null);

  // Cheap path 1: the raw input is literally equal to the pattern.
  let match = input === glob;
  let output = match && format ? format(input) : input;

  // Cheap path 2: the formatted input is literally equal to the pattern.
  if (!match) {
    output = format ? format(input) : input;
    match = output === glob;
  }

  // Fall back to the regex; always used when capture groups are requested.
  if (!match || opts.capture === true) {
    match = opts.matchBase === true || opts.basename === true
      ? picomatch.matchBase(input, regex, options, posix)
      : regex.exec(output);
  }

  return { isMatch: Boolean(match), match, output };
};
|||
|
|||
/** |
|||
* Match the basename of a filepath. |
|||
* |
|||
* ```js
|
|||
* const picomatch = require('picomatch'); |
|||
* // picomatch.matchBase(input, glob[, options]);
|
|||
* console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true
|
|||
* ```
|
|||
* @param {String} `input` String to test. |
|||
* @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe). |
|||
* @return {Boolean} |
|||
* @api public |
|||
*/ |
|||
|
|||
picomatch.matchBase = (input, glob, options) => {
  // Accept either a precompiled RegExp or a glob pattern string.
  const re = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options);
  // Only the basename of the path is tested.
  return re.test(utils.basename(input));
};
|||
|
|||
/** |
|||
* Returns true if **any** of the given glob `patterns` match the specified `string`. |
|||
* |
|||
* ```js
|
|||
* const picomatch = require('picomatch'); |
|||
* // picomatch.isMatch(string, patterns[, options]);
|
|||
* |
|||
* console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true
|
|||
* console.log(picomatch.isMatch('a.a', 'b.*')); //=> false
|
|||
* ```
|
|||
* @param {String|Array} str The string to test. |
|||
* @param {String|Array} patterns One or more glob patterns to use for matching. |
|||
* @param {Object} [options] See available [options](#options). |
|||
* @return {Boolean} Returns true if any patterns match `str` |
|||
* @api public |
|||
*/ |
|||
|
|||
picomatch.isMatch = (str, patterns, options) => {
  // Build a matcher for the pattern(s), then apply it to the string.
  const isMatch = picomatch(patterns, options);
  return isMatch(str);
};
|||
|
|||
/** |
|||
* Parse a glob pattern to create the source string for a regular |
|||
* expression. |
|||
* |
|||
* ```js
|
|||
* const picomatch = require('picomatch'); |
|||
* const result = picomatch.parse(pattern[, options]); |
|||
* ```
|
|||
* @param {String} `pattern` |
|||
* @param {Object} `options` |
|||
* @return {Object} Returns an object with useful properties and output to be used as a regex source string. |
|||
* @api public |
|||
*/ |
|||
|
|||
picomatch.parse = (pattern, options) => {
  // Arrays are parsed element-by-element.
  if (Array.isArray(pattern)) {
    return pattern.map(p => picomatch.parse(p, options));
  }
  // Fast paths are disabled so callers always get the full token state.
  return parse(pattern, { ...options, fastpaths: false });
};
|||
|
|||
/** |
|||
* Scan a glob pattern to separate the pattern into segments. |
|||
* |
|||
* ```js
|
|||
* const picomatch = require('picomatch'); |
|||
* // picomatch.scan(input[, options]);
|
|||
* |
|||
* const result = picomatch.scan('!./foo/*.js'); |
|||
* console.log(result); |
|||
* { prefix: '!./', |
|||
* input: '!./foo/*.js', |
|||
* start: 3, |
|||
* base: 'foo', |
|||
* glob: '*.js', |
|||
* isBrace: false, |
|||
* isBracket: false, |
|||
* isGlob: true, |
|||
* isExtglob: false, |
|||
* isGlobstar: false, |
|||
* negated: true } |
|||
* ```
|
|||
* @param {String} `input` Glob pattern to scan. |
|||
* @param {Object} `options` |
|||
* @return {Object} Returns an object with |
|||
* @api public |
|||
*/ |
|||
|
|||
picomatch.scan = (input, options) => {
  // Thin wrapper over lib/scan.
  return scan(input, options);
};
|||
|
|||
/** |
|||
* Compile a regular expression from the `state` object returned by the |
|||
* [parse()](#parse) method. |
|||
* |
|||
* @param {Object} `state` |
|||
* @param {Object} `options` |
|||
* @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser. |
|||
* @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. |
|||
* @return {RegExp} |
|||
* @api public |
|||
*/ |
|||
|
|||
picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => {
  // Implementors may ask for the raw regex source instead of a RegExp.
  if (returnOutput === true) {
    return state.output;
  }

  const opts = options || {};
  // `contains` drops the ^/$ anchors so the pattern may match anywhere.
  const [prepend, append] = opts.contains ? ['', ''] : ['^', '$'];

  let source = `${prepend}(?:${state.output})${append}`;
  if (state && state.negated === true) {
    // A negated pattern matches anything that does NOT match the inner source.
    source = `^(?!${source}).*$`;
  }

  const regex = picomatch.toRegex(source, options);
  if (returnState === true) {
    // Attach parser state for implementors/debugging.
    regex.state = state;
  }

  return regex;
};
|||
|
|||
/** |
|||
* Create a regular expression from a parsed glob pattern. |
|||
* |
|||
* ```js
|
|||
* const picomatch = require('picomatch'); |
|||
* const state = picomatch.parse('*.js'); |
|||
* // picomatch.compileRe(state[, options]);
|
|||
* |
|||
* console.log(picomatch.compileRe(state)); |
|||
* //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
|
|||
* ```
|
|||
* @param {String} `state` The object returned from the `.parse` method. |
|||
* @param {Object} `options` |
|||
* @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. |
|||
* @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression. |
|||
* @return {RegExp} Returns a regex created from the given pattern. |
|||
* @api public |
|||
*/ |
|||
|
|||
picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => {
  if (!input || typeof input !== 'string') {
    throw new TypeError('Expected a non-empty string');
  }

  let parsed = { negated: false, fastpaths: true };

  // Try the cheap fast-path parser for patterns starting with '.' or '*'.
  if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) {
    parsed.output = parse.fastpaths(input, options);
  }

  // Fall back to the full parser when no fast path produced output.
  if (!parsed.output) {
    parsed = parse(input, options);
  }

  return picomatch.compileRe(parsed, options, returnOutput, returnState);
};
|||
|
|||
/** |
|||
* Create a regular expression from the given regex source string. |
|||
* |
|||
* ```js
|
|||
* const picomatch = require('picomatch'); |
|||
* // picomatch.toRegex(source[, options]);
|
|||
* |
|||
* const { output } = picomatch.parse('*.js'); |
|||
* console.log(picomatch.toRegex(output)); |
|||
* //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
|
|||
* ```
|
|||
* @param {String} `source` Regular expression source string. |
|||
* @param {Object} `options` |
|||
* @return {RegExp} |
|||
* @api public |
|||
*/ |
|||
|
|||
picomatch.toRegex = (source, options) => {
  const opts = options || {};
  try {
    return new RegExp(source, opts.flags || (opts.nocase ? 'i' : ''));
  } catch (err) {
    // Surface compile errors in debug mode; otherwise return a regex
    // that can never match anything.
    if (opts.debug === true) throw err;
    return /$^/;
  }
};
|||
|
|||
/** |
|||
* Picomatch constants. |
|||
* @return {Object} |
|||
*/ |
|||
|
|||
picomatch.constants = constants; |
|||
|
|||
/** |
|||
* Expose "picomatch" |
|||
*/ |
|||
|
|||
module.exports = picomatch; |
|||
@ -0,0 +1,391 @@ |
|||
'use strict'; |
|||
|
|||
const utils = require('./utils'); |
|||
const { |
|||
CHAR_ASTERISK, /* * */ |
|||
CHAR_AT, /* @ */ |
|||
CHAR_BACKWARD_SLASH, /* \ */ |
|||
CHAR_COMMA, /* , */ |
|||
CHAR_DOT, /* . */ |
|||
CHAR_EXCLAMATION_MARK, /* ! */ |
|||
CHAR_FORWARD_SLASH, /* / */ |
|||
CHAR_LEFT_CURLY_BRACE, /* { */ |
|||
CHAR_LEFT_PARENTHESES, /* ( */ |
|||
CHAR_LEFT_SQUARE_BRACKET, /* [ */ |
|||
CHAR_PLUS, /* + */ |
|||
CHAR_QUESTION_MARK, /* ? */ |
|||
CHAR_RIGHT_CURLY_BRACE, /* } */ |
|||
CHAR_RIGHT_PARENTHESES, /* ) */ |
|||
CHAR_RIGHT_SQUARE_BRACKET /* ] */ |
|||
} = require('./constants'); |
|||
|
|||
// True when `code` is a path separator char code: posix "/" or Windows "\".
const isPathSeparator = code => {
  return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH;
};
|||
|
|||
const depth = token => { |
|||
if (token.isPrefix !== true) { |
|||
token.depth = token.isGlobstar ? Infinity : 1; |
|||
} |
|||
}; |
|||
|
|||
/** |
|||
* Quickly scans a glob pattern and returns an object with a handful of |
|||
* useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), |
|||
* `glob` (the actual pattern), `negated` (true if the path starts with `!` but not |
|||
* with `!(`) and `negatedExtglob` (true if the path starts with `!(`). |
|||
* |
|||
* ```js
|
|||
* const pm = require('picomatch'); |
|||
* console.log(pm.scan('foo/bar/*.js')); |
|||
* { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' } |
|||
* ```
|
|||
* @param {String} `str` |
|||
* @param {Object} `options` |
|||
* @return {Object} Returns an object with tokens and regex source string. |
|||
* @api public |
|||
*/ |
|||
|
|||
/**
 * Quickly scan a glob pattern, splitting it into a non-glob `base` and the
 * `glob` remainder, and reporting flags such as `isGlob`, `isBrace`,
 * `isBracket`, `negated` and `negatedExtglob`. With `opts.tokens` /
 * `opts.parts` the whole string is scanned and per-segment tokens/parts are
 * collected.
 *
 * FIX: inside the plain-parenthesis (`opts.noparen`) scan-to-end loop, the
 * escape check tested `CHAR_LEFT_PARENTHESES` instead of
 * `CHAR_BACKWARD_SLASH`, so escaped characters inside parens were not
 * skipped and a nested "(" incorrectly set the `backslashes` flag while
 * swallowing the following character.
 *
 * @param {String} `input`
 * @param {Object} `options`
 * @return {Object} Returns an object with tokens and regex source string.
 * @api public
 */

const scan = (input, options) => {
  const opts = options || {};

  const length = input.length - 1;
  const scanToEnd = opts.parts === true || opts.scanToEnd === true;
  const slashes = [];
  const tokens = [];
  const parts = [];

  let str = input;
  let index = -1;
  let start = 0;
  let lastIndex = 0;
  let isBrace = false;
  let isBracket = false;
  let isGlob = false;
  let isExtglob = false;
  let isGlobstar = false;
  let braceEscaped = false;
  let backslashes = false;
  let negated = false;
  let negatedExtglob = false;
  let finished = false;
  let braces = 0;
  let prev;
  let code;
  let token = { value: '', depth: 0, isGlob: false };

  const eos = () => index >= length;
  const peek = () => str.charCodeAt(index + 1);
  const advance = () => {
    prev = code;
    return str.charCodeAt(++index);
  };

  while (index < length) {
    code = advance();
    let next;

    // Escaped character: remember we saw backslashes and skip the next char.
    if (code === CHAR_BACKWARD_SLASH) {
      backslashes = token.backslashes = true;
      code = advance();

      if (code === CHAR_LEFT_CURLY_BRACE) {
        braceEscaped = true;
      }
      continue;
    }

    // Brace section: scan until the matching "}" (tracking nesting),
    // detecting ".." ranges and "," lists which make it a glob.
    if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) {
      braces++;

      while (eos() !== true && (code = advance())) {
        if (code === CHAR_BACKWARD_SLASH) {
          backslashes = token.backslashes = true;
          advance();
          continue;
        }

        if (code === CHAR_LEFT_CURLY_BRACE) {
          braces++;
          continue;
        }

        if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) {
          isBrace = token.isBrace = true;
          isGlob = token.isGlob = true;
          finished = true;

          if (scanToEnd === true) {
            continue;
          }

          break;
        }

        if (braceEscaped !== true && code === CHAR_COMMA) {
          isBrace = token.isBrace = true;
          isGlob = token.isGlob = true;
          finished = true;

          if (scanToEnd === true) {
            continue;
          }

          break;
        }

        if (code === CHAR_RIGHT_CURLY_BRACE) {
          braces--;

          if (braces === 0) {
            braceEscaped = false;
            isBrace = token.isBrace = true;
            finished = true;
            break;
          }
        }
      }

      if (scanToEnd === true) {
        continue;
      }

      break;
    }

    // Path separator: close the current token and start a new one.
    if (code === CHAR_FORWARD_SLASH) {
      slashes.push(index);
      tokens.push(token);
      token = { value: '', depth: 0, isGlob: false };

      if (finished === true) continue;
      // A leading "./" is a prefix, not part of the base path.
      if (prev === CHAR_DOT && index === (start + 1)) {
        start += 2;
        continue;
      }

      lastIndex = index + 1;
      continue;
    }

    // Extglob: one of + @ * ? ! immediately followed by "(".
    if (opts.noext !== true) {
      const isExtglobChar = code === CHAR_PLUS
        || code === CHAR_AT
        || code === CHAR_ASTERISK
        || code === CHAR_QUESTION_MARK
        || code === CHAR_EXCLAMATION_MARK;

      if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) {
        isGlob = token.isGlob = true;
        isExtglob = token.isExtglob = true;
        finished = true;
        if (code === CHAR_EXCLAMATION_MARK && index === start) {
          negatedExtglob = true;
        }

        if (scanToEnd === true) {
          while (eos() !== true && (code = advance())) {
            if (code === CHAR_BACKWARD_SLASH) {
              backslashes = token.backslashes = true;
              code = advance();
              continue;
            }

            if (code === CHAR_RIGHT_PARENTHESES) {
              isGlob = token.isGlob = true;
              finished = true;
              break;
            }
          }
          continue;
        }
        break;
      }
    }

    if (code === CHAR_ASTERISK) {
      if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true;
      isGlob = token.isGlob = true;
      finished = true;

      if (scanToEnd === true) {
        continue;
      }
      break;
    }

    if (code === CHAR_QUESTION_MARK) {
      isGlob = token.isGlob = true;
      finished = true;

      if (scanToEnd === true) {
        continue;
      }
      break;
    }

    // Bracket expression: glob only when a closing "]" is actually found.
    if (code === CHAR_LEFT_SQUARE_BRACKET) {
      while (eos() !== true && (next = advance())) {
        if (next === CHAR_BACKWARD_SLASH) {
          backslashes = token.backslashes = true;
          advance();
          continue;
        }

        if (next === CHAR_RIGHT_SQUARE_BRACKET) {
          isBracket = token.isBracket = true;
          isGlob = token.isGlob = true;
          finished = true;
          break;
        }
      }

      if (scanToEnd === true) {
        continue;
      }

      break;
    }

    // Leading "!" negates the whole pattern (unless disabled).
    if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) {
      negated = token.negated = true;
      start++;
      continue;
    }

    // Plain parenthesized group (non-extglob).
    if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) {
      isGlob = token.isGlob = true;

      if (scanToEnd === true) {
        while (eos() !== true && (code = advance())) {
          // FIX: was `code === CHAR_LEFT_PARENTHESES`, which mislabeled a
          // nested "(" as an escape and skipped the character after it.
          if (code === CHAR_BACKWARD_SLASH) {
            backslashes = token.backslashes = true;
            code = advance();
            continue;
          }

          if (code === CHAR_RIGHT_PARENTHESES) {
            finished = true;
            break;
          }
        }
        continue;
      }
      break;
    }

    if (isGlob === true) {
      finished = true;

      if (scanToEnd === true) {
        continue;
      }

      break;
    }
  }

  if (opts.noext === true) {
    isExtglob = false;
    isGlob = false;
  }

  let base = str;
  let prefix = '';
  let glob = '';

  // Split off the prefix ("./", "!") accumulated in `start`.
  if (start > 0) {
    prefix = str.slice(0, start);
    str = str.slice(start);
    lastIndex -= start;
  }

  // Split the remainder into the literal base path and the glob part.
  if (base && isGlob === true && lastIndex > 0) {
    base = str.slice(0, lastIndex);
    glob = str.slice(lastIndex);
  } else if (isGlob === true) {
    base = '';
    glob = str;
  } else {
    base = str;
  }

  // Trim a single trailing separator from the base (but keep a lone "/").
  if (base && base !== '' && base !== '/' && base !== str) {
    if (isPathSeparator(base.charCodeAt(base.length - 1))) {
      base = base.slice(0, -1);
    }
  }

  if (opts.unescape === true) {
    if (glob) glob = utils.removeBackslashes(glob);

    if (base && backslashes === true) {
      base = utils.removeBackslashes(base);
    }
  }

  const state = {
    prefix,
    input,
    start,
    base,
    glob,
    isBrace,
    isBracket,
    isGlob,
    isExtglob,
    isGlobstar,
    negated,
    negatedExtglob
  };

  if (opts.tokens === true) {
    state.maxDepth = 0;
    if (!isPathSeparator(code)) {
      tokens.push(token);
    }
    state.tokens = tokens;
  }

  if (opts.parts === true || opts.tokens === true) {
    let prevIndex;

    // Slice the input on the recorded slash positions into parts, and fill
    // in token values/depths.
    for (let idx = 0; idx < slashes.length; idx++) {
      const n = prevIndex ? prevIndex + 1 : start;
      const i = slashes[idx];
      const value = input.slice(n, i);
      if (opts.tokens) {
        if (idx === 0 && start !== 0) {
          tokens[idx].isPrefix = true;
          tokens[idx].value = prefix;
        } else {
          tokens[idx].value = value;
        }
        depth(tokens[idx]);
        state.maxDepth += tokens[idx].depth;
      }
      if (idx !== 0 || value !== '') {
        parts.push(value);
      }
      prevIndex = i;
    }

    // Trailing segment after the last slash.
    if (prevIndex && prevIndex + 1 < input.length) {
      const value = input.slice(prevIndex + 1);
      parts.push(value);

      if (opts.tokens) {
        tokens[tokens.length - 1].value = value;
        depth(tokens[tokens.length - 1]);
        state.maxDepth += tokens[tokens.length - 1].depth;
      }
    }

    state.slashes = slashes;
    state.parts = parts;
  }

  return state;
};
|||
|
|||
module.exports = scan; |
|||
@ -0,0 +1,72 @@ |
|||
/*global navigator*/ |
|||
'use strict'; |
|||
|
|||
const { |
|||
REGEX_BACKSLASH, |
|||
REGEX_REMOVE_BACKSLASH, |
|||
REGEX_SPECIAL_CHARS, |
|||
REGEX_SPECIAL_CHARS_GLOBAL |
|||
} = require('./constants'); |
|||
|
|||
// True for plain objects (non-null, typeof object, not an array).
exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
// True when `str` contains at least one regex special character.
exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str);
// True when `str` is exactly one regex special character.
exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str);
// Backslash-escape every regex special character in `str`.
exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1');
// Convert Windows backslashes to posix forward slashes.
exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/');
|||
|
|||
exports.isWindows = () => { |
|||
if (typeof navigator !== 'undefined' && navigator.platform) { |
|||
const platform = navigator.platform.toLowerCase(); |
|||
return platform === 'win32' || platform === 'windows'; |
|||
} |
|||
|
|||
if (typeof process !== 'undefined' && process.platform) { |
|||
return process.platform === 'win32'; |
|||
} |
|||
|
|||
return false; |
|||
}; |
|||
|
|||
// Strip escape backslashes from `str`, keeping the characters they escaped.
exports.removeBackslashes = str =>
  str.replace(REGEX_REMOVE_BACKSLASH, match => (match === '\\' ? '' : match));
|||
|
|||
exports.escapeLast = (input, char, lastIdx) => { |
|||
const idx = input.lastIndexOf(char, lastIdx); |
|||
if (idx === -1) return input; |
|||
if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1); |
|||
return `${input.slice(0, idx)}\\${input.slice(idx)}`; |
|||
}; |
|||
|
|||
exports.removePrefix = (input, state = {}) => { |
|||
let output = input; |
|||
if (output.startsWith('./')) { |
|||
output = output.slice(2); |
|||
state.prefix = './'; |
|||
} |
|||
return output; |
|||
}; |
|||
|
|||
exports.wrapOutput = (input, state = {}, options = {}) => { |
|||
const prepend = options.contains ? '' : '^'; |
|||
const append = options.contains ? '' : '$'; |
|||
|
|||
let output = `${prepend}(?:${input})${append}`; |
|||
if (state.negated === true) { |
|||
output = `(?:^(?!${output}).*$)`; |
|||
} |
|||
return output; |
|||
}; |
|||
|
|||
exports.basename = (path, { windows } = {}) => { |
|||
const segs = path.split(windows ? /[\\/]/ : '/'); |
|||
const last = segs[segs.length - 1]; |
|||
|
|||
if (last === '') { |
|||
return segs[segs.length - 2]; |
|||
} |
|||
|
|||
return last; |
|||
}; |
|||
@ -0,0 +1,83 @@ |
|||
{ |
|||
"name": "picomatch", |
|||
"description": "Blazing fast and accurate glob matcher written in JavaScript, with no dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.", |
|||
"version": "4.0.3", |
|||
"homepage": "https://github.com/micromatch/picomatch", |
|||
"author": "Jon Schlinkert (https://github.com/jonschlinkert)", |
|||
"funding": "https://github.com/sponsors/jonschlinkert", |
|||
"repository": "micromatch/picomatch", |
|||
"bugs": { |
|||
"url": "https://github.com/micromatch/picomatch/issues" |
|||
}, |
|||
"license": "MIT", |
|||
"files": [ |
|||
"index.js", |
|||
"posix.js", |
|||
"lib" |
|||
], |
|||
"sideEffects": false, |
|||
"main": "index.js", |
|||
"engines": { |
|||
"node": ">=12" |
|||
}, |
|||
"scripts": { |
|||
"lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache --report-unused-disable-directives --ignore-path .gitignore .", |
|||
"mocha": "mocha --reporter dot", |
|||
"test": "npm run lint && npm run mocha", |
|||
"test:ci": "npm run test:cover", |
|||
"test:cover": "nyc npm run mocha" |
|||
}, |
|||
"devDependencies": { |
|||
"eslint": "^8.57.0", |
|||
"fill-range": "^7.0.1", |
|||
"gulp-format-md": "^2.0.0", |
|||
"mocha": "^10.4.0", |
|||
"nyc": "^15.1.0", |
|||
"time-require": "github:jonschlinkert/time-require" |
|||
}, |
|||
"keywords": [ |
|||
"glob", |
|||
"match", |
|||
"picomatch" |
|||
], |
|||
"nyc": { |
|||
"reporter": [ |
|||
"html", |
|||
"lcov", |
|||
"text-summary" |
|||
] |
|||
}, |
|||
"verb": { |
|||
"toc": { |
|||
"render": true, |
|||
"method": "preWrite", |
|||
"maxdepth": 3 |
|||
}, |
|||
"layout": "empty", |
|||
"tasks": [ |
|||
"readme" |
|||
], |
|||
"plugins": [ |
|||
"gulp-format-md" |
|||
], |
|||
"lint": { |
|||
"reflinks": true |
|||
}, |
|||
"related": { |
|||
"list": [ |
|||
"braces", |
|||
"micromatch" |
|||
] |
|||
}, |
|||
"reflinks": [ |
|||
"braces", |
|||
"expand-brackets", |
|||
"extglob", |
|||
"fill-range", |
|||
"micromatch", |
|||
"minimatch", |
|||
"nanomatch", |
|||
"picomatch" |
|||
] |
|||
} |
|||
} |
|||
@ -0,0 +1,3 @@ |
|||
'use strict'; |
|||
|
|||
module.exports = require('./lib/picomatch'); |
|||
@ -0,0 +1,88 @@ |
|||
{ |
|||
"name": "@parcel/watcher", |
|||
"version": "2.5.6", |
|||
"main": "index.js", |
|||
"types": "index.d.ts", |
|||
"repository": { |
|||
"type": "git", |
|||
"url": "https://github.com/parcel-bundler/watcher.git" |
|||
}, |
|||
"description": "A native C++ Node module for querying and subscribing to filesystem events. Used by Parcel 2.", |
|||
"license": "MIT", |
|||
"publishConfig": { |
|||
"access": "public" |
|||
}, |
|||
"funding": { |
|||
"type": "opencollective", |
|||
"url": "https://opencollective.com/parcel" |
|||
}, |
|||
"files": [ |
|||
"index.js", |
|||
"index.js.flow", |
|||
"index.d.ts", |
|||
"wrapper.js", |
|||
"package.json", |
|||
"README.md", |
|||
"LICENSE", |
|||
"src", |
|||
"scripts/build-from-source.js", |
|||
"binding.gyp" |
|||
], |
|||
"scripts": { |
|||
"prebuild": "prebuildify --napi --strip --tag-libc", |
|||
"format": "prettier --write \"./**/*.{js,json,md}\"", |
|||
"build": "node-gyp rebuild", |
|||
"install": "node scripts/build-from-source.js", |
|||
"test": "mocha" |
|||
}, |
|||
"engines": { |
|||
"node": ">= 10.0.0" |
|||
}, |
|||
"husky": { |
|||
"hooks": { |
|||
"pre-commit": "lint-staged" |
|||
} |
|||
}, |
|||
"lint-staged": { |
|||
"*.{js,json,md}": [ |
|||
"prettier --write", |
|||
"git add" |
|||
] |
|||
}, |
|||
"dependencies": { |
|||
"detect-libc": "^2.0.3", |
|||
"is-glob": "^4.0.3", |
|||
"node-addon-api": "^7.0.0", |
|||
"picomatch": "^4.0.3" |
|||
}, |
|||
"devDependencies": { |
|||
"esbuild": "^0.19.8", |
|||
"fs-extra": "^10.0.0", |
|||
"husky": "^7.0.2", |
|||
"lint-staged": "^11.1.2", |
|||
"mocha": "^9.1.1", |
|||
"napi-wasm": "^1.1.0", |
|||
"prebuildify": "^6.0.1", |
|||
"prettier": "^2.3.2" |
|||
}, |
|||
"binary": { |
|||
"napi_versions": [ |
|||
3 |
|||
] |
|||
}, |
|||
"optionalDependencies": { |
|||
"@parcel/watcher-darwin-x64": "2.5.6", |
|||
"@parcel/watcher-darwin-arm64": "2.5.6", |
|||
"@parcel/watcher-win32-x64": "2.5.6", |
|||
"@parcel/watcher-win32-arm64": "2.5.6", |
|||
"@parcel/watcher-win32-ia32": "2.5.6", |
|||
"@parcel/watcher-linux-x64-glibc": "2.5.6", |
|||
"@parcel/watcher-linux-x64-musl": "2.5.6", |
|||
"@parcel/watcher-linux-arm64-glibc": "2.5.6", |
|||
"@parcel/watcher-linux-arm64-musl": "2.5.6", |
|||
"@parcel/watcher-linux-arm-glibc": "2.5.6", |
|||
"@parcel/watcher-linux-arm-musl": "2.5.6", |
|||
"@parcel/watcher-android-arm64": "2.5.6", |
|||
"@parcel/watcher-freebsd-x64": "2.5.6" |
|||
} |
|||
} |
|||
@ -0,0 +1,13 @@ |
|||
#!/usr/bin/env node

const { spawn } = require('child_process');

// Only compile the native addon when the user explicitly opted in
// (npm's --build-from-source flag); otherwise the prebuilt binary is used.
if (process.env.npm_config_build_from_source === 'true') {
  build();
}

// Run `node-gyp rebuild`, forwarding its stdio and exit status.
function build() {
  spawn('node-gyp', ['rebuild'], { stdio: 'inherit', shell: true }).on('exit', function (code) {
    // `code` is null when node-gyp was killed by a signal; report failure
    // instead of implicitly exiting 0 in that case.
    process.exit(code === null ? 1 : code);
  });
}
|||
@ -0,0 +1,186 @@ |
|||
#ifdef FS_EVENTS
|
|||
#include "macos/FSEventsBackend.hh"
|
|||
#endif
|
|||
#ifdef WATCHMAN
|
|||
#include "watchman/WatchmanBackend.hh"
|
|||
#endif
|
|||
#ifdef WINDOWS
|
|||
#include "windows/WindowsBackend.hh"
|
|||
#endif
|
|||
#ifdef INOTIFY
|
|||
#include "linux/InotifyBackend.hh"
|
|||
#endif
|
|||
#ifdef KQUEUE
|
|||
#include "kqueue/KqueueBackend.hh"
|
|||
#endif
|
|||
#ifdef __wasm32__
|
|||
#include "wasm/WasmBackend.hh"
|
|||
#endif
|
|||
#include "shared/BruteForceBackend.hh"
|
|||
|
|||
#include "Backend.hh"
|
|||
#include <unordered_map>
|
|||
|
|||
// Registry of running backends keyed by backend name ("fs-events",
// "watchman", "inotify", ...). Heap-allocated and never freed — presumably
// to sidestep static destruction-order issues during process shutdown.
static std::unordered_map<std::string, std::shared_ptr<Backend>>& getSharedBackends() {
  static std::unordered_map<std::string, std::shared_ptr<Backend>> *sharedBackends =
    new std::unordered_map<std::string, std::shared_ptr<Backend>>();
  return *sharedBackends;
}
|||
|
|||
// Construct a backend instance by name. "default" picks the best backend
// compiled in for this platform, trying them in the order below; an unknown
// name falls through to nullptr so the caller can substitute the default.
std::shared_ptr<Backend> getBackend(std::string backend) {
  // Use FSEvents on macOS by default.
  // Use watchman by default if available on other platforms.
  // Fall back to brute force.
  #ifdef FS_EVENTS
  if (backend == "fs-events" || backend == "default") {
    return std::make_shared<FSEventsBackend>();
  }
  #endif
  #ifdef WATCHMAN
  if ((backend == "watchman" || backend == "default") && WatchmanBackend::checkAvailable()) {
    return std::make_shared<WatchmanBackend>();
  }
  #endif
  #ifdef WINDOWS
  if (backend == "windows" || backend == "default") {
    return std::make_shared<WindowsBackend>();
  }
  #endif
  #ifdef INOTIFY
  if (backend == "inotify" || backend == "default") {
    return std::make_shared<InotifyBackend>();
  }
  #endif
  #ifdef KQUEUE
  if (backend == "kqueue" || backend == "default") {
    return std::make_shared<KqueueBackend>();
  }
  #endif
  #ifdef __wasm32__
  if (backend == "wasm" || backend == "default") {
    return std::make_shared<WasmBackend>();
  }
  #endif
  if (backend == "brute-force" || backend == "default") {
    return std::make_shared<BruteForceBackend>();
  }

  // Unrecognized backend name.
  return nullptr;
}
|||
|
|||
// Return the shared backend registered under `backend`, creating and
// starting it on first use. Unknown names fall back to "default" (which
// always resolves, since brute-force matches "default").
// NOTE(review): the shared map is read and written without a lock here —
// presumably all calls happen on one thread; confirm before reusing.
std::shared_ptr<Backend> Backend::getShared(std::string backend) {
  auto found = getSharedBackends().find(backend);
  if (found != getSharedBackends().end()) {
    return found->second;
  }

  auto result = getBackend(backend);
  if (!result) {
    return getShared("default");
  }

  // Start the backend's event loop before publishing it in the registry.
  result->run();
  getSharedBackends().emplace(backend, result);
  return result;
}
|||
|
|||
// Remove `backend` from the shared registry (reverse lookup by raw pointer).
// Erasing the map entry drops the registry's shared_ptr, which may destroy
// the backend.
void removeShared(Backend *backend) {
  for (auto it = getSharedBackends().begin(); it != getSharedBackends().end(); it++) {
    if (it->second.get() == backend) {
      getSharedBackends().erase(it);
      break;
    }
  }

  // Free up memory.
  if (getSharedBackends().size() == 0) {
    getSharedBackends().rehash(0);
  }
}
|||
|
|||
// Launch the backend's event loop on its own thread and block until the
// backend calls notifyStarted(). On wasm there are no threads, so start()
// runs inline instead.
void Backend::run() {
  #ifndef __wasm32__
  mThread = std::thread([this] () {
    try {
      start();
    } catch (std::exception &err) {
      handleError(err);
    }
  });

  // Wait until the thread reports readiness so callers can subscribe safely
  // as soon as run() returns.
  if (mThread.joinable()) {
    mStartedSignal.wait();
  }
  #else
  try {
    start();
  } catch (std::exception &err) {
    handleError(err);
  }
  #endif
}
|||
|
|||
// Unblock run(): called by backend implementations once their event loop is
// ready to accept subscriptions.
void Backend::notifyStarted() {
  mStartedSignal.notify();
}

// Default start() for backends with no event loop of their own: report
// readiness immediately.
void Backend::start() {
  notifyStarted();
}
|||
|
|||
// Shut down the worker thread before the object goes away.
Backend::~Backend() {
  #ifndef __wasm32__
  // Wait for thread to stop
  if (mThread.joinable()) {
    // If the backend is being destroyed from the thread itself, detach, otherwise join.
    if (mThread.get_id() == std::this_thread::get_id()) {
      mThread.detach();
    } else {
      mThread.join();
    }
  }
  #endif
}
|||
|
|||
// Subscribe `watcher` to this backend's events. Idempotent: re-watching an
// already-subscribed watcher is a no-op.
void Backend::watch(WatcherRef watcher) {
  std::unique_lock<std::mutex> lock(mMutex);
  auto res = mSubscriptions.find(watcher);
  if (res == mSubscriptions.end()) {
    try {
      this->subscribe(watcher);
      mSubscriptions.insert(watcher);
    } catch (std::exception&) {
      // Subscription failed: drop our registry reference (which may tear the
      // shared backend down if nothing else subscribes) and propagate.
      unref();
      throw;
    }
  }
}
|||
|
|||
// Remove `watcher`'s subscription; when it was actually subscribed, also
// unsubscribe at the backend level and drop a registry reference (tearing
// the backend down if it was the last subscriber).
void Backend::unwatch(WatcherRef watcher) {
  std::unique_lock<std::mutex> lock(mMutex);
  size_t deleted = mSubscriptions.erase(watcher);
  if (deleted > 0) {
    this->unsubscribe(watcher);
    unref();
  }
}
|||
|
|||
// Retire this backend from the shared registry once it has no subscribers.
// NOTE(review): callers hold mMutex; removeShared() may destroy this object
// while the caller's lock on its mMutex is still alive — confirm the
// lifetime here is safe.
void Backend::unref() {
  if (mSubscriptions.size() == 0) {
    removeShared(this);
  }
}
|||
|
|||
// A single watcher failed: remove its subscription, then forward the error
// to that watcher only.
void Backend::handleWatcherError(WatcherError &err) {
  unwatch(err.mWatcher);
  err.mWatcher->notifyError(err);
}
|||
|
|||
// A backend-wide fatal error: notify every subscribed watcher, then retire
// this backend from the shared registry.
// NOTE(review): removeShared() may destroy this object while the lock on its
// own mMutex is still held — confirm destruction order is safe.
void Backend::handleError(std::exception &err) {
  std::unique_lock<std::mutex> lock(mMutex);
  for (auto it = mSubscriptions.begin(); it != mSubscriptions.end(); it++) {
    (*it)->notifyError(err);
  }

  removeShared(this);
}
|||
@ -0,0 +1,37 @@ |
|||
#ifndef BACKEND_H
|
|||
#define BACKEND_H
|
|||
|
|||
#include "Event.hh"
|
|||
#include "Watcher.hh"
|
|||
#include "Signal.hh"
|
|||
#include <thread>
|
|||
|
|||
// Abstract base for the platform filesystem-watching backends (FSEvents,
// Watchman, inotify, kqueue, Windows, wasm, brute force). Instances are
// shared per backend name via getShared() and run their event loop on a
// private thread (except on wasm).
class Backend {
public:
  virtual ~Backend();
  // Spawn the worker thread (off-wasm) and block until notifyStarted().
  void run();
  // Called by start() implementations once the event loop is ready.
  void notifyStarted();

  // Default implementation just reports readiness; real backends override.
  virtual void start();
  // Persist the watcher's current tree state to `snapshotPath`.
  virtual void writeSnapshot(WatcherRef watcher, std::string *snapshotPath) = 0;
  // Report changes since the snapshot stored at `snapshotPath`.
  virtual void getEventsSince(WatcherRef watcher, std::string *snapshotPath) = 0;
  virtual void subscribe(WatcherRef watcher) = 0;
  virtual void unsubscribe(WatcherRef watcher) = 0;

  // Get (or create and start) the shared backend registered under `backend`.
  static std::shared_ptr<Backend> getShared(std::string backend);

  void watch(WatcherRef watcher);
  void unwatch(WatcherRef watcher);
  // Retire this backend from the shared registry when unsubscribed by all.
  void unref();
  void handleWatcherError(WatcherError &err);

  std::mutex mMutex;
  std::thread mThread;
private:
  // NOTE(review): <unordered_set>, <mutex> and <memory> are not included
  // directly here — presumably pulled in transitively via the headers above;
  // confirm.
  std::unordered_set<WatcherRef> mSubscriptions;
  Signal mStartedSignal;

  void handleError(std::exception &err);
};
|||
|
|||
#endif
|
|||
@ -0,0 +1,113 @@ |
|||
#include "Debounce.hh"
|
|||
|
|||
#ifdef __wasm32__
|
|||
extern "C" void on_timeout(void *ctx) { |
|||
Debounce *debounce = (Debounce *)ctx; |
|||
debounce->notify(); |
|||
} |
|||
#endif
|
|||
|
|||
// Return the process-wide Debounce instance, creating it lazily and
// re-creating it after all previous owners have released it (weak_ptr
// caching, so the instance dies with its last user).
// NOTE(review): no lock guards the weak_ptr — presumably only called from a
// single thread; confirm.
std::shared_ptr<Debounce> Debounce::getShared() {
  static std::weak_ptr<Debounce> sharedInstance;
  std::shared_ptr<Debounce> shared = sharedInstance.lock();
  if (!shared) {
    shared = std::make_shared<Debounce>();
    sharedInstance = shared;
  }

  return shared;
}
|||
|
|||
// Start the debounce worker thread. On wasm there is no thread — the host
// timer (set_timeout/on_timeout) drives notifications instead.
Debounce::Debounce() {
  mRunning = true;
  #ifndef __wasm32__
  mThread = std::thread([this] () {
    loop();
  });
  #endif
}
|||
|
|||
// Stop the worker: clear the running flag, wake the loop so it observes the
// flag, and join the thread.
Debounce::~Debounce() {
  mRunning = false;
  #ifndef __wasm32__
  mWaitSignal.notify();
  mThread.join();
  #endif
}
|||
|
|||
// Register a callback to be invoked on each debounced notification; `key`
// identifies the registration for later removal.
void Debounce::add(void *key, std::function<void()> cb) {
  std::unique_lock<std::mutex> lock(mMutex);
  mCallbacks.emplace(key, cb);
}

// Unregister the callback previously added under `key`.
void Debounce::remove(void *key) {
  std::unique_lock<std::mutex> lock(mMutex);
  mCallbacks.erase(key);
}
|||
|
|||
// Called when new filesystem events arrive: wakes the worker thread, or on
// wasm runs the debounce decision inline.
// NOTE(review): on wasm this calls notifyIfReady() while holding mMutex, and
// notifyIfReady() can call notify(), which locks mMutex again — verify this
// cannot self-deadlock in the wasm build.
void Debounce::trigger() {
  std::unique_lock<std::mutex> lock(mMutex);
  #ifdef __wasm32__
  notifyIfReady();
  #else
  mWaitSignal.notify();
  #endif
}
|||
|
|||
#ifndef __wasm32__
// Worker thread body: sleep until trigger() fires, then run the debounce
// decision; exits when the destructor clears mRunning and wakes the signal.
void Debounce::loop() {
  while (mRunning) {
    mWaitSignal.wait();
    if (!mRunning) {
      break;
    }

    notifyIfReady();
  }
}
#endif
|
|||
|
|||
// Decide whether to flush callbacks now or keep debouncing.
void Debounce::notifyIfReady() {
  if (!mRunning) {
    return;
  }

  // If we haven't seen an event in more than the maximum wait time, notify callbacks immediately
  // to ensure that we don't wait forever. Otherwise, wait for the minimum wait time and batch
  // subsequent fast changes. This also means the first file change in a batch is notified immediately,
  // separately from the rest of the batch. This seems like an acceptable tradeoff if the common case
  // is that only a single file was updated at a time.
  auto time = std::chrono::steady_clock::now();
  if ((time - mLastTime) > std::chrono::milliseconds(MAX_WAIT_TIME)) {
    mLastTime = time;
    notify();
  } else {
    wait();
  }
}
|||
|
|||
// Arm the debounce delay. On wasm, (re)schedule a host timer that will call
// notify() via on_timeout. Otherwise, block for MIN_WAIT_TIME: if no new
// trigger arrives in that window (timeout), flush the callbacks.
void Debounce::wait() {
  #ifdef __wasm32__
  clear_timeout(mTimeout);
  mTimeout = set_timeout(MIN_WAIT_TIME, this);
  #else
  auto status = mWaitSignal.waitFor(std::chrono::milliseconds(MIN_WAIT_TIME));
  if (mRunning && (status == std::cv_status::timeout)) {
    notify();
  }
  #endif
}
|||
|
|||
// Flush: invoke every registered callback and record the delivery time.
// NOTE(review): locks mMutex — see the reentrancy note on trigger() for the
// wasm path.
void Debounce::notify() {
  std::unique_lock<std::mutex> lock(mMutex);

  mLastTime = std::chrono::steady_clock::now();
  for (auto it = mCallbacks.begin(); it != mCallbacks.end(); it++) {
    auto cb = it->second;
    cb();
  }

  #ifndef __wasm32__
  mWaitSignal.reset();
  #endif
}
|||
@ -0,0 +1,49 @@ |
|||
#ifndef DEBOUNCE_H
|
|||
#define DEBOUNCE_H
|
|||
|
|||
#include <thread>
|
|||
#include <unordered_map>
|
|||
#include <functional>
|
|||
#include "Signal.hh"
|
|||
|
|||
#define MIN_WAIT_TIME 50
|
|||
#define MAX_WAIT_TIME 500
|
|||
|
|||
#ifdef __wasm32__
|
|||
extern "C" { |
|||
int set_timeout(int ms, void *ctx); |
|||
void clear_timeout(int timeout); |
|||
void on_timeout(void *ctx); |
|||
}; |
|||
#endif
|
|||
|
|||
// Batches rapid successions of filesystem events: deliveries are paced by
// MIN_WAIT_TIME, and forced through after MAX_WAIT_TIME since the last
// delivery so a steady event stream cannot starve callbacks.
class Debounce {
public:
  // Process-wide shared instance (created lazily, dies with its last owner).
  static std::shared_ptr<Debounce> getShared();

  Debounce();
  ~Debounce();

  // Register / unregister a callback keyed by an opaque pointer.
  void add(void *key, std::function<void()> cb);
  void remove(void *key);
  // Signal that new events arrived.
  void trigger();
  // Invoke all registered callbacks now.
  void notify();

private:
  // Cleared by the destructor to stop the worker loop.
  // NOTE(review): read across threads without atomics — confirm.
  bool mRunning;
  std::mutex mMutex;  // guards mCallbacks (and, on wasm, the timing state)
  #ifdef __wasm32__
  int mTimeout;       // handle of the pending host timer
  #else
  Signal mWaitSignal; // wakes/paces the worker thread
  std::thread mThread;
  #endif
  std::unordered_map<void *, std::function<void()>> mCallbacks;
  std::chrono::time_point<std::chrono::steady_clock> mLastTime; // last delivery

  void loop();
  void notifyIfReady();
  void wait();
};
|||
|
|||
#endif
|
|||
@ -0,0 +1,164 @@ |
|||
#include "DirTree.hh"
|
|||
#include <inttypes.h>
|
|||
|
|||
// "Meyer's singleton", construction is ordered by use, likewise (reverse) for destruction.
|
|||
// https://stackoverflow.com/a/17713799
|
|||
// https://laristra.github.io/flecsi/src/developer-guide/patterns/meyers_singleton.html
|
|||
// "Meyer's singleton", construction is ordered by use, likewise (reverse) for destruction.
// https://stackoverflow.com/a/17713799
// https://laristra.github.io/flecsi/src/developer-guide/patterns/meyers_singleton.html
// Mutex guarding the shared DirTree cache (and, as used below, the trees'
// entry maps themselves).
static std::mutex& mDirCacheMutex() {
  static std::mutex mutex;
  return mutex;
}

// Cache of live DirTrees keyed by root path; weak_ptrs so a tree is freed
// when its last external owner releases it.
static std::unordered_map<std::string, std::weak_ptr<DirTree>>& dirTreeCache() {
  static std::unordered_map<std::string, std::weak_ptr<DirTree>> cache;
  return cache;
}
|||
|
|||
// shared_ptr deleter that removes the tree from the cache before deleting
// it, so the cache does not keep a dead entry for that root.
struct DirTreeDeleter {
  void operator()(DirTree *tree) {
    std::lock_guard<std::mutex> lock(mDirCacheMutex());
    std::unordered_map<std::string, std::weak_ptr<DirTree>> &cache = dirTreeCache();
    cache.erase(tree->root);
    delete tree;

    // Free up memory.
    if (cache.size() == 0) {
      cache.rehash(0);
    }
  }
};
|||
|
|||
std::shared_ptr<DirTree> DirTree::getCached(std::string root) { |
|||
std::lock_guard<std::mutex> lock(mDirCacheMutex()); |
|||
std::unordered_map<std::string, std::weak_ptr<DirTree>> &cache = dirTreeCache(); |
|||
|
|||
auto found = cache.find(root); |
|||
std::shared_ptr<DirTree> tree; |
|||
|
|||
// Use cached tree, or create an empty one.
|
|||
if (found != cache.end()) { |
|||
tree = found->second.lock(); |
|||
} else { |
|||
tree = std::shared_ptr<DirTree>(new DirTree(root), DirTreeDeleter()); |
|||
cache.emplace(root, tree); |
|||
} |
|||
|
|||
return tree; |
|||
} |
|||
|
|||
// Deserialize a tree from a snapshot file previously produced by write():
// an entry count followed by that many serialized DirEntry records. A failed
// count read leaves the tree empty (but still marked complete).
DirTree::DirTree(std::string root, FILE *f) : root(root), isComplete(true) {
  size_t size;
  if (fscanf(f, "%zu", &size)) {
    for (size_t i = 0; i < size; i++) {
      DirEntry entry(f);
      entries.emplace(entry.path, entry);
    }
  }
}
|||
|
|||
// Internal find method that has no lock
|
|||
DirEntry *DirTree::_find(std::string path) { |
|||
auto found = entries.find(path); |
|||
if (found == entries.end()) { |
|||
return NULL; |
|||
} |
|||
|
|||
return &found->second; |
|||
} |
|||
|
|||
// Insert an entry for `path` (keeping any existing one — emplace does not
// overwrite) and return a pointer to the stored entry.
// NOTE(review): serializes on the global cache mutex rather than a per-tree
// lock; getChanges() below uses snapshot->mMutex instead — confirm this
// mixed scheme is intentional.
DirEntry *DirTree::add(std::string path, uint64_t mtime, bool isDir) {
  std::lock_guard<std::mutex> lock(mDirCacheMutex());

  DirEntry entry(path, mtime, isDir);
  auto it = entries.emplace(entry.path, entry);
  return &it.first->second;
}
|||
|
|||
// Locked lookup wrapper around _find(); null when `path` is not tracked.
DirEntry *DirTree::find(std::string path) {
  std::lock_guard<std::mutex> lock(mDirCacheMutex());
  return _find(path);
}
|||
|
|||
// Update the mtime recorded for `path`, if tracked. Returns the entry or
// null when the path is unknown.
DirEntry *DirTree::update(std::string path, uint64_t mtime) {
  std::lock_guard<std::mutex> lock(mDirCacheMutex());

  DirEntry *found = _find(path);
  if (found) {
    found->mtime = mtime;
  }

  return found;
}
|||
|
|||
// Remove `path` from the tree; when it is a directory, also remove every
// entry beneath it (identified by the "path + separator" key prefix).
void DirTree::remove(std::string path) {
  std::lock_guard<std::mutex> lock(mDirCacheMutex());

  DirEntry *found = _find(path);

  // Remove all sub-entries if this is a directory
  if (found && found->isDir) {
    std::string pathStart = path + DIR_SEP;
    for (auto it = entries.begin(); it != entries.end();) {
      // rfind(prefix, 0) == 0 is a "starts with" test.
      if (it->first.rfind(pathStart, 0) == 0) {
        it = entries.erase(it);
      } else {
        it++;
      }
    }
  }

  entries.erase(path);
}
|||
|
|||
// Serialize the tree to a snapshot stream: entry count on its own line,
// followed by each entry in DirEntry::write() format. Mirrors the
// DirTree(root, f) constructor.
void DirTree::write(FILE *f) {
  std::lock_guard<std::mutex> lock(mDirCacheMutex());

  fprintf(f, "%zu\n", entries.size());
  for (auto it = entries.begin(); it != entries.end(); it++) {
    it->second.write(f);
  }
}
|||
|
|||
// Diff this tree against `snapshot`, emitting create events for entries the
// snapshot lacks, update events for files whose mtime changed, and remove
// events for snapshot entries no longer present here.
// NOTE(review): locks the global cache mutex for `this` but a per-tree
// snapshot->mMutex for the snapshot, while every other method in this file
// locks only the global mutex — confirm this asymmetry is intentional.
void DirTree::getChanges(DirTree *snapshot, EventList &events) {
  std::lock_guard<std::mutex> lock(mDirCacheMutex());
  std::lock_guard<std::mutex> snapshotLock(snapshot->mMutex);

  for (auto it = entries.begin(); it != entries.end(); it++) {
    auto found = snapshot->entries.find(it->first);
    if (found == snapshot->entries.end()) {
      events.create(it->second.path);
    } else if (found->second.mtime != it->second.mtime && !found->second.isDir && !it->second.isDir) {
      // mtime changes on directories are ignored; only files get updates.
      events.update(it->second.path);
    }
  }

  for (auto it = snapshot->entries.begin(); it != snapshot->entries.end(); it++) {
    size_t count = entries.count(it->first);
    if (count == 0) {
      events.remove(it->second.path);
    }
  }
}
|||
|
|||
// Builds an entry from its components; backend `state` starts out unset.
DirEntry::DirEntry(std::string p, uint64_t t, bool d)
  : path(p),
    mtime(t),
    isDir(d),
    state(NULL) {}
|||
|
|||
// Deserializes an entry written by DirEntry::write: a length-prefixed path
// immediately followed by "<mtime> <isDir>\n".
// Fix: the original left `state` (always) and `mtime`/`isDir` (on short or
// failed reads) uninitialized; `state` is later interpreted as a backend
// handle (e.g. cast to an fd), so an indeterminate value is undefined
// behavior. Initialize everything to safe defaults first.
DirEntry::DirEntry(FILE *f) {
  mtime = 0;
  isDir = false;
  state = NULL;

  size_t size;
  if (fscanf(f, "%zu", &size)) {
    path.resize(size);
    if (fread(&path[0], sizeof(char), size, f)) {
      int d = 0;
      fscanf(f, "%" PRIu64 " %d\n", &mtime, &d);
      isDir = d == 1;
    }
  }
}
|||
|
|||
// Serializes as "<path length><path><mtime> <isDir>\n". No separator is
// needed between the length digits and the path: the reader parses the digits
// with %zu and then freads exactly that many bytes.
void DirEntry::write(FILE *f) const {
  fprintf(f, "%zu%s%" PRIu64 " %d\n", path.size(), path.c_str(), mtime, isDir);
}
|||
@ -0,0 +1,50 @@ |
|||
#ifndef DIR_TREE_H
|
|||
#define DIR_TREE_H
|
|||
|
|||
#include <string>
|
|||
#include <unordered_map>
|
|||
#include <memory>
|
|||
#include "Event.hh"
|
|||
|
|||
#ifdef _WIN32
|
|||
#define DIR_SEP "\\"
|
|||
#else
|
|||
#define DIR_SEP "/"
|
|||
#endif
|
|||
|
|||
// A single file or directory tracked by a DirTree, keyed by absolute path.
struct DirEntry {
  std::string path;
  uint64_t mtime;   // last-modified time (backend-specific encoding)
  bool isDir;
  // Opaque per-backend handle (e.g. a kqueue fd stored as a pointer-sized
  // int). Fix: default-initialize so entries created by any constructor never
  // carry an indeterminate pointer.
  mutable void *state = nullptr;

  DirEntry(std::string p, uint64_t t, bool d);
  DirEntry(FILE *f);
  void write(FILE *f) const;
  // Entries are identified by path alone.
  bool operator==(const DirEntry &other) const {
    return path == other.path;
  }
};
|||
|
|||
// An in-memory model of a watched directory hierarchy, shared between
// backends via getCached(). Public mutators lock internally.
class DirTree {
public:
  // Returns the cached tree for `root`, creating it if necessary.
  static std::shared_ptr<DirTree> getCached(std::string root);
  DirTree(std::string root) : root(root), isComplete(false) {}
  // Deserializes a tree previously written with write().
  DirTree(std::string root, FILE *f);
  DirEntry *add(std::string path, uint64_t mtime, bool isDir);
  DirEntry *find(std::string path);
  DirEntry *update(std::string path, uint64_t mtime);
  void remove(std::string path);
  void write(FILE *f);
  // Diffs this tree against `snapshot`, appending create/update/remove events.
  void getChanges(DirTree *snapshot, EventList &events);

  std::mutex mMutex;
  std::string root;
  // presumably set once a full directory scan has populated the tree —
  // confirm against the backend that flips it.
  bool isComplete;
  std::unordered_map<std::string, DirEntry> entries;

private:
  // Lookup without taking the lock; callers must already hold it.
  DirEntry *_find(std::string path);
};
|||
|
|||
#endif
|
|||
@ -0,0 +1,109 @@ |
|||
#ifndef EVENT_H
|
|||
#define EVENT_H
|
|||
|
|||
#include <string>
|
|||
#include <node_api.h>
|
|||
#include "wasm/include.h"
|
|||
#include <napi.h>
|
|||
#include <mutex>
|
|||
#include <map>
|
|||
#include <optional>
|
|||
|
|||
using namespace Napi; |
|||
|
|||
struct Event { |
|||
std::string path; |
|||
bool isCreated; |
|||
bool isDeleted; |
|||
Event(std::string path) : path(path), isCreated(false), isDeleted(false) {} |
|||
|
|||
Value toJS(const Env& env) { |
|||
EscapableHandleScope scope(env); |
|||
Object res = Object::New(env); |
|||
std::string type = isCreated ? "create" : isDeleted ? "delete" : "update"; |
|||
res.Set(String::New(env, "path"), String::New(env, path.c_str())); |
|||
res.Set(String::New(env, "type"), String::New(env, type.c_str())); |
|||
return scope.Escape(res); |
|||
} |
|||
}; |
|||
|
|||
// Thread-safe accumulator of file-change events, keyed by path so repeated
// changes to the same path collapse into a single Event. All public methods
// take mMutex.
class EventList {
public:
  void create(std::string path) {
    std::lock_guard<std::mutex> l(mMutex);
    Event *event = internalUpdate(path);
    if (event->isDeleted) {
      // Assume update event when rapidly removed and created
      // https://github.com/parcel-bundler/watcher/issues/72
      event->isDeleted = false;
    } else {
      event->isCreated = true;
    }
  }

  Event *update(std::string path) {
    std::lock_guard<std::mutex> l(mMutex);
    return internalUpdate(path);
  }

  void remove(std::string path) {
    std::lock_guard<std::mutex> l(mMutex);
    Event *event = internalUpdate(path);
    event->isDeleted = true;
  }

  size_t size() {
    std::lock_guard<std::mutex> l(mMutex);
    return mEvents.size();
  }

  // Copies out the current events. Entries that were both created and deleted
  // within the same batch cancel out and are skipped.
  std::vector<Event> getEvents() {
    std::lock_guard<std::mutex> l(mMutex);
    std::vector<Event> eventsCloneVector;
    for(auto it = mEvents.begin(); it != mEvents.end(); ++it) {
      if (!(it->second.isCreated && it->second.isDeleted)) {
        eventsCloneVector.push_back(it->second);
      }
    }
    return eventsCloneVector;
  }

  void clear() {
    std::lock_guard<std::mutex> l(mMutex);
    mEvents.clear();
    mError.reset();
  }

  // Records only the first error; subsequent errors are dropped.
  void error(std::string err) {
    std::lock_guard<std::mutex> l(mMutex);
    if (!mError.has_value()) {
      mError.emplace(err);
    }
  }

  bool hasError() {
    std::lock_guard<std::mutex> l(mMutex);
    return mError.has_value();
  }

  // Returns the recorded error, or "" when none has occurred.
  std::string getError() {
    std::lock_guard<std::mutex> l(mMutex);
    return mError.value_or("");
  }

private:
  mutable std::mutex mMutex;
  // std::map keeps node addresses stable across inserts, so the Event*
  // returned by internalUpdate stays valid while more events are added.
  std::map<std::string, Event> mEvents;
  std::optional<std::string> mError;
  // Find-or-insert for `path`; caller must hold mMutex.
  Event *internalUpdate(std::string path) {
    auto found = mEvents.find(path);
    if (found == mEvents.end()) {
      auto it = mEvents.emplace(path, Event(path));
      return &it.first->second;
    }

    return &found->second;
  }
};
|||
|
|||
#endif
|
|||
@ -0,0 +1,22 @@ |
|||
#include "Glob.hh"
|
|||
|
|||
#ifdef __wasm32__
|
|||
extern "C" bool wasm_regex_match(const char *s, const char *regex); |
|||
#endif
|
|||
|
|||
// Stores the raw pattern, precomputes its hash for cheap set membership, and
// (natively) compiles it as a std::regex.
Glob::Glob(std::string raw) {
  mHash = std::hash<std::string>{}(raw);
  mRaw = raw;
#ifndef __wasm32__
  mRegex = std::regex(raw);
#endif
}
|||
|
|||
// Returns true when `relative_path` matches this pattern.
bool Glob::isIgnored(std::string relative_path) const {
  // Use native JS regex engine for wasm to reduce binary size.
#ifdef __wasm32__
  return wasm_regex_match(relative_path.c_str(), mRaw.c_str());
#else
  // Whole-string match (regex_match, not regex_search).
  return std::regex_match(relative_path, mRegex);
#endif
}
|||
@ -0,0 +1,34 @@ |
|||
#ifndef GLOB_H
|
|||
#define GLOB_H
|
|||
|
|||
#include <unordered_set>
|
|||
#include <regex>
|
|||
|
|||
// A compiled ignore pattern. The hash of the raw pattern string is
// precomputed so Globs are cheap to store in unordered containers.
struct Glob {
  std::size_t mHash;   // hash of mRaw, computed once in the constructor
  std::string mRaw;    // original pattern text (compiled as a regex natively)
#ifndef __wasm32__
  std::regex mRegex;   // on wasm, matching is delegated to the JS regex engine
#endif

  Glob(std::string raw);

  // Hash is compared first as a fast reject before the string comparison.
  bool operator==(const Glob &other) const {
    return mHash == other.mHash && mRaw == other.mRaw;
  }

  // True when `relative_path` matches this pattern.
  bool isIgnored(std::string relative_path) const;
};
|||
|
|||
namespace std |
|||
{ |
|||
template <> |
|||
struct hash<Glob> |
|||
{ |
|||
size_t operator()(const Glob& g) const { |
|||
return g.mHash; |
|||
} |
|||
}; |
|||
} |
|||
|
|||
#endif
|
|||
@ -0,0 +1,101 @@ |
|||
#ifndef PROMISE_RUNNER_H
|
|||
#define PROMISE_RUNNER_H
|
|||
|
|||
#include <node_api.h>
|
|||
#include "wasm/include.h"
|
|||
#include <napi.h>
|
|||
|
|||
using namespace Napi; |
|||
|
|||
// Base class for asynchronous N-API work backed by a JS Promise. Subclasses
// override execute() (runs on a libuv worker thread) and getResult() (runs on
// the JS thread). Instances are heap-allocated and delete themselves in
// onWorkComplete — callers must not retain them after queue().
class PromiseRunner {
public:
  const Env env;
  Promise::Deferred deferred;

  PromiseRunner(Env env) : env(env), deferred(Promise::Deferred::New(env)) {
    napi_status status = napi_create_async_work(env, nullptr, env.Undefined(),
      onExecute, onWorkComplete, this, &work);
    if (status != napi_ok) {
      // Could not create the work item: surface napi's last error (if any)
      // as a JS exception and leave `work` null so queue() is a no-op.
      work = nullptr;
      const napi_extended_error_info *error_info = 0;
      napi_get_last_error_info(env, &error_info);
      if (error_info->error_message) {
        Error::New(env, error_info->error_message).ThrowAsJavaScriptException();
      } else {
        Error::New(env).ThrowAsJavaScriptException();
      }
    }
  }

  virtual ~PromiseRunner() {}

  // Queues the async work and returns the promise that will be settled by
  // onWorkComplete.
  Value queue() {
    if (work) {
      napi_status status = napi_queue_async_work(env, work);
      if (status != napi_ok) {
        onError(Error::New(env));
      }
    }

    return deferred.Promise();
  }

private:
  napi_async_work work;
  std::string error;  // message captured on the worker thread, "" on success

  // Worker-thread entry point: runs the subclass work and captures any
  // std::exception message for delivery back on the JS thread.
  static void onExecute(napi_env env, void *this_pointer) {
    PromiseRunner* self = (PromiseRunner*) this_pointer;
    try {
      self->execute();
    } catch (std::exception &err) {
      self->error = err.what();
    }
  }

  // JS-thread completion: settles the promise and frees the runner. On any
  // napi failure it falls through to the error path below.
  static void onWorkComplete(napi_env env, napi_status status, void *this_pointer) {
    PromiseRunner* self = (PromiseRunner*) this_pointer;
    if (status != napi_cancelled) {
      HandleScope scope(self->env);
      if (status == napi_ok) {
        status = napi_delete_async_work(self->env, self->work);
        if (status == napi_ok) {
          if (self->error.size() == 0) {
            self->onOK();
          } else {
            self->onError(Error::New(self->env, self->error));
          }
          delete self;
          return;
        }
      }
    }

    // fallthrough for error handling
    const napi_extended_error_info *error_info = 0;
    napi_get_last_error_info(env, &error_info);
    if (error_info->error_message){
      self->onError(Error::New(env, error_info->error_message));
    } else {
      self->onError(Error::New(env));
    }
    delete self;
  }

  // Subclass hook: the actual work, off the JS thread.
  virtual void execute() {}
  // Subclass hook: value to resolve the promise with (defaults to null).
  virtual Value getResult() {
    return env.Null();
  }

  void onOK() {
    HandleScope scope(env);
    Value result = getResult();
    deferred.Resolve(result);
  }

  void onError(const Error &e) {
    deferred.Reject(e.Value());
  }
};
|||
|
|||
#endif
|
|||
@ -0,0 +1,46 @@ |
|||
#ifndef SIGNAL_H
|
|||
#define SIGNAL_H
|
|||
|
|||
#include <mutex>
|
|||
#include <condition_variable>
|
|||
|
|||
// Simple latched signal: notify() releases current and future wait()ers until
// reset() clears the flag.
class Signal {
public:
  Signal() : mFlag(false), mWaiting(false) {}
  // Blocks until notify() has been called (flag-checked, so it tolerates
  // spurious wakeups and a notify that raced ahead of the wait).
  void wait() {
    std::unique_lock<std::mutex> lock(mMutex);
    while (!mFlag) {
      mWaiting = true;
      mCond.wait(lock);
    }
  }

  // NOTE(review): unlike wait(), this does not consult mFlag, so it can
  // return early on a spurious wakeup and ignores a notify() that fired
  // before the call — confirm callers (e.g. timed debounce loops) expect
  // plain condition-variable timing semantics here.
  std::cv_status waitFor(std::chrono::milliseconds ms) {
    std::unique_lock<std::mutex> lock(mMutex);
    return mCond.wait_for(lock, ms);
  }

  void notify() {
    std::unique_lock<std::mutex> lock(mMutex);
    mFlag = true;
    mCond.notify_all();
  }

  void reset() {
    std::unique_lock<std::mutex> lock(mMutex);
    mFlag = false;
    mWaiting = false;
  }

  // NOTE(review): reads mWaiting without holding mMutex — a best-effort
  // check, but technically a data race.
  bool isWaiting() {
    return mWaiting;
  }

private:
  bool mFlag;     // latched by notify(), cleared by reset()
  bool mWaiting;  // set once some thread has blocked in wait()
  std::mutex mMutex;
  std::condition_variable mCond;
};
|||
|
|||
#endif
|
|||
@ -0,0 +1,241 @@ |
|||
#include "Watcher.hh"
|
|||
#include <unordered_set>
|
|||
|
|||
using namespace Napi; |
|||
|
|||
struct WatcherHash { |
|||
std::size_t operator() (WatcherRef const &k) const { |
|||
return std::hash<std::string>()(k->mDir); |
|||
} |
|||
}; |
|||
|
|||
struct WatcherCompare { |
|||
size_t operator() (WatcherRef const &a, WatcherRef const &b) const { |
|||
return *a == *b; |
|||
} |
|||
}; |
|||
|
|||
// Process-wide registry of live watchers. Allocated once and never freed —
// presumably deliberate, to sidestep static destruction order — so keep the
// leak.
static std::unordered_set<WatcherRef, WatcherHash, WatcherCompare>& getSharedWatchers() {
  static auto *registry = new std::unordered_set<WatcherRef, WatcherHash, WatcherCompare>();
  return *registry;
}
|||
|
|||
// Returns the registry watcher matching this configuration, registering a
// freshly built one when no equivalent watcher exists yet.
WatcherRef Watcher::getShared(std::string dir, std::unordered_set<std::string> ignorePaths, std::unordered_set<Glob> ignoreGlobs) {
  WatcherRef candidate = std::make_shared<Watcher>(dir, ignorePaths, ignoreGlobs);
  auto &registry = getSharedWatchers();

  auto existing = registry.find(candidate);
  if (existing != registry.end()) {
    return *existing;
  }

  registry.insert(candidate);
  return candidate;
}
|||
|
|||
// Drops the registry entry whose shared_ptr points at `watcher`, if any.
void removeShared(Watcher *watcher) {
  auto &registry = getSharedWatchers();
  for (auto it = registry.begin(); it != registry.end(); ++it) {
    if (it->get() == watcher) {
      registry.erase(it);
      break;
    }
  }

  // Free up memory.
  if (registry.empty()) {
    registry.rehash(0);
  }
}
|||
|
|||
// Registers this watcher with the shared debouncer; triggerCallbacks will be
// invoked from the debounce thread when a batch is flushed.
// NOTE: the lambda captures a raw `this` — the destructor must deregister
// before the object is freed (and does).
Watcher::Watcher(std::string dir, std::unordered_set<std::string> ignorePaths, std::unordered_set<Glob> ignoreGlobs)
  : mDir(dir),
    mIgnorePaths(ignorePaths),
    mIgnoreGlobs(ignoreGlobs) {
  mDebounce = Debounce::getShared();
  mDebounce->add(this, [this] () {
    triggerCallbacks();
  });
}
|||
|
|||
// Deregisters from the shared debouncer so its thread can no longer call into
// this (about to be freed) object.
Watcher::~Watcher() {
  mDebounce->remove(this);
}
|||
|
|||
// Blocks the calling thread until notify() is invoked.
// NOTE(review): there is no predicate/flag, so a spurious wakeup — or a
// notify() that fires before this call — is not handled; confirm callers
// tolerate that.
void Watcher::wait() {
  std::unique_lock<std::mutex> lk(mMutex);
  mCond.wait(lk);
}
|||
|
|||
// Wakes any thread blocked in wait(), and, when there are subscribers and
// pending events, asks the debouncer to schedule a callback flush.
void Watcher::notify() {
  std::unique_lock<std::mutex> lk(mMutex);
  mCond.notify_all();

  if (mCallbacks.size() > 0 && mEvents.size() > 0) {
    // We must release our lock before calling into the debouncer
    // to avoid a deadlock: the debouncer thread itself will require
    // our lock from its thread when calling into `triggerCallbacks`
    // while holding its own debouncer lock.
    lk.unlock();
    mDebounce->trigger();
  }
}
|||
|
|||
// Heap-allocated snapshot of an (error, events) batch handed across threads
// to the JS callback; freed by callJSFunction after delivery.
struct CallbackData {
  std::string error;           // "" when the batch carries no error
  std::vector<Event> events;
  CallbackData(std::string error, std::vector<Event> events) : error(error), events(events) {}
};
|||
|
|||
// Converts an event batch into a JS Array of {path, type} objects.
Value callbackEventsToJS(const Env &env, std::vector<Event> &events) {
  EscapableHandleScope scope(env);
  Array arr = Array::New(env, events.size());
  uint32_t index = 0;
  for (auto &event : events) {
    arr.Set(index++, event.toJS(env));
  }
  return scope.Escape(arr);
}
|||
|
|||
// Trampoline invoked on the JS thread by the ThreadSafeFunction: converts the
// batch to JS values, invokes the callback as (err, events), and frees `data`.
void callJSFunction(Napi::Env env, Function jsCallback, CallbackData *data) {
  HandleScope scope(env);
  auto err = data->error.size() > 0 ? Error::New(env, data->error).Value() : env.Null();
  auto events = callbackEventsToJS(env, data->events);
  jsCallback.Call({err, events});
  delete data;

  // Throw errors from the callback as fatal exceptions
  // If we don't handle these node segfaults...
  if (env.IsExceptionPending()) {
    Napi::Error err = env.GetAndClearPendingException();
    napi_fatal_exception(env, err.Value());
  }
}
|||
|
|||
// Delivers a backend error to every registered callback (with an empty event
// list), then releases all callbacks — the watcher stops delivering events
// after an error.
void Watcher::notifyError(std::exception &err) {
  std::unique_lock<std::mutex> lk(mMutex);
  for (auto it = mCallbacks.begin(); it != mCallbacks.end(); it++) {
    // callJSFunction takes ownership of `data` and frees it on the JS thread.
    CallbackData *data = new CallbackData(err.what(), {});
    it->tsfn.BlockingCall(data, callJSFunction);
  }

  clearCallbacks();
}
|||
|
|||
// This function is called from the debounce thread.
// Flushes the pending event batch (or recorded error) to every registered
// callback, then clears the accumulated events.
void Watcher::triggerCallbacks() {
  std::unique_lock<std::mutex> lk(mMutex);
  if (mCallbacks.size() > 0 && (mEvents.size() > 0 || mEvents.hasError())) {
    auto error = mEvents.getError();
    auto events = mEvents.getEvents();
    mEvents.clear();

    for (auto it = mCallbacks.begin(); it != mCallbacks.end(); it++) {
      // Each callback gets its own heap batch; callJSFunction frees it.
      it->tsfn.BlockingCall(new CallbackData(error, events), callJSFunction);
    }
  }
}
|||
|
|||
// This should be called from the JavaScript thread.
// Registers a JS callback for this watcher. Returns false (and does nothing)
// when the same callback was already registered from this thread. A
// ThreadSafeFunction is created so background threads can deliver events, and
// a persistent reference is kept for identity comparison in findCallback.
bool Watcher::watch(Function callback) {
  std::unique_lock<std::mutex> lk(mMutex);

  auto it = findCallback(callback);
  if (it != mCallbacks.end()) {
    return false;
  }

  auto tsfn = ThreadSafeFunction::New(
    callback.Env(),
    callback,
    "Watcher callback",
    0, // Unlimited queue
    1 // Initial thread count
  );

  mCallbacks.push_back(Callback {
    tsfn,
    Napi::Persistent(callback),
    std::this_thread::get_id()
  });

  return true;
}
|||
|
|||
// This should be called from the JavaScript thread.
|
|||
std::vector<Callback>::iterator Watcher::findCallback(Function callback) { |
|||
for (auto it = mCallbacks.begin(); it != mCallbacks.end(); it++) { |
|||
// Only consider callbacks created by the same thread, or V8 will panic.
|
|||
if (it->threadId == std::this_thread::get_id() && it->ref.Value() == callback) { |
|||
return it; |
|||
} |
|||
} |
|||
|
|||
return mCallbacks.end(); |
|||
} |
|||
|
|||
// This should be called from the JavaScript thread.
// Removes a previously registered callback. Returns true ONLY when the last
// callback for this watcher was removed (signaling the caller to also stop
// the backend); removing a non-last callback returns false.
bool Watcher::unwatch(Function callback) {
  std::unique_lock<std::mutex> lk(mMutex);

  bool removed = false;
  auto it = findCallback(callback);
  if (it != mCallbacks.end()) {
    it->tsfn.Release();
    it->ref.Unref();
    mCallbacks.erase(it);
    removed = true;
  }

  if (removed && mCallbacks.size() == 0) {
    // No subscribers left: drop this watcher from the shared registry.
    unref();
    return true;
  }

  return false;
}
|||
|
|||
// Drops this watcher from the shared registry once no callbacks remain.
// NOTE(review): mCallbacks is read without taking mMutex here; internal
// callers (unwatch/clearCallbacks) already hold it — confirm external callers
// (e.g. runner destructors) are safe too.
void Watcher::unref() {
  if (mCallbacks.size() == 0) {
    removeShared(this);
  }
}
|||
|
|||
// Releases every registered callback; used when the backend fails or the
// watcher is being torn down.
void Watcher::destroy() {
  std::unique_lock<std::mutex> lk(mMutex);
  clearCallbacks();
}
|||
|
|||
// Private because it doesn't lock.
// Releases every tsfn and persistent ref, empties the callback list, and
// unrefs this watcher from the shared registry.
void Watcher::clearCallbacks() {
  for (auto it = mCallbacks.begin(); it != mCallbacks.end(); it++) {
    it->tsfn.Release();
    it->ref.Unref();
  }

  mCallbacks.clear();
  unref();
}
|||
|
|||
bool Watcher::isIgnored(std::string path) { |
|||
for (auto it = mIgnorePaths.begin(); it != mIgnorePaths.end(); it++) { |
|||
auto dir = *it + DIR_SEP; |
|||
if (*it == path || path.compare(0, dir.size(), dir) == 0) { |
|||
return true; |
|||
} |
|||
} |
|||
|
|||
auto basePath = mDir + DIR_SEP; |
|||
|
|||
if (path.rfind(basePath, 0) != 0) { |
|||
return false; |
|||
} |
|||
|
|||
auto relativePath = path.substr(basePath.size()); |
|||
|
|||
for (auto it = mIgnoreGlobs.begin(); it != mIgnoreGlobs.end(); it++) { |
|||
if (it->isIgnored(relativePath)) { |
|||
return true; |
|||
} |
|||
} |
|||
|
|||
return false; |
|||
} |
|||
@ -0,0 +1,73 @@ |
|||
#ifndef WATCHER_H
|
|||
#define WATCHER_H
|
|||
|
|||
#include <condition_variable>
|
|||
#include <unordered_set>
|
|||
#include <set>
|
|||
#include <node_api.h>
|
|||
#include "Glob.hh"
|
|||
#include "Event.hh"
|
|||
#include "Debounce.hh"
|
|||
#include "DirTree.hh"
|
|||
#include "Signal.hh"
|
|||
|
|||
using namespace Napi; |
|||
|
|||
struct Watcher;
using WatcherRef = std::shared_ptr<Watcher>;

// One JS subscription: the thread-safe function used to invoke it from
// background threads, a persistent reference used for identity comparison,
// and the id of the JS thread that registered it.
struct Callback {
  Napi::ThreadSafeFunction tsfn;
  Napi::FunctionReference ref;
  std::thread::id threadId;
};
|||
|
|||
// Base class for backend-specific per-watcher state; exists to give
// subclasses a virtual destructor so Watcher::state can own them
// polymorphically.
class WatcherState {
public:
  virtual ~WatcherState() = default;
};
|||
|
|||
// A subscription to one directory, deduplicated across JS callers by
// configuration via getShared(). Accumulates events in mEvents and delivers
// them (debounced) to registered JS callbacks.
struct Watcher {
  std::string mDir;                              // watched root directory
  std::unordered_set<std::string> mIgnorePaths;  // paths skipped (with their subtrees)
  std::unordered_set<Glob> mIgnoreGlobs;         // patterns matched relative to mDir
  EventList mEvents;                             // pending events for the next flush
  std::shared_ptr<WatcherState> state;           // backend-specific state

  Watcher(std::string dir, std::unordered_set<std::string> ignorePaths, std::unordered_set<Glob> ignoreGlobs);
  ~Watcher();

  // Watchers are identified by their full configuration.
  bool operator==(const Watcher &other) const {
    return mDir == other.mDir && mIgnorePaths == other.mIgnorePaths && mIgnoreGlobs == other.mIgnoreGlobs;
  }

  void wait();
  void notify();
  void notifyError(std::exception &err);
  // Registers a JS callback; false when already registered. JS thread only.
  bool watch(Function callback);
  // Removes a JS callback; true when the last one was removed. JS thread only.
  bool unwatch(Function callback);
  void unref();
  bool isIgnored(std::string path);
  void destroy();

  // Returns the registry watcher for this configuration, creating it if needed.
  static WatcherRef getShared(std::string dir, std::unordered_set<std::string> ignorePaths, std::unordered_set<Glob> ignoreGlobs);

private:
  std::mutex mMutex;
  std::condition_variable mCond;
  std::vector<Callback> mCallbacks;
  std::shared_ptr<Debounce> mDebounce;

  std::vector<Callback>::iterator findCallback(Function callback);
  void clearCallbacks();
  void triggerCallbacks();
};
|||
|
|||
// Runtime error that carries the watcher it originated from, so callers can
// route the failure to that watcher's callbacks.
class WatcherError : public std::runtime_error {
public:
  WatcherRef mWatcher;
  WatcherError(std::string msg, WatcherRef watcher) : std::runtime_error(msg), mWatcher(watcher) {}
  WatcherError(const char *msg, WatcherRef watcher) : std::runtime_error(msg), mWatcher(watcher) {}
};
|||
|
|||
#endif
|
|||
@ -0,0 +1,268 @@ |
|||
#include <unordered_set>
|
|||
#include <node_api.h>
|
|||
#include "wasm/include.h"
|
|||
#include <napi.h>
|
|||
#include "Glob.hh"
|
|||
#include "Event.hh"
|
|||
#include "Backend.hh"
|
|||
#include "Watcher.hh"
|
|||
#include "PromiseRunner.hh"
|
|||
|
|||
using namespace Napi; |
|||
|
|||
// Reads opts.ignorePaths (an array of strings) into a set. A missing/non-array
// value and non-string items are silently skipped.
std::unordered_set<std::string> getIgnorePaths(Env env, Value opts) {
  std::unordered_set<std::string> result;

  if (opts.IsObject()) {
    Value v = opts.As<Object>().Get(String::New(env, "ignorePaths"));
    if (v.IsArray()) {
      Array items = v.As<Array>();
      // Fix: index with uint32_t and the integer Get overload instead of
      // round-tripping through a JS Number.
      for (uint32_t i = 0; i < items.Length(); i++) {
        Value item = items.Get(i);
        if (item.IsString()) {
          // Fix: insert the Utf8Value string directly — going through
          // c_str() truncated paths at an embedded NUL and copied twice.
          result.insert(item.As<String>().Utf8Value());
        }
      }
    }
  }

  return result;
}
|||
|
|||
// Reads opts.ignoreGlobs (an array of pattern strings) into a set of compiled
// Globs. Invalid patterns raise a JS exception; note the loop deliberately
// continues, so later valid globs are still collected.
std::unordered_set<Glob> getIgnoreGlobs(Env env, Value opts) {
  std::unordered_set<Glob> result;

  if (opts.IsObject()) {
    Value v = opts.As<Object>().Get(String::New(env, "ignoreGlobs"));
    if (v.IsArray()) {
      Array items = v.As<Array>();
      // Fix: index with uint32_t and the integer Get overload instead of
      // round-tripping through a JS Number.
      for (uint32_t i = 0; i < items.Length(); i++) {
        Value item = items.Get(i);
        if (item.IsString()) {
          auto key = item.As<String>().Utf8Value();
          try {
            result.emplace(key);
          } catch (const std::regex_error& e) {
            // Glob's constructor compiles the pattern; surface a bad pattern
            // as a JS exception rather than crashing.
            Error::New(env, e.what()).ThrowAsJavaScriptException();
          }
        }
      }
    }
  }

  return result;
}
|||
|
|||
// Reads opts.backend into a name string (empty when absent or not a string)
// and returns the shared backend instance for it.
std::shared_ptr<Backend> getBackend(Env env, Value opts) {
  std::string backendName;
  Value b = opts.As<Object>().Get(String::New(env, "backend"));
  if (b.IsString()) {
    backendName = std::string(b.As<String>().Utf8Value().c_str());
  }
  return Backend::getShared(backendName);
}
|||
|
|||
// Async work that writes the current state of a watched directory to a
// snapshot file via the selected backend.
class WriteSnapshotRunner : public PromiseRunner {
public:
  WriteSnapshotRunner(Env env, Value dir, Value snap, Value opts)
    : PromiseRunner(env),
      snapshotPath(std::string(snap.As<String>().Utf8Value().c_str())) {
    watcher = Watcher::getShared(
      std::string(dir.As<String>().Utf8Value().c_str()),
      getIgnorePaths(env, opts),
      getIgnoreGlobs(env, opts)
    );

    backend = getBackend(env, opts);
  }

  // Drop our holds on the shared watcher and backend when the work is freed.
  ~WriteSnapshotRunner() {
    watcher->unref();
    backend->unref();
  }
private:
  std::shared_ptr<Backend> backend;
  WatcherRef watcher;
  std::string snapshotPath;

  // Runs on the worker thread.
  void execute() override {
    backend->writeSnapshot(watcher, &snapshotPath);
  }
};
|||
|
|||
// Async work that computes the events that occurred since a snapshot was
// written, resolving with an array of {path, type} objects.
class GetEventsSinceRunner : public PromiseRunner {
public:
  GetEventsSinceRunner(Env env, Value dir, Value snap, Value opts)
    : PromiseRunner(env),
      snapshotPath(std::string(snap.As<String>().Utf8Value().c_str())) {
    // A private watcher (make_shared, not getShared) is used so the diff's
    // event accumulation doesn't interfere with live subscriptions.
    watcher = std::make_shared<Watcher>(
      std::string(dir.As<String>().Utf8Value().c_str()),
      getIgnorePaths(env, opts),
      getIgnoreGlobs(env, opts)
    );

    backend = getBackend(env, opts);
  }

  ~GetEventsSinceRunner() {
    watcher->unref();
    backend->unref();
  }
private:
  std::shared_ptr<Backend> backend;
  WatcherRef watcher;
  std::string snapshotPath;

  // Runs on the worker thread; any accumulated error rejects the promise.
  void execute() override {
    backend->getEventsSince(watcher, &snapshotPath);
    if (watcher->mEvents.hasError()) {
      throw std::runtime_error(watcher->mEvents.getError());
    }
  }

  // Runs on the JS thread: converts the accumulated events to a JS array.
  Value getResult() override {
    std::vector<Event> events = watcher->mEvents.getEvents();
    Array eventsArray = Array::New(env, events.size());
    uint32_t i = 0;
    for (auto it = events.begin(); it != events.end(); it++) {
      eventsArray.Set(i++, it->toJS(env));
    }
    return eventsArray;
  }
};
|||
|
|||
// Validates (dir: string, snapshotPath: string[, opts: object]) arguments and
// queues a Runner; the runner frees itself when the async work completes.
template<class Runner>
Value queueSnapshotWork(const CallbackInfo& info) {
  Env env = info.Env();
  if (info.Length() < 1 || !info[0].IsString()) {
    TypeError::New(env, "Expected a string").ThrowAsJavaScriptException();
    return env.Null();
  }

  if (info.Length() < 2 || !info[1].IsString()) {
    TypeError::New(env, "Expected a string").ThrowAsJavaScriptException();
    return env.Null();
  }

  if (info.Length() >= 3 && !info[2].IsObject()) {
    TypeError::New(env, "Expected an object").ThrowAsJavaScriptException();
    return env.Null();
  }

  Runner *runner = new Runner(info.Env(), info[0], info[1], info[2]);
  return runner->queue();
}
|||
|
|||
// JS entry: writeSnapshot(dir, snapshotPath[, opts]) -> Promise.
Value writeSnapshot(const CallbackInfo& info) {
  return queueSnapshotWork<WriteSnapshotRunner>(info);
}
|||
|
|||
// JS entry: getEventsSince(dir, snapshotPath[, opts]) -> Promise of events.
Value getEventsSince(const CallbackInfo& info) {
  return queueSnapshotWork<GetEventsSinceRunner>(info);
}
|||
|
|||
class SubscribeRunner : public PromiseRunner { |
|||
public: |
|||
SubscribeRunner(Env env, Value dir, Value fn, Value opts) : PromiseRunner(env) { |
|||
watcher = Watcher::getShared( |
|||
std::string(dir.As<String>().Utf8Value().c_str()), |
|||
getIgnorePaths(env, opts), |
|||
getIgnoreGlobs(env, opts) |
|||
); |
|||
|
|||
backend = getBackend(env, opts); |
|||
watcher->watch(fn.As<Function>()); |
|||
} |
|||
|
|||
private: |
|||
WatcherRef watcher; |
|||
std::shared_ptr<Backend> backend; |
|||
FunctionReference callback; |
|||
|
|||
void execute() override { |
|||
try { |
|||
backend->watch(watcher); |
|||
} catch (std::exception&) { |
|||
watcher->destroy(); |
|||
throw; |
|||
} |
|||
} |
|||
}; |
|||
|
|||
// Async work that removes a JS callback; when it was the last callback for
// the watcher, the backend is told to stop watching that directory.
class UnsubscribeRunner : public PromiseRunner {
public:
  UnsubscribeRunner(Env env, Value dir, Value fn, Value opts) : PromiseRunner(env) {
    watcher = Watcher::getShared(
      std::string(dir.As<String>().Utf8Value().c_str()),
      getIgnorePaths(env, opts),
      getIgnoreGlobs(env, opts)
    );

    backend = getBackend(env, opts);
    // unwatch returns true only when the LAST callback was removed.
    shouldUnwatch = watcher->unwatch(fn.As<Function>());
  }

private:
  WatcherRef watcher;
  std::shared_ptr<Backend> backend;
  bool shouldUnwatch;

  // Runs on the worker thread.
  void execute() override {
    if (shouldUnwatch) {
      backend->unwatch(watcher);
    }
  }
};
|||
|
|||
// Validates (dir: string, callback: function[, opts: object]) arguments and
// queues a Runner; the runner frees itself when the async work completes.
template<class Runner>
Value queueSubscriptionWork(const CallbackInfo& info) {
  Env env = info.Env();
  if (info.Length() < 1 || !info[0].IsString()) {
    TypeError::New(env, "Expected a string").ThrowAsJavaScriptException();
    return env.Null();
  }

  if (info.Length() < 2 || !info[1].IsFunction()) {
    TypeError::New(env, "Expected a function").ThrowAsJavaScriptException();
    return env.Null();
  }

  if (info.Length() >= 3 && !info[2].IsObject()) {
    TypeError::New(env, "Expected an object").ThrowAsJavaScriptException();
    return env.Null();
  }

  Runner *runner = new Runner(info.Env(), info[0], info[1], info[2]);
  return runner->queue();
}
|||
|
|||
// JS entry: subscribe(dir, callback[, opts]) -> Promise.
Value subscribe(const CallbackInfo& info) {
  return queueSubscriptionWork<SubscribeRunner>(info);
}
|||
|
|||
// JS entry: unsubscribe(dir, callback[, opts]) -> Promise.
Value unsubscribe(const CallbackInfo& info) {
  return queueSubscriptionWork<UnsubscribeRunner>(info);
}
|||
|
|||
// Registers the native entry points on the module's exports object.
Object Init(Env env, Object exports) {
  exports.Set(String::New(env, "writeSnapshot"), Function::New(env, writeSnapshot));
  exports.Set(String::New(env, "getEventsSince"), Function::New(env, getEventsSince));
  exports.Set(String::New(env, "subscribe"), Function::New(env, subscribe));
  exports.Set(String::New(env, "unsubscribe"), Function::New(env, unsubscribe));
  return exports;
}
|||
|
|||
NODE_API_MODULE(watcher, Init) |
|||
@ -0,0 +1,306 @@ |
|||
#include <memory>
|
|||
#include <poll.h>
|
|||
#include <unistd.h>
|
|||
#include <libgen.h>
|
|||
#include <dirent.h>
|
|||
#include <fcntl.h>
|
|||
#include <sys/stat.h>
|
|||
#include "KqueueBackend.hh"
|
|||
|
|||
#if __APPLE__
|
|||
#define st_mtim st_mtimespec
|
|||
#endif
|
|||
|
|||
#if !defined(O_EVTONLY)
|
|||
#define O_EVTONLY O_RDONLY
|
|||
#endif
|
|||
|
|||
#define CONVERT_TIME(ts) ((uint64_t)ts.tv_sec * 1000000000 + ts.tv_nsec)
|
|||
|
|||
// Backend event loop: opens the kqueue, registers a self-pipe used to request
// shutdown, then blocks on kevent() translating vnode notifications into
// watcher events until the pipe is written (see the destructor).
void KqueueBackend::start() {
  if ((mKqueue = kqueue()) < 0) {
    throw std::runtime_error(std::string("Unable to open kqueue: ") + strerror(errno));
  }

  // Create a pipe that we will write to when we want to end the thread.
  int err = pipe(mPipe);
  if (err == -1) {
    throw std::runtime_error(std::string("Unable to open pipe: ") + strerror(errno));
  }

  // Subscribe kqueue to this pipe.
  struct kevent ev;
  EV_SET(
    &ev,
    mPipe[0],
    EVFILT_READ,
    EV_ADD | EV_CLEAR,
    0,
    0,
    0
  );

  if (kevent(mKqueue, &ev, 1, NULL, 0, 0)) {
    close(mPipe[0]);
    close(mPipe[1]);
    throw std::runtime_error(std::string("Unable to watch pipe: ") + strerror(errno));
  }

  notifyStarted();

  struct kevent events[128];

  while (true) {
    // NULL timeout: block until at least one event arrives.
    int event_count = kevent(mKqueue, NULL, 0, events, 128, 0);
    // NOTE(review): EV_ERROR is a flag bit and only the first event's whole
    // flags word is compared with == here — confirm this matches the intended
    // error detection.
    if (event_count < 0 || events[0].flags == EV_ERROR) {
      throw std::runtime_error(std::string("kevent error: ") + strerror(errno));
    }

    // Track all of the watchers that are touched so we can notify them at the end of the events.
    std::unordered_set<WatcherRef> watchers;

    for (int i = 0; i < event_count; i++) {
      int flags = events[i].fflags;
      int fd = events[i].ident;
      if (fd == mPipe[0]) {
        // pipe was written to. break out of the loop.
        goto done;
      }

      auto it = mFdToEntry.find(fd);
      if (it == mFdToEntry.end()) {
        // If fd wasn't in our map, we may have already stopped watching it. Ignore the event.
        continue;
      }

      DirEntry *entry = it->second;

      if (flags & NOTE_WRITE && entry && entry->isDir) {
        // If a write occurred on a directory, we have to diff the contents of that
        // directory to determine what file was added/deleted.
        compareDir(fd, entry->path, watchers);
      } else {
        std::vector<KqueueSubscription *> subs = findSubscriptions(entry->path);
        for (auto it = subs.begin(); it != subs.end(); it++) {
          KqueueSubscription *sub = *it;
          watchers.insert(sub->watcher);
          if (flags & (NOTE_DELETE | NOTE_RENAME | NOTE_REVOKE)) {
            // Path is gone: emit a remove, and drop all bookkeeping for it.
            // entry->state holds the watched fd (stored as a pointer-sized int).
            sub->watcher->mEvents.remove(sub->path);
            sub->tree->remove(sub->path);
            mFdToEntry.erase((int)(size_t)entry->state);
            mSubscriptions.erase(sub->path);
          } else if (flags & (NOTE_WRITE | NOTE_ATTRIB | NOTE_EXTEND)) {
            // Content/metadata change: emit an update only when the mtime
            // actually moved, to de-duplicate redundant notifications.
            struct stat st;
            lstat(sub->path.c_str(), &st);
            if (entry->mtime != CONVERT_TIME(st.st_mtim)) {
              entry->mtime = CONVERT_TIME(st.st_mtim);
              sub->watcher->mEvents.update(sub->path);
            }
          }
        }
      }
    }

    for (auto it = watchers.begin(); it != watchers.end(); it++) {
      (*it)->notify();
    }
  }

done:
  close(mPipe[0]);
  close(mPipe[1]);
  mEndedSignal.notify();
}
|||
|
|||
// Wakes the event loop via the self-pipe and blocks until start() has exited.
KqueueBackend::~KqueueBackend() {
  write(mPipe[1], "X", 1);
  mEndedSignal.wait();
}
|||
|
|||
void KqueueBackend::subscribe(WatcherRef watcher) {
  // Build a full directory tree recursively, and watch each directory.
  std::shared_ptr<DirTree> tree = getTree(watcher);

  // Any watchDir failure (including ignored paths, which also return false)
  // aborts the whole subscription.
  for (auto &pair : tree->entries) {
    if (!watchDir(watcher, pair.second.path, tree)) {
      throw WatcherError(std::string("error watching " + watcher->mDir + ": " + strerror(errno)), watcher);
    }
  }
}
|||
|
|||
bool KqueueBackend::watchDir(WatcherRef watcher, std::string path, std::shared_ptr<DirTree> tree) {
  // Register a kqueue vnode watch for `path` and record a subscription for
  // `watcher`. Returns false when the path is ignored, unknown to the tree,
  // or the kernel watch cannot be established.
  if (watcher->isIgnored(path)) {
    return false;
  }

  DirEntry *entry = tree->find(path);
  if (!entry) {
    return false;
  }

  KqueueSubscription sub = {
    .watcher = watcher,
    .path = path,
    .tree = tree
  };

  if (!entry->state) {
    // O_EVTONLY opens a descriptor usable only for event notification.
    int fd = open(path.c_str(), O_EVTONLY);
    // Fix: 0 is a valid file descriptor; only negative values mean failure.
    // The original `fd <= 0` would reject a legitimate watch if fd 0 was free.
    if (fd < 0) {
      return false;
    }

    struct kevent event;
    EV_SET(
      &event,
      fd,
      EVFILT_VNODE,
      EV_ADD | EV_CLEAR | EV_ENABLE,
      NOTE_DELETE | NOTE_WRITE | NOTE_EXTEND | NOTE_ATTRIB | NOTE_RENAME | NOTE_REVOKE,
      0,
      0
    );

    if (kevent(mKqueue, &event, 1, NULL, 0, 0)) {
      close(fd);
      return false;
    }

    // Stash the fd on the DirEntry so sibling subscriptions can share it.
    entry->state = (void *)(size_t)fd;
    mFdToEntry.emplace(fd, entry);
  }

  sub.fd = (int)(size_t)entry->state;
  mSubscriptions.emplace(path, sub);
  return true;
}
|||
|
|||
std::vector<KqueueSubscription *> KqueueBackend::findSubscriptions(std::string &path) { |
|||
// Find the subscriptions affected by this path.
|
|||
// Copy pointers to them into a vector so that modifying mSubscriptions doesn't invalidate the iterator.
|
|||
auto range = mSubscriptions.equal_range(path); |
|||
std::vector<KqueueSubscription *> subs; |
|||
for (auto it = range.first; it != range.second; it++) { |
|||
subs.push_back(&it->second); |
|||
} |
|||
|
|||
return subs; |
|||
} |
|||
|
|||
bool KqueueBackend::compareDir(int fd, std::string &path, std::unordered_set<WatcherRef> &watchers) {
  // Diff the on-disk contents of the directory watched via `fd` against the
  // cached DirTree state: emit create events (and start watching) for new
  // entries, and remove events for entries that disappeared. Returns false
  // on failure.
  //
  // macOS doesn't support fdclosedir, so we have to duplicate the file descriptor
  // to ensure the closedir doesn't also stop watching.
  #if __APPLE__
  fd = dup(fd);
  #endif

  DIR *dir = fdopendir(fd);
  if (dir == NULL) {
    // NOTE(review): on macOS the dup'd descriptor appears to leak here — confirm.
    return false;
  }

  // fdopendir doesn't rewind to the beginning.
  rewinddir(dir);

  std::vector<KqueueSubscription *> subs = findSubscriptions(path);
  std::string dirStart = path + DIR_SEP;

  // Collect the distinct trees across all subscriptions on this path so each
  // tree is diffed exactly once.
  std::unordered_set<std::shared_ptr<DirTree>> trees;
  for (auto it = subs.begin(); it != subs.end(); it++) {
    trees.emplace((*it)->tree);
  }

  // First pass: walk the directory on disk, recording every name seen and
  // adding entries missing from any tree (these are creations).
  std::unordered_set<std::string> entries;
  struct dirent *entry;
  while ((entry = readdir(dir))) {
    if (strcmp(entry->d_name, ".") == 0 || strcmp(entry->d_name, "..") == 0) {
      continue;
    }

    std::string fullpath = dirStart + entry->d_name;
    entries.emplace(fullpath);

    for (auto it = trees.begin(); it != trees.end(); it++) {
      std::shared_ptr<DirTree> tree = *it;
      if (!tree->find(fullpath)) {
        // Entry exists on disk but not in this tree: it was created.
        struct stat st;
        fstatat(fd, entry->d_name, &st, AT_SYMLINK_NOFOLLOW);
        tree->add(fullpath, CONVERT_TIME(st.st_mtim), S_ISDIR(st.st_mode));

        // Notify all watchers with the same tree.
        for (auto i = subs.begin(); i != subs.end(); i++) {
          KqueueSubscription *sub = *i;
          if (sub->tree == tree) {
            if (sub->watcher->isIgnored(fullpath)) {
              continue;
            }

            sub->watcher->mEvents.create(fullpath);
            watchers.emplace(sub->watcher);

            bool success = watchDir(sub->watcher, fullpath, sub->tree);
            if (!success) {
              sub->tree->remove(fullpath);
              // NOTE(review): this early return leaves `dir` open — confirm intended.
              return false;
            }
          }
        }
      }
    }
  }

  // Second pass: any direct child of `path` present in a tree but not seen on
  // disk has been deleted; emit remove events and drop its bookkeeping.
  for (auto it = trees.begin(); it != trees.end(); it++) {
    std::shared_ptr<DirTree> tree = *it;
    for (auto entry = tree->entries.begin(); entry != tree->entries.end();) {

      // Match entries directly under `path` (prefix matches, no further
      // separator) that were not observed in the readdir loop above.
      if (
        entry->first.rfind(dirStart, 0) == 0 &&
        entry->first.find(DIR_SEP, dirStart.length()) == std::string::npos &&
        entries.count(entry->first) == 0
      ) {
        // Notify all watchers with the same tree.
        for (auto i = subs.begin(); i != subs.end(); i++) {
          if ((*i)->tree == tree) {
            KqueueSubscription *sub = *i;
            if (!sub->watcher->isIgnored(entry->first)) {
              sub->watcher->mEvents.remove(entry->first);
              watchers.emplace(sub->watcher);
            }
          }
        }

        mFdToEntry.erase((int)(size_t)entry->second.state);
        mSubscriptions.erase(entry->first);
        entry = tree->entries.erase(entry);
      } else {
        entry++;
      }
    }
  }

  #if __APPLE__
  closedir(dir);
  #else
  // fdclosedir releases the DIR without closing the underlying fd, which
  // must stay open because kqueue is still watching it.
  fdclosedir(dir);
  #endif

  return true;
}
|||
|
|||
void KqueueBackend::unsubscribe(WatcherRef watcher) {
  // Drop every subscription owned by `watcher`.
  auto it = mSubscriptions.begin();
  while (it != mSubscriptions.end()) {
    if (it->second.watcher.get() != watcher.get()) {
      ++it;
      continue;
    }

    // Only tear down the kernel watch when this is the last subscription
    // for the path.
    if (mSubscriptions.count(it->first) == 1) {
      // Closing the file descriptor automatically unwatches it in the kqueue.
      close(it->second.fd);
      mFdToEntry.erase(it->second.fd);
    }

    it = mSubscriptions.erase(it);
  }
}
|||
@ -0,0 +1,35 @@ |
|||
#ifndef KQUEUE_H
|
|||
#define KQUEUE_H
|
|||
|
|||
#include <unordered_map>
|
|||
#include <sys/event.h>
|
|||
#include "../shared/BruteForceBackend.hh"
|
|||
#include "../DirTree.hh"
|
|||
#include "../Signal.hh"
|
|||
|
|||
// One watcher's interest in a single path. Several subscriptions may share
// the same underlying vnode fd (stored on DirEntry::state by watchDir).
struct KqueueSubscription {
  WatcherRef watcher;             // watcher to notify for this path
  std::string path;               // path being watched
  std::shared_ptr<DirTree> tree;  // cached directory tree the path belongs to
  int fd;                         // kqueue vnode file descriptor
};
|||
|
|||
// File watching backend based on BSD kqueue(2) EVFILT_VNODE events.
class KqueueBackend : public BruteForceBackend {
public:
  // Runs the kevent loop; returns after the destructor writes to mPipe.
  void start() override;
  ~KqueueBackend();
  void subscribe(WatcherRef watcher) override;
  void unsubscribe(WatcherRef watcher) override;
private:
  int mKqueue;   // kqueue instance descriptor
  int mPipe[2];  // self-pipe used to wake and stop the event loop
  // path -> subscription; multimap because several watchers can watch one path.
  std::unordered_multimap<std::string, KqueueSubscription> mSubscriptions;
  // vnode fd -> the DirEntry that owns it (entry->state holds the fd).
  std::unordered_map<int, DirEntry *> mFdToEntry;
  Signal mEndedSignal;  // signaled once the event loop has exited

  bool watchDir(WatcherRef watcher, std::string path, std::shared_ptr<DirTree> tree);
  bool compareDir(int fd, std::string &dir, std::unordered_set<WatcherRef> &watchers);
  std::vector<KqueueSubscription *> findSubscriptions(std::string &path);
};
|||
|
|||
#endif
|
|||
@ -0,0 +1,236 @@ |
|||
#include <memory>
|
|||
#include <poll.h>
|
|||
#include <unistd.h>
|
|||
#include <fcntl.h>
|
|||
#include <sys/stat.h>
|
|||
#include "InotifyBackend.hh"
|
|||
|
|||
#define INOTIFY_MASK \
|
|||
IN_ATTRIB | IN_CREATE | IN_DELETE | \ |
|||
IN_DELETE_SELF | IN_MODIFY | IN_MOVE_SELF | IN_MOVED_FROM | \ |
|||
IN_MOVED_TO | IN_DONT_FOLLOW | IN_ONLYDIR | IN_EXCL_UNLINK |
|||
#define BUFFER_SIZE 8192
|
|||
#define CONVERT_TIME(ts) ((uint64_t)ts.tv_sec * 1000000000 + ts.tv_nsec)
|
|||
|
|||
void InotifyBackend::start() {
  // Event-loop entry point: set up the self-pipe and the inotify descriptor,
  // then poll both until the destructor writes to the pipe.
  //
  // Create a pipe that we will write to when we want to end the thread.
  int err = pipe2(mPipe, O_CLOEXEC | O_NONBLOCK);
  if (err == -1) {
    throw std::runtime_error(std::string("Unable to open pipe: ") + strerror(errno));
  }

  // Init inotify file descriptor.
  mInotify = inotify_init1(IN_NONBLOCK | IN_CLOEXEC);
  if (mInotify == -1) {
    throw std::runtime_error(std::string("Unable to initialize inotify: ") + strerror(errno));
  }

  // Poll two descriptors: index 0 is the shutdown pipe, index 1 is inotify.
  pollfd pollfds[2];
  pollfds[0].fd = mPipe[0];
  pollfds[0].events = POLLIN;
  pollfds[0].revents = 0;
  pollfds[1].fd = mInotify;
  pollfds[1].events = POLLIN;
  pollfds[1].revents = 0;

  // Signal that the backend finished starting up.
  notifyStarted();

  // Loop until we get an event from the pipe.
  while (true) {
    // Wait up to 500 ms for activity on either descriptor.
    int result = poll(pollfds, 2, 500);
    if (result < 0) {
      throw std::runtime_error(std::string("Unable to poll: ") + strerror(errno));
    }

    if (pollfds[0].revents) {
      break;
    }

    if (pollfds[1].revents) {
      handleEvents();
    }
  }

  close(mPipe[0]);
  close(mPipe[1]);
  close(mInotify);

  mEndedSignal.notify();
}
|||
|
|||
InotifyBackend::~InotifyBackend() {
  // Wake the poll loop via the self-pipe, then wait for start() to finish
  // its cleanup and signal completion.
  write(mPipe[1], "X", 1);
  mEndedSignal.wait();
}
|||
|
|||
// This function is called by Backend::watch which takes a lock on mMutex
|
|||
// This function is called by Backend::watch which takes a lock on mMutex
void InotifyBackend::subscribe(WatcherRef watcher) {
  // Recursively read the directory tree, then add an inotify watch for every
  // directory in it (inotify watches are per-directory).
  std::shared_ptr<DirTree> tree = getTree(watcher);

  for (auto &pair : tree->entries) {
    if (!pair.second.isDir) {
      continue;
    }

    if (!watchDir(watcher, pair.second.path, tree)) {
      throw WatcherError(std::string("inotify_add_watch on '") + pair.second.path + std::string("' failed: ") + strerror(errno), watcher);
    }
  }
}
|||
|
|||
bool InotifyBackend::watchDir(WatcherRef watcher, std::string path, std::shared_ptr<DirTree> tree) {
  // Ask the kernel to watch this directory; on success, record a
  // subscription keyed by the returned watch descriptor.
  int wd = inotify_add_watch(mInotify, path.c_str(), INOTIFY_MASK);
  if (wd == -1) {
    return false;
  }

  auto sub = std::make_shared<InotifySubscription>();
  sub->watcher = watcher;
  sub->path = path;
  sub->tree = tree;
  mSubscriptions.emplace(wd, sub);

  return true;
}
|||
|
|||
void InotifyBackend::handleEvents() { |
|||
char buf[BUFFER_SIZE] __attribute__ ((aligned(__alignof__(struct inotify_event))));; |
|||
struct inotify_event *event; |
|||
|
|||
// Track all of the watchers that are touched so we can notify them at the end of the events.
|
|||
std::unordered_set<WatcherRef> watchers; |
|||
|
|||
while (true) { |
|||
int n = read(mInotify, &buf, BUFFER_SIZE); |
|||
if (n < 0) { |
|||
if (errno == EAGAIN || errno == EWOULDBLOCK) { |
|||
break; |
|||
} |
|||
|
|||
throw std::runtime_error(std::string("Error reading from inotify: ") + strerror(errno)); |
|||
} |
|||
|
|||
if (n == 0) { |
|||
break; |
|||
} |
|||
|
|||
for (char *ptr = buf; ptr < buf + n; ptr += sizeof(*event) + event->len) { |
|||
event = (struct inotify_event *)ptr; |
|||
|
|||
if ((event->mask & IN_Q_OVERFLOW) == IN_Q_OVERFLOW) { |
|||
// overflow
|
|||
continue; |
|||
} |
|||
|
|||
handleEvent(event, watchers); |
|||
} |
|||
} |
|||
|
|||
for (auto it = watchers.begin(); it != watchers.end(); it++) { |
|||
(*it)->notify(); |
|||
} |
|||
} |
|||
|
|||
void InotifyBackend::handleEvent(struct inotify_event *event, std::unordered_set<WatcherRef> &watchers) { |
|||
std::unique_lock<std::mutex> lock(mMutex); |
|||
|
|||
// Find the subscriptions for this watch descriptor
|
|||
auto range = mSubscriptions.equal_range(event->wd); |
|||
std::unordered_set<std::shared_ptr<InotifySubscription>> set; |
|||
for (auto it = range.first; it != range.second; it++) { |
|||
set.insert(it->second); |
|||
} |
|||
|
|||
for (auto it = set.begin(); it != set.end(); it++) { |
|||
if (handleSubscription(event, *it)) { |
|||
watchers.insert((*it)->watcher); |
|||
} |
|||
} |
|||
} |
|||
|
|||
bool InotifyBackend::handleSubscription(struct inotify_event *event, std::shared_ptr<InotifySubscription> sub) {
  // Translate one inotify event into create/update/remove events for this
  // subscription, keeping the DirTree in sync. Returns true when the
  // subscription's watcher should be notified.
  //
  // Build full path and check if its in our ignore list.
  std::shared_ptr<Watcher> watcher = sub->watcher;
  std::string path = std::string(sub->path);
  bool isDir = event->mask & IN_ISDIR;

  // event->len > 0 means the event names a child of the watched directory.
  if (event->len > 0) {
    path += "/" + std::string(event->name);
  }

  if (watcher->isIgnored(path)) {
    return false;
  }

  // If this is a create, check if it's a directory and start watching if it is.
  // In any case, keep the directory tree up to date.
  if (event->mask & (IN_CREATE | IN_MOVED_TO)) {
    watcher->mEvents.create(path);

    struct stat st;
    // Use lstat to avoid resolving symbolic links that we cannot watch anyway
    // https://github.com/parcel-bundler/watcher/issues/76
    if (lstat(path.c_str(), &st) != 0) {
      return false;
    }
    DirEntry *entry = sub->tree->add(path, CONVERT_TIME(st.st_mtim), S_ISDIR(st.st_mode));

    if (entry->isDir) {
      bool success = watchDir(watcher, path, sub->tree);
      if (!success) {
        // Could not watch the new directory; roll back the tree entry.
        sub->tree->remove(path);
        return false;
      }
    }
  } else if (event->mask & (IN_MODIFY | IN_ATTRIB)) {
    watcher->mEvents.update(path);

    struct stat st;
    if (stat(path.c_str(), &st) != 0) {
      return false;
    }
    sub->tree->update(path, CONVERT_TIME(st.st_mtim));
  } else if (event->mask & (IN_DELETE | IN_DELETE_SELF | IN_MOVED_FROM | IN_MOVE_SELF)) {
    bool isSelfEvent = (event->mask & (IN_DELETE_SELF | IN_MOVE_SELF));
    // Ignore delete/move self events unless this is the recursive watch root
    if (isSelfEvent && path != watcher->mDir) {
      return false;
    }

    // If the entry being deleted/moved is a directory, remove it from the list of subscriptions
    // XXX: self events don't have the IN_ISDIR mask
    if (isSelfEvent || isDir) {
      for (auto it = mSubscriptions.begin(); it != mSubscriptions.end();) {
        if (it->second->path == path) {
          it = mSubscriptions.erase(it);
        } else {
          ++it;
        }
      }
    }

    watcher->mEvents.remove(path);
    sub->tree->remove(path);
  }

  return true;
}
|||
|
|||
// This function is called by Backend::unwatch which takes a lock on mMutex
|
|||
// This function is called by Backend::unwatch which takes a lock on mMutex
void InotifyBackend::unsubscribe(WatcherRef watcher) {
  // Drop every subscription owned by this watcher; remove the kernel watch
  // once the last subscription on a watch descriptor goes away.
  auto it = mSubscriptions.begin();
  while (it != mSubscriptions.end()) {
    if (it->second->watcher.get() != watcher.get()) {
      ++it;
      continue;
    }

    if (mSubscriptions.count(it->first) == 1) {
      if (inotify_rm_watch(mInotify, it->first) == -1) {
        throw WatcherError(std::string("Unable to remove watcher: ") + strerror(errno), watcher);
      }
    }

    it = mSubscriptions.erase(it);
  }
}
|||
@ -0,0 +1,34 @@ |
|||
#ifndef INOTIFY_H
|
|||
#define INOTIFY_H
|
|||
|
|||
#include <unordered_map>
|
|||
#include <sys/inotify.h>
|
|||
#include "../shared/BruteForceBackend.hh"
|
|||
#include "../DirTree.hh"
|
|||
#include "../Signal.hh"
|
|||
|
|||
// One watcher's interest in a single directory, keyed in
// InotifyBackend::mSubscriptions by inotify watch descriptor.
struct InotifySubscription {
  std::shared_ptr<DirTree> tree;  // cached tree kept in sync with events
  std::string path;               // directory this watch descriptor refers to
  WatcherRef watcher;             // watcher to notify
};
|||
|
|||
// File watching backend based on Linux inotify(7).
class InotifyBackend : public BruteForceBackend {
public:
  // Runs the poll loop; returns after the destructor writes to mPipe.
  void start() override;
  ~InotifyBackend();
  void subscribe(WatcherRef watcher) override;
  void unsubscribe(WatcherRef watcher) override;
private:
  int mPipe[2];  // self-pipe used to stop the poll loop
  int mInotify;  // inotify instance descriptor
  // watch descriptor -> subscription; multimap because several watchers may
  // share one watch descriptor.
  std::unordered_multimap<int, std::shared_ptr<InotifySubscription>> mSubscriptions;
  Signal mEndedSignal;  // signaled once the poll loop has exited

  bool watchDir(WatcherRef watcher, std::string path, std::shared_ptr<DirTree> tree);
  void handleEvents();
  void handleEvent(struct inotify_event *event, std::unordered_set<WatcherRef> &watchers);
  bool handleSubscription(struct inotify_event *event, std::shared_ptr<InotifySubscription> sub);
};
|||
|
|||
#endif
|
|||
@ -0,0 +1,338 @@ |
|||
#include <CoreServices/CoreServices.h>
|
|||
#include <sys/stat.h>
|
|||
#include <string>
|
|||
#include <fstream>
|
|||
#include <unordered_set>
|
|||
#include "../Event.hh"
|
|||
#include "../Backend.hh"
|
|||
#include "./FSEventsBackend.hh"
|
|||
#include "../Watcher.hh"
|
|||
|
|||
#define CONVERT_TIME(ts) ((uint64_t)ts.tv_sec * 1000000000 + ts.tv_nsec)
|
|||
#define IGNORED_FLAGS (kFSEventStreamEventFlagItemIsHardlink | kFSEventStreamEventFlagItemIsLastHardlink | kFSEventStreamEventFlagItemIsSymlink | kFSEventStreamEventFlagItemIsDir | kFSEventStreamEventFlagItemIsFile)
|
|||
|
|||
// Tear down an FSEvents stream: stop event delivery, unschedule it from the
// run loop, then invalidate and release it — the order the FSEvents API
// requires.
void stopStream(FSEventStreamRef stream, CFRunLoopRef runLoop) {
  FSEventStreamStop(stream);
  FSEventStreamUnscheduleFromRunLoop(stream, runLoop, kCFRunLoopDefaultMode);
  FSEventStreamInvalidate(stream);
  FSEventStreamRelease(stream);
}
|||
|
|||
// macOS has a case insensitive file system by default. In order to detect
|
|||
// file renames that only affect case, we need to get the canonical path
|
|||
// and compare it with the input path to determine if a file was created or deleted.
|
|||
bool pathExists(char *path) { |
|||
int fd = open(path, O_RDONLY | O_SYMLINK); |
|||
if (fd == -1) { |
|||
return false; |
|||
} |
|||
|
|||
char buf[PATH_MAX]; |
|||
if (fcntl(fd, F_GETPATH, buf) == -1) { |
|||
close(fd); |
|||
return false; |
|||
} |
|||
|
|||
bool res = strncmp(path, buf, PATH_MAX) == 0; |
|||
close(fd); |
|||
return res; |
|||
} |
|||
|
|||
// Per-watcher state attached to Watcher::state while an FSEvents stream is
// active.
class State: public WatcherState {
public:
  FSEventStreamRef stream;        // the live FSEvents stream
  std::shared_ptr<DirTree> tree;  // tree kept in sync by FSEventsCallback
  uint64_t since;                 // snapshot timestamp; 0 for live subscriptions
};
|||
|
|||
// Stream callback: translates coalesced FSEvents flags into create/update/
// remove events on the watcher's event list, keeping the State's DirTree in
// sync and disambiguating combined flags with stat() where needed.
void FSEventsCallback(
  ConstFSEventStreamRef streamRef,
  void *clientCallBackInfo,
  size_t numEvents,
  void *eventPaths,
  const FSEventStreamEventFlags eventFlags[],
  const FSEventStreamEventId eventIds[]
) {
  char **paths = (char **)eventPaths;
  // clientCallBackInfo is the heap-allocated shared_ptr created in startStream.
  std::shared_ptr<Watcher>& watcher = *static_cast<std::shared_ptr<Watcher> *>(clientCallBackInfo);

  EventList& list = watcher->mEvents;
  if (watcher->state == nullptr) {
    return;
  }

  // Hold a reference so the state cannot be destroyed mid-callback.
  auto stateGuard = watcher->state;
  auto* state = static_cast<State*>(stateGuard.get());
  uint64_t since = state->since;
  bool deletedRoot = false;

  for (size_t i = 0; i < numEvents; ++i) {
    // Decode the per-event flag bits we care about.
    bool isCreated = (eventFlags[i] & kFSEventStreamEventFlagItemCreated) == kFSEventStreamEventFlagItemCreated;
    bool isRemoved = (eventFlags[i] & kFSEventStreamEventFlagItemRemoved) == kFSEventStreamEventFlagItemRemoved;
    bool isModified = (eventFlags[i] & kFSEventStreamEventFlagItemModified) == kFSEventStreamEventFlagItemModified ||
    (eventFlags[i] & kFSEventStreamEventFlagItemInodeMetaMod) == kFSEventStreamEventFlagItemInodeMetaMod ||
    (eventFlags[i] & kFSEventStreamEventFlagItemFinderInfoMod) == kFSEventStreamEventFlagItemFinderInfoMod ||
    (eventFlags[i] & kFSEventStreamEventFlagItemChangeOwner) == kFSEventStreamEventFlagItemChangeOwner ||
    (eventFlags[i] & kFSEventStreamEventFlagItemXattrMod) == kFSEventStreamEventFlagItemXattrMod;
    bool isRenamed = (eventFlags[i] & kFSEventStreamEventFlagItemRenamed) == kFSEventStreamEventFlagItemRenamed;
    bool isDone = (eventFlags[i] & kFSEventStreamEventFlagHistoryDone) == kFSEventStreamEventFlagHistoryDone;
    bool isDir = (eventFlags[i] & kFSEventStreamEventFlagItemIsDir) == kFSEventStreamEventFlagItemIsDir;

    // Dropped-event flags mean the event stream is incomplete; surface an
    // error so the consumer knows to re-scan.
    if (eventFlags[i] & kFSEventStreamEventFlagMustScanSubDirs) {
      if (eventFlags[i] & kFSEventStreamEventFlagUserDropped) {
        list.error("Events were dropped by the FSEvents client. File system must be re-scanned.");
      } else if (eventFlags[i] & kFSEventStreamEventFlagKernelDropped) {
        list.error("Events were dropped by the kernel. File system must be re-scanned.");
      } else {
        list.error("Too many events. File system must be re-scanned.");
      }
    }

    // End of historical replay (see getEventsSince): wake the waiter.
    if (isDone) {
      watcher->notify();
      break;
    }

    auto ignoredFlags = IGNORED_FLAGS;
    if (__builtin_available(macOS 10.13, *)) {
      ignoredFlags |= kFSEventStreamEventFlagItemCloned;
    }

    // If we don't care about any of the flags that are set, ignore this event.
    if ((eventFlags[i] & ~ignoredFlags) == 0) {
      continue;
    }

    // FSEvents exclusion paths only apply to files, not directories.
    if (watcher->isIgnored(paths[i])) {
      continue;
    }

    // Handle unambiguous events first
    if (isCreated && !(isRemoved || isModified || isRenamed)) {
      state->tree->add(paths[i], 0, isDir);
      list.create(paths[i]);
    } else if (isRemoved && !(isCreated || isModified || isRenamed)) {
      state->tree->remove(paths[i]);
      list.remove(paths[i]);
      if (paths[i] == watcher->mDir) {
        deletedRoot = true;
      }
    } else if (isModified && !(isCreated || isRemoved || isRenamed)) {
      struct stat file;
      if (stat(paths[i], &file)) {
        continue;
      }

      // Ignore if mtime is the same as the last event.
      // This prevents duplicate events from being emitted.
      // If tv_nsec is zero, the file system probably only has second-level
      // granularity so allow the even through in that case.
      uint64_t mtime = CONVERT_TIME(file.st_mtimespec);
      DirEntry *entry = state->tree->find(paths[i]);
      if (entry && mtime == entry->mtime && file.st_mtimespec.tv_nsec != 0) {
        continue;
      }

      if (entry) {
        // Update mtime.
        entry->mtime = mtime;
      } else {
        // Add to tree if this path has not been discovered yet.
        state->tree->add(paths[i], mtime, S_ISDIR(file.st_mode));
      }

      list.update(paths[i]);
    } else {
      // If multiple flags were set, then we need to call `stat` to determine if the file really exists.
      // This helps disambiguate creates, updates, and deletes.
      struct stat file;
      if (stat(paths[i], &file) || !pathExists(paths[i])) {
        // File does not exist, so we have to assume it was removed. This is not exact since the
        // flags set by fsevents get coalesced together (e.g. created & deleted), so there is no way to
        // know whether the create and delete both happened since our snapshot (in which case
        // we'd rather ignore this event completely). This will result in some extra delete events
        // being emitted for files we don't know about, but that is the best we can do.
        state->tree->remove(paths[i]);
        list.remove(paths[i]);
        if (paths[i] == watcher->mDir) {
          deletedRoot = true;
        }
        continue;
      }

      // If the file was modified, and existed before, then this is an update, otherwise a create.
      uint64_t ctime = CONVERT_TIME(file.st_birthtimespec);
      uint64_t mtime = CONVERT_TIME(file.st_mtimespec);
      DirEntry *entry = !since ? state->tree->find(paths[i]) : NULL;
      if (entry && entry->mtime == mtime && file.st_mtimespec.tv_nsec != 0) {
        continue;
      }

      // Some mounted file systems report a creation time of 0/unix epoch which we special case.
      if (isModified && (entry || (ctime <= since && ctime != 0))) {
        state->tree->update(paths[i], mtime);
        list.update(paths[i]);
      } else {
        state->tree->add(paths[i], mtime, S_ISDIR(file.st_mode));
        list.create(paths[i]);
      }
    }
  }

  // Live subscriptions (since == 0) notify immediately; historical replays
  // notify on the HistoryDone sentinel above instead.
  if (!since) {
    watcher->notify();
  }

  // Stop watching if the root directory was deleted.
  if (deletedRoot) {
    stopStream((FSEventStreamRef)streamRef, CFRunLoopGetCurrent());
    watcher->state = nullptr;
  }
}
|||
|
|||
void checkWatcher(WatcherRef watcher) {
  // Validate that the watched root exists and is a directory before an
  // FSEvents stream is created for it.
  struct stat info;
  if (stat(watcher->mDir.c_str(), &info) != 0) {
    throw WatcherError(strerror(errno), watcher);
  }

  if (!S_ISDIR(info.st_mode)) {
    throw WatcherError(strerror(ENOTDIR), watcher);
  }
}
|||
|
|||
void FSEventsBackend::startStream(WatcherRef watcher, FSEventStreamEventId id) {
  // Create, configure, schedule, and start an FSEvents stream rooted at the
  // watcher's directory, beginning at event `id`. On success, stores the
  // stream and a fresh DirTree on the watcher's State.
  checkWatcher(watcher);

  // Coalescing latency in seconds passed to FSEventStreamCreate.
  CFAbsoluteTime latency = 0.001;
  CFStringRef fileWatchPath = CFStringCreateWithCString(
    NULL,
    watcher->mDir.c_str(),
    kCFStringEncodingUTF8
  );

  CFArrayRef pathsToWatch = CFArrayCreate(
    NULL,
    (const void **)&fileWatchPath,
    1,
    NULL
  );

  // Make a watcher reference we can pass into the callback. This ensures bumped ref-count.
  std::shared_ptr<Watcher>* callbackWatcher = new std::shared_ptr<Watcher> (watcher);
  FSEventStreamContext callbackInfo {0, static_cast<void*> (callbackWatcher), nullptr, nullptr, nullptr};
  FSEventStreamRef stream = FSEventStreamCreate(
    NULL,
    &FSEventsCallback,
    &callbackInfo,
    pathsToWatch,
    id,
    latency,
    kFSEventStreamCreateFlagFileEvents
  );

  // Build the exclusion list from the watcher's ignore paths.
  CFMutableArrayRef exclusions = CFArrayCreateMutable(NULL, watcher->mIgnorePaths.size(), NULL);
  for (auto it = watcher->mIgnorePaths.begin(); it != watcher->mIgnorePaths.end(); it++) {
    CFStringRef path = CFStringCreateWithCString(
      NULL,
      it->c_str(),
      kCFStringEncodingUTF8
    );

    CFArrayAppendValue(exclusions, (const void *)path);
  }

  FSEventStreamSetExclusionPaths(stream, exclusions);

  FSEventStreamScheduleWithRunLoop(stream, mRunLoop, kCFRunLoopDefaultMode);
  bool started = FSEventStreamStart(stream);

  // NOTE(review): `exclusions`, the CFStrings appended to it, and
  // `callbackWatcher` do not appear to be released/deleted on any path in
  // this function — confirm intended ownership.
  CFRelease(pathsToWatch);
  CFRelease(fileWatchPath);

  if (!started) {
    FSEventStreamRelease(stream);
    throw WatcherError("Error starting FSEvents stream", watcher);
  }

  auto stateGuard = watcher->state;
  State* s = static_cast<State*>(stateGuard.get());
  s->tree = std::make_shared<DirTree>(watcher->mDir);
  s->stream = stream;
}
|||
|
|||
void FSEventsBackend::start() {
  // Runs the CFRunLoop for this backend's thread; startStream() schedules
  // FSEvents streams onto mRunLoop.
  mRunLoop = CFRunLoopGetCurrent();
  CFRetain(mRunLoop);

  // Unlock once run loop has started.
  CFRunLoopPerformBlock(mRunLoop, kCFRunLoopDefaultMode, ^ {
    notifyStarted();
  });

  CFRunLoopWakeUp(mRunLoop);
  // Blocks until CFRunLoopStop is called (see the destructor).
  CFRunLoopRun();
}
|||
|
|||
FSEventsBackend::~FSEventsBackend() {
  // Stop the run loop started by start() and drop the retain taken there.
  std::unique_lock<std::mutex> lock(mMutex);
  CFRunLoopStop(mRunLoop);
  CFRelease(mRunLoop);
}
|||
|
|||
void FSEventsBackend::writeSnapshot(WatcherRef watcher, std::string *snapshotPath) {
  // A snapshot is the current FSEvents event id followed by a newline and a
  // wall-clock timestamp; getEventsSince() reads this format back.
  std::unique_lock<std::mutex> lock(mMutex);
  checkWatcher(watcher);

  FSEventStreamEventId id = FSEventsGetCurrentEventId();
  std::ofstream ofs(*snapshotPath);
  ofs << id << "\n";

  struct timespec now;
  clock_gettime(CLOCK_REALTIME, &now);
  ofs << CONVERT_TIME(now);
}
|||
|
|||
void FSEventsBackend::getEventsSince(WatcherRef watcher, std::string *snapshotPath) {
  // Replay historical events recorded since the snapshot was written.
  std::unique_lock<std::mutex> lock(mMutex);
  std::ifstream ifs(*snapshotPath);
  if (ifs.fail()) {
    // No readable snapshot: nothing to replay.
    return;
  }

  // Snapshot format: FSEvents event id, then timestamp (see writeSnapshot).
  FSEventStreamEventId id;
  uint64_t since;
  ifs >> id;
  ifs >> since;

  auto s = std::make_shared<State>();
  s->since = since;
  watcher->state = s;

  // Start a stream at the historical event id; FSEventsCallback calls
  // watcher->notify() when the HistoryDone sentinel event arrives, which
  // releases the wait() below.
  startStream(watcher, id);
  watcher->wait();
  stopStream(s->stream, mRunLoop);

  watcher->state = nullptr;
}
|||
|
|||
// This function is called by Backend::watch which takes a lock on mMutex
|
|||
// This function is called by Backend::watch which takes a lock on mMutex
void FSEventsBackend::subscribe(WatcherRef watcher) {
  // Live subscription: no history requested, so `since` stays zero and the
  // stream starts at the current event id.
  auto state = std::make_shared<State>();
  state->since = 0;
  watcher->state = state;
  startStream(watcher, kFSEventStreamEventIdSinceNow);
}
|||
|
|||
// This function is called by Backend::unwatch which takes a lock on mMutex
|
|||
// This function is called by Backend::unwatch which takes a lock on mMutex
void FSEventsBackend::unsubscribe(WatcherRef watcher) {
  // Hold a reference to the state for the duration of the teardown, then
  // clear it from the watcher.
  auto stateGuard = watcher->state;
  auto *state = static_cast<State*>(stateGuard.get());
  if (state == nullptr) {
    return;
  }

  stopStream(state->stream, mRunLoop);
  watcher->state = nullptr;
}
|||
@ -0,0 +1,20 @@ |
|||
#ifndef FS_EVENTS_H
|
|||
#define FS_EVENTS_H
|
|||
|
|||
#include <CoreServices/CoreServices.h>
|
|||
#include "../Backend.hh"
|
|||
|
|||
// File watching backend based on the macOS FSEvents framework.
class FSEventsBackend : public Backend {
public:
  // Runs the CFRunLoop for this backend's thread.
  void start() override;
  ~FSEventsBackend();
  void writeSnapshot(WatcherRef watcher, std::string *snapshotPath) override;
  void getEventsSince(WatcherRef watcher, std::string *snapshotPath) override;
  void subscribe(WatcherRef watcher) override;
  void unsubscribe(WatcherRef watcher) override;
private:
  // Creates and starts an FSEvents stream beginning at event `id`.
  void startStream(WatcherRef watcher, FSEventStreamEventId id);
  CFRunLoopRef mRunLoop;  // run loop acquired and retained by start()
};
|||
|
|||
#endif
|
|||
@ -0,0 +1,41 @@ |
|||
#include <memory>
#include <string>

#include "../DirTree.hh"
#include "../Event.hh"
#include "./BruteForceBackend.hh"
|
|||
|
|||
std::shared_ptr<DirTree> BruteForceBackend::getTree(WatcherRef watcher, bool shouldRead) {
  // Fetch the (possibly cached) tree for this root and populate it on first
  // use when the caller asked for a full read.
  auto tree = DirTree::getCached(watcher->mDir);

  if (shouldRead && !tree->isComplete) {
    readTree(watcher, tree);
    tree->isComplete = true;
  }

  return tree;
}
|||
|
|||
void BruteForceBackend::writeSnapshot(WatcherRef watcher, std::string *snapshotPath) {
  // Serialize the watcher's current directory tree to the snapshot file.
  std::unique_lock<std::mutex> lock(mMutex);
  auto tree = getTree(watcher);
  FILE *f = fopen(snapshotPath->c_str(), "w");
  if (!f) {
    throw std::runtime_error(std::string("Unable to open snapshot file: ") + strerror(errno));
  }

  // RAII guard: the original leaked `f` if tree->write threw.
  std::unique_ptr<FILE, int (*)(FILE *)> guard(f, fclose);
  tree->write(f);
}
|||
|
|||
void BruteForceBackend::getEventsSince(WatcherRef watcher, std::string *snapshotPath) {
  // Diff the tree stored in the snapshot file against the current state of
  // the file system, emitting the differences onto the watcher's event list.
  std::unique_lock<std::mutex> lock(mMutex);
  FILE *f = fopen(snapshotPath->c_str(), "r");
  if (!f) {
    throw std::runtime_error(std::string("Unable to open snapshot file: ") + strerror(errno));
  }

  // RAII guard: the original leaked `f` if parsing or diffing threw.
  std::unique_ptr<FILE, int (*)(FILE *)> guard(f, fclose);

  DirTree snapshot{watcher->mDir, f};
  auto now = getTree(watcher);
  now->getChanges(&snapshot, watcher->mEvents);
}
|||
@ -0,0 +1,25 @@ |
|||
#ifndef BRUTE_FORCE_H
|
|||
#define BRUTE_FORCE_H
|
|||
|
|||
#include "../Backend.hh"
|
|||
#include "../DirTree.hh"
|
|||
#include "../Watcher.hh"
|
|||
|
|||
// Backend that works by reading and diffing full directory trees; platform
// backends derive from it to reuse the tree reading and snapshot logic.
class BruteForceBackend : public Backend {
public:
  void writeSnapshot(WatcherRef watcher, std::string *snapshotPath) override;
  void getEventsSince(WatcherRef watcher, std::string *snapshotPath) override;
  // Brute force has no change-notification mechanism, so live subscriptions
  // are unsupported.
  void subscribe(WatcherRef watcher) override {
    throw "Brute force backend doesn't support subscriptions.";
  }

  void unsubscribe(WatcherRef watcher) override {
    throw "Brute force backend doesn't support subscriptions.";
  }

  // Returns the cached DirTree for the watcher's root, reading it from disk
  // first when shouldRead is true and the tree is not yet complete.
  std::shared_ptr<DirTree> getTree(WatcherRef watcher, bool shouldRead = true);
private:
  // Platform-specific directory scan (separate implementation per OS).
  void readTree(WatcherRef watcher, std::shared_ptr<DirTree> tree);
};
|||
|
|||
#endif
|
|||
@ -0,0 +1,50 @@ |
|||
#include <string>
|
|||
|
|||
// weird error on linux
|
|||
#ifdef __THROW
|
|||
#undef __THROW
|
|||
#endif
|
|||
#define __THROW
|
|||
|
|||
#include <fts.h>
|
|||
#include <sys/stat.h>
|
|||
#include "../DirTree.hh"
|
|||
#include "../shared/BruteForceBackend.hh"
|
|||
|
|||
#define CONVERT_TIME(ts) ((uint64_t)ts.tv_sec * 1000000000 + ts.tv_nsec)
|
|||
#if __APPLE__
|
|||
#define st_mtim st_mtimespec
|
|||
#endif
|
|||
|
|||
void BruteForceBackend::readTree(WatcherRef watcher, std::shared_ptr<DirTree> tree) {
  // Walk the watched directory hierarchy with fts(3) and record every
  // non-ignored entry (path, mtime, is-directory) into `tree`.
  char *paths[2] {(char *)watcher->mDir.c_str(), NULL};
  // FTS_PHYSICAL: do not follow symlinks; FTS_NOCHDIR: don't chdir while walking.
  FTS *fts = fts_open(paths, FTS_NOCHDIR | FTS_PHYSICAL, NULL);
  if (!fts) {
    throw WatcherError(strerror(errno), watcher);
  }

  FTSENT *node;
  bool isRoot = true;

  while ((node = fts_read(fts)) != NULL) {
    if (node->fts_errno) {
      fts_close(fts);
      throw WatcherError(strerror(node->fts_errno), watcher);
    }

    // The first node must be a directory or the root is not watchable.
    if (isRoot && !(node->fts_info & FTS_D)) {
      fts_close(fts);
      throw WatcherError(strerror(ENOTDIR), watcher);
    }

    if (watcher->isIgnored(std::string(node->fts_path))) {
      // Don't descend into ignored directories.
      fts_set(fts, node, FTS_SKIP);
      continue;
    }

    tree->add(node->fts_path, CONVERT_TIME(node->fts_statp->st_mtim), (node->fts_info & FTS_D) == FTS_D);
    isRoot = false;
  }

  fts_close(fts);
}
|||
@ -0,0 +1,77 @@ |
|||
#include <string>
|
|||
|
|||
// weird error on linux
|
|||
#ifdef __THROW
|
|||
#undef __THROW
|
|||
#endif
|
|||
#define __THROW
|
|||
|
|||
#ifdef _LIBC
|
|||
# include <include/sys/stat.h>
|
|||
#else
|
|||
# include <sys/stat.h>
|
|||
#endif
|
|||
#include <dirent.h>
|
|||
#include <unistd.h>
|
|||
#include <fcntl.h>
|
|||
|
|||
#include "../DirTree.hh"
|
|||
#include "../shared/BruteForceBackend.hh"
|
|||
|
|||
#define CONVERT_TIME(ts) ((uint64_t)ts.tv_sec * 1000000000 + ts.tv_nsec)
|
|||
#if __APPLE__
|
|||
#define st_mtim st_mtimespec
|
|||
#endif
|
|||
#define ISDOT(a) (a[0] == '.' && (!a[1] || (a[1] == '.' && !a[2])))
|
|||
|
|||
// Recursively records one directory (and its children) into `tree`.
// `relative` is the entry name relative to `parent_fd`; `dirname` is the full
// path used as the tree key. Directories the process cannot read (EACCES) are
// silently skipped; other open failures throw WatcherError.
void iterateDir(WatcherRef watcher, const std::shared_ptr <DirTree> tree, const char *relative, int parent_fd, const std::string &dirname) {
  int open_flags = (O_RDONLY | O_CLOEXEC | O_DIRECTORY | O_NOCTTY | O_NONBLOCK | O_NOFOLLOW);
  int new_fd = openat(parent_fd, relative, open_flags);
  if (new_fd == -1) {
    if (errno == EACCES) {
      return; // ignore insufficient permissions
    }

    throw WatcherError(strerror(errno), watcher);
  }

  // Record the directory itself with its own mtime.
  // NOTE(review): the fstatat return value is not checked; on failure
  // rootAttributes is uninitialized — confirm whether that can occur here.
  struct stat rootAttributes;
  fstatat(new_fd, ".", &rootAttributes, AT_SYMLINK_NOFOLLOW);
  tree->add(dirname, CONVERT_TIME(rootAttributes.st_mtim), true);

  if (DIR *dir = fdopendir(new_fd)) {
    // errno is reset before each readdir so that a NULL return can be
    // distinguished between end-of-directory (errno == 0) and a real error.
    while (struct dirent *ent = (errno = 0, readdir(dir))) {
      if (ISDOT(ent->d_name)) continue;

      std::string fullPath = dirname + "/" + ent->d_name;

      if (!watcher->isIgnored(fullPath)) {
        struct stat attrib;
        fstatat(new_fd, ent->d_name, &attrib, AT_SYMLINK_NOFOLLOW);
        // NOTE(review): relies on d_type being supported by the filesystem;
        // DT_UNKNOWN entries would be treated as files — confirm.
        bool isDir = ent->d_type == DT_DIR;

        if (isDir) {
          iterateDir(watcher, tree, ent->d_name, new_fd, fullPath);
        } else {
          tree->add(fullPath, CONVERT_TIME(attrib.st_mtim), isDir);
        }
      }
    }

    // closedir also closes new_fd (fdopendir takes ownership of the fd).
    closedir(dir);
  } else {
    close(new_fd);
  }

  if (errno) {
    throw WatcherError(strerror(errno), watcher);
  }
}
|||
|
|||
// Builds the initial directory tree for `watcher` by recursively walking
// watcher->mDir with openat/fdopendir (see iterateDir above).
//
// Fix: open(2) returns -1 on failure and 0 is a valid descriptor. The old
// `if (fd)` check treated -1 as success (recursing with a bad parent fd) and
// would skip a legitimate fd 0. Also ensure the fd is closed when iterateDir
// throws, which previously leaked it.
void BruteForceBackend::readTree(WatcherRef watcher, std::shared_ptr<DirTree> tree) {
  int fd = open(watcher->mDir.c_str(), O_RDONLY);
  if (fd < 0) {
    throw WatcherError(strerror(errno), watcher);
  }

  try {
    iterateDir(watcher, tree, ".", fd, watcher->mDir);
  } catch (...) {
    close(fd);
    throw;
  }

  close(fd);
}
|||
@ -0,0 +1,132 @@ |
|||
#include <sys/stat.h>
|
|||
#include "WasmBackend.hh"
|
|||
|
|||
#define CONVERT_TIME(ts) ((uint64_t)ts.tv_sec * 1000000000 + ts.tv_nsec)
|
|||
|
|||
// Backend start hook. The wasm backend has no polling thread of its own —
// events arrive via wasm_backend_event_handler — so just signal readiness.
void WasmBackend::start() {
  notifyStarted();
}
|||
|
|||
// Subscribe a watcher: snapshot the full directory tree once, then register
// a native watch for every directory entry present in that snapshot.
void WasmBackend::subscribe(WatcherRef watcher) {
  std::shared_ptr<DirTree> tree = getTree(watcher);

  for (auto &entry : tree->entries) {
    if (entry.second.isDir) {
      watchDir(watcher, entry.second.path, tree);
    }
  }
}
|||
|
|||
// Registers a native watch on `path` and records the subscription so events
// for the returned watch descriptor can be routed back to `watcher`.
void WasmBackend::watchDir(WatcherRef watcher, std::string path, std::shared_ptr<DirTree> tree) {
  auto sub = std::make_shared<WasmSubscription>();
  sub->tree = tree;
  sub->path = path;
  sub->watcher = watcher;

  int wd = wasm_backend_add_watch(path.c_str(), (void *)this);
  mSubscriptions.emplace(wd, sub);
}
|||
|
|||
extern "C" void wasm_backend_event_handler(void *backend, int wd, int type, char *filename) { |
|||
WasmBackend *b = (WasmBackend *)(backend); |
|||
b->handleEvent(wd, type, filename); |
|||
} |
|||
|
|||
void WasmBackend::handleEvent(int wd, int type, char *filename) { |
|||
// Find the subscriptions for this watch descriptor
|
|||
auto range = mSubscriptions.equal_range(wd); |
|||
std::unordered_set<std::shared_ptr<WasmSubscription>> set; |
|||
for (auto it = range.first; it != range.second; it++) { |
|||
set.insert(it->second); |
|||
} |
|||
|
|||
for (auto it = set.begin(); it != set.end(); it++) { |
|||
if (handleSubscription(type, filename, *it)) { |
|||
(*it)->watcher->notify(); |
|||
} |
|||
} |
|||
} |
|||
|
|||
// Applies a single native event to one subscription's tree and event list.
// Returns true when the watcher should be notified, false when the event was
// irrelevant (ignored path, or an already-forgotten entry).
// `type` values: 1 appears to mean "modified"; 2 covers create/delete/rename,
// disambiguated below by whether the path still exists — TODO(review):
// confirm against the JS host implementation.
bool WasmBackend::handleSubscription(int type, char *filename, std::shared_ptr<WasmSubscription> sub) {
  // Build full path and check if its in our ignore list.
  WatcherRef watcher = sub->watcher;
  std::string path = std::string(sub->path);

  if (filename[0] != '\0') {
    path += "/" + std::string(filename);
  }

  if (watcher->isIgnored(path)) {
    return false;
  }

  if (type == 1) {
    struct stat st;
    stat(path.c_str(), &st);
    sub->tree->update(path, CONVERT_TIME(st.st_mtim));
    watcher->mEvents.update(path);
  } else if (type == 2) {
    // Determine if this is a create or delete depending on if the file exists or not.
    struct stat st;
    if (lstat(path.c_str(), &st)) {
      // Path no longer exists: treat as a delete.
      // If the entry being deleted/moved is a directory, remove it from the list of subscriptions
      DirEntry *entry = sub->tree->find(path);
      if (!entry) {
        return false;
      }

      if (entry->isDir) {
        std::string pathStart = path + DIR_SEP;
        // Drop native watches for the directory itself and everything below it.
        for (auto it = mSubscriptions.begin(); it != mSubscriptions.end();) {
          if (it->second->path == path || it->second->path.rfind(pathStart, 0) == 0) {
            wasm_backend_remove_watch(it->first);
            it = mSubscriptions.erase(it);
          } else {
            ++it;
          }
        }

        // Remove all sub-entries
        for (auto it = sub->tree->entries.begin(); it != sub->tree->entries.end();) {
          if (it->first.rfind(pathStart, 0) == 0) {
            watcher->mEvents.remove(it->first);
            it = sub->tree->entries.erase(it);
          } else {
            it++;
          }
        }
      }

      watcher->mEvents.remove(path);
      sub->tree->remove(path);
    } else if (sub->tree->find(path)) {
      // Path exists and is already tracked: an update.
      sub->tree->update(path, CONVERT_TIME(st.st_mtim));
      watcher->mEvents.update(path);
    } else {
      watcher->mEvents.create(path);

      // If this is a create, check if it's a directory and start watching if it is.
      DirEntry *entry = sub->tree->add(path, CONVERT_TIME(st.st_mtim), S_ISDIR(st.st_mode));
      if (entry->isDir) {
        watchDir(watcher, path, sub->tree);
      }
    }
  }

  return true;
}
|||
|
|||
// Removes every subscription owned by `watcher`.
void WasmBackend::unsubscribe(WatcherRef watcher) {
  // Find any subscriptions pointing to this watcher, and remove them.
  for (auto it = mSubscriptions.begin(); it != mSubscriptions.end();) {
    if (it->second->watcher.get() == watcher.get()) {
      // Only tear down the native watch when this is the last subscription
      // registered for that watch descriptor (it is a multimap).
      if (mSubscriptions.count(it->first) == 1) {
        wasm_backend_remove_watch(it->first);
      }

      it = mSubscriptions.erase(it);
    } else {
      it++;
    }
  }
}
|||
@ -0,0 +1,34 @@ |
|||
#ifndef WASM_H
|
|||
#define WASM_H
|
|||
|
|||
#include <unordered_map>
|
|||
#include "../shared/BruteForceBackend.hh"
|
|||
#include "../DirTree.hh"
|
|||
|
|||
// Host-side functions implemented in JS; the backend calls the first two to
// (un)register watches and the host calls the handler back with events.
extern "C" {
  int wasm_backend_add_watch(const char *filename, void *backend);
  void wasm_backend_remove_watch(int wd);
  void wasm_backend_event_handler(void *backend, int wd, int type, char *filename);
};

// One watch-descriptor registration: the tree being maintained, the watched
// directory path, and the watcher to notify.
struct WasmSubscription {
  std::shared_ptr<DirTree> tree;
  std::string path;
  WatcherRef watcher;
};

// Watcher backend for WASM builds: builds trees brute-force and receives
// change events from the JS host via wasm_backend_event_handler.
class WasmBackend : public BruteForceBackend {
public:
  void start() override;
  void subscribe(WatcherRef watcher) override;
  void unsubscribe(WatcherRef watcher) override;
  void handleEvent(int wd, int type, char *filename);
private:
  // NOTE(review): appears unused in the visible sources — confirm.
  int mWasm;
  // wd -> subscriptions; multimap because several watchers may share a wd.
  std::unordered_multimap<int, std::shared_ptr<WasmSubscription>> mSubscriptions;

  void watchDir(WatcherRef watcher, std::string path, std::shared_ptr<DirTree> tree);
  bool handleSubscription(int type, char *filename, std::shared_ptr<WasmSubscription> sub);
};
|||
|
|||
#endif
|
|||
@ -0,0 +1,74 @@ |
|||
/* |
|||
Copyright Node.js contributors. All rights reserved. |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to |
|||
deal in the Software without restriction, including without limitation the |
|||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or |
|||
sell copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in |
|||
all copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |
|||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |
|||
IN THE SOFTWARE. |
|||
*/ |
|||
|
|||
// Node does not include the headers for these functions when compiling for WASM, so add them here. |
|||
#ifdef __wasm32__ |
|||
extern "C" { |
|||
NAPI_EXTERN napi_status NAPI_CDECL |
|||
napi_create_threadsafe_function(napi_env env, |
|||
napi_value func, |
|||
napi_value async_resource, |
|||
napi_value async_resource_name, |
|||
size_t max_queue_size, |
|||
size_t initial_thread_count, |
|||
void* thread_finalize_data, |
|||
napi_finalize thread_finalize_cb, |
|||
void* context, |
|||
napi_threadsafe_function_call_js call_js_cb, |
|||
napi_threadsafe_function* result); |
|||
|
|||
NAPI_EXTERN napi_status NAPI_CDECL napi_get_threadsafe_function_context( |
|||
napi_threadsafe_function func, void** result); |
|||
|
|||
NAPI_EXTERN napi_status NAPI_CDECL |
|||
napi_call_threadsafe_function(napi_threadsafe_function func, |
|||
void* data, |
|||
napi_threadsafe_function_call_mode is_blocking); |
|||
|
|||
NAPI_EXTERN napi_status NAPI_CDECL |
|||
napi_acquire_threadsafe_function(napi_threadsafe_function func); |
|||
|
|||
NAPI_EXTERN napi_status NAPI_CDECL napi_release_threadsafe_function( |
|||
napi_threadsafe_function func, napi_threadsafe_function_release_mode mode); |
|||
|
|||
NAPI_EXTERN napi_status NAPI_CDECL |
|||
napi_unref_threadsafe_function(napi_env env, napi_threadsafe_function func); |
|||
|
|||
NAPI_EXTERN napi_status NAPI_CDECL |
|||
napi_ref_threadsafe_function(napi_env env, napi_threadsafe_function func); |
|||
|
|||
NAPI_EXTERN napi_status NAPI_CDECL |
|||
napi_create_async_work(napi_env env, |
|||
napi_value async_resource, |
|||
napi_value async_resource_name, |
|||
napi_async_execute_callback execute, |
|||
napi_async_complete_callback complete, |
|||
void* data, |
|||
napi_async_work* result); |
|||
NAPI_EXTERN napi_status NAPI_CDECL napi_delete_async_work(napi_env env, |
|||
napi_async_work work); |
|||
NAPI_EXTERN napi_status NAPI_CDECL napi_queue_async_work(napi_env env, |
|||
napi_async_work work); |
|||
NAPI_EXTERN napi_status NAPI_CDECL napi_cancel_async_work(napi_env env, |
|||
napi_async_work work); |
|||
} |
|||
#endif |
|||
@ -0,0 +1,302 @@ |
|||
#include <stdint.h>
|
|||
#include "./BSER.hh"
|
|||
|
|||
BSERType decodeType(std::istream &iss) { |
|||
int8_t type; |
|||
iss.read(reinterpret_cast<char*>(&type), sizeof(type)); |
|||
return (BSERType) type; |
|||
} |
|||
|
|||
void expectType(std::istream &iss, BSERType expected) { |
|||
BSERType got = decodeType(iss); |
|||
if (got != expected) { |
|||
throw std::runtime_error("Unexpected BSER type"); |
|||
} |
|||
} |
|||
|
|||
void encodeType(std::ostream &oss, BSERType type) { |
|||
int8_t t = (int8_t)type; |
|||
oss.write(reinterpret_cast<char*>(&t), sizeof(t)); |
|||
} |
|||
|
|||
// Generic holder tying a concrete C++ value to the BSERValue interface;
// each BSER type below subclasses this with its payload type.
template<typename T>
class Value : public BSERValue {
public:
  T value;
  Value(T val) {
    value = val;
  }

  Value() {}
};
|||
|
|||
// BSER integer: decoded from any of the four fixed-width tags, always held
// as int64_t; encoded back using the smallest width that can represent it.
class BSERInteger : public Value<int64_t> {
public:
  BSERInteger(int64_t value) : Value(value) {}
  // Decodes an integer of any width from the stream.
  // Throws if the next tag is not one of the integer tags.
  BSERInteger(std::istream &iss) {
    int8_t int8;
    int16_t int16;
    int32_t int32;
    int64_t int64;

    BSERType type = decodeType(iss);

    switch (type) {
      case BSER_INT8:
        iss.read(reinterpret_cast<char*>(&int8), sizeof(int8));
        value = int8;
        break;
      case BSER_INT16:
        iss.read(reinterpret_cast<char*>(&int16), sizeof(int16));
        value = int16;
        break;
      case BSER_INT32:
        iss.read(reinterpret_cast<char*>(&int32), sizeof(int32));
        value = int32;
        break;
      case BSER_INT64:
        iss.read(reinterpret_cast<char*>(&int64), sizeof(int64));
        value = int64;
        break;
      default:
        throw std::runtime_error("Invalid BSER int type");
    }
  }

  int64_t intValue() override {
    return value;
  }

  void encode(std::ostream &oss) override {
    // Pick the smallest width that fits. Fix: the old checks only compared
    // against the upper bound, so any value below INT8_MIN (e.g. -300) still
    // satisfied `value <= INT8_MAX` and was silently truncated to a bogus
    // int8; both bounds must be checked.
    if (value >= INT8_MIN && value <= INT8_MAX) {
      encodeType(oss, BSER_INT8);
      int8_t v = (int8_t)value;
      oss.write(reinterpret_cast<char*>(&v), sizeof(v));
    } else if (value >= INT16_MIN && value <= INT16_MAX) {
      encodeType(oss, BSER_INT16);
      int16_t v = (int16_t)value;
      oss.write(reinterpret_cast<char*>(&v), sizeof(v));
    } else if (value >= INT32_MIN && value <= INT32_MAX) {
      encodeType(oss, BSER_INT32);
      int32_t v = (int32_t)value;
      oss.write(reinterpret_cast<char*>(&v), sizeof(v));
    } else {
      encodeType(oss, BSER_INT64);
      oss.write(reinterpret_cast<char*>(&value), sizeof(value));
    }
  }
};
|||
|
|||
// BSER array: tag, element count (as a BSER integer), then the elements.
class BSERArray : public Value<BSER::Array> {
public:
  BSERArray() : Value() {}
  BSERArray(BSER::Array value) : Value(value) {}
  BSERArray(std::istream &iss) {
    expectType(iss, BSER_ARRAY);
    int64_t len = BSERInteger(iss).intValue();
    for (int64_t i = 0; i < len; i++) {
      value.push_back(BSER(iss));
    }
  }

  BSER::Array arrayValue() override {
    return value;
  }

  void encode(std::ostream &oss) override {
    encodeType(oss, BSER_ARRAY);
    BSERInteger(value.size()).encode(oss);
    for (auto it = value.begin(); it != value.end(); it++) {
      it->encode(oss);
    }
  }
};

// BSER string: tag, byte length, then raw (not NUL-terminated) bytes.
class BSERString : public Value<std::string> {
public:
  BSERString(std::string value) : Value(value) {}
  BSERString(std::istream &iss) {
    expectType(iss, BSER_STRING);
    // NOTE(review): len comes straight off the wire and is used to size the
    // buffer without sanity checks — trusted-peer protocol assumed.
    int64_t len = BSERInteger(iss).intValue();
    value.resize(len);
    iss.read(&value[0], len);
  }

  std::string stringValue() override {
    return value;
  }

  void encode(std::ostream &oss) override {
    encodeType(oss, BSER_STRING);
    BSERInteger(value.size()).encode(oss);
    oss << value;
  }
};

// BSER object: tag, pair count, then (string key, value) pairs.
class BSERObject : public Value<BSER::Object> {
public:
  BSERObject() : Value() {}
  BSERObject(BSER::Object value) : Value(value) {}
  BSERObject(std::istream &iss) {
    expectType(iss, BSER_OBJECT);
    int64_t len = BSERInteger(iss).intValue();
    for (int64_t i = 0; i < len; i++) {
      auto key = BSERString(iss).stringValue();
      auto val = BSER(iss);
      value.emplace(key, val);
    }
  }

  BSER::Object objectValue() override {
    return value;
  }

  void encode(std::ostream &oss) override {
    encodeType(oss, BSER_OBJECT);
    BSERInteger(value.size()).encode(oss);
    for (auto it = value.begin(); it != value.end(); it++) {
      BSERString(it->first).encode(oss);
      it->second.encode(oss);
    }
  }
};

// BSER real: tag followed by a raw host-endian double.
class BSERDouble : public Value<double> {
public:
  BSERDouble(double value) : Value(value) {}
  BSERDouble(std::istream &iss) {
    expectType(iss, BSER_REAL);
    iss.read(reinterpret_cast<char*>(&value), sizeof(value));
  }

  double doubleValue() override {
    return value;
  }

  void encode(std::ostream &oss) override {
    encodeType(oss, BSER_REAL);
    oss.write(reinterpret_cast<char*>(&value), sizeof(value));
  }
};
|||
|
|||
// BSER boolean: the value is carried entirely by the type tag, so encoding
// emits just the BSER_BOOL_TRUE / BSER_BOOL_FALSE tag byte.
class BSERBoolean : public Value<bool> {
public:
  BSERBoolean(bool value) : Value(value) {}
  bool boolValue() override { return value; }
  void encode(std::ostream &oss) override {
    encodeType(oss, value ? BSER_BOOL_TRUE : BSER_BOOL_FALSE);
  }
};
|||
|
|||
// BSER null: encodes as a lone tag byte; also serves as the default value
// behind a default-constructed BSER.
class BSERNull : public Value<bool> {
public:
  BSERNull() : Value(false) {}
  void encode(std::ostream &oss) override {
    encodeType(oss, BSER_NULL);
  }
};
|||
|
|||
// Decodes a BSER "template": a shared key array followed by `len` rows of
// values, expanded here into an array of objects. The byte 0x0c is the
// skip marker — it means the current row has no value for that key.
std::shared_ptr<BSERArray> decodeTemplate(std::istream &iss) {
  expectType(iss, BSER_TEMPLATE);
  auto keys = BSERArray(iss).arrayValue();
  auto len = BSERInteger(iss).intValue();
  std::shared_ptr<BSERArray> arr = std::make_shared<BSERArray>();
  for (int64_t i = 0; i < len; i++) {
    BSER::Object obj;
    for (auto it = keys.begin(); it != keys.end(); it++) {
      // Skip marker: this key is absent from the current row.
      if (iss.peek() == 0x0c) {
        iss.ignore(1);
        continue;
      }

      auto val = BSER(iss);
      obj.emplace(it->stringValue(), val);
    }
    arr->value.push_back(obj);
  }
  return arr;
}
|||
|
|||
// Decodes the next BSER value of any type from the stream. The tag byte is
// peeked (read then ungot) so each concrete decoder can re-consume it; the
// tag-only types (bool/null) instead skip the byte here directly.
BSER::BSER(std::istream &iss) {
  BSERType type = decodeType(iss);
  iss.unget();

  switch (type) {
    case BSER_ARRAY:
      m_ptr = std::make_shared<BSERArray>(iss);
      break;
    case BSER_OBJECT:
      m_ptr = std::make_shared<BSERObject>(iss);
      break;
    case BSER_STRING:
      m_ptr = std::make_shared<BSERString>(iss);
      break;
    case BSER_INT8:
    case BSER_INT16:
    case BSER_INT32:
    case BSER_INT64:
      m_ptr = std::make_shared<BSERInteger>(iss);
      break;
    case BSER_REAL:
      m_ptr = std::make_shared<BSERDouble>(iss);
      break;
    case BSER_BOOL_TRUE:
      iss.ignore(1);
      m_ptr = std::make_shared<BSERBoolean>(true);
      break;
    case BSER_BOOL_FALSE:
      iss.ignore(1);
      m_ptr = std::make_shared<BSERBoolean>(false);
      break;
    case BSER_NULL:
      iss.ignore(1);
      m_ptr = std::make_shared<BSERNull>();
      break;
    case BSER_TEMPLATE:
      m_ptr = decodeTemplate(iss);
      break;
    default:
      throw std::runtime_error("unknown BSER type");
  }
}
|||
|
|||
// Value constructors: wrap each concrete C++ type in its BSER counterpart.
// Default-constructed BSER is null.
BSER::BSER() : m_ptr(std::make_shared<BSERNull>()) {}
BSER::BSER(BSER::Array value) : m_ptr(std::make_shared<BSERArray>(value)) {}
BSER::BSER(BSER::Object value) : m_ptr(std::make_shared<BSERObject>(value)) {}
BSER::BSER(const char *value) : m_ptr(std::make_shared<BSERString>(value)) {}
BSER::BSER(std::string value) : m_ptr(std::make_shared<BSERString>(value)) {}
BSER::BSER(int64_t value) : m_ptr(std::make_shared<BSERInteger>(value)) {}
BSER::BSER(double value) : m_ptr(std::make_shared<BSERDouble>(value)) {}
BSER::BSER(bool value) : m_ptr(std::make_shared<BSERBoolean>(value)) {}

// Accessors delegate to the wrapped value; mismatched types fall back to
// BSERValue's defaults (empty container, 0, false) rather than throwing.
BSER::Array BSER::arrayValue() { return m_ptr->arrayValue(); }
BSER::Object BSER::objectValue() { return m_ptr->objectValue(); }
std::string BSER::stringValue() { return m_ptr->stringValue(); }
int64_t BSER::intValue() { return m_ptr->intValue(); }
double BSER::doubleValue() { return m_ptr->doubleValue(); }
bool BSER::boolValue() { return m_ptr->boolValue(); }
void BSER::encode(std::ostream &oss) {
  m_ptr->encode(oss);
}

// Consumes and validates the "\x00\x01" PDU header, then returns the
// declared payload length.
int64_t BSER::decodeLength(std::istream &iss) {
  char pdu[2];
  if (!iss.read(pdu, 2) || pdu[0] != 0 || pdu[1] != 1) {
    throw std::runtime_error("Invalid BSER");
  }

  return BSERInteger(iss).intValue();
}

// Serializes this value as a full PDU: magic header, payload length, payload.
std::string BSER::encode() {
  std::ostringstream oss(std::ios_base::binary);
  encode(oss);

  std::ostringstream res(std::ios_base::binary);
  res.write("\x00\x01", 2);

  BSERInteger(oss.str().size()).encode(res);
  res << oss.str();
  return res.str();
}
|||
@ -0,0 +1,69 @@ |
|||
#ifndef BSER_H
|
|||
#define BSER_H
|
|||
|
|||
#include <string>
|
|||
#include <sstream>
|
|||
#include <vector>
|
|||
#include <unordered_map>
|
|||
#include <memory>
|
|||
|
|||
// Wire tags for the BSER (Binary Serialization) protocol used by watchman.
enum BSERType {
  BSER_ARRAY = 0x00,
  BSER_OBJECT = 0x01,
  BSER_STRING = 0x02,
  BSER_INT8 = 0x03,
  BSER_INT16 = 0x04,
  BSER_INT32 = 0x05,
  BSER_INT64 = 0x06,
  BSER_REAL = 0x07,
  BSER_BOOL_TRUE = 0x08,
  BSER_BOOL_FALSE = 0x09,
  BSER_NULL = 0x0a,
  BSER_TEMPLATE = 0x0b
};

class BSERValue;

// Value-semantics handle over a decoded/encodable BSER value; the concrete
// representation lives behind the shared m_ptr.
class BSER {
public:
  typedef std::vector<BSER> Array;
  typedef std::unordered_map<std::string, BSER> Object;

  BSER();
  BSER(BSER::Array value);
  BSER(BSER::Object value);
  BSER(std::string value);
  BSER(const char *value);
  BSER(int64_t value);
  BSER(double value);
  BSER(bool value);
  // Decodes the next value from a binary stream.
  BSER(std::istream &iss);

  BSER::Array arrayValue();
  BSER::Object objectValue();
  std::string stringValue();
  int64_t intValue();
  double doubleValue();
  bool boolValue();
  void encode(std::ostream &oss);

  // Reads and validates a PDU header, returning the payload length.
  static int64_t decodeLength(std::istream &iss);
  // Serializes as a complete PDU (header + length + payload).
  std::string encode();
private:
  std::shared_ptr<BSERValue> m_ptr;
};

// Base interface for concrete BSER values; accessors default to "empty"
// values so a type mismatch degrades gracefully instead of throwing.
class BSERValue {
protected:
  friend class BSER;
  virtual BSER::Array arrayValue() { return BSER::Array(); }
  virtual BSER::Object objectValue() { return BSER::Object(); }
  virtual std::string stringValue() { return std::string(); }
  virtual int64_t intValue() { return 0; }
  virtual double doubleValue() { return 0; }
  virtual bool boolValue() { return false; }
  virtual void encode(std::ostream &oss) {}
  virtual ~BSERValue() {}
};
|||
|
|||
#endif
|
|||
@ -0,0 +1,175 @@ |
|||
#ifndef IPC_H
|
|||
#define IPC_H
|
|||
|
|||
#include <string>
|
|||
#include <stdlib.h>
|
|||
|
|||
#ifdef _WIN32
|
|||
#include <winsock2.h>
|
|||
#include <windows.h>
|
|||
#else
|
|||
#include <unistd.h>
|
|||
#include <sys/socket.h>
|
|||
#include <sys/un.h>
|
|||
#endif
|
|||
|
|||
// Blocking IPC channel to the watchman server: a named pipe (overlapped I/O)
// on Windows, a UNIX domain socket elsewhere. The destructor sets mStopped
// and cancels/shuts down the handle, which unblocks any in-flight read or
// write; read/write treat errors as benign once mStopped is set.
class IPC {
public:
  // Connects to the pipe/socket at `path`; throws std::runtime_error on
  // failure. On Windows, retries while the pipe is busy (30s wait).
  IPC(std::string path) {
    mStopped = false;
#ifdef _WIN32
    while (true) {
      mPipe = CreateFile(
        path.data(), // pipe name
        GENERIC_READ | GENERIC_WRITE, // read and write access
        0, // no sharing
        NULL, // default security attributes
        OPEN_EXISTING, // opens existing pipe
        FILE_FLAG_OVERLAPPED, // attributes
        NULL // no template file
      );

      if (mPipe != INVALID_HANDLE_VALUE) {
        break;
      }

      if (GetLastError() != ERROR_PIPE_BUSY) {
        throw std::runtime_error("Could not open pipe");
      }

      // Wait for pipe to become available if it is busy
      if (!WaitNamedPipe(path.data(), 30000)) {
        throw std::runtime_error("Error waiting for pipe");
      }
    }

    // Manual-reset events used by the OVERLAPPED structures below.
    mReader = CreateEvent(NULL, true, false, NULL);
    mWriter = CreateEvent(NULL, true, false, NULL);
#else
    struct sockaddr_un addr;
    memset(&addr, 0, sizeof(addr));
    addr.sun_family = AF_UNIX;
    strncpy(addr.sun_path, path.c_str(), sizeof(addr.sun_path) - 1);

    // NOTE(review): socket() failure (-1) is not checked before connect —
    // connect would then fail and throw, so the error is still surfaced.
    mSock = socket(AF_UNIX, SOCK_STREAM, 0);
    if (connect(mSock, (struct sockaddr *) &addr, sizeof(struct sockaddr_un))) {
      throw std::runtime_error("Error connecting to socket");
    }
#endif
  }

  ~IPC() {
    mStopped = true;
#ifdef _WIN32
    CancelIo(mPipe);
    CloseHandle(mPipe);
    CloseHandle(mReader);
    CloseHandle(mWriter);
#else
    shutdown(mSock, SHUT_RDWR);
#endif
  }

  // Writes the whole buffer, blocking until complete. Returns silently if
  // the channel was stopped mid-write; throws on real errors.
  void write(std::string buf) {
#ifdef _WIN32
    OVERLAPPED overlapped;
    overlapped.hEvent = mWriter;
    bool success = WriteFile(
      mPipe, // pipe handle
      buf.data(), // message
      static_cast<DWORD>(buf.size()), // message length
      NULL, // bytes written
      &overlapped // overlapped
    );

    if (mStopped) {
      return;
    }

    if (!success) {
      if (GetLastError() != ERROR_IO_PENDING) {
        throw std::runtime_error("Write error");
      }
    }

    // Block until the overlapped write completes.
    DWORD written;
    success = GetOverlappedResult(mPipe, &overlapped, &written, true);
    if (!success) {
      throw std::runtime_error("GetOverlappedResult failed");
    }

    if (written != buf.size()) {
      throw std::runtime_error("Wrong number of bytes written");
    }
#else
    // Loop until all bytes are written; EAGAIN retries (busy-waits) with
    // r reset to 0 so the offset does not move.
    int r = 0;
    for (unsigned int i = 0; i != buf.size(); i += r) {
      r = ::write(mSock, &buf[i], buf.size() - i);
      if (r == -1) {
        if (errno == EAGAIN) {
          r = 0;
        } else if (mStopped) {
          return;
        } else {
          throw std::runtime_error("Write error");
        }
      }
    }
#endif
  }

  // Reads up to `len` bytes into buf, blocking until data arrives. Returns
  // the byte count, or 0 when stopped; throws on EOF/error while running.
  int read(char *buf, size_t len) {
#ifdef _WIN32
    OVERLAPPED overlapped;
    overlapped.hEvent = mReader;
    bool success = ReadFile(
      mPipe, // pipe handle
      buf, // buffer to receive reply
      static_cast<DWORD>(len), // size of buffer
      NULL, // number of bytes read
      &overlapped // overlapped
    );

    if (!success && !mStopped) {
      if (GetLastError() != ERROR_IO_PENDING) {
        throw std::runtime_error("Read error");
      }
    }

    DWORD read = 0;
    success = GetOverlappedResult(mPipe, &overlapped, &read, true);
    if (!success && !mStopped) {
      throw std::runtime_error("GetOverlappedResult failed");
    }

    return read;
#else
    int r = ::read(mSock, buf, len);
    // 0 means the peer closed the socket — unexpected unless stopping.
    if (r == 0 && !mStopped) {
      throw std::runtime_error("Socket ended unexpectedly");
    }

    if (r < 0) {
      if (mStopped) {
        return 0;
      }

      throw std::runtime_error(strerror(errno));
    }

    return r;
#endif
  }

private:
  bool mStopped;
#ifdef _WIN32
  HANDLE mPipe;
  HANDLE mReader;
  HANDLE mWriter;
#else
  int mSock;
#endif
};
|||
|
|||
#endif
|
|||
@ -0,0 +1,342 @@ |
|||
#include <string>
|
|||
#include <fstream>
|
|||
#include <stdlib.h>
|
|||
#include <algorithm>
|
|||
#include "../DirTree.hh"
|
|||
#include "../Event.hh"
|
|||
#include "./BSER.hh"
|
|||
#include "./WatchmanBackend.hh"
|
|||
|
|||
#ifdef _WIN32
|
|||
#include "../windows/win_utils.hh"
|
|||
#define S_ISDIR(mode) ((mode & _S_IFDIR) == _S_IFDIR)
|
|||
#define popen _popen
|
|||
#define pclose _pclose
|
|||
#else
|
|||
#include <sys/stat.h>
|
|||
#define normalizePath(dir) dir
|
|||
#endif
|
|||
|
|||
// Reads one complete BSER PDU using `do_read` (a (char*, size_t) -> size_t
// callable) and decodes it into a BSER value.
template<typename T>
BSER readBSER(T &&do_read) {
  std::stringstream oss;
  char buffer[256];
  size_t r;
  int64_t len = -1;
  do {
    // Start by reading a minimal amount of data in order to decode the length.
    // After that, attempt to read the remaining length, up to the buffer size.
    r = do_read(buffer, len == -1 ? 20 : (len < 256 ? len : 256));
    oss << std::string(buffer, r);

    if (len == -1) {
      // decodeLength consumes the PDU header; tellg() is how much of the
      // payload those initial bytes already covered, so len becomes the
      // total bytes that must have been read when the PDU is complete.
      uint64_t l = BSER::decodeLength(oss);
      len = l + oss.tellg();
    }

    // Count down by what was just read; loop until the full PDU arrived.
    len -= r;
  } while (len > 0);

  return BSER(oss);
}
|||
|
|||
std::string getSockPath() { |
|||
auto var = getenv("WATCHMAN_SOCK"); |
|||
if (var && *var) { |
|||
return std::string(var); |
|||
} |
|||
|
|||
#ifdef _WIN32
|
|||
FILE *fp = popen("watchman --output-encoding=bser get-sockname", "r"); |
|||
#else
|
|||
FILE *fp = popen("watchman --output-encoding=bser get-sockname 2>/dev/null", "r"); |
|||
#endif
|
|||
if (fp == NULL || errno == ECHILD) { |
|||
throw std::runtime_error("Failed to execute watchman"); |
|||
} |
|||
|
|||
BSER b = readBSER([fp] (char *buf, size_t len) { |
|||
return fread(buf, sizeof(char), len, fp); |
|||
}); |
|||
|
|||
pclose(fp); |
|||
|
|||
auto objValue = b.objectValue(); |
|||
auto foundSockname = objValue.find("sockname"); |
|||
if (foundSockname == objValue.end()) { |
|||
throw std::runtime_error("sockname not found"); |
|||
} |
|||
return foundSockname->second.stringValue(); |
|||
} |
|||
|
|||
std::unique_ptr<IPC> watchmanConnect() { |
|||
std::string path = getSockPath(); |
|||
return std::unique_ptr<IPC>(new IPC(path)); |
|||
} |
|||
|
|||
// Reads and decodes one BSER PDU from the IPC connection.
BSER watchmanRead(IPC *ipc) {
  return readBSER([ipc] (char *buf, size_t len) {
    return ipc->read(buf, len);
  });
}

// Sends a request and blocks until the reader loop in start() hands back the
// matching response (or an error) via mResponseSignal/mError.
BSER::Object WatchmanBackend::watchmanRequest(BSER b) {
  std::string cmd = b.encode();
  mIPC->write(cmd);
  // Wake the reader loop in case it is idle waiting for work.
  mRequestSignal.notify();

  mResponseSignal.wait();
  mResponseSignal.reset();

  // An error captured by the reader loop is rethrown on the caller's thread.
  if (!mError.empty()) {
    std::runtime_error err = std::runtime_error(mError);
    mError = std::string();
    throw err;
  }

  return mResponse;
}
|||
|
|||
void WatchmanBackend::watchmanWatch(std::string dir) { |
|||
std::vector<BSER> cmd; |
|||
cmd.push_back("watch"); |
|||
cmd.push_back(normalizePath(dir)); |
|||
watchmanRequest(cmd); |
|||
} |
|||
|
|||
bool WatchmanBackend::checkAvailable() { |
|||
try { |
|||
watchmanConnect(); |
|||
return true; |
|||
} catch (std::exception&) { |
|||
return false; |
|||
} |
|||
} |
|||
|
|||
// Translates the "files" array of a watchman subscription payload into
// create/update/remove events on the watcher.
void handleFiles(WatcherRef watcher, BSER::Object obj) {
  auto found = obj.find("files");
  if (found == obj.end()) {
    throw WatcherError("Error reading changes from watchman", watcher);
  }

  auto files = found->second.arrayValue();
  for (auto it = files.begin(); it != files.end(); it++) {
    auto file = it->objectValue();
    // NOTE(review): name/mode/new/exists are looked up without end() checks;
    // a malformed response would dereference end() — assumes watchman always
    // includes these fields, confirm against the subscribe field list.
    auto name = file.find("name")->second.stringValue();
#ifdef _WIN32
    std::replace(name.begin(), name.end(), '/', '\\');
#endif
    auto mode = file.find("mode")->second.intValue();
    auto isNew = file.find("new")->second.boolValue();
    auto exists = file.find("exists")->second.boolValue();
    auto path = watcher->mDir + DIR_SEP + name;
    if (watcher->isIgnored(path)) {
      continue;
    }

    // new+exists -> create; existing non-directory -> update; gone -> remove.
    if (isNew && exists) {
      watcher->mEvents.create(path);
    } else if (exists && !S_ISDIR(mode)) {
      watcher->mEvents.update(path);
    } else if (!isNew && !exists) {
      watcher->mEvents.remove(path);
    }
  }
}

// Routes a subscription payload to the watcher registered under its
// subscription name; unknown names are ignored.
void WatchmanBackend::handleSubscription(BSER::Object obj) {
  std::unique_lock<std::mutex> lock(mMutex);
  auto subscription = obj.find("subscription")->second.stringValue();
  auto it = mSubscriptions.find(subscription);
  if (it == mSubscriptions.end()) {
    return;
  }

  auto watcher = it->second;
  try {
    handleFiles(watcher, obj);
    watcher->notify();
  } catch (WatcherError &err) {
    handleWatcherError(err);
  }
}
|||
|
|||
// Reader-loop thread body: connects to watchman, then repeatedly reads BSER
// messages, dispatching subscription payloads to watchers and everything
// else to the thread blocked in watchmanRequest.
void WatchmanBackend::start() {
  mIPC = watchmanConnect();
  notifyStarted();

  while (true) {
    // If there are no subscriptions we are reading, wait for a request.
    if (mSubscriptions.size() == 0) {
      mRequestSignal.wait();
      mRequestSignal.reset();
    }

    // Break out of loop if we are stopped.
    if (mStopped) {
      break;
    }

    // Attempt to read from the socket.
    // If there is an error and we are stopped, break.
    BSER b;
    try {
      b = watchmanRead(&*mIPC);
    } catch (std::exception &err) {
      if (mStopped) {
        break;
      } else if (mResponseSignal.isWaiting()) {
        mError = err.what();
        mResponseSignal.notify();
        // NOTE(review): execution falls through here with `b` still null, so
        // the empty object below triggers a second mResponseSignal.notify()
        // — confirm whether a `continue` was intended.
      } else {
        // Throwing causes the backend to be destroyed, but we never reach the code below to notify the signal
        mEndedSignal.notify();
        throw;
      }
    }

    // An "error" field means the pending request failed; hand the message
    // to the requester and keep reading.
    auto obj = b.objectValue();
    auto error = obj.find("error");
    if (error != obj.end()) {
      mError = error->second.stringValue();
      mResponseSignal.notify();
      continue;
    }

    // If this message is for a subscription, handle it, otherwise notify the request.
    auto subscription = obj.find("subscription");
    if (subscription != obj.end()) {
      handleSubscription(obj);
    } else {
      mResponse = obj;
      mResponseSignal.notify();
    }
  }

  mEndedSignal.notify();
}
|||
|
|||
// Shuts down the reader loop started by start() and waits for it to finish.
WatchmanBackend::~WatchmanBackend() {
  // Mark the watcher as stopped, close the socket, and trigger the lock.
  // This will cause the read loop to be broken and the thread to exit.
  mStopped = true;
  mIPC.reset();  // destroys the IPC connection, unblocking any pending watchmanRead()
  mRequestSignal.notify();  // wakes the loop if it is waiting for a request

  // If not ended yet, wait.
  mEndedSignal.wait();
}
|||
|
|||
// Asks watchman for the current clock value of the watched root.
// Throws a WatcherError if the response carries no clock.
std::string WatchmanBackend::clock(WatcherRef watcher) {
  BSER::Array request;
  request.push_back("clock");
  request.push_back(normalizePath(watcher->mDir));

  BSER::Object response = watchmanRequest(request);
  auto entry = response.find("clock");
  if (entry == response.end()) {
    throw WatcherError("Error reading clock from watchman", watcher);
  }

  return entry->second.stringValue();
}
|||
|
|||
// Records the current watchman clock for this root into the snapshot file,
// so a later getEventsSince() can replay everything changed since now.
void WatchmanBackend::writeSnapshot(WatcherRef watcher, std::string *snapshotPath) {
  std::unique_lock<std::mutex> guard(mMutex);
  watchmanWatch(watcher->mDir);

  std::ofstream out(*snapshotPath);
  out << clock(watcher);
}
|||
|
|||
// Replays all changes since the clock stored in the snapshot file.
// Missing/unreadable snapshot means there is no baseline: do nothing.
void WatchmanBackend::getEventsSince(WatcherRef watcher, std::string *snapshotPath) {
  std::unique_lock<std::mutex> guard(mMutex);

  std::ifstream in(*snapshotPath);
  if (in.fail()) {
    return;
  }

  watchmanWatch(watcher->mDir);

  std::string since;
  in >> since;

  BSER::Array request;
  request.push_back("since");
  request.push_back(normalizePath(watcher->mDir));
  request.push_back(since);

  handleFiles(watcher, watchmanRequest(request));
}
|||
|
|||
// Builds a unique subscription name for this watcher, derived from the
// watcher object's address so concurrent watchers never collide.
std::string getId(WatcherRef watcher) {
  std::ostringstream out;
  out << "parcel-" << static_cast<void *>(watcher.get());
  return out.str();
}
|||
|
|||
// This function is called by Backend::watch which takes a lock on mMutex
|
|||
void WatchmanBackend::subscribe(WatcherRef watcher) { |
|||
watchmanWatch(watcher->mDir); |
|||
|
|||
std::string id = getId(watcher); |
|||
BSER::Array cmd; |
|||
cmd.push_back("subscribe"); |
|||
cmd.push_back(normalizePath(watcher->mDir)); |
|||
cmd.push_back(id); |
|||
|
|||
BSER::Array fields; |
|||
fields.push_back("name"); |
|||
fields.push_back("mode"); |
|||
fields.push_back("exists"); |
|||
fields.push_back("new"); |
|||
|
|||
BSER::Object opts; |
|||
opts.emplace("fields", fields); |
|||
opts.emplace("since", clock(watcher)); |
|||
|
|||
if (watcher->mIgnorePaths.size() > 0) { |
|||
BSER::Array ignore; |
|||
BSER::Array anyOf; |
|||
anyOf.push_back("anyof"); |
|||
|
|||
for (auto it = watcher->mIgnorePaths.begin(); it != watcher->mIgnorePaths.end(); it++) { |
|||
std::string pathStart = watcher->mDir + DIR_SEP; |
|||
if (it->rfind(pathStart, 0) == 0) { |
|||
auto relative = it->substr(pathStart.size()); |
|||
BSER::Array dirname; |
|||
dirname.push_back("dirname"); |
|||
dirname.push_back(relative); |
|||
anyOf.push_back(dirname); |
|||
} |
|||
} |
|||
|
|||
ignore.push_back("not"); |
|||
ignore.push_back(anyOf); |
|||
|
|||
opts.emplace("expression", ignore); |
|||
} |
|||
|
|||
cmd.push_back(opts); |
|||
watchmanRequest(cmd); |
|||
|
|||
mSubscriptions.emplace(id, watcher); |
|||
mRequestSignal.notify(); |
|||
} |
|||
|
|||
// This function is called by Backend::unwatch which takes a lock on mMutex
|
|||
void WatchmanBackend::unsubscribe(WatcherRef watcher) { |
|||
std::string id = getId(watcher); |
|||
auto erased = mSubscriptions.erase(id); |
|||
|
|||
if (erased) { |
|||
BSER::Array cmd; |
|||
cmd.push_back("unsubscribe"); |
|||
cmd.push_back(normalizePath(watcher->mDir)); |
|||
cmd.push_back(id); |
|||
|
|||
watchmanRequest(cmd); |
|||
} |
|||
} |
|||
@ -0,0 +1,35 @@ |
|||
#ifndef WATCHMAN_H
#define WATCHMAN_H

#include "../Backend.hh"
#include "./BSER.hh"
#include "../Signal.hh"
#include "./IPC.hh"

// Backend that delegates file watching to the watchman daemon, talking
// BSER-encoded messages over its IPC socket on a dedicated reader thread.
class WatchmanBackend : public Backend {
public:
  // Whether a watchman daemon is reachable on this machine.
  static bool checkAvailable();
  void start() override;
  WatchmanBackend() : mStopped(false) {};
  ~WatchmanBackend();
  void writeSnapshot(WatcherRef watcher, std::string *snapshotPath) override;
  void getEventsSince(WatcherRef watcher, std::string *snapshotPath) override;
  void subscribe(WatcherRef watcher) override;
  void unsubscribe(WatcherRef watcher) override;
private:
  std::unique_ptr<IPC> mIPC;       // connection to the watchman daemon
  Signal mRequestSignal;           // wakes the reader loop when a subscription/request arrives
  Signal mResponseSignal;          // signalled when a request's response (or error) is ready
  BSER::Object mResponse;          // response delivered to the request waiting on mResponseSignal
  std::string mError;              // error text delivered instead of mResponse on failure
  std::unordered_map<std::string, WatcherRef> mSubscriptions;  // subscription name -> watcher
  bool mStopped;                   // set by the destructor to break the reader loop
  Signal mEndedSignal;             // signalled once the reader loop has exited

  // Queries watchman for the current clock of the watcher's root.
  std::string clock(WatcherRef watcher);
  void watchmanWatch(std::string dir);
  BSER::Object watchmanRequest(BSER cmd);
  // Dispatches a "subscription" message to the owning watcher.
  void handleSubscription(BSER::Object obj);
};

#endif
|
|||
@ -0,0 +1,282 @@ |
|||
#include <string>
|
|||
#include <stack>
|
|||
#include "../DirTree.hh"
|
|||
#include "../shared/BruteForceBackend.hh"
|
|||
#include "./WindowsBackend.hh"
|
|||
#include "./win_utils.hh"
|
|||
|
|||
#define DEFAULT_BUF_SIZE 1024 * 1024
|
|||
#define NETWORK_BUF_SIZE 64 * 1024
|
|||
#define CONVERT_TIME(ft) ULARGE_INTEGER{ft.dwLowDateTime, ft.dwHighDateTime}.QuadPart
|
|||
|
|||
// Enumerates `watcher->mDir` recursively (via an explicit stack rather than
// recursion) and records every non-ignored file and directory, with its
// last-write time, into `tree`. Throws if the root itself cannot be opened;
// subdirectories that vanished mid-scan are simply removed from the tree.
void BruteForceBackend::readTree(WatcherRef watcher, std::shared_ptr<DirTree> tree) {
  std::stack<std::string> directories;

  directories.push(watcher->mDir);

  while (!directories.empty()) {
    std::string path = directories.top();
    std::string spec = path + "\\*";
    directories.pop();

    WIN32_FIND_DATA ffd;
    HANDLE hFind = FindFirstFile(spec.c_str(), &ffd);

    if (hFind == INVALID_HANDLE_VALUE) {
      // Fix: do not call FindClose here — the handle is invalid, so there is
      // no search handle to close (the original closed INVALID_HANDLE_VALUE).
      if (path == watcher->mDir) {
        throw WatcherError("Error opening directory", watcher);
      }

      // A queued subdirectory disappeared before we could read it.
      tree->remove(path);
      continue;
    }

    do {
      if (strcmp(ffd.cFileName, ".") != 0 && strcmp(ffd.cFileName, "..") != 0) {
        std::string fullPath = path + "\\" + ffd.cFileName;
        if (watcher->isIgnored(fullPath)) {
          continue;
        }

        tree->add(fullPath, CONVERT_TIME(ffd.ftLastWriteTime), ffd.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY);
        if (ffd.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) {
          directories.push(fullPath);
        }
      }
    } while (FindNextFile(hFind, &ffd) != 0);

    FindClose(hFind);
  }
}
|||
|
|||
// Runs the backend thread loop. The thread sleeps alertably so that APCs
// queued by subscribe() and the destructor execute on this thread.
void WindowsBackend::start() {
  mRunning = true;
  notifyStarted();

  while (mRunning) {
    // Alertable infinite sleep: only wakes to run queued APCs.
    SleepEx(INFINITE, true);
  }
}
|||
|
|||
WindowsBackend::~WindowsBackend() {
  // Mark as stopped, and queue a noop function in the thread to break the loop
  // (the no-op APC wakes the alertable SleepEx in start(), which then sees
  // mRunning == false and exits).
  mRunning = false;
  QueueUserAPC([](__in ULONG_PTR) {}, mThread.native_handle(), (ULONG_PTR)this);
}
|||
|
|||
// Per-watcher state for the Windows backend. Owns the overlapped directory
// handle and drives a ReadDirectoryChangesW loop whose completion routine
// runs as an APC on the backend thread.
class Subscription: public WatcherState {
public:
  Subscription(WindowsBackend *backend, WatcherRef watcher, std::shared_ptr<DirTree> tree) {
    mRunning = true;
    mBackend = backend;
    mWatcher = watcher;
    mTree = tree;
    ZeroMemory(&mOverlapped, sizeof(OVERLAPPED));
    // hEvent is not used by completion-routine-based overlapped I/O, so it
    // is repurposed to carry `this` into the completion callback below.
    mOverlapped.hEvent = this;
    mReadBuffer.resize(DEFAULT_BUF_SIZE);
    mWriteBuffer.resize(DEFAULT_BUF_SIZE);

    mDirectoryHandle = CreateFileW(
      utf8ToUtf16(watcher->mDir).data(),
      FILE_LIST_DIRECTORY,
      FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
      NULL,
      OPEN_EXISTING,
      // BACKUP_SEMANTICS is needed to open a directory; OVERLAPPED enables
      // the asynchronous ReadDirectoryChangesW in poll().
      FILE_FLAG_BACKUP_SEMANTICS | FILE_FLAG_OVERLAPPED,
      NULL
    );

    if (mDirectoryHandle == INVALID_HANDLE_VALUE) {
      throw WatcherError("Invalid handle", mWatcher);
    }

    // Ensure that the path is a directory
    BY_HANDLE_FILE_INFORMATION info;
    bool success = GetFileInformationByHandle(
      mDirectoryHandle,
      &info
    );

    if (!success) {
      throw WatcherError("Could not get file information", mWatcher);
    }

    if (!(info.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY)) {
      throw WatcherError("Not a directory", mWatcher);
    }
  }

  virtual ~Subscription() {
    stop();
  }

  // Entry point queued via QueueUserAPC; kicks off the polling loop.
  void run() {
    try {
      poll();
    } catch (WatcherError &err) {
      mBackend->handleWatcherError(err);
    }
  }

  // Cancels pending I/O and closes the directory handle. Safe to call twice:
  // mRunning guards against double-close.
  void stop() {
    if (mRunning) {
      mRunning = false;
      CancelIo(mDirectoryHandle);
      CloseHandle(mDirectoryHandle);
    }
  }

  // Arms one asynchronous ReadDirectoryChangesW into mWriteBuffer. Its
  // completion routine forwards to processEvents().
  void poll() {
    if (!mRunning) {
      return;
    }

    // Asynchronously wait for changes.
    int success = ReadDirectoryChangesW(
      mDirectoryHandle,
      mWriteBuffer.data(),
      static_cast<DWORD>(mWriteBuffer.size()),
      TRUE, // recursive
      FILE_NOTIFY_CHANGE_FILE_NAME | FILE_NOTIFY_CHANGE_DIR_NAME | FILE_NOTIFY_CHANGE_ATTRIBUTES
        | FILE_NOTIFY_CHANGE_SIZE | FILE_NOTIFY_CHANGE_LAST_WRITE,
      NULL,
      &mOverlapped,
      [](DWORD errorCode, DWORD numBytes, LPOVERLAPPED overlapped) {
        // Completion routine: recover `this` from the hEvent slot set in
        // the constructor.
        auto subscription = reinterpret_cast<Subscription *>(overlapped->hEvent);
        try {
          subscription->processEvents(errorCode);
        } catch (WatcherError &err) {
          subscription->mBackend->handleWatcherError(err);
        }
      }
    );

    if (!success) {
      throw WatcherError("Failed to read changes", mWatcher);
    }
  }

  // Handles completion of one ReadDirectoryChangesW: deals with error codes,
  // re-arms the next read, then parses the completed buffer into events.
  void processEvents(DWORD errorCode) {
    if (!mRunning) {
      return;
    }

    switch (errorCode) {
      case ERROR_OPERATION_ABORTED:
        // I/O was cancelled (e.g. by stop()); nothing to process.
        return;
      case ERROR_INVALID_PARAMETER:
        // resize buffers to network size (64kb), and try again
        mReadBuffer.resize(NETWORK_BUF_SIZE);
        mWriteBuffer.resize(NETWORK_BUF_SIZE);
        poll();
        return;
      case ERROR_NOTIFY_ENUM_DIR:
        throw WatcherError("Buffer overflow. Some events may have been lost.", mWatcher);
      case ERROR_ACCESS_DENIED: {
        // This can happen if the watched directory is deleted. Check if that is the case,
        // and if so emit a delete event. Otherwise, fall through to default error case.
        DWORD attrs = GetFileAttributesW(utf8ToUtf16(mWatcher->mDir).data());
        bool isDir = attrs != INVALID_FILE_ATTRIBUTES && (attrs & FILE_ATTRIBUTE_DIRECTORY);
        if (!isDir) {
          mWatcher->mEvents.remove(mWatcher->mDir);
          mTree->remove(mWatcher->mDir);
          mWatcher->notify();
          stop();
          return;
        }
      }
      // intentional fallthrough when the directory still exists
      default:
        if (errorCode != ERROR_SUCCESS) {
          throw WatcherError("Unknown error", mWatcher);
        }
    }

    // Swap read and write buffers, and poll again
    // (the next ReadDirectoryChangesW fills mWriteBuffer while we parse
    // the completed data, now in mReadBuffer).
    std::swap(mWriteBuffer, mReadBuffer);
    poll();

    // Read change events
    BYTE *base = mReadBuffer.data();
    while (true) {
      PFILE_NOTIFY_INFORMATION info = (PFILE_NOTIFY_INFORMATION)base;
      processEvent(info);

      // NextEntryOffset == 0 marks the final record in the buffer.
      if (info->NextEntryOffset == 0) {
        break;
      }

      base += info->NextEntryOffset;
    }

    mWatcher->notify();
  }

  // Translates one FILE_NOTIFY_INFORMATION record into a create/update/remove
  // event and mirrors the change into the cached DirTree.
  void processEvent(PFILE_NOTIFY_INFORMATION info) {
    // FileName is not null-terminated; FileNameLength is in bytes.
    std::string path = mWatcher->mDir + "\\" + utf16ToUtf8(info->FileName, info->FileNameLength / sizeof(WCHAR));
    if (mWatcher->isIgnored(path)) {
      return;
    }

    switch (info->Action) {
      case FILE_ACTION_ADDED:
      case FILE_ACTION_RENAMED_NEW_NAME: {
        WIN32_FILE_ATTRIBUTE_DATA data;
        // If the attributes can no longer be read the file is already gone;
        // a later REMOVED record will account for it.
        if (GetFileAttributesExW(utf8ToUtf16(path).data(), GetFileExInfoStandard, &data)) {
          mWatcher->mEvents.create(path);
          mTree->add(path, CONVERT_TIME(data.ftLastWriteTime), data.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY);
        }
        break;
      }
      case FILE_ACTION_MODIFIED: {
        WIN32_FILE_ATTRIBUTE_DATA data;
        if (GetFileAttributesExW(utf8ToUtf16(path).data(), GetFileExInfoStandard, &data)) {
          mTree->update(path, CONVERT_TIME(data.ftLastWriteTime));
          // Only plain files are surfaced as update events here; directory
          // modifications update the tree timestamp only.
          if (!(data.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY)) {
            mWatcher->mEvents.update(path);
          }
        }
        break;
      }
      case FILE_ACTION_REMOVED:
      case FILE_ACTION_RENAMED_OLD_NAME:
        mWatcher->mEvents.remove(path);
        mTree->remove(path);
        break;
    }
  }

private:
  WindowsBackend *mBackend;
  std::shared_ptr<Watcher> mWatcher;
  std::shared_ptr<DirTree> mTree;           // cached directory snapshot kept in sync with events
  bool mRunning;                            // false once stop() has run
  HANDLE mDirectoryHandle;                  // overlapped handle to the watched directory
  std::vector<BYTE> mReadBuffer;            // buffer currently being parsed
  std::vector<BYTE> mWriteBuffer;           // buffer the pending ReadDirectoryChangesW fills
  OVERLAPPED mOverlapped;                   // hEvent smuggles `this` to the completion routine
};
|||
|
|||
// This function is called by Backend::watch which takes a lock on mMutex
|
|||
void WindowsBackend::subscribe(WatcherRef watcher) { |
|||
// Create a subscription for this watcher
|
|||
auto sub = std::make_shared<Subscription>(this, watcher, getTree(watcher, false)); |
|||
watcher->state = sub; |
|||
|
|||
// Queue polling for this subscription in the correct thread.
|
|||
bool success = QueueUserAPC([](__in ULONG_PTR ptr) { |
|||
Subscription *sub = (Subscription *)ptr; |
|||
sub->run(); |
|||
}, mThread.native_handle(), (ULONG_PTR)sub.get()); |
|||
|
|||
if (!success) { |
|||
throw std::runtime_error("Unable to queue APC"); |
|||
} |
|||
} |
|||
|
|||
// This function is called by Backend::unwatch which takes a lock on mMutex
void WindowsBackend::unsubscribe(WatcherRef watcher) {
  // Dropping the state releases the Subscription; its destructor calls
  // stop(), cancelling the overlapped read and closing the directory handle.
  watcher->state = nullptr;
}
|||
@ -0,0 +1,18 @@ |
|||
#ifndef WINDOWS_H
#define WINDOWS_H

// NOTE: winsock2.h is included before windows.h deliberately — order matters
// on Windows to avoid pulling in the legacy winsock definitions.
#include <winsock2.h>
#include <windows.h>
#include "../shared/BruteForceBackend.hh"

// Windows backend built on ReadDirectoryChangesW for live subscriptions;
// snapshot support (readTree) comes from BruteForceBackend.
class WindowsBackend : public BruteForceBackend {
public:
  void start() override;
  ~WindowsBackend();
  void subscribe(WatcherRef watcher) override;
  void unsubscribe(WatcherRef watcher) override;
private:
  bool mRunning;  // cleared by the destructor to stop the alertable-sleep loop
};

#endif
|
|||
@ -0,0 +1,44 @@ |
|||
#include "./win_utils.hh"
|
|||
|
|||
// Converts a UTF-8 string to UTF-16.
// Returns an empty string if the conversion fails.
std::wstring utf8ToUtf16(std::string input) {
  // Required length in WCHARs, including the terminating null
  // (cbMultiByte == -1 makes the API process the terminator as well).
  int len = MultiByteToWideChar(CP_UTF8, 0, input.c_str(), -1, NULL, 0);
  if (len <= 0) {
    // Fix: previously a zero-length uninitialized buffer was read here
    // (undefined behavior) when MultiByteToWideChar reported failure.
    return std::wstring();
  }

  // Convert directly into a std::wstring; no raw new/delete needed.
  std::wstring res(len, L'\0');
  MultiByteToWideChar(CP_UTF8, 0, input.c_str(), -1, &res[0], len);
  res.resize(len - 1);  // drop the terminating null the API wrote
  return res;
}
|||
|
|||
// Converts `length` UTF-16 code units starting at `input` to UTF-8.
// Like the original, the result is truncated at the first NUL byte.
std::string utf16ToUtf8(const WCHAR *input, DWORD length) {
  unsigned int len = WideCharToMultiByte(CP_UTF8, 0, input, length, NULL, 0, NULL, NULL);

  // Convert into an owned, null-terminated buffer without raw new/delete.
  std::string buffer(len + 1, '\0');
  WideCharToMultiByte(CP_UTF8, 0, input, length, &buffer[0], len, NULL, NULL);

  // Constructing from c_str() stops at the first NUL, matching the
  // original's construction from a char pointer.
  return std::string(buffer.c_str());
}
|||
|
|||
std::string normalizePath(std::string path) { |
|||
// Prevent truncation to MAX_PATH characters by adding the \\?\ prefix
|
|||
std::wstring p = utf8ToUtf16("\\\\?\\" + path); |
|||
|
|||
// Get the required length for the output
|
|||
DWORD len = GetLongPathNameW(p.data(), NULL, 0); |
|||
if (!len) { |
|||
return path; |
|||
} |
|||
|
|||
// Allocate output array and get long path
|
|||
WCHAR *output = new WCHAR[len]; |
|||
len = GetLongPathNameW(p.data(), output, len); |
|||
if (!len) { |
|||
delete[] output; |
|||
return path; |
|||
} |
|||
|
|||
// Convert back to utf8
|
|||
std::string res = utf16ToUtf8(output + 4, len - 4); |
|||
delete[] output; |
|||
return res; |
|||
} |
|||
@ -0,0 +1,11 @@ |
|||
#ifndef WIN_UTILS_H
#define WIN_UTILS_H

#include <string>
#include <windows.h>

// Converts a UTF-8 string to UTF-16.
std::wstring utf8ToUtf16(std::string input);
// Converts `length` UTF-16 code units at `input` to UTF-8.
std::string utf16ToUtf8(const WCHAR *input, DWORD length);
// Resolves a path to its long form (undoing 8.3 short names); returns the
// input unchanged on failure.
std::string normalizePath(std::string path);

#endif
|
|||
@ -0,0 +1,74 @@ |
|||
const path = require('path'); |
|||
const picomatch = require('picomatch'); |
|||
const isGlob = require('is-glob'); |
|||
|
|||
// Splits a user-supplied `ignore` array into the two option keys the native
// binding understands: `ignoreGlobs` (regex sources for glob patterns) and
// `ignorePaths` (paths resolved against `dir`). When `ignore` is not an
// array, the options object is returned untouched.
function normalizeOptions(dir, opts = {}) {
  const { ignore, ...rest } = opts;

  if (!Array.isArray(ignore)) {
    return opts;
  }

  opts = { ...rest };

  for (const value of ignore) {
    if (isGlob(value)) {
      // We set `dot: true` to workaround an issue with the
      // regular expression on Linux where the resulting
      // negative lookahead `(?!(\\/|^)` was never matching
      // in some cases. See also https://bit.ly/3UZlQDm
      const regex = picomatch.makeRe(value, {
        dot: true,
        windows: process.platform === 'win32',
      });

      if (!opts.ignoreGlobs) {
        opts.ignoreGlobs = [];
      }
      opts.ignoreGlobs.push(regex.source);
    } else {
      if (!opts.ignorePaths) {
        opts.ignorePaths = [];
      }
      opts.ignorePaths.push(path.resolve(dir, value));
    }
  }

  return opts;
}
|||
|
|||
exports.createWrapper = (binding) => { |
|||
return { |
|||
writeSnapshot(dir, snapshot, opts) { |
|||
return binding.writeSnapshot( |
|||
path.resolve(dir), |
|||
path.resolve(snapshot), |
|||
normalizeOptions(dir, opts), |
|||
); |
|||
}, |
|||
getEventsSince(dir, snapshot, opts) { |
|||
return binding.getEventsSince( |
|||
path.resolve(dir), |
|||
path.resolve(snapshot), |
|||
normalizeOptions(dir, opts), |
|||
); |
|||
}, |
|||
async subscribe(dir, fn, opts) { |
|||
dir = path.resolve(dir); |
|||
opts = normalizeOptions(dir, opts); |
|||
await binding.subscribe(dir, fn, opts); |
|||
|
|||
return { |
|||
unsubscribe() { |
|||
return binding.unsubscribe(dir, fn, opts); |
|||
}, |
|||
}; |
|||
}, |
|||
unsubscribe(dir, fn, opts) { |
|||
return binding.unsubscribe( |
|||
path.resolve(dir), |
|||
fn, |
|||
normalizeOptions(dir, opts), |
|||
); |
|||
} |
|||
}; |
|||
}; |
|||
@ -0,0 +1,21 @@ |
|||
MIT License |
|||
|
|||
Copyright (c) Tailwind Labs, Inc. |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to deal |
|||
in the Software without restriction, including without limitation the rights |
|||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in all |
|||
copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|||
SOFTWARE. |
|||
@ -0,0 +1,36 @@ |
|||
<p align="center"> |
|||
<a href="https://tailwindcss.com" target="_blank"> |
|||
<picture> |
|||
<source media="(prefers-color-scheme: dark)" srcset="https://raw.githubusercontent.com/tailwindlabs/tailwindcss/HEAD/.github/logo-dark.svg"> |
|||
<source media="(prefers-color-scheme: light)" srcset="https://raw.githubusercontent.com/tailwindlabs/tailwindcss/HEAD/.github/logo-light.svg"> |
|||
<img alt="Tailwind CSS" src="https://raw.githubusercontent.com/tailwindlabs/tailwindcss/HEAD/.github/logo-light.svg" width="350" height="70" style="max-width: 100%;"> |
|||
</picture> |
|||
</a> |
|||
</p> |
|||
|
|||
<p align="center"> |
|||
A utility-first CSS framework for rapidly building custom user interfaces. |
|||
</p> |
|||
|
|||
<p align="center"> |
|||
<a href="https://github.com/tailwindlabs/tailwindcss/actions"><img src="https://img.shields.io/github/actions/workflow/status/tailwindlabs/tailwindcss/ci.yml?branch=next" alt="Build Status"></a> |
|||
<a href="https://www.npmjs.com/package/tailwindcss"><img src="https://img.shields.io/npm/dt/tailwindcss.svg" alt="Total Downloads"></a> |
|||
<a href="https://github.com/tailwindcss/tailwindcss/releases"><img src="https://img.shields.io/npm/v/tailwindcss.svg" alt="Latest Release"></a> |
|||
<a href="https://github.com/tailwindcss/tailwindcss/blob/master/LICENSE"><img src="https://img.shields.io/npm/l/tailwindcss.svg" alt="License"></a> |
|||
</p> |
|||
|
|||
--- |
|||
|
|||
## Documentation |
|||
|
|||
For full documentation, visit [tailwindcss.com](https://tailwindcss.com). |
|||
|
|||
## Community |
|||
|
|||
For help, discussion about best practices, or feature ideas: |
|||
|
|||
[Discuss Tailwind CSS on GitHub](https://github.com/tailwindcss/tailwindcss/discussions) |
|||
|
|||
## Contributing |
|||
|
|||
If you're interested in contributing to Tailwind CSS, please read our [contributing docs](https://github.com/tailwindcss/tailwindcss/blob/next/.github/CONTRIBUTING.md) **before submitting a pull request**. |
|||
@ -0,0 +1,9 @@ |
|||
#!/usr/bin/env node |
|||
var le=(e,t)=>(t=Symbol[e])?t:Symbol.for("Symbol."+e),ae=e=>{throw TypeError(e)};var G=(e,t,i)=>{if(t!=null){typeof t!="object"&&typeof t!="function"&&ae("Object expected");var r,o;i&&(r=t[le("asyncDispose")]),r===void 0&&(r=t[le("dispose")],i&&(o=r)),typeof r!="function"&&ae("Object not disposable"),o&&(r=function(){try{o.call(this)}catch(n){return Promise.reject(n)}}),e.push([i,r,t])}else i&&e.push([i]);return t},J=(e,t,i)=>{var r=typeof SuppressedError=="function"?SuppressedError:function(u,s,l,p){return p=Error(l),p.name="SuppressedError",p.error=u,p.suppressed=s,p},o=u=>t=i?new r(u,t,"An error was suppressed during disposal"):(i=!0,u),n=u=>{for(;u=e.pop();)try{var s=u[1]&&u[1].call(u[2]);if(u[0])return Promise.resolve(s).then(n,l=>(o(l),n()))}catch(l){o(l)}if(i)throw t};return n()};import ke from"mri";function pe(e,t=process.argv.slice(2)){for(let[o,n]of t.entries())n==="-"&&(t[o]="__IO_DEFAULT_VALUE__");let i=ke(t);for(let o in i){let n=i[o];o!=="_"&&Array.isArray(n)&&(n=n[n.length-1]),n==="__IO_DEFAULT_VALUE__"&&(n="-"),i[o]=n}let r={_:i._};for(let[o,{type:n,alias:u,default:s=n==="boolean"?!1:null}]of Object.entries(e)){if(r[o]=s,u){let l=u.slice(1);i[l]!==void 0&&(r[o]=ue(i[l],n))}{let l=o.slice(2);i[l]!==void 0&&(r[o]=ue(i[l],n))}}return r}function ue(e,t){switch(t){case"string":return D(e);case"boolean":return O(e);case"number":return R(e);case"boolean | string":return O(e)??D(e);case"number | string":return R(e)??D(e);case"boolean | number":return O(e)??R(e);case"boolean | number | string":return O(e)??R(e)??D(e);default:throw new Error(`Unhandled type: ${t}`)}}function O(e){if(e===!0||e===!1)return e;if(e==="true")return!0;if(e==="false")return!1}function R(e){if(typeof e=="number")return e;{let t=Number(e);if(!Number.isNaN(t))return t}}function D(e){return`${e}`}import We from"@parcel/watcher";import{compile as Ee,env as Ue,Instrumentation as he,optimize as Pe,toSourceMap as P}from"@tailwindcss/node";import{clearRequireCache as 
Ie}from"@tailwindcss/node/require-cache";import{Scanner as Le}from"@tailwindcss/oxide";import{existsSync as je}from"fs";import Z from"fs/promises";import C from"path";var A=class{#e=new Set([]);queueMacrotask(t){let i=setTimeout(t,0);return this.add(()=>{clearTimeout(i)})}add(t){return this.#e.add(t),()=>{this.#e.delete(t),t()}}async dispose(){for(let t of this.#e)await t();this.#e.clear()}};import Re from"fs";import me from"path";import{stripVTControlCharacters as De}from"util";import w from"picocolors";import ce from"enhanced-resolve";import Me from"fs";import{createRequire as Be}from"module";var Oe=Be(import.meta.url).resolve;function fe(e){if(typeof globalThis.__tw_resolve=="function"){let t=globalThis.__tw_resolve(e);if(t)return t}return Oe(e)}var Xe=ce.ResolverFactory.createResolver({fileSystem:new ce.CachedInputFileSystem(Me,4e3),useSyncFileSystemCalls:!0,extensions:[".css"],mainFields:["style"],conditionNames:["style"]});function de(e){let t=typeof e=="number"?BigInt(e):e;return t<1000n?`${t}ns`:(t/=1000n,t<1000n?`${t}\xB5s`:(t/=1000n,t<1000n?`${t}ms`:(t/=1000n,t<60n?`${t}s`:(t/=60n,t<60n?`${t}m`:(t/=60n,t<24n?`${t}h`:(t/=24n,`${t}d`))))))}var z={indent:2};function N(){return`${w.italic(w.bold(w.blue("\u2248")))} tailwindcss ${w.blue(`v${ze()}`)}`}function F(e){return`${w.dim(w.blue("`"))}${w.blue(e)}${w.dim(w.blue("`"))}`}function W(e,t=process.cwd(),{preferAbsoluteIfShorter:i=!0}={}){let r=me.relative(t,e);return r.startsWith("..")||(r=`.${me.sep}${r}`),i&&r.length>e.length?e:r}function Q(e,t){let i=e.split(" "),r=[],o="",n=0;for(let u of i){let s=De(u).length;n+s+1>t&&(r.push(o),o="",n=0),o+=(n?" ":"")+u,n+=s+(n?1:0)}return n&&r.push(o),r}function E(e){let t=de(e);return e<=50*1e6?w.green(t):e<=300*1e6?w.blue(t):e<=1e3*1e6?w.yellow(t):w.red(t)}function k(e,t=0){return`${" ".repeat(t+z.indent)}${e}`}function x(e=""){process.stderr.write(`${e} |
|||
`)}function h(e=""){process.stdout.write(`${e} |
|||
`)}function ze(){if(typeof globalThis.__tw_version=="string")return globalThis.__tw_version;let{version:e}=JSON.parse(Re.readFileSync(fe("tailwindcss/package.json"),"utf-8"));return e}import U from"fs/promises";import Ne from"path";function Y(){return new Promise((e,t)=>{let i="";process.stdin.on("data",r=>{i+=r}),process.stdin.on("end",()=>e(i)),process.stdin.on("error",r=>t(r))})}async function H(e,t){if(!await U.stat(e).then(r=>r.isCharacterDevice()||r.isFIFO()).catch(()=>!1))try{if(await U.readFile(e,"utf8")===t)return}catch{}await U.mkdir(Ne.dirname(e),{recursive:!0}),await U.writeFile(e,t,"utf8")}var ye=String.raw,a=Ue.DEBUG;function I(){return{"--input":{type:"string",description:"Input file",alias:"-i"},"--output":{type:"string",description:"Output file",alias:"-o",default:"-"},"--watch":{type:"boolean | string",description:"Watch for changes and rebuild as needed, and use `always` to keep watching when stdin is closed",alias:"-w",values:["always"]},"--minify":{type:"boolean",description:"Optimize and minify the output",alias:"-m"},"--optimize":{type:"boolean",description:"Optimize the output without minifying"},"--cwd":{type:"string",description:"The current working directory",default:"."},"--map":{type:"boolean | string",description:"Generate a source map",default:!1}}}async function X(e){try{return await e()}catch(t){t instanceof Error&&x(t.toString()),process.exit(1)}}async function ge(e){var ie=[];try{x(N());x();let t=G(ie,new he);a&&t.start("[@tailwindcss/cli] (initial build)");let i=C.resolve(e["--cwd"]);e["--output"]&&e["--output"]!=="-"&&(e["--output"]=C.resolve(i,e["--output"]));e["--input"]&&e["--input"]!=="-"&&(e["--input"]=C.resolve(i,e["--input"]),je(e["--input"])||(x(`Specified input file ${F(W(e["--input"]))} does not exist.`),process.exit(1)));e["--input"]===e["--output"]&&e["--input"]!=="-"&&(x(`Specified input file ${F(W(e["--input"]))} and output file ${F(W(e["--output"]))} are identical.`),process.exit(1));e["--map"]==="-"&&(x("Use 
--map without a value to inline the source map"),process.exit(1));e["--map"]&&e["--map"]!==!0&&(e["--map"]=C.resolve(i,e["--map"]));let r=process.hrtime.bigint();let o=e["--input"]?e["--input"]==="-"?await Y():await Z.readFile(e["--input"],"utf-8"):ye` |
|||
@import 'tailwindcss'; |
|||
`;let n={css:"",optimizedCss:""};async function u(g,b,f,S){let $=g;if(f["--minify"]||f["--optimize"])if(g!==n.css){a&&S.start("Optimize CSS");let T=Pe(g,{file:f["--input"]??"input.css",minify:f["--minify"]??!1,map:b?.raw??void 0});a&&S.end("Optimize CSS"),n.css=g,n.optimizedCss=T.code,T.map&&(b=P(T.map)),$=T.code}else $=n.optimizedCss;b&&(f["--map"]===!0?($+=` |
|||
`,$+=b.inline):typeof f["--map"]=="string"&&(a&&S.start("Write source map"),await H(f["--map"],b.raw),a&&S.end("Write source map"))),a&&S.start("Write output"),f["--output"]&&f["--output"]!=="-"?await H(f["--output"],$):h($),a&&S.end("Write output")}let s=e["--input"]&&e["--input"]!=="-"?C.resolve(e["--input"]):null;let l=s?C.dirname(s):process.cwd();let p=s?[s]:[];async function m(g,b){a&&b.start("Setup compiler");let f=await Ee(g,{from:e["--output"]?s??"stdin.css":void 0,base:l,onDependency(T){p.push(T)}}),S=(f.root==="none"?[]:f.root===null?[{base:i,pattern:"**/*",negated:!1}]:[{...f.root,negated:!1}]).concat(f.sources),$=new Le({sources:S});return a&&b.end("Setup compiler"),[f,$]}let[d,y]=await X(()=>m(o,t));if(e["--watch"]){let g=[];g.push(await we(be(y),async function b(f){try{var S=[];try{if(f.length===1&&f[0]===e["--output"])return;let c=G(S,new he);a&&c.start("[@tailwindcss/cli] (watcher)");let re=process.hrtime.bigint();let oe=[];let j="incremental";let se=p;for(let _ of f){if(se.includes(_)){j="full";break}oe.push({file:_,extension:C.extname(_).slice(1)})}let V="";let q=null;if(j==="full"){let _=e["--input"]?e["--input"]==="-"?await Y():await Z.readFile(e["--input"],"utf-8"):ye` |
|||
@import 'tailwindcss'; |
|||
`;Ie(se),p=s?[s]:[],[d,y]=await m(_,c),a&&c.start("Scan for candidates");let K=y.scan();a&&c.end("Scan for candidates"),a&&c.start("Setup new watchers");let Fe=await we(be(y),b);a&&c.end("Setup new watchers"),a&&c.start("Cleanup old watchers"),await Promise.all(g.splice(0).map(Ae=>Ae())),a&&c.end("Cleanup old watchers"),g.push(Fe),a&&c.start("Build CSS"),V=d.build(K),a&&c.end("Build CSS"),e["--map"]&&(a&&c.start("Build Source Map"),q=P(d.buildSourceMap()),a&&c.end("Build Source Map"))}else if(j==="incremental"){a&&c.start("Scan for candidates");let _=y.scanFiles(oe);if(a&&c.end("Scan for candidates"),_.length<=0){let K=process.hrtime.bigint();x(`Done in ${E(K-re)}`);return}a&&c.start("Build CSS"),V=d.build(_),a&&c.end("Build CSS"),e["--map"]&&(a&&c.start("Build Source Map"),q=P(d.buildSourceMap()),a&&c.end("Build Source Map"))}await u(V,q,e,c);let Ce=process.hrtime.bigint();x(`Done in ${E(Ce-re)}`)}catch($){var T=$,ve=!0}finally{J(S,T,ve)}}catch(c){c instanceof Error&&x(c.toString())}})),e["--watch"]!=="always"&&process.stdin.on("end",()=>{Promise.all(g.map(b=>b())).then(()=>process.exit(0),()=>process.exit(1))}),process.stdin.resume()}a&&t.start("Scan for candidates");let L=y.scan();a&&t.end("Scan for candidates");a&&t.start("Build CSS");let M=await X(()=>d.build(L));a&&t.end("Build CSS");let B=null;e["--map"]&&(a&&t.start("Build Source Map"),B=await X(()=>P(d.buildSourceMap())),a&&t.end("Build Source Map"));await u(M,B,e,t);let xe=process.hrtime.bigint();x(`Done in ${E(xe-r)}`)}catch($e){var Te=$e,_e=!0}finally{J(ie,Te,_e)}}async function we(e,t){e=e.sort((s,l)=>s.length-l.length);let i=[];for(let s=0;s<e.length;++s)for(let l=0;l<s;++l)e[s].startsWith(`${e[l]}/`)&&i.push(e[s]);e=e.filter(s=>!i.includes(s));let r=new A,o=new Set,n=new A;async function u(){await n.dispose(),n.queueMacrotask(()=>{t(Array.from(o)),o.clear()})}for(let s of e){let{unsubscribe:l}=await We.subscribe(s,async(p,m)=>{if(p){console.error(p);return}await Promise.all(m.map(async 
d=>{if(d.type==="delete")return;let y=null;try{y=await Z.lstat(d.path)}catch{}!y?.isFile()&&!y?.isSymbolicLink()||o.add(d.path)})),await u()});r.add(l)}return async()=>{await r.dispose(),await n.dispose()}}function be(e){return[...new Set(e.normalizedSources.flatMap(t=>t.base))]}import v from"picocolors";function ee({invalid:e,usage:t,options:i}){let r=process.stdout.columns;if(h(N()),e&&(h(),h(`${v.dim("Invalid command:")} ${e}`)),t&&t.length>0){h(),h(v.dim("Usage:"));for(let[o,n]of t.entries()){let u=n.slice(0,n.indexOf("[")),s=n.slice(n.indexOf("["));s=s.replace(/\[.*?\]/g,m=>v.dim(m));let p=Q(s,r-z.indent-u.length-1);p.length>1&&o!==0&&h(),h(k(`${u}${p.shift()}`));for(let m of p)h(k(m,u.length))}}if(i){let o=0;for(let{alias:l}of Object.values(i))l&&(o=Math.max(o,l.length));let n=[],u=0;for(let[l,{alias:p,values:m}]of Object.entries(i)){m?.length&&(l+=`[=${m.join(", ")}]`);let d=[p&&`${p.padStart(o)}`,p?l:" ".repeat(o+2)+l].filter(Boolean).join(", ");n.push(d),u=Math.max(u,d.length)}h(),h(v.dim("Options:"));let s=8;for(let{description:l,default:p=null}of Object.values(i)){let m=n.shift(),d=s+(u-m.length),y=2,L=r-m.length-d-y-z.indent,M=Q(p!==null?`${l} ${v.dim(`[default:\u202F${F(`${p}`)}]`)}`:l,L);h(k(`${v.blue(m)} ${v.dim(v.gray("\xB7")).repeat(d)} ${M.shift()}`));for(let B of M)h(k(`${" ".repeat(m.length+d+y)}${B}`))}}}var te={"--help":{type:"boolean",description:"Display usage information",alias:"-h"}},ne=pe({...I(),...te}),Se=ne._[0];Se&&(ee({invalid:Se,usage:["tailwindcss [options]"],options:{...I(),...te}}),process.exit(1));(process.stdout.isTTY&&process.argv[2]===void 0||ne["--help"])&&(ee({usage:["tailwindcss [--input input.css] [--output output.css] [--watch] [options\u2026]"],options:{...I(),...te}}),process.exit(0));ge(ne); |
|||
@ -0,0 +1,40 @@ |
|||
{ |
|||
"name": "@tailwindcss/cli", |
|||
"version": "4.1.18", |
|||
"description": "A utility-first CSS framework for rapidly building custom user interfaces.", |
|||
"license": "MIT", |
|||
"repository": { |
|||
"type": "git", |
|||
"url": "https://github.com/tailwindlabs/tailwindcss.git", |
|||
"directory": "packages/@tailwindcss-cli" |
|||
}, |
|||
"bugs": "https://github.com/tailwindlabs/tailwindcss/issues", |
|||
"homepage": "https://tailwindcss.com", |
|||
"bin": { |
|||
"tailwindcss": "./dist/index.mjs" |
|||
}, |
|||
"exports": { |
|||
"./package.json": "./package.json" |
|||
}, |
|||
"files": [ |
|||
"dist" |
|||
], |
|||
"publishConfig": { |
|||
"provenance": true, |
|||
"access": "public" |
|||
}, |
|||
"dependencies": { |
|||
"@parcel/watcher": "^2.5.1", |
|||
"enhanced-resolve": "^5.18.3", |
|||
"mri": "^1.2.0", |
|||
"picocolors": "^1.1.1", |
|||
"@tailwindcss/node": "4.1.18", |
|||
"tailwindcss": "4.1.18", |
|||
"@tailwindcss/oxide": "4.1.18" |
|||
}, |
|||
"scripts": { |
|||
"lint": "tsc --noEmit", |
|||
"build": "tsup-node", |
|||
"dev": "pnpm run build -- --watch" |
|||
} |
|||
} |
|||
@ -0,0 +1,21 @@ |
|||
MIT License |
|||
|
|||
Copyright (c) Tailwind Labs, Inc. |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to deal |
|||
in the Software without restriction, including without limitation the rights |
|||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in all |
|||
copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|||
SOFTWARE. |
|||
@ -0,0 +1,36 @@ |
|||
<p align="center"> |
|||
<a href="https://tailwindcss.com" target="_blank"> |
|||
<picture> |
|||
<source media="(prefers-color-scheme: dark)" srcset="https://raw.githubusercontent.com/tailwindlabs/tailwindcss/HEAD/.github/logo-dark.svg"> |
|||
<source media="(prefers-color-scheme: light)" srcset="https://raw.githubusercontent.com/tailwindlabs/tailwindcss/HEAD/.github/logo-light.svg"> |
|||
<img alt="Tailwind CSS" src="https://raw.githubusercontent.com/tailwindlabs/tailwindcss/HEAD/.github/logo-light.svg" width="350" height="70" style="max-width: 100%;"> |
|||
</picture> |
|||
</a> |
|||
</p> |
|||
|
|||
<p align="center"> |
|||
A utility-first CSS framework for rapidly building custom user interfaces. |
|||
</p> |
|||
|
|||
<p align="center"> |
|||
<a href="https://github.com/tailwindlabs/tailwindcss/actions"><img src="https://img.shields.io/github/actions/workflow/status/tailwindlabs/tailwindcss/ci.yml?branch=next" alt="Build Status"></a> |
|||
<a href="https://www.npmjs.com/package/tailwindcss"><img src="https://img.shields.io/npm/dt/tailwindcss.svg" alt="Total Downloads"></a> |
|||
<a href="https://github.com/tailwindcss/tailwindcss/releases"><img src="https://img.shields.io/npm/v/tailwindcss.svg" alt="Latest Release"></a> |
|||
<a href="https://github.com/tailwindcss/tailwindcss/blob/master/LICENSE"><img src="https://img.shields.io/npm/l/tailwindcss.svg" alt="License"></a> |
|||
</p> |
|||
|
|||
--- |
|||
|
|||
## Documentation |
|||
|
|||
For full documentation, visit [tailwindcss.com](https://tailwindcss.com). |
|||
|
|||
## Community |
|||
|
|||
For help, discussion about best practices, or feature ideas: |
|||
|
|||
[Discuss Tailwind CSS on GitHub](https://github.com/tailwindcss/tailwindcss/discussions) |
|||
|
|||
## Contributing |
|||
|
|||
If you're interested in contributing to Tailwind CSS, please read our [contributing docs](https://github.com/tailwindcss/tailwindcss/blob/next/.github/CONTRIBUTING.md) **before submitting a pull request**. |
|||
@ -0,0 +1,5 @@ |
|||
import { ResolveHook } from 'node:module'; |
|||
|
|||
declare let resolve: ResolveHook; |
|||
|
|||
export { resolve }; |
|||
@ -0,0 +1 @@ |
|||
import{isBuiltin as i}from"module";var o=async(a,e,u)=>{let r=await u(a,e);if(r.url===import.meta.url||i(r.url)||!e.parentURL)return r;let t=new URL(e.parentURL).searchParams.get("id");if(t===null)return r;let l=new URL(r.url);return l.searchParams.set("id",t),{...r,url:`${l}`}};export{o as resolve}; |
|||
@ -0,0 +1,255 @@ |
|||
import { AstNode as AstNode$1 } from './ast'; |
|||
import { Candidate, Variant } from './candidate'; |
|||
import { compileAstNodes } from './compile'; |
|||
import { ClassEntry, VariantEntry, CanonicalizeOptions } from './intellisense'; |
|||
import { Theme } from './theme'; |
|||
import { Utilities } from './utilities'; |
|||
import { Variants } from './variants'; |
|||
import * as tailwindcss from 'tailwindcss'; |
|||
import { Polyfills, Features } from 'tailwindcss'; |
|||
export { Features, Polyfills } from 'tailwindcss'; |
|||
|
|||
declare const DEBUG: boolean; |
|||
|
|||
declare const env_DEBUG: typeof DEBUG; |
|||
declare namespace env { |
|||
export { env_DEBUG as DEBUG }; |
|||
} |
|||
|
|||
declare const enum CompileAstFlags { |
|||
None = 0, |
|||
RespectImportant = 1 |
|||
} |
|||
type DesignSystem = { |
|||
theme: Theme; |
|||
utilities: Utilities; |
|||
variants: Variants; |
|||
invalidCandidates: Set<string>; |
|||
important: boolean; |
|||
getClassOrder(classes: string[]): [string, bigint | null][]; |
|||
getClassList(): ClassEntry[]; |
|||
getVariants(): VariantEntry[]; |
|||
parseCandidate(candidate: string): Readonly<Candidate>[]; |
|||
parseVariant(variant: string): Readonly<Variant> | null; |
|||
compileAstNodes(candidate: Candidate, flags?: CompileAstFlags): ReturnType<typeof compileAstNodes>; |
|||
printCandidate(candidate: Candidate): string; |
|||
printVariant(variant: Variant): string; |
|||
getVariantOrder(): Map<Variant, number>; |
|||
resolveThemeValue(path: string, forceInline?: boolean): string | undefined; |
|||
trackUsedVariables(raw: string): void; |
|||
canonicalizeCandidates(candidates: string[], options?: CanonicalizeOptions): string[]; |
|||
candidatesToCss(classes: string[]): (string | null)[]; |
|||
candidatesToAst(classes: string[]): AstNode$1[][]; |
|||
storage: Record<symbol, unknown>; |
|||
}; |
|||
|
|||
/** |
|||
* The source code for one or more nodes in the AST |
|||
* |
|||
* This generally corresponds to a stylesheet |
|||
*/ |
|||
interface Source { |
|||
/** |
|||
* The path to the file that contains the referenced source code |
|||
* |
|||
* If this references the *output* source code, this is `null`. |
|||
*/ |
|||
file: string | null; |
|||
/** |
|||
* The referenced source code |
|||
*/ |
|||
code: string; |
|||
} |
|||
/** |
|||
* The file and offsets within it that this node covers |
|||
* |
|||
* This can represent either: |
|||
* - A location in the original CSS which caused this node to be created |
|||
* - A location in the output CSS where this node resides |
|||
*/ |
|||
type SourceLocation = [source: Source, start: number, end: number]; |
|||
|
|||
/** |
|||
* Line offset tables are the key to generating our source maps. They allow us |
|||
* to store indexes with our AST nodes and later convert them into positions as |
|||
* when given the source that the indexes refer to. |
|||
*/ |
|||
/** |
|||
* A position in source code |
|||
* |
|||
* https://tc39.es/ecma426/#sec-position-record-type |
|||
*/ |
|||
interface Position { |
|||
/** The line number, one-based */ |
|||
line: number; |
|||
/** The column/character number, one-based */ |
|||
column: number; |
|||
} |
|||
|
|||
interface OriginalPosition extends Position { |
|||
source: DecodedSource; |
|||
} |
|||
/** |
|||
* A "decoded" sourcemap |
|||
* |
|||
* @see https://tc39.es/ecma426/#decoded-source-map-record |
|||
*/ |
|||
interface DecodedSourceMap { |
|||
file: string | null; |
|||
sources: DecodedSource[]; |
|||
mappings: DecodedMapping[]; |
|||
} |
|||
/** |
|||
* A "decoded" source |
|||
* |
|||
* @see https://tc39.es/ecma426/#decoded-source-record |
|||
*/ |
|||
interface DecodedSource { |
|||
url: string | null; |
|||
content: string | null; |
|||
ignore: boolean; |
|||
} |
|||
/** |
|||
* A "decoded" mapping |
|||
* |
|||
* @see https://tc39.es/ecma426/#decoded-mapping-record |
|||
*/ |
|||
interface DecodedMapping { |
|||
originalPosition: OriginalPosition | null; |
|||
generatedPosition: Position; |
|||
name: string | null; |
|||
} |
|||
|
|||
type StyleRule = { |
|||
kind: 'rule'; |
|||
selector: string; |
|||
nodes: AstNode[]; |
|||
src?: SourceLocation; |
|||
dst?: SourceLocation; |
|||
}; |
|||
type AtRule = { |
|||
kind: 'at-rule'; |
|||
name: string; |
|||
params: string; |
|||
nodes: AstNode[]; |
|||
src?: SourceLocation; |
|||
dst?: SourceLocation; |
|||
}; |
|||
type Declaration = { |
|||
kind: 'declaration'; |
|||
property: string; |
|||
value: string | undefined; |
|||
important: boolean; |
|||
src?: SourceLocation; |
|||
dst?: SourceLocation; |
|||
}; |
|||
type Comment = { |
|||
kind: 'comment'; |
|||
value: string; |
|||
src?: SourceLocation; |
|||
dst?: SourceLocation; |
|||
}; |
|||
type Context = { |
|||
kind: 'context'; |
|||
context: Record<string, string | boolean>; |
|||
nodes: AstNode[]; |
|||
src?: undefined; |
|||
dst?: undefined; |
|||
}; |
|||
type AtRoot = { |
|||
kind: 'at-root'; |
|||
nodes: AstNode[]; |
|||
src?: undefined; |
|||
dst?: undefined; |
|||
}; |
|||
type AstNode = StyleRule | AtRule | Declaration | Comment | Context | AtRoot; |
|||
|
|||
type Resolver = (id: string, base: string) => Promise<string | false | undefined>; |
|||
interface CompileOptions { |
|||
base: string; |
|||
from?: string; |
|||
onDependency: (path: string) => void; |
|||
shouldRewriteUrls?: boolean; |
|||
polyfills?: Polyfills; |
|||
customCssResolver?: Resolver; |
|||
customJsResolver?: Resolver; |
|||
} |
|||
declare function compileAst(ast: AstNode[], options: CompileOptions): Promise<{ |
|||
sources: { |
|||
base: string; |
|||
pattern: string; |
|||
negated: boolean; |
|||
}[]; |
|||
root: "none" | { |
|||
base: string; |
|||
pattern: string; |
|||
} | null; |
|||
features: Features; |
|||
build(candidates: string[]): AstNode[]; |
|||
}>; |
|||
declare function compile(css: string, options: CompileOptions): Promise<{ |
|||
sources: { |
|||
base: string; |
|||
pattern: string; |
|||
negated: boolean; |
|||
}[]; |
|||
root: "none" | { |
|||
base: string; |
|||
pattern: string; |
|||
} | null; |
|||
features: Features; |
|||
build(candidates: string[]): string; |
|||
buildSourceMap(): tailwindcss.DecodedSourceMap; |
|||
}>; |
|||
declare function __unstable__loadDesignSystem(css: string, { base }: { |
|||
base: string; |
|||
}): Promise<DesignSystem>; |
|||
declare function loadModule(id: string, base: string, onDependency: (path: string) => void, customJsResolver?: Resolver): Promise<{ |
|||
path: string; |
|||
base: string; |
|||
module: any; |
|||
}>; |
|||
|
|||
declare class Instrumentation implements Disposable { |
|||
#private; |
|||
private defaultFlush; |
|||
constructor(defaultFlush?: (message: string) => undefined); |
|||
hit(label: string): void; |
|||
start(label: string): void; |
|||
end(label: string): void; |
|||
reset(): void; |
|||
report(flush?: (message: string) => undefined): void; |
|||
[Symbol.dispose](): void; |
|||
} |
|||
|
|||
declare function normalizePath(originalPath: string): string; |
|||
|
|||
interface OptimizeOptions { |
|||
/** |
|||
* The file being transformed |
|||
*/ |
|||
file?: string; |
|||
/** |
|||
* Enabled minified output |
|||
*/ |
|||
minify?: boolean; |
|||
/** |
|||
* The output source map before optimization |
|||
* |
|||
* If omitted a resulting source map will not be available |
|||
*/ |
|||
map?: string; |
|||
} |
|||
interface TransformResult { |
|||
code: string; |
|||
map: string | undefined; |
|||
} |
|||
declare function optimize(input: string, { file, minify, map }?: OptimizeOptions): TransformResult; |
|||
|
|||
interface SourceMap { |
|||
readonly raw: string; |
|||
readonly inline: string; |
|||
} |
|||
declare function toSourceMap(map: DecodedSourceMap | string): SourceMap; |
|||
|
|||
export { type CompileOptions, type DecodedSource, type DecodedSourceMap, Instrumentation, type OptimizeOptions, type Resolver, type SourceMap, type TransformResult, __unstable__loadDesignSystem, compile, compileAst, env, loadModule, normalizePath, optimize, toSourceMap }; |
|||
@ -0,0 +1,255 @@ |
|||
import { AstNode as AstNode$1 } from './ast'; |
|||
import { Candidate, Variant } from './candidate'; |
|||
import { compileAstNodes } from './compile'; |
|||
import { ClassEntry, VariantEntry, CanonicalizeOptions } from './intellisense'; |
|||
import { Theme } from './theme'; |
|||
import { Utilities } from './utilities'; |
|||
import { Variants } from './variants'; |
|||
import * as tailwindcss from 'tailwindcss'; |
|||
import { Polyfills, Features } from 'tailwindcss'; |
|||
export { Features, Polyfills } from 'tailwindcss'; |
|||
|
|||
declare const DEBUG: boolean; |
|||
|
|||
declare const env_DEBUG: typeof DEBUG; |
|||
declare namespace env { |
|||
export { env_DEBUG as DEBUG }; |
|||
} |
|||
|
|||
declare const enum CompileAstFlags { |
|||
None = 0, |
|||
RespectImportant = 1 |
|||
} |
|||
type DesignSystem = { |
|||
theme: Theme; |
|||
utilities: Utilities; |
|||
variants: Variants; |
|||
invalidCandidates: Set<string>; |
|||
important: boolean; |
|||
getClassOrder(classes: string[]): [string, bigint | null][]; |
|||
getClassList(): ClassEntry[]; |
|||
getVariants(): VariantEntry[]; |
|||
parseCandidate(candidate: string): Readonly<Candidate>[]; |
|||
parseVariant(variant: string): Readonly<Variant> | null; |
|||
compileAstNodes(candidate: Candidate, flags?: CompileAstFlags): ReturnType<typeof compileAstNodes>; |
|||
printCandidate(candidate: Candidate): string; |
|||
printVariant(variant: Variant): string; |
|||
getVariantOrder(): Map<Variant, number>; |
|||
resolveThemeValue(path: string, forceInline?: boolean): string | undefined; |
|||
trackUsedVariables(raw: string): void; |
|||
canonicalizeCandidates(candidates: string[], options?: CanonicalizeOptions): string[]; |
|||
candidatesToCss(classes: string[]): (string | null)[]; |
|||
candidatesToAst(classes: string[]): AstNode$1[][]; |
|||
storage: Record<symbol, unknown>; |
|||
}; |
|||
|
|||
/** |
|||
* The source code for one or more nodes in the AST |
|||
* |
|||
* This generally corresponds to a stylesheet |
|||
*/ |
|||
interface Source { |
|||
/** |
|||
* The path to the file that contains the referenced source code |
|||
* |
|||
* If this references the *output* source code, this is `null`. |
|||
*/ |
|||
file: string | null; |
|||
/** |
|||
* The referenced source code |
|||
*/ |
|||
code: string; |
|||
} |
|||
/** |
|||
* The file and offsets within it that this node covers |
|||
* |
|||
* This can represent either: |
|||
* - A location in the original CSS which caused this node to be created |
|||
* - A location in the output CSS where this node resides |
|||
*/ |
|||
type SourceLocation = [source: Source, start: number, end: number]; |
|||
|
|||
/** |
|||
* Line offset tables are the key to generating our source maps. They allow us |
|||
* to store indexes with our AST nodes and later convert them into positions as |
|||
* when given the source that the indexes refer to. |
|||
*/ |
|||
/** |
|||
* A position in source code |
|||
* |
|||
* https://tc39.es/ecma426/#sec-position-record-type
|
|||
*/ |
|||
interface Position { |
|||
/** The line number, one-based */ |
|||
line: number; |
|||
/** The column/character number, one-based */ |
|||
column: number; |
|||
} |
|||
|
|||
interface OriginalPosition extends Position { |
|||
source: DecodedSource; |
|||
} |
|||
/** |
|||
* A "decoded" sourcemap |
|||
* |
|||
* @see https://tc39.es/ecma426/#decoded-source-map-record
|
|||
*/ |
|||
interface DecodedSourceMap { |
|||
file: string | null; |
|||
sources: DecodedSource[]; |
|||
mappings: DecodedMapping[]; |
|||
} |
|||
/** |
|||
* A "decoded" source |
|||
* |
|||
* @see https://tc39.es/ecma426/#decoded-source-record
|
|||
*/ |
|||
interface DecodedSource { |
|||
url: string | null; |
|||
content: string | null; |
|||
ignore: boolean; |
|||
} |
|||
/** |
|||
* A "decoded" mapping |
|||
* |
|||
* @see https://tc39.es/ecma426/#decoded-mapping-record
|
|||
*/ |
|||
interface DecodedMapping { |
|||
originalPosition: OriginalPosition | null; |
|||
generatedPosition: Position; |
|||
name: string | null; |
|||
} |
|||
|
|||
type StyleRule = { |
|||
kind: 'rule'; |
|||
selector: string; |
|||
nodes: AstNode[]; |
|||
src?: SourceLocation; |
|||
dst?: SourceLocation; |
|||
}; |
|||
type AtRule = { |
|||
kind: 'at-rule'; |
|||
name: string; |
|||
params: string; |
|||
nodes: AstNode[]; |
|||
src?: SourceLocation; |
|||
dst?: SourceLocation; |
|||
}; |
|||
type Declaration = { |
|||
kind: 'declaration'; |
|||
property: string; |
|||
value: string | undefined; |
|||
important: boolean; |
|||
src?: SourceLocation; |
|||
dst?: SourceLocation; |
|||
}; |
|||
type Comment = { |
|||
kind: 'comment'; |
|||
value: string; |
|||
src?: SourceLocation; |
|||
dst?: SourceLocation; |
|||
}; |
|||
type Context = { |
|||
kind: 'context'; |
|||
context: Record<string, string | boolean>; |
|||
nodes: AstNode[]; |
|||
src?: undefined; |
|||
dst?: undefined; |
|||
}; |
|||
type AtRoot = { |
|||
kind: 'at-root'; |
|||
nodes: AstNode[]; |
|||
src?: undefined; |
|||
dst?: undefined; |
|||
}; |
|||
type AstNode = StyleRule | AtRule | Declaration | Comment | Context | AtRoot; |
|||
|
|||
type Resolver = (id: string, base: string) => Promise<string | false | undefined>; |
|||
interface CompileOptions { |
|||
base: string; |
|||
from?: string; |
|||
onDependency: (path: string) => void; |
|||
shouldRewriteUrls?: boolean; |
|||
polyfills?: Polyfills; |
|||
customCssResolver?: Resolver; |
|||
customJsResolver?: Resolver; |
|||
} |
|||
declare function compileAst(ast: AstNode[], options: CompileOptions): Promise<{ |
|||
sources: { |
|||
base: string; |
|||
pattern: string; |
|||
negated: boolean; |
|||
}[]; |
|||
root: "none" | { |
|||
base: string; |
|||
pattern: string; |
|||
} | null; |
|||
features: Features; |
|||
build(candidates: string[]): AstNode[]; |
|||
}>; |
|||
declare function compile(css: string, options: CompileOptions): Promise<{ |
|||
sources: { |
|||
base: string; |
|||
pattern: string; |
|||
negated: boolean; |
|||
}[]; |
|||
root: "none" | { |
|||
base: string; |
|||
pattern: string; |
|||
} | null; |
|||
features: Features; |
|||
build(candidates: string[]): string; |
|||
buildSourceMap(): tailwindcss.DecodedSourceMap; |
|||
}>; |
|||
declare function __unstable__loadDesignSystem(css: string, { base }: { |
|||
base: string; |
|||
}): Promise<DesignSystem>; |
|||
declare function loadModule(id: string, base: string, onDependency: (path: string) => void, customJsResolver?: Resolver): Promise<{ |
|||
path: string; |
|||
base: string; |
|||
module: any; |
|||
}>; |
|||
|
|||
declare class Instrumentation implements Disposable { |
|||
#private; |
|||
private defaultFlush; |
|||
constructor(defaultFlush?: (message: string) => undefined); |
|||
hit(label: string): void; |
|||
start(label: string): void; |
|||
end(label: string): void; |
|||
reset(): void; |
|||
report(flush?: (message: string) => undefined): void; |
|||
[Symbol.dispose](): void; |
|||
} |
|||
|
|||
declare function normalizePath(originalPath: string): string; |
|||
|
|||
interface OptimizeOptions { |
|||
/** |
|||
* The file being transformed |
|||
*/ |
|||
file?: string; |
|||
/** |
|||
* Enabled minified output |
|||
*/ |
|||
minify?: boolean; |
|||
/** |
|||
* The output source map before optimization |
|||
* |
|||
* If omitted a resulting source map will not be available |
|||
*/ |
|||
map?: string; |
|||
} |
|||
interface TransformResult { |
|||
code: string; |
|||
map: string | undefined; |
|||
} |
|||
declare function optimize(input: string, { file, minify, map }?: OptimizeOptions): TransformResult; |
|||
|
|||
interface SourceMap { |
|||
readonly raw: string; |
|||
readonly inline: string; |
|||
} |
|||
declare function toSourceMap(map: DecodedSourceMap | string): SourceMap; |
|||
|
|||
export { type CompileOptions, type DecodedSource, type DecodedSourceMap, Instrumentation, type OptimizeOptions, type Resolver, type SourceMap, type TransformResult, __unstable__loadDesignSystem, compile, compileAst, env, loadModule, normalizePath, optimize, toSourceMap }; |
|||
18
node_modules/@tailwindcss/node/dist/index.js
File diff suppressed because it is too large
View File
File diff suppressed because it is too large
View File
18
node_modules/@tailwindcss/node/dist/index.mjs
File diff suppressed because it is too large
View File
File diff suppressed because it is too large
View File
@ -0,0 +1,3 @@ |
|||
declare function clearRequireCache(files: string[]): void; |
|||
|
|||
export { clearRequireCache }; |
|||
@ -0,0 +1 @@ |
|||
"use strict";var i=Object.defineProperty;var a=Object.getOwnPropertyDescriptor;var f=Object.getOwnPropertyNames;var l=Object.prototype.hasOwnProperty;var n=(r,e)=>{for(var t in e)i(r,t,{get:e[t],enumerable:!0})},u=(r,e,t,o)=>{if(e&&typeof e=="object"||typeof e=="function")for(let c of f(e))!l.call(r,c)&&c!==t&&i(r,c,{get:()=>e[c],enumerable:!(o=a(e,c))||o.enumerable});return r};var h=r=>u(i({},"__esModule",{value:!0}),r);var d={};n(d,{clearRequireCache:()=>q});module.exports=h(d);function q(r){for(let e of r)delete require.cache[e]}0&&(module.exports={clearRequireCache}); |
|||
@ -0,0 +1 @@ |
|||
../jiti/lib/jiti-cli.mjs |
|||
@ -0,0 +1,19 @@ |
|||
Copyright 2024 Justin Ridgewell <justin@ridgewell.name> |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to deal |
|||
in the Software without restriction, including without limitation the rights |
|||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in |
|||
all copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|||
SOFTWARE. |
|||
@ -0,0 +1,227 @@ |
|||
# @jridgewell/gen-mapping |
|||
|
|||
> Generate source maps |
|||
|
|||
`gen-mapping` allows you to generate a source map during transpilation or minification. |
|||
With a source map, you're able to trace the original location in the source file, either in Chrome's |
|||
DevTools or using a library like [`@jridgewell/trace-mapping`][trace-mapping]. |
|||
|
|||
You may already be familiar with the [`source-map`][source-map] package's `SourceMapGenerator`. This |
|||
provides the same `addMapping` and `setSourceContent` API. |
|||
|
|||
## Installation |
|||
|
|||
```sh |
|||
npm install @jridgewell/gen-mapping |
|||
``` |
|||
|
|||
## Usage |
|||
|
|||
```typescript |
|||
import { GenMapping, addMapping, setSourceContent, toEncodedMap, toDecodedMap } from '@jridgewell/gen-mapping'; |
|||
|
|||
const map = new GenMapping({ |
|||
file: 'output.js', |
|||
sourceRoot: 'https://example.com/', |
|||
}); |
|||
|
|||
setSourceContent(map, 'input.js', `function foo() {}`); |
|||
|
|||
addMapping(map, { |
|||
// Lines start at line 1, columns at column 0. |
|||
generated: { line: 1, column: 0 }, |
|||
source: 'input.js', |
|||
original: { line: 1, column: 0 }, |
|||
}); |
|||
|
|||
addMapping(map, { |
|||
generated: { line: 1, column: 9 }, |
|||
source: 'input.js', |
|||
original: { line: 1, column: 9 }, |
|||
name: 'foo', |
|||
}); |
|||
|
|||
assert.deepEqual(toDecodedMap(map), { |
|||
version: 3, |
|||
file: 'output.js', |
|||
names: ['foo'], |
|||
sourceRoot: 'https://example.com/', |
|||
sources: ['input.js'], |
|||
sourcesContent: ['function foo() {}'], |
|||
mappings: [ |
|||
[ [0, 0, 0, 0], [9, 0, 0, 9, 0] ] |
|||
], |
|||
}); |
|||
|
|||
assert.deepEqual(toEncodedMap(map), { |
|||
version: 3, |
|||
file: 'output.js', |
|||
names: ['foo'], |
|||
sourceRoot: 'https://example.com/', |
|||
sources: ['input.js'], |
|||
sourcesContent: ['function foo() {}'], |
|||
mappings: 'AAAA,SAASA', |
|||
}); |
|||
``` |
|||
|
|||
### Smaller Sourcemaps |
|||
|
|||
Not everything needs to be added to a sourcemap, and needless markings can cause signficantly |
|||
larger file sizes. `gen-mapping` exposes `maybeAddSegment`/`maybeAddMapping` APIs that will |
|||
intelligently determine if this marking adds useful information. If not, the marking will be |
|||
skipped. |
|||
|
|||
```typescript |
|||
import { maybeAddMapping } from '@jridgewell/gen-mapping'; |
|||
|
|||
const map = new GenMapping(); |
|||
|
|||
// Adding a sourceless marking at the beginning of a line isn't useful. |
|||
maybeAddMapping(map, { |
|||
generated: { line: 1, column: 0 }, |
|||
}); |
|||
|
|||
// Adding a new source marking is useful. |
|||
maybeAddMapping(map, { |
|||
generated: { line: 1, column: 0 }, |
|||
source: 'input.js', |
|||
original: { line: 1, column: 0 }, |
|||
}); |
|||
|
|||
// But adding another marking pointing to the exact same original location isn't, even if the |
|||
// generated column changed. |
|||
maybeAddMapping(map, { |
|||
generated: { line: 1, column: 9 }, |
|||
source: 'input.js', |
|||
original: { line: 1, column: 0 }, |
|||
}); |
|||
|
|||
assert.deepEqual(toEncodedMap(map), { |
|||
version: 3, |
|||
names: [], |
|||
sources: ['input.js'], |
|||
sourcesContent: [null], |
|||
mappings: 'AAAA', |
|||
}); |
|||
``` |
|||
|
|||
## Benchmarks |
|||
|
|||
``` |
|||
node v18.0.0 |
|||
|
|||
amp.js.map |
|||
Memory Usage: |
|||
gen-mapping: addSegment 5852872 bytes |
|||
gen-mapping: addMapping 7716042 bytes |
|||
source-map-js 6143250 bytes |
|||
source-map-0.6.1 6124102 bytes |
|||
source-map-0.8.0 6121173 bytes |
|||
Smallest memory usage is gen-mapping: addSegment |
|||
|
|||
Adding speed: |
|||
gen-mapping: addSegment x 441 ops/sec ±2.07% (90 runs sampled) |
|||
gen-mapping: addMapping x 350 ops/sec ±2.40% (86 runs sampled) |
|||
source-map-js: addMapping x 169 ops/sec ±2.42% (80 runs sampled) |
|||
source-map-0.6.1: addMapping x 167 ops/sec ±2.56% (80 runs sampled) |
|||
source-map-0.8.0: addMapping x 168 ops/sec ±2.52% (80 runs sampled) |
|||
Fastest is gen-mapping: addSegment |
|||
|
|||
Generate speed: |
|||
gen-mapping: decoded output x 150,824,370 ops/sec ±0.07% (102 runs sampled) |
|||
gen-mapping: encoded output x 663 ops/sec ±0.22% (98 runs sampled) |
|||
source-map-js: encoded output x 197 ops/sec ±0.45% (84 runs sampled) |
|||
source-map-0.6.1: encoded output x 198 ops/sec ±0.33% (85 runs sampled) |
|||
source-map-0.8.0: encoded output x 197 ops/sec ±0.06% (93 runs sampled) |
|||
Fastest is gen-mapping: decoded output |
|||
|
|||
|
|||
*** |
|||
|
|||
|
|||
babel.min.js.map |
|||
Memory Usage: |
|||
gen-mapping: addSegment 37578063 bytes |
|||
gen-mapping: addMapping 37212897 bytes |
|||
source-map-js 47638527 bytes |
|||
source-map-0.6.1 47690503 bytes |
|||
source-map-0.8.0 47470188 bytes |
|||
Smallest memory usage is gen-mapping: addMapping |
|||
|
|||
Adding speed: |
|||
gen-mapping: addSegment x 31.05 ops/sec ±8.31% (43 runs sampled) |
|||
gen-mapping: addMapping x 29.83 ops/sec ±7.36% (51 runs sampled) |
|||
source-map-js: addMapping x 20.73 ops/sec ±6.22% (38 runs sampled) |
|||
source-map-0.6.1: addMapping x 20.03 ops/sec ±10.51% (38 runs sampled) |
|||
source-map-0.8.0: addMapping x 19.30 ops/sec ±8.27% (37 runs sampled) |
|||
Fastest is gen-mapping: addSegment |
|||
|
|||
Generate speed: |
|||
gen-mapping: decoded output x 381,379,234 ops/sec ±0.29% (96 runs sampled) |
|||
gen-mapping: encoded output x 95.15 ops/sec ±2.98% (72 runs sampled) |
|||
source-map-js: encoded output x 15.20 ops/sec ±7.41% (33 runs sampled) |
|||
source-map-0.6.1: encoded output x 16.36 ops/sec ±10.46% (31 runs sampled) |
|||
source-map-0.8.0: encoded output x 16.06 ops/sec ±6.45% (31 runs sampled) |
|||
Fastest is gen-mapping: decoded output |
|||
|
|||
|
|||
*** |
|||
|
|||
|
|||
preact.js.map |
|||
Memory Usage: |
|||
gen-mapping: addSegment 416247 bytes |
|||
gen-mapping: addMapping 419824 bytes |
|||
source-map-js 1024619 bytes |
|||
source-map-0.6.1 1146004 bytes |
|||
source-map-0.8.0 1113250 bytes |
|||
Smallest memory usage is gen-mapping: addSegment |
|||
|
|||
Adding speed: |
|||
gen-mapping: addSegment x 13,755 ops/sec ±0.15% (98 runs sampled) |
|||
gen-mapping: addMapping x 13,013 ops/sec ±0.11% (101 runs sampled) |
|||
source-map-js: addMapping x 4,564 ops/sec ±0.21% (98 runs sampled) |
|||
source-map-0.6.1: addMapping x 4,562 ops/sec ±0.11% (99 runs sampled) |
|||
source-map-0.8.0: addMapping x 4,593 ops/sec ±0.11% (100 runs sampled) |
|||
Fastest is gen-mapping: addSegment |
|||
|
|||
Generate speed: |
|||
gen-mapping: decoded output x 379,864,020 ops/sec ±0.23% (93 runs sampled) |
|||
gen-mapping: encoded output x 14,368 ops/sec ±4.07% (82 runs sampled) |
|||
source-map-js: encoded output x 5,261 ops/sec ±0.21% (99 runs sampled) |
|||
source-map-0.6.1: encoded output x 5,124 ops/sec ±0.58% (99 runs sampled) |
|||
source-map-0.8.0: encoded output x 5,434 ops/sec ±0.33% (96 runs sampled) |
|||
Fastest is gen-mapping: decoded output |
|||
|
|||
|
|||
*** |
|||
|
|||
|
|||
react.js.map |
|||
Memory Usage: |
|||
gen-mapping: addSegment 975096 bytes |
|||
gen-mapping: addMapping 1102981 bytes |
|||
source-map-js 2918836 bytes |
|||
source-map-0.6.1 2885435 bytes |
|||
source-map-0.8.0 2874336 bytes |
|||
Smallest memory usage is gen-mapping: addSegment |
|||
|
|||
Adding speed: |
|||
gen-mapping: addSegment x 4,772 ops/sec ±0.15% (100 runs sampled) |
|||
gen-mapping: addMapping x 4,456 ops/sec ±0.13% (97 runs sampled) |
|||
source-map-js: addMapping x 1,618 ops/sec ±0.24% (97 runs sampled) |
|||
source-map-0.6.1: addMapping x 1,622 ops/sec ±0.12% (99 runs sampled) |
|||
source-map-0.8.0: addMapping x 1,631 ops/sec ±0.12% (100 runs sampled) |
|||
Fastest is gen-mapping: addSegment |
|||
|
|||
Generate speed: |
|||
gen-mapping: decoded output x 379,107,695 ops/sec ±0.07% (99 runs sampled) |
|||
gen-mapping: encoded output x 5,421 ops/sec ±1.60% (89 runs sampled) |
|||
source-map-js: encoded output x 2,113 ops/sec ±1.81% (98 runs sampled) |
|||
source-map-0.6.1: encoded output x 2,126 ops/sec ±0.10% (100 runs sampled) |
|||
source-map-0.8.0: encoded output x 2,176 ops/sec ±0.39% (98 runs sampled) |
|||
Fastest is gen-mapping: decoded output |
|||
``` |
|||
|
|||
[source-map]: https://www.npmjs.com/package/source-map |
|||
[trace-mapping]: https://github.com/jridgewell/sourcemaps/tree/main/packages/trace-mapping |
|||
@ -0,0 +1,292 @@ |
|||
// src/set-array.ts
// An insertion-ordered set of string keys that also tracks each key's
// index for O(1) lookup.
var SetArray = class {
  constructor() {
    // Null-prototype map from key -> index within `array`.
    this._indexes = { __proto__: null };
    // Keys in insertion order.
    this.array = [];
  }
};
function cast(set) {
  // Identity helper retained from the TS source, where it casts away
  // `private` visibility on SetArray's fields.
  return set;
}
function get(setarr, key) {
  // Index of `key`, or undefined when the key is absent.
  return cast(setarr)._indexes[key];
}
function put(setarr, key) {
  // Reuse the existing index when present; otherwise append and record.
  const existing = get(setarr, key);
  if (existing !== undefined) return existing;
  const casted = cast(setarr);
  const next = casted.array.push(key) - 1;
  casted._indexes[key] = next;
  return next;
}
function remove(setarr, key) {
  const at = get(setarr, key);
  if (at === undefined) return;
  const { array, _indexes: indexes } = cast(setarr);
  // Shift every later key down one slot, fixing its recorded index.
  for (let i = at + 1; i < array.length; i++) {
    const shifted = array[i];
    array[i - 1] = shifted;
    indexes[shifted]--;
  }
  indexes[key] = undefined;
  array.pop();
}
|||
|
|||
// src/gen-mapping.ts |
|||
import { |
|||
encode |
|||
} from "@jridgewell/sourcemap-codec"; |
|||
import { TraceMap, decodedMappings } from "@jridgewell/trace-mapping"; |
|||
|
|||
// src/sourcemap-segment.ts
// Field offsets within a segment tuple:
// [generatedColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex].
var COLUMN = 0;
var SOURCES_INDEX = 1;
var SOURCE_LINE = 2;
var SOURCE_COLUMN = 3;
var NAMES_INDEX = 4;

// src/gen-mapping.ts
// Sentinel namesIndex for segments that carry no associated name.
var NO_NAME = -1;
// Holds the state needed to incrementally build a source map.
// Underscore-prefixed fields are `private` in the TS source and are
// reached via the `cast2` identity helper below.
var GenMapping = class {
  constructor({ file, sourceRoot } = {}) {
    this._names = new SetArray();      // name string -> index
    this._sources = new SetArray();    // source path -> index
    this._sourcesContent = [];         // content (or null) per source index
    this._mappings = [];               // per generated line: sorted segment tuples
    this.file = file;
    this.sourceRoot = sourceRoot;
    this._ignoreList = new SetArray(); // source indexes marked ignored
  }
};
// Identity cast used by the TS source to bypass `private` visibility.
function cast2(map) {
  return map;
}
|||
/**
 * Low-level API: associate a generated (line, column) with an original
 * source position. All coordinates are 0-based, unlike `addMapping`.
 */
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
  // Non-skipable variant: the segment is always inserted.
  return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
}
|||
/**
 * High-level API: record a mapping object ({ generated, source, original,
 * name, content }). Lines are 1-based and columns 0-based, matching the
 * legacy `source-map` library.
 */
function addMapping(map, mapping) {
  const skipable = false; // always insert
  return addMappingInternal(skipable, map, mapping);
}
|||
/**
 * Same as `addSegment`, but skips a segment that would add no information.
 * Only correct when segments are added in generated order.
 */
var maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) =>
  addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|||
/**
 * Same as `addMapping`, but skips a mapping that would add no information.
 * Only correct when mappings are added in generated order.
 */
var maybeAddMapping = (map, mapping) => addMappingInternal(true, map, mapping);
|||
/**
 * Associates `content` with `source` in the map, registering the source
 * first when it is not yet present.
 */
function setSourceContent(map, source, content) {
  const state = cast2(map);
  const index = put(state._sources, source);
  state._sourcesContent[index] = content;
}
|||
/**
 * Marks (or, with ignore=false, unmarks) `source` as ignored, registering
 * the source first when needed. A newly registered source receives a null
 * placeholder in sourcesContent to keep that array dense.
 */
function setIgnore(map, source, ignore = true) {
  const state = cast2(map);
  const index = put(state._sources, source);
  if (index === state._sourcesContent.length) state._sourcesContent[index] = null;
  if (ignore) {
    put(state._ignoreList, index);
  } else {
    remove(state._ignoreList, index);
  }
}
|||
/**
 * Returns a Source Map v3 object with decoded (array-form) mappings,
 * suitable for JSON.stringify or libraries accepting decoded maps.
 */
function toDecodedMap(map) {
  const state = cast2(map);
  // Trailing empty lines carry no information; drop them before emitting.
  removeEmptyFinalLines(state._mappings);
  return {
    version: 3,
    file: map.file || void 0,
    names: state._names.array,
    sourceRoot: map.sourceRoot || void 0,
    sources: state._sources.array,
    sourcesContent: state._sourcesContent,
    mappings: state._mappings,
    ignoreList: state._ignoreList.array
  };
}
|||
/**
 * Returns a Source Map v3 object with VLQ-encoded string mappings.
 * Identical to `toDecodedMap` except for the `mappings` field.
 */
function toEncodedMap(map) {
  const decoded = toDecodedMap(map);
  const encodedMappings = encode(decoded.mappings);
  return Object.assign({}, decoded, { mappings: encodedMappings });
}
|||
/**
 * Constructs a GenMapping pre-populated with the mappings of an existing
 * source map `input` (anything TraceMap accepts).
 */
function fromMap(input) {
  const traced = new TraceMap(input);
  const gen = new GenMapping({ file: traced.file, sourceRoot: traced.sourceRoot });
  const state = cast2(gen);
  putAll(state._names, traced.names);
  putAll(state._sources, traced.sources);
  // Fall back to one null entry per source when content is absent.
  state._sourcesContent = traced.sourcesContent || traced.sources.map(() => null);
  state._mappings = decodedMappings(traced);
  if (traced.ignoreList) putAll(state._ignoreList, traced.ignoreList);
  return gen;
}
|||
/**
 * Expands every recorded segment into a high-level mapping object
 * ({ generated, source, original, name }) with 1-based line numbers,
 * as consumed by the `source-map` library.
 */
function allMappings(map) {
  const { _mappings: mappings, _sources: sources, _names: names } = cast2(map);
  const out = [];
  mappings.forEach((line, lineIndex) => {
    for (const seg of line) {
      const generated = { line: lineIndex + 1, column: seg[COLUMN] };
      let source;
      let original;
      let name;
      // A 1-tuple is a sourceless segment; 4/5-tuples carry source info.
      if (seg.length !== 1) {
        source = sources.array[seg[SOURCES_INDEX]];
        original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
        if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];
      }
      out.push({ generated, source, original, name });
    }
  });
  return out;
}
|||
/**
 * Shared implementation behind addSegment/maybeAddSegment. Inserts a
 * segment tuple at the sorted position within its generated line; when
 * `skipable`, redundant segments are silently dropped.
 */
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
  const {
    _mappings: mappings,
    _sources: sources,
    _sourcesContent: sourcesContent,
    _names: names
  } = cast2(map);
  const line = getIndex(mappings, genLine);
  const index = getColumnIndex(line, genColumn);

  // Sourceless segment: a bare [genColumn] tuple.
  if (!source) {
    if (skipable && skipSourceless(line, index)) return;
    return insert(line, index, [genColumn]);
  }

  // With a source, the original position must be present (type-level
  // assertion in the TS source; a no-op at runtime).
  assert(sourceLine);
  assert(sourceColumn);

  const sourcesIndex = put(sources, source);
  const namesIndex = name ? put(names, name) : NO_NAME;
  // First occurrence of this source: record its content (or null).
  if (sourcesIndex === sourcesContent.length) {
    sourcesContent[sourcesIndex] = content != null ? content : null;
  }
  if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) return;

  const segment = name
    ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
    : [genColumn, sourcesIndex, sourceLine, sourceColumn];
  return insert(line, index, segment);
}
|||
// Compile-time non-null assertion from the TS source; erased to a
// runtime no-op in the compiled output.
function assert(_val) {
}
|||
/**
 * Returns `arr[index]`, growing `arr` with fresh empty line arrays as
 * needed so the index always exists.
 */
function getIndex(arr, index) {
  while (arr.length <= index) arr.push([]);
  return arr[index];
}
|||
/**
 * Finds the insertion index for `genColumn` within a line sorted by
 * column, scanning backwards because in-order appends are the common case.
 */
function getColumnIndex(line, genColumn) {
  let index = line.length;
  while (index > 0) {
    const candidate = line[index - 1];
    if (genColumn >= candidate[COLUMN]) break;
    index--;
  }
  return index;
}
|||
/**
 * Splices `value` into `array` at `index` by shifting elements manually,
 * avoiding Array#splice's returned-array allocation.
 */
function insert(array, index, value) {
  let i = array.length;
  while (i > index) {
    array[i] = array[i - 1];
    i--;
  }
  array[index] = value;
}
|||
/**
 * Truncates trailing lines that contain no segments; interior empty lines
 * are preserved.
 */
function removeEmptyFinalLines(mappings) {
  let keep = mappings.length;
  while (keep > 0 && mappings[keep - 1].length === 0) keep--;
  if (keep < mappings.length) mappings.length = keep;
}
|||
/** Inserts every element of `array` into the set, preserving order. */
function putAll(setarr, array) {
  for (const item of array) put(setarr, item);
}
|||
/**
 * True when inserting a sourceless segment at `index` would be redundant:
 * at the start of a line, or directly after another sourceless (1-tuple)
 * segment.
 */
function skipSourceless(line, index) {
  if (index === 0) return true;
  return line[index - 1].length === 1;
}
|||
/**
 * True when the segment to insert duplicates the source position and name
 * of the immediately preceding segment on the line.
 */
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
  if (index === 0) return false;
  const prev = line[index - 1];
  // A sourceless (1-tuple) predecessor can never match.
  if (prev.length === 1) return false;
  const prevName = prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME;
  return (
    sourcesIndex === prev[SOURCES_INDEX] &&
    sourceLine === prev[SOURCE_LINE] &&
    sourceColumn === prev[SOURCE_COLUMN] &&
    namesIndex === prevName
  );
}
|||
/**
 * Shared implementation behind addMapping/maybeAddMapping. Converts the
 * 1-based mapping object into 0-based segment coordinates and forwards
 * to addSegmentInternal.
 */
function addMappingInternal(skipable, map, mapping) {
  const { generated, source, original, name, content } = mapping;
  const genLine = generated.line - 1;
  const genColumn = generated.column;
  if (!source) {
    // Sourceless mapping: every original-position field is null.
    return addSegmentInternal(skipable, map, genLine, genColumn, null, null, null, null, null);
  }
  // With a source, an original position must accompany it (no-op at runtime).
  assert(original);
  return addSegmentInternal(
    skipable,
    map,
    genLine,
    genColumn,
    source,
    original.line - 1,
    original.column,
    name,
    content
  );
}
|||
// Public API of @jridgewell/gen-mapping (ESM build).
export {
  GenMapping,
  addMapping,
  addSegment,
  allMappings,
  fromMap,
  maybeAddMapping,
  maybeAddSegment,
  setIgnore,
  setSourceContent,
  toDecodedMap,
  toEncodedMap
};
//# sourceMappingURL=gen-mapping.mjs.map
|||
6
node_modules/@tailwindcss/node/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs.map
File diff suppressed because it is too large
View File
File diff suppressed because it is too large
View File
@ -0,0 +1,358 @@ |
|||
// UMD (Universal Module Definition) bundle of @jridgewell/gen-mapping.
// Generated output: detects CommonJS, AMD, or browser-global environments
// and registers the module accordingly. The interop helpers and evaluation
// order below are bundler-generated; do not hand-edit the logic.
(function (global, factory) {
  if (typeof exports === 'object' && typeof module !== 'undefined') {
    // CommonJS: dependencies are require()d directly.
    factory(module, require('@jridgewell/sourcemap-codec'), require('@jridgewell/trace-mapping'));
    module.exports = def(module);
  } else if (typeof define === 'function' && define.amd) {
    // AMD: declare the same dependencies for the loader.
    define(['module', '@jridgewell/sourcemap-codec', '@jridgewell/trace-mapping'], function(mod) {
      factory.apply(this, arguments);
      mod.exports = def(mod);
    });
  } else {
    // Browser globals: dependencies are expected on the global object.
    const mod = { exports: {} };
    factory(mod, global.sourcemapCodec, global.traceMapping);
    global = typeof globalThis !== 'undefined' ? globalThis : global || self;
    global.genMapping = def(mod);
  }
  // Unwraps a transpiled ESM default export when present.
  function def(m) { return 'default' in m.exports ? m.exports.default : m.exports; }
})(this, (function (module, require_sourcemapCodec, require_traceMapping) {
  "use strict";
  // ---- esbuild CommonJS/ESM interop helpers ----
  var __create = Object.create;
  var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __getOwnPropNames = Object.getOwnPropertyNames;
  var __getProtoOf = Object.getPrototypeOf;
  var __hasOwnProp = Object.prototype.hasOwnProperty;
  // Lazily evaluates a wrapped CommonJS module body once, caching exports.
  var __commonJS = (cb, mod) => function __require() {
    return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
  };
  // Defines live getter re-exports on `target`.
  var __export = (target, all) => {
    for (var name in all)
      __defProp(target, name, { get: all[name], enumerable: true });
  };
  // Copies enumerable properties (as getters) from `from` onto `to`.
  var __copyProps = (to, from, except, desc) => {
    if (from && typeof from === "object" || typeof from === "function") {
      for (let key of __getOwnPropNames(from))
        if (!__hasOwnProp.call(to, key) && key !== except)
          __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
    }
    return to;
  };
  var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
    // If the importer is in node compatibility mode or this is not an ESM
    // file that has been converted to a CommonJS file using a Babel-
    // compatible transform (i.e. "__esModule" has not been set), then set
    // "default" to the CommonJS "module.exports" for node compatibility.
    isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
    mod
  ));
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

  // umd:@jridgewell/sourcemap-codec
  // Shim re-exporting the dependency handed in by the UMD wrapper.
  var require_sourcemap_codec = __commonJS({
    "umd:@jridgewell/sourcemap-codec"(exports, module2) {
      module2.exports = require_sourcemapCodec;
    }
  });

  // umd:@jridgewell/trace-mapping
  // Shim re-exporting the dependency handed in by the UMD wrapper.
  var require_trace_mapping = __commonJS({
    "umd:@jridgewell/trace-mapping"(exports, module2) {
      module2.exports = require_traceMapping;
    }
  });

  // src/gen-mapping.ts
  // Public API surface, exported as live getters on module.exports.
  var gen_mapping_exports = {};
  __export(gen_mapping_exports, {
    GenMapping: () => GenMapping,
    addMapping: () => addMapping,
    addSegment: () => addSegment,
    allMappings: () => allMappings,
    fromMap: () => fromMap,
    maybeAddMapping: () => maybeAddMapping,
    maybeAddSegment: () => maybeAddSegment,
    setIgnore: () => setIgnore,
    setSourceContent: () => setSourceContent,
    toDecodedMap: () => toDecodedMap,
    toEncodedMap: () => toEncodedMap
  });
  module.exports = __toCommonJS(gen_mapping_exports);

  // src/set-array.ts
  // Insertion-ordered set of keys with O(1) key -> index lookup.
  var SetArray = class {
    constructor() {
      this._indexes = { __proto__: null };
      this.array = [];
    }
  };
  // Identity cast (bypasses `private` visibility in the TS source).
  function cast(set) {
    return set;
  }
  // Index of `key`, or undefined when absent.
  function get(setarr, key) {
    return cast(setarr)._indexes[key];
  }
  // Returns the existing index for `key`, or appends it and records one.
  function put(setarr, key) {
    const index = get(setarr, key);
    if (index !== void 0) return index;
    const { array, _indexes: indexes } = cast(setarr);
    const length = array.push(key);
    return indexes[key] = length - 1;
  }
  // Removes `key`, shifting later keys down and fixing their indexes.
  function remove(setarr, key) {
    const index = get(setarr, key);
    if (index === void 0) return;
    const { array, _indexes: indexes } = cast(setarr);
    for (let i = index + 1; i < array.length; i++) {
      const k = array[i];
      array[i - 1] = k;
      indexes[k]--;
    }
    indexes[key] = void 0;
    array.pop();
  }

  // src/gen-mapping.ts
  var import_sourcemap_codec = __toESM(require_sourcemap_codec());
  var import_trace_mapping = __toESM(require_trace_mapping());

  // src/sourcemap-segment.ts
  // Field offsets within a segment tuple:
  // [generatedColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex].
  var COLUMN = 0;
  var SOURCES_INDEX = 1;
  var SOURCE_LINE = 2;
  var SOURCE_COLUMN = 3;
  var NAMES_INDEX = 4;

  // src/gen-mapping.ts
  // Sentinel namesIndex for segments without an associated name.
  var NO_NAME = -1;
  // Holds the state needed to incrementally build a source map.
  var GenMapping = class {
    constructor({ file, sourceRoot } = {}) {
      this._names = new SetArray();
      this._sources = new SetArray();
      this._sourcesContent = [];
      this._mappings = [];
      this.file = file;
      this.sourceRoot = sourceRoot;
      this._ignoreList = new SetArray();
    }
  };
  // Identity cast (bypasses `private` visibility in the TS source).
  function cast2(map) {
    return map;
  }
  // Low-level API: 0-based generated/original positions.
  function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
    return addSegmentInternal(
      false,
      map,
      genLine,
      genColumn,
      source,
      sourceLine,
      sourceColumn,
      name,
      content
    );
  }
  // High-level API: 1-based lines, 0-based columns.
  function addMapping(map, mapping) {
    return addMappingInternal(false, map, mapping);
  }
  // Like addSegment, but skips redundant segments (in-order input only).
  var maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
    return addSegmentInternal(
      true,
      map,
      genLine,
      genColumn,
      source,
      sourceLine,
      sourceColumn,
      name,
      content
    );
  };
  // Like addMapping, but skips redundant mappings (in-order input only).
  var maybeAddMapping = (map, mapping) => {
    return addMappingInternal(true, map, mapping);
  };
  // Associates `content` with `source`, registering the source if needed.
  function setSourceContent(map, source, content) {
    const {
      _sources: sources,
      _sourcesContent: sourcesContent
      // _originalScopes: originalScopes,
    } = cast2(map);
    const index = put(sources, source);
    sourcesContent[index] = content;
  }
  // Marks/unmarks `source` as ignored; pads sourcesContent for new sources.
  function setIgnore(map, source, ignore = true) {
    const {
      _sources: sources,
      _sourcesContent: sourcesContent,
      _ignoreList: ignoreList
      // _originalScopes: originalScopes,
    } = cast2(map);
    const index = put(sources, source);
    if (index === sourcesContent.length) sourcesContent[index] = null;
    if (ignore) put(ignoreList, index);
    else remove(ignoreList, index);
  }
  // Builds a Source Map v3 object with decoded (array-form) mappings.
  function toDecodedMap(map) {
    const {
      _mappings: mappings,
      _sources: sources,
      _sourcesContent: sourcesContent,
      _names: names,
      _ignoreList: ignoreList
      // _originalScopes: originalScopes,
      // _generatedRanges: generatedRanges,
    } = cast2(map);
    // Trailing empty lines carry no information; drop them first.
    removeEmptyFinalLines(mappings);
    return {
      version: 3,
      file: map.file || void 0,
      names: names.array,
      sourceRoot: map.sourceRoot || void 0,
      sources: sources.array,
      sourcesContent,
      mappings,
      // originalScopes,
      // generatedRanges,
      ignoreList: ignoreList.array
    };
  }
  // Builds a Source Map v3 object with VLQ-encoded string mappings.
  function toEncodedMap(map) {
    const decoded = toDecodedMap(map);
    return Object.assign({}, decoded, {
      // originalScopes: decoded.originalScopes.map((os) => encodeOriginalScopes(os)),
      // generatedRanges: encodeGeneratedRanges(decoded.generatedRanges as GeneratedRange[]),
      mappings: (0, import_sourcemap_codec.encode)(decoded.mappings)
    });
  }
  // Constructs a GenMapping pre-populated from an existing source map.
  function fromMap(input) {
    const map = new import_trace_mapping.TraceMap(input);
    const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
    putAll(cast2(gen)._names, map.names);
    putAll(cast2(gen)._sources, map.sources);
    cast2(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
    cast2(gen)._mappings = (0, import_trace_mapping.decodedMappings)(map);
    if (map.ignoreList) putAll(cast2(gen)._ignoreList, map.ignoreList);
    return gen;
  }
  // Expands every segment into { generated, source, original, name }.
  function allMappings(map) {
    const out = [];
    const { _mappings: mappings, _sources: sources, _names: names } = cast2(map);
    for (let i = 0; i < mappings.length; i++) {
      const line = mappings[i];
      for (let j = 0; j < line.length; j++) {
        const seg = line[j];
        const generated = { line: i + 1, column: seg[COLUMN] };
        let source = void 0;
        let original = void 0;
        let name = void 0;
        // A 1-tuple is a sourceless segment; 4/5-tuples carry source info.
        if (seg.length !== 1) {
          source = sources.array[seg[SOURCES_INDEX]];
          original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
          if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];
        }
        out.push({ generated, source, original, name });
      }
    }
    return out;
  }
  // Shared insertion logic behind addSegment/maybeAddSegment.
  function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
    const {
      _mappings: mappings,
      _sources: sources,
      _sourcesContent: sourcesContent,
      _names: names
      // _originalScopes: originalScopes,
    } = cast2(map);
    const line = getIndex(mappings, genLine);
    const index = getColumnIndex(line, genColumn);
    if (!source) {
      // Sourceless segment: a bare [genColumn] tuple.
      if (skipable && skipSourceless(line, index)) return;
      return insert(line, index, [genColumn]);
    }
    // Type-level assertions in the TS source; no-ops at runtime.
    assert(sourceLine);
    assert(sourceColumn);
    const sourcesIndex = put(sources, source);
    const namesIndex = name ? put(names, name) : NO_NAME;
    // First occurrence of this source: record its content (or null).
    if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content != null ? content : null;
    if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
      return;
    }
    return insert(
      line,
      index,
      name ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] : [genColumn, sourcesIndex, sourceLine, sourceColumn]
    );
  }
  // Compile-time assertion from the TS source; a no-op at runtime.
  function assert(_val) {
  }
  // Returns arr[index], growing arr with empty line arrays as needed.
  function getIndex(arr, index) {
    for (let i = arr.length; i <= index; i++) {
      arr[i] = [];
    }
    return arr[index];
  }
  // Finds the sorted insertion index for genColumn, scanning backwards.
  function getColumnIndex(line, genColumn) {
    let index = line.length;
    for (let i = index - 1; i >= 0; index = i--) {
      const current = line[i];
      if (genColumn >= current[COLUMN]) break;
    }
    return index;
  }
  // Manual splice-in of `value` at `index` (avoids Array#splice allocation).
  function insert(array, index, value) {
    for (let i = array.length; i > index; i--) {
      array[i] = array[i - 1];
    }
    array[index] = value;
  }
  // Truncates trailing lines that contain no segments.
  function removeEmptyFinalLines(mappings) {
    const { length } = mappings;
    let len = length;
    for (let i = len - 1; i >= 0; len = i, i--) {
      if (mappings[i].length > 0) break;
    }
    if (len < length) mappings.length = len;
  }
  // Inserts every element of `array` into the set, preserving order.
  function putAll(setarr, array) {
    for (let i = 0; i < array.length; i++) put(setarr, array[i]);
  }
  // True when a sourceless segment at `index` would add no information.
  function skipSourceless(line, index) {
    if (index === 0) return true;
    const prev = line[index - 1];
    return prev.length === 1;
  }
  // True when the segment duplicates the preceding segment's source info.
  function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
    if (index === 0) return false;
    const prev = line[index - 1];
    if (prev.length === 1) return false;
    return sourcesIndex === prev[SOURCES_INDEX] && sourceLine === prev[SOURCE_LINE] && sourceColumn === prev[SOURCE_COLUMN] && namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME);
  }
  // Shared conversion logic behind addMapping/maybeAddMapping (1-based
  // lines in, 0-based segment coordinates out).
  function addMappingInternal(skipable, map, mapping) {
    const { generated, source, original, name, content } = mapping;
    if (!source) {
      return addSegmentInternal(
        skipable,
        map,
        generated.line - 1,
        generated.column,
        null,
        null,
        null,
        null,
        null
      );
    }
    assert(original);
    return addSegmentInternal(
      skipable,
      map,
      generated.line - 1,
      generated.column,
      source,
      original.line - 1,
      original.column,
      name,
      content
    );
  }
}));
|||
//# sourceMappingURL=gen-mapping.umd.js.map
|
|||
6
node_modules/@tailwindcss/node/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js.map
File diff suppressed because it is too large
View File
File diff suppressed because it is too large
View File
@ -0,0 +1,88 @@ |
|||
import type { SourceMapInput } from '@jridgewell/trace-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types';
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
/**
 * Construction options for {@link GenMapping}.
 */
export type Options = {
    file?: string | null;
    sourceRoot?: string | null;
};
/**
 * Provides the state to generate a sourcemap.
 */
export declare class GenMapping {
    private _names;
    private _sources;
    private _sourcesContent;
    private _mappings;
    private _ignoreList;
    file: string | null | undefined;
    sourceRoot: string | null | undefined;
    constructor({ file, sourceRoot }?: Options);
}
/**
 * A low-level API to associate a generated position with an original source position. Line and
 * column here are 0-based, unlike `addMapping`.
 */
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source?: null, sourceLine?: null, sourceColumn?: null, name?: null, content?: null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name?: null, content?: string | null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name: string, content?: string | null): void;
/**
 * A high-level API to associate a generated position with an original source position. Line is
 * 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
 */
export declare function addMapping(map: GenMapping, mapping: {
    generated: Pos;
    source?: null;
    original?: null;
    name?: null;
    content?: null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
    generated: Pos;
    source: string;
    original: Pos;
    name?: null;
    content?: string | null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
    generated: Pos;
    source: string;
    original: Pos;
    name: string;
    content?: string | null;
}): void;
/**
 * Same as `addSegment`, but will only add the segment if it generates useful information in the
 * resulting map. This only works correctly if segments are added **in order**, meaning you should
 * not add a segment with a lower generated line/column than one that came before.
 */
export declare const maybeAddSegment: typeof addSegment;
/**
 * Same as `addMapping`, but will only add the mapping if it generates useful information in the
 * resulting map. This only works correctly if mappings are added **in order**, meaning you should
 * not add a mapping with a lower generated line/column than one that came before.
 */
export declare const maybeAddMapping: typeof addMapping;
/**
 * Adds/removes the content of the source file to the source map.
 */
export declare function setSourceContent(map: GenMapping, source: string, content: string | null): void;
/**
 * Marks (or, with `ignore` set to false, unmarks) `source` as ignored in the
 * resulting map's `ignoreList`, registering the source if needed.
 */
export declare function setIgnore(map: GenMapping, source: string, ignore?: boolean): void;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function toDecodedMap(map: GenMapping): DecodedSourceMap;
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function toEncodedMap(map: GenMapping): EncodedSourceMap;
/**
 * Constructs a new GenMapping, using the already present mappings of the input.
 */
export declare function fromMap(input: SourceMapInput): GenMapping;
/**
 * Returns an array of high-level mapping objects for every recorded segment, which could then be
 * passed to the `source-map` library.
 */
export declare function allMappings(map: GenMapping): Mapping[];
|||
Some files were not shown because too many files changed in this diff
Write
Preview
Loading…
Cancel
Save
Reference in new issue