70 changed files with 4290 additions and 3108 deletions
@@ -0,0 +1,5 @@
---
"fnm": patch
---

Disable unused chrono features (#1014)
@@ -0,0 +1,5 @@
---
"fnm": minor
---

feat: add remote version sorting and filtering
@@ -0,0 +1,5 @@
---
"fnm": patch
---

Fix `cd /D` on windows with `--use-on-cd`
@@ -0,0 +1,5 @@
---
"fnm": patch
---

Support `x64-musl` arch by adding a `--arch x64-musl` flag to `fnm env`
@@ -0,0 +1,5 @@
---
"fnm": patch
---

Nicer styling in the progress bar (add a newline, make it Unicode)
@@ -0,0 +1,5 @@
---
"fnm": patch
---

fix: return default version if canonicalize fails
@@ -0,0 +1,5 @@
---
"fnm": minor
---

Show a progress bar when downloading and extracting node
@@ -0,0 +1,5 @@
---
"fnm": patch
---

Fixes a bug when running `eval $(fnm env)` in sh when there are spaces in the $PATH
@@ -1,6 +1,6 @@
#!/bin/bash

-eval "$(fnm env --multi)"
+eval "$(fnm env --shell=bash)"
fnm install v10.11.0
fnm use v10.11.0
node -v
@@ -1,6 +0,0 @@
#!/bin/bash

eval "$(~/.fnm-latest/fnm env --multi)"
~/.fnm-latest/fnm install v10.11.0
~/.fnm-latest/fnm use v10.11.0
node -v
@@ -1,6 +0,0 @@
#!/bin/bash

eval "$(~/.fnm/fnm env --multi)"
~/.fnm/fnm install v10.11.0
~/.fnm/fnm use v10.11.0
node -v
@@ -0,0 +1,72 @@
# Configuration

fnm comes with many features out of the box. Some of them are not activated by default because they change your shell's default behavior, and some sit behind a feature flag to avoid breaking changes or remain experimental until we decide they are worthwhile to introduce.

All these features can be configured by adding flags to the `fnm env` call when initializing the shell. For instance, if your shell setup looks like `eval "$(fnm env)"`, you can add a flag to it by changing it to `eval "$(fnm env --my-flag=value)"`.

Here's a list of these features and capabilities:

### `--use-on-cd`

**✅ Highly recommended**

`--use-on-cd` appends a hook to the output of `fnm env` that runs whenever you change directories and switches the Node.js version based on the requirements of the current directory, read from `.node-version` or `.nvmrc` (or `package.json#engines#node` if `--resolve-engines` is enabled).

This lets you avoid thinking about `fnm use`: just `cd <DIR>` and the right version is picked up.
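
For example, with a bash setup the `fnm env` line might look like the following (a minimal sketch; swap `--shell=bash` for your own shell):

```sh
# e.g. in ~/.bashrc: initialize fnm and enable the cd hook
eval "$(fnm env --use-on-cd --shell=bash)"
```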
||||
|
||||
### `--version-file-strategy=recursive` |
||||
|
||||
**✅ Highly recommended** |
||||
|
||||
Makes `fnm use` and `fnm install` take parent directories into account when looking for a version file ("dotfile")--when no argument was given. |
||||
|
||||
So, let's say we have the following directory structure: |
||||
|
||||
``` |
||||
repo/ |
||||
├── package.json |
||||
├── .node-version <- with content: `20.0.0` |
||||
└── packages/ |
||||
└── my-package/ <- I am here |
||||
└── package.json |
||||
``` |
||||
|
||||
And I'm running the following command: |
||||
|
||||
```sh-session |
||||
repo/packages/my-package$ fnm use |
||||
``` |
||||
|
||||
Then fnm will switch to Node.js v20.0.0. |
||||
|
||||
Without the explicit flag, the value is set to `local`, which will not traverse the directory tree and therefore will print: |
||||
|
||||
```sh-session |
||||
repo/packages/my-package$ fnm use |
||||
error: Can't find version in dotfiles. Please provide a version manually to the command. |
||||
``` |
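
Both recommended flags compose in a single `fnm env` call; a bash setup that enables them together might look like this (illustrative sketch):

```sh
# e.g. in ~/.bashrc
eval "$(fnm env --use-on-cd --version-file-strategy=recursive --shell=bash)"
```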
||||
|
||||
### `--enable-corepack` |
||||
|
||||
**🧪 Experimental** |
||||
|
||||
Runs [`corepack enable`](https://nodejs.org/api/corepack.html#enabling-the-feature) when a new version of Node.js is installed. Experimental due to the fact Corepack itself is experimental. |
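
As with the other features, the flag is passed to `fnm env`. The test scripts added in this changeset drive it roughly like this (bash sketch):

```sh
eval "$(fnm env --corepack-enabled)"
fnm install 18   # corepack enable runs for the freshly installed version
```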
||||
|
||||
### `--resolve-engines` |
||||
|
||||
**🧪 Experimental** |
||||
|
||||
Treats `package.json#engines#node` as a valid Node.js version file ("dotfile"). So, if you have a package.json with the following content: |
||||
|
||||
```json |
||||
{ |
||||
"engines": { |
||||
"node": ">=20 <21" |
||||
} |
||||
} |
||||
``` |
||||
|
||||
Then: |
||||
|
||||
- `fnm install` will install the latest satisfying Node.js 20.x version available in the Node.js dist server |
||||
- `fnm use` will use the latest satisfying Node.js 20.x version available on your system, or prompt to install if no version matched. |
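
A minimal session illustrating the above, assuming the `package.json` shown here is in the current directory:

```sh
eval "$(fnm env --resolve-engines)"
fnm install   # installs the newest Node.js 20.x release from the dist server
fnm use       # activates the newest matching 20.x version installed locally
```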
@@ -0,0 +1,28 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`Bash installs corepack: Bash 1`] = `
"set -e
eval "$(fnm env --corepack-enabled)"
fnm install 18
fnm exec --using=18 node test-pnpm-corepack.js"
`;

exports[`Fish installs corepack: Fish 1`] = `
"fnm env --corepack-enabled | source
fnm install 18
fnm exec --using=18 node test-pnpm-corepack.js"
`;

exports[`PowerShell installs corepack: PowerShell 1`] = `
"$ErrorActionPreference = "Stop"
fnm env --corepack-enabled | Out-String | Invoke-Expression
fnm install 18
fnm exec --using=18 node test-pnpm-corepack.js"
`;

exports[`Zsh installs corepack: Zsh 1`] = `
"set -e
eval "$(fnm env --corepack-enabled)"
fnm install 18
fnm exec --using=18 node test-pnpm-corepack.js"
`;
@@ -0,0 +1,54 @@
import fs from "fs"
import { script } from "./shellcode/script.js"
import { Bash, Fish, PowerShell, Zsh } from "./shellcode/shells.js"
import describe from "./describe.js"
import path from "path"
import testCwd from "./shellcode/test-cwd.js"
import { createRequire } from "module"

const require = createRequire(import.meta.url)
const whichPath = require.resolve("which")

const nodescript = `
const which = require(${JSON.stringify(whichPath)});
const pnpmBinary = which.sync('pnpm')
const nodeBinary = which.sync('node')

const binPath = require('path').dirname(nodeBinary);

if (!pnpmBinary.includes(binPath)) {
  console.log('pnpm not found in current Node.js bin', { binPath, pnpmBinary });
  process.exit(1);
}
const scriptContents = require('fs').readFileSync(pnpmBinary, 'utf8');
console.log('scriptContents', scriptContents)
if (!scriptContents.includes('corepack')) {
  console.log('corepack not found in pnpm script');
  process.exit(1);
}
`

for (const shell of [Bash, Fish, PowerShell, Zsh]) {
  describe(shell, () => {
    test(`installs corepack`, async () => {
      const cwd = testCwd()
      const filepath = path.join(cwd, "test-pnpm-corepack.js")
      fs.writeFileSync(filepath, nodescript)

      await script(shell)
        .then(shell.env({ corepackEnabled: true }))
        .then(shell.call("fnm", ["install", "18"]))
        .then(
          shell.call("fnm", [
            "exec",
            "--using=18",
            "node",
            "test-pnpm-corepack.js",
          ])
        )
        .takeSnapshot(shell)
        // .addExtraEnvVar("RUST_LOG", "fnm=debug")
        .execute(shell)
    })
  })
}
@@ -0,0 +1,5 @@
import { server } from "./tests/proxy-server/index.mjs"

export default function () {
  server.listen(8080)
}
@@ -0,0 +1,5 @@
import { server } from "./tests/proxy-server/index.mjs"

export default () => {
  server.close()
}
@@ -0,0 +1,3 @@
[toolchain]
channel = "1.78"
components = ["rustfmt", "clippy"]
@@ -0,0 +1,19 @@
use serde::Deserialize;

#[derive(Debug, Deserialize, Default)]
struct EnginesField {
    node: Option<node_semver::Range>,
}

#[derive(Debug, Deserialize, Default)]
pub struct PackageJson {
    engines: Option<EnginesField>,
}

impl PackageJson {
    pub fn node_range(&self) -> Option<&node_semver::Range> {
        self.engines
            .as_ref()
            .and_then(|engines| engines.node.as_ref())
    }
}
@@ -0,0 +1,165 @@
use std::io::Read;

use indicatif::{ProgressBar, ProgressDrawTarget, ProgressStyle};
use reqwest::blocking::Response;

pub struct ResponseProgress {
    progress: Option<ProgressBar>,
    response: Response,
}

#[derive(Default, Clone, Debug, clap::ValueEnum)]
pub enum ProgressConfig {
    #[default]
    Auto,
    Never,
    Always,
}

impl ProgressConfig {
    pub fn enabled(&self, config: &crate::config::FnmConfig) -> bool {
        match self {
            Self::Never => false,
            Self::Always => true,
            Self::Auto => config
                .log_level()
                .is_writable(&crate::log_level::LogLevel::Info),
        }
    }
}

fn make_progress_bar(size: u64, target: ProgressDrawTarget) -> ProgressBar {
    let bar = ProgressBar::with_draw_target(Some(size), target);

    bar.set_style(
        ProgressStyle::with_template(
            "{elapsed_precise:.white.dim} {wide_bar:.cyan} {bytes}/{total_bytes} ({bytes_per_sec}, {eta})",
        )
        .unwrap()
        .progress_chars("█▉▊▋▌▍▎▏ "),
    );

    bar
}

impl ResponseProgress {
    pub fn new(response: Response, target: ProgressDrawTarget) -> Self {
        Self {
            progress: response
                .content_length()
                .map(|len| make_progress_bar(len, target)),
            response,
        }
    }

    pub fn finish(&self) {
        if let Some(ref bar) = self.progress {
            bar.finish();
        }
    }
}

impl Read for ResponseProgress {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        let size = self.response.read(buf)?;

        if let Some(ref bar) = self.progress {
            bar.inc(size as u64);
        }

        Ok(size)
    }
}

impl Drop for ResponseProgress {
    fn drop(&mut self) {
        self.finish();
        eprintln!();
    }
}

#[cfg(test)]
mod tests {
    use indicatif::{ProgressDrawTarget, TermLike};
    use reqwest::blocking::Response;
    use std::{
        io::Read,
        sync::{Arc, Mutex},
    };

    use super::ResponseProgress;

    const CONTENT_LENGTH: usize = 100;

    #[derive(Debug)]
    struct MockedTerm {
        pub buf: Arc<Mutex<String>>,
    }

    impl TermLike for MockedTerm {
        fn width(&self) -> u16 {
            80
        }

        fn move_cursor_up(&self, _n: usize) -> std::io::Result<()> {
            Ok(())
        }

        fn move_cursor_down(&self, _n: usize) -> std::io::Result<()> {
            Ok(())
        }

        fn move_cursor_right(&self, _n: usize) -> std::io::Result<()> {
            Ok(())
        }

        fn move_cursor_left(&self, _n: usize) -> std::io::Result<()> {
            Ok(())
        }

        fn write_line(&self, s: &str) -> std::io::Result<()> {
            self.buf.lock().unwrap().push_str(s);
            Ok(())
        }

        fn write_str(&self, s: &str) -> std::io::Result<()> {
            self.buf.lock().unwrap().push_str(s);
            Ok(())
        }

        fn clear_line(&self) -> std::io::Result<()> {
            Ok(())
        }

        fn flush(&self) -> std::io::Result<()> {
            Ok(())
        }
    }

    #[test]
    fn test_reads_data_and_shows_progress() {
        let response: Response = http::Response::builder()
            .header("Content-Length", CONTENT_LENGTH)
            .body("a".repeat(CONTENT_LENGTH))
            .unwrap()
            .into();

        let mut buf = [0; CONTENT_LENGTH];

        let out_buf = Arc::new(Mutex::new(String::new()));

        let mut progress = ResponseProgress::new(
            response,
            ProgressDrawTarget::term_like(Box::new(MockedTerm {
                buf: out_buf.clone(),
            })),
        );
        let size = progress.read(&mut buf[..]).unwrap();

        drop(progress);

        assert_eq!(size, CONTENT_LENGTH);
        assert_eq!(buf, "a".repeat(CONTENT_LENGTH).as_bytes());
        assert!(out_buf.lock().unwrap().contains(&"█".repeat(40)));
    }
}
@@ -0,0 +1,21 @@
/// On Bash for Windows, we need to convert the path from a Windows-style
/// path to a Unix-style path. This is because Bash for Windows doesn't
/// understand Windows-style paths. We use `cygpath` to do this conversion.
/// If `cygpath` fails, we assume we're not on Bash for Windows and just
/// return the original path.
pub fn maybe_fix_windows_path(path: &str) -> Option<String> {
    if !cfg!(windows) {
        return None;
    }

    let output = std::process::Command::new("cygpath")
        .arg(path)
        .output()
        .ok()?;
    if output.status.success() {
        let output = String::from_utf8(output.stdout).ok()?;
        Some(output.trim().to_string())
    } else {
        None
    }
}
@@ -0,0 +1,65 @@
// @ts-check

import { createServer } from "node:http"
import path from "node:path"
import fs from "node:fs"
import crypto from "node:crypto"
import fetch from "node-fetch"
import chalk from "chalk"

const baseDir = path.join(process.cwd(), ".proxy")
try {
  fs.mkdirSync(baseDir, { recursive: true })
} catch (e) {}

/** @type {Map<string, Promise<{ headers: Record<string, string>, body: ArrayBuffer }>>} */
const cache = new Map()

export const server = createServer((req, res) => {
  const pathname = req.url ?? "/"
  const hash = crypto
    .createHash("sha1")
    .update(pathname ?? "/")
    .digest("hex")
  const extension = path.extname(pathname)
  const filename = path.join(baseDir, hash) + extension
  const headersFilename = path.join(baseDir, hash) + ".headers.json"
  try {
    const headers = JSON.parse(fs.readFileSync(headersFilename, "utf-8"))
    const body = fs.createReadStream(filename)
    console.log(chalk.green.dim(`[proxy] hit: ${pathname} -> ${filename}`))
    res.writeHead(200, headers)
    body.pipe(res)
  } catch {
    let promise = cache.get(filename)
    if (!promise) {
      console.log(chalk.red.dim(`[proxy] miss: ${pathname} -> ${filename}`))
      promise = fetch(
        "https://nodejs.org/dist/" + pathname.replace(/^\/+/, ""),
        {
          compress: false,
        }
      ).then(async (response) => {
        const headers = Object.fromEntries(response.headers.entries())
        const body = await response.arrayBuffer()
        fs.writeFileSync(headersFilename, JSON.stringify(headers))
        fs.writeFileSync(filename, Buffer.from(body))
        return { headers, body }
      })
      cache.set(filename, promise)
      promise.finally(() => cache.delete(filename))
    }

    promise.then(
      ({ headers, body }) => {
        res.writeHead(200, headers)
        res.end(Buffer.from(body))
      },
      (err) => {
        console.error(err)
        res.writeHead(500)
        res.end()
      }
    )
  }
})