// archieve-projects/微信机器人/node_modules/cmd-ts/dist/cjs/newparser/tokenizer.js
// NOTE(review): the lines below ("38 lines", "1.2 KiB", "JavaScript",
// "Executable File") were file-viewer metadata accidentally pasted into the
// source; preserved here as a comment so the file parses as JavaScript.

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.tokenize = void 0;
const utils_1 = require("../utils");
/**
 * Split a list of argument strings into a flat token stream.
 *
 * Each argument is scanned left to right, emitting `longPrefix` ("--"),
 * `shortPrefix` ("-") and single-`char` tokens; a synthetic space
 * `argumentDivider` token is emitted between consecutive arguments.
 * Every token carries `index`, its offset into the concatenation of all
 * raw token text emitted so far.
 *
 * @param strings arguments, based on `process.argv`
 */
function tokenize(strings) {
    const tokens = [];
    let cursor = 0;
    // Append a token and advance the running offset by its raw width.
    const emit = (token) => {
        tokens.push(token);
        cursor += token.raw.length;
    };
    for (const [argIndex, arg] of utils_1.enumerate(strings)) {
        // Spread is Unicode-safe: one entry per code point, not per UTF-16 unit.
        const chars = [...arg];
        let pos = 0;
        while (pos < chars.length) {
            const current = chars[pos];
            if (current === '-' && chars[pos + 1] === '-') {
                emit({ type: 'longPrefix', raw: '--', index: cursor });
                pos += 2; // consumed both dashes
            }
            else if (current === '-') {
                emit({ type: 'shortPrefix', raw: '-', index: cursor });
                pos += 1;
            }
            else {
                emit({ type: 'char', raw: current, index: cursor });
                pos += 1;
            }
        }
        // No divider after the final argument.
        const isLastArgument = argIndex + 1 === strings.length;
        if (!isLastArgument) {
            emit({ type: 'argumentDivider', raw: ' ', index: cursor });
        }
    }
    return tokens;
}
// CommonJS export of the module's only public binding (declared on the
// `exports.tokenize = void 0;` line above).
exports.tokenize = tokenize;
//# sourceMappingURL=tokenizer.js.map